import { Tokenizer } from "https://esm.town/v/zzz/Tokenizer";

// Demo of the tokenizer, mimicking the behavior of https://platform.openai.com/tokenizer
// Tokenizer uses the "gpt-3.5-turbo" model by default, but this demo uses davinci to match the playground
export const TokenizerDemo = (async () => {
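
// The demo body is cut off above, and the zzz/Tokenizer API isn't shown in this
// excerpt. As a rough sketch of the same idea (counting tokens the way the
// OpenAI tokenizer playground does), here is an equivalent using js-tiktoken;
// the library choice and encoding name are assumptions, not what the demo uses.
import { getEncoding } from "https://esm.sh/js-tiktoken@1";

export const tokenCountSketch = (async () => {
  // cl100k_base is the encoding behind gpt-3.5-turbo; the legacy davinci
  // models shown in the playground use p50k_base / r50k_base instead.
  const enc = getEncoding("cl100k_base");
  const text = "Many words map to one token, but some don't: indivisible.";
  const tokens = enc.encode(text);
  console.log({ characters: text.length, tokens: tokens.length });
  return tokens.length;
})();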
export let weatherTomorrowGpt3Example = weatherTomorrowGpt3({
  city: "New York City",
  openAiKey: process.env.openai_key,
});

/**
 * Calls the OpenAI moderation model. Useful for determining if OpenAI will flag something you did.
 * https://platform.openai.com/docs/api-reference/moderations
 */
export let openAiModeration = async ({
  apiKey,
  input,
  // ...
}) => {
  if (!apiKey) {
    throw new Error("You must provide an OpenAI API Key");
  }
  const body: { model?: string, input: string | string[] } = {
    // ...
  }
  const result = await fetchJSON(
    "https://api.openai.com/v1/moderations",
    {
      method: "POST",
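
// For reference, a minimal self-contained sketch of the same moderation call
// using plain fetch (the val above relies on a fetchJSON helper whose import is
// elided); error handling and the returned shape are assumptions based on the
// public API docs rather than the elided lines.
export async function moderateSketch(apiKey: string, input: string | string[]) {
  if (!apiKey) throw new Error("You must provide an OpenAI API Key");
  const res = await fetch("https://api.openai.com/v1/moderations", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({ input }),
  });
  if (!res.ok) throw new Error(`Moderation request failed: ${res.status}`);
  const json = await res.json();
  // Each entry in `results` carries a boolean `flagged` plus per-category scores.
  return json.results[0];
}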
import { simpleWeather } from "https://esm.town/v/patrickjm/simpleWeather";

export let weatherTomorrowGpt3 = (params: { openAiKey: string, city: string }) =>
  simpleWeather(params.city).then((weather) =>
    gpt3({
      openAiKey: params.openAiKey,
      prompt: `
        Given a JSON sequence, give a short, plain-English summary about the weather tomorrow.
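
// The gpt3 helper used above isn't shown in this excerpt. A sketch of the same
// pattern (fetch structured data, then ask a model for a plain-English summary)
// using the chat completions REST endpoint directly; the endpoint, model, and
// response shape come from the public API, not from the gpt3 val.
export async function summarizeWeatherSketch(openAiKey: string, weather: unknown) {
  const res = await fetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${openAiKey}`,
    },
    body: JSON.stringify({
      model: "gpt-3.5-turbo",
      messages: [{
        role: "user",
        content:
          `Given a JSON sequence, give a short, plain-English summary about the weather tomorrow.\n\n${
            JSON.stringify(weather)
          }`,
      }],
    }),
  });
  const json = await res.json();
  return json.choices[0].message.content;
}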
  options = {},
) => {
  // Initialize the OpenAI API client
  const { Configuration, OpenAIApi } = await import("https://esm.sh/openai");
  const configuration = new Configuration({
    apiKey: process.env.openAIAPI,
  });
  const openai = new OpenAIApi(configuration);
  // Request a chat completion
  const messages = typeof prompt === "string"
    ? [{ role: "user", content: prompt }]
    : prompt;
  const { data } = await openai.createChatCompletion({
    model: "gpt-3.5-turbo-0613",
    messages,
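
// The helper above is cut off inside createChatCompletion. A self-contained
// sketch of the same pattern with the legacy openai v3 SDK; the option spread,
// pinned package version, and return value are assumptions about the elided code.
export async function chatSketch(
  prompt: string | { role: "user" | "system" | "assistant"; content: string }[],
  options: { temperature?: number; max_tokens?: number } = {},
) {
  const { Configuration, OpenAIApi } = await import("https://esm.sh/openai@3.3.0");
  const openai = new OpenAIApi(new Configuration({ apiKey: process.env.openAIAPI }));
  // Accept either a bare string or a full message array.
  const messages = typeof prompt === "string"
    ? [{ role: "user" as const, content: prompt }]
    : prompt;
  const { data } = await openai.createChatCompletion({
    model: "gpt-3.5-turbo-0613",
    messages,
    ...options, // e.g. temperature, max_tokens
  });
  // The v3 SDK wraps the raw API payload in `data`.
  return data.choices[0].message;
}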

export const textToImageDalle = async (
  openAIToken: string,
  prompt: string,
  n: number = 1,
  // ...
  };
  } = await fetchJSON(
    "https://api.openai.com/v1/images/generations",
    {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${openAIToken}`,
      },
      body: JSON.stringify({
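
// A minimal self-contained version of the same images/generations request using
// plain fetch; the image size and the url-based response handling are assumptions
// based on the public API docs, not on the elided lines of the val above.
export async function textToImageSketch(openAIToken: string, prompt: string, n = 1) {
  const res = await fetch("https://api.openai.com/v1/images/generations", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "Authorization": `Bearer ${openAIToken}`,
    },
    body: JSON.stringify({ prompt, n, size: "512x512" }),
  });
  const json = await res.json();
  // The API returns one { url } (or { b64_json }) entry per generated image.
  return json.data.map((d: { url: string }) => d.url);
}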

export const untitled_chocolateSquid = (async () => {
  const { ChatOpenAI } = await import(
    "https://esm.sh/langchain@0.0.146/chat_models/openai"
  );
  const { LLMChain } = await import("https://esm.sh/langchain@0.0.146/chains");
  // ...
    ["human", humanTemplate],
  ]);
  const chat = new ChatOpenAI({
    temperature: 0,
    openAIApiKey: process.env.OPENAI_API_KEY,
  });
  const chain = new LLMChain({
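
// The prompt templates and the LLMChain arguments are elided above. A sketch of
// how this LangChain example typically completes, modeled on the langchain 0.0.x
// quickstart; the template wording, the prompts import path, and the chain.call
// arguments are assumptions rather than the elided code.
export const translationChainSketch = (async () => {
  const { ChatOpenAI } = await import(
    "https://esm.sh/langchain@0.0.146/chat_models/openai"
  );
  const { LLMChain } = await import("https://esm.sh/langchain@0.0.146/chains");
  const { ChatPromptTemplate } = await import(
    "https://esm.sh/langchain@0.0.146/prompts"
  );
  const chatPrompt = ChatPromptTemplate.fromMessages([
    ["system", "You are a helpful assistant that translates {input_language} to {output_language}."],
    ["human", "{text}"],
  ]);
  const chain = new LLMChain({
    prompt: chatPrompt,
    llm: new ChatOpenAI({ temperature: 0, openAIApiKey: process.env.OPENAI_API_KEY }),
  });
  // LLMChain.call resolves to an object like { text: "..." }.
  const result = await chain.call({
    input_language: "English",
    output_language: "French",
    text: "I love programming.",
  });
  return result.text;
})();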
  };
  const getCompletion = async () => {
    const response = await fetch("https://api.openai.com/v1/chat/completions", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${process.env.openaiKey}`,
      },
      body: JSON.stringify(postData),
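
// A self-contained sketch of the same fetch-based completion, with a plausible
// postData shape and response handling; the actual postData fields above are
// elided, so the model and parameters here are assumptions.
export async function getCompletionSketch(prompt: string) {
  const postData = {
    model: "gpt-3.5-turbo",
    messages: [{ role: "user", content: prompt }],
    temperature: 0.7,
  };
  const response = await fetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${process.env.openaiKey}`,
    },
    body: JSON.stringify(postData),
  });
  if (!response.ok) throw new Error(`OpenAI request failed: ${response.status}`);
  const json = await response.json();
  return json.choices[0].message.content;
}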

export const rateArticleRelevance = async (interests: string, article: any) => {
  const { default: OpenAI } = await import("npm:openai");
  const openai = new OpenAI({
    apiKey: untitled_tealCoral.OPENAI_API_KEY,
  });
  try {
    // ...
      Give a score from 0 to 10. Why did you give this score? Respond with the score only.
  `;
  const response = await openai.chat.completions.create({
    messages: [
      {
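
// The completion call above is cut off mid-messages. A sketch of how such a
// relevance-scoring call usually finishes with the openai v4 client; the model
// choice, prompt wording, and score parsing are assumptions.
export async function scoreRelevanceSketch(
  apiKey: string,
  interests: string,
  article: { title: string; summary?: string },
) {
  const { default: OpenAI } = await import("npm:openai");
  const openai = new OpenAI({ apiKey });
  const prompt = `
    Interests: ${interests}
    Article: ${article.title}. ${article.summary ?? ""}
    Give a score from 0 to 10 for how relevant this article is to the interests.
    Respond with the score only.
  `;
  const response = await openai.chat.completions.create({
    messages: [{ role: "user", content: prompt }],
    model: "gpt-3.5-turbo",
  });
  // Expect a bare number in the reply; return null if it can't be parsed.
  const score = Number(response.choices[0].message.content?.trim());
  return Number.isNaN(score) ? null : score;
}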
17 "worrydream", // Bret Victor (dynamicland)
18 "stevewoz", // Steve Wozniak (apple)
19 "sama", // Sam Altman (openai, yc)
20 "geoff", // Geoff Ralston (yc)
21 "dang", // Dan G (hn)