// Summarize tomorrow's weather for a city in plain English with GPT-3.
// (The gpt3 import and the tail of the prompt are reconstructions; they are not shown in the source excerpt.)
import { gpt3 } from "https://esm.town/v/patrickjm/gpt3";
import { simpleWeather } from "https://esm.town/v/patrickjm/simpleWeather";

export let weatherTomorrowGpt3 = (params: { openAiKey: string, city: string }) =>
  simpleWeather(params.city).then((weather) =>
    gpt3({
      openAiKey: params.openAiKey,
      prompt: `
        Given a JSON sequence, give a short, plain-English summary about the weather tomorrow.
        ${JSON.stringify(weather)}
      `,
    }));
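
// Illustrative call (not part of the val above; the key and city are placeholders):
const weatherSummary = await weatherTomorrowGpt3({
  openAiKey: process.env.OPENAI_API_KEY ?? "",
  city: "Brooklyn",
});
console.log(weatherSummary);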

// A general-purpose chat-completion helper. The export name, the `prompt` parameter,
// and the final return are not visible in the source excerpt and are reconstructed here.
export const chat = async (
  prompt: string | object = "Hello",
  options = {},
) => {
  // Initialize OpenAI API stub
  const { Configuration, OpenAIApi } = await import("https://esm.sh/openai");
  const configuration = new Configuration({
    apiKey: process.env.openAIAPI,
  });
  const openai = new OpenAIApi(configuration);
  // Accept either a plain prompt string or a pre-built message array
  const messages = typeof prompt === "string"
    ? [{ role: "user", content: prompt }]
    : prompt;
  // Request chat completion
  const { data } = await openai.createChatCompletion({
    model: "gpt-3.5-turbo-0613",
    messages,
    ...options,
  });
  return data;
};

// Generate images from a text prompt with the OpenAI image-generations (DALL-E) endpoint.
// (The fetchJSON import, the end of the parameter list, and the response type are elided
// in the source; the versions below are a plausible reconstruction.)
import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON";

export const textToImageDalle = async (
  openAIToken: string,
  prompt: string,
  n: number = 1,
) => {
  const response: {
    data?: { url: string }[];
    error?: { message: string };
  } = await fetchJSON(
    "https://api.openai.com/v1/images/generations",
    {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${openAIToken}`,
      },
      body: JSON.stringify({
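        // The rest of the request body is not shown in the source; the endpoint
        // expects at least a prompt, plus optional fields such as n and size.
        prompt,
        n,
      }),
    },
  );
  return response;
};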

// Run a LangChain LLMChain over a chat prompt (system + human messages) with ChatOpenAI.
// (The prompt-template import and the template strings are elided in the source; the
// placeholder lines below only sketch their shape.)
export const untitled_chocolateSquid = (async () => {
  const { ChatOpenAI } = await import(
    "https://esm.sh/langchain@0.0.146/chat_models/openai"
  );
  const { LLMChain } = await import("https://esm.sh/langchain@0.0.146/chains");
  const { ChatPromptTemplate } = await import(
    "https://esm.sh/langchain@0.0.146/prompts"
  );
  const systemTemplate = "..."; // placeholder: original system prompt not shown
  const humanTemplate = "{text}"; // placeholder: original human template not shown
  const chatPrompt = ChatPromptTemplate.fromMessages([
    ["system", systemTemplate],
    ["human", humanTemplate],
  ]);
  const chat = new ChatOpenAI({
    temperature: 0,
    openAIApiKey: process.env.OPENAI_API_KEY,
  });
  const chain = new LLMChain({
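    // Wiring reconstructed from the variables above; the original call and output
    // handling are not shown, so this is only a sketch.
    llm: chat,
    prompt: chatPrompt,
  });
  const result = await chain.call({ text: "Hello!" });
  return result;
})();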

// Call the OpenAI chat-completions endpoint directly with fetch.
// (The top of this val is elided in the source; `postData` is the JSON request body,
// roughly { model, messages }, assembled in the missing lines that end here.)
  };
  const getCompletion = async () => {
    const response = await fetch("https://api.openai.com/v1/chat/completions", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: `Bearer ${process.env.openaiKey}`,
      },
      body: JSON.stringify(postData),
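    });
    // Parse and return the completion (a sketch; the original response handling is not shown)
    return response.json();
  };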

// Score an article's relevance to a reader's interests (0-10) with the OpenAI npm client.
// (Prompt construction and the request tail are partly elided; ellipses and assumptions are marked below.)
export const rateArticleRelevance = async (interests: string, article: any) => {
  const { default: OpenAI } = await import("npm:openai");
  const openai = new OpenAI({
    apiKey: untitled_tealCoral.OPENAI_API_KEY, // key stored in a separate config val
  });
  try {
    const prompt = `
      ...
      Give a score from 0 to 10. Why did you give this score? Respond with the score only.
    `;
    const response = await openai.chat.completions.create({
      messages: [
        { role: "user", content: prompt },
      ],
      model: "gpt-3.5-turbo", // assumption: the model is not visible in the excerpt
    });
    return response.choices[0].message.content;
  } catch (error) {
    console.error(error);
  }
};
17 "worrydream", // Bret Victor (dynamicland)
18 "stevewoz", // Steve Wozniak (apple)
19 "sama", // Sam Altman (openai, yc)
20 "geoff", // Geoff Ralston (yc)
21 "dang", // Dan G (hn)

// A minimal LangChain example: an OpenAI LLM configured for use in an LLMChain.
export const langchainEx = (async () => {
  const { OpenAI } = await import("https://esm.sh/langchain/llms/openai");
  const { PromptTemplate } = await import("https://esm.sh/langchain/prompts");
  const { LLMChain } = await import("https://esm.sh/langchain/chains");
  const model = new OpenAI({
    temperature: 0.9,
    openAIApiKey: process.env.openai,
    maxTokens: 100,
  });
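  // A sketch of how the imports above are typically combined
  // (the template text and input values are illustrative, not from the original):
  const prompt = new PromptTemplate({
    template: "What is a good name for a company that makes {product}?",
    inputVariables: ["product"],
  });
  const chain = new LLMChain({ llm: model, prompt });
  const res = await chain.call({ product: "colorful socks" });
  return res;
})();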

// Conversational retrieval QA with summary memory, built from LangChain pieces.
// (Several import and setup lines are elided in the source; the reconstructed lines
// below follow the standard LangChain API, and unknown content is marked.)
export const conversationalRetrievalQAChainSummaryMemory = (async () => {
  const { ChatOpenAI } = await import(
    "https://esm.sh/langchain/chat_models/openai"
  );
  const { OpenAIEmbeddings } = await import(
    "https://esm.sh/langchain/embeddings/openai"
  );
  const { ConversationSummaryMemory } = await import(
    "https://esm.sh/langchain/memory"
  );
  const { MemoryVectorStore } = await import(
    "https://esm.sh/langchain/vectorstores/memory"
  );
  const { ConversationalRetrievalQAChain } = await import(
    "https://esm.sh/langchain/chains"
  );
  const chatModel = new ChatOpenAI({
    openAIApiKey: process.env.OPENAI_API_KEY,
  });
  /* Create the vectorstore */
  const vectorStore = await MemoryVectorStore.fromTexts(
    [
      // ... (the seed texts are elided in the source)
    ],
    [{ id: 2 }, { id: 1 }, { id: 3 }],
    new OpenAIEmbeddings({
      openAIApiKey: process.env.OPENAI_API_KEY,
    }),
  );
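  // A sketch of how the remaining pieces are typically wired together (the rest of the
  // original val is not shown; option names follow the LangChain API):
  const memory = new ConversationSummaryMemory({
    llm: chatModel,
    memoryKey: "chat_history",
    returnMessages: true,
  });
  const chain = ConversationalRetrievalQAChain.fromLLM(
    chatModel,
    vectorStore.asRetriever(),
    { memory },
  );
  const res = await chain.call({ question: "What did we just talk about?" });
  return res;
})();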

// Create a summary from a given text using GPT-4, behind a rate limiter.
// (Elided in the source: the imports of the `upstashRateLimiter` and `AIAgent` helper vals
// and most of the config/rate-limit handling; reconstructed lines are marked.)
export const OpenAISummary = async (text: string, config: {
  apiKey?: string;
  jwt?: string;
  modelName?: string; // reconstructed: used below
}) => {
  const { apiKey, modelName } = config;
  // Rate-limit anonymous callers to 2 requests per minute
  const { success, reset } = await upstashRateLimiter(
    "anon",
    "@zzz.OpenAISummary",
    2,
    "60 s",
  );
  // ... (rate-limit failure handling and auth checks elided)
  const agent = await AIAgent(
    apiKey || process.env.OPENAI_API_KEY_GPT4,
  );
  const response = await agent.summarize(text, modelName);
  return response;
};
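
// Illustrative call (not part of the original val; argument values are placeholders):
const articleSummary = await OpenAISummary(
  "Long text to summarize goes here...",
  { apiKey: process.env.OPENAI_API_KEY },
);
console.log(articleSummary);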