export async function getMemoryBuilder(spec: {
  type: "buffer" | "summary" | "vector";
  provider?: "openai";
} = { type: "buffer" }, options = {}) {
  const { cond, matches } = await import("npm:lodash-es");
  // Pick a memory factory for the requested type: lodash `cond` runs the handler
  // paired with the first matching predicate (case bodies partly reconstructed).
  const setup = cond([
    [
      matches({ type: "buffer" }),
      async () => {
        const { BufferMemory } = await import("npm:langchain/memory");
        return () => new BufferMemory(options);
      },
    ],
    [
      matches({ type: "summary", provider: "openai" }),
      async () => {
        const { ConversationSummaryMemory } = await import(
          "npm:langchain/memory"
        );
        // Summary memory needs an LLM to condense past turns
        const builder = await getModelBuilder();
        const model = await builder();
        return () => new ConversationSummaryMemory({ llm: model, ...options });
      },
    ],
    [
      matches({ type: "vector", provider: "openai" }),
      async () => {
        const { VectorStoreRetrieverMemory } = await import(
          "npm:langchain/memory"
        );
        // Vector memory retrieves relevant past turns via embeddings
        const builder = await getModelBuilder({
          type: "embedding",
          provider: "openai",
        });
        const model = await builder();
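
// Usage sketch (assumed): the full val finishes the vector branch and returns
// the factory selected by `setup(spec)`, so callers build a memory like this.
const memoryBuilder = await getMemoryBuilder({ type: "summary", provider: "openai" });
const memory = memoryBuilder();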

export const multipleKeysAndMemoryConversationChainExample = (async () => {
  const { ChatOpenAI } = await import(
    "https://esm.sh/langchain/chat_models/openai"
  );
  const { BufferMemory } = await import("https://esm.sh/langchain/memory");
  // Prompt helpers (the destructured names are truncated in this excerpt and
  // assumed here)
  const {
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    MessagesPlaceholder,
    SystemMessagePromptTemplate,
  } = await import("https://esm.sh/langchain/prompts");
  const { ConversationChain } = await import("https://esm.sh/langchain/chains");
  const llm = new ChatOpenAI({
    modelName: "gpt-3.5-turbo",
    openAIApiKey: process.env.openai,
    temperature: 0,
  });
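
  // The excerpt stops after the model setup. A plausible continuation, based
  // on the imports above: a chat prompt with two input keys, with the memory
  // told which key to record. The `{tone}` key and wording are illustrative.
  const chatPrompt = ChatPromptTemplate.fromPromptMessages([
    SystemMessagePromptTemplate.fromTemplate(
      "You are a helpful assistant. Answer in a {tone} tone.",
    ),
    new MessagesPlaceholder("history"),
    HumanMessagePromptTemplate.fromTemplate("{input}"),
  ]);
  const memory = new BufferMemory({
    returnMessages: true,
    memoryKey: "history",
    inputKey: "input", // only this key is written to memory
  });
  const chain = new ConversationChain({ llm, prompt: chatPrompt, memory });
  const res = await chain.call({ input: "Hi, I'm Jim.", tone: "cheerful" });
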
export const runAgent = (async () => {
  const { z } = await import("npm:zod");
  const { ChatOpenAI } = await import("npm:langchain/chat_models/openai");
  const { ChatAnthropic } = await import("npm:langchain/chat_models/anthropic");
  const { DynamicTool, Tool, SerpAPI } = await import("npm:langchain/tools");
  // Other imports are elided in this excerpt; the parser name below is an
  // assumption attached to the surviving `output_parsers` specifier.
  const { StructuredOutputParser } = await import(
    "npm:langchain/output_parsers"
  );
  const model = new ChatOpenAI({
    openAIApiKey: process.env.OPENAI_API_KEY,
    modelName: "gpt-4",
    maxTokens: 2048,
  });
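
  // Sketch of the pieces such an agent typically wires together; the tool
  // name, description, and output schema here are illustrative, not from the
  // original val.
  const tools = [
    new SerpAPI(process.env.SERPAPI_API_KEY), // env var name assumed
    new DynamicTool({
      name: "current-time",
      description: "Returns the current time as an ISO string.",
      func: async () => new Date().toISOString(),
    }),
  ];
  const parser = StructuredOutputParser.fromZodSchema(
    z.object({
      answer: z.string().describe("final answer to the user's question"),
      sources: z.array(z.string()).describe("sources consulted"),
    }),
  );
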
export const textToImageDalle = async (
  openAIToken: string,
  prompt: string,
  n: number = 1,
  size: string = "1024x1024", // size parameter assumed; only the first three survive in this excerpt
) => {
  // `fetchJSON` is assumed to be a fetch-and-parse helper imported above; the response variable and type are reconstructed.
  const response: {
    data: {
      url: string;
    }[];
  } = await fetchJSON(
    "https://api.openai.com/v1/images/generations",
    {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${openAIToken}`,
      },
      body: JSON.stringify({
        // request fields reconstructed from the parameters above
        prompt,
        n,
        size,
      }),
    },
  );
  return response;
};
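
// Usage sketch: generate a single image and read back its hosted URL (the env
// var name and the returned shape are assumptions based on the code above).
const image = await textToImageDalle(process.env.OPENAI_API_KEY, "a watercolor fox", 1);
console.log(image.data[0].url);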

// `gpt3` is assumed to be a completion helper imported above; the default
// prompt "讲一个笑话" means "tell a joke".
export let api = async ({ prompt = "讲一个笑话" }) => {
  return gpt3({
    openAiKey: process.env.openai_key,
    prompt,
  });
};
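
// Usage sketch: override the default prompt with an English one.
const joke = await api({ prompt: "Tell me a joke about databases." });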

// Tail of a similar gpt3 call: the prompt here is assembled from several lines
// joined with newlines.
    "Emphasize a morbid sense of humor.",
  ].join("\n"),
  openAiKey: process.env.openai_key,
});

// Chat-completion helper. The exported name and the `prompt` parameter are
// assumed; this excerpt starts at the `options` parameter.
export const chat = async (
  prompt: string | { role: string; content: string }[],
  options = {},
) => {
  // Initialize the OpenAI client (openai v3 SDK)
  const { Configuration, OpenAIApi } = await import(
    "https://esm.sh/openai@3.3.0"
  );
  const configuration = new Configuration({
    apiKey: process.env.OPENAI,
  });
  const openai = new OpenAIApi(configuration);
  // Normalize a bare string prompt into a single user message
  const messages = typeof prompt === "string"
    ? [{ role: "user", content: prompt }]
    : prompt;
  // Request a chat completion, merging any caller-supplied options (assumed)
  const { data } = await openai.createChatCompletion({
    model: "gpt-3.5-turbo-0613",
    messages,
    ...options,
  });
  return data;
};
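
// Usage sketch for the helper above: the v3 SDK returns the completion under
// `data.choices[*].message`.
const completion = await chat("Summarize Val Town in one sentence.");
console.log(completion.choices[0].message?.content);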

// Build a chat model (OpenAI provider) via the shared getModelBuilder helper.
const builder = await getModelBuilder({
  type: "chat",
  provider: "openai",
});
const model = await builder();
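
// Usage sketch: the builder yields a LangChain chat model, so the usual chat
// model calls apply (exact method names depend on the pinned langchain
// version; `predict` is available in the 0.0.x line this code targets).
const answer = await model.predict("Give me a one-line haiku about APIs.");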

}); // closes the Puppeteer launch/connect options, which are elided in this excerpt
const page = await browser.newPage();
await page.goto("https://en.wikipedia.org/wiki/OpenAI");
// Pull the article's second paragraph as plain text
const intro = await page.evaluate(
  `document.querySelector('p:nth-of-type(2)').innerText`,
);
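
// Likely continuation (assumed): release the browser and hand back the text.
await browser.close();
console.log(intro);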