import process from "node:process";
import OpenAI from "npm:openai";

const openai = new OpenAI({ apiKey: process.env.openai });

async function main() {
  const response = await openai.chat.completions.create({
    model: "gpt-4-vision-preview",
    messages: [
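      // The original snippet is truncated here; the message below is a hedged
      // placeholder (the prompt text and image URL are not from the original val).
      {
        role: "user",
        content: [
          { type: "text", text: "What is in this image?" },
          { type: "image_url", image_url: { url: "https://example.com/photo.jpg" } },
        ],
      },
    ],
    max_tokens: 300,
  });
  console.log(response.choices[0].message.content);
}

main();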

import process from "node:process";
import { OpenAI } from "npm:openai";

const openai = new OpenAI({ apiKey: process.env.openai });
let chatCompletion = await openai.chat.completions.create({
  messages: [{ role: "user", content: "Make a short joke or pun" }],
  model: "gpt-3.5-turbo",
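});
// Hedged completion (the closing line above and the log below are not in the original
// snippet): print the model's reply.
console.log(chatCompletion.choices[0].message.content);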

export const rateArticleRelevance = async (interests: string, article: any) => {
  const { default: OpenAI } = await import("npm:openai");
  const openai = new OpenAI({ apiKey: process.env.OPENAI_KEY });

  try {
    // ... (the opening of the prompt template, which presumably interpolates the
    // reader's interests and the article, is elided in the original snippet)
    Give a score from 0 to 10. Why did you give this score? Respond with the score only.
    `;
    const response = await openai.chat.completions.create({
      messages: [
        {
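          // Hedged continuation (not in the original snippet): the field names, model,
          // and score handling below are assumptions; `prompt` stands for the variable
          // the elided template literal above is assigned to.
          role: "user",
          content: prompt,
        },
      ],
      model: "gpt-3.5-turbo",
    });
    const score = Number(response.choices[0].message.content?.trim());
    return Number.isNaN(score) ? 0 : score;
  } catch (error) {
    console.error(error);
    return 0;
  }
};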

import process from "node:process";
import { ChatOpenAI } from "npm:langchain/chat_models/openai";

const model = new ChatOpenAI({
  temperature: 0.9,
  openAIApiKey: process.env.openai,
});

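// Hedged usage sketch (not part of the original val): the model above is never invoked
// in the snippet, so the call below is illustrative. It assumes a langchain version that
// exports HumanMessage from "langchain/schema".
import { HumanMessage } from "npm:langchain/schema";

const reply = await model.call([new HumanMessage("Write a haiku about the sea.")]);
console.log(reply.content);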
17 "Content-Type": "application/json",
18 // Update your token in https://val.town/settings/secrets
19 Authorization: `Bearer ${token || process.env.openaiKey}`,
20 };
21
22 const getCompelitoins = async (data) => {
23 const response = await fetch("https://api.openai.com/v1/completions", {
24 method: "POST",
25 headers: {
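      // Hedged continuation (not in the original snippet): finish the request, assuming
      // the shared headers object above is named `headers`, and return the parsed JSON.
      ...headers,
    },
    body: JSON.stringify(data),
  });
  return response.json();
};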
8 "Rewrite this fact about cats as if it was written for 3 year old:\n\n" +
9 fact;
10 const story = await fetch("https://api.openai.com/v1/chat/completions", {
11 method: "POST",
12 body: JSON.stringify({
16 }),
17 headers: {
18 "Authorization": `Bearer ${process.env.OPENAI}`,
19 "Content-Type": "application/json",
20 },
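  // Hedged continuation (not in the original snippet): close the request and read the
  // rewritten fact from the response.
  });
  const json = await story.json();
  console.log(json.choices[0].message.content);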

// (The first two lines of this val are not shown; getModelBuilder, used below, is
// defined elsewhere.)
export async function getMemoryBuilder(spec: {
  type: "buffer" | "summary" | "vector";
  provider?: "openai";
} = { type: "buffer" }, options = {}) {
  const { cond, matches } = await import("npm:lodash-es");
  // ... (the start of the cond() dispatch table, including the "buffer" branch, is
  // elided in the original snippet)
    ],
    [
      matches({ type: "summary", provider: "openai" }),
      async () => {
        const { ConversationSummaryMemory } = await import(
        // ... (the import source and the rest of this branch are elided)
    ],
    [
      matches({ type: "vector", provider: "openai" }),
      async () => {
        const { VectorStoreRetrieverMemory } = await import(
        // ... (the import source and some intervening lines are elided)
        const builder = await getModelBuilder({
          type: "embedding",
          provider: "openai",
        });
        const model = await builder();
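        // ... (the rest of this branch and the close of getMemoryBuilder are elided in
        // the original snippet)

// Hedged usage sketch (not part of the original val): assuming getMemoryBuilder returns
// the builder function selected by cond(), it might be used like this:
const buildMemory = await getMemoryBuilder({ type: "summary", provider: "openai" });
const memory = await buildMemory();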

export const multipleKeysAndMemoryConversationChainExample = (async () => {
  const { ChatOpenAI } = await import(
    "https://esm.sh/langchain/chat_models/openai"
  );
  const { BufferMemory } = await import("https://esm.sh/langchain/memory");
  const {
    // ... (the destructured prompt helpers are elided in the original snippet)
  } = await import("https://esm.sh/langchain/prompts");
  const { ConversationChain } = await import("https://esm.sh/langchain/chains");
  const llm = new ChatOpenAI({
    modelName: "gpt-3.5-turbo",
    openAIApiKey: process.env.openai,
    temperature: 0,
  });
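  // Hedged continuation (not in the original snippet): wire the model and a buffer
  // memory into a ConversationChain and run a single exchange.
  const memory = new BufferMemory();
  const chain = new ConversationChain({ llm, memory });
  const result = await chain.call({ input: "Hi, I'm Jim! Where should we sail first?" });
  console.log(result);
})();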

export const runAgent = (async () => {
  const { z } = await import("npm:zod");
  const { ChatOpenAI } = await import("npm:langchain/chat_models/openai");
  const { ChatAnthropic } = await import("npm:langchain/chat_models/anthropic");
  const { DynamicTool, Tool, SerpAPI } = await import("npm:langchain/tools");
  // ... (several more imports are elided in the original snippet; the last of them pulls
  // output-parser helpers, whose names are not shown, from the module below)
  const { /* destructured names elided */ } = await import(
    "npm:langchain/output_parsers"
  );
  const model = new ChatOpenAI({
    openAIApiKey: process.env.OPENAI_API_KEY,
    modelName: "gpt-4",
    maxTokens: 2048,
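  // Hedged continuation (not in the original snippet): close the model config.
  });
  // ... (the tool definitions and the agent run are elided in the original snippet)
})();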

// (The first line of this val is not shown; fetchJSON, used below, is defined elsewhere.)
export const textToImageDalle = async (
  openAIToken: string,
  prompt: string,
  n: number = 1,
  // ... (the remaining parameters, the close of the parameter list, and the start of the
  // typed response declaration are elided in the original snippet)
  }[];
} = await fetchJSON(
  "https://api.openai.com/v1/images/generations",
  {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "Authorization": `Bearer ${openAIToken}`,
    },
    body: JSON.stringify({
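      // Hedged continuation (not in the original snippet): the body fields and image
      // size below are assumptions based on the OpenAI images API.
      prompt,
      n,
      size: "1024x1024",
    }),
  },
);
// (The rest of the val, which presumably returns the generated image URLs, is not shown.)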