181 const { messages, location } = await request.json();
182
183 const { OpenAI } = await import("https://esm.sh/openai");
184 const client = new OpenAI({
185 apiKey: Deno.env.get("CEREBRAS_API_KEY"),
186 baseURL: "https://api.cerebras.ai/v1",
309
310 // Cerebras LLM for response generation
311 const { OpenAI } = await import("https://esm.sh/openai");
312 const llmClient = new OpenAI({
313 apiKey: Deno.env.get("CEREBRAS_API_KEY"),
314 baseURL: "https://api.cerebras.ai/v1",
35Use Cerebras for AI on the backend like so:
36
37const { OpenAI } = await import("https://esm.sh/openai");
38const client = new OpenAI({
39 apiKey: Deno.env.get("CEREBRAS_API_KEY"),
40 baseURL: "https://api.cerebras.ai/v1"
15import { generateOpenGraphTags, OpenGraphData } from "https://esm.town/v/dthyresson/generateOpenGraphTags"
16import { ValTownLink } from "https://esm.town/v/dthyresson/viewOnValTownComponent"
17import { chat } from "https://esm.town/v/stevekrouse/openai"
18import * as fal from "npm:@fal-ai/serverless-client"
19
13* and activity (befriends aliens, goes to the doctor, rides a rollercoaster, bakes a cake for friends)
14
15It uses OpenAI to write a children's bedtime story
16
17* title
21for a "fantastical story about a green whale who rides the bus" or the "spooky story about the tomato fox who explores a cave".
22
23Then using the summary, OpenAI generates another prompt to describe the instructions to generate a children's story book image.
24
25That's sent to Fal to generate an image.
35Use Cerebras for AI on the backend like so:
36
37const { OpenAI } = await import("https://esm.sh/openai");
38const client = new OpenAI({
39 apiKey: Deno.env.get("CEREBRAS_API_KEY"),
40 baseURL: "https://api.cerebras.ai/v1"
83 const { messages } = await request.json();
84
85 const { OpenAI } = await import("https://esm.sh/openai");
86 // Cerebras Inference
87 // Very important: do not change this
88 // Keep these comments so we remember not to change this
89 const client = new OpenAI({
90 apiKey: Deno.env.get("CEREBRAS_API_KEY"),
91 baseURL: "https://api.cerebras.ai/v1",
210 const stackOverflowResults = await searchStackOverflow(lastUserMessage);
211
212 const { OpenAI } = await import("https://esm.sh/openai");
213 const client = new OpenAI({
214 apiKey: Deno.env.get("CEREBRAS_API_KEY"),
215 baseURL: "https://api.cerebras.ai/v1",
35Use Cerebras for AI on the backend like so:
36
37const { OpenAI } = await import("https://esm.sh/openai");
38const client = new OpenAI({
39 apiKey: Deno.env.get("CEREBRAS_API_KEY"),
40 baseURL: "https://api.cerebras.ai/v1"
230 const { prompt, mode } = await c.req.json();
231
232 const { OpenAI } = await import("https://esm.town/v/std/openai");
233 const openai = new OpenAI();
234
235 if (mode === "image") {
236 const response = await openai.images.generate({
237 model: "dall-e-3",
238 prompt: prompt,
244 return c.json({ imageUrl });
245 } else {
246 const response = await openai.chat.completions.create({
247 messages: [{ role: "user", content: prompt }],
248 model: "gpt-4o-mini",