Val Town Code Search

API Access

You can access search results via the JSON API by adding format=json to your query:

https://codesearch.val.run/?q=openai&page=75&format=json
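As a rough sketch, you can fetch that URL from Deno or a val like this (the exact shape of the JSON body is not documented above, so it is left untyped here):

const res = await fetch("https://codesearch.val.run/?q=openai&page=75&format=json");
if (!res.ok) throw new Error(`Search request failed: ${res.status}`);
const results = await res.json(); // inspect the shape before relying on specific fields
console.log(results);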

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

This returns an array of strings in the format "username" or "username/projectName".
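A minimal sketch of calling the typeahead endpoint, assuming only what is described above about its response:

const res = await fetch("https://codesearch.val.run/typeahead?q=openai");
const suggestions: string[] = await res.json(); // strings like "username" or "username/projectName"
console.log(suggestions);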

Found 1616 results for "openai" (643ms)

cerebrasTemplate/README.md (2 matches)

@daisuke · Updated 2 months ago
35  Use Cerebras for AI on the backend like so:
36
37  const { OpenAI } = await import("https://esm.sh/openai");
38  const client = new OpenAI({
39    apiKey: Deno.env.get("CEREBRAS_API_KEY"),
40    baseURL: "https://api.cerebras.ai/v1"
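The excerpt cuts off before the actual completion request. A hedged sketch of what a complete call could look like, following the same pattern; the model name below is an assumption for illustration, not taken from the template:

const { OpenAI } = await import("https://esm.sh/openai");
const client = new OpenAI({
  apiKey: Deno.env.get("CEREBRAS_API_KEY"),
  baseURL: "https://api.cerebras.ai/v1",
});

const completion = await client.chat.completions.create({
  model: "llama3.1-8b", // assumed model name; check the Cerebras docs for what is available
  messages: [{ role: "user", content: "Say hello" }],
});
console.log(completion.choices[0].message.content);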

cerebrasTemplate/main.tsx (2 matches)

@daisuke · Updated 2 months ago
83    const { messages } = await request.json();
84
85    const { OpenAI } = await import("https://esm.sh/openai");
86    // Cerebras Inference
87    // Very important: do not change this
88    // Keep these comments so we remember not to change this
89    const client = new OpenAI({
90      apiKey: Deno.env.get("CEREBRAS_API_KEY"),
91      baseURL: "https://api.cerebras.ai/v1",

AlwaysHere/main.tsx (2 matches)

@AIWB · Updated 2 months ago
309
310   // Cerebras LLM for response generation
311   const { OpenAI } = await import("https://esm.sh/openai");
312   const llmClient = new OpenAI({
313     apiKey: Deno.env.get("FIREWORKS_API"),
314     baseURL: "https://api.fireworks.ai/inference/v1",

AlwaysHere/README.md (2 matches)

@AIWB · Updated 2 months ago
35  Use Cerebras for AI on the backend like so:
36
37  const { OpenAI } = await import("https://esm.sh/openai");
38  const client = new OpenAI({
39    apiKey: Deno.env.get("CEREBRAS_API_KEY"),
40    baseURL: "https://api.cerebras.ai/v1"

InstantQueryApp (2 matches)

@vawogbemi · Updated 2 months ago
8
9   if (req.method == "POST") {
10    const { OpenAI } = await import("https://esm.town/v/std/openai");
11    const { text } = await req.json();
12    const client = new OpenAI();
13
14    const schema = await getInstantSchema(Deno.env.get("INSTANT_APP_ID"), Deno.env.get("INSTANT_ADMIN_TOKEN"));

translateToEnglishWithOpenAI/main.tsx (5 matches)

@Hadarhubara · Updated 3 months ago
1   import { OpenAI } from "https://esm.town/v/std/openai";
2
3   export default async function translateToEnglishWithOpenAI(text: string) {
4     const openai = new OpenAI();
5     const completion = await openai.chat.completions.create({
6       messages: [
7         {
32  }
33
34  console.log(await translateToEnglishWithOpenAI("שלום"));
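The body of the function (original lines 7–31) is elided by the excerpt, and the test string "שלום" is Hebrew for "hello". A hedged sketch of how such a function could be written against the same std/openai client; the prompt wording and model are assumptions, not the author's code:

import { OpenAI } from "https://esm.town/v/std/openai";

export default async function translateToEnglishWithOpenAI(text: string) {
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini", // assumed; the model used in the original is not visible
    messages: [
      { role: "system", content: "Translate the user's message into English." },
      { role: "user", content: text },
    ],
  });
  return completion.choices[0].message.content;
}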

boldWhiteDinosaur/artisticScarletScorpion (3 matches)

@Vicodin · Updated 3 months ago
224
225  export default async function server(request: Request): Promise<Response> {
226    const { OpenAI } = await import("https://esm.town/v/std/openai");
227    const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
228
269    `, [JSON.stringify(messages)]);
270
271    const openai = new OpenAI();
272    const systemPrompt = "Ты дружелюбный и умный ассистент. Отвечай четко и по существу.";
273
277    ];
278
279    const stream = await openai.chat.completions.create({
280      model: "gpt-4o-mini",
281      messages: modifiedMessages,
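The system prompt in this excerpt is Russian for "You are a friendly and smart assistant. Answer clearly and to the point." The excerpt streams a gpt-4o-mini completion; a hedged sketch of that streaming pattern as a standalone HTTP val follows, with a placeholder conversation standing in for the sqlite-backed messages and the response framing assumed rather than copied:

import { OpenAI } from "https://esm.town/v/std/openai";

export default async function server(request: Request): Promise<Response> {
  const openai = new OpenAI();

  // Placeholder conversation; the original loads messages from sqlite.
  const systemPrompt = "You are a friendly and smart assistant. Answer clearly and to the point.";
  const modifiedMessages = [
    { role: "system" as const, content: systemPrompt },
    { role: "user" as const, content: "Hello!" },
  ];

  const stream = await openai.chat.completions.create({
    model: "gpt-4o-mini",
    messages: modifiedMessages,
    stream: true,
  });

  // Forward streamed tokens to the client as plain-text chunks.
  const body = new ReadableStream({
    async start(controller) {
      for await (const chunk of stream) {
        const delta = chunk.choices[0]?.delta?.content ?? "";
        controller.enqueue(new TextEncoder().encode(delta));
      }
      controller.close();
    },
  });

  return new Response(body, { headers: { "Content-Type": "text/plain; charset=utf-8" } });
}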

Clone_CHATGPT/bubblyGreenBat (3 matches)

@Custodia · Updated 3 months ago
168
169  export default async function server(request: Request): Promise<Response> {
170    const { OpenAI } = await import("https://esm.town/v/std/openai");
171    const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
172
213    `, [JSON.stringify(messages)]);
214
215    const openai = new OpenAI();
216    const systemPrompt = "Ты дружелюбный и умный ассистент. Отвечай четко и по существу.";
217
221    ];
222
223    const stream = await openai.chat.completions.create({
224      model: "gpt-4o-mini",
225      messages: modifiedMessages,

Cloneklon/main.tsx (3 matches)

@Custodia · Updated 3 months ago
224
225  export default async function server(request: Request): Promise<Response> {
226    const { OpenAI } = await import("https://esm.town/v/std/openai");
227    const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
228
269    `, [JSON.stringify(messages)]);
270
271    const openai = new OpenAI();
272    const systemPrompt = "Ты дружелюбный и умный ассистент. Отвечай четко и по существу.";
273
277    ];
278
279    const stream = await openai.chat.completions.create({
280      model: "gpt-4o-mini",
281      messages: modifiedMessages,

enchantingScarletChickadee/main.tsx (3 matches)

@Custodia · Updated 3 months ago
224
225  export default async function server(request: Request): Promise<Response> {
226    const { OpenAI } = await import("https://esm.town/v/std/openai");
227    const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
228
269    `, [JSON.stringify(messages)]);
270
271    const openai = new OpenAI();
272    const systemPrompt = "Ты дружелюбный и умный ассистент. Отвечай четко и по существу.";
273
277    ];
278
279    const stream = await openai.chat.completions.create({
280      model: "gpt-4o-mini",
281      messages: modifiedMessages,

translateToEnglishWithOpenAI (1 file match)

@shlmt · Updated 4 days ago

testOpenAI (1 file match)

@stevekrouse · Updated 6 days ago
lost1991
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(req: Request): Promise<Response> {
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",
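This last excerpt is truncated mid-object. As a hedged sketch, a handler with that shape typically answers the CORS preflight first and then does its OpenAI work; the headers and prompt below are illustrative assumptions, not the rest of the original val:

import { OpenAI } from "https://esm.town/v/std/openai";

export default async function (req: Request): Promise<Response> {
  // Answer the CORS preflight before doing any work.
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
        "Access-Control-Allow-Headers": "Content-Type",
      },
    });
  }

  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini", // assumed; the excerpt does not show the model
    messages: [{ role: "user", content: "Say hello" }],
  });

  return Response.json(completion.choices[0].message, {
    headers: { "Access-Control-Allow-Origin": "*" },
  });
}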