Val Town Code Search — Return to Val Town

API Access

You can access search results via JSON API by adding format=json to your query:

https://codesearch.val.run/?q=openai&page=81&format=json

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

Returns an array of strings in format "username" or "username/projectName"

Found 1623 results for "openai" (645ms)

constantGrayRaven/main.tsx — 3 matches

@RareVibesAiDad•Updated 3 months ago
3import React, { useEffect, useState } from "https://esm.sh/react@18.2.0";
4import { blob } from "https://esm.town/v/std/blob";
5import { OpenAI } from "https://esm.town/v/std/openai";
6
7// Operational Data Science Tool Report Generator
8async function generateDataScienceToolReport() {
9 const openai = new OpenAI();
10
11 // Comprehensive Data Science and Database Tool Categories
49 // Advanced Operational Analysis
50 async function generateOperationalInsights(category: string) {
51 const operationalCompletion = await openai.chat.completions.create({
52 model: "gpt-4o-mini",
53 messages: [

RareVibesBoost/main.tsx — 3 matches

@RareVibesAiDad•Updated 3 months ago
3import React, { useEffect, useState } from "https://esm.sh/react@18.2.0";
4import { blob } from "https://esm.town/v/std/blob";
5import { OpenAI } from "https://esm.town/v/std/openai";
6
7// Operational Data Science Tool Report Generator
8async function generateDataScienceToolReport() {
9 const openai = new OpenAI();
10
11 // Comprehensive Data Science and Database Tool Categories
49 // Advanced Operational Analysis
50 async function generateOperationalInsights(category: string) {
51 const operationalCompletion = await openai.chat.completions.create({
52 model: "gpt-4o-mini",
53 messages: [

ai/main.tsx — 17 matches

@wizos•Updated 3 months ago
2import { Hono } from "npm:hono@3";
3import { cors } from "npm:hono/cors";
4import { createOpenAI } from "npm:@ai-sdk/openai";
5import { createAnthropic } from "npm:@ai-sdk/anthropic@0.0.48";
6import { google, createGoogleGenerativeAI } from 'npm:@ai-sdk/google';
30});
31
32const openai = createOpenAI({
33 // apiKey = Deno.env.get("OPENAI_API_KEY");
34 apiKey: Deno.env.get("OPENAI_API_KEY_COVERSHEET")
35});
36
37
38const groq = createOpenAI({
39 baseURL: 'https://api.groq.com/openai/v1',
40 apiKey: Deno.env.get("GROQ_API_KEY"),
41});
42
43const perplexity = createOpenAI({
44 apiKey: Deno.env.get("PERPLEXITY_API_KEY") ?? '',
45 baseURL: 'https://api.perplexity.ai/',
57 this.memories = options.memories || [];
58 this.messages = options.messages || [];
59 this.defaultProvider = options.provider || 'openai';
60 this.defaultModel = options.model;
61 this.defaultMaxTokens = options.maxTokens;
122 let result;
123 switch (provider) {
124 case 'openai':
125 result = await this.generateOpenAIResponse({ model, prompt, maxTokens, temperature, streaming, schema, system, messages, tools, ...additionalSettings });
126 break;
127 case 'anthropic':
171 }
172
173 async generateOpenAIResponse({ model, prompt, maxTokens, temperature, streaming, schema, system, messages, tools, embed, value, dimensions, user, ...additionalSettings }) {
174 const modelId = model || 'gpt-3.5-turbo';
175
176 if (embed) {
177 let result = await this.generateOpenAIEmbedding({ model, value, dimensions, user });
178 // console.log('embed!', result)
179 return result
181
182 const options = {
183 model: openai(modelId),
184 system,
185 temperature,
235 }
236
237 async generateOpenAIEmbedding({ model, value, dimensions, user }) {
238 const modelId = model || 'text-embedding-3-large';
239 const options = {
240 model: openai.embedding(modelId, {
241 dimensions,
242 user,
491
492app.get('/generate', async (c) => {
493 const provider = c.req.query('provider') || 'openai';
494 const model = c.req.query('model');
495 const prompt = c.req.query('prompt');
523 console.log("post/generate", { mode: 'post/generate', prompt, provider, model });
524 const response = await modelProvider.gen({
525 provider: provider || 'openai',
526 model,
527 prompt,

OpenTownie/generateCode — 3 matches

@shouser•Updated 3 months ago
1import OpenAI from "https://esm.sh/openai";
2
3function parseValResponse(response: string) {
28 const system = await (await fetch(`${import.meta.url.split("/").slice(0, -1).join("/")}/system_prompt.txt`)).text();
29
30 const openai = new OpenAI({
31 baseURL: "https://openrouter.ai/api/v1",
32 apiKey: Deno.env.get("OPEN_ROUTER_KEY"),
33 });
34 console.log(messages);
35 const completion = await openai.chat.completions.create({
36 model: "deepseek/deepseek-r1",
37 messages: [

OpenTownie/system_prompt.txt — 7 matches

@shouser•Updated 3 months ago
83
84 <library>
85 ## OpenAI
86
87 Val Town includes a free, proxied OpenAI:
88
89 ```ts
90 import { OpenAI } from "https://esm.town/v/std/openai";
91 const openai = new OpenAI();
92 const completion = await openai.chat.completions.create({
93 messages: [
94 { role: "user", content: "Say hello in a creative way" },
99 ```
100
101 OpenAI only works on the server. If the val includes client-side code, use dynamic imports to import this module in the server function, e.g.:
102 `const { OpenAI } = await import("https://esm.town/v/std/openai");`
103 </library>
104

MrIdentify/main.tsx — 3 matches

@efaztheistic•Updated 3 months ago
106 if (request.method === 'POST' && new URL(request.url).pathname === '/identify') {
107 try {
108 const { OpenAI } = await import("https://esm.town/v/std/openai");
109 const openai = new OpenAI();
110
111 const formData = await request.formData();
124 );
125
126 const response = await openai.chat.completions.create({
127 model: "gpt-4o",
128 messages: [

generateBackendSchema/main.tsx — 5 matches

@toowired•Updated 3 months ago
1import { OpenAI } from "https://esm.town/v/std/openai";
2
3interface SchemaResponse {
39 }
40
41 const openai = new OpenAI();
42 let completion;
43 try {
44 completion = await openai.chat.completions.create({
45 messages: [
46 { role: "system", content: "You are a backend schema generator. Generate a backend schema based on the given project description." },
51 });
52 } catch (error) {
53 console.error("Error calling OpenAI API:", error);
54 return new Response("Error generating schema. Please try again later.", { status: 500, headers });
55 }
59 schemaResponse = JSON.parse(completion.choices[0].message.content);
60 } catch (error) {
61 console.error("Error parsing OpenAI response:", error);
62 return new Response("Error processing the generated schema. Please try again.", { status: 500, headers });
63 }

sweetBlackHare/main.tsx — 4 matches

@imnk•Updated 3 months ago
480export default async function server(request: Request): Promise<Response> {
481 if (request.method === 'POST' && new URL(request.url).pathname === '/analyze') {
482 const { OpenAI } = await import("https://esm.town/v/std/openai");
483 const openai = new OpenAI();
484
485 try {
514
515 try {
516 const completion = await openai.chat.completions.create({
517 model: "gpt-4o-mini",
518 messages: [
578 }
579
580 console.error('OpenAI processing error:', processingError);
581 return new Response(
582 language === 'en'

graciousAmaranthMackerel/main.tsx — 3 matches

@imnk•Updated 3 months ago
206export default async function server(request: Request): Promise<Response> {
207 if (request.method === 'POST' && new URL(request.url).pathname === '/analyze') {
208 const { OpenAI } = await import("https://esm.town/v/std/openai");
209 const openai = new OpenAI();
210
211 try {
222 );
223
224 const completion = await openai.chat.completions.create({
225 model: "gpt-4o-mini",
226 messages: [

medicineLabelAnalyzerApp/main.tsx — 3 matches

@imnk•Updated 3 months ago
100export default async function server(request: Request): Promise<Response> {
101 if (request.method === 'POST' && new URL(request.url).pathname === '/analyze') {
102 const { OpenAI } = await import("https://esm.town/v/std/openai");
103 const openai = new OpenAI();
104
105 try {
116 );
117
118 const completion = await openai.chat.completions.create({
119 model: "gpt-4o-mini",
120 messages: [

translateToEnglishWithOpenAI — 1 file match

@shlmt•Updated 4 days ago

testOpenAI — 1 file match

@stevekrouse•Updated 6 days ago
lost1991
import { OpenAI } from "https://esm.town/v/std/openai"; export default async function(req: Request): Promise<Response> { if (req.method === "OPTIONS") { return new Response(null, { headers: { "Access-Control-Allow-Origin": "*",