Val Town Code SearchReturn to Val Town

API Access

You can access search results via JSON API by adding format=json to your query:

https://codesearch.val.run/?q=openai&page=83&format=json

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

Returns an array of strings in format "username" or "username/projectName"

Found 1670 results for "openai" (923ms)

chatmain.tsx6 matches

@Newboon•Updated 3 months ago
5 options = {},
6) => {
7 // Initialize OpenAI API stub
8 const { Configuration, OpenAIApi } = await import(
9 "https://esm.sh/openai@3.3.0"
10 );
11 const configuration = new Configuration({
12 apiKey: process.env.OPENAI,
13 });
14 const openai = new OpenAIApi(configuration);
15 // Request chat completion
16 const messages = typeof prompt === "string"
17 ? [{ role: "user", content: prompt }]
18 : prompt;
19 const { data } = await openai.createChatCompletion({
20 model: "gpt-3.5-turbo-0613",
21 messages,

memorySampleSummarymain.tsx3 matches

@AIWB•Updated 3 months ago
1import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";
2import { OpenAI } from "https://esm.town/v/std/openai";
3
4const KEY = "memorySampleSummary";
6const SCHEMA_VERSION = 1;
7
8const openai = new OpenAI();
9
10// Initialize the database
22// Generate embedding for a given text
23async function generateEmbedding(text: string): Promise<number[]> {
24 const response = await openai.embeddings.create({
25 model: "text-embedding-ada-002",
26 input: text,

caloriesmain.tsx1 match

@AIWB•Updated 3 months ago
2import { fileToDataURL } from "https://esm.town/v/stevekrouse/fileToDataURL";
3import { modifyImage } from "https://esm.town/v/stevekrouse/modifyImage";
4import { chat } from "https://esm.town/v/stevekrouse/openai";
5import { Hono } from "npm:hono@3";
6

aiMessengermain.tsx3 matches

@tesla6940•Updated 3 months ago
409export default async function server(request: Request): Promise<Response> {
410 const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
411 const { OpenAI } = await import("https://esm.town/v/std/openai");
412 const KEY = "aiMessenger";
413 const openai = new OpenAI();
414
415 const SCHEMA_VERSION = 5;
655 if (mode === "ai") {
656 // Generate AI response
657 const aiCompletion = await openai.chat.completions.create({
658 messages: [{
659 role: "user",

selfassuredCoralPigmain.tsx3 matches

@tesla6940•Updated 3 months ago
393export default async function server(request: Request): Promise<Response> {
394 const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
395 const { OpenAI } = await import("https://esm.town/v/std/openai");
396 const KEY = "selfassuredCoralPig";
397 const openai = new OpenAI();
398
399 const SCHEMA_VERSION = 5;
616 if (mode === 'ai') {
617 // Generate AI response
618 const aiCompletion = await openai.chat.completions.create({
619 messages: [{
620 role: "user",

OpenTowniesystem_prompt.txt7 matches

@AIWB•Updated 3 months ago
83
84 <library>
85 ## OpenAI
86
87 Val Town includes a free, proxied OpenAI:
88
89 ```ts
90 import { OpenAI } from "https://esm.town/v/std/openai";
91 const openai = new OpenAI();
92 const completion = await openai.chat.completions.create({
93 messages: [
94 { role: "user", content: "Say hello in a creative way" },
99 ```
100
101 OpenAI only works on the server. If the val includes client-side code, use dynamic imports to import this module in the server function, e.g.:
102 `const { OpenAI } = await import("https://esm.town/v/std/openai");`
103 </library>
104

OpenTowniegenerateCode3 matches

@AIWB•Updated 3 months ago
1import OpenAI from "https://esm.sh/openai";
2
3function parseValResponse(response: string) {
28 const system = await (await fetch(`${import.meta.url.split("/").slice(0, -1).join("/")}/system_prompt.txt`)).text();
29
30 const openai = new OpenAI({
31 baseURL: "https://openrouter.ai/api/v1",
32 apiKey: Deno.env.get("OPEN_ROUTER_KEY"),
33 });
34 console.log(messages);
35 const completion = await openai.chat.completions.create({
36 model: "deepseek/deepseek-r1",
37 messages: [

competentOlivePeacockmain.tsx3 matches

@awhitter•Updated 3 months ago
287 } else if (url.pathname === "/api/analyze") {
288 if (req.method === "POST") {
289 const { OpenAI } = await import("https://esm.town/v/std/openai");
290 const openai = new OpenAI();
291
292 try {
297 Full Content: ${fullContent}`;
298
299 const completion = await openai.chat.completions.create({
300 messages: [{ role: "user", content: prompt }],
301 model: "gpt-4o-mini",

r1main.tsx2 matches

@sboesen•Updated 3 months ago
1import { encode } from "https://deno.land/std@0.203.0/encoding/base64.ts";
2import OpenAI from "https://esm.sh/openai@4.24.1";
3import { email } from "https://esm.town/v/std/email";
4import { pdfText } from "jsr:@pdf/pdftext";
134async function sendRequestToFireworks(prompt, apiKey, model) {
135 try {
136 const client = new OpenAI({
137 baseURL: "https://api.fireworks.ai/inference/v1",
138 apiKey: apiKey,

OpenRouterChatCompletion_Testmain.tsx2 matches

@rozek•Updated 3 months ago
2
3 export default async function (Request:Request):Promise<Response> {
4 const OpenAIRequest = {
5 messages: [
6 { role:'system', content:'please answer the following question' },
14 method: 'POST',
15 headers:{ 'Content-Type':'application/json' },
16 body: JSON.stringify(OpenAIRequest)
17 });
18

translateToEnglishWithOpenAI1 file match

@shlmt•Updated 1 week ago

testOpenAI1 file match

@stevekrouse•Updated 1 week ago
lost1991
import { OpenAI } from "https://esm.town/v/std/openai"; export default async function(req: Request): Promise<Response> { if (req.method === "OPTIONS") { return new Response(null, { headers: { "Access-Control-Allow-Origin": "*",