Val Town Code Search

API Access

You can access search results via the JSON API by adding format=json to your query:

https://codesearch.val.run/$1?q=openai&page=23&format=json
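
For example, a minimal sketch of fetching a results page as JSON from a script (a sketch only: the root path is an assumption, since the URL above shows a placeholder path segment, and the response is left untyped because its exact shape is not documented here):

```ts
// Fetch one page of search results as JSON.
// Assumption: the search page lives at the site root; the "$1" in the URL
// above is a placeholder. The JSON shape is not documented here, so the
// result is logged as-is rather than typed.
const params = new URLSearchParams({ q: "openai", page: "1", format: "json" });
const res = await fetch(`https://codesearch.val.run/?${params}`);
if (!res.ok) throw new Error(`Search request failed: ${res.status}`);
const results = await res.json();
console.log(results);
```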

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

Returns an array of strings in the format "username" or "username/projectName".
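
A minimal sketch of calling the typeahead endpoint and reading the body as that string array (error handling kept intentionally small):

```ts
// Fetch typeahead suggestions for a query prefix.
// Per the description above, the response is an array of strings such as
// "username" or "username/projectName".
const q = encodeURIComponent("openai");
const res = await fetch(`https://codesearch.val.run/typeahead?q=${q}`);
if (!res.ok) throw new Error(`Typeahead request failed: ${res.status}`);
const suggestions: string[] = await res.json();
console.log(suggestions.slice(0, 5));
```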

Found 2121 results for "openai" (2166ms)

ASKNOVA / README.md · 3 matches

@Abdullah7826 • Updated 1 week ago
7- 🎨 Modern Neumorphism/Dark Mode design with theme toggle
8- 💬 Real-time chat interface with typing indicators
9- 🤖 OpenAI GPT integration with fallback mock responses
10- 📱 Fully responsive design (mobile/tablet/desktop)
11- ✨ Smooth animations and micro-interactions
41## Environment Variables
42
43- `OPENAI_API_KEY` - OpenAI API key (optional, falls back to mock responses)
44
45## Usage
63
64✅ **AI Integration**
65- OpenAI GPT-4o-mini integration with fallback mock responses
66- Context-aware conversations (sends last 10 messages)
67- Intelligent mock responses for demo purposes

ASKNOVA / index.ts · 8 matches

@Abdullah7826 • Updated 1 week ago
1import { Hono } from "https://esm.sh/hono@3.11.7";
2import { readFile, serveFile } from "https://esm.town/v/std/utils@85-main/index.ts";
3import { OpenAI } from "https://esm.town/v/std/openai";
4import type { ChatRequest, ChatResponse, Message } from "../shared/types.ts";
5
49 let responseMessage = "";
50
51 // Try to use OpenAI if API key is available
52 const openaiKey = Deno.env.get('OPENAI_API_KEY');
53
54 if (openaiKey) {
55 try {
56 const openai = new OpenAI();
57
58 // Build conversation context
72 ];
73
74 const completion = await openai.chat.completions.create({
75 model: "gpt-4o-mini",
76 messages,
80
81 responseMessage = completion.choices[0]?.message?.content || "I'm sorry, I couldn't generate a response.";
82 } catch (openaiError) {
83 console.error("OpenAI API error:", openaiError);
84 // Fall back to mock response
85 responseMessage = getMockResponse(message);

untitled-2461 / project-ring-vision.tsx · 3 matches

@syskaul • Updated 1 week ago
1export default async function(req: Request): Promise<Response> {
2 // This will be our OpenAI Vision API integration
3 // For now, just a placeholder that shows we're ready
4
5 return new Response(
6 JSON.stringify({
7 message: "OpenAI Vision API integration ready",
8 status: "placeholder",
9 required_env_vars: [
10 "OPENAI_API_KEY",
11 ],
12 next_features: [

token-server / main.ts · 3 matches

@kwhinnery_openai • Updated 1 week ago
1const OPENAI_API_KEY = Deno.env.get("OPENAI_API_KEY");
2
3export async function handler(request: Request) {
4 const response = await fetch("https://api.openai.com/v1/realtime/sessions", {
5 method: "POST",
6 headers: {
7 "Content-Type": "application/json",
8 Authorization: `Bearer ${OPENAI_API_KEY}`,
9 },
10 body: JSON.stringify({

Townie / system_prompt.txt · 4 matches

@charmaine • Updated 1 week ago
88Note: When changing a SQLite table's schema, change the table's name (e.g., add _2 or _3) to create a fresh table.
89
90### OpenAI
91
92```ts
93import { OpenAI } from "https://esm.town/v/std/openai";
94const openai = new OpenAI();
95const completion = await openai.chat.completions.create({
96 messages: [
97 { role: "user", content: "Say hello in a creative way" },

Townie / .cursorrules · 4 matches

@charmaine • Updated 1 week ago
94Note: When changing a SQLite table's schema, change the table's name (e.g., add _2 or _3) to create a fresh table.
95
96### OpenAI
97
98```ts
99import { OpenAI } from "https://esm.town/v/std/openai";
100const openai = new OpenAI();
101const completion = await openai.chat.completions.create({
102 messages: [
103 { role: "user", content: "Say hello in a creative way" },

charmaineValSearch / .cursorrules · 4 matches

@charmaine • Updated 1 week ago
94Note: When changing a SQLite table's schema, change the table's name (e.g., add _2 or _3) to create a fresh table.
95
96### OpenAI
97
98```ts
99import { OpenAI } from "https://esm.town/v/std/openai";
100const openai = new OpenAI();
101const completion = await openai.chat.completions.create({
102 messages: [
103 { role: "user", content: "Say hello in a creative way" },

Realestate2 / main.tsx · 13 matches

@Get • Updated 1 week ago
1import { fetch } from "https://esm.town/v/std/fetch";
2import { OpenAI } from "https://esm.town/v/std/openai";
3import { z } from "npm:zod";
4
635}
636
637async function callOpenAI(sysP: string, userP: string, mid: string, tid: string, log: LogFn): Promise<string | null> {
638 log("DB", "OpenAI", `Call tid=${tid}`, { sL: sysP.length, uL: userP.length }, mid, tid);
639 try { // @ts-ignore
640 const oai = new OpenAI();
641 const comp = await oai.chat.completions.create({
642 model: "gpt-4o-mini", // Consider gpt-4o for more complex tasks if budget allows
648 const usg = comp.usage;
649 if (!resT) {
650 log("WN", "OpenAI", `No text tid=${tid}.`, { usg, fin: comp.choices[0]?.finish_reason }, mid, tid);
651 return null;
652 }
653 log("IN", "OpenAI", `OK tid=${tid}`, { rL: resT.length, usg, fin: comp.choices[0]?.finish_reason }, mid, tid);
654 return resT.trim();
655 } catch (err: any) {
663 st: err.status,
664 };
665 log("ER", "OpenAI", `Fail tid=${tid}:${err.message}`, { e: eD }, mid, tid);
666 throw new Error(
667 `OpenAI API Call Failed: ${err.message}`
668 + (err.code ? ` (Code:${err.code}, Status:${err.status})` : (err.status ? ` (Status:${err.status})` : "")),
669 );
985 const filledUserPrompt = Utils.fillPromptTemplate(userPrompt, params);
986
987 const rawOpenAIResponse = await callOpenAI(systemPrompt, filledUserPrompt, mid, tid, logFn);
988 if (!rawOpenAIResponse) {
989 logFn("WN", agentConfig.name, `OpenAI call returned no content, tid=${tid}.`, {}, mid, tid);
990 return { mid, cid: tid, p: {} as TOD, e: `${agentConfig.name} Error: AI returned no content.` };
991 }
992 let outputData: TOD;
993 try {
994 outputData = agentConfig.outputParser(rawOpenAIResponse);
995 }
996 catch (parseError: any) {
999 agentConfig.name,
1000 `Output parsing failed, tid=${tid}. M: ${parseError.message}`,
1001 { rawResponsePreview: rawOpenAIResponse.slice(0, 500) },
1002 mid,
1003 tid,

mech2 / mainLogic.ts · 13 matches

@join • Updated 1 week ago
17} from "https://esm.town/v/join/mech2/src/coreTypes.ts";
18import { Utils } from "https://esm.town/v/join/mech2/src/utils.ts";
19import { OpenAI } from "https://esm.town/v/std/openai";
20
21export class LogAgent {
69}
70
71export async function callOpenAI(
72 sysP: string,
73 userP: string,
76 log: LogFn,
77): Promise<string | null> {
78 log("DB", "OpenAI", `Call tid=${tid}`, { sL: sysP.length, uL: userP.length }, mid, tid);
79 try {
80 const oai = new OpenAI();
81 const comp = await oai.chat.completions.create({
82 model: "gpt-4o-mini",
88 const usg = comp.usage;
89 if (!resT) {
90 log("WN", "OpenAI", `No text tid=${tid}.`, { usg, fin: comp.choices[0]?.finish_reason }, mid, tid);
91 return null;
92 }
93 log("IN", "OpenAI", `OK tid=${tid}`, { rL: resT.length, usg, fin: comp.choices[0]?.finish_reason }, mid, tid);
94 return resT.trim();
95 } catch (err: any) {
103 st: err.status,
104 };
105 log("ER", "OpenAI", `Fail tid=${tid}:${err.message}`, { e: eD }, mid, tid);
106 throw new Error(
107 `OpenAI API Call Failed: ${err.message}`
108 + (err.code ? ` (Code:${err.code}, Status:${err.status})` : (err.status ? ` (Status:${err.status})` : "")),
109 );
375 }
376 const filledUserPrompt = Utils.fillPromptTemplate(userPrompt, params);
377 const rawOpenAIResponse = await callOpenAI(systemPrompt, filledUserPrompt, mid, tid, logFn);
378 if (!rawOpenAIResponse) {
379 logFn("WN", agentConfig.name, `OpenAI no content tid=${tid}.`, {}, mid, tid);
380 return { mid, cid: tid, p: {} as TOD, e: `${agentConfig.name} Err:AI no content.` };
381 }
382 let outputData: TOD;
383 try {
384 outputData = agentConfig.outputParser(rawOpenAIResponse);
385 } catch (parseError: any) {
386 logFn(
388 agentConfig.name,
389 `ParseFail tid=${tid}. M:${parseError.message}`,
390 { rRPrev: rawOpenAIResponse.slice(0, 500) },
391 mid,
392 tid,

realestate / main.tsx · 13 matches

@join • Updated 1 week ago
1import { fetch } from "https://esm.town/v/std/fetch";
2import { OpenAI } from "https://esm.town/v/std/openai";
3import { z } from "npm:zod";
4
517}
518
519async function callOpenAI(sysP: string, userP: string, mid: string, tid: string, log: LogFn): Promise<string | null> {
520 log("DB", "OpenAI", `Call tid=${tid}`, { sL: sysP.length, uL: userP.length }, mid, tid);
521 try { // @ts-ignore
522 const oai = new OpenAI();
523 const comp = await oai.chat.completions.create({
524 model: "gpt-4o-mini", // Consider gpt-4o for more complex real estate tasks if needed
530 const usg = comp.usage;
531 if (!resT) {
532 log("WN", "OpenAI", `No text tid=${tid}.`, { usg, fin: comp.choices[0]?.finish_reason }, mid, tid);
533 return null;
534 }
535 log("IN", "OpenAI", `OK tid=${tid}`, { rL: resT.length, usg, fin: comp.choices[0]?.finish_reason }, mid, tid);
536 return resT.trim();
537 } catch (err: any) {
545 st: err.status,
546 };
547 log("ER", "OpenAI", `Fail tid=${tid}:${err.message}`, { e: eD }, mid, tid);
548 throw new Error(
549 `OpenAI API Call Failed: ${err.message}`
550 + (err.code ? ` (Code:${err.code}, Status:${err.status})` : (err.status ? ` (Status:${err.status})` : "")),
551 );
893 const filledUserPrompt = Utils.fillPromptTemplate(userPrompt, params);
894
895 const rawOpenAIResponse = await callOpenAI(systemPrompt, filledUserPrompt, mid, tid, logFn);
896 if (!rawOpenAIResponse) {
897 logFn("WN", agentConfig.name, `OpenAI call returned no content, tid=${tid}.`, {}, mid, tid);
898 return { mid, cid: tid, p: {} as TOD, e: `${agentConfig.name} Error: AI returned no content.` };
899 }
900 let outputData: TOD;
901 try {
902 outputData = agentConfig.outputParser(rawOpenAIResponse);
903 }
904 catch (parseError: any) {
907 agentConfig.name,
908 `Output parsing failed, tid=${tid}. M: ${parseError.message}`,
909 { rawResponsePreview: rawOpenAIResponse.slice(0, 500) },
910 mid,
911 tid,

openai-client · 1 file match

@cricks_unmixed4u • Updated 1 day ago

openai_enrichment · 6 file matches

@stevekrouse • Updated 3 days ago
reconsumeralization
import { OpenAI } from "https://esm.town/v/std/openai"; import { sqlite } from "https://esm.town/v/stevekrouse/sqlite"; /** * Practical Implementation of Collective Content Intelligence * Bridging advanced AI with collaborative content creation */ exp
kwhinnery_openai