Val Town Code Search

API Access

You can access search results via the JSON API by adding format=json to your query:

https://codesearch.val.run/?q=openai&page=91&format=json
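For example, a minimal fetch sketch in TypeScript (Deno-style top-level await), assuming the query-string form shown above; the JSON response shape is not documented here, so inspect the payload before relying on specific fields:

// Minimal sketch: request one page of search results as JSON.
// The response shape is an assumption; log it first rather than assuming field names.
const res = await fetch("https://codesearch.val.run/?q=openai&page=1&format=json");
if (!res.ok) throw new Error(`search request failed: ${res.status}`);
console.log(await res.json());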

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

Returns an array of strings in the format "username" or "username/projectName".
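A similar sketch for the typeahead endpoint, which returns plain strings:

// Minimal sketch: fetch typeahead suggestions for a query prefix.
const typeaheadRes = await fetch("https://codesearch.val.run/typeahead?q=openai");
const suggestions: string[] = await typeaheadRes.json(); // e.g. "username" or "username/projectName"
console.log(suggestions);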

Found 1614 results for "openai" (600ms)

VEOPROMPTER main.tsx (5 matches)

@AppleLamps • Updated 4 months ago
307 const url = new URL(request.url);
308 if (url.pathname === "/generate-prompts") {
309 const { OpenAI } = await import("https://esm.sh/openai@4.11.1");
310 const openai = new OpenAI({
311 apiKey: Deno.env.get("OPENAI_API_KEY"),
312 });
313
316
317 try {
318 const completion = await openai.chat.completions.create({
319 model: "chatgpt-4o-latest", // Changed from "gpt-4o-latest" to "chatgpt-4o-latest"
320 messages: [
364
365 if (!response) {
366 throw new Error("No response from OpenAI");
367 }
368

chat main.tsx (6 matches)

@lisazz • Updated 4 months ago
5 options = {},
6) => {
7 // Initialize the OpenAI API client
8 const { Configuration, OpenAIApi } = await import(
9 "https://esm.sh/openai@3.3.0"
10 );
11 const configuration = new Configuration({
12 apiKey: process.env.OPENAI,
13 });
14 const openai = new OpenAIApi(configuration);
15 // Request chat completion
16 const messages = typeof prompt === "string"
17 ? [{ role: "user", content: prompt }]
18 : prompt;
19 const { data } = await openai.createChatCompletion({
20 model: "gpt-3.5-turbo-0613",
21 messages,

byob README.md (1 match)

@yawnxyz • Updated 4 months ago
11
12### Toolings
13* LLMs can use [tools](https://platform.openai.com/docs/guides/function-calling), meaning you can turn this into an agent and make it a whole lot more useful.
14
15

byob main.tsx (3 matches)

@yawnxyz • Updated 4 months ago
1import { zodResponseFormat } from "https://esm.sh/openai/helpers/zod";
2import { z } from "https://esm.sh/zod";
3import { email } from "https://esm.town/v/std/email";
4import { OpenAI } from "https://esm.town/v/std/openai";
5
6export default async function(e: Email) {
7 const client = new OpenAI();
8
9 const Messages = z.object({

chatgptchess main.tsx (4 matches)

@nguyenquangdinh82 • Updated 4 months ago
1import { OpenAI } from "https://esm.town/v/std/openai?v=5"
2import { sqlite } from "https://esm.town/v/std/sqlite?v=6"
3import { Chess } from "npm:chess.js"
131 return c.html(`<body>
132 <div class='p-4'>
133 <h2 class='font-bold'>OpenAI Chess</h2>
134 <p class='pb-4'>Play chess against ChatGPT-4</p>
135 <div id="myBoard" style="width: 400px"></div>
171 chess.move(san)
172
173 const openai = new OpenAI()
174
175 let messages = []
182 args: [c.req.param().id, `Requesting response to ${san}`],
183 })
184 const completion = await openai.chat.completions.create({
185 messages: [
186 {

valTownChatGPT README.md (1 match)

@nguyenquangdinh82 • Updated 4 months ago
1# ChatGPT Implemented in Val Town
2
3Demonstrates how to use assistants and threads with the OpenAI SDK, and how to stream the response with Server-Sent Events.
4
5<p align=center>

valTownChatGPT main.tsx (8 matches)

@nguyenquangdinh82 • Updated 4 months ago
1/** @jsxImportSource https://esm.sh/react */
2import OpenAI from "npm:openai";
3import { renderToString } from "npm:react-dom/server";
4
5// This uses my personal API key; you'll need to provide your own if
6// you fork this. We'll be adding support to the std/openai lib soon!
7const openai = new OpenAI();
8import { Hono } from "npm:hono@3";
9
38 });
39
40 // Setup the SSE connection and stream back the response. OpenAI handles determining
41 // which message is the correct response based on what was last read from the
42 // thread. This is likely vulnerable to race conditions.
58const app = new Hono();
59app.get("/", async (c) => {
60 const thread = await openai.beta.threads.create();
61 const assistant = await openai.beta.assistants.create({
62 name: "",
63 instructions:
114app.post("/post-message", async (c) => {
115 let message = await c.req.text();
116 await openai.beta.threads.messages.create(
117 c.req.query("threadId"),
118 { role: "user", content: message },
132 ));
133 };
134 const run = openai.beta.threads.runs.stream(threadId, {
135 assistant_id: assistantId,
136 // Make sure we only display messages we haven't seen yet.

twitterAlert main.tsx (5 matches)

@eseidel • Updated 4 months ago
1import { zodResponseFormat } from "https://esm.sh/openai/helpers/zod";
2import { z } from "https://esm.sh/zod";
3import { OpenAI } from "https://esm.town/v/std/openai";
4import { discordWebhook } from "https://esm.town/v/stevekrouse/discordWebhook";
5import { socialDataSearch, Tweet } from "https://esm.town/v/stevekrouse/socialDataSearch";
11 .join(" OR ") + " " + excludes;
12
13const openai = new OpenAI();
14
15const RelevanceSchema = z.object({
29
30 try {
31 const completion = await openai.beta.chat.completions.parse({
32 model: "gpt-4o-mini",
33 messages: [
42 return completion.choices[0].message.parsed;
43 } catch (error) {
44 console.error("Error parsing OpenAI response:", error);
45 return { isRelevant: false, confidence: 0, reason: "Error in processing" };
46 }

neatEmeraldVicuna main.tsx (5 matches)

@stevekrouse • Updated 4 months ago
1import { zodResponseFormat } from "https://esm.sh/openai/helpers/zod";
2import { z } from "https://esm.sh/zod";
3import { OpenAI } from "https://esm.town/v/std/openai";
4import { discordWebhook } from "https://esm.town/v/stevekrouse/discordWebhook";
5import { socialDataSearch, Tweet } from "https://esm.town/v/stevekrouse/socialDataSearch";
11 .join(" OR ") + " " + excludes;
12
13const openai = new OpenAI();
14
15const RelevanceSchema = z.object({
29
30 try {
31 const completion = await openai.beta.chat.completions.parse({
32 model: "gpt-4o-mini",
33 messages: [
42 return completion.choices[0].message.parsed;
43 } catch (error) {
44 console.error("Error parsing OpenAI response:", error);
45 return { isRelevant: false, confidence: 0, reason: "Error in processing" };
46 }

weatherGPT main.tsx (3 matches)

@rohanchoudhary15 • Updated 4 months ago
1import { email } from "https://esm.town/v/std/email?v=11";
2import { OpenAI } from "npm:openai";
3
4let location = "brooklyn ny";
8).then(r => r.json());
9
10const openai = new OpenAI();
11let chatCompletion = await openai.chat.completions.create({
12 messages: [{
13 role: "user",

translateToEnglishWithOpenAI (1 file match)

@shlmt • Updated 3 days ago

testOpenAI (1 file match)

@stevekrouse • Updated 5 days ago
lost1991
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(req: Request): Promise<Response> {
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",