Val Town Code Search

API Access

You can access search results via the JSON API by adding format=json to your query:

https://codesearch.val.run/?q=openai&page=54&format=json
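
For example, a minimal sketch of fetching a page of results from TypeScript (the exact shape of the JSON response is not documented here, so it is logged as-is):

// Sketch: request one page of search results as JSON.
// Assumes only that the URL above returns a JSON body; no particular schema.
const res = await fetch("https://codesearch.val.run/?q=openai&page=1&format=json");
if (!res.ok) throw new Error(`Search request failed: ${res.status}`);
const data = await res.json();
console.log(data);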

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

Returns an array of strings in the format "username" or "username/projectName".
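
For example, a minimal sketch of fetching suggestions (assuming the endpoint returns the plain JSON array of strings described above):

// Sketch: fetch typeahead suggestions for a query prefix.
const res = await fetch("https://codesearch.val.run/typeahead?q=openai");
const suggestions: string[] = await res.json();
// Each entry is either "username" or "username/projectName".
for (const s of suggestions) console.log(s);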

Found 1632 results for "openai" (564ms)

simpleSearchPage main.tsx (3 matches)

@kamorudeenalo • Updated 1 month ago
136 export default async function server(request: Request): Promise<Response> {
137 if (request.method === "POST") {
138 const { OpenAI } = await import("https://esm.town/v/std/openai");
139 const openai = new OpenAI();
140
141 try {
142 const { query } = await request.json();
143
144 const completion = await openai.chat.completions.create({
145 messages: [
146 {

My_Scheduler main.tsx (3 matches)

@shirahh • Updated 1 month ago
96 export default async function server(request: Request): Promise<Response> {
97 const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
98 const { OpenAI } = await import("https://esm.town/v/std/openai");
99 const openai = new OpenAI();
100
101 const KEY = "My_Scheduler";
135 const taskList = tasks.rows.map(t => `${t.task} (${t.duration} mins, ${t.priority} priority)`).join(", ");
136
137 const suggestion = await openai.chat.completions.create({
138 messages: [
139 {

greatTealTiglon main.tsx (3 matches)

@Anuoluwapo • Updated 1 month ago
1 import { email } from "https://esm.town/v/std/email";
2 import { OpenAI } from "https://esm.town/v/std/openai";
3 import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";
4
47
48 export default async function (e: Email) {
49 const openai = new OpenAI();
50
51 // Ensure we have text content to analyze
72 try {
73 // Use GPT to draft an intelligent reply
74 const completion = await openai.chat.completions.create({
75 messages: [
76 {

C_s main.tsx (3 matches)

@abas • Updated 1 month ago
67
68 try {
69 const { OpenAI } = await import("https://esm.town/v/std/openai");
70 const openai = new OpenAI();
71
72 const systemPrompt = `
93 `;
94
95 const aiCompletion = await openai.chat.completions.create({
96 messages: [
97 { role: "system", content: systemPrompt },

memorySampleSummary main.tsx (1 match)

@wangllm • Updated 1 month ago
4 const builder = await getMemoryBuilder({
5 type: "summary",
6 provider: "openai",
7 });
8 const memory = await builder();

tenaciousPeachHornet main.tsx (3 matches)

@Argu • Updated 1 month ago
82
83 try {
84 const { OpenAI } = await import("https://esm.town/v/std/openai");
85 const openai = new OpenAI();
86
87 const completion = await openai.chat.completions.create({
88 messages: messages,
89 model: "gpt-4o-mini",

TopHackerNewsDailyEmail main.tsx (6 matches)

@ianmenethil • Updated 1 month ago
1 // import { loadPageContent } from "https://esm.town/v/charlypoly/browserbaseUtils";
2 // import { OpenAI } from "https://esm.town/v/std/openai?v=4";
3 // import { z } from "npm:zod";
4 // import { zodToJsonSchema } from "npm:zod-to-json-schema";
25 // });
26
27 // // we create a OpenAI Tool that takes our schema as argument
28 // const extractContentTool: any = {
29 //   type: "function",
36 // };
37
38 // const openai = new OpenAI();
39
40 // // We ask OpenAI to extract the content from the given web page.
41 // // The model will reach out to our `extract_content` tool and
42 // // by doing so, the model will extract the required data to satisfy
43 // // the requirement of `extract_content`s argument.
44 // const completion = await openai.chat.completions.create({
45 //   model: "gpt-4-turbo",
46 //   messages: [
55 // });
56
57 // // we retrieve the serialized arguments generated by OpenAI
58 // const result = completion.choices[0].message.tool_calls![0].function.arguments;
59 // // the serialized arguments are parsed into a valid JavaScript array of objects

MyProject main.tsx (3 matches)

@mercyngatia • Updated 1 month ago
281 export default async function server(request: Request): Promise<Response> {
282 const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
283 const { OpenAI } = await import("https://esm.town/v/std/openai");
284 const KEY = "MyProject";
285 const SCHEMA_VERSION = 3;
302 `);
303
304 const openai = new OpenAI();
305
306 if (request.method === "POST") {
328
329 if (request.url.includes("/ai-suggestion")) {
330 const completion = await openai.chat.completions.create({
331 messages: [
332 {

openaiStreamingDemo main.tsx (4 matches)

@stevekrouse • Updated 1 month ago
1 import OpenAI from "npm:openai";
2 const openai = new OpenAI();
3
4 export default async (req) => {
8
9 // Create the stream with the signal
10 const stream = await openai.chat.completions.create({
11 model: "gpt-3.5-turbo",
12 messages: [{ role: "user", content: "Tell me a story" }],
20 // Handle connection closed event
21 req.signal.addEventListener("abort", () => {
22 console.log("Client disconnected, aborting OpenAI request");
23 controller.abort();
24 writer.close().catch(err => {

openaistreaminghtml main.tsx (3 matches)

@stevekrouse • Updated 1 month ago
1 import OpenAI from "npm:openai";
2
3 const openai = new OpenAI();
4
5 export default async (req) => {
6 try {
7 const stream = await openai.chat.completions.create({
8 model: "gpt-3.5-turbo",
9 messages: [{

translateToEnglishWithOpenAI (1 file match)

@shlmt • Updated 6 days ago

testOpenAI (1 file match)

@stevekrouse • Updated 1 week ago
lost1991
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(req: Request): Promise<Response> {
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",