Val Town Code Search

API Access

You can access search results via the JSON API by adding format=json to your query:

https://codesearch.val.run/?q=openai&page=69&format=json
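
For example, a minimal sketch of reading one page of results from a Deno script or val. Only the URL and query parameters come from the description above; the payload schema isn't documented here, so the response is just logged for inspection:

// Sketch: fetch one page of code-search results as JSON (TypeScript/Deno).
// The response shape is an assumption-free unknown, so we only log it.
const res = await fetch("https://codesearch.val.run/?q=openai&page=1&format=json");
if (!res.ok) throw new Error(`search request failed: ${res.status}`);
const results = await res.json();
console.log(results);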

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

Returns an array of strings in the format "username" or "username/projectName".
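
A similarly minimal sketch for typeahead; only the endpoint and the string-array response format are taken from the description above, the rest is illustrative:

// Sketch: fetch typeahead suggestions for a query prefix (TypeScript/Deno).
const res = await fetch("https://codesearch.val.run/typeahead?q=openai");
const suggestions: string[] = await res.json();
// Each entry is either "username" or "username/projectName".
for (const s of suggestions) console.log(s);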

Found 1614 results for "openai" (541ms)

Factify/main.tsx (3 matches)

@veer16 • Updated 2 months ago
170 export default async function server(request: Request): Promise<Response> {
171 if (request.method === 'POST' && new URL(request.url).pathname === '/analyze') {
172 const { OpenAI } = await import("https://esm.town/v/std/openai");
173 const openai = new OpenAI();
174
175 const { text } = await request.json();
176
177 const completion = await openai.chat.completions.create({
178 messages: [
179 {

chatGPTClone/main.tsx (3 matches)

@gigmx • Updated 2 months ago
52
53 // Configure LLM
54 await retryOperation(() => catClientRef.current.api.llm.upsertLlmSetting('LLMOpenAIConfig', {
55 openai_api_key: config.openaiApiKey
56 }));
57
149 secure: Deno.env.get("CCAT_CORE_USE_SECURE_PROTOCOLS") === "true",
150 credential: Deno.env.get("CCAT_API_KEY"),
151 openaiApiKey: Deno.env.get("OPENAI_API_KEY"),
152 };
153

vibrantBronzeAnt/main.tsx (4 matches)

@faroutroy • Updated 2 months ago
110 export default async function server(request: Request): Promise<Response> {
111 if (request.method === "POST" && new URL(request.url).pathname === "/analyze") {
112 const { OpenAI } = await import("https://esm.town/v/std/openai");
113 const openai = new OpenAI();
114
115 try {
158 const limitedData = jsonData.slice(0, 50);
159
160 // Use OpenAI for intelligent analysis with more specific instructions
161 const analysisPrompt = `Provide a concise data analysis and forecasting insights:
162 - Total rows: ${jsonData.length}
171 Dataset preview: ${JSON.stringify(limitedData.slice(0, 5))}`;
172
173 const completion = await openai.chat.completions.create({
174 messages: [{ role: "user", content: analysisPrompt }],
175 model: "gpt-4o-mini",

groundedWhiteFowl/main.tsx (4 matches)

@Shaff_thv • Updated 2 months ago
189 export default async function server(request: Request) {
190 const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
191 const { OpenAI } = await import("https://esm.town/v/std/openai");
192 const KEY = "groundedWhiteFowl";
193 const SCHEMA_VERSION = 7; // Incrementing schema version for tracking
344 }
345
346 const openai = new OpenAI();
347
348 // Ensure messages are in the correct format for OpenAI
349 const formattedMessages = [
350 {
359 ];
360
361 const completion = await openai.chat.completions.create({
362 messages: formattedMessages,
363 model: "gpt-4o-mini",

VALLE/README.md (1 match)

@gigmx • Updated 2 months ago
6 * Fork this val to your own profile.
7 * Make a folder for the temporary vals that get generated, take the ID from the URL, and put it in `tempValsParentFolderId`.
8 * If you want to use OpenAI models you need to set the `OPENAI_API_KEY` [env var](https://www.val.town/settings/environment-variables).
9 * If you want to use Anthropic models you need to set the `ANTHROPIC_API_KEY` [env var](https://www.val.town/settings/environment-variables).
10 * Create a [Val Town API token](https://www.val.town/settings/api), open the browser preview of this val, and use the API token as the password to log in.

resumeAnalyzerHTTP/main.tsx (3 matches)

@azeem_rizvi72 • Updated 2 months ago
184 export default async function server(request: Request): Promise<Response> {
185 if (request.method === 'POST') {
186 const { OpenAI } = await import("https://esm.town/v/std/openai");
187 const openai = new OpenAI();
188
189 const formData = await request.formData();
196 const resumeText = await resumeFile.text();
197
198 const completion = await openai.chat.completions.create({
199 messages: [
200 {

researchAgent/emailHandler (2 matches)

@charmainetest • Updated 2 months ago
2 import process from "node:process";
3 import { marked } from "npm:marked";
4 import { OpenAI } from "npm:openai";
5
6 function emailValHandlerExtra(inboundEmail: Email) {
33 );
34
35 const client = new OpenAI({ apiKey: process.env.PERPLEXITY_API_KEY, baseURL: "https://api.perplexity.ai" });
36 const response = await client.chat.completions.create({
37 model: "sonar",

test_project/index.ts (1 match)

@charmainetest • Updated 2 months ago
2 import { parseProject } from "https://esm.town/v/std/parseImportMeta/project";
3 import { contentType } from "npm:mime-types@2.1.35";
4 import openAI from "npm:openai";
5
6 console.log("will this show up");

test_project/main.js (3 matches)

@charmainetest • Updated 2 months ago
1 import { blob } from "https://esm.town/v/std/blob";
2 import { OpenAI } from "https://esm.town/v/std/openai";
3 import Algebrite from "npm:algebrite";
4 import { servePublicFile } from "./index.ts";
138 */
139 async function handleProblemGeneration() {
140 const openai = new OpenAI();
141 const completion = await openai.chat.completions.create({
142 model: "gpt-4",
143 messages: [

researchAgent/emailHandler (2 matches)

@charmaine • Updated 2 months ago
2 import process from "node:process";
3 import { marked } from "npm:marked";
4 import { OpenAI } from "npm:openai";
5
6 function pm(...lines: string[]): string {
16 );
17
18 const client = new OpenAI({ apiKey: process.env.PERPLEXITY_API_KEY, baseURL: "https://api.perplexity.ai" });
19 const response = await client.chat.completions.create({
20 model: "sonar",

translateToEnglishWithOpenAI (1 file match)

@shlmt • Updated 3 days ago

testOpenAI (1 file match)

@stevekrouse • Updated 5 days ago

lost1991
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(req: Request): Promise<Response> {
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",