Val Town Code Search

API Access

You can access search results via the JSON API by adding format=json to your query:

https://codesearch.val.run/?q=openai&page=70&format=json

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

Returns an array of strings in the format "username" or "username/projectName".
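
For example, a minimal TypeScript sketch that calls both endpoints (the exact shape of the search JSON is not documented above, so the example only logs it):

```ts
// Sketch: query the code search JSON API and the typeahead endpoint.
const base = "https://codesearch.val.run";

// Search results as JSON (adds format=json to the normal query URL)
const searchRes = await fetch(`${base}/?q=openai&format=json`);
console.log(await searchRes.json());

// Typeahead suggestions: an array of "username" or "username/projectName" strings
const typeaheadRes = await fetch(`${base}/typeahead?q=openai`);
const suggestions: string[] = await typeaheadRes.json();
console.log(suggestions);
```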

Found 1614 results for "openai" (1004ms)

researchAgent/emailHandler (2 matches)

@thesephist•Updated 2 months ago
2  import process from "node:process";
3  import { marked } from "npm:marked";
4  import { OpenAI } from "npm:openai";
5
6  function pm(...lines: string[]): string {
16  );
17
18  const client = new OpenAI({ apiKey: process.env.PERPLEXITY_API_KEY, baseURL: "https://api.perplexity.ai" });
19  const response = await client.chat.completions.create({
20    model: "sonar",
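
The excerpt above reuses the stock npm:openai client against Perplexity's OpenAI-compatible API by overriding apiKey and baseURL; a minimal standalone sketch of the same pattern (the prompt is illustrative):

```ts
import process from "node:process";
import { OpenAI } from "npm:openai";

// Point the OpenAI SDK at an OpenAI-compatible endpoint (here Perplexity),
// as in the excerpt above.
const client = new OpenAI({
  apiKey: process.env.PERPLEXITY_API_KEY,
  baseURL: "https://api.perplexity.ai",
});

const response = await client.chat.completions.create({
  model: "sonar",
  messages: [{ role: "user", content: "Summarize this week's AI news in three bullets." }],
});

console.log(response.choices[0].message.content);
```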

GeminiVoiceCallApp/main.tsx (3 matches)

@srijanb69•Updated 2 months ago
148  export default async function server(request: Request): Promise<Response> {
149    if (request.method === 'POST' && new URL(request.url).pathname === '/ai-response') {
150      const { OpenAI } = await import("https://esm.town/v/std/openai");
151      const openai = new OpenAI();
152
153      const body = await request.json();
154      const conversationHistory = body.conversation || [];
155
156      const completion = await openai.chat.completions.create({
157        messages: [
158          {

OpenAI/main.tsx (10 matches)

@wangqiao1234•Updated 2 months ago
1  import { type ClientOptions, OpenAI as RawOpenAI } from "npm:openai";
2
3  /**
4   * API Client for interfacing with the OpenAI API. Uses Val Town credentials.
5   */
6  export class OpenAI {
7    private rawOpenAIClient: RawOpenAI;
8
9    /**
10    * API Client for interfacing with the OpenAI API. Uses Val Town credentials.
11    *
12    * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.
19    */
20    constructor(options: Omit<ClientOptions, "baseURL" | "apiKey" | "organization"> = {}) {
21      this.rawOpenAIClient = new RawOpenAI({
22        ...options,
23        baseURL: "https://std-openaiproxy.web.val.run/v1",
24        apiKey: Deno.env.get("valtown"),
25        organization: null,
28
29    get chat() {
30      return this.rawOpenAIClient.chat;
31    }
32
33    readonly beta = {
34      get chat(): RawOpenAI["beta"]["chat"] {
35        return this.rawOpenAIClient.beta.chat;
36      },
37    };
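
As several other results on this page show, this wrapper is used like the upstream client but without supplying credentials; a minimal usage sketch (the model name is illustrative):

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

// baseURL and apiKey are filled in by the wrapper (Val Town proxy plus the
// "valtown" env var), so the calling val needs no OpenAI key of its own.
const openai = new OpenAI();

const completion = await openai.chat.completions.create({
  model: "gpt-4o-mini",
  messages: [{ role: "user", content: "Say hello from Val Town." }],
});

console.log(completion.choices[0].message.content);
```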

promptScorer/main.tsx (5 matches)

@toowired•Updated 2 months ago
1  import { OpenAI } from "https://esm.town/v/std/openai";
2
3  export async function promptScorer({
8    evolved: string;
9  }) {
10    const openai = new OpenAI();
11
12    const scoringPrompt =
29
30    try {
31      const completion = await openai.chat.completions.create({
32        model: "gpt-4o",
33        messages: [
49      return { score: isNaN(score) || score < 0 || score > 10 ? 5 : score }; // Default to 5 if parsing fails or score is out of range
50    } catch (error) {
51      console.error("Error calling OpenAI API:", error);
52      return { score: 5, error: "Failed to get score from OpenAI" };
53    }
54  }

emojisearch/index (4 matches)

@maxm•Updated 2 months ago
13  console.log(emojisWithInfo);
14
15  import { OpenAI } from "npm:openai";
16
17  // Initialize OpenAI client
18  const openai = new OpenAI();
19
20  async function getEmbedding(emoji: string): Promise<number[]> {
21    const result = await openai.embeddings.create({
22      input: emoji,
23      model: "text-embedding-3-small",
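
The excerpt stops at the embedding call; an emoji search built on these vectors typically ranks candidates by cosine similarity against a query embedding. The ranking step below is an assumption for illustration, not code from this val:

```ts
// Assumed ranking step: cosine similarity between a query embedding and
// precomputed emoji embeddings (neither is shown in the excerpt above).
function cosineSimilarity(a: number[], b: number[]): number {
  let dot = 0, normA = 0, normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}

function rankEmojis(
  queryEmbedding: number[],
  emojiEmbeddings: { emoji: string; embedding: number[] }[],
  topK = 5,
) {
  return emojiEmbeddings
    .map((e) => ({ emoji: e.emoji, score: cosineSimilarity(queryEmbedding, e.embedding) }))
    .sort((a, b) => b.score - a.score)
    .slice(0, topK);
}
```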

mathOfTheDay/accomplishedOrangePenguin (3 matches)

@charmaine•Updated 2 months ago
189
190  export default async function server(request) {
191    const { OpenAI } = await import("https://esm.town/v/std/openai");
192    const { blob } = await import("https://esm.town/v/std/blob");
193    const url = new URL(request.url);
194
195    if (url.pathname === "/problem") {
196      const openai = new OpenAI();
197      const completion = await openai.chat.completions.create({
198        model: "gpt-4",
199        messages: [

chatAgentWithCustomPrompt/main.tsx (3 matches)

@mjoshimanhar•Updated 2 months ago
1  import { initializeAgentExecutorWithOptions } from "https://esm.sh/langchain/agents";
2  import { ChatOpenAI } from "https://esm.sh/langchain/chat_models/openai";
3  import { Calculator } from "https://esm.sh/langchain/tools/calculator";
4  import { OpenAI } from "https://esm.town/v/std/openai";
5
6  export default async function chatAgentWithCustomPrompt(input?: string) {
7    const model = new ChatOpenAI({
8      temperature: 0,
9    });

diverseAmberReindeer/README.md (1 match)

@melissanf•Updated 2 months ago
25  ## **Tech Stack**
26  - **Townie AI** – AI-based automation
27  - **OpenAI API** – For generating blog content
28  - **Built-in Databases** – To store and retrieve topics (if needed)
29

blogIdeaGeneratorApp/main.tsx (4 matches)

@melissanf•Updated 2 months ago
276    // Handle API and shared content routes
277    if (request.method === "POST") {
278      const { OpenAI } = await import("https://esm.town/v/std/openai");
279      const { blob } = await import("https://esm.town/v/std/blob");
280      const openai = new OpenAI();
281
282      const { topic, idea, type } = await request.json();
293      let response;
294      if (type === "ideas") {
295        response = await openai.chat.completions.create({
296          messages: [
297            {
317        });
318      } else if (type === "outline") {
319        response = await openai.chat.completions.create({
320          messages: [
321            {

blogIdeaGeneratorApp/README.md (1 match)

@melissanf•Updated 2 months ago
15  ### Tech Stack
16  * Townie AI – AI-based automation
17  * OpenAI API – For generating blog content
18  * Built-in Databases – To store and retrieve topics (if needed)
19  * How to Deploy & Share

translateToEnglishWithOpenAI (1 file match)

@shlmt•Updated 3 days ago

testOpenAI (1 file match)

@stevekrouse•Updated 5 days ago
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(req: Request): Promise<Response> {
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",
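
The preview is cut off inside the CORS headers; a typical completion of this preflight-plus-std/openai pattern looks roughly like the sketch below (the allowed methods and headers, the model name, and the response body are assumptions, not the val's actual code):

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function (req: Request): Promise<Response> {
  // Answer CORS preflight requests before doing any work.
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "GET, POST, OPTIONS",
        "Access-Control-Allow-Headers": "Content-Type",
      },
    });
  }

  // Assumed body: relay a simple prompt through the std/openai wrapper.
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini",
    messages: [{ role: "user", content: "Hello from testOpenAI" }],
  });

  return new Response(JSON.stringify(completion.choices[0].message), {
    headers: {
      "Content-Type": "application/json",
      "Access-Control-Allow-Origin": "*",
    },
  });
}
```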