Val Town Code Search

API Access

You can access search results via the JSON API by adding format=json to your query:

https://codesearch.val.run/?q=openai&page=82&format=json
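A minimal sketch of calling this endpoint from TypeScript (the base URL and query parameters mirror the example above; the shape of the JSON response is not documented here, so it is left untyped):

```ts
// Fetch one page of search results as JSON.
const res = await fetch("https://codesearch.val.run/?q=openai&page=1&format=json");
if (!res.ok) throw new Error(`Search request failed: ${res.status}`);
const results: unknown = await res.json();
console.log(results);
```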

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

Returns an array of strings in the format "username" or "username/projectName".
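For instance, a small sketch of fetching suggestions (the endpoint and the response format follow the description above):

```ts
// Fetch typeahead suggestions for a query prefix.
// The response is an array of strings such as "username" or "username/projectName".
const res = await fetch("https://codesearch.val.run/typeahead?q=openai");
const suggestions: string[] = await res.json();
console.log(suggestions);
```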

Found 1675 results for "openai" (1001ms)

184 ): Promise<CompletionResponse> {
185 // Implement function calling for Google
186 // Note: Gemini has different function calling format than OpenAI
187 throw new Error("Function calling not yet implemented for Google provider");
188 }

LlmDashboard / llm-provider-openai (24 matches)

@prashamtrivedi · Updated 3 months ago
1/**
2 * llm-provider-openai.ts
3 * OpenAI-specific provider implementation
4 */
5
6import { OpenAI } from "https://esm.town/v/std/openai";
7import type {
8 BaseModel,
16
17/**
18 * OpenAI-specific parameter extensions
19 */
20export interface OpenAIParams {
21 reasoningEffort?: number; // For models with reasoning capabilities
22 jsonMode?: boolean; // Force JSON output
25
26/**
27 * OpenAI model configuration with provider-specific params
28 */
29export type OpenAIConfig = WithProviderParams<BaseModelConfig, OpenAIParams>;
30
31/**
32 * OpenAI-specific model interface
33 */
34interface OpenAIModel extends BaseModel {
35 providerSpecific: OpenAIParams;
36}
37
38export class OpenAIProvider implements BaseProvider {
39 readonly id = "openai";
40 readonly name = "OpenAI";
41 private client: OpenAI;
42
43 constructor() {
44 this.client = new OpenAI();
45 }
46
47 models: OpenAIModel[] = [
48 {
49 id: "gpt-4",
123 prompt: string | Message[],
124 modelId?: string,
125 config?: OpenAIConfig,
126 ): Promise<CompletionResponse> {
127 const model = this.models.find(m => m.id === (modelId || this.defaultModel));
167 };
168 } catch (error) {
169 throw new Error(`OpenAI API error: ${error.message}`);
170 }
171 }
175 functions: FunctionDefinition[],
176 modelId?: string,
177 config?: OpenAIConfig,
178 ): Promise<CompletionResponse> {
179 const model = this.models.find(m => m.id === (modelId || this.defaultModel));
223 };
224 } catch (error) {
225 throw new Error(`OpenAI API error: ${error.message}`);
226 }
227 }
228
229 private calculateCost(
230 model: OpenAIModel,
231 inputTokens: number,
232 outputTokens: number,
241 prompt: string | Message[],
242 modelId?: string,
243 config?: OpenAIConfig,
244 ): AsyncIterableIterator<CompletionResponse> {
245 const model = this.models.find(m => m.id === (modelId || this.defaultModel));
282 }
283 } catch (error) {
284 throw new Error(`OpenAI API error: ${error.message}`);
285 }
286 }
287}
288
289export default OpenAIProvider;

chatGPTClone / main.tsx (4 matches)

@Cpk · Updated 3 months ago
89export default async function server(request: Request): Promise<Response> {
90 if (request.method === 'POST' && new URL(request.url).pathname === '/chat') {
91 const { OpenAI } = await import("https://esm.town/v/std/openai");
92 const openai = new OpenAI();
93
94 try {
95 const { messages } = await request.json();
96
97 const completion = await openai.chat.completions.create({
98 messages: messages,
99 model: "gpt-4o-mini",
107 });
108 } catch (error) {
109 console.error('OpenAI Error:', error);
110 return new Response(JSON.stringify({
111 message: 'Sorry, there was an error processing your request.'

BikramNpExplorerApp / main.tsx (3 matches)

@air · Updated 3 months ago
79export default async function server(request: Request): Promise<Response> {
80 if (request.method === 'POST' && new URL(request.url).pathname === '/chat') {
81 const { OpenAI } = await import("https://esm.town/v/std/openai");
82 const openai = new OpenAI();
83
84 const body = await request.json();
85 const completion = await openai.chat.completions.create({
86 messages: [
87 {

emailSummaryHandler / main.tsx (3 matches)

@saltcod · Updated 3 months ago
2import { email } from "https://esm.town/v/std/email";
3import { extractValInfo } from "https://esm.town/v/stevekrouse/extractValInfo";
4import { OpenAI } from "npm:openai";
5
6function stripHtmlBackticks(html: string): string {
9
10export default async function(e: Email) {
11 const openai = new OpenAI();
12 console.log(`from: ${e.from} to: ${e.to} subject: ${e.subject}, cc: ${e.cc}, bcc: ${e.bcc}`);
13
25 }
26
27 const summary = await openai.chat.completions.create({
28 messages: [
29 {

surprisingEmbeddings / bots (5 matches)

@tmcw · Updated 3 months ago
61 },
62 "ChatGPT-User": {
63 "operator": "[OpenAI](https://openai.com)",
64 "respect": "Yes",
65 "function": "Takes action based on user prompts.",
169 },
170 "GPTBot": {
171 "operator": "[OpenAI](https://openai.com)",
172 "respect": "Yes",
173 "function": "Scrapes data to train OpenAI's products.",
174 "frequency": "No information.",
175 "description":
239 },
240 "OAI-SearchBot": {
241 "operator": "[OpenAI](https://openai.com)",
242 "respect": "[Yes](https://platform.openai.com/docs/bots)",
243 "function": "Search result generation.",
244 "frequency": "No information.",

regexToBrainrot / main.tsx (4 matches)

@stainless_em · Updated 3 months ago
300 { getRandomRegexExplanation, saveRegexExplanation, getRegexExplanationById },
301 ReactMarkdown,
302 { OpenAI },
303 { renderToString },
304 { jsx, jsxs, Fragment },
306 import("https://esm.town/v/stainless_em/brainrotdb"),
307 import("npm:react-markdown@7"),
308 import("https://esm.town/v/std/openai"),
309 import("npm:react-dom@19/server.browser"),
310 import("npm:react@19/jsx-runtime"),
336 }
337
338 const openai = new OpenAI();
339
340 const abortController = new AbortController();
341 const completion = await openai.chat.completions.create({
342 messages: [
343 {

dogfooding / main.tsx (3 matches)

@stevekrouse · Updated 3 months ago
86 />
87 <FeatureItem
88 href="https://www.val.town/v/std/openaiproxy"
89 title="OpenAI Proxy"
90 description="Provide a secure and efficient proxy for OpenAI API calls"
91 />
92 <FeatureItem

OpenAI / README.md (13 matches)

@Yogareddy107 · Updated 3 months ago
1# OpenAI - [Docs ↗](https://docs.val.town/std/openai)
2
3Use OpenAI's chat completion API with [`std/openai`](https://www.val.town/v/std/openai). This integration enables access to OpenAI's language models without needing to acquire API keys.
4
5For free Val Town users, [all calls are sent to `gpt-4o-mini`](https://www.val.town/v/std/openaiproxy?v=12#L85).
6
7## Basic Usage
8
9```ts title="Example" val
10import { OpenAI } from "https://esm.town/v/std/openai";
11
12const openai = new OpenAI();
13
14const completion = await openai.chat.completions.create({
15 messages: [
16 { role: "user", content: "Say hello in a creative way" },
58## Limits
59
60While our wrapper simplifies the integration of OpenAI, there are a few limitations to keep in mind:
61
62* **Usage Quota**: We limit each user to 10 requests per minute.
65If these limits are too low, let us know! You can also get around the limitation by using your own keys:
66
671. Create your own API key on [OpenAI's website](https://platform.openai.com/api-keys)
682. Create an [environment variable](https://www.val.town/settings/environment-variables?adding=true) named `OPENAI_API_KEY`
693. Use the `OpenAI` client from `npm:openai`:
70
71```ts title="Example" val
72import { OpenAI } from "npm:openai";
73
74const openai = new OpenAI();
75```
76
77
78[📝 Edit docs](https://github.com/val-town/val-town-docs/edit/main/src/content/docs/std/openai.mdx)

OpenAI / main.tsx (9 matches)

@Yogareddy107 · Updated 3 months ago
1import { type ClientOptions, OpenAI as RawOpenAI } from "npm:openai";
2
3/**
4 * API Client for interfacing with the OpenAI API. Uses Val Town credentials.
5 */
6export class OpenAI {
7 private rawOpenAIClient: RawOpenAI;
8
9 /**
10 * API Client for interfacing with the OpenAI API. Uses Val Town credentials.
11 *
12 * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.
19 */
20 constructor(options: Omit<ClientOptions, "baseURL" | "apiKey" | "organization"> = {}) {
21 this.rawOpenAIClient = new RawOpenAI({
22 ...options,
23 baseURL: "https://std-openaiproxy.web.val.run/v1",
24 apiKey: Deno.env.get("valtown"),
25 organization: null,
28
29 get chat() {
30 return this.rawOpenAIClient.chat;
31 }
32
33 get beta() {
34 return {
35 chat: this.rawOpenAIClient.beta.chat,
36 };
37 }

translateToEnglishWithOpenAI (1 file match)

@shlmt · Updated 1 week ago

testOpenAI (1 file match)

@stevekrouse · Updated 1 week ago
lost1991
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(req: Request): Promise<Response> {
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",