import { type ClientOptions, OpenAI as RawOpenAI } from "npm:openai";

/**
 * API Client for interfacing with the OpenAI API. Uses Val Town credentials.
 */
export class OpenAI {
  private rawOpenAIClient: RawOpenAI;

  /**
   * API Client for interfacing with the OpenAI API. Uses Val Town credentials.
   *
   * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.
   */
  constructor(options: Omit<ClientOptions, "baseURL" | "apiKey" | "organization"> = {}) {
    this.rawOpenAIClient = new RawOpenAI({
      ...options,
      baseURL: "https://std-openaiproxy.web.val.run/v1",
      apiKey: Deno.env.get("OPENAI_API_KEY"),
      organization: null,
    });
  }

  get chat() {
    return this.rawOpenAIClient.chat;
  }

  get beta() {
    return {
      chat: this.rawOpenAIClient.beta.chat,
    };
  }
}
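The constructor forwards its remaining options straight to `npm:openai`, so any other `ClientOptions` field (such as the `timeout` documented above) can be passed through. A minimal sketch, assuming the wrapper above is published at `https://esm.town/v/std/openai`:

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

// Wait at most 60 seconds per request instead of the 10-minute default;
// the option is handed unchanged to the underlying npm:openai client.
const openai = new OpenAI({ timeout: 60_000 });
```
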
# OpenAI - [Docs ↗](https://docs.val.town/std/openai)

Use OpenAI's chat completion API with [`std/openai`](https://www.val.town/v/std/openai). This integration enables access to OpenAI's language models without needing to acquire API keys.

For free Val Town users, [all calls are sent to `gpt-4o-mini`](https://www.val.town/v/std/openaiproxy?v=12#L85).

## Basic Usage

```ts title="Example" val
import { OpenAI } from "https://esm.town/v/std/openai";

const openai = new OpenAI();

const completion = await openai.chat.completions.create({
  messages: [
    { role: "user", content: "Say hello in a creative way" },
  ],
  model: "gpt-4o-mini",
});

console.log(completion.choices[0].message.content);
```

## Limits

While our wrapper simplifies the integration of OpenAI, there are a few limitations to keep in mind:

* **Usage Quota**: We limit each user to 10 requests per minute.

If these limits are too low, let us know! You can also get around the limitation by using your own keys:

1. Create your own API key on [OpenAI's website](https://platform.openai.com/api-keys)
2. Create an [environment variable](https://www.val.town/settings/environment-variables?adding=true) named `OPENAI_API_KEY`
3. Use the `OpenAI` client from `npm:openai`:

```ts title="Example" val
import { OpenAI } from "npm:openai";

const openai = new OpenAI();
```
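
With your own key in place, the client behaves like the standard `npm:openai` SDK and picks the key up from the `OPENAI_API_KEY` environment variable automatically. A minimal sketch of a full call (model and prompt are illustrative):

```ts
import { OpenAI } from "npm:openai";

// npm:openai reads OPENAI_API_KEY from the environment when no apiKey is passed.
const openai = new OpenAI();

const completion = await openai.chat.completions.create({
  messages: [{ role: "user", content: "Say hello in a creative way" }],
  model: "gpt-4o-mini",
});

console.log(completion.choices[0].message.content);
```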
export default async function server(request: Request): Promise<Response> {
  if (request.method === 'POST' && new URL(request.url).pathname === '/translate') {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const openai = new OpenAI();

    const { text, sourceLanguage, targetLanguage } = await request.json();

    const completion = await openai.chat.completions.create({
      messages: [
        {
import process from "node:process";
import { marked } from "npm:marked";
import { OpenAI } from "npm:openai";

function pm(...lines: string[]): string {
  );

  const client = new OpenAI({ apiKey: process.env.PERPLEXITY_API_KEY, baseURL: "https://api.perplexity.ai" });
  const response = await client.chat.completions.create({
    model: "sonar",
  }

  // Prepare the data for OpenAI
  const reviewData = reviews.map(review => ({
    reviewer: review.name,
  }));

  // Format the prompt for OpenAI
  const prompt = `
    You are judging a cake competition. Based on the following reviews of two cakes,

  try {
    // Make the API call to OpenAI
    const OPENAI_API_KEY = Deno.env.get("OPENAI_API_KEY");

    const response = await fetch("https://api.openai.com/v1/chat/completions", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "Authorization": `Bearer ${OPENAI_API_KEY}`,
      },
      body: JSON.stringify({

    if (!response.ok) {
      const errorData = await response.json();
      console.error("OpenAI API error:", errorData);
      throw new Error(`OpenAI API error: ${errorData.error?.message || "Unknown error"}`);
    }

    const data = await response.json();
    return data.choices[0].message.content.trim();
  } catch (error) {
    console.error("Error analyzing reviews with OpenAI:", error);
  }
}

export default async function server(request: Request): Promise<Response> {
  const { OpenAI } = await import("https://esm.town/v/std/openai");
  const openai = new OpenAI();

  if (request.method === "POST") {
    try {
      const { message } = await request.json();
      const completion = await openai.chat.completions.create({
        messages: [{ role: "user", content: message }],
        model: "gpt-4o-mini",
297export default async function server(request: Request): Promise<Response> {
298 const { OpenAI } = await import("https://esm.town/v/std/openai");
299 const openai = new OpenAI();
300
301 if (request.method === "POST") {
302 try {
303 const { message } = await request.json();
304 const completion = await openai.chat.completions.create({
305 messages: [{ role: "user", content: message }],
306 model: "gpt-4o-mini",
export default async function server(request: Request): Promise<Response> {
  if (request.method === "POST" && new URL(request.url).pathname === "/chat") {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const openai = new OpenAI();

    const body = await request.json();
    const messages = body.messages || [];

    const stream = await openai.chat.completions.create({
      model: "gpt-4o-mini",
      messages: messages,

export default async function server(request: Request): Promise<Response> {
  let OpenAI, sqlite, blob;
  try {
    // Dynamic imports with error handling
    const openAIModule = await import("https://esm.town/v/std/openai");
    const sqliteModule = await import("https://esm.town/v/stevekrouse/sqlite");
    const blobModule = await import("https://esm.town/v/std/blob");

    OpenAI = openAIModule.OpenAI;
    sqlite = sqliteModule.sqlite;
    blob = blobModule.blob;
  const { messages } = await request.json();
  try {
    if (!OpenAI) {
      throw new Error("OpenAI module not initialized");
    }

    const openai = new OpenAI();
    const chatCompletion = await openai.chat.completions.create({
      model: "gpt-4o-mini",
      messages: messages.map(m => ({

  try {
    if (!OpenAI) {
      throw new Error("OpenAI module not initialized");
    }

    const openai = new OpenAI();
    let mediaUrl;

    switch (mode) {
      case "image-gen":
        const imageResponse = await openai.images.generate({
          model: "dall-e-3",
          prompt: input,
          : "Transform a static photo into a dynamic, moving scene with subtle camera movements";

        const photoToVideoResponse = await openai.images.generate({
          model: "dall-e-3",
          prompt: videoGenerationPrompt,

      case "text-to-video":
        const textToVideoResponse = await openai.images.generate({
          model: "dall-e-3",
          prompt: `Create a cinematic video scene representing: ${input}. Make it look like a movie trailer.`,
export default async function server(request: Request): Promise<Response> {
  if (request.method === 'POST' && new URL(request.url).pathname === '/chat') {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const openai = new OpenAI();

    const body = await request.json();
    const completion = await openai.chat.completions.create({
      messages: body.messages,
      model: "gpt-4o-mini",