
* fork this val
* use the fork's HTTP endpoint URL (in the form "https://XXX-openaichatcompletion.web.val.run") as AI server URL
* **define an environment variable called "OpenAIChatCompletion"** with any kind of content (but without any blanks or control characters, e.g., a [UUID](https://rozek-uuidv4_generator.web.val.run)) and use that as your personal access key

Now, you can ask everybody to provide their own AI credentials and still use the OpenAI access provided by Val.Town for your personal tests.
18
> **Nota bene: if the environment variable "OpenAIChatCompletion" has not been defined, access to your fork's endpoint is free for everybody!**
20
In addition to the described authorization, this val also provides resource "throttling" (using val [floatingQuotaTracker](https://www.val.town/v/rozek/floatingQuotaTracker) in sqlite tables "OpenAIChatCompletion_Info" and "OpenAIChatCompletion_Log") and calculates some access statistics (using val [InvocationTracker](https://www.val.town/v/rozek/InvocationTracker) in sqlite tables "OpenAIChatCompletion_Usage_Info" and "OpenAIChatCompletion_Usage_Log")
91export default async function server(request: Request): Promise<Response> {
92 const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
93 const { OpenAI } = await import("https://esm.town/v/std/openai");
94 const KEY = "cloneChatGPT";
95 const SCHEMA_VERSION = 3;
104
105 if (request.method === "POST") {
106 const openai = new OpenAI();
107 const { messages } = await request.json();
108
109 try {
110 const completion = await openai.chat.completions.create({
111 messages: messages,
112 model: "gpt-4o-mini",
Use Cerebras for AI on the backend like so:

37const { OpenAI } = await import("https://esm.sh/openai");
38const client = new OpenAI({
39 apiKey: Deno.env.get("CEREBRAS_API_KEY"),
40 baseURL: "https://api.cerebras.ai/v1"
83 const { messages } = await request.json();
84
85 const { OpenAI } = await import("https://esm.sh/openai");
86 // Cerebras Inference
87 // Very important: do not change this
88 // Keep these comments so we remember not to change this
89 const client = new OpenAI({
90 apiKey: Deno.env.get("CEREBRAS_API_KEY"),
91 baseURL: "https://api.cerebras.ai/v1",
107 if (request.method === "POST" && new URL(request.url).pathname === "/chat") {
108 try {
109 const { OpenAI } = await import("https://esm.town/v/std/openai");
110 const openai = new OpenAI();
111
112 const { messages } = await request.json();
113
114 const completion = await openai.chat.completions.create({
115 messages: messages,
116 model: "gpt-4o-mini",
122 });
123 } catch (error) {
124 console.error("OpenAI Error:", error);
125 return new Response(
126 JSON.stringify({
1import OpenAI from "https://esm.sh/openai";
2
3function parseValResponse(response: string) {
28 const system = await (await fetch(`${import.meta.url.split("/").slice(0, -1).join("/")}/system_prompt.txt`)).text();
29
30 const openai = new OpenAI({
31 baseURL: "https://openrouter.ai/api/v1",
32 apiKey: Deno.env.get("OPEN_ROUTER_KEY"),
33 });
34 console.log(messages);
35 const completion = await openai.chat.completions.create({
36 model: "deepseek/deepseek-r1",
37 messages: [
1import { OpenAI } from "https://esm.town/v/std/openai";
2
3export default async function translateToEnglishWithOpenAI(text: string) {
4 const openai = new OpenAI();
5 const completion = await openai.chat.completions.create({
6 messages: [
7 {
1import { OpenAI } from "https://esm.town/v/std/openai";
2
3const openai = new OpenAI();
4
5const completion = await openai.chat.completions.create({
6 messages: [
7 { role: "user", content: "Say hello in a creative way" },

  <library>
  ## OpenAI

  Val Town includes a free, proxied OpenAI:

  ```ts
  import { OpenAI } from "https://esm.town/v/std/openai";
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    messages: [
      { role: "user", content: "Say hello in a creative way" },
  ```

  OpenAI only works on the server. If the val includes client-side code, use dynamic imports to import this module in the server function, e.g.:
  `const { OpenAI } = await import("https://esm.town/v/std/openai");`
  </library>

3import React, { useEffect, useState } from "https://esm.sh/react@18.2.0";
4import { blob } from "https://esm.town/v/std/blob";
5import { OpenAI } from "https://esm.town/v/std/openai";
6
7// Operational Data Science Tool Report Generator
8async function generateDataScienceToolReport() {
9 const openai = new OpenAI();
10
11 // Comprehensive Data Science and Database Tool Categories
49 // Advanced Operational Analysis
50 async function generateOperationalInsights(category: string) {
51 const operationalCompletion = await openai.chat.completions.create({
52 model: "gpt-4o-mini",
53 messages: [