Val Town Code Search

API Access

You can access search results via the JSON API by adding format=json to your query string:

https://codesearch.val.run/${url}?q=openai&page=21&format=json
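For example, a minimal TypeScript sketch of fetching one page of results (this assumes the search endpoint is served at the site root and that the payload shape is undocumented, so log it before relying on specific fields):

// Fetch one page of search results as JSON (response shape is an assumption; inspect it first).
const res = await fetch("https://codesearch.val.run/?q=openai&page=1&format=json");
if (!res.ok) throw new Error(`Search request failed: ${res.status}`);
const results = await res.json();
console.log(results);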

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

Returns an array of strings in the format "username" or "username/projectName".
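A minimal sketch of calling the typeahead endpoint, assuming it returns a plain JSON array of strings as described above:

// Fetch typeahead suggestions; each entry is "username" or "username/projectName".
const res = await fetch("https://codesearch.val.run/typeahead?q=openai");
if (!res.ok) throw new Error(`Typeahead request failed: ${res.status}`);
const suggestions: string[] = await res.json();
for (const s of suggestions) console.log(s);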

Found 1580 results for "openai" (1259ms)

gptTools/readmeGPT (9 matches)

@nbbaier•Updated 2 weeks ago
import { type WriterOptions } from "./WriterOptions";
import { fetch } from "https://esm.town/v/std/fetch?v=4";
import OpenAI, { type ClientOptions } from "npm:openai";

export class ReadmeWriter {
  model: string;
  openai: OpenAI;
  apiKey: string;
  valtownKey: string;

  constructor(options: WriterOptions) {
    const { model, ...openaiOptions } = options;
    this.model = model ? model : "gpt-3.5-turbo";
    this.openai = new OpenAI(openaiOptions);
    this.valtownKey = Deno.env.get("valtown");
  }
  // ...
  }

  private async performOpenAICall(prompt: string) {
    try {
      const response = await this.openai.chat.completions.create({
        messages: [{ role: "system", content: prompt }],
        model: this.model,
        // ...

      if (!response.choices || response.choices.length === 0) {
        throw new Error("No response from OpenAI");
      }
      // ...
      if (!readme) {
        throw new Error("No readme returned by OpenAI. Try again.");
      }
      // ...
    const { id, code } = await this.getVal(username, valName);
    const prompt = this.createPrompt(code, userPrompt);
    const readme = await this.performOpenAICall(prompt);
    return { id, readme };
  }

gptTools/sqliteWriter (11 matches)

@nbbaier•Updated 2 weeks ago
import { type WriterOptions } from "./WriterOptions";
import { sqlite } from "https://esm.town/v/std/sqlite";
import OpenAI from "npm:openai";

interface QueryWriterOptons extends WriterOptions {
  // ...
  model: string;
  apiKey: string;
  openai: OpenAI;

  constructor(options: QueryWriterOptons) {
    const { table, model, ...openaiOptions } = options;
    this.table = table;
    this.model = model;
    // this.apiKey = openaiOptions.apiKey ? openaiOptions.apiKey : Deno.env.get("OPENAI_API_KEY");
    this.openai = new OpenAI(openaiOptions);
  }
  // ...
    try {
      const response = await this.openai.chat.completions.create({
        messages: [{ role: "system", content: prompt }],
        model: this.model,
        // ...

      if (!response.choices || response.choices.length === 0) {
        throw new Error("No response from OpenAI");
      }
      // ...
      if (!query) {
        throw new Error("No SQL returned from OpenAI. Try again.");
      }
      // ...
    try {
      const response = await this.openai.chat.completions.create({
        messages: [{ role: "system", content: prompt }],
        model: this.model,
        // ...

      if (!response.choices || response.choices.length === 0) {
        throw new Error("No response from OpenAI");
      }
      // ...
      if (!query) {
        throw new Error("No SQL returned from OpenAI. Try again.");
      }
      // ...

gptTools/WriterOptions (1 match)

@nbbaier•Updated 2 weeks ago
import { type ClientOptions } from "npm:openai";

export interface WriterOptions extends ClientOptions {

gptTools/draftReadme (10 matches)

@nbbaier•Updated 2 weeks ago
import { fetch } from "https://esm.town/v/std/fetch?v=4";
import OpenAI, { type ClientOptions } from "npm:openai";

export interface WriterOptions extends ClientOptions {
  // ...
}

async function performOpenAICall(prompt: string, model: string, openaiOptions: ClientOptions) {
  const openai = new OpenAI(openaiOptions);

  try {
    const response = await openai.chat.completions.create({
      messages: [{ role: "system", content: prompt }],
      model: model,
      // ...

    if (!response.choices || response.choices.length === 0) {
      throw new Error("No response from OpenAI");
    }
    // ...
    if (!readme) {
      throw new Error("No readme returned by OpenAI. Try again.");
    }
    // ...

async function draftReadme(options: WriterOptions) {
  const { username, valName, model = "gpt-3.5-turbo", userPrompt, ...openaiOptions } = options;
  const { id, code } = await getVal(username, valName);
  const prompt = createPrompt(code, userPrompt);
  const readme = await performOpenAICall(prompt, model, openaiOptions);
  return readme;
}

async function writeReadme(options: WriterOptions) {
  const { username, valName, model = "gpt-3.5-turbo", userPrompt, ...openaiOptions } = options;
  const { id, code } = await getVal(username, valName);
  const prompt = createPrompt(code, userPrompt);
  const readme = await performOpenAICall(prompt, model, openaiOptions);
  try {
    const update = await updateReadme(id, readme);

fetchAndStoreOpenAiUsage2/main.tsx (4 matches)

@nbbaier•Updated 2 weeks ago
import { createDayTotal } from "https://esm.town/v/nbbaier/createDayTotal";
import { cronEvalLogger as logger } from "https://esm.town/v/nbbaier/cronLogger";
import { fetchOpenAiUsageData } from "https://esm.town/v/nbbaier/fetchOpenAiUsageData";
import { updateBlobUsageDB } from "https://esm.town/v/nbbaier/updateBlobUsageDB";
import { blob } from "https://esm.town/v/std/blob?v=11";
// ...
import { DateTime } from "npm:luxon";

const fetchAndStoreOpenAiUsage = async (interval: Interval) => {
  const timeZone = "America/Chicago";
  const date = DateTime.now();
  // ...
  try {
    const { data, whisper_api_data, dalle_api_data } = await fetchOpenAiUsageData(today);

    const day_total = await createDayTotal(data, whisper_api_data, dalle_api_data);
    // ...
};

export default logger(fetchAndStoreOpenAiUsage);

openaiPricing/getAudioTotals (2 matches)

@nbbaier•Updated 2 weeks ago
import { openAiPricing } from "./openAiPricing";

export const getAudioTotals = (data) => {
  const totals = {};
  const { whisperPricing } = openAiPricing;
  for (const obj of data) {
    // Extract the snapshot_id and token totals from the current object

openaiPricing/getTextTotals (2 matches)

@nbbaier•Updated 2 weeks ago
import { openAiPricing } from "./openAiPricing";
import { email } from "https://esm.town/v/std/email?v=9";

// ...
    };
  } = {};
  const { textPricing } = openAiPricing;
  for (const obj of data) {
    const { snapshot_id, n_context_tokens_total, n_generated_tokens_total }: {

openaiPricing/getImageTotals (2 matches)

@nbbaier•Updated 2 weeks ago
import { openAiPricing } from "./openAiPricing";

export const getImageTotals = (data) => {
  const totals = {};
  const { dallePricing } = openAiPricing;
  for (const obj of data) {
    // Extract the snapshot_id and token totals from the current object

openaiPricing/fetchAndStoreOpenAiUsage (4 matches)

@nbbaier•Updated 2 weeks ago
import { createDayTotal } from "./createDayTotal";
import { fetchOpenAiUsageData } from "./fetchOpenAiUsageData";
import { updateBlobUsageDB } from "./updateBlobUsageDB";
import { fetch } from "https://esm.town/v/std/fetch";
// ...
import { DateTime } from "npm:luxon";

const fetchAndStoreOpenAiUsage = async () => {
  const timeZone = "America/Chicago";
  const date = DateTime.now();
  // ...
  try {
    const { data, whisper_api_data, dalle_api_data } = await fetchOpenAiUsageData("2024-04-01");
    console.log(data.length);
    console.log(whisper_api_data.length);
    // ...
};

await fetchAndStoreOpenAiUsage();

openaiPricing/openAiUsageTypes (1 match)

@nbbaier•Updated 2 weeks ago
} & Partial<Record<Month, Day>>;

export let openAiUsageTypes;

testOpenAI (1 file match)

@stevekrouse•Updated 1 day ago

testOpenAI (1 file match)

@shouser•Updated 3 days ago
lost1991
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(req: Request): Promise<Response> {
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",