Val Town Code Search

API Access

You can access search results via the JSON API by adding format=json to your query:

https://codesearch.val.run/?q=openai&page=140&format=json
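
For example, a minimal TypeScript sketch of fetching a page of results (this assumes the search endpoint lives at the site root and returns a JSON body; the exact response shape is not documented here, so it is left untyped):

// Request the first page of JSON results for the query "openai".
const res = await fetch("https://codesearch.val.run/?q=openai&page=1&format=json");
if (!res.ok) throw new Error(`Search request failed: ${res.status}`);
const results: unknown = await res.json();
console.log(results);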

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

Returns an array of strings in the format "username" or "username/projectName".
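
A minimal TypeScript sketch of calling it (the array-of-strings shape comes from the description above):

// Request typeahead suggestions for the prefix "openai".
const res = await fetch("https://codesearch.val.run/typeahead?q=openai");
if (!res.ok) throw new Error(`Typeahead request failed: ${res.status}`);
const suggestions: string[] = await res.json();
suggestions.forEach((s) => console.log(s)); // e.g. "username" or "username/projectName"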

Found 1,569 results for "openai" (752ms)

annoy • main.tsx • 2 matches

@ajax • Updated 1 year ago
50 `;
51 console.log({ prompt });
52 const response = await fetch("https://api.openai.com/v1/completions", {
53 method: "POST",
54 headers: {
55 "Content-Type": "application/json",
56 "Authorization": "Bearer " + process.env.OPENAI_API_KEY, // Replace with your OpenAI API Key
57 },
58 body: JSON.stringify({

openAiFreeQuotaExceeded • main.tsx • 3 matches

@patrickjm • Updated 1 year ago
1import { openAiFreeUsage } from "https://esm.town/v/patrickjm/openAiFreeUsage";
2
3export let openAiFreeQuotaExceeded = () =>
4 openAiFreeUsage.exceeded;

demoOpenAIGPT4Summary • main.tsx • 2 matches

@zzz • Updated 1 year ago
2import { runVal } from "https://esm.town/v/std/runVal";
3
4export let demoOpenAIGPT4Summary = await runVal(
5 "zzz.OpenAISummary",
6 confession,
7);

TokenizerDemo • main.tsx • 1 match

@zzz • Updated 1 year ago
1import { Tokenizer } from "https://esm.town/v/zzz/Tokenizer";
2
3// Demo of tokenizer to mimic behavior of https://platform.openai.com/tokenizer
4// Tokenizer uses "gpt-3.5-turbo" model by default but this demo uses davinci to match the playground
5export const TokenizerDemo = (async () => {

weatherTomorrowGpt3Example • main.tsx • 1 match

@patrickjm • Updated 1 year ago
4export let weatherTomorrowGpt3Example = weatherTomorrowGpt3({
5 city: "New York City",
6 openAiKey: process.env.openai_key,
7});

openAiModeration • main.tsx • 5 matches

@patrickjm • Updated 1 year ago
2
3/**
4 * Calls the OpenAI moderation model. Useful for determining if OpenAI will flag something you did.
5 * https://platform.openai.com/docs/api-reference/moderations
6 */
7export let openAiModeration = async ({
8 apiKey,
9 input,
15}) => {
16 if (!apiKey) {
17 throw new Error("You must provide an OpenAI API Key");
18 }
19 const body: { model?: string, input: string|string[] } = {
24 }
25 const result = await fetchJSON(
26 "https://api.openai.com/v1/moderations",
27 {
28 method: "POST",

weatherTomorrowGpt3 • main.tsx • 2 matches

@patrickjm • Updated 1 year ago
2import { simpleWeather } from "https://esm.town/v/patrickjm/simpleWeather";
3
4export let weatherTomorrowGpt3 = (params: { openAiKey: string, city: string }) =>
5 simpleWeather(params.city).then((weather) =>
6 gpt3({
7 openAiKey: params.openAiKey,
8 prompt: `
9 Given a JSON sequence, give a short, plain-English summary about the weather tomorrow.

chat • main.tsx • 5 matches

@steveb1313 • Updated 1 year ago
6 options = {},
7) => {
8 // Initialize OpenAI API stub
9 const { Configuration, OpenAIApi } = await import("https://esm.sh/openai");
10 const configuration = new Configuration({
11 apiKey: process.env.openAIAPI,
12 });
13 const openai = new OpenAIApi(configuration);
14 // Request chat completion
15 const messages = typeof prompt === "string"
16 ? [{ role: "user", content: prompt }]
17 : prompt;
18 const { data } = await openai.createChatCompletion({
19 model: "gpt-3.5-turbo-0613",
20 messages,

textToImageDalle • main.tsx • 3 matches

@stevekrouse • Updated 1 year ago
2
3export const textToImageDalle = async (
4 openAIToken: string,
5 prompt: string,
6 n: number = 1,
17 };
18 } = await fetchJSON(
19 "https://api.openai.com/v1/images/generations",
20 {
21 method: "POST",
22 headers: {
23 "Content-Type": "application/json",
24 "Authorization": `Bearer ${openAIToken}`,
25 },
26 body: JSON.stringify({

untitled_chocolateSquid • main.tsx • 4 matches

@jacoblee93 • Updated 1 year ago
2
3export const untitled_chocolateSquid = (async () => {
4 const { ChatOpenAI } = await import(
5 "https://esm.sh/langchain@0.0.146/chat_models/openai"
6 );
7 const { LLMChain } = await import("https://esm.sh/langchain@0.0.146/chains");
16 ["human", humanTemplate],
17 ]);
18 const chat = new ChatOpenAI({
19 temperature: 0,
20 openAIApiKey: process.env.OPENAI_API_KEY,
21 });
22 const chain = new LLMChain({

testOpenAI • 1 file match

@shouser • Updated 1 day ago

testOpenAI • 1 file match

@stevekrouse • Updated 1 day ago
lost1991
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(req: Request): Promise<Response> {
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",