Val Town Code Search

API Access

You can access search results via the JSON API by adding format=json to your query:

https://codesearch.val.run/?q=openai&page=179&format=json
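
For example, a minimal sketch of fetching one page of results from Deno/TypeScript (the exact shape of the JSON payload is an assumption here; inspect it for your own query):

// Fetch one page of search results as JSON.
const query = "openai";
const res = await fetch(
  `https://codesearch.val.run/?q=${encodeURIComponent(query)}&page=1&format=json`,
);
if (!res.ok) throw new Error(`Search request failed: ${res.status}`);
const results = await res.json();
console.log(results);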

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

Returns an array of strings in the format "username" or "username/projectName".
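
A minimal sketch of requesting typeahead suggestions, assuming the response body is that plain JSON array of strings:

// Fetch typeahead suggestions for a prefix.
const prefix = "open";
const res = await fetch(
  `https://codesearch.val.run/typeahead?q=${encodeURIComponent(prefix)}`,
);
const suggestions: string[] = await res.json();
// Each entry is either "username" or "username/projectName".
for (const s of suggestions) console.log(s);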

Found 2,042 results for "openai" (2,033 ms)

instructorExample • main.tsx • 3 matches

@inkpotmonkey • Updated 1 year ago
1   import Instructor from "https://esm.sh/@instructor-ai/instructor";
2   import OpenAI from "https://esm.sh/openai";
3   import { z } from "https://esm.sh/zod";
4
5   const openAISecrets = {
6     apiKey: getApiKey(),
7     organization: getOrganisationKey(),
8   };
9
10  const oai = new OpenAI(openAISecrets);
11
12  const client = Instructor({

weatherGPT • main.tsx • 3 matches

@liaolile • Updated 1 year ago
1   import { email } from "https://esm.town/v/std/email?v=11";
2   import { fetch } from "https://esm.town/v/std/fetch";
3   import { OpenAI } from "npm:openai";
4
5   let location = "shenzhen";
9   ).then(r => r.json());
10
11  const openai = new OpenAI();
12  let chatCompletion = await openai.chat.completions.create({
13    messages: [{
14      role: "user",

weatherGPT • README.md • 1 match

@liaolile • Updated 1 year ago
1   If you fork this, you'll need to set `OPENAI_API_KEY` in your [Val Town Secrets](https://www.val.town/settings/secrets).

gpt4Example • main.tsx • 4 matches

@kyutarou • Updated 1 year ago
1   import { OpenAI } from "npm:openai";
2   Deno.env.get("OPENAI_API_KEY");
3
4   const openai = new OpenAI();
5   let chatCompletion = await openai.chat.completions.create({
6     messages: [{
7       role: "user",

gpt4Example • README.md • 1 match

@kyutarou • Updated 1 year ago
3   This uses the brand new `gpt-4-1106-preview`.
4
5   To use this, set `OPENAI_API_KEY` in your [Val Town Secrets](https://www.val.town/settings/secrets).

get_weather_message • main.tsx • 1 match

@cosmo • Updated 1 year ago
1   import { chat } from "https://esm.town/v/cosmo/chat_openai";
2   import { getCurrentWeather } from "https://esm.town/v/cosmo/get_current_weather";

chat_openai • main.tsx • 3 matches

@cosmo • Updated 1 year ago
1   const { default: OpenAI } = await import("npm:openai");
2
3   export async function chat(apiKey, messages) {
4     const openai = new OpenAI({ apiKey });
5
6     return openai.chat.completions.create({
7       messages,
8       model: "gpt-3.5-turbo",

draftReadme • main.tsx • 10 matches

@nbbaier • Updated 1 year ago
1   import { fetch } from "https://esm.town/v/std/fetch?v=4";
2   import OpenAI, { type ClientOptions } from "npm:openai";
3
4   export interface WriterOptions extends ClientOptions {
40  }
41
42  async function performOpenAICall(prompt: string, model: string, openaiOptions: ClientOptions) {
43    const openai = new OpenAI(openaiOptions);
44
45    try {
46      const response = await openai.chat.completions.create({
47        messages: [{ role: "system", content: prompt }],
48        model: model,
50
51      if (!response.choices || response.choices.length === 0) {
52        throw new Error("No response from OpenAI");
53      }
54
56
57      if (!readme) {
58        throw new Error("No readme returned by OpenAI. Try again.");
59      }
60
83
84  async function draftReadme(options: WriterOptions) {
85    const { username, valName, model = "gpt-3.5-turbo", userPrompt, ...openaiOptions } = options;
86    const { id, code } = await getVal(username, valName);
87    const prompt = createPrompt(code, userPrompt);
88    const readme = await performOpenAICall(prompt, model, openaiOptions);
89    return readme;
90  }
91
92  async function writeReadme(options: WriterOptions) {
93    const { username, valName, model = "gpt-3.5-turbo", userPrompt, ...openaiOptions } = options;
94    const { id, code } = await getVal(username, valName);
95    const prompt = createPrompt(code, userPrompt);
96    const readme = await performOpenAICall(prompt, model, openaiOptions);
97    try {
98      const update = await updateReadme(id, readme);

WriterOptions • main.tsx • 1 match

@nbbaier • Updated 1 year ago
1   import { type ClientOptions } from "npm:openai";
2
3   export interface WriterOptions extends ClientOptions {

draftReadme • README.md • 3 matches

@nbbaier • Updated 1 year ago
1   # Code Documentation Assistant
2
3   The Code Documentation Assistant is an AI-powered tool that helps generate documentation for code. It uses the OpenAI GPT-3.5 Turbo model to generate readme files in GitHub-flavored markdown based on the provided code.
4
5   ## Usage
24    - `username` (string): The username of the code owner.
25    - `valName` (string): The name of the Val containing the code.
26    - `model` (optional, default: "gpt-3.5-turbo"): The OpenAI model to use for generating the readme.
27    - `userPrompt` (optional): Additional prompt to include in the documentation.
28
44    - `username` (string): The username of the code owner.
45    - `valName` (string): The name of the Val containing the code.
46    - `model` (optional, default: "gpt-3.5-turbo"): The OpenAI model to use for generating the readme.
47    - `userPrompt` (optional): Additional prompt to include in the documentation.
48

openai_enrichment • 6 file matches

@stevekrouse • Updated 1 hour ago

openaiproxy • 2 file matches

@MM05 • Updated 19 hours ago
reconsumeralization

import { OpenAI } from "https://esm.town/v/std/openai";
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";
/**
 * Practical Implementation of Collective Content Intelligence
 * Bridging advanced AI with collaborative content creation
 */
exp

kwhinnery_openai