Val Town Code Search

API Access

You can access search results via the JSON API by adding format=json to your query:

https://codesearch.val.run/?q=openai&page=137&format=json

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

This returns an array of strings in the format "username" or "username/projectName".
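
For example, here is a minimal sketch of calling both endpoints from TypeScript (Deno). The search path and the shape of the search JSON are assumptions based on the example URL above; only the typeahead response format is documented here.

// Sketch: query the code search JSON API (path and response shape assumed).
const searchRes = await fetch("https://codesearch.val.run/?q=openai&page=1&format=json");
const searchResults = await searchRes.json();
console.log(searchResults);

// Typeahead suggestions: documented above to return an array of strings
// in the form "username" or "username/projectName".
const typeaheadRes = await fetch("https://codesearch.val.run/typeahead?q=openai");
const suggestions: string[] = await typeaheadRes.json();
console.log(suggestions);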

Found 1578 results for "openai" (1706ms)

untitled_orangeImpala / main.tsx (2 matches)

@stevekrouse • Updated 1 year ago
export const untitled_orangeImpala = fetchJSON(
  "https://api.openai.com/v1/usage?date=2023-11-01",
  {
    headers: {
      authorization: "Bearer " + Deno.env.get("openai"),
    },
  },

model / main.tsx (3 matches)

@jacoblee93 • Updated 1 year ago
import { ChatOpenAI } from "langchain/chat_models/openai";

const model = new ChatOpenAI({
  temperature: 0.9,
  openAIApiKey: @me.secrets.OPENAI_API_KEY,
});

gpt4Example / main.tsx (3 matches)

@stevekrouse • Updated 1 year ago
import { OpenAI } from "npm:openai";

const openai = new OpenAI();
let chatCompletion = await openai.chat.completions.create({
  messages: [{
    role: "user",

gpt4Example / README.md (1 match)

@stevekrouse • Updated 1 year ago
This uses the brand new `gpt-4-1106-preview`.

To use this, set `OPENAI_API_KEY` in your [Val Town Secrets](https://www.val.town/settings/secrets).

Migrated from folder: Archive/IntroVideo/gpt4Example

gpt4TurboExample / main.tsx (3 matches)

@stevekrouse • Updated 1 year ago
import { OpenAI } from "npm:openai";

const openai = new OpenAI();
let chatCompletion = await openai.chat.completions.create({
  messages: [{ role: "user", content: "Teach me a word I don't know" }],
  model: "gpt-4-1106-preview",

emojiSearchBot / main.tsx (3 matches)

@stevekrouse • Updated 1 year ago
import { twitterJSON } from "https://esm.town/v/stevekrouse/twitterJSON";
import process from "node:process";
import OpenAI from "npm:openai";

const openai = new OpenAI({ apiKey: process.env.openai });

export async function emojiSearchBot({ lastRunAt }: Interval) {
  ...
  let attachment = result.includes.media.find(m => m.media_key === latestTweet.attachments.media_keys[0]);
  if (attachment.type !== "photo") return;
  const response = await openai.chat.completions.create({
    model: "gpt-4-vision-preview",
    messages: [

gpt4vDemo / main.tsx (3 matches)

@stevekrouse • Updated 1 year ago
import process from "node:process";
import OpenAI from "npm:openai";

const openai = new OpenAI({ apiKey: process.env.openai });

async function main() {
  const response = await openai.chat.completions.create({
    model: "gpt-4-vision-preview",
    messages: [

gptExample / main.tsx (3 matches)

@stevekrouse • Updated 1 year ago
import process from "node:process";
import { OpenAI } from "npm:openai";

const openai = new OpenAI({ apiKey: process.env.openai });
let chatCompletion = await openai.chat.completions.create({
  messages: [{ role: "user", content: "Make a short joke or pun" }],
  model: "gpt-3.5-turbo",

rateArticleRelevance / main.tsx (3 matches)

@iakovos • Updated 1 year ago
export const rateArticleRelevance = async (interests: string, article: any) => {
  const { default: OpenAI } = await import("npm:openai");
  const openai = new OpenAI({ apiKey: process.env.OPENAI_KEY });

  try {
    ...
      Give a score from 0 to 10. Why did you give this score? Respond with the score only.
    `;
    const response = await openai.chat.completions.create({
      messages: [
        {

modelInvoke / main.tsx (3 matches)

@stevekrouse • Updated 1 year ago
import process from "node:process";
import { ChatOpenAI } from "npm:langchain/chat_models/openai";

const model = new ChatOpenAI({
  temperature: 0.9,
  openAIApiKey: process.env.openai,
});

testOpenAI (1 file match)

@stevekrouse • Updated 1 day ago

testOpenAI (1 file match)

@shouser • Updated 3 days ago
lost1991

import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(req: Request): Promise<Response> {
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",