Val Town Code Search

API Access

You can access search results via the JSON API by adding format=json to your query:

https://codesearch.val.run/?q=openai&page=141&format=json
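For example, a minimal TypeScript sketch of fetching one page of results (this assumes the search endpoint is the site root with the q, page, and format parameters shown above; the shape of the returned JSON is not documented here, so it is returned untyped):

// Fetch one page of code search results as JSON.
// Assumption: the search endpoint is the site root ("/").
async function searchVals(query: string, page = 1): Promise<unknown> {
  const url = new URL("https://codesearch.val.run/");
  url.searchParams.set("q", query);
  url.searchParams.set("page", String(page));
  url.searchParams.set("format", "json");
  const res = await fetch(url);
  if (!res.ok) throw new Error(`Search request failed: ${res.status}`);
  return res.json();
}

const results = await searchVals("openai");
console.log(results);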

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

Returns an array of strings in the format "username" or "username/projectName".
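A similar sketch for the typeahead endpoint (assuming the response body is exactly the JSON array of strings described above):

// Fetch typeahead suggestions, e.g. "stevekrouse" or "stevekrouse/testOpenAI".
async function typeaheadSuggestions(prefix: string): Promise<string[]> {
  const url = new URL("https://codesearch.val.run/typeahead");
  url.searchParams.set("q", prefix);
  const res = await fetch(url);
  if (!res.ok) throw new Error(`Typeahead request failed: ${res.status}`);
  return await res.json() as string[];
}

console.log(await typeaheadSuggestions("openai"));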

Found 1,569 results for "openai" (640ms)

newChatGPT35 • main.tsx • 2 matches

@bingo16 • Updated 1 year ago
14 };
15 const getCompletion = async () => {
16   const response = await fetch("https://api.openai.com/v1/chat/completions", {
17     method: "POST",
18     headers: {
19       "Content-Type": "application/json",
20       Authorization: `Bearer ${process.env.openaiKey}`,
21     },
22     body: JSON.stringify(postData),

rateArticleRelevance • main.tsx • 4 matches

@vandyand • Updated 1 year ago
2
3 export const rateArticleRelevance = async (interests: string, article: any) => {
4   const { default: OpenAI } = await import("npm:openai");
5   const openai = new OpenAI({
6     apiKey: untitled_tealCoral.OPENAI_API_KEY,
7   });
8   try {
13     Give a score from 0 to 10. Why did you give this score? Respond with the score only.
14     `;
15     const response = await openai.chat.completions.create({
16       messages: [
17         {

hackerNewsAuthors • main.tsx • 1 match

@guidoism • Updated 1 year ago
17   "worrydream", // Bret Victor (dynamicland)
18   "stevewoz", // Steve Wozniak (apple)
19   "sama", // Sam Altman (openai, yc)
20   "geoff", // Geoff Ralston (yc)
21   "dang", // Dan G (hn)

langchainEx • main.tsx • 3 matches

@stevekrouse • Updated 1 year ago
2
3 export const langchainEx = (async () => {
4   const { OpenAI } = await import("https://esm.sh/langchain/llms/openai");
5   const { PromptTemplate } = await import("https://esm.sh/langchain/prompts");
6   const { LLMChain } = await import("https://esm.sh/langchain/chains");
7   const model = new OpenAI({
8     temperature: 0.9,
9     openAIApiKey: process.env.openai,
10     maxTokens: 100,
11   });
2
3 export const conversationalRetrievalQAChainSummaryMemory = (async () => {
4   const { ChatOpenAI } = await import(
5     "https://esm.sh/langchain/chat_models/openai"
6   );
7   const { OpenAIEmbeddings } = await import(
8     "https://esm.sh/langchain/embeddings/openai"
9   );
10   const { ConversationSummaryMemory } = await import(
17     "https://esm.sh/langchain/chains"
18   );
19   const chatModel = new ChatOpenAI({
20     openAIApiKey: process.env.OPENAI_API_KEY,
21   });
22   /* Create the vectorstore */
28     ],
29     [{ id: 2 }, { id: 1 }, { id: 3 }],
30     new OpenAIEmbeddings({
31       openAIApiKey: process.env.OPENAI_API_KEY,
32     }),
33   );

OpenAISummary • main.tsx • 3 matches

@zzz • Updated 1 year ago
5
6 // Create a summary from a given text using GPT 4
7 export const OpenAISummary = async (text: string, config: {
8   apiKey?: string;
9   jwt?: string;
14   const { success, reset } = await upstashRateLimiter(
15     "anon",
16     "@zzz.OpenAISummary",
17     2,
18     "60 s",
31   }
32   const agent = await AIAgent(
33     apiKey || process.env.OPENAI_API_KEY_GPT4,
34   );
35   const response = await agent.summarize(text, modelName);

getModelBuilder • main.tsx • 14 matches

@webup • Updated 1 year ago
3 export async function getModelBuilder(spec: {
4   type?: "llm" | "chat" | "embedding";
5   provider?: "openai" | "huggingface";
6 } = { type: "llm", provider: "openai" }, options?: any) {
7   const { extend, cond, matches, invoke } = await import("npm:lodash-es");
8   // Set up LangSmith tracer
17   // Set up API key for each providers
18   const args = extend({ callbacks }, options);
19   if (spec?.provider === "openai")
20     args.openAIApiKey = process.env.OPENAI;
21   else if (spec?.provider === "huggingface")
22     args.apiKey = process.env.HUGGINGFACE;
24   const setup = cond([
25     [
26       matches({ type: "llm", provider: "openai" }),
27       async () => {
28         const { OpenAI } = await import("npm:langchain/llms/openai");
29         return new OpenAI(args);
30       },
31     ],
32     [
33       matches({ type: "chat", provider: "openai" }),
34       async () => {
35         const { ChatOpenAI } = await import("npm:langchain/chat_models/openai");
36         return new ChatOpenAI(args);
37       },
38     ],
39     [
40       matches({ type: "embedding", provider: "openai" }),
41       async () => {
42         const { OpenAIEmbeddings } = await import(
43           "npm:langchain/embeddings/openai"
44         );
45         return new OpenAIEmbeddings(args);
46       },
47     ],

elevenlabsTTS • main.tsx • 1 match

@ale_annini • Updated 1 year ago
3
4 export const elevenlabsTTS = async (req, res) => {
5   // https://platform.openai.com/docs/api-reference/images/create
6   // https://ale_annini-elevenlabstts.express.val.run/?args=[%22{\%22text\%22:\%22it%20beautiful\%22}%22]
7   const payload = {

chatGPTExample • main.tsx • 1 match

@maxdrake • Updated 1 year ago
4 let repsonse_obj = await chatGPT(
5   "hello assistant",
6   [], // this can be an empty list, or if you're using this to continue a conversation, you can pass in someting of the form: https://platform.openai.com/docs/guides/chat/introduction
7   API_KEY
8 );

webscrapeWikipediaIntro • main.tsx • 1 match

@vtdocs • Updated 1 year ago
4 const cheerio = await import("npm:cheerio");
5 const html = await fetchText(
6   "https://en.wikipedia.org/wiki/OpenAI",
7 );
8 const $ = cheerio.load(html);

testOpenAI • 1 file match

@shouser • Updated 1 day ago

testOpenAI • 1 file match

@stevekrouse • Updated 1 day ago
lost1991
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(req: Request): Promise<Response> {
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",