Val Town Code SearchReturn to Val Town

API Access

You can access search results via JSON API by adding format=json to your query:

https://codesearch.val.run/?q=openai&page=139&format=json

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=openai

Returns an array of strings in format "username" or "username/projectName"

Found 1576 results for "openai" (1386ms)

pipeSampleLLMBind — main.tsx — 1 match

@webup•Updated 1 year ago
7 const mb = await getModelBuilder({
8 type: "chat",
9 provider: "openai",
10 });
11 const model = await mb();

openaiOpenAPI — main.tsx — 1 match

@stevekrouse•Updated 1 year ago
1export let openaiOpenAPI = `
2openapi: 3.0.0
3info:
44 };
45 let openApiResponse = await fetch(
46 "https://api.openai.com/v1/chat/completions",
47 requestOptions,
48 );

browserlessScrapeExample — main.tsx — 1 match

@vtdocs•Updated 1 year ago
8 method: "POST",
9 body: JSON.stringify({
10 "url": "https://en.wikipedia.org/wiki/OpenAI",
11 "elements": [{
12 // The second <p> element on the page

conversationalQAChainEx — main.tsx — 10 matches

@jacoblee93•Updated 1 year ago
2
3export const conversationalQAChainEx = (async () => {
4 const { ChatOpenAI } = await import(
5 "https://esm.sh/langchain/chat_models/openai"
6 );
7 const { HNSWLib } = await import(
8 "https://esm.sh/langchain/vectorstores/hnswlib"
9 );
10 const { OpenAIEmbeddings } = await import(
11 "https://esm.sh/langchain/embeddings/openai"
12 );
13 const { ConversationalRetrievalQAChain } = await import(
14 "https://esm.sh/langchain/chains"
15 );
16 const gpt35 = new ChatOpenAI({
17 openAIApiKey: process.env.OPENAI_API_KEY,
18 modelName: "gpt-3.5-turbo",
19 temperature: 0,
20 });
21 const gpt4 = new ChatOpenAI({
22 openAIApiKey: process.env.OPENAI_API_KEY,
23 modelName: "gpt-4",
24 temperature: 0,
27 ["Hello world", "Bye bye", "hello nice world", "bye", "hi"],
28 [{ id: 2 }, { id: 1 }, { id: 3 }, { id: 4 }, { id: 5 }],
29 new OpenAIEmbeddings({
30 openAIApiKey: process.env.OPENAI_API_KEY,
31 }),
32 );

untitled_silverPinniped — main.tsx — 3 matches

@jacoblee93•Updated 1 year ago
3export const untitled_silverPinniped = (async () => {
4 const { ChatPromptTemplate } = await import("npm:langchain/prompts");
5 const { ChatOpenAI } = await import("npm:langchain/chat_models/openai");
6 const { StringOutputParser } = await import(
7 "npm:langchain/schema/output_parser"
15 ["human", "{input}"],
16 ]);
17 const model = new ChatOpenAI({
18 modelName: "gpt-4",
19 temperature: 0.2,
20 openAIApiKey: process.env.OPENAI_API_KEY,
21 });
22 // Output parser converts the chat message into a raw string. Also works with streaming.

ask_gpt4 — main.tsx — 3 matches

@scio•Updated 1 year ago
2
3export const ask_gpt4 = async (query) => {
4 const { OpenAI } = await import("https://deno.land/x/openai/mod.ts");
5 const openAI = new OpenAI(process.env.OPENAI_KEY);
6 const chatCompletion = await openAI.createChatCompletion({
7 model: "gpt-4",
8 messages: [

openAiTextCompletion — main.tsx — 7 matches

@patrickjm•Updated 1 year ago
1import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON?v=41";
2
3export let openAiTextCompletion = async (params: {
4 /** https://beta.openai.com/account/api-keys */
5 apiKey: string,
6 /** Optional. https://beta.openai.com/account/org-settings */
7 org?: string,
8
9 // REST args, see https://beta.openai.com/docs/api-reference/completions/create
10 prompt: string,
11 model?: string,
27 if (!params.apiKey) {
28 throw new Error(
29 "Please provide 'apiKey' param. See: https://beta.openai.com/account/api-keys "
30 );
31 }
33 args.stream = false;
34
35 const response = await fetchJSON("https://api.openai.com/v1/completions", {
36 method: "POST",
37 headers: {
38 Authorization: `Bearer ${params.apiKey}`,
39 ...(params.org ? { "OpenAI-Organization": params.org } : {}),
40 },
41 body: JSON.stringify(args),

getVectorStoreBuilder — main.tsx — 1 match

@webup•Updated 1 year ago
5 type: "memory" | "baas";
6 provider?: "pinecone" | "milvus";
7} = { type: "memory" }, embed: "openai" | "huggingface" = "openai") {
8 const { cond, matches } = await import("npm:lodash-es");
9 const builder = await getModelBuilder({

complete — main.tsx — 5 matches

@webup•Updated 1 year ago
2
3export const complete = async (prompt: string | object, options = {}) => {
4 // Initialize OpenAI API stub
5 const { Configuration, OpenAIApi } = await import("https://esm.sh/openai");
6 const configuration = new Configuration({
7 apiKey: process.env.OPENAI,
8 });
9 const openai = new OpenAIApi(configuration);
10 // Request chat completion
11 const completion = await openai.createCompletion({
12 model: "text-davinci-003",
13 prompt,

testOpenAI — 1 file match

@stevekrouse•Updated 15 hours ago

testOpenAI — 1 file match

@shouser•Updated 2 days ago
lost1991
import { OpenAI } from "https://esm.town/v/std/openai"; export default async function(req: Request): Promise<Response> { if (req.method === "OPTIONS") { return new Response(null, { headers: { "Access-Control-Allow-Origin": "*",