import type { ChatCompletion, ChatCompletionCreateParamsNonStreaming, Message } from "npm:@types/openai";

async function getOpenAI() {
  // if you don't have a key, use our std library version
  if (Deno.env.get("OPENAI_API_KEY") === undefined) {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    return new OpenAI();
  } else {
    const { OpenAI } = await import("npm:openai");
    return new OpenAI();
  }
}

/**
 * Initiates a chat conversation with OpenAI's GPT model and retrieves the content of the first response.
 * This function can handle both single string inputs and arrays of message objects.
 * It supports various GPT models; the model can be chosen per call via `options`.
 *
 * @param input a prompt string, or an array of chat messages
 * @param options any other chat completion parameters (model, max_tokens, temperature, ...)
 * @returns the full ChatCompletion, plus the first choice's text as `content`
 */
export async function chat(
  input: string | Message[],
  options?: Omit<ChatCompletionCreateParamsNonStreaming, "messages">,
): Promise<ChatCompletion & { content: string }> {
  const openai = await getOpenAI();
  const messages = Array.isArray(input) ? input : [{ role: "user", content: input }];
  const createParams: ChatCompletionCreateParamsNonStreaming = {
    max_tokens: 30,
    model: "gpt-3.5-turbo",
    ...options,
    messages,
  };
  const completion = await openai.chat.completions.create(createParams);

  return { ...completion, content: completion.choices[0].message.content };
}
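Because `chat` spreads the full `ChatCompletion` into its return value next to the extracted `content`, a caller can read the reply text and token usage from a single call. A minimal sketch, with an illustrative prompt and model override that are not part of the val itself:

```ts
import { chat } from "https://esm.town/v/stevekrouse/openai";

// `chat` returns the raw ChatCompletion plus a convenience `content` field.
const reply = await chat("Give me one sentence about Deno.", {
  model: "gpt-4", // illustrative override; any chat-completion model your key can access works
  max_tokens: 60,
});

console.log(reply.content);                      // text of the first choice
console.log(reply.usage?.total_tokens ?? "n/a"); // usage metadata from the underlying completion
```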
# OpenAI ChatGPT helper function

This val uses your OpenAI token if you have one, and falls back to @std/openai if you don't, so it provides limited OpenAI usage for free.

```ts
import { chat } from "https://esm.town/v/stevekrouse/openai";

const { content } = await chat("Hello, GPT!");
console.log(content);
```

`chat` also accepts an array of messages, plus any other chat completion options such as a model override:

```ts
import { chat } from "https://esm.town/v/stevekrouse/openai";

const { content } = await chat(
  [
    { role: "system", content: "You are a helpful assistant." },
    { role: "user", content: "Hello, GPT!" },
  ],
  { model: "gpt-4", max_tokens: 50 },
);
console.log(content);
```
import { OpenAI } from "https://esm.town/v/std/openai?v=2";

const openai = new OpenAI();
const functionExpression = await openai.chat.completions.create({
  messages: [
    { role: "user", content: "whats the weather in sf" },
  ],
  // model and token limit assumed here to complete the truncated call
  model: "gpt-4",
  max_tokens: 30,
});
console.log(functionExpression.choices[0].message.content);