import process from "node:process";
import { runVal } from "https://esm.town/v/std/runVal";

export const gpt3Unsafe = runVal("patrickjm.gpt3", {
  prompt: "Write a haiku about being cool:",
  openAiKey: process.env.openai,
});
import process from "node:process";

export const gpt4_playground = (async (query) => {
  const { OpenAI } = await import("https://deno.land/x/openai/mod.ts");
  const openAI = new OpenAI(process.env.OPENAI_KEY);
  const chatCompletion = openAI.createChatCompletion({
    model: "gpt-4",
    messages: [
      // The caller's query is sent as a single user message.
      { role: "user", content: query },
    ],
  });
  console.log("GPT-4 takes a while...");
  return await chatCompletion;
})("Please explain how OpenAI GPT-4 is better than GPT-3");
  <li style="margin-bottom:6px">Reference your vals: <div style="${CSScodeStyling};">@me.fizz.split('buzz').length</div></li>
  <li style="margin-bottom:6px">Reference others' vals: <div style="${CSScodeStyling};">@stevekrouse.moreBuzz()</div></li>
  <li style="margin-bottom:6px">Reference personal secrets: <div style="${CSScodeStyling};">@me.secrets.openai</div></li>
  <li style="margin-bottom:6px">Import from npm: <div style="${CSScodeStyling};">const _ = await import("npm:lodash-es")</div></li>
  <li>Run keyboard shortcut: <div style="${CSScodeStyling};">cmd+enter</div></li>
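The npm-import tip from the list above can also stand alone as its own val; a minimal sketch, with a hypothetical val name and assuming lodash-es exposes chunk as a named export:

export const npmImportDemo = (async () => {
  // Same pattern as the tip above: load lodash-es through the npm: specifier.
  const _ = await import("npm:lodash-es");
  return _.chunk(["fizz", "buzz", "fizz", "buzz"], 2); // [["fizz", "buzz"], ["fizz", "buzz"]]
})();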
import process from "node:process";

export const openaiCompletion = async (prompt) => {
  const { OpenAI } = await import("https://deno.land/x/openai/mod.ts");
  const openAI = new OpenAI(process.env.OPENAI_API_KEY);
  const completion = openAI.createCompletion({
    model: "text-davinci-003",
    prompt: prompt,
  });
  return await completion;
};
import { runVal } from "https://esm.town/v/std/runVal";

export let demoOpenAIGPTSummary = await runVal(
  "zzz.OpenAISummary",
  confession,
  {
import { fetch } from "https://esm.town/v/std/fetch";

export let gpt3 = async (prompt: string, openAiApiKey: string): Promise<string> => {
  if (!prompt || !openAiApiKey) {
    let cat = await fetch("https://catfact.ninja/fact");
    let { fact } = await cat.json();
    // No prompt or key provided: fall back to a cat fact instead of calling OpenAI.
    return (
      "Please provide both a prompt and an OpenAI API key. Here is a cat fact instead: " + fact
    );
  }
  const content = await fetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    body: JSON.stringify({
      // Minimal chat-completions payload; the model choice here is illustrative.
      model: "gpt-3.5-turbo",
      messages: [{ role: "user", content: prompt }],
    }),
    headers: {
      "Authorization": `Bearer ${openAiApiKey}`,
      "Content-Type": "application/json",
    },
  })
    .then((res) => res.json())
    .then((json) => json?.choices?.[0]?.message?.content);
  return content;
};
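A hedged usage sketch for the gpt3 helper above; the prompt mirrors the gpt3Unsafe example, and the OPENAI_API_KEY name follows the other snippets in this file:

import process from "node:process";

// If the key is missing, gpt3 falls back to returning a cat fact rather than calling OpenAI.
const haiku = await gpt3(
  "Write a haiku about being cool:",
  process.env.OPENAI_API_KEY ?? "",
);
console.log(haiku);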
import process from "node:process";

export const streamingTest = (async () => {
  const { OpenAI } = await import("https://esm.sh/langchain/llms/openai");
  // To enable streaming, we pass in `streaming: true` to the LLM constructor.
  // Additionally, we pass in a handler for the `handleLLMNewToken` event.
  const chat = new OpenAI({
    maxTokens: 25,
    streaming: true,
    openAIApiKey: process.env.OPENAI_API_KEY,
  });
  const response = await chat.call("Tell me a joke.", undefined, [
    {
      // Log each streamed token as it arrives.
      handleLLMNewToken(token: string) {
        console.log({ token });
      },
    },
  ]);
  return response;
})();
  `;
  console.log({ prompt });
  const response = await fetch("https://api.openai.com/v1/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "Authorization": "Bearer " + process.env.OPENAI_API_KEY, // Replace with your OpenAI API Key
    },
    body: JSON.stringify({
import { openAiFreeUsage } from "https://esm.town/v/patrickjm/openAiFreeUsage";

export let openAiFreeQuotaExceeded = () =>
  openAiFreeUsage.exceeded;
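One way the guard above might be combined with the runVal pattern from earlier in this file; a sketch only, not the val's documented usage, and it assumes openAiFreeQuotaExceeded is in scope:

import process from "node:process";
import { runVal } from "https://esm.town/v/std/runVal";

// Sketch: skip the OpenAI call entirely once the shared free quota is exhausted.
export const guardedHaiku = openAiFreeQuotaExceeded()
  ? "Free quota exceeded; skipping the OpenAI call."
  : await runVal("patrickjm.gpt3", {
    prompt: "Write a haiku about being cool:",
    openAiKey: process.env.openai,
  });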
import { runVal } from "https://esm.town/v/std/runVal";

export let demoOpenAIGPT4Summary = await runVal(
  "zzz.OpenAISummary",
  confession,
);