1/** @jsxImportSource https://esm.sh/react */
2import { Hono } from "npm:hono@3";
3import OpenAI from "npm:openai";
4import { renderToString } from "npm:react-dom/server";
5
43 });
44};
45const openai = new OpenAI();
46
47const app = new Hono();
48app.get("/", async (c) => {
49 const thread = await openai.beta.threads.create();
50 const assistant = await openai.beta.assistants.create({
51 name: "",
52 instructions:
105 const message = c.req.query("message");
106
107 await openai.beta.threads.messages.create(
108 threadId,
109 { role: "user", content: message },
117 ));
118 };
119 const run = openai.beta.threads.runs.stream(threadId, {
120 assistant_id: assistantId,
121 // Make sure we only display messages we haven't seen yet.
1# ChatGPT Implemented in Val Town
2
3Demonstrates how to use assistants and threads with the OpenAI SDK and how to stream the response with Server-Sent Events.
4
5
10</p>
11
12**⚠️ Note: Requires your own OpenAI API key to get this to run in a fork**
13
14Migrated from folder: Archive/chatGPT
1import { getTweets } from "https://esm.town/v/geoffreylitt/getTweets";
2import { email } from "https://esm.town/v/std/email?v=12";
3import { OpenAI } from "https://esm.town/v/std/openai?v=4";
4import { discordWebhook } from "https://esm.town/v/stevekrouse/discordWebhook";
5import { twitterSearch } from "https://esm.town/v/stevekrouse/twitterSearch";
25];
26
27const openai = new OpenAI();
28
29export async function twitterAlert({ lastRunAt }: Interval) {
37
38 async function filterTweets(tweets) {
39 const completion = await openai.chat.completions.create({
40 messages: [
41 {
1import { getTweets } from "https://esm.town/v/geoffreylitt/getTweets";
2import { email } from "https://esm.town/v/std/email?v=12";
3import { OpenAI } from "https://esm.town/v/std/openai?v=4";
4import { discordWebhook } from "https://esm.town/v/stevekrouse/discordWebhook";
5import { twitterSearch } from "https://esm.town/v/stevekrouse/twitterSearch";
25];
26
27const openai = new OpenAI();
28
29export async function twitterAlert({ lastRunAt }: Interval) {
37
38 async function filterTweets(tweets) {
39 const completion = await openai.chat.completions.create({
40 messages: [
41 {
2import { cors } from "npm:hono/cors";
3import { embed, embedMany } from "npm:ai";
4import { openai } from "npm:@ai-sdk/openai";
5import lunr from "https://cdn.skypack.dev/lunr";
6
13}));
14
15openai.apiKey = Deno.env.get("OPENAI_API_KEY");
16
17class SemanticSearch {
55 async getEmbedding(text, modelName) {
56 const { embedding } = await embed({
57 model: openai.embedding(modelName),
58 value: text,
59 });
63 async getEmbeddings(texts, modelName) {
64 const { embeddings } = await embedMany({
65 model: openai.embedding(modelName),
66 values: texts,
67 });
1import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON?v=41";
2import { runVal } from "https://esm.town/v/std/runVal";
3import { OpenAI } from "https://esm.town/v/std/openai";
4const openai = new OpenAI();
5
6export const generateValCode = async (
19 \`\`\`
20 `;
21 const response = await openai.chat.completions.create({
22 model: "gpt-4o",
23 messages: [
1// import { openaiChatCompletion } from "https://esm.town/v/andreterron/openaiChatCompletion";
2import { OpenAI } from "https://esm.town/v/std/openai";
3const openai = new OpenAI();
4
5export const generateValCode = async (
20 \`\`\`
21 `;
22 const response = await openai.chat.completions.create({
23 openaiKey: key,
24 organization: org,
25 body: {
4export let generateValCodeAPI = (description: string) =>
5 generateValCode(
6 process.env.OPENAI_API_KEY,
7 description,
8 );
2import { cors } from 'npm:hono/cors';
3import { stream, streamSSE } from "https://deno.land/x/hono@v4.3.11/helper.ts";
4import { OpenAI } from "npm:openai";
5import { ai } from "https://esm.town/v/yawnxyz/ai";
6
7
8const app = new Hono();
9const openai = new OpenAI();
10
11app.use('*', cors({
98 let pageResult = "";
99
100   // 2. Do one OpenAI inference to expand that URL to a longer page description
101 const pageDescriptionStream = await togetherAI.inference("mistralai/Mixtral-8x7B-Instruct-v0.1", {
102 prompt: `