1# OpenAI Streaming - Assistant and Threads
2
3An example of using OpenAI to stream back a chat with an assistant. This example sends two messages to the assistant and streams back the responses when they come in.
4
5Example response:
1import OpenAI from "npm:openai";
2const openai = new OpenAI();
3import process from "node:process";
4
5// Define our assistant.
6const assistant = await openai.beta.assistants.create({
7 name: "Val Tutor",
8 instructions: `You are a personal Val tutor.
14
15// Create a thread to chat in.
16const thread = await openai.beta.threads.create();
17
18// These are the messages we'll send to the assistant.
44 }, 100);
45
46 const message = await openai.beta.threads.messages.create(
47 thread.id,
48 { role: "user", content: messages[i] },
49 );
50
51 const run = openai.beta.threads.runs.stream(thread.id, {
52 assistant_id: assistant.id,
53 // Make sure we only display messages we haven't seen yet.
1import { getTweets } from "https://esm.town/v/geoffreylitt/getTweets";
2import { email } from "https://esm.town/v/std/email?v=12";
3import { OpenAI } from "https://esm.town/v/std/openai?v=4";
4import { discordWebhook } from "https://esm.town/v/stevekrouse/discordWebhook";
5import { twitterSearch } from "https://esm.town/v/stevekrouse/twitterSearch";
25];
26
27const openai = new OpenAI();
28
29export async function twitterAlert({ lastRunAt }: Interval) {
37
38 async function filterTweets(tweets) {
39 const completion = await openai.chat.completions.create({
40 messages: [
41 {
1import { getTweets } from "https://esm.town/v/geoffreylitt/getTweets";
2import { email } from "https://esm.town/v/std/email?v=12";
3import { OpenAI } from "https://esm.town/v/std/openai?v=4";
4import { discordWebhook } from "https://esm.town/v/stevekrouse/discordWebhook";
5import { twitterSearch } from "https://esm.town/v/stevekrouse/twitterSearch";
27];
28
29const openai = new OpenAI();
30
31export async function twitterAlert({ lastRunAt }: Interval) {
39
40 async function filterTweets(tweets) {
41 const completion = await openai.chat.completions.create({
42 messages: [
43 {
1import { getTweets } from "https://esm.town/v/geoffreylitt/getTweets";
2import { email } from "https://esm.town/v/std/email?v=12";
3import { OpenAI } from "https://esm.town/v/std/openai?v=4";
4import { discordWebhook } from "https://esm.town/v/stevekrouse/discordWebhook";
5import { twitterSearch } from "https://esm.town/v/stevekrouse/twitterSearch";
25];
26
27const openai = new OpenAI();
28
29export async function twitterAlert({ lastRunAt }: Interval) {
37
38 async function filterTweets(tweets) {
39 const completion = await openai.chat.completions.create({
40 messages: [
41 {
3import { basicAuth } from "https://esm.town/v/pomdtr/basicAuth?v=62";
4import { fetchText } from "https://esm.town/v/stevekrouse/fetchText";
5import { chat } from "https://esm.town/v/stevekrouse/openai";
6
7export default basicAuth(async (req) => {
132 await email({ subject: "Subject line", text: "Body of message" });
133
134 // OpenAI
135 import { OpenAI } from "https://esm.town/v/std/openai";
136 const openai = new OpenAI();
137 const completion = await openai.chat.completions.create({
138 messages: [
139 { role: "user", content: "Say hello in a creative way" },
1/** @jsxImportSource https://esm.sh/react */
2import OpenAI from "npm:openai";
3import { renderToString } from "npm:react-dom/server";
4
5// This uses my personal API key; you'll need to provide your own if
6// you fork this. We'll be adding support to the std/openai lib soon!
7const openai = new OpenAI();
8import { Hono } from "npm:hono@3";
9
38 });
39
40 // Setup the SSE connection and stream back the response. OpenAI handles determining
41 // which message is the correct response based on what was last read from the
42 // thread. This is likely vulnerable to race conditions.
58const app = new Hono();
59app.get("/", async (c) => {
60 const thread = await openai.beta.threads.create();
61 const assistant = await openai.beta.assistants.create({
62 name: "",
63 instructions:
114app.post("/post-message", async (c) => {
115 let message = await c.req.text();
116 await openai.beta.threads.messages.create(
117 c.req.query("threadId"),
118 { role: "user", content: message },
132 ));
133 };
134 const run = openai.beta.threads.runs.stream(threadId, {
135 assistant_id: assistantId,
136 // Make sure we only display messages we haven't seen yet.
1/** @jsxImportSource https://esm.sh/react */
2import { Hono } from "npm:hono@3";
3import OpenAI from "npm:openai";
4import { renderToString } from "npm:react-dom/server";
5
43 });
44};
45const openai = new OpenAI();
46
47const app = new Hono();
48app.get("/", async (c) => {
49 const thread = await openai.beta.threads.create();
50 const assistant = await openai.beta.assistants.create({
51 name: "",
52 instructions:
105 const message = c.req.query("message");
106
107 await openai.beta.threads.messages.create(
108 threadId,
109 { role: "user", content: message },
117 ));
118 };
119 const run = openai.beta.threads.runs.stream(threadId, {
120 assistant_id: assistantId,
121 // Make sure we only display messages we haven't seen yet.
1# ChatGPT Implemented in Val Town
2
3Demonstrates how to use assistants and threads with the OpenAI SDK and how to stream the response with Server-Sent Events.
4
5
10</p>
11
12**⚠️ Note: Requires your own OpenAI API key to get this to run in a fork**
13
14Migrated from folder: Archive/chatGPT
1import { getTweets } from "https://esm.town/v/geoffreylitt/getTweets";
2import { email } from "https://esm.town/v/std/email?v=12";
3import { OpenAI } from "https://esm.town/v/std/openai?v=4";
4import { discordWebhook } from "https://esm.town/v/stevekrouse/discordWebhook";
5import { twitterSearch } from "https://esm.town/v/stevekrouse/twitterSearch";
25];
26
27const openai = new OpenAI();
28
29export async function twitterAlert({ lastRunAt }: Interval) {
37
38 async function filterTweets(tweets) {
39 const completion = await openai.chat.completions.create({
40 messages: [
41 {