import { getTweets } from "https://esm.town/v/geoffreylitt/getTweets";
import { email } from "https://esm.town/v/std/email?v=12";
import { OpenAI } from "https://esm.town/v/std/openai?v=4";
import { discordWebhook } from "https://esm.town/v/stevekrouse/discordWebhook";
import { twitterSearch } from "https://esm.town/v/stevekrouse/twitterSearch";
// ...
];

const openai = new OpenAI();

export async function twitterAlert({ lastRunAt }: Interval) {
  // ...

  async function filterTweets(tweets) {
    const completion = await openai.chat.completions.create({
      messages: [
        {
2import { cors } from "npm:hono/cors";
3import { embed, embedMany } from "npm:ai";
4import { openai } from "npm:@ai-sdk/openai";
5import lunr from "https://cdn.skypack.dev/lunr";
6
13}));
14
15openai.apiKey = Deno.env.get("OPENAI_API_KEY");
16
17class SemanticSearch {
55 async getEmbedding(text, modelName) {
56 const { embedding } = await embed({
57 model: openai.embedding(modelName),
58 value: text,
59 });
63 async getEmbeddings(texts, modelName) {
64 const { embeddings } = await embedMany({
65 model: openai.embedding(modelName),
66 values: texts,
67 });
import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON?v=41";
import { runVal } from "https://esm.town/v/std/runVal";
import { OpenAI } from "https://esm.town/v/std/openai";
const openai = new OpenAI();

export const generateValCode = async (
  // ...
  \`\`\`
  `;
  const response = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
// import { openaiChatCompletion } from "https://esm.town/v/andreterron/openaiChatCompletion";
import { OpenAI } from "https://esm.town/v/std/openai";
const openai = new OpenAI();

export const generateValCode = async (
  // ...
  \`\`\`
  `;
  // NOTE: the openaiKey/organization/body wrapper below matches the older
  // openaiChatCompletion helper (see the commented-out import above), not the
  // flat params that std/openai's chat.completions.create expects.
  const response = await openai.chat.completions.create({
    openaiKey: key,
    organization: org,
    body: {
// ...
export let generateValCodeAPI = (description: string) =>
  generateValCode(
    process.env.OPENAI_API_KEY,
    description,
  );
import { Hono } from "npm:hono";
import { cors } from 'npm:hono/cors';
import { stream, streamSSE } from "https://deno.land/x/hono@v4.3.11/helper.ts";
import { OpenAI } from "npm:openai";
import { ai } from "https://esm.town/v/yawnxyz/ai";

const app = new Hono();
const openai = new OpenAI();

app.use('*', cors({
// ...
  let pageResult = "";

  // 2. Do one OpenAI inference to expand that URL to a longer page description
  const pageDescriptionStream = await togetherAI.inference("mistralai/Mixtral-8x7B-Instruct-v0.1", {
    prompt: `
# Use GPT to generate vals on your account!

Describe the val that you need, call this function, and you'll get a new val in your workspace, generated by OpenAI's API!

First, ensure you have a [Val Town API Token](https://www.val.town/settings/api), then call `@andreterron.createGeneratedVal({...})` like this [example](https://www.val.town/v/andreterron.untitled_tomatoKiwi):
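A minimal sketch of what that call can look like (the esm.town import path and the argument fields here are assumptions; the linked example shows the real signature):

```ts
// Sketch only, not the documented API: the import path and the `description`
// field are assumptions; adapt them to the linked example.
// Assumes your Val Town API Token is available to the val as described above
// (e.g. via an environment variable).
import { createGeneratedVal } from "https://esm.town/v/andreterron/createGeneratedVal";

const newVal = await createGeneratedVal({
  description: "A val that returns the current weather for a given city",
});

console.log(newVal); // inspect whatever the val returns about the generated val
```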
import { Hono } from "npm:hono";
import { cors } from 'npm:hono/cors';
import { stream } from "https://deno.land/x/hono@v4.3.11/helper.ts";
import { OpenAI } from "npm:openai";

const app = new Hono();
const openai = new OpenAI();

app.use('*', cors({
// ...

const SOURCE_URL = ""; // leave blank for Deno Deploy / native
// const SOURCE_URL = "https://yawnxyz-openAIHonoChatStreamSample.web.val.run"; // Val Town as generator - no SSE
// const SOURCE_URL = "https://funny-crow-81.deno.dev"; // Deno Deploy as generator

// ...
  <head>
    <meta charset="UTF-8" />
    <title>OpenAI Streaming Example</title>
    <style>
      body {
// ...
  </head>
  <body>
    <h1>OpenAI Streaming Example</h1>
    <label for="prompt">Prompt:</label>
    <input type="text" id="prompt" value="tell me a joke" />
// ...
  return stream(c, async (stream) => {
    try {
      const chatStream = await openai.chat.completions.create({
        model: "gpt-4",
        messages: [{ role: "user", content: prompt }],
import OpenAI from "npm:openai";

const openai = new OpenAI();

export default async (req) => {
  // ...

  // Generate the AI response
  const stream = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [{