import { sqlToJSON } from "https://esm.town/v/nbbaier/sqliteExportHelpers?v=22";
import { db as allValsDb } from "https://esm.town/v/sqlite/db?v=9";
import OpenAI from "npm:openai";
import { truncateMessage } from "npm:openai-tokens";

export default async function(interval: Interval) {
  // ... (table creation and the diff of vals still missing embeddings elided; see the steps below)

  const openai = new OpenAI();
  for (const val of newVals) {
    const code = (await allValsDb.execute({
      // ... (query that fetches the val's code elided)
    })).rows[0][0];

    const embedding = await openai.embeddings.create({
      model: "text-embedding-3-small",
      input: truncateMessage(code, "text-embedding-3-small"),
    });
    // ... (store the embedding in Turso; remainder of the val elided)
  }
}
This val hosts an [HTTP server](https://janpaul123-valtownsemanticsearch.web.val.run/) that lets you search all vals based on vibes. If you search for "discord bot" it shows all vals that have "discord bot" vibes.

It does this by comparing [embeddings from OpenAI](https://platform.openai.com/docs/guides/embeddings) generated for the code of all public vals to an embedding of your search query.

This is an experiment to see if and how we want to incorporate semantic search in the actual Val Town search page.
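
To illustrate that comparison, here is a minimal sketch (not the code this val actually runs) that embeds the query with OpenAI and ranks pre-computed val embeddings by cosine similarity; the `StoredEmbedding` shape and the `rankVals` name are invented for the example.

```ts
import OpenAI from "npm:openai";

// Hypothetical shape for the pre-computed index; the real val reads these from Turso.
type StoredEmbedding = { valName: string; embedding: number[] };

function cosineSimilarity(a: number[], b: number[]): number {
  let dot = 0, normA = 0, normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}

export async function rankVals(query: string, stored: StoredEmbedding[]) {
  const openai = new OpenAI();
  const res = await openai.embeddings.create({
    model: "text-embedding-3-small",
    input: query,
  });
  const queryEmbedding = res.data[0].embedding;
  // Highest cosine similarity first = closest "vibe" match.
  return stored
    .map(({ valName, embedding }) => ({
      valName,
      score: cosineSimilarity(queryEmbedding, embedding),
    }))
    .sort((a, b) => b.score - a.score);
}
```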
Uses [Turso](https://turso.tech/) to search embeddings of all vals, via the [sqlite-vss](https://github.com/asg017/sqlite-vss) extension.

- Call OpenAI to generate an embedding for the search query.
- Query the `vss_vals_embeddings` table in Turso using `vss_search` (a sketch of this query follows the list).
  - The `vss_vals_embeddings` table has been generated by [janpaul123/indexValsTurso](https://www.val.town/v/janpaul123/indexValsTurso). It is not run automatically.
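
Roughly, the `vss_search` query can look like the sketch below; the Turso client setup, the env var names, and the `embedding` column name are assumptions for illustration, not taken from the actual val.

```ts
import { createClient } from "npm:@libsql/client";

// Assumed client setup; env var names are placeholders.
const turso = createClient({
  url: Deno.env.get("TURSO_URL")!,
  authToken: Deno.env.get("TURSO_AUTH_TOKEN")!,
});

export async function searchVals(queryEmbedding: number[], limit = 20) {
  // sqlite-vss k-nearest-neighbor lookup; the query vector is passed as a JSON array string.
  // (Older SQLite versions need vss_search_params(?, k) instead of a plain LIMIT.)
  return await turso.execute({
    sql: `
      SELECT rowid, distance
      FROM vss_vals_embeddings
      WHERE vss_search(embedding, ?)
      LIMIT ?`,
    args: [JSON.stringify(queryEmbedding), limit],
  });
}
```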
*Part of [Val Town Semantic Search](https://www.val.town/v/janpaul123/valtownsemanticsearch).*

Generates OpenAI embeddings for all public vals, and stores them in [Turso](https://turso.tech/), using the [sqlite-vss](https://github.com/asg017/sqlite-vss) extension.

- Create the `vals_embeddings` and `vss_vals_embeddings` tables in Turso if they don't already exist (see the sketch after this list).
- Get all val names from the [database of public vals](https://www.val.town/v/sqlite/db), made by [Achille Lacoin](https://www.val.town/u/pomdtr).
- Get all val names from the `vals_embeddings` table and compute the difference (which ones are missing).
- Iterate through all missing vals, get their code, get embeddings from OpenAI, and store the result in Turso.
- When finished, update the `vss_vals_embeddings` table so we can efficiently query it with the [sqlite-vss](https://github.com/asg017/sqlite-vss) extension.
  - This is blocked by a [bug in Turso](https://discord.com/channels/933071162680958986/1245378515679973420/1245378515679973420) that doesn't allow VSS indexes past a certain size.
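
A sketch of the table setup and the final copy step described above, assuming a two-column `vals_embeddings` schema and the 1536-dimensional output of `text-embedding-3-small` (both assumptions; the real val may differ):

```ts
import { createClient } from "npm:@libsql/client";

// Assumed client setup; env var names are placeholders.
const turso = createClient({
  url: Deno.env.get("TURSO_URL")!,
  authToken: Deno.env.get("TURSO_AUTH_TOKEN")!,
});

// Plain table holding one embedding per val.
await turso.execute(`
  CREATE TABLE IF NOT EXISTS vals_embeddings (
    id TEXT PRIMARY KEY,
    embedding BLOB
  )`);

// sqlite-vss virtual table; 1536 is the dimension of text-embedding-3-small vectors.
await turso.execute(`
  CREATE VIRTUAL TABLE IF NOT EXISTS vss_vals_embeddings
  USING vss0(embedding(1536))`);

// Last step of the pipeline: copy the stored embeddings into the VSS index
// so vss_search can query them.
await turso.execute(`
  INSERT INTO vss_vals_embeddings (rowid, embedding)
  SELECT rowid, embedding FROM vals_embeddings`);
```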
}

const endpoint = 'https://api.openai.com/v1/chat/completions';
const model = 'gpt-4';
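
The rest of this val is elided; the sketch below only shows how constants like these are typically used against the raw Chat Completions endpoint with `fetch`, and is not the original val's code.

```ts
const endpoint = "https://api.openai.com/v1/chat/completions";
const model = "gpt-4";

// Generic fetch call against the endpoint above; request/response shapes follow
// the public Chat Completions API.
const response = await fetch(endpoint, {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${Deno.env.get("OPENAI_API_KEY")}`,
  },
  body: JSON.stringify({
    model,
    messages: [{ role: "user", content: "Say hello" }],
  }),
});
const data = await response.json();
console.log(data.choices[0].message.content);
```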
import { OpenAI } from "https://esm.town/v/std/openai";

const openai = new OpenAI();

const completion = await openai.chat.completions.create({
  messages: [
    { role: "user", content: "Tell a story" },
  ],
  model: "gpt-4",
});

console.log(completion.choices[0].message.content);
import { Hono } from "npm:hono@3";
import { html } from "npm:hono@3/html";
import { OpenAI } from "npm:openai";

const app = new Hono();
// The npm `openai` constructor takes an options object rather than a bare key string.
const openai = new OpenAI({ apiKey: Deno.env.get("OPENAI_API_KEY_VOICE") });

class TranscriptionService {
  async transcribeAudio(audioFile) {
    try {
      const transcription = await openai.audio.transcriptions.create({
        file: audioFile,
        model: "whisper-1",
        // ... (remaining request options elided)
      });
      return transcription;
    } catch (error) {
      console.error("OpenAI API error:", error);
      throw error;
    }
  }
}

// ... (UI and earlier route code elided)

  try {
    const response = await openai.chat.completions.create({
      model: "gpt-3.5-turbo",
      messages: [
        // ... (translation prompt elided)
      ],
    });
    // ...
    return c.text(translation);
  } catch (error) {
    console.error("OpenAI API error:", error);
    return c.text("Error occurred during translation", 500);
  }

// ... (more handler code elided)

  try {
    const mp3 = await openai.audio.speech.create({
      model: "tts-1",
      voice: voice,
      // ... (input text and response handling elided)
    });
  } catch (error) {
    console.error("OpenAI API error:", error);
    return c.text("Error occurred during speech generation", 500);
  }
The app is set up so you can easily have a conversation between two people. It translates between the two selected languages, in each voice, as the speakers talk.

Add your OpenAI API Key, and make sure to open the app in a separate window for the mic to work.

Migrated from folder: Archive/translator
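
As a condensed sketch of how the three OpenAI calls above fit together (the `translateSpeech` helper and its parameters are illustrative, not the app's actual structure):

```ts
import { OpenAI } from "npm:openai";

const openai = new OpenAI({ apiKey: Deno.env.get("OPENAI_API_KEY_VOICE") });

// Illustrative pipeline: transcribe speech, translate the text, then synthesize audio.
export async function translateSpeech(
  audioFile: File,
  targetLanguage: string,
  voice: "alloy" | "echo" | "fable" | "onyx" | "nova" | "shimmer",
) {
  const transcription = await openai.audio.transcriptions.create({
    file: audioFile,
    model: "whisper-1",
  });

  const completion = await openai.chat.completions.create({
    model: "gpt-3.5-turbo",
    messages: [
      { role: "system", content: `Translate the user's text into ${targetLanguage}.` },
      { role: "user", content: transcription.text },
    ],
  });
  const translation = completion.choices[0].message.content ?? "";

  const speech = await openai.audio.speech.create({
    model: "tts-1",
    voice,
    input: translation,
  });

  return { translation, audio: await speech.arrayBuffer() };
}
```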
import { basicAuth } from "https://esm.town/v/pomdtr/basicAuth?v=62";
import { fetchText } from "https://esm.town/v/stevekrouse/fetchText";
import { chat } from "https://esm.town/v/stevekrouse/openai";
import cronstrue from "npm:cronstrue";
import { Hono } from "npm:hono@3";
await email({ subject: "Subject line", text: "Body of message" });

// OpenAI
import { OpenAI } from "https://esm.town/v/std/openai";
const openai = new OpenAI();
const completion = await openai.chat.completions.create({
  messages: [
    { role: "user", content: "Say hello in a creative way" },
  ],
  model: "gpt-4",
  max_tokens: 30,
});
import { fileToDataURL } from "https://esm.town/v/stevekrouse/fileToDataURL";
import { modifyImage } from "https://esm.town/v/stevekrouse/modifyImage";
import { chat } from "https://esm.town/v/stevekrouse/openai";
import { Hono } from "npm:hono@3";