import { Hono } from "https://esm.sh/hono";
import { html } from "https://esm.sh/hono/html";
import { OpenAI } from "https://esm.town/v/std/openai";

const app = new Hono();

app.get("/market-data", async (c) => {
  const openai = new OpenAI();

  try {
    const completion = await openai.chat.completions.create({
      model: "gpt-4o-mini",
      messages: [
  const { message } = await c.req.json();

  const openai = new OpenAI();

  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini",
    messages: [
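
Both fragments above are cut off at the messages array. As a point of reference, a completed route using the same std/openai wrapper might look like the sketch below; the path, prompt handling, and response shape are assumptions rather than the original val's code.

app.post("/example-chat", async (c) => {
  const { message } = await c.req.json();
  const openai = new OpenAI();

  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini",
    messages: [{ role: "user", content: message }],
  });

  // The assistant's reply is on the first choice
  return c.json({ reply: completion.choices[0].message.content });
});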
if (request.method === 'POST' && new URL(request.url).pathname === '/generate-landing-page') {
  try {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const openai = new OpenAI();

    const { isInitial, imageBase64, existingCode, editPrompt } = await request.json();

    if (isInitial && imageBase64) {
      const visionResponse = await openai.chat.completions.create({
        model: "gpt-4-vision-preview",
        messages: [
    }

    const codeGenerationResponse = await openai.chat.completions.create({
      model: "gpt-4o-mini",
      messages: [
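
The vision request above is truncated before its message content. For a screenshot passed as imageBase64, the usual shape is a content array mixing text and an image_url data URI; the sketch below is an assumption about how that call could be completed, with placeholder prompt text.

const visionResponse = await openai.chat.completions.create({
  model: "gpt-4-vision-preview",
  max_tokens: 1000,
  messages: [
    {
      role: "user",
      content: [
        { type: "text", text: "Describe the layout, copy, and visual style of this landing page mockup." },
        // The uploaded screenshot travels as a base64 data URL
        { type: "image_url", image_url: { url: `data:image/png;base64,${imageBase64}` } },
      ],
    },
  ],
});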
const { intervieweeResponse, interviewPosition } = await request.json();

const { OpenAI } = await import("https://esm.town/v/std/openai");
const openai = new OpenAI();

const completion = await openai.chat.completions.create({
  messages: [
    {
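
A hedged sketch of how the truncated messages array might continue, reusing the fields already parsed from the request body; the system prompt wording and the model choice are assumptions, not the original val's code.

const completion = await openai.chat.completions.create({
  model: "gpt-4o-mini",
  messages: [
    {
      role: "system",
      content: `You are interviewing a candidate for the ${interviewPosition} position. Give short, constructive feedback on their answer.`,
    },
    { role: "user", content: intervieweeResponse },
  ],
});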
The app is set up so you can easily have a conversation between two people: as the speakers talk, it translates between the two selected languages and plays each reply back in the corresponding voice.

Add your OpenAI API key, and make sure to open the app in a separate window for the microphone to work.
import { Hono } from "npm:hono@3";
import { html } from "npm:hono@3/html";
import { OpenAI } from "npm:openai";

const app = new Hono();
// API key comes from the val's environment variables
const openai = new OpenAI({ apiKey: Deno.env.get("OPENAI_API_KEY_VOICE") });

class TranscriptionService {
  // Transcribes a recorded audio clip with Whisper
  async transcribeAudio(audioFile) {
    try {
      const transcription = await openai.audio.transcriptions.create({
        file: audioFile,
        model: "whisper-1",
      });
      return transcription;
    } catch (error) {
      console.error("OpenAI API error:", error);
      throw error;
    }
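
A sketch of how an upload route could feed the recorded clip into TranscriptionService; the route path and form field name are assumptions, and it assumes the Whisper response exposes its text field (the default JSON format).

app.post("/transcribe", async (c) => {
  // The browser posts the mic recording as multipart form data
  const form = await c.req.raw.formData();
  const audioFile = form.get("audio") as File;

  const service = new TranscriptionService();
  const transcription = await service.transcribeAudio(audioFile);
  return c.json({ text: transcription.text });
});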

  try {
    const response = await openai.chat.completions.create({
      model: "gpt-3.5-turbo",
      messages: [
    return c.text(translation);
  } catch (error) {
    console.error("OpenAI API error:", error);
    return c.text("Error occurred during translation", 500);
  }
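
The translation call's messages are elided above. One plausible shape is sketched below; the language and text variables are assumptions rather than names taken from the original val.

const response = await openai.chat.completions.create({
  model: "gpt-3.5-turbo",
  messages: [
    {
      role: "system",
      content: `Translate the user's message from ${sourceLanguage} to ${targetLanguage}. Reply with the translation only.`,
    },
    { role: "user", content: transcribedText },
  ],
});
const translation = response.choices[0].message.content;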

  try {
    const mp3 = await openai.audio.speech.create({
      model: "tts-1",
      voice: voice,
    });
  } catch (error) {
    console.error("OpenAI API error:", error);
    return c.text("Error occurred during speech generation", 500);
  }
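
The speech call is cut off before the audio is returned. A sketch of one way to finish it inside a Hono handler, assuming a textToSpeak variable holds the translated text:

const mp3 = await openai.audio.speech.create({
  model: "tts-1",
  voice: voice,
  input: textToSpeak,
});

// The SDK returns a fetch Response; hand the raw MP3 bytes back to the browser
const audioBuffer = await mp3.arrayBuffer();
return c.body(audioBuffer, 200, { "Content-Type": "audio/mpeg" });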
The app is set up so you can easily have a conversation between two people: as the speakers talk, it translates between the two selected languages and plays each reply back in the corresponding voice.

Add your OpenAI API key, and make sure to open the app in a separate window for the microphone to work.
import { Hono } from "npm:hono@3";
import { html } from "npm:hono@3/html";
import { cors } from 'npm:hono/cors';
import { OpenAI } from "npm:openai";

const app = new Hono();
const openai = new OpenAI({ apiKey: Deno.env.get("OPENAI_API_KEY_VOICE") });

class TranscriptionService {
  async transcribeAudio(audioFile) {
    try {
      const transcription = await openai.audio.transcriptions.create({
        file: audioFile,
        model: "whisper-1",
      });
      return transcription;
    } catch (error) {
      console.error('OpenAI API error:', error);
      throw error;
    }

  try {
    const response = await openai.chat.completions.create({
      model: "gpt-3.5-turbo",
      messages: [
    return c.text(translation);
  } catch (error) {
    console.error('OpenAI API error:', error);
    return c.text('Error occurred during translation', 500);
  }

  try {
    const mp3 = await openai.audio.speech.create({
      model: "tts-1",
      voice: voice,
    });
  } catch (error) {
    console.error('OpenAI API error:', error);
    return c.text('Error occurred during speech generation', 500);
  }
if (new URL(request.url).pathname === '/generate-idea') {
  try {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const openai = new OpenAI();

    const completion = await openai.chat.completions.create({
      messages: [
        {
console.log(`Received ${request.method} request to ${request.url}`);

const { OpenAI } = await import("https://esm.town/v/std/openai");
const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
const openai = new OpenAI();
const url = new URL(request.url);

console.log("Generated prompt:", prompt);

const completion = await openai.chat.completions.create({
  messages: [{ role: "user", content: prompt }],
  model: "gpt-4o-mini",
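
Given that both std/openai and stevekrouse/sqlite are imported, the completion is presumably persisted. A sketch of one way to do that, assuming sqlite.execute accepts { sql, args } statements as in the libsql client it wraps; the table name and columns are placeholders.

const reply = completion.choices[0].message.content;

// Create the table on first use, then record the prompt/response pair
await sqlite.execute(
  "CREATE TABLE IF NOT EXISTS responses (id INTEGER PRIMARY KEY AUTOINCREMENT, prompt TEXT, reply TEXT)",
);
await sqlite.execute({
  sql: "INSERT INTO responses (prompt, reply) VALUES (?, ?)",
  args: [prompt, reply],
});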
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(request: Request) {
  }

  const openai = new OpenAI();

  const summaryPrompt = "This is a news link: " + inputUrl

  try {
    // Get the article summary from OpenAI
    const summaryResponse = await openai.chat.completions.create({
      model: "gpt-4o-mini",
      response_format: { type: "json_object" },
    return new Response(
      JSON.stringify({
        error: `OpenAI processing failed: ${error.message}`,
        errorStack: error.stack,
      }),
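
With response_format set to json_object, the model's reply content is a JSON string. A minimal sketch of parsing it and returning it to the caller; the excerpt does not show the summary object's fields, so none are assumed here.

const summaryJson = JSON.parse(summaryResponse.choices[0].message.content ?? "{}");

return new Response(JSON.stringify(summaryJson), {
  headers: { "Content-Type": "application/json" },
});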