get_country_fact/main.tsx — 2 matches
1import { openai } from "npm:@ai-sdk/openai";
2import { generateText } from "npm:ai";
312}
13const { text: fact } = await generateText({
14model: openai("gpt-4o-mini"),
15system: "You are an expert in world trivia.",
16prompt: `Provide an interesting and fun fact about the country: ${body.country}.`,
get_recipes_by_country/main.tsx — 2 matches
1import { openai } from "npm:@ai-sdk/openai";
2import { generateText } from "npm:ai";
313const countriesList = body.countries.join(", ");
14const { text: recipes } = await generateText({
15model: openai("gpt-4o-mini"),
16system: "You are a culinary expert.",
17prompt: `Provide a list of popular recipes from the following countries: ${countriesList}.`,
fallacyDetectorAI/main.tsx — 7 matches
42];
4344// Mock fallacy detection for when OpenAI fails
45function mockFallacyDetection(text) {
46const lowercaseText = text.toLowerCase();
9596try {
97// Dynamically import OpenAI with error handling
98const openAIModule = await import("https://esm.town/v/std/openai").catch(err => {
99console.error("Failed to import OpenAI module:", err);
100throw new Error("Could not load AI analysis module");
101});
102103const OpenAI = openAIModule.OpenAI;
104const openai = new OpenAI();
105106const response = await openai.chat.completions.create({
107model: "gpt-4o-mini",
108messages: [
audioAIChat/main.tsx — 5 matches
136export default async function server(request: Request): Promise<Response> {
137if (request.method === 'POST') {
138const { OpenAI } = await import("https://esm.town/v/std/openai");
139const openai = new OpenAI();
140141// Parse multipart form data
148149// Transcribe audio
150const transcriptionResponse = await openai.audio.transcriptions.create({
151file: base64Audio,
152model: "whisper-1",
157158// Generate AI response
159const chatCompletion = await openai.chat.completions.create({
160messages: [
161{
175176// Generate audio response
177const speechResponse = await openai.audio.speech.create({
178model: "tts-1",
179voice: "nova",
avatarGenerator/main.tsx — 3 matches
82export default async function server(request: Request): Promise<Response> {
83if (request.method === 'POST') {
84const { OpenAI } = await import("https://esm.town/v/std/openai");
85const openai = new OpenAI();
86
87const { theme } = await request.json();
88
89const completion = await openai.chat.completions.create({
90messages: [
91{
75export default async function server(request: Request): Promise<Response> {
76if (request.method === 'POST') {
77const { OpenAI } = await import("https://esm.town/v/std/openai");
78const openai = new OpenAI();
79
80const { theme } = await request.json();
81
82const completion = await openai.chat.completions.create({
83messages: [
84{
MEDIANALIZE_PRO/Medicalreport.tsx — 5 matches
224export default async function server(request: Request): Promise<Response> {
225if (request.method === 'POST' && new URL(request.url).pathname === '/analyze') {
226const { OpenAI } = await import("https://esm.town/v/std/openai");
227const openai = new OpenAI();
228229const body = await request.json();
234235try {
236const completion = await openai.chat.completions.create({
237messages: [
238{
265});
266} catch (error) {
267console.error('OpenAI Analysis Error:', error);
268return new Response(JSON.stringify({
269diagnosis: 'Unable to generate analysis',
298299function parseAnalysis(analysis: string) {
300// Basic parsing of OpenAI response
301const sections = analysis.split('\n\n');
302return {
MEDIANALIZE_PRO/healthcare.tsx — 14 matches
222}
223224// Dynamically import OpenAI with error handling
225let OpenAI;
226try {
227const module = await import("https://esm.town/v/std/openai");
228OpenAI = module.OpenAI;
229} catch (importError) {
230console.error("OpenAI Import Error:", importError);
231return new Response(
232JSON.stringify({
233error: "Failed to import OpenAI module",
234details: String(importError),
235}),
244}
245246// Ensure OpenAI is available
247if (!OpenAI) {
248return new Response(
249JSON.stringify({
250error: "OpenAI module not found",
251}),
252{
260}
261262// Create OpenAI instance
263const openai = new OpenAI();
264265// Create OpenAI completion with comprehensive error handling
266let completion;
267try {
268completion = await openai.chat.completions.create({
269messages: body.messages,
270model: "gpt-4o-mini",
272});
273} catch (completionError) {
274console.error("OpenAI Completion Error:", completionError);
275return new Response(
276JSON.stringify({
311312export default async function server(request: Request): Promise<Response> {
313const { OpenAI } = await import("https://esm.town/v/std/openai");
314const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
315339};
340341const openai = new OpenAI();
342const KEY = extractKey(new URL(import.meta.url));
343const SCHEMA_VERSION = 3; // Increment schema version for tool support
627628// Call Maverick to create the tool definition
629const maverickCompletion = await openai.chat.completions.create({
630model: "gpt-4o-mini",
631response_format: { type: "json_object" },
717`;
718719const oracleCompletion = await openai.chat.completions.create({
720model: "gpt-4o-mini",
721response_format: { type: "json_object" },
783`;
784785const agentBCompletion = await openai.chat.completions.create({
786model: "gpt-4o-mini",
787response_format: { type: "json_object" },
916`;
917918const agentBCompletion = await openai.chat.completions.create({
919model: "gpt-4o-mini",
920response_format: { type: "json_object" },
11101111// Make completion call with the appropriate agent prompt
1112const analysisCompletion = await openai.chat.completions.create({
1113model: "gpt-4o-mini",
1114response_format: { type: "json_object" },
12691270// Make completion call with the appropriate agent prompt
1271const agentCompletion = await openai.chat.completions.create({
1272model: "gpt-4o-mini",
1273response_format: { type: "json_object" },
cerebras_coder/migrations — 5 matches
1import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";
2import OpenAI from "openai";
3import { ITERATIONS_TABLE, KEY, PROJECTS_TABLE, SCHEMA_VERSION } from "./migrations";
45const openai = new OpenAI({ apiKey: Deno.env.get("OPENAI_API_KEY") });
67export async function createProject(prompt: string) {
59}
6061// Example of using OpenAI (you'll need to adapt this to your specific use case)
62export async function generateText(prompt: string) {
63try {
64const completion = await openai.chat.completions.create({
65messages: [{ role: "user", content: prompt }],
66model: "gpt-4-turbo-preview",
71return completion.choices[0].message?.content || "No response";
72} catch (error) {
73console.error("OpenAI Error:", error);
74return "Error generating text";
75}