discreetOrangeAntelope/main.tsx (7 matches)

import { openai } from "npm:@ai-sdk/openai";
import ValTown from "npm:@valtown/sdk";
import { generateObject, generateText } from "npm:ai";

const generateImplStart = Date.now();
const { text: implementation } = await generateText({
  model: openai("gpt-4o"),
  system: `
# VALTOWN HTTP Cloud Function Generator Prompt

5. **AI API Integration (if applicable)**:
   - If the toolDescription indicates that creative enrichment or additional processing via AI is needed, integrate an AI API call into the function.
   - Import { openai } from "npm:@ai-sdk/openai" and { generateText } from "npm:ai".
   - Use the model "gpt-4o-mini".
   - Construct a detailed, context-appropriate prompt for the AI call and integrate its response into the function’s output.

---typescript
import { openai } from "npm:@ai-sdk/openai";
import { generateText } from "npm:ai";
}
const { text: analysis } = await generateText({
  model: openai("gpt-4o-mini"),
  system: "You are a creative code analyst.",
  prompt: "Analyze the following recent changes in the code: " + body.recent_changes,

const { object: manifestoConfig } = await generateObject({
  model: openai("gpt-4o"),
  schema: z.object({
    manifestoConfigPureJson: z.string(),

const { object: updatedManifestoConfigTypescript } = await generateObject({
  model: openai("gpt-4o"),
  system: promptForUpdatedManifest,
  schema: z.object({
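The two generateObject calls above pair a model with a zod schema so the reply comes back as a typed object instead of free text. Below is a minimal, self-contained sketch of that pattern; the schema field and prompt are illustrative stand-ins, not the val's actual ones.

import { openai } from "npm:@ai-sdk/openai";
import { generateObject } from "npm:ai";
import { z } from "npm:zod";

// Hypothetical schema mirroring the manifestoConfigPureJson field seen above:
// a single string field that must contain pure JSON.
const { object } = await generateObject({
  model: openai("gpt-4o"),
  schema: z.object({
    configPureJson: z.string(),
  }),
  prompt: "Return a JSON string describing a minimal tool manifest.",
});

console.log(JSON.parse(object.configPureJson));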
harmoniousPlumTapir/main.tsx (1 match)

"description": "A sample blah manifest demonstrating various tool types and configurations.",
"env": {
  "OPENAI_API_KEY": "your-openai-api-key-here",
},
"tools": [
get_country_fact/main.tsx (2 matches)

import { openai } from "npm:@ai-sdk/openai";
import { generateText } from "npm:ai";
}
const { text: fact } = await generateText({
  model: openai("gpt-4o-mini"),
  system: "You are an expert in world trivia.",
  prompt: `Provide an interesting and fun fact about the country: ${body.country}.`,
get_recipes_by_country/main.tsx (2 matches)

import { openai } from "npm:@ai-sdk/openai";
import { generateText } from "npm:ai";
const countriesList = body.countries.join(", ");
const { text: recipes } = await generateText({
  model: openai("gpt-4o-mini"),
  system: "You are a culinary expert.",
  prompt: `Provide a list of popular recipes from the following countries: ${countriesList}.`,
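Both get_country_fact and get_recipes_by_country follow the same shape: an HTTP val parses a JSON body, calls generateText with gpt-4o-mini, and returns the generated text. A hedged sketch of what the surrounding handler likely looks like; the exact request parsing and response shape are assumptions.

import { openai } from "npm:@ai-sdk/openai";
import { generateText } from "npm:ai";

// Assumed handler shape; the vals' actual validation and error handling may differ.
export default async function (req: Request): Promise<Response> {
  const body = await req.json(); // e.g. { "country": "Japan" }

  const { text: fact } = await generateText({
    model: openai("gpt-4o-mini"),
    system: "You are an expert in world trivia.",
    prompt: `Provide an interesting and fun fact about the country: ${body.country}.`,
  });

  return Response.json({ fact });
}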
fallacyDetectorAI/main.tsx (7 matches)

];

// Mock fallacy detection for when OpenAI fails
function mockFallacyDetection(text) {
  const lowercaseText = text.toLowerCase();

try {
  // Dynamically import OpenAI with error handling
  const openAIModule = await import("https://esm.town/v/std/openai").catch(err => {
    console.error("Failed to import OpenAI module:", err);
    throw new Error("Could not load AI analysis module");
  });

  const OpenAI = openAIModule.OpenAI;
  const openai = new OpenAI();

  const response = await openai.chat.completions.create({
    model: "gpt-4o-mini",
    messages: [
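fallacyDetectorAI pairs the dynamic import with a local mock so the endpoint still returns something when the OpenAI module or the API call fails. A condensed sketch of that fallback pattern; the prompt and response handling here are assumptions.

// Hypothetical wrapper around the fallback pattern; the val's real logic is more detailed.
async function detectFallacies(text: string) {
  try {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const openai = new OpenAI();
    const response = await openai.chat.completions.create({
      model: "gpt-4o-mini",
      messages: [{ role: "user", content: `List any logical fallacies in: ${text}` }],
    });
    return response.choices[0].message.content;
  } catch (err) {
    console.error("AI analysis failed, falling back to mock detection:", err);
    return mockFallacyDetection(text); // keyword-based mock defined earlier in the val
  }
}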
audioAIChat/main.tsx (5 matches)

export default async function server(request: Request): Promise<Response> {
  if (request.method === 'POST') {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const openai = new OpenAI();

    // Parse multipart form data

    // Transcribe audio
    const transcriptionResponse = await openai.audio.transcriptions.create({
      file: base64Audio,
      model: "whisper-1",

    // Generate AI response
    const chatCompletion = await openai.chat.completions.create({
      messages: [
        {

    // Generate audio response
    const speechResponse = await openai.audio.speech.create({
      model: "tts-1",
      voice: "nova",
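The audioAIChat excerpt is a three-step pipeline: transcribe the uploaded audio, feed the transcript to a chat model, then synthesize the reply as speech. The sketch below fills in the body of the POST branch under that reading (request is the handler's argument); the form field name, prompt, and response handling are assumptions, and it passes a File object to the transcription endpoint, which is what the OpenAI SDK expects.

// Assumed multipart field name "audio"; the val's actual field may differ.
const form = await request.formData();
const audioFile = form.get("audio") as File;

// 1. Speech -> text
const transcription = await openai.audio.transcriptions.create({
  file: audioFile,
  model: "whisper-1",
});

// 2. Transcript -> chat reply
const chat = await openai.chat.completions.create({
  model: "gpt-4o-mini",
  messages: [{ role: "user", content: transcription.text }],
});

// 3. Reply -> speech
const speech = await openai.audio.speech.create({
  model: "tts-1",
  voice: "nova",
  input: chat.choices[0].message.content ?? "",
});

return new Response(await speech.arrayBuffer(), {
  headers: { "Content-Type": "audio/mpeg" },
});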
avatarGenerator/main.tsx (3 matches)

export default async function server(request: Request): Promise<Response> {
  if (request.method === 'POST') {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const openai = new OpenAI();

    const { theme } = await request.json();

    const completion = await openai.chat.completions.create({
      messages: [
        {
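The avatarGenerator excerpt reads a theme from the JSON body and asks the chat model to generate content for it. A hedged sketch of how such a handler might complete; the model choice, prompts, and returned fields are assumptions.

export default async function server(request: Request): Promise<Response> {
  if (request.method === "POST") {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const openai = new OpenAI();

    const { theme } = await request.json();

    const completion = await openai.chat.completions.create({
      model: "gpt-4o-mini", // assumption: the excerpt does not show the model
      messages: [
        // Assumed prompts; the val's actual instructions are not shown in the excerpt.
        { role: "system", content: "You generate short, vivid avatar descriptions." },
        { role: "user", content: `Describe an avatar for the theme: ${theme}` },
      ],
    });

    return Response.json({ description: completion.choices[0].message.content });
  }

  return new Response("Method not allowed", { status: 405 });
}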
MEDIANALIZE_PRO/Medicalreport.tsx (5 matches)

export default async function server(request: Request): Promise<Response> {
  if (request.method === 'POST' && new URL(request.url).pathname === '/analyze') {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const openai = new OpenAI();

    const body = await request.json();

    try {
      const completion = await openai.chat.completions.create({
        messages: [
          {
      });
    } catch (error) {
      console.error('OpenAI Analysis Error:', error);
      return new Response(JSON.stringify({
        diagnosis: 'Unable to generate analysis',

function parseAnalysis(analysis: string) {
  // Basic parsing of OpenAI response
  const sections = analysis.split('\n\n');
  return {
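parseAnalysis splits the model's free-text reply on blank lines and maps the pieces into structured fields. A hedged sketch of what the full helper might look like; apart from diagnosis, which also appears in the error path above, the field names are assumptions.

// Hypothetical completion of parseAnalysis; only the diagnosis field is confirmed by the excerpt.
function parseAnalysis(analysis: string) {
  // Basic parsing of the OpenAI response: paragraphs separated by blank lines.
  const sections = analysis.split("\n\n");
  return {
    diagnosis: sections[0] ?? "Unable to generate analysis",
    details: sections.slice(1).join("\n\n"), // assumption: remaining paragraphs kept verbatim
  };
}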
MEDIANALIZE_PRO/healthcare.tsx (14 matches)

}

// Dynamically import OpenAI with error handling
let OpenAI;
try {
  const module = await import("https://esm.town/v/std/openai");
  OpenAI = module.OpenAI;
} catch (importError) {
  console.error("OpenAI Import Error:", importError);
  return new Response(
    JSON.stringify({
      error: "Failed to import OpenAI module",
      details: String(importError),
    }),
}

// Ensure OpenAI is available
if (!OpenAI) {
  return new Response(
    JSON.stringify({
      error: "OpenAI module not found",
    }),
    {
}

// Create OpenAI instance
const openai = new OpenAI();

// Create OpenAI completion with comprehensive error handling
let completion;
try {
  completion = await openai.chat.completions.create({
    messages: body.messages,
    model: "gpt-4o-mini",
  });
} catch (completionError) {
  console.error("OpenAI Completion Error:", completionError);
  return new Response(
    JSON.stringify({
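healthcare.tsx guards every failure point separately: the module import, the missing export, and the completion call each get their own JSON error response. A condensed sketch of the same defensive pattern gathered into one helper; the status codes and error shapes are assumptions.

// Hypothetical condensation of the guards shown above.
async function safeCompletion(messages: { role: "system" | "user" | "assistant"; content: string }[]) {
  let OpenAI;
  try {
    OpenAI = (await import("https://esm.town/v/std/openai")).OpenAI;
  } catch (importError) {
    return Response.json({ error: "Failed to import OpenAI module", details: String(importError) }, { status: 500 });
  }
  if (!OpenAI) {
    return Response.json({ error: "OpenAI module not found" }, { status: 500 });
  }

  const openai = new OpenAI();
  try {
    const completion = await openai.chat.completions.create({ messages, model: "gpt-4o-mini" });
    return Response.json(completion.choices[0].message);
  } catch (completionError) {
    return Response.json({ error: "Completion failed", details: String(completionError) }, { status: 502 });
  }
}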