24 provider: "groq",
25 model: "meta-llama/llama-4-maverick-17b-128e-instruct",
26 endpoint: "https://api.groq.com/openai/v1/chat/completions",
27 headers: {
28 "Authorization": `Bearer ${process.env.GROQ_API_KEY}`,
35 provider: "groq",
36 model: "meta-llama/llama-4-scout-17b-16e-instruct",
37 endpoint: "https://api.groq.com/openai/v1/chat/completions",
38 headers: {
39 "Authorization": `Bearer ${process.env.GROQ_API_KEY}`,
37 do {
38 const url: string = page
39 ? `https://api.openai.com/v1/organization/costs?start_time=${startTime}&limit=${days}&page=${page}`
40 : `https://api.openai.com/v1/organization/costs?start_time=${startTime}&limit=${days}`;
41
42 const { data, error } = await betterFetch<Page<CostsObject>>(url, {
43 headers: { Authorization: `Bearer ${Bun.env.OPENAI_ADMIN_KEY}` },
44 });
45
48 if (error.status === 401 || error.status === 403) {
49 throw new Error(
50 "Missing or invalid OPENAI_ADMIN_KEY. Please check your environment variables.",
51 );
52 }
1// @ts-ignore
2import { OpenAI } from "https://esm.town/v/std/openai?v=4";
3import { Hono } from "npm:hono@4.4.12";
4import { evaluate } from "npm:mathjs";
103// --- REFACTORED: CORE LOGIC WITH FULL AUDIT LOGGING ---
104async function callAgentWithTools<T>(
105 openai: OpenAI,
106 systemPrompt: string,
107 userContent: string,
122 };
123 console.log(
124 "--- OpenAI API Request [1/2] ---",
125 JSON.stringify(requestPayload, null, 2),
126 );
127
128 const response = await openai.chat.completions.create(requestPayload);
129
130 // --- AUDIT LOG: Initial Response ---
131 console.log(
132 "--- OpenAI API Response [1/2] ---",
133 JSON.stringify(response, null, 2),
134 );
168 };
169 console.log(
170 "--- OpenAI API Request [2/2] ---",
171 JSON.stringify(finalRequestPayload, null, 2),
172 );
173
174 const finalResponse = await openai.chat.completions.create(
175 finalRequestPayload,
176 );
178 // --- AUDIT LOG: Final Response ---
179 console.log(
180 "--- OpenAI API Response [2/2] ---",
181 JSON.stringify(finalResponse, null, 2),
182 );
342 JSON.stringify(currentState, null, 2)
343 }`;
344 const openai = new OpenAI();
345 const result = await callAgentWithTools(
346 openai,
347 agentConfig.systemPrompt,
348 agentInput,
1// @ts-ignore
2import { OpenAI } from "https://esm.town/v/std/openai?v=4";
3import { Hono } from "npm:hono@4.4.12";
4import { evaluate } from "npm:mathjs"; // Safe math evaluation library
140// --- CORE LOGIC WITH TOOL SUPPORT (No changes) ---
141async function callAgentWithTools<T>(
142 openai: OpenAI,
143 systemPrompt: string,
144 userContent: string,
149 ];
150
151 const response = await openai.chat.completions.create({
152 model: "gpt-4o",
153 messages: messages,
189 }
190
191 const finalResponse = await openai.chat.completions.create({
192 model: "gpt-4o",
193 messages: messages,
365 }`;
366
367 const openai = new OpenAI();
368 const result = await callAgentWithTools(
369 openai,
370 agentConfig.systemPrompt,
371 agentInput,
1// @ts-ignore
2import { OpenAI } from "https://esm.town/v/std/openai?v=4";
3// @ts-ignore
4import { blob } from "https://esm.town/v/std/blob?v=11";
405 if (req.method === "POST" && action === "classify") {
406 try {
407 const openai = new OpenAI();
408 const body = await req.json();
409
412 }
413
414 const completion = await openai.chat.completions.create({
415 model: "gpt-4o",
416 messages: [
1// @ts-nocheck
2import { Hono } from "npm:hono@4.4.12";
3import { OpenAI } from "https://esm.town/v/std/openai?v=4";
4import { blob } from "https://esm.town/v/std/blob?v=11";
5
446 if (!rawGoal) return c.json({ error: "Goal content is required" }, 400);
447
448 const openai = new OpenAI();
449 const completion = await openai.chat.completions.create({
450 model: "gpt-4o",
451 messages: [
517
518 if (allSubtasksComplete) {
519 const openai = new OpenAI();
520 const completion = await openai.chat.completions.create({
521 model: "gpt-4o",
522 messages: [
1// @ts-ignore
2import { OpenAI } from "https://esm.town/v/std/openai?v=4";
3import { Hono } from "npm:hono@4.4.12";
4
352app.post("/api/analyze", async (c) => {
353 try {
354 const openai = new OpenAI();
355 const body = await c.req.json();
356 const { polygon, stage } = body;
379 }. The current vine growth stage is: "${stage}".`;
380
381 const completion = await openai.chat.completions.create({
382 model: "gpt-4o",
383 messages: [
1const REALTIME_BASE_URL = "https://api.openai.com/v1/realtime";
2const OPENAI_API_KEY = Deno.env.get("OPENAI_API_KEY");
3if (!OPENAI_API_KEY) {
4 throw new Error("🔴 OpenAI API key not configured");
5}
6
24export function makeHeaders(contentType?: string) {
25 const obj: Record<string, string> = {
26 Authorization: `Bearer ${OPENAI_API_KEY}`,
27 };
28 if (contentType) obj["Content-Type"] = contentType;
9sip.post("/", async (c) => {
10 // Verify the webhook.
11 const OPENAI_SIGNING_SECRET = Deno.env.get("OPENAI_SIGNING_SECRET");
12 if (!OPENAI_SIGNING_SECRET) {
13 console.error("🔴 webhook secret not configured");
14 return c.text("Internal error", 500);
15 }
16 const webhook = new Webhook(OPENAI_SIGNING_SECRET);
17 const bodyStr = await c.req.text();
18 let callId: string | undefined;
1# hello-realtime
2
3**Hello Realtime** is an OpenAI Realtime app that supports both WebRTC and SIP
4(telephone) users. You can access the app via WebRTC at
5[hello-realtime.val.run](https://hello-realtime.val.run), or via SIP by calling
9server-side websocket interface.
10
11If you remix the app, you'll just need to pop in your own `OPENAI_API_KEY` (from
12[platform.openai.com](https://platform.openai.com)), and if you want SIP, the
13`OPENAI_SIGNING_SECRET`.
14
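As a minimal sketch of the startup check this implies (assuming the Deno-style `Deno.env.get` access used in the snippets above; variable names simply mirror those snippets):

```ts
// Fail fast if the required secret is missing.
const OPENAI_API_KEY = Deno.env.get("OPENAI_API_KEY");
if (!OPENAI_API_KEY) {
  throw new Error("OPENAI_API_KEY not configured");
}

// Only needed if you enable the SIP webhook path.
const OPENAI_SIGNING_SECRET = Deno.env.get("OPENAI_SIGNING_SECRET");
```
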
15## Architecture
18 - Browser connects to frontend
19 - creates WebRTC offer
20 - `/rtc` endpoint handles SDP negotiation with OpenAI
21 - an observer is established to monitor the session
222. **SIP Flow**: