export default async function server(request: Request): Promise<Response> {
  const { OpenAI } = await import("https://esm.town/v/std/openai");
  const openai = new OpenAI();

  if (request.method === "POST" && new URL(request.url).pathname === "/chat") {
    // ... (stream construction elided)
      async start(controller) {
        try {
          const chatCompletion = await openai.chat.completions.create({
            model: "gpt-4o",
            messages: [
              // ... (message contents and chunk forwarding elided)
          }
        } catch (error) {
          console.error("OpenAI Error:", error);
          controller.enqueue(
            new TextEncoder().encode(
              // ... (error message text and remainder of handler elided)
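
For reference, here is a minimal self-contained sketch of what the elided streaming handler above likely looks like, assuming the esm.town std/openai wrapper exposes the standard OpenAI SDK surface (chat.completions.create with stream: true) and that the client consumes a plain-text token stream. The request body shape ({ messages }) and the error text are illustrative assumptions, not recovered from the elided lines.

// Hedged sketch: a complete streaming /chat handler built around the visible fragment above.
export default async function server(request: Request): Promise<Response> {
  const { OpenAI } = await import("https://esm.town/v/std/openai");
  const openai = new OpenAI();

  if (request.method === "POST" && new URL(request.url).pathname === "/chat") {
    // Assumed body shape: { messages: [{ role, content }, ...] }
    const { messages = [] } = await request.json();

    const stream = new ReadableStream({
      async start(controller) {
        const encoder = new TextEncoder();
        try {
          const chatCompletion = await openai.chat.completions.create({
            model: "gpt-4o",
            messages,
            stream: true,
          });
          // Forward each token delta to the client as it arrives.
          for await (const chunk of chatCompletion) {
            const delta = chunk.choices[0]?.delta?.content ?? "";
            if (delta) controller.enqueue(encoder.encode(delta));
          }
        } catch (error) {
          console.error("OpenAI Error:", error);
          // Assumed fallback text; the original error payload is elided above.
          controller.enqueue(encoder.encode("Sorry, something went wrong."));
        } finally {
          controller.close();
        }
      },
    });

    return new Response(stream, {
      headers: { "Content-Type": "text/plain; charset=utf-8" },
    });
  }

  return new Response("Not found", { status: 404 });
}

// Client-side usage (sketch): read the streamed text incrementally.
// const res = await fetch("/chat", { method: "POST", body: JSON.stringify({ messages }) });
// const reader = res.body!.getReader();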
import { cors } from "npm:hono@4.4.12/cors";
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";
// @ts-ignore
import { sqlite } from "https://esm.town/v/std/sqlite?v=4";

// ... (app setup and earlier route code elided)

  if (!description) return c.json({ error: "Description is required" }, 400);

  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
      // ... (prompt contents elided)

// ... (later in the same file, a second handler)

  });

  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
      // ... (prompt contents elided)

// ... (a third handler)

  }

  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
      // ... (prompt contents elided)

// ... (a fourth handler)

  if (!action || !plantName) return c.json({ error: "Action and plant name are required" }, 400);

  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
      // ... (prompt contents elided)
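
The Hono excerpts above all repeat the same pattern: validate the request body, construct an OpenAI client, and call chat.completions.create with gpt-4o. Below is a hedged sketch of one such route; the route path, request body shape, and prompt text are illustrative assumptions, and the sqlite persistence implied by the imports is omitted.

// Hedged sketch of a single Hono route following the pattern in the excerpts above.
import { Hono } from "npm:hono@4.4.12";
import { cors } from "npm:hono@4.4.12/cors";
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";

const app = new Hono();
app.use("*", cors());

// Hypothetical route name; only the validation + completion pattern mirrors the excerpts.
app.post("/plants/identify", async (c) => {
  const { description } = await c.req.json();
  if (!description) return c.json({ error: "Description is required" }, 400);

  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
      // Assumed prompts; the original system/user messages are elided above.
      { role: "system", content: "You identify plants from short descriptions." },
      { role: "user", content: description },
    ],
  });

  return c.json({ answer: completion.choices[0]?.message?.content ?? "" });
});

export default app.fetch;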