 */

import { OpenAI } from "https://esm.town/v/std/openai";
import { Octokit } from "https://esm.sh/@octokit/rest@20.0.2";
import { WebClient } from "https://esm.sh/@slack/web-api@7.0.2";

// ...

async function isBugReportLLM(text: string): Promise<boolean> {
  try {
    // Check if OpenAI API key is available
    if (!Deno.env.get("OPENAI_API_KEY")) {
      console.warn("OpenAI API key not found - bug detection disabled");
      return false;
    }

    const openai = new OpenAI();
    const completion = await openai.chat.completions.create({
      messages: [
        {
// ...

async function findRelatedIssues(slackMessage: string, issues: any[]): Promise<any[]> {
  try {
    // Check if OpenAI API key is available
    if (!Deno.env.get("OPENAI_API_KEY")) {
      return [];
    }

    // ...
    }).join("\n\n");

    const openai = new OpenAI();
    const completion = await openai.chat.completions.create({
      messages: [
        {
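
Both excerpts above cut off before the prompt and response handling. For orientation, here is a minimal sketch of how a yes/no classification call like `isBugReportLLM` is commonly completed with the same `std/openai` client; the prompt wording, model choice, and response parsing below are illustrative assumptions, not the original code.

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

async function classifyAsBugReport(text: string): Promise<boolean> {
  // Skip classification entirely when no API key is configured
  if (!Deno.env.get("OPENAI_API_KEY")) return false;

  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini", // assumed model; the original choice is not shown
    messages: [
      {
        role: "system",
        // Hypothetical prompt; the original prompt text is truncated above
        content: "Answer only 'yes' or 'no': does the following Slack message describe a software bug?",
      },
      { role: "user", content: text },
    ],
    max_tokens: 3,
  });

  return (completion.choices[0]?.message?.content ?? "")
    .trim()
    .toLowerCase()
    .startsWith("yes");
}
```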
# OpenAI Proxy

This OpenAI API proxy injects Val Town's API keys. For usage documentation, check out https://www.val.town/v/std/openai

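The usual way to go through this proxy from a val is the `std/openai` wrapper linked above. A minimal usage sketch (the model name is an assumption):

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

// The wrapper routes requests through the proxy, so the calling val
// does not need its own OPENAI_API_KEY.
const openai = new OpenAI();

const completion = await openai.chat.completions.create({
  model: "gpt-4o-mini", // assumed; any model the proxy allows works
  messages: [{ role: "user", content: "Say hello in a creative way" }],
});

console.log(completion.choices[0]?.message?.content);
```
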
Migrated from folder: openai/openaiproxy
import { parseBearerString } from "https://esm.town/v/andreterron/parseBearerString";
import { API_URL } from "https://esm.town/v/std/API_URL?v=5";
import { OpenAIUsage } from "https://esm.town/v/std/OpenAIUsage";
import { RateLimit } from "npm:@rlimit/http";
const client = new OpenAIUsage();

const allowedPathnames = [

// ...

  // Proxy the request
  const url = new URL("." + pathname, "https://api.openai.com");
  url.search = search;

  const headers = new Headers(req.headers);
  headers.set("Host", url.hostname);
  headers.set("Authorization", `Bearer ${Deno.env.get("OPENAI_API_KEY")}`);
  // Only set the org header when the env var is present (Deno.env.get may return undefined)
  const org = Deno.env.get("OPENAI_API_ORG");
  if (org) headers.set("OpenAI-Organization", org);

  const modifiedBody = await limitFreeModel(req, user);

  // ...
  });

  const openAIRes = await fetch(url, {
    method: req.method,
    headers,
    // ...
  });

  // Remove internal header
  const res = new Response(openAIRes.body, openAIRes);
  res.headers.delete("openai-organization");
  return res;
}
Note: When changing a SQLite table's schema, change the table's name (e.g., add _2 or _3) to create a fresh table.

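For example, a minimal sketch of that rename approach, assuming the `std/sqlite` client; the table and column names are illustrative:

```ts
import { sqlite } from "https://esm.town/v/std/sqlite";

// The old "messages" table had a different schema, so create "messages_2"
// with the new schema instead of trying to ALTER the existing table.
await sqlite.execute(`
  CREATE TABLE IF NOT EXISTS messages_2 (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    content TEXT NOT NULL,
    created_at TEXT DEFAULT CURRENT_TIMESTAMP
  )
`);
```
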
### OpenAI

```ts
import { OpenAI } from "https://esm.town/v/std/openai";
const openai = new OpenAI();
const completion = await openai.chat.completions.create({
  messages: [
    { role: "user", content: "Say hello in a creative way" },
  ],
  model: "gpt-4o-mini",
  max_tokens: 30,
});
console.log(completion.choices[0].message.content);
```
- 🎨 Modern Neumorphism/Dark Mode design with theme toggle
- 💬 Real-time chat interface with typing indicators
- 🤖 OpenAI GPT integration with fallback mock responses
- 📱 Fully responsive design (mobile/tablet/desktop)
- ✨ Smooth animations and micro-interactions

## Environment Variables

- `OPENAI_API_KEY` - OpenAI API key (optional, falls back to mock responses)

## Usage

✅ **AI Integration**
- OpenAI GPT-4o-mini integration with fallback mock responses
- Context-aware conversations (sends last 10 messages; see the sketch after this list)
- Intelligent mock responses for demo purposes
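
The context-aware behaviour amounts to slicing recent history into the OpenAI `messages` array before calling the model. A minimal sketch, assuming messages carry `role` and `content` fields; the system prompt and exact shape are assumptions, not the app's original code:

```ts
// Hypothetical helper: keep only the last 10 messages as conversation context
function buildContext(
  history: { role: "user" | "assistant"; content: string }[],
  latest: string,
) {
  return [
    { role: "system" as const, content: "You are a helpful chat assistant." }, // assumed prompt
    ...history.slice(-10),
    { role: "user" as const, content: latest },
  ];
}
```
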
import { Hono } from "https://esm.sh/hono@3.11.7";
import { readFile, serveFile } from "https://esm.town/v/std/utils@85-main/index.ts";
import { OpenAI } from "https://esm.town/v/std/openai";
import type { ChatRequest, ChatResponse, Message } from "../shared/types.ts";

// ...

    let responseMessage = "";

    // Try to use OpenAI if API key is available
    const openaiKey = Deno.env.get('OPENAI_API_KEY');

    if (openaiKey) {
      try {
        const openai = new OpenAI();

        // Build conversation context
        // ...
        ];

        const completion = await openai.chat.completions.create({
          model: "gpt-4o-mini",
          messages,
          // ...
        });

        responseMessage = completion.choices[0]?.message?.content || "I'm sorry, I couldn't generate a response.";
      } catch (openaiError) {
        console.error("OpenAI API error:", openaiError);
        // Fall back to mock response
        responseMessage = getMockResponse(message);
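
`getMockResponse` itself is not shown in this excerpt. A hypothetical keyword-based fallback in the same spirit, purely for illustration (the canned replies below are invented, not the app's original code):

```ts
// Hypothetical stand-in for the fallback used when OpenAI is unavailable
function getMockResponse(message: string): string {
  const text = message.toLowerCase();
  if (text.includes("hello") || text.includes("hi")) {
    return "Hi there! (mock response - set OPENAI_API_KEY for real answers)";
  }
  if (text.includes("help")) {
    return "I'm a demo bot running without an API key, but I'm happy to chat!";
  }
  return "That's interesting! This is a mock reply; configure OPENAI_API_KEY to enable GPT-4o-mini responses.";
}
```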
export default async function(req: Request): Promise<Response> {
  // This will be our OpenAI Vision API integration
  // For now, just a placeholder that shows we're ready

  return new Response(
    JSON.stringify({
      message: "OpenAI Vision API integration ready",
      status: "placeholder",
      required_env_vars: [
        "OPENAI_API_KEY",
      ],
      next_features: [