try {
  const url = query
    ? `/api/contacts?q=${encodeURIComponent(query)}`
    : "/api/contacts";

  const response = await fetch(url);

// ...

const handleAddContact = async (contact: Contact) => {
  try {
    const response = await fetch("/api/contacts", {
      method: "POST",
      headers: { "Content-Type": "application/json" },

// ...

try {
  const response = await fetch(`/api/contacts/${contact.id}`, {
    method: "PUT",
    headers: { "Content-Type": "application/json" },

// ...

try {
  const response = await fetch(`/api/contacts/${id}`, {
    method: "DELETE"
  });

// ...

// Refresh the selected contact
if (selectedContact.id) {
  fetch(`/api/contacts/${selectedContact.id}`)
    .then(res => res.json())
    .then(result => {
import { Contact, Interaction } from "../../shared/types.ts";

const api = new Hono();

// Contacts endpoints
api.get("/contacts", async (c) => {
  try {
    const searchQuery = c.req.query("q");
    // ...
});

api.get("/contacts/:id", async (c) => {
  try {
    const id = parseInt(c.req.param("id"));
    // ...
});

api.post("/contacts", async (c) => {
  try {
    const contact = await c.req.json() as Contact;
    // ...
});

api.put("/contacts/:id", async (c) => {
  try {
    const id = parseInt(c.req.param("id"));
    // ...
});

api.delete("/contacts/:id", async (c) => {
  try {
    const id = parseInt(c.req.param("id"));
    // ...

// Interactions endpoints
api.get("/contacts/:id/interactions", async (c) => {
  try {
    const contactId = parseInt(c.req.param("id"));
    // ...
});

api.post("/interactions", async (c) => {
  try {
    const interaction = await c.req.json() as Interaction;
    // ...
});

api.delete("/interactions/:id", async (c) => {
  try {
    const id = parseInt(c.req.param("id"));
    // ...
});

export default api;
│   │   └── queries.ts       # DB query functions
│   ├── routes/              # Route modules
│   │   └── api.ts           # API endpoints
│   └── index.ts             # Main entry point
├── frontend/
untitled-2444/index.html (6 matches)
<div class="md:col-span-3">
  <div class="flex items-center justify-between">
    <label for="apiKey" class="block text-sm font-medium text-gray-700 dark:text-gray-300">OpenAI API Key (Optional)</label>
    <span class="text-xs text-gray-500 dark:text-gray-400">Direct API connection</span>
  </div>
  <div class="mt-1 flex rounded-md shadow-sm">
    <input
      type="password"
      name="apiKey"
      id="apiKey"
      class="flex-1 min-w-0 block w-full px-3 py-2 rounded-md border border-gray-300 dark:border-gray-600 focus:outline-none focus:ring-indigo-500 focus:border-indigo-500 sm:text-sm bg-white dark:bg-gray-800 text-gray-900 dark:text-gray-100"
      placeholder="sk-..."
    <button
      type="button"
      id="useApiKeyBtn"
      class="ml-3 inline-flex items-center px-3 py-2 border border-gray-300 dark:border-gray-600 shadow-sm text-sm leading-4 font-medium rounded-md text-gray-700 dark:text-gray-200 bg-white dark:bg-gray-800 hover:bg-gray-50 dark:hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500"
    >
    </button>
  </div>
  <p class="mt-1 text-xs text-gray-500 dark:text-gray-400">Your API key is never stored on our servers</p>
</div>
PRChecker2/index.tsx (3 matches)
const patchContent = await patchResponse.text();

// Call AI service with your private API key
// Replace with your actual AI service URL
const aiResponse = await fetch("https://api.openai.com/v1/chat/completions", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    "Authorization": `Bearer ${Deno.env.get("AI_API_KEY")}`,
  },
  body: JSON.stringify({
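The match cuts off inside the request body. As a hedged sketch only, the call might continue along these lines, assuming a standard chat-completions payload built from patchContent; the model name, prompt wording, and response handling below are illustrative and not taken from the original file:

    // Hypothetical continuation of the request body shown above.
    model: "gpt-4o",
    messages: [
      { role: "system", content: "You review GitHub pull request patches." },
      { role: "user", content: patchContent },
    ],
  }),
});

// The chat-completions response carries the review text in choices[0].message.content.
if (!aiResponse.ok) {
  throw new Error(`AI request failed with status ${aiResponse.status}`);
}
const aiResult = await aiResponse.json();
const review: string = aiResult.choices?.[0]?.message?.content ?? "";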
untitled-2444/README.md (8 matches)
- Combines processed outputs seamlessly
- Simple, responsive UI with token counting and progress tracking
- **Direct OpenAI API Connection** - Use your own API key for direct processing
- **Debug Console** - View API requests, responses, and token usage
- **Script Type Detection** - Automatically identifies screenplay, technical, marketing, academic, or creative content
- **Syntax Highlighting** - Automatically highlights code in technical scripts

...

- `/index.ts` - Main HTTP endpoint and route handler
- `/backend/processor.ts` - Text processing logic and OpenAI integration
- `/backend/openaiProxy.ts` - Server-side proxy for OpenAI API calls
- `/backend/scriptTypeDetector.ts` - Automatic script type detection
- `/shared/tokenizer.ts` - Advanced token counting and text chunking
- `/shared/OpenAIConnector.ts` - Direct OpenAI API connection handling
- `/frontend/index.html` - Main HTML template
- `/frontend/index.ts` - Frontend JavaScript logic

...

2. Select script type or use auto-detection
3. Choose an instruction template or write custom instructions
4. (Optional) Set your OpenAI API key for direct processing
5. Click "Improve Script" to process
6. View, compare, and download the improved script

## Advanced Features

### Direct API Connection
You can use your own OpenAI API key for direct processing, bypassing the server proxy. This can be useful for:
- Processing very large scripts
- Using custom model parameters
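To make the direct-connection path concrete, here is a minimal sketch of a browser-side call with a user-supplied key, bypassing the server proxy. It assumes the standard OpenAI chat-completions endpoint; the helper name, model choice, and prompt shape are illustrative and not taken from this project's OpenAIConnector.

// Hypothetical helper: call OpenAI directly with the user's key so the key
// never reaches the app's server. Illustrative only.
async function directChatCompletion(apiKey: string, prompt: string): Promise<string> {
  const res = await fetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "Authorization": `Bearer ${apiKey}`, // key stays in the browser session
    },
    body: JSON.stringify({
      model: "gpt-4o", // assumed default; the app may use different parameters
      messages: [{ role: "user", content: prompt }],
    }),
  });
  if (!res.ok) throw new Error(`OpenAI request failed: ${res.status}`);
  const data = await res.json();
  return data.choices?.[0]?.message?.content ?? "";
}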
### Debug Console
The debug console provides real-time information about:
- API requests and responses
- Token usage statistics
- Processing errors and warnings
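Purely as an illustration (this shape is assumed, not taken from the project's code), a debug entry could be a small timestamped record that the console renders:

// Hypothetical debug-entry shape; the real console may track different fields.
interface DebugEntry {
  timestamp: string;                                // ISO time of the event
  kind: "request" | "response" | "error";
  detail: string;                                   // endpoint, status, or error message
  tokens?: { prompt: number; completion: number };  // present for responses
}

const example: DebugEntry = {
  timestamp: new Date().toISOString(),
  kind: "response",
  detail: "POST /api/openai/chat -> 200",
  tokens: { prompt: 1200, completion: 450 },
};
console.log(example);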
untitled-2444/processor.ts (1 match)
  // Otherwise wait and retry
  const delay = Math.pow(2, attempt) * 1000;
  console.log(`API error, retrying in ${delay}ms...`);
  await new Promise(resolve => setTimeout(resolve, delay));
}
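The match shows only the backoff step. As a hedged sketch of the kind of retry loop this typically sits in (the wrapper name, attempt limit, and callApi placeholder are assumptions, not the project's actual code):

// Hypothetical retry wrapper with exponential backoff: 1s, 2s, 4s, ...
async function withRetry<T>(callApi: () => Promise<T>, maxAttempts = 3): Promise<T> {
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    try {
      return await callApi();
    } catch (error) {
      // Give up once the final attempt has failed
      if (attempt === maxAttempts - 1) throw error;
      // Otherwise wait and retry
      const delay = Math.pow(2, attempt) * 1000;
      console.log(`API error, retrying in ${delay}ms...`);
      await new Promise(resolve => setTimeout(resolve, delay));
    }
  }
  throw new Error("unreachable");
}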
untitled-2444/index.ts (17 matches)
});

// API endpoint to get source URL
app.get("/api/source-url", c => {
  const projectInfo = parseProject(import.meta.url);
  const sourceUrl = projectInfo.links.self.project.replace("esm.sh", "val.town");
  // ...
});

// API endpoint to get session status
app.get("/api/sessions/:sessionId", async c => {
  try {
    const sessionId = c.req.param("sessionId");
    // ...
});

// API endpoint to get session content
app.get("/api/sessions/:sessionId/content", async c => {
  try {
    const sessionId = c.req.param("sessionId");
    // ...
});

// API endpoint to list recent sessions
app.get("/api/sessions", async c => {
  try {
    const keys = await blob.list("sessions/");
    // ...
});

// API endpoint to detect script type
app.post("/api/detect-type", async c => {
  try {
    const body = await c.req.json();
    // ...
});

// API endpoint for OpenAI proxy
app.post("/api/openai/chat", async c => {
  try {
    const body = await c.req.json();

    const { proxyChatCompletion, logApiUsage } = await import("./backend/openaiProxy.ts");
    const result = await proxyChatCompletion(body);

    // Log API usage
    if (result.usage) {
      logApiUsage(
        body.model || "gpt-4o",
        result.usage.prompt_tokens,
        // ...
});

// API endpoint to process text
app.post("/api/process", async c => {
  try {
    const formData = await c.req.formData();
untitled-2444/openaiProxy.ts (6 matches)
/**
 * OpenAI API Proxy
 *
 * Proxies requests to the OpenAI API to avoid exposing API keys to the client
 */
import { OpenAI } from "https://esm.town/v/std/openai";

// ...

  return completion;
} catch (error) {
  console.error("OpenAI API error:", error);
  throw error;
}
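The body of proxyChatCompletion is elided in the match. A hedged reconstruction of how it could be shaped around the std/openai client imported above (parameter handling and defaults here are assumptions; only the try/catch tail shown above comes from the actual file):

// Hypothetical reconstruction; the real implementation may validate input
// and choose different defaults.
export async function proxyChatCompletion(body: {
  model?: string;
  messages: Array<{ role: "system" | "user" | "assistant"; content: string }>;
  temperature?: number;
}) {
  const openai = new OpenAI(); // server-side client, no key exposed to the browser
  try {
    const completion = await openai.chat.completions.create({
      model: body.model || "gpt-4o",
      messages: body.messages,
      temperature: body.temperature,
    });
    return completion;
  } catch (error) {
    console.error("OpenAI API error:", error);
    throw error;
  }
}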
/**
 * Log API usage for monitoring
 */
export function logApiUsage(model: string, promptTokens: number, completionTokens: number): void {
  const timestamp = new Date().toISOString();
  console.log(`[${timestamp}] API Usage: ${model} - Prompt: ${promptTokens}, Completion: ${completionTokens}, Total: ${promptTokens + completionTokens}`);
}