```ts
// Excerpt: HTTP val that calls the std/openai wrapper on the /problem route
export default async function server(request) {
  const { OpenAI } = await import("https://esm.town/v/std/openai");
  const { blob } = await import("https://esm.town/v/std/blob");
  const url = new URL(request.url);

  if (url.pathname === "/problem") {
    const openai = new OpenAI();
    const completion = await openai.chat.completions.create({
      model: "gpt-4",
      messages: [
        // ... (prompt messages and the rest of the handler are elided in this excerpt)
```
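
The excerpt stops before the prompt and the response are returned. As a rough sketch of how this pattern usually completes in a Val Town HTTP val (the prompt wording and JSON shape below are assumptions, not the original code):

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

// Sketch only: the prompt wording and response shape are illustrative.
export default async function server(request: Request): Promise<Response> {
  const url = new URL(request.url);

  if (url.pathname === "/problem") {
    const openai = new OpenAI();
    const completion = await openai.chat.completions.create({
      model: "gpt-4",
      messages: [
        { role: "system", content: "You generate short practice problems." },
        { role: "user", content: "Give me one algebra practice problem." },
      ],
      max_tokens: 200,
    });

    // The std/openai wrapper follows the OpenAI SDK shape, so the text lives here:
    return new Response(
      JSON.stringify({ problem: completion.choices[0].message.content }),
      { headers: { "Content-Type": "application/json" } },
    );
  }

  return new Response("Not found", { status: 404 });
}
```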
```ts
// Excerpt: LangChain agent val (the custom prompt and executor setup are elided below)
import { initializeAgentExecutorWithOptions } from "https://esm.sh/langchain/agents";
import { ChatOpenAI } from "https://esm.sh/langchain/chat_models/openai";
import { Calculator } from "https://esm.sh/langchain/tools/calculator";
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function chatAgentWithCustomPrompt(input?: string) {
  // Deterministic sampling for tool-using agent runs
  const model = new ChatOpenAI({
    temperature: 0,
  });
  // ... (tools, custom prompt, and executor are elided in this excerpt)
```
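
The val is cut off before the agent is assembled. A plausible completion, assuming the classic `initializeAgentExecutorWithOptions` API that these import paths correspond to (the agent type, default input, and return value are illustrative, not the original code):

```ts
import { initializeAgentExecutorWithOptions } from "https://esm.sh/langchain/agents";
import { ChatOpenAI } from "https://esm.sh/langchain/chat_models/openai";
import { Calculator } from "https://esm.sh/langchain/tools/calculator";

// Sketch only: agent type, default input, and return value are assumptions.
export default async function chatAgentWithCustomPrompt(input = "What is 2 to the power of 10?") {
  const model = new ChatOpenAI({
    temperature: 0,
    // Read the key explicitly so the example works outside Node-style environments.
    openAIApiKey: Deno.env.get("OPENAI_API_KEY"),
  });
  const tools = [new Calculator()];

  const executor = await initializeAgentExecutorWithOptions(tools, model, {
    agentType: "chat-zero-shot-react-description",
  });

  const result = await executor.call({ input });
  return result.output;
}
```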
## **Tech Stack**
- **Townie AI** – AI-based automation
- **OpenAI API** – For generating blog content
- **Built-in Databases** – To store and retrieve topics (if needed)

```ts
  // Handle API and shared content routes
  if (request.method === "POST") {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const { blob } = await import("https://esm.town/v/std/blob");
    const openai = new OpenAI();

    const { topic, idea, type } = await request.json();
    // ...
    let response;
    if (type === "ideas") {
      response = await openai.chat.completions.create({
        messages: [
          {
            // ... (ideas prompt elided in this excerpt)
      });
    } else if (type === "outline") {
      response = await openai.chat.completions.create({
        messages: [
          {
            // ... (outline prompt elided in this excerpt)
```
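
One way the `ideas`/`outline` branching might be finished and the result persisted, written as a standalone helper; the prompts, model choice, and blob key are placeholders rather than the original val's values:

```ts
import { OpenAI } from "https://esm.town/v/std/openai";
import { blob } from "https://esm.town/v/std/blob";

// Sketch only: prompts, model, and blob key are placeholders.
export async function generateBlogContent(topic: string, idea: string, type: "ideas" | "outline") {
  const openai = new OpenAI();

  const userPrompt = type === "ideas"
    ? `Suggest five blog post ideas about: ${topic}`
    : `Write an outline for this blog post idea: ${idea}`;

  const response = await openai.chat.completions.create({
    model: "gpt-4",
    messages: [
      { role: "system", content: "You are a helpful blog-writing assistant." },
      { role: "user", content: userPrompt },
    ],
  });

  const content = response.choices[0].message.content;

  // Persist the result so it can be retrieved later (optional).
  await blob.setJSON(`blog-content-${Date.now()}`, { topic, idea, type, content });

  return content;
}
```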
### Tech Stack
* Townie AI – AI-based automation
* OpenAI API – For generating blog content
* Built-in Databases – To store and retrieve topics (if needed; a minimal storage sketch follows this list)
* How to Deploy & Share
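
The "Built-in Databases" bullet refers to Val Town's hosted SQLite. A minimal sketch of storing and retrieving topics with it; the table name and helper functions are hypothetical, not part of the project above:

```ts
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";

// Hypothetical table for generated blog topics.
const TABLE = "blog_topics_v1";

await sqlite.execute(`CREATE TABLE IF NOT EXISTS ${TABLE} (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  topic TEXT NOT NULL,
  created_at TEXT DEFAULT CURRENT_TIMESTAMP
)`);

export async function saveTopic(topic: string) {
  await sqlite.execute({ sql: `INSERT INTO ${TABLE} (topic) VALUES (?)`, args: [topic] });
}

export async function listTopics() {
  const result = await sqlite.execute(`SELECT topic FROM ${TABLE} ORDER BY created_at DESC`);
  return result.rows.map((row) => row[0]);
}
```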
```ts
  // Ask the model for recommendations (error handling is not shown in this excerpt)
  async function generateAIRecommendations() {
    try {
      const { OpenAI } = await import("https://esm.town/v/std/openai");
      const openai = new OpenAI();

      const recommendations = await openai.chat.completions.create({
        messages: [
          {
            // ... (recommendations prompt elided in this excerpt)

  // ...

  // Makes two completions: one for the video itself, one for its description
  async function generateAIDescription() {
    try {
      const { OpenAI } = await import("https://esm.town/v/std/openai");
      const openai = new OpenAI();

      const aiVideoCompletion = await openai.chat.completions.create({
        messages: [
          {
            // ... (prompt elided in this excerpt)
      });

      const aiDescription = await openai.chat.completions.create({
        messages: [
          {
            // ... (prompt elided in this excerpt)
```
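
The two calls in `generateAIDescription` suggest a two-step flow. One plausible way to connect them, assuming the second prompt builds on the first completion (the original prompts are not shown in the excerpt):

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

// Sketch only: the prompts and the topic parameter are illustrative.
export async function generateAIDescription(videoTopic: string) {
  const openai = new OpenAI();

  // Step 1: draft a short video outline for the topic.
  const aiVideoCompletion = await openai.chat.completions.create({
    model: "gpt-4",
    messages: [{ role: "user", content: `Outline a short video about: ${videoTopic}` }],
    max_tokens: 200,
  });
  const outline = aiVideoCompletion.choices[0].message.content;

  // Step 2: turn the outline into a publishable description.
  const aiDescription = await openai.chat.completions.create({
    model: "gpt-4",
    messages: [{
      role: "user",
      content: `Write a one-paragraph description for this video outline:\n${outline}`,
    }],
    max_tokens: 150,
  });

  return aiDescription.choices[0].message.content;
}
```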
```ts
export default async function server(request: Request): Promise<Response> {
  const { OpenAI } = await import("https://esm.town/v/std/openai");
  const openai = new OpenAI();

  const url = new URL(request.url);
  // ...
  try {
    // Simulated video analysis (in a real scenario, you'd use a more advanced video processing service)
    const analysisResponse = await openai.chat.completions.create({
      messages: [
        {
          // ... (analysis prompt elided in this excerpt)

  // ...
  try {
    // Generate video concept
    const conceptResponse = await openai.chat.completions.create({
      messages: [
        {
          // ... (concept prompt elided in this excerpt)
```
```ts
export default async function server(request: Request): Promise<Response> {
  const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
  const { OpenAI } = await import("https://esm.town/v/std/openai");
  const KEY = "priyanshSocialMediaApp";
  const SCHEMA_VERSION = 5;
  // ...

  const url = new URL(request.url);
  const openai = new OpenAI();

  // New Vision Analysis Endpoint
  // ...
  );

  // Use OpenAI Vision API
  const visionResponse = await openai.chat.completions.create({
    model: "gpt-4-vision-preview",
    messages: [
      // ... (multimodal messages elided in this excerpt)

  // ...
  `;

  const textCompletion = await openai.chat.completions.create({
    messages: [
      { role: "system", content: "You are a creative content generator for social media templates." },
      // ... (remaining messages elided in this excerpt)
```
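
The vision call above needs multimodal messages. A sketch of the request shape, with a placeholder prompt and image URL (the actual prompt and image source are not shown in the excerpt):

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

// Sketch only: the prompt text and image URL are placeholders.
const openai = new OpenAI();

const visionResponse = await openai.chat.completions.create({
  model: "gpt-4-vision-preview",
  messages: [
    {
      role: "user",
      content: [
        { type: "text", text: "Describe this image for a social media caption." },
        { type: "image_url", image_url: { url: "https://example.com/photo.jpg" } },
      ],
    },
  ],
  max_tokens: 300,
});

console.log(visionResponse.choices[0].message.content);
```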