EchoPrompter/main.tsx (9 matches)

if (typeof document !== "undefined") { client(); }
export default async function server(request: Request): Promise<Response> {
  const { OpenAI } = await import("https://esm.town/v/std/openai");
  const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
  const openai = new OpenAI();

  // Use the val's URL as a unique key for database tables
  // ...
  const echoPrompt = getEchoPrompt();

  const completion = await openai.chat.completions.create({
    messages: [
      {
  // ...
  // First, generate the agent prompt
  const completion2 = await openai.chat.completions.create({
    messages: [
      {
  // ...
  // Then, generate commands for the agent based on its purpose and description
  const commandsCompletion = await openai.chat.completions.create({
    messages: [
      {
  // ...
  }

  // Format the history into OpenAI message format
  const messages = [
    {
  // ...
  // Add conversation history if it exists
  if (history && history.length > 0) {
    // Filter out system messages and map to OpenAI format
    history.forEach(msg => {
      if (msg.role !== "system") {
  // ...
  // Chat with the agent using the stored prompt and history
  const completion = await openai.chat.completions.create({
    messages: messages,
    model: "gpt-4o-mini",
  // ...
  // Execute the command with the AI
  const completion = await openai.chat.completions.create({
    messages: messages,
    model: "gpt-4o-mini",
  // ...
  try {
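Every match above truncates at the opening of a chat.completions.create call, so none of them shows a complete request. Below is a minimal sketch of the history-handling pattern the later matches describe, assuming a stored agent system prompt and a history array of { role, content } objects; agentPrompt, history, and chatWithAgent are illustrative names, not EchoPrompter's actual identifiers.

// Sketch only: agentPrompt and history stand in for values the val loads elsewhere.
import { OpenAI } from "https://esm.town/v/std/openai";

type ChatMessage = { role: "system" | "user" | "assistant"; content: string };

async function chatWithAgent(agentPrompt: string, history: ChatMessage[], userInput: string) {
  const openai = new OpenAI();

  // Start with the agent's stored system prompt.
  const messages: ChatMessage[] = [{ role: "system", content: agentPrompt }];

  // Add conversation history, skipping any system messages it may contain.
  for (const msg of history) {
    if (msg.role !== "system") {
      messages.push({ role: msg.role, content: msg.content });
    }
  }

  // Append the new user turn and ask the model for the next reply.
  messages.push({ role: "user", content: userInput });

  const completion = await openai.chat.completions.create({
    messages,
    model: "gpt-4o-mini",
  });

  return completion.choices[0].message.content ?? "";
}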
const { OpenAI } = await import("https://esm.town/v/std/openai");
const openai = new OpenAI();

const completion = await openai.chat.completions.create({
  messages: [
    {
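This unlabeled fragment (its val header is missing from the capture) is the smallest form of the pattern: dynamically import the std/openai wrapper, construct a client, and make one chat call. A minimal complete version might look like the following, where the prompt text, model, and max_tokens value are illustrative.

// Minimal sketch of the pattern above; the prompt and token limit are illustrative.
export default async function (_req: Request): Promise<Response> {
  const { OpenAI } = await import("https://esm.town/v/std/openai");
  const openai = new OpenAI();

  const completion = await openai.chat.completions.create({
    messages: [
      { role: "user", content: "Say hello in one short sentence." },
    ],
    model: "gpt-4o-mini",
    max_tokens: 30,
  });

  return new Response(completion.choices[0].message.content ?? "");
}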
import { OpenAI } from "https://esm.town/v/std/openai";

// Telegram Bot Token - you would replace this with your actual bot token
// ...
const TELEGRAM_API_URL = `https://api.telegram.org/bot${TELEGRAM_BOT_TOKEN}`;

// OpenAI for generating research insights
const openai = new OpenAI();

// Helper function to send Telegram message
// ...
async function processResearchRequest(text: string) {
  try {
    const completion = await openai.chat.completions.create({
      messages: [
        {
    // ...
    return completion.choices[0].message.content || "I couldn't generate a response.";
  } catch (error) {
    console.error("OpenAI Error:", error);
    return "Sorry, there was an error processing your research request.";
  }
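The match cuts out the middle of processResearchRequest, between the opening of the create call and the return of the generated text. Here is a hedged reconstruction of that middle section; the system prompt wording, model choice, and token limit are assumptions, and only the surrounding structure comes from the matched lines.

// Hedged reconstruction: prompt, model, and max_tokens are assumptions.
import { OpenAI } from "https://esm.town/v/std/openai";

const openai = new OpenAI();

async function processResearchRequest(text: string) {
  try {
    const completion = await openai.chat.completions.create({
      messages: [
        {
          role: "system",
          content: "You are a research assistant. Answer concisely with key facts and directions to explore further.",
        },
        { role: "user", content: text },
      ],
      model: "gpt-4o-mini",
      max_tokens: 500,
    });

    return completion.choices[0].message.content || "I couldn't generate a response.";
  } catch (error) {
    console.error("OpenAI Error:", error);
    return "Sorry, there was an error processing your research request.";
  }
}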
AIContentHashtagsGenerator/main.tsx (3 matches)

if (request.method === "POST") {
  try {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const openai = new OpenAI();

    const { topic } = await request.json();

    const completion = await openai.chat.completions.create({
      messages: [
        {
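AIContentHashtagsGenerator's match ends just as the messages array opens. A sketch of how a handler in this shape typically finishes; the hashtag prompt and the JSON response shape are assumptions.

// Sketch of a complete handler in this shape; prompt wording and JSON payloads are assumptions.
export default async function server(request: Request): Promise<Response> {
  if (request.method === "POST") {
    try {
      const { OpenAI } = await import("https://esm.town/v/std/openai");
      const openai = new OpenAI();

      const { topic } = await request.json();

      const completion = await openai.chat.completions.create({
        messages: [
          {
            role: "system",
            content: "Generate 10 relevant, popular hashtags for the given topic. Return them space-separated.",
          },
          { role: "user", content: topic },
        ],
        model: "gpt-4o-mini",
      });

      const hashtags = completion.choices[0].message.content ?? "";
      return Response.json({ hashtags });
    } catch (error) {
      return Response.json({ error: String(error) }, { status: 500 });
    }
  }
  return new Response("Method not allowed", { status: 405 });
}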
daringPlumOctopus/main.tsx (3 matches)

if (request.method === "POST") {
  try {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const openai = new OpenAI();

    const { topic } = await request.json();

    const completion = await openai.chat.completions.create({
      messages: [
        {
if (request.method === "POST") {
  try {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const openai = new OpenAI();

    const { topic } = await request.json();

    const completion = await openai.chat.completions.create({
      messages: [
        {
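daringPlumOctopus and the unlabeled snippet above repeat the AIContentHashtagsGenerator handler verbatim, so the sketch above applies to all three. From the browser side, a handler of that shape would be called with a JSON POST roughly like this; the relative URL and the hashtags response field are assumptions.

// Hypothetical client call for the handlers above; URL and response field are assumptions.
async function fetchHashtags(topic: string): Promise<string> {
  const res = await fetch("/", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ topic }),
  });
  if (!res.ok) throw new Error(`Request failed: ${res.status}`);
  const data = await res.json();
  return data.hashtags ?? "";
}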
simpleSearchPage/main.tsx (3 matches)

export default async function server(request: Request): Promise<Response> {
  if (request.method === "POST") {
    const { OpenAI } = await import("https://esm.town/v/std/openai");
    const openai = new OpenAI();

    try {
      const { query } = await request.json();

      const completion = await openai.chat.completions.create({
        messages: [
          {
My_Scheduler/main.tsx (3 matches)

export default async function server(request: Request): Promise<Response> {
  const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
  const { OpenAI } = await import("https://esm.town/v/std/openai");
  const openai = new OpenAI();

  const KEY = "My_Scheduler";
  // ...
  const taskList = tasks.rows.map(t => `${t.task} (${t.duration} mins, ${t.priority} priority)`).join(", ");

  const suggestion = await openai.chat.completions.create({
    messages: [
      {
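My_Scheduler reads tasks from sqlite, flattens them into a single string, and hands that string to the model. Below is a sketch of those three steps end to end; the table name, column set, and prompt wording are assumptions, while the rows-to-taskList-to-completion flow mirrors the matched lines.

// Sketch only: table name, columns, and prompt are assumptions.
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";
import { OpenAI } from "https://esm.town/v/std/openai";

const openai = new OpenAI();

// Hypothetical table keyed by the val name, as suggested by the KEY constant.
const tasks = await sqlite.execute(`SELECT task, duration, priority FROM My_Scheduler_tasks`);

const taskList = tasks.rows
  .map((t: any) => `${t.task} (${t.duration} mins, ${t.priority} priority)`)
  .join(", ");

const suggestion = await openai.chat.completions.create({
  messages: [
    { role: "system", content: "You are a scheduling assistant. Propose an ordered plan for these tasks." },
    { role: "user", content: taskList },
  ],
  model: "gpt-4o-mini",
});

console.log(suggestion.choices[0].message.content);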
greatTealTiglon/main.tsx (3 matches)

import { email } from "https://esm.town/v/std/email";
import { OpenAI } from "https://esm.town/v/std/openai";
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";
// ...
export default async function (e: Email) {
  const openai = new OpenAI();

  // Ensure we have text content to analyze
  // ...
  try {
    // Use GPT to draft an intelligent reply
    const completion = await openai.chat.completions.create({
      messages: [
        {
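greatTealTiglon is an email-handler val: it receives an Email, checks that there is text to analyze, and asks GPT to draft a reply. A hedged sketch of how such a handler might complete; the prompt wording, the use of e.subject and e.text, and delivering the draft back via std/email are assumptions built on the imports shown in the match.

// Hedged sketch of an email-handler val in this shape; prompt, subject handling,
// and reply delivery via std/email are assumptions drawn from the imports above.
import { email } from "https://esm.town/v/std/email";
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function (e: Email) {
  const openai = new OpenAI();

  // Ensure we have text content to analyze.
  const body = e.text ?? "";
  if (!body.trim()) return;

  try {
    // Use GPT to draft an intelligent reply.
    const completion = await openai.chat.completions.create({
      messages: [
        { role: "system", content: "Draft a brief, polite reply to the following email." },
        { role: "user", content: `Subject: ${e.subject}\n\n${body}` },
      ],
      model: "gpt-4o-mini",
    });

    const draft = completion.choices[0].message.content ?? "";

    // std/email sends to the val's owner by default, so this delivers the draft to yourself.
    await email({ subject: `Draft reply: ${e.subject}`, text: draft });
  } catch (error) {
    console.error("OpenAI Error:", error);
  }
}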