import { zodResponseFormat } from "https://esm.sh/openai/helpers/zod";
import { z } from "https://esm.sh/zod";
import { OpenAI } from "https://esm.town/v/std/openai";
import { discordWebhook } from "https://esm.town/v/stevekrouse/discordWebhook";
import { socialDataSearch, Tweet } from "https://esm.town/v/stevekrouse/socialDataSearch";
// ...
  .join(" OR ") + " " + excludes;

const openai = new OpenAI();

const RelevanceSchema = z.object({
  // ...

  try {
    const completion = await openai.beta.chat.completions.parse({
      model: "gpt-4o-mini",
      messages: [
        // ...
    return completion.choices[0].message.parsed;
  } catch (error) {
    console.error("Error parsing OpenAI response:", error);
    return { isRelevant: false, confidence: 0, reason: "Error in processing" };
  }
import { email } from "https://esm.town/v/std/email?v=11";
import { OpenAI } from "npm:openai";

let location = "brooklyn ny";
// ...
).then(r => r.json());

const openai = new OpenAI();
let chatCompletion = await openai.chat.completions.create({
  messages: [{
    role: "user",
  const { intervieweeResponse, interviewPosition } = await request.json();

  const { OpenAI } = await import("https://esm.town/v/std/openai");
  const openai = new OpenAI();

  const completion = await openai.chat.completions.create({
    messages: [
      {
  const webpageText = await webpageResponse.text();

  // Use OpenAI as Claude proxy
  const { OpenAI } = await import("https://esm.town/v/std/openai");
  const openai = new OpenAI();

  const completion = await openai.chat.completions.create({
    messages: [
      {
  const { question } = await request.json();

  const { OpenAI } = await import("https://esm.town/v/std/openai");
  const openai = new OpenAI();

  const completion = await openai.chat.completions.create({
    messages: [
      {
import { OpenAI } from "https://esm.sh/openai";
import { zodResponseFormat } from "https://esm.sh/openai/helpers/zod";
import { z } from "https://esm.sh/zod";
import { zip } from "https://esm.town/v/pomdtr/sql";
// ...
}, { concurrency: 3 });

const openai = new OpenAI();

const ValDescriptions = z.object({
  // ...

async function getDescriptions(val) {
  const completion = await openai.beta.chat.completions.parse({
    model: "gpt-4o-mini",
    messages: [
import { zodResponseFormat } from "https://esm.sh/openai/helpers/zod";
import { z } from "https://esm.sh/zod";
import { email } from "https://esm.town/v/std/email";
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(e: Email) {
  const client = new OpenAI();

  const Messages = z.object({
    // ...
### Tooling

* LLMs can use [tools](https://platform.openai.com/docs/guides/function-calling), meaning you can turn this into an agent and make it a whole lot more useful (a sketch of a tool-call round trip follows below).

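For a concrete starting point, here is a minimal sketch of a single tool-call round trip, assuming the `std/openai` client passes the `tools` parameter through like the official SDK. The `getWeather` function and its JSON-schema parameters are hypothetical placeholders, not part of any example above.

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

const openai = new OpenAI();

// Hypothetical local function the model is allowed to call.
async function getWeather(location: string) {
  return { location, forecast: "72°F and sunny" }; // placeholder data
}

const userMessage = { role: "user" as const, content: "What's the weather in Brooklyn right now?" };

// 1. Ask the model, advertising the tool and its parameters.
const first = await openai.chat.completions.create({
  model: "gpt-4o-mini",
  messages: [userMessage],
  tools: [{
    type: "function",
    function: {
      name: "getWeather",
      description: "Get the current weather for a location",
      parameters: {
        type: "object",
        properties: { location: { type: "string" } },
        required: ["location"],
      },
    },
  }],
});

// 2. If the model chose to call the tool, run it and send the result back.
const assistantMessage = first.choices[0].message;
const toolCall = assistantMessage.tool_calls?.[0];
if (toolCall?.type === "function") {
  const args = JSON.parse(toolCall.function.arguments);
  const result = await getWeather(args.location);

  const second = await openai.chat.completions.create({
    model: "gpt-4o-mini",
    messages: [
      userMessage,
      assistantMessage,
      { role: "tool", tool_call_id: toolCall.id, content: JSON.stringify(result) },
    ],
  });
  console.log(second.choices[0].message.content);
}
```

This sketch stops after one round trip; an agent would loop, feeding each tool result back in until the model stops requesting tools and returns a plain answer.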
  }

  const { OpenAI } = await import("https://esm.town/v/std/openai");
  const openai = new OpenAI();

  const valTownClient = new ValTown({
    // ...

  try {
    const completion = await openai.chat.completions.create({
      model: "gpt-4o",
      messages: [