1import { fetch } from "https://esm.town/v/std/fetch";
2
// NOTE(review): corrupted paste — the source line-number gutter ("3", "4", …)
// is fused onto every code line, and the snippet's interior (original lines
// 8-20) is missing. The tail (21-24: `c`, `newNode`, `app`, `req`) appears to
// come from a *different* val that was concatenated here — none of those names
// are defined in this fragment. Do not edit until the full source is recovered.
// Presumably: fetches an EpicTV product page and scrapes it with cheerio —
// TODO confirm against the original val.
3let getEpicTvProduct = async (url) => {
4 const cheerio = await import("npm:cheerio@1.0.0-rc.12");
5 const html = await (await fetch(url)).text();
6 const $ = cheerio.load(html);
7 console.log("how it run?");
21 return c.json({ newNode });
22 });
23 return app.fetch(req);
24};
// NOTE(review): corrupted paste — line-number gutter fused onto code, and the
// body is cut after `JSON.stringify({` (original lines 5-22 missing; the
// request body presumably serializes `args` — TODO confirm). The tail
// (23-26: `app.fetch(req)`) belongs to a different snippet; `app` and `req`
// are not defined here. POSTs to the val.town run API for the named val,
// stripping the leading "@" from the val name.
1export function runValAPI(name, ...args) {
2 return fetch(`https://api.val.town/v1/run/${name.replace("@", "")}`, {
3 method: "POST",
4 body: JSON.stringify({
23 });
24
25 return app.fetch(req);
26};
44 });
45
46 return app.fetch(req);
47};
48```
1import { email } from "https://esm.town/v/std/email?v=9";
2import { fetch } from "https://esm.town/v/std/fetch?v=4";
3import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON?v=41";
4import { hnJobToText } from "https://esm.town/v/yeskunall/hnJobToText?v=2";
5
// NOTE(review): corrupted paste — line-number gutter fused onto code; original
// line 34 is missing and the function's closing lines run past this fragment
// (the `Promise.all` / map / function are never closed here). Do not edit
// until the full source is recovered.
// NOTE(review): `Number[]` is the boxed wrapper type — should be the primitive
// `number[]` once this block can be safely rewritten.
29// Get up to 200 of the latest jobs posted on HN
30export async function hnLatestJobs() {
31 const jobStories: Number[] = await fetch(
32 "https://hacker-news.firebaseio.com/v0/jobstories.json",
33 ).then((res) => res.json());
35 const posts = await Promise.all(
36 jobStories.map(async (id) => {
37 return await fetchJSON(
38 `https://hacker-news.firebaseio.com/v0/item/${id}.json`,
39 );
6
// NOTE(review): fragment only — this snippet starts at its original line 7
// (lines 1-6, presumably declaring `url` and the `Interval` type, are not in
// view) and its end is missing. `url` is undefined in the visible scope.
// Appears to poll a URL and bail out silently on a non-2xx response — TODO
// confirm against the original val before editing.
7export async function pollNodeJS(interval: Interval) {
8 let resp = await fetch(url);
9 if (!resp.ok) return;
10 let text = await resp.text();
1import { unsplashAccessToken } from "https://esm.town/v/byrneml/unsplashAccessToken";
2import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON?v=41";
3
// NOTE(review): fragment only — cut off inside the `fetchJSON` options object
// (original lines 8+ missing; presumably an Authorization header built from
// `token` or the imported `unsplashAccessToken` — TODO confirm). Fetches the
// Unsplash photos endpoint via fetchJSON.
4export let unsplashF = (token) =>
5 fetchJSON(
6 `https://api.unsplash.com/photos`,
7 {
1import { feedCache } from "https://esm.town/v/iakovos/feedCache";
2import { fetch } from "https://esm.town/v/std/fetch";
3import { set } from "https://esm.town/v/std/set?v=11";
4
// NOTE(review): corrupted paste — line-number gutter fused onto code, and the
// interior is elided at four points (original lines 8-15, 21-26, 32-35, 41-51
// missing), including the conditional-request header setup, the 304 branch,
// the body of the cache-entry objects, and the success return path; the final
// `};` is also missing. Do not edit until the full source is recovered.
// Visible contract: fetch `url` with conditional-request headers derived from
// `feedCache[url]` (ETag / Last-Modified), refresh the cache entry on a fresh
// response, and return null on fetch failure (error is logged, not rethrown).
5export const fetchTextWithCaching = async (url: string): Promise<string | null> => {
6 const cacheItem = feedCache[url];
7 const headers: Record<string, string> = {};
16
17 try {
18 const response = await fetch(url, { headers });
19
20 if (response.status === 304) {
27
28 feedCache[url] = {
29 lastFetched: Date.now(),
30 etag: response.headers.get("ETag") ?? cacheItem?.etag,
31 lastModified: response.headers.get("Last-Modified")
36 ...feedCache,
37 [url]: {
38 lastFetched: Date.now(),
39 etag: response.headers.get("ETag") ?? cacheItem?.etag,
40 lastModified: response.headers.get("Last-Modified")
52 }
53 catch (error) {
54 console.error("Failed to fetch feeds:", error);
55 return null;
56 }
1import { extractContent } from "https://esm.town/v/iakovos/extractContent";
2import { fetchText } from "https://esm.town/v/iakovos/fetchText";
3import { getLink } from "https://esm.town/v/iakovos/getLink";
4import { getMediaAttributeUrl } from "https://esm.town/v/iakovos/getMediaAttributeUrl";
5
// NOTE(review): corrupted paste — line-number gutter fused onto code, and the
// bulk of the body (original lines 13-40, which must define `parsedItems` and
// the `FeedItem` mapping) is missing; the final `};` is also absent. Do not
// edit until the full source is recovered.
// Visible contract: fetch the feed text for `url`, parse it (xml2js +
// node-html-parser are dynamically imported), and return the parsed items;
// any error is logged and an empty array is returned instead of throwing.
6export const fetchAndParseFeeds = async (url: string): Promise<FeedItem[]> => {
7 try {
8 const { xml2js } = await import("https://deno.land/x/xml2js@1.0.0/mod.ts");
9 let { parse } = await import("npm:node-html-parser");
10 const xml = await fetchText(url);
11
12 if (!xml) {
41 return parsedItems;
42 } catch (error) {
43 console.error("Error while fetching and parsing feeds:", error);
44 return [];
45 }