Val Town Code Search — Return to Val Town

API Access

You can access search results via JSON API by adding format=json to your query:

https://codesearch.val.run/?q=fetch&page=5&format=json

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=fetch

Returns an array of strings in format "username" or "username/projectName"

Found 14486 results for "fetch" (1347ms)

reddit-checker — test-slack.ts — 3 matches

@sunnyatlightswitch•Updated 1 day ago
76 };
77
78 const response = await fetch(SLACK_WEBHOOK_URL, {
79 method: 'POST',
80 headers: {
119 };
120
121 const response = await fetch(SLACK_WEBHOOK_URL, {
122 method: 'POST',
123 headers: {
155 };
156
157 const response = await fetch(SLACK_WEBHOOK_URL, {
158 method: 'POST',
159 headers: {
42 const auth = btoa(`${clientId}:${clientSecret}`);
43
44 const response = await fetch('https://www.reddit.com/api/v1/access_token', {
45 method: 'POST',
46 headers: {
66
67/**
68 * Fetches recent posts from a subreddit using Reddit API
69 */
70async function fetchSubredditPosts(accessToken: string, subreddit: string, limit: number = 10): Promise<RedditPost[]> {
71 try {
72 const url = `https://oauth.reddit.com/r/${subreddit}/new?limit=${limit}`;
73
74 const response = await fetch(url, {
75 headers: {
76 'Authorization': `Bearer ${accessToken}`,
87 return data.data.children.map(child => child.data);
88 } catch (error) {
89 console.error(`Error fetching posts from r/${subreddit}:`, error);
90 throw error;
91 }
159 console.log(`🔍 Testing Reddit API connection for r/${subreddit}...`);
160 const accessToken = await getRedditAccessToken(clientId, clientSecret);
161 const posts = await fetchSubredditPosts(accessToken, subreddit, 10);
162
163 const keywords = [
93
94/**
95 * Fetches RSS feed from a subreddit
96 */
97async function fetchSubredditRSS(subreddit: string): Promise<RSSItem[]> {
98 try {
99 const url = `https://www.reddit.com/r/${subreddit}/new.rss`;
100
101 const response = await fetch(url, {
102 headers: {
103 'User-Agent': 'Mozilla/5.0 (compatible; RedditMonitor/1.0; +https://val.town)',
114 return parseRSS(xmlText);
115 } catch (error) {
116 console.error(`Error fetching RSS from r/${subreddit}:`, error);
117 throw error;
118 }
140 try {
141 if (action === 'test') {
142 // Test fetching RSS feed
143 console.log(`🔍 Testing Reddit RSS connection for r/${subreddit}...`);
144 const items = await fetchSubredditRSS(subreddit);
145
146 const keywords = ["auth", "authentication", "login", "signin", "signup", "oauth", "jwt", "token"];
222 // Return raw RSS XML for debugging
223 const rssUrl = `https://www.reddit.com/r/${subreddit}/new.rss`;
224 const response = await fetch(rssUrl, {
225 headers: {
226 'User-Agent': 'Mozilla/5.0 (compatible; RedditMonitor/1.0; +https://val.town)',
106
107/**
108 * Fetches RSS feed from a subreddit
109 */
110async function fetchSubredditRSS(subreddit: string): Promise<RSSItem[]> {
111 try {
112 const url = `https://www.reddit.com/r/${subreddit}/new.rss`;
113
114 const response = await fetch(url, {
115 headers: {
116 'User-Agent': 'Mozilla/5.0 (compatible; RedditMonitor/1.0; +https://val.town)',
127 return parseRSS(xmlText);
128 } catch (error) {
129 console.error(`Error fetching RSS from r/${subreddit}:`, error);
130 throw error;
131 }
212 console.log(`⏰ Last checked: ${lastChecked ? new Date(lastChecked * 1000).toLocaleString() : 'Never'}`);
213
214 // Fetch RSS feed
215 const items = await fetchSubredditRSS(CONFIG.subreddit);
216 console.log(`📥 Fetched ${items.length} items from r/${CONFIG.subreddit} RSS feed`);
217
218 // Filter items that are newer than last check and contain keywords
27
28/**
29 * Fetches recent posts from a subreddit
30 */
31async function fetchSubredditPosts(subreddit: string, limit: number = 10): Promise<RedditPost[]> {
32 try {
33 const url = `https://www.reddit.com/r/${subreddit}/new.json?limit=${limit}`;
34
35 const response = await fetch(url, {
36 headers: {
37 'User-Agent': 'Mozilla/5.0 (compatible; RedditMonitor/1.0; +https://val.town)',
49 return data.data.children.map(child => child.data);
50 } catch (error) {
51 console.error(`Error fetching posts from r/${subreddit}:`, error);
52 throw error;
53 }
63 try {
64 if (action === 'test') {
65 // Test fetching posts
66 console.log('🔍 Testing Reddit API connection...');
67 const posts = await fetchSubredditPosts('lovable', 10);
68
69 const keywords = ["auth", "authentication", "login", "signin", "signup", "oauth", "jwt", "token"];
40
41/**
42 * Fetches recent posts from a subreddit
43 */
44async function fetchSubredditPosts(subreddit: string, limit: number = 25): Promise<RedditPost[]> {
45 try {
46 const url = `https://www.reddit.com/r/${subreddit}/new.json?limit=${limit}`;
47
48 const response = await fetch(url, {
49 headers: {
50 'User-Agent': 'Mozilla/5.0 (compatible; RedditMonitor/1.0; +https://val.town)',
62 return data.data.children.map(child => child.data);
63 } catch (error) {
64 console.error(`Error fetching posts from r/${subreddit}:`, error);
65 throw error;
66 }
129 console.log(`⏰ Last checked: ${lastChecked ? new Date(lastChecked * 1000).toLocaleString() : 'Never'}`);
130
131 // Fetch recent posts
132 const posts = await fetchSubredditPosts(CONFIG.subreddit);
133 console.log(`📥 Fetched ${posts.length} posts from r/${CONFIG.subreddit}`);
134
135 // Filter posts that are newer than last check and contain keywords

slimify — main.tsx — 1 match

@affan•Updated 1 day ago
66})
67
68export default app.fetch;

agatha-proxy — main.tsx — 3 matches

@sammeltassen•Updated 1 day ago
10const manifestBaseUrl = "https://agatha.arch.be/data/json/";
11
12async function fetchJson(id: string) {
13 const headers = new Headers([
14 ["referer", "https://agatha.arch.be/"],
15 ]);
16 const url = new URL(manifestBaseUrl + id);
17 return fetch(url, { headers })
18 .then(resp => resp.json())
19 .catch((err) => ({ error: `Could not access item ${id}` }));
31 let resp;
32 if (requestType === "manifest") {
33 resp = await fetchJson(requestParams.join("/"));
34 }
35 if (resp.error) {
54
55 // Forward the request to Brave Search API
56 const braveResponse = await fetch(`${braveApiUrl}?${braveParams}`, {
57 method: 'GET',
58 headers: {

kay — main.tsx — 12 matches

@legal•Updated 1 day ago
564
565 try {
566 const response = await fetch(window.location.pathname + '?format=json', { method: 'POST', headers: { 'Accept': 'application/json'}, body: formData });
567 const data = await response.json();
568 if (!response.ok) { throw new Error(data?.error || data?.details || response.statusText || \`Server status: \${response.status}\`); }
615
616 } catch (error) {
617 console.error("Fetch Error:", error);
618 displayError('errorFetchFailed', { errorMessage: error.message });
619 setLoadingState(false);
620 }
652 // --- Dynamic Imports ---
653 const { OpenAI } = await import("https://esm.town/v/std/openai");
654 const { fetch } = await import("https://esm.town/v/std/fetch");
655 const { PDFExtract } = await import("npm:pdf.js-extract");
656
763 errorInvalidFile: "Invalid file type. Please upload a {{document_format_accepted | default('PDF')}}.",
764 errorFileSize: "File is too large (Max {{max_pdf_size_mb}}MB).",
765 errorFetchFailed: "Failed to perform analysis: {{errorMessage}}",
766 contactNamePlaceholder: APP_CONFIG.contact_form_placeholders_en.name,
767 contactEmailPlaceholder: APP_CONFIG.contact_form_placeholders_en.email,
803 errorInvalidFile: "Tipo de archivo inválido. Por favor, suba un {{document_format_accepted | default('PDF')}}.",
804 errorFileSize: "El archivo es demasiado grande (Máx {{max_pdf_size_mb}}MB).",
805 errorFetchFailed: "Falló la realización del análisis: {{errorMessage}}",
806 contactNamePlaceholder: APP_CONFIG.contact_form_placeholders_es.name,
807 contactEmailPlaceholder: APP_CONFIG.contact_form_placeholders_es.email,
872 }
873 try {
874 const response = await fetch(ref.potentialUrl, { method: "HEAD", redirect: "follow", timeout: 5000 });
875 ref.traversalStatus = response.ok ? "success" : "failed";
876 if (!response.ok) ref.error = `Status ${response.status}`;
877 } catch (e) {
878 ref.traversalStatus = "failed";
879 ref.error = e.name === "AbortError" || e.message.includes("timed out") ? "Timeout" : "Fetch Error";
880 }
881 return ref;
904 docText = await extractPdfTextNative(buffer, input.documentFile.name, log);
905 } else if (input.documentUrl) {
906 log.push({ agent: ingestAgent, type: "input", message: `Fetching content from URL: ${input.documentUrl}` });
907 try {
908 const response = await fetch(input.documentUrl);
909 if (!response.ok) throw new Error(`HTTP error! status: ${response.status}`);
910 docText = await response.text();
911 log.push({ agent: ingestAgent, type: "result", message: `Fetched ${docText.length} characters.` });
912 } catch (e) {
913 log.push({ agent: ingestAgent, type: "error", message: `Failed to fetch URL: ${e.message}. Halting.` });
914 }
915 } else if (input.documentText) {

testWeatherFetcher — 1 file match

@sjaskeprut•Updated 2 days ago

weatherFetcher — 1 file match

@sjaskeprut•Updated 2 days ago