Val Town Code Search — Return to Val Town

API Access

You can access search results via JSON API by adding format=json to your query:

https://codesearch.val.run/?q=api&page=98&format=json

For typeahead suggestions, use the /typeahead endpoint:

https://codesearch.val.run/typeahead?q=api

Returns an array of strings in format "username" or "username/projectName"

Found 18993 results for "api" (1922ms)

registry-vals / file — 3 matches

@dinavinter•Updated 1 week ago
90});
91
92// API endpoint to get specific file content
93app.get("/:zon/:file/raw", async (c: Context) => {
94 const { zon, file } = c.req.param();
95 const client = new ValTown({ bearerToken: Deno.env.get("VAL_TOWN_API_KEY") });
96
97 try {
221 const { zon, file } = c.req.param();
222 const { content } = await c.req.json();
223 const client = new ValTown({ bearerToken: Deno.env.get("VAL_TOWN_API_KEY") });
224
225 try {

FileDumpThing / upload.ts — 2 matches

@sacredlindsay•Updated 1 week ago
3import { readStdinToBuffer } from "./utils.ts";
4
5const API_URL = `https://filedumpthing.val.run/api/upload`;
6
7async function main() {
50 }
51
52 const response = await fetch(API_URL, {
53 method: "POST",
54 body: formData,

FileDumpThing / README.md — 4 matches

@sacredlindsay•Updated 1 week ago
50 - `index.html`: HTML template
51
52- **Backend**: Hono server for handling API requests
53 - `index.ts`: API routes for file uploads and serving stored content
54 - `api.ts`: API endpoints for file uploads
55
56- **Shared**: Code shared between frontend and backend
65- Built with TypeScript for type safety
66- React for the frontend UI
67- Hono for the backend API
68- Supports various file types with appropriate MIME type detection
69- Smart content detection (checks for null bytes and UTF-8 validity)

FileDumpThing / README.md — 1 match

@sacredlindsay•Updated 1 week ago
811. Reads content from stdin
822. Automatically detects if it's text or binary
833. Uploads it to the API at `filedumpthing.val.run`
844. Returns a shareable URL

FileDumpThing / index.ts — 2 matches

@sacredlindsay•Updated 1 week ago
3import { Hono } from "npm:hono";
4import { getContentTypeHeaders } from "../shared/mimetype.ts";
5import api from "./api.ts";
6
7const app = new Hono();
12 .get("/frontend/**/*", c => serveFile(c.req.path, import.meta.url))
13 .get("/shared/**/*", c => serveFile(c.req.path, import.meta.url))
14 .route("/api", api)
15 .get("/blob", async c => {
16 const key = c.req.query("key");

FileDumpThing / FileDumper.tsx — 2 matches

@sacredlindsay•Updated 1 week ago
54
55 try {
56 const response = await fetch("/api/upload", {
57 method: "POST",
58 body: formData,
79
80 try {
81 const response = await fetch("/api/upload", {
82 method: "POST",
83 body: formData,

FileDumpThing / api.ts — 2 matches

@sacredlindsay•Updated 1 week ago
3import { isTextContent } from "../shared/utils.ts";
4
5const api = new Hono()
6 .post("/upload", async c => {
7 try {
60 });
61
62export default api;

tsjScraper / exampleDiv.html — 1 match

@alexwein•Updated 1 week ago
68 <a href="https://timothydereuse.github.io/" target="_blank"><strong>Tim
69 de Reuse:</strong></a> There’s been a zillion cultural artifacts
70 produced in the last decade about capital-T Trauma in the last decade;
71 it’s so zeitgeisty you can’t even make an animated Disney feature
72 without fitting the word “intergenerational” somewhere on the back of

xxxclearinghouse_urlscraper / index.ts — 11 matches

@toowired•Updated 1 week ago
2import { blob } from "https://esm.town/v/std/blob";
3
4interface ScrapingResult {
5 products: any[];
6 metadata: {
68};
69
70// Advanced scraping function with error handling and rate limiting
71async function scrapeWebsite(query: string, websiteUrl: string): Promise<ScrapingResult> {
72 const startTime = Date.now();
73 const results: ScrapingResult = {
74 products: [],
75 metadata: {
96 );
97
98 console.log(`Scraping ${urlsToScrape.length} URLs for ${hostname}`);
99
100 for (const url of urlsToScrape) {
126
127 } catch (error) {
128 results.errors?.push(`Error scraping ${url}: ${error.message}`);
129 console.error(`Error scraping ${url}:`, error);
130 }
131 }
155
156 } catch (error) {
157 results.errors?.push(`General scraping error: ${error.message}`);
158 results.metadata.duration = Date.now() - startTime;
159 console.error('Scraping failed:', error);
160 return results;
161 }
251
252// Main export function
253export default async function urlScrapeTemplate(query: string, website: string): Promise<ScrapingResult> {
254 try {
255 // Input validation
272 }
273
274 // Perform scraping
275 const result = await scrapeWebsite(query, normalizedWebsite);
276

xxxclearinghouse_orchestrator / index.ts — 13 matches

@toowired•Updated 1 week ago
14}
15
16interface ScrapingResult {
17 website: string;
18 status: 'success' | 'error';
28 query: string;
29 timestamp: string;
30 results: ScrapingResult[];
31 consolidatedProducts: any[];
32 summary: {
136 console.log(`Orchestrating scrape for "${query}" across ${websites.length} sites`);
137
138 // Parallel scraping with proper error handling
139 const scrapingPromises = websites.map(async (website: string): Promise<ScrapingResult> => {
140 try {
141 const startTime = Date.now();
142
143 // Call the URL scrape template
144 const scrapingResult = await scrapeWebsite(query, website);
145 const endTime = Date.now();
146
148 website,
149 status: 'success',
150 data: scrapingResult,
151 metadata: {
152 scrapedAt: new Date().toISOString(),
155 };
156 } catch (error) {
157 console.error(`Error scraping ${website}:`, error);
158 return {
159 website,
168 });
169
170 const scrapingResults = await Promise.all(scrapingPromises);
171
172 // Consolidate all successful results
173 const allProducts: any[] = [];
174 scrapingResults.forEach(result => {
175 if (result.status === 'success' && result.data?.products) {
176 allProducts.push(...result.data.products);
182
183 // Calculate summary statistics
184 const summary = calculateSummary(scrapingResults, consolidatedProducts);
185
186 // Prepare final response
188 query,
189 timestamp: new Date().toISOString(),
190 results: scrapingResults,
191 consolidatedProducts,
192 summary
267}
268
269// Simplified scraping function that calls our URL scrape template
270async function scrapeWebsite(query: string, website: string): Promise<any> {
271 // This would normally import and call the actual URL scrape template
365
366// Calculate summary statistics
367function calculateSummary(results: ScrapingResult[], products: any[]): any {
368 const successfulSites = results.filter(r => r.status === 'success').length;
369 const totalProducts = products.length;

readback-api

@tr3ntg•Updated 5 hours ago
API for readback.
Plantfo

Plantfo — 8 file matches

@Llad•Updated 11 hours ago
API for AI plant info
snartapi
apiry