plantIdentifierApp/main.tsx (27 matches)
bloomSeason: 'Late spring to early summer',
height: '4-6 feet (1.2-1.8 meters)',
imageUrl: 'https://maxm-imggenurl.web.val.run/damask-rose-detailed'
},
'Monstera deliciosa': {
bloomSeason: 'Rarely blooms indoors',
height: '10-15 feet (3-4.5 meters) in natural habitat',
imageUrl: 'https://maxm-imggenurl.web.val.run/monstera-deliciosa'
}
};
<div className="flex items-center mb-4">
<img
src={plant.imageUrl}
alt={plant.commonName}
className="w-24 h-24 object-cover rounded-full mr-4 shadow-md"
reader.readAsDataURL(file);
reader.onloadend = async () => {
// readAsDataURL produces a data: URL string; strip the prefix to get raw base64
const base64Image = (reader.result as string).split(',')[1];
const openai = new OpenAI({
{
type: "text",
text: "Identify the plant species in this image with high accuracy. Provide the scientific name, common name, and a detailed description. Include key identifying characteristics. If you cannot confidently identify the plant, explain why."
},
{
type: "image_url",
image_url: {
url: `data:image/jpeg;base64,${base64Image}`
}
}
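// Sketch, not part of the val: the elided request wrapped around these content
// parts presumably follows the standard chat-completions shape for vision input.
// The model name and token limit below are assumptions.
const exampleCompletion = await openai.chat.completions.create({
  model: "gpt-4o", // assumed vision-capable model
  messages: [
    {
      role: "user",
      content: [
        { type: "text", text: "Identify the plant species in this image." },
        { type: "image_url", image_url: { url: `data:image/jpeg;base64,${base64Image}` } },
      ],
    },
  ],
  max_tokens: 500,
});
const exampleAnswer = exampleCompletion.choices[0].message.content;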
bloomSeason: 'Bloom season varies',
height: 'Height varies by specific variety',
imageUrl: 'https://maxm-imggenurl.web.val.run/unknown-plant'
};
});
} else {
reject(new Error('Unable to extract plant details from the image'));
}
} catch (apiError) {
reader.onerror = (error) => {
reject(new Error(`Image reading error: ${error}`));
};
} catch (error) {
// Main Plant Identifier Application Component
function PlantIdentifierApp() {
const [selectedImage, setSelectedImage] = useState(null);
const [plantInfo, setPlantInfo] = useState(null);
const [identificationError, setIdentificationError] = useState(null);
const canvasRef = useRef(null);
const handleImageUpload = async (event) => {
const file = event.target.files[0];
processImage(file);
};
const processImage = async (file) => {
if (file) {
setSelectedImage(URL.createObjectURL(file));
setIsLoading(true);
setIdentificationError(null);
// Mirror horizontally so the saved photo matches the on-screen preview
context.scale(-1, 1);
context.drawImage(video, -canvas.width, 0, canvas.width, canvas.height);
// Reset the transform so later canvas drawing is unaffected
context.setTransform(1, 0, 0, 1, 0, 0);
canvas.toBlob((blob) => {
if (blob) {
processImage(new File([blob], 'captured-plant.jpg', { type: 'image/jpeg' }));
} else {
setCameraError("Failed to create photo blob.");
}
}, 'image/jpeg');
}
}, [processImage]);
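// Sketch, not from the val: the camera preview that feeds this capture is
// typically started with getUserMedia and attached to a video ref. The
// startCamera helper, videoRef, and constraints below are assumptions.
const startCamera = async () => {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({
      video: { facingMode: 'environment' }, // prefer the rear camera on phones
    });
    if (videoRef.current) {
      videoRef.current.srcObject = stream;
      await videoRef.current.play();
    }
  } catch (err) {
    setCameraError(`Unable to access camera: ${err}`);
  }
};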
const resetCapture = () => {
setSelectedImage(null);
setPlantInfo(null);
setIdentificationError(null);
)}
{!captureMode && !selectedImage && (
<div className="space-y-4">
<input
type="file"
accept="image/*"
onChange={handleImageUpload}
className="hidden"
id="file-upload"
className="w-full bg-green-500 text-white py-3 rounded-lg text-center cursor-pointer hover:bg-green-600 transition duration-300 flex items-center justify-center"
>
📤 Upload Image
</label>
<button
)}
{selectedImage && (
<div className="mt-4 animate-fade-in">
<img
src={selectedImage}
alt="Selected plant"
className="w-full rounded-lg shadow-md mb-4"
"settings": {
"url": url,
"optionStr": "content,metadata,screenshot,pageshot,citation,doi,images,links",
"crawlMode": "jina",
"outputMode": "json"
title: '',
description: '',
image: '',
pageshotUrl: '',
screenshotUrl: ''
const title = metadata.title || metadata['og:title'] || metadata['twitter:title'] || 'No title available';
const description = metadata.description || metadata['og:description'] || metadata['twitter:description'] || 'No description available';
const image = metadata['og:image'] || metadata['twitter:image'] || '';
const pageshotUrl = result.jina?.pageshot || '';
const screenshotUrl = result.jina?.screenshot || '';
title,
description,
image,
// reuse the fallback-protected values computed above
pageshotUrl,
screenshotUrl
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link rel="icon" type="image/png" href="https://labspace.ai/ls2-circle.png" />
<title>PageShot — Get data about any web page</title>
<meta property="og:title" content="PageShot — Get data about any web page" />
<meta property="og:description" content="Get data about any web page" />
<meta property="og:image" content="https://yawnxyz-og.web.val.run/img?link=https://pageshot.labspace.ai&title=PageShot&subtitle=Get+data+about+any+web+page" />
<meta property="og:url" content="https://pageshot.labspace.ai" />
<meta property="og:type" content="website" />
<meta name="twitter:card" content="summary_large_image" />
<meta name="twitter:title" content="PageShot — Get data about any web page" />
<meta name="twitter:description" content="Get data about any web page" />
<meta name="twitter:image" content="https://yawnxyz-og.web.val.run/img?link=https://pageshot.labspace.ai&title=PageShot&subtitle=Get+data+about+any+web+page" />
<script src="https://cdn.tailwindcss.com"></script>
<script src="https://unpkg.com/dexie@3.2.2/dist/dexie.js"></script>
<div class="OutputHeader bg-app-bg-alt p-4 rounded-lg mb-4">
<div class="rounded-lg mb-4" x-show="metadata.title">
<template x-if="metadata.image">
<img :src="metadata.image.startsWith('http') ? metadata.image : new URL(metadata.image, input).href"
alt="Page preview"
class="max-w-48 h-auto rounded mt-2 mb-4">
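The inline Alpine expression above resolves relative og:image paths against the page URL; the same check as a small standalone helper (a sketch, the function name is illustrative):

// Resolve a possibly-relative image path against the page it came from.
function resolveImageUrl(image: string, pageUrl: string): string {
  return image.startsWith('http') ? image : new URL(image, pageUrl).href;
}
// e.g. resolveImageUrl('/og.png', 'https://example.com/post') === 'https://example.com/og.png'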
captivatingLimeGuan/main.tsx (33 matches)
bloomSeason: 'Late spring to early summer',
height: '4-6 feet (1.2-1.8 meters)',
imageUrl: 'https://maxm-imggenurl.web.val.run/damask-rose-detailed'
},
'Monstera deliciosa': {
bloomSeason: 'Rarely blooms indoors',
height: '10-15 feet (3-4.5 meters) in natural habitat',
imageUrl: 'https://maxm-imggenurl.web.val.run/monstera-deliciosa'
},
'Ficus lyrata': {
bloomSeason: 'Does not typically bloom indoors',
height: '6-10 feet (1.8-3 meters) indoors',
imageUrl: 'https://maxm-imggenurl.web.val.run/fiddle-leaf-fig'
}
};
// Plant Identification Service (Simulated with more sophisticated matching)
async function identifyPlant(imageFile) {
return new Promise((resolve, reject) => {
// Simulate API call with image processing
const reader = new FileReader();
reader.onloadend = () => {
// Basic image analysis simulation
const imageData = reader.result;
// Sophisticated matching logic
const matchConfidences = {
'Rosa damascena': calculateImageSimilarity(imageData, 'rose'),
'Monstera deliciosa': calculateImageSimilarity(imageData, 'monstera'),
'Ficus lyrata': calculateImageSimilarity(imageData, 'fiddle-leaf')
};
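// Sketch, not from the val: the elided lines after matchConfidences presumably
// pick the highest-confidence entry and resolve it, roughly like this.
// plantDatabase stands for the plant-details object defined above (its real
// name is not shown here), and the 0.5 threshold is an assumption.
const [bestMatch, confidence] = Object.entries(matchConfidences)
  .reduce((best, entry) => (entry[1] > best[1] ? entry : best));
if (confidence > 0.5) {
  resolve({ scientificName: bestMatch, confidence, ...plantDatabase[bestMatch] });
} else {
  reject(new Error('Unable to confidently identify the plant'));
}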
}
};
reader.readAsDataURL(imageFile);
});
}
// Simulated image similarity calculation
function calculateImageSimilarity(imageData, plantType) {
// In a real scenario, this would use machine learning techniques
const randomFactor = Math.random() * 0.4 + 0.3; // Base randomness
const typeSpecificBoost = {
'rose': imageData.includes('flower') ? 0.3 : 0,
'monstera': imageData.includes('large-leaf') ? 0.3 : 0,
'fiddle-leaf': imageData.includes('violin-shape') ? 0.3 : 0
};
<div className="flex items-center mb-4">
<img
src={plant.imageUrl}
alt={plant.commonName}
className="w-24 h-24 object-cover rounded-full mr-4 shadow-md"
function PlantIdentifierApp() {
const [selectedImage, setSelectedImage] = useState(null);
const [plantInfo, setPlantInfo] = useState(null);
const [identificationError, setIdentificationError] = useState(null);
const canvasRef = useRef(null);
const handleImageUpload = async (event) => {
const file = event.target.files[0];
processImage(file);
};
const processImage = async (file) => {
if (file) {
setSelectedImage(URL.createObjectURL(file));
setIsLoading(true);
setIdentificationError(null);
if (videoRef.current && canvasRef.current) {
const context = canvasRef.current.getContext('2d');
context.drawImage(videoRef.current, 0, 0, 400, 300);
canvasRef.current.toBlob((blob) => {
// toBlob can return null; only hand a real Blob to processImage
if (blob) {
processImage(new File([blob], 'captured-plant.jpg', { type: 'image/jpeg' }));
}
}, 'image/jpeg');
}
}, [processImage]);
const resetCapture = () => {
setSelectedImage(null);
setPlantInfo(null);
setIdentificationError(null);
<main>
{/* Previous implementation remains the same */}
{!captureMode && !selectedImage && (
<div className="space-y-4">
<input
type="file"
accept="image/*"
onChange={handleImageUpload}
className="hidden"
id="file-upload"
className="w-full bg-green-500 text-white py-3 rounded-lg text-center cursor-pointer hover:bg-green-600 transition duration-300 flex items-center justify-center"
>
📤 Upload Image
</label>
<button
)}
{selectedImage && (
<div className="mt-4 animate-fade-in">
<img
src={selectedImage}
alt="Selected plant"
className="w-full rounded-lg shadow-md mb-4"
sqliteExplorerApp/README.md (1 match)
View and interact with your Val Town SQLite data. It's based on Steve's excellent [SQLite Admin](https://www.val.town/v/stevekrouse/sqlite_admin?v=46) val, adding the ability to run SQLite queries directly in the interface. This new version has a revised UI that's heavily inspired by [LibSQL Studio](https://github.com/invisal/libsql-studio) by [invisal](https://github.com/invisal). It's now more of an SPA, with tables, queries, and results showing up on the same page.
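Under the hood, queries presumably go through Val Town's standard SQLite client; a minimal sketch of that kind of call (the table name is just a placeholder):

    import { sqlite } from "https://esm.town/v/std/sqlite";

    // Run an ad-hoc query; "my_table" stands in for one of your tables
    const result = await sqlite.execute("SELECT * FROM my_table LIMIT 10");
    console.log(result.columns, result.rows);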
## Install
libsqlstudio/README.md (1 match)
# LibSQLStudio

I'll write down the instructions tomorrow. The short version is:
thrillingAmaranthKiwi/main.tsx (3 matches)
title: location,
icon: {
url: 'data:image/svg+xml;utf8,<svg xmlns="http://www.w3.org/2000/svg" width="40" height="40" viewBox="0 0 24 24" fill="%23FF453A"><path d="M12 2c-5.33 4 0 10 0 10s5.33-6 0-10z"/></svg>',
scaledSize: new window.google.maps.Size(40, 40)
}
title: vet.name,
icon: {
url: 'data:image/svg+xml;utf8,<svg xmlns="http://www.w3.org/2000/svg" width="30" height="30" viewBox="0 0 24 24" fill="%2300E461"><path d="M12 2l3.09 6.26L22 9.27l-5 4.87 1.18 6.88L12 17.77l-6.18 3.25L7 14.14 2 9.27l6.91-1.01L12 2z"/></svg>',
scaledSize: new window.google.maps.Size(30, 30)
}
backgroundColor: theme.colors.background,
minHeight: '100vh',
backgroundImage: 'linear-gradient(to bottom, rgba(13,19,33,0.8), rgba(13,19,33,1))'
}}
>
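// Sketch, not from the val: how a marker with an inline-SVG data-URL icon is
// created with the classic Maps JavaScript API. The coordinates, element id,
// and SVG below are placeholders.
const exampleIconUrl =
  'data:image/svg+xml;utf8,<svg xmlns="http://www.w3.org/2000/svg" width="40" height="40" viewBox="0 0 24 24" fill="%23FF453A"><circle cx="12" cy="12" r="8"/></svg>';
const exampleMap = new window.google.maps.Map(document.getElementById('map'), {
  center: { lat: 40.7128, lng: -74.006 },
  zoom: 12,
});
new window.google.maps.Marker({
  position: { lat: 40.7128, lng: -74.006 },
  map: exampleMap,
  title: 'Example location',
  icon: { url: exampleIconUrl, scaledSize: new window.google.maps.Size(40, 40) },
});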
bedtimeStoryMaker/README.md (3 matches)
Inspired by a RedwoodJS demo I made last year, this adds generative art powered by Fal to the bedtime story maker.

for a "fantastical story about a green whale who rides the bus" or a "spooky story about the tomato fox who explores a cave".

Then, using the summary, OpenAI generates another prompt describing the instructions to generate a children's storybook image.

That's sent to Fal to generate an image.

Stories get saved to `bedtime_stories` in SQLite for viewing, searching, and maybe sharing.
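Roughly, the image-and-save step looks like this (a condensed sketch with an assumed model name, placeholder strings, and assumed table columns, not the exact val code):

    import { OpenAI } from "https://esm.town/v/std/openai";
    import * as fal from "npm:@fal-ai/serverless-client";
    import { sqlite } from "https://esm.town/v/std/sqlite";

    const title = "The Green Whale Rides the Bus"; // placeholder
    const summary = "A fantastical story about a green whale who rides the bus."; // placeholder

    // 1. Ask OpenAI to describe an illustration for the story summary
    const openai = new OpenAI();
    const completion = await openai.chat.completions.create({
      model: "gpt-4o-mini", // assumed model
      messages: [{ role: "system", content: `Describe an image that depicts this children's story: ${summary}` }],
    });
    const prompt = completion.choices[0].message.content;

    // 2. Generate the picture with Fal (credentials config omitted)
    const result: any = await fal.run("fal-ai/fast-lightning-sdxl", { input: { prompt } });
    const pictureUrl = result.images[0].url;

    // 3. Save the story (column names are assumptions)
    await sqlite.execute({
      sql: "INSERT INTO bedtime_stories (title, summary, picture_url) VALUES (?, ?, ?)",
      args: [title, summary, pictureUrl],
    });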
bedtimeStoryMaker/main.tsx (8 matches)
role: "system",
content:
`Describe an image that depicts the ${adjective} children's story about a ${color} colored ${animal}: ${summary}.
The description should be descriptive, but three short sentences.
Just give me the instructions, don't make an image.`,
},
],
// fast-lightning-sdxl
const options = {
"image_size": "square",
"num_images": 1,
"num_inference_steps": 6,
"enable_safety_checker": true,
};
// {"num_images": 1,
// "guidance_scale": 9.5,
// "num_inference_steps": 20,
// "expand_prompt": true }
const result: any = await fal.run(`fal-ai/${falModel}`, { input: { prompt }, options });
const url = result.images[0].url;
return url;
title: ogData?.title || "Bedtime Story Maker",
description: ogData?.description || "",
image: ogData?.image || "",
url: ogData?.url || `https://dthyresson-bedtimestorymaker.web.val.run/bedtime_stories`,
};
title,
description: summary,
image: pictureUrl,
url: `https://dthyresson-bedtimestorymaker.web.val.run/bedtime_stories/read/${id}`,
};