Spaces:
Sleeping
Sleeping
# Standard library (one import per line, PEP 8)
import base64
import json
import os
import re

# Third-party
import requests
from flask import Flask, request, jsonify, send_file
from flask_cors import CORS
from groq import Groq
from supabase import create_client, Client

app = Flask(__name__)
CORS(app)  # allow cross-origin calls from the static frontend

# GROQ_API_KEY, SUPABASE_URL and SUPABASE_KEY must be set in the environment.
# Indexing os.environ directly (rather than .get) fails fast at startup
# instead of erroring on the first request.
client = Groq(api_key=os.environ["GROQ_API_KEY"])
supabase = create_client(os.environ["SUPABASE_URL"], os.environ["SUPABASE_KEY"])
def fetch_wikipedia(search_term):
    """Fetch the Wikipedia REST summary for *search_term*.

    Spaces are replaced with underscores to form the page slug. Returns the
    decoded JSON payload; for a missing page Wikipedia still returns a JSON
    error body, which downstream cleaning handles gracefully.
    """
    search_term = search_term.replace(" ", "_")
    url = f"https://en.wikipedia.org/api/rest_v1/page/summary/{search_term}"
    # timeout added: requests.get without a timeout can block forever if
    # Wikipedia stalls, hanging the request worker.
    return requests.get(
        url,
        headers={"User-Agent": "EucalyptusLens/1.0"},
        timeout=10,
    ).json()
def clean_wikipedia(data):
    """Normalize a Wikipedia summary payload into a small plain-ASCII record.

    Strips bracketed citation markers, collapses runs of whitespace, and
    drops non-ASCII characters from the extract. Returns a dict with
    "title", "summary" and "url" keys (empty strings when absent).
    """
    text = data.get("extract", "")
    # Apply the cleanup passes in order: citations, whitespace, non-ASCII.
    for pattern, replacement in (
        (r'\[.*?\]', ''),
        (r'\s{2,}', ' '),
        (r'[^\x00-\x7F]+', ''),
    ):
        text = re.sub(pattern, replacement, text)
    desktop = data.get("content_urls", {}).get("desktop", {})
    return {
        "title": data.get("title", ""),
        "summary": text.strip(),
        "url": desktop.get("page", ""),
    }
def build_rag_context(c):
    """Render a cleaned Wikipedia record as a plain-text context snippet for the RAG prompt."""
    parts = (
        "Plant: " + c["title"],
        "Summary: " + c["summary"],
        "Source: " + c["url"],
    )
    return "\n".join(parts) + "\n"
def identify_plant(image_path):
    """Identify the plant in an image via the Groq vision model.

    Sends the image (base64-encoded as a data URL) together with a strict
    JSON-only prompt, strips any markdown code fences from the reply, and
    returns the parsed dict (common_name, scientific_name, family,
    confidence, key_features, wikipedia_search_term).
    """
    with open(image_path, "rb") as handle:
        encoded = base64.b64encode(handle.read()).decode("utf-8")

    instructions = (
        "You are an expert botanist. Respond in JSON only:\n"
        '{"common_name":"...","scientific_name":"...","family":"...",'
        '"confidence":"high/medium/low","key_features":["..."],'
        '"wikipedia_search_term":"..."}'
    )
    reply = client.chat.completions.create(
        model="meta-llama/llama-4-scout-17b-16e-instruct",
        messages=[{
            "role": "user",
            "content": [
                {"type": "image_url",
                 "image_url": {"url": "data:image/jpeg;base64," + encoded}},
                {"type": "text", "text": instructions},
            ],
        }],
        temperature=0.2,
        max_tokens=500,
    )

    # Models sometimes wrap JSON in ```json fences; strip them before parsing.
    raw = reply.choices[0].message.content
    return json.loads(re.sub(r'```json|```', '', raw).strip())
def analyze_plant(image_path):
    """Run the full pipeline: identify the plant, then attach Wikipedia context."""
    identification = identify_plant(image_path)
    # Prefer the model-suggested search term; fall back to the common name.
    term = identification.get(
        "wikipedia_search_term",
        identification.get("common_name", ""),
    )
    context = build_rag_context(clean_wikipedia(fetch_wikipedia(term)))
    return {"identification": identification, "wikipedia": context}
def save_to_supabase(result):
    """Persist one analysis result to the Supabase ``plant_history`` table.

    *result* is the dict produced by analyze_plant: an "identification"
    dict from the model plus a "wikipedia" context string.
    """
    # Renamed from `id`, which shadowed the builtin.
    ident = result["identification"]
    supabase.table("plant_history").insert({
        "common_name": ident.get("common_name"),
        "scientific_name": ident.get("scientific_name"),
        "family": ident.get("family"),
        "confidence": ident.get("confidence"),
        "key_features": ident.get("key_features"),
        "wikipedia_summary": result["wikipedia"],
        # NOTE(review): this column receives the *search term*, not a URL —
        # the actual page URL lives inside result["wikipedia"]; confirm the
        # intended schema before changing what is stored.
        "wikipedia_url": ident.get("wikipedia_search_term"),
    }).execute()
@app.route("/")
def index():
    """Serve the single-page frontend.

    Route decorator added: without it the view function is never
    registered with Flask and the endpoint is unreachable.
    """
    return send_file("index.html")
@app.route("/analyze", methods=["POST"])
def analyze():
    """Accept an uploaded plant photo, analyze it, persist and return the result.

    Returns 400 JSON when the multipart "file" part is missing; otherwise a
    JSON body with "identification" and "wikipedia" keys.
    """
    import tempfile  # local import keeps this fix self-contained

    if "file" not in request.files:
        return jsonify({"error": "No file"}), 400
    upload = request.files["file"]
    # Unique per-request temp file instead of a shared "temp.jpg":
    # concurrent requests would otherwise overwrite each other's upload.
    fd, path = tempfile.mkstemp(suffix=".jpg")
    os.close(fd)
    try:
        upload.save(path)
        result = analyze_plant(path)
    finally:
        # Always remove the temp file, even when analysis raises —
        # the original leaked temp.jpg on any exception.
        os.remove(path)
    save_to_supabase(result)
    return jsonify(result)
@app.route("/history")
def history():
    """Return all saved identifications, newest first, as a JSON array.

    Route decorator added: without it the view function is never
    registered with Flask and the endpoint is unreachable.
    """
    rows = (
        supabase.table("plant_history")
        .select("*")
        .order("created_at", desc=True)
        .execute()
        .data
    )
    return jsonify(rows)
if __name__ == "__main__":
    # Bind to all interfaces; PORT is supplied by the hosting platform,
    # defaulting to 5000 for local development.
    listen_port = int(os.environ.get("PORT", 5000))
    app.run(host="0.0.0.0", port=listen_port)