humza7656 committed on
Commit
e966654
·
verified ·
1 Parent(s): 47ea625

Upload 4 files

Browse files
Files changed (4) hide show
  1. Dockerfile +27 -0
  2. main.py +20 -0
  3. requirements.txt +8 -0
  4. snake_rec.py +117 -0
Dockerfile ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Python base image (slim variant keeps the image small)
FROM python:3.10-slim

# All subsequent paths are relative to /app
WORKDIR /app

# Install dependencies first so this layer is cached across code-only changes
COPY requirements.txt .
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# --- Caching configuration ---
# Hugging Face Spaces only permit writes under /tmp, which matters for many
# ML libraries. Not strictly required for google-genai, but a safe default.
ENV XDG_CACHE_HOME=/tmp/hf_cache
ENV HF_HOME=/tmp/hf_cache

# Copy the application source (main.py, snake_rec.py, ...).
# Prefer HF Secrets over baking a .env file into the image.
COPY . .

# 7860 is the default port for HF Spaces
EXPOSE 7860

# Serve the FastAPI application ('app' object in main.py) with uvicorn
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
main.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
from fastapi import FastAPI
from snake_rec import snake_app as snake
from fastapi.middleware.cors import CORSMiddleware

# Application entry point: a FastAPI app with open CORS and the snake router.
app = FastAPI()

# Allow any origin/method/header so the API is callable from any browser client.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.get("/")
def home():
    """Simple liveness endpoint for the root path."""
    return {"message": "SISISIS"}


# Mount the snake-recognition endpoints defined in snake_rec.py.
app.include_router(snake)
requirements.txt ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ fastapi
2
+ uvicorn[standard]
3
+ python-dotenv
4
+ google-genai
5
+ python-multipart
6
+ Pillow
7
+ requests
8
+ supabase
snake_rec.py ADDED
@@ -0,0 +1,117 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Standard library
import io
import os
from typing import Optional
from uuid import uuid4

# Third party
import requests
from PIL import Image
from dotenv import load_dotenv
from fastapi import APIRouter, UploadFile, File, Form
from google import genai
from google.genai.types import GenerateContentConfig
from supabase import create_client

# Pull configuration from the environment (.env locally, HF Secrets in prod).
load_dotenv()
url = os.getenv('url')
key = os.getenv('key')
GOOGLE_API_KEY = os.getenv('GOOGLE_API_KEY')

# Shared service clients, created once at import time.
client = genai.Client(api_key=GOOGLE_API_KEY)
supabase = create_client(url, key)

# In-memory chat history, shared by every request served by this process.
user = []
chatbot = []

snake_app = APIRouter()

# Instruction sent to Gemini alongside each uploaded image.
prompt_text = (
    "You will get a snake image. You have to tell what kind of snake it is "
    "and if it's venomous or non-venomous. If image is not a snake then just say "
    "'There is no snake in image!'."
)
def _chat_reply(message: str) -> str:
    """Record *message* in the user history and return Gemini's chat reply.

    Builds the system prompt from the (live) module-level history lists and
    appends the model's answer to `chatbot` before returning it.
    """
    user.append(message)
    # The dict holds references to the live lists, so any appends stay visible.
    memory = {
        'User': user,
        'Chatbot_response': chatbot
    }
    prompt_chat = (
        f"You are a Snake Chatbot. User queries about snakes. Make Sure your response should not be so long which make user difficult to understand. "
        f"Chat history: {memory}"
    )
    response_text = client.models.generate_content(
        model="gemini-2.5-flash",
        config=GenerateContentConfig(
            system_instruction=prompt_chat,
            temperature=0.1
        ),
        contents=message
    )
    chatbot.append(response_text.text)
    return response_text.text


@snake_app.post("/snake")
async def snakeeee(f1: Optional[UploadFile] = File(None), text: Optional[str] = Form(None)):
    """Identify a snake from an uploaded image and/or answer a text query.

    Cases:
      * image only   -> archive the image in Supabase storage, classify with Gemini
      * image + text -> classify the image, then answer the text with chat history
      * text only    -> answer the text using the accumulated chat history
      * neither      -> error payload

    Returns a dict with a 'Chatbot' reply or an 'error' message.
    """
    # Case 1: file (with optional accompanying text)
    if f1:
        # Unique storage name; fall back to a generic extension if none given.
        file_extension = f1.filename.split(".")[-1] if f1.filename else "bin"
        file_name = f"{uuid4()}.{file_extension}"
        file_data = await f1.read()

        # BUGFIX: the previous version (a) wrote a never-deleted local copy of
        # the upload — HF Spaces only allow writes under /tmp — and (b)
        # re-downloaded the image from its public URL; when that download
        # failed, `response_gemini` was never bound and the final return raised
        # NameError. We now upload once and analyze the bytes already in memory.
        supabase.storage.from_("user_files").upload(
            f"images/{file_name}",
            file_data,
            {"content-type": f"image/{file_extension}"}
        )

        try:
            image_content = Image.open(io.BytesIO(file_data))
        except Exception:
            # Upload was not a decodable image; report instead of crashing.
            return {'error': 'Uploaded file is not a valid image'}

        # Classify the snake image.
        response_gemini = client.models.generate_content(
            model="gemini-2.5-flash",
            contents=[image_content, prompt_text]
        )
        chatbot.append(response_gemini.text)

        # If text accompanies the image, answer it with the updated history.
        if text:
            return {'Chatbot': _chat_reply(text)}

        return {'Chatbot': response_gemini.text}

    # Case 2: text only
    if text:
        return {'Chatbot': _chat_reply(text)}

    # Case 3: neither file nor text
    return {'error': 'No file or text provided'}