Sathvika Alla committed on
Commit
257ec40
·
1 Parent(s): 878016a
Files changed (2) hide show
  1. Dockerfile +29 -0
  2. app.py +39 -0
Dockerfile ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Stage 1: Build stage — compiles/installs Python dependencies in isolation
FROM python:3.10-slim AS build

WORKDIR /app

# Build toolchain plus imaging system libraries (typical Pillow/OpenCV deps).
RUN apt-get update && apt-get install -y \
    build-essential \
    libglib2.0-0 \
    libsm6 \
    libxext6 \
    libxrender-dev \
    && rm -rf /var/lib/apt/lists/*

# Install dependencies into a clean prefix so only installed packages
# (not pip caches or build tooling) are carried into the final image.
COPY requirements.txt .
RUN pip install --no-cache-dir --prefix=/install -r requirements.txt

# NOTE(review): the original had `COPY . .` here too; it was dead weight —
# nothing from the build stage's source copy ever reached the final image,
# which copies the source fresh from the build context below.

# Stage 2: Production stage — minimal runtime image
FROM python:3.10-slim

WORKDIR /app

# Copy installed packages from the build stage into the default prefix.
COPY --from=build /install /usr/local

# NOTE(review): libglib2.0-0 / libsm6 / libxext6 are installed only in the
# discarded build stage. If any requirement (e.g. opencv) needs them at
# runtime, they must also be apt-installed here — verify against
# requirements.txt.
COPY . .

CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
app.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from fastapi import FastAPI, File, UploadFile
from fastapi.middleware.cors import CORSMiddleware
from huggingface_hub import hf_hub_download
import tensorflow as tf
import os
import numpy as np
from PIL import Image

# Fetch the model artifacts from the Hugging Face Hub into ./model.
# A single loop replaces three copy-pasted hf_hub_download calls.
repo_id = "Sathvika-Alla/masterclass-2025"
for _fname in ("config.json", "metadata.json", "model.weights.h5"):
    hf_hub_download(repo_id, filename=_fname, repo_type="model", local_dir="./model")

# 2) load it
# NOTE(review): tf.keras load_model on a directory expects a SavedModel;
# the downloaded artifacts look like a Keras config + .h5 weights pair —
# confirm this actually loads under the deployed TF/Keras version.
model = tf.keras.models.load_model("./model")
app = FastAPI()

# NOTE(review): per the Fetch/CORS spec, browsers reject
# allow_origins=["*"] combined with allow_credentials=True; pin explicit
# origins if credentialed requests are actually needed.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Class labels in the same order as the model's output units — these are
# the labels the model was trained on.
ANIMALS = ['Cat', 'Dog', 'Panda']
29
+
30
@app.post('/upload/image')
async def uploadImage(img: UploadFile = File(...)):
    """Classify an uploaded image and return its label (e.g. "Dog").

    The label is one of the entries in ANIMALS, chosen by the model's
    highest-probability output.
    """
    # Force 3-channel RGB: uploads may be RGBA (PNG) or grayscale, which
    # would feed the model the wrong number of channels and crash or
    # silently mispredict.
    original_image = Image.open(img.file).convert("RGB")
    resized_image = original_image.resize((64, 64))  # model's expected input size
    # The model wants a batch of images; add the batch axis -> (1, 64, 64, 3).
    images_to_predict = np.expand_dims(np.array(resized_image), axis=0)
    # predictions has shape (1, num_classes) of one-hot-like probabilities.
    predictions = model.predict(images_to_predict)
    # Index of the highest-probability class for the single image in the batch.
    classifications = predictions.argmax(axis=1)
    return ANIMALS[int(classifications[0])]