Spaces:
Runtime error
Runtime error
Amish Kushwaha committed on
Commit ·
156783f
1
Parent(s): 4848e5d
Redefine cache directory based on latest standard
Browse files- Dockerfile +4 -4
- app.py +5 -6
Dockerfile
CHANGED
|
@@ -1,8 +1,8 @@
|
|
| 1 |
# Use Python as the base image
|
| 2 |
FROM python:3.9
|
| 3 |
|
| 4 |
-
# Set environment variable for cache
|
| 5 |
-
ENV
|
| 6 |
|
| 7 |
# Set the working directory
|
| 8 |
WORKDIR /app
|
|
@@ -14,8 +14,8 @@ RUN pip install --no-cache-dir -r requirements.txt
|
|
| 14 |
# Copy the FastAPI app
|
| 15 |
COPY . .
|
| 16 |
|
| 17 |
-
# Ensure the cache directory is writable
|
| 18 |
-
RUN mkdir -p /
|
| 19 |
|
| 20 |
# Run FastAPI server
|
| 21 |
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
|
|
|
|
| 1 |
# Use Python as the base image
|
| 2 |
FROM python:3.9
|
| 3 |
|
| 4 |
+
# Set environment variable for cache to a writable location
|
| 5 |
+
ENV HF_HOME=/tmp/hf_home
|
| 6 |
|
| 7 |
# Set the working directory
|
| 8 |
WORKDIR /app
|
|
|
|
| 14 |
# Copy the FastAPI app
|
| 15 |
COPY . .
|
| 16 |
|
| 17 |
+
# Ensure the cache directory is created and writable
|
| 18 |
+
RUN mkdir -p /tmp/hf_home
|
| 19 |
|
| 20 |
# Run FastAPI server
|
| 21 |
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
|
app.py
CHANGED
|
@@ -3,11 +3,10 @@ from fastapi import FastAPI
|
|
| 3 |
from pydantic import BaseModel
|
| 4 |
from transformers import pipeline
|
| 5 |
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
os.
|
| 10 |
-
os.makedirs(CACHE_DIR, exist_ok=True) # Create the directory if it doesn't exist
|
| 11 |
|
| 12 |
# Load your Hugging Face model
|
| 13 |
model = pipeline("text-generation", model="devops-bda/Abap")
|
|
@@ -23,7 +22,7 @@ class InputData(BaseModel):
|
|
| 23 |
@app.get("/health")
|
| 24 |
async def health_check():
|
| 25 |
return {"status": "ok", "message": "Model is ready"}
|
| 26 |
-
|
| 27 |
# Define prediction endpoint
|
| 28 |
@app.post("/predict")
|
| 29 |
async def predict(data: InputData):
|
|
|
|
| 3 |
from pydantic import BaseModel
|
| 4 |
from transformers import pipeline
|
| 5 |
|
| 6 |
+
# Set the cache directory to a writable location using HF_HOME
|
| 7 |
+
cache_dir = "/tmp/hf_home"
|
| 8 |
+
os.environ["HF_HOME"] = cache_dir
|
| 9 |
+
os.makedirs(cache_dir, exist_ok=True)
|
|
|
|
| 10 |
|
| 11 |
# Load your Hugging Face model
|
| 12 |
model = pipeline("text-generation", model="devops-bda/Abap")
|
|
|
|
| 22 |
@app.get("/health")
|
| 23 |
async def health_check():
|
| 24 |
return {"status": "ok", "message": "Model is ready"}
|
| 25 |
+
|
| 26 |
# Define prediction endpoint
|
| 27 |
@app.post("/predict")
|
| 28 |
async def predict(data: InputData):
|