OffiongBassey committed on
Commit
6d5ca23
·
1 Parent(s): 934ccda

New Changes

Browse files
Files changed (2) hide show
  1. app.py +10 -11
  2. requirements.txt +11 -7
app.py CHANGED
@@ -4,13 +4,10 @@ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
4
  import logging
5
  import sys
6
 
7
- # Set up logging to see errors in Space logs
8
  logging.basicConfig(level=logging.INFO, stream=sys.stdout)
9
  logger = logging.getLogger(__name__)
10
 
11
  app = FastAPI()
12
-
13
- # Declare model/tokenizer globally, load them in startup
14
  tokenizer = None
15
  model = None
16
 
@@ -19,26 +16,28 @@ async def load_model():
19
  global tokenizer, model
20
  try:
21
  logger.info("Loading tokenizer and model...")
22
- # Use local model files for faster, more reliable loading
23
- tokenizer = AutoTokenizer.from_pretrained("offiongbassey/efik-mt", local_files_only=True)
24
- model = AutoModelForSeq2SeqLM.from_pretrained("offiongbassey/efik-mt", local_files_only=True)
 
25
  logger.info("βœ… Model loaded successfully!")
26
  except Exception as e:
27
  logger.error(f"❌ Failed to load model: {e}", exc_info=True)
28
- # Don't raise here, let the endpoint handle the error
 
29
 
30
  class TranslateRequest(BaseModel):
31
  text: str
32
- source: str # "eng_Latn"
33
 
34
  @app.get("/")
35
  async def home():
36
- return {"message": "Efik Translation API is running.", "status": "healthy"}
37
 
38
  @app.post("/translate")
39
  async def translate(req: TranslateRequest):
40
  if tokenizer is None or model is None:
41
- raise HTTPException(status_code=503, detail="Model is still loading or failed to load.")
42
 
43
  try:
44
  input_text = f"{req.source} {req.text}"
@@ -48,4 +47,4 @@ async def translate(req: TranslateRequest):
48
  return {"translation": translation}
49
  except Exception as e:
50
  logger.error(f"Translation error: {e}", exc_info=True)
51
- raise HTTPException(status_code=500, detail=f"Translation failed: {str(e)}")
 
4
  import logging
5
  import sys
6
 
 
7
  logging.basicConfig(level=logging.INFO, stream=sys.stdout)
8
  logger = logging.getLogger(__name__)
9
 
10
  app = FastAPI()
 
 
11
  tokenizer = None
12
  model = None
13
 
 
16
  global tokenizer, model
17
  try:
18
  logger.info("Loading tokenizer and model...")
19
+ # Load from Hub (allows download on first run).
20
+ # If you uploaded files to the Space, change repo_id to "."
21
+ tokenizer = AutoTokenizer.from_pretrained("offiongbassey/efik-mt")
22
+ model = AutoModelForSeq2SeqLM.from_pretrained("offiongbassey/efik-mt")
23
  logger.info("βœ… Model loaded successfully!")
24
  except Exception as e:
25
  logger.error(f"❌ Failed to load model: {e}", exc_info=True)
26
+ # A failing model load is critical. You may want to raise here to fail fast.
27
+ # For now, we let it be, and the /translate endpoint will check.
28
 
29
  class TranslateRequest(BaseModel):
30
  text: str
31
+ source: str
32
 
33
  @app.get("/")
34
  async def home():
35
+ return {"message": "Efik Translation API", "model_loaded": model is not None}
36
 
37
  @app.post("/translate")
38
  async def translate(req: TranslateRequest):
39
  if tokenizer is None or model is None:
40
+ raise HTTPException(status_code=503, detail="Model is still loading or failed to load. Please try again in a moment.")
41
 
42
  try:
43
  input_text = f"{req.source} {req.text}"
 
47
  return {"translation": translation}
48
  except Exception as e:
49
  logger.error(f"Translation error: {e}", exc_info=True)
50
+ raise HTTPException(status_code=500, detail="Internal translation error.")
requirements.txt CHANGED
@@ -1,7 +1,11 @@
1
- fastapi==0.104.1
2
- uvicorn[standard]==0.24.0
3
- transformers==4.36.2
4
- torch==2.1.2
5
- huggingface_hub==0.20.3
6
- sentencepiece==0.1.99 # <-- CRITICAL ADDITION
7
- accelerate==0.26.1 # <-- For efficient model loading
 
 
 
 
 
1
+ # PINNED: Critical to avoid NumPy 2.0 incompatibility
2
+ numpy<2.0,>=1.23.5
3
+
4
+ # UNPINNED: Will get latest compatible versions
5
+ torch
6
+ transformers
7
+ sentencepiece
8
+ accelerate
9
+ fastapi
10
+ uvicorn[standard]
11
+ huggingface_hub