adityabalaji committed on
Commit
a2c1655
·
verified ·
1 Parent(s): 12e8c98

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -7
app.py CHANGED
@@ -1,10 +1,6 @@
1
- # app.py — EduPrompt API (per-task lazy load + cache-safe on Spaces)
2
 
3
- from fastapi import FastAPI
4
- from pydantic import BaseModel
5
- from fastapi.middleware.cors import CORSMiddleware
6
- from transformers import pipeline
7
- import os, time
8
 
9
  # ---------- Force ALL caches to /tmp ----------
10
  BASE = "/tmp"
@@ -22,6 +18,12 @@ for d in [
22
  ]:
23
  os.makedirs(d, exist_ok=True)
24
 
 
 
 
 
 
 
25
  app = FastAPI(title="EduPrompt API")
26
  app.add_middleware(
27
  CORSMiddleware,
@@ -39,7 +41,12 @@ def health():
39
  writable = True
40
  except Exception:
41
  writable = False
42
- return {"ok": True, "service": "eduprompt-api", "tmpWritable": writable}
 
 
 
 
 
43
 
44
  # ---------- lazy singletons ----------
45
  _summarizer = None
@@ -51,6 +58,7 @@ def safe_pipeline(task: str, model_id: str):
51
  """Cache every model in its own /tmp subdir. CPU-only."""
52
  model_cache = os.path.join(os.environ["TRANSFORMERS_CACHE"], model_id.replace("/", "_"))
53
  os.makedirs(model_cache, exist_ok=True)
 
54
  return pipeline(task, model=model_id, cache_dir=model_cache, trust_remote_code=True, device=-1)
55
 
56
  def get_model(task: str):
@@ -93,6 +101,8 @@ async def run_task(data: InputData):
93
  try:
94
  model, model_used = get_model(task)
95
  except Exception as e:
 
 
96
  return {"error": f"model_load_failed: {type(e).__name__}: {str(e)}"}
97
 
98
  try:
@@ -109,6 +119,8 @@ async def run_task(data: InputData):
109
  prompt = f"Explain what this code does in simple language:\n{text}"
110
  output = model(prompt, max_new_tokens=200, truncation=True)[0]["generated_text"]
111
  except Exception as e:
 
 
112
  return {"error": f"inference_failed: {type(e).__name__}: {str(e)}"}
113
 
114
  return {
 
1
+ # app.py — EduPrompt API (per-task lazy load + cache-safe on Spaces, debug prints)
2
 
3
+ import os
 
 
 
 
4
 
5
  # ---------- Force ALL caches to /tmp ----------
6
  BASE = "/tmp"
 
18
  ]:
19
  os.makedirs(d, exist_ok=True)
20
 
21
+ import time
22
+ from fastapi import FastAPI
23
+ from pydantic import BaseModel
24
+ from fastapi.middleware.cors import CORSMiddleware
25
+ from transformers import pipeline
26
+
27
  app = FastAPI(title="EduPrompt API")
28
  app.add_middleware(
29
  CORSMiddleware,
 
41
  writable = True
42
  except Exception:
43
  writable = False
44
+ return {
45
+ "ok": True,
46
+ "service": "eduprompt-api",
47
+ "tmpWritable": writable,
48
+ "TRANSFORMERS_CACHE": os.environ["TRANSFORMERS_CACHE"]
49
+ }
50
 
51
  # ---------- lazy singletons ----------
52
  _summarizer = None
 
58
  """Cache every model in its own /tmp subdir. CPU-only."""
59
  model_cache = os.path.join(os.environ["TRANSFORMERS_CACHE"], model_id.replace("/", "_"))
60
  os.makedirs(model_cache, exist_ok=True)
61
+ print(f"Loading model '{model_id}' for task '{task}' into cache dir: {model_cache}")
62
  return pipeline(task, model=model_id, cache_dir=model_cache, trust_remote_code=True, device=-1)
63
 
64
  def get_model(task: str):
 
101
  try:
102
  model, model_used = get_model(task)
103
  except Exception as e:
104
+ import traceback
105
+ print(traceback.format_exc())
106
  return {"error": f"model_load_failed: {type(e).__name__}: {str(e)}"}
107
 
108
  try:
 
119
  prompt = f"Explain what this code does in simple language:\n{text}"
120
  output = model(prompt, max_new_tokens=200, truncation=True)[0]["generated_text"]
121
  except Exception as e:
122
+ import traceback
123
+ print(traceback.format_exc())
124
  return {"error": f"inference_failed: {type(e).__name__}: {str(e)}"}
125
 
126
  return {