hadokenvskikoken committed on
Commit
b6522d9
·
verified ·
1 Parent(s): 04f73d7

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +9 -10
main.py CHANGED
@@ -12,7 +12,7 @@ from fastapi.middleware.cors import CORSMiddleware
12
 
13
  app = FastAPI(title="Code Evaluation & Optimization API")
14
 
15
- # Required CORS configuration
16
  app.add_middleware(
17
  CORSMiddleware,
18
  allow_origins=["*"],
@@ -21,20 +21,20 @@ app.add_middleware(
21
  allow_headers=["*"],
22
  )
23
 
24
- # --- Environment Setup ---
25
  CACHE_DIR = Path("/.cache/huggingface")
26
  CACHE_DIR.mkdir(parents=True, exist_ok=True)
27
  os.environ["TRANSFORMERS_CACHE"] = str(CACHE_DIR)
28
  os.environ["HF_HOME"] = str(CACHE_DIR)
29
 
30
- # --- Load AI Model ---
31
  MODEL_NAME = "codellama/CodeLlama-7b-hf"
32
 
33
  try:
34
  tokenizer = AutoTokenizer.from_pretrained(
35
  MODEL_NAME,
36
- cache_dir=str(CACHE_DIR)
37
- model = AutoModelForCausalLM.from_pretrained(
38
  MODEL_NAME,
39
  device_map="auto",
40
  torch_dtype=torch.float16,
@@ -42,12 +42,12 @@ try:
42
  except Exception as e:
43
  raise RuntimeError(f"Failed to load model: {str(e)}")
44
 
45
- # --- Request Model ---
46
  class CodeRequest(BaseModel):
47
  code: str
48
- language: str = "python" # Default to Python
49
 
50
- # --- Helper Functions ---
51
  def evaluate_code(user_code: str, lang: str) -> dict:
52
  """Evaluate code for correctness, performance, and security"""
53
  start_time = time.time()
@@ -131,7 +131,7 @@ def optimize_code_ai(user_code: str, lang: str) -> str:
131
  except Exception as e:
132
  raise HTTPException(status_code=500, detail=f"AI optimization failed: {str(e)}")
133
 
134
- # --- API Endpoints ---
135
  @app.post("/evaluate")
136
  async def evaluate_endpoint(request: CodeRequest):
137
  try:
@@ -159,7 +159,6 @@ def health_check():
159
  }
160
  }
161
 
162
- # Required for Hugging Face Spaces
163
  if __name__ == "__main__":
164
  import uvicorn
165
  uvicorn.run("main:app", host="0.0.0.0", port=7860)
 
12
 
13
  app = FastAPI(title="Code Evaluation & Optimization API")
14
 
15
+ # CORS Configuration
16
  app.add_middleware(
17
  CORSMiddleware,
18
  allow_origins=["*"],
 
21
  allow_headers=["*"],
22
  )
23
 
24
+ # Environment Setup
25
  CACHE_DIR = Path("/.cache/huggingface")
26
  CACHE_DIR.mkdir(parents=True, exist_ok=True)
27
  os.environ["TRANSFORMERS_CACHE"] = str(CACHE_DIR)
28
  os.environ["HF_HOME"] = str(CACHE_DIR)
29
 
30
+ # Load AI Model
31
  MODEL_NAME = "codellama/CodeLlama-7b-hf"
32
 
33
  try:
34
  tokenizer = AutoTokenizer.from_pretrained(
35
  MODEL_NAME,
36
+ cache_dir=str(CACHE_DIR))
37
+ model = AutoModelForCausalLM.from_pretrained(
38
  MODEL_NAME,
39
  device_map="auto",
40
  torch_dtype=torch.float16,
 
42
  except Exception as e:
43
  raise RuntimeError(f"Failed to load model: {str(e)}")
44
 
45
+ # Request Model
46
  class CodeRequest(BaseModel):
47
  code: str
48
+ language: str = "python"
49
 
50
+ # Helper Functions
51
  def evaluate_code(user_code: str, lang: str) -> dict:
52
  """Evaluate code for correctness, performance, and security"""
53
  start_time = time.time()
 
131
  except Exception as e:
132
  raise HTTPException(status_code=500, detail=f"AI optimization failed: {str(e)}")
133
 
134
+ # API Endpoints
135
  @app.post("/evaluate")
136
  async def evaluate_endpoint(request: CodeRequest):
137
  try:
 
159
  }
160
  }
161
 
 
162
  if __name__ == "__main__":
163
  import uvicorn
164
  uvicorn.run("main:app", host="0.0.0.0", port=7860)