DSDUDEd committed on
Commit
b830d42
·
verified ·
1 Parent(s): 8437085

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -10
app.py CHANGED
@@ -1,9 +1,8 @@
1
  import torch
2
- from fastapi import FastAPI, Request, Form
3
- from fastapi.responses import HTMLResponse, JSONResponse
4
- from fastapi.staticfiles import StaticFiles
5
- from fastapi.templating import Jinja2Templates
6
  from transformers import AutoTokenizer, AutoModelForCausalLM
 
7
 
8
  # ---------------------------
9
  # Models
@@ -11,7 +10,7 @@ from transformers import AutoTokenizer, AutoModelForCausalLM
11
  MODEL_OPTIONS = {
12
  "DeepSeek Coder 1.3B": "deepseek-ai/deepseek-coder-1.3b-instruct",
13
  "StarCoder 1B": "bigcode/starcoderbase-1b",
14
- "CodeLLaMA 7B": "codellama/CodeLlama-7b-Instruct-hf"
15
  }
16
  loaded_models = {}
17
 
@@ -31,19 +30,22 @@ def get_model(model_key):
31
  # FastAPI setup
32
  # ---------------------------
33
  app = FastAPI()
34
- app.mount("/static", StaticFiles(directory="static"), name="static")
35
- templates = Jinja2Templates(directory=".")
36
 
37
  # ---------------------------
38
  # Routes
39
  # ---------------------------
40
  @app.get("/", response_class=HTMLResponse)
41
- async def home(request: Request):
42
- return templates.TemplateResponse("index.html", {"request": request, "models": list(MODEL_OPTIONS.keys())})
 
 
 
 
 
 
43
 
44
  @app.post("/chat")
45
  async def chat(user_input: str = Form(...), model_choice: str = Form(...), history: str = Form("[]")):
46
- import json
47
  history = json.loads(history)
48
 
49
  tokenizer, model = get_model(model_choice)
 
1
  import torch
2
+ from fastapi import FastAPI, Form
3
+ from fastapi.responses import HTMLResponse, JSONResponse, FileResponse
 
 
4
  from transformers import AutoTokenizer, AutoModelForCausalLM
5
+ import json
6
 
7
  # ---------------------------
8
  # Models
 
10
# Display name -> Hugging Face Hub repository ID for each selectable model.
MODEL_OPTIONS = {
    "DeepSeek Coder 1.3B": "deepseek-ai/deepseek-coder-1.3b-instruct",
    "StarCoder 1B": "bigcode/starcoderbase-1b",
    # Hub repo IDs are case-sensitive: the canonical repo is
    # "codellama/CodeLlama-7b-Instruct-hf". The "CodeLLaMA" casing
    # introduced in this commit does not resolve on the Hub.
    "CodeLLaMA 7B": "codellama/CodeLlama-7b-Instruct-hf",
}
# Cache of already-loaded (tokenizer, model) pairs, keyed by display name,
# so each model is downloaded/instantiated at most once per process.
loaded_models = {}
16
 
 
30
# FastAPI setup
# ---------------------------
# Single module-level application instance; the route decorators below
# attach to it. Replaces the earlier StaticFiles/Jinja2Templates setup:
# index.html and style.css are now served directly via FileResponse.
app = FastAPI()
 
 
33
 
34
  # ---------------------------
35
  # Routes
36
  # ---------------------------
37
@app.get("/", response_class=HTMLResponse)
async def home():
    """Serve the single-page chat UI.

    index.html sits next to app.py, so it is returned directly as a file
    rather than rendered through a templating layer.
    """
    return FileResponse("index.html")
41
+
42
@app.get("/style.css")
async def css():
    """Serve the stylesheet straight from the project root.

    A dedicated route stands in for a StaticFiles mount, since style.css is
    the only static asset besides index.html.
    """
    return FileResponse("style.css")
46
 
47
  @app.post("/chat")
48
  async def chat(user_input: str = Form(...), model_choice: str = Form(...), history: str = Form("[]")):
 
49
  history = json.loads(history)
50
 
51
  tokenizer, model = get_model(model_choice)