jflo committed on
Commit
358a66a
·
verified ·
1 Parent(s): 5d8e251

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -5
app.py CHANGED
@@ -1,5 +1,5 @@
1
  import os
2
- import logging
3
  from contextlib import asynccontextmanager
4
 
5
  from fastapi import FastAPI, HTTPException
@@ -12,8 +12,8 @@ import torch.nn as nn
12
  from transformers import DistilBertModel, DistilBertTokenizer
13
 
14
  # ── Logging setup ─────────────────────────────────────────────────────────────
15
- logging.basicConfig(level=logging.INFO)
16
- logger = logging.getLogger(__name__)
17
 
18
  # ── Label Maps ────────────────────────────────────────────────────────────────
19
  workout_label_map = {
@@ -72,7 +72,9 @@ state = AppState()
72
  @asynccontextmanager
73
  async def lifespan(app: FastAPI):
74
  # ── Startup ───────────────────────────────────────────────────────────────
75
- logger.info("Loading model, tokenizer and Supabase client...")
 
 
76
 
77
  state.device = torch.device('cpu')
78
 
@@ -81,7 +83,9 @@ async def lifespan(app: FastAPI):
81
  'distilbert-base-uncased',
82
  token=os.getenv('HF_TOKEN')
83
  )
84
- logger.info("Tokenizer loaded")
 
 
85
 
86
  # Load model once
87
  state.model = MultiHeadDistilBERT(
 
1
  import os
2
+ #import logging
3
  from contextlib import asynccontextmanager
4
 
5
  from fastapi import FastAPI, HTTPException
 
12
  from transformers import DistilBertModel, DistilBertTokenizer
13
 
14
  # ── Logging setup ─────────────────────────────────────────────────────────────
15
+ #logging.basicConfig(level=logging.INFO)
16
+ #logger = logging.getLogger(__name__)
17
 
18
  # ── Label Maps ────────────────────────────────────────────────────────────────
19
  workout_label_map = {
 
72
  @asynccontextmanager
73
  async def lifespan(app: FastAPI):
74
  # ── Startup ───────────────────────────────────────────────────────────────
75
+ print("Loading model, tokenizer and Supabase client...")
76
+
77
+ #logger.info("Loading model, tokenizer and Supabase client...")
78
 
79
  state.device = torch.device('cpu')
80
 
 
83
  'distilbert-base-uncased',
84
  token=os.getenv('HF_TOKEN')
85
  )
86
+ print("Tokenizer loaded")
87
+
88
+ #logger.info("Tokenizer loaded")
89
 
90
  # Load model once
91
  state.model = MultiHeadDistilBERT(