david167 committed on
Commit
0b607e8
·
1 Parent(s): c86959d

Fix syntax errors: correct comma placement and indentation

Browse files
Files changed (2) hide show
  1. app.py +6 -6
  2. gradio_app.py +1 -1
app.py CHANGED
@@ -60,7 +60,7 @@ async def load_model_with_retry(model_name: str, hf_token: str, max_retries: int
60
  model = AutoModelForCausalLM.from_pretrained(
61
  model_name,
62
  torch_dtype=torch.float16 if device == "cuda:0" else torch.float32,
63
- device_map={"": 0} # Force all parameters to GPU 0,
64
  trust_remote_code=True,
65
  low_cpu_mem_usage=True,
66
  token=hf_token
@@ -69,7 +69,7 @@ async def load_model_with_retry(model_name: str, hf_token: str, max_retries: int
69
  model = AutoModelForCausalLM.from_pretrained(
70
  model_name,
71
  torch_dtype=torch.float16 if device == "cuda:0" else torch.float32,
72
- device_map={"": 0} # Force all parameters to GPU 0,
73
  trust_remote_code=True,
74
  low_cpu_mem_usage=True,
75
  use_safetensors=True, # Force safetensors to avoid CVE-2025-32434
@@ -96,10 +96,10 @@ async def load_model():
96
 
97
  # Check if CUDA is available
98
  if torch.cuda.is_available():
99
- torch.cuda.set_device(0)
100
- device = "cuda:0"
101
- else:
102
- device = "cpu"
103
  logger.info(f"Using device: {device}")
104
 
105
  if device == "cuda:0":
 
60
  model = AutoModelForCausalLM.from_pretrained(
61
  model_name,
62
  torch_dtype=torch.float16 if device == "cuda:0" else torch.float32,
63
+ device_map={"": 0}, # Force all parameters to GPU 0
64
  trust_remote_code=True,
65
  low_cpu_mem_usage=True,
66
  token=hf_token
 
69
  model = AutoModelForCausalLM.from_pretrained(
70
  model_name,
71
  torch_dtype=torch.float16 if device == "cuda:0" else torch.float32,
72
+ device_map={"": 0}, # Force all parameters to GPU 0
73
  trust_remote_code=True,
74
  low_cpu_mem_usage=True,
75
  use_safetensors=True, # Force safetensors to avoid CVE-2025-32434
 
96
 
97
  # Check if CUDA is available
98
  if torch.cuda.is_available():
99
+ torch.cuda.set_device(0)
100
+ device = "cuda:0"
101
+ else:
102
+ device = "cpu"
103
  logger.info(f"Using device: {device}")
104
 
105
  if device == "cuda:0":
gradio_app.py CHANGED
@@ -56,7 +56,7 @@ class ModelManager:
56
  self.model = AutoModelForCausalLM.from_pretrained(
57
  base_model_name,
58
  torch_dtype=torch.float16 if self.device == "cuda:0" else torch.float32,
59
- device_map={"": 0} # Force all parameters to GPU 0,
60
  trust_remote_code=True,
61
  low_cpu_mem_usage=True,
62
  token=hf_token
 
56
  self.model = AutoModelForCausalLM.from_pretrained(
57
  base_model_name,
58
  torch_dtype=torch.float16 if self.device == "cuda:0" else torch.float32,
59
+ device_map={"": 0}, # Force all parameters to GPU 0
60
  trust_remote_code=True,
61
  low_cpu_mem_usage=True,
62
  token=hf_token