eddywu committed on
Commit
2878afa
·
verified ·
1 Parent(s): 43f8af8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -0
app.py CHANGED
@@ -176,6 +176,18 @@ def caption_video(video_path: str) -> str:
176
 
177
  T = Timer()
178
  model, processor = _load_model_and_processor()
 
 
 
 
 
 
 
 
 
 
 
 
179
  # 1) 可能轉碼 / 降維 / 限 FPS
180
  t = time.perf_counter()
181
  safe_path, used_temp, tr_info = maybe_transcode(video_path)
@@ -220,6 +232,9 @@ def caption_video(video_path: str) -> str:
220
  temperature=0.0,
221
  top_p=1.0
222
  )
 
 
 
223
  t = time.perf_counter()
224
  with torch.inference_mode():
225
  generated_ids = model.generate(**inputs, **gen_kwargs)
 
176
 
177
  T = Timer()
178
  model, processor = _load_model_and_processor()
179
+
180
+ print("[ENV] MAX_NEW_TOKENS =", MAX_NEW_TOKENS, flush=True)
181
+ print("[CUDA] available =", torch.cuda.is_available(), flush=True)
182
+ if torch.cuda.is_available():
183
+ print("[CUDA] device =", torch.cuda.get_device_name(0), flush=True)
184
+ try:
185
+ print("[MODEL] device_map =", getattr(model, "hf_device_map", None), flush=True)
186
+ print("[MODEL] first_param_device =", next(model.parameters()).device, flush=True)
187
+ except Exception as e:
188
+ print("[MODEL] device inspect error:", e, flush=True)
189
+
190
+
191
  # 1) 可能轉碼 / 降維 / 限 FPS
192
  t = time.perf_counter()
193
  safe_path, used_temp, tr_info = maybe_transcode(video_path)
 
232
  temperature=0.0,
233
  top_p=1.0
234
  )
235
+
236
+ print("[ENV] MAX_NEW_TOKENS =", MAX_NEW_TOKENS, flush=True)
237
+
238
  t = time.perf_counter()
239
  with torch.inference_mode():
240
  generated_ids = model.generate(**inputs, **gen_kwargs)