Eueuiaa committed on
Commit
fd9ba1f
·
verified ·
1 Parent(s): fec3865

Update api/ltx_server_refactored.py

Browse files
Files changed (1) hide show
  1. api/ltx_server_refactored.py +12 -4
api/ltx_server_refactored.py CHANGED
@@ -240,14 +240,22 @@ class VideoService:
240
  return conditioning_items
241
 
242
  def _prepare_condition_items_latent(self, items_list: List):
243
- if not items_list: return []
 
244
  conditioning_items = []
245
  for tensor_patch, frame, weight in items_list:
246
- tensor = torch.load(tensor_patch).to(self.device)
247
- safe_frame = max(0, min(int(frame), num_frames - 1))
 
 
 
 
 
 
 
 
248
  conditioning_items.append(ConditioningItem(tensor, safe_frame, float(weight)))
249
  return conditioning_items
250
-
251
 
252
  def generate_low(self, prompt, negative_prompt, height, width, duration, guidance_scale, seed, conditioning_items=None):
253
  used_seed = random.randint(0, 2**32 - 1) if seed is None else int(seed)
 
240
  return conditioning_items
241
 
242
  def _prepare_condition_items_latent(self, items_list: List):
243
+ if not items_list:
244
+ return []
245
  conditioning_items = []
246
  for tensor_patch, frame, weight in items_list:
247
+ # Verifica se já é um tensor
248
+ if isinstance(tensor_patch, torch.Tensor):
249
+ tensor = tensor_patch.to(self.device)
250
+ # Se é bytes, carrega do buffer
251
+ elif isinstance(tensor_patch, (bytes, bytearray)):
252
+ tensor = torch.load(io.BytesIO(tensor_patch)).to(self.device)
253
+ # Caso contrário, assume que é um caminho de arquivo
254
+ else:
255
+ tensor = torch.load(tensor_patch).to(self.device)
256
+ safe_frame = max(0, int(frame))
257
  conditioning_items.append(ConditioningItem(tensor, safe_frame, float(weight)))
258
  return conditioning_items
 
259
 
260
  def generate_low(self, prompt, negative_prompt, height, width, duration, guidance_scale, seed, conditioning_items=None):
261
  used_seed = random.randint(0, 2**32 - 1) if seed is None else int(seed)