ivalenzuela committed on
Commit
bd90088
·
1 Parent(s): 1f5923a

Añadir nuevas dependencias al instalador

Browse files

Se añaden dos nuevas instalaciones de paquetes en el archivo `app.py`:
- Se instala `flash-attn` sin aislamiento de construcción (`--no-build-isolation`) y sin caché.
- Se agrega la instalación de `bpy` versión 3.6.0 desde el índice de paquetes de Blender (`--extra-index-url https://download.blender.org/pypi/`).

Nota de revisión: el diff (+6 -10) también reorganiza los callbacks en `run_inference_python` (reasigna `callbacks` después de haber añadido ya el writer) y elimina la carga de la configuración del sistema (`system = get_system(...)`), aunque `trainer.predict(system, ...)` sigue usando `system` — conviene verificar que `system` quede definido en la versión nueva.

Archivo modificado: [app.py](app.py)

Files changed (1)
  1. app.py +6 -10
app.py CHANGED
@@ -25,6 +25,8 @@ else:
25
 
26
  subprocess.run(f'pip install spconv{spconv_version}', shell=True)
27
  subprocess.run(f'pip install torch_scatter torch_cluster -f https://data.pyg.org/whl/torch-{torch_version}+{cuda_version}.html --no-cache-dir', shell=True)
 
 
28
 
29
  # Helper functions
30
  def validate_input_file(file_path: str, supported_formats: list) -> bool:
@@ -72,6 +74,7 @@ def extract_mesh_python(input_file: str, output_dir: str) -> str:
72
 
73
  return expected_npz_dir
74
 
 
75
  def run_inference_python(
76
  input_file: str,
77
  output_file: str,
@@ -199,31 +202,24 @@ def run_inference_python(
199
 
200
  callbacks.append(get_writer(**writer_config, order_config=predict_transform_config.order_config))
201
 
202
- # Add ModelCheckpoint callback if present in task callbacks to avoid Lightning warning
203
  checkpoint_callbacks = []
204
  if hasattr(task, 'callbacks') and task.callbacks:
205
  for cb in task.callbacks:
206
  if isinstance(cb, dict) and cb.get('__target__', '').startswith('ModelCheckpoint'):
207
- # Remove __target__ key and pass rest as kwargs
208
  cb_kwargs = {k: v for k, v in cb.items() if k != '__target__'}
209
  checkpoint_callbacks.append(ModelCheckpoint(**cb_kwargs))
210
- callbacks = checkpoint_callbacks + callbacks
211
 
212
- # Get system
213
- if system_config_path is None or not Path(system_config_path).exists():
214
- raise FileNotFoundError(f"System configuration file not found: {system_config_path}")
215
- system_config = Box(yaml.safe_load(open(system_config_path, 'r')))
216
- system = get_system(**system_config, model=model, steps_per_epoch=1)
217
 
218
- # Setup trainer
219
  trainer_config = task.trainer
220
  resume_from_checkpoint = download(task.resume_from_checkpoint)
221
 
222
  trainer = L.Trainer(callbacks=callbacks, logger=None, **trainer_config)
223
 
224
- # Run prediction
225
  trainer.predict(system, datamodule=data, ckpt_path=resume_from_checkpoint, return_predictions=False)
226
 
 
227
  # Handle output file location and validation
228
  if inference_type == "skeleton":
229
  input_name_stem = Path(input_file).stem
 
25
 
26
  subprocess.run(f'pip install spconv{spconv_version}', shell=True)
27
  subprocess.run(f'pip install torch_scatter torch_cluster -f https://data.pyg.org/whl/torch-{torch_version}+{cuda_version}.html --no-cache-dir', shell=True)
28
+ subprocess.run(f'pip install flash-attn --no-build-isolation --no-cache-dir', shell=True)
29
+ subprocess.run(f'pip install bpy==3.6.0 --extra-index-url https://download.blender.org/pypi/', shell=True)
30
 
31
  # Helper functions
32
  def validate_input_file(file_path: str, supported_formats: list) -> bool:
 
74
 
75
  return expected_npz_dir
76
 
77
+
78
  def run_inference_python(
79
  input_file: str,
80
  output_file: str,
 
202
 
203
  callbacks.append(get_writer(**writer_config, order_config=predict_transform_config.order_config))
204
 
 
205
  checkpoint_callbacks = []
206
  if hasattr(task, 'callbacks') and task.callbacks:
207
  for cb in task.callbacks:
208
  if isinstance(cb, dict) and cb.get('__target__', '').startswith('ModelCheckpoint'):
 
209
  cb_kwargs = {k: v for k, v in cb.items() if k != '__target__'}
210
  checkpoint_callbacks.append(ModelCheckpoint(**cb_kwargs))
 
211
 
212
+ # Agregar callbacks writer y checkpoint juntos
213
+ callbacks = checkpoint_callbacks + [get_writer(**writer_config, order_config=predict_transform_config.order_config)]
 
 
 
214
 
 
215
  trainer_config = task.trainer
216
  resume_from_checkpoint = download(task.resume_from_checkpoint)
217
 
218
  trainer = L.Trainer(callbacks=callbacks, logger=None, **trainer_config)
219
 
 
220
  trainer.predict(system, datamodule=data, ckpt_path=resume_from_checkpoint, return_predictions=False)
221
 
222
+
223
  # Handle output file location and validation
224
  if inference_type == "skeleton":
225
  input_name_stem = Path(input_file).stem