ezeanubis commited on
Commit
3f43c8e
verified
1 Parent(s): 097a1ae

Create tools/infer.py

Browse files
Files changed (1) hide show
  1. tools/infer.py +45 -0
tools/infer.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
%%writefile tools/infer.py
# aquí pegas el código que te pasé antes 👇
3
+
4
+ import os
5
+ import torch
6
+ from torchvision import transforms
7
+ from PIL import Image
8
+ from huggingface_hub import hf_hub_download
9
+ from transformers import AutoTokenizer, AutoModelForCausalLM
10
+
# Hugging Face Hub repo id that both the tokenizer and the causal LM are
# loaded from (see load_model below).
MODEL_REPO = "tencent/HunyuanWorld-Voyager"
12
+
def load_model():
    """Download and instantiate the tokenizer and causal LM from MODEL_REPO.

    Uses half precision and automatic device placement when a CUDA device is
    available, full float32 on CPU otherwise.

    Returns:
        tuple: ``(model, tokenizer)`` ready for generation.
    """
    print(f"Descargando y cargando modelo desde {MODEL_REPO} ...")
    # Pick the dtype once, up front: fp16 only makes sense on GPU.
    dtype = torch.float16 if torch.cuda.is_available() else torch.float32
    tok = AutoTokenizer.from_pretrained(MODEL_REPO, trust_remote_code=True)
    lm = AutoModelForCausalLM.from_pretrained(
        MODEL_REPO,
        torch_dtype=dtype,
        device_map="auto",
        trust_remote_code=True,
    )
    return lm, tok
# NOTE(review): this runs at import time, so merely importing tools/infer.py
# downloads and loads the full model — a heavy module-level side effect.
# Kept as-is because other code reads the module-level MODEL / TOKENIZER
# names directly; consider lazy initialization if import cost matters.
MODEL, TOKENIZER = load_model()
25
+
@torch.inference_mode()
def main(config=None, ckpt=None, prompt="A beautiful sci-fi landscape", steps=20, seed=42, out_dir="outputs/"):
    """Run one text-generation pass with the module-level model, then write a
    placeholder PNG to *out_dir*.

    Args:
        config: Unused; accepted for interface compatibility with callers.
        ckpt: Unused; accepted for interface compatibility with callers.
        prompt: Text prompt tokenized and fed to the model.
        steps: Upper bound on generated tokens (passed as ``max_new_tokens``).
        seed: Torch RNG seed for reproducible sampling.
        out_dir: Directory the placeholder image is written into (created if
            missing).

    Returns:
        str | None: Path of the written PNG on success, ``None`` if
        generation raised.
    """
    import traceback  # local import: only needed on the error path

    os.makedirs(out_dir, exist_ok=True)
    torch.manual_seed(seed)

    # Tokenize on CPU, then move the tensors to wherever device_map="auto"
    # placed the model.
    inputs = TOKENIZER(prompt, return_tensors="pt").to(MODEL.device)
    print(f"Prompt recibido: {prompt}")

    try:
        output = MODEL.generate(**inputs, max_new_tokens=steps)
        text_result = TOKENIZER.decode(output[0], skip_special_tokens=True)
        print("Resultado del modelo:", text_result)

        # NOTE(review): this PNG is a solid dark placeholder — it does NOT
        # render the model's output in any way. Replace with a real
        # visualization once one exists.
        img_path = os.path.join(out_dir, "result_placeholder.png")
        Image.new("RGB", (720, 320), color=(20, 20, 20)).save(img_path)
        return img_path

    except Exception as e:
        # Keep the original best-effort contract (return None) but stop
        # discarding the traceback — str(e) alone made failures undebuggable.
        print("Error durante inferencia:", e)
        traceback.print_exc()
        return None