File size: 1,229 Bytes
970ac6b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
import sys
import os
import torch

# Make the Space directory importable so `import app` resolves locally.
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

try:
    print("Importing app...")
    import app

    print("\nTesting model loading...")
    # A None TASK means the model never came up; report and bail with a
    # non-zero exit code so CI treats this as a failure.
    if app.TASK is not None:
        print("SUCCESS: Model loaded")
    else:
        print("FAIL: Model failed to load")
        sys.exit(1)

    print(f"Model type: {type(app.TASK)}")
    print(f"Model device: {app.TASK.device}")

    print("\nTesting generation (small batch)...")
    # Generate 2 molecules, fixed size 5, 10 steps (fast)
    outputs = app.generate(
        num_molecules=2,
        size_mode="Fixed size",
        fixed_size=5,
        diffusion_steps=10,
        seed=123,
    )
    html, zip_path, df = outputs

    print("\nGeneration output:")
    print(f"HTML len: {len(html) if html else 'None'}")
    print(f"Zip path: {zip_path}")
    print(f"DataFrame:\n{df}")

    # All three artifacts must be present for the pipeline to count as working.
    if not (html and zip_path and df is not None):
        print("\nFAIL: Generation produced incomplete outputs")
        sys.exit(1)
    print("\nSUCCESS: Inference pipeline works!")

except Exception as e:
    # Any unexpected error (import failure, inference crash) lands here;
    # dump the traceback for debugging and exit non-zero.
    print(f"\nCRITICAL FAIL: {e}")
    import traceback
    traceback.print_exc()
    sys.exit(1)