File size: 6,252 Bytes
80b58c8
165a196
 
80b58c8
 
 
 
 
 
165a196
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
80b58c8
 
 
 
 
165a196
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
80b58c8
 
 
 
165a196
 
 
 
 
 
 
80b58c8
165a196
 
 
 
 
 
 
80b58c8
165a196
 
 
 
 
 
80b58c8
165a196
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
80b58c8
 
165a196
80b58c8
165a196
 
 
 
 
 
 
 
 
 
 
 
 
 
80b58c8
165a196
 
 
 
80b58c8
 
165a196
 
 
80b58c8
 
165a196
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
80b58c8
 
 
165a196
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
80b58c8
165a196
 
 
 
80b58c8
 
 
165a196
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
"""

Quick Start Script for Hugging Face Integration

Helps you upload/download models easily

"""

import sys
from pathlib import Path


def print_banner(text):
    """Display *text* centered between two 60-character rules."""
    rule = "=" * 60
    # One write producing the exact same byte stream as three print() calls:
    # leading blank line, rule, centered text, rule, trailing blank line.
    print(f"\n{rule}\n{text.center(60)}\n{rule}\n")


def check_model_exists():
    """Return True if a trained model with weights exists locally.

    Looks under ``./models/bytedream`` and prints actionable guidance
    (train or download commands) before returning False when either the
    directory or the weight files are missing.

    Returns:
        bool: True when the directory and a weight file are present.
    """
    model_path = Path("./models/bytedream")

    if not model_path.exists():
        print("❌ Model directory not found!")
        print("\nPlease train the model first:")
        print("  python train.py")
        print("\nOr download from Hugging Face:")
        print("  python infer.py --hf_repo username/repo --prompt 'test'")
        return False

    # Weights may live under unet/ or as a single combined file at the
    # model root.  (The original code also built a vae/ path but never
    # used it — removed as dead code; only these two locations are checked.)
    unet_weights = model_path / "unet" / "pytorch_model.bin"

    if not (unet_weights.exists() or (model_path / "pytorch_model.bin").exists()):
        print("⚠ Model directory exists but no weights found!")
        print("Please train the model first.")
        return False

    return True


def upload_to_hf():
    """Interactively push the locally trained model to the Hugging Face Hub.

    Prompts for an access token and a target repository ID, loads the
    local model on CPU, and uploads it.  Aborts early (with a message)
    when the model, token, or repo ID is missing; any upload error is
    printed with a traceback rather than raised.
    """
    print_banner("UPLOAD TO HUGGING FACE HUB")

    # Nothing to upload without a trained model on disk.
    if not check_model_exists():
        return

    hf_token = input("Enter your Hugging Face token (hf_...): ").strip()
    if not hf_token:
        print("❌ Token is required!")
        return

    target_repo = input("Enter repository ID (e.g., username/ByteDream): ").strip()
    if not target_repo:
        print("❌ Repository ID is required!")
        return

    print(f"\n📤 Uploading to {target_repo}...")

    try:
        from bytedream.generator import ByteDreamGenerator

        print("\nLoading model...")
        gen = ByteDreamGenerator(
            model_path="./models/bytedream",
            config_path="config.yaml",
            device="cpu",
        )

        gen.push_to_hub(
            repo_id=target_repo,
            token=hf_token,
            private=False,
            commit_message="Upload Byte Dream model",
        )

        print("\n✅ SUCCESS!")
        print(f"\n📦 Your model is available at:")
        print(f"https://huggingface.co/{target_repo}")
        print(f"\nTo use this model:")
        print(f"  python infer.py --prompt 'your prompt' --hf_repo '{target_repo}'")
        print("="*60)

    except Exception as e:
        print(f"\n❌ Error: {e}")
        import traceback
        traceback.print_exc()


def download_from_hf():
    """Fetch a model from the Hugging Face Hub and optionally smoke-test it.

    Prompts for a source repository ID, loads the model on CPU straight
    from the Hub, and — if the user opts in — generates one small test
    image saved as ``test_output.png``.  Errors are printed with a
    traceback rather than raised.
    """
    print_banner("DOWNLOAD FROM HUGGING FACE HUB")

    source_repo = input("Enter repository ID (e.g., username/ByteDream): ").strip()
    if not source_repo:
        print("❌ Repository ID is required!")
        return

    print(f"\n📥 Downloading from {source_repo}...")

    try:
        from bytedream.generator import ByteDreamGenerator

        gen = ByteDreamGenerator(
            hf_repo_id=source_repo,
            config_path="config.yaml",
            device="cpu",
        )

        print("\n✅ Model loaded successfully!")

        # Optional quick generation to confirm the download actually works.
        if input("\nGenerate test image? (y/n): ").strip().lower() == 'y':
            print("\nGenerating test image...")
            img = gen.generate(
                prompt="test pattern, simple colors",
                width=256,
                height=256,
                num_inference_steps=10,
            )

            out_name = "test_output.png"
            img.save(out_name)
            print(f"✓ Test image saved to: {out_name}")

        print("\nTo generate images:")
        print(f"  python infer.py --prompt 'your prompt' --hf_repo '{source_repo}'")
        print(f"  HF_REPO_ID={source_repo} python app.py")
        print("="*60)

    except Exception as e:
        print(f"\n❌ Error: {e}")
        import traceback
        traceback.print_exc()


def test_local_model():
    """Load the locally trained model and generate one small test image.

    Verifies the model directory and weights exist, loads the model on
    CPU, writes ``test_output.png``, and points the user at the upload
    command.  Errors are printed with a traceback rather than raised.
    """
    print_banner("TEST LOCAL MODEL")

    # Bail out early if there is nothing on disk to test.
    if not check_model_exists():
        return

    print("Loading local model...")

    try:
        from bytedream.generator import ByteDreamGenerator

        gen = ByteDreamGenerator(
            model_path="./models/bytedream",
            config_path="config.yaml",
            device="cpu",
        )

        print("\n✅ Model loaded successfully!")

        # Small, fast generation: 256x256 with only 10 denoising steps.
        print("\nGenerating test image...")
        img = gen.generate(
            prompt="test pattern, simple colors",
            width=256,
            height=256,
            num_inference_steps=10,
        )

        out_name = "test_output.png"
        img.save(out_name)
        print(f"✓ Test image saved to: {out_name}")

        print("\nModel ready for upload!")
        print("To upload: python quick_start.py upload")
        print("="*60)

    except Exception as e:
        print(f"\n❌ Error: {e}")
        import traceback
        traceback.print_exc()


def main():
    """Show the interactive menu and dispatch to the chosen action."""
    print_banner("BYTE DREAM - QUICK START")

    print("What would you like to do?")
    print("1. Upload model to Hugging Face")
    print("2. Download model from Hugging Face")
    print("3. Test local model")
    print("4. Exit")
    print()

    choice = input("Enter choice (1-4): ").strip()

    if choice == "4":
        print("\nGoodbye!")
        return

    # Dispatch table replaces the if/elif chain; unknown input is rejected.
    actions = {
        "1": upload_to_hf,
        "2": download_from_hf,
        "3": test_local_model,
    }
    action = actions.get(choice)
    if action is None:
        print("❌ Invalid choice!")
        return

    action()
    print("\nDone!")


if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        print("\n\nInterrupted!")
        sys.exit(0)