#!/usr/bin/env python3
"""
Upload Bengali AI using Hugging Face CLI with environment token.

Checks that the prepared model directory exists, verifies that an
HF_TOKEN is available in the environment, and shells out to the `hf`
CLI to upload the model files to the Hugging Face Hub.
"""
import os
import subprocess

# Target repository on the Hugging Face Hub and the local folder that
# holds the prepared model files. Keeping these in one place keeps the
# upload command and every user-facing message consistent.
REPO_ID = "megharudushi/Sheikh"
MODEL_DIR = "ready_bengali_ai"


def upload_with_hf_cli():
    """Upload the prepared model directory using the `hf` CLI.

    Returns:
        bool: True when the CLI exits successfully; False when the model
        directory is missing, no HF_TOKEN is set, or the upload fails.
    """
    print("šŸš€ Uploading Bengali AI with Hugging Face CLI")
    print("=" * 50)
    print(f"Repository: {REPO_ID}")

    # Check if model directory exists
    if not os.path.isdir(MODEL_DIR):
        print(f"āŒ Error: {MODEL_DIR} directory not found!")
        return False

    # Show files to upload
    files = os.listdir(MODEL_DIR)
    print(f"\nšŸ“ Files to upload ({len(files)} total):")
    total_size = 0
    for file in sorted(files):
        size = os.path.getsize(os.path.join(MODEL_DIR, file)) / (1024 * 1024)
        total_size += size
        print(f" šŸ“„ {file} ({size:.1f}MB)")
    print(f"šŸ“Š Total size: {total_size:.1f}MB")

    # Check for token
    token = os.environ.get('HF_TOKEN')
    if not token:
        print("\nāŒ No HF_TOKEN environment variable found!")
        print("\nšŸ”§ To upload, you need to:")
        print("1. Get your token from: https://huggingface.co/settings/tokens")
        print("2. Set environment variable:")
        print("   export HF_TOKEN=your_token_here")
        print(f"3. Then run: hf upload {REPO_ID} {MODEL_DIR}")
        return False

    print(f"\nāœ… Using token: {token[:8]}...")

    try:
        # Set environment variable for hf CLI
        env = os.environ.copy()
        env['HF_TOKEN'] = token

        # BUG FIX: upload the model directory that was just validated,
        # not "." (the current working directory), which would push the
        # script itself and any other unrelated files in cwd.
        print(f"\nšŸ“¤ Running: hf upload {REPO_ID} {MODEL_DIR}")
        result = subprocess.run(
            ["hf", "upload", REPO_ID, MODEL_DIR],
            env=env,
            capture_output=True,
            text=True,
        )

        if result.returncode == 0:
            print("\nšŸŽ‰ SUCCESS!")
            print(f"🌐 Your model: https://huggingface.co/{REPO_ID}")
            print("\nšŸ’” Anyone can now use your model:")
            print("from transformers import AutoTokenizer, AutoModelForCausalLM")
            print(f'tokenizer = AutoTokenizer.from_pretrained("{REPO_ID}")')
            print(f'model = AutoModelForCausalLM.from_pretrained("{REPO_ID}")')
            return True
        else:
            print(f"āŒ Upload failed: {result.stderr}")
            return False

    except Exception as e:
        # Best-effort script: report and fall through to the manual
        # instructions printed by the __main__ block.
        print(f"āŒ Upload error: {e}")
        return False


def create_upload_commands():
    """Write HF_CLI_COMMANDS.md with step-by-step manual upload commands."""
    commands = f"""# Hugging Face CLI Upload Commands

## Step 1: Get Your Token
1. Go to: https://huggingface.co/settings/tokens
2. Create a new token with "Write" permissions
3. Copy the token (starts with hf_)

## Step 2: Set Token and Upload

### Option A: Set environment variable
```bash
export HF_TOKEN=your_token_here
hf upload {REPO_ID} {MODEL_DIR}
```

### Option B: Pass token directly
```bash
hf upload {REPO_ID} {MODEL_DIR} --token your_token_here
```

### Option C: Login interactively
```bash
hf auth login
# Enter your token when prompted
hf upload {REPO_ID} {MODEL_DIR}
```

## Step 3: Verify Upload
After upload, visit: https://huggingface.co/{REPO_ID}

## Files Being Uploaded
- model.bin (1.4GB) - Main model weights
- tokenizer.json (3.4MB) - Tokenizer configuration
- vocab.json (780KB) - Vocabulary
- merges.txt (446KB) - BPE merges
- config.json (13KB) - Model configuration
- Plus 6 other configuration files
"""
    with open("HF_CLI_COMMANDS.md", "w", encoding="utf-8") as f:
        f.write(commands)
    print("šŸ“„ Created: HF_CLI_COMMANDS.md")


if __name__ == "__main__":
    print("šŸ‡§šŸ‡© BANGLI AI - HUGGING FACE CLI UPLOAD")
    print("=" * 45)

    # Create commands file
    create_upload_commands()

    # Try upload
    success = upload_with_hf_cli()

    if not success:
        print("\nšŸ”§ Manual upload required:")
        print("1. Set HF_TOKEN environment variable")
        print(f"2. Run: hf upload {REPO_ID} {MODEL_DIR}")
        print("\nšŸ“– See HF_CLI_COMMANDS.md for detailed instructions")
    else:
        print("\nšŸŽŠ Upload successful! Your Bengali AI is live!")