# Sheikh / hf_cli_upload.py — uploaded by megharudushi via huggingface_hub
# (Hub commit 7d3d63c, verified)
#!/usr/bin/env python3
"""
Upload Bengali AI using Hugging Face CLI with environment token
"""
import os
import subprocess
def upload_with_hf_cli(repo_id="megharudushi/Sheikh", model_dir="ready_bengali_ai"):
    """Upload a local model directory to the Hugging Face Hub via the `hf` CLI.

    Args:
        repo_id: Target repository on the Hub.
        model_dir: Local directory containing the model files to upload.

    Returns:
        True if the CLI reports a successful upload; False if the model
        directory is missing, HF_TOKEN is unset, the `hf` CLI is not
        installed, or the CLI exits non-zero.
    """
    print("🚀 Uploading Bengali AI with Hugging Face CLI")
    print("=" * 50)
    print(f"Repository: {repo_id}")

    # Bail out early if there is nothing to upload.
    if not os.path.exists(model_dir):
        print(f"❌ Error: {model_dir} directory not found!")
        return False

    # List the files that will be uploaded, with per-file and total sizes.
    files = os.listdir(model_dir)
    print(f"\n📁 Files to upload ({len(files)} total):")
    total_size = 0
    for file in sorted(files):
        size = os.path.getsize(os.path.join(model_dir, file)) / (1024 * 1024)
        total_size += size
        print(f" 📄 {file} ({size:.1f}MB)")
    print(f"📊 Total size: {total_size:.1f}MB")

    # The CLI authenticates via the HF_TOKEN environment variable.
    token = os.environ.get('HF_TOKEN')
    if not token:
        print("\n❌ No HF_TOKEN environment variable found!")
        print("\n🔧 To upload, you need to:")
        print("1. Get your token from: https://huggingface.co/settings/tokens")
        print("2. Set environment variable:")
        print("   export HF_TOKEN=your_token_here")
        print(f"3. Then run: hf upload {repo_id} .")
        return False

    print(f"\n✅ Using token: {token[:8]}...")

    try:
        print(f"\n📤 Running: hf upload {repo_id} .")
        # HF_TOKEN was read from os.environ above, so the child process
        # inherits it automatically — no need to copy/patch the environment.
        result = subprocess.run(
            ["hf", "upload", repo_id, "."],
            capture_output=True,
            text=True,
        )
    except FileNotFoundError:
        # The `hf` CLI is not installed or not on PATH.
        print("❌ Upload error: `hf` CLI not found — install huggingface_hub[cli]")
        return False
    except Exception as e:
        print(f"❌ Upload error: {e}")
        return False

    if result.returncode == 0:
        print("\n🎉 SUCCESS!")
        print(f"🌐 Your model: https://huggingface.co/{repo_id}")
        print("\n💡 Anyone can now use your model:")
        print("from transformers import AutoTokenizer, AutoModelForCausalLM")
        print(f'tokenizer = AutoTokenizer.from_pretrained("{repo_id}")')
        print(f'model = AutoModelForCausalLM.from_pretrained("{repo_id}")')
        return True

    print(f"❌ Upload failed: {result.stderr}")
    return False
def create_upload_commands(output_path="HF_CLI_COMMANDS.md"):
    """Write a markdown cheat-sheet with the manual `hf` upload commands.

    Args:
        output_path: Destination for the markdown file (default keeps the
            original behavior of writing HF_CLI_COMMANDS.md in the cwd).
    """
    # Markdown payload is intentionally verbatim — it is user documentation.
    commands = """# Hugging Face CLI Upload Commands
## Step 1: Get Your Token
1. Go to: https://huggingface.co/settings/tokens
2. Create a new token with "Write" permissions
3. Copy the token (starts with hf_)
## Step 2: Set Token and Upload
### Option A: Set environment variable
```bash
export HF_TOKEN=your_token_here
hf upload megharudushi/Sheikh .
```
### Option B: Pass token directly
```bash
hf upload megharudushi/Sheikh . --token your_token_here
```
### Option C: Login interactively
```bash
hf auth login
# Enter your token when prompted
hf upload megharudushi/Sheikh .
```
## Step 3: Verify Upload
After upload, visit: https://huggingface.co/megharudushi/Sheikh
## Files Being Uploaded
- model.bin (1.4GB) - Main model weights
- tokenizer.json (3.4MB) - Tokenizer configuration
- vocab.json (780KB) - Vocabulary
- merges.txt (446KB) - BPE merges
- config.json (13KB) - Model configuration
- Plus 6 other configuration files
"""
    with open(output_path, "w", encoding="utf-8") as f:
        f.write(commands)
    print(f"📄 Created: {output_path}")
def main():
    """Entry point: write the command cheat-sheet, then attempt the upload."""
    # "BENGALI" fixed (was "BANGLI") for consistency with the rest of the file.
    print("🇧🇩 BENGALI AI - HUGGING FACE CLI UPLOAD")
    print("=" * 45)

    # Always leave the manual instructions on disk, even if the upload works.
    create_upload_commands()

    if upload_with_hf_cli():
        print("\n🎊 Upload successful! Your Bengali AI is live!")
    else:
        print("\n🔧 Manual upload required:")
        print("1. Set HF_TOKEN environment variable")
        print("2. Run: hf upload megharudushi/Sheikh .")
        print("\n📖 See HF_CLI_COMMANDS.md for detailed instructions")


if __name__ == "__main__":
    main()