import json
import os
import time
import datetime
import threading
import logging

from huggingface_hub import hf_hub_download, list_repo_files, HfApi
from fastapi import FastAPI
from fastapi.responses import HTMLResponse
from uvicorn import run as uvicorn_run
# --- Configuration ---
ALL_REPO_ID = "samfred2/ALL"
ATO_REPO_ID = "samfred2/ATO"
OUTPUT_REPO_ID = "samfred2/ALL2"
OUTPUT_DIR = "processed_files"
HF_TOKEN = os.getenv("HF_TOKEN", "")
MAX_UPLOADS_PER_HOUR = 128
RATE_LIMIT_DELAY = 3600  # 1 hour, in seconds

os.makedirs(OUTPUT_DIR, exist_ok=True)
api = HfApi(token=HF_TOKEN)

# Logging setup
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)
# Global state
processing_state = {
    'running': False,
    'status': 'idle',
    'processed': 0,
    'uploaded': 0,
    'total': 0,
    'current_file': '',
    'error': None,
    'start_time': None,
    'eta': None
}
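# Note: processing_state is written by the background worker thread and read by
# the request handlers. Under CPython's GIL, single-key dict reads/writes are
# effectively atomic, so this sketch gets away without a threading.Lock; wrap
# the mutations in a lock if you need stricter consistency guarantees.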

# FastAPI app
app = FastAPI(title="Dataset Merger & Uploader", version="1.0.0")

# HTML dashboard, embedded as a single string
HTML_DASHBOARD = """<!DOCTYPE html>
<html><head><meta charset="UTF-8"><meta name="viewport" content="width=device-width, initial-scale=1.0"><title>Dataset Merger & Uploader</title><style>*{margin:0;padding:0;box-sizing:border-box}body{font-family:'Segoe UI',Tahoma,Geneva,Verdana,sans-serif;background:linear-gradient(135deg,#667eea 0%,#764ba2 100%);min-height:100vh;padding:20px}.container{max-width:1200px;margin:0 auto}.header{text-align:center;color:white;margin-bottom:30px}.header h1{font-size:2.5em;margin-bottom:10px;text-shadow:2px 2px 4px rgba(0,0,0,.3)}.header p{font-size:1.1em;opacity:.9}.dashboard{display:grid;grid-template-columns:repeat(auto-fit,minmax(250px,1fr));gap:20px;margin-bottom:30px}.card{background:white;border-radius:10px;padding:25px;box-shadow:0 10px 30px rgba(0,0,0,.2);transition:transform .3s,box-shadow .3s}.card:hover{transform:translateY(-5px);box-shadow:0 15px 40px rgba(0,0,0,.3)}.card-title{font-size:.9em;color:#666;text-transform:uppercase;letter-spacing:1px;margin-bottom:15px;font-weight:600}.card-value{font-size:2.5em;font-weight:700;color:#333;margin-bottom:10px}.card-subtitle{font-size:.9em;color:#999}.status-badge{display:inline-block;padding:8px 16px;border-radius:20px;font-size:.85em;font-weight:600;margin-top:15px}.status-running{background:#4CAF50;color:white}.status-idle{background:#9E9E9E;color:white}.status-completed{background:#2196F3;color:white}.status-error{background:#f44336;color:white}.progress-section{background:white;border-radius:10px;padding:30px;box-shadow:0 10px 30px rgba(0,0,0,.2);margin-bottom:30px}.progress-title{font-size:1.3em;font-weight:700;margin-bottom:20px;color:#333}.progress-bar-container{width:100%;height:30px;background:#e0e0e0;border-radius:15px;overflow:hidden;margin-bottom:15px}.progress-bar{height:100%;background:linear-gradient(90deg,#667eea 0%,#764ba2 100%);transition:width .3s;display:flex;align-items:center;justify-content:center;color:white;font-weight:700;font-size:.9em}.progress-text{display:flex;justify-content:space-between;margin-bottom:20px;font-size:.95em}.current-file{padding:15px;background:#f5f5f5;border-left:4px solid #667eea;border-radius:5px;margin-bottom:15px;word-break:break-all}.current-file-label{font-size:.85em;color:#666;margin-bottom:5px}.current-file-name{font-weight:600;color:#333}.controls{display:flex;gap:10px;margin-top:20px}.btn{padding:12px 24px;border:none;border-radius:5px;font-size:1em;font-weight:600;cursor:pointer;transition:all .3s}.btn-stop{background:#f44336;color:white;flex:1}.btn-stop:hover{background:#d32f2f}.btn-stop:disabled{background:#ccc;cursor:not-allowed}.stats-grid{display:grid;grid-template-columns:repeat(auto-fit,minmax(200px,1fr));gap:15px;margin-top:20px}.stat-item{background:#f5f5f5;padding:15px;border-radius:5px;border-left:4px solid #667eea}.stat-label{font-size:.85em;color:#666;margin-bottom:5px}.stat-value{font-size:1.8em;font-weight:700;color:#333}.error-message{background:#ffebee;border-left:4px solid #f44336;color:#c62828;padding:15px;border-radius:5px;margin-top:15px;display:none}.error-message.show{display:block}@keyframes pulse{0%,100%{opacity:1}50%{opacity:.5}}.pulse{animation:pulse 1s infinite}</style></head>
<body><div class="container"><div class="header"><h1>📊 Dataset Merger & Uploader</h1><p>Real-time processing dashboard for samfred2/ALL → samfred2/ALL2</p></div>
<div class="dashboard"><div class="card"><div class="card-title">Status</div><div class="card-value"><span id="status-icon">⏸️</span></div><div class="card-subtitle">Current state</div><div class="status-badge status-idle" id="status-badge">IDLE</div></div><div class="card"><div class="card-title">Processed</div><div class="card-value"><span id="processed">0</span></div><div class="card-subtitle">files completed</div></div><div class="card"><div class="card-title">Uploaded</div><div class="card-value"><span id="uploaded">0</span></div><div class="card-subtitle">files to samfred2/ALL2</div></div><div class="card"><div class="card-title">Total</div><div class="card-value"><span id="total">0</span></div><div class="card-subtitle">matched pairs</div></div><div class="card"><div class="card-title">Elapsed Time</div><div class="card-value"><span id="elapsed">0s</span></div><div class="card-subtitle">since start</div></div><div class="card"><div class="card-title">ETA</div><div class="card-value"><span id="eta">--</span></div><div class="card-subtitle">estimated remaining</div></div></div>
<div class="progress-section"><div class="progress-title">Overall Progress</div><div class="progress-text"><span id="progress-label">0%</span><span id="progress-counter">0 / 0</span></div><div class="progress-bar-container"><div class="progress-bar" id="progress-bar" style="width: 0%"><span id="progress-percent"></span></div></div><div class="current-file" id="current-file-container" style="display: none;"><div class="current-file-label">Currently Processing</div><div class="current-file-name pulse" id="current-file">--</div></div><div class="stats-grid"><div class="stat-item"><div class="stat-label">Processing Rate</div><div class="stat-value"><span id="rate">0</span>/min</div></div><div class="stat-item"><div class="stat-label">Upload Rate</div><div class="stat-value"><span id="upload-rate">0</span>/min</div></div><div class="stat-item"><div class="stat-label">Avg Time/File</div><div class="stat-value"><span id="avg-time">0</span>s</div></div></div><div class="error-message" id="error-message"></div><div class="controls"><button class="btn btn-stop" id="stop-btn" onclick="stopProcessing()" disabled>Stop Processing</button></div></div></div>
<script>
let startTime=null;
function fmt(s){if(s<60)return s+'s';const m=Math.floor(s/60);return m<60?m+'m':Math.floor(m/60)+'h '+(m%60)+'m'}
async function updateDashboard(){try{
const res=await fetch('/status'),t=await res.json();
document.getElementById('processed').textContent=t.processed;
document.getElementById('uploaded').textContent=t.uploaded;
document.getElementById('total').textContent=t.total;
const badge=document.getElementById('status-badge'),icon=document.getElementById('status-icon'),stopBtn=document.getElementById('stop-btn');
badge.className='status-badge status-'+t.status;badge.textContent=t.status.toUpperCase();
if(t.status==='running'){icon.textContent='▶️';stopBtn.disabled=false;if(!startTime)startTime=Date.now()}
else if(t.status==='completed'){icon.textContent='✅';stopBtn.disabled=true}
else if(t.status==='error'){icon.textContent='❌';stopBtn.disabled=true;const err=document.getElementById('error-message');err.textContent='Error: '+(t.error||'Unknown error');err.classList.add('show')}
else{icon.textContent='⏸️';stopBtn.disabled=true}
if(t.total===0){document.getElementById('progress-bar').style.width='0%'}
else{const p=Math.round(t.processed/t.total*100);document.getElementById('progress-bar').style.width=p+'%';document.getElementById('progress-label').textContent=p+'%';document.getElementById('progress-percent').textContent=p+'%'}
document.getElementById('progress-counter').textContent=t.processed+' / '+t.total;
if(t.current_file){document.getElementById('current-file-container').style.display='block';document.getElementById('current-file').textContent=t.current_file}
else{document.getElementById('current-file-container').style.display='none'}
if(t.status==='running'&&startTime){
const el=Math.max(1,Math.floor((Date.now()-startTime)/1e3));
document.getElementById('elapsed').textContent=fmt(el);
const rate=Math.round(t.processed/el*60),uploadRate=Math.round(t.uploaded/el*60);
document.getElementById('rate').textContent=rate;
document.getElementById('upload-rate').textContent=uploadRate;
if(t.processed>0){document.getElementById('avg-time').textContent=Math.round(el/t.processed);
if(rate>0&&t.processed<t.total)document.getElementById('eta').textContent=fmt(Math.round((t.total-t.processed)*el/t.processed))}
}
}catch(e){console.error('Error fetching status:',e)}}
async function stopProcessing(){if(confirm('Are you sure you want to stop processing?')){await fetch('/stop',{method:'POST'});updateDashboard()}}
setInterval(updateDashboard,2000);updateDashboard();
</script></body></html>"""

@app.on_event("startup")
async def startup_event():
    """Auto-start processing when the server starts."""
    logger.info("Server startup - auto-starting processing...")
    processing_state['running'] = True
    processing_state['status'] = 'starting'
    processing_state['start_time'] = time.time()
    thread = threading.Thread(target=run_processing_thread, daemon=True)
    thread.start()

@app.get("/", response_class=HTMLResponse)
def get_dashboard():
    """Serve the dashboard HTML."""
    return HTML_DASHBOARD

@app.get("/status")
def get_status():
    """Return the current processing state."""
    return processing_state

@app.post("/stop")
def stop_processing():
    """Signal the worker thread to stop."""
    processing_state['running'] = False
    return {"message": "Stop signal sent"}

# --- Helper Functions ---
def download_file(repo_id, filename, local_dir):
    """Download a single file, with a small delay to mitigate rate-limiting."""
    logger.info(f"Downloading {filename} from {repo_id}...")
    try:
        time.sleep(1)
        local_path = hf_hub_download(
            repo_id=repo_id,
            filename=filename,
            repo_type="dataset",
            local_dir=local_dir,
            local_dir_use_symlinks=False,
            token=HF_TOKEN
        )
        logger.info(f"Downloaded to {local_path}")
        return local_path
    except Exception as e:
        logger.error(f"Could not download {filename}. Error: {e}")
        return None
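
# Note: hf_hub_download skips files it has already fetched (it keeps download
# metadata alongside local_dir), so re-runs do not re-download everything.
# local_dir_use_symlinks is deprecated in recent huggingface_hub releases and
# ignored when local_dir is set; drop the argument if your version warns.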

def upload_file_with_rate_limit(api, file_path, path_in_repo, upload_state, progress=None):
    """
    Upload a file to HF with client-side rate limiting (128 files per hour).
    Handles 429 errors by waiting an hour and resuming.
    Records rate-limit events in the progress file.
    """
    while True:
        try:
            # Proactive limit: pause before HF starts rejecting requests
            if upload_state['count'] >= MAX_UPLOADS_PER_HOUR:
                elapsed = time.time() - upload_state['hour_start']
                if elapsed < RATE_LIMIT_DELAY:
                    wait_time = RATE_LIMIT_DELAY - elapsed
                    wait_until = datetime.datetime.now() + datetime.timedelta(seconds=wait_time)
                    logger.info(f"Rate limit reached ({MAX_UPLOADS_PER_HOUR} uploads/hour). Waiting until {wait_until}")
                    # Record the rate-limit event
                    if progress is not None:
                        event = {
                            'timestamp': datetime.datetime.now().isoformat(),
                            'type': 'hourly_limit_reached',
                            'reason': f'Reached {MAX_UPLOADS_PER_HOUR} uploads/hour',
                            'wait_seconds': int(wait_time),
                            'resume_time': wait_until.isoformat()
                        }
                        progress.setdefault('rate_limit_events', []).append(event)
                    time.sleep(wait_time)
                # Start a fresh one-hour window
                upload_state['hour_start'] = time.time()
                upload_state['count'] = 0
            logger.info(f"Uploading {path_in_repo}... ({upload_state['count'] + 1}/{MAX_UPLOADS_PER_HOUR})")
            api.upload_file(
                path_or_fileobj=file_path,
                path_in_repo=path_in_repo,
                repo_id=OUTPUT_REPO_ID,
                repo_type="dataset",
                token=HF_TOKEN
            )
            upload_state['count'] += 1
            logger.info(f"Uploaded {path_in_repo}")
            break
        except Exception as e:
            # Reactive limit: HF returned 429 Too Many Requests
            if "429" in str(e) or "rate" in str(e).lower():
                wait_until = datetime.datetime.now() + datetime.timedelta(seconds=RATE_LIMIT_DELAY)
                logger.warning(f"Rate limit hit (429). Waiting 1 hour until {wait_until}")
                if progress is not None:
                    event = {
                        'timestamp': datetime.datetime.now().isoformat(),
                        'type': 'http_429_error',
                        'reason': 'HTTP 429 Too Many Requests from HF',
                        'wait_seconds': RATE_LIMIT_DELAY,
                        'resume_time': wait_until.isoformat(),
                        'file': path_in_repo
                    }
                    progress.setdefault('rate_limit_events', []).append(event)
                time.sleep(RATE_LIMIT_DELAY)
                upload_state['hour_start'] = time.time()
                upload_state['count'] = 0
            else:
                logger.error(f"Could not upload {path_in_repo}. Error: {e}")
                break
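
# Illustrative timeline of the limiter above (numbers are examples): if uploads
# 1-128 complete in the first 20 minutes of a window, upload 129 sleeps for the
# remaining ~40 minutes, then a fresh window starts and counting resumes at 1.
# A raw 429 from HF instead triggers a flat one-hour sleep before retrying the
# same file, so no upload is ever dropped by rate limiting alone.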

def load_json_file(local_path):
    """Load and return the content of a JSON file."""
    try:
        with open(local_path, 'r') as f:
            return json.load(f)
    except json.JSONDecodeError:
        logger.error(f"Could not decode JSON for {local_path}. Skipping.")
        return None
    except Exception as e:
        logger.error(f"Could not read file {local_path}. Error: {e}")
        return None

def find_matching_all_file(ato_filename, all_file_names):
    """
    Find the full course file name in the 'all' dataset that corresponds
    to the given lesson file name from the 'ato' dataset, using suffix matching.
    """
    for all_name in all_file_names:
        if all_name.endswith(ato_filename):
            return all_name
    return None
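
# Example of the suffix matching (hypothetical filenames): an ATO file named
# "lesson_01.json" matches an ALL file stored deeper in the repo, e.g.
#   find_matching_all_file("lesson_01.json", ["course_a/lesson_01.json"])
#   -> "course_a/lesson_01.json"
# The first suffix match wins, so ambiguous names resolve to whichever ALL
# file list_repo_files happened to return first.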

def load_progress(progress_file):
    """Load the progress-tracking file, falling back to a fresh state."""
    if os.path.exists(progress_file):
        try:
            with open(progress_file, 'r') as f:
                return json.load(f)
        except Exception:
            return {'processed': [], 'uploaded': [], 'rate_limit_events': []}
    return {'processed': [], 'uploaded': [], 'rate_limit_events': []}

def save_progress(progress_file, progress):
    """Save the progress-tracking file."""
    with open(progress_file, 'w') as f:
        json.dump(progress, f, indent=2)
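
# Shape of progress.json as written above (values illustrative):
# {
#   "processed": ["course_a/lesson_01.json"],
#   "uploaded": ["course_a/lesson_01.json"],
#   "rate_limit_events": [
#     {"timestamp": "...", "type": "hourly_limit_reached",
#      "reason": "Reached 128 uploads/hour", "wait_seconds": 2400,
#      "resume_time": "..."}
#   ]
# }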

def run_processing_thread():
    """Run process_datasets in a thread, with error handling."""
    try:
        processing_state['status'] = 'running'
        process_datasets()
        processing_state['status'] = 'completed'
    except Exception as e:
        processing_state['status'] = 'error'
        processing_state['error'] = str(e)
        logger.error(f"Processing error: {e}", exc_info=True)
    finally:
        processing_state['running'] = False
        logger.info("Processing thread finished")

# --- Main Logic ---
def process_datasets():
    """
    Two-phase processing:
    1. Process matched pairs (ALL files with corresponding ATO transcriptions).
    2. Upload the remaining ALL files that have no transcription.
    Rate-limited throughout (128 files/hour, with 429 handling).
    """
    # Load progress
    progress_file = os.path.join(OUTPUT_DIR, "progress.json")
    progress = load_progress(progress_file)
    upload_state = {'count': 0, 'hour_start': time.time()}

    # Ensure the output repo exists
    try:
        api.create_repo(OUTPUT_REPO_ID, repo_type="dataset", exist_ok=True)
        logger.info(f"Using repo: {OUTPUT_REPO_ID}")
    except Exception as e:
        logger.warning(f"Could not create repo. {e}")

    # 1. List all files in both repositories
    logger.info("--- 1. Listing Repository Files ---")
    try:
        all_file_list = list_repo_files(repo_id=ALL_REPO_ID, repo_type="dataset", token=HF_TOKEN)
        ato_file_list = list_repo_files(repo_id=ATO_REPO_ID, repo_type="dataset", token=HF_TOKEN)
    except Exception as e:
        logger.error(f"Could not list repository files. {e}")
        processing_state['error'] = str(e)
        return

    # Filter for JSON files
    all_json_files = [f for f in all_file_list if f.endswith(".json")]
    ato_json_files = [f for f in ato_file_list if f.endswith(".json")]
    logger.info(f"Found {len(all_json_files)} JSON files in {ALL_REPO_ID}")
    logger.info(f"Found {len(ato_json_files)} JSON files in {ATO_REPO_ID}")

    # 2. Match ATO files to ALL files
    logger.info("--- 2. Matching ATO to ALL Files ---")
    match_map = {}
    for ato_file in ato_json_files:
        matching_all_file = find_matching_all_file(ato_file, all_json_files)
        if matching_all_file:
            match_map[ato_file] = matching_all_file
    logger.info(f"Found {len(match_map)} matching pairs.")
    processing_state['total'] = len(match_map)  # drives the dashboard progress bar

    # Create temporary directories for downloads
    all_download_dir = os.path.join(OUTPUT_DIR, "all_raw")
    ato_download_dir = os.path.join(OUTPUT_DIR, "ato_raw")
    os.makedirs(all_download_dir, exist_ok=True)
    os.makedirs(ato_download_dir, exist_ok=True)

    # ============================================================
    # PHASE 1: Process matched pairs (with transcriptions)
    # ============================================================
    logger.info("--- PHASE 1: Processing matched pairs (files with transcriptions) ---")
    logger.info(f"Total pairs to process: {len(match_map)}")
    logger.info(f"Already processed: {len(progress['processed'])}")
    logger.info(f"Already uploaded: {len(progress['uploaded'])}")

    processed_count = 0
    matched_all_files = set()  # Track which ALL files were handled in phase 1
    for ato_filename, all_filename in match_map.items():
        if not processing_state['running']:
            logger.info("Processing stopped by user")
            break

        # Skip files that were already processed on a previous run
        if all_filename in progress['processed']:
            logger.info(f"Skipping already processed: {all_filename}")
            matched_all_files.add(all_filename)
            continue

        processing_state['current_file'] = all_filename
        logger.info(f"Processing {processed_count + 1}/{len(match_map)}: {ato_filename} <-> {all_filename}")

        # a. Download the ATO file
        ato_local_path = download_file(ATO_REPO_ID, ato_filename, ato_download_dir)
        if not ato_local_path:
            continue
        ato_data = load_json_file(ato_local_path)
        if not ato_data:
            continue

        # b. Download the ALL file
        all_local_path = download_file(ALL_REPO_ID, all_filename, all_download_dir)
        if not all_local_path:
            continue
        all_data = load_json_file(all_local_path)
        if not all_data:
            continue

        # c. Integrate the transcription
        logger.info("Integrating transcription...")
        all_data["transcription_content"] = ato_data
        all_data["transcription_content"]["full_course_name"] = all_filename

        # d. Save locally (create nested course directories as needed)
        final_output_path = os.path.join(OUTPUT_DIR, all_filename)
        os.makedirs(os.path.dirname(final_output_path), exist_ok=True)
        with open(final_output_path, 'w') as f:
            json.dump(all_data, f, indent=4)
        logger.info(f"Saved locally to {final_output_path}")

        # e. Upload to samfred2/ALL2
        if all_filename not in progress['uploaded']:
            upload_file_with_rate_limit(api, final_output_path, all_filename, upload_state, progress)
            progress['uploaded'].append(all_filename)
            processing_state['uploaded'] += 1

        progress['processed'].append(all_filename)
        matched_all_files.add(all_filename)
        save_progress(progress_file, progress)
        processed_count += 1
        processing_state['processed'] = processed_count
        logger.info(f"Progress: {processed_count}/{len(match_map)} | Uploaded: {len(progress['uploaded'])}")

    logger.info("--- PHASE 1 Complete ---")
    logger.info(f"Phase 1 processed: {processed_count} files with transcriptions")

    # ============================================================
    # PHASE 2: Upload remaining ALL files without transcriptions
    # ============================================================
    logger.info("--- PHASE 2: Uploading remaining ALL files without transcriptions ---")

    # Files in ALL with no ATO match (no transcription) that are not yet uploaded
    remaining_files = [f for f in all_json_files if f not in matched_all_files and f not in progress['uploaded']]
    logger.info(f"Found {len(remaining_files)} files without transcriptions to upload")

    remaining_count = 0
    for all_filename in remaining_files:
        if not processing_state['running']:
            logger.info("Processing stopped by user during phase 2")
            break

        processing_state['current_file'] = all_filename
        logger.info(f"Uploading remaining file {remaining_count + 1}/{len(remaining_files)}: {all_filename}")

        # Download the ALL file
        all_local_path = download_file(ALL_REPO_ID, all_filename, all_download_dir)
        if not all_local_path:
            continue

        # Load and validate the file
        all_data = load_json_file(all_local_path)
        if not all_data:
            continue

        # Save locally (no transcription added)
        final_output_path = os.path.join(OUTPUT_DIR, all_filename)
        os.makedirs(os.path.dirname(final_output_path), exist_ok=True)
        with open(final_output_path, 'w') as f:
            json.dump(all_data, f, indent=4)
        logger.info(f"Saved locally to {final_output_path}")

        # Upload to samfred2/ALL2
        upload_file_with_rate_limit(api, final_output_path, all_filename, upload_state, progress)
        progress['uploaded'].append(all_filename)
        processing_state['uploaded'] += 1
        save_progress(progress_file, progress)
        remaining_count += 1
        logger.info(f"Phase 2 Progress: {remaining_count}/{len(remaining_files)} | Total Uploaded: {len(progress['uploaded'])}")

    logger.info("--- PHASE 2 Complete ---")
    logger.info(f"Phase 2 uploaded: {remaining_count} files without transcriptions")
    logger.info("=== ALL PROCESSING COMPLETE ===")
    logger.info(f"Total processed (with transcriptions): {len(progress['processed'])}")
    logger.info(f"Total uploaded (all files): {len(progress['uploaded'])}")
    logger.info(f"Final stats: {len(progress['processed'])} with transcriptions + {remaining_count} without transcriptions = {len(progress['uploaded'])} total")

if __name__ == "__main__":
    logger.info("Starting server on http://127.0.0.1:8000")
    logger.info("Dashboard: http://127.0.0.1:8000")
    logger.info("Status: http://127.0.0.1:8000/status")
    logger.info("Stop: POST http://127.0.0.1:8000/stop")
    logger.info("Processing will auto-start on server startup...")
    uvicorn_run(app, host="127.0.0.1", port=8000)
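
# Note: the defaults above bind to localhost for local runs. If deploying as a
# Hugging Face Space (where this app appears to be hosted), the server normally
# needs to listen on all interfaces and the Space's expected port, e.g.:
#   uvicorn_run(app, host="0.0.0.0", port=7860)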