# (Scrape residue: Hugging Face Spaces status banner ("Spaces: Sleeping") —
# not part of the application source; commented out so the file parses.)
# Web framework and helpers.
from flask import Flask, render_template, request, jsonify, send_from_directory, make_response
import os
import tempfile  # NOTE(review): appears unused in this chunk — uploads go to static/uploads
import atexit
import shutil
from datetime import datetime, timedelta
import threading
import time
# Audio tag/metadata reading.
from mutagen import File
from mutagen.easyid3 import EasyID3  # NOTE(review): appears unused in this chunk
import json  # NOTE(review): appears unused in this chunk
import mutagen.mp3
import mutagen.flac
import mutagen.oggvorbis
from werkzeug.utils import secure_filename
import logging
import base64  # NOTE(review): appears unused in this chunk
from concurrent.futures import ThreadPoolExecutor
import asyncio  # NOTE(review): appears unused in this chunk
# Response compression and server-side caching extensions.
from flask_compress import Compress
from flask_caching import Cache
import hashlib
from uuid import uuid4
# --- Upload storage --------------------------------------------------------
# Keep uploads under ./static/uploads (next to this file) rather than a
# tempfile directory so Flask's static routing can serve them directly.
UPLOAD_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'static', 'uploads')
# exist_ok=True already tolerates a pre-existing directory, so the original
# `if not os.path.exists(...)` pre-check was redundant (and race-prone).
os.makedirs(UPLOAD_DIR, exist_ok=True)

# --- Flask app + configuration ---------------------------------------------
app = Flask(__name__, static_folder='static')
app.config['UPLOAD_FOLDER'] = UPLOAD_DIR
app.config['MAX_CONTENT_LENGTH'] = 200 * 1024 * 1024  # 200MB max request size
app.config['MAX_FILES'] = 10  # maximum number of files per upload request

# Cache configuration. SimpleCache is in-process and per-worker — adequate
# only for a single-process deployment.
cache_config = {
    "DEBUG": True,  # NOTE(review): cache debug left on — consider disabling in production
    "CACHE_TYPE": "SimpleCache",
    "CACHE_DEFAULT_TIMEOUT": 300,
}
app.config.from_mapping(cache_config)
cache = Cache(app)

# --- Constants & shared mutable state --------------------------------------
ALLOWED_EXTENSIONS = {'mp3', 'wav', 'ogg', 'flac'}
FILE_LIFETIME = timedelta(hours=1)  # uploads older than this are reaped
file_timestamps = {}  # filename -> datetime of upload (read by the cleanup thread)

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Shared long-lived pool for parallel per-file processing in upload_file().
THREAD_POOL = ThreadPoolExecutor(max_workers=8)

# Per-session bookkeeping: session_id -> {'files': [names], 'last_access': epoch seconds}.
session_files = {}
INACTIVE_SESSION_TIMEOUT = 3600  # seconds of inactivity before a session's files are reaped
def get_session_id():
    """Return the caller's session ID from the X-Session-ID header, or mint a new one."""
    header_value = request.headers.get('X-Session-ID')
    if header_value is not None:
        return header_value
    return str(uuid4())
def allowed_file(filename, allowed_extensions=None):
    """Return True if *filename* has a permitted audio extension.

    Args:
        filename: client-supplied file name.
        allowed_extensions: optional set of lowercase extensions (no dot);
            defaults to the module-level ALLOWED_EXTENSIONS, preserving the
            original call signature for existing callers.
    """
    if allowed_extensions is None:
        allowed_extensions = ALLOWED_EXTENSIONS
    # A name with no dot has no extension and is rejected outright.
    return '.' in filename and filename.rsplit('.', 1)[1].lower() in allowed_extensions
# NOTE(review): the stray "Cache for 1 hour" comment suggests this was
# decorated with @cache.memoize(timeout=3600) before the scrape stripped
# decorators — confirm against the deployed source.
def extract_metadata(filepath):
    """Read title/artist/duration from an audio file via mutagen.

    Returns a dict with keys 'title', 'artist', 'duration'. Falls back to the
    file name (without extension) for the title when tags are unreadable, and
    returns {} when mutagen cannot identify the file at all.
    """
    # BUG FIX: compute the fallback name *before* opening the file — the
    # original bound it after File(), so the outer except handler raised
    # NameError whenever File() itself failed.
    original_filename = os.path.splitext(os.path.basename(filepath))[0]
    try:
        audio = File(filepath, easy=True)  # easy=True: faster load, normalized tag keys
        if audio is None:
            return {}
        metadata = {
            'title': None,
            'artist': 'Unknown Artist',
            'duration': None,
        }
        try:
            # Duration in whole seconds when the stream info provides it.
            if hasattr(audio.info, 'length'):
                metadata['duration'] = int(audio.info.length)
            if hasattr(audio, 'tags') and audio.tags:
                metadata['title'] = str(audio.tags.get('title', [original_filename])[0])
                metadata['artist'] = str(audio.tags.get('artist', ['Unknown Artist'])[0])
        except Exception as e:
            logger.error(f"Error reading tags: {str(e)}")
            metadata['title'] = original_filename
        return metadata
    except Exception as e:
        logger.error(f"Error extracting metadata: {str(e)}")
        # Keep the schema consistent with the success path ('duration' added).
        return {
            'title': original_filename,
            'artist': 'Unknown Artist',
            'duration': None,
        }
def favicon():
    """Serve the site favicon from the static folder.

    NOTE(review): the route decorator (presumably @app.route('/favicon.ico'))
    appears stripped from this copy — confirm.
    """
    icon_response = send_from_directory('static', 'favicon.ico', mimetype='image/x-icon')
    return icon_response
# NOTE(review): the stray "Cache for 5 minutes" comment suggests this was
# decorated with @app.route('/') and @cache.cached(timeout=300) before the
# decorators were stripped — confirm against the deployed source.
def index():
    """Render the landing page for the visualizer."""
    page_title = "Soundscape - 3D Music Visualizer"
    return render_template('index.html', title=page_title)
def _reap_expired_files():
    """Delete uploads older than FILE_LIFETIME and drop their timestamp entries."""
    now = datetime.now()
    expired = []
    # Snapshot with list(): request threads mutate file_timestamps concurrently,
    # and iterating a live dict while it changes raises RuntimeError.
    for filename, uploaded_at in list(file_timestamps.items()):
        if now - uploaded_at > FILE_LIFETIME:
            filepath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
            try:
                if os.path.exists(filepath):
                    os.remove(filepath)
                    # NOTE(review): '{filename}' restored — the scraped copy
                    # had redacted the interpolation to "(unknown)".
                    logger.info(f"Deleted old file: {filename}")
                # Forget the entry even if the file was already gone.
                expired.append(filename)
            except Exception as e:
                logger.error(f"Error deleting file {filename}: {str(e)}")
    for filename in expired:
        file_timestamps.pop(filename, None)


def _reap_inactive_sessions():
    """Delete all files belonging to sessions idle longer than INACTIVE_SESSION_TIMEOUT."""
    now = time.time()
    stale = []
    # Snapshot for the same concurrent-mutation reason as above.
    for session_id, session_data in list(session_files.items()):
        last_access = session_data.get('last_access', 0)
        if now - last_access > INACTIVE_SESSION_TIMEOUT:
            for filename in session_data.get('files', []):
                filepath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
                try:
                    if os.path.exists(filepath):
                        os.remove(filepath)
                        logger.info(f"Deleted session file: {filename}")
                except Exception as e:
                    logger.error(f"Error deleting session file {filename}: {str(e)}")
            stale.append(session_id)
    for session_id in stale:
        session_files.pop(session_id, None)


def cleanup_old_files():
    """Background reaper loop: age-based and session-based cleanup every 5 minutes.

    Runs forever; intended to be started in a daemon thread.
    """
    while True:
        _reap_expired_files()
        _reap_inactive_sessions()
        time.sleep(300)
# Start the background cleanup loop. daemon=True so this thread never blocks
# interpreter shutdown (the loop itself runs forever).
cleanup_thread = threading.Thread(target=cleanup_old_files, daemon=True)
cleanup_thread.start()
def request_entity_too_large(error):
    """413 handler: returned when the request body exceeds MAX_CONTENT_LENGTH.

    NOTE(review): presumably registered via @app.errorhandler(413) before the
    decorators were stripped from this copy — confirm.
    """
    payload = {
        'success': False,
        'error': 'File too large. Maximum total size is 200MB',
    }
    return jsonify(payload), 413
def serve_static(filename):
    """Serve a static asset with a 1-hour public cache and a stable ETag.

    BUG FIX: the original derived the ETag from time.time(), which produced a
    different value on every response — If-None-Match could never match, so
    the header actively defeated conditional caching. The ETag is now derived
    from the file's name and modification time, changing only when the file does.
    """
    response = make_response(send_from_directory('static', filename))
    response.headers['Cache-Control'] = 'public, max-age=3600'
    file_path = os.path.join(app.static_folder, filename)
    try:
        mtime = os.path.getmtime(file_path)
    except OSError:
        # send_from_directory would already have 404'd a missing file; this is
        # purely defensive against races.
        mtime = 0.0
    response.headers['ETag'] = hashlib.md5(f"{filename}:{mtime}".encode()).hexdigest()
    return response
def upload_file():
    """Handle a multipart upload of up to MAX_FILES audio files.

    Expects files under the 'files[]' field and an optional X-Session-ID
    header; saved files are tracked per session so the reaper can clean them
    up. Returns JSON with per-file results plus the session ID.
    """
    logger.info('Upload request received')
    session_id = get_session_id()
    # Register the session if new, then refresh its last-access stamp.
    session_entry = session_files.setdefault(
        session_id, {'files': [], 'last_access': time.time()}
    )
    session_entry['last_access'] = time.time()

    if 'files[]' not in request.files:
        logger.warning('No files in request')
        return jsonify({'success': False, 'error': 'No files uploaded'}), 400
    files = request.files.getlist('files[]')
    logger.info(f'Received {len(files)} files')
    if len(files) > app.config['MAX_FILES']:
        return jsonify({
            'success': False,
            'error': f"Maximum {app.config['MAX_FILES']} files can be uploaded at once"
        }), 400

    def process_file(file):
        """Save one uploaded file to UPLOAD_FOLDER and return a result dict."""
        try:
            if file and allowed_file(file.filename):
                # secure_filename can return '' for hostile/odd names; fall back.
                safe_name = secure_filename(file.filename) or 'upload'
                filename = datetime.now().strftime('%Y%m%d_%H%M%S_') + safe_name
                filepath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
                # Stream to disk in 8 KiB chunks to bound memory use.
                with open(filepath, 'wb') as f:
                    while True:
                        chunk = file.read(8192)
                        if not chunk:
                            break
                        f.write(chunk)
                file_timestamps[filename] = datetime.now()
                session_files[session_id]['files'].append(filename)
                metadata = extract_metadata(filepath)
                invalidate_metadata_cache(filepath)
                return {
                    'filename': file.filename,
                    'success': True,
                    # NOTE(review): '{filename}' restored — the scraped copy
                    # had redacted this interpolation to "(unknown)".
                    'filepath': f'/static/uploads/{filename}',
                    'metadata': metadata,
                    'session_id': session_id,  # returned so the client can persist it
                }
            return {
                'filename': file.filename,
                'success': False,
                'error': 'Invalid file type',
            }
        except Exception as e:
            logger.error(f'Upload error for {file.filename}: {str(e)}')
            return {
                'filename': file.filename,
                'success': False,
                'error': 'Server error during upload',
            }

    # BUG FIX: the original wrapped the shared pool in `with THREAD_POOL as
    # executor:`, whose __exit__ calls shutdown() — permanently closing the
    # module-level pool after the first request and breaking every later
    # upload. Map over the long-lived pool without shutting it down.
    results = list(THREAD_POOL.map(process_file, files))
    return jsonify({
        'success': True,
        'files': results,
        'session_id': session_id,
    })
def serve_audio(filename):
    """Serve an uploaded audio file from the uploads directory.

    NOTE(review): route decorator appears stripped from this copy — confirm
    the original URL rule.
    """
    upload_folder = app.config['UPLOAD_FOLDER']
    return send_from_directory(upload_folder, filename)
# Remove the whole uploads directory when the interpreter exits.
def cleanup():
    # Best-effort: ignore_errors tolerates files already removed by the reaper.
    shutil.rmtree(UPLOAD_DIR, ignore_errors=True)
atexit.register(cleanup)
# Enable gzip/deflate response compression app-wide (flask_compress).
Compress(app)
| # Add caching headers | |
| def add_header(response): | |
| if 'Cache-Control' not in response.headers: | |
| response.headers['Cache-Control'] = 'public, max-age=300' | |
| return response | |
# Invalidate any memoized extract_metadata() result for this path.
# NOTE(review): extract_metadata carries no visible @cache.memoize decorator
# in this copy (decorators appear stripped by the scrape); flask_caching's
# delete_memoized behavior on a non-memoized function varies by version —
# confirm the decorator exists in the deployed source.
def invalidate_metadata_cache(filepath):
    cache.delete_memoized(extract_metadata, filepath)
def end_session():
    """Explicitly end a session: delete all of its files and forget its record.

    The session is identified by the X-Session-ID header; responds 404 when it
    is unknown. NOTE(review): presumably a POST route before the decorators
    were stripped from this copy — confirm.
    """
    session_id = request.headers.get('X-Session-ID')
    if session_id and session_id in session_files:
        for filename in session_files[session_id].get('files', []):
            filepath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
            try:
                if os.path.exists(filepath):
                    os.remove(filepath)
                    # NOTE(review): '{filename}' restored — the scraped copy
                    # had redacted these interpolations to "(unknown)".
                    logger.info(f"Deleted session file: {filename}")
            except Exception as e:
                logger.error(f"Error deleting session file {filename}: {str(e)}")
        session_files.pop(session_id, None)
        return jsonify({'success': True, 'message': 'Session ended and files cleaned up'})
    return jsonify({'success': False, 'error': 'Session not found'}), 404
if __name__ == '__main__':
    # Dev entry point: listen on all interfaces, port 7860 (presumably chosen
    # for Hugging Face Spaces, which expects that port — confirm deployment).
    app.run(host='0.0.0.0', port=7860)