# (HF Spaces page chrome — "Spaces: Paused" — scrape residue, not part of the module source)
# model_loader.py
import fnmatch
import glob
import json
import os
import traceback

from huggingface_hub import hf_hub_download, list_repo_files
# --- Global configuration and variables ---
# These variables are imported by app.py.
MODEL_CONFIG_FILE = "models.json"   # JSON config listing the models to download
DEFAULT_LOCAL_DIR = "./models"      # fallback download directory (config may override)
MODEL_DROPDOWN_CHOICES = []         # display names for the UI dropdown (filled by download_models)
MODEL_FILE_MAPPING = {}             # display name -> local path of the model file to load
os.makedirs(DEFAULT_LOCAL_DIR, exist_ok=True)
# ----------------------------------------------------------------------
def download_models():
    """Read models.json, download the configured model files and fill the globals.

    Reads ``MODEL_CONFIG_FILE``, authenticates against the Hugging Face Hub
    with the ``HF_TOKEN`` environment variable (if set), downloads every
    configured model into ``local_dir`` and populates the module-level
    ``MODEL_DROPDOWN_CHOICES`` (UI names) and ``MODEL_FILE_MAPPING``
    (name -> local file path) globals.

    Side effects only; returns ``None``. On a missing or invalid config file
    an ``"ERROR: ..."`` placeholder is appended to ``MODEL_DROPDOWN_CHOICES``
    so the UI can surface the problem instead of crashing.
    """
    global MODEL_DROPDOWN_CHOICES
    global MODEL_FILE_MAPPING

    # Reset so repeated calls do not accumulate duplicate entries.
    MODEL_DROPDOWN_CHOICES.clear()
    MODEL_FILE_MAPPING.clear()

    hf_token = os.environ.get('HF_TOKEN')
    if not hf_token:
        print("⚠️ HF_TOKEN not found")
    else:
        print("🔑 HF_TOKEN found")

    try:
        with open(MODEL_CONFIG_FILE, 'r') as f:
            config = json.load(f)
    except FileNotFoundError:
        print(f"❌ ERROR: '{MODEL_CONFIG_FILE}' not found.")
        MODEL_DROPDOWN_CHOICES.append("ERROR: models.json missing")
        return
    except json.JSONDecodeError as e:
        print(f"❌ ERROR: {MODEL_CONFIG_FILE} is not a valid JSON. Error: {e}")
        MODEL_DROPDOWN_CHOICES.append("ERROR: models.json invalid")
        return

    local_dir = config.get('local_dir', DEFAULT_LOCAL_DIR)
    if not os.path.exists(local_dir):
        os.makedirs(local_dir, exist_ok=True)
        print(f"Local directory {local_dir} created.")

    models_list = config.get('models', [])
    print(f"✨ Starting download of {len(models_list)} configured models...")

    for model_entry in models_list:
        name = model_entry.get('name')
        repo_id = model_entry.get('repo_id')
        file_name = model_entry.get('file_name')
        folder_name = model_entry.get('folder_name')
        meta_repo = model_entry.get('meta_repo')
        meta_file = model_entry.get('meta_file')

        if not name or not repo_id:
            print(f"⚠️ WARNING: Entry without 'name' or 'repo_id' found: {model_entry}")
            continue

        # The name is listed even if the download later fails, matching the
        # previous behavior (the UI shows the entry either way).
        MODEL_DROPDOWN_CHOICES.append(name)

        # Exactly one download strategy per entry, checked in priority order.
        if file_name:
            _download_single_file(name, repo_id, file_name, local_dir, hf_token)
        elif folder_name:
            _download_folder(name, repo_id, folder_name, local_dir, hf_token)
        elif meta_repo and meta_file:
            _download_meta_file(name, meta_repo, meta_file, local_dir, hf_token)
        else:
            print(f"⚠️ WARNING: For '{name}' neither 'file_name', 'folder_name' nor 'meta_file' was given.")

    print("--- Download process completed. ---")


def _download_single_file(name, repo_id, file_name, local_dir, hf_token):
    """Download one file — or every file matching a glob pattern — for *name*.

    Records the local path of the (first matching) file in MODEL_FILE_MAPPING.
    Errors are logged, never raised, so one bad entry cannot abort the run.
    """
    print(f" -> Downloading single file for '{name}': {file_name}")
    try:
        if '*' in file_name or '?' in file_name or '[' in file_name:
            # Glob pattern: resolve it against the repo's full file listing.
            # (Use the public fnmatch module, not glob's internal alias.)
            all_files = list_repo_files(repo_id=repo_id, token=hf_token)
            matching_files = fnmatch.filter(all_files, file_name)
            if not matching_files:
                print(f" ⚎ No files matched pattern: {file_name}")
                return
            for matching_file in matching_files:
                print(f" - Downloading {matching_file}")
                hf_hub_download(
                    repo_id=repo_id,
                    filename=matching_file,
                    local_dir=local_dir,
                    token=hf_token
                )
            # The first match is what model initialization will open.
            MODEL_FILE_MAPPING[name] = os.path.join(local_dir, matching_files[0])
            print(f" -> Downloaded {len(matching_files)} files for {name}")
        else:
            # Plain (non-pattern) file download.
            hf_hub_download(
                repo_id=repo_id,
                filename=file_name,
                local_dir=local_dir,
                token=hf_token
            )
            MODEL_FILE_MAPPING[name] = os.path.join(local_dir, file_name)
            print(f" -> Downloaded: {name}")
    except Exception as e:
        print(f"❌ ERROR during download of {name} ({file_name}): {e}")


def _download_folder(name, repo_id, folder_name, local_dir, hf_token):
    """Download every repo file under ``folder_name/`` for *name*.

    Maps the model to the first part in sorted order — llama.cpp-style
    multi-part GGUF loaders open the first shard and find the rest themselves.
    """
    print(f" -> Downloading folder for '{name}': {folder_name}")
    try:
        all_files = list_repo_files(repo_id=repo_id, token=hf_token)
        files_to_download = sorted(
            filename for filename in all_files
            if filename.startswith(f"{folder_name}/")
        )
    except Exception as e:
        print(f"❌ ERROR during listing files in repo {repo_id}: {e}")
        return
    if not files_to_download:
        print(f"⚠️ WARNING: No files found in folder '{folder_name}'.")
        return
    first_part_filename = files_to_download[0]
    for filename in files_to_download:
        # Report the actual file being fetched (was a lost interpolation).
        print(f" - Downloading {filename}")
        try:
            hf_hub_download(
                repo_id=repo_id,
                filename=filename,
                local_dir=local_dir,
                token=hf_token
            )
        except Exception as e:
            print(f"❌ ERROR during download of {filename}: {e}")
    # For Llama-CPP initialization store the path to the first part:
    # <local_dir>/<folder_name>/<first_file_part>
    MODEL_FILE_MAPPING[name] = os.path.join(local_dir, first_part_filename)
    print(f" -> Downloaded folder: {name}. First part: {MODEL_FILE_MAPPING[name]}")


def _download_meta_file(name, meta_repo, meta_file, local_dir, hf_token):
    """Download a meta file for *name* from a separate repo (``meta_repo``).

    Records the meta file's local path in MODEL_FILE_MAPPING; on failure a
    full traceback is printed for debugging and the error is swallowed.
    """
    print(f" -> Downloading meta file for '{name}': {meta_file} from {meta_repo}")
    try:
        # Debug: Print the download parameters
        print(f" Debug: Downloading meta file from repo {meta_repo} with filename {meta_file}")
        hf_hub_download(
            repo_id=meta_repo,
            filename=meta_file,
            local_dir=local_dir,
            token=hf_token
        )
        MODEL_FILE_MAPPING[name] = os.path.join(local_dir, meta_file)
        print(f" -> Downloaded meta file: {name}")
    except Exception as e:
        print(f"❌ ERROR during download of meta file {name} ({meta_file}): {e}")
        # Print full details — meta downloads have been the flaky case here.
        traceback.print_exc()
# --- Trigger the downloads once at import time, so the dropdown choices and
# file mapping are already populated when app.py imports this module. ---
download_models()
# ----------------------------------------