# ============================================================
# How to use SCLM Option B
# ============================================================
# Loads the Llama-3.2-1B base model, downloads the EARCP config
# and weights from the Hugging Face Hub, and prepares them for
# wiring into the SCLM Option B wrapper (classes defined in the
# accompanying notebook).
from transformers import AutoModelForCausalLM, AutoTokenizer
from huggingface_hub import hf_hub_download
import torch
import json

# 1. Load the base model (fp16, auto device placement).
BASE_MODEL = "meta-llama/Llama-3.2-1B"
base_model = AutoModelForCausalLM.from_pretrained(
    BASE_MODEL,
    torch_dtype=torch.float16,
    device_map="auto",
)
tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL)

# 2. Download the EARCP config and weights from the Hub.
config_path = hf_hub_download(
    repo_id="amewebstudio/sclm-modelEarcp-optionB",
    filename="sclm_config.json",
)
weights_path = hf_hub_download(
    repo_id="amewebstudio/sclm-modelEarcp-optionB",
    filename="earcp_weights.pt",
)

# 3. Load the config and weights.
# Explicit encoding: JSON files on the Hub are UTF-8; don't rely on
# the platform locale default.
with open(config_path, encoding="utf-8") as f:
    config_dict = json.load(f)
# weights_only=True: the checkpoint comes from a remote repo, so
# restrict unpickling to tensors/containers instead of arbitrary
# Python objects (code-execution risk with plain torch.load).
earcp_weights = torch.load(weights_path, weights_only=True)

# 4. Initialize SCLM (requires the classes from the notebook).
# model = SCLMModelOptionB(config, base_model)
# model.earcp.load_state_dict(earcp_weights)

print("SCLM Option B chargé avec succès!")