File size: 1,112 Bytes
1a5ce94
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30

# ============================================================
# How to use SCLM Option B
# ============================================================

from transformers import AutoModelForCausalLM, AutoTokenizer
from huggingface_hub import hf_hub_download
import torch
import json

# 1. Load the base model
BASE_MODEL = "meta-llama/Llama-3.2-1B"
base_model = AutoModelForCausalLM.from_pretrained(
    BASE_MODEL, torch_dtype=torch.float16, device_map="auto"
)
tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL)

# 2. Download the EARCP weights
# Single constant so both files are guaranteed to come from the same repo.
REPO_ID = "amewebstudio/sclm-modelEarcp-optionB"
config_path = hf_hub_download(repo_id=REPO_ID, filename="sclm_config.json")
weights_path = hf_hub_download(repo_id=REPO_ID, filename="earcp_weights.pt")

# 3. Load the config and the weights
with open(config_path, encoding="utf-8") as f:
    config_dict = json.load(f)
# weights_only=True: refuse arbitrary pickled objects in a downloaded file
# (code-execution risk); map_location="cpu" so loading does not depend on the
# device the checkpoint was saved from — move tensors later as needed.
earcp_weights = torch.load(weights_path, map_location="cpu", weights_only=True)

# 4. Initialize SCLM (requires the classes from the notebook)
# model = SCLMModelOptionB(config_dict, base_model)
# model.earcp.load_state_dict(earcp_weights)

print("SCLM Option B chargé avec succès!")