Commit 26ec3cc
1 Parent(s): b238807
Fix paths for HuggingFace - MODEL_PATH and checkpoint dirs
ubermenschetien_v2_full.py  CHANGED
@@ -55,16 +55,16 @@ DATA_DIR = os.path.join(ROOT, "data")
 SCRIPT_DIR = os.path.join(ROOT, "scripts")
 RUN_DIR = os.path.join(ROOT, "runs")
 LHT_DIR = os.path.join(ROOT, "lht")
-CHECKPOINTS_DIR = os.path.join(ROOT, "
+CHECKPOINTS_DIR = os.path.join(ROOT, "dense_checkpoints")
 TRAINING_DIR = os.path.join(ROOT, "condensator_output")
 LOGS_DIR = os.path.join(ROOT, "improvement_logs")
 ROLLBACK_DIR = os.path.join(ROOT, "rollback_checkpoints")

 # Model paths
-MODEL_PATH = "/
-DENSE_CHECKPOINT = os.path.join(ROOT, "
-CFHOT_CHECKPOINT = os.path.join(ROOT, "
-MULTI_HEAD_DIR = os.path.join(ROOT, "
+MODEL_PATH = "NousResearch/Hermes-3-Llama-3.1-8B"
+DENSE_CHECKPOINT = os.path.join(ROOT, "dense_checkpoints/step_100")
+CFHOT_CHECKPOINT = os.path.join(ROOT, "cfhot_checkpoints/ckpt_5000")
+MULTI_HEAD_DIR = os.path.join(ROOT, "multi_head_checkpoints")

 for path in [DATA_DIR, SCRIPT_DIR, RUN_DIR, LHT_DIR, LOGS_DIR, ROLLBACK_DIR]:
     os.makedirs(path, exist_ok=True)

@@ -853,7 +853,7 @@ def load_llm(checkpoint_path: str = None):

     print(f"[llm] Loading base model: {MODEL_PATH}")

-    _tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, use_fast=True, local_files_only=
+    _tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, use_fast=True, local_files_only=False)
     if _tokenizer.pad_token_id is None:
         _tokenizer.pad_token = _tokenizer.eos_token

@@ -869,7 +869,7 @@ def load_llm(checkpoint_path: str = None):
         quantization_config=bnb_config,
         device_map="auto",
         torch_dtype=torch.bfloat16,
-        local_files_only=
+        local_files_only=False
     )

     # Load DENSE checkpoint

@@ -1333,7 +1333,7 @@ print("Loading model for CONSERVATIVE training...")
 MODEL_PATH = "{MODEL_PATH}"
 CHECKPOINT = "{current_ckpt}"

-tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, local_files_only=
+tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, local_files_only=False)
 tokenizer.pad_token = tokenizer.eos_token

 model = AutoModelForCausalLM.from_pretrained(

@@ -1345,7 +1345,7 @@ model = AutoModelForCausalLM.from_pretrained(
     ),
     device_map="auto",
     torch_dtype=torch.bfloat16,
-    local_files_only=
+    local_files_only=False
 )

 if os.path.exists(CHECKPOINT):
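What the change amounts to: MODEL_PATH now points at the Hub repo id NousResearch/Hermes-3-Llama-3.1-8B instead of a local filesystem path, and every from_pretrained call flips local_files_only to False, so transformers is allowed to download the weights when the script runs on HuggingFace rather than insisting on an on-disk copy. Below is a minimal sketch of the resulting load path; the diff references bnb_config without showing its definition, so the 4-bit BitsAndBytesConfig here is an assumed stand-in, not the script's actual config.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

MODEL_PATH = "NousResearch/Hermes-3-Llama-3.1-8B"

# local_files_only=False lets transformers fetch from the Hub on first use and
# serve from the local cache afterwards; True would fail on a fresh Space
# where nothing is cached yet.
tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, use_fast=True, local_files_only=False)
if tokenizer.pad_token_id is None:
    tokenizer.pad_token = tokenizer.eos_token  # Llama-family models ship no pad token

bnb_config = BitsAndBytesConfig(  # assumed 4-bit setup; not shown in the diff
    load_in_4bit=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_PATH,
    quantization_config=bnb_config,
    device_map="auto",
    torch_dtype=torch.bfloat16,
    local_files_only=False,
)

The first call populates the HuggingFace cache (under HF_HOME by default), so subsequent restarts of the same environment load from disk without re-downloading.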
| 55 |
SCRIPT_DIR = os.path.join(ROOT, "scripts")
|
| 56 |
RUN_DIR = os.path.join(ROOT, "runs")
|
| 57 |
LHT_DIR = os.path.join(ROOT, "lht")
|
| 58 |
+
CHECKPOINTS_DIR = os.path.join(ROOT, "dense_checkpoints")
|
| 59 |
TRAINING_DIR = os.path.join(ROOT, "condensator_output")
|
| 60 |
LOGS_DIR = os.path.join(ROOT, "improvement_logs")
|
| 61 |
ROLLBACK_DIR = os.path.join(ROOT, "rollback_checkpoints")
|
| 62 |
|
| 63 |
# Model paths
|
| 64 |
+
MODEL_PATH = "NousResearch/Hermes-3-Llama-3.1-8B"
|
| 65 |
+
DENSE_CHECKPOINT = os.path.join(ROOT, "dense_checkpoints/step_100")
|
| 66 |
+
CFHOT_CHECKPOINT = os.path.join(ROOT, "cfhot_checkpoints/ckpt_5000")
|
| 67 |
+
MULTI_HEAD_DIR = os.path.join(ROOT, "multi_head_checkpoints")
|
| 68 |
|
| 69 |
for path in [DATA_DIR, SCRIPT_DIR, RUN_DIR, LHT_DIR, LOGS_DIR, ROLLBACK_DIR]:
|
| 70 |
os.makedirs(path, exist_ok=True)
|
|
|
|
| 853 |
|
| 854 |
print(f"[llm] Loading base model: {MODEL_PATH}")
|
| 855 |
|
| 856 |
+
_tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, use_fast=True, local_files_only=False)
|
| 857 |
if _tokenizer.pad_token_id is None:
|
| 858 |
_tokenizer.pad_token = _tokenizer.eos_token
|
| 859 |
|
|
|
|
| 869 |
quantization_config=bnb_config,
|
| 870 |
device_map="auto",
|
| 871 |
torch_dtype=torch.bfloat16,
|
| 872 |
+
local_files_only=False
|
| 873 |
)
|
| 874 |
|
| 875 |
# Load DENSE checkpoint
|
|
|
|
| 1333 |
MODEL_PATH = "{MODEL_PATH}"
|
| 1334 |
CHECKPOINT = "{current_ckpt}"
|
| 1335 |
|
| 1336 |
+
tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, local_files_only=False)
|
| 1337 |
tokenizer.pad_token = tokenizer.eos_token
|
| 1338 |
|
| 1339 |
model = AutoModelForCausalLM.from_pretrained(
|
|
|
|
| 1345 |
),
|
| 1346 |
device_map="auto",
|
| 1347 |
torch_dtype=torch.bfloat16,
|
| 1348 |
+
local_files_only=False
|
| 1349 |
)
|
| 1350 |
|
| 1351 |
if os.path.exists(CHECKPOINT):
|
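One thing the commit leaves alone: the makedirs loop in the first hunk still covers only DATA_DIR through ROLLBACK_DIR, so the dense_checkpoints, cfhot_checkpoints, and multi_head_checkpoints directories the new paths point at are never pre-created. If anything writes there before a trainer creates them, extending the loop would close that gap. A hypothetical sketch, not part of the commit; ROOT's definition is assumed since the diff does not show it:

import os

# Assumed: ROOT is the script's own directory, as os.path.join(ROOT, ...) suggests.
ROOT = os.path.dirname(os.path.abspath(__file__))

EXISTING = ["data", "scripts", "runs", "lht", "improvement_logs", "rollback_checkpoints"]
NEW = ["dense_checkpoints", "cfhot_checkpoints", "multi_head_checkpoints"]

# Pre-create the checkpoint directories alongside the ones the script already makes.
for name in EXISTING + NEW:
    os.makedirs(os.path.join(ROOT, name), exist_ok=True)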