PyTorch
gpt2
achille-fusco committed on
Commit
9c8a062
·
verified ·
1 Parent(s): c2760fe

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +0 -10
config.json CHANGED
@@ -1,9 +1,4 @@
1
  {
2
- "tokenizer_dir": "/home/achille.fusco/pr_baby_lm/babyLM_2025/03-models/gpt2_ParFindFast_10M",
3
- "data_dir": "01-data",
4
- "train_glob": "*.train",
5
- "valid_glob": "*.valid",
6
- "output_dir": "03-models/gpt2_ParFindFast_10M",
7
  "datapoint_length" : 512,
8
  "training_type" : "strict_small",
9
  "n_epochs" : 10,
@@ -15,11 +10,6 @@
15
  "sft_learning_rate" : 0.00005,
16
  "gradient_clip_norm" : 1,
17
  "seed" : -1,
18
- "base_folder" : "03-models",
19
- "experiment_name" : "gpt2_ParFindFast_10M",
20
- "use_wandb" : false,
21
- "wandb_experiment_name" : "gpt2_ParFindFast",
22
- "wandb_project_name" : "BabyLM-2025",
23
  "tokenizer_class": "ParadigmTokenizerWrapper",
24
  "model_type": "gpt2",
25
  "vocab_size": 29215
 
1
  {
 
 
 
 
 
2
  "datapoint_length" : 512,
3
  "training_type" : "strict_small",
4
  "n_epochs" : 10,
 
10
  "sft_learning_rate" : 0.00005,
11
  "gradient_clip_norm" : 1,
12
  "seed" : -1,
 
 
 
 
 
13
  "tokenizer_class": "ParadigmTokenizerWrapper",
14
  "model_type": "gpt2",
15
  "vocab_size": 29215