CHYang25 committed on
Commit 795612c (verified) · Parent: 5fe573a

Upload folder using huggingface_hub

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full change set.
Files changed (50):
  1. .gitattributes +1 -0
  2. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/.hydra/config.yaml +117 -0
  3. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/.hydra/hydra.yaml +156 -0
  4. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/.hydra/overrides.yaml +1 -0
  5. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/config.json +42 -0
  6. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/generation_config.json +7 -0
  7. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/merges.txt +0 -0
  8. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/mlp_projector.bin +3 -0
  9. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/model.safetensors +3 -0
  10. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/optimizer.pt +3 -0
  11. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/rng_state.pth +3 -0
  12. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/scheduler.pt +3 -0
  13. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/special_tokens_map.json +34 -0
  14. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/tokenizer.json +0 -0
  15. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/tokenizer_config.json +155 -0
  16. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/trainer_state.json +0 -0
  17. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/training_args.bin +3 -0
  18. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/vocab.json +0 -0
  19. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/config.json +42 -0
  20. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/generation_config.json +7 -0
  21. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/merges.txt +0 -0
  22. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/mlp_projector.bin +3 -0
  23. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/model.safetensors +3 -0
  24. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/optimizer.pt +3 -0
  25. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/rng_state.pth +3 -0
  26. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/scheduler.pt +3 -0
  27. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/special_tokens_map.json +34 -0
  28. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/tokenizer.json +0 -0
  29. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/tokenizer_config.json +155 -0
  30. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/trainer_state.json +0 -0
  31. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/training_args.bin +3 -0
  32. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/vocab.json +0 -0
  33. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/config.json +42 -0
  34. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/generation_config.json +7 -0
  35. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/merges.txt +0 -0
  36. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/mlp_projector.bin +3 -0
  37. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/model.safetensors +3 -0
  38. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/optimizer.pt +3 -0
  39. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/rng_state.pth +3 -0
  40. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/scheduler.pt +3 -0
  41. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/special_tokens_map.json +34 -0
  42. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/tokenizer.json +0 -0
  43. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/tokenizer_config.json +155 -0
  44. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/trainer_state.json +0 -0
  45. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/training_args.bin +3 -0
  46. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/vocab.json +0 -0
  47. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/config.json +42 -0
  48. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/generation_config.json +7 -0
  49. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/model.safetensors +3 -0
  50. 2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/normalizer.pt +3 -0
.gitattributes CHANGED
@@ -44,3 +44,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 2025.10.23/19.36.20_train_llm_lowdim_parking-v0/wandb/run-20251023_193621-91yi7sb5/run-91yi7sb5.wandb filter=lfs diff=lfs merge=lfs -text
 2025.10.23/01.22.15_train_llm_lowdim_push-back-v2/wandb/run-20251023_012216-7spq0q9f/run-7spq0q9f.wandb filter=lfs diff=lfs merge=lfs -text
 2025.10.22/23.44.28_train_llm_lowdim_box-close-v2/wandb/run-20251022_234429-f1waphwn/run-f1waphwn.wandb filter=lfs diff=lfs merge=lfs -text
+2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/wandb/run-20251031_120024-wy9rh57a/run-wy9rh57a.wandb filter=lfs diff=lfs merge=lfs -text
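The commit message above indicates this folder was pushed with the huggingface_hub client, with the new wandb run file routed through Git LFS by the rule just added. A minimal sketch of how such a commit is typically produced (the repo_id below is a placeholder assumption, not taken from this page):

from huggingface_hub import HfApi

api = HfApi()
# Uploads the local run directory as a single commit; LFS-tracked patterns
# in .gitattributes decide which files become pointer files.
api.upload_folder(
    folder_path="data/outputs/2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed",
    repo_id="CHYang25/llm-bc-runs",  # placeholder repo_id
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)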
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/.hydra/config.yaml ADDED
@@ -0,0 +1,117 @@
+name: train_llm_lowdim
+_target_: llmbc.workspace.train_llm_workspace.TrainLLMWorkspace
+obs_dim: ${task.obs_dim}
+action_dim: ${task.action_dim}
+horizon: 1
+n_obs_steps: 1
+n_action_steps: 1
+task_name: ${task.name}
+exp_name: train llm
+model_name: ${llm.name}
+use_quantization: ${llm.use_quantization}
+lora_config: ${llm.lora_config}
+dataset:
+  test_data_ratio: 0.01
+debug: false
+training:
+  seed: 42
+  per_device_train_batch_size: 128
+  per_device_eval_batch_size: 128
+  gradient_accumulation_steps: 1
+  optim: paged_adamw_32bit
+  num_train_epochs: 10
+  eval_strategy: steps
+  logging_steps: 1
+  warmup_steps: 10
+  logging_strategy: steps
+  learning_rate: 1.0e-05
+  fp16: false
+  bf16: true
+  tf32: true
+  group_by_length: true
+  report_to: wandb
+  save_steps: 10000
+  eval_steps: 10
+  use_joint_mlp_projector: ${llm.use_joint_mlp_projector}
+  joint_obs_action_mlp_lr: 1.0e-05
+trainer:
+  obs_dim: ${obs_dim}
+  action_dim: ${action_dim}
+  use_joint_mlp_projector: ${llm.use_joint_mlp_projector}
+  max_seq_length: ${llm.max_length}
+  dataset_text_field: text
+  packing: false
+logging:
+  project: llm_module_finetuning
+  resume: true
+  mode: online
+  name: ${now:%Y.%m.%d-%H.%M.%S}_${name}_${task_name}
+  tags:
+  - ${name}
+  - ${task_name}
+  - ${exp_name}
+  id: null
+  group: null
+multi_run:
+  run_dir: data/outputs/${now:%Y.%m.%d}/${now:%H.%M.%S}_${name}_${task_name}
+  wandb_name_base: ${now:%Y.%m.%d-%H.%M.%S}_${name}_${task_name}
+task:
+  name: blockpush_lowdim_seed
+  obs_dim: 16
+  action_dim: 2
+  keypoint_dim: 2
+  obs_eef_target: true
+  env_runner:
+    _target_: llmbc.env_runner.blockpushing_lowdim_runner.BlockPushingLowdimRunner
+    env_name: llf-blockpushing-BlockPushMultimodal-v0
+    instruction_type: b
+    feedback_type:
+    - hp
+    - hn
+    - fp
+    visual: false
+    n_train: 10
+    n_test: 50
+    n_envs: 10
+    max_steps: 350
+    n_obs_steps: ${n_obs_steps}
+    n_action_steps: ${n_action_steps}
+    discount: 0.99
+  dataset:
+    _target_: llmbc.dataset.blockpush_lowdim_dataset.BlockPushLowdimDataset
+    data_path: datasets/BlockPushMultimodal-v0-general.pt
+    data_path2: datasets/BlockPushMultimodal-v0.pt
+    horizon: ${horizon}
+    pad_before: ${eval:'${n_obs_steps}-1'}
+    pad_after: ${eval:'${n_action_steps}-1'}
+    obs_eef_target: ${task.obs_eef_target}
+    use_manual_normalizer: false
+    val_ratio: 0.02
+    dummy_normalizer: false
+  instructor:
+    _target_: llmbc.translator.instructor.blockpush_instructor.BlockPushMultimodal_v0_instructor.BlockPushMultimodalV0Instructor
+llm:
+  name: HuggingFaceTB/SmolLM2-135M-Instruct
+  model_name: SmolLM2-135M-Instruct
+  config_target: llmbc.model.llm.llama_lowdim_model.LowdimLlamaConfig
+  causal_lm_target: llmbc.model.llm.llama_lowdim_model.LowdimLlamaForCausalLM
+  use_quantization: false
+  use_joint_mlp_projector: true
+  llm_mode: mlp-finetuned
+  finetune_mode: orig
+  checkpoint: data/outputs/2025.10.27/13.53.44_train_mlp_projector_blockpush_lowdim_seed/checkpoints/latest.ckpt
+  max_length: 100
+  lora_config:
+    r: 32
+    lora_alpha: 64
+    lora_dropout: 0.05
+    bias: none
+    task_type: CAUSAL_LM
+  prompter:
+    _target_: llmbc.translator.prompter.smollm2_prompter.SmolLM2Prompter
+    use_joint_mlp_projector: true
+hydra:
+  job:
+    override_dirname: ${model_name}
+  run:
+    dir: data/outputs/${now:%Y.%m.%d}/${now:%H.%M.%S}_${model_name}
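This .hydra/config.yaml is the resolved-input snapshot Hydra writes for every run, so it can be reloaded to inspect the exact hyperparameters. A minimal sketch using OmegaConf (plain field access only; project-specific resolvers such as ${eval:...} and ${now:...} are not registered here):

from omegaconf import OmegaConf

cfg = OmegaConf.load(
    "2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/.hydra/config.yaml"
)
print(cfg.training.learning_rate)  # 1e-05
print(cfg.task.name)               # blockpush_lowdim_seed
print(cfg.obs_dim)                 # 16, resolved from ${task.obs_dim}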
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/.hydra/hydra.yaml ADDED
@@ -0,0 +1,156 @@
+hydra:
+  run:
+    dir: data/outputs/${now:%Y.%m.%d}/${now:%H.%M.%S}_${name}_${task_name}
+  sweep:
+    dir: data/outputs/${now:%Y.%m.%d}/${now:%H.%M.%S}_${name}_${task_name}
+    subdir: ${hydra.job.num}
+  launcher:
+    _target_: hydra._internal.core_plugins.basic_launcher.BasicLauncher
+  sweeper:
+    _target_: hydra._internal.core_plugins.basic_sweeper.BasicSweeper
+    max_batch_size: null
+    params: null
+  help:
+    app_name: ${hydra.job.name}
+    header: '${hydra.help.app_name} is powered by Hydra.
+
+      '
+    footer: 'Powered by Hydra (https://hydra.cc)
+
+      Use --hydra-help to view Hydra specific help
+
+      '
+    template: '${hydra.help.header}
+
+      == Configuration groups ==
+
+      Compose your configuration from those groups (group=option)
+
+
+      $APP_CONFIG_GROUPS
+
+
+      == Config ==
+
+      Override anything in the config (foo.bar=value)
+
+
+      $CONFIG
+
+
+      ${hydra.help.footer}
+
+      '
+  hydra_help:
+    template: 'Hydra (${hydra.runtime.version})
+
+      See https://hydra.cc for more info.
+
+
+      == Flags ==
+
+      $FLAGS_HELP
+
+
+      == Configuration groups ==
+
+      Compose your configuration from those groups (For example, append hydra/job_logging=disabled
+      to command line)
+
+
+      $HYDRA_CONFIG_GROUPS
+
+
+      Use ''--cfg hydra'' to Show the Hydra config.
+
+      '
+    hydra_help: ???
+  hydra_logging:
+    version: 1
+    formatters:
+      simple:
+        format: '[%(asctime)s][HYDRA] %(message)s'
+    handlers:
+      console:
+        class: logging.StreamHandler
+        formatter: simple
+        stream: ext://sys.stdout
+    root:
+      level: INFO
+      handlers:
+      - console
+    loggers:
+      logging_example:
+        level: DEBUG
+    disable_existing_loggers: false
+  job_logging:
+    version: 1
+    formatters:
+      simple:
+        format: '[%(asctime)s][%(name)s][%(levelname)s] - %(message)s'
+    handlers:
+      console:
+        class: logging.StreamHandler
+        formatter: simple
+        stream: ext://sys.stdout
+      file:
+        class: logging.FileHandler
+        formatter: simple
+        filename: ${hydra.runtime.output_dir}/${hydra.job.name}.log
+    root:
+      level: INFO
+      handlers:
+      - console
+      - file
+    disable_existing_loggers: false
+  env: {}
+  mode: RUN
+  searchpath: []
+  callbacks: {}
+  output_subdir: .hydra
+  overrides:
+    hydra:
+    - hydra.mode=RUN
+    task: []
+  job:
+    name: train_llm_workspace
+    chdir: null
+    override_dirname: ''
+    id: ???
+    num: ???
+    config_name: train_llm_workspace
+    env_set: {}
+    env_copy: []
+    config:
+      override_dirname:
+        kv_sep: '='
+        item_sep: ','
+        exclude_keys: []
+  runtime:
+    version: 1.2.0
+    version_base: '1.2'
+    cwd: /home/chyang/workspace/LLM-BC
+    config_sources:
+    - path: hydra.conf
+      schema: pkg
+      provider: hydra
+    - path: /home/chyang/workspace/LLM-BC/llmbc/config
+      schema: file
+      provider: main
+    - path: ''
+      schema: structured
+      provider: schema
+    output_dir: /home/chyang/workspace/LLM-BC/data/outputs/2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed
+    choices:
+      llm: smollm2-135m-instruct
+      task: BlockPushMultimodal-v0
+      hydra/env: default
+      hydra/callbacks: null
+      hydra/job_logging: default
+      hydra/hydra_logging: default
+      hydra/hydra_help: default
+      hydra/help: default
+      hydra/sweeper: basic
+      hydra/launcher: basic
+      hydra/output: default
+  verbose: false
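The hydra.runtime block records the config source, config_name, and group choices, which is enough to re-compose this configuration offline. A minimal sketch (assumes a local checkout of LLM-BC so that llmbc/config exists at the recorded path):

from hydra import compose, initialize_config_dir

with initialize_config_dir(
    config_dir="/home/chyang/workspace/LLM-BC/llmbc/config", version_base="1.2"
):
    cfg = compose(
        config_name="train_llm_workspace",
        # Group choices taken from hydra.runtime.choices above.
        overrides=["task=BlockPushMultimodal-v0", "llm=smollm2-135m-instruct"],
    )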
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/.hydra/overrides.yaml ADDED
@@ -0,0 +1 @@
+[]
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/config.json ADDED
@@ -0,0 +1,42 @@
+{
+  "_name_or_path": "HuggingFaceTB/SmolLM2-135M-Instruct",
+  "action_dim": 2,
+  "architectures": [
+    "LowdimLlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "head_dim": 64,
+  "hidden_act": "silu",
+  "hidden_size": 576,
+  "initializer_range": 0.041666666666666664,
+  "intermediate_size": 1536,
+  "is_llama_config": true,
+  "max_position_embeddings": 8192,
+  "mlp_bias": false,
+  "model_type": "llama_lowdim",
+  "num_attention_heads": 9,
+  "num_hidden_layers": 30,
+  "num_key_value_heads": 3,
+  "obs_dim": 16,
+  "pad_token_id": 2,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_interleaved": false,
+  "rope_scaling": null,
+  "rope_theta": 100000,
+  "tie_word_embeddings": true,
+  "torch_dtype": "float32",
+  "transformers.js_config": {
+    "kv_cache_dtype": {
+      "fp16": "float16",
+      "q4f16": "float16"
+    }
+  },
+  "transformers_version": "4.47.1",
+  "use_cache": false,
+  "use_joint_mlp_projector": true,
+  "vocab_size": 49152
+}
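The model_type llama_lowdim and the LowdimLlamaForCausalLM architecture are custom classes from the llmbc package (their import paths appear as config_target and causal_lm_target in .hydra/config.yaml), so the stock Auto classes cannot resolve this config.json on their own. A minimal loading sketch under that assumption; the register calls are standard transformers APIs, but the project's actual loading code is not part of this commit:

from transformers import AutoConfig, AutoModelForCausalLM

# Custom classes named in the Hydra config; requires the llmbc package.
from llmbc.model.llm.llama_lowdim_model import (
    LowdimLlamaConfig,
    LowdimLlamaForCausalLM,
)

AutoConfig.register("llama_lowdim", LowdimLlamaConfig)
AutoModelForCausalLM.register(LowdimLlamaConfig, LowdimLlamaForCausalLM)

model = AutoModelForCausalLM.from_pretrained(
    "checkpoint-10000"  # local path to the directory holding this config.json
)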
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/generation_config.json ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "pad_token_id": 2,
+  "transformers_version": "4.47.1"
+}
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/mlp_projector.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fec97dc7a846a281f4ce5bb337448923c1d54a75b1234106c8d36c0f886a6557
+size 1375360
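The three lines above are a Git LFS pointer, not the tensor data: the repository stores only the sha256 and byte count (1375360 bytes here, consistent with a small MLP projector), while the blob itself lives in LFS storage. A minimal sketch for fetching the resolved binary from the Hub (repo_id is a placeholder assumption):

from huggingface_hub import hf_hub_download

# Returns a local path to the real file behind the LFS pointer.
local_path = hf_hub_download(
    repo_id="CHYang25/llm-bc-runs",  # placeholder repo_id
    filename=(
        "2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/"
        "HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/"
        "checkpoint-10000/mlp_projector.bin"
    ),
)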
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:880513e1aa3f13a5b2633841520a80c64482e86383d52f0763f7447f4ee5e18b
+size 539464080
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:236ed925e2b6ed7cb702fc4c51ed82bd11b7f2a34a079f9df6c61eca221ac031
+size 1079035962
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f162cfccd332657eff88c58721a311084126a58db5c6cc17dab8d0226d4f3923
+size 14244
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1195503ba68fe33292031123d55654ba1e0620f0a11bf8baf047b89937ca0d58
+size 1064
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>"
+  ],
+  "bos_token": {
+    "content": "<|im_start|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/tokenizer_config.json ADDED
@@ -0,0 +1,155 @@
+{
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "3": {
+      "content": "<repo_name>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "4": {
+      "content": "<reponame>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "5": {
+      "content": "<file_sep>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "6": {
+      "content": "<filename>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "7": {
+      "content": "<gh_stars>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "8": {
+      "content": "<issue_start>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "9": {
+      "content": "<issue_comment>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "10": {
+      "content": "<issue_closed>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "11": {
+      "content": "<jupyter_start>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "12": {
+      "content": "<jupyter_text>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "13": {
+      "content": "<jupyter_code>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "14": {
+      "content": "<jupyter_output>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "15": {
+      "content": "<jupyter_script>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "16": {
+      "content": "<empty_output>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>"
+  ],
+  "bos_token": "<|im_start|>",
+  "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful AI assistant named SmolLM, trained by Hugging Face<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "extra_special_tokens": {},
+  "model_max_length": 8192,
+  "pad_token": "<|im_end|>",
+  "tokenizer_class": "GPT2Tokenizer",
+  "unk_token": "<|endoftext|>",
+  "vocab_size": 49152
+}
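The chat_template above is the stock SmolLM2 ChatML template, unchanged by fine-tuning. A minimal sketch of how it renders a prompt, using the base tokenizer from the Hub:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("HuggingFaceTB/SmolLM2-135M-Instruct")
prompt = tok.apply_chat_template(
    [{"role": "user", "content": "Push the red block to the green target."}],
    tokenize=False,
    add_generation_prompt=True,
)
# Yields an <|im_start|>system ... <|im_end|> preamble, the user turn, and an
# opening <|im_start|>assistant line, exactly as the template specifies.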
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
 
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:96f0b568082b110be8cfc44c770c6a0b2b3ef6bcede3aa45d67d8a160154fb2b
+size 6008
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-10000/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/config.json ADDED
@@ -0,0 +1,42 @@
+{
+  "_name_or_path": "HuggingFaceTB/SmolLM2-135M-Instruct",
+  "action_dim": 2,
+  "architectures": [
+    "LowdimLlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "head_dim": 64,
+  "hidden_act": "silu",
+  "hidden_size": 576,
+  "initializer_range": 0.041666666666666664,
+  "intermediate_size": 1536,
+  "is_llama_config": true,
+  "max_position_embeddings": 8192,
+  "mlp_bias": false,
+  "model_type": "llama_lowdim",
+  "num_attention_heads": 9,
+  "num_hidden_layers": 30,
+  "num_key_value_heads": 3,
+  "obs_dim": 16,
+  "pad_token_id": 2,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_interleaved": false,
+  "rope_scaling": null,
+  "rope_theta": 100000,
+  "tie_word_embeddings": true,
+  "torch_dtype": "float32",
+  "transformers.js_config": {
+    "kv_cache_dtype": {
+      "fp16": "float16",
+      "q4f16": "float16"
+    }
+  },
+  "transformers_version": "4.47.1",
+  "use_cache": false,
+  "use_joint_mlp_projector": true,
+  "vocab_size": 49152
+}
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/generation_config.json ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "pad_token_id": 2,
+  "transformers_version": "4.47.1"
+}
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/merges.txt ADDED
The diff for this file is too large to render. See raw diff
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/mlp_projector.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b81a2b0233e4ae59c361b2a2fcdd0227b3109b3a74e18cdbcaeefb4ffbec2a21
+size 1375360
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:312497b32285eb2d8bcfb65bbcc78d734798aa51481fc6ca76b52070271ff56a
+size 539464080
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fd5317424aaa644bc88506cc19186bc0dd76beab0074c9ebb482f2417736f3d7
+size 1079035962
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d22c9d0d37f6bb347715a7a47f78f972d197b1b34681a3d8dba82108d06bdbf5
+size 14244
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:733de46ab29e7c07e98947b224fedd9403bc7268bc7dcdfc733200510cdf33ee
+size 1064
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>"
+  ],
+  "bos_token": {
+    "content": "<|im_start|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/tokenizer_config.json ADDED
@@ -0,0 +1,155 @@
+{
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "3": {
+      "content": "<repo_name>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "4": {
+      "content": "<reponame>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "5": {
+      "content": "<file_sep>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "6": {
+      "content": "<filename>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "7": {
+      "content": "<gh_stars>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "8": {
+      "content": "<issue_start>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "9": {
+      "content": "<issue_comment>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "10": {
+      "content": "<issue_closed>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "11": {
+      "content": "<jupyter_start>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "12": {
+      "content": "<jupyter_text>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "13": {
+      "content": "<jupyter_code>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "14": {
+      "content": "<jupyter_output>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "15": {
+      "content": "<jupyter_script>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "16": {
+      "content": "<empty_output>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>"
+  ],
+  "bos_token": "<|im_start|>",
+  "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful AI assistant named SmolLM, trained by Hugging Face<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "extra_special_tokens": {},
+  "model_max_length": 8192,
+  "pad_token": "<|im_end|>",
+  "tokenizer_class": "GPT2Tokenizer",
+  "unk_token": "<|endoftext|>",
+  "vocab_size": 49152
+}
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:96f0b568082b110be8cfc44c770c6a0b2b3ef6bcede3aa45d67d8a160154fb2b
+size 6008
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-20000/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/config.json ADDED
@@ -0,0 +1,42 @@
+{
+  "_name_or_path": "HuggingFaceTB/SmolLM2-135M-Instruct",
+  "action_dim": 2,
+  "architectures": [
+    "LowdimLlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "head_dim": 64,
+  "hidden_act": "silu",
+  "hidden_size": 576,
+  "initializer_range": 0.041666666666666664,
+  "intermediate_size": 1536,
+  "is_llama_config": true,
+  "max_position_embeddings": 8192,
+  "mlp_bias": false,
+  "model_type": "llama_lowdim",
+  "num_attention_heads": 9,
+  "num_hidden_layers": 30,
+  "num_key_value_heads": 3,
+  "obs_dim": 16,
+  "pad_token_id": 2,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_interleaved": false,
+  "rope_scaling": null,
+  "rope_theta": 100000,
+  "tie_word_embeddings": true,
+  "torch_dtype": "float32",
+  "transformers.js_config": {
+    "kv_cache_dtype": {
+      "fp16": "float16",
+      "q4f16": "float16"
+    }
+  },
+  "transformers_version": "4.47.1",
+  "use_cache": false,
+  "use_joint_mlp_projector": true,
+  "vocab_size": 49152
+}
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/generation_config.json ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "pad_token_id": 2,
+  "transformers_version": "4.47.1"
+}
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/merges.txt ADDED
The diff for this file is too large to render. See raw diff
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/mlp_projector.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:938a8c4699b51c48d1b903147c5f8627a3b8f577d2ca58724c76fa51d7aed94a
+size 1375360
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b0737834dc70d70adcbee8db16268ddd50a060d62f59343e82bb5b9597916601
+size 539464080
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b7ceb6c85eb38e2b865d11b94d8b4e8f047129292f6ca5f76394886e7e2d542
+size 1079035962
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bf7284960bb838d31672e55ab87f261edac45779bc649c85742a4783e417ea3f
+size 14244
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:71790214c7f10757eb527830a3822ece0ac42bbc35e7f23e9af9b39490b85cd5
+size 1064
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/special_tokens_map.json ADDED
@@ -0,0 +1,34 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>"
+  ],
+  "bos_token": {
+    "content": "<|im_start|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/tokenizer_config.json ADDED
@@ -0,0 +1,155 @@
+{
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "3": {
+      "content": "<repo_name>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "4": {
+      "content": "<reponame>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "5": {
+      "content": "<file_sep>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "6": {
+      "content": "<filename>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "7": {
+      "content": "<gh_stars>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "8": {
+      "content": "<issue_start>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "9": {
+      "content": "<issue_comment>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "10": {
+      "content": "<issue_closed>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "11": {
+      "content": "<jupyter_start>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "12": {
+      "content": "<jupyter_text>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "13": {
+      "content": "<jupyter_code>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "14": {
+      "content": "<jupyter_output>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "15": {
+      "content": "<jupyter_script>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "16": {
+      "content": "<empty_output>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>"
+  ],
+  "bos_token": "<|im_start|>",
+  "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful AI assistant named SmolLM, trained by Hugging Face<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "extra_special_tokens": {},
+  "model_max_length": 8192,
+  "pad_token": "<|im_end|>",
+  "tokenizer_class": "GPT2Tokenizer",
+  "unk_token": "<|endoftext|>",
+  "vocab_size": 49152
+}
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:96f0b568082b110be8cfc44c770c6a0b2b3ef6bcede3aa45d67d8a160154fb2b
+size 6008
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/checkpoint-23350/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/config.json ADDED
@@ -0,0 +1,42 @@
+{
+  "_name_or_path": "HuggingFaceTB/SmolLM2-135M-Instruct",
+  "action_dim": 2,
+  "architectures": [
+    "LowdimLlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "head_dim": 64,
+  "hidden_act": "silu",
+  "hidden_size": 576,
+  "initializer_range": 0.041666666666666664,
+  "intermediate_size": 1536,
+  "is_llama_config": true,
+  "max_position_embeddings": 8192,
+  "mlp_bias": false,
+  "model_type": "llama_lowdim",
+  "num_attention_heads": 9,
+  "num_hidden_layers": 30,
+  "num_key_value_heads": 3,
+  "obs_dim": 16,
+  "pad_token_id": 2,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_interleaved": false,
+  "rope_scaling": null,
+  "rope_theta": 100000,
+  "tie_word_embeddings": true,
+  "torch_dtype": "float32",
+  "transformers.js_config": {
+    "kv_cache_dtype": {
+      "fp16": "float16",
+      "q4f16": "float16"
+    }
+  },
+  "transformers_version": "4.47.1",
+  "use_cache": true,
+  "use_joint_mlp_projector": true,
+  "vocab_size": 49152
+}
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/generation_config.json ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "pad_token_id": 2,
+  "transformers_version": "4.47.1"
+}
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b0737834dc70d70adcbee8db16268ddd50a060d62f59343e82bb5b9597916601
+size 539464080
2025.10.31/12.00.23_train_llm_lowdim_blockpush_lowdim_seed/HuggingFaceTB/SmolLM2-135M-Instruct-finetuned-blockpush_lowdim_seed/normalizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b5b39e9f8f8786a8f9a947ada48cf8ae554ee4a0b0d2102f892f7f4527c059f1
+size 4898
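Note that the final export's model.safetensors matches checkpoint-23350 byte for byte (same oid), so it is simply the last checkpoint, and its config.json flips use_cache back to true for inference. The adjacent normalizer.pt is only 4898 bytes; given the dataset flags use_manual_normalizer: false and dummy_normalizer: false in the Hydra config, it is presumably the fitted observation/action normalizer for this low-dim task. A minimal inspection sketch (the concrete class, if any, would come from the llmbc codebase):

import torch

# Unpickles the saved object; needs llmbc importable if it is a custom class
# rather than a plain tensor or state dict.
normalizer = torch.load("normalizer.pt", map_location="cpu", weights_only=False)
print(type(normalizer))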