Praha-Labs committed
Commit 4a55ae6 · verified · 1 parent: 5032c1e

Upload folder using huggingface_hub

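The commit message indicates the files were pushed with huggingface_hub's upload_folder. A minimal sketch of that call, with the repo id left as a placeholder since it is not shown on this page:

from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="path/to/training/output",  # local folder to push (assumption)
    repo_id="Praha-Labs/<repo-name>",       # placeholder; the actual repo id is not shown here
    commit_message="Upload folder using huggingface_hub",
)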
.gitattributes CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ tokenizer_config.json filter=lfs diff=lfs merge=lfs -text
chat_template.jinja ADDED
@@ -0,0 +1,37 @@
+ {{- bos_token -}}
+ {%- set system_prompt = "" -%}
+ {%- set ns = namespace(system_prompt="") -%}
+ {%- if messages[0]["role"] == "system" -%}
+ {%- set ns.system_prompt = messages[0]["content"] -%}
+ {%- set messages = messages[1:] -%}
+ {%- endif -%}
+ {%- if tools -%}
+ {%- set ns.system_prompt = ns.system_prompt + ("\n" if ns.system_prompt else "") + "List of tools: <|tool_list_start|>[" -%}
+ {%- for tool in tools -%}
+ {%- if tool is not string -%}
+ {%- set tool = tool | tojson -%}
+ {%- endif -%}
+ {%- set ns.system_prompt = ns.system_prompt + tool -%}
+ {%- if not loop.last -%}
+ {%- set ns.system_prompt = ns.system_prompt + ", " -%}
+ {%- endif -%}
+ {%- endfor -%}
+ {%- set ns.system_prompt = ns.system_prompt + "]<|tool_list_end|>" -%}
+ {%- endif -%}
+ {%- if ns.system_prompt -%}
+ {{- "<|im_start|>system\n" + ns.system_prompt + "<|im_end|>\n" -}}
+ {%- endif -%}
+ {%- for message in messages -%}
+ {{- "<|im_start|>" + message["role"] + "\n" -}}
+ {%- set content = message["content"] -%}
+ {%- if content is not string -%}
+ {%- set content = content | tojson -%}
+ {%- endif -%}
+ {%- if message["role"] == "tool" -%}
+ {%- set content = "<|tool_response_start|>" + content + "<|tool_response_end|>" -%}
+ {%- endif -%}
+ {{- content + "<|im_end|>\n" -}}
+ {%- endfor -%}
+ {%- if add_generation_prompt -%}
+ {{- "<|im_start|>assistant\n" -}}
+ {%- endif -%}
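The template is ChatML-style: an optional system turn (with any tools serialized between <|tool_list_start|> and <|tool_list_end|>), one <|im_start|>...<|im_end|> block per message, tool responses wrapped in <|tool_response_start|>/<|tool_response_end|>, and an optional trailing assistant header. A minimal sketch of rendering it through transformers, assuming a local checkout of this repo at ".":

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")  # repo root holds tokenizer.json and this template

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is the capital of Japan?"},
]

# add_generation_prompt=True emits the trailing "<|im_start|>assistant\n"
# from the last branch of the template above.
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# Expected shape, given the template and special_tokens_map.json below:
# <|startoftext|><|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# What is the capital of Japan?<|im_end|>
# <|im_start|>assistant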
checkpoint-10000/config.json ADDED
@@ -0,0 +1,57 @@
+ {
+   "architectures": [
+     "Lfm2ForCausalLM"
+   ],
+   "block_auto_adjust_ff_dim": true,
+   "block_dim": 1024,
+   "block_ff_dim": 6656,
+   "block_ffn_dim_multiplier": 1.0,
+   "block_mlp_init_scale": 1.0,
+   "block_multiple_of": 256,
+   "block_norm_eps": 1e-05,
+   "block_out_init_scale": 1.0,
+   "block_use_swiglu": true,
+   "block_use_xavier_init": true,
+   "bos_token_id": 1,
+   "conv_L_cache": 3,
+   "conv_bias": false,
+   "conv_dim": 1024,
+   "conv_dim_out": 1024,
+   "conv_use_xavier_init": true,
+   "dtype": "bfloat16",
+   "eos_token_id": 7,
+   "hidden_size": 1024,
+   "initializer_range": 0.02,
+   "intermediate_size": 6656,
+   "layer_types": [
+     "conv",
+     "conv",
+     "full_attention",
+     "conv",
+     "conv",
+     "full_attention",
+     "conv",
+     "conv",
+     "full_attention",
+     "conv",
+     "full_attention",
+     "conv",
+     "full_attention",
+     "conv",
+     "full_attention",
+     "conv"
+   ],
+   "max_position_embeddings": 128000,
+   "model_type": "lfm2",
+   "norm_eps": 1e-05,
+   "num_attention_heads": 16,
+   "num_heads": 16,
+   "num_hidden_layers": 16,
+   "num_key_value_heads": 8,
+   "pad_token_id": 0,
+   "rope_theta": 1000000.0,
+   "transformers_version": "4.57.3",
+   "use_cache": true,
+   "use_pos_enc": true,
+   "vocab_size": 129947
+ }
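The config describes a 16-layer LFM2 hybrid: ten short-convolution blocks interleaved with six full-attention blocks (grouped-query attention, 16 heads over 8 KV heads), hidden size 1024 and a 128k context window. A quick sanity check of the layout, assuming a transformers version with LFM2 support (the config pins 4.57.3) and a local checkout:

from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("checkpoint-10000")
assert cfg.model_type == "lfm2"
assert cfg.num_hidden_layers == len(cfg.layer_types) == 16
# Hybrid stack: short-conv blocks interleaved with full attention.
print(cfg.layer_types.count("conv"), cfg.layer_types.count("full_attention"))  # 10 6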
checkpoint-10000/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 7,
+   "pad_token_id": 0,
+   "transformers_version": "4.57.3"
+ }
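generation_config.json only pins the default token ids, so generate() stops on token id 7 out of the box. A hedged end-to-end sketch, assuming the tokenizer files at the repo root and the weights in the checkpoint folder:

from transformers import AutoModelForCausalLM, AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")                         # tokenizer lives at the repo root
model = AutoModelForCausalLM.from_pretrained("checkpoint-10000")

inputs = tok("Hello", return_tensors="pt")
out = model.generate(**inputs, max_new_tokens=32)                # stops at eos_token_id=7
print(tok.decode(out[0], skip_special_tokens=True))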
checkpoint-10000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8172603440f84439a269f53bae1fd4e809aad932a12652c6bb88968db08e3a63
+ size 840898192
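The three-line body is a standard git-lfs v1 pointer: the real ~840 MB safetensors blob is stored out of band and addressed by its SHA-256. A small sketch for verifying a downloaded blob against its pointer file (paths are assumptions for a local checkout):

import hashlib

def check_lfs_object(pointer_path: str, object_path: str) -> bool:
    # Parse the "key value" lines of the pointer (version / oid / size).
    fields = dict(line.split(" ", 1) for line in open(pointer_path).read().splitlines())
    expected = fields["oid"].strip().removeprefix("sha256:")
    h = hashlib.sha256()
    with open(object_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)
    return h.hexdigest() == expected

# e.g. check_lfs_object("pointer.txt", "checkpoint-10000/model.safetensors")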
checkpoint-10000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de2ed7e3f2ea20d935ab290bd7c6af16c99dc939e35c8d8e270162f64e2e5bd0
+ size 1681890507
checkpoint-10000/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2534e434cd5abbb8f7668d3eab0549db0ef95d6a797a3efa86b712e8e32266a7
+ size 14645
checkpoint-10000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:33e436fff42cb8427bf9ff6dbdb5a26237eed273ce056dc15c9f59abea60c69f
+ size 1465
checkpoint-10000/trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoint-10000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7738dab9a55337c2486f90bea9bcf7554dd4b25a9f1361451389418dd671925
+ size 5841
checkpoint-9500/config.json ADDED
@@ -0,0 +1,57 @@
+ {
+   "architectures": [
+     "Lfm2ForCausalLM"
+   ],
+   "block_auto_adjust_ff_dim": true,
+   "block_dim": 1024,
+   "block_ff_dim": 6656,
+   "block_ffn_dim_multiplier": 1.0,
+   "block_mlp_init_scale": 1.0,
+   "block_multiple_of": 256,
+   "block_norm_eps": 1e-05,
+   "block_out_init_scale": 1.0,
+   "block_use_swiglu": true,
+   "block_use_xavier_init": true,
+   "bos_token_id": 1,
+   "conv_L_cache": 3,
+   "conv_bias": false,
+   "conv_dim": 1024,
+   "conv_dim_out": 1024,
+   "conv_use_xavier_init": true,
+   "dtype": "bfloat16",
+   "eos_token_id": 7,
+   "hidden_size": 1024,
+   "initializer_range": 0.02,
+   "intermediate_size": 6656,
+   "layer_types": [
+     "conv",
+     "conv",
+     "full_attention",
+     "conv",
+     "conv",
+     "full_attention",
+     "conv",
+     "conv",
+     "full_attention",
+     "conv",
+     "full_attention",
+     "conv",
+     "full_attention",
+     "conv",
+     "full_attention",
+     "conv"
+   ],
+   "max_position_embeddings": 128000,
+   "model_type": "lfm2",
+   "norm_eps": 1e-05,
+   "num_attention_heads": 16,
+   "num_heads": 16,
+   "num_hidden_layers": 16,
+   "num_key_value_heads": 8,
+   "pad_token_id": 0,
+   "rope_theta": 1000000.0,
+   "transformers_version": "4.57.3",
+   "use_cache": true,
+   "use_pos_enc": true,
+   "vocab_size": 129947
+ }
checkpoint-9500/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 7,
+   "pad_token_id": 0,
+   "transformers_version": "4.57.3"
+ }
checkpoint-9500/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:38769915dc876c52c32de399c63900d4455a66d632bc3225e9f5518668fc310d
+ size 840898192
checkpoint-9500/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5662edcff720fd75c9940fb8d593f868c415ba0cbd7c060aedfccdd57f1597a0
+ size 1681890507
checkpoint-9500/rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2534e434cd5abbb8f7668d3eab0549db0ef95d6a797a3efa86b712e8e32266a7
+ size 14645
checkpoint-9500/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f173569886c9f59d978c3f98cbd703d82bb7e050324bceb2cc5d27f026d72af6
+ size 1465
checkpoint-9500/trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
 
checkpoint-9500/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7738dab9a55337c2486f90bea9bcf7554dd4b25a9f1361451389418dd671925
+ size 5841
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "bos_token": {
+     "content": "<|startoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|pad|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
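Together with config.json this pins the mapping <|startoftext|> -> bos (id 1), <|im_end|> -> eos (id 7), <|pad|> -> pad (id 0). A quick check after loading the tokenizer from the repo root (the "." path is an assumption for a local checkout):

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")
print(tok.bos_token, tok.bos_token_id)  # expected "<|startoftext|>" 1, per config.json
print(tok.eos_token, tok.eos_token_id)  # expected "<|im_end|>" 7
print(tok.pad_token, tok.pad_token_id)  # expected "<|pad|>" 0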
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f59d30520984a72eead3df16e8cdd1a426d322c215a84d5e66615d26956c7298
+ size 17598475
tokenizer_config.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c4d155d79b6dbb1dba7473f5d6aaed4de56fc15c0728e1a969b8ea7dc8d430ed
+ size 12302156