diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000000000000000000000000000000000000..a6344aac8c09253b3b630fb776ae94478aa0275b
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,35 @@
+*.7z filter=lfs diff=lfs merge=lfs -text
+*.arrow filter=lfs diff=lfs merge=lfs -text
+*.bin filter=lfs diff=lfs merge=lfs -text
+*.bz2 filter=lfs diff=lfs merge=lfs -text
+*.ckpt filter=lfs diff=lfs merge=lfs -text
+*.ftz filter=lfs diff=lfs merge=lfs -text
+*.gz filter=lfs diff=lfs merge=lfs -text
+*.h5 filter=lfs diff=lfs merge=lfs -text
+*.joblib filter=lfs diff=lfs merge=lfs -text
+*.lfs.* filter=lfs diff=lfs merge=lfs -text
+*.mlmodel filter=lfs diff=lfs merge=lfs -text
+*.model filter=lfs diff=lfs merge=lfs -text
+*.msgpack filter=lfs diff=lfs merge=lfs -text
+*.npy filter=lfs diff=lfs merge=lfs -text
+*.npz filter=lfs diff=lfs merge=lfs -text
+*.onnx filter=lfs diff=lfs merge=lfs -text
+*.ot filter=lfs diff=lfs merge=lfs -text
+*.parquet filter=lfs diff=lfs merge=lfs -text
+*.pb filter=lfs diff=lfs merge=lfs -text
+*.pickle filter=lfs diff=lfs merge=lfs -text
+*.pkl filter=lfs diff=lfs merge=lfs -text
+*.pt filter=lfs diff=lfs merge=lfs -text
+*.pth filter=lfs diff=lfs merge=lfs -text
+*.rar filter=lfs diff=lfs merge=lfs -text
+*.safetensors filter=lfs diff=lfs merge=lfs -text
+saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+*.tar.* filter=lfs diff=lfs merge=lfs -text
+*.tar filter=lfs diff=lfs merge=lfs -text
+*.tflite filter=lfs diff=lfs merge=lfs -text
+*.tgz filter=lfs diff=lfs merge=lfs -text
+*.wasm filter=lfs diff=lfs merge=lfs -text
+*.xz filter=lfs diff=lfs merge=lfs -text
+*.zip filter=lfs diff=lfs merge=lfs -text
+*.zst filter=lfs diff=lfs merge=lfs -text
+*tfevents* filter=lfs diff=lfs merge=lfs -text
diff --git a/config.json b/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..9a944e79a42c561418edac0c960a5ca33a59ef29
--- /dev/null
+++ b/config.json
@@ -0,0 +1,39 @@
+{
+ "_name_or_path": "meta-llama/Meta-Llama-3.1-70B-Instruct",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 128000,
+ "eos_token_id": [
+ 128001,
+ 128008,
+ 128009
+ ],
+ "hidden_act": "silu",
+ "hidden_size": 8192,
+ "initializer_range": 0.02,
+ "intermediate_size": 28672,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 64,
+ "num_hidden_layers": 80,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float32",
+ "transformers_version": "4.44.0",
+ "use_cache": true,
+ "vocab_size": 128256
+}
diff --git a/generation_config.json b/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f667ffae5d0a5bc5110a68b1dab59831964eabb5
--- /dev/null
+++ b/generation_config.json
@@ -0,0 +1,12 @@
+{
+ "bos_token_id": 128000,
+ "do_sample": true,
+ "eos_token_id": [
+ 128001,
+ 128008,
+ 128009
+ ],
+ "temperature": 0.7,
+ "top_p": 0.95,
+ "transformers_version": "4.44.0"
+}
diff --git a/model-00003-of-00162.safetensors b/model-00003-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e34b3cd2359d0baf80c96047794e0b6e9400e53e
--- /dev/null
+++ b/model-00003-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:44ce6875e055a1d3fe1485f6be750a53973f2b662e37c7453cb9383a915d1586
+size 1879114208
diff --git a/model-00004-of-00162.safetensors b/model-00004-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7c7eab5b0fd2338fc46d26728c52b122b32caeaf
--- /dev/null
+++ b/model-00004-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5f3a98e947923c7a784329410d89b7eac3b592ac668e755684fea98e23136aa5
+size 1543504480
diff --git a/model-00006-of-00162.safetensors b/model-00006-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..fb1840d8dc720e92c857d346f53c2204c4dc5ee0
--- /dev/null
+++ b/model-00006-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:69ebc96ca1314e1ea3b3228d2a50e7dddeed36d4e1ef4eba52ea0aeaac4e4147
+size 1543504480
diff --git a/model-00007-of-00162.safetensors b/model-00007-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..379cbc8de76be1786cd249283e54edf83ad839dc
--- /dev/null
+++ b/model-00007-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:766a8c842b953e0bd7c8fc605b1397265823b177d26ce9bdbf2604d1ad746237
+size 1879114208
diff --git a/model-00012-of-00162.safetensors b/model-00012-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2205ab104991933959b786964c42a54e377bf60f
--- /dev/null
+++ b/model-00012-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3df622381af2eaeb70965a909916053a6c90ea342ae86e08497c9f0f9c547a9f
+size 1543504480
diff --git a/model-00013-of-00162.safetensors b/model-00013-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..a51d30ef48c9bc009d24d9dedad27a31038d7c30
--- /dev/null
+++ b/model-00013-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8bf702d935292db235264ae4f3f78c98053baac2ce6eeef0b66b846a3015344f
+size 1879114208
diff --git a/model-00014-of-00162.safetensors b/model-00014-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e0009284bd0310f619deacffa72a51cc1d2b7770
--- /dev/null
+++ b/model-00014-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7bb9cd37e7bc87565d4147eda9f83b6e158483b81b4bcd096a20b938c7943de8
+size 1543504480
diff --git a/model-00015-of-00162.safetensors b/model-00015-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e1416d510d8878d37eaf0ed6a42bffbd60e32ec8
--- /dev/null
+++ b/model-00015-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bfa9e6a3f2138b71b57c993fab2acff3d48ec282db8cb940403294b418230a75
+size 1879114208
diff --git a/model-00016-of-00162.safetensors b/model-00016-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..451103d8a762cafcd6c77b82c39ba28610c61ac5
--- /dev/null
+++ b/model-00016-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bde36db20468fbf887f672fa855b4800b6066daae217571d316026381b061169
+size 1543504480
diff --git a/model-00017-of-00162.safetensors b/model-00017-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..08ff02cd944013bba80d98468ae112377d9e6b4f
--- /dev/null
+++ b/model-00017-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d4091dd2578fa59e2e19551dc9495eb91996a11d8d6960f14e2967b83c53898a
+size 1879114208
diff --git a/model-00018-of-00162.safetensors b/model-00018-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..cd54b73daee15246c481dc8dbd20b7b701c47cca
--- /dev/null
+++ b/model-00018-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6df521b397f7f9256da3d5956939ec6a584438d32de7da93f18907da680b8339
+size 1543504480
diff --git a/model-00019-of-00162.safetensors b/model-00019-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b918a51d36fedb85c936e983c06eed5824c84bc7
--- /dev/null
+++ b/model-00019-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:477375e767804aa122eac211b8c11525dde58b17041ac14e8972468dbfa44121
+size 1879114208
diff --git a/model-00022-of-00162.safetensors b/model-00022-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..38655660cf38dd624a97478cd884ab0346bab0f3
--- /dev/null
+++ b/model-00022-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40dea034cccba996c020c0a2d22e4685b93675208af91d11ee65bfc0be5b48c5
+size 1543504488
diff --git a/model-00025-of-00162.safetensors b/model-00025-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..4275e0c35e88f9438a6bd9fb86f89fc8024e7a59
--- /dev/null
+++ b/model-00025-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f1f5e3b8831d43170f0f5d1fbf988dda8efad77bd6509af7cf9499a38c006b0b
+size 1879114208
diff --git a/model-00027-of-00162.safetensors b/model-00027-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..1658035b43895dbaee207103adb3f83281a412a8
--- /dev/null
+++ b/model-00027-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4db45a3256c99cdcfe006001eacafd0e0c1a6ba9bbc48ea975fb9fd7a7471cbf
+size 1879114208
diff --git a/model-00028-of-00162.safetensors b/model-00028-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..1105ca99febf5427ab86595a8b5533423ecbb102
--- /dev/null
+++ b/model-00028-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:096dd9d39a8f56f3545399d58394b0079c938930409135e85a71c16c429c1e8a
+size 1543504488
diff --git a/model-00030-of-00162.safetensors b/model-00030-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..43d26e39024505c19c5c65dffec19ccfa3fc0d78
--- /dev/null
+++ b/model-00030-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:90f99159cdf690aa8e8a388536a583c439afea525aea1ef73b7317fae0f1af53
+size 1543504488
diff --git a/model-00035-of-00162.safetensors b/model-00035-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b0d404b4bb5f1dea64801e035a2b8e8b1f528812
--- /dev/null
+++ b/model-00035-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a781cc7ef0ab3d5fcb1380a948f0570ba0fe9984ed36cbae15f67141f76868f6
+size 1879114208
diff --git a/model-00036-of-00162.safetensors b/model-00036-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e86f8821ba37fe3c24cc072b6b7470315340e175
--- /dev/null
+++ b/model-00036-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:39b5356f7477e8ee76b4c1431d0f9f431e6e3c54f66dc0ba7ee7d30a8feea116
+size 1543504488
diff --git a/model-00039-of-00162.safetensors b/model-00039-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9d2a63dc7316572cb0e2d0887ce59ca4c39d8365
--- /dev/null
+++ b/model-00039-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ea6002e4d8a68211434d1a72376a4ef9597b6cdf03ed792d5a505d7ad63d4e63
+size 1879114208
diff --git a/model-00041-of-00162.safetensors b/model-00041-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..14e8555f83717b8318614e1f70f2d88bf9c9fbcc
--- /dev/null
+++ b/model-00041-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1e159bfcd8cfd7abf33fe312781dc1047b4b99158eea31eab5a1adb4c1662450
+size 1879114208
diff --git a/model-00043-of-00162.safetensors b/model-00043-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..47ce72403cdff33e69f9cdb1e0809bb0118ee8cf
--- /dev/null
+++ b/model-00043-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0d5bef48b1b28ccdca2d740d2b7563455d0c591592e0e5f6c5716c139ed69790
+size 1879114208
diff --git a/model-00045-of-00162.safetensors b/model-00045-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b5bec37259f3c3324693243e9176cc3e8eb90db1
--- /dev/null
+++ b/model-00045-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0351bd91c979c5a979d1878e40e11639fd3819d7fe808e8a18cebf5b671375dc
+size 1879114208
diff --git a/model-00046-of-00162.safetensors b/model-00046-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f4441b69705620252a315d93f3eaa6d997723361
--- /dev/null
+++ b/model-00046-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c7ae48d761ca02225dd062c21d1664e7fa73cd6f17bddd017f434e017d06b067
+size 1543504488
diff --git a/model-00048-of-00162.safetensors b/model-00048-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..a3ae705159f00c919eee8cce0ed17e892ebeafab
--- /dev/null
+++ b/model-00048-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:69e4369cfda195e65c5cf773f5778eab028ff1e1384ae1a4d6f308245d7a4654
+size 1543504488
diff --git a/model-00051-of-00162.safetensors b/model-00051-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..6e4039d7298e7031e03ce4149fc01d2975730179
--- /dev/null
+++ b/model-00051-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ca53d034340ebeaf12d67017efcf5b6c71f41c291a946833bc8838a333ecd5a2
+size 1879114208
diff --git a/model-00052-of-00162.safetensors b/model-00052-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..0e3aad61517e59f55b2742227ebfa81957bc854e
--- /dev/null
+++ b/model-00052-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0c46ce6b01fe188f858c90ecd4d19b3caa131ae7cd10e20f386242f4f53ee4fd
+size 1543504488
diff --git a/model-00053-of-00162.safetensors b/model-00053-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..097b5b4aec1b282f155ce38a24a30b133e46d242
--- /dev/null
+++ b/model-00053-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:369048bf8b0ef6e1596e4e11f01835095b993c811a53ba18f36387980535eac2
+size 1879114208
diff --git a/model-00054-of-00162.safetensors b/model-00054-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..772c268e1f5113c6cc2322cbada2c810ed2cb601
--- /dev/null
+++ b/model-00054-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:32b829e2fa3da2b407a0fc99f74be93bf46a170743eb12a8120a9bc53729ca81
+size 1543504488
diff --git a/model-00056-of-00162.safetensors b/model-00056-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e182bc598004b4dee303255583e0137f97800409
--- /dev/null
+++ b/model-00056-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ecf07611c113bbf979960f2cb688a961b4dbe5dc86c57d2359650d83067ef288
+size 1543504488
diff --git a/model-00057-of-00162.safetensors b/model-00057-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..6712a5f63970cc064aa3510f8480d12573e3d031
--- /dev/null
+++ b/model-00057-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c8698532687a0d5919681dcd59dc2cb8846804e5fc0ee711befa5f159cb23f01
+size 1879114208
diff --git a/model-00058-of-00162.safetensors b/model-00058-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b48a1c58eb2fe5ec18158fe3332cadd14551ebea
--- /dev/null
+++ b/model-00058-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b2d5fc525dde11e44a8ebc80f1a006be61536eff3b56fdcf8c90c6b0d9f25b58
+size 1543504488
diff --git a/model-00059-of-00162.safetensors b/model-00059-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9832c1446e2ea4bb4d1c410c527eab58128e416d
--- /dev/null
+++ b/model-00059-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d58355fc0ff53771bd8be9626590ef008a894165436b8af27863910922265de1
+size 1879114208
diff --git a/model-00061-of-00162.safetensors b/model-00061-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..ae87281120b1b4a6b20fd0fd4a4a2e85eab160ac
--- /dev/null
+++ b/model-00061-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8958a93cf16ad284f559004b1709e1da0fefefabdf974b50f6695ed8fedf0ccc
+size 1879114208
diff --git a/model-00062-of-00162.safetensors b/model-00062-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7581fe904b8fe9a106a6043c83e26490456e540b
--- /dev/null
+++ b/model-00062-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14969a1180727583b7eb1013e55563b7ad7065a0c69df186c4958144583c2d98
+size 1543504488
diff --git a/model-00064-of-00162.safetensors b/model-00064-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..a2db5365d6421125d557e47c38fdb852748cc0ec
--- /dev/null
+++ b/model-00064-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:311db4dfde5098de92e11dee91075807a03c02843e95a651e52813f69e622129
+size 1543504488
diff --git a/model-00065-of-00162.safetensors b/model-00065-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..c10c73675dd9b85eaf2e08ab594036f224fbed0b
--- /dev/null
+++ b/model-00065-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ab9f67c753c40223c6cc32aa006a016a0107cb03d5a11ea25b26154e7855d446
+size 1879114208
diff --git a/model-00066-of-00162.safetensors b/model-00066-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..cb4a799e2287957dbd3cd93ffab4318a66bf0096
--- /dev/null
+++ b/model-00066-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:db7b7be268652ef1ed50c8f842d27f4c797ef59ad40461272486026a5b89a681
+size 1543504488
diff --git a/model-00068-of-00162.safetensors b/model-00068-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d37ceacd8475578dd2aa542acc2b7db725710607
--- /dev/null
+++ b/model-00068-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:130260d35ca06d58a6b69cdd01ed3ee78a337af11885ae8bcdb4a1cf6052273e
+size 1543504488
diff --git a/model-00070-of-00162.safetensors b/model-00070-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..79ab1c2cfd785d98a407358a97828a0c3fc7f17b
--- /dev/null
+++ b/model-00070-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a649f30fd533fc9b9a432f017973682740f0bd25702764f359b7881f24becab8
+size 1543504488
diff --git a/model-00072-of-00162.safetensors b/model-00072-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..ca49c8b602cc4b03df39c14e8b545858c5553e28
--- /dev/null
+++ b/model-00072-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:84ff60e17e935532a826bf89bd976d0595545ff330ab9341e0d3f81e1618c541
+size 1543504488
diff --git a/model-00074-of-00162.safetensors b/model-00074-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e3e9f6caccc50e51562cb6b8f9c3692e5067e376
--- /dev/null
+++ b/model-00074-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:be53131fe155f5b6adf1d28b4773baf02ffb884c5ac2ab55058bcc94d9006668
+size 1543504488
diff --git a/model-00076-of-00162.safetensors b/model-00076-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..a224075e86e067a0d2ef06c8490e41a0c9e06f92
--- /dev/null
+++ b/model-00076-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8e52f072cc1038f7643de98a59360afefb1d5c1f13dcbd65bab179bb43bbaaa4
+size 1543504488
diff --git a/model-00077-of-00162.safetensors b/model-00077-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f4b85d6c6812d151af79ac3f902c5e61076e8aad
--- /dev/null
+++ b/model-00077-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a69161acc7964049bf5e91df1d734aef2380a812f8911f3cf6d54bc9f5c04f59
+size 1879114208
diff --git a/model-00080-of-00162.safetensors b/model-00080-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..6c171607b967cf7dc6c1b026dd54282d58298f00
--- /dev/null
+++ b/model-00080-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f9c14f3e927112dff49ec821a912a30fbcdda5c8f59a02445b15f1a9a948049f
+size 1543504488
diff --git a/model-00082-of-00162.safetensors b/model-00082-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..a88d7178db6221703b2467369524f1101850243e
--- /dev/null
+++ b/model-00082-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a0488e881985b942705561b4a3ccc43be61e8c4b7e1496a11c955a2de886f699
+size 1543504488
diff --git a/model-00083-of-00162.safetensors b/model-00083-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9ac423c8cf19c2e2eda84a038ff38b178a75646a
--- /dev/null
+++ b/model-00083-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bee4788577e142a785537ac156fa55df576e3b185b0d3209a16ff780b5018b40
+size 1879114208
diff --git a/model-00084-of-00162.safetensors b/model-00084-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..94443b04933087ecab964f12d6d3c55c78548ae3
--- /dev/null
+++ b/model-00084-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:006b23db27c1d51edb867f237b179c124fc2730d20e8016ef8e8507a8f70909b
+size 1543504488
diff --git a/model-00085-of-00162.safetensors b/model-00085-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..cf99c219db9208925198f223e67c6cdaa7280761
--- /dev/null
+++ b/model-00085-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c2021ec33c615a610a5269d1b58170ae2d0b92a71c8fc2e3c88ab0ffd69b6501
+size 1879114208
diff --git a/model-00087-of-00162.safetensors b/model-00087-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..168d3e43cd42e5a13126e9842cbf76628d52f61f
--- /dev/null
+++ b/model-00087-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4cdf8a79f610d63a56de2f6d28e6d99b2a41ec4e0c35b88b84e52a23db13b86e
+size 1879114208
diff --git a/model-00089-of-00162.safetensors b/model-00089-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..c43e688ef2cdac3ab2150cfc2fff407837cf0da3
--- /dev/null
+++ b/model-00089-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:09d81765096aec67cf27082603d05e9825794621100a7d0add841f0fcd01252e
+size 1879114208
diff --git a/model-00090-of-00162.safetensors b/model-00090-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..18f64f626281666f916ec8815051a348ce191c17
--- /dev/null
+++ b/model-00090-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a298fe6606dd80aa30b97374e2d9caaba4a5d969837e2d1c8a8d62b224625116
+size 1543504488
diff --git a/model-00091-of-00162.safetensors b/model-00091-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..74d1381ef2668a702c24737266f1b19ce21ec133
--- /dev/null
+++ b/model-00091-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4f62f72a35ddd85b496fecf750c76f6b8a3326bc8e5e1577b6525d01c8b4604c
+size 1879114208
diff --git a/model-00098-of-00162.safetensors b/model-00098-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..8b6c29ef4bcb7cca1f7f679e0748b7242aadd797
--- /dev/null
+++ b/model-00098-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fc5063dfb57fc7260bbf4623e1cd920948495dc957a78af9eed086429355992b
+size 1543504488
diff --git a/model-00099-of-00162.safetensors b/model-00099-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..16482dd4c2dbc4ba9704897aed47c426390b3ea7
--- /dev/null
+++ b/model-00099-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:83f746227391e256e1116e897b988e018c117cbf8ac52c963793d7448fc94aed
+size 1879114208
diff --git a/model-00100-of-00162.safetensors b/model-00100-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2ba08339524c6f382708f1f91aa0751f3cc1fbdf
--- /dev/null
+++ b/model-00100-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e5812c1b77a537c3bf7c5d798fe4c0791438b8bb321767249793bca0956c34f1
+size 1543504488
diff --git a/model-00101-of-00162.safetensors b/model-00101-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..bae0ce0136671fe882d61f3ff6b53caad8fbbd71
--- /dev/null
+++ b/model-00101-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d3d0c0578f59b2e1354bac1333d6c699df683dbede2cbb43e827e3de9fe2b4b5
+size 1879114208
diff --git a/model-00102-of-00162.safetensors b/model-00102-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..5f375348f14b3a1bbde7ea0085ad9eab299574f6
--- /dev/null
+++ b/model-00102-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:98f4d0a9aed00e82a29aee965c6a3a45b43869035cdbab1603a549c54e4ffc8f
+size 1543504488
diff --git a/model-00103-of-00162.safetensors b/model-00103-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e06374b98eb01e6ce74d406a9afb0424c56738ca
--- /dev/null
+++ b/model-00103-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1828269ab065765804bab5299a3b59a7d43eee93e750f81cec413d368d984267
+size 1879114208
diff --git a/model-00106-of-00162.safetensors b/model-00106-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..17286409555f46ee24b85c3b5c8a43056a425f6c
--- /dev/null
+++ b/model-00106-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cb00ed30c608035320c30a8238b4e6943d8fc7bc2abe30d402b31cd540236d57
+size 1543504488
diff --git a/model-00109-of-00162.safetensors b/model-00109-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9e628c2637bdd80cb19264338d4e4145cc19e1bf
--- /dev/null
+++ b/model-00109-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:facf62c5b5c380dc44d61f5b8aff9aa4e519b048efe42b3b947dbed7b319bc1a
+size 1879114208
diff --git a/model-00110-of-00162.safetensors b/model-00110-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..ae517ff3aab3c9cd87b028accf08f6ae7fd5947c
--- /dev/null
+++ b/model-00110-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1431442e066fb232252555347c1c4cd14b109c4f2aef63a41ef8e2ba535d5977
+size 1543504488
diff --git a/model-00112-of-00162.safetensors b/model-00112-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..842532b806793f25dc7770bc453cb006753b61b2
--- /dev/null
+++ b/model-00112-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3e3cc34ecb725d3e53aead557120d0ac88da94f79e96f12fde9dd5f27b34cc5a
+size 1543504488
diff --git a/model-00114-of-00162.safetensors b/model-00114-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..52c78a21c46899f4c078dd6cb2b82b1fddd5e8c6
--- /dev/null
+++ b/model-00114-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c908fe1fbdaa14b0acf5a4c390079ac05d86ff2956f8b82dd4654aeda138f16c
+size 1543504488
diff --git a/model-00117-of-00162.safetensors b/model-00117-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..ccb3a28d77f445c4384bcb5eb80fe64d3c832a45
--- /dev/null
+++ b/model-00117-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b06fbff515f9c6ff6a4a714a52cc6976a82b93a04a98f650a478df5504465f34
+size 1879114208
diff --git a/model-00118-of-00162.safetensors b/model-00118-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..5a892eb74512b75539018e9fc2a0fedf1d3d5929
--- /dev/null
+++ b/model-00118-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1dd357f1a647ddf5b306d955dceec031da73787ba08014a7577d5c1dd86fad1c
+size 1543504488
diff --git a/model-00120-of-00162.safetensors b/model-00120-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9cca19a51ce9bd2727bea214b4b02fd33f87c71d
--- /dev/null
+++ b/model-00120-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1b501bd5f034c9af1cbff096e0036e246d66c42520a509aed6cf792339b2495c
+size 1543504488
diff --git a/model-00121-of-00162.safetensors b/model-00121-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..0872e4de0f1260ff4f0d75c6fdd9feb4f5522a2a
--- /dev/null
+++ b/model-00121-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ecd0f3322aa147b7936e34ab2dde44698716b3186dd4818a7be0da80d18e6488
+size 1879114208
diff --git a/model-00122-of-00162.safetensors b/model-00122-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e3830f520e6bc3655aa70617d322111ba0a43199
--- /dev/null
+++ b/model-00122-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:943e5693f2f476656ad70134df1173a2350633eb2a21a754d5cd905c5bf57e94
+size 1543504488
diff --git a/model-00124-of-00162.safetensors b/model-00124-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9d98716b19d76c0b948453e3b1d95ccf13f4cb60
--- /dev/null
+++ b/model-00124-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:04604658ca7d8160a6ae846f147e314a9e29d49895e447d6987dc060e33df2a1
+size 1543504488
diff --git a/model-00127-of-00162.safetensors b/model-00127-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..422949aa49fff92e6dba10e584286931e96f9f3d
--- /dev/null
+++ b/model-00127-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5656fe83a4813980005e996defc2498f2baeb4161299067cad0bf1b94b2eea58
+size 1879114208
diff --git a/model-00128-of-00162.safetensors b/model-00128-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..305aa049520509a3ac2b45a0e394260ba4ac596a
--- /dev/null
+++ b/model-00128-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:63fe5db5f2cd78f4811749785cd69ec0c936263598912a94a27c9787be39ca7d
+size 1543504488
diff --git a/model-00130-of-00162.safetensors b/model-00130-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..38ba58b96a9f6dc7987c163243919c20cd944a2a
--- /dev/null
+++ b/model-00130-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d1bcea5450d23604e2413e0bc1293f24d3939fa0d03d13c3595542f62a55b5fa
+size 1543504488
diff --git a/model-00131-of-00162.safetensors b/model-00131-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..08c3e20544f39c55bb13742fd0bfaddb39c52727
--- /dev/null
+++ b/model-00131-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aab17b0b79d17c4d80cd6f61648d6d802ba7627ea5e0d40ce35a83363dd7fc4b
+size 1879114208
diff --git a/model-00132-of-00162.safetensors b/model-00132-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..a262617d6e4784f91851b151d84ba666accc1a8f
--- /dev/null
+++ b/model-00132-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c3d54f3f1c447518abb33d91fd351ec86a2b4f370a18a1c1f3cd94b90a3a4781
+size 1543504488
diff --git a/model-00133-of-00162.safetensors b/model-00133-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..4d283996389f9d1f2a6bc3855c3910a610ce6aef
--- /dev/null
+++ b/model-00133-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2054e7e3428652f6cf0145b380d45d8df4b6bd1daa0082f93a428f14588e8864
+size 1879114208
diff --git a/model-00135-of-00162.safetensors b/model-00135-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..12acd3df8807ad43df58402a1da4a144af92a9a8
--- /dev/null
+++ b/model-00135-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:55d0abd7f13059f8af7584c634530ec567ee6a22de0c8704544c7896af8c32c0
+size 1879114208
diff --git a/model-00136-of-00162.safetensors b/model-00136-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7c5bcc800aa389bb8eed6cc0e4249a1a05a14036
--- /dev/null
+++ b/model-00136-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:928fce84e7c5234eeaae5ee9b5a8bc8d32dfaed8ac3fbd2a9c279f1a76d76bae
+size 1543504488
diff --git a/model-00137-of-00162.safetensors b/model-00137-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..475d45aad92f1226ea1bafae9b8f899455117fd7
--- /dev/null
+++ b/model-00137-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:76fd468f3ed69de9639a38f8e6128119457209e5cf68101676872294e13425aa
+size 1879114208
diff --git a/model-00139-of-00162.safetensors b/model-00139-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..757126555dac2ded589d48931a9fd9e20b82db58
--- /dev/null
+++ b/model-00139-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fd73326d76b3755b23ce3280a1498471c3c0feb59eb270e2b66413c6e10d1d74
+size 1879114208
diff --git a/model-00141-of-00162.safetensors b/model-00141-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9e5e3443621b93de0fafcd2060117bf878dc18ca
--- /dev/null
+++ b/model-00141-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:209cfc9251cd001a62f1ae15f92f073e40c7651febddf2306ff2133003eecfd1
+size 1879114208
diff --git a/model-00142-of-00162.safetensors b/model-00142-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9032d9f3238f7120e752a569d8a7830cdd3190f5
--- /dev/null
+++ b/model-00142-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7671eae4c57760631be6e5a51971fe58a0aea036467b62d58505ec24b9278d0f
+size 1543504488
diff --git a/model-00143-of-00162.safetensors b/model-00143-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..287d617c6ede82abc75fe037f6f444cb7bc56f79
--- /dev/null
+++ b/model-00143-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:29ab2e273f72400c8bbddc593a18b855325d8c207ba14ea0c49e0b89257dc9c1
+size 1879114208
diff --git a/model-00144-of-00162.safetensors b/model-00144-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9169ced8a4cc64d1bb3bfa9ed846090fe863b9e4
--- /dev/null
+++ b/model-00144-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ce8117b5c680ab1223d450aaab61abe1c3cfa39744f6776a9f7ce91dc885628c
+size 1543504488
diff --git a/model-00146-of-00162.safetensors b/model-00146-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..6cd6dd0a73e7963fda50fd8b7c5a11873f7a6b1c
--- /dev/null
+++ b/model-00146-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:673e4451c8d7b66cbb65a88b7367df182886e3e42070963b7812d30023a1c767
+size 1543504488
diff --git a/model-00147-of-00162.safetensors b/model-00147-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..1b4e2e7016034f9ad67c48cc90c3c8c1afba2f51
--- /dev/null
+++ b/model-00147-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9d7914a5b30d5459f2c349680dea4dc2652985b958852cb422256d6dc48f4d13
+size 1879114208
diff --git a/model-00148-of-00162.safetensors b/model-00148-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..6b4f0f5652d0c91d33c596ea3f3bcb84aedee846
--- /dev/null
+++ b/model-00148-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:95939bb6c264344cfb540c7f78829a7f6d0635804c21ea33e98d554d99802ab1
+size 1543504488
diff --git a/model-00149-of-00162.safetensors b/model-00149-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..350707ac333d5f1a491a12d51ff9a36df6c2cf43
--- /dev/null
+++ b/model-00149-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cde9742fa621eb926907b1910f8f32d2bb0dce4c3f2f4d60e794969795de2297
+size 1879114208
diff --git a/model-00150-of-00162.safetensors b/model-00150-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2d7a8a658e9a179be7aef4e23cf220404b78469d
--- /dev/null
+++ b/model-00150-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d5a5577f7c8c009b9564f29d6894104b2e5f6e5b0af74b05b4679ffc8850540c
+size 1543504488
diff --git a/model-00151-of-00162.safetensors b/model-00151-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..c657319fc3be31291fcfb832ba427c8099ad17d5
--- /dev/null
+++ b/model-00151-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:91cb7249bac44c8e8f5b39fc57a739dd3ec2d98dd925d931f769a213d7644af8
+size 1879114208
diff --git a/model-00152-of-00162.safetensors b/model-00152-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2fb59156b52b92156f20d0c297e66bbf47a8aa26
--- /dev/null
+++ b/model-00152-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a3640d66b5e2a06ae2b5b248fef79a95019a7f7c32bb18c144da040caf23d435
+size 1543504488
diff --git a/model-00154-of-00162.safetensors b/model-00154-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..18a459d62d21d59164ddc9f7cce96f0489c7d454
--- /dev/null
+++ b/model-00154-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f546cebf96f091599dd8bcefe842709ce845e7f597bafb3d11b492b5d3fea3cc
+size 1543504488
diff --git a/model-00155-of-00162.safetensors b/model-00155-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2b9f9e25cba861dc4f8bdeb29852e9498b5a515e
--- /dev/null
+++ b/model-00155-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:699ca083bf794f871271ccadd31e3b0ac4428f86822092ad6fa8ff789256eb3c
+size 1879114208
diff --git a/model-00157-of-00162.safetensors b/model-00157-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..6902006bd03ca9ede888dd773bbca28810f80526
--- /dev/null
+++ b/model-00157-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:46b87e3bfdc1ccdb570f5b0379f133e4f01d2315760143a6f4d426bce48de975
+size 1879114208
diff --git a/model-00159-of-00162.safetensors b/model-00159-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e24ff33d529ead0a4be0471107901110c6fb5d91
--- /dev/null
+++ b/model-00159-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4c27fe83e62e9cf179081ff589877d229a1c5916a7817d266b80be1388fdf875
+size 1879114208
diff --git a/model-00161-of-00162.safetensors b/model-00161-of-00162.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..98923272aa894d24573f64a1111b4a8dde53f09c
--- /dev/null
+++ b/model-00161-of-00162.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fcc593f9740dcf8c1198e73b92dd1fbaf97924a5e5eec6ff12dbe7f6eb7cf790
+size 4202692736
diff --git a/model.safetensors.index.json b/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..95550db6f5d38d712ea315d84a3be4967c6150ff
--- /dev/null
+++ b/model.safetensors.index.json
@@ -0,0 +1,730 @@
+{
+ "metadata": {
+ "total_size": 282214825984
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00161-of-00162.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00162.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00003-of-00162.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00003-of-00162.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00002-of-00162.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00003-of-00162.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00003-of-00162.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00002-of-00162.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00002-of-00162.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00002-of-00162.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00002-of-00162.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00005-of-00162.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00005-of-00162.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00004-of-00162.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00005-of-00162.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00005-of-00162.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00004-of-00162.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00004-of-00162.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00004-of-00162.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00004-of-00162.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00023-of-00162.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00023-of-00162.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00022-of-00162.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00023-of-00162.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00023-of-00162.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00022-of-00162.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00022-of-00162.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00022-of-00162.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00022-of-00162.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00025-of-00162.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00025-of-00162.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00024-of-00162.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00025-of-00162.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00025-of-00162.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00024-of-00162.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00024-of-00162.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00024-of-00162.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00024-of-00162.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00027-of-00162.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00027-of-00162.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00026-of-00162.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00027-of-00162.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00027-of-00162.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00026-of-00162.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00026-of-00162.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00026-of-00162.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00026-of-00162.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00029-of-00162.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00029-of-00162.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00028-of-00162.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00029-of-00162.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00029-of-00162.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00028-of-00162.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00028-of-00162.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00028-of-00162.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00028-of-00162.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00031-of-00162.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00031-of-00162.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00030-of-00162.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00031-of-00162.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00031-of-00162.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00030-of-00162.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00030-of-00162.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00030-of-00162.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00030-of-00162.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00033-of-00162.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00033-of-00162.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00032-of-00162.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00033-of-00162.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00033-of-00162.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00032-of-00162.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00032-of-00162.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00032-of-00162.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00032-of-00162.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00035-of-00162.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00035-of-00162.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00034-of-00162.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00035-of-00162.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00035-of-00162.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00034-of-00162.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00034-of-00162.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00034-of-00162.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00034-of-00162.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00037-of-00162.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00037-of-00162.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00036-of-00162.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00037-of-00162.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00037-of-00162.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00036-of-00162.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00036-of-00162.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00036-of-00162.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00036-of-00162.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00039-of-00162.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00039-of-00162.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00038-of-00162.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00039-of-00162.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00039-of-00162.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00038-of-00162.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00038-of-00162.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00038-of-00162.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00038-of-00162.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00041-of-00162.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00041-of-00162.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00040-of-00162.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00041-of-00162.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00041-of-00162.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00040-of-00162.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00040-of-00162.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00040-of-00162.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00040-of-00162.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00007-of-00162.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00007-of-00162.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00006-of-00162.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00007-of-00162.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00007-of-00162.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00006-of-00162.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00006-of-00162.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00006-of-00162.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00006-of-00162.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00043-of-00162.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00043-of-00162.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00042-of-00162.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00043-of-00162.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00043-of-00162.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00042-of-00162.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00042-of-00162.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00042-of-00162.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00042-of-00162.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00045-of-00162.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00045-of-00162.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00044-of-00162.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00045-of-00162.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00045-of-00162.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00044-of-00162.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00044-of-00162.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00044-of-00162.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00044-of-00162.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00047-of-00162.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00047-of-00162.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00046-of-00162.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00047-of-00162.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00047-of-00162.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00046-of-00162.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00046-of-00162.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00046-of-00162.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00046-of-00162.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00049-of-00162.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00049-of-00162.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00048-of-00162.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00049-of-00162.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00049-of-00162.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00048-of-00162.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00048-of-00162.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00048-of-00162.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00048-of-00162.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00051-of-00162.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00051-of-00162.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00050-of-00162.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00051-of-00162.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00051-of-00162.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00050-of-00162.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00050-of-00162.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00050-of-00162.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00050-of-00162.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00053-of-00162.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00053-of-00162.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00052-of-00162.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00053-of-00162.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00053-of-00162.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00052-of-00162.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00052-of-00162.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00052-of-00162.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00052-of-00162.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00055-of-00162.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00055-of-00162.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00054-of-00162.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00055-of-00162.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00055-of-00162.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00054-of-00162.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00054-of-00162.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00054-of-00162.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00054-of-00162.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00057-of-00162.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00057-of-00162.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00056-of-00162.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00057-of-00162.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00057-of-00162.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00056-of-00162.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00056-of-00162.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00056-of-00162.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00056-of-00162.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00059-of-00162.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00059-of-00162.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00058-of-00162.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00059-of-00162.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00059-of-00162.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00058-of-00162.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00058-of-00162.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00058-of-00162.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00058-of-00162.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00061-of-00162.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00061-of-00162.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00060-of-00162.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00061-of-00162.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00061-of-00162.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00060-of-00162.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00060-of-00162.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00060-of-00162.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00060-of-00162.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00009-of-00162.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00009-of-00162.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00008-of-00162.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00009-of-00162.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00009-of-00162.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00008-of-00162.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00008-of-00162.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00008-of-00162.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00008-of-00162.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00063-of-00162.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00063-of-00162.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00062-of-00162.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00063-of-00162.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00063-of-00162.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00062-of-00162.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00062-of-00162.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00062-of-00162.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00062-of-00162.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00065-of-00162.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00065-of-00162.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00064-of-00162.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00065-of-00162.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00065-of-00162.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00064-of-00162.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00064-of-00162.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00064-of-00162.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00064-of-00162.safetensors",
+ "model.layers.32.input_layernorm.weight": "model-00067-of-00162.safetensors",
+ "model.layers.32.mlp.down_proj.weight": "model-00067-of-00162.safetensors",
+ "model.layers.32.mlp.gate_proj.weight": "model-00066-of-00162.safetensors",
+ "model.layers.32.mlp.up_proj.weight": "model-00067-of-00162.safetensors",
+ "model.layers.32.post_attention_layernorm.weight": "model-00067-of-00162.safetensors",
+ "model.layers.32.self_attn.k_proj.weight": "model-00066-of-00162.safetensors",
+ "model.layers.32.self_attn.o_proj.weight": "model-00066-of-00162.safetensors",
+ "model.layers.32.self_attn.q_proj.weight": "model-00066-of-00162.safetensors",
+ "model.layers.32.self_attn.v_proj.weight": "model-00066-of-00162.safetensors",
+ "model.layers.33.input_layernorm.weight": "model-00069-of-00162.safetensors",
+ "model.layers.33.mlp.down_proj.weight": "model-00069-of-00162.safetensors",
+ "model.layers.33.mlp.gate_proj.weight": "model-00068-of-00162.safetensors",
+ "model.layers.33.mlp.up_proj.weight": "model-00069-of-00162.safetensors",
+ "model.layers.33.post_attention_layernorm.weight": "model-00069-of-00162.safetensors",
+ "model.layers.33.self_attn.k_proj.weight": "model-00068-of-00162.safetensors",
+ "model.layers.33.self_attn.o_proj.weight": "model-00068-of-00162.safetensors",
+ "model.layers.33.self_attn.q_proj.weight": "model-00068-of-00162.safetensors",
+ "model.layers.33.self_attn.v_proj.weight": "model-00068-of-00162.safetensors",
+ "model.layers.34.input_layernorm.weight": "model-00071-of-00162.safetensors",
+ "model.layers.34.mlp.down_proj.weight": "model-00071-of-00162.safetensors",
+ "model.layers.34.mlp.gate_proj.weight": "model-00070-of-00162.safetensors",
+ "model.layers.34.mlp.up_proj.weight": "model-00071-of-00162.safetensors",
+ "model.layers.34.post_attention_layernorm.weight": "model-00071-of-00162.safetensors",
+ "model.layers.34.self_attn.k_proj.weight": "model-00070-of-00162.safetensors",
+ "model.layers.34.self_attn.o_proj.weight": "model-00070-of-00162.safetensors",
+ "model.layers.34.self_attn.q_proj.weight": "model-00070-of-00162.safetensors",
+ "model.layers.34.self_attn.v_proj.weight": "model-00070-of-00162.safetensors",
+ "model.layers.35.input_layernorm.weight": "model-00073-of-00162.safetensors",
+ "model.layers.35.mlp.down_proj.weight": "model-00073-of-00162.safetensors",
+ "model.layers.35.mlp.gate_proj.weight": "model-00072-of-00162.safetensors",
+ "model.layers.35.mlp.up_proj.weight": "model-00073-of-00162.safetensors",
+ "model.layers.35.post_attention_layernorm.weight": "model-00073-of-00162.safetensors",
+ "model.layers.35.self_attn.k_proj.weight": "model-00072-of-00162.safetensors",
+ "model.layers.35.self_attn.o_proj.weight": "model-00072-of-00162.safetensors",
+ "model.layers.35.self_attn.q_proj.weight": "model-00072-of-00162.safetensors",
+ "model.layers.35.self_attn.v_proj.weight": "model-00072-of-00162.safetensors",
+ "model.layers.36.input_layernorm.weight": "model-00075-of-00162.safetensors",
+ "model.layers.36.mlp.down_proj.weight": "model-00075-of-00162.safetensors",
+ "model.layers.36.mlp.gate_proj.weight": "model-00074-of-00162.safetensors",
+ "model.layers.36.mlp.up_proj.weight": "model-00075-of-00162.safetensors",
+ "model.layers.36.post_attention_layernorm.weight": "model-00075-of-00162.safetensors",
+ "model.layers.36.self_attn.k_proj.weight": "model-00074-of-00162.safetensors",
+ "model.layers.36.self_attn.o_proj.weight": "model-00074-of-00162.safetensors",
+ "model.layers.36.self_attn.q_proj.weight": "model-00074-of-00162.safetensors",
+ "model.layers.36.self_attn.v_proj.weight": "model-00074-of-00162.safetensors",
+ "model.layers.37.input_layernorm.weight": "model-00077-of-00162.safetensors",
+ "model.layers.37.mlp.down_proj.weight": "model-00077-of-00162.safetensors",
+ "model.layers.37.mlp.gate_proj.weight": "model-00076-of-00162.safetensors",
+ "model.layers.37.mlp.up_proj.weight": "model-00077-of-00162.safetensors",
+ "model.layers.37.post_attention_layernorm.weight": "model-00077-of-00162.safetensors",
+ "model.layers.37.self_attn.k_proj.weight": "model-00076-of-00162.safetensors",
+ "model.layers.37.self_attn.o_proj.weight": "model-00076-of-00162.safetensors",
+ "model.layers.37.self_attn.q_proj.weight": "model-00076-of-00162.safetensors",
+ "model.layers.37.self_attn.v_proj.weight": "model-00076-of-00162.safetensors",
+ "model.layers.38.input_layernorm.weight": "model-00079-of-00162.safetensors",
+ "model.layers.38.mlp.down_proj.weight": "model-00079-of-00162.safetensors",
+ "model.layers.38.mlp.gate_proj.weight": "model-00078-of-00162.safetensors",
+ "model.layers.38.mlp.up_proj.weight": "model-00079-of-00162.safetensors",
+ "model.layers.38.post_attention_layernorm.weight": "model-00079-of-00162.safetensors",
+ "model.layers.38.self_attn.k_proj.weight": "model-00078-of-00162.safetensors",
+ "model.layers.38.self_attn.o_proj.weight": "model-00078-of-00162.safetensors",
+ "model.layers.38.self_attn.q_proj.weight": "model-00078-of-00162.safetensors",
+ "model.layers.38.self_attn.v_proj.weight": "model-00078-of-00162.safetensors",
+ "model.layers.39.input_layernorm.weight": "model-00081-of-00162.safetensors",
+ "model.layers.39.mlp.down_proj.weight": "model-00081-of-00162.safetensors",
+ "model.layers.39.mlp.gate_proj.weight": "model-00080-of-00162.safetensors",
+ "model.layers.39.mlp.up_proj.weight": "model-00081-of-00162.safetensors",
+ "model.layers.39.post_attention_layernorm.weight": "model-00081-of-00162.safetensors",
+ "model.layers.39.self_attn.k_proj.weight": "model-00080-of-00162.safetensors",
+ "model.layers.39.self_attn.o_proj.weight": "model-00080-of-00162.safetensors",
+ "model.layers.39.self_attn.q_proj.weight": "model-00080-of-00162.safetensors",
+ "model.layers.39.self_attn.v_proj.weight": "model-00080-of-00162.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00011-of-00162.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00011-of-00162.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00010-of-00162.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00011-of-00162.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00011-of-00162.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00010-of-00162.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00010-of-00162.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00010-of-00162.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00010-of-00162.safetensors",
+ "model.layers.40.input_layernorm.weight": "model-00083-of-00162.safetensors",
+ "model.layers.40.mlp.down_proj.weight": "model-00083-of-00162.safetensors",
+ "model.layers.40.mlp.gate_proj.weight": "model-00082-of-00162.safetensors",
+ "model.layers.40.mlp.up_proj.weight": "model-00083-of-00162.safetensors",
+ "model.layers.40.post_attention_layernorm.weight": "model-00083-of-00162.safetensors",
+ "model.layers.40.self_attn.k_proj.weight": "model-00082-of-00162.safetensors",
+ "model.layers.40.self_attn.o_proj.weight": "model-00082-of-00162.safetensors",
+ "model.layers.40.self_attn.q_proj.weight": "model-00082-of-00162.safetensors",
+ "model.layers.40.self_attn.v_proj.weight": "model-00082-of-00162.safetensors",
+ "model.layers.41.input_layernorm.weight": "model-00085-of-00162.safetensors",
+ "model.layers.41.mlp.down_proj.weight": "model-00085-of-00162.safetensors",
+ "model.layers.41.mlp.gate_proj.weight": "model-00084-of-00162.safetensors",
+ "model.layers.41.mlp.up_proj.weight": "model-00085-of-00162.safetensors",
+ "model.layers.41.post_attention_layernorm.weight": "model-00085-of-00162.safetensors",
+ "model.layers.41.self_attn.k_proj.weight": "model-00084-of-00162.safetensors",
+ "model.layers.41.self_attn.o_proj.weight": "model-00084-of-00162.safetensors",
+ "model.layers.41.self_attn.q_proj.weight": "model-00084-of-00162.safetensors",
+ "model.layers.41.self_attn.v_proj.weight": "model-00084-of-00162.safetensors",
+ "model.layers.42.input_layernorm.weight": "model-00087-of-00162.safetensors",
+ "model.layers.42.mlp.down_proj.weight": "model-00087-of-00162.safetensors",
+ "model.layers.42.mlp.gate_proj.weight": "model-00086-of-00162.safetensors",
+ "model.layers.42.mlp.up_proj.weight": "model-00087-of-00162.safetensors",
+ "model.layers.42.post_attention_layernorm.weight": "model-00087-of-00162.safetensors",
+ "model.layers.42.self_attn.k_proj.weight": "model-00086-of-00162.safetensors",
+ "model.layers.42.self_attn.o_proj.weight": "model-00086-of-00162.safetensors",
+ "model.layers.42.self_attn.q_proj.weight": "model-00086-of-00162.safetensors",
+ "model.layers.42.self_attn.v_proj.weight": "model-00086-of-00162.safetensors",
+ "model.layers.43.input_layernorm.weight": "model-00089-of-00162.safetensors",
+ "model.layers.43.mlp.down_proj.weight": "model-00089-of-00162.safetensors",
+ "model.layers.43.mlp.gate_proj.weight": "model-00088-of-00162.safetensors",
+ "model.layers.43.mlp.up_proj.weight": "model-00089-of-00162.safetensors",
+ "model.layers.43.post_attention_layernorm.weight": "model-00089-of-00162.safetensors",
+ "model.layers.43.self_attn.k_proj.weight": "model-00088-of-00162.safetensors",
+ "model.layers.43.self_attn.o_proj.weight": "model-00088-of-00162.safetensors",
+ "model.layers.43.self_attn.q_proj.weight": "model-00088-of-00162.safetensors",
+ "model.layers.43.self_attn.v_proj.weight": "model-00088-of-00162.safetensors",
+ "model.layers.44.input_layernorm.weight": "model-00091-of-00162.safetensors",
+ "model.layers.44.mlp.down_proj.weight": "model-00091-of-00162.safetensors",
+ "model.layers.44.mlp.gate_proj.weight": "model-00090-of-00162.safetensors",
+ "model.layers.44.mlp.up_proj.weight": "model-00091-of-00162.safetensors",
+ "model.layers.44.post_attention_layernorm.weight": "model-00091-of-00162.safetensors",
+ "model.layers.44.self_attn.k_proj.weight": "model-00090-of-00162.safetensors",
+ "model.layers.44.self_attn.o_proj.weight": "model-00090-of-00162.safetensors",
+ "model.layers.44.self_attn.q_proj.weight": "model-00090-of-00162.safetensors",
+ "model.layers.44.self_attn.v_proj.weight": "model-00090-of-00162.safetensors",
+ "model.layers.45.input_layernorm.weight": "model-00093-of-00162.safetensors",
+ "model.layers.45.mlp.down_proj.weight": "model-00093-of-00162.safetensors",
+ "model.layers.45.mlp.gate_proj.weight": "model-00092-of-00162.safetensors",
+ "model.layers.45.mlp.up_proj.weight": "model-00093-of-00162.safetensors",
+ "model.layers.45.post_attention_layernorm.weight": "model-00093-of-00162.safetensors",
+ "model.layers.45.self_attn.k_proj.weight": "model-00092-of-00162.safetensors",
+ "model.layers.45.self_attn.o_proj.weight": "model-00092-of-00162.safetensors",
+ "model.layers.45.self_attn.q_proj.weight": "model-00092-of-00162.safetensors",
+ "model.layers.45.self_attn.v_proj.weight": "model-00092-of-00162.safetensors",
+ "model.layers.46.input_layernorm.weight": "model-00095-of-00162.safetensors",
+ "model.layers.46.mlp.down_proj.weight": "model-00095-of-00162.safetensors",
+ "model.layers.46.mlp.gate_proj.weight": "model-00094-of-00162.safetensors",
+ "model.layers.46.mlp.up_proj.weight": "model-00095-of-00162.safetensors",
+ "model.layers.46.post_attention_layernorm.weight": "model-00095-of-00162.safetensors",
+ "model.layers.46.self_attn.k_proj.weight": "model-00094-of-00162.safetensors",
+ "model.layers.46.self_attn.o_proj.weight": "model-00094-of-00162.safetensors",
+ "model.layers.46.self_attn.q_proj.weight": "model-00094-of-00162.safetensors",
+ "model.layers.46.self_attn.v_proj.weight": "model-00094-of-00162.safetensors",
+ "model.layers.47.input_layernorm.weight": "model-00097-of-00162.safetensors",
+ "model.layers.47.mlp.down_proj.weight": "model-00097-of-00162.safetensors",
+ "model.layers.47.mlp.gate_proj.weight": "model-00096-of-00162.safetensors",
+ "model.layers.47.mlp.up_proj.weight": "model-00097-of-00162.safetensors",
+ "model.layers.47.post_attention_layernorm.weight": "model-00097-of-00162.safetensors",
+ "model.layers.47.self_attn.k_proj.weight": "model-00096-of-00162.safetensors",
+ "model.layers.47.self_attn.o_proj.weight": "model-00096-of-00162.safetensors",
+ "model.layers.47.self_attn.q_proj.weight": "model-00096-of-00162.safetensors",
+ "model.layers.47.self_attn.v_proj.weight": "model-00096-of-00162.safetensors",
+ "model.layers.48.input_layernorm.weight": "model-00099-of-00162.safetensors",
+ "model.layers.48.mlp.down_proj.weight": "model-00099-of-00162.safetensors",
+ "model.layers.48.mlp.gate_proj.weight": "model-00098-of-00162.safetensors",
+ "model.layers.48.mlp.up_proj.weight": "model-00099-of-00162.safetensors",
+ "model.layers.48.post_attention_layernorm.weight": "model-00099-of-00162.safetensors",
+ "model.layers.48.self_attn.k_proj.weight": "model-00098-of-00162.safetensors",
+ "model.layers.48.self_attn.o_proj.weight": "model-00098-of-00162.safetensors",
+ "model.layers.48.self_attn.q_proj.weight": "model-00098-of-00162.safetensors",
+ "model.layers.48.self_attn.v_proj.weight": "model-00098-of-00162.safetensors",
+ "model.layers.49.input_layernorm.weight": "model-00101-of-00162.safetensors",
+ "model.layers.49.mlp.down_proj.weight": "model-00101-of-00162.safetensors",
+ "model.layers.49.mlp.gate_proj.weight": "model-00100-of-00162.safetensors",
+ "model.layers.49.mlp.up_proj.weight": "model-00101-of-00162.safetensors",
+ "model.layers.49.post_attention_layernorm.weight": "model-00101-of-00162.safetensors",
+ "model.layers.49.self_attn.k_proj.weight": "model-00100-of-00162.safetensors",
+ "model.layers.49.self_attn.o_proj.weight": "model-00100-of-00162.safetensors",
+ "model.layers.49.self_attn.q_proj.weight": "model-00100-of-00162.safetensors",
+ "model.layers.49.self_attn.v_proj.weight": "model-00100-of-00162.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00013-of-00162.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00013-of-00162.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00012-of-00162.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00013-of-00162.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00013-of-00162.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00012-of-00162.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00012-of-00162.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00012-of-00162.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00012-of-00162.safetensors",
+ "model.layers.50.input_layernorm.weight": "model-00103-of-00162.safetensors",
+ "model.layers.50.mlp.down_proj.weight": "model-00103-of-00162.safetensors",
+ "model.layers.50.mlp.gate_proj.weight": "model-00102-of-00162.safetensors",
+ "model.layers.50.mlp.up_proj.weight": "model-00103-of-00162.safetensors",
+ "model.layers.50.post_attention_layernorm.weight": "model-00103-of-00162.safetensors",
+ "model.layers.50.self_attn.k_proj.weight": "model-00102-of-00162.safetensors",
+ "model.layers.50.self_attn.o_proj.weight": "model-00102-of-00162.safetensors",
+ "model.layers.50.self_attn.q_proj.weight": "model-00102-of-00162.safetensors",
+ "model.layers.50.self_attn.v_proj.weight": "model-00102-of-00162.safetensors",
+ "model.layers.51.input_layernorm.weight": "model-00105-of-00162.safetensors",
+ "model.layers.51.mlp.down_proj.weight": "model-00105-of-00162.safetensors",
+ "model.layers.51.mlp.gate_proj.weight": "model-00104-of-00162.safetensors",
+ "model.layers.51.mlp.up_proj.weight": "model-00105-of-00162.safetensors",
+ "model.layers.51.post_attention_layernorm.weight": "model-00105-of-00162.safetensors",
+ "model.layers.51.self_attn.k_proj.weight": "model-00104-of-00162.safetensors",
+ "model.layers.51.self_attn.o_proj.weight": "model-00104-of-00162.safetensors",
+ "model.layers.51.self_attn.q_proj.weight": "model-00104-of-00162.safetensors",
+ "model.layers.51.self_attn.v_proj.weight": "model-00104-of-00162.safetensors",
+ "model.layers.52.input_layernorm.weight": "model-00107-of-00162.safetensors",
+ "model.layers.52.mlp.down_proj.weight": "model-00107-of-00162.safetensors",
+ "model.layers.52.mlp.gate_proj.weight": "model-00106-of-00162.safetensors",
+ "model.layers.52.mlp.up_proj.weight": "model-00107-of-00162.safetensors",
+ "model.layers.52.post_attention_layernorm.weight": "model-00107-of-00162.safetensors",
+ "model.layers.52.self_attn.k_proj.weight": "model-00106-of-00162.safetensors",
+ "model.layers.52.self_attn.o_proj.weight": "model-00106-of-00162.safetensors",
+ "model.layers.52.self_attn.q_proj.weight": "model-00106-of-00162.safetensors",
+ "model.layers.52.self_attn.v_proj.weight": "model-00106-of-00162.safetensors",
+ "model.layers.53.input_layernorm.weight": "model-00109-of-00162.safetensors",
+ "model.layers.53.mlp.down_proj.weight": "model-00109-of-00162.safetensors",
+ "model.layers.53.mlp.gate_proj.weight": "model-00108-of-00162.safetensors",
+ "model.layers.53.mlp.up_proj.weight": "model-00109-of-00162.safetensors",
+ "model.layers.53.post_attention_layernorm.weight": "model-00109-of-00162.safetensors",
+ "model.layers.53.self_attn.k_proj.weight": "model-00108-of-00162.safetensors",
+ "model.layers.53.self_attn.o_proj.weight": "model-00108-of-00162.safetensors",
+ "model.layers.53.self_attn.q_proj.weight": "model-00108-of-00162.safetensors",
+ "model.layers.53.self_attn.v_proj.weight": "model-00108-of-00162.safetensors",
+ "model.layers.54.input_layernorm.weight": "model-00111-of-00162.safetensors",
+ "model.layers.54.mlp.down_proj.weight": "model-00111-of-00162.safetensors",
+ "model.layers.54.mlp.gate_proj.weight": "model-00110-of-00162.safetensors",
+ "model.layers.54.mlp.up_proj.weight": "model-00111-of-00162.safetensors",
+ "model.layers.54.post_attention_layernorm.weight": "model-00111-of-00162.safetensors",
+ "model.layers.54.self_attn.k_proj.weight": "model-00110-of-00162.safetensors",
+ "model.layers.54.self_attn.o_proj.weight": "model-00110-of-00162.safetensors",
+ "model.layers.54.self_attn.q_proj.weight": "model-00110-of-00162.safetensors",
+ "model.layers.54.self_attn.v_proj.weight": "model-00110-of-00162.safetensors",
+ "model.layers.55.input_layernorm.weight": "model-00113-of-00162.safetensors",
+ "model.layers.55.mlp.down_proj.weight": "model-00113-of-00162.safetensors",
+ "model.layers.55.mlp.gate_proj.weight": "model-00112-of-00162.safetensors",
+ "model.layers.55.mlp.up_proj.weight": "model-00113-of-00162.safetensors",
+ "model.layers.55.post_attention_layernorm.weight": "model-00113-of-00162.safetensors",
+ "model.layers.55.self_attn.k_proj.weight": "model-00112-of-00162.safetensors",
+ "model.layers.55.self_attn.o_proj.weight": "model-00112-of-00162.safetensors",
+ "model.layers.55.self_attn.q_proj.weight": "model-00112-of-00162.safetensors",
+ "model.layers.55.self_attn.v_proj.weight": "model-00112-of-00162.safetensors",
+ "model.layers.56.input_layernorm.weight": "model-00115-of-00162.safetensors",
+ "model.layers.56.mlp.down_proj.weight": "model-00115-of-00162.safetensors",
+ "model.layers.56.mlp.gate_proj.weight": "model-00114-of-00162.safetensors",
+ "model.layers.56.mlp.up_proj.weight": "model-00115-of-00162.safetensors",
+ "model.layers.56.post_attention_layernorm.weight": "model-00115-of-00162.safetensors",
+ "model.layers.56.self_attn.k_proj.weight": "model-00114-of-00162.safetensors",
+ "model.layers.56.self_attn.o_proj.weight": "model-00114-of-00162.safetensors",
+ "model.layers.56.self_attn.q_proj.weight": "model-00114-of-00162.safetensors",
+ "model.layers.56.self_attn.v_proj.weight": "model-00114-of-00162.safetensors",
+ "model.layers.57.input_layernorm.weight": "model-00117-of-00162.safetensors",
+ "model.layers.57.mlp.down_proj.weight": "model-00117-of-00162.safetensors",
+ "model.layers.57.mlp.gate_proj.weight": "model-00116-of-00162.safetensors",
+ "model.layers.57.mlp.up_proj.weight": "model-00117-of-00162.safetensors",
+ "model.layers.57.post_attention_layernorm.weight": "model-00117-of-00162.safetensors",
+ "model.layers.57.self_attn.k_proj.weight": "model-00116-of-00162.safetensors",
+ "model.layers.57.self_attn.o_proj.weight": "model-00116-of-00162.safetensors",
+ "model.layers.57.self_attn.q_proj.weight": "model-00116-of-00162.safetensors",
+ "model.layers.57.self_attn.v_proj.weight": "model-00116-of-00162.safetensors",
+ "model.layers.58.input_layernorm.weight": "model-00119-of-00162.safetensors",
+ "model.layers.58.mlp.down_proj.weight": "model-00119-of-00162.safetensors",
+ "model.layers.58.mlp.gate_proj.weight": "model-00118-of-00162.safetensors",
+ "model.layers.58.mlp.up_proj.weight": "model-00119-of-00162.safetensors",
+ "model.layers.58.post_attention_layernorm.weight": "model-00119-of-00162.safetensors",
+ "model.layers.58.self_attn.k_proj.weight": "model-00118-of-00162.safetensors",
+ "model.layers.58.self_attn.o_proj.weight": "model-00118-of-00162.safetensors",
+ "model.layers.58.self_attn.q_proj.weight": "model-00118-of-00162.safetensors",
+ "model.layers.58.self_attn.v_proj.weight": "model-00118-of-00162.safetensors",
+ "model.layers.59.input_layernorm.weight": "model-00121-of-00162.safetensors",
+ "model.layers.59.mlp.down_proj.weight": "model-00121-of-00162.safetensors",
+ "model.layers.59.mlp.gate_proj.weight": "model-00120-of-00162.safetensors",
+ "model.layers.59.mlp.up_proj.weight": "model-00121-of-00162.safetensors",
+ "model.layers.59.post_attention_layernorm.weight": "model-00121-of-00162.safetensors",
+ "model.layers.59.self_attn.k_proj.weight": "model-00120-of-00162.safetensors",
+ "model.layers.59.self_attn.o_proj.weight": "model-00120-of-00162.safetensors",
+ "model.layers.59.self_attn.q_proj.weight": "model-00120-of-00162.safetensors",
+ "model.layers.59.self_attn.v_proj.weight": "model-00120-of-00162.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00015-of-00162.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00015-of-00162.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00014-of-00162.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00015-of-00162.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00015-of-00162.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00014-of-00162.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00014-of-00162.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00014-of-00162.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00014-of-00162.safetensors",
+ "model.layers.60.input_layernorm.weight": "model-00123-of-00162.safetensors",
+ "model.layers.60.mlp.down_proj.weight": "model-00123-of-00162.safetensors",
+ "model.layers.60.mlp.gate_proj.weight": "model-00122-of-00162.safetensors",
+ "model.layers.60.mlp.up_proj.weight": "model-00123-of-00162.safetensors",
+ "model.layers.60.post_attention_layernorm.weight": "model-00123-of-00162.safetensors",
+ "model.layers.60.self_attn.k_proj.weight": "model-00122-of-00162.safetensors",
+ "model.layers.60.self_attn.o_proj.weight": "model-00122-of-00162.safetensors",
+ "model.layers.60.self_attn.q_proj.weight": "model-00122-of-00162.safetensors",
+ "model.layers.60.self_attn.v_proj.weight": "model-00122-of-00162.safetensors",
+ "model.layers.61.input_layernorm.weight": "model-00125-of-00162.safetensors",
+ "model.layers.61.mlp.down_proj.weight": "model-00125-of-00162.safetensors",
+ "model.layers.61.mlp.gate_proj.weight": "model-00124-of-00162.safetensors",
+ "model.layers.61.mlp.up_proj.weight": "model-00125-of-00162.safetensors",
+ "model.layers.61.post_attention_layernorm.weight": "model-00125-of-00162.safetensors",
+ "model.layers.61.self_attn.k_proj.weight": "model-00124-of-00162.safetensors",
+ "model.layers.61.self_attn.o_proj.weight": "model-00124-of-00162.safetensors",
+ "model.layers.61.self_attn.q_proj.weight": "model-00124-of-00162.safetensors",
+ "model.layers.61.self_attn.v_proj.weight": "model-00124-of-00162.safetensors",
+ "model.layers.62.input_layernorm.weight": "model-00127-of-00162.safetensors",
+ "model.layers.62.mlp.down_proj.weight": "model-00127-of-00162.safetensors",
+ "model.layers.62.mlp.gate_proj.weight": "model-00126-of-00162.safetensors",
+ "model.layers.62.mlp.up_proj.weight": "model-00127-of-00162.safetensors",
+ "model.layers.62.post_attention_layernorm.weight": "model-00127-of-00162.safetensors",
+ "model.layers.62.self_attn.k_proj.weight": "model-00126-of-00162.safetensors",
+ "model.layers.62.self_attn.o_proj.weight": "model-00126-of-00162.safetensors",
+ "model.layers.62.self_attn.q_proj.weight": "model-00126-of-00162.safetensors",
+ "model.layers.62.self_attn.v_proj.weight": "model-00126-of-00162.safetensors",
+ "model.layers.63.input_layernorm.weight": "model-00129-of-00162.safetensors",
+ "model.layers.63.mlp.down_proj.weight": "model-00129-of-00162.safetensors",
+ "model.layers.63.mlp.gate_proj.weight": "model-00128-of-00162.safetensors",
+ "model.layers.63.mlp.up_proj.weight": "model-00129-of-00162.safetensors",
+ "model.layers.63.post_attention_layernorm.weight": "model-00129-of-00162.safetensors",
+ "model.layers.63.self_attn.k_proj.weight": "model-00128-of-00162.safetensors",
+ "model.layers.63.self_attn.o_proj.weight": "model-00128-of-00162.safetensors",
+ "model.layers.63.self_attn.q_proj.weight": "model-00128-of-00162.safetensors",
+ "model.layers.63.self_attn.v_proj.weight": "model-00128-of-00162.safetensors",
+ "model.layers.64.input_layernorm.weight": "model-00131-of-00162.safetensors",
+ "model.layers.64.mlp.down_proj.weight": "model-00131-of-00162.safetensors",
+ "model.layers.64.mlp.gate_proj.weight": "model-00130-of-00162.safetensors",
+ "model.layers.64.mlp.up_proj.weight": "model-00131-of-00162.safetensors",
+ "model.layers.64.post_attention_layernorm.weight": "model-00131-of-00162.safetensors",
+ "model.layers.64.self_attn.k_proj.weight": "model-00130-of-00162.safetensors",
+ "model.layers.64.self_attn.o_proj.weight": "model-00130-of-00162.safetensors",
+ "model.layers.64.self_attn.q_proj.weight": "model-00130-of-00162.safetensors",
+ "model.layers.64.self_attn.v_proj.weight": "model-00130-of-00162.safetensors",
+ "model.layers.65.input_layernorm.weight": "model-00133-of-00162.safetensors",
+ "model.layers.65.mlp.down_proj.weight": "model-00133-of-00162.safetensors",
+ "model.layers.65.mlp.gate_proj.weight": "model-00132-of-00162.safetensors",
+ "model.layers.65.mlp.up_proj.weight": "model-00133-of-00162.safetensors",
+ "model.layers.65.post_attention_layernorm.weight": "model-00133-of-00162.safetensors",
+ "model.layers.65.self_attn.k_proj.weight": "model-00132-of-00162.safetensors",
+ "model.layers.65.self_attn.o_proj.weight": "model-00132-of-00162.safetensors",
+ "model.layers.65.self_attn.q_proj.weight": "model-00132-of-00162.safetensors",
+ "model.layers.65.self_attn.v_proj.weight": "model-00132-of-00162.safetensors",
+ "model.layers.66.input_layernorm.weight": "model-00135-of-00162.safetensors",
+ "model.layers.66.mlp.down_proj.weight": "model-00135-of-00162.safetensors",
+ "model.layers.66.mlp.gate_proj.weight": "model-00134-of-00162.safetensors",
+ "model.layers.66.mlp.up_proj.weight": "model-00135-of-00162.safetensors",
+ "model.layers.66.post_attention_layernorm.weight": "model-00135-of-00162.safetensors",
+ "model.layers.66.self_attn.k_proj.weight": "model-00134-of-00162.safetensors",
+ "model.layers.66.self_attn.o_proj.weight": "model-00134-of-00162.safetensors",
+ "model.layers.66.self_attn.q_proj.weight": "model-00134-of-00162.safetensors",
+ "model.layers.66.self_attn.v_proj.weight": "model-00134-of-00162.safetensors",
+ "model.layers.67.input_layernorm.weight": "model-00137-of-00162.safetensors",
+ "model.layers.67.mlp.down_proj.weight": "model-00137-of-00162.safetensors",
+ "model.layers.67.mlp.gate_proj.weight": "model-00136-of-00162.safetensors",
+ "model.layers.67.mlp.up_proj.weight": "model-00137-of-00162.safetensors",
+ "model.layers.67.post_attention_layernorm.weight": "model-00137-of-00162.safetensors",
+ "model.layers.67.self_attn.k_proj.weight": "model-00136-of-00162.safetensors",
+ "model.layers.67.self_attn.o_proj.weight": "model-00136-of-00162.safetensors",
+ "model.layers.67.self_attn.q_proj.weight": "model-00136-of-00162.safetensors",
+ "model.layers.67.self_attn.v_proj.weight": "model-00136-of-00162.safetensors",
+ "model.layers.68.input_layernorm.weight": "model-00139-of-00162.safetensors",
+ "model.layers.68.mlp.down_proj.weight": "model-00139-of-00162.safetensors",
+ "model.layers.68.mlp.gate_proj.weight": "model-00138-of-00162.safetensors",
+ "model.layers.68.mlp.up_proj.weight": "model-00139-of-00162.safetensors",
+ "model.layers.68.post_attention_layernorm.weight": "model-00139-of-00162.safetensors",
+ "model.layers.68.self_attn.k_proj.weight": "model-00138-of-00162.safetensors",
+ "model.layers.68.self_attn.o_proj.weight": "model-00138-of-00162.safetensors",
+ "model.layers.68.self_attn.q_proj.weight": "model-00138-of-00162.safetensors",
+ "model.layers.68.self_attn.v_proj.weight": "model-00138-of-00162.safetensors",
+ "model.layers.69.input_layernorm.weight": "model-00141-of-00162.safetensors",
+ "model.layers.69.mlp.down_proj.weight": "model-00141-of-00162.safetensors",
+ "model.layers.69.mlp.gate_proj.weight": "model-00140-of-00162.safetensors",
+ "model.layers.69.mlp.up_proj.weight": "model-00141-of-00162.safetensors",
+ "model.layers.69.post_attention_layernorm.weight": "model-00141-of-00162.safetensors",
+ "model.layers.69.self_attn.k_proj.weight": "model-00140-of-00162.safetensors",
+ "model.layers.69.self_attn.o_proj.weight": "model-00140-of-00162.safetensors",
+ "model.layers.69.self_attn.q_proj.weight": "model-00140-of-00162.safetensors",
+ "model.layers.69.self_attn.v_proj.weight": "model-00140-of-00162.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00017-of-00162.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00017-of-00162.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00016-of-00162.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00017-of-00162.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00017-of-00162.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00016-of-00162.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00016-of-00162.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00016-of-00162.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00016-of-00162.safetensors",
+ "model.layers.70.input_layernorm.weight": "model-00143-of-00162.safetensors",
+ "model.layers.70.mlp.down_proj.weight": "model-00143-of-00162.safetensors",
+ "model.layers.70.mlp.gate_proj.weight": "model-00142-of-00162.safetensors",
+ "model.layers.70.mlp.up_proj.weight": "model-00143-of-00162.safetensors",
+ "model.layers.70.post_attention_layernorm.weight": "model-00143-of-00162.safetensors",
+ "model.layers.70.self_attn.k_proj.weight": "model-00142-of-00162.safetensors",
+ "model.layers.70.self_attn.o_proj.weight": "model-00142-of-00162.safetensors",
+ "model.layers.70.self_attn.q_proj.weight": "model-00142-of-00162.safetensors",
+ "model.layers.70.self_attn.v_proj.weight": "model-00142-of-00162.safetensors",
+ "model.layers.71.input_layernorm.weight": "model-00145-of-00162.safetensors",
+ "model.layers.71.mlp.down_proj.weight": "model-00145-of-00162.safetensors",
+ "model.layers.71.mlp.gate_proj.weight": "model-00144-of-00162.safetensors",
+ "model.layers.71.mlp.up_proj.weight": "model-00145-of-00162.safetensors",
+ "model.layers.71.post_attention_layernorm.weight": "model-00145-of-00162.safetensors",
+ "model.layers.71.self_attn.k_proj.weight": "model-00144-of-00162.safetensors",
+ "model.layers.71.self_attn.o_proj.weight": "model-00144-of-00162.safetensors",
+ "model.layers.71.self_attn.q_proj.weight": "model-00144-of-00162.safetensors",
+ "model.layers.71.self_attn.v_proj.weight": "model-00144-of-00162.safetensors",
+ "model.layers.72.input_layernorm.weight": "model-00147-of-00162.safetensors",
+ "model.layers.72.mlp.down_proj.weight": "model-00147-of-00162.safetensors",
+ "model.layers.72.mlp.gate_proj.weight": "model-00146-of-00162.safetensors",
+ "model.layers.72.mlp.up_proj.weight": "model-00147-of-00162.safetensors",
+ "model.layers.72.post_attention_layernorm.weight": "model-00147-of-00162.safetensors",
+ "model.layers.72.self_attn.k_proj.weight": "model-00146-of-00162.safetensors",
+ "model.layers.72.self_attn.o_proj.weight": "model-00146-of-00162.safetensors",
+ "model.layers.72.self_attn.q_proj.weight": "model-00146-of-00162.safetensors",
+ "model.layers.72.self_attn.v_proj.weight": "model-00146-of-00162.safetensors",
+ "model.layers.73.input_layernorm.weight": "model-00149-of-00162.safetensors",
+ "model.layers.73.mlp.down_proj.weight": "model-00149-of-00162.safetensors",
+ "model.layers.73.mlp.gate_proj.weight": "model-00148-of-00162.safetensors",
+ "model.layers.73.mlp.up_proj.weight": "model-00149-of-00162.safetensors",
+ "model.layers.73.post_attention_layernorm.weight": "model-00149-of-00162.safetensors",
+ "model.layers.73.self_attn.k_proj.weight": "model-00148-of-00162.safetensors",
+ "model.layers.73.self_attn.o_proj.weight": "model-00148-of-00162.safetensors",
+ "model.layers.73.self_attn.q_proj.weight": "model-00148-of-00162.safetensors",
+ "model.layers.73.self_attn.v_proj.weight": "model-00148-of-00162.safetensors",
+ "model.layers.74.input_layernorm.weight": "model-00151-of-00162.safetensors",
+ "model.layers.74.mlp.down_proj.weight": "model-00151-of-00162.safetensors",
+ "model.layers.74.mlp.gate_proj.weight": "model-00150-of-00162.safetensors",
+ "model.layers.74.mlp.up_proj.weight": "model-00151-of-00162.safetensors",
+ "model.layers.74.post_attention_layernorm.weight": "model-00151-of-00162.safetensors",
+ "model.layers.74.self_attn.k_proj.weight": "model-00150-of-00162.safetensors",
+ "model.layers.74.self_attn.o_proj.weight": "model-00150-of-00162.safetensors",
+ "model.layers.74.self_attn.q_proj.weight": "model-00150-of-00162.safetensors",
+ "model.layers.74.self_attn.v_proj.weight": "model-00150-of-00162.safetensors",
+ "model.layers.75.input_layernorm.weight": "model-00153-of-00162.safetensors",
+ "model.layers.75.mlp.down_proj.weight": "model-00153-of-00162.safetensors",
+ "model.layers.75.mlp.gate_proj.weight": "model-00152-of-00162.safetensors",
+ "model.layers.75.mlp.up_proj.weight": "model-00153-of-00162.safetensors",
+ "model.layers.75.post_attention_layernorm.weight": "model-00153-of-00162.safetensors",
+ "model.layers.75.self_attn.k_proj.weight": "model-00152-of-00162.safetensors",
+ "model.layers.75.self_attn.o_proj.weight": "model-00152-of-00162.safetensors",
+ "model.layers.75.self_attn.q_proj.weight": "model-00152-of-00162.safetensors",
+ "model.layers.75.self_attn.v_proj.weight": "model-00152-of-00162.safetensors",
+ "model.layers.76.input_layernorm.weight": "model-00155-of-00162.safetensors",
+ "model.layers.76.mlp.down_proj.weight": "model-00155-of-00162.safetensors",
+ "model.layers.76.mlp.gate_proj.weight": "model-00154-of-00162.safetensors",
+ "model.layers.76.mlp.up_proj.weight": "model-00155-of-00162.safetensors",
+ "model.layers.76.post_attention_layernorm.weight": "model-00155-of-00162.safetensors",
+ "model.layers.76.self_attn.k_proj.weight": "model-00154-of-00162.safetensors",
+ "model.layers.76.self_attn.o_proj.weight": "model-00154-of-00162.safetensors",
+ "model.layers.76.self_attn.q_proj.weight": "model-00154-of-00162.safetensors",
+ "model.layers.76.self_attn.v_proj.weight": "model-00154-of-00162.safetensors",
+ "model.layers.77.input_layernorm.weight": "model-00157-of-00162.safetensors",
+ "model.layers.77.mlp.down_proj.weight": "model-00157-of-00162.safetensors",
+ "model.layers.77.mlp.gate_proj.weight": "model-00156-of-00162.safetensors",
+ "model.layers.77.mlp.up_proj.weight": "model-00157-of-00162.safetensors",
+ "model.layers.77.post_attention_layernorm.weight": "model-00157-of-00162.safetensors",
+ "model.layers.77.self_attn.k_proj.weight": "model-00156-of-00162.safetensors",
+ "model.layers.77.self_attn.o_proj.weight": "model-00156-of-00162.safetensors",
+ "model.layers.77.self_attn.q_proj.weight": "model-00156-of-00162.safetensors",
+ "model.layers.77.self_attn.v_proj.weight": "model-00156-of-00162.safetensors",
+ "model.layers.78.input_layernorm.weight": "model-00159-of-00162.safetensors",
+ "model.layers.78.mlp.down_proj.weight": "model-00159-of-00162.safetensors",
+ "model.layers.78.mlp.gate_proj.weight": "model-00158-of-00162.safetensors",
+ "model.layers.78.mlp.up_proj.weight": "model-00159-of-00162.safetensors",
+ "model.layers.78.post_attention_layernorm.weight": "model-00159-of-00162.safetensors",
+ "model.layers.78.self_attn.k_proj.weight": "model-00158-of-00162.safetensors",
+ "model.layers.78.self_attn.o_proj.weight": "model-00158-of-00162.safetensors",
+ "model.layers.78.self_attn.q_proj.weight": "model-00158-of-00162.safetensors",
+ "model.layers.78.self_attn.v_proj.weight": "model-00158-of-00162.safetensors",
+ "model.layers.79.input_layernorm.weight": "model-00162-of-00162.safetensors",
+ "model.layers.79.mlp.down_proj.weight": "model-00162-of-00162.safetensors",
+ "model.layers.79.mlp.gate_proj.weight": "model-00160-of-00162.safetensors",
+ "model.layers.79.mlp.up_proj.weight": "model-00162-of-00162.safetensors",
+ "model.layers.79.post_attention_layernorm.weight": "model-00162-of-00162.safetensors",
+ "model.layers.79.self_attn.k_proj.weight": "model-00160-of-00162.safetensors",
+ "model.layers.79.self_attn.o_proj.weight": "model-00160-of-00162.safetensors",
+ "model.layers.79.self_attn.q_proj.weight": "model-00160-of-00162.safetensors",
+ "model.layers.79.self_attn.v_proj.weight": "model-00160-of-00162.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00019-of-00162.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00019-of-00162.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00018-of-00162.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00019-of-00162.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00019-of-00162.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00018-of-00162.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00018-of-00162.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00018-of-00162.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00018-of-00162.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00021-of-00162.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00021-of-00162.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00020-of-00162.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00021-of-00162.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00021-of-00162.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00020-of-00162.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00020-of-00162.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00020-of-00162.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00020-of-00162.safetensors",
+ "model.norm.weight": "model-00162-of-00162.safetensors"
+ }
+}
diff --git a/model.safetensors.index.json b/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..95550db6f5d38d712ea315d84a3be4967c6150ff
--- /dev/null
+++ b/model.safetensors.index.json
@@ -0,0 +1,730 @@
+{
+ "metadata": {
+ "total_size": 282214825984
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00161-of-00162.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00162.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00003-of-00162.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00003-of-00162.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00002-of-00162.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00003-of-00162.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00003-of-00162.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00002-of-00162.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00002-of-00162.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00002-of-00162.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00002-of-00162.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00005-of-00162.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00005-of-00162.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00004-of-00162.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00005-of-00162.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00005-of-00162.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00004-of-00162.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00004-of-00162.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00004-of-00162.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00004-of-00162.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00023-of-00162.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00023-of-00162.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00022-of-00162.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00023-of-00162.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00023-of-00162.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00022-of-00162.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00022-of-00162.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00022-of-00162.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00022-of-00162.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00025-of-00162.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00025-of-00162.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00024-of-00162.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00025-of-00162.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00025-of-00162.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00024-of-00162.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00024-of-00162.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00024-of-00162.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00024-of-00162.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00027-of-00162.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00027-of-00162.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00026-of-00162.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00027-of-00162.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00027-of-00162.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00026-of-00162.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00026-of-00162.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00026-of-00162.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00026-of-00162.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00029-of-00162.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00029-of-00162.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00028-of-00162.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00029-of-00162.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00029-of-00162.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00028-of-00162.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00028-of-00162.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00028-of-00162.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00028-of-00162.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00031-of-00162.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00031-of-00162.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00030-of-00162.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00031-of-00162.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00031-of-00162.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00030-of-00162.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00030-of-00162.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00030-of-00162.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00030-of-00162.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00033-of-00162.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00033-of-00162.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00032-of-00162.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00033-of-00162.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00033-of-00162.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00032-of-00162.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00032-of-00162.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00032-of-00162.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00032-of-00162.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00035-of-00162.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00035-of-00162.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00034-of-00162.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00035-of-00162.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00035-of-00162.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00034-of-00162.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00034-of-00162.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00034-of-00162.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00034-of-00162.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00037-of-00162.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00037-of-00162.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00036-of-00162.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00037-of-00162.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00037-of-00162.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00036-of-00162.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00036-of-00162.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00036-of-00162.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00036-of-00162.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00039-of-00162.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00039-of-00162.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00038-of-00162.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00039-of-00162.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00039-of-00162.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00038-of-00162.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00038-of-00162.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00038-of-00162.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00038-of-00162.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00041-of-00162.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00041-of-00162.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00040-of-00162.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00041-of-00162.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00041-of-00162.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00040-of-00162.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00040-of-00162.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00040-of-00162.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00040-of-00162.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00007-of-00162.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00007-of-00162.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00006-of-00162.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00007-of-00162.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00007-of-00162.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00006-of-00162.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00006-of-00162.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00006-of-00162.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00006-of-00162.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00043-of-00162.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00043-of-00162.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00042-of-00162.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00043-of-00162.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00043-of-00162.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00042-of-00162.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00042-of-00162.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00042-of-00162.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00042-of-00162.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00045-of-00162.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00045-of-00162.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00044-of-00162.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00045-of-00162.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00045-of-00162.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00044-of-00162.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00044-of-00162.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00044-of-00162.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00044-of-00162.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00047-of-00162.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00047-of-00162.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00046-of-00162.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00047-of-00162.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00047-of-00162.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00046-of-00162.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00046-of-00162.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00046-of-00162.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00046-of-00162.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00049-of-00162.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00049-of-00162.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00048-of-00162.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00049-of-00162.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00049-of-00162.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00048-of-00162.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00048-of-00162.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00048-of-00162.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00048-of-00162.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00051-of-00162.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00051-of-00162.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00050-of-00162.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00051-of-00162.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00051-of-00162.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00050-of-00162.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00050-of-00162.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00050-of-00162.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00050-of-00162.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00053-of-00162.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00053-of-00162.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00052-of-00162.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00053-of-00162.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00053-of-00162.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00052-of-00162.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00052-of-00162.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00052-of-00162.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00052-of-00162.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00055-of-00162.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00055-of-00162.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00054-of-00162.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00055-of-00162.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00055-of-00162.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00054-of-00162.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00054-of-00162.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00054-of-00162.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00054-of-00162.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00057-of-00162.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00057-of-00162.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00056-of-00162.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00057-of-00162.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00057-of-00162.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00056-of-00162.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00056-of-00162.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00056-of-00162.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00056-of-00162.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00059-of-00162.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00059-of-00162.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00058-of-00162.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00059-of-00162.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00059-of-00162.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00058-of-00162.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00058-of-00162.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00058-of-00162.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00058-of-00162.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00061-of-00162.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00061-of-00162.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00060-of-00162.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00061-of-00162.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00061-of-00162.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00060-of-00162.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00060-of-00162.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00060-of-00162.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00060-of-00162.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00009-of-00162.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00009-of-00162.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00008-of-00162.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00009-of-00162.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00009-of-00162.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00008-of-00162.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00008-of-00162.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00008-of-00162.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00008-of-00162.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00063-of-00162.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00063-of-00162.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00062-of-00162.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00063-of-00162.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00063-of-00162.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00062-of-00162.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00062-of-00162.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00062-of-00162.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00062-of-00162.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00065-of-00162.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00065-of-00162.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00064-of-00162.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00065-of-00162.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00065-of-00162.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00064-of-00162.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00064-of-00162.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00064-of-00162.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00064-of-00162.safetensors",
+ "model.layers.32.input_layernorm.weight": "model-00067-of-00162.safetensors",
+ "model.layers.32.mlp.down_proj.weight": "model-00067-of-00162.safetensors",
+ "model.layers.32.mlp.gate_proj.weight": "model-00066-of-00162.safetensors",
+ "model.layers.32.mlp.up_proj.weight": "model-00067-of-00162.safetensors",
+ "model.layers.32.post_attention_layernorm.weight": "model-00067-of-00162.safetensors",
+ "model.layers.32.self_attn.k_proj.weight": "model-00066-of-00162.safetensors",
+ "model.layers.32.self_attn.o_proj.weight": "model-00066-of-00162.safetensors",
+ "model.layers.32.self_attn.q_proj.weight": "model-00066-of-00162.safetensors",
+ "model.layers.32.self_attn.v_proj.weight": "model-00066-of-00162.safetensors",
+ "model.layers.33.input_layernorm.weight": "model-00069-of-00162.safetensors",
+ "model.layers.33.mlp.down_proj.weight": "model-00069-of-00162.safetensors",
+ "model.layers.33.mlp.gate_proj.weight": "model-00068-of-00162.safetensors",
+ "model.layers.33.mlp.up_proj.weight": "model-00069-of-00162.safetensors",
+ "model.layers.33.post_attention_layernorm.weight": "model-00069-of-00162.safetensors",
+ "model.layers.33.self_attn.k_proj.weight": "model-00068-of-00162.safetensors",
+ "model.layers.33.self_attn.o_proj.weight": "model-00068-of-00162.safetensors",
+ "model.layers.33.self_attn.q_proj.weight": "model-00068-of-00162.safetensors",
+ "model.layers.33.self_attn.v_proj.weight": "model-00068-of-00162.safetensors",
+ "model.layers.34.input_layernorm.weight": "model-00071-of-00162.safetensors",
+ "model.layers.34.mlp.down_proj.weight": "model-00071-of-00162.safetensors",
+ "model.layers.34.mlp.gate_proj.weight": "model-00070-of-00162.safetensors",
+ "model.layers.34.mlp.up_proj.weight": "model-00071-of-00162.safetensors",
+ "model.layers.34.post_attention_layernorm.weight": "model-00071-of-00162.safetensors",
+ "model.layers.34.self_attn.k_proj.weight": "model-00070-of-00162.safetensors",
+ "model.layers.34.self_attn.o_proj.weight": "model-00070-of-00162.safetensors",
+ "model.layers.34.self_attn.q_proj.weight": "model-00070-of-00162.safetensors",
+ "model.layers.34.self_attn.v_proj.weight": "model-00070-of-00162.safetensors",
+ "model.layers.35.input_layernorm.weight": "model-00073-of-00162.safetensors",
+ "model.layers.35.mlp.down_proj.weight": "model-00073-of-00162.safetensors",
+ "model.layers.35.mlp.gate_proj.weight": "model-00072-of-00162.safetensors",
+ "model.layers.35.mlp.up_proj.weight": "model-00073-of-00162.safetensors",
+ "model.layers.35.post_attention_layernorm.weight": "model-00073-of-00162.safetensors",
+ "model.layers.35.self_attn.k_proj.weight": "model-00072-of-00162.safetensors",
+ "model.layers.35.self_attn.o_proj.weight": "model-00072-of-00162.safetensors",
+ "model.layers.35.self_attn.q_proj.weight": "model-00072-of-00162.safetensors",
+ "model.layers.35.self_attn.v_proj.weight": "model-00072-of-00162.safetensors",
+ "model.layers.36.input_layernorm.weight": "model-00075-of-00162.safetensors",
+ "model.layers.36.mlp.down_proj.weight": "model-00075-of-00162.safetensors",
+ "model.layers.36.mlp.gate_proj.weight": "model-00074-of-00162.safetensors",
+ "model.layers.36.mlp.up_proj.weight": "model-00075-of-00162.safetensors",
+ "model.layers.36.post_attention_layernorm.weight": "model-00075-of-00162.safetensors",
+ "model.layers.36.self_attn.k_proj.weight": "model-00074-of-00162.safetensors",
+ "model.layers.36.self_attn.o_proj.weight": "model-00074-of-00162.safetensors",
+ "model.layers.36.self_attn.q_proj.weight": "model-00074-of-00162.safetensors",
+ "model.layers.36.self_attn.v_proj.weight": "model-00074-of-00162.safetensors",
+ "model.layers.37.input_layernorm.weight": "model-00077-of-00162.safetensors",
+ "model.layers.37.mlp.down_proj.weight": "model-00077-of-00162.safetensors",
+ "model.layers.37.mlp.gate_proj.weight": "model-00076-of-00162.safetensors",
+ "model.layers.37.mlp.up_proj.weight": "model-00077-of-00162.safetensors",
+ "model.layers.37.post_attention_layernorm.weight": "model-00077-of-00162.safetensors",
+ "model.layers.37.self_attn.k_proj.weight": "model-00076-of-00162.safetensors",
+ "model.layers.37.self_attn.o_proj.weight": "model-00076-of-00162.safetensors",
+ "model.layers.37.self_attn.q_proj.weight": "model-00076-of-00162.safetensors",
+ "model.layers.37.self_attn.v_proj.weight": "model-00076-of-00162.safetensors",
+ "model.layers.38.input_layernorm.weight": "model-00079-of-00162.safetensors",
+ "model.layers.38.mlp.down_proj.weight": "model-00079-of-00162.safetensors",
+ "model.layers.38.mlp.gate_proj.weight": "model-00078-of-00162.safetensors",
+ "model.layers.38.mlp.up_proj.weight": "model-00079-of-00162.safetensors",
+ "model.layers.38.post_attention_layernorm.weight": "model-00079-of-00162.safetensors",
+ "model.layers.38.self_attn.k_proj.weight": "model-00078-of-00162.safetensors",
+ "model.layers.38.self_attn.o_proj.weight": "model-00078-of-00162.safetensors",
+ "model.layers.38.self_attn.q_proj.weight": "model-00078-of-00162.safetensors",
+ "model.layers.38.self_attn.v_proj.weight": "model-00078-of-00162.safetensors",
+ "model.layers.39.input_layernorm.weight": "model-00081-of-00162.safetensors",
+ "model.layers.39.mlp.down_proj.weight": "model-00081-of-00162.safetensors",
+ "model.layers.39.mlp.gate_proj.weight": "model-00080-of-00162.safetensors",
+ "model.layers.39.mlp.up_proj.weight": "model-00081-of-00162.safetensors",
+ "model.layers.39.post_attention_layernorm.weight": "model-00081-of-00162.safetensors",
+ "model.layers.39.self_attn.k_proj.weight": "model-00080-of-00162.safetensors",
+ "model.layers.39.self_attn.o_proj.weight": "model-00080-of-00162.safetensors",
+ "model.layers.39.self_attn.q_proj.weight": "model-00080-of-00162.safetensors",
+ "model.layers.39.self_attn.v_proj.weight": "model-00080-of-00162.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00011-of-00162.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00011-of-00162.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00010-of-00162.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00011-of-00162.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00011-of-00162.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00010-of-00162.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00010-of-00162.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00010-of-00162.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00010-of-00162.safetensors",
+ "model.layers.40.input_layernorm.weight": "model-00083-of-00162.safetensors",
+ "model.layers.40.mlp.down_proj.weight": "model-00083-of-00162.safetensors",
+ "model.layers.40.mlp.gate_proj.weight": "model-00082-of-00162.safetensors",
+ "model.layers.40.mlp.up_proj.weight": "model-00083-of-00162.safetensors",
+ "model.layers.40.post_attention_layernorm.weight": "model-00083-of-00162.safetensors",
+ "model.layers.40.self_attn.k_proj.weight": "model-00082-of-00162.safetensors",
+ "model.layers.40.self_attn.o_proj.weight": "model-00082-of-00162.safetensors",
+ "model.layers.40.self_attn.q_proj.weight": "model-00082-of-00162.safetensors",
+ "model.layers.40.self_attn.v_proj.weight": "model-00082-of-00162.safetensors",
+ "model.layers.41.input_layernorm.weight": "model-00085-of-00162.safetensors",
+ "model.layers.41.mlp.down_proj.weight": "model-00085-of-00162.safetensors",
+ "model.layers.41.mlp.gate_proj.weight": "model-00084-of-00162.safetensors",
+ "model.layers.41.mlp.up_proj.weight": "model-00085-of-00162.safetensors",
+ "model.layers.41.post_attention_layernorm.weight": "model-00085-of-00162.safetensors",
+ "model.layers.41.self_attn.k_proj.weight": "model-00084-of-00162.safetensors",
+ "model.layers.41.self_attn.o_proj.weight": "model-00084-of-00162.safetensors",
+ "model.layers.41.self_attn.q_proj.weight": "model-00084-of-00162.safetensors",
+ "model.layers.41.self_attn.v_proj.weight": "model-00084-of-00162.safetensors",
+ "model.layers.42.input_layernorm.weight": "model-00087-of-00162.safetensors",
+ "model.layers.42.mlp.down_proj.weight": "model-00087-of-00162.safetensors",
+ "model.layers.42.mlp.gate_proj.weight": "model-00086-of-00162.safetensors",
+ "model.layers.42.mlp.up_proj.weight": "model-00087-of-00162.safetensors",
+ "model.layers.42.post_attention_layernorm.weight": "model-00087-of-00162.safetensors",
+ "model.layers.42.self_attn.k_proj.weight": "model-00086-of-00162.safetensors",
+ "model.layers.42.self_attn.o_proj.weight": "model-00086-of-00162.safetensors",
+ "model.layers.42.self_attn.q_proj.weight": "model-00086-of-00162.safetensors",
+ "model.layers.42.self_attn.v_proj.weight": "model-00086-of-00162.safetensors",
+ "model.layers.43.input_layernorm.weight": "model-00089-of-00162.safetensors",
+ "model.layers.43.mlp.down_proj.weight": "model-00089-of-00162.safetensors",
+ "model.layers.43.mlp.gate_proj.weight": "model-00088-of-00162.safetensors",
+ "model.layers.43.mlp.up_proj.weight": "model-00089-of-00162.safetensors",
+ "model.layers.43.post_attention_layernorm.weight": "model-00089-of-00162.safetensors",
+ "model.layers.43.self_attn.k_proj.weight": "model-00088-of-00162.safetensors",
+ "model.layers.43.self_attn.o_proj.weight": "model-00088-of-00162.safetensors",
+ "model.layers.43.self_attn.q_proj.weight": "model-00088-of-00162.safetensors",
+ "model.layers.43.self_attn.v_proj.weight": "model-00088-of-00162.safetensors",
+ "model.layers.44.input_layernorm.weight": "model-00091-of-00162.safetensors",
+ "model.layers.44.mlp.down_proj.weight": "model-00091-of-00162.safetensors",
+ "model.layers.44.mlp.gate_proj.weight": "model-00090-of-00162.safetensors",
+ "model.layers.44.mlp.up_proj.weight": "model-00091-of-00162.safetensors",
+ "model.layers.44.post_attention_layernorm.weight": "model-00091-of-00162.safetensors",
+ "model.layers.44.self_attn.k_proj.weight": "model-00090-of-00162.safetensors",
+ "model.layers.44.self_attn.o_proj.weight": "model-00090-of-00162.safetensors",
+ "model.layers.44.self_attn.q_proj.weight": "model-00090-of-00162.safetensors",
+ "model.layers.44.self_attn.v_proj.weight": "model-00090-of-00162.safetensors",
+ "model.layers.45.input_layernorm.weight": "model-00093-of-00162.safetensors",
+ "model.layers.45.mlp.down_proj.weight": "model-00093-of-00162.safetensors",
+ "model.layers.45.mlp.gate_proj.weight": "model-00092-of-00162.safetensors",
+ "model.layers.45.mlp.up_proj.weight": "model-00093-of-00162.safetensors",
+ "model.layers.45.post_attention_layernorm.weight": "model-00093-of-00162.safetensors",
+ "model.layers.45.self_attn.k_proj.weight": "model-00092-of-00162.safetensors",
+ "model.layers.45.self_attn.o_proj.weight": "model-00092-of-00162.safetensors",
+ "model.layers.45.self_attn.q_proj.weight": "model-00092-of-00162.safetensors",
+ "model.layers.45.self_attn.v_proj.weight": "model-00092-of-00162.safetensors",
+ "model.layers.46.input_layernorm.weight": "model-00095-of-00162.safetensors",
+ "model.layers.46.mlp.down_proj.weight": "model-00095-of-00162.safetensors",
+ "model.layers.46.mlp.gate_proj.weight": "model-00094-of-00162.safetensors",
+ "model.layers.46.mlp.up_proj.weight": "model-00095-of-00162.safetensors",
+ "model.layers.46.post_attention_layernorm.weight": "model-00095-of-00162.safetensors",
+ "model.layers.46.self_attn.k_proj.weight": "model-00094-of-00162.safetensors",
+ "model.layers.46.self_attn.o_proj.weight": "model-00094-of-00162.safetensors",
+ "model.layers.46.self_attn.q_proj.weight": "model-00094-of-00162.safetensors",
+ "model.layers.46.self_attn.v_proj.weight": "model-00094-of-00162.safetensors",
+ "model.layers.47.input_layernorm.weight": "model-00097-of-00162.safetensors",
+ "model.layers.47.mlp.down_proj.weight": "model-00097-of-00162.safetensors",
+ "model.layers.47.mlp.gate_proj.weight": "model-00096-of-00162.safetensors",
+ "model.layers.47.mlp.up_proj.weight": "model-00097-of-00162.safetensors",
+ "model.layers.47.post_attention_layernorm.weight": "model-00097-of-00162.safetensors",
+ "model.layers.47.self_attn.k_proj.weight": "model-00096-of-00162.safetensors",
+ "model.layers.47.self_attn.o_proj.weight": "model-00096-of-00162.safetensors",
+ "model.layers.47.self_attn.q_proj.weight": "model-00096-of-00162.safetensors",
+ "model.layers.47.self_attn.v_proj.weight": "model-00096-of-00162.safetensors",
+ "model.layers.48.input_layernorm.weight": "model-00099-of-00162.safetensors",
+ "model.layers.48.mlp.down_proj.weight": "model-00099-of-00162.safetensors",
+ "model.layers.48.mlp.gate_proj.weight": "model-00098-of-00162.safetensors",
+ "model.layers.48.mlp.up_proj.weight": "model-00099-of-00162.safetensors",
+ "model.layers.48.post_attention_layernorm.weight": "model-00099-of-00162.safetensors",
+ "model.layers.48.self_attn.k_proj.weight": "model-00098-of-00162.safetensors",
+ "model.layers.48.self_attn.o_proj.weight": "model-00098-of-00162.safetensors",
+ "model.layers.48.self_attn.q_proj.weight": "model-00098-of-00162.safetensors",
+ "model.layers.48.self_attn.v_proj.weight": "model-00098-of-00162.safetensors",
+ "model.layers.49.input_layernorm.weight": "model-00101-of-00162.safetensors",
+ "model.layers.49.mlp.down_proj.weight": "model-00101-of-00162.safetensors",
+ "model.layers.49.mlp.gate_proj.weight": "model-00100-of-00162.safetensors",
+ "model.layers.49.mlp.up_proj.weight": "model-00101-of-00162.safetensors",
+ "model.layers.49.post_attention_layernorm.weight": "model-00101-of-00162.safetensors",
+ "model.layers.49.self_attn.k_proj.weight": "model-00100-of-00162.safetensors",
+ "model.layers.49.self_attn.o_proj.weight": "model-00100-of-00162.safetensors",
+ "model.layers.49.self_attn.q_proj.weight": "model-00100-of-00162.safetensors",
+ "model.layers.49.self_attn.v_proj.weight": "model-00100-of-00162.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00013-of-00162.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00013-of-00162.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00012-of-00162.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00013-of-00162.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00013-of-00162.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00012-of-00162.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00012-of-00162.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00012-of-00162.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00012-of-00162.safetensors",
+ "model.layers.50.input_layernorm.weight": "model-00103-of-00162.safetensors",
+ "model.layers.50.mlp.down_proj.weight": "model-00103-of-00162.safetensors",
+ "model.layers.50.mlp.gate_proj.weight": "model-00102-of-00162.safetensors",
+ "model.layers.50.mlp.up_proj.weight": "model-00103-of-00162.safetensors",
+ "model.layers.50.post_attention_layernorm.weight": "model-00103-of-00162.safetensors",
+ "model.layers.50.self_attn.k_proj.weight": "model-00102-of-00162.safetensors",
+ "model.layers.50.self_attn.o_proj.weight": "model-00102-of-00162.safetensors",
+ "model.layers.50.self_attn.q_proj.weight": "model-00102-of-00162.safetensors",
+ "model.layers.50.self_attn.v_proj.weight": "model-00102-of-00162.safetensors",
+ "model.layers.51.input_layernorm.weight": "model-00105-of-00162.safetensors",
+ "model.layers.51.mlp.down_proj.weight": "model-00105-of-00162.safetensors",
+ "model.layers.51.mlp.gate_proj.weight": "model-00104-of-00162.safetensors",
+ "model.layers.51.mlp.up_proj.weight": "model-00105-of-00162.safetensors",
+ "model.layers.51.post_attention_layernorm.weight": "model-00105-of-00162.safetensors",
+ "model.layers.51.self_attn.k_proj.weight": "model-00104-of-00162.safetensors",
+ "model.layers.51.self_attn.o_proj.weight": "model-00104-of-00162.safetensors",
+ "model.layers.51.self_attn.q_proj.weight": "model-00104-of-00162.safetensors",
+ "model.layers.51.self_attn.v_proj.weight": "model-00104-of-00162.safetensors",
+ "model.layers.52.input_layernorm.weight": "model-00107-of-00162.safetensors",
+ "model.layers.52.mlp.down_proj.weight": "model-00107-of-00162.safetensors",
+ "model.layers.52.mlp.gate_proj.weight": "model-00106-of-00162.safetensors",
+ "model.layers.52.mlp.up_proj.weight": "model-00107-of-00162.safetensors",
+ "model.layers.52.post_attention_layernorm.weight": "model-00107-of-00162.safetensors",
+ "model.layers.52.self_attn.k_proj.weight": "model-00106-of-00162.safetensors",
+ "model.layers.52.self_attn.o_proj.weight": "model-00106-of-00162.safetensors",
+ "model.layers.52.self_attn.q_proj.weight": "model-00106-of-00162.safetensors",
+ "model.layers.52.self_attn.v_proj.weight": "model-00106-of-00162.safetensors",
+ "model.layers.53.input_layernorm.weight": "model-00109-of-00162.safetensors",
+ "model.layers.53.mlp.down_proj.weight": "model-00109-of-00162.safetensors",
+ "model.layers.53.mlp.gate_proj.weight": "model-00108-of-00162.safetensors",
+ "model.layers.53.mlp.up_proj.weight": "model-00109-of-00162.safetensors",
+ "model.layers.53.post_attention_layernorm.weight": "model-00109-of-00162.safetensors",
+ "model.layers.53.self_attn.k_proj.weight": "model-00108-of-00162.safetensors",
+ "model.layers.53.self_attn.o_proj.weight": "model-00108-of-00162.safetensors",
+ "model.layers.53.self_attn.q_proj.weight": "model-00108-of-00162.safetensors",
+ "model.layers.53.self_attn.v_proj.weight": "model-00108-of-00162.safetensors",
+ "model.layers.54.input_layernorm.weight": "model-00111-of-00162.safetensors",
+ "model.layers.54.mlp.down_proj.weight": "model-00111-of-00162.safetensors",
+ "model.layers.54.mlp.gate_proj.weight": "model-00110-of-00162.safetensors",
+ "model.layers.54.mlp.up_proj.weight": "model-00111-of-00162.safetensors",
+ "model.layers.54.post_attention_layernorm.weight": "model-00111-of-00162.safetensors",
+ "model.layers.54.self_attn.k_proj.weight": "model-00110-of-00162.safetensors",
+ "model.layers.54.self_attn.o_proj.weight": "model-00110-of-00162.safetensors",
+ "model.layers.54.self_attn.q_proj.weight": "model-00110-of-00162.safetensors",
+ "model.layers.54.self_attn.v_proj.weight": "model-00110-of-00162.safetensors",
+ "model.layers.55.input_layernorm.weight": "model-00113-of-00162.safetensors",
+ "model.layers.55.mlp.down_proj.weight": "model-00113-of-00162.safetensors",
+ "model.layers.55.mlp.gate_proj.weight": "model-00112-of-00162.safetensors",
+ "model.layers.55.mlp.up_proj.weight": "model-00113-of-00162.safetensors",
+ "model.layers.55.post_attention_layernorm.weight": "model-00113-of-00162.safetensors",
+ "model.layers.55.self_attn.k_proj.weight": "model-00112-of-00162.safetensors",
+ "model.layers.55.self_attn.o_proj.weight": "model-00112-of-00162.safetensors",
+ "model.layers.55.self_attn.q_proj.weight": "model-00112-of-00162.safetensors",
+ "model.layers.55.self_attn.v_proj.weight": "model-00112-of-00162.safetensors",
+ "model.layers.56.input_layernorm.weight": "model-00115-of-00162.safetensors",
+ "model.layers.56.mlp.down_proj.weight": "model-00115-of-00162.safetensors",
+ "model.layers.56.mlp.gate_proj.weight": "model-00114-of-00162.safetensors",
+ "model.layers.56.mlp.up_proj.weight": "model-00115-of-00162.safetensors",
+ "model.layers.56.post_attention_layernorm.weight": "model-00115-of-00162.safetensors",
+ "model.layers.56.self_attn.k_proj.weight": "model-00114-of-00162.safetensors",
+ "model.layers.56.self_attn.o_proj.weight": "model-00114-of-00162.safetensors",
+ "model.layers.56.self_attn.q_proj.weight": "model-00114-of-00162.safetensors",
+ "model.layers.56.self_attn.v_proj.weight": "model-00114-of-00162.safetensors",
+ "model.layers.57.input_layernorm.weight": "model-00117-of-00162.safetensors",
+ "model.layers.57.mlp.down_proj.weight": "model-00117-of-00162.safetensors",
+ "model.layers.57.mlp.gate_proj.weight": "model-00116-of-00162.safetensors",
+ "model.layers.57.mlp.up_proj.weight": "model-00117-of-00162.safetensors",
+ "model.layers.57.post_attention_layernorm.weight": "model-00117-of-00162.safetensors",
+ "model.layers.57.self_attn.k_proj.weight": "model-00116-of-00162.safetensors",
+ "model.layers.57.self_attn.o_proj.weight": "model-00116-of-00162.safetensors",
+ "model.layers.57.self_attn.q_proj.weight": "model-00116-of-00162.safetensors",
+ "model.layers.57.self_attn.v_proj.weight": "model-00116-of-00162.safetensors",
+ "model.layers.58.input_layernorm.weight": "model-00119-of-00162.safetensors",
+ "model.layers.58.mlp.down_proj.weight": "model-00119-of-00162.safetensors",
+ "model.layers.58.mlp.gate_proj.weight": "model-00118-of-00162.safetensors",
+ "model.layers.58.mlp.up_proj.weight": "model-00119-of-00162.safetensors",
+ "model.layers.58.post_attention_layernorm.weight": "model-00119-of-00162.safetensors",
+ "model.layers.58.self_attn.k_proj.weight": "model-00118-of-00162.safetensors",
+ "model.layers.58.self_attn.o_proj.weight": "model-00118-of-00162.safetensors",
+ "model.layers.58.self_attn.q_proj.weight": "model-00118-of-00162.safetensors",
+ "model.layers.58.self_attn.v_proj.weight": "model-00118-of-00162.safetensors",
+ "model.layers.59.input_layernorm.weight": "model-00121-of-00162.safetensors",
+ "model.layers.59.mlp.down_proj.weight": "model-00121-of-00162.safetensors",
+ "model.layers.59.mlp.gate_proj.weight": "model-00120-of-00162.safetensors",
+ "model.layers.59.mlp.up_proj.weight": "model-00121-of-00162.safetensors",
+ "model.layers.59.post_attention_layernorm.weight": "model-00121-of-00162.safetensors",
+ "model.layers.59.self_attn.k_proj.weight": "model-00120-of-00162.safetensors",
+ "model.layers.59.self_attn.o_proj.weight": "model-00120-of-00162.safetensors",
+ "model.layers.59.self_attn.q_proj.weight": "model-00120-of-00162.safetensors",
+ "model.layers.59.self_attn.v_proj.weight": "model-00120-of-00162.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00015-of-00162.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00015-of-00162.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00014-of-00162.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00015-of-00162.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00015-of-00162.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00014-of-00162.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00014-of-00162.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00014-of-00162.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00014-of-00162.safetensors",
+ "model.layers.60.input_layernorm.weight": "model-00123-of-00162.safetensors",
+ "model.layers.60.mlp.down_proj.weight": "model-00123-of-00162.safetensors",
+ "model.layers.60.mlp.gate_proj.weight": "model-00122-of-00162.safetensors",
+ "model.layers.60.mlp.up_proj.weight": "model-00123-of-00162.safetensors",
+ "model.layers.60.post_attention_layernorm.weight": "model-00123-of-00162.safetensors",
+ "model.layers.60.self_attn.k_proj.weight": "model-00122-of-00162.safetensors",
+ "model.layers.60.self_attn.o_proj.weight": "model-00122-of-00162.safetensors",
+ "model.layers.60.self_attn.q_proj.weight": "model-00122-of-00162.safetensors",
+ "model.layers.60.self_attn.v_proj.weight": "model-00122-of-00162.safetensors",
+ "model.layers.61.input_layernorm.weight": "model-00125-of-00162.safetensors",
+ "model.layers.61.mlp.down_proj.weight": "model-00125-of-00162.safetensors",
+ "model.layers.61.mlp.gate_proj.weight": "model-00124-of-00162.safetensors",
+ "model.layers.61.mlp.up_proj.weight": "model-00125-of-00162.safetensors",
+ "model.layers.61.post_attention_layernorm.weight": "model-00125-of-00162.safetensors",
+ "model.layers.61.self_attn.k_proj.weight": "model-00124-of-00162.safetensors",
+ "model.layers.61.self_attn.o_proj.weight": "model-00124-of-00162.safetensors",
+ "model.layers.61.self_attn.q_proj.weight": "model-00124-of-00162.safetensors",
+ "model.layers.61.self_attn.v_proj.weight": "model-00124-of-00162.safetensors",
+ "model.layers.62.input_layernorm.weight": "model-00127-of-00162.safetensors",
+ "model.layers.62.mlp.down_proj.weight": "model-00127-of-00162.safetensors",
+ "model.layers.62.mlp.gate_proj.weight": "model-00126-of-00162.safetensors",
+ "model.layers.62.mlp.up_proj.weight": "model-00127-of-00162.safetensors",
+ "model.layers.62.post_attention_layernorm.weight": "model-00127-of-00162.safetensors",
+ "model.layers.62.self_attn.k_proj.weight": "model-00126-of-00162.safetensors",
+ "model.layers.62.self_attn.o_proj.weight": "model-00126-of-00162.safetensors",
+ "model.layers.62.self_attn.q_proj.weight": "model-00126-of-00162.safetensors",
+ "model.layers.62.self_attn.v_proj.weight": "model-00126-of-00162.safetensors",
+ "model.layers.63.input_layernorm.weight": "model-00129-of-00162.safetensors",
+ "model.layers.63.mlp.down_proj.weight": "model-00129-of-00162.safetensors",
+ "model.layers.63.mlp.gate_proj.weight": "model-00128-of-00162.safetensors",
+ "model.layers.63.mlp.up_proj.weight": "model-00129-of-00162.safetensors",
+ "model.layers.63.post_attention_layernorm.weight": "model-00129-of-00162.safetensors",
+ "model.layers.63.self_attn.k_proj.weight": "model-00128-of-00162.safetensors",
+ "model.layers.63.self_attn.o_proj.weight": "model-00128-of-00162.safetensors",
+ "model.layers.63.self_attn.q_proj.weight": "model-00128-of-00162.safetensors",
+ "model.layers.63.self_attn.v_proj.weight": "model-00128-of-00162.safetensors",
+ "model.layers.64.input_layernorm.weight": "model-00131-of-00162.safetensors",
+ "model.layers.64.mlp.down_proj.weight": "model-00131-of-00162.safetensors",
+ "model.layers.64.mlp.gate_proj.weight": "model-00130-of-00162.safetensors",
+ "model.layers.64.mlp.up_proj.weight": "model-00131-of-00162.safetensors",
+ "model.layers.64.post_attention_layernorm.weight": "model-00131-of-00162.safetensors",
+ "model.layers.64.self_attn.k_proj.weight": "model-00130-of-00162.safetensors",
+ "model.layers.64.self_attn.o_proj.weight": "model-00130-of-00162.safetensors",
+ "model.layers.64.self_attn.q_proj.weight": "model-00130-of-00162.safetensors",
+ "model.layers.64.self_attn.v_proj.weight": "model-00130-of-00162.safetensors",
+ "model.layers.65.input_layernorm.weight": "model-00133-of-00162.safetensors",
+ "model.layers.65.mlp.down_proj.weight": "model-00133-of-00162.safetensors",
+ "model.layers.65.mlp.gate_proj.weight": "model-00132-of-00162.safetensors",
+ "model.layers.65.mlp.up_proj.weight": "model-00133-of-00162.safetensors",
+ "model.layers.65.post_attention_layernorm.weight": "model-00133-of-00162.safetensors",
+ "model.layers.65.self_attn.k_proj.weight": "model-00132-of-00162.safetensors",
+ "model.layers.65.self_attn.o_proj.weight": "model-00132-of-00162.safetensors",
+ "model.layers.65.self_attn.q_proj.weight": "model-00132-of-00162.safetensors",
+ "model.layers.65.self_attn.v_proj.weight": "model-00132-of-00162.safetensors",
+ "model.layers.66.input_layernorm.weight": "model-00135-of-00162.safetensors",
+ "model.layers.66.mlp.down_proj.weight": "model-00135-of-00162.safetensors",
+ "model.layers.66.mlp.gate_proj.weight": "model-00134-of-00162.safetensors",
+ "model.layers.66.mlp.up_proj.weight": "model-00135-of-00162.safetensors",
+ "model.layers.66.post_attention_layernorm.weight": "model-00135-of-00162.safetensors",
+ "model.layers.66.self_attn.k_proj.weight": "model-00134-of-00162.safetensors",
+ "model.layers.66.self_attn.o_proj.weight": "model-00134-of-00162.safetensors",
+ "model.layers.66.self_attn.q_proj.weight": "model-00134-of-00162.safetensors",
+ "model.layers.66.self_attn.v_proj.weight": "model-00134-of-00162.safetensors",
+ "model.layers.67.input_layernorm.weight": "model-00137-of-00162.safetensors",
+ "model.layers.67.mlp.down_proj.weight": "model-00137-of-00162.safetensors",
+ "model.layers.67.mlp.gate_proj.weight": "model-00136-of-00162.safetensors",
+ "model.layers.67.mlp.up_proj.weight": "model-00137-of-00162.safetensors",
+ "model.layers.67.post_attention_layernorm.weight": "model-00137-of-00162.safetensors",
+ "model.layers.67.self_attn.k_proj.weight": "model-00136-of-00162.safetensors",
+ "model.layers.67.self_attn.o_proj.weight": "model-00136-of-00162.safetensors",
+ "model.layers.67.self_attn.q_proj.weight": "model-00136-of-00162.safetensors",
+ "model.layers.67.self_attn.v_proj.weight": "model-00136-of-00162.safetensors",
+ "model.layers.68.input_layernorm.weight": "model-00139-of-00162.safetensors",
+ "model.layers.68.mlp.down_proj.weight": "model-00139-of-00162.safetensors",
+ "model.layers.68.mlp.gate_proj.weight": "model-00138-of-00162.safetensors",
+ "model.layers.68.mlp.up_proj.weight": "model-00139-of-00162.safetensors",
+ "model.layers.68.post_attention_layernorm.weight": "model-00139-of-00162.safetensors",
+ "model.layers.68.self_attn.k_proj.weight": "model-00138-of-00162.safetensors",
+ "model.layers.68.self_attn.o_proj.weight": "model-00138-of-00162.safetensors",
+ "model.layers.68.self_attn.q_proj.weight": "model-00138-of-00162.safetensors",
+ "model.layers.68.self_attn.v_proj.weight": "model-00138-of-00162.safetensors",
+ "model.layers.69.input_layernorm.weight": "model-00141-of-00162.safetensors",
+ "model.layers.69.mlp.down_proj.weight": "model-00141-of-00162.safetensors",
+ "model.layers.69.mlp.gate_proj.weight": "model-00140-of-00162.safetensors",
+ "model.layers.69.mlp.up_proj.weight": "model-00141-of-00162.safetensors",
+ "model.layers.69.post_attention_layernorm.weight": "model-00141-of-00162.safetensors",
+ "model.layers.69.self_attn.k_proj.weight": "model-00140-of-00162.safetensors",
+ "model.layers.69.self_attn.o_proj.weight": "model-00140-of-00162.safetensors",
+ "model.layers.69.self_attn.q_proj.weight": "model-00140-of-00162.safetensors",
+ "model.layers.69.self_attn.v_proj.weight": "model-00140-of-00162.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00017-of-00162.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00017-of-00162.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00016-of-00162.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00017-of-00162.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00017-of-00162.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00016-of-00162.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00016-of-00162.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00016-of-00162.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00016-of-00162.safetensors",
+ "model.layers.70.input_layernorm.weight": "model-00143-of-00162.safetensors",
+ "model.layers.70.mlp.down_proj.weight": "model-00143-of-00162.safetensors",
+ "model.layers.70.mlp.gate_proj.weight": "model-00142-of-00162.safetensors",
+ "model.layers.70.mlp.up_proj.weight": "model-00143-of-00162.safetensors",
+ "model.layers.70.post_attention_layernorm.weight": "model-00143-of-00162.safetensors",
+ "model.layers.70.self_attn.k_proj.weight": "model-00142-of-00162.safetensors",
+ "model.layers.70.self_attn.o_proj.weight": "model-00142-of-00162.safetensors",
+ "model.layers.70.self_attn.q_proj.weight": "model-00142-of-00162.safetensors",
+ "model.layers.70.self_attn.v_proj.weight": "model-00142-of-00162.safetensors",
+ "model.layers.71.input_layernorm.weight": "model-00145-of-00162.safetensors",
+ "model.layers.71.mlp.down_proj.weight": "model-00145-of-00162.safetensors",
+ "model.layers.71.mlp.gate_proj.weight": "model-00144-of-00162.safetensors",
+ "model.layers.71.mlp.up_proj.weight": "model-00145-of-00162.safetensors",
+ "model.layers.71.post_attention_layernorm.weight": "model-00145-of-00162.safetensors",
+ "model.layers.71.self_attn.k_proj.weight": "model-00144-of-00162.safetensors",
+ "model.layers.71.self_attn.o_proj.weight": "model-00144-of-00162.safetensors",
+ "model.layers.71.self_attn.q_proj.weight": "model-00144-of-00162.safetensors",
+ "model.layers.71.self_attn.v_proj.weight": "model-00144-of-00162.safetensors",
+ "model.layers.72.input_layernorm.weight": "model-00147-of-00162.safetensors",
+ "model.layers.72.mlp.down_proj.weight": "model-00147-of-00162.safetensors",
+ "model.layers.72.mlp.gate_proj.weight": "model-00146-of-00162.safetensors",
+ "model.layers.72.mlp.up_proj.weight": "model-00147-of-00162.safetensors",
+ "model.layers.72.post_attention_layernorm.weight": "model-00147-of-00162.safetensors",
+ "model.layers.72.self_attn.k_proj.weight": "model-00146-of-00162.safetensors",
+ "model.layers.72.self_attn.o_proj.weight": "model-00146-of-00162.safetensors",
+ "model.layers.72.self_attn.q_proj.weight": "model-00146-of-00162.safetensors",
+ "model.layers.72.self_attn.v_proj.weight": "model-00146-of-00162.safetensors",
+ "model.layers.73.input_layernorm.weight": "model-00149-of-00162.safetensors",
+ "model.layers.73.mlp.down_proj.weight": "model-00149-of-00162.safetensors",
+ "model.layers.73.mlp.gate_proj.weight": "model-00148-of-00162.safetensors",
+ "model.layers.73.mlp.up_proj.weight": "model-00149-of-00162.safetensors",
+ "model.layers.73.post_attention_layernorm.weight": "model-00149-of-00162.safetensors",
+ "model.layers.73.self_attn.k_proj.weight": "model-00148-of-00162.safetensors",
+ "model.layers.73.self_attn.o_proj.weight": "model-00148-of-00162.safetensors",
+ "model.layers.73.self_attn.q_proj.weight": "model-00148-of-00162.safetensors",
+ "model.layers.73.self_attn.v_proj.weight": "model-00148-of-00162.safetensors",
+ "model.layers.74.input_layernorm.weight": "model-00151-of-00162.safetensors",
+ "model.layers.74.mlp.down_proj.weight": "model-00151-of-00162.safetensors",
+ "model.layers.74.mlp.gate_proj.weight": "model-00150-of-00162.safetensors",
+ "model.layers.74.mlp.up_proj.weight": "model-00151-of-00162.safetensors",
+ "model.layers.74.post_attention_layernorm.weight": "model-00151-of-00162.safetensors",
+ "model.layers.74.self_attn.k_proj.weight": "model-00150-of-00162.safetensors",
+ "model.layers.74.self_attn.o_proj.weight": "model-00150-of-00162.safetensors",
+ "model.layers.74.self_attn.q_proj.weight": "model-00150-of-00162.safetensors",
+ "model.layers.74.self_attn.v_proj.weight": "model-00150-of-00162.safetensors",
+ "model.layers.75.input_layernorm.weight": "model-00153-of-00162.safetensors",
+ "model.layers.75.mlp.down_proj.weight": "model-00153-of-00162.safetensors",
+ "model.layers.75.mlp.gate_proj.weight": "model-00152-of-00162.safetensors",
+ "model.layers.75.mlp.up_proj.weight": "model-00153-of-00162.safetensors",
+ "model.layers.75.post_attention_layernorm.weight": "model-00153-of-00162.safetensors",
+ "model.layers.75.self_attn.k_proj.weight": "model-00152-of-00162.safetensors",
+ "model.layers.75.self_attn.o_proj.weight": "model-00152-of-00162.safetensors",
+ "model.layers.75.self_attn.q_proj.weight": "model-00152-of-00162.safetensors",
+ "model.layers.75.self_attn.v_proj.weight": "model-00152-of-00162.safetensors",
+ "model.layers.76.input_layernorm.weight": "model-00155-of-00162.safetensors",
+ "model.layers.76.mlp.down_proj.weight": "model-00155-of-00162.safetensors",
+ "model.layers.76.mlp.gate_proj.weight": "model-00154-of-00162.safetensors",
+ "model.layers.76.mlp.up_proj.weight": "model-00155-of-00162.safetensors",
+ "model.layers.76.post_attention_layernorm.weight": "model-00155-of-00162.safetensors",
+ "model.layers.76.self_attn.k_proj.weight": "model-00154-of-00162.safetensors",
+ "model.layers.76.self_attn.o_proj.weight": "model-00154-of-00162.safetensors",
+ "model.layers.76.self_attn.q_proj.weight": "model-00154-of-00162.safetensors",
+ "model.layers.76.self_attn.v_proj.weight": "model-00154-of-00162.safetensors",
+ "model.layers.77.input_layernorm.weight": "model-00157-of-00162.safetensors",
+ "model.layers.77.mlp.down_proj.weight": "model-00157-of-00162.safetensors",
+ "model.layers.77.mlp.gate_proj.weight": "model-00156-of-00162.safetensors",
+ "model.layers.77.mlp.up_proj.weight": "model-00157-of-00162.safetensors",
+ "model.layers.77.post_attention_layernorm.weight": "model-00157-of-00162.safetensors",
+ "model.layers.77.self_attn.k_proj.weight": "model-00156-of-00162.safetensors",
+ "model.layers.77.self_attn.o_proj.weight": "model-00156-of-00162.safetensors",
+ "model.layers.77.self_attn.q_proj.weight": "model-00156-of-00162.safetensors",
+ "model.layers.77.self_attn.v_proj.weight": "model-00156-of-00162.safetensors",
+ "model.layers.78.input_layernorm.weight": "model-00159-of-00162.safetensors",
+ "model.layers.78.mlp.down_proj.weight": "model-00159-of-00162.safetensors",
+ "model.layers.78.mlp.gate_proj.weight": "model-00158-of-00162.safetensors",
+ "model.layers.78.mlp.up_proj.weight": "model-00159-of-00162.safetensors",
+ "model.layers.78.post_attention_layernorm.weight": "model-00159-of-00162.safetensors",
+ "model.layers.78.self_attn.k_proj.weight": "model-00158-of-00162.safetensors",
+ "model.layers.78.self_attn.o_proj.weight": "model-00158-of-00162.safetensors",
+ "model.layers.78.self_attn.q_proj.weight": "model-00158-of-00162.safetensors",
+ "model.layers.78.self_attn.v_proj.weight": "model-00158-of-00162.safetensors",
+ "model.layers.79.input_layernorm.weight": "model-00162-of-00162.safetensors",
+ "model.layers.79.mlp.down_proj.weight": "model-00162-of-00162.safetensors",
+ "model.layers.79.mlp.gate_proj.weight": "model-00160-of-00162.safetensors",
+ "model.layers.79.mlp.up_proj.weight": "model-00162-of-00162.safetensors",
+ "model.layers.79.post_attention_layernorm.weight": "model-00162-of-00162.safetensors",
+ "model.layers.79.self_attn.k_proj.weight": "model-00160-of-00162.safetensors",
+ "model.layers.79.self_attn.o_proj.weight": "model-00160-of-00162.safetensors",
+ "model.layers.79.self_attn.q_proj.weight": "model-00160-of-00162.safetensors",
+ "model.layers.79.self_attn.v_proj.weight": "model-00160-of-00162.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00019-of-00162.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00019-of-00162.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00018-of-00162.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00019-of-00162.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00019-of-00162.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00018-of-00162.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00018-of-00162.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00018-of-00162.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00018-of-00162.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00021-of-00162.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00021-of-00162.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00020-of-00162.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00021-of-00162.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00021-of-00162.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00020-of-00162.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00020-of-00162.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00020-of-00162.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00020-of-00162.safetensors",
+ "model.norm.weight": "model-00162-of-00162.safetensors"
+ }
+}
diff --git a/special_tokens_map.json b/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..30cb3b3fb062737c55011708077a4b961624f92c
--- /dev/null
+++ b/special_tokens_map.json
@@ -0,0 +1,61 @@
+{
+ "additional_special_tokens": [
+ {
+ "content": "
&": 5909,
+ "CON": 5910,
+ "Ġrepl": 5911,
+ "Ġregular": 5912,
+ "Storage": 5913,
+ "ramework": 5914,
+ "Ġgoal": 5915,
+ "Ġtouch": 5916,
+ ".widget": 5917,
+ "Ġbuilt": 5918,
+ "des": 5919,
+ "Part": 5920,
+ "(re": 5921,
+ "Ġworth": 5922,
+ "hib": 5923,
+ "game": 5924,
+ "91": 5925,
+ "192": 5926,
+ "Ġв": 5927,
+ "acion": 5928,
+ "ĠWhite": 5929,
+ "(type": 5930,
+ "(`": 5931,
+ "81": 5932,
+ "Ġnatural": 5933,
+ "Ġinj": 5934,
+ "Ġcalcul": 5935,
+ "ĠApril": 5936,
+ ".List": 5937,
+ "Ġassociated": 5938,
+ "ĉSystem": 5939,
+ "~~": 5940,
+ "=[": 5941,
+ "Ġstorage": 5942,
+ "Ġbytes": 5943,
+ "Ġtravel": 5944,
+ "Ġsou": 5945,
+ "Ġpassed": 5946,
+ "!=": 5947,
+ "ascript": 5948,
+ ".open": 5949,
+ "Ġgrid": 5950,
+ "Ġbus": 5951,
+ "Ġrecogn": 5952,
+ "Ab": 5953,
+ "Ġhon": 5954,
+ "ĠCenter": 5955,
+ "Ġprec": 5956,
+ "build": 5957,
+ "73": 5958,
+ "HTML": 5959,
+ "ĠSan": 5960,
+ "Ġcountries": 5961,
+ "aled": 5962,
+ "token": 5963,
+ "kt": 5964,
+ "Ġqual": 5965,
+ "Last": 5966,
+ "adow": 5967,
+ "Ġmanufact": 5968,
+ "idad": 5969,
+ "jango": 5970,
+ "Next": 5971,
+ "xf": 5972,
+ ".a": 5973,
+ "Ġporno": 5974,
+ "ĠPM": 5975,
+ "erve": 5976,
+ "iting": 5977,
+ "_th": 5978,
+ "ci": 5979,
+ "=None": 5980,
+ "gs": 5981,
+ "Ġlogin": 5982,
+ "atives": 5983,
+ "']);Ċ": 5984,
+ "Äħ": 5985,
+ "Ġill": 5986,
+ "IA": 5987,
+ "children": 5988,
+ "DO": 5989,
+ "Ġlevels": 5990,
+ "Ġ{{": 5991,
+ "Ġlooks": 5992,
+ "Ġ\"#": 5993,
+ "ToString": 5994,
+ "Ġnecessary": 5995,
+ "ĠĠĠĊ": 5996,
+ "cell": 5997,
+ "Entry": 5998,
+ "Ġ'#": 5999,
+ "Ġextrem": 6000,
+ "Selector": 6001,
+ "Ġplaceholder": 6002,
+ "Load": 6003,
+ "Ġreleased": 6004,
+ "ORE": 6005,
+ "Enumer": 6006,
+ "ĠTV": 6007,
+ "SET": 6008,
+ "inq": 6009,
+ "Press": 6010,
+ "ĠDepartment": 6011,
+ "Ġproperties": 6012,
+ "Ġrespond": 6013,
+ "Search": 6014,
+ "ael": 6015,
+ "Ġrequ": 6016,
+ "ĠBook": 6017,
+ "/Ċ": 6018,
+ "(st": 6019,
+ "Ġfinancial": 6020,
+ "icket": 6021,
+ "_input": 6022,
+ "Ġthreat": 6023,
+ "(in": 6024,
+ "Strip": 6025,
+ "ìĿ": 6026,
+ "ção": 6027,
+ "71": 6028,
+ "Ġevidence": 6029,
+ "));": 6030,
+ "ĠBro": 6031,
+ "Ġ[];Ċ": 6032,
+ "Ġou": 6033,
+ "buf": 6034,
+ "Script": 6035,
+ "dat": 6036,
+ "Ġrule": 6037,
+ "#import": 6038,
+ "=\"/": 6039,
+ "Serial": 6040,
+ "Ġstarting": 6041,
+ "[index": 6042,
+ "ae": 6043,
+ "Ġcontrib": 6044,
+ "session": 6045,
+ "_new": 6046,
+ "utable": 6047,
+ "ober": 6048,
+ "Ġ\"./": 6049,
+ "Ġlogger": 6050,
+ "Ġrecently": 6051,
+ "Ġreturned": 6052,
+ "ččĊ": 6053,
+ ")))Ċ": 6054,
+ "itions": 6055,
+ "Ġseek": 6056,
+ "Ġcommunic": 6057,
+ "Ġ\".": 6058,
+ "Ġusername": 6059,
+ "ECT": 6060,
+ "DS": 6061,
+ "Ġotherwise": 6062,
+ "ĠGerman": 6063,
+ ".aw": 6064,
+ "Adapter": 6065,
+ "ixel": 6066,
+ "Ġsystems": 6067,
+ "Ġdrop": 6068,
+ "83": 6069,
+ "Ġstructure": 6070,
+ "Ġ$(\"#": 6071,
+ "encies": 6072,
+ "anning": 6073,
+ "ĠLink": 6074,
+ "ĠResponse": 6075,
+ "Ġstri": 6076,
+ "ż": 6077,
+ "ĠDB": 6078,
+ "æĹ": 6079,
+ "android": 6080,
+ "submit": 6081,
+ "otion": 6082,
+ "92": 6083,
+ "(@": 6084,
+ ".test": 6085,
+ "82": 6086,
+ "ĊĊĊĊĊĊĊĊ": 6087,
+ "];čĊ": 6088,
+ "Ġdirectly": 6089,
+ "Ġ\"%": 6090,
+ "ris": 6091,
+ "elta": 6092,
+ "AIL": 6093,
+ "){čĊ": 6094,
+ "mine": 6095,
+ "ĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠ": 6096,
+ "(k": 6097,
+ "bon": 6098,
+ "asic": 6099,
+ "pite": 6100,
+ "___": 6101,
+ "Max": 6102,
+ "Ġerrors": 6103,
+ "ĠWhile": 6104,
+ "Ġarguments": 6105,
+ "Ġensure": 6106,
+ "Right": 6107,
+ "-based": 6108,
+ "Web": 6109,
+ "Ġ-=": 6110,
+ "Ġintrodu": 6111,
+ "ĠInst": 6112,
+ "ĠWash": 6113,
+ "ordin": 6114,
+ "join": 6115,
+ "Database": 6116,
+ "Ġgrad": 6117,
+ "Ġusually": 6118,
+ "ITE": 6119,
+ "Props": 6120,
+ "?>Ċ": 6121,
+ "ĠGo": 6122,
+ "@Override": 6123,
+ "REF": 6124,
+ "Ġip": 6125,
+ "ĠAustral": 6126,
+ "Ġist": 6127,
+ "ViewById": 6128,
+ "Ġserious": 6129,
+ "Ġcustomer": 6130,
+ ".prototype": 6131,
+ "odo": 6132,
+ "cor": 6133,
+ "Ġdoor": 6134,
+ "ĠWITHOUT": 6135,
+ "Ġplant": 6136,
+ "Ġbegan": 6137,
+ "Ġdistance": 6138,
+ "()).": 6139,
+ "Ġchance": 6140,
+ "Ġord": 6141,
+ "came": 6142,
+ "pragma": 6143,
+ "Ġprotect": 6144,
+ "ragment": 6145,
+ "ĠNode": 6146,
+ "ening": 6147,
+ "Ñĩ": 6148,
+ "Ġroute": 6149,
+ "ĠSchool": 6150,
+ "hi": 6151,
+ "Ġneighb": 6152,
+ "After": 6153,
+ "licit": 6154,
+ "Ġcontr": 6155,
+ "Ġprimary": 6156,
+ "AA": 6157,
+ ".WriteLine": 6158,
+ "utils": 6159,
+ "Ġbi": 6160,
+ "Red": 6161,
+ ".Linq": 6162,
+ ".object": 6163,
+ "Ġleaders": 6164,
+ "unities": 6165,
+ "Ġgun": 6166,
+ "onth": 6167,
+ "ĠDev": 6168,
+ "FILE": 6169,
+ "Ġcomments": 6170,
+ "_len": 6171,
+ "arrow": 6172,
+ "amount": 6173,
+ "Range": 6174,
+ "sert": 6175,
+ "GridView": 6176,
+ "Ġupdated": 6177,
+ "ĠMo": 6178,
+ "Ġinform": 6179,
+ "ociety": 6180,
+ "ala": 6181,
+ "Access": 6182,
+ "Ġhab": 6183,
+ "Ġcreat": 6184,
+ "_arg": 6185,
+ "ĠJanuary": 6186,
+ "ĠDay": 6187,
+ "\")čĊ": 6188,
+ "uple": 6189,
+ "document": 6190,
+ "gorith": 6191,
+ "menu": 6192,
+ "ĠOver": 6193,
+ "bb": 6194,
+ ".title": 6195,
+ "_out": 6196,
+ "Ġled": 6197,
+ "uri": 6198,
+ "Ġ?>": 6199,
+ "gl": 6200,
+ "Ġbank": 6201,
+ "ayment": 6202,
+ "ĉprintf": 6203,
+ "MD": 6204,
+ "Ġsample": 6205,
+ "Ġhands": 6206,
+ "ĠVersion": 6207,
+ "uario": 6208,
+ "Ġoffers": 6209,
+ "ityEngine": 6210,
+ "Ġshape": 6211,
+ "Ġsleep": 6212,
+ "_point": 6213,
+ "Settings": 6214,
+ "Ġachie": 6215,
+ "Ġsold": 6216,
+ "ota": 6217,
+ ".bind": 6218,
+ "Am": 6219,
+ "Ġsafe": 6220,
+ "Store": 6221,
+ "Ġshared": 6222,
+ "Ġpriv": 6223,
+ "_VAL": 6224,
+ "Ġsens": 6225,
+ "){": 6226,
+ "Ġremember": 6227,
+ "shared": 6228,
+ "element": 6229,
+ "Ġshoot": 6230,
+ "Vert": 6231,
+ "cout": 6232,
+ "Ġenv": 6233,
+ "_label": 6234,
+ "Ġ>Ċ": 6235,
+ "run": 6236,
+ "Ġscene": 6237,
+ "(array": 6238,
+ "device": 6239,
+ "_title": 6240,
+ "agon": 6241,
+ "]čĊ": 6242,
+ "aby": 6243,
+ "Ġbecame": 6244,
+ "boolean": 6245,
+ "Ġpark": 6246,
+ "ĠCode": 6247,
+ "upload": 6248,
+ "riday": 6249,
+ "ĠSeptember": 6250,
+ "Fe": 6251,
+ "Ġsen": 6252,
+ "cing": 6253,
+ "FL": 6254,
+ "Col": 6255,
+ "uts": 6256,
+ "_page": 6257,
+ "inn": 6258,
+ "Ġimplied": 6259,
+ "aling": 6260,
+ "Ġyourself": 6261,
+ ".Count": 6262,
+ "conf": 6263,
+ "Ġaud": 6264,
+ "_init": 6265,
+ ".)": 6266,
+ "Ġwrote": 6267,
+ "003": 6268,
+ "NG": 6269,
+ ".Error": 6270,
+ "ä»": 6271,
+ ".for": 6272,
+ "Ġequal": 6273,
+ "ĠRequest": 6274,
+ "Ġserial": 6275,
+ "Ġallows": 6276,
+ "XX": 6277,
+ "Ġmiddle": 6278,
+ "chor": 6279,
+ "195": 6280,
+ "94": 6281,
+ "ø": 6282,
+ "erval": 6283,
+ ".Column": 6284,
+ "reading": 6285,
+ "Ġescort": 6286,
+ "ĠAugust": 6287,
+ "Ġquickly": 6288,
+ "Ġweap": 6289,
+ "ĠCG": 6290,
+ "ropri": 6291,
+ "ho": 6292,
+ "Ġcop": 6293,
+ "(struct": 6294,
+ "ĠBig": 6295,
+ "Ġvs": 6296,
+ "Ġfrequ": 6297,
+ ".Value": 6298,
+ "Ġactions": 6299,
+ "Ġproper": 6300,
+ "Ġinn": 6301,
+ "Ġobjects": 6302,
+ "Ġmatrix": 6303,
+ "avascript": 6304,
+ "Ġones": 6305,
+ ".group": 6306,
+ "Ġgreen": 6307,
+ "Ġpaint": 6308,
+ "ools": 6309,
+ "ycl": 6310,
+ "encode": 6311,
+ "olt": 6312,
+ "comment": 6313,
+ ".api": 6314,
+ "Dir": 6315,
+ "Ġune": 6316,
+ "izont": 6317,
+ ".position": 6318,
+ "Ġdesigned": 6319,
+ "_val": 6320,
+ "avi": 6321,
+ "iring": 6322,
+ "tab": 6323,
+ "Ġlayer": 6324,
+ "Ġviews": 6325,
+ "Ġreve": 6326,
+ "rael": 6327,
+ "ĠON": 6328,
+ "rics": 6329,
+ "160": 6330,
+ "np": 6331,
+ "Ġcore": 6332,
+ "());čĊ": 6333,
+ "Main": 6334,
+ "Ġexpert": 6335,
+ "ĉĉčĊ": 6336,
+ "_en": 6337,
+ "Ġ/>": 6338,
+ "utter": 6339,
+ "IAL": 6340,
+ "ails": 6341,
+ "ĠKing": 6342,
+ "*/ĊĊ": 6343,
+ "ĠMet": 6344,
+ "_end": 6345,
+ "addr": 6346,
+ "ora": 6347,
+ "Ġir": 6348,
+ "Min": 6349,
+ "Ġsurpr": 6350,
+ "Ġrepe": 6351,
+ "Ġdirectory": 6352,
+ "PUT": 6353,
+ "-S": 6354,
+ "Ġelection": 6355,
+ "haps": 6356,
+ ".pre": 6357,
+ "cm": 6358,
+ "Values": 6359,
+ "Ġ\"Ċ": 6360,
+ "column": 6361,
+ "ivil": 6362,
+ "Login": 6363,
+ "inue": 6364,
+ "93": 6365,
+ "Ġbeautiful": 6366,
+ "Ġsecret": 6367,
+ "(event": 6368,
+ "Ġchat": 6369,
+ "ums": 6370,
+ "Ġorigin": 6371,
+ "Ġeffects": 6372,
+ "Ġmanagement": 6373,
+ "illa": 6374,
+ "tk": 6375,
+ "Ġsetting": 6376,
+ "ĠCour": 6377,
+ "Ġmassage": 6378,
+ "ĉend": 6379,
+ "Ġhappy": 6380,
+ "Ġfinish": 6381,
+ "Ġcamera": 6382,
+ "ĠVer": 6383,
+ "ĠDemocr": 6384,
+ "ĠHer": 6385,
+ "(Q": 6386,
+ "cons": 6387,
+ "ita": 6388,
+ "Ġ'.": 6389,
+ "{}": 6390,
+ "ĉC": 6391,
+ "Ġstuff": 6392,
+ "194": 6393,
+ "Ġ:Ċ": 6394,
+ "ĠAR": 6395,
+ "Task": 6396,
+ "hidden": 6397,
+ "eros": 6398,
+ "IGN": 6399,
+ "atio": 6400,
+ "ĠHealth": 6401,
+ "olute": 6402,
+ "Enter": 6403,
+ "'>": 6404,
+ "ĠTwitter": 6405,
+ "ĠCounty": 6406,
+ "scribe": 6407,
+ "Ġ=>Ċ": 6408,
+ "Ġhy": 6409,
+ "fit": 6410,
+ "Ġmilitary": 6411,
+ "Ġsale": 6412,
+ "required": 6413,
+ "non": 6414,
+ "bootstrap": 6415,
+ "hold": 6416,
+ "rim": 6417,
+ "-old": 6418,
+ "ĠDown": 6419,
+ "Ġmention": 6420,
+ "contact": 6421,
+ "_group": 6422,
+ "oday": 6423,
+ "Ġtown": 6424,
+ "Ġsolution": 6425,
+ "uate": 6426,
+ "elling": 6427,
+ "]->": 6428,
+ "otes": 6429,
+ "ental": 6430,
+ "omen": 6431,
+ "ospital": 6432,
+ "ĠSup": 6433,
+ "_EN": 6434,
+ "Ġslow": 6435,
+ "SESSION": 6436,
+ "Ġblue": 6437,
+ "ago": 6438,
+ "Ġlives": 6439,
+ "Ġ^": 6440,
+ ".un": 6441,
+ "inst": 6442,
+ "enge": 6443,
+ "Ġcustomers": 6444,
+ "Ġcast": 6445,
+ "udget": 6446,
+ "ï¼ģ": 6447,
+ "icens": 6448,
+ "Ġdetermin": 6449,
+ "Selected": 6450,
+ "_pl": 6451,
+ "ueue": 6452,
+ "Ġdark": 6453,
+ "//ĊĊ": 6454,
+ "si": 6455,
+ "thern": 6456,
+ "ĠJapan": 6457,
+ "/w": 6458,
+ "PU": 6459,
+ "ĠEast": 6460,
+ "ovie": 6461,
+ "Ġpackage": 6462,
+ "Ġnor": 6463,
+ "Ġapi": 6464,
+ "bot": 6465,
+ "\"];Ċ": 6466,
+ "_post": 6467,
+ "ulate": 6468,
+ "Ġclub": 6469,
+ "'));Ċ": 6470,
+ "Ġloop": 6471,
+ "PIO": 6472,
+ "ione": 6473,
+ "shot": 6474,
+ "Initial": 6475,
+ "Ġplayed": 6476,
+ "register": 6477,
+ "rought": 6478,
+ "_max": 6479,
+ "acement": 6480,
+ "match": 6481,
+ "raphics": 6482,
+ "AST": 6483,
+ "Ġexisting": 6484,
+ "Ġcomplex": 6485,
+ "DA": 6486,
+ ".Ch": 6487,
+ ".common": 6488,
+ "mo": 6489,
+ "Ġ'../../": 6490,
+ "ito": 6491,
+ "Ġanalysis": 6492,
+ "Ġdeliver": 6493,
+ "ĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĊ": 6494,
+ "idx": 6495,
+ "Ãł": 6496,
+ "ongo": 6497,
+ "ĠEnglish": 6498,
+ "Ċ": 10197,
+ "_default": 10198,
+ "ĠDatabase": 10199,
+ "rep": 10200,
+ "ESS": 10201,
+ "nergy": 10202,
+ ".Find": 10203,
+ "_mask": 10204,
+ "Ġrise": 10205,
+ "Ġkernel": 10206,
+ "::$": 10207,
+ ".Q": 10208,
+ "Ġoffering": 10209,
+ "decl": 10210,
+ "ĠCS": 10211,
+ "Ġlisted": 10212,
+ "Ġmostly": 10213,
+ "enger": 10214,
+ "Ġblocks": 10215,
+ "olo": 10216,
+ "Ġgoverning": 10217,
+ "\\F": 10218,
+ "Ġconcent": 10219,
+ ".getText": 10220,
+ "Ġmb": 10221,
+ "Ġoccurred": 10222,
+ "Ġchanging": 10223,
+ "Scene": 10224,
+ "_CODE": 10225,
+ "Beh": 10226,
+ "\"The": 10227,
+ "Ġtile": 10228,
+ "ĠAssociation": 10229,
+ "ĉP": 10230,
+ "alty": 10231,
+ "_ad": 10232,
+ "odies": 10233,
+ "iated": 10234,
+ "Ġprepared": 10235,
+ "possible": 10236,
+ "Ġmort": 10237,
+ "TEST": 10238,
+ "142": 10239,
+ "Ġignore": 10240,
+ "Ġcalc": 10241,
+ "Ġrs": 10242,
+ "ĠassertEquals": 10243,
+ "Ġsz": 10244,
+ "ĠTHIS": 10245,
+ ".\"Ċ": 10246,
+ "Ġcanvas": 10247,
+ "java": 10248,
+ "Ġdut": 10249,
+ "VALID": 10250,
+ ".sql": 10251,
+ ".input": 10252,
+ "Ġaux": 10253,
+ "Sup": 10254,
+ "Ġartist": 10255,
+ "Vec": 10256,
+ "_TIME": 10257,
+ ".stringify": 10258,
+ "etween": 10259,
+ "ĠCategory": 10260,
+ "Ġ[-": 10261,
+ "ĠDevExpress": 10262,
+ "ĠJul": 10263,
+ "Ġring": 10264,
+ ".ed": 10265,
+ "YY": 10266,
+ "Let": 10267,
+ "TextField": 10268,
+ "Ġflat": 10269,
+ "_print": 10270,
+ "ĠOTHER": 10271,
+ "adian": 10272,
+ "Ġchecked": 10273,
+ "ele": 10274,
+ "Align": 10275,
+ "standing": 10276,
+ "Ġ[],": 10277,
+ "Ġlab": 10278,
+ "ucky": 10279,
+ "ĠChristmas": 10280,
+ "(image": 10281,
+ ".module": 10282,
+ "Ġlots": 10283,
+ "Ġslightly": 10284,
+ "(final": 10285,
+ "erge": 10286,
+ "è¿": 10287,
+ "147": 10288,
+ "ĠPolice": 10289,
+ "143": 10290,
+ "ĠRight": 10291,
+ "Ġaward": 10292,
+ "ĠOS": 10293,
+ "Ġ{}ĊĊ": 10294,
+ "Ġptr": 10295,
+ "oves": 10296,
+ "icated": 10297,
+ "ем": 10298,
+ "Ġmanage": 10299,
+ "oliday": 10300,
+ "Amount": 10301,
+ "oolStrip": 10302,
+ "tbody": 10303,
+ "Nav": 10304,
+ "wrap": 10305,
+ "BB": 10306,
+ "Ġwatching": 10307,
+ "arios": 10308,
+ "Ġoptional": 10309,
+ "_K": 10310,
+ "ĠLicensed": 10311,
+ ".Map": 10312,
+ "Timer": 10313,
+ "ĠAP": 10314,
+ "ĠRev": 10315,
+ "(o": 10316,
+ ",c": 10317,
+ "umin": 10318,
+ "etailed": 10319,
+ "ĠHy": 10320,
+ "Ġblank": 10321,
+ "agger": 10322,
+ "ĠSelf": 10323,
+ "()[": 10324,
+ ".make": 10325,
+ "earn": 10326,
+ "channel": 10327,
+ ";Ċ": 10342,
+ "World": 10343,
+ "Ġpython": 10344,
+ "Ġlif": 10345,
+ "Ġtrav": 10346,
+ "Ġconven": 10347,
+ "company": 10348,
+ "ĠClub": 10349,
+ "138": 10350,
+ "Ver": 10351,
+ "Btn": 10352,
+ "Ġzone": 10353,
+ "products": 10354,
+ "ĠEduc": 10355,
+ "Ġverify": 10356,
+ "ĠMil": 10357,
+ "ono": 10358,
+ "]);ĊĊ": 10359,
+ "ENCE": 10360,
+ "Ġpacket": 10361,
+ "Ġcer": 10362,
+ "Ġenumer": 10363,
+ "Ġpars": 10364,
+ "formed": 10365,
+ "Ġoccup": 10366,
+ "tre": 10367,
+ "Ġexercise": 10368,
+ "Day": 10369,
+ "_sum": 10370,
+ "Ġasking": 10371,
+ "aption": 10372,
+ "Ġorders": 10373,
+ "Ġspending": 10374,
+ "ĠERR": 10375,
+ ".Dis": 10376,
+ "ĠUtil": 10377,
+ "âĢľI": 10378,
+ "\\'": 10379,
+ "?)": 10380,
+ "/>Ċ": 10381,
+ "Ġemot": 10382,
+ "Ġinfluence": 10383,
+ "ĠAfrica": 10384,
+ "atters": 10385,
+ "Ùħ": 10386,
+ ".session": 10387,
+ "Ġchief": 10388,
+ "ĉĉĉĉĉĉĉĉĉĉĉ": 10389,
+ "Ġtom": 10390,
+ "cluded": 10391,
+ "serial": 10392,
+ "_handler": 10393,
+ ".Type": 10394,
+ "aped": 10395,
+ "Ġpolicies": 10396,
+ "-ex": 10397,
+ "-tr": 10398,
+ "blank": 10399,
+ "merce": 10400,
+ "Ġcoverage": 10401,
+ "Ġrc": 10402,
+ "_matrix": 10403,
+ "_box": 10404,
+ "Ġcharges": 10405,
+ "ĠBoston": 10406,
+ "Pe": 10407,
+ "Ġcircum": 10408,
+ "Ġfilled": 10409,
+ "148": 10410,
+ "Ġnorth": 10411,
+ "ictureBox": 10412,
+ "ĉres": 10413,
+ "è®": 10414,
+ "Ġtermin": 10415,
+ "Ġ[â̦": 10416,
+ "IRECT": 10417,
+ "Ġber": 10418,
+ "Ġ\"../../": 10419,
+ "retch": 10420,
+ ".code": 10421,
+ "_col": 10422,
+ "ĠGovernment": 10423,
+ "Ġargv": 10424,
+ "ĠLord": 10425,
+ "asi": 10426,
+ "Exec": 10427,
+ "ĉlet": 10428,
+ "vertis": 10429,
+ "Ġdiscussion": 10430,
+ "enance": 10431,
+ "outube": 10432,
+ "typeof": 10433,
+ "Ġserved": 10434,
+ "ĠPut": 10435,
+ "ĉx": 10436,
+ "Ġsweet": 10437,
+ "Before": 10438,
+ "ategy": 10439,
+ ".of": 10440,
+ "ĠMaterial": 10441,
+ "Sort": 10442,
+ "ONT": 10443,
+ "igital": 10444,
+ "Why": 10445,
+ "Ġsust": 10446,
+ "Ġç": 10447,
+ "abet": 10448,
+ "Ġsegment": 10449,
+ "Ġ[],Ċ": 10450,
+ "ĠMuslim": 10451,
+ "ĠfindViewById": 10452,
+ "cut": 10453,
+ "_TEXT": 10454,
+ "ĠMary": 10455,
+ "Ġloved": 10456,
+ "Ġlie": 10457,
+ "ĠJO": 10458,
+ "Ġisset": 10459,
+ "month": 10460,
+ "Ġprime": 10461,
+ "ti": 10462,
+ "ĠCarol": 10463,
+ "Use": 10464,
+ "146": 10465,
+ "ĠPop": 10466,
+ "ĠSave": 10467,
+ "Interval": 10468,
+ "execute": 10469,
+ "dy": 10470,
+ "ĠIran": 10471,
+ "_cont": 10472,
+ "ĉT": 10473,
+ "Ġphase": 10474,
+ "checkbox": 10475,
+ "week": 10476,
+ "Ġhide": 10477,
+ "Ġtil": 10478,
+ "Ġju": 10479,
+ "Custom": 10480,
+ "burg": 10481,
+ "/M": 10482,
+ "TON": 10483,
+ "Ġquant": 10484,
+ "Ġrub": 10485,
+ "ixels": 10486,
+ "Ġinstalled": 10487,
+ "Ġdump": 10488,
+ "Ġproperly": 10489,
+ "(List": 10490,
+ "Ġdecide": 10491,
+ "apply": 10492,
+ "Has": 10493,
+ "Ġkeeping": 10494,
+ "Ġcitizens": 10495,
+ "Ġjoint": 10496,
+ "pool": 10497,
+ "Socket": 10498,
+ "_op": 10499,
+ "Ġweapon": 10500,
+ "gnore": 10501,
+ "ĠExec": 10502,
+ "otten": 10503,
+ "ĠMS": 10504,
+ "Ġ(-": 10505,
+ "ĠReview": 10506,
+ "Ġexamples": 10507,
+ "Ġtight": 10508,
+ "!(": 10509,
+ "DP": 10510,
+ "ĠMessageBox": 10511,
+ "Ġphotograph": 10512,
+ "164": 10513,
+ "URI": 10514,
+ "ét": 10515,
+ "low": 10516,
+ "ĠGrand": 10517,
+ ".persistence": 10518,
+ "Ġmaintain": 10519,
+ "Ġnums": 10520,
+ "Ġzip": 10521,
+ "ials": 10522,
+ "ĠGets": 10523,
+ "peg": 10524,
+ "ĠBuffer": 10525,
+ "~~~~": 10526,
+ "rastructure": 10527,
+ "ĠPL": 10528,
+ "uen": 10529,
+ "obby": 10530,
+ "sizeof": 10531,
+ "Ġpic": 10532,
+ "Ġseed": 10533,
+ "Ġexperienced": 10534,
+ "Ġodd": 10535,
+ "Ġkick": 10536,
+ "Ġprocedure": 10537,
+ "avigator": 10538,
+ "-on": 10539,
+ ",j": 10540,
+ "ĠAlthough": 10541,
+ "ĠuserId": 10542,
+ "accept": 10543,
+ "Blue": 10544,
+ "IColor": 10545,
+ "layer": 10546,
+ "available": 10547,
+ "Ġends": 10548,
+ ".table": 10549,
+ "Ġdataset": 10550,
+ "bus": 10551,
+ "Ġexplain": 10552,
+ "(pro": 10553,
+ "ĠCommittee": 10554,
+ "Ġnoted": 10555,
+ "]:Ċ": 10556,
+ "Dim": 10557,
+ "stdio": 10558,
+ "154": 10559,
+ ".\",Ċ": 10560,
+ "_source": 10561,
+ "181": 10562,
+ "ĠWeek": 10563,
+ "ĠEdge": 10564,
+ "Ġoperating": 10565,
+ "Ġeste": 10566,
+ "ipl": 10567,
+ "330": 10568,
+ "agination": 10569,
+ "Ġproceed": 10570,
+ "Ġanimation": 10571,
+ ".Models": 10572,
+ "ĠWatch": 10573,
+ "iat": 10574,
+ "Ġoppon": 10575,
+ "/A": 10576,
+ "Report": 10577,
+ "Ġsounds": 10578,
+ "_buf": 10579,
+ "IELD": 10580,
+ "Ġbund": 10581,
+ "ĉget": 10582,
+ ".pr": 10583,
+ "(tmp": 10584,
+ "Ġkid": 10585,
+ ">ĊĊĊ": 10586,
+ "Ġyang": 10587,
+ "NotFound": 10588,
+ "ÑĨ": 10589,
+ "math": 10590,
+ "@gmail": 10591,
+ "ĠLIMIT": 10592,
+ "redients": 10593,
+ "Ġvent": 10594,
+ "avigate": 10595,
+ "Look": 10596,
+ "Ġreligious": 10597,
+ "Ġrand": 10598,
+ "rio": 10599,
+ "(GL": 10600,
+ "_ip": 10601,
+ "uan": 10602,
+ "iciency": 10603,
+ "ĠChange": 10604,
+ ">čĊčĊ": 10605,
+ "ĠEntity": 10606,
+ "Ġrencontre": 10607,
+ "ĠRet": 10608,
+ "plan": 10609,
+ "én": 10610,
+ "BOOL": 10611,
+ "uries": 10612,
+ "train": 10613,
+ "Definition": 10614,
+ "============": 10615,
+ "zz": 10616,
+ "450": 10617,
+ "Animation": 10618,
+ "ĠOK": 10619,
+ "_menu": 10620,
+ ".bl": 10621,
+ "_score": 10622,
+ "Ġacad": 10623,
+ "(System": 10624,
+ "Ġrefresh": 10625,
+ "'=>$": 10626,
+ ".Graphics": 10627,
+ "amento": 10628,
+ "pid": 10629,
+ "tc": 10630,
+ "Ġtips": 10631,
+ "Ġhomes": 10632,
+ "Ġfuel": 10633,
+ "âĸ": 10634,
+ "_helper": 10635,
+ "ĠĠčĊ": 10636,
+ "ĠRoom": 10637,
+ ".Close": 10638,
+ "_attr": 10639,
+ "ĠMount": 10640,
+ "ĠEv": 10641,
+ "arser": 10642,
+ "_top": 10643,
+ "eah": 10644,
+ "ĠDelete": 10645,
+ "ãĢį": 10646,
+ "uke": 10647,
+ "Ġusage": 10648,
+ "aria": 10649,
+ "_dev": 10650,
+ "Ġtexture": 10651,
+ "Ġconversation": 10652,
+ "eper": 10653,
+ "Bean": 10654,
+ "done": 10655,
+ "nonatomic": 10656,
+ "ĠSecond": 10657,
+ "Ġshooting": 10658,
+ "_pre": 10659,
+ "Components": 10660,
+ "Ġ]ĊĊ": 10661,
+ "__,": 10662,
+ "stitution": 10663,
+ ".Char": 10664,
+ ">();ĊĊ": 10665,
+ "Ġpresented": 10666,
+ "Ġwa": 10667,
+ "oker": 10668,
+ "-ĊĊ": 10669,
+ "iner": 10670,
+ "Ġbecoming": 10671,
+ "Ġincident": 10672,
+ "Att": 10673,
+ "162": 10674,
+ "Ġrevealed": 10675,
+ "forc": 10676,
+ "Ġboot": 10677,
+ ".page": 10678,
+ "Enumerator": 10679,
+ "165": 10680,
+ "_->": 10681,
+ "Photo": 10682,
+ "Ġspring": 10683,
+ ".\",": 10684,
+ "ĠDictionary": 10685,
+ "BJECT": 10686,
+ "Ġlocations": 10687,
+ "Ġsamples": 10688,
+ "InputStream": 10689,
+ "ĠBrown": 10690,
+ "Ġstats": 10691,
+ "quality": 10692,
+ "Ñħ": 10693,
+ "-dis": 10694,
+ "Ġhelping": 10695,
+ "Ġped": 10696,
+ "224": 10697,
+ "(se": 10698,
+ "ĠWho": 10699,
+ "alian": 10700,
+ "internal": 10701,
+ "Ġft": 10702,
+ ">().": 10703,
+ "->{": 10704,
+ "Ġmine": 10705,
+ "Ġsector": 10706,
+ "Ġgro": 10707,
+ "Ġopportunities": 10708,
+ "Ġü": 10709,
+ "Ġmp": 10710,
+ "Ġalleged": 10711,
+ "Ġdoubt": 10712,
+ "Mouse": 10713,
+ "About": 10714,
+ "_part": 10715,
+ "Ġchair": 10716,
+ "Ġstopped": 10717,
+ "161": 10718,
+ "loop": 10719,
+ "entities": 10720,
+ "Ġapps": 10721,
+ "ansion": 10722,
+ "Ġmental": 10723,
+ "ĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠ": 10724,
+ "FR": 10725,
+ "Ġdefend": 10726,
+ "care": 10727,
+ "Ġideal": 10728,
+ "/api": 10729,
+ "urface": 10730,
+ "011": 10731,
+ "Ġele": 10732,
+ "ulator": 10733,
+ "ĠRights": 10734,
+ "anguages": 10735,
+ "Ġfunds": 10736,
+ "Ġadapt": 10737,
+ "Attributes": 10738,
+ "Ġdeploy": 10739,
+ "opts": 10740,
+ "Ġvalidation": 10741,
+ "Ġconcerns": 10742,
+ "uce": 10743,
+ ".num": 10744,
+ "ulture": 10745,
+ "ila": 10746,
+ "Ġcup": 10747,
+ "Ġpure": 10748,
+ ".Fore": 10749,
+ "183": 10750,
+ "ĠHashMap": 10751,
+ ".valueOf": 10752,
+ "asm": 10753,
+ "MO": 10754,
+ "Ġcs": 10755,
+ "Ġstores": 10756,
+ "Ġ************************************************************************": 10757,
+ "Ġcommunication": 10758,
+ "mem": 10759,
+ ".EventHandler": 10760,
+ ".Status": 10761,
+ "_right": 10762,
+ ".setOn": 10763,
+ "Sheet": 10764,
+ "Ġidentify": 10765,
+ "enerated": 10766,
+ "ordered": 10767,
+ "Ġ\"[": 10768,
+ "Ġswe": 10769,
+ "Condition": 10770,
+ "ĠAccording": 10771,
+ "Ġprepare": 10772,
+ "Ġrob": 10773,
+ "Pool": 10774,
+ "Ġsport": 10775,
+ "rv": 10776,
+ "ĠRouter": 10777,
+ "Ġalternative": 10778,
+ "([]": 10779,
+ "ĠChicago": 10780,
+ "ipher": 10781,
+ "ische": 10782,
+ "ĠDirector": 10783,
+ "kl": 10784,
+ "ĠWil": 10785,
+ "keys": 10786,
+ "Ġmysql": 10787,
+ "Ġwelcome": 10788,
+ "king": 10789,
+ "ĠManager": 10790,
+ "Ġcaught": 10791,
+ ")}Ċ": 10792,
+ "Score": 10793,
+ "_PR": 10794,
+ "Ġsurvey": 10795,
+ "hab": 10796,
+ "Headers": 10797,
+ "ADER": 10798,
+ "Ġdecor": 10799,
+ "Ġturns": 10800,
+ "Ġradius": 10801,
+ "errupt": 10802,
+ "Cor": 10803,
+ "Ġmel": 10804,
+ "Ġintr": 10805,
+ "(q": 10806,
+ "ĠAC": 10807,
+ "amos": 10808,
+ "MAX": 10809,
+ "ĠGrid": 10810,
+ "ĠJesus": 10811,
+ "ĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠ": 10812,
+ ".DE": 10813,
+ "Ġts": 10814,
+ "Ġlinked": 10815,
+ "free": 10816,
+ "ĠQt": 10817,
+ "Ġ/**čĊ": 10818,
+ "Ġfaster": 10819,
+ "ctr": 10820,
+ "_J": 10821,
+ "DT": 10822,
+ ".Check": 10823,
+ "Ġcombination": 10824,
+ "Ġintended": 10825,
+ "-the": 10826,
+ "-type": 10827,
+ "182": 10828,
+ "ectors": 10829,
+ "ami": 10830,
+ "uting": 10831,
+ "Ġuma": 10832,
+ "XML": 10833,
+ "UCT": 10834,
+ "Ap": 10835,
+ "ĠRandom": 10836,
+ "Ġran": 10837,
+ ".sort": 10838,
+ "Ġsorted": 10839,
+ ".Un": 10840,
+ "401": 10841,
+ "_PER": 10842,
+ "itory": 10843,
+ "Ġpriority": 10844,
+ "ĠGal": 10845,
+ "ĠOld": 10846,
+ "hot": 10847,
+ "ĠDisplay": 10848,
+ "(sub": 10849,
+ "_TH": 10850,
+ "_Y": 10851,
+ "ĠCare": 10852,
+ "loading": 10853,
+ "Kind": 10854,
+ "_handle": 10855,
+ ",,": 10856,
+ "rase": 10857,
+ "_replace": 10858,
+ ".addEventListener": 10859,
+ "ĠRT": 10860,
+ "172": 10861,
+ "Ġentered": 10862,
+ "gers": 10863,
+ "Ġich": 10864,
+ "(start": 10865,
+ "205": 10866,
+ "/app": 10867,
+ "Ġbrother": 10868,
+ "Memory": 10869,
+ "Outlet": 10870,
+ "Ġutf": 10871,
+ "prec": 10872,
+ "Ġnavigation": 10873,
+ "ORK": 10874,
+ "Ġdst": 10875,
+ "Detail": 10876,
+ "Ġaudience": 10877,
+ "Ġdur": 10878,
+ "Ġcluster": 10879,
+ "unched": 10880,
+ "Ġ],": 10881,
+ "Ġcomfortable": 10882,
+ ".values": 10883,
+ "ĠTotal": 10884,
+ "Ġsnap": 10885,
+ "Ġstandards": 10886,
+ "Ġperformed": 10887,
+ "hand": 10888,
+ "(\"@": 10889,
+ "åŃ": 10890,
+ "Ġphil": 10891,
+ "ibr": 10892,
+ "trim": 10893,
+ "Ġforget": 10894,
+ "157": 10895,
+ "Ġdoctor": 10896,
+ ".TextBox": 10897,
+ "377": 10898,
+ "icons": 10899,
+ ",s": 10900,
+ "ĠOp": 10901,
+ "Sm": 10902,
+ "Stop": 10903,
+ "ĉList": 10904,
+ "ĉu": 10905,
+ "Comment": 10906,
+ "_VERSION": 10907,
+ ".Xtra": 10908,
+ "Person": 10909,
+ "rb": 10910,
+ "LOB": 10911,
+ "ĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĊ": 10912,
+ "ĠCentral": 10913,
+ "270": 10914,
+ "ICK": 10915,
+ "raq": 10916,
+ "Ġputting": 10917,
+ "Ġmd": 10918,
+ "ĠLove": 10919,
+ "Program": 10920,
+ "Border": 10921,
+ "oor": 10922,
+ "Ġallowing": 10923,
+ "after": 10924,
+ "Ġentries": 10925,
+ "ĠMaybe": 10926,
+ "]).": 10927,
+ "ĠShort": 10928,
+ ")\\": 10929,
+ ".now": 10930,
+ "friend": 10931,
+ "Ġprefer": 10932,
+ "ĠGPIO": 10933,
+ "osis": 10934,
+ "ĠGameObject": 10935,
+ "Ġskip": 10936,
+ "Ġcompetition": 10937,
+ "_match": 10938,
+ "lications": 10939,
+ "_CONT": 10940,
+ ".groupBox": 10941,
+ "Ġals": 10942,
+ "666": 10943,
+ "\"We": 10944,
+ "_eq": 10945,
+ "lan": 10946,
+ "_search": 10947,
+ "ĠMusic": 10948,
+ "asis": 10949,
+ "Ġbind": 10950,
+ "ĠIsland": 10951,
+ "rum": 10952,
+ "(E": 10953,
+ "Ġseat": 10954,
+ "Video": 10955,
+ "Ġack": 10956,
+ "reek": 10957,
+ "={()": 10958,
+ "Ġrating": 10959,
+ "Ġrestaurant": 10960,
+ "456": 10961,
+ "DEX": 10962,
+ "(buf": 10963,
+ "pping": 10964,
+ "uality": 10965,
+ "Ġleague": 10966,
+ "176": 10967,
+ "Ġfocused": 10968,
+ "apon": 10969,
+ "$data": 10970,
+ "CLUD": 10971,
+ "CLUDING": 10972,
+ "Ġabsolute": 10973,
+ "(query": 10974,
+ "Ġtells": 10975,
+ "Ang": 10976,
+ "Ġcommunities": 10977,
+ "Ġhonest": 10978,
+ "oking": 10979,
+ "Ġapart": 10980,
+ "arity": 10981,
+ "/$": 10982,
+ "_module": 10983,
+ "ĠEnc": 10984,
+ ".an": 10985,
+ ".Config": 10986,
+ "Cre": 10987,
+ "Ġshock": 10988,
+ "ĠArab": 10989,
+ "IENT": 10990,
+ "/re": 10991,
+ "Ġretrie": 10992,
+ "ycler": 10993,
+ "isa": 10994,
+ "ĠOrgan": 10995,
+ ".graph": 10996,
+ "Ġí": 10997,
+ "ĠBAS": 10998,
+ "Enum": 10999,
+ "Ġpossibly": 11000,
+ "ÑĢаÐ": 11001,
+ "ĠJapanese": 11002,
+ "Ġcraft": 11003,
+ "ĠPlace": 11004,
+ "Ġtalent": 11005,
+ "Ġfunding": 11006,
+ "Ġconfirmed": 11007,
+ "Ġcycle": 11008,
+ "/x": 11009,
+ "GE": 11010,
+ "Ġhearing": 11011,
+ "Ġplants": 11012,
+ "Ġmouth": 11013,
+ "pages": 11014,
+ "oria": 11015,
+ "ĠRemove": 11016,
+ "_total": 11017,
+ "Ġod": 11018,
+ "ollapse": 11019,
+ "door": 11020,
+ "Ġbought": 11021,
+ "Ġaddr": 11022,
+ "ARCH": 11023,
+ "_dim": 11024,
+ "dden": 11025,
+ "Ġdecades": 11026,
+ "REQUEST": 11027,
+ "Ġversions": 11028,
+ "fire": 11029,
+ "006": 11030,
+ "Ġmoves": 11031,
+ "fb": 11032,
+ "Ġcoffee": 11033,
+ ".connect": 11034,
+ "ĠRow": 11035,
+ "Ġschema": 11036,
+ "Scope": 11037,
+ "-Type": 11038,
+ "Ġfighting": 11039,
+ "Ġretail": 11040,
+ "Ġmodified": 11041,
+ "TF": 11042,
+ "Files": 11043,
+ "nie": 11044,
+ "_command": 11045,
+ "stone": 11046,
+ "ĠÑĤ": 11047,
+ "_thread": 11048,
+ "Ġbond": 11049,
+ "ĠDevelopment": 11050,
+ "Ġpt": 11051,
+ "FORM": 11052,
+ "plet": 11053,
+ "Ġidentified": 11054,
+ "cpp": 11055,
+ "206": 11056,
+ "225": 11057,
+ "Ġcoding": 11058,
+ "oked": 11059,
+ "ĠMaster": 11060,
+ "IDTH": 11061,
+ "Ġresidents": 11062,
+ "redit": 11063,
+ "ĠPhoto": 11064,
+ "=-": 11065,
+ "unte": 11066,
+ "ateur": 11067,
+ "159": 11068,
+ "_STATE": 11069,
+ "ĠSing": 11070,
+ "Ġsheet": 11071,
+ ".val": 11072,
+ "orse": 11073,
+ "Ġhers": 11074,
+ "Ġdetermined": 11075,
+ "Common": 11076,
+ "Ġwed": 11077,
+ "_queue": 11078,
+ "PH": 11079,
+ "ĠAtl": 11080,
+ "cred": 11081,
+ "/LICENSE": 11082,
+ "Ġmes": 11083,
+ "Ġadvanced": 11084,
+ ".java": 11085,
+ ".Sh": 11086,
+ "Go": 11087,
+ "kill": 11088,
+ "fp": 11089,
+ "_settings": 11090,
+ "Ġpal": 11091,
+ "Ġtruck": 11092,
+ "Ġcombined": 11093,
+ "Ġ\"${": 11094,
+ "ĠCorpor": 11095,
+ "Ġjoined": 11096,
+ "ĠJose": 11097,
+ "ĠCup": 11098,
+ "uns": 11099,
+ "estival": 11100,
+ "levision": 11101,
+ "Ġbroken": 11102,
+ "Ġmarriage": 11103,
+ "ĠWestern": 11104,
+ "Ġrepresents": 11105,
+ "ĠTitle": 11106,
+ "Ġss": 11107,
+ ".Ass": 11108,
+ "ongoose": 11109,
+ "iento": 11110,
+ "<>();Ċ": 11111,
+ "Ġabsolutely": 11112,
+ "Ġsmooth": 11113,
+ "TERN": 11114,
+ "ĠUnless": 11115,
+ "Word": 11116,
+ "Ġmerge": 11117,
+ "igan": 11118,
+ "ĠVol": 11119,
+ "Ġnn": 11120,
+ ".getId": 11121,
+ "Ġз": 11122,
+ "171": 11123,
+ "Ġsexy": 11124,
+ "Ġseeking": 11125,
+ "Single": 11126,
+ ".this": 11127,
+ "179": 11128,
+ "Ġkom": 11129,
+ "bound": 11130,
+ ";\"": 11131,
+ "ĠfontSize": 11132,
+ "_df": 11133,
+ "Ġinjury": 11134,
+ "(H": 11135,
+ "Ġissued": 11136,
+ "_END": 11137,
+ ":self": 11138,
+ "020": 11139,
+ "Ġpatch": 11140,
+ "Ġleaves": 11141,
+ "Ġadopt": 11142,
+ "FileName": 11143,
+ "ãĢIJ": 11144,
+ "Ġexecutive": 11145,
+ "ĠByte": 11146,
+ "]))Ċ": 11147,
+ "Ġnu": 11148,
+ "outing": 11149,
+ "cluding": 11150,
+ "-R": 11151,
+ ".options": 11152,
+ "Ġsubstant": 11153,
+ "avax": 11154,
+ "ĠBUT": 11155,
+ "Ġtechnical": 11156,
+ "Ġtwice": 11157,
+ "Ġmás": 11158,
+ "Ġunivers": 11159,
+ "yr": 11160,
+ "Ġdrag": 11161,
+ "ĠDC": 11162,
+ "Ġsed": 11163,
+ "Ġbot": 11164,
+ "ĠPal": 11165,
+ "ĠHall": 11166,
+ "forcement": 11167,
+ "Ġauch": 11168,
+ ".mod": 11169,
+ "notation": 11170,
+ "_files": 11171,
+ ".line": 11172,
+ "_flag": 11173,
+ "[name": 11174,
+ "Ġresolution": 11175,
+ "Ġbott": 11176,
+ "(\"[": 11177,
+ "ende": 11178,
+ "(arr": 11179,
+ "Free": 11180,
+ "(@\"": 11181,
+ "ĠDistrict": 11182,
+ "PEC": 11183,
+ ":-": 11184,
+ "Picker": 11185,
+ "ĠJo": 11186,
+ "ĠĠĠĠĠĊ": 11187,
+ "ĠRiver": 11188,
+ "_rows": 11189,
+ "Ġhelpful": 11190,
+ "Ġmassive": 11191,
+ "---Ċ": 11192,
+ "Ġmeasures": 11193,
+ "007": 11194,
+ "ĠRuntime": 11195,
+ "Ġworry": 11196,
+ "ĠSpec": 11197,
+ "ĉD": 11198,
+ "ãĢij": 11199,
+ "Ġ){Ċ": 11200,
+ "Ġworse": 11201,
+ "(filename": 11202,
+ "Ġlay": 11203,
+ "Ġmagic": 11204,
+ "ĠTheir": 11205,
+ "oul": 11206,
+ "stroy": 11207,
+ "ĠWhere": 11208,
+ "280": 11209,
+ "Ġsudden": 11210,
+ "Ġdefe": 11211,
+ "Ġbinding": 11212,
+ "Ġflight": 11213,
+ "ĠOnInit": 11214,
+ "ĠWomen": 11215,
+ "ĠPolicy": 11216,
+ "Ġdrugs": 11217,
+ "ishing": 11218,
+ "('../": 11219,
+ "ĠMel": 11220,
+ "peat": 11221,
+ "tor": 11222,
+ "Ġproposed": 11223,
+ "Ġstated": 11224,
+ "_RES": 11225,
+ "Ġeast": 11226,
+ "212": 11227,
+ "ĠCONDITION": 11228,
+ "_desc": 11229,
+ "Ġwinning": 11230,
+ "folio": 11231,
+ "Mapper": 11232,
+ "ĠPan": 11233,
+ "ĠAnge": 11234,
+ ".servlet": 11235,
+ "Ġcopies": 11236,
+ "LM": 11237,
+ "Ġvm": 11238,
+ "åį": 11239,
+ "Ġdictionary": 11240,
+ "Seg": 11241,
+ "177": 11242,
+ "elines": 11243,
+ "ĠSend": 11244,
+ "Ġiron": 11245,
+ "ĠFort": 11246,
+ "166": 11247,
+ ".domain": 11248,
+ "Ġdebate": 11249,
+ "NotNull": 11250,
+ "eq": 11251,
+ "acher": 11252,
+ "lf": 11253,
+ "ĉfmt": 11254,
+ "Ġlawy": 11255,
+ "178": 11256,
+ "ÄŁ": 11257,
+ "ĠMen": 11258,
+ "Ġtrim": 11259,
+ "(NULL": 11260,
+ "Ġ!!": 11261,
+ "Ġpad": 11262,
+ "Ġfollows": 11263,
+ "\"][\"": 11264,
+ "requ": 11265,
+ "ĠEp": 11266,
+ ".github": 11267,
+ "(img": 11268,
+ "eto": 11269,
+ "('\\": 11270,
+ "Services": 11271,
+ "umbnail": 11272,
+ "_main": 11273,
+ "pleted": 11274,
+ "fortunately": 11275,
+ "Ġwindows": 11276,
+ "Ġplane": 11277,
+ "ĠConnection": 11278,
+ ".local": 11279,
+ "uard": 11280,
+ "}\\": 11281,
+ "==\"": 11282,
+ "andon": 11283,
+ "ĠRoy": 11284,
+ "west": 11285,
+ "158": 11286,
+ "iginal": 11287,
+ "emies": 11288,
+ "itz": 11289,
+ "'):Ċ": 11290,
+ "ĠPeter": 11291,
+ "Ġtough": 11292,
+ "Ġreduced": 11293,
+ "Ġcalculate": 11294,
+ "Ġrapid": 11295,
+ "customer": 11296,
+ "Ġefficient": 11297,
+ "Ġmedium": 11298,
+ "Ġfell": 11299,
+ ".ref": 11300,
+ "ĠCas": 11301,
+ "Ġfeedback": 11302,
+ "Speed": 11303,
+ "(output": 11304,
+ "aje": 11305,
+ "Ġcategories": 11306,
+ "Ġfee": 11307,
+ "};": 11308,
+ "Ġdeleted": 11309,
+ "reh": 11310,
+ "Ġproof": 11311,
+ "Desc": 11312,
+ "Build": 11313,
+ "Ġsides": 11314,
+ ".ArrayList": 11315,
+ "-%": 11316,
+ "ĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠ": 11317,
+ "ر": 11318,
+ ".match": 11319,
+ "ли": 11320,
+ "Ġfeels": 11321,
+ "Ġachieve": 11322,
+ "Ġclim": 11323,
+ "_ON": 11324,
+ "ĠCD": 11325,
+ "Ġteacher": 11326,
+ "_current": 11327,
+ "bn": 11328,
+ "_PL": 11329,
+ "isting": 11330,
+ "Enable": 11331,
+ "GEN": 11332,
+ "Ġtv": 11333,
+ "Ġsock": 11334,
+ "Ġplays": 11335,
+ "Ġdiscount": 11336,
+ "ĠKE": 11337,
+ "ĠDebug": 11338,
+ "Fore": 11339,
+ "ĠIraq": 11340,
+ "Ġappearance": 11341,
+ "Mon": 11342,
+ "Ġstyled": 11343,
+ "ĠHuman": 11344,
+ "iot": 11345,
+ "ĠHistory": 11346,
+ "Ġsac": 11347,
+ "ĠCollection": 11348,
+ "Ġrecommended": 11349,
+ ".Selected": 11350,
+ "Ġorganizations": 11351,
+ "Ġdiscovered": 11352,
+ "cohol": 11353,
+ "adas": 11354,
+ "ĠThomas": 11355,
+ "May": 11356,
+ "Ġconserv": 11357,
+ "Ġdomin": 11358,
+ "ĠFollow": 11359,
+ "ĠSection": 11360,
+ "ĠThanks": 11361,
+ "Username": 11362,
+ "Ġrecipe": 11363,
+ "Ġwonderful": 11364,
+ ".sleep": 11365,
+ "_if": 11366,
+ "ĉĊĉĊ": 11367,
+ "orno": 11368,
+ "Ġru": 11369,
+ "_target": 11370,
+ ".\"\"": 11371,
+ "à¦": 11372,
+ "EventArgs": 11373,
+ "Ġinputs": 11374,
+ "Ġfif": 11375,
+ "Ġvision": 11376,
+ "cy": 11377,
+ "ĠSeries": 11378,
+ ")(((": 11379,
+ "Ġtrading": 11380,
+ "Ġmarker": 11381,
+ "Begin": 11382,
+ "Ġtypically": 11383,
+ "Ġcauses": 11384,
+ "dropdown": 11385,
+ "_DEBUG": 11386,
+ "260": 11387,
+ "Ġdetect": 11388,
+ "country": 11389,
+ "!\");Ċ": 11390,
+ "ĉR": 11391,
+ "appy": 11392,
+ "Ġcref": 11393,
+ "('<": 11394,
+ "\"=>": 11395,
+ "ĠLE": 11396,
+ "reader": 11397,
+ "Ġadministr": 11398,
+ "õ": 11399,
+ "ucket": 11400,
+ "Ġfashion": 11401,
+ ".char": 11402,
+ "izar": 11403,
+ "Ġdisable": 11404,
+ "Ġsuc": 11405,
+ "ĠLive": 11406,
+ "issue": 11407,
+ "Ġmetadata": 11408,
+ "flags": 11409,
+ "ĠðŁ": 11410,
+ "Ġcommitted": 11411,
+ "Ġva": 11412,
+ "Ġrough": 11413,
+ "Ġ'''Ċ": 11414,
+ "Ġhighlight": 11415,
+ "_vars": 11416,
+ "VO": 11417,
+ "Ġencoding": 11418,
+ "-Z": 11419,
+ "_sign": 11420,
+ "$(\"#": 11421,
+ "Ġrain": 11422,
+ "reatest": 11423,
+ "ĠEND": 11424,
+ "Selection": 11425,
+ "Ġcandidates": 11426,
+ "Ġsav": 11427,
+ ".Empty": 11428,
+ "Ġdecisions": 11429,
+ "Ġcollabor": 11430,
+ "ridge": 11431,
+ "feed": 11432,
+ "ression": 11433,
+ "Ġpersons": 11434,
+ "VM": 11435,
+ "008": 11436,
+ "ega": 11437,
+ "_BIT": 11438,
+ "According": 11439,
+ "acked": 11440,
+ "Ġdollars": 11441,
+ "_loss": 11442,
+ "ĠCost": 11443,
+ "}\"Ċ": 11444,
+ "Notification": 11445,
+ "Ġprostit": 11446,
+ "Ġauthority": 11447,
+ ".rec": 11448,
+ "Ġspokes": 11449,
+ "ĠToday": 11450,
+ "istant": 11451,
+ "ĠHead": 11452,
+ "âĢĿ.": 11453,
+ "ertainment": 11454,
+ "cean": 11455,
+ "culate": 11456,
+ "Ġven": 11457,
+ "However": 11458,
+ "_arr": 11459,
+ "Ġtokens": 11460,
+ "Graph": 11461,
+ "ĠJud": 11462,
+ "ĠVirgin": 11463,
+ "ĠSerial": 11464,
+ "unning": 11465,
+ "Mutable": 11466,
+ "agers": 11467,
+ ".csv": 11468,
+ "Ġdeveloping": 11469,
+ "Ġinstructions": 11470,
+ "Ġpromise": 11471,
+ "Ġrequested": 11472,
+ "_encode": 11473,
+ "/\"": 11474,
+ "ĠIcon": 11475,
+ "uilt": 11476,
+ "-day": 11477,
+ "Ġintelligence": 11478,
+ ".IS": 11479,
+ "ĠObservable": 11480,
+ "ĠHard": 11481,
+ "Bool": 11482,
+ "211": 11483,
+ "idential": 11484,
+ ".Anchor": 11485,
+ "Ġselling": 11486,
+ "CI": 11487,
+ "AGES": 11488,
+ "tle": 11489,
+ "bur": 11490,
+ "UFFER": 11491,
+ "RY": 11492,
+ "Ġbigger": 11493,
+ "Ġrat": 11494,
+ "Ġfamous": 11495,
+ "Ġtypename": 11496,
+ "Ġexplained": 11497,
+ "}}Ċ": 11498,
+ "Ġnuclear": 11499,
+ "-N": 11500,
+ "Ġcrisis": 11501,
+ "ĠEnter": 11502,
+ "Ġanswers": 11503,
+ "/${": 11504,
+ "/pl": 11505,
+ "Ġsequ": 11506,
+ "_next": 11507,
+ "mask": 11508,
+ "Ġstanding": 11509,
+ "Ġplenty": 11510,
+ "ĠCross": 11511,
+ "ĉret": 11512,
+ "dro": 11513,
+ "ĠCast": 11514,
+ "167": 11515,
+ "=true": 11516,
+ "ĠChris": 11517,
+ "icio": 11518,
+ "ĠMike": 11519,
+ "Decimal": 11520,
+ "addComponent": 11521,
+ "Len": 11522,
+ "Ġcock": 11523,
+ "Ġ#{": 11524,
+ "URN": 11525,
+ "": 11657,
+ "Ġ*=": 11658,
+ "ĠPS": 11659,
+ "Ġdangerous": 11660,
+ "[p": 11661,
+ "OME": 11662,
+ "Other": 11663,
+ "ĠStringBuilder": 11664,
+ "Points": 11665,
+ "heading": 11666,
+ "Ġcurrency": 11667,
+ "Ġpercentage": 11668,
+ "_API": 11669,
+ "Ġclassic": 11670,
+ "thead": 11671,
+ "ĠMO": 11672,
+ "FE": 11673,
+ "Idx": 11674,
+ "await": 11675,
+ "Ġè": 11676,
+ "Ġaccident": 11677,
+ "Ġvariant": 11678,
+ "Ġmyst": 11679,
+ "ĠLand": 11680,
+ "ĠBre": 11681,
+ "Ġharm": 11682,
+ "ĠAcc": 11683,
+ "Ġcharged": 11684,
+ "iones": 11685,
+ "Visibility": 11686,
+ "arry": 11687,
+ "ĠLanguage": 11688,
+ "Ġwalking": 11689,
+ "\".ĊĊ": 11690,
+ "ifer": 11691,
+ "Ġleadership": 11692,
+ ".From": 11693,
+ "ynam": 11694,
+ "Ġtimestamp": 11695,
+ "ipt": 11696,
+ "ĠHas": 11697,
+ "REFER": 11698,
+ "ĠIts": 11699,
+ "Ġlistener": 11700,
+ "UTE": 11701,
+ "213": 11702,
+ "_description": 11703,
+ "Ġexperiences": 11704,
+ "Ġcreates": 11705,
+ "RS": 11706,
+ "cart": 11707,
+ "black": 11708,
+ "Ġchoices": 11709,
+ "war": 11710,
+ "750": 11711,
+ "Ġ'''": 11712,
+ "Ġordered": 11713,
+ "Ġevening": 11714,
+ "Ġpil": 11715,
+ "Ġtun": 11716,
+ "ĠBad": 11717,
+ "(app": 11718,
+ "random": 11719,
+ "Ġexplicit": 11720,
+ "Ġarrived": 11721,
+ "Ġfly": 11722,
+ "Ġeconom": 11723,
+ "-mail": 11724,
+ "Ġlists": 11725,
+ "Ġarchitect": 11726,
+ "234": 11727,
+ "ĠPay": 11728,
+ "Ġds": 11729,
+ "ĠSol": 11730,
+ "Ġvehicles": 11731,
+ "Hz": 11732,
+ "-com": 11733,
+ "Ġking": 11734,
+ "_equal": 11735,
+ "ĠHelp": 11736,
+ "Ġabuse": 11737,
+ "480": 11738,
+ "169": 11739,
+ "--;Ċ": 11740,
+ "Ġextr": 11741,
+ "Ġchemical": 11742,
+ "ä¿": 11743,
+ "Ġorient": 11744,
+ "Ġbreath": 11745,
+ "ĠSpace": 11746,
+ "(element": 11747,
+ "wait": 11748,
+ "DED": 11749,
+ "igma": 11750,
+ "Ġentr": 11751,
+ "Ġsob": 11752,
+ "-name": 11753,
+ "Ġaffected": 11754,
+ "ika": 11755,
+ "Ġcoal": 11756,
+ "_work": 11757,
+ "Ġhundreds": 11758,
+ "Ġpolitics": 11759,
+ "subject": 11760,
+ "Ġconsumer": 11761,
+ "ANGE": 11762,
+ "Ġrepeated": 11763,
+ "Send": 11764,
+ "Ġ#[": 11765,
+ "Ġprotocol": 11766,
+ "Ġleads": 11767,
+ "useum": 11768,
+ "Every": 11769,
+ "808": 11770,
+ "174": 11771,
+ "Import": 11772,
+ "(count": 11773,
+ "Ġchallenges": 11774,
+ "Ġnovel": 11775,
+ "Ġdepart": 11776,
+ "bits": 11777,
+ ".Current": 11778,
+ "Ġ`${": 11779,
+ "oting": 11780,
+ "(\\": 11781,
+ "Ġcreative": 11782,
+ "Ġbuff": 11783,
+ "Ġintroduced": 11784,
+ "usic": 11785,
+ "modules": 11786,
+ "Are": 11787,
+ "-doc": 11788,
+ "language": 11789,
+ "_cache": 11790,
+ "Ġtod": 11791,
+ "?>": 11792,
+ "omething": 11793,
+ "Ġhun": 11794,
+ "åº": 11795,
+ "aters": 11796,
+ "Intent": 11797,
+ "Ġimplemented": 11798,
+ "ĠCase": 11799,
+ "Children": 11800,
+ "Ġnotification": 11801,
+ "Renderer": 11802,
+ "Wrapper": 11803,
+ "Objects": 11804,
+ "tl": 11805,
+ ".Contains": 11806,
+ "Plugin": 11807,
+ ".row": 11808,
+ "Ġforg": 11809,
+ "Ġpermit": 11810,
+ "Ġtargets": 11811,
+ "ĠIF": 11812,
+ "Ġtip": 11813,
+ "sex": 11814,
+ "Ġsupports": 11815,
+ "Ġfold": 11816,
+ "photo": 11817,
+ "},čĊ": 11818,
+ "Ġgoogle": 11819,
+ "$('#": 11820,
+ "Ġsharing": 11821,
+ "Ġgoods": 11822,
+ "vs": 11823,
+ "ĠDan": 11824,
+ "Rate": 11825,
+ "ĠMartin": 11826,
+ "Ġmanner": 11827,
+ "lie": 11828,
+ ".The": 11829,
+ "Internal": 11830,
+ "ĠCONTR": 11831,
+ "Mock": 11832,
+ "RIGHT": 11833,
+ "Ġ'{": 11834,
+ "Ġcontrols": 11835,
+ "Mat": 11836,
+ "Ġmand": 11837,
+ "Ġextended": 11838,
+ "Ok": 11839,
+ "Ġembed": 11840,
+ "Ġplanet": 11841,
+ "ĠNon": 11842,
+ "-ch": 11843,
+ ")\",": 11844,
+ "epar": 11845,
+ "Ġbelieved": 11846,
+ "ĠEnvironment": 11847,
+ "ĠFriend": 11848,
+ "-res": 11849,
+ "Ġhandling": 11850,
+ "nic": 11851,
+ "-level": 11852,
+ "scri": 11853,
+ "Xml": 11854,
+ "BE": 11855,
+ "ungen": 11856,
+ "Ġalter": 11857,
+ "[idx": 11858,
+ "Pop": 11859,
+ "cam": 11860,
+ "Ġ(((": 11861,
+ "Ġshipping": 11862,
+ "Ġbattery": 11863,
+ "iddleware": 11864,
+ "MC": 11865,
+ "Ġimpl": 11866,
+ "otation": 11867,
+ "ĠLab": 11868,
+ "