y8lc2wpi committed
Commit c986c97 · 1 Parent(s): 89a6682

Upload folder using huggingface_hub

.ipynb_checkpoints/config-checkpoint.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "/home/ubuntu/data/amadeus",
+  "_name_or_path": "/notebooks/amadeus",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -10,7 +10,7 @@
   "initializer_range": 0.02,
   "intermediate_size": 11008,
   "max_position_embeddings": 2048,
-  "max_sequence_length": 2048,
+  "max_sequence_length": 131072,
   "model_type": "llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
config.json CHANGED
@@ -10,7 +10,7 @@
   "initializer_range": 0.02,
   "intermediate_size": 11008,
   "max_position_embeddings": 2048,
-  "max_sequence_length": 2048,
+  "max_sequence_length": 131072,
   "model_type": "llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
pytorch_model-00001-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:05d5d65136fb368e9fc997e97250004e10c7d6f2c9c73af5367a85c712cebecf
+oid sha256:b5fc3ca8716bc6c5709b93c848b26eeadb7d7cada1f70059535f220e5686c975
 size 9946970285
pytorch_model-00002-of-00002.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0bf91be4aae43eb4bd016114a8f0f01876bd0811b736e7b41d0735ed8bdc6373
+oid sha256:678809a5667d958c3e746b183dcab09952fd20b504829e0aac986c9ffa88286a
 size 3651041161
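
Both weight shards keep their exact byte sizes (9946970285 and 3651041161) while their Git LFS object ids change, i.e., the pointer files now reference blobs of the same size but different contents. An LFS pointer's `oid` is the SHA-256 of the blob it stands in for, so a downloaded shard can be checked against it. A minimal sketch, assuming the shard has been fetched to the hypothetical local path shown:

    import hashlib

    def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
        """Stream the file through SHA-256; the shards are ~10 GB, so read in chunks."""
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                digest.update(chunk)
        return digest.hexdigest()

    # Expected oid from the first shard's pointer file after this commit.
    expected = "b5fc3ca8716bc6c5709b93c848b26eeadb7d7cada1f70059535f220e5686c975"

    # Hypothetical local path; substitute wherever the shard was downloaded.
    actual = sha256_of("pytorch_model-00001-of-00002.bin")
    assert actual == expected, f"checksum mismatch: {actual}"
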