mahmoudOmar03 committed (verified)
Commit 377c88c · Parent: 997c574

Delete config.json

Files changed (1)
  1. config.json +0 -14
config.json DELETED
@@ -1,14 +0,0 @@
- {
-   "model_type": "llama",
-   "hidden_size": 512,
-   "num_hidden_layers": 12,
-   "num_attention_heads": 8,
-   "intermediate_size": 2048,
-   "hidden_act": "gelu",
-   "initializer_range": 0.02,
-   "layer_norm_eps": 1e-12,
-   "max_position_embeddings": 512,
-   "vocab_size": 32000,
-   "dropout": 0.1,
-   "attention_probs_dropout_prob": 0.1
- }
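
For context, here is a minimal sketch (assuming the Hugging Face transformers library, which repositories with a config.json like this typically target) of how the deleted configuration maps onto a LlamaConfig. Note that the deleted file mixed BERT-style keys (layer_norm_eps, attention_probs_dropout_prob) with "model_type": "llama"; LlamaConfig itself uses rms_norm_eps and defaults hidden_act to "silu", so the sketch passes only the keys LlamaConfig recognizes.

    # Sketch: recreating the deleted configuration with transformers.
    # Only recognized LlamaConfig parameters from the deleted file are passed;
    # the BERT-style keys (layer_norm_eps, dropout, attention_probs_dropout_prob)
    # are omitted because LlamaConfig does not define them.
    from transformers import LlamaConfig

    config = LlamaConfig(
        hidden_size=512,
        num_hidden_layers=12,
        num_attention_heads=8,
        intermediate_size=2048,
        hidden_act="gelu",            # deleted file's value; LlamaConfig default is "silu"
        initializer_range=0.02,
        max_position_embeddings=512,
        vocab_size=32000,
    )

    config.save_pretrained(".")  # writes a new config.json to the current directory
    print(config.model_type)     # "llama"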