ayushutkarsh committed on
Commit
1850347
·
1 Parent(s): 1463d8d

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +2 -2
config.json CHANGED
@@ -15,8 +15,8 @@
15
  "layer_norm_eps": 1e-05,
16
  "max_position_embeddings": 514,
17
  "model_type": "roberta",
18
- "num_attention_heads": 18,
19
- "num_hidden_layers": 9,
20
  "pad_token_id": 1,
21
  "position_embedding_type": "absolute",
22
  "transformers_version": "4.27.4",
 
15
  "layer_norm_eps": 1e-05,
16
  "max_position_embeddings": 514,
17
  "model_type": "roberta",
18
+ "num_attention_heads": 16,
19
+ "num_hidden_layers": 3,
20
  "pad_token_id": 1,
21
  "position_embedding_type": "absolute",
22
  "transformers_version": "4.27.4",