Muennighoff committed
Commit 56dd468 · 1 Parent(s): 8b2c899

Update config.json

Files changed (1)
  1. config.json +1 -2
config.json CHANGED
@@ -7,7 +7,6 @@
   "attention_dropout": 0.1,
   "bias_dropout_fusion": true,
   "bos_token_id": 0,
-  "dtype": "bfloat16",
   "eos_token_id": 0,
   "hidden_dropout": 0.1,
   "hidden_size": 64,
@@ -22,7 +21,7 @@
   "seq_length": 20,
   "skip_bias_add": true,
   "slow_but_exact": true,
-  "torch_dtype": "bfloat16",
+  "torch_dtype": "torch.bfloat16",
   "transformers_version": "4.21.0.dev0",
   "use_cache": false,
   "vocab_size": 250880
 