aframson committed on
Commit
22bb3bd
·
1 Parent(s): bd5f213
Files changed (1) hide show
  1. config.json +6 -6
config.json CHANGED
@@ -8,16 +8,16 @@
8
  "AutoModelForCausalLM": "modelLM.OBILanguageModel",
9
  "AutoModelForQuestionAnswering": "modelLM.OBILanguageModel"
10
  },
11
- "batch_size": 6,
12
- "block_size": 6,
13
  "device": "cpu",
14
  "eval_interval": 100,
15
  "hidden_dropout_prob": 0.1,
16
- "hidden_size": 4,
17
  "learning_rate": 0.001,
18
- "max_iters": 700,
19
- "num_attention_heads": 2,
20
- "num_hidden_layers": 2,
21
  "torch_dtype": "float32",
22
  "transformers_version": "4.30.2",
23
  "vocab_size": 5000
 
8
  "AutoModelForCausalLM": "modelLM.OBILanguageModel",
9
  "AutoModelForQuestionAnswering": "modelLM.OBILanguageModel"
10
  },
11
+ "batch_size": 160,
12
+ "block_size": 120,
13
  "device": "cpu",
14
  "eval_interval": 100,
15
  "hidden_dropout_prob": 0.1,
16
+ "hidden_size": 24,
17
  "learning_rate": 0.001,
18
+ "max_iters": 1000,
19
+ "num_attention_heads": 6,
20
+ "num_hidden_layers": 6,
21
  "torch_dtype": "float32",
22
  "transformers_version": "4.30.2",
23
  "vocab_size": 5000