deepparag committed on
Commit
77760d9
·
1 Parent(s): ffd35ed

Fixed errors

Browse files
Files changed (1) hide show
  1. config.json +6 -6
config.json CHANGED
@@ -28,12 +28,12 @@
28
  "summary_use_proj": true,
29
  "task_specific_params": {
30
  "conversational": {
31
- "max_length"=200,
32
- "no_repeat_ngram_size"=3,
33
- "do_sample"=True,
34
- "top_k"=100,
35
- "top_p"=0.7,
36
- "temperature"=0.8
37
  }
38
  },
39
  "torch_dtype": "float32",
 
28
  "summary_use_proj": true,
29
  "task_specific_params": {
30
  "conversational": {
31
+ "max_length":200,
32
+ "no_repeat_ngram_size":3,
33
+ "do_sample":True,
34
+ "top_k":100,
35
+ "top_p":0.7,
36
+ "temperature":0.8
37
  }
38
  },
39
  "torch_dtype": "float32",