Upload FalconForCausalLM
Browse files
Changed files: config.json (+1 -1)
config.json
CHANGED
|
@@ -48,6 +48,6 @@
|
|
| 48 |
"rope_theta": 10000.0,
|
| 49 |
"torch_dtype": "float16",
|
| 50 |
"transformers_version": "4.38.2",
|
| 51 |
-
"use_cache":
|
| 52 |
"vocab_size": 65024
|
| 53 |
}
|
|
|
|
| 48 |
"rope_theta": 10000.0,
|
| 49 |
"torch_dtype": "float16",
|
| 50 |
"transformers_version": "4.38.2",
|
| 51 |
+
"use_cache": true,
|
| 52 |
"vocab_size": 65024
|
| 53 |
}
|