Add attention_bias to make TGI work
#3
by
philschmid
- opened
- config.json +1 -0
config.json
CHANGED
|
@@ -43,5 +43,6 @@
|
|
| 43 |
"torch_dtype": "bfloat16",
|
| 44 |
"transformers_version": "4.38.1",
|
| 45 |
"use_cache": true,
|
|
|
|
| 46 |
"vocab_size": 100352
|
| 47 |
}
|
|
|
|
| 43 |
"torch_dtype": "bfloat16",
|
| 44 |
"transformers_version": "4.38.1",
|
| 45 |
"use_cache": true,
|
| 46 |
+ "attention_bias": false,
|
| 47 |
"vocab_size": 100352
|
| 48 |
}
|