Add attention_bias to make TGI work
#64
by philschmid
- opened
- config.json +1 -0
config.json
CHANGED
@@ -31,5 +31,6 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.39.3",
   "use_cache": true,
+  "attention_bias": false,
   "vocab_size": 32064
 }
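For reference, a minimal sketch (not part of the PR; the local file path is an assumption) of how to confirm the new key is present after the change is merged. TGI expects `attention_bias` in the model config, which is the motivation for this PR:

```python
import json

# Load the updated config.json (path assumed; adjust to your local checkout).
with open("config.json") as f:
    cfg = json.load(f)

# After this PR the key is set explicitly instead of being absent.
print(cfg.get("attention_bias"))  # expected output: False
```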