Add attention_bias to make TGI work #68
opened by philschmid (HF staff)

config.json CHANGED (+1 -0)
@@ -133,5 +133,6 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.39.3",
   "use_cache": true,
+  "attention_bias": false,
   "vocab_size": 32064
 }
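
For reference, one way to confirm that the new key is picked up is to load the config from this PR's ref with transformers. This is a minimal sketch; "org/model" is a placeholder for this repository's actual Hub id, which is not named in the diff:

# Minimal check that the config from this PR exposes attention_bias.
# "org/model" is a placeholder; substitute this repository's Hub id.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("org/model", revision="refs/pr/68")
print(config.attention_bias)  # expected: False with this change applied

Setting "attention_bias": false marks the attention projections as bias-free, which lets consumers of the config such as TGI resolve the attribute instead of failing on a missing key.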