lysandre (HF staff) committed on
Commit 8a03e86
1 Parent(s): 120b131

Default to 'eager' attention implementation (#22)


- [WIP] Default to 'eager' attention implementation (18e0d25d09aa0f16514fce2ae1b197482f557139)

Files changed (1)
  1. config.json +2 -1
config.json CHANGED
@@ -29,5 +29,6 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.42.0.dev0",
   "use_cache": true,
-  "vocab_size": 256000
+  "vocab_size": 256000,
+  "_attn_implementation": "eager"
 }
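
For reference, a minimal sketch (not part of this commit) of how the new "_attn_implementation": "eager" entry behaves when the checkpoint is loaded with transformers. The checkpoint path below is an illustrative placeholder, and the attn_implementation load-time argument assumes a recent transformers release (>= 4.36, matching the 4.42.0.dev0 version recorded in this config).

# Minimal sketch, assuming a recent transformers release; the checkpoint
# path is an illustrative placeholder, not the real repository id.
from transformers import AutoConfig, AutoModelForCausalLM

model_id = "path/to/this-checkpoint"  # illustrative placeholder

# With the eager default written into config.json, a plain load is expected
# to use the eager attention code path rather than SDPA.
config = AutoConfig.from_pretrained(model_id)
print(config._attn_implementation)  # expected: "eager"

# The config default can still be overridden explicitly at load time.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    attn_implementation="sdpa",  # or "eager" / "flash_attention_2"
)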