ybelkada committed
Commit db2c4b6
1 Parent(s): 9186b74

Update config.json

Files changed (1): config.json (+1 -1)
config.json CHANGED
@@ -23,7 +23,7 @@
   "relative_attention_max_distance": 128,
   "relative_attention_num_buckets": 32,
   "tie_word_embeddings": false,
-  "torch_dtype": "float32",
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.24.0.dev0",
   "use_cache": true,
   "vocab_size": 32128