File size: 91 Bytes
{
  "_from_model_config": true,
  "transformers_version": "4.28.1",
  "use_cache": false
}
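The keys above match the format of a transformers generation_config.json. Assuming that is what this file is, the sketch below shows how the same values could be loaded into a GenerationConfig object; the dict simply mirrors the file contents, and any parameter not listed falls back to the library defaults.

from transformers import GenerationConfig

# Mirror of the JSON contents shown above (assumed to be a generation_config.json).
config_dict = {
    "_from_model_config": True,   # config was auto-derived from the model config
    "transformers_version": "4.28.1",
    "use_cache": False,           # disable the key/value cache during generation
}

# from_dict mirrors what from_pretrained() does after parsing the JSON file.
generation_config = GenerationConfig.from_dict(config_dict)
print(generation_config.use_cache)  # -> False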