{
    "module": "keras_nlp.src.models.bert.bert_tokenizer",
    "class_name": "BertTokenizer",
    "config": {
        "name": "bert_tokenizer",
        "trainable": true,
        "dtype": "int32",
        "vocabulary": null,
        "sequence_length": null,
        "lowercase": true,
        "strip_accents": false,
        "split": true,
        "suffix_indicator": "##",
        "oov_token": "[UNK]"
    },
    "registered_name": "keras_nlp>BertTokenizer",
    "assets": [
        "assets/tokenizer/vocabulary.txt"
    ],
    "weights": null
}