{
    "module": "keras_nlp.src.models.gpt2.gpt2_causal_lm",
    "class_name": "GPT2CausalLM",
    "config": {
        "backbone": {
            "module": "keras_nlp.src.models.gpt2.gpt2_backbone",
            "class_name": "GPT2Backbone",
            "config": {
                "name": "gpt2_backbone",
                "trainable": true,
                "vocabulary_size": 50257,
                "num_layers": 12,
                "num_heads": 12,
                "hidden_dim": 768,
                "intermediate_dim": 3072,
                "dropout": 0.1,
                "max_sequence_length": 1024
            },
            "registered_name": "keras_nlp>GPT2Backbone"
        },
        "preprocessor": {
            "module": "keras_nlp.src.models.gpt2.gpt2_causal_lm_preprocessor",
            "class_name": "GPT2CausalLMPreprocessor",
            "config": {
                "name": "gpt2_causal_lm_preprocessor",
                "trainable": true,
                "dtype": "float32",
                "tokenizer": {
                    "module": "keras_nlp.src.models.gpt2.gpt2_tokenizer",
                    "class_name": "GPT2Tokenizer",
                    "config": {
                        "name": "gpt2_tokenizer",
                        "trainable": true,
                        "dtype": "int32",
                        "sequence_length": null,
                        "add_prefix_space": false
                    },
                    "registered_name": "keras_nlp>GPT2Tokenizer"
                },
                "sequence_length": 1024,
                "add_start_token": true,
                "add_end_token": true
            },
            "registered_name": "keras_nlp>GPT2CausalLMPreprocessor"
        },
        "name": "gpt2_causal_lm"
    },
    "registered_name": "keras_nlp>GPT2CausalLM"
}
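
A minimal sketch of how these config values map onto the keras_nlp API when rebuilding the model by hand. It assumes keras_nlp is installed; the "gpt2_base_en" preset name is an assumption used only to fetch the tokenizer's BPE vocabulary and merges, which this config references but does not embed.

# Sketch: reconstruct the serialized GPT2CausalLM from the config above.
import keras_nlp

# Backbone hyperparameters copied from the "GPT2Backbone" config block.
backbone = keras_nlp.models.GPT2Backbone(
    vocabulary_size=50257,
    num_layers=12,
    num_heads=12,
    hidden_dim=768,
    intermediate_dim=3072,
    dropout=0.1,
    max_sequence_length=1024,
)

# The tokenizer's vocabulary/merges live in separate preset asset files,
# so load it from a preset rather than from this config alone
# ("gpt2_base_en" is an assumed preset name matching these dimensions).
tokenizer = keras_nlp.models.GPT2Tokenizer.from_preset("gpt2_base_en")

# Preprocessor settings copied from the "GPT2CausalLMPreprocessor" block.
preprocessor = keras_nlp.models.GPT2CausalLMPreprocessor(
    tokenizer=tokenizer,
    sequence_length=1024,
    add_start_token=True,
    add_end_token=True,
)

# Assemble the task model. Note the backbone built this way is randomly
# initialized; keras_nlp.models.GPT2CausalLM.from_preset("gpt2_base_en")
# would load the matching pretrained weights in one call instead.
causal_lm = keras_nlp.models.GPT2CausalLM(
    backbone=backbone,
    preprocessor=preprocessor,
)
causal_lm.summary()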