Update config.json
config.json CHANGED (+3 -3)
@@ -4,7 +4,7 @@
   ],
   "initializer_factor": 1.0,
   "logit_scale_init_value": 2.6592,
-  "model_type": "
+  "model_type": "chinese_clip",
   "projection_dim": 512,
   "text_config": {
     "architectures": [
@@ -21,7 +21,7 @@
     "intermediate_size": 3072,
     "layer_norm_eps": 1e-12,
     "max_position_embeddings": 512,
-    "model_type": "
+    "model_type": "chinese_clip_text_model",
     "num_attention_heads": 12,
     "num_hidden_layers": 12,
     "output_past": true,
@@ -75,7 +75,7 @@
     "length_penalty": 1.0,
     "max_length": 20,
     "min_length": 0,
-    "model_type": "
+    "model_type": "chinese_clip_vision_model",
     "no_repeat_ngram_size": 0,
     "num_attention_heads": 12,
     "num_beam_groups": 1,