Commit 535518d
Parent(s): 8fcf575

Upload Idefics2ForConditionalGeneration

Files changed:
- config.json (+7 -7)
- generation_config.json (+6 -0)
- model.safetensors (+2 -2)
config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "architectures": [
-    "
+    "Idefics2ForConditionalGeneration"
   ],
   "image_token_id": 32001,
   "model_type": "idefics2",
@@ -95,7 +95,7 @@
   "forced_bos_token_id": null,
   "forced_eos_token_id": null,
   "hidden_act": "silu",
-  "hidden_size":
+  "hidden_size": 32,
   "id2label": {
     "0": "LABEL_0",
     "1": "LABEL_1"
@@ -110,11 +110,11 @@
   },
   "length_penalty": 1.0,
   "max_length": 20,
-  "max_position_embeddings":
+  "max_position_embeddings": 32768,
   "min_length": 0,
   "model_type": "mistral",
   "no_repeat_ngram_size": 0,
-  "num_attention_heads":
+  "num_attention_heads": 8,
   "num_beam_groups": 1,
   "num_beams": 1,
   "num_hidden_layers": 2,
@@ -176,7 +176,7 @@
   "forced_bos_token_id": null,
   "forced_eos_token_id": null,
   "hidden_act": "gelu_pytorch_tanh",
-  "hidden_size":
+  "hidden_size": 256,
   "id2label": {
     "0": "LABEL_0",
     "1": "LABEL_1"
@@ -196,11 +196,11 @@
   "min_length": 0,
   "model_type": "idefics2",
   "no_repeat_ngram_size": 0,
-  "num_attention_heads":
+  "num_attention_heads": 4,
   "num_beam_groups": 1,
   "num_beams": 1,
   "num_channels": 3,
-  "num_hidden_layers":
+  "num_hidden_layers": 2,
   "num_return_sequences": 1,
   "output_attentions": false,
   "output_hidden_states": false,
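For context (not part of the commit itself): the values filled in above describe a deliberately tiny Idefics2 checkpoint, with a 2-layer text backbone (model_type "mistral", hidden_size 32, 8 attention heads) and a small vision tower (hidden_size 256, 4 heads, 2 layers). A minimal sketch of loading such a repository with transformers follows; the repo id is a placeholder, not the actual repository behind this commit:

# Sketch only: loading a tiny Idefics2 checkpoint configured like the diff above.
# "<namespace>/tiny-idefics2" is a placeholder repo id, not the repository of this commit.
from transformers import AutoProcessor, Idefics2ForConditionalGeneration

repo_id = "<namespace>/tiny-idefics2"
model = Idefics2ForConditionalGeneration.from_pretrained(repo_id)
processor = AutoProcessor.from_pretrained(repo_id)

print(model.config.text_config.hidden_size)    # 32, per the diff above
print(model.config.vision_config.hidden_size)  # 256, per the diff above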
generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "transformers_version": "4.40.2"
+}
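The new generation_config.json only pins generation-time defaults (bos_token_id 1, eos_token_id 2), exported from the model config by transformers 4.40.2. As a sketch, again with a placeholder repo id, these defaults surface through transformers' GenerationConfig:

from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("<namespace>/tiny-idefics2")  # placeholder repo id
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id)  # 1 2, matching the file added above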
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:b6de3469bdf7124dc1615286bdbcec09d82a22314fba204fafab573ba132199a
+size 15634952
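model.safetensors is tracked with Git LFS, so the commit changes only the pointer file (oid and size), not inline weight data. A sketch for checking a locally downloaded copy against the new pointer, assuming the actual weights have already been pulled:

import hashlib

def sha256_of(path: str) -> str:
    # Stream the file in 1 MiB chunks so large checkpoints do not need to fit in memory.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

# Expected digest taken from the LFS pointer added in this commit.
expected_oid = "b6de3469bdf7124dc1615286bdbcec09d82a22314fba204fafab573ba132199a"
print(sha256_of("model.safetensors") == expected_oid)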