tiny-open-clip-model / config.json
{
  "export_model_type": "clip",
  "initializer_factor": 1.0,
  "logit_scale_init_value": 2.6592,
  "model_cfg": {
    "embed_dim": 8,
    "text_cfg": {
      "context_length": 77,
      "heads": 2,
      "layers": 2,
      "vocab_size": 49408,
      "width": 8
    },
    "vision_cfg": {
      "image_size": 8,
      "layers": 2,
      "patch_size": 2,
      "width": 128
    }
  },
  "model_type": "clip",
  "preprocess_cfg": {
    "mean": [
      0.5,
      0.5,
      0.5
    ],
    "std": [
      0.5,
      0.5,
      0.5
    ]
  },
  "projection_dim": 512,
  "text_config": {
    "context_length": 77,
    "heads": 2,
    "layers": 2,
    "model_type": "clip_text_model",
    "width": 8
  },
  "transformers_version": "4.41.2",
  "vision_config": {
    "image_size": 8,
    "layers": 2,
    "model_type": "clip_vision_model",
    "patch_size": 2,
    "width": 128
  }
}
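
The file carries two parallel layouts: "model_cfg" and "preprocess_cfg" use open_clip's field names, while "text_config", "vision_config", and "model_type" use the transformers CLIP naming. Below is a minimal sketch of instantiating the tiny model described by "model_cfg" directly with open_clip's model classes (assuming the open_clip-torch package is installed; no Hub download involved, and the smoke-test inputs are illustrative only):

```python
# Sketch: build the tiny CLIP described by "model_cfg" above.
# Field names map one-to-one from the JSON into open_clip's config dataclasses.
import torch
from open_clip.model import CLIP, CLIPTextCfg, CLIPVisionCfg

model = CLIP(
    embed_dim=8,  # "embed_dim": shared image/text embedding size
    vision_cfg=CLIPVisionCfg(image_size=8, layers=2, patch_size=2, width=128),
    text_cfg=CLIPTextCfg(context_length=77, heads=2, layers=2,
                         vocab_size=49408, width=8),
)
# open_clip's default logit scale is ln(1/0.07) ~= 2.6592, which matches
# "logit_scale_init_value" above, so no override is needed.

# Smoke test with random inputs at the configured sizes.
image = torch.randn(1, 3, 8, 8)           # "image_size": 8
text = torch.randint(0, 49408, (1, 77))   # "vocab_size", "context_length"
with torch.no_grad():
    image_features = model.encode_image(image)
    text_features = model.encode_text(text)
print(image_features.shape, text_features.shape)  # both torch.Size([1, 8])
```

Note that the vision tower's head count is not spelled out here: open_clip derives it from width divided by the default head_width of 64, so width 128 yields 2 attention heads, consistent with the text tower's explicit "heads": 2.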