{
  "_name_or_path": "google/siglip-so400m-patch14-224",
  "architectures": [
    "SiglipForImageClassification"
  ],
  "id2label": {
    "0": "airplane",
    "1": "airport",
    "2": "baseball_diamond",
    "3": "basketball_court",
    "4": "beach",
    "5": "bridge",
    "6": "chaparral",
    "7": "church",
    "8": "circular_farmland",
    "9": "cloud",
    "10": "commercial_area",
    "11": "dense_residential",
    "12": "desert",
    "13": "forest",
    "14": "freeway",
    "15": "golf_course",
    "16": "ground_track_field",
    "17": "harbor",
    "18": "industrial_area",
    "19": "intersection",
    "20": "island",
    "21": "lake",
    "22": "meadow",
    "23": "medium_residential",
    "24": "mobile_home_park",
    "25": "mountain",
    "26": "overpass",
    "27": "palace",
    "28": "parking_lot",
    "29": "railway",
    "30": "railway_station",
    "31": "rectangular_farmland",
    "32": "river",
    "33": "roundabout",
    "34": "runway",
    "35": "sea_ice",
    "36": "ship",
    "37": "snowberg",
    "38": "sparse_residential",
    "39": "stadium",
    "40": "storage_tank",
    "41": "tennis_court",
    "42": "terrace",
    "43": "thermal_power_station",
    "44": "wetland"
  },
  "initializer_factor": 1.0,
  "label2id": {
    "airplane": 0,
    "airport": 1,
    "baseball_diamond": 2,
    "basketball_court": 3,
    "beach": 4,
    "bridge": 5,
    "chaparral": 6,
    "church": 7,
    "circular_farmland": 8,
    "cloud": 9,
    "commercial_area": 10,
    "dense_residential": 11,
    "desert": 12,
    "forest": 13,
    "freeway": 14,
    "golf_course": 15,
    "ground_track_field": 16,
    "harbor": 17,
    "industrial_area": 18,
    "intersection": 19,
    "island": 20,
    "lake": 21,
    "meadow": 22,
    "medium_residential": 23,
    "mobile_home_park": 24,
    "mountain": 25,
    "overpass": 26,
    "palace": 27,
    "parking_lot": 28,
    "railway": 29,
    "railway_station": 30,
    "rectangular_farmland": 31,
    "river": 32,
    "roundabout": 33,
    "runway": 34,
    "sea_ice": 35,
    "ship": 36,
    "snowberg": 37,
    "sparse_residential": 38,
    "stadium": 39,
    "storage_tank": 40,
    "tennis_court": 41,
    "terrace": 42,
    "thermal_power_station": 43,
    "wetland": 44
  },
  "model_type": "siglip",
  "problem_type": "single_label_classification",
  "text_config": {
    "hidden_size": 1152,
    "intermediate_size": 4304,
    "max_position_embeddings": 16,
    "model_type": "siglip_text_model",
    "num_attention_heads": 16,
    "num_hidden_layers": 27
  },
  "torch_dtype": "float32",
  "transformers_version": "4.44.0",
  "vision_config": {
    "hidden_size": 1152,
    "intermediate_size": 4304,
    "model_type": "siglip_vision_model",
    "num_attention_heads": 16,
    "num_hidden_layers": 27,
    "patch_size": 14
  }
}
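
For reference, a minimal inference sketch that consumes a checkpoint shipping this config. The checkpoint id "your-org/siglip-scene-classifier" and the image path "scene.jpg" are placeholders (the actual fine-tuned repository path is not given in this file); the calls themselves are standard transformers APIs for SiglipForImageClassification.

# Minimal usage sketch, assuming a local directory or Hub repo that contains
# this config.json together with matching fine-tuned weights.
import torch
from PIL import Image
from transformers import AutoImageProcessor, SiglipForImageClassification

checkpoint = "your-org/siglip-scene-classifier"  # placeholder checkpoint id

processor = AutoImageProcessor.from_pretrained(checkpoint)
model = SiglipForImageClassification.from_pretrained(checkpoint)
model.eval()

# Any RGB remote-sensing scene image; "scene.jpg" is illustrative.
image = Image.open("scene.jpg").convert("RGB")
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, 45), one score per class

pred = logits.argmax(-1).item()
print(model.config.id2label[pred])  # e.g. "airport"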