eduagarcia committed • Commit 5408125 • Parent(s): ec3a730

Makes Model Cards optional

Files changed:
- src/scripts/update_all_request_files.py (+27 -26)
- src/submission/submit.py (+19 -15)
src/scripts/update_all_request_files.py
CHANGED
@@ -32,36 +32,37 @@ def update_models(file_path, models):
     still_on_hub, error, model_config = is_model_on_hub(
         model_name=model_id, revision=data.get("revision"), trust_remote_code=True, test_tokenizer=False, token=H4_TOKEN
     )
-    # If the model doesn't have a model card or a license, we consider it's deleted
-    if still_on_hub:
-        try:
-            if check_model_card(model_id)[0] is False:
-                still_on_hub = False
-        except Exception:
-            still_on_hub = False
     data['still_on_hub'] = still_on_hub

-    is_moe_from_metadata = False
+    tags = []
+
     if still_on_hub:
-        if not is_merge_from_metadata:
-            tags.append("flagged:undisclosed_merge")
+        model = model_id
+        modelcard_OK, error_msg = check_model_card(model)
+        model_card = None
+        if modelcard_OK:
+            model_card = ModelCard.load(model)
+
+        is_merge_from_metadata = False
+        is_moe_from_metadata = False
+        is_merge_from_model_card = False
+        is_moe_from_model_card = False
+
+        # Storing the model tags
         moe_keywords = ["moe", "mixture of experts", "mixtral"]
+        if modelcard_OK:
+            if model_card.data.tags:
+                is_merge_from_metadata = "merge" in model_card.data.tags
+                is_moe_from_metadata = "moe" in model_card.data.tags
+            merge_keywords = ["mergekit", "merged model", "merge model", "merging"]
+            # If the model is a merge but not saying it in the metadata, we flag it
+            is_merge_from_model_card = any(keyword in model_card.text.lower() for keyword in merge_keywords)
+            if is_merge_from_model_card or is_merge_from_metadata:
+                tags.append("merge")
+                if not is_merge_from_metadata:
+                    tags.append("flagged:undisclosed_merge")
+            is_moe_from_model_card = any(keyword in model_card.text.lower() for keyword in moe_keywords)
+        is_moe_from_name = "moe" in model.lower().replace("/", "-").replace("_", "-").split("-")
         if is_moe_from_model_card or is_moe_from_name or is_moe_from_metadata:
             tags.append("moe")
             if not is_moe_from_metadata:
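Condensed out of the hunk above (and the matching change to src/submission/submit.py below), the new behaviour is roughly the sketch that follows. derive_tags and check_model_card_stub are hypothetical stand-ins written for illustration only: the repo's real check_model_card helper lives elsewhere and does more validation, and the "flagged:undisclosed_moe" tag name is a guess by analogy with the merge case, since the hunk ends just before that line.

    from huggingface_hub import ModelCard


    def check_model_card_stub(repo_id: str):
        """Illustrative stand-in for the repo's check_model_card(): returns (ok, error_msg)."""
        try:
            card = ModelCard.load(repo_id)
        except Exception:
            return False, "Please add a model card (README.md) to your model repo."
        if card.data.license is None:
            return False, "Please add a license to your model card."
        return True, ""


    def derive_tags(model_id: str) -> list:
        """Mirrors the tag logic added in this commit: it degrades gracefully when no card exists."""
        tags = []
        modelcard_ok, _error_msg = check_model_card_stub(model_id)
        model_card = ModelCard.load(model_id) if modelcard_ok else None

        is_merge_from_metadata = False
        is_moe_from_metadata = False
        is_merge_from_model_card = False
        is_moe_from_model_card = False

        moe_keywords = ["moe", "mixture of experts", "mixtral"]
        if modelcard_ok:
            # Card-dependent detection only runs when a card could actually be loaded.
            if model_card.data.tags:
                is_merge_from_metadata = "merge" in model_card.data.tags
                is_moe_from_metadata = "moe" in model_card.data.tags
            merge_keywords = ["mergekit", "merged model", "merge model", "merging"]
            is_merge_from_model_card = any(k in model_card.text.lower() for k in merge_keywords)
            if is_merge_from_model_card or is_merge_from_metadata:
                tags.append("merge")
                if not is_merge_from_metadata:
                    tags.append("flagged:undisclosed_merge")
            is_moe_from_model_card = any(k in model_card.text.lower() for k in moe_keywords)

        # Name-based MoE detection works even without a model card.
        is_moe_from_name = "moe" in model_id.lower().replace("/", "-").replace("_", "-").split("-")
        if is_moe_from_model_card or is_moe_from_name or is_moe_from_metadata:
            tags.append("moe")
            if not is_moe_from_metadata:
                tags.append("flagged:undisclosed_moe")  # assumed name; the hunk stops at the `if`
        return tags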
src/submission/submit.py
CHANGED
@@ -94,30 +94,34 @@ def add_new_eval(
     try:
         license = model_info.cardData["license"]
     except Exception:
-        return styled_error("Please select a license for your model")
+        license = None
+        #return styled_error("Please select a license for your model")

     modelcard_OK, error_msg = check_model_card(model)
-    if not modelcard_OK:
-        return styled_error(error_msg)
+    model_card = None
+    if modelcard_OK:
+        model_card = ModelCard.load(model)

     is_merge_from_metadata = False
     is_moe_from_metadata = False
-    model_card = ModelCard.load(model)
+    is_merge_from_model_card = False
+    is_moe_from_model_card = False

     # Storing the model tags
     tags = []
-    if model_card.data.tags:
-        is_merge_from_metadata = "merge" in model_card.data.tags
-        is_moe_from_metadata = "moe" in model_card.data.tags
-    merge_keywords = ["mergekit", "merged model", "merge model", "merging"]
-    # If the model is a merge but not saying it in the metadata, we flag it
-    is_merge_from_model_card = any(keyword in model_card.text.lower() for keyword in merge_keywords)
-    if is_merge_from_model_card or is_merge_from_metadata:
-        tags.append("merge")
-        if not is_merge_from_metadata:
-            tags.append("flagged:undisclosed_merge")
     moe_keywords = ["moe", "mixture of experts", "mixtral"]
-    is_moe_from_model_card = any(keyword in model_card.text.lower() for keyword in moe_keywords)
+    if modelcard_OK:
+        if model_card.data.tags:
+            is_merge_from_metadata = "merge" in model_card.data.tags
+            is_moe_from_metadata = "moe" in model_card.data.tags
+        merge_keywords = ["mergekit", "merged model", "merge model", "merging"]
+        # If the model is a merge but not saying it in the metadata, we flag it
+        is_merge_from_model_card = any(keyword in model_card.text.lower() for keyword in merge_keywords)
+        if is_merge_from_model_card or is_merge_from_metadata:
+            tags.append("merge")
+            if not is_merge_from_metadata:
+                tags.append("flagged:undisclosed_merge")
+        is_moe_from_model_card = any(keyword in model_card.text.lower() for keyword in moe_keywords)
     is_moe_from_name = "moe" in model.lower().replace("/", "-").replace("_", "-").split("-")
     if is_moe_from_model_card or is_moe_from_name or is_moe_from_metadata:
         tags.append("moe")
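With both call sites guarded, a submission whose repository has no README is no longer rejected outright: check_model_card still reports the problem, but its result now only decides whether card-dependent tagging runs, and license falls back to None instead of aborting with styled_error. Using the hypothetical derive_tags sketch above, the difference looks roughly like this (repo names are made up and do not exist on the Hub):

    # A repo with no README: modelcard_ok is False, the card text is never read,
    # and only the name-based MoE check can fire.
    print(derive_tags("someuser/mixtral-moe-finetune-no-card"))  # -> ['moe', 'flagged:undisclosed_moe']
    print(derive_tags("someuser/plain-model-no-card"))           # -> []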