Update pages/Entorno de Ejecución.py
pages/Entorno de Ejecución.py  +10 -10  CHANGED
@@ -88,16 +88,16 @@ def multiclass_prediction(classifier, important_class): #made for hf zero-shot p
         class_score = clase['score']
     return (labels[0] if len(labels) == 1 else labels, score, class_score)
 
-API_URL = "https://api-inference.huggingface.co/models"
-headers = {"Authorization": f"Bearer {st.secrets['token']}"}
-
-def query(data, models): #HF API
-    response = requests.post(API_URL + "/" + model_name, headers=headers, data=data)
-    if response.json()["error"] == "Internal Server Error":
-        return -1
-    while "error" in response.json():
-        response = requests.post(API_URL + "/" + model_name, headers=headers, data=data)
-    return response.json()[1]["score"] #.json
+#API_URL = "https://api-inference.huggingface.co/models"
+#headers = {"Authorization": f"Bearer {st.secrets['token']}"}
+
+#def query(data, models): #HF API
+#    response = requests.post(API_URL + "/" + model_name, headers=headers, data=data)
+#    if response.json()["error"] == "Internal Server Error":
+#        return -1
+#    while "error" in response.json():
+#        response = requests.post(API_URL + "/" + model_name, headers=headers, data=data)
+#    return response.json()[1]["score"] #.json
 
 @st.cache_resource
 def load_clip():
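The commit disables the remote Inference API path (the query helper and its API_URL/headers globals) by commenting it out, leaving the locally cached loaders such as load_clip in place. As written, the disabled helper posts to model_name even though its parameter is named models, and it indexes response.json()[1]["score"] without checking the response shape. Below is a minimal standalone sketch of such a retry loop against the Inference API; the function signature, retry limit, and return convention are assumptions for illustration, not part of this commit.

import requests

API_URL = "https://api-inference.huggingface.co/models"

def query(data, model_name, token, max_retries=10):
    # Hypothetical sketch: POST the payload to the chosen model endpoint and
    # retry while the API reports a transient error (e.g. model still loading).
    headers = {"Authorization": f"Bearer {token}"}
    for _ in range(max_retries):
        response = requests.post(f"{API_URL}/{model_name}", headers=headers, data=data)
        payload = response.json()
        if isinstance(payload, dict) and "error" in payload:
            if payload["error"] == "Internal Server Error":
                return -1      # hard failure, give up immediately
            continue           # transient error, retry the request
        return payload         # e.g. a list of {label, score} dicts
    return -1                  # retries exhausted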