Spaces:
Sleeping
Sleeping
don't cache examples
Browse files
app.py
CHANGED
@@ -16,13 +16,13 @@ def predict(img):
|
|
16 |
img_input = np.transpose(img_input, (2, 0, 1))
|
17 |
img_input = img_input[np.newaxis, :]
|
18 |
pred = model.run(None, {"img": img_input})[0].item()
|
19 |
-
return
|
20 |
|
21 |
|
22 |
if __name__ == "__main__":
|
23 |
model_path = hf_hub_download(repo_id="skytnt/anime-aesthetic", filename="model.onnx")
|
24 |
model = rt.InferenceSession(model_path, providers=['CUDAExecutionProvider', 'CPUExecutionProvider'])
|
25 |
examples = [[f"examples/{x:02d}.jpg"] for x in range(0, 2)]
|
26 |
-
app = gr.Interface(predict, gr.Image(label="input image"), gr.Number(label="score"),title="Anime Aesthetic Predict",
|
27 |
-
allow_flagging="never", examples=examples)
|
28 |
app.launch()
|
|
|
16 |
img_input = np.transpose(img_input, (2, 0, 1))
|
17 |
img_input = img_input[np.newaxis, :]
|
18 |
pred = model.run(None, {"img": img_input})[0].item()
|
19 |
+
return pred
|
20 |
|
21 |
|
22 |
if __name__ == "__main__":
|
23 |
model_path = hf_hub_download(repo_id="skytnt/anime-aesthetic", filename="model.onnx")
|
24 |
model = rt.InferenceSession(model_path, providers=['CUDAExecutionProvider', 'CPUExecutionProvider'])
|
25 |
examples = [[f"examples/{x:02d}.jpg"] for x in range(0, 2)]
|
26 |
+
app = gr.Interface(predict, gr.Image(label="input image"), gr.Number(label="score"),title="Anime Aesthetic Predict",
|
27 |
+
allow_flagging="never", examples=examples, cache_examples=False)
|
28 |
app.launch()
|