Felix Marty committed • 590064e
Parent(s): b23ba47

add timeout
Files changed:
- app.py (+20, -6)
- backend.py (+13, -4)
app.py CHANGED

````diff
@@ -1,15 +1,29 @@
 import gradio as gr
 
-from backend import
-
-
-
-
+from backend import get_message_single, get_message_spam, send_single, send_spam
+from defaults import (
+    ADDRESS_BETTERTRANSFORMER,
+    ADDRESS_VANILLA,
+    defaults_bt_single,
+    defaults_bt_spam,
+    defaults_vanilla_single,
+    defaults_vanilla_spam,
+)
 
 with gr.Blocks() as demo:
     gr.Markdown(
         """
-    Let's try out TorchServe + BetterTransformer!
+    Let's try out TorchServe + BetterTransformer!
+
+    BetterTransformer is a feature made available with PyTorch 1.13, allowing the use of a fastpath execution for encoder attention blocks.
+
+    As a one-liner, you can use BetterTransformer with compatible Transformers models:
+
+    ```
+    better_model = BetterTransformer.transform(model)
+    ```
+
+    This Space is a demo of an **end-to-end** deployment of PyTorch eager-mode models, both with and without BetterTransformer. The goal is to see what the benefits of using BetterTransformer are, server-side and client-side.
 
     ## Inference using...
         """
````
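For context on the one-liner quoted in the Markdown above: `BetterTransformer.transform` comes from the Hugging Face Optimum library. A minimal sketch of the conversion, assuming an Optimum install (the checkpoint name is only an illustrative choice, not the one this Space serves):

```python
from transformers import AutoModel
from optimum.bettertransformer import BetterTransformer

# Any BetterTransformer-compatible encoder model works;
# distilbert-base-uncased is just an example checkpoint.
model = AutoModel.from_pretrained("distilbert-base-uncased")

# The one-liner from the demo text: swap compatible encoder layers
# for PyTorch's fastpath execution.
better_model = BetterTransformer.transform(model)
```

The fastpath pays off most on padded batches, since nested tensors let the kernels skip computation on padding tokens.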
backend.py CHANGED

```diff
@@ -2,8 +2,12 @@ import json
 
 from datasets import load_dataset
 
-from defaults import (
-
+from defaults import (
+    ADDRESS_BETTERTRANSFORMER,
+    ADDRESS_VANILLA,
+    HEADERS,
+    SPAM_N_REQUESTS,
+)
 from utils import ElapsedFuturesSession
 
 data = load_dataset("glue", "sst2", split="validation")
```
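`utils.py` is not touched by this commit, so `ElapsedFuturesSession` is not shown here. Judging by the name, it likely follows the recipe from the requests-futures README, which subclasses `FuturesSession` and uses a response hook to record per-request latency. A sketch under that assumption:

```python
from time import monotonic

from requests_futures.sessions import FuturesSession


class ElapsedFuturesSession(FuturesSession):
    """A FuturesSession that stamps each response with its latency."""

    def request(self, method, url, hooks=None, *args, **kwargs):
        start = monotonic()
        if hooks is None:
            hooks = {}

        def timing(resp, *args, **kwargs):
            # Seconds from submission to response, stored on the response.
            resp.elapsed = monotonic() - start

        # Assumes the caller registers no other response hooks.
        hooks["response"] = timing
        return super().request(method, url, hooks=hooks, *args, **kwargs)
```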
```diff
@@ -67,8 +71,9 @@ SESSION = ElapsedFuturesSession()
 def send_single(input_model_vanilla, address: str):
     assert address in [ADDRESS_VANILLA, ADDRESS_BETTERTRANSFORMER]
 
+    # should not take more than 10 s, so timeout if that's the case
     promise = SESSION.post(
-        address, headers=HEADERS, data=input_model_vanilla.encode("utf-8")
+        address, headers=HEADERS, data=input_model_vanilla.encode("utf-8"), timeout=10
     )
 
     try:
```
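The new `timeout=10` is the standard requests per-request timeout, which requests-futures forwards untouched. Nothing is raised at submission time; the exception only surfaces when the future is resolved, which is presumably what the `try:` block after `SESSION.post` handles. A minimal sketch of that behavior (the URL is a placeholder, not this Space's endpoint):

```python
import requests
from requests_futures.sessions import FuturesSession

session = FuturesSession()
# timeout applies to connecting and to each socket read, not total wall time.
future = session.post("http://localhost:8080/predictions/model",  # placeholder
                      data=b"some input", timeout=10)

try:
    response = future.result()  # re-raises any exception from the worker thread
except requests.exceptions.Timeout:
    response = None  # server took too long: fail instead of hanging forever
```

Strictly speaking, requests applies the value to the connect phase and to each read between bytes rather than to the whole response, but for a single-shot prediction endpoint it behaves close to a total cap.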
```diff
@@ -110,7 +115,11 @@ def send_spam(address: str):
 
     for i in range(SPAM_N_REQUESTS):
         input_data = inp[i]["sentence"].encode("utf-8")
-
+
+        # should not take more than 15 s, so timeout if that's the case
+        promises.append(
+            SESSION.post(address, headers=HEADERS, data=input_data, timeout=15)
+        )
 
     for promise in promises:
         try:
```
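One nuance worth noting: the 15 s budget is per request, and the futures run concurrently on the session's thread pool, so a fully stalled server should make `send_spam` fail after roughly 15 s per batch of in-flight requests rather than after 15 s × `SPAM_N_REQUESTS`.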