Update app.py
app.py
CHANGED
@@ -9,7 +9,7 @@ import os
 import torch.nn.functional as F
 
 num_return_sequences = 1
-max_length =
+max_length = 200
 
 
 @dataclass
@@ -72,7 +72,7 @@ def chat_fn(message, history):
 topk_probs, topk_indices = torch.topk(probs, 50, dim=-1)
 
 # sampling a token from topk
-ix = torch.multinomial(input=topk_probs, num_samples=1) # (B, 1)
+ix = torch.multinomial(input=topk_probs, num_samples=1) # (B, 1)
 
 # gather corresponding indices
 xcol = torch.gather(input=topk_indices, dim=-1, index=ix)
@@ -83,8 +83,9 @@ def chat_fn(message, history):
 tokens = x[i, :max_length].tolist()
 decoded = tokenizer.decode(tokens)
 
-yield decoded
+yield decoded + "\n"
 
 
-gr.ChatInterface(chat_fn, examples=examples
-
+gr.ChatInterface(chat_fn, examples=examples,
+title="GPT2 trained from scratch on Shakespeare dataset").launch()
+
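For context, a minimal self-contained sketch of the generation loop this diff edits follows. It is an assumption-laden illustration, not the Space's actual app.py: the stock Hugging Face "gpt2" checkpoint and tokenizer stand in for the from-scratch model, and the Space's examples list is omitted. It shows the pieces the commit touches: the max_length cap, top-k sampling via torch.multinomial and torch.gather, and the generator-based chat_fn that gr.ChatInterface streams.

# Hedged sketch: stand-in model/tokenizer, not the Space's own weights.
import torch
import torch.nn.functional as F
import gradio as gr
from transformers import GPT2LMHeadModel, GPT2Tokenizer

num_return_sequences = 1
max_length = 200  # generation cap set by this commit

tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
model = GPT2LMHeadModel.from_pretrained("gpt2")
model.eval()

def chat_fn(message, history):
    tokens = tokenizer.encode(message, return_tensors="pt")           # (1, T)
    x = tokens.repeat(num_return_sequences, 1)                        # (B, T)
    with torch.no_grad():
        while x.size(1) < max_length:
            logits = model(x).logits[:, -1, :]                        # (B, vocab)
            probs = F.softmax(logits, dim=-1)
            # top-k sampling: keep the 50 most likely tokens
            topk_probs, topk_indices = torch.topk(probs, 50, dim=-1)
            # sampling a token from topk
            ix = torch.multinomial(input=topk_probs, num_samples=1)   # (B, 1)
            # gather corresponding indices
            xcol = torch.gather(input=topk_indices, dim=-1, index=ix) # (B, 1)
            x = torch.cat((x, xcol), dim=1)
    for i in range(num_return_sequences):
        decoded = tokenizer.decode(x[i, :max_length].tolist())
        # yielding (rather than returning) is what lets gr.ChatInterface
        # render the reply as streamed output; the commit appends "\n"
        yield decoded + "\n"

gr.ChatInterface(chat_fn, title="GPT2 trained from scratch on Shakespeare dataset").launch()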