Update app.py
app.py CHANGED
@@ -11,7 +11,7 @@ import yfinance as yf
 from peft import PeftModel
 from collections import defaultdict
 from datetime import date, datetime, timedelta
-from transformers import AutoTokenizer, AutoModelForCausalLM
+from transformers import AutoTokenizer, AutoModelForCausalLM, TextStreamer
 
 os.environ['HF_HOME'] = '/data/.huggingface'
 
@@ -38,6 +38,8 @@ tokenizer = AutoTokenizer.from_pretrained(
     token=access_token
 )
 
+streamer = TextStreamer(tokenizer)
+
 B_INST, E_INST = "[INST]", "[/INST]"
 B_SYS, E_SYS = "<<SYS>>\n", "\n<</SYS>>\n\n"
 
@@ -237,7 +239,7 @@ def predict(ticker, date, n_weeks, use_basics):
     res = model.generate(
         **inputs, max_length=4096, do_sample=True,
         eos_token_id=tokenizer.eos_token_id,
-        use_cache=True
+        use_cache=True, streamer=streamer
     )
     output = tokenizer.decode(res[0], skip_special_tokens=True)
     answer = re.sub(r'.*\[/INST\]\s*', '', output, flags=re.DOTALL)
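Note on the change: this commit wires a transformers TextStreamer into model.generate() so decoded tokens are printed incrementally while generation runs, instead of the output appearing only after the final decode(). The standalone sketch below illustrates the same pattern under assumed setup; the checkpoint name, prompt, and the skip_prompt/skip_special_tokens settings are illustrative placeholders, not taken from this Space's app.py.

# Minimal sketch of streaming generation with TextStreamer (assumed setup).
from transformers import AutoTokenizer, AutoModelForCausalLM, TextStreamer

model_name = "meta-llama/Llama-2-7b-chat-hf"   # placeholder checkpoint
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# TextStreamer prints decoded tokens to stdout as they are generated;
# skip_prompt=True avoids re-printing the prompt, extra kwargs go to decode().
streamer = TextStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)

prompt = "[INST] Example prompt [/INST]"        # placeholder prompt
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

res = model.generate(
    **inputs, max_length=4096, do_sample=True,
    eos_token_id=tokenizer.eos_token_id,
    use_cache=True, streamer=streamer,          # tokens stream while generating
)
# The full decoded output is still available afterwards, as in the Space's code.
output = tokenizer.decode(res[0], skip_special_tokens=True)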