Spaces:
Running
on
Zero
Running
on
Zero
MaziyarPanahi
committed on
Commit
•
6ad6f9b
1
Parent(s):
91c00b7
Update app.py (#6)
Browse files- Update app.py (3b001a1f4da58ad2a0d6c1f135deae4e681ce6ff)
app.py
CHANGED
@@ -62,10 +62,10 @@ def bot_streaming(message, history):
|
|
62 |
prompt = f"{message['text']}<|image_1|>\nCan you convert the table to markdown format?{prompt_suffix}{assistant_prompt}"
|
63 |
# print(f"prompt: {prompt}")
|
64 |
image = Image.open(image)
|
65 |
-
inputs = processor(prompt,
|
66 |
|
67 |
streamer = TextIteratorStreamer(processor, **{"skip_special_tokens": False, "skip_prompt": True})
|
68 |
-
generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=1024, do_sample=False)
|
69 |
|
70 |
thread = Thread(target=model.generate, kwargs=generation_kwargs)
|
71 |
thread.start()
|
|
|
62 |
prompt = f"{message['text']}<|image_1|>\nCan you convert the table to markdown format?{prompt_suffix}{assistant_prompt}"
|
63 |
# print(f"prompt: {prompt}")
|
64 |
image = Image.open(image)
|
65 |
+
inputs = processor(prompt, image, return_tensors='pt').to("cuda:0")
|
66 |
|
67 |
streamer = TextIteratorStreamer(processor, **{"skip_special_tokens": False, "skip_prompt": True})
|
68 |
+
generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=1024, do_sample=False, eos_token_id=processor.tokenizer.eos_token_id)
|
69 |
|
70 |
thread = Thread(target=model.generate, kwargs=generation_kwargs)
|
71 |
thread.start()
|