MaziyarPanahi committed
Commit: 7ff1f7d
Parent(s): 6ad6f9b

Update app.py

Files changed (1)
  1. app.py  +2 -5
app.py CHANGED
@@ -60,9 +60,9 @@ def bot_streaming(message, history):
         gr.Error("You need to upload an image for Phi-3-vision to work.")
 
     prompt = f"{message['text']}<|image_1|>\nCan you convert the table to markdown format?{prompt_suffix}{assistant_prompt}"
-    # print(f"prompt: {prompt}")
+    print(f"prompt: {prompt}")
     image = Image.open(image)
-    inputs = processor(prompt, image, return_tensors='pt').to("cuda:0")
+    inputs = processor(prompt, [image], return_tensors='pt').to("cuda:0")
 
     streamer = TextIteratorStreamer(processor, **{"skip_special_tokens": False, "skip_prompt": True})
     generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=1024, do_sample=False, eos_token_id=processor.tokenizer.eos_token_id)
@@ -73,9 +73,6 @@ def bot_streaming(message, history):
     buffer = ""
     time.sleep(0.5)
     for new_text in streamer:
-        # find <|eot_id|> and remove it from the new_text
-        if "<|eot_id|>" in new_text:
-            new_text = new_text.split("<|eot_id|>")[0]
         buffer += new_text
 
     generated_text_without_prompt = buffer
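
The functional change is that the image is now passed to the processor wrapped in a list, which matches the example usage shown on the Phi-3-vision model card. A minimal standalone sketch of the call pattern this commit moves to (the model id, prompt text, and image path below are illustrative placeholders, not values from app.py):

from PIL import Image
from transformers import AutoModelForCausalLM, AutoProcessor

# Placeholder checkpoint; the Space loads its own model elsewhere in app.py.
model_id = "microsoft/Phi-3-vision-128k-instruct"
processor = AutoProcessor.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_id, trust_remote_code=True, torch_dtype="auto", device_map="cuda:0"
)

# Assumed Phi-3-vision chat markers; app.py builds its prompt from
# message['text'], prompt_suffix, and assistant_prompt instead.
prompt = "<|user|>\n<|image_1|>\nCan you convert the table to markdown format?<|end|>\n<|assistant|>\n"
image = Image.open("example.png")  # placeholder path

# The processor takes the images argument as a list, even for a single image.
inputs = processor(prompt, [image], return_tensors="pt").to("cuda:0")

output_ids = model.generate(
    **inputs,
    max_new_tokens=1024,
    do_sample=False,
    eos_token_id=processor.tokenizer.eos_token_id,
)
# Drop the prompt tokens before decoding the model's reply.
output_ids = output_ids[:, inputs["input_ids"].shape[1]:]
print(processor.batch_decode(output_ids, skip_special_tokens=True)[0])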