Update app.py
app.py CHANGED
@@ -32,22 +32,36 @@ model.generation_config.eos_token_id = processor.tokenizer.eos_token_id
 def krypton(input, history):
     if input["files"]:
         print("found the image\n")
-
+        image_path = input["files"][-1]["path"] if isinstance(input["files"][-1], dict) else input["files"][-1]
     else:
-
+        image_path = None
         for hist in history:
             if isinstance(hist[0], tuple):
                 image = hist[0][0]
 
-    if not
+    if not image_path:
         gr.Error("You need to upload an image for Krypton to work.")
         return
 
     prompt = f"user\n\n<image>\n{input['text']}\nassistant\n\n"
-    print("
-
-
-
+    print("Made the prompt")
+
+    try:
+        image = Image.open(image_path)
+        print(f"Image open: {image}")
+    except Exception as e:
+        print(f"Error opening image: {e}")
+        gr.Error("Failed to open the image.")
+        return
+
+    try:
+        inputs = processor(prompt, images=image, return_tensors='pt').to('cuda', torch.float16)
+        print(f"Processed inputs: {inputs}")
+    except Exception as e:
+        print(f"Error processing inputs: {e}")
+        gr.Error("Failed to process the inputs.")
+        return
+
 
     # Streamer
     print('About to init streamer')
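
The hunk stops right before the streaming step ("About to init streamer"). For context, below is a minimal sketch of how that step is commonly implemented with transformers' TextIteratorStreamer. It is not taken from the commit: stream_reply is a hypothetical helper name, max_new_tokens=512 is an illustrative default, and model, processor, and inputs are assumed to be the objects built earlier in app.py and in the hunk above.

from threading import Thread

from transformers import TextIteratorStreamer


def stream_reply(model, processor, inputs, max_new_tokens=512):
    """Hypothetical helper mirroring the streamer section that follows this hunk.

    Yields the assistant reply incrementally so the chat UI can render it as it is generated.
    """
    # Decode new tokens as they are produced, skipping the prompt and special tokens.
    streamer = TextIteratorStreamer(
        processor.tokenizer, skip_prompt=True, skip_special_tokens=True
    )
    generation_kwargs = dict(**inputs, streamer=streamer, max_new_tokens=max_new_tokens)

    # Run generation in a background thread so the streamer can be consumed here.
    Thread(target=model.generate, kwargs=generation_kwargs).start()

    buffer = ""
    for new_text in streamer:
        buffer += new_text
        yield buffer

Inside krypton this would end with `yield from stream_reply(model, processor, inputs)`, which streams partial responses if the app wires the function into a multimodal Gradio chat interface (e.g. gr.ChatInterface(fn=krypton, multimodal=True), which matches the input["text"]/input["files"] dict the function expects).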