import gradio as gr


def single_inference(head_image, head_ent, tail_image, tail_ent, question_text, question_ent):
    # Placeholder: the actual analogical-reasoning model call goes here.
    # Given the analogy (head, tail) : (question, ?), return the predicted answer entity.
    return "Model inference is not implemented yet."


def blended_inference():
    # Placeholder for the blended analogical reasoning mode.
    pass


TITLE = """MKG Analogy"""

with gr.Blocks() as block:
    with gr.Column(elem_id="col-container"):
        gr.HTML(TITLE)

        with gr.Tab("Single Analogical Reasoning"):
            with gr.Row():
                gr.Markdown("""
                $(I_h, I_t) : (T_q, ?)$
                """)
                with gr.Column():
                    head_image = gr.Image(type='pil', label="Head Image")
                    head_ent = gr.Textbox(lines=1, label="Head Entity")
                with gr.Column():
                    tail_image = gr.Image(type='pil', label="Tail Image")
                    tail_ent = gr.Textbox(lines=1, label="Tail Entity")
                with gr.Column():
                    question_text = gr.Textbox(lines=1, label="Question Name")
                    question_ent = gr.Textbox(lines=1, label="Question Entity")

            submit_btn = gr.Button("Submit")
            output_text = gr.Textbox(label="Output")

            # examples=[['example01.jpg', MODELS[0], 'best'], ['example02.jpg', MODELS[0], 'best']]
            # ex = gr.Examples(
            #     examples=examples,
            #     fn=image_to_prompt,
            #     inputs=[input_image, input_model, input_mode],
            #     outputs=[output_text, share_button, community_icon, loading_icon],
            #     cache_examples=True,
            #     run_on_click=True
            # )
            # ex.dataset.headers = [""]

        with gr.Tab("Blended Analogical Reasoning"):
            # UI for the blended mode is not implemented yet.
            pass

    # gr.HTML(ARTICLE)

    submit_btn.click(
        fn=single_inference,
        inputs=[head_image, head_ent, tail_image, tail_ent, question_text, question_ent],
        outputs=[output_text]
    )
    # share_button.click(None, [], [], _js=None)

block.queue(max_size=64).launch()
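
# A minimal sketch of how the commented gr.Examples scaffolding above could be
# adapted to this demo's own components once example assets exist. The file
# paths and entity strings below are hypothetical placeholders, not shipped
# assets, and the block would need to be created inside the `with gr.Blocks()`
# context (next to `submit_btn`) to actually render.
#
# examples = [
#     ['examples/head01.jpg', 'head entity', 'examples/tail01.jpg', 'tail entity',
#      'question name', 'question entity'],
# ]
# ex = gr.Examples(
#     examples=examples,
#     fn=single_inference,
#     inputs=[head_image, head_ent, tail_image, tail_ent, question_text, question_ent],
#     outputs=[output_text],
#     cache_examples=False,
#     run_on_click=True
# )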