import os
import json
import random
import datetime
import requests
import numpy as np
import gradio as gr
from pathlib import Path
from model.model_registry import *
from constants import LOGDIR, LOG_SERVER_ADDR, APPEND_JSON, SAVE_IMAGE, SAVE_LOG, EVALUATE_DIMS
from typing import Union
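# Reusable Gradio update payloads for toggling button visibility/interactivity.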
enable_btn = gr.update(interactive=True, visible=True)
disable_btn = gr.update(interactive=False)
invisible_btn = gr.update(interactive=False, visible=False)
no_change_btn = gr.update(value="No Change", interactive=True, visible=True)
def build_about():
about_markdown = f"""
# About Us
Supported by Shanghai AI Laboratory
## Contributors:
Yuhan Zhang, Mengchen Zhang, Tong Wu, Tengfei Wang, Ziwei Liu, Dahua Lin
## Contact:
Email: [email protected]
## Sponsorship
We are continuously looking for sponsorship to sustain the arena project over the long term. Please contact us if you are interested in supporting this project.
"""
gr.Markdown(about_markdown, elem_id="about_markdown")
acknowledgment_md = """
### Acknowledgment
<div class="image-container">
<p>Our codebase is built upon <a href="https://github.com/lm-sys/FastChat" target="_blank">FastChat</a> and <a href="https://github.com/TIGER-AI-Lab/GenAI-Arena" target="_blank">GenAI-Arena</a>.</p>
</div>
"""
block_css = """
#notice_markdown {
font-size: 110%
}
#notice_markdown th {
display: none;
}
#notice_markdown td {
padding-top: 6px;
padding-bottom: 6px;
}
#model_description_markdown {
font-size: 110%
}
#leaderboard_markdown {
font-size: 110%
}
#leaderboard_markdown td {
padding-top: 6px;
padding-bottom: 6px;
}
#leaderboard_dataframe td {
line-height: 0.1em;
}
#about_markdown {
font-size: 110%
}
#ack_markdown {
font-size: 110%
}
#evaldim_markdown {
font-weight: bold;
text-align: center;
background-color: white;
}
#input_box textarea {
}
footer {
display:none !important
}
.image-about img {
    margin: 0 30px;
    margin-top: 30px;
    height: 60px;
    max-height: 100%;
    width: auto;
    float: left;
}
.input-image, .image-preview {
    margin: 0 30px;
    height: 30px;
    max-height: 100%;
    width: auto;
    max-width: 30%;
}
"""
def enable_mds():
return tuple(gr.update(visible=True) for _ in range(EVALUATE_DIMS))
def disable_mds():
return tuple(gr.update(visible=False) for _ in range(EVALUATE_DIMS))
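# Note: the counts EVALUATE_DIMS*4 + 2 and EVALUATE_DIMS*3 + 2 below presumably cover the
# per-dimension voting buttons (four per dimension in the side-by-side view, three in the
# single view) plus two shared controls; the exact mapping is defined where these helpers
# are wired to the UI.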
def enable_buttons_side_by_side():
return tuple(gr.update(visible=True, interactive=True) for i in range(EVALUATE_DIMS*4 + 2))
def disable_buttons_side_by_side():
return tuple(gr.update(visible=(i>=EVALUATE_DIMS*4), interactive=False) for i in range(EVALUATE_DIMS*4 + 2))
def enable_buttons():
return tuple(gr.update(interactive=True) for _ in range(EVALUATE_DIMS*3 + 2))
def disable_buttons():
return tuple(gr.update(interactive=False) for _ in range(EVALUATE_DIMS*3 + 2))
def reset_state(state):
state.normal_video, state.rgb_video = None, None
state.evaluted_dims = 0
return (state,) + tuple(gr.update(interactive=False) for _ in range(EVALUATE_DIMS*3 + 2))
def reset_states_side_by_side(state_0, state_1):
state_0.normal_video, state_0.rgb_video = None, None
state_1.normal_video, state_1.rgb_video = None, None
state_0.evaluted_dims, state_1.evaluted_dims = 0, 0
return (state_0, state_1) \
+ tuple(gr.update(visible=(i>=EVALUATE_DIMS*4), interactive=False) for i in range(EVALUATE_DIMS*4 + 2)) \
+ tuple(gr.update(visible=False) for _ in range(EVALUATE_DIMS))
def reset_states_side_by_side_anony(state_0, state_1):
state_0.model_name, state_1.model_name = "", ""
state_0.normal_video, state_0.rgb_video = None, None
state_1.normal_video, state_1.rgb_video = None, None
state_0.evaluted_dims, state_1.evaluted_dims = 0, 0
return (state_0, state_1) \
+ (gr.Markdown("", visible=False), gr.Markdown("", visible=False))\
+ tuple(gr.update(visible=(i>=EVALUATE_DIMS*4), interactive=False) for i in range(EVALUATE_DIMS*4 + 2)) \
+ tuple(gr.update(visible=False) for _ in range(EVALUATE_DIMS))
def clear_t2s_history():
return None, "", None, None
def clear_t2s_history_side_by_side():
return [None] * 2 + [""] + [None] * 4
def clear_t2s_history_side_by_side_anony():
return [None] * 2 + [""] + [None] * 4 + [gr.Markdown("", visible=False), gr.Markdown("", visible=False)]
def clear_i2s_history():
return None, None, None, None
def clear_i2s_history_side_by_side():
return [None] * 2 + [None] + [None] * 4
def clear_i2s_history_side_by_side_anony():
return [None] * 2 + [None] + [None] * 4 + [gr.Markdown("", visible=False), gr.Markdown("", visible=False)]
def get_ip(request: gr.Request):
if request:
if "cf-connecting-ip" in request.headers:
ip = request.headers["cf-connecting-ip"] or request.client.host
else:
ip = request.client.host
else:
ip = None
return ip
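# Conversation logs are rotated daily: one JSON file per day under LOGDIR.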
def get_conv_log_filename():
t = datetime.datetime.now()
name = os.path.join(LOGDIR, f"{t.year}-{t.month:02d}-{t.day:02d}-conv.json")
return name
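# The helpers below forward images, JSON records, and raw log strings to a separate log
# server over HTTP; paths are rewritten relative to the current working directory so the
# server can mirror the local directory layout.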
def save_image_file_on_log_server(image_file:str):
image_file = Path(image_file).absolute().relative_to(os.getcwd())
image_file = str(image_file)
# Open the image file in binary mode
url = f"{LOG_SERVER_ADDR}/{SAVE_IMAGE}"
with open(image_file, 'rb') as f:
# Make the POST request, sending the image file and the image path
response = requests.post(url, files={'image': f}, data={'image_path': image_file})
return response
def append_json_item_on_log_server(json_item: Union[dict, str], log_file: str):
if isinstance(json_item, dict):
json_item = json.dumps(json_item)
log_file = Path(log_file).absolute().relative_to(os.getcwd())
log_file = str(log_file)
url = f"{LOG_SERVER_ADDR}/{APPEND_JSON}"
# Make the POST request, sending the JSON string and the log file name
response = requests.post(url, data={'json_str': json_item, 'file_name': log_file})
return response
def save_log_str_on_log_server(log_str: str, log_file: str):
log_file = Path(log_file).absolute().relative_to(os.getcwd())
log_file = str(log_file)
url = f"{LOG_SERVER_ADDR}/{SAVE_LOG}"
# Make the POST request, sending the log message and the log file name
response = requests.post(url, data={'message': log_str, 'log_path': log_file})
    return response
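

if __name__ == "__main__":
    # Minimal smoke-test sketch: append one illustrative record to today's conversation
    # log via the log server. Assumes LOG_SERVER_ADDR points at a reachable server and
    # that LOGDIR resolves to a path under the current working directory.
    demo_record = {
        "tstamp": datetime.datetime.now().isoformat(),
        "type": "smoke_test",
        "ip": None,
    }
    resp = append_json_item_on_log_server(demo_record, get_conv_log_filename())
    print(resp.status_code)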