# Terry Zhuo
# update
# b64230f
# raw
# history blame
# 3.29 kB
import glob
import os
import shlex
import shutil
import subprocess
import sys
import threading
import time
import uuid
from pathlib import Path

import gradio as gr
from apscheduler.schedulers.background import BackgroundScheduler
# CLI entry point invoked for every evaluation run.
default_command = "bigcodebench.evaluate"
# Guard so only one evaluation subprocess runs at a time.
is_running = False
lock = threading.Lock()


def generate_command(
    jsonl_file, split, subset, parallel,
    min_time_limit, max_as_limit, max_data_limit, max_stack_limit,
    check_gt_only, no_gt
):
    """Build the ``bigcodebench.evaluate`` command line for the given options.

    Args:
        jsonl_file: Uploaded samples file (Gradio file object with a ``.name``
            path) or None. When given, it is copied into the working directory
            and passed via ``--samples``.
        split, subset: Benchmark split/subset names, forwarded verbatim.
        parallel: Worker count; omitted when None or 0.
        min_time_limit, max_as_limit, max_data_limit, max_stack_limit:
            Resource limits forwarded to the evaluator (the last three are
            coerced to int).
        check_gt_only, no_gt: Boolean flags appended when truthy.

    Returns:
        A single shell-quoted command string. Using ``shlex.join`` (rather
        than a plain ``" ".join``) keeps filenames containing spaces intact
        when the string is later re-tokenized with ``shlex.split``.
    """
    command = [default_command]
    if jsonl_file is not None:
        # Copy the uploaded file next to the app so the evaluator can read it.
        local_filename = os.path.basename(jsonl_file.name)
        shutil.copy(jsonl_file.name, local_filename)
        command.extend(["--samples", local_filename])
    command.extend(["--split", split, "--subset", subset])
    if parallel is not None and parallel != 0:
        command.extend(["--parallel", str(int(parallel))])
    command.extend([
        "--min-time-limit", str(min_time_limit),
        "--max-as-limit", str(int(max_as_limit)),
        "--max-data-limit", str(int(max_data_limit)),
        "--max-stack-limit", str(int(max_stack_limit)),
    ])
    if check_gt_only:
        command.append("--check-gt-only")
    if no_gt:
        command.append("--no-gt")
    # shlex.join quotes only tokens that need it, so simple commands are
    # byte-identical to the old " ".join output.
    return shlex.join(command)
def cleanup_previous_files(jsonl_file):
    """Delete everything in the working directory except the app's own files.

    Args:
        jsonl_file: Uploaded samples file (object with a ``.name`` path) or
            None. When given, the copied samples file is also kept.

    Errors on individual entries are printed and skipped so one stubborn
    file does not abort the whole cleanup.
    """
    keep = {"Dockerfile", "app.py", "README.md", "__pycache__"}
    if jsonl_file is not None:
        keep.add(os.path.basename(jsonl_file.name))
    for entry in glob.glob("*"):
        if entry in keep:
            continue
        try:
            if os.path.isdir(entry):
                # os.remove raises IsADirectoryError on directories, which
                # previously left stale result directories behind forever.
                shutil.rmtree(entry)
            else:
                os.remove(entry)
        except Exception as e:
            print(f"Error during cleanup of {entry}: {e}")
def find_result_file():
    """Return the most recently modified ``*.json`` file in the working
    directory, or None when there is none."""
    newest_first = sorted(glob.glob("*.json"), key=os.path.getmtime, reverse=True)
    return newest_first[0] if newest_first else None
def run_bigcodebench(command):
    """Run the evaluation command, streaming its combined stdout/stderr.

    Only one evaluation may run at a time: a concurrent call yields a busy
    message and returns. Yields log lines as they are produced, then a
    completion line and the path of the newest result file (if any).

    Args:
        command: Shell-quoted command string (see generate_command).
    """
    global is_running
    with lock:
        if is_running:
            yield "A command is already running. Please wait for it to finish.\n"
            return
        is_running = True
    try:
        yield f"Executing command: {command}\n"
        # shlex.split keeps quoted arguments (e.g. paths with spaces) as
        # single tokens, unlike str.split.
        with subprocess.Popen(
            shlex.split(command),
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            universal_newlines=True,
        ) as process:
            for line in process.stdout:
                yield line
            # Without wait(), returncode can still be None after stdout EOF,
            # making `returncode != 0` falsely report an error on success.
            process.wait()
        if process.returncode != 0:
            yield f"Error: Command exited with status {process.returncode}\n"
        yield "Evaluation completed.\n"
        result_file = find_result_file()
        if result_file:
            yield f"Result file found: {result_file}\n"
        else:
            yield "No result file found.\n"
    finally:
        # Always release the busy flag, even if the subprocess blew up.
        with lock:
            is_running = False
def stream_logs(command, jsonl_file=None):
    """Drive one evaluation run for the UI, yielding the cumulative log text
    after every new line so the frontend can re-render the full transcript.

    Args:
        command: Shell command string to execute.
        jsonl_file: Optional uploaded samples file, kept during cleanup.
    """
    global is_running
    # Fast-path rejection; the authoritative lock-guarded check lives in
    # run_bigcodebench.
    if is_running:
        yield "A command is already running. Please wait for it to finish.\n"
        return
    cleanup_previous_files(jsonl_file)
    yield "Cleaned up previous files.\n"
    transcript = ""
    for chunk in run_bigcodebench(command):
        transcript += chunk
        yield transcript