import gradio as gr
import subprocess
import sys
import os
import threading
import time
import uuid
import glob
import shutil
from pathlib import Path
from apscheduler.schedulers.background import BackgroundScheduler
default_command = "bigcodebench.evaluate"
is_running = False
lock = threading.Lock()
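
# `is_running` (above) is the shared "busy" flag; `lock` serializes access to
# it so concurrent Gradio requests cannot launch overlapping evaluations.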
def generate_command(
    jsonl_file, split, subset, parallel,
    min_time_limit, max_as_limit, max_data_limit, max_stack_limit,
    check_gt_only, no_gt
):
    """Build the bigcodebench.evaluate command line from the UI inputs."""
    command = [default_command]

    if jsonl_file is not None:
        # Copy the uploaded file into the working directory so the evaluator
        # can find it by a plain filename.
        local_filename = os.path.basename(jsonl_file.name)
        shutil.copy(jsonl_file.name, local_filename)
        command.extend(["--samples", local_filename])

    command.extend(["--split", split, "--subset", subset])

    if parallel is not None and parallel != 0:
        command.extend(["--parallel", str(int(parallel))])

    command.extend([
        "--min-time-limit", str(min_time_limit),
        "--max-as-limit", str(int(max_as_limit)),
        "--max-data-limit", str(int(max_data_limit)),
        "--max-stack-limit", str(int(max_stack_limit))
    ])

    if check_gt_only:
        command.append("--check-gt-only")
    if no_gt:
        command.append("--no-gt")

    return " ".join(command)
def cleanup_previous_files(jsonl_file):
    """Remove leftovers from earlier runs, keeping the app's own files."""
    if jsonl_file is not None:
        file_list = ['Dockerfile', 'app.py', 'README.md', os.path.basename(jsonl_file.name), "__pycache__"]
    else:
        file_list = ['Dockerfile', 'app.py', 'README.md', "__pycache__"]
    for file in glob.glob("*"):
        try:
            if file not in file_list:
                # os.remove() fails on directories, so handle them separately.
                if os.path.isdir(file):
                    shutil.rmtree(file)
                else:
                    os.remove(file)
        except Exception as e:
            print(f"Error during cleanup of {file}: {e}")

def find_result_file():
    json_files = glob.glob("*.json")
    if json_files:
        # Pick the most recently modified result file.
        return max(json_files, key=os.path.getmtime)
    return None

def run_bigcodebench(command):
    """Run the evaluation command, yielding its output line by line."""
    global is_running
    with lock:
        if is_running:
            yield "A command is already running. Please wait for it to finish.\n"
            return
        is_running = True

    try:
        yield f"Executing command: {command}\n"
        process = subprocess.Popen(command.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True)
        for line in process.stdout:
            yield line
        # Wait for the process to exit; returncode stays None until then.
        process.wait()
        if process.returncode != 0:
            yield f"Error: Command exited with status {process.returncode}\n"
        yield "Evaluation completed.\n"
        result_file = find_result_file()
        if result_file:
            yield f"Result file found: {result_file}\n"
        else:
            yield "No result file found.\n"
    finally:
        with lock:
            is_running = False
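
# Design note: run_bigcodebench is a generator, so callers (and ultimately the
# Gradio UI) receive subprocess output incrementally instead of blocking until
# the evaluation finishes.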

def stream_logs(command, jsonl_file=None):
    global is_running
    # Early, best-effort check; run_bigcodebench re-checks under the lock.
    if is_running:
        yield "A command is already running. Please wait for it to finish.\n"
        return

    cleanup_previous_files(jsonl_file)
    yield "Cleaned up previous files.\n"

    # Accumulate the log so each yield re-renders the full output in the UI.
    log_content = []
    for log_line in run_bigcodebench(command):
        log_content.append(log_line)
        yield "".join(log_content)