jjyang77 commited on
Commit
25db7e9
1 Parent(s): 656459b

move scripts from testing space

Browse files
Files changed (9) hide show
  1. .dockerignore +11 -0
  2. .gitignore +11 -0
  3. Dockerfile +39 -0
  4. README.md +4 -4
  5. api/__init__.py +0 -0
  6. api/app.py +172 -0
  7. api/code_execution.py +524 -0
  8. dev.sh +11 -0
  9. prod.sh +10 -0
.dockerignore ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ .dockerignore
2
+
3
+ # Python cache files
4
+ **.pyc
5
+ **/__pycache__
6
+
7
+ # Testing data
8
+ /data
9
+
10
+ # Environment file
11
+ .env
.gitignore ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Python cache files
2
+ **.pyc
3
+ **/__pycache__
4
+
5
+ .hypothesis/
6
+
7
+ # Testing data
8
+ /data
9
+
10
+ # Environment file
11
+ .env
Dockerfile ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Better use newer Python as generated code can use new features
FROM python:3.10-slim

# RUN rm -rf /var/lib/apt/lists/*
# install git, g++ and python3-tk
# (zip/unzip/procps/r-base support tasks whose tests shell out to those tools)
RUN apt-get update && apt-get install -y git g++ python3-tk zip unzip procps r-base

# upgrade to latest pip
RUN pip install --upgrade pip
RUN pip install fastapi gunicorn uvicorn[standard] httpx pydantic==2.* plotly

# Acquire benchmark code to local
# ADD "https://api.github.com/repos/bigcode-project/bigcodebench/commits?per_page=1" latest_commit
# RUN git clone https://github.com/bigcode-project/bigcodebench.git /bigcodebench
# RUN cd /bigcodebench
# RUN python3 -c "from bigcodebench.data import get_bigcodebench; get_bigcodebench()"

# Add a new user "bigcodebenchuser"
# (the app drops privileges to this user before running untrusted code)
RUN adduser --disabled-password --gecos "" bigcodebenchuser

# Task-level dependencies pinned by the benchmark's requirements file.
RUN pip install -I --timeout 2000 -r https://github.com/bigcode-project/bigcodebench-annotation/releases/download/v0.1.0/requirements.txt

# NOTE(review): COPY happens before WORKDIR is set, so files land under the
# image's default working directory — confirm this matches the /api paths below.
COPY . .

WORKDIR /

# For matplotlib import caching in reliability guard
RUN mkdir -p /api/cache/matplotlib
ENV MPLCONFIGDIR=/api/cache/matplotlib

# NOTE(review): world-writable 777 is a broad grant; the chown below may be
# sufficient on its own — confirm before tightening.
RUN chmod -R 777 /api

RUN chown -R bigcodebenchuser:bigcodebenchuser /api

USER bigcodebenchuser
# Start the FastAPI app on port 7860, the default port expected by Spaces
# CMD ["uvicorn", "api.app:app", "--host", "0.0.0.0", "--port", "7860"]
# ENTRYPOINT [ "./dev.sh" ]
ENTRYPOINT [ "./prod.sh" ]
README.md CHANGED
@@ -1,8 +1,8 @@
1
  ---
2
- title: Oe Eval Bcb Lite Evaluator
3
- emoji: 🏆
4
- colorFrom: red
5
- colorTo: gray
6
  sdk: docker
7
  pinned: false
8
  ---
 
1
  ---
2
+ title: OE Eval Bcb Evaluator Testing
3
+ emoji: 🐢
4
+ colorFrom: green
5
+ colorTo: pink
6
  sdk: docker
7
  pinned: false
8
  ---
api/__init__.py ADDED
File without changes
api/app.py ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import os
3
+ from collections import Counter, defaultdict
4
+ import multiprocessing
5
+ from datetime import datetime
6
+ from concurrent.futures import ProcessPoolExecutor, as_completed
7
+ from typing import Dict, List, Tuple
8
+ import gc
9
+
10
+ from fastapi import FastAPI, HTTPException
11
+ from fastapi.responses import RedirectResponse
12
+
13
+ from api.code_execution import untrusted_check
14
+
15
# (status label, per-test pass flags) pair produced for each evaluated sample.
Result = Tuple[str, List[bool]]
16
+
17
def create_app() -> FastAPI:
    """Build the FastAPI app exposing the BigCodeBench evaluation endpoints."""

    level = os.environ.get("LOG_LEVEL", default=logging.INFO)
    logging.basicConfig(level=level)

    app = FastAPI()

    @app.get("/")
    def root():
        # Redirect the bare root to the interactive API docs.
        return RedirectResponse("/docs")

    @app.get("/health", status_code=204)
    def health():
        # Liveness probe: 204 No Content.
        return

    @app.post("/evaluate/")
    async def evaluate(
        samples: List[dict],
        calibrate: bool = True,
        parallel: int = -1,
        min_time_limit: float = 1,
        max_as_limit: int = 30 * 1024,
        max_data_limit: int = 30 * 1024,
        max_stack_limit: int = 10,
        no_gt: bool = True,
    ) -> dict:
        """
        Evaluate the correctness of the solutions in the given samples data.

        Each sample must carry task_id, res_id, test, solution and entry_point
        (plus code_prompt when calibrate=True). Returns a dict keyed by
        task_id with per-sample status/details.
        """
        if parallel < 1:
            # Default to half the cores, but never fewer than one worker.
            n_workers = max(1, multiprocessing.cpu_count() // 2)
        else:
            n_workers = parallel

        if not no_gt:
            expected_time = get_groundtruth()
        else:
            expected_time = {}

        results = {
            "date": datetime.now().strftime("%Y-%m-%d %H:%M"),
            "eval": {},
        }

        # Keys every sample must carry; "code_prompt" is additionally required
        # when calibrate=True because it is prepended to the solution below.
        # (Previously a missing code_prompt surfaced as a bare KeyError / 500.)
        required_keys = ["task_id", "res_id", "test", "solution", "entry_point"]
        if calibrate:
            required_keys = required_keys + ["code_prompt"]

        with ProcessPoolExecutor(max_workers=n_workers) as executor:
            futures = []
            completion_id = Counter()
            n_samples = 0
            eval_results = defaultdict(list)  # task_id -> list of result dicts
            remainings = set()

            for i, sample in enumerate(samples):
                # TODO: investigate why HTTPException detail is not passed to client.
                for key in required_keys:
                    if key not in sample:
                        raise HTTPException(status_code=400, detail=f"'{key}' not in sample {i}!")

                if not isinstance(sample["solution"], str):
                    raise HTTPException(status_code=400, detail="Solution must be a string!")

                sample["_identifier"] = (
                    sample["task_id"] + f" (line {i+1} )"
                )

                task_id = sample["task_id"]
                solution = sample["solution"]

                if calibrate:
                    # Calibration: prepend the prompt (with a stub body) so the
                    # test file sees the complete function definition.
                    solution = sample["code_prompt"] + "\n    pass\n" + solution
                remainings.add(sample["_identifier"])
                args = (
                    completion_id[task_id],
                    sample["res_id"],
                    task_id,
                    solution,
                    sample["test"],
                    sample["entry_point"],
                    max_as_limit,
                    max_data_limit,
                    max_stack_limit,
                    sample["_identifier"],
                    min_time_limit,
                    # Fall back to 20s when no ground-truth timing is known
                    # (single lookup; the old code called .get() twice).
                    expected_time.get(task_id) or 20,
                )
                futures.append(executor.submit(check_correctness, *args))
                completion_id[task_id] += 1
                n_samples += 1

            assert n_samples == len(remainings), "Missing problems in unfinished"

            for future in as_completed(futures):
                result = future.result()
                remainings.remove(result["_identifier"])
                eval_results[result["task_id"]].append(result)
                # Free each result eagerly: evaluation payloads can be large.
                del future, result
                gc.collect()

        # sort the results for each problem by completion_id
        for task_id, task_results in eval_results.items():
            task_results.sort(key=lambda x: x["completion_id"])
            results["eval"][task_id] = []
            for res in task_results:
                stat, details = res["base"]
                results["eval"][task_id].append(
                    {
                        "res_id": res["res_id"],
                        "task_id": task_id,
                        "solution": res["solution"],
                        "status": stat,
                        "details": details,
                    }
                )
        return results

    return app
136
+
137
def check_correctness(
    completion_id: int,
    res_id: int,
    task_id: str,
    solution: str,
    test: str,
    entry_point: str,
    max_as_limit: float,
    max_data_limit: float,
    max_stack_limit: float,
    identifier=None,
    min_time_limit: float = 0.1,
    gt_time_limit: float = 2.0,
) -> Dict[str, Result]:
    """Run one sample through untrusted_check and bundle identifying
    metadata with the ("base") execution outcome."""
    outcome = untrusted_check(
        solution,
        test,
        entry_point,
        max_as_limit,
        max_data_limit,
        max_stack_limit,
        min_time_limit,
        gt_time_limit,
    )
    return {
        "completion_id": completion_id,
        "res_id": res_id,
        "task_id": task_id,
        "_identifier": identifier,
        "solution": solution,
        "base": outcome,
    }
169
+
170
+
171
def get_groundtruth():
    # Placeholder: ground-truth execution (requested via no_gt=False) is not
    # supported by this service yet, so reject with 405 Method Not Allowed.
    raise HTTPException(status_code=405, detail="Groundtruth execution is not implemented yet!")
api/code_execution.py ADDED
@@ -0,0 +1,524 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # The MIT License
2
+ #
3
+ # Copyright (c) OpenAI (https://openai.com)
4
+ #
5
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ # of this software and associated documentation files (the "Software"), to deal
7
+ # in the Software without restriction, including without limitation the rights
8
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ # copies of the Software, and to permit persons to whom the Software is
10
+ # furnished to do so, subject to the following conditions:
11
+ #
12
+ # The above copyright notice and this permission notice shall be included in
13
+ # all copies or substantial portions of the Software.
14
+ #
15
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21
+ # THE SOFTWARE.
22
+
23
+ import contextlib
24
+ import faulthandler
25
+ import tempfile
26
+ import platform
27
+ import itertools
28
+ import io
29
+ import os
30
+ import sys
31
+ import time
32
+ import types
33
+ import unittest
34
+ import subprocess
35
+ import signal
36
+ import multiprocessing
37
+ from multiprocessing import Value, Manager
38
+ from typing import List, Tuple, Union
39
+
40
+ import numpy as np
41
+
42
TIMEOUT_LIMIT=240.0  # Per-task wall-clock budget in seconds (BCB default is 240.0); see untrusted_check
43
+
44
+
45
@contextlib.contextmanager
def swallow_subprocess_output():
    """Context manager that silences subprocess stdout/stderr by defaulting
    them to PIPE; originals are restored on exit."""
    saved_popen = subprocess.Popen
    saved_run = subprocess.run

    def _silenced(fn):
        # Both Popen and run need the identical kwarg adjustment, so build
        # the patched callable once per original.
        def wrapper(*args, **kwargs):
            if kwargs.get('capture_output'):
                # capture_output conflicts with explicit stdout/stderr.
                kwargs.pop('stdout', None)
                kwargs.pop('stderr', None)
            else:
                kwargs.setdefault('stdout', subprocess.PIPE)
                kwargs.setdefault('stderr', subprocess.PIPE)
            return fn(*args, **kwargs)
        return wrapper

    subprocess.Popen = _silenced(saved_popen)
    subprocess.run = _silenced(saved_run)
    try:
        yield
    finally:
        subprocess.Popen = saved_popen
        subprocess.run = saved_run
78
+
79
+
80
@contextlib.contextmanager
def swallow_io():
    """Silence stdin, stdout, stderr and subprocess output for the block."""
    sink = WriteOnlyStringIO()
    with contextlib.redirect_stdout(sink), \
            contextlib.redirect_stderr(sink), \
            redirect_stdin(sink), \
            swallow_subprocess_output():
        yield
88
+
89
+
90
@contextlib.contextmanager
def time_limit(seconds: float):
    # Raise TimeoutException in the main thread if the block runs longer than
    # `seconds` (uses SIGALRM, so Unix-only and main-thread-only).
    def signal_handler(signum, frame):
        raise TimeoutException("Timed out!")

    # NOTE(review): the timer is armed before the handler is installed; a
    # sub-microsecond `seconds` could fire into the previous handler — confirm
    # whether this ordering is intentional.
    signal.setitimer(signal.ITIMER_REAL, seconds)
    signal.signal(signal.SIGALRM, signal_handler)
    try:
        yield
    finally:
        # Disarm the timer even if the block raised.
        signal.setitimer(signal.ITIMER_REAL, 0)
101
+
102
+
103
@contextlib.contextmanager
def create_tempdir():
    """Yield a fresh temporary directory, chdir'd into for the duration;
    both the cwd and the directory are cleaned up on exit."""
    with tempfile.TemporaryDirectory() as dirname, chdir(dirname):
        yield dirname
108
+
109
+
110
@contextlib.contextmanager
def chdir(root):
    """Temporarily change the working directory to `root`.

    A root of "." is a no-op. The original cwd is always restored, even when
    the block raises. (The previous `except BaseException: raise` clause was
    a no-op re-raise and has been removed; `finally` already guarantees the
    restore.)
    """
    if root == ".":
        yield
        return
    cwd = os.getcwd()
    os.chdir(root)
    try:
        yield
    finally:
        os.chdir(cwd)
123
+
124
+
125
@contextlib.contextmanager
def safe_environment():
    """
    Patch os/subprocess process-management entry points so code under test
    cannot kill or exec over this worker process, then reap any children the
    code spawned. All originals are restored on exit.
    """
    # Save original functions
    original_kill = os.kill
    original_killpg = os.killpg
    original_system = os.system
    original_subprocess_call = subprocess.call
    original_subprocess_check_output = subprocess.check_output
    original_subprocess_run = subprocess.run
    original_subprocess_popen = subprocess.Popen
    original_os_popen = os.popen
    original_os_execv = os.execv
    original_os_execvp = os.execvp
    original_os_execvpe = os.execvpe

    current_pid = os.getpid()
    current_pgid = os.getpgid(current_pid)
    # Manager-backed list so PIDs registered from subprocesses are visible here.
    manager = multiprocessing.Manager()
    child_pids = manager.list()

    def safe_kill(pid, sig):
        # Only allow signalling ourselves or children we started.
        try:
            pgid = os.getpgid(pid)
            if pid == current_pid or pid in child_pids:
                original_kill(pid, sig)
            else:
                print(f"Prevented attempt to kill PID {pid} with signal {sig}")
        except ProcessLookupError:
            pass

    def safe_killpg(pgid, sig):
        # Only allow signalling our own process group or a child's group.
        if pgid == current_pgid or pgid in {os.getpgid(pid) for pid in child_pids}:
            original_killpg(pgid, sig)
        else:
            print(f"Prevented attempt to kill PGID {pgid} with signal {sig}")

    def safe_system(command):
        print(f"Intercepted system command: {command}")
        if 'kill' in command or 'killall' in command:
            return 0  # Simulate successful execution without doing anything
        return original_system(command)

    def safe_subprocess_call(command, *args, **kwargs):
        print(f"Intercepted subprocess call: {command}")
        if 'kill' in command or 'killall' in command:
            return 0  # Simulate successful execution without doing anything
        return original_subprocess_call(command, *args, **kwargs)

    def safe_subprocess_check_output(command, *args, **kwargs):
        print(f"Intercepted command: {command}")
        if 'ps' in command:
            return b""  # Simulate no processes found
        return original_subprocess_check_output(command, *args, **kwargs)

    def safe_subprocess_run(*args, **kwargs):
        print(f"Intercepted subprocess run command: {args}")
        if 'kill' in args[0] or 'killall' in args[0]:
            return subprocess.CompletedProcess(args, 0, b'', b'')  # Simulate successful execution
        return original_subprocess_run(*args, **kwargs)

    class SafePopen(subprocess.Popen):
        # Records spawned PIDs in child_pids so they can be reaped on exit.
        def __init__(self, *args, **kwargs):
            print(f"Intercepted Popen command: {args}")
            kwargs['preexec_fn'] = os.setsid  # Start the process in a new session
            super().__init__(*args, **kwargs)
            child_pids.append(self.pid)

        def communicate(self, *args, **kwargs):
            try:
                return super().communicate(*args, **kwargs)
            except subprocess.TimeoutExpired:
                print("Timeout expired, intercepted and returning None")
                return None, None

        def kill(self):
            print(f"Intercepted kill call for PID {self.pid}")
            safe_kill(self.pid, signal.SIGTERM)

        def terminate(self):
            print(f"Intercepted terminate call for PID {self.pid}")
            safe_kill(self.pid, signal.SIGTERM)

    def safe_os_popen(command):
        print(f"Intercepted os.popen command: {command}")
        if 'kill' in command or 'killall' in command:
            return os.popen('echo Intercepted')
        return original_os_popen(command)

    def safe_exec(*args, **kwargs):
        # exec* is swallowed entirely: replacing this process is never allowed.
        print(f"Intercepted exec command: {args}")

    # Override the risky functions with the safe versions
    os.kill = safe_kill
    os.killpg = safe_killpg
    os.system = safe_system
    subprocess.call = safe_subprocess_call
    subprocess.check_output = safe_subprocess_check_output
    subprocess.run = safe_subprocess_run
    subprocess.Popen = SafePopen
    os.popen = safe_os_popen
    os.execv = safe_exec
    os.execvp = safe_exec
    os.execvpe = safe_exec

    try:
        yield
    finally:
        # Reap children: SIGTERM, poll for up to ~1s, then escalate to SIGKILL.
        for pid in child_pids:
            try:
                os.kill(pid, signal.SIGTERM)
                for _ in range(10):
                    time.sleep(0.1)
                    try:
                        os.kill(pid, 0)
                    except ProcessLookupError:
                        break
                else:
                    os.kill(pid, signal.SIGKILL)
            except ProcessLookupError:
                pass
            except Exception as e:
                print(f"Error handling process {pid}: {e}")

        os.kill = original_kill
        os.killpg = original_killpg
        os.system = original_system
        subprocess.call = original_subprocess_call
        subprocess.check_output = original_subprocess_check_output
        subprocess.run = original_subprocess_run
        subprocess.Popen = original_subprocess_popen
        os.popen = original_os_popen
        os.execv = original_os_execv
        os.execvp = original_os_execvp
        os.execvpe = original_os_execvpe
259
+
260
+
261
class TimeoutException(Exception):
    """Raised by time_limit when the guarded block exceeds its budget."""
263
+
264
+
265
class WriteOnlyStringIO(io.StringIO):
    """A StringIO sink: writes are accepted, every read raises IOError."""

    def _refuse(self, *args, **kwargs):
        # Shared implementation for all read paths.
        raise IOError

    read = _refuse
    readline = _refuse
    readlines = _refuse

    def readable(self, *args, **kwargs):
        """This stream is never readable."""
        return False
280
+
281
+
282
class redirect_stdin(contextlib._RedirectStream):  # type: ignore
    # Mirror of contextlib.redirect_stdout/redirect_stderr, but for sys.stdin.
    _stream = "stdin"
284
+
285
+
286
def reliability_guard(max_as_limit, max_data_limit, max_stack_limit):
    """
    This disables various destructive functions and prevents the generated code
    from interfering with the test (e.g. fork bomb, killing other processes,
    removing filesystem files, etc.)

    Limits are given in MiB; resource limits are only applied when all three
    are truthy. (The unused `from datetime import datetime` was removed.)

    WARNING
    This function is NOT a security sandbox. Untrusted code, including, model-
    generated code, should not be blindly executed outside of one. See the
    Codex paper for more information about OpenAI's code sandbox, and proceed
    with caution.
    """

    import os
    import time

    # Pin the timezone so time-dependent tests behave reproducibly (Unix-only).
    os.environ['TZ'] = 'UTC'
    time.tzset()

    # Keep numeric/TF libraries single-threaded and quiet.
    os.environ["OMP_NUM_THREADS"] = "1"
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = "3"
    os.environ['TF_ENABLE_ONEDNN_OPTS'] = "0"

    if max_as_limit and max_data_limit and max_stack_limit:
        import resource

        # Convert MiB -> bytes.
        max_as_limit = max_as_limit * 1024 * 1024
        max_data_limit = max_data_limit * 1024 * 1024
        max_stack_limit = max_stack_limit * 1024 * 1024

        resource.setrlimit(
            resource.RLIMIT_AS, (max_as_limit, max_as_limit)
        )
        resource.setrlimit(
            resource.RLIMIT_DATA, (max_data_limit, max_data_limit)
        )
        if not platform.uname().system == "Darwin":
            # macOS rejects changing RLIMIT_STACK this way; skip it there.
            resource.setrlimit(
                resource.RLIMIT_STACK, (max_stack_limit, max_stack_limit)
            )

    faulthandler.disable()

    import builtins

    # Neuter exit()/quit() so generated code cannot stop the worker.
    builtins.exit = None
    builtins.quit = None

    # Import matplotlib now (MPLCONFIGDIR points at a writable cache — see
    # Dockerfile) and close any figures a previous task left open.
    import matplotlib.pyplot as plt
    plt.close('all')
337
+
338
+
339
+ # unbiased estimator from https://github.com/openai/human-eval
340
def estimate_pass_at_k(
    num_samples: Union[int, List[int], np.ndarray],
    num_correct: Union[List[int], np.ndarray],
    k: int,
) -> np.ndarray:
    """
    Estimates pass@k of each problem and returns them in an array.

    Uses the unbiased estimator 1 - C(n-c, k) / C(n, k), computed as a
    numerically stable running product.
    """

    def _pass_at_k(n: int, c: int, k: int) -> float:
        # With fewer than k incorrect samples, every k-subset contains a pass.
        if n - c < k:
            return 1.0
        return 1.0 - np.prod(1.0 - k / np.arange(n - c + 1, n + 1))

    if isinstance(num_samples, int):
        # A single sample count applies to every problem.
        pairs = zip(itertools.repeat(num_samples, len(num_correct)), num_correct)
    else:
        assert len(num_samples) == len(num_correct)
        pairs = zip(num_samples, num_correct)

    return np.array([_pass_at_k(int(n), int(c), k) for n, c in pairs])
366
+
367
+
368
# Human-readable outcome labels returned to callers.
PASS = "pass"
FAIL = "fail"
TIMEOUT = "timeout"

# Integer codes written into the shared-memory Value by unsafe_execute.
_SUCCESS = 0
_FAILED = 1
_TIMEOUT = 2
_UNKNOWN = 3

# Integer code -> label; _UNKNOWN maps to None (treated as TIMEOUT by callers).
_mapping = {_SUCCESS: PASS, _FAILED: FAIL, _TIMEOUT: TIMEOUT, _UNKNOWN: None}
378
+
379
+
380
def is_floats(x) -> bool:
    """True for a float, a list/tuple of floats, or a float32/float64 ndarray."""
    if isinstance(x, float):
        return True
    if isinstance(x, (list, tuple)):
        return all(isinstance(item, float) for item in x)
    if isinstance(x, np.ndarray):
        # dtype equality against np.float64/np.float32 matches the originals.
        return x.dtype in (np.float64, np.float32)
    return False
389
+
390
+
391
def unsafe_execute(
    entry_point: str,
    code: str,
    test_code: str,
    timeout: float,
    max_as_limit: float,
    max_data_limit: float,
    max_stack_limit: float,
    stat,  # Value: receives _SUCCESS/_FAILED; stays _UNKNOWN on hard timeout
    details,  # Manager dict: test-case name -> failure traceback
):
    # Executes `code` + `test_code` in-process under resource limits and
    # reports through the shared stat/details objects. Intended to run as the
    # target of a separate multiprocessing.Process (see untrusted_check).
    with safe_environment(), create_tempdir():
        # These system calls are needed when cleaning up tempdir.
        import os
        import shutil
        import builtins

        # Keep references before reliability_guard/test code can clobber them.
        rmtree = shutil.rmtree
        rmdir = os.rmdir
        chdir = os.chdir
        # Disable functionalities that can make destructive changes to the test.
        reliability_guard(max_as_limit, max_data_limit, max_stack_limit)
        module_name = "__test__"
        new_module = types.ModuleType(module_name)
        # Set necessary attributes for the module
        new_module.__dict__.update({
            '__builtins__': builtins,
            '__file__': f"{module_name}.py",
            '__package__': None,
            '__doc__': None,
            'sys': sys,
            'os': os,
            'environ': os.environ,
        })

        try:
            full_code = code + "\n" + test_code

            with swallow_io():
                # The solution and its unittest file share one module namespace.
                exec(compile(full_code, f"{module_name}.py", 'exec'), new_module.__dict__)
                sys.modules[module_name] = new_module
                # Test files are expected to define a TestCases TestCase class.
                TestCases = getattr(new_module, 'TestCases')
                loader = unittest.TestLoader()
                suite = loader.loadTestsFromTestCase(TestCases)
                test_result = unittest.TestResult()
                # NOTE(review): start_time is unused — candidate for removal.
                start_time = time.time()
                with time_limit(timeout):
                    suite.run(test_result)

            # Record one traceback per failing/erroring test method.
            issues = test_result.failures + test_result.errors
            for test, trace in issues:
                details[test.id().split(".")[-1]] = trace
            stat.value = _SUCCESS
        except BaseException as e:
            details["ALL"] = str(e)
            stat.value = _FAILED
        # Needed for cleaning up.
        shutil.rmtree = rmtree
        os.rmdir = rmdir
        os.chdir = chdir
451
+
452
+
453
def untrusted_check(
    code: str,
    test_code: str,
    entry_point: str,
    max_as_limit: float,
    max_data_limit: float,
    max_stack_limit: float,
    min_time_limit: float = 10,
    gt_time_limit: float = 60
) -> Tuple[str, np.ndarray]:
    """
    Execute untrusted `code` + `test_code` in a separate, resource-limited
    process and classify the outcome.

    Returns (status, details): status is PASS/FAIL/TIMEOUT and details maps
    test-case names to failure tracebacks.
    NOTE(review): the annotation says np.ndarray, but details is a plain dict;
    kept as-is for interface stability — confirm with callers.
    """
    min_time_limit = max(min_time_limit, gt_time_limit)
    # BUG FIX: os.getenv returns a *string* when BIGCODEBENCH_TIMEOUT_PER_TASK
    # is set, which made max(str, float) raise TypeError — coerce to float.
    timeout = max(float(os.getenv("BIGCODEBENCH_TIMEOUT_PER_TASK", TIMEOUT_LIMIT)), min_time_limit) + 1
    # shared memory objects
    stat = Value("i", _UNKNOWN)
    manager = Manager()
    details = manager.dict()

    p = multiprocessing.Process(
        target=unsafe_execute,
        args=(
            entry_point,
            code,
            test_code,
            timeout,
            max_as_limit,
            max_data_limit,
            max_stack_limit,
            stat,
            details,
        ),
    )
    p.start()
    p.join(timeout=timeout+1)
    # Escalate SIGTERM -> SIGKILL if the child is still alive after the join.
    if p.is_alive():
        p.terminate()
        time.sleep(0.1)
    if p.is_alive():
        p.kill()
        time.sleep(0.1)

    stat = _mapping[stat.value]
    # convert details to a dict
    details = dict(details)

    # _UNKNOWN (child never reported) means the run timed out or was killed.
    if not stat:
        stat = TIMEOUT
    # Any recorded test failure downgrades a "clean" run to FAIL.
    if stat == PASS:
        if details:
            stat = FAIL

    return stat, details
504
+
505
+
506
def evaluate_files(
    files: List[str],
    inputs: List,
    entry_point: str,
    min_time_limit: float = 0.1,
    gt_time_limit_factor: float = 2.0,
) -> List[Tuple[str, List[bool]]]:
    # NOTE(review): this helper looks stale/dead — untrusted_check requires
    # the three resource-limit arguments, so this 3-argument call raises
    # TypeError; `inputs` is passed where test_code is expected; and the
    # returned details is a dict, which has no .tolist(). Confirm whether any
    # caller still uses this before relying on (or fixing) it.
    ret = []
    # sort files by the id in name (i.e., "../n.py")
    files = sorted(files, key=lambda x: int(x.split("/")[-1].split(".")[0]))
    for file in files:
        code = open(file, "r").read()
        stat, det = untrusted_check(
            code,
            inputs,
            entry_point,
        )
        ret.append((stat, det.tolist()))
    return ret
dev.sh ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash
# Development entrypoint: one gunicorn worker running the FastAPI app factory
# on port 7860 (the port HF Spaces expects), with --reload for live edits and
# --timeout 0 so long-running evaluations are never killed by the worker timer.
exec \
    gunicorn \
    -k uvicorn.workers.UvicornWorker \
    --workers 1 \
    --timeout 0 \
    --bind 0.0.0.0:7860 \
    --enable-stdio-inheritance \
    --access-logfile - \
    --reload \
    'api.app:create_app()'
prod.sh ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash
# Production entrypoint (see Dockerfile ENTRYPOINT): two gunicorn workers on
# port 7860; --timeout 0 disables the worker kill-timer so long evaluations
# survive; no --reload in production.
exec \
    gunicorn \
    -k uvicorn.workers.UvicornWorker \
    --workers 2 \
    --timeout 0 \
    --bind 0.0.0.0:7860 \
    --enable-stdio-inheritance \
    --access-logfile - \
    'api.app:create_app()'