Spaces · Running on CPU Upgrade
Terry Zhuo committed · Commit 4e93785 · 1 Parent: a437a30
update
app.py
CHANGED
@@ -1,3 +1,4 @@
+from fastapi import FastAPI
 import gradio as gr
 import subprocess
 import sys
@@ -9,10 +10,16 @@ import glob
 import shutil
 from pathlib import Path
 from apscheduler.schedulers.background import BackgroundScheduler
+import signal
+import uvicorn
+

 default_command = "bigcodebench.evaluate"
 is_running = False
 lock = threading.Lock()
+process = None
+
+app = FastAPI()

 def generate_command(
     jsonl_file, split, subset, parallel,
@@ -67,7 +74,7 @@ def find_result_file():
     return None

 def run_bigcodebench(command):
-    global is_running
+    global is_running, process
     with lock:
         if is_running:
             yield "A command is already running. Please wait for it to finish.\n"
@@ -77,12 +84,12 @@ def run_bigcodebench(command):
     try:
         yield f"Executing command: {command}\n"

-        process = subprocess.Popen(command.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True)
+        process = subprocess.Popen(command.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, preexec_fn=os.setsid)

         for line in process.stdout:
             yield line

-
+        process.wait()

         if process.returncode != 0:
             yield f"Error: Command exited with status {process.returncode}\n"
@@ -97,6 +104,13 @@ def run_bigcodebench(command):
     finally:
         with lock:
             is_running = False
+            process = None
+
+def kill_process():
+    global process
+    if process:
+        os.killpg(os.getpgid(process.pid), signal.SIGTERM)
+        process = None

 def stream_logs(command, jsonl_file=None):
     global is_running
@@ -132,6 +146,24 @@ with gr.Blocks() as demo:
         check_gt_only = gr.Checkbox(label="Check GT Only")
         no_gt = gr.Checkbox(label="No GT")

+    kill_process_btn = gr.Button("Kill Process", visible=False)
+    kill_process_btn.click(kill_process)
+
+    # Add this JavaScript to handle window closing
+    gr.HTML("""
+    <script>
+    window.addEventListener('beforeunload', function (e) {
+        fetch('/kill_process', {
+            method: 'POST',
+            headers: {
+                'Content-Type': 'application/json',
+            },
+            body: JSON.stringify({}),
+        });
+    });
+    </script>
+    """)
+
     command_output = gr.Textbox(label="Command", value=default_command, interactive=False)
     with gr.Row():
         submit_btn = gr.Button("Run Evaluation")
@@ -174,5 +206,21 @@ with gr.Blocks() as demo:
                           inputs=[command_output, jsonl_file, subset, split],
                           outputs=[log_output, download_btn])

-
+@app.get("/")
+def read_main():
+    return {"message": "This is your main app"}
+
+@app.post("/kill_process")
+async def api_kill_process():
+    kill_process()
+    return {"status": "success"}
+
+# demo.queue(max_size=300).launch(
+#     share=True,
+#     server_name="0.0.0.0",
+#     server_port=7860,
+#     additional_routes={"/kill_process": kill_process_api}
+# )
+app = gr.mount_gradio_app(app, demo, path="/gradio")
+uvicorn.run(app, host="0.0.0.0", port=7860)
 scheduler = BackgroundScheduler()
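
Note on the key behavioral change: the new Popen call starts the evaluator in its own process group (preexec_fn=os.setsid), which is what lets kill_process() terminate the whole process tree with os.killpg rather than only the direct child. A minimal, self-contained sketch of that pattern, assuming a Linux host; the sleep command and names below are illustrative, not from the Space:

# Sketch of the process-group termination pattern used in this commit (illustrative).
import os
import signal
import subprocess

proc = subprocess.Popen(
    ["sleep", "60"],            # stand-in for the bigcodebench.evaluate command
    stdout=subprocess.PIPE,
    stderr=subprocess.STDOUT,
    universal_newlines=True,
    start_new_session=True,     # equivalent to preexec_fn=os.setsid: child leads a new process group
)

# Later (e.g. from a /kill_process handler), terminate the whole group, including any workers it spawned.
os.killpg(os.getpgid(proc.pid), signal.SIGTERM)
proc.wait()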
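For context on the new serving path: instead of launching Gradio directly, the commit mounts the Blocks UI onto a FastAPI app and serves both with uvicorn, which is what makes the extra /kill_process route reachable from the browser's beforeunload hook. A stripped-down sketch of that arrangement, with a placeholder UI and route body (illustrative, not the Space's full app):

# Sketch: Gradio Blocks mounted inside a FastAPI app so custom routes coexist with the UI.
import gradio as gr
import uvicorn
from fastapi import FastAPI

app = FastAPI()

@app.post("/kill_process")
async def api_kill_process():
    # In the Space this would call kill_process() to stop a running evaluation.
    return {"status": "success"}

with gr.Blocks() as demo:
    gr.Markdown("Evaluator UI placeholder")

# Serve the Gradio UI under /gradio and the API routes on the same port.
app = gr.mount_gradio_app(app, demo, path="/gradio")

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=7860)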