parent 9eb86bc4aa
commit 063f21a336

gradio_ui.py (55)
@@ -188,24 +188,47 @@ def run_eval_tool(
 # ---------------- Stop function ----------------
 def stop_eval():
+    """
+    Fully terminate current_process and every descendant it spawned:
+    1. Send SIGINT (Ctrl-C) first to allow a graceful exit
+    2. Escalate anything still alive after the 10-second grace period to SIGKILL
+    3. Finally wait() on the main process so no zombie is left behind
+    """
     global current_process, should_stop
     should_stop = True
 
-    if current_process and current_process.poll() is None:
-        try:
-            pgid = os.getpgid(current_process.pid)
-            os.killpg(pgid, signal.SIGINT)
-            time.sleep(2)
-            if current_process.poll() is None:
-                os.killpg(pgid, signal.SIGKILL)
-            return "[✅ 已发送终止信号 (SIGINT → SIGKILL fallback)]\n"
-        except Exception as e:
-            return f"[❌ 终止失败: {e}]\n"
-        finally:
-            current_process = None
-    else:
-        return "[⚠️ 无活动 evalscope 进程]\n"
+    if not (current_process and current_process.poll() is None):
+        return "[⚠️ 无活动 evalscope 进程]\n"
+
+    try:
+        parent = psutil.Process(current_process.pid)
+        family = parent.children(recursive=True) + [parent]  # the whole process tree
+
+        # ── 1) Try a graceful shutdown ──────────────────────
+        for p in family:
+            p.send_signal(signal.SIGINT)
+
+        # 10-second grace period
+        _, alive = psutil.wait_procs(family, timeout=10)
+
+        # ── 2) Force-kill whatever is still alive ────────────────
+        for p in alive:
+            p.kill()
+        psutil.wait_procs(alive, timeout=10)
+
+        # ── 3) Reap the zombie so all handles get closed ────────────
+        current_process.wait(timeout=10)
+
+        return "[✅ 已终止进程树 (SIGINT ➜ SIGKILL fallback)]\n"
+
+    except Exception as e:
+        return f"[❌ 终止失败: {e}]\n"
+
+    finally:
+        current_process = None
 
 
 # ---------------- Controller ----------------
 def toggle_run(
     inputs, native, other, output_choices,
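The new stop_eval body relies on current_process being the root of the tree that evalscope spawns. As a reference, here is a minimal, self-contained sketch of the same SIGINT → grace period → SIGKILL escalation; the launch_eval helper, the start_new_session flag, and the grace default are illustrative assumptions and are not part of this commit.

import signal
import subprocess

import psutil


def launch_eval(cmd: list[str]) -> subprocess.Popen:
    # Hypothetical launcher: start_new_session=True puts the child in its own
    # process group so a group-wide signal cannot hit the Gradio app itself.
    return subprocess.Popen(cmd, start_new_session=True)


def terminate_tree(proc: subprocess.Popen, grace: float = 10.0) -> str:
    """SIGINT the whole tree, then escalate survivors to SIGKILL after `grace` seconds."""
    if proc.poll() is not None:
        return "already exited"

    try:
        parent = psutil.Process(proc.pid)
    except psutil.NoSuchProcess:
        return "already exited"

    family = parent.children(recursive=True) + [parent]

    for p in family:
        try:
            p.send_signal(signal.SIGINT)   # polite Ctrl-C first
        except psutil.NoSuchProcess:
            pass                           # child died while we were iterating

    _, alive = psutil.wait_procs(family, timeout=grace)
    for p in alive:
        p.kill()                           # escalate for stragglers
    psutil.wait_procs(alive, timeout=grace)

    proc.wait(timeout=grace)               # reap the zombie, close the pipes
    return "terminated"

Guarding each send_signal with psutil.NoSuchProcess covers the race where a child exits between the children() snapshot and the signal; the final proc.wait() is what prevents the defunct zombie that a bare kill would otherwise leave behind.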
@@ -300,14 +323,14 @@ with gr.Blocks(title="EvalScope 全功能界面") as demo:
     with gr.Row():
         api_provider_dropdown = gr.Dropdown(label="API Provider", choices=["openai", "azure", "ollama", "gemini"], value="openai")
         dataset_dropdown = gr.Dropdown(label="评测数据集 (--dataset)", choices=PERF_DATASETS, value=PERF_DATASETS[0])
-        model_override_input = gr.Textbox(label="自定义模型名 (--model)", placeholder="my-llm")
+        model_override_input = gr.Textbox(label="自定义模型名 (--model)", placeholder="llm-name")
     with gr.Row():
         max_tokens_slider = gr.Slider(label="Max Tokens", minimum=256, maximum=8192, step=256, value=1024)
         min_tokens_slider = gr.Slider(label="Min Tokens", minimum=0, maximum=4096, step=64, value=1024)
     with gr.Row():
-        parallel_slider = gr.Slider(label="并发请求数", minimum=1, maximum=16, step=1, value=1)
+        parallel_slider = gr.Slider(label="并发请求数", minimum=1, maximum=100, step=1, value=1)
         num_req_slider = gr.Slider(label="请求条数", minimum=1, maximum=1000, step=1, value=100)
-        max_prompt_len_slider = gr.Slider(label="最大 Prompt 长度", minimum=2048, maximum=32768, step=512, value=15360)
+        max_prompt_len_slider = gr.Slider(label="最大 Prompt 长度", minimum=2048, maximum=262144, step=512, value=15360)
 
     with gr.Row():
         with gr.Column():
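The sliders above only define widgets; connecting them and the stop function to event handlers happens elsewhere in gradio_ui.py. Below is a hedged sketch of that wiring, with a stand-in stop_eval and invented component names (stop_btn, log_box) that are not taken from this diff.

import gradio as gr


def stop_eval() -> str:
    # Stand-in for the stop_eval defined earlier in gradio_ui.py; it returns a
    # one-line status string such as "[✅ 已终止进程树 ...]".
    return "[⚠️ 无活动 evalscope 进程]\n"


with gr.Blocks(title="EvalScope 全功能界面") as demo:
    with gr.Row():
        parallel_slider = gr.Slider(label="并发请求数", minimum=1, maximum=100, step=1, value=1)
        num_req_slider = gr.Slider(label="请求条数", minimum=1, maximum=1000, step=1, value=100)
    log_box = gr.Textbox(label="运行日志", lines=12)
    stop_btn = gr.Button("停止评测")

    # stop_eval takes no inputs; its return value replaces the log box content.
    stop_btn.click(fn=stop_eval, inputs=None, outputs=log_box)

if __name__ == "__main__":
    demo.launch()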