From 47bb4e366e4e21e1d02fa45fde74610755cd40bd Mon Sep 17 00:00:00 2001 From: hailin Date: Fri, 1 Aug 2025 11:43:27 +0800 Subject: [PATCH] . --- meta_ui.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/meta_ui.py b/meta_ui.py index 1e4e7c8..1a0762d 100644 --- a/meta_ui.py +++ b/meta_ui.py @@ -167,7 +167,7 @@ def chat( result = {"text": str(result)} history.append({"role": "assistant", "content": result["text"]}) - yield result["text"], history # ✅ 显示模型输出,同时更新 history + yield result["text"], None # ✅ 显示模型输出;history 不在此处更新(已在上方 append) return else: while thread.is_alive(): @@ -218,6 +218,7 @@ with gr.Blocks(title="调试界面") as demo: with gr.Row(): api_choice = gr.Dropdown(choices=["/generate", "/v1/completions", "/v1/chat/completions"], value="/generate", label="选择推理接口") + with gr.Row(): max_new = gr.Slider(32, 32768, 128, label="max_new_tokens") temp = gr.Slider(0, 1.5, 0.8, step=0.05, label="temperature") @@ -249,6 +250,15 @@ with gr.Blocks(title="调试界面") as demo: outputs=[log_box], ) + def clear_all_logs(_): + global LOG_Q, LOG_TXT, prev_log_value + with LOG_Q.mutex: + LOG_Q.queue.clear() + LOG_TXT = "" + prev_log_value = "" + return gr.update(value=""), gr.update(value="") + + api_choice.change(fn=clear_all_logs, inputs=api_choice, outputs=[log_state, log_box]) log_state.change(lambda txt: gr.update(value=txt), log_state, log_box) dbg_chk.change(lambda v: gr.update(visible=v), dbg_chk, log_box)