# meta_ui.py — debug chat UI for a local sglang (0.4.8.post1) /generate server.

import json, datetime, textwrap, requests, gradio as gr
from pathlib import Path
from collections import deque
import queue, threading, time
# ───────────────────── Basic configuration ─────────────────────
API_URL = "http://localhost:30000/generate"  # sglang native /generate endpoint
API_KEY = "token-abc123"                     # sent as a Bearer token on every request
MODEL_PATH = Path("/root/.cradle/Alibaba/Qwen3-30B-A3B-Base")  # local checkpoint dir
def model_name(path: Path) -> str:
    """Best-effort display name for a checkpoint directory.

    Reads ``config.json`` inside *path* and returns, in order of preference,
    the first entry of ``architectures``, then ``model_type``, then the
    directory name. Falls back to the directory name when the file is
    missing or unreadable — this value is only used as a UI label and in
    request payloads, so failing soft is preferable to crashing at startup.
    """
    cfg = path / "config.json"
    if cfg.exists():
        try:
            # read_text + json.loads: no dangling file handle (the original
            # json.load(cfg.open()) never closed the file).
            data = json.loads(cfg.read_text(encoding="utf-8"))
        except (OSError, json.JSONDecodeError):
            return path.name
        return data.get("architectures", [None])[0] or data.get("model_type") or path.name
    return path.name
# Resolved once at startup; used both as the UI label and the "model" field
# of every request payload.
MODEL_NAME = model_name(MODEL_PATH)
def now() -> str:
    """Current wall-clock time as ``HH:MM:SS`` — used to timestamp log lines.

    (Was a lambda assigned to a name; PEP 8 E731 prefers a ``def``.)
    """
    return datetime.datetime.now().strftime("%H:%M:%S")
# ───────────────────── Log queue ─────────────────────
# Producer: any request handler calling log(). Consumer: the UI Timer tick.
LOG_Q: "queue.Queue[str]" = queue.Queue()


def log(msg):
    """Print *msg* to the terminal and push it onto the UI log queue."""
    print(msg, flush=True)
    LOG_Q.put(msg)


def consume_logs(state_txt: str) -> str:
    """Timer callback: drain LOG_Q and append new lines to *state_txt*.

    Keeps only the most recent 400 lines (deque maxlen) so the log textbox
    does not grow without bound. Returns the updated joined text.
    """
    buf = deque(state_txt.splitlines(), maxlen=400)
    # EAFP drain: get_nowait()/queue.Empty instead of the racy
    # `while not LOG_Q.empty(): LOG_Q.get()` check-then-act pattern.
    while True:
        try:
            buf.append(LOG_Q.get_nowait())
        except queue.Empty:
            break
    return "\n".join(buf)
# ───────────────────── Backend call ─────────────────────
def backend(text, sampling):
    """POST *text* and *sampling* params to the sglang /generate endpoint.

    Returns the generated text on success, or a bracketed error string on
    any failure (non-200 status, network exception, empty completion) —
    callers always get a displayable string, never an exception.
    """
    payload = {"model": MODEL_NAME, "text": text, "sampling_params": sampling}
    # Log the full payload before sending (goes to terminal + UI log queue).
    log(f"\n🟡 [{now()}] payload\n{json.dumps(payload, ensure_ascii=False, indent=2)}")
    try:
        r = requests.post(API_URL,
        headers={"Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json"},
        json=payload, timeout=180)
        # Body may not be JSON (e.g. HTML error page) — fall back to {} so
        # the .get() chains below stay safe.
        try:
            data = r.json()
        except Exception:
            data = {}
        # NOTE(review): assumes the sglang response carries meta_info with
        # finish_reason / completion_tokens — absent keys just log as None.
        fr = data.get("meta_info", {}).get("finish_reason")
        ctok = data.get("meta_info", {}).get("completion_tokens")
        log(f"🟢 [{now()}] HTTP {r.status_code} tokens={ctok} finish={fr}\n"
        f"🟢 resp800={r.text[:800]!r}")
        if r.status_code != 200:
            return f"[HTTP {r.status_code}] {r.text[:300]}"
        # Empty/whitespace-only completion is surfaced explicitly.
        return data.get("text", "").strip() or "[⚠ 空]"
    except Exception as e:
        # Network/timeout errors: log and return the error as the reply text.
        log(f"[❌ 请求异常] {e}")
        return f"[❌ 请求异常] {e}"
# ───────────────────── Chat callback ─────────────────────
def chat(
    user, history,
    max_new, temp, top_p, top_k,
    rep_pen, pres_pen, stop_raw,
    log_state
):
    """gr.ChatInterface callback.

    Builds the sampling-parameter dict from the UI controls, forwards the
    user message to the backend, and returns (reply, log_state). log_state
    is passed through untouched — the Timer tick owns refreshing it.
    """
    # Comma-separated stop sequences; blanks are ignored.
    stop_seqs = [token.strip() for token in stop_raw.split(",") if token.strip()]
    params = {
        "max_new_tokens": int(max_new),
        "temperature": temp,
        "top_p": top_p,
        "top_k": int(top_k),
        "repetition_penalty": rep_pen,
        "presence_penalty": pres_pen,
    }
    if stop_seqs:
        params["stop"] = stop_seqs
    reply = backend(user, params)
    return reply, log_state
# ───────────────────── Gradio UI ─────────────────────
with gr.Blocks(title="调试界面") as demo:
    gr.Markdown(f"## 💬 调试界面 \n权重 **{MODEL_PATH.name}**")
    # Sampling-parameter controls
    with gr.Row():
        max_new = gr.Slider(32, 32768, 2048, label="max_new_tokens")
        temp = gr.Slider(0, 1.5, 0.8, step=0.05, label="temperature")
    with gr.Row():
        top_p = gr.Slider(0, 1, 0.95, step=0.01, label="top_p")
        top_k = gr.Slider(0, 200, 50, step=1, label="top_k")
    with gr.Row():
        rep_pen = gr.Slider(0.8, 2, 1.05, step=0.01, label="repetition_penalty")
        pres_pen= gr.Slider(0, 2, 0.0, step=0.05, label="presence_penalty")
    stop_txt = gr.Textbox("", label="stop 序列(逗号分隔)")
    dbg_chk = gr.Checkbox(label="📜 显示 Debug 面板", value=True)
    log_box = gr.Textbox(label="实时日志", lines=20, interactive=False, visible=True)
    log_state= gr.State("")  # accumulated log text (full history, capped by consume_logs)
    # ────────────── Periodic log refresh ──────────────
    logger = gr.Timer(1.0, render=False)  # tick once per second
    logger.tick(
        fn=consume_logs,
        inputs=[log_state],
        outputs=[log_state],
    )
    # Mirror log_state into the visible textbox whenever it changes.
    log_state.change(lambda txt: gr.update(value=txt), log_state, log_box)
    # Debug-panel visibility toggle
    dbg_chk.change(lambda v: gr.update(visible=v), dbg_chk, log_box)
    # Chatbot
    chatbot = gr.ChatInterface(
        fn=chat,
        additional_inputs=[max_new, temp, top_p, top_k,
        rep_pen, pres_pen, stop_txt, log_state],
        additional_outputs=[log_state],  # required: chat() returns (reply, log_state)
        type="messages"
    )
demo.launch(server_name="0.0.0.0", server_port=30001)