From e71c4823efb2c388cdfd36b7806bf4afd4314956 Mon Sep 17 00:00:00 2001
From: hailin
Date: Fri, 1 Aug 2025 13:53:06 +0800
Subject: [PATCH] .

---
 meta_ui.py | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)

diff --git a/meta_ui.py b/meta_ui.py
index 7edc096..4d85b2d 100644
--- a/meta_ui.py
+++ b/meta_ui.py
@@ -148,14 +148,21 @@ def chat(
             except Empty:
                 continue
 
+            # worker() previously put the full string into result_q, so at this point it is just a string
             if isinstance(result, str):
-                result = {"text": result}
-            elif not isinstance(result, dict) or "text" not in result:
-                result = {"text": str(result)}
+                # Assume it is a JSON string (though what was originally enqueued is the content text)
+                try:
+                    parsed = json.loads(result)
+                    txt = parsed["choices"][0]["message"]["content"].strip()
+                except Exception:
+                    txt = result.strip()
+            elif isinstance(result, dict):
+                txt = result.get("text", "").strip()
+            else:
+                txt = str(result).strip()
 
-            # ❌ Do not append to history (so the frontend UI does not show previous turns)
-            # ✅ But the full history has already been passed to the LLM for inference above
-            yield result["text"], None  # UI shows only the current reply
+            # ✅ Recommended return structure: auto-rendered and automatically appended to history
+            yield {"text": txt}, log_state
             return
         else:
             while thread.is_alive():