From 4e1070db676114899f1b25d156317fd63ece348e Mon Sep 17 00:00:00 2001
From: hailin
Date: Thu, 3 Jul 2025 21:16:33 +0800
Subject: [PATCH] .

---
 Dockerfile | 20 +++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index b2fb28e..1dd6d95 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -85,13 +85,22 @@ WORKDIR /sgl/sglang/python
 RUN python3 -m pip install ".[srt,openai]" --no-build-isolation && \
     python3 -m pip wheel ".[srt,openai]" --no-deps -w /tmp/sg_wheels
 
+
 # ── Collect every wheel into /wheels ──────────────────────────────────────
 RUN mkdir -p /wheels && \
     cp /tmp/torch_dist/torch*.whl /wheels/ && \
     cp /opt/vision/dist/torchvision-*.whl /wheels/ && \
     cp /opt/flashinfer/dist/flashinfer_python-*.whl /wheels/ && \
     cp /tmp/vllm_wheels/vllm-*.whl /wheels/ && \
-    cp /tmp/sg_wheels/sglang-*.whl /wheels/
+    cp /tmp/sg_wheels/sglang-*.whl /wheels/ && \
+    pip wheel filelock typing-extensions sympy fsspec jinja2 networkx -w /wheels
+
+# ── ✅ Also wheel the dependencies required by the runtime stage ──────────
+RUN pip wheel \
+    pydantic orjson psutil pyzmq pynvml \
+    transformers==4.48.3 uvicorn fastapi IPython aiohttp \
+    setproctitle uvloop sentencepiece triton \
+    -w /wheels
 
 ###############################################################################
 # Stage 2 ─ runtime: minimal runtime image, wheels installed offline only
@@ -123,8 +132,13 @@ RUN python3 -m pip install --no-cache-dir /tmp/wheels/torch*.whl && \
     python3 -c "from torch.distributed import Backend; print('✅ Runtime torch distributed OK, GLOO =', Backend.GLOO)" && \
     rm -rf /tmp/wheels
 
-# Install the dependencies missing at runtime
-RUN python3 -m pip install --no-cache-dir pydantic orjson psutil pyzmq pynvml transformers==4.48.3 uvicorn fastapi IPython aiohttp setproctitle uvloop sentencepiece triton
+# # Install the dependencies missing at runtime
+# RUN python3 -m pip install --no-cache-dir pydantic orjson psutil pyzmq pynvml transformers==4.48.3 uvicorn fastapi IPython aiohttp setproctitle uvloop sentencepiece triton
+
+# ✅ Install all dependencies offline (covers every runtime-required package)
+RUN python3 -m pip install --no-cache-dir /tmp/wheels/* && \
+    python3 -c "from torch.distributed import Backend; print('✅ Runtime torch distributed OK, GLOO =', Backend.GLOO)" && \
+    rm -rf /tmp/wheels
 
 # ✅ Add Tini (recommended)
 ENV TINI_VERSION=v0.19.0
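
Note on the runtime-stage hunk: the unchanged RUN above it already ends with `rm -rf /tmp/wheels`, so the newly added consolidated install only succeeds if the wheel directory is (re)copied into the image, or the two steps are merged into one RUN before that cleanup executes. Below is a minimal sketch of such a merged offline-install step; the `builder` stage name and the `--no-index`/`--find-links` flags are illustrative assumptions and are not part of this patch.

    # Sketch only, not part of the patch. Assumptions: the build stage is named
    # "builder", and --no-index/--find-links make pip resolve every dependency
    # from the local wheel directory so nothing is fetched from PyPI.
    COPY --from=builder /wheels /tmp/wheels
    RUN python3 -m pip install --no-cache-dir --no-index \
            --find-links=/tmp/wheels /tmp/wheels/*.whl && \
        python3 -c "from torch.distributed import Backend; print('Runtime torch distributed OK, GLOO =', Backend.GLOO)" && \
        rm -rf /tmp/wheels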