diff --git a/Dockerfile b/Dockerfile
index 66cb9e3..7068394 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -129,9 +129,10 @@ RUN ldconfig
 COPY --from=builder-extras /wheels /tmp/wheels
 #RUN python3 -m pip install --no-cache-dir /tmp/wheels/* && rm -rf /tmp/wheels
 # ✅ Install the self-built torch first, so it is not overwritten by the one from PyPI
-RUN python3 -m pip install --no-cache-dir /tmp/wheels/torch*.whl && \
+RUN ls -lh /tmp/wheels && \
+    python3 -m pip install --no-cache-dir /tmp/wheels/torch*.whl && \
     python3 -m pip install /tmp/wheels/vllm-*.whl && \
-    python3 -m pip install --no-cache-dir /tmp/wheels/* && \
+    python3 -m pip install --no-cache-dir --no-deps /tmp/wheels/* && \
     python3 -c "from torch.distributed import Backend; print('✅ Runtime torch distributed OK, GLOO =', Backend.GLOO)" && \
     rm -rf /tmp/wheels

@@ -139,9 +140,9 @@ RUN python3 -m pip install --no-cache-dir /tmp/wheels/torch*.whl && \

 # RUN python3 -m pip install --no-cache-dir pydantic orjson psutil pyzmq pynvml transformers==4.48.3 uvicorn fastapi IPython aiohttp setproctitle uvloop sentencepiece triton
 # ✅ Install all dependencies offline (including every package required at runtime)
-RUN python3 -m pip install --no-cache-dir /tmp/wheels/* && \
-    python3 -c "from torch.distributed import Backend; print('✅ Runtime torch distributed OK, GLOO =', Backend.GLOO)" && \
-    rm -rf /tmp/wheels
+# RUN python3 -m pip install --no-cache-dir --no-deps /tmp/wheels/* && \
+#     python3 -c "from torch.distributed import Backend; print('✅ Runtime torch distributed OK, GLOO =', Backend.GLOO)" && \
+#     rm -rf /tmp/wheels

 # ✅ Add Tini (recommended)
 ENV TINI_VERSION=v0.19.0
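
Because "--no-deps" skips pip's dependency resolution for the bulk wheel install, a short post-build smoke test can complement the in-build check. This is only a sketch: the image tag "vllm-offline" is an assumed placeholder, not something defined in this diff.

    # build the image (tag name is a placeholder)
    docker build -t vllm-offline .
    # verify that the installed wheels have no broken/missing dependencies
    docker run --rm vllm-offline python3 -m pip check
    # confirm the self-built torch and vllm wheels are the ones actually imported
    docker run --rm vllm-offline python3 -c "import torch, vllm; print('torch', torch.__version__, 'vllm', vllm.__version__)"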