This repository has been archived by the owner on Oct 11, 2024. It is now read-only.

Commit

Fix docker image build issue (#305)
1. Fixed the apt-get install commands in the Dockerfile.
2. Fixed the extra-tag selection for nightly vs. release builds.

---------

Co-authored-by: dhuangnm <[email protected]>
2 people authored and derekk-nm committed Jun 24, 2024
1 parent de33059 commit fb14a05
Showing 4 changed files with 22 additions and 16 deletions.
6 changes: 3 additions & 3 deletions .github/actions/nm-build-docker/action.yml
@@ -5,7 +5,7 @@ inputs:
     description: "tag to be used for the docker image"
     type: string
     required: true
-  additional_tag:
+  extra_tag:
     description: "additional tag for the docker image"
     type: string
     required: true
@@ -33,8 +33,8 @@ runs:
           --build-arg build_version=${{ inputs.build_version }} \
           --target vllm-openai . || status=$?
         if [ ${status} -eq 0 ]; then
-          echo "Add tag ${additional_tag} for "${build_type}" build too"
-          docker image tag ghcr.io/neuralmagic/nm-vllm-openai:${{ inputs.docker_tag }} ghcr.io/neuralmagic/nm-vllm-openai:${additional_tag} || ((status+=$?))
+          echo "Add tag ${{ inputs.extra_tag }} for "${{ inputs.build_type }}" build too"
+          docker image tag ghcr.io/neuralmagic/nm-vllm-openai:${{ inputs.docker_tag }} ghcr.io/neuralmagic/nm-vllm-openai:${{ inputs.extra_tag }} || ((status+=$?))
         fi
         docker image ls -a
         echo "status=${status}" >> $GITHUB_OUTPUT
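The tagging bug fixed above comes from how composite actions handle inputs: they are not exposed as shell variables, so the old references to ${additional_tag} and ${build_type} expanded to empty strings inside the run script and the extra tag was never applied. Referencing ${{ inputs.extra_tag }} and ${{ inputs.build_type }} lets the runner substitute the values before the shell ever sees the script. A rough bash sketch of the intended build-then-tag flow, with hypothetical tag values standing in for the action's inputs:

#!/usr/bin/env bash
# Rough sketch of the build-then-tag flow in this action (not the action's exact script);
# the tag values below are hypothetical placeholders for ${{ inputs.docker_tag }} and
# ${{ inputs.extra_tag }}.
status=0
docker_tag="abc123"   # per-build tag, e.g. a version or commit identifier
extra_tag="nightly"   # floating tag layered on top of the per-build tag

docker build --target vllm-openai \
  -t "ghcr.io/neuralmagic/nm-vllm-openai:${docker_tag}" . || status=$?

if [ "${status}" -eq 0 ]; then
  # Only add the floating tag when the build itself succeeded, and fold any
  # tagging failure into the same status value the step reports.
  docker image tag "ghcr.io/neuralmagic/nm-vllm-openai:${docker_tag}" \
    "ghcr.io/neuralmagic/nm-vllm-openai:${extra_tag}" || ((status+=$?))
fi
echo "status=${status}"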
9 changes: 7 additions & 2 deletions .github/actions/nm-get-docker-tag/action.yml
@@ -5,12 +5,17 @@ inputs:
     description: "type of nm-vllm to install for the docker image: NIGHTLY (default) or RELEASE"
     type: string
     default: 'NIGHTLY'
+outputs:
+  tag:
+    description: "extra tag for the docker image based on build type"
+    value: ${{ steps.extratag.outputs.tag }}
 runs:
   using: composite
   steps:
-    - run: |
+    - id: extratag
+      run: |
         tag=nightly
-        if [[ "${build_type}" = "RELEASE" ]]; then
+        if [[ "${{ inputs.build_type }}" = "RELEASE" ]]; then
           tag=latest
         fi
         echo "tag=${tag}" >> $GITHUB_OUTPUT
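Two things change in this composite action: the run step gets an id (extratag) and the action declares a tag output that forwards steps.extratag.outputs.tag, so callers can actually read the computed tag; and the comparison now uses ${{ inputs.build_type }} instead of the unset shell variable ${build_type}, so RELEASE builds resolve to latest instead of always falling back to nightly. A small bash sketch of the tag-selection logic, with BUILD_TYPE standing in for the substituted input:

#!/usr/bin/env bash
# Sketch of the tag-selection step; BUILD_TYPE is a stand-in for ${{ inputs.build_type }},
# which the runner substitutes into the real script before it runs.
BUILD_TYPE="${1:-NIGHTLY}"                      # hypothetical: pass NIGHTLY or RELEASE as $1
GITHUB_OUTPUT="${GITHUB_OUTPUT:-/dev/stdout}"   # provided by the runner in CI

tag=nightly
if [[ "${BUILD_TYPE}" = "RELEASE" ]]; then
  tag=latest
fi
# Writing key=value to $GITHUB_OUTPUT is what exposes the value as a step output,
# which the new outputs: block then re-exports as the action-level output.
echo "tag=${tag}" >> "$GITHUB_OUTPUT"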
11 changes: 6 additions & 5 deletions .github/workflows/publish-docker.yml
@@ -40,18 +40,19 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
-          fetch-depth: 1
+          fetch-depth: 0
           ref: ${{ inputs.gitref }}
+          submodules: recursive

       - name: Set up nvidia-container-toolkit
         id: setup
         uses: ./.github/actions/nm-setup-nvidia-container-toolkit/

-      - name: Get image additional tag
+      - name: Get docker image extra tag
         id: tag
-        uses: ./.github/actions/nm-get-tag/
+        uses: ./.github/actions/nm-get-docker-tag/
         with:
           build_type: ${{ inputs.build_type }}

@@ -60,7 +61,7 @@ jobs:
         uses: ./.github/actions/nm-build-docker/
         with:
           docker_tag: ${{ inputs.docker_tag }}
-          additional_tag: ${{ steps.tag.outputs.tag }}
+          extra_tag: ${{ steps.tag.outputs.tag }}
           build_type: ${{ inputs.build_type }}
           build_version: ${{ inputs.build_version }}
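With the action renamed and its output wired up, the workflow simply forwards steps.tag.outputs.tag into nm-build-docker as extra_tag. If publish-docker.yml is also dispatchable with these inputs through a workflow_dispatch trigger (not shown in this diff), a run could be started from the CLI roughly like this; the values are hypothetical:

# Hypothetical invocation; assumes the workflow accepts these inputs via workflow_dispatch.
gh workflow run publish-docker.yml \
  -f docker_tag=abc123 \
  -f build_type=NIGHTLY \
  -f build_version=latest \
  -f gitref=main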
12 changes: 6 additions & 6 deletions Dockerfile
@@ -9,8 +9,8 @@
 # prepare basic build environment
 FROM nvidia/cuda:12.4.1-devel-ubuntu22.04 AS dev

-RUN apt-get update -y \
-    && apt-get install -y python3-pip git
+RUN apt-get update -y && \
+    apt-get install -y python3-pip git

 # Workaround for https://github.com/openai/triton/issues/2507 and
 # https://github.com/pytorch/pytorch/issues/107960 -- hopefully
@@ -60,8 +60,8 @@ RUN pip --verbose wheel flash-attn==${FLASH_ATTN_VERSION} \
 FROM nvidia/cuda:12.4.1-base-ubuntu22.04 AS vllm-base
 WORKDIR /vllm-workspace

-RUN apt-get update -y \
-    && apt-get install -y python3-pip git vim
+RUN apt-get update -y && \
+    apt-get install -y python3-pip git vim

 # Workaround for https://github.com/openai/triton/issues/2507 and
 # https://github.com/pytorch/pytorch/issues/107960 -- hopefully
@@ -70,15 +70,15 @@ RUN apt-get update -y \
 RUN ldconfig /usr/local/cuda-12.4/compat/

 # install nm-vllm wheel first, so that torch etc will be installed
-ARG build_type="nightly"
+ARG build_type="NIGHTLY"
 ARG build_version="latest"
 ENV INSTALL_TYPE=${build_type}
 ENV INSTALL_VERSION=${build_version}
 # UPSTREAM SYNC: Install nm-vllm with sparsity extras
 # use nm pypi for now for testing
 RUN --mount=type=bind,from=build \
     --mount=type=cache,target=/root/.cache/pip \
-    if [ "${INSTALL_TYPE}" = "nightly" ]; then \
+    if [ "${INSTALL_TYPE}" = "NIGHTLY" ]; then \
         if [ "${INSTALL_VERSION}" = "latest" ]; then \
             pip install nm-vllm-nightly[sparse] --extra-index-url https://pypi.neuralmagic.com/simple; \
         else \
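The Dockerfile half of the fix is case normalization: the workflow passes build_type as NIGHTLY or RELEASE (upper case), while the old ARG default and install conditional compared against lowercase nightly, so a NIGHTLY value passed from CI would not match and the build would fall through to the non-nightly install path. Aligning both sides on NIGHTLY makes the conditional match what the workflow actually sends. A rough sketch of exercising these build args by hand, with a placeholder image tag:

# Rough sketch; the image tag is a placeholder, and in CI these flags are supplied
# by the nm-build-docker action rather than typed manually.
docker build \
  --build-arg build_type=NIGHTLY \
  --build-arg build_version=latest \
  --target vllm-openai \
  -t ghcr.io/neuralmagic/nm-vllm-openai:example-tag .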
