# samgis-lisa-on-cuda / Dockerfile
# author: alessandro trinca tornidor
# ci: try fixing the huggingface run error using directly the uvicorn command
# Include global ARGs at the dockerfile top
ARG ARCH="x86_64"
ARG WORKDIR_ROOT="/var/task"
ARG FASTAPI_STATIC="${WORKDIR_ROOT}/static"
ARG PYTHONPATH="${WORKDIR_ROOT}:${PYTHONPATH}:/usr/local/lib/python3/dist-packages"
ARG POETRY_NO_INTERACTION=1
ARG POETRY_VIRTUALENVS_IN_PROJECT=1
ARG POETRY_VIRTUALENVS_CREATE=1
ARG POETRY_CACHE_DIR=/tmp/poetry_cache
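# note: ARGs declared before the first FROM live in a global scope; each build
# stage below re-declares the ones it needs so their values stay visible there.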
FROM nvcr.io/nvidia/pytorch:24.10-py3 AS builder_global
LABEL authors="alessandro@trinca.tornidor.com"
ARG ARCH
ARG WORKDIR_ROOT
ARG PYTHONPATH
ARG POETRY_NO_INTERACTION
ARG POETRY_VIRTUALENVS_IN_PROJECT
ARG POETRY_VIRTUALENVS_CREATE
ARG POETRY_CACHE_DIR
RUN echo "ARCH: $ARCH ..."
RUN echo "ARG POETRY_CACHE_DIR: ${POETRY_CACHE_DIR} ..."
RUN echo "ARG PYTHONPATH: $PYTHONPATH, check for python3 and pip"
RUN which python3
RUN python3 --version
RUN python --version
RUN which pip
RUN echo "arg dep:"
# Create a non-root python user and group
RUN groupadd -g 999 python && useradd -r -u 999 -g python python
# Set working directory to function root directory
RUN mkdir ${WORKDIR_ROOT} && chown python:python ${WORKDIR_ROOT}
WORKDIR ${WORKDIR_ROOT}
COPY --chown=python:python requirements_poetry.txt pyproject.toml poetry.lock README.md ${WORKDIR_ROOT}/
RUN apt update && apt install software-properties-common -y
COPY --chown=python:python ./dockerfiles/ubuntu.sources /etc/apt/ubuntu.sources
COPY --chown=python:python ./dockerfiles/apt_preferences_ubuntu /etc/apt/preferences
RUN ls -l /etc/apt/preferences ${WORKDIR_ROOT}/pyproject.toml
RUN apt update && add-apt-repository "deb http://archive.ubuntu.com/ubuntu jammy main universe restricted multiverse" && \
add-apt-repository "deb http://archive.ubuntu.com/ubuntu jammy-security main universe restricted multiverse" && \
add-apt-repository "deb http://archive.ubuntu.com/ubuntu jammy-updates main universe restricted multiverse" && \
add-apt-repository "deb http://archive.ubuntu.com/ubuntu noble main universe restricted multiverse" && \
add-apt-repository "deb http://archive.ubuntu.com/ubuntu noble-security main universe restricted multiverse" && \
add-apt-repository "deb http://archive.ubuntu.com/ubuntu noble-updates main universe restricted multiverse"
RUN cat /etc/lsb-release
# avoid segment-geospatial exception caused by missing libGL.so.1 library
RUN echo "BUILDER: check libz.s* before start:" && ls -l /usr/lib/${ARCH}-linux-gnu/libz.so* /lib/${ARCH}-linux-gnu/libz.so*
RUN apt update && apt upgrade -y && apt install -y libgl1 curl && apt clean
RUN echo "run update noble..."
RUN apt update && apt install -t noble zlib1g -y
RUN rm /lib/${ARCH}-linux-gnu/libz.so.1.2* || echo "BUILDER: no /lib/${ARCH}-linux-gnu/libz.so.1.2* found"
RUN rm /usr/lib/${ARCH}-linux-gnu/libz.so.1.2* || echo "BUILDER: no /usr/lib/${ARCH}-linux-gnu/libz.so.1.2* found"
RUN ln -sf /usr/lib/${ARCH}-linux-gnu/libz.so.1 /usr/lib/${ARCH}-linux-gnu/libz.so
RUN ln -sf /lib/${ARCH}-linux-gnu/libz.so.1 /lib/${ARCH}-linux-gnu/libz.so
RUN echo "BUILDER: check libz.s* after install from noble" && ls -l /usr/lib/${ARCH}-linux-gnu/libz.so* /lib/${ARCH}-linux-gnu/libz.so*
RUN ls -l /etc/apt/sources* /etc/apt/preferences*
# ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
# poetry itself is installed outside ${WORKDIR_ROOT}: it is not needed in the runtime docker image
RUN python3 -m pip install -r ${WORKDIR_ROOT}/requirements_poetry.txt
RUN which poetry && poetry --version && poetry config --list
RUN poetry config virtualenvs.path ${WORKDIR_ROOT}
RUN poetry config installer.max-workers 7
RUN echo "# poetry config --list #" && poetry config --list
RUN ls -ld ${WORKDIR_ROOT}/
#RUN . ${WORKDIR_ROOT}/.venv/bin/activate && ${WORKDIR_ROOT}/.venv/bin/python --version && ${WORKDIR_ROOT}/.venv/bin/python -m pip install pip wheel setuptools --upgrade
RUN echo "current path:"
RUN pwd
RUN echo "installing poetry dependencies..."
RUN poetry run python3 -m pip install pip wheel setuptools --upgrade
RUN poetry install --no-root --no-cache
ENV VIRTUAL_ENV=${WORKDIR_ROOT}/.venv \
PATH="${WORKDIR_ROOT}/.venv/bin:$PATH" \
PYTHONPATH="${WORKDIR_ROOT}:${WORKDIR_ROOT}/.venv/bin:/usr/local/lib/python3/dist-packages:${PYTHONPATH}"
RUN ls -l ${WORKDIR_ROOT}/.venv/bin
RUN ls -ld ${WORKDIR_ROOT}/.venv/bin
RUN which python3
RUN python3 -c "import sys; print(sys.path)"
RUN python3 -c "import cv2"
FROM nvcr.io/nvidia/pytorch:24.10-py3 AS runtime
ARG ARCH
ARG WORKDIR_ROOT
RUN groupadd -g 999 python && useradd -r -u 999 -g python python
ENV VIRTUAL_ENV=${WORKDIR_ROOT}/.venv \
PATH="${WORKDIR_ROOT}/.venv/bin:$PATH" \
PYTHONPATH="${WORKDIR_ROOT}:${WORKDIR_ROOT}/.venv/bin:/usr/local/lib/python3/dist-packages:${PYTHONPATH}"
RUN which python3
RUN echo "COPY --chown=python:python --from=builder_global /usr/lib/${ARCH}-linux-gnu/libGL.so* /usr/lib/${ARCH}-linux-gnu/"
COPY --chown=python:python --from=builder_global /usr/lib/${ARCH}-linux-gnu/libGL.so* /usr/lib/${ARCH}-linux-gnu/
RUN echo "RUNTIME: check libz.s* before upgrade" && ls -l /usr/lib/${ARCH}-linux-gnu/libz.so*
RUN echo "RUNTIME: remove libz.s* to force upgrade" && rm /usr/lib/${ARCH}-linux-gnu/libz.so*
COPY --chown=python:python --from=builder_global /usr/lib/${ARCH}-linux-gnu/libz.so* /usr/lib/${ARCH}-linux-gnu/
RUN echo "RUNTIME: check libz.s* after copy" && ls -l /usr/lib/${ARCH}-linux-gnu/libz.so*
COPY --chown=python:python --from=builder_global ${WORKDIR_ROOT}/.venv ${WORKDIR_ROOT}/.venv
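# only the prebuilt .venv is copied into the runtime stage; poetry, its cache and the
# other build-only tooling stay behind in builder_global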
RUN echo "COPY --chown=python:python scripts/docker_entrypoint.sh ${WORKDIR_ROOT}/"
COPY --chown=python:python ./scripts/docker_entrypoint.sh ${WORKDIR_ROOT}/
RUN echo "COPY --chown=python:python scripts/entrypoint.sh ${WORKDIR_ROOT}/"
COPY --chown=python:python ./scripts/entrypoint.sh ${WORKDIR_ROOT}/
RUN chmod 744 ${WORKDIR_ROOT}/docker_entrypoint.sh ${WORKDIR_ROOT}/entrypoint.sh
RUN ls -l ${WORKDIR_ROOT}/docker_entrypoint.sh ${WORKDIR_ROOT}/entrypoint.sh
RUN apt update && apt upgrade -y && apt clean
RUN echo "new WORKDIR_ROOT after hidden venv COPY --chown=python:python => ${WORKDIR_ROOT}"
RUN ls -ld ${WORKDIR_ROOT}/
RUN ls -lA ${WORKDIR_ROOT}/
RUN echo "content of WORKDIR_ROOT/.venv => ${WORKDIR_ROOT}/.venv"
RUN ls -ld ${WORKDIR_ROOT}/.venv
RUN ls -lA ${WORKDIR_ROOT}/.venv
RUN ls -lA ${WORKDIR_ROOT}/docker_entrypoint.sh
RUN ls -lA ${WORKDIR_ROOT}/entrypoint.sh
RUN cat /etc/lsb-release
### conditional section
FROM node:22 AS node_fastapi
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
ARG WORKDIR_ROOT
RUN corepack enable
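# corepack provides the pnpm shim used by the node stages below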
COPY ./static /appnode
WORKDIR /appnode
# RUN echo "pnpm store path:" && pnpm store path
RUN ls -l /appnode
RUN ls -l /appnode/list_files.html
FROM node_fastapi AS node_prod_deps
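# the BuildKit cache mount persists /pnpm/store across builds, so repeated pnpm installs
# can reuse already-downloaded packages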
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --prod --frozen-lockfile
# fail the build early if the install above did not produce a node_modules folder
RUN if [ ! -d /appnode/node_modules ]; then echo "no node_modules folder" && exit 1; fi
FROM node_fastapi AS node_build
ARG VITE__MAP_DESCRIPTION
ARG VITE__SAMGIS_SPACE
RUN echo "VITE__MAP_DESCRIPTION:" ${VITE__MAP_DESCRIPTION}
RUN echo "VITE__SAMGIS_SPACE:" ${VITE__SAMGIS_SPACE}
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm build
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm tailwindcss -i /appnode/src/input.css -o /appnode/dist/output.css
RUN if [ ! -d /appnode/dist ]; then echo "no dist folder" && exit 1; fi
FROM runtime
ARG FASTAPI_STATIC
# Include global arg in this stage of the build
ARG WORKDIR_ROOT="/var/task"
ENV VIRTUAL_ENV=${WORKDIR_ROOT}/.venv \
PATH="${WORKDIR_ROOT}/.venv/bin:$PATH"
ENV IS_AWS_LAMBDA=""
RUN mkdir ${FASTAPI_STATIC}
COPY --chown=python:python ./scripts/healthcheck.py ${WORKDIR_ROOT}/
COPY --chown=python:python ./app.py ${WORKDIR_ROOT}/
COPY --chown=python:python ./sam-quantized/machine_learning_models ${WORKDIR_ROOT}/machine_learning_models
COPY --chown=python:python --from=node_prod_deps /appnode/node_modules* ${FASTAPI_STATIC}/node_modules
COPY --chown=python:python --from=node_build /appnode/dist* ${FASTAPI_STATIC}/dist
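# copying "node_modules*" / "dist*" (note the trailing wildcard) keeps the COPY from
# failing when a node stage produced no output, which is what makes the node section
# above "conditional"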
COPY --chown=python:python static/list_files.html ${FASTAPI_STATIC}/
RUN ls -l ${FASTAPI_STATIC}/
RUN ls -l ${FASTAPI_STATIC}/list_files.html
# Set working directory to function root directory
WORKDIR ${WORKDIR_ROOT}
# ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
RUN ls -lA ${WORKDIR_ROOT}/
RUN ls -ld ${WORKDIR_ROOT}/.venv
RUN ls -l /usr/bin/which
RUN /usr/bin/which python3
RUN python3 --version
# RUN node --version
# RUN npm --version
RUN echo "PYTHONPATH: ${PYTHONPATH}."
RUN echo "PATH: ${PATH}."
RUN echo "WORKDIR_ROOT: ${WORKDIR_ROOT}."
RUN ls -l ${WORKDIR_ROOT}
RUN ls -ld ${WORKDIR_ROOT}
RUN ls -l ${WORKDIR_ROOT}/machine_learning_models
RUN python3 -c "import sys; print(sys.path)"
RUN python3 -c "import cv2"
RUN python3 -c "import fastapi"
RUN python3 -c "import geopandas"
RUN python3 -c "import rasterio"
RUN python3 -c "import uvicorn"
RUN df -h
RUN echo "WORKDIR_ROOT /static/:"
RUN ls -l ${WORKDIR_ROOT}/static/ || true
RUN ls -l ${WORKDIR_ROOT}/static/dist || true
RUN ls -l ${WORKDIR_ROOT}/static/node_modules || true
RUN echo "FASTAPI_STATIC:"
RUN ls -l ${FASTAPI_STATIC}/ || true
RUN ls -l ${FASTAPI_STATIC}/dist || true
RUN ls -l ${FASTAPI_STATIC}/node_modules || true
RUN ls -ld ${WORKDIR_ROOT}/
RUN ls -lA ${WORKDIR_ROOT}/
RUN ls -l ${WORKDIR_ROOT}/.venv
RUN ls -l ${WORKDIR_ROOT}/.venv/bin/activate
#CMD [
# "source", "/var/task/.venv/bin/activate", "&&",
# "uvicorn", "app:app", "--host","0.0.0.0", "--port", "7860"
#]
CMD ["/usr/bin/bash", "-c", "source /var/task/.venv/bin/activate && python -m uvicorn app:app --host 0.0.0.0 --port 7860"]
# CMD ["python", "-m", "uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
# HEALTHCHECK --interval=30s --timeout=900s --start-period=5s --retries=3 CMD "python -c 'import requests; r1 = requests.get(\"http://localhost:7860/health\"); print(r1.status_code); r2 = requests.get(\"http://localhost:7860/health_models\"); print(f\"status health:{r1.status_code}, health_models:{r2.status_code}!\"); exit(0) if r1.status_code == 200 and r2.status_code == 200 else exit(1)'"
HEALTHCHECK --interval=10s --timeout=1080s --start-period=10s --start-interval=10s --retries=3 CMD [ "python", "healthcheck.py" ]
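# scripts/healthcheck.py is not shown here; a minimal sketch, assuming it mirrors the
# inline check in the commented-out HEALTHCHECK above (the /health and /health_models
# endpoints, port and exit codes come from that line, the rest is illustrative):
#
#   import sys
#   import requests
#
#   # query both health endpoints exposed by app.py on the uvicorn port
#   r1 = requests.get("http://localhost:7860/health", timeout=30)
#   r2 = requests.get("http://localhost:7860/health_models", timeout=30)
#   print(f"status health:{r1.status_code}, health_models:{r2.status_code}!")
#   # exit 0 only if both checks returned HTTP 200, otherwise mark the container unhealthy
#   sys.exit(0 if r1.status_code == 200 and r2.status_code == 200 else 1)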