# Global ARGs declared before the first FROM so every stage can re-declare and reuse them
ARG ARCH="x86_64"
ARG WORKDIR_ROOT="/var/task"
ARG FASTAPI_STATIC="${WORKDIR_ROOT}/static"
ARG PYTHONPATH="${WORKDIR_ROOT}:${PYTHONPATH}:/usr/local/lib/python3/dist-packages"
ARG POETRY_NO_INTERACTION=1
ARG POETRY_VIRTUALENVS_IN_PROJECT=1
ARG POETRY_VIRTUALENVS_CREATE=1
ARG POETRY_CACHE_DIR=/tmp/poetry_cache
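# Builder stage: installs the system libraries and the Poetry-managed Python dependencies
# into an in-project virtualenv (${WORKDIR_ROOT}/.venv) that later stages copy from.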
FROM nvcr.io/nvidia/pytorch:24.10-py3 AS builder_global
LABEL authors="[email protected]"
ARG ARCH
ARG WORKDIR_ROOT
ARG PYTHONPATH
ARG POETRY_NO_INTERACTION
ARG POETRY_VIRTUALENVS_IN_PROJECT
ARG POETRY_VIRTUALENVS_CREATE
ARG POETRY_CACHE_DIR
RUN echo "ARCH: $ARCH ..."
RUN echo "ARG POETRY_CACHE_DIR: ${POETRY_CACHE_DIR} ..."
RUN echo "ARG PYTHONPATH: $PYTHONPATH, check for python3 and pip"
RUN which python3
RUN python3 --version
RUN python --version
RUN which pip
RUN echo "arg dep:"
# Create an unprivileged python user and group
RUN groupadd -g 999 python && useradd -r -u 999 -g python python
# Set working directory to function root directory
RUN mkdir ${WORKDIR_ROOT} && chown python:python ${WORKDIR_ROOT}
WORKDIR ${WORKDIR_ROOT}
COPY --chown=python:python ./requirements_poetry.txt pyproject.toml poetry.lock README.md ${WORKDIR_ROOT}/
RUN apt update && apt install software-properties-common -y
COPY --chown=python:python ./dockerfiles/ubuntu.sources /etc/apt/ubuntu.sources
COPY --chown=python:python ./dockerfiles/apt_preferences_ubuntu /etc/apt/preferences
RUN ls -l /etc/apt/preferences ${WORKDIR_ROOT}/pyproject.toml
RUN apt update && add-apt-repository "deb http://archive.ubuntu.com/ubuntu jammy main universe restricted multiverse" && \
add-apt-repository "deb http://archive.ubuntu.com/ubuntu jammy-security main universe restricted multiverse" && \
add-apt-repository "deb http://archive.ubuntu.com/ubuntu jammy-updates main universe restricted multiverse" && \
add-apt-repository "deb http://archive.ubuntu.com/ubuntu noble main universe restricted multiverse" && \
add-apt-repository "deb http://archive.ubuntu.com/ubuntu noble-security main universe restricted multiverse" && \
add-apt-repository "deb http://archive.ubuntu.com/ubuntu noble-updates main universe restricted multiverse"
RUN cat /etc/lsb-release
# avoid segment-geospatial exception caused by missing libGL.so.1 library
RUN echo "BUILDER: check libz.s* before start:" && ls -l /usr/lib/${ARCH}-linux-gnu/libz.so* /lib/${ARCH}-linux-gnu/libz.so*
RUN apt update && apt upgrade -y && apt install -y libgl1 curl && apt clean
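# Workaround: upgrade zlib1g from the noble repository, remove the older libz.so.1.2* files
# and re-create the libz.so symlinks so the newer library is picked up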
RUN echo "run update noble..."
RUN apt update && apt install -t noble zlib1g -y
RUN rm /lib/${ARCH}-linux-gnu/libz.so.1.2* || echo "BUILDER: no /lib/${ARCH}-linux-gnu/libz.so.1.2* found"
RUN rm /usr/lib/${ARCH}-linux-gnu/libz.so.1.2* || echo "BUILDER: no /usr/lib/${ARCH}-linux-gnu/libz.so.1.2* found"
RUN ln -sf /usr/lib/${ARCH}-linux-gnu/libz.so.1 /usr/lib/${ARCH}-linux-gnu/libz.so
RUN ln -sf /lib/${ARCH}-linux-gnu/libz.so.1 /lib/${ARCH}-linux-gnu/libz.so
RUN echo "BUILDER: check libz.s* after install from noble" && ls -l /usr/lib/${ARCH}-linux-gnu/libz.so* /lib/${ARCH}-linux-gnu/libz.so*
RUN ls -l /etc/apt/sources* /etc/apt/preferences*
# ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
# poetry installation path is NOT within ${WORKDIR_ROOT}: not needed for runtime docker image
RUN python3 -m pip install -r ${WORKDIR_ROOT}/requirements_poetry.txt
RUN which poetry && poetry --version && poetry config --list
RUN poetry config virtualenvs.path ${WORKDIR_ROOT}
RUN poetry config installer.max-workers 7
RUN echo "# poetry config --list #" && poetry config --list
RUN ls -ld ${WORKDIR_ROOT}/
#RUN . ${WORKDIR_ROOT}/.venv/bin/activate && ${WORKDIR_ROOT}/.venv/bin/python --version && ${WORKDIR_ROOT}/.venv/bin/python -m pip install pip wheel setuptools --upgrade
RUN echo "current path:"
RUN pwd
RUN echo "installing poetry dependencies..."
RUN poetry run python3 -m pip install pip wheel setuptools --upgrade
RUN poetry install --no-root --no-cache
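# Activate the in-project virtualenv for the rest of the stage by putting it first on PATH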
ENV VIRTUAL_ENV=${WORKDIR_ROOT}/.venv \
PATH="${WORKDIR_ROOT}/.venv/bin:$PATH" \
PYTHONPATH="${WORKDIR_ROOT}:${WORKDIR_ROOT}/.venv/bin:/usr/local/lib/python3/dist-packages:${PYTHONPATH}"
RUN ls -l ${WORKDIR_ROOT}/.venv/bin
RUN ls -ld ${WORKDIR_ROOT}/.venv/bin
RUN which python3
RUN python3 -c "import sys; print(sys.path)"
RUN python3 -c "import cv2"
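# Runtime stage: start again from the same base image and copy in only the shared libraries
# (libGL, libz), the pre-built virtualenv and the entrypoint scripts from the builder stage.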
FROM nvcr.io/nvidia/pytorch:24.10-py3 AS runtime
ARG ARCH
ARG WORKDIR_ROOT
RUN groupadd -g 999 python && useradd -r -u 999 -g python python
ENV VIRTUAL_ENV=${WORKDIR_ROOT}/.venv \
PATH="${WORKDIR_ROOT}/.venv/bin:$PATH" \
PYTHONPATH="${WORKDIR_ROOT}:${WORKDIR_ROOT}/.venv/bin:/usr/local/lib/python3/dist-packages:${PYTHONPATH}"
RUN which python3
RUN echo "COPY --chown=python:python --from=builder_global /usr/lib/${ARCH}-linux-gnu/libGL.so* /usr/lib/${ARCH}-linux-gnu/"
COPY --chown=python:python --from=builder_global /usr/lib/${ARCH}-linux-gnu/libGL.so* /usr/lib/${ARCH}-linux-gnu/
RUN echo "RUNTIME: check libz.s* before upgrade" && ls -l /usr/lib/${ARCH}-linux-gnu/libz.so*
RUN echo "RUNTIME: remove libz.s* to force upgrade" && rm /usr/lib/${ARCH}-linux-gnu/libz.so*
COPY --chown=python:python --from=builder_global /usr/lib/${ARCH}-linux-gnu/libz.so* /usr/lib/${ARCH}-linux-gnu/
RUN echo "RUNTIME: check libz.s* after copy" && ls -l /usr/lib/${ARCH}-linux-gnu/libz.so*
COPY --chown=python:python --from=builder_global ${WORKDIR_ROOT}/.venv ${WORKDIR_ROOT}/.venv
RUN echo "COPY --chown=python:python scripts/docker_entrypoint.sh ${WORKDIR_ROOT}/"
COPY --chown=python:python ./scripts/docker_entrypoint.sh ${WORKDIR_ROOT}/
RUN echo "COPY --chown=python:python scripts/entrypoint.sh ${WORKDIR_ROOT}/"
COPY --chown=python:python ./scripts/entrypoint.sh ${WORKDIR_ROOT}/
RUN chmod 744 ${WORKDIR_ROOT}/docker_entrypoint.sh ${WORKDIR_ROOT}/entrypoint.sh
RUN ls -l ${WORKDIR_ROOT}/docker_entrypoint.sh ${WORKDIR_ROOT}/entrypoint.sh
RUN apt update && apt upgrade -y && apt clean
RUN echo "new WORKDIR_ROOT after hidden venv COPY --chown=python:python => ${WORKDIR_ROOT}"
RUN ls -ld ${WORKDIR_ROOT}/
RUN ls -lA ${WORKDIR_ROOT}/
RUN echo "content of WORKDIR_ROOT/.venv => ${WORKDIR_ROOT}/.venv"
RUN ls -ld ${WORKDIR_ROOT}/.venv
RUN ls -lA ${WORKDIR_ROOT}/.venv
RUN ls -lA ${WORKDIR_ROOT}/docker_entrypoint.sh
RUN ls -lA ${WORKDIR_ROOT}/entrypoint.sh
RUN cat /etc/lsb-release
### conditional section
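# Node.js stage: base image for building the frontend assets under ./static with pnpm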
FROM node:22 AS node_fastapi
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
ARG WORKDIR_ROOT
RUN corepack enable
COPY ./static /appnode
WORKDIR /appnode
# RUN echo "pnpm store path:" && pnpm store path
RUN ls -l /appnode
RUN ls -l /appnode/list_files.html
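# node_prod_deps: install only production dependencies, using a pnpm store cache mount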
FROM node_fastapi AS node_prod_deps
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --prod --frozen-lockfile
# fail the build early if pnpm install did not produce a node_modules folder
RUN if [ ! -d /appnode/node_modules ]; then echo "no node_modules folder" && exit 1; fi
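# node_build: install all dependencies, build the frontend bundle and the tailwindcss output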
FROM node_fastapi AS node_build
ARG VITE__MAP_DESCRIPTION
ARG VITE__SAMGIS_SPACE
RUN echo "VITE__MAP_DESCRIPTION:" ${VITE__MAP_DESCRIPTION}
RUN echo "VITE__SAMGIS_SPACE:" ${VITE__SAMGIS_SPACE}
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm install --frozen-lockfile
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm build
RUN --mount=type=cache,id=pnpm,target=/pnpm/store pnpm tailwindcss -i /appnode/src/input.css -o /appnode/dist/output.css
RUN if [ ! -d /appnode/dist ]; then echo "no dist folder" && exit 1; fi
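# Final stage: extend the runtime image with the application code, the machine learning models
# and the frontend assets produced by the Node.js stages.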
FROM runtime
ARG FASTAPI_STATIC
# Include global arg in this stage of the build
ARG WORKDIR_ROOT="/var/task"
ENV VIRTUAL_ENV=${WORKDIR_ROOT}/.venv \
PATH="${WORKDIR_ROOT}/.venv/bin:$PATH"
ENV IS_AWS_LAMBDA=""
RUN mkdir ${FASTAPI_STATIC}
COPY --chown=python:python ./scripts/healthcheck.py ${WORKDIR_ROOT}/
COPY --chown=python:python ./app.py ${WORKDIR_ROOT}/
COPY --chown=python:python ./sam-quantized/machine_learning_models ${WORKDIR_ROOT}/machine_learning_models
COPY --chown=python:python --from=node_prod_deps /appnode/node_modules* ${FASTAPI_STATIC}/node_modules
COPY --chown=python:python --from=node_build /appnode/dist* ${FASTAPI_STATIC}/dist
COPY --chown=python:python static/list_files.html ${FASTAPI_STATIC}/
RUN ls -l ${FASTAPI_STATIC}/
RUN ls -l ${FASTAPI_STATIC}/list_files.html
# Set working directory to function root directory
WORKDIR ${WORKDIR_ROOT}
# ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
RUN ls -lA ${WORKDIR_ROOT}/
RUN ls -ld ${WORKDIR_ROOT}/.venv
RUN ls -l /usr/bin/which
RUN /usr/bin/which python3
RUN python3 --version
# RUN node --version
# RUN npm --version
RUN echo "PYTHONPATH: ${PYTHONPATH}."
RUN echo "PATH: ${PATH}."
RUN echo "WORKDIR_ROOT: ${WORKDIR_ROOT}."
RUN ls -l ${WORKDIR_ROOT}
RUN ls -ld ${WORKDIR_ROOT}
RUN ls -l ${WORKDIR_ROOT}/machine_learning_models
RUN python3 -c "import sys; print(sys.path)"
RUN python3 -c "import cv2"
RUN python3 -c "import fastapi"
RUN python3 -c "import geopandas"
RUN python3 -c "import rasterio"
RUN python3 -c "import uvicorn"
RUN df -h
RUN echo "WORKDIR_ROOT /static/:"
RUN ls -l ${WORKDIR_ROOT}/static/ || true
RUN ls -l ${WORKDIR_ROOT}/static/dist || true
RUN ls -l ${WORKDIR_ROOT}/static/node_modules || true
RUN echo "FASTAPI_STATIC:"
RUN ls -l ${FASTAPI_STATIC}/ || true
RUN ls -l ${FASTAPI_STATIC}/dist || true
RUN ls -l ${FASTAPI_STATIC}/node_modules || true
RUN ls -ld ${WORKDIR_ROOT}/
RUN ls -lA ${WORKDIR_ROOT}/
RUN ls -l ${WORKDIR_ROOT}/.venv
RUN ls -l ${WORKDIR_ROOT}/.venv/bin/activate
#CMD [
# "source", "/var/task/.venv/bin/activate", "&&",
# "uvicorn", "app:app", "--host","0.0.0.0", "--port", "7860"
#]
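# Activate the virtualenv, then start the FastAPI app with uvicorn on port 7860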
CMD ["/usr/bin/bash", "-c", "source /var/task/.venv/bin/activate && python -m uvicorn app:app --host 0.0.0.0 --port 7860"]
# CMD ["python", "-m", "uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
# HEALTHCHECK --interval=30s --timeout=900s --start-period=5s --retries=3 CMD "python -c 'import requests; r1 = requests.get(\"http://localhost:7860/health\"); print(r1.status_code); r2 = requests.get(\"http://localhost:7860/health_models\"); print(f\"status health:{r1.status_code}, health_models:{r2.status_code}!\"); exit(0) if r1.status_code == 200 and r2.status_code == 200 else exit(1)'"
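# The healthcheck runs the healthcheck.py script copied into ${WORKDIR_ROOT} above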
HEALTHCHECK --interval=10s --timeout=1080s --start-period=10s --start-interval=10s --retries=3 CMD [ "python", "healthcheck.py" ]