# miqumaidtabby / Dockerfile
# gfjiogopdfgdfs — Update Dockerfile (commit 79b705b, verified)
FROM nvidia/cuda:11.8.0-devel-ubuntu22.04

# Install build tooling in ONE layer: apt-get (stable CLI, unlike `apt`),
# no recommended packages, and the apt lists removed in the same layer so
# they never persist in the image. The separate duplicate `wget` install
# from the original is folded in here.
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
        git \
        libopenblas-dev \
        python3-pip \
        wget \
    && rm -rf /var/lib/apt/lists/*

# Set up a new unprivileged user "user" with UID 1000 and run as it from
# here on (all remaining steps are user-level; nothing below needs root).
RUN useradd -m -u 1000 user
USER user

# HOME plus ~/.local/bin on PATH so `pip install --user`-style installs
# (the default for a non-root pip) are runnable.
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

# Work out of the user's app directory.
WORKDIR $HOME/app

# Copy the build context in, owned by the unprivileged user.
COPY --chown=user . $HOME/app

# Build-time knobs. MODEL_NAME/MODEL_REPO now default to the previously
# hard-coded model, so existing builds are unchanged while other models can
# be selected via --build-arg. MODEL_PART_A/B and ADDITIONAL are kept for
# callers that pass them, though nothing below consumes them.
ARG MODEL_PART_A
ARG MODEL_PART_B
ARG MODEL_NAME=goliath-120b-gptq
ARG MODEL_REPO=TheBloke/goliath-120b-gptq
ARG ADDITIONAL

# NOTE(review): unpinned clone — builds are not reproducible; consider
# checking out a specific commit SHA after cloning.
RUN git clone https://github.com/theroyallab/tabbyAPI
WORKDIR $HOME/app/tabbyAPI

# Single pip layer; --no-cache-dir keeps the wheel cache out of the image.
# hf-transfer enables the fast HF download path used further down.
RUN pip install --no-cache-dir -q -r requirements.txt && \
    pip install --no-cache-dir -q huggingface-hub hf-transfer

# Generate the runtime config in one atomic command instead of 18 chained
# `echo >>` appends. Only the model_name line needs variable expansion.
# NOTE(review): host 127.0.0.1 is only reachable from inside the container;
# switch to 0.0.0.0 if the API must be reachable from outside — confirm intent.
RUN printf '%s\n' \
        'network:' \
        '  host: 127.0.0.1' \
        '  port: 5000' \
        '  disable_auth: False' \
        '' \
        'logging:' \
        '  prompt: False' \
        '  generation_params: False' \
        '' \
        'sampling:' \
        '  override_preset: null' \
        '' \
        'developer:' \
        '  unsafe_launch: False' \
        '' \
        'model:' \
        '  model_dir: models' \
        "  model_name: ${MODEL_NAME}" \
        '  use_dummy_models: False' \
        > config.yml

# WORKDIR creates the directory itself — no `mkdir -p` layer needed.
WORKDIR $HOME/app/tabbyAPI/models/${MODEL_NAME}

# Fast parallel downloads via hf-transfer (installed above).
ENV HF_HUB_ENABLE_HF_TRANSFER=1

# Download the model into the current models dir. The cache is removed in
# the SAME layer: with --local-dir-use-symlinks False the files are copied,
# so leaving ~/cache would bake a second full copy of a multi-GB model
# into the image.
RUN huggingface-cli download ${MODEL_REPO} \
        --local-dir ./ \
        --local-dir-use-symlinks False \
        --cache-dir ~/cache \
    && rm -rf ~/cache

WORKDIR $HOME/app/tabbyAPI

# Documentation only (does not publish); the config above serves on 5000.
EXPOSE 5000

# Exec form: python3 is PID 1 and receives SIGTERM from `docker stop`.
CMD ["python3", "main.py"]