bagel / Dockerfile
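# Note: the -devel CUDA base image ships nvcc and the CUDA headers, which the
# Python packages installed below may need if they build CUDA extensions from source.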
FROM nvidia/cuda:11.8.0-devel-ubuntu22.04
# Install necessary packages
RUN apt-get update && apt-get install -y \
    git \
    build-essential \
    libopenblas-dev \
    wget \
    python3-pip \
    nodejs \
    npm \
    && rm -rf /var/lib/apt/lists/*
# Create a new user to avoid using root
RUN useradd -m -u 1000 user
# Switch to the new user
USER user
# Set environment variables
ENV HOME=/home/user \
PATH=/home/user/.local/bin:$PATH
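# pip installs made as the non-root user land in ~/.local/bin, so putting that
# directory on PATH keeps tools such as huggingface-cli (installed below) callable.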
# Set the working directory
WORKDIR $HOME/app
# Copy the current directory contents into the container at /home/user/app
COPY --chown=user . $HOME/app
# Install aphrodite-engine from PyPI to handle dependencies
RUN python3 -m pip install aphrodite-engine
# Clone the specific branch of aphrodite-engine for the latest features
RUN git clone --branch feat/exllamav2-support https://github.com/PygmalionAI/aphrodite-engine.git $HOME/aphrodite-engine
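# The CMD at the bottom launches api_server.py from this checkout, while the
# PyPI install above supplies the package's runtime dependencies.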
# Install additional dependencies
RUN pip install huggingface-hub hf-transfer
# Set environment variable to enable hf-transfer
ENV HF_HUB_ENABLE_HF_TRANSFER=1
# Download the model using huggingface-cli
RUN huggingface-cli download LoneStriker/TinyLlama-1.1B-32k-Instruct-8.0bpw-h8-exl2 --local-dir $HOME/goliath-gptq --local-dir-use-symlinks False --cache-dir $HOME/cache
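# Note: "goliath-gptq" is only a local path label; the directory actually holds the
# TinyLlama exl2 weights, which the CMD below loads via --model ~/goliath-gptq.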
# Expose the port the API server will listen on
EXPOSE 7860
# aioprometheus is an extra runtime dependency of the API server (used for its metrics endpoint)
RUN pip install aioprometheus
# Command to run the API server from the cloned directory
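# Flag summary (names assumed from aphrodite-engine's vLLM-style CLI; check the checkout for exact spellings):
#   -q exl2                    load the exl2-quantized weights
#   --dtype auto               pick the activation dtype from the model config
#   -gmu 0.95                  shorthand for --gpu-memory-utilization (target ~95% of VRAM)
#   --kv-cache-dtype fp8_e5m2  store the KV cache in FP8 (e5m2) to save memory
#   --max-num-seqs 15          cap the number of concurrently batched sequences
#   --enforce-eager            disable CUDA graphs and run eagerly
#   -tp 4                      tensor-parallel across 4 GPUs
#   --served-model-name        display name reported by the OpenAI-compatible API
#                              (a label only; it need not match the downloaded weights)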
CMD ["/bin/bash", "-c", "cd $HOME/aphrodite-engine/aphrodite/endpoints/openai && /bin/python3 api_server.py -q exl2 --dtype auto -gmu 0.95 --kv-cache-dtype fp8_e5m2 --max-num-seqs 15 --served-model-name \"BagelMIsteryTour-v2-8x7B-AWQ\" --enforce-eager -tp 4 --port 7860 --host 0.0.0.0 --model ~/goliath-gptq"]