FROM nvidia/cuda:11.8.0-cudnn8-devel-ubuntu20.04

LABEL maintainer="Hugging Face"

ARG DEBIAN_FRONTEND=noninteractive

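# Use a login shell so that variables exported to `~/.profile` in one RUN step
# are visible in the RUN steps that follow.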
SHELL ["sh", "-lc"]

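# Version pins for the PyTorch install below.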
ARG PYTORCH='2.2.1'
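# CUDA wheel tag for the PyTorch extra index, e.g. `cu102`, `cu113`, `cu118`.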
ARG CUDA='cu118'

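# System packages needed by the test suites (audio, OCR, speech) plus Python and pip.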
RUN apt update
RUN apt install -y git libsndfile1-dev tesseract-ocr espeak-ng python python3-pip ffmpeg
RUN python3 -m pip install --no-cache-dir --upgrade pip

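# Clone transformers and check out the requested ref (defaults to `main`).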
ARG REF=main
RUN git clone https://github.com/huggingface/transformers && cd transformers && git checkout $REF

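# If PYTORCH is non-empty, pin torch to that version; otherwise install the latest release.
# The resulting requirement is exported via `~/.profile` so later RUN steps (login shell) can reuse $VERSION.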
RUN [ ${#PYTORCH} -gt 0 ] && VERSION='torch=='$PYTORCH'.*' || VERSION='torch'; echo "export VERSION='$VERSION'" >> ~/.profile
RUN echo torch=$VERSION

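# Install torch (via $VERSION) together with torchvision and torchaudio from the CUDA-specific wheel index.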
RUN python3 -m pip install --no-cache-dir -U $VERSION torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/$CUDA

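# Install transformers from the checked-out source in editable mode with the torch dev extras.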
RUN python3 -m pip install --no-cache-dir -e ./transformers[dev-torch]

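# Install accelerate from source (main branch).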
RUN python3 -m pip install --no-cache-dir git+https://github.com/huggingface/accelerate@main

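# einops is needed by several of the quantization backends installed below.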
RUN python3 -m pip install --no-cache-dir einops

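# bitsandbytes for mixed-int8 quantization testing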
RUN python3 -m pip install --no-cache-dir bitsandbytes

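# auto-gptq for GPTQ quantization testing (wheels from the CUDA 11.8 index)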
RUN python3 -m pip install --no-cache-dir auto-gptq --extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/

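# optimum (from source) is required for GPTQ quantization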
RUN python3 -m pip install --no-cache-dir git+https://github.com/huggingface/optimum@main

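# aqlm for AQLM quantization testing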
RUN python3 -m pip install --no-cache-dir aqlm[gpu]==1.0.2

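# autoawq for AWQ quantization testing; the cu118/cp38 wheel matches this image's CUDA 11.8 and Python 3.8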
RUN python3 -m pip install --no-cache-dir https://github.com/casper-hansen/AutoAWQ/releases/download/v0.2.3/autoawq-0.2.3+cu118-cp38-cp38-linux_x86_64.whl

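# quanto for quantization testing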
RUN python3 -m pip install --no-cache-dir quanto

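# EETQ for quantization testing (built from source)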
RUN python3 -m pip install git+https://github.com/NetEase-FuXi/EETQ.git

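# Re-run the in-tree build so the editable `transformers` checkout stays resolvable as a package
# after the dependency installs above.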
RUN cd transformers && python3 setup.py develop