FROM python:3.10.9
# Set the working directory to /
WORKDIR /
# Copy the requirements file into the container
COPY ./requirements.txt /requirements.txt
# Install the core ML dependencies (transformers, PyTorch, and supporting libraries)
RUN pip install transformers torch accelerate sentencepiece protobuf lxml
# Optional (currently disabled): install huggingface_hub and llama-cpp-python to download and serve a Llama 2 GGUF model
#RUN pip install --upgrade huggingface_hub
#RUN CMAKE_ARGS="-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS" pip install 'llama-cpp-python[server]' --upgrade --force-reinstall --no-cache-dir
# Install the remaining dependencies listed in requirements.txt
RUN pip install --no-cache-dir --upgrade -r /requirements.txt
# Create a non-root user so the app has write access to its home directory
RUN useradd -m -u 1000 user
USER user
ENV HOME=/home/user \
PATH=/home/user/.local/bin:$PATH
WORKDIR $HOME/app
#RUN huggingface-cli download TheBloke/Llama-2-7b-Chat-GGUF llama-2-7b-chat.Q2_K.gguf --local-dir . --local-dir-use-symlinks False
COPY --chown=user . $HOME/app
# Start the FastAPI app on port 7860, the default port expected by Spaces
CMD ["uvicorn", "question_paper:app", "--host", "0.0.0.0", "--port", "7860"]