# This file was autogenerated by uv via the following command:
#    uv pip compile pyproject.toml -o requirements.txt
accelerate==1.0.0
    # via llama-2-7b-chat (pyproject.toml)
aiofiles==23.2.1
    # via gradio
annotated-types==0.7.0
    # via pydantic
anyio==4.6.0
    # via
    #   gradio
    #   httpx
    #   starlette
bitsandbytes==0.44.1
    # via llama-2-7b-chat (pyproject.toml)
certifi==2024.8.30
    # via
    #   httpcore
    #   httpx
    #   requests
charset-normalizer==3.3.2
    # via requests
click==8.1.7
    # via
    #   typer
    #   uvicorn
exceptiongroup==1.2.2
    # via anyio
fastapi==0.115.0
    # via gradio
ffmpy==0.4.0
    # via gradio
filelock==3.16.1
    # via
    #   huggingface-hub
    #   torch
    #   transformers
    #   triton
fsspec==2024.9.0
    # via
    #   gradio-client
    #   huggingface-hub
    #   torch
gradio==5.0.1
    # via
    #   llama-2-7b-chat (pyproject.toml)
    #   spaces
gradio-client==1.4.0
    # via gradio
h11==0.14.0
    # via
    #   httpcore
    #   uvicorn
hf-transfer==0.1.8
    # via llama-2-7b-chat (pyproject.toml)
httpcore==1.0.5
    # via httpx
httpx==0.27.2
    # via
    #   gradio
    #   gradio-client
    #   spaces
huggingface-hub==0.25.1
    # via
    #   accelerate
    #   gradio
    #   gradio-client
    #   tokenizers
    #   transformers
idna==3.10
    # via
    #   anyio
    #   httpx
    #   requests
jinja2==3.1.4
    # via
    #   gradio
    #   torch
markdown-it-py==3.0.0
    # via rich
markupsafe==2.1.5
    # via
    #   gradio
    #   jinja2
mdurl==0.1.2
    # via markdown-it-py
mpmath==1.3.0
    # via sympy
networkx==3.3
    # via torch
numpy==2.1.1
    # via
    #   accelerate
    #   bitsandbytes
    #   gradio
    #   pandas
    #   transformers
nvidia-cublas-cu12==12.1.3.1
    # via
    #   nvidia-cudnn-cu12
    #   nvidia-cusolver-cu12
    #   torch
nvidia-cuda-cupti-cu12==12.1.105
    # via torch
nvidia-cuda-nvrtc-cu12==12.1.105
    # via torch
nvidia-cuda-runtime-cu12==12.1.105
    # via torch
nvidia-cudnn-cu12==9.1.0.70
    # via torch
nvidia-cufft-cu12==11.0.2.54
    # via torch
nvidia-curand-cu12==10.3.2.106
    # via torch
nvidia-cusolver-cu12==11.4.5.107
    # via torch
nvidia-cusparse-cu12==12.1.0.106
    # via
    #   nvidia-cusolver-cu12
    #   torch
nvidia-nccl-cu12==2.20.5
    # via torch
nvidia-nvjitlink-cu12==12.6.68
    # via
    #   nvidia-cusolver-cu12
    #   nvidia-cusparse-cu12
nvidia-nvtx-cu12==12.1.105
    # via torch
orjson==3.10.7
    # via gradio
packaging==24.1
    # via
    #   accelerate
    #   gradio
    #   gradio-client
    #   huggingface-hub
    #   spaces
    #   transformers
pandas==2.2.3
    # via gradio
pillow==10.4.0
    # via gradio
psutil==5.9.8
    # via
    #   accelerate
    #   spaces
pydantic==2.9.2
    # via
    #   fastapi
    #   gradio
    #   spaces
pydantic-core==2.23.4
    # via pydantic
pydub==0.25.1
    # via gradio
pygments==2.18.0
    # via rich
python-dateutil==2.9.0.post0
    # via pandas
python-multipart==0.0.12
    # via gradio
pytz==2024.2
    # via pandas
pyyaml==6.0.2
    # via
    #   accelerate
    #   gradio
    #   huggingface-hub
    #   transformers
regex==2024.9.11
    # via transformers
requests==2.32.3
    # via
    #   huggingface-hub
    #   spaces
    #   transformers
rich==13.8.1
    # via typer
ruff==0.6.8
    # via gradio
safetensors==0.4.5
    # via
    #   accelerate
    #   transformers
semantic-version==2.10.0
    # via gradio
shellingham==1.5.4
    # via typer
six==1.16.0
    # via python-dateutil
sniffio==1.3.1
    # via
    #   anyio
    #   httpx
spaces==0.30.3
    # via llama-2-7b-chat (pyproject.toml)
starlette==0.38.6
    # via fastapi
sympy==1.13.3
    # via torch
tokenizers==0.20.0
    # via transformers
tomlkit==0.12.0
    # via gradio
torch==2.4.0
    # via
    #   llama-2-7b-chat (pyproject.toml)
    #   accelerate
    #   bitsandbytes
tqdm==4.66.5
    # via
    #   huggingface-hub
    #   transformers
transformers==4.45.2
    # via llama-2-7b-chat (pyproject.toml)
triton==3.0.0
    # via torch
typer==0.12.5
    # via gradio
typing-extensions==4.12.2
    # via
    #   anyio
    #   fastapi
    #   gradio
    #   gradio-client
    #   huggingface-hub
    #   pydantic
    #   pydantic-core
    #   spaces
    #   torch
    #   typer
    #   uvicorn
tzdata==2024.2
    # via pandas
urllib3==2.2.3
    # via requests
uvicorn==0.31.0
    # via gradio
websockets==12.0
    # via gradio-client
# Manually added dependencies (unpinned; not produced by `uv pip compile`)
optimum
auto-gptq
tiktoken
einops
pytest