# This file was autogenerated by uv via the following command:
#    uv pip compile pyproject.toml -o requirements.txt
aiofiles==23.2.1
    # via gradio
aiohappyeyeballs==2.4.0
    # via aiohttp
aiohttp==3.10.5
    # via
    #   datasets
    #   fsspec
    #   langchain
    #   llama-index-core
    #   llama-index-legacy
aiosignal==1.3.1
    # via aiohttp
annotated-types==0.7.0
    # via pydantic
anyio==4.4.0
    # via
    #   gradio
    #   httpx
    #   openai
    #   starlette
apscheduler==3.10.4
    # via daily-papers (pyproject.toml)
async-timeout==4.0.3
    # via
    #   aiohttp
    #   langchain
attrs==24.2.0
    # via aiohttp
beautifulsoup4==4.12.3
    # via llama-index-readers-file
bitarray==2.9.2
    # via colbert-ai
blinker==1.8.2
    # via flask
catalogue==2.0.10
    # via srsly
certifi==2024.8.30
    # via
    #   httpcore
    #   httpx
    #   requests
charset-normalizer==3.3.2
    # via requests
click==8.1.7
    # via
    #   flask
    #   nltk
    #   typer
    #   uvicorn
colbert-ai==0.2.19
    # via ragatouille
dataclasses-json==0.6.7
    # via
    #   llama-index-core
    #   llama-index-legacy
datasets==2.21.0
    # via
    #   daily-papers (pyproject.toml)
    #   colbert-ai
deprecated==1.2.14
    # via
    #   llama-index-core
    #   llama-index-legacy
dill==0.3.8
    # via
    #   datasets
    #   multiprocess
dirtyjson==1.0.8
    # via
    #   llama-index-core
    #   llama-index-legacy
distro==1.9.0
    # via openai
exceptiongroup==1.2.2
    # via anyio
faiss-cpu==1.8.0.post1
    # via ragatouille
fast-pytorch-kmeans==0.2.0.1
    # via ragatouille
fastapi==0.115.4
    # via gradio
ffmpy==0.4.0
    # via gradio
filelock==3.16.0
    # via
    #   datasets
    #   huggingface-hub
    #   torch
    #   transformers
    #   triton
flask==3.0.3
    # via colbert-ai
frozenlist==1.4.1
    # via
    #   aiohttp
    #   aiosignal
fsspec==2024.6.1
    # via
    #   datasets
    #   gradio-client
    #   huggingface-hub
    #   llama-index-core
    #   llama-index-legacy
    #   torch
git-python==1.0.3
    # via colbert-ai
gitdb==4.0.11
    # via gitpython
gitpython==3.1.43
    # via git-python
gradio==5.5.0
    # via
    #   daily-papers (pyproject.toml)
    #   gradio-calendar
gradio-calendar==0.0.6
    # via daily-papers (pyproject.toml)
gradio-client==1.4.2
    # via gradio
greenlet==3.1.0
    # via sqlalchemy
h11==0.14.0
    # via
    #   httpcore
    #   uvicorn
hf-transfer==0.1.8
    # via daily-papers (pyproject.toml)
httpcore==1.0.5
    # via httpx
httpx==0.27.2
    # via
    #   gradio
    #   gradio-client
    #   langsmith
    #   llama-cloud
    #   llama-index-core
    #   llama-index-legacy
    #   openai
    #   safehttpx
huggingface-hub==0.25.2
    # via
    #   datasets
    #   gradio
    #   gradio-client
    #   sentence-transformers
    #   tokenizers
    #   transformers
idna==3.8
    # via
    #   anyio
    #   httpx
    #   requests
    #   yarl
itsdangerous==2.2.0
    # via flask
jinja2==3.1.4
    # via
    #   flask
    #   gradio
    #   torch
jiter==0.5.0
    # via openai
joblib==1.4.2
    # via
    #   nltk
    #   scikit-learn
jsonpatch==1.33
    # via langchain-core
jsonpointer==3.0.0
    # via jsonpatch
langchain==0.2.16
    # via ragatouille
langchain-core==0.2.39
    # via
    #   langchain
    #   langchain-text-splitters
    #   ragatouille
langchain-text-splitters==0.2.4
    # via langchain
langsmith==0.1.117
    # via
    #   langchain
    #   langchain-core
llama-cloud==0.0.17
    # via llama-index-indices-managed-llama-cloud
llama-index==0.11.8
    # via ragatouille
llama-index-agent-openai==0.3.1
    # via
    #   llama-index
    #   llama-index-llms-openai
    #   llama-index-program-openai
llama-index-cli==0.3.1
    # via llama-index
llama-index-core==0.11.8
    # via
    #   llama-index
    #   llama-index-agent-openai
    #   llama-index-cli
    #   llama-index-embeddings-openai
    #   llama-index-indices-managed-llama-cloud
    #   llama-index-llms-openai
    #   llama-index-multi-modal-llms-openai
    #   llama-index-program-openai
    #   llama-index-question-gen-openai
    #   llama-index-readers-file
    #   llama-index-readers-llama-parse
    #   llama-parse
llama-index-embeddings-openai==0.2.4
    # via
    #   llama-index
    #   llama-index-cli
llama-index-indices-managed-llama-cloud==0.3.0
    # via llama-index
llama-index-legacy==0.9.48.post3
    # via llama-index
llama-index-llms-openai==0.2.3
    # via
    #   llama-index
    #   llama-index-agent-openai
    #   llama-index-cli
    #   llama-index-multi-modal-llms-openai
    #   llama-index-program-openai
    #   llama-index-question-gen-openai
llama-index-multi-modal-llms-openai==0.2.0
    # via llama-index
llama-index-program-openai==0.2.0
    # via
    #   llama-index
    #   llama-index-question-gen-openai
llama-index-question-gen-openai==0.2.0
    # via llama-index
llama-index-readers-file==0.2.1
    # via llama-index
llama-index-readers-llama-parse==0.3.0
    # via llama-index
llama-parse==0.5.5
    # via llama-index-readers-llama-parse
markdown-it-py==3.0.0
    # via rich
markupsafe==2.1.5
    # via
    #   gradio
    #   jinja2
    #   werkzeug
marshmallow==3.22.0
    # via dataclasses-json
mdurl==0.1.2
    # via markdown-it-py
mpmath==1.3.0
    # via sympy
multidict==6.1.0
    # via
    #   aiohttp
    #   yarl
multiprocess==0.70.16
    # via datasets
mypy-extensions==1.0.0
    # via typing-inspect
nest-asyncio==1.6.0
    # via
    #   llama-index-core
    #   llama-index-legacy
networkx==3.3
    # via
    #   llama-index-core
    #   llama-index-legacy
    #   torch
ninja==1.11.1.1
    # via colbert-ai
nltk==3.9.1
    # via
    #   llama-index
    #   llama-index-core
    #   llama-index-legacy
numpy==1.26.4
    # via
    #   datasets
    #   faiss-cpu
    #   fast-pytorch-kmeans
    #   gradio
    #   langchain
    #   llama-index-core
    #   llama-index-legacy
    #   onnx
    #   pandas
    #   pyarrow
    #   scikit-learn
    #   scipy
    #   sentence-transformers
    #   transformers
    #   voyager
nvidia-cublas-cu12==12.1.3.1
    # via
    #   nvidia-cudnn-cu12
    #   nvidia-cusolver-cu12
    #   torch
nvidia-cuda-cupti-cu12==12.1.105
    # via torch
nvidia-cuda-nvrtc-cu12==12.1.105
    # via torch
nvidia-cuda-runtime-cu12==12.1.105
    # via torch
nvidia-cudnn-cu12==9.1.0.70
    # via torch
nvidia-cufft-cu12==11.0.2.54
    # via torch
nvidia-curand-cu12==10.3.2.106
    # via torch
nvidia-cusolver-cu12==11.4.5.107
    # via torch
nvidia-cusparse-cu12==12.1.0.106
    # via
    #   nvidia-cusolver-cu12
    #   torch
nvidia-nccl-cu12==2.20.5
    # via torch
nvidia-nvjitlink-cu12==12.6.68
    # via
    #   nvidia-cusolver-cu12
    #   nvidia-cusparse-cu12
nvidia-nvtx-cu12==12.1.105
    # via torch
onnx==1.16.2
    # via ragatouille
openai==1.44.1
    # via
    #   llama-index-agent-openai
    #   llama-index-embeddings-openai
    #   llama-index-legacy
    #   llama-index-llms-openai
orjson==3.10.7
    # via
    #   gradio
    #   langsmith
packaging==24.1
    # via
    #   datasets
    #   faiss-cpu
    #   gradio
    #   gradio-client
    #   huggingface-hub
    #   langchain-core
    #   marshmallow
    #   transformers
pandas==2.2.2
    # via
    #   daily-papers (pyproject.toml)
    #   datasets
    #   gradio
    #   llama-index-legacy
    #   llama-index-readers-file
pillow==10.4.0
    # via
    #   gradio
    #   llama-index-core
    #   sentence-transformers
protobuf==5.28.0
    # via onnx
pyarrow==17.0.0
    # via datasets
pydantic==2.9.1
    # via
    #   fastapi
    #   gradio
    #   langchain
    #   langchain-core
    #   langsmith
    #   llama-cloud
    #   llama-index-core
    #   openai
pydantic-core==2.23.3
    # via pydantic
pydub==0.25.1
    # via gradio
pygments==2.18.0
    # via rich
pynvml==11.5.3
    # via fast-pytorch-kmeans
pypdf==4.3.1
    # via llama-index-readers-file
python-dateutil==2.9.0.post0
    # via pandas
python-dotenv==1.0.1
    # via colbert-ai
python-multipart==0.0.12
    # via gradio
pytz==2024.2
    # via
    #   apscheduler
    #   pandas
pyyaml==6.0.2
    # via
    #   datasets
    #   gradio
    #   huggingface-hub
    #   langchain
    #   langchain-core
    #   llama-index-core
    #   transformers
ragatouille==0.0.8.post4
    # via daily-papers (pyproject.toml)
regex==2024.7.24
    # via
    #   nltk
    #   tiktoken
    #   transformers
requests==2.32.3
    # via
    #   datasets
    #   huggingface-hub
    #   langchain
    #   langsmith
    #   llama-index-core
    #   llama-index-legacy
    #   tiktoken
    #   transformers
rich==13.8.1
    # via typer
ruff==0.6.4
    # via gradio
safehttpx==0.1.1
    # via gradio
safetensors==0.4.5
    # via transformers
scikit-learn==1.5.1
    # via sentence-transformers
scipy==1.14.1
    # via
    #   colbert-ai
    #   scikit-learn
    #   sentence-transformers
semantic-version==2.10.0
    # via gradio
sentence-transformers==2.7.0
    # via ragatouille
setuptools==74.1.2
    # via daily-papers (pyproject.toml)
shellingham==1.5.4
    # via typer
six==1.16.0
    # via
    #   apscheduler
    #   python-dateutil
smmap==5.0.1
    # via gitdb
sniffio==1.3.1
    # via
    #   anyio
    #   httpx
    #   openai
soupsieve==2.6
    # via beautifulsoup4
sqlalchemy==2.0.34
    # via
    #   langchain
    #   llama-index-core
    #   llama-index-legacy
srsly==2.4.8
    # via ragatouille
starlette==0.41.2
    # via
    #   fastapi
    #   gradio
striprtf==0.0.26
    # via llama-index-readers-file
sympy==1.13.2
    # via torch
tenacity==8.5.0
    # via
    #   langchain
    #   langchain-core
    #   llama-index-core
    #   llama-index-legacy
threadpoolctl==3.5.0
    # via scikit-learn
tiktoken==0.7.0
    # via
    #   llama-index-core
    #   llama-index-legacy
tokenizers==0.19.1
    # via transformers
tomlkit==0.12.0
    # via gradio
torch==2.4.1
    # via
    #   fast-pytorch-kmeans
    #   ragatouille
    #   sentence-transformers
tqdm==4.66.5
    # via
    #   daily-papers (pyproject.toml)
    #   colbert-ai
    #   datasets
    #   huggingface-hub
    #   llama-index-core
    #   nltk
    #   openai
    #   sentence-transformers
    #   transformers
transformers==4.44.2
    # via
    #   colbert-ai
    #   ragatouille
    #   sentence-transformers
triton==3.0.0
    # via torch
typer==0.12.5
    # via gradio
typing-extensions==4.12.2
    # via
    #   anyio
    #   fastapi
    #   gradio
    #   gradio-client
    #   huggingface-hub
    #   langchain-core
    #   llama-index-core
    #   llama-index-legacy
    #   multidict
    #   openai
    #   pydantic
    #   pydantic-core
    #   pypdf
    #   sqlalchemy
    #   torch
    #   typer
    #   typing-inspect
    #   uvicorn
typing-inspect==0.9.0
    # via
    #   dataclasses-json
    #   llama-index-core
    #   llama-index-legacy
tzdata==2024.1
    # via pandas
tzlocal==5.2
    # via apscheduler
ujson==5.10.0
    # via colbert-ai
urllib3==2.2.2
    # via requests
uvicorn==0.30.6
    # via gradio
voyager==2.0.9
    # via ragatouille
websockets==12.0
    # via gradio-client
werkzeug==3.0.4
    # via flask
wrapt==1.16.0
    # via
    #   deprecated
    #   llama-index-core
xxhash==3.5.0
    # via datasets
yarl==1.11.1
    # via aiohttp