# generated by rye
# use `rye lock` or `rye sync` to update this lockfile
#
# last locked with the following flags:
# pre: false
# features: []
# all-features: false
# with-sources: false
# generate-hashes: false
-e file:.
aiofiles==23.2.1
# via gradio
aiohttp==3.9.5
# via langchain
# via llama-index-core
# via llama-index-legacy
aiosignal==1.3.1
# via aiohttp
altair==5.3.0
# via gradio
annotated-types==0.7.0
# via pydantic
anyio==4.4.0
# via httpx
# via openai
# via starlette
# via watchfiles
async-timeout==4.0.3
# via aiohttp
# via langchain
attrs==23.2.0
# via aiohttp
# via aitoolkits-webui
# via jsonschema
# via referencing
beautifulsoup4==4.12.3
# via llama-index-readers-file
certifi==2024.7.4
# via httpcore
# via httpx
# via requests
charset-normalizer==3.3.2
# via requests
click==8.1.7
# via nltk
# via typer
# via uvicorn
colorama==0.4.6
# via click
# via tqdm
# via uvicorn
contourpy==1.2.1
# via matplotlib
cycler==0.12.1
# via matplotlib
dataclasses-json==0.6.7
# via llama-index-core
# via llama-index-legacy
deprecated==1.2.14
# via llama-index-core
# via llama-index-legacy
diffusers==0.29.2
# via aitoolkits-webui
dirtyjson==1.0.8
# via llama-index-core
# via llama-index-legacy
distro==1.9.0
# via openai
dnspython==2.6.1
# via email-validator
email-validator==2.2.0
# via fastapi
exceptiongroup==1.2.2
# via anyio
fastapi==0.111.0
# via gradio
fastapi-cli==0.0.4
# via fastapi
ffmpy==0.3.2
# via gradio
filelock==3.15.4
# via diffusers
# via huggingface-hub
fonttools==4.53.1
# via matplotlib
frozenlist==1.4.1
# via aiohttp
# via aiosignal
fsspec==2024.6.1
# via gradio-client
# via huggingface-hub
# via llama-index-core
# via llama-index-legacy
gradio==4.38.1
# via aitoolkits-webui
gradio-client==1.1.0
# via gradio
greenlet==3.0.3
# via sqlalchemy
h11==0.14.0
# via httpcore
# via uvicorn
httpcore==1.0.5
# via httpx
httptools==0.6.1
# via uvicorn
httpx==0.27.0
# via fastapi
# via gradio
# via gradio-client
# via llama-cloud
# via llama-index-core
# via llama-index-legacy
# via openai
huggingface-hub==0.23.4
# via diffusers
# via gradio
# via gradio-client
idna==3.7
# via anyio
# via email-validator
# via httpx
# via requests
# via yarl
importlib-metadata==8.0.0
# via diffusers
importlib-resources==6.4.0
# via gradio
jinja2==3.1.4
# via altair
# via fastapi
# via gradio
joblib==1.4.2
# via nltk
jsonpatch==1.33
# via langchain-core
jsonpointer==3.0.0
# via jsonpatch
jsonschema==4.23.0
# via altair
jsonschema-specifications==2023.12.1
# via jsonschema
kiwisolver==1.4.5
# via matplotlib
langchain==0.2.7
# via aitoolkits-webui
langchain-core==0.2.18
# via langchain
# via langchain-openai
# via langchain-text-splitters
langchain-openai==0.1.16
# via aitoolkits-webui
langchain-text-splitters==0.2.2
# via langchain
langsmith==0.1.85
# via langchain
# via langchain-core
llama-cloud==0.0.9
# via llama-index-indices-managed-llama-cloud
llama-index==0.10.55
# via aitoolkits-webui
llama-index-agent-openai==0.2.8
# via llama-index
# via llama-index-program-openai
llama-index-cli==0.1.12
# via llama-index
llama-index-core==0.10.55
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-embeddings-openai
# via llama-index-indices-managed-llama-cloud
# via llama-index-llms-openai
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
# via llama-index-readers-file
# via llama-index-readers-llama-parse
# via llama-parse
llama-index-embeddings-openai==0.1.10
# via llama-index
# via llama-index-cli
llama-index-indices-managed-llama-cloud==0.2.5
# via llama-index
llama-index-legacy==0.9.48
# via llama-index
llama-index-llms-openai==0.1.25
# via aitoolkits-webui
# via llama-index
# via llama-index-agent-openai
# via llama-index-cli
# via llama-index-multi-modal-llms-openai
# via llama-index-program-openai
# via llama-index-question-gen-openai
llama-index-multi-modal-llms-openai==0.1.7
# via llama-index
llama-index-program-openai==0.1.6
# via llama-index
# via llama-index-question-gen-openai
llama-index-question-gen-openai==0.1.3
# via llama-index
llama-index-readers-file==0.1.30
# via llama-index
llama-index-readers-llama-parse==0.1.6
# via llama-index
llama-parse==0.4.7
# via llama-index-readers-llama-parse
markdown-it-py==3.0.0
# via rich
markupsafe==2.1.5
# via gradio
# via jinja2
marshmallow==3.21.3
# via dataclasses-json
matplotlib==3.9.1
# via gradio
mdurl==0.1.2
# via markdown-it-py
multidict==6.0.5
# via aiohttp
# via yarl
mypy-extensions==1.0.0
# via typing-inspect
nest-asyncio==1.6.0
# via llama-index-core
# via llama-index-legacy
networkx==3.3
# via llama-index-core
# via llama-index-legacy
nltk==3.8.1
# via llama-index-core
# via llama-index-legacy
numpy==1.26.4
# via altair
# via contourpy
# via diffusers
# via gradio
# via langchain
# via llama-index-core
# via llama-index-legacy
# via matplotlib
# via pandas
openai==1.35.13
# via langchain-openai
# via llama-index-agent-openai
# via llama-index-core
# via llama-index-legacy
orjson==3.10.6
# via fastapi
# via gradio
# via langsmith
packaging==24.1
# via altair
# via gradio
# via gradio-client
# via huggingface-hub
# via langchain-core
# via marshmallow
# via matplotlib
pandas==2.2.2
# via altair
# via gradio
# via llama-index-core
# via llama-index-legacy
pillow==10.4.0
# via diffusers
# via gradio
# via llama-index-core
# via matplotlib
pydantic==2.8.2
# via aitoolkits-webui
# via fastapi
# via gradio
# via langchain
# via langchain-core
# via langsmith
# via llama-cloud
# via openai
# via pydantic-settings
pydantic-core==2.20.1
# via pydantic
pydantic-settings==2.3.4
# via aitoolkits-webui
pydub==0.25.1
# via gradio
pygments==2.18.0
# via rich
pyparsing==3.1.2
# via matplotlib
pypdf==4.2.0
# via llama-index-readers-file
python-dateutil==2.9.0.post0
# via matplotlib
# via pandas
python-dotenv==1.0.1
# via pydantic-settings
# via uvicorn
python-multipart==0.0.9
# via fastapi
# via gradio
pytz==2024.1
# via pandas
pyyaml==6.0.1
# via gradio
# via huggingface-hub
# via langchain
# via langchain-core
# via llama-index-core
# via uvicorn
referencing==0.35.1
# via jsonschema
# via jsonschema-specifications
regex==2024.5.15
# via diffusers
# via nltk
# via tiktoken
requests==2.32.3
# via diffusers
# via huggingface-hub
# via langchain
# via langsmith
# via llama-index-core
# via llama-index-legacy
# via tiktoken
rich==13.7.1
# via typer
rpds-py==0.19.0
# via jsonschema
# via referencing
ruff==0.5.1
# via gradio
safetensors==0.4.3
# via diffusers
semantic-version==2.10.0
# via gradio
shellingham==1.5.4
# via typer
six==1.16.0
# via python-dateutil
sniffio==1.3.1
# via anyio
# via httpx
# via openai
soupsieve==2.5
# via beautifulsoup4
sqlalchemy==2.0.31
# via langchain
# via llama-index-core
# via llama-index-legacy
starlette==0.37.2
# via fastapi
striprtf==0.0.26
# via llama-index-readers-file
tenacity==8.5.0
# via langchain
# via langchain-core
# via llama-index-core
# via llama-index-legacy
tiktoken==0.7.0
# via langchain-openai
# via llama-index-core
# via llama-index-legacy
tomlkit==0.12.0
# via gradio
toolz==0.12.1
# via altair
tqdm==4.66.4
# via huggingface-hub
# via llama-index-core
# via nltk
# via openai
typer==0.12.3
# via fastapi-cli
# via gradio
typing-extensions==4.12.2
# via altair
# via anyio
# via fastapi
# via gradio
# via gradio-client
# via huggingface-hub
# via llama-index-core
# via llama-index-legacy
# via openai
# via pydantic
# via pydantic-core
# via pypdf
# via sqlalchemy
# via typer
# via typing-inspect
# via uvicorn
typing-inspect==0.9.0
# via dataclasses-json
# via llama-index-core
# via llama-index-legacy
tzdata==2024.1
# via pandas
ujson==5.10.0
# via fastapi
urllib3==2.2.2
# via gradio
# via requests
uvicorn==0.30.1
# via fastapi
# via gradio
watchfiles==0.22.0
# via uvicorn
websockets==11.0.3
# via gradio-client
# via uvicorn
wrapt==1.16.0
# via deprecated
# via llama-index-core
yarl==1.9.4
# via aiohttp
zipp==3.19.2
# via importlib-metadata