numpy==1.26.4
scipy
lxml
pydub
fastapi
soundfile
# omegaconf is pinned below (~=2.3.0); the unpinned duplicate is disabled
# because pip rejects two requirement lines for the same package.
# omegaconf
pypinyin
vocos
pandas
vector_quantize_pytorch
einops
transformers~=4.41.1
omegaconf~=2.3.0
tqdm
# torch
# torchvision
# torchaudio
gradio
emojiswitch
python-dotenv
zhon
mistune==3.0.2
cn2an
# audio_denoiser
python-box
ftfy
librosa
pyrubberband
# https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.9.post1/flash_attn-2.5.9.post1+cu118torch1.12cxx11abiFALSE-cp310-cp310-linux_x86_64.whl