import subprocess
import select
import os
from mods.btn_reset import restart_space
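
# First run on the Space: fetch the quantized model and the prebuilt cache dataset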
if not os.path.exists('downloads/causallm_7b.Q5_K_M.gguf'):
    from huggingface_hub import snapshot_download
    os.mkdir("downloads")
    os.mkdir("cache")
    snapshot_download(repo_id='TheBloke/CausalLM-7B-GGUF', local_dir=r'downloads',
                      allow_patterns='causallm_7b.Q5_K_M.gguf')
    snapshot_download(repo_id='Limour/llama-python-streamingllm-cache', repo_type='dataset', local_dir=r'cache')
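
# Run the Gradio demo as a child process and mirror its stdout/stderr into this process's log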
try:
    # Launch the other program and capture its output through pipes
    process = subprocess.Popen(["python", "gradio_streamingllm.py"],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               bufsize=1, universal_newlines=True)
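    # bufsize=1 with universal_newlines=True gives line-buffered text pipes,
    # so readline() below yields the child's output as each line arrives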
    while process.poll() is None:
        # Use the select module to check whether there is readable data
        ready_reads, _, _ = select.select([process.stdout, process.stderr], [], [])
        if not ready_reads:
            print('select.select timeout')
        for ready in ready_reads:
            # Read the output and print it
            output = ready.readline()
            if output:
                print(output, end='')
            else:
                print('select.select timeout')
    # Read any remaining output
    for output in process.stdout.readlines() + process.stderr.readlines():
        print(output, end='')
    # Check the return code to determine whether the process exited successfully
    if process.returncode == 0:
        print("Process has terminated successfully.")
    else:
        print(f"Process has terminated with an error. {process.returncode}")
finally:
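    # The child has exited; presumably restart_space() (mods.btn_reset) reboots the Space so the demo comes back up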
    restart_space()