Spaces: Runtime error
Update app.py
app.py CHANGED
@@ -9,8 +9,6 @@ def install():
     subprocess.run(shlex.split("pip install https://github.com/Dao-AILab/causal-conv1d/archive/refs/tags/v1.4.0.tar.gz"))
     subprocess.run(shlex.split("pip install https://github.com/state-spaces/mamba/archive/refs/tags/v2.2.2.tar.gz"))
 
-install()
-
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
 import gradio as gr
@@ -49,6 +47,8 @@ model = AutoModelForCausalLM.from_pretrained(
 if device == "cuda":
     model = torch.compile(model)
 
+install()
+
 @spaces.GPU
 def stream_chat(
     message: str,
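Read as one change, the commit moves the install() call, which builds causal-conv1d v1.4.0 and mamba v2.2.2 from their tagged source tarballs, from the top of the module (before the torch/transformers imports) down to just before the @spaces.GPU entry point. Below is a minimal sketch of the resulting app.py ordering; only the placement of install() relative to the imports, torch.compile, and @spaces.GPU comes from the diff, while the model id, device detection, and the body of stream_chat are assumptions added for illustration.

# Minimal sketch of app.py after this commit. Only the ordering of install()
# relative to the imports, torch.compile, and @spaces.GPU is taken from the
# diff; the model id, device detection, and stream_chat body are assumptions.
import shlex
import subprocess


def install():
    # Build the Mamba CUDA kernels from the tagged source releases.
    subprocess.run(shlex.split(
        "pip install https://github.com/Dao-AILab/causal-conv1d/archive/refs/tags/v1.4.0.tar.gz"))
    subprocess.run(shlex.split(
        "pip install https://github.com/state-spaces/mamba/archive/refs/tags/v2.2.2.tar.gz"))


import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
import gradio as gr
import spaces

device = "cuda" if torch.cuda.is_available() else "cpu"

MODEL_ID = "state-spaces/mamba-2.8b-hf"  # placeholder; the real id is not shown in the diff
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID).to(device)

if device == "cuda":
    model = torch.compile(model)

# Moved here from the top of the file: the source builds now run after the
# model is loaded and compiled, right before the GPU-decorated handler.
install()


@spaces.GPU
def stream_chat(message: str, history: list):
    ...  # streaming generation with TextIteratorStreamer (not shown in the diff)

Note that the subprocess.run calls shown in the diff do not pass check=True, so a failed source build would not raise at this point; the sketch preserves that behavior.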