haodongli committed
Commit: 5a85e2a
Parent: 50e7792

add transformers.utils.move_cache()

Files changed (1): app.py (+3, -0)
app.py CHANGED
@@ -14,6 +14,9 @@ from pathlib import Path
 import gradio
 from gradio.utils import get_cache_folder
 from infer import lotus, lotus_video
+import transformers
+
+transformers.utils.move_cache()
 
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
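For context, transformers.utils.move_cache() migrates files from the legacy transformers cache layout into the shared Hugging Face Hub cache, and is harmless to call again after a successful migration. Calling it unconditionally at import time, as this commit does, could fail if the cache directory is not writable; a minimal defensive variant is sketched below (the try/except guard and the printed message are assumptions, not part of this commit):

import transformers

try:
    # One-time migration of the legacy transformers cache into the
    # shared Hugging Face Hub cache layout; effectively a no-op once
    # the migration has already run.
    transformers.utils.move_cache()
except Exception as err:
    # Assumption: skipping the migration is acceptable here, e.g. on
    # a read-only filesystem where the old cache cannot be moved.
    print(f"Cache migration skipped: {err}")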