DamarJati committed on
Commit
227fd9e
β€’
1 Parent(s): 7923139

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -0
app.py CHANGED
@@ -12,6 +12,10 @@ import subprocess
12
  # Install necessary packages
13
  subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
14
 
 
 
 
 
15
  # Initialize Llama Cleaner model
16
  device = 'cuda' if torch.cuda.is_available() else 'cpu'
17
 
 
12
  # Install necessary packages
13
  subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
14
 
15
+ torch.backends.cudnn.deterministic = True
16
+ torch.backends.cudnn.benchmark = False
17
+ torch.backends.cuda.matmul.allow_tf32 = True
18
+
19
  # Initialize Llama Cleaner model
20
  device = 'cuda' if torch.cuda.is_available() else 'cpu'
21