Squaad AI committed
Commit 725e40d • 1 Parent(s): 760e49d

Update app.py

Files changed (1): app.py +1 -1
app.py CHANGED
@@ -14,7 +14,7 @@ from diffusers import AutoencoderKL, DiffusionPipeline
 
 DESCRIPTION = "# Run any LoRA or SD Model"
 if not torch.cuda.is_available():
-    DESCRIPTION += "\n<p>⚠️ This space is running on the CPU. This demo doesn't work on CPU 😞! Run on a GPU by duplicating this space or test our website for free and unlimited by <a href='https://discord.gg/JprjXpjt9K'>clicking here</a>, which provides these and more options.</p>"
+    DESCRIPTION += "\n<p>⚠️ This space is running on the CPU. This demo doesn't work on CPU 😞! Run on a GPU by duplicating this space or test our website for free and unlimited by <a href='https://squaadai.com'>clicking here</a>, which provides these and more options.</p>"
 
 MAX_SEED = np.iinfo(np.int32).max
 CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES") == "1"
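
For context, the commit only swaps the link target inside the CPU warning banner. Below is a minimal sketch of how this DESCRIPTION string is typically wired into a Gradio Space; the gr.Blocks/gr.Markdown wiring and the launch call are assumptions for illustration, since only the DESCRIPTION, MAX_SEED, and CACHE_EXAMPLES lines appear in the diff above.

import os

import gradio as gr
import numpy as np
import torch

DESCRIPTION = "# Run any LoRA or SD Model"
if not torch.cuda.is_available():
    # Appended only on CPU-only hosts; the commit above just changes the
    # link target in this HTML snippet (Discord invite -> https://squaadai.com).
    DESCRIPTION += (
        "\n<p>⚠️ This space is running on the CPU. This demo doesn't work on "
        "CPU 😞! Run on a GPU by duplicating this space or test our website "
        "for free and unlimited by <a href='https://squaadai.com'>clicking "
        "here</a>, which provides these and more options.</p>"
    )

# Seeds are drawn from the full positive int32 range.
MAX_SEED = np.iinfo(np.int32).max
# Examples are pre-computed only when a GPU is available and the env flag is set.
CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES") == "1"

# Assumed wiring: render the (possibly warning-augmented) description at the
# top of the app; gr.Markdown also renders the embedded HTML paragraph.
with gr.Blocks() as demo:
    gr.Markdown(DESCRIPTION)

if __name__ == "__main__":
    demo.launch()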