jbilcke-hf HF staff committed on
Commit
88cc598
1 Parent(s): 7f11b82

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -13
app.py CHANGED
@@ -50,12 +50,6 @@ if torch.cuda.is_available():
50
  else:
51
  pipe = None
52
  refiner = None
53
-
54
- def check_secret_token(token: str) -> str:
55
- """Raises an error if the token does not match the secret token."""
56
- if token != SECRET_TOKEN:
57
- raise ValueError("Invalid secret token!")
58
- return token
59
 
60
  def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
61
  if randomize_seed:
@@ -77,7 +71,11 @@ def generate(prompt: str,
77
  guidance_scale_refiner: float = 5.0,
78
  num_inference_steps_base: int = 50,
79
  num_inference_steps_refiner: int = 50,
80
- apply_refiner: bool = False) -> PIL.Image.Image:
 
 
 
 
81
  generator = torch.Generator().manual_seed(seed)
82
 
83
  if not use_negative_prompt:
@@ -272,12 +270,6 @@ with gr.Blocks(css='style.css') as demo:
272
  secret_token,
273
  ]
274
  prompt.submit(
275
- fn=check_secret_token,
276
- inputs=[secret_token],
277
- outputs=gr.outputs.Void(),
278
- queue=False,
279
- api_name=False,
280
- ).then(
281
  fn=randomize_seed_fn,
282
  inputs=[seed, randomize_seed],
283
  outputs=seed,
 
50
  else:
51
  pipe = None
52
  refiner = None
 
 
 
 
 
 
53
 
54
  def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
55
  if randomize_seed:
 
71
  guidance_scale_refiner: float = 5.0,
72
  num_inference_steps_base: int = 50,
73
  num_inference_steps_refiner: int = 50,
74
+ apply_refiner: bool = False,
75
+ secret_token: str = '') -> PIL.Image.Image:
76
+ if secret_token != SECRET_TOKEN:
77
+ raise ValueError("Invalid secret token!")
78
+
79
  generator = torch.Generator().manual_seed(seed)
80
 
81
  if not use_negative_prompt:
 
270
  secret_token,
271
  ]
272
  prompt.submit(
 
 
 
 
 
 
273
  fn=randomize_seed_fn,
274
  inputs=[seed, randomize_seed],
275
  outputs=seed,