Spaces:
Runtime error
Runtime error
Upload multit2i.py
Browse files — multit2i.py (+1 −1)
multit2i.py
CHANGED
@@ -124,7 +124,7 @@ def load_from_model(model_name: str, hf_token: str = None):
|
|
124 |
f"Could not find model: {model_name}. If it is a private or gated model, please provide your Hugging Face access token (https://huggingface.co/settings/tokens) as the argument for the `hf_token` parameter."
|
125 |
)
|
126 |
headers["X-Wait-For-Model"] = "true"
|
127 |
- client = huggingface_hub.InferenceClient(model=model_name, headers=headers, token=hf_token, timeout=… [removed line truncated in page capture — original timeout value not visible]
|
128 |
inputs = gr.components.Textbox(label="Input")
|
129 |
outputs = gr.components.Image(label="Output")
|
130 |
fn = client.text_to_image
|
|
|
124 |
f"Could not find model: {model_name}. If it is a private or gated model, please provide your Hugging Face access token (https://huggingface.co/settings/tokens) as the argument for the `hf_token` parameter."
|
125 |
)
|
126 |
headers["X-Wait-For-Model"] = "true"
|
127 |
+ client = huggingface_hub.InferenceClient(model=model_name, headers=headers, token=hf_token, timeout=300)
|
128 |
inputs = gr.components.Textbox(label="Input")
|
129 |
outputs = gr.components.Image(label="Output")
|
130 |
fn = client.text_to_image
|