Update gradio_app.py

gradio_app.py  CHANGED  (+4 -4)
@@ -32,19 +32,19 @@ def single_inference(user_prompt, save_path, guidance_scale, num_sampling_steps,
 
     # Load models
     vae = AllegroAutoencoderKL3D.from_pretrained(
-        "
+        "./allegro_weights/vae/",
         torch_dtype=torch.float32
     ).cuda()
     vae.eval()
 
-    text_encoder = T5EncoderModel.from_pretrained("
+    text_encoder = T5EncoderModel.from_pretrained("./allegro_weights/text_encoder/", torch_dtype=dtype)
     text_encoder.eval()
 
-    tokenizer = T5Tokenizer.from_pretrained("
+    tokenizer = T5Tokenizer.from_pretrained("./allegro_weights/tokenizer/")
 
     scheduler = EulerAncestralDiscreteScheduler()
 
-    transformer = AllegroTransformer3DModel.from_pretrained("
+    transformer = AllegroTransformer3DModel.from_pretrained("./allegro_weights/transformer/", torch_dtype=dtype).cuda()
     transformer.eval()
 
     allegro_pipeline = AllegroPipeline(
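The change points every from_pretrained call at a local ./allegro_weights/ directory instead of a remote model id, so those subfolders (vae/, text_encoder/, tokenizer/, transformer/) must exist on disk before launching the app. A minimal sketch of one way to populate that directory with huggingface_hub, assuming the weights are hosted under the rhymes-ai/Allegro repo (an assumption, not stated in this diff):

# Sketch: download the Allegro weights into the layout gradio_app.py now expects.
# The repo_id below is an assumption; adjust it if your weights live elsewhere.
from huggingface_hub import snapshot_download

snapshot_download(
    repo_id="rhymes-ai/Allegro",      # assumed Hugging Face repo id
    local_dir="./allegro_weights",    # matches the paths used in the diff
    allow_patterns=[                  # only the subfolders the app loads
        "vae/*",
        "text_encoder/*",
        "tokenizer/*",
        "transformer/*",
    ],
)

The scheduler needs no download, since the diff constructs EulerAncestralDiscreteScheduler() directly rather than loading it from a checkpoint.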