Commit 0b6b43b
dmitriitochilkin committed
1 Parent(s): 5a25b6c

Add glb export

Changed files:
- app.py (+34 -14)
- examples/captured_p.webp (+0 -0)
- markdown.md (+0 -40)
- tsr/utils.py (+1 -1)
app.py
CHANGED
@@ -53,7 +53,7 @@ HEADER = """
 
 **Tips:**
 1. If you find the result is unsatisfied, please try to change the foreground ratio. It might improve the results.
-2.
+2. It's better to disable "Remove Background" for the provided examples since they have been already preprocessed.
 3. Otherwise, please disable "Remove Background" option only if your input image is RGBA with transparent background, image contents are centered and occupy more than 70% of image width or height.
 """
 
@@ -99,18 +99,24 @@ def preprocess(input_image, do_remove_background, foreground_ratio):
 
 
 @spaces.GPU
-def generate(image):
+def generate(image, mc_resolution, formats=["obj", "glb"]):
     scene_codes = model(image, device=device)
-    mesh = model.extract_mesh(scene_codes)[0]
+    mesh = model.extract_mesh(scene_codes, resolution=mc_resolution)[0]
     mesh = to_gradio_3d_orientation(mesh)
-
-
-
+
+    mesh_path_glb = tempfile.NamedTemporaryFile(suffix=f".glb", delete=False)
+    mesh.export(mesh_path_glb.name)
+
+    mesh_path_obj = tempfile.NamedTemporaryFile(suffix=f".obj", delete=False)
+    mesh.apply_scale([-1, 1, 1])  # Otherwise the visualized .obj will be flipped
+    mesh.export(mesh_path_obj.name)
+
+    return mesh_path_obj.name, mesh_path_glb.name
 
 def run_example(image_pil):
     preprocessed = preprocess(image_pil, False, 0.9)
-
-    return preprocessed,
+    mesh_name_obj, mesh_name_glb = generate(preprocessed, 256, ["obj", "glb"])
+    return preprocessed, mesh_name_obj, mesh_name_glb
 
 with gr.Blocks() as demo:
     gr.Markdown(HEADER)
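The added export logic relies on trimesh choosing the exporter from the file suffix, so the same mesh.export() call writes both GLB and OBJ. A minimal sketch of that pattern outside the Space, assuming trimesh is installed and using a box primitive as a stand-in for the reconstructed mesh:

```python
import tempfile
import trimesh

# Stand-in mesh; in the Space this comes from model.extract_mesh(...).
mesh = trimesh.creation.box(extents=(1, 1, 1))

# trimesh picks the exporter from the file extension, so the same call
# produces a binary glTF (.glb) or a Wavefront OBJ (.obj).
glb_file = tempfile.NamedTemporaryFile(suffix=".glb", delete=False)
mesh.export(glb_file.name)

obj_file = tempfile.NamedTemporaryFile(suffix=".obj", delete=False)
mesh.apply_scale([-1, 1, 1])  # mirror on X, as the commit does before the OBJ export
mesh.export(obj_file.name)

print(glb_file.name, obj_file.name)
```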
@@ -137,21 +143,35 @@ with gr.Blocks() as demo:
             value=0.85,
             step=0.05,
         )
+        mc_resolution = gr.Slider(
+            label="Marching Cubes Resolution",
+            minimum=32,
+            maximum=320,
+            value=256,
+            step=32
+        )
         with gr.Row():
             submit = gr.Button("Generate", elem_id="generate", variant="primary")
     with gr.Column():
-        with gr.Tab("
-
-            label="Output Model",
+        with gr.Tab("OBJ"):
+            output_model_obj = gr.Model3D(
+                label="Output Model (OBJ Format)",
+                interactive=False,
+            )
+            gr.Markdown("Note: Downloaded object will be flipped in case of .obj export. Export .glb instead or manually flip it before usage.")
+        with gr.Tab("GLB"):
+            output_model_glb = gr.Model3D(
+                label="Output Model (GLB Format)",
                 interactive=False,
             )
+            gr.Markdown("Note: The model shown here has a darker appearance. Download to get correct results.")
     with gr.Row(variant="panel"):
         gr.Examples(
             examples=[
                 os.path.join("examples", img_name) for img_name in sorted(os.listdir("examples"))
             ],
             inputs=[input_image],
-            outputs=[processed_image,
+            outputs=[processed_image, output_model_obj, output_model_glb],
             cache_examples=True,
             fn=partial(run_example),
             label="Examples",
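The note under the new OBJ tab warns that the downloaded .obj is mirrored. If needed, the flip can be undone locally with the same apply_scale trick used in generate(); a hedged sketch with placeholder file names:

```python
import trimesh

# Load the downloaded OBJ (placeholder path), mirror it back on the X axis, re-save.
mesh = trimesh.load("output_model.obj", force="mesh")
mesh.apply_scale([-1, 1, 1])  # undo the flip applied before export
mesh.export("output_model_fixed.obj")
```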
@@ -163,8 +183,8 @@ with gr.Blocks() as demo:
         outputs=[processed_image],
     ).success(
         fn=generate,
-        inputs=[processed_image],
-        outputs=[
+        inputs=[processed_image, mc_resolution],
+        outputs=[output_model_obj, output_model_glb],
     )
 
 demo.queue(max_size=10)
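This hunk rewires the submit chain so that generation only runs after preprocessing succeeds and now also receives the marching-cubes resolution. A small self-contained sketch of Gradio's .success() chaining, using simplified stand-in functions and components rather than the Space's real ones:

```python
import gradio as gr

def preprocess(text):
    return text.strip()

def generate(text, resolution):
    return f"{text} @ {int(resolution)}"

with gr.Blocks() as demo:
    inp = gr.Textbox(label="Input")
    res = gr.Slider(32, 320, value=256, step=32, label="Resolution")
    out_a = gr.Textbox(label="Preprocessed")
    out_b = gr.Textbox(label="Result")
    btn = gr.Button("Generate")

    # generate only runs if preprocess finished without raising an error
    btn.click(fn=preprocess, inputs=[inp], outputs=[out_a]).success(
        fn=generate, inputs=[out_a, res], outputs=[out_b]
    )

demo.launch()
```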
examples/captured_p.webp
ADDED
markdown.md
DELETED
@@ -1,40 +0,0 @@
-# TripoSR Demo
-<table>
-  <tr>
-    <td style="text-align: center;">
-      <a href="https://stability.ai">
-        <img src="https://images.squarespace-cdn.com/content/v1/6213c340453c3f502425776e/6c9c4c25-5410-4547-bc26-dc621cdacb25/Stability+AI+logo.png" height="40" />
-      </a>
-    </td>
-    <td style="border-left: 1px solid #000; width: 1px;"></td>
-    <td style="text-align: center;">
-      <a href="https://www.tripo3d.ai">
-        <img src="https://www.tripo3d.ai/logo.png" height="40" />
-      </a>
-    </td>
-  </tr>
-</table>
-
-<table cellspacing="0" cellpadding="0">
-  <tr>
-    <td style="text-align: center;">
-      <a href="https://huggingface.co/stabilityai/TripoSR"><img src="https://img.shields.io/badge/%F0%9F%A4%97%20Model_Card-Huggingface-orange"></a>
-    </td>
-    <td style="border-left: 1px solid #000; width: 1px;"></td>
-    <td style="text-align: center;">
-      <a href="https://github.com/VAST-AI-Research/TripoSR"><img src="logos/github-mark-white.png" height="20"></a>
-    </td>
-    <td style="border-left: 1px solid #000; width: 1px;"></td>
-    <td style="text-align: left;">
-      <a href="https://github.com/VAST-AI-Research/TripoSR"><img src="https://img.shields.io/badge/arXiv-1234.56789-b31b1b.svg" height="20"></a>
-    </td>
-  </tr>
-</table>
-
-
-
-**TripoSR** is a state-of-the-art open-source model for **fast** feedforward 3D reconstruction from a single image, developed in collaboration between [Tripo AI](https://www.tripo3d.ai/) and [Stability AI](https://stability.ai/).
-
-**Tips:**
-1. If you find the result is unsatisfied, please try to change the foreground ratio. It might improve the results.
-2. Please disable "Remove Background" option only if your input image is RGBA with transparent background, image contents are centered and occupy more than 70% of image width or height.
tsr/utils.py
CHANGED
@@ -477,6 +477,6 @@ def save_video(
 
 def to_gradio_3d_orientation(mesh):
     mesh.apply_transform(trimesh.transformations.rotation_matrix(-np.pi/2, [1, 0, 0]))
-    mesh.apply_scale([1, 1, -1])
+    # mesh.apply_scale([1, 1, -1])
     mesh.apply_transform(trimesh.transformations.rotation_matrix(np.pi/2, [0, 1, 0]))
     return mesh
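With the apply_scale([1, 1, -1]) line commented out, the helper now only composes two rotations. A quick way to inspect the resulting orientation change, assuming numpy and trimesh are available:

```python
import numpy as np
import trimesh

# Compose the two rotations kept in to_gradio_3d_orientation:
# -90 degrees about X, then +90 degrees about Y.
rx = trimesh.transformations.rotation_matrix(-np.pi / 2, [1, 0, 0])
ry = trimesh.transformations.rotation_matrix(np.pi / 2, [0, 1, 0])
combined = ry @ rx

# Print where the original basis vectors end up (rounded for readability).
for name, v in [("x", [1, 0, 0]), ("y", [0, 1, 0]), ("z", [0, 0, 1])]:
    print(name, np.round(combined[:3, :3] @ v, 3))
```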