Update app.py
app.py CHANGED
@@ -262,78 +262,78 @@ def unprojection_pcd(state_cache):
     return fig, pcd_ply, "Success!"


-title = "Metric3D"
-description = '''# Metric3Dv2: A versatile monocular geometric foundation model for zero-shot metric depth and surface normal estimation
-Gradio demo for Metric3D v1/v2 which takes in a single image for computing metric depth and surface normal. To use it, simply upload your image, or click one of the examples to load them. Learn more from our paper linked below.'''
-article = "<p style='text-align: center'><a href='https://arxiv.org/pdf/2307.10984.pdf'>Metric3D arxiv</a> | <a href='https://arxiv.org/abs/2404.15506'>Metric3Dv2 arxiv</a> | <a href='https://github.com/YvanYin/Metric3D'>Github Repo</a></p>"
-
-custom_css = '''#button1, #button2 {
-    width: 20px;
-}'''
-
-examples = [
-    #["turtle.jpg"],
-    #["lions.jpg"]
-    #["files/gundam.jpg"],
-    "files/p50_pro.jpg",
-    "files/iphone13.JPG",
-    "files/canon_cat.JPG",
-    "files/canon_dog.JPG",
-    "files/museum.jpg",
-    "files/terra.jpg",
-    "files/underwater.jpg",
-    "files/venue.jpg",
-]
-
-
-with gr.Blocks(title=title, css=custom_css) as demo:
-    gr.Markdown(description + article)
-
-    # input and control components
-    with gr.Row():
-        with gr.Column():
-            image_input = gr.Image(type='pil', label="Original Image")
-            _ = gr.Examples(examples=examples, inputs=[image_input])
-        with gr.Column():
-            model_dropdown = gr.Dropdown(["vit-small", "vit-large"], label="Model", value="vit-large")
-
-            with gr.Accordion('Advanced options (beta)', open=True):
-                with gr.Row():
-                    sensor_width = gr.Number(None, label="Sensor Width in mm", precision=2)
-                    sensor_height = gr.Number(None, label="Sensor Height in mm", precision=2)
-                    focal_len = gr.Number(None, label="Focal Length in mm", precision=2)
-                    camera_detector = gr.Button("Detect Camera from EXIF", elem_id="#button1")
-                with gr.Row():
-                    fx = gr.Number(1000.0, label="fx in pixels", precision=2)
-                    fy = gr.Number(1000.0, label="fy in pixels", precision=2)
-                    focal_detector = gr.Button("Calculate Intrinsic", elem_id="#button2")
-
-            message_box = gr.Textbox(label="Messages")
-
-    # depth and normal
-    submit_button = gr.Button("Predict Depth & Normal")
-    with gr.Row():
-        with gr.Column():
-            depth_output = gr.Image(label="Output Depth")
-            depth_file = gr.File(label="Depth (.npy)")
-        with gr.Column():
-            normal_output = gr.Image(label="Output Normal")
-            normal_file = gr.File(label="Normal (.npy)")
-
-    # 3D reconstruction
-    reconstruct_button = gr.Button("Reconstruct 3D")
-    pcd_output = gr.Plot(label="3D Point Cloud (Sampled sparse version)")
-    pcd_ply = gr.File(label="3D Point Cloud (.ply)")
-
-    # cache for depth, normal maps and other states
-    state_cache = gr.State({})
-
-    # detect focal length in pixels
-    camera_detector.click(fn=get_camera, inputs=[image_input], outputs=[sensor_width, sensor_height, focal_len, message_box])
-    focal_detector.click(fn=get_intrinsic, inputs=[image_input, sensor_width, sensor_height, focal_len], outputs=[fx, fy, message_box])
-
-    submit_button.click(fn=predict_depth_normal, inputs=[image_input, model_dropdown, fx, fy, state_cache], outputs=[depth_output, depth_file, normal_output, normal_file, state_cache, message_box])
-    reconstruct_button.click(fn=unprojection_pcd, inputs=[state_cache], outputs=[pcd_output, pcd_ply, message_box])
+# title = "Metric3D"
+# description = '''# Metric3Dv2: A versatile monocular geometric foundation model for zero-shot metric depth and surface normal estimation
+# Gradio demo for Metric3D v1/v2 which takes in a single image for computing metric depth and surface normal. To use it, simply upload your image, or click one of the examples to load them. Learn more from our paper linked below.'''
+# article = "<p style='text-align: center'><a href='https://arxiv.org/pdf/2307.10984.pdf'>Metric3D arxiv</a> | <a href='https://arxiv.org/abs/2404.15506'>Metric3Dv2 arxiv</a> | <a href='https://github.com/YvanYin/Metric3D'>Github Repo</a></p>"
+#
+# custom_css = '''#button1, #button2 {
+#     width: 20px;
+# }'''
+#
+# examples = [
+#     #["turtle.jpg"],
+#     #["lions.jpg"]
+#     #["files/gundam.jpg"],
+#     "files/p50_pro.jpg",
+#     "files/iphone13.JPG",
+#     "files/canon_cat.JPG",
+#     "files/canon_dog.JPG",
+#     "files/museum.jpg",
+#     "files/terra.jpg",
+#     "files/underwater.jpg",
+#     "files/venue.jpg",
+# ]
+#
+#
+# with gr.Blocks(title=title, css=custom_css) as demo:
+#     gr.Markdown(description + article)
+#
+#     # input and control components
+#     with gr.Row():
+#         with gr.Column():
+#             image_input = gr.Image(type='pil', label="Original Image")
+#             _ = gr.Examples(examples=examples, inputs=[image_input])
+#         with gr.Column():
+#             model_dropdown = gr.Dropdown(["vit-small", "vit-large"], label="Model", value="vit-large")
+#
+#             with gr.Accordion('Advanced options (beta)', open=True):
+#                 with gr.Row():
+#                     sensor_width = gr.Number(None, label="Sensor Width in mm", precision=2)
+#                     sensor_height = gr.Number(None, label="Sensor Height in mm", precision=2)
+#                     focal_len = gr.Number(None, label="Focal Length in mm", precision=2)
+#                     camera_detector = gr.Button("Detect Camera from EXIF", elem_id="#button1")
+#                 with gr.Row():
+#                     fx = gr.Number(1000.0, label="fx in pixels", precision=2)
+#                     fy = gr.Number(1000.0, label="fy in pixels", precision=2)
+#                     focal_detector = gr.Button("Calculate Intrinsic", elem_id="#button2")
+#
+#             message_box = gr.Textbox(label="Messages")
+#
+#     # depth and normal
+#     submit_button = gr.Button("Predict Depth & Normal")
+#     with gr.Row():
+#         with gr.Column():
+#             depth_output = gr.Image(label="Output Depth")
+#             depth_file = gr.File(label="Depth (.npy)")
+#         with gr.Column():
+#             normal_output = gr.Image(label="Output Normal")
+#             normal_file = gr.File(label="Normal (.npy)")
+#
+#     # 3D reconstruction
+#     reconstruct_button = gr.Button("Reconstruct 3D")
+#     pcd_output = gr.Plot(label="3D Point Cloud (Sampled sparse version)")
+#     pcd_ply = gr.File(label="3D Point Cloud (.ply)")
+#
+#     # cache for depth, normal maps and other states
+#     state_cache = gr.State({})
+#
+#     # detect focal length in pixels
+#     camera_detector.click(fn=get_camera, inputs=[image_input], outputs=[sensor_width, sensor_height, focal_len, message_box])
+#     focal_detector.click(fn=get_intrinsic, inputs=[image_input, sensor_width, sensor_height, focal_len], outputs=[fx, fy, message_box])
+#
+#     submit_button.click(fn=predict_depth_normal, inputs=[image_input, model_dropdown, fx, fy, state_cache], outputs=[depth_output, depth_file, normal_output, normal_file, state_cache, message_box])
+#     reconstruct_button.click(fn=unprojection_pcd, inputs=[state_cache], outputs=[pcd_output, pcd_ply, message_box])

 #demo.launch(server_name="0.0.0.0")

@@ -359,7 +359,7 @@ with gr.Blocks(title=title, css=custom_css) as demo:
 # iface.launch()


-iface = gr.Interface(
+gradio_app = gr.Interface(
     fn=predict_depth_normal,
     inputs=[
         gr.Image(type='pil', label="Original Image"),
@@ -378,4 +378,5 @@ iface = gr.Interface(
     description="Metric3Dv2: A versatile monocular geometric foundation model for zero-shot metric depth and surface normal estimation."
 )

-
+if __name__ == "__main__":
+    gradio_app.launch(share=True)