import matplotlib
import numpy as np
import torch

def colorize_depth_maps(
    depth_map, min_depth, max_depth, cmap="Spectral", valid_mask=None
):
    """
    Colorize depth maps with a matplotlib colormap.

    Args:
        depth_map: [(B,) H, W] torch.Tensor or np.ndarray of depth values.
        min_depth, max_depth: range used to normalize depth to [0, 1].
        cmap: name of the matplotlib colormap.
        valid_mask: optional [(B,) H, W] boolean mask; invalid pixels are set to 0.

    Returns:
        Colored maps of shape [B, 3, H, W] with values in [0, 1], of the same
        type (torch.Tensor or np.ndarray) as the input.
    """
    assert len(depth_map.shape) >= 2, "Invalid dimension"

    if isinstance(depth_map, torch.Tensor):
        depth = depth_map.detach().cpu().squeeze().numpy()
    elif isinstance(depth_map, np.ndarray):
        depth = depth_map.copy().squeeze()
    else:
        raise TypeError("depth_map must be a torch.Tensor or np.ndarray")
    # reshape to [ (B,) H, W ]
    if depth.ndim < 3:
        depth = depth[np.newaxis, :, :]

    # normalize to [0, 1] and colorize
    cm = matplotlib.colormaps[cmap]
    depth = ((depth - min_depth) / (max_depth - min_depth)).clip(0, 1)
    img_colored_np = cm(depth, bytes=False)[:, :, :, 0:3]  # values in [0, 1]
    img_colored_np = np.rollaxis(img_colored_np, 3, 1)  # [B, H, W, 3] -> [B, 3, H, W]

    if valid_mask is not None:
        if isinstance(valid_mask, torch.Tensor):
            valid_mask = valid_mask.detach().cpu().numpy()
        valid_mask = valid_mask.squeeze().astype(bool)  # [H, W] or [B, H, W]
        if valid_mask.ndim < 3:
            valid_mask = valid_mask[np.newaxis, np.newaxis, :, :]
        else:
            valid_mask = valid_mask[:, np.newaxis, :, :]
        valid_mask = np.repeat(valid_mask, 3, axis=1)
        img_colored_np[~valid_mask] = 0  # zero out invalid pixels

    if isinstance(depth_map, torch.Tensor):
        img_colored = torch.from_numpy(img_colored_np).float()
    else:
        img_colored = img_colored_np

    return img_colored
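
# Illustrative usage sketch (not part of the original pipeline): colorize a
# synthetic depth map and convert the result to an HWC uint8 image. The value
# range and array sizes below are hypothetical.
#
#   depth = np.random.uniform(1.0, 10.0, size=(480, 640))
#   colored = colorize_depth_maps(depth, min_depth=1.0, max_depth=10.0)  # [1, 3, H, W]
#   vis = (colored[0].transpose(1, 2, 0) * 255).astype(np.uint8)         # [H, W, 3]
#   # e.g. save with PIL: Image.fromarray(vis).save("depth_vis.png")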


def scale_depth_to_model(depth, camera_type='ortho'):
    """
    Scale raw depth from the original sensor range to the model's working
    range [target_min, target_max], then normalize to [0, 1] by dividing by
    the uint16 maximum (65535). Zero pixels are treated as invalid and stay 0.
    """
    assert camera_type == 'ortho' or camera_type == 'persp'
    h, w = depth.shape  # numpy arrays are (rows, cols)

    # The original depth range depends on the camera model.
    if camera_type == 'ortho':
        original_min, original_max = 9000, 17000
    else:
        original_min, original_max = 4000, 13000
    target_min, target_max = 2000, 62000

    mask = depth != 0  # zero depth marks invalid pixels

    # Normalize valid depth to [0, 1] within the original range
    depth_normalized = np.zeros([h, w])
    depth_normalized[mask] = (depth[mask] - original_min) / (original_max - original_min)

    # Map to the target range [2000, 62000]
    scaled_depth = np.zeros([h, w])
    scaled_depth[mask] = depth_normalized[mask] * (target_max - target_min) + target_min

    # Values beyond the target range are treated as invalid
    scaled_depth[scaled_depth > target_max] = 0
    scaled_depth = scaled_depth / 65535.  # normalize to [0, 1] (uint16 max)

    return scaled_depth

def rescale_depth_to_world(scaled_depth, camera_type='ortho'):
    """
    Invert scale_depth_to_model: map depth from the normalized [0, 1] encoding
    back through the target range [target_min, target_max] to the original
    sensor range. Zero pixels are treated as invalid and stay 0.
    """
    assert camera_type == 'ortho' or camera_type == 'persp'
    scaled_depth = scaled_depth * 65535.  # undo the [0, 1] normalization
    h, w = scaled_depth.shape  # numpy arrays are (rows, cols)

    # The original depth range depends on the camera model.
    if camera_type == 'ortho':
        original_min, original_max = 9000, 17000
    else:
        original_min, original_max = 4000, 13000
    target_min, target_max = 2000, 62000

    mask = scaled_depth != 0  # zero depth marks invalid pixels

    # Normalize valid depth to [0, 1] within the target range
    rescaled_depth_norm = np.zeros([h, w])
    rescaled_depth_norm[mask] = (scaled_depth[mask] - target_min) / (target_max - target_min)

    # Map back to the original range
    rescaled_depth = np.zeros([h, w])
    rescaled_depth[mask] = rescaled_depth_norm[mask] * (original_max - original_min) + original_min

    return rescaled_depth
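

if __name__ == "__main__":
    # Illustrative self-check (not part of the original pipeline): scaling a
    # synthetic depth map to the model range and back should approximately
    # recover the input on valid pixels, up to floating-point precision.
    rng = np.random.default_rng(0)
    depth_raw = rng.uniform(9000, 17000, size=(64, 64))
    depth_raw[:8, :8] = 0  # simulate invalid pixels

    scaled = scale_depth_to_model(depth_raw, camera_type='ortho')
    recovered = rescale_depth_to_world(scaled, camera_type='ortho')

    valid = depth_raw != 0
    max_err = np.max(np.abs(recovered[valid] - depth_raw[valid]))
    print(f"max round-trip error on valid pixels: {max_err:.6f}")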