from datasets import Dataset, DatasetDict, Features, Image, Value
import os

# Prepare the AI4Mars (MSL) dataset as a Hugging Face DatasetDict from a local
# copy of the dataset directory (images under msl/images, labels under msl/labels).

def load_ai4mars_dataset(data_dir):
    """Build a DatasetDict with a 'train' split and one test split per label-agreement level."""
    # Feature schema: image and mask columns hold file paths that `datasets` decodes
    # lazily; the boolean flags record whether label/mask files exist for each image.
    features = Features({
        'image': Image(decode=True),
        'label_mask': Image(decode=True),
        'rover_mask': Image(decode=True),
        'range_mask': Image(decode=True),
        'has_masks': Value(dtype='bool'),
        'has_labels': Value(dtype='bool')
    })
    
    dataset_dict = {}
    train_data = {
        'image': [],
        'label_mask': [],
        'rover_mask': [],
        'range_mask': [],
        'has_masks': [],
        'has_labels': []
    }
    
    # Training data paths
    train_img_dir = os.path.join(data_dir, 'msl/images/edr')
    train_label_dir = os.path.join(data_dir, 'msl/labels/train')
    train_mxy_dir = os.path.join(data_dir, 'msl/images/mxy')
    train_range_dir = os.path.join(data_dir, 'msl/images/rng-30m')
    
    without_labels = 0
    without_masks = 0

    for img_name in os.listdir(train_img_dir):
        base_name = os.path.splitext(img_name)[0]
        img_path = os.path.join(train_img_dir, img_name)
        label_path = os.path.join(train_label_dir, f"{base_name}.png")
        # Rover (MXY) and range (RNG) masks reuse the image's base name with the
        # 'EDR' product token swapped; swap it in the file name rather than the full path.
        rover_path = os.path.join(train_mxy_dir, f"{base_name.replace('EDR', 'MXY')}.png")
        range_path = os.path.join(train_range_dir, f"{base_name.replace('EDR', 'RNG')}.png")

        # Always add the image
        train_data['image'].append(img_path)
        
        # Check if label files exist
        has_labels = os.path.exists(label_path)
        has_masks = os.path.exists(rover_path) and os.path.exists(range_path)
        if not has_labels:
            without_labels += 1
        if not has_masks:
            without_masks += 1
        train_data['has_labels'].append(has_labels)
        train_data['has_masks'].append(has_masks)
        
        # Add paths if they exist, None if they don't
        train_data['label_mask'].append(label_path if os.path.exists(label_path) else None)
        train_data['rover_mask'].append(rover_path if os.path.exists(rover_path) else None)
        train_data['range_mask'].append(range_path if os.path.exists(range_path) else None)
    

    print(f"Training data without labels: {without_labels}")
    print(f"Training data without masks: {without_masks}")
    dataset_dict['train'] = Dataset.from_dict(train_data, features=features)
    
    # Test splits: one per annotator-agreement level (masked-gold-min{1,2,3}-100agree).
    for agreement in ['min1', 'min2', 'min3']:
        test_data = {
            'image': [],
            'label_mask': [],
            'rover_mask': [],
            'range_mask': [],
            'has_masks': [],
            'has_labels': []
        }
        
        test_label_dir = os.path.join(data_dir, f'msl/labels/test/masked-gold-{agreement}-100agree')

        for label_name in os.listdir(test_label_dir):
            # Test label files are named "<image base name>_merged.png".
            base_name = os.path.splitext(label_name)[0].replace('_merged', '')
            img_path = os.path.join(data_dir, 'msl/images/edr', f"{base_name}.JPG")
            rover_path = os.path.join(train_mxy_dir, f"{base_name.replace('EDR', 'MXY')}.png")
            range_path = os.path.join(train_range_dir, f"{base_name.replace('EDR', 'RNG')}.png")

            if os.path.exists(img_path):
                # Only include mask paths that actually exist, mirroring the train split.
                has_masks = os.path.exists(rover_path) and os.path.exists(range_path)
                test_data['image'].append(img_path)
                test_data['label_mask'].append(os.path.join(test_label_dir, label_name))
                test_data['rover_mask'].append(rover_path if os.path.exists(rover_path) else None)
                test_data['range_mask'].append(range_path if os.path.exists(range_path) else None)
                test_data['has_labels'].append(True)
                test_data['has_masks'].append(has_masks)
        
        dataset_dict[f'test_{agreement}'] = Dataset.from_dict(test_data, features=features)
    
    return DatasetDict(dataset_dict)

dataset = load_ai4mars_dataset("./ai4mars-dataset-merged-0.1")
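
# A minimal usage sketch, not part of the preparation step itself: print split
# sizes and optionally persist the result. The output directory and Hub repo id
# below are illustrative assumptions; `save_to_disk` and `push_to_hub` are
# standard `datasets.DatasetDict` methods.
for split_name, split in dataset.items():
    print(f"{split_name}: {len(split)} examples")

dataset.save_to_disk("./ai4mars-hf")                  # assumed local output path
# dataset.push_to_hub("your-username/ai4mars-msl")    # hypothetical Hub repo id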