import os

from PIL import Image
from torchvision import transforms

from data.pix2pix_dataset import Pix2pixDataset, BaseDataset
from data.image_folder import make_dataset


class Summer2WinterYosemiteDataset(BaseDataset):
    """Unpaired summer (domain A) / winter (domain B) Yosemite dataset."""

    @staticmethod
    def modify_commandline_options(parser, is_train):
        parser = Pix2pixDataset.modify_commandline_options(parser, is_train)
        parser.set_defaults(preprocess_mode='resize_and_crop')
        parser.set_defaults(load_size=512)
        parser.set_defaults(crop_size=256)
        return parser

    def initialize(self, opt):
        self.opt = opt
        label_paths, image_paths, instance_paths = self.get_paths(opt)
        self.label_paths = label_paths[:opt.max_dataset_size]
        self.image_paths = image_paths[:opt.max_dataset_size]
        self.dataset_size = len(self.label_paths)
        print(f"Number of labels: {len(self.label_paths)}, "
              f"Number of images: {len(self.image_paths)}")
        if len(self.label_paths) != len(self.image_paths):
            raise ValueError("The number of labels and images do not match.")

    def get_paths(self, opt):
        # Domain A images live under <croot>/<phase>A, domain B under <sroot>/<phase>B.
        croot = opt.croot
        sroot = opt.sroot
        c_image_dir = os.path.join(croot, f'{opt.phase}A')
        s_image_dir = os.path.join(sroot, f'{opt.phase}B')
        c_image_paths = sorted(make_dataset(c_image_dir, recursive=True))
        s_image_paths = sorted(make_dataset(s_image_dir, recursive=True))
        return c_image_paths, s_image_paths, []

    def __getitem__(self, index):
        label_path = self.label_paths[index]
        image_path = self.image_paths[index]
        label = Image.open(label_path).convert('RGB')
        image = Image.open(image_path).convert('RGB')

        # Resize, random-crop, and normalize to [-1, 1]. The transform is applied
        # independently to each image, so the two random crops are not aligned;
        # the two domains are unpaired, so alignment is not required.
        transform = transforms.Compose([
            transforms.Resize((self.opt.load_size, self.opt.load_size)),
            transforms.RandomCrop(self.opt.crop_size),
            transforms.ToTensor(),
            transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
        ])

        # The domain-A tensor is returned under 'image', the domain-B tensor
        # under 'label', and 'cpath' carries the domain-B file path.
        return {
            'image': transform(label),
            'label': transform(image),
            'cpath': image_path,
        }

    def __len__(self):
        return self.dataset_size
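

# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original module). It assumes that
# BaseDataset takes no constructor arguments (as in SPADE-style code bases)
# and that the summer2winter_yosemite download lives under the hypothetical
# path ./datasets/summer2winter_yosemite, with the <root>/trainA and
# <root>/trainB layout the class above expects. Only the option attributes
# that this dataset actually reads are filled in; in the full project the
# dataset would normally be constructed through its data-loading entry point.
if __name__ == '__main__':
    from argparse import Namespace

    opt = Namespace(
        croot='./datasets/summer2winter_yosemite',  # assumed dataset location
        sroot='./datasets/summer2winter_yosemite',  # assumed dataset location
        phase='train',
        max_dataset_size=10,   # keep both domains the same (small) length
        load_size=512,
        crop_size=256,
    )

    dataset = Summer2WinterYosemiteDataset()
    dataset.initialize(opt)
    sample = dataset[0]
    print(sample['image'].shape, sample['label'].shape, sample['cpath'])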