import os

import networkx as nx
import torch
from torch.utils.data import Dataset
from tqdm import tqdm

# Mapping from atomic number to a contiguous index, so atom types can be used
# directly as embedding / one-hot indices. QM9 molecules only contain H, C, N, O, F.
atom_number_index_dict = {
    1: 0,  # H
    6: 1,  # C
    7: 2,  # N
    8: 3,  # O
    9: 4,  # F
}

# Inverse mapping: contiguous index -> atomic number.
atom_index_number_dict = {v: k for k, v in atom_number_index_dict.items()}

# Largest atomic number covered by the vocabulary (9, i.e. fluorine).
max_atom_number = max(atom_number_index_dict.keys())


def atom_number2index(atom_number):
    """Convert an atomic number to its contiguous vocabulary index."""
    return atom_number_index_dict[atom_number]


def atom_index2number(atom_index):
    """Convert a vocabulary index back to the corresponding atomic number."""
    return atom_index_number_dict[atom_index]

class PreprocessedQM9Dataset(Dataset):
    """Wraps a QM9 dataset of (graph, label) pairs and precomputes per-node features."""

    def __init__(self, dataset):
        self.dataset = dataset
        self.processed_data = []
        if dataset is not None:
            self._preprocess()

    def _preprocess(self):
        for i, (g, label) in enumerate(tqdm(self.dataset)):
            # Map each node's atomic number Z to its contiguous vocabulary index.
            g.ndata["Z_index"] = torch.tensor(
                [atom_number2index(z.item()) for z in g.ndata["Z"]]
            )
            # Record the sample index on every node; node data must be a tensor
            # with one entry per node, so broadcast the scalar with torch.full.
            g.ndata["sample_idx"] = torch.full((g.num_nodes(),), i, dtype=torch.long)
            self.processed_data.append((g, label))

    def __len__(self):
        return len(self.processed_data)

    def __getitem__(self, idx):
        return self.processed_data[idx]

    def save_dataset(self, save_dir):
        """Serialize the preprocessed (graph, label) pairs to disk."""
        if not os.path.exists(save_dir):
            os.makedirs(save_dir)
        torch.save(self.processed_data, os.path.join(save_dir, "QM9_dataset_processed.pt"))

    def load_dataset(self, dataset_path):
        """Load previously preprocessed (graph, label) pairs from disk."""
        self.processed_data = torch.load(dataset_path)
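

# Example usage: a minimal sketch, assuming the raw graphs come from DGL's
# built-in QM9Dataset (which exposes atomic numbers under g.ndata["Z"], as the
# preprocessing above expects). The label key "mu" and the "./data" save
# directory are illustrative choices, not taken from the original code.
if __name__ == "__main__":
    from dgl.data import QM9Dataset

    raw_dataset = QM9Dataset(label_keys=["mu"])  # dipole moment as the regression target
    dataset = PreprocessedQM9Dataset(raw_dataset)
    print(f"Preprocessed {len(dataset)} samples")
    dataset.save_dataset("./data")

    # Later runs can skip the preprocessing step by reloading the saved tensors.
    reloaded = PreprocessedQM9Dataset(None)
    reloaded.load_dataset("./data/QM9_dataset_processed.pt")
    graph, label = reloaded[0]
    print(graph.ndata["Z_index"], label)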