HUANGYIFEI committed on
Commit 6f2df68 · verified · 1 Parent(s): b8b5cda

add run.py
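Adds a standalone training script: it pretrains the GMae masked graph autoencoder on a preprocessed QM9 dataset and, after every epoch, saves a model checkpoint plus per-graph node embeddings.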

Files changed (1)
  1. Graph/GraphMAE_MQ9/run.py +94 -0
Graph/GraphMAE_MQ9/run.py ADDED
@@ -0,0 +1,94 @@
+ import argparse
+ import os
+
+ import dgl
+ import torch
+ import torch.nn as nn
+ import torch.utils.data
+ from dgl.dataloading import GraphDataLoader
+ from torch import optim
+ from tqdm import tqdm
+
+ from QM9_dataset_class import PreprocessedQM9Dataset
+ from model import SimpleGnn, GMae
+
+
+ def train_epoch(epoch, graph_loader: torch.utils.data.DataLoader,
+                 model: nn.Module, device, optimizer: torch.optim.Optimizer):
+     print(f"epoch {epoch} started!")
+     model.train()
+     model.encoder.train()
+     model.decoder.train()
+     model.to(device)
+     loss_epoch = 0
+     for batch in tqdm(graph_loader):
+         optimizer.zero_grad()
+         batch_g, _ = batch
+         R = batch_g.ndata["R"].to(device)
+         Z_index = batch_g.ndata["Z_index"].to(device)
+         Z_emb = model.encode_atom_index(Z_index)
+         # Node features: 3-D coordinates concatenated with the atom-type embedding.
+         feat = torch.cat([R, Z_emb], dim=1)
+         batch_g = batch_g.to(device)
+         loss = model.mask_attr_prediction(batch_g, feat)
+         loss.backward()
+         optimizer.step()
+         loss_epoch += loss.item()
+     return loss_epoch
+
+
+ def train_loop(dataset_path, epochs, batch_size, device, save_dir):
+     device = torch.device(device)
+     dataset = PreprocessedQM9Dataset(None)
+     dataset.load_dataset(dataset_path)
+     print("Dataset loaded:", dataset_path, "Total samples:", len(dataset))
+     print("Initializing dataloader")
+     my_g_loader = GraphDataLoader(dataset, batch_size=batch_size, pin_memory=True, shuffle=False)
+     sage_enc = SimpleGnn(in_feats=7, hid_feats=4, out_feats=4)  # 7 = R_dim(3) + Z_embedding_dim(4)
+     sage_dec = SimpleGnn(in_feats=4, hid_feats=4, out_feats=7)
+     gmae = GMae(sage_enc, sage_dec, 7, 4, 7, replace_rate=0)
+     optimizer = optim.Adam(gmae.parameters(), lr=1e-3)
+     print("Start training", "epochs:", epochs, "batch_size:", batch_size)
+     for epoch in range(epochs):
+         loss_epoch = train_epoch(epoch, my_g_loader, gmae, device, optimizer)
+         formatted_loss_epoch = f"{loss_epoch:.3f}"
+         save_path = os.path.join(save_dir, f"epoch_{epoch}", f"gmae_{formatted_loss_epoch}.pt")
+         os.makedirs(os.path.dirname(save_path), exist_ok=True)
+         torch.save(gmae.state_dict(), save_path)
+         print(f"Epoch:{epoch}, loss:{loss_epoch}, model saved: {save_path}")
+         # After each epoch, embed the full dataset with the current encoder
+         # and save one .dgl file per graph.
+         with torch.no_grad():
+             embedded_graphs = []
+             print(f"Epoch:{epoch}, start embedding")
+             gmae.eval()
+             gmae.encoder.eval()
+             for batch in tqdm(my_g_loader):
+                 batch_g, _ = batch
+                 R = batch_g.ndata["R"].to(device)
+                 Z_index = batch_g.ndata["Z_index"].to(device)
+                 Z_emb = gmae.encode_atom_index(Z_index)
+                 feat = torch.cat([R, Z_emb], dim=1)
+                 batch_g = batch_g.to(device)
+                 batch_g.ndata["embedding"] = gmae.embed(batch_g, feat)
+                 embedded_graphs.extend(dgl.unbatch(batch_g))
+             for idx, embedded_graph in enumerate(embedded_graphs):
+                 embeddings_save_path = os.path.join(save_dir, f"epoch_{epoch}", f"embedding_{idx}.dgl")
+                 dgl.save_graphs(embeddings_save_path, [embedded_graph])
+             print(f"epoch:{epoch}, embeddings saved under {os.path.join(save_dir, f'epoch_{epoch}')}, total_graphs: {len(embedded_graphs)}")
+
+
+ def main():
+     parser = argparse.ArgumentParser(description="Train GraphMAE on the preprocessed QM9 dataset")
+     parser.add_argument('--dataset_path', type=str, default='dataset/QM9_dataset_processed.pt')
+     parser.add_argument('--batch_size', type=int, default=4)
+     parser.add_argument('--epochs', type=int, default=10, help='number of epochs')
+     parser.add_argument("--device", type=str, default='cuda:0')
+     parser.add_argument("--save_dir", type=str, default='./model')
+     args = parser.parse_args()
+     train_loop(args.dataset_path, args.epochs, args.batch_size, args.device, args.save_dir)
+
+
+ if __name__ == '__main__':
+     main()
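For reference, a minimal invocation with the script's defaults (assuming the preprocessed dataset produced by QM9_dataset_class exists at the default path):

    python run.py --dataset_path dataset/QM9_dataset_processed.pt --epochs 10 --batch_size 4 --device cuda:0 --save_dir ./model

Each epoch then writes model/epoch_{N}/gmae_{loss}.pt alongside one embedding_{idx}.dgl file per graph. A sketch of how a downstream script could read those embeddings back with dgl.load_graphs (hypothetical consumer code, not part of this commit; the path is an example for the final epoch of a 10-epoch run):

    import dgl

    # dgl.load_graphs returns (graph_list, label_dict); labels are unused here.
    graphs, _ = dgl.load_graphs("model/epoch_9/embedding_0.dgl")
    g = graphs[0]
    node_embeddings = g.ndata["embedding"]  # one row per atom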