train_main.py

import argparse
import os

import torch as th
import torch.nn as nn
from dgl import save_graphs
from dgl.data import (
    BACommunityDataset,
    BAShapeDataset,
    TreeCycleDataset,
    TreeGridDataset,
)

from models import Model


def main(args):
    # Load the synthetic benchmark dataset selected on the command line.
    if args.dataset == "BAShape":
        dataset = BAShapeDataset(seed=0)
    elif args.dataset == "BACommunity":
        dataset = BACommunityDataset(seed=0)
    elif args.dataset == "TreeCycle":
        dataset = TreeCycleDataset(seed=0)
    elif args.dataset == "TreeGrid":
        dataset = TreeGridDataset(seed=0)

    # Each dataset contains a single graph with node features and labels.
    graph = dataset[0]
    labels = graph.ndata["label"]
    n_feats = graph.ndata["feat"]
    num_classes = dataset.num_classes

    model = Model(n_feats.shape[-1], num_classes)
    loss_fn = nn.CrossEntropyLoss()
    optim = th.optim.Adam(model.parameters(), lr=0.001)

    for epoch in range(500):
        model.train()
        # For demo purposes, we train the model on all datapoints.
        # In practice, you should train only on the training datapoints;
        # see the masked-loss sketch below.
        logits = model(graph, n_feats)
        loss = loss_fn(logits, labels)
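        # A minimal sketch of masked training, assuming the graph exposes a
        # boolean "train_mask" in ndata. These synthetic DGL datasets may not
        # provide such a mask, in which case you would need to build one
        # yourself (e.g. a random node split):
        #
        #     train_mask = graph.ndata["train_mask"]
        #     loss = loss_fn(logits[train_mask], labels[train_mask])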
        acc = th.sum(logits.argmax(dim=1) == labels).item() / len(labels)

        optim.zero_grad()
        loss.backward()
        optim.step()
        print(f"In Epoch: {epoch}; Acc: {acc}; Loss: {loss.item()}")

    # Save the trained weights next to the script so they can be reloaded later.
    model_state_dict = model.state_dict()
    model_path = os.path.join("./", f"model_{args.dataset}.pth")
    th.save(model_state_dict, model_path)
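
# A minimal sketch of how the saved checkpoint could be reloaded later for
# evaluation or explanation, assuming the same Model constructor arguments
# (feature size and number of classes) are available:
#
#     model = Model(n_feats.shape[-1], num_classes)
#     model.load_state_dict(th.load(model_path))
#     model.eval()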


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Dummy model training")
    parser.add_argument(
        "--dataset",
        type=str,
        default="BAShape",
        choices=["BAShape", "BACommunity", "TreeCycle", "TreeGrid"],
    )
    args = parser.parse_args()
    print(args)
    main(args)