logger.py
import os

import torch


class Logger(object):
    """Adapted from https://github.com/snap-stanford/ogb/"""

    def __init__(self, runs, info=None):
        self.info = info
        self.results = [[] for _ in range(runs)]

    def add_result(self, run, result):
        # Each result is a 4-tuple; the code below treats the columns as
        # (train_acc, valid_acc, test_acc, x), where x is whatever metric
        # model selection minimizes (presumably a validation loss).
        assert len(result) == 4
        assert 0 <= run < len(self.results)
        self.results[run].append(result)

    def print_statistics(self, run=None, mode='max_acc'):
        if run is not None:
            result = 100 * torch.tensor(self.results[run])
            # Select the epoch with the highest validation accuracy
            # ('max_acc'), or the lowest value of column 3 otherwise.
            argmax = result[:, 1].argmax().item()
            argmin = result[:, 3].argmin().item()
            ind = argmax if mode == 'max_acc' else argmin
            print(f'Run {run + 1:02d}:')
            print(f'Highest Train: {result[:, 0].max():.2f}')
            print(f'Highest Valid: {result[:, 1].max():.2f}')
            print(f'Highest Test: {result[:, 2].max():.2f}')
            print(f'Chosen epoch: {ind}')
            print(f'Final Train: {result[ind, 0]:.2f}')
            print(f'Final Test: {result[ind, 2]:.2f}')
            self.test = result[ind, 2]
        else:
            result = 100 * torch.tensor(self.results)
            best_results = []
            for r in result:
                # Best value of each metric over all epochs of this run.
                train1 = r[:, 0].max().item()
                test1 = r[:, 2].max().item()
                valid = r[:, 1].max().item()
                # Metrics at the model-selection epoch of this run.
                if mode == 'max_acc':
                    sel = r[:, 1].argmax()
                else:
                    sel = r[:, 3].argmin()
                train2 = r[sel, 0].item()
                test2 = r[sel, 2].item()
                best_results.append((train1, test1, valid, train2, test2))
            best_result = torch.tensor(best_results)
            print('All runs:')
            r = best_result[:, 0]
            print(f'Highest Train: {r.mean():.2f} ± {r.std():.2f}')
            r = best_result[:, 1]
            print(f'Highest Test: {r.mean():.2f} ± {r.std():.2f}')
            r = best_result[:, 2]
            print(f'Highest Valid: {r.mean():.2f} ± {r.std():.2f}')
            r = best_result[:, 3]
            print(f'  Final Train: {r.mean():.2f} ± {r.std():.2f}')
            r = best_result[:, 4]
            print(f'  Final Test: {r.mean():.2f} ± {r.std():.2f}')
            self.test = r.mean()
            return best_result[:, 4]

    def output(self, out_path, info):
        # Append a one-line summary with the last computed test accuracy.
        with open(out_path, 'a') as f:
            f.write(info)
            f.write(f'test acc:{self.test}\n')
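
# A minimal usage sketch (illustrative only): the two runs and the numbers
# below are hypothetical, and assume the column convention
# (train_acc, valid_acc, test_acc, valid_loss) described above.
#
#     logger = Logger(runs=2)
#     logger.add_result(0, (0.90, 0.80, 0.78, 0.52))  # run 0, epoch 0
#     logger.add_result(0, (0.95, 0.83, 0.81, 0.44))  # run 0, epoch 1
#     logger.add_result(1, (0.91, 0.79, 0.77, 0.50))  # run 1, epoch 0
#     logger.add_result(1, (0.96, 0.84, 0.82, 0.43))  # run 1, epoch 1
#     logger.print_statistics(run=0)        # per-run summary
#     results = logger.print_statistics()   # mean ± std over all runs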


def save_model(args, model, optimizer, run):
    # Checkpoint model and optimizer state for this run.
    os.makedirs(f'models/{args.dataset}', exist_ok=True)
    model_path = f'models/{args.dataset}/{args.method}_{run}_{args.beta}.pt'
    torch.save({'model_state_dict': model.state_dict(),
                'optimizer_state_dict': optimizer.state_dict()},
               model_path)


def load_model(args, model, optimizer, run):
    # Restore the checkpoint written by save_model for the same run.
    model_path = f'models/{args.dataset}/{args.method}_{run}_{args.beta}.pt'
    checkpoint = torch.load(model_path)
    model.load_state_dict(checkpoint['model_state_dict'])
    optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
    return model, optimizer
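
# A minimal checkpoint round-trip sketch (illustrative only): the args
# fields, model, and optimizer are hypothetical stand-ins; only `dataset`,
# `method`, and `beta` are read from args here.
#
#     from types import SimpleNamespace
#     args = SimpleNamespace(dataset='cora', method='gcn', beta=0.5)
#     model = torch.nn.Linear(16, 2)
#     optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
#     save_model(args, model, optimizer, run=0)
#     model, optimizer = load_model(args, model, optimizer, run=0)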


def save_result(args, results):
    # Append a LaTeX-formatted "mean $\pm$ std" summary line to a
    # per-method CSV file.
    os.makedirs(f'results/{args.dataset}', exist_ok=True)
    filename = f'results/{args.dataset}/{args.method}.csv'
    print(f"Saving results to {filename}")
    with open(filename, 'a+') as write_obj:
        write_obj.write(f"{args.method} {args.dropout} {args.lr} "
                        f"{results.mean():.2f} $\\pm$ {results.std():.2f} \n")