-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathcalvin.py
70 lines (62 loc) · 1.8 KB
/
calvin.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
import torch
import torch.nn as nn
import torch.optim as optim
from alias_utils.data import house
from alias_utils.model import RunnerBuilder as Builder
from alias_utils.loss import MSELoss, MNormLoss
from alias_utils.metrics import (
Loss as LossMetric,
# RSquared as RSquaredMetric,
DesignMatNorm as MNormMetric,
)
from matplotlib import pyplot as plt
def _build_wrapper(loss, loss_metric_name, mnorm_metric_name):
    """Build a 3-layer linear model wrapper with the given loss function.

    The two experiments below differ only in the loss object and the
    display names of their metrics, so that variation is parameterized
    here instead of duplicating the whole builder chain.

    Args:
        loss: loss instance passed to ``Builder.loss`` (e.g. ``MSELoss()``).
        loss_metric_name: display name for the ``LossMetric``.
        mnorm_metric_name: display name for the ``MNormMetric``.

    Returns:
        The trained-model wrapper produced by ``Builder.build()``.
    """
    return (
        Builder()
        .name("Basic 3-Layer")
        .loss(loss)
        .optimizer(optim.Adam)
        .steps(
            # 13 input features -> 8 -> 8 -> 1 output (regression target).
            nn.Linear(13, 8),
            nn.Linear(8, 8),
            nn.Linear(8, 1),
        )
        .with_metric(LossMetric(name=loss_metric_name))
        .with_metric(MNormMetric(name=mnorm_metric_name))
        .build()
    )


if __name__ == "__main__":
    # Read in the housing data set.
    data = house.data()

    # Baseline: plain MSE loss. Re-seed so both models start from
    # identical initial weights and the comparison is fair.
    torch.manual_seed(13)
    wrapper = _build_wrapper(MSELoss(), "Standard Loss", "Standard MNorm")
    wrapper.train(data, n_epochs=1000)

    # Comparison model: MSE augmented with a design-matrix-norm penalty.
    torch.manual_seed(13)
    m_wrapper = _build_wrapper(MNormLoss(alpha=1e4), "MNorm Loss", "MNorm MNorm")
    m_wrapper.train(data, n_epochs=1000)

    # Overlay both training histories on a single log-scale plot.
    # (Per-model side-by-side plots are also available via
    # wrapper.plot_two(LossMetric, MNormMetric, log=True).)
    plt.title("Comparison w and w/o MNorm")
    wrapper.plot(log=True, normalize=False)
    m_wrapper.plot(log=True, normalize=False)
    plt.show()
    print("Success")