-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathparameter_tuning.py
136 lines (93 loc) · 3.48 KB
/
parameter_tuning.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
from solver import *
import matplotlib.pyplot as plt
class HyperParameterTuning:
    """Grid-search hyper-parameters for a LeastSquare solver.

    For "GD" only the learning rate is swept; for "momentum" and
    "Nesterov" a (learning_rate, beta) grid is swept.  Results are kept
    in ``self.result`` as ``{key-string: error-history}``, the five best
    runs (lowest final error) are plotted, and the winning parameters are
    parsed back into ``self.parameter``.
    """

    def __init__(self, A, b, optimizer="GD"):
        self.A = A
        self.b = b
        # Heuristic learning-rate bounds derived from the row count of A.
        # NOTE(review): presumably tied to the Lipschitz constant of the
        # least-squares objective — confirm against solver.py.
        self.lr_upper = 1 / A.shape[0]
        self.lr_downer = 1 / (A.shape[0] * 100)
        self.result = {}  # key string -> error history per run
        self.optimizer = optimizer
        if self.optimizer == "GD":
            self.learning_rate = np.linspace(self.lr_downer, self.lr_upper)
        elif self.optimizer in ("momentum", "Nesterov"):
            # Both momentum variants sweep the same (lr, beta) grid.
            self.learning_rate = np.linspace(self.lr_downer, self.lr_upper)
            self.beta = np.linspace(0, 1)
        # Any other optimizer name is rejected later, in run().
        self.parameter = list()  # best parameters as floats, filled by unpacking_key()

    def run(self):
        """Run the grid search, plot the best runs, extract best parameters.

        Raises:
            NotImplementedError: if ``self.optimizer`` is not one of
                "GD", "momentum", "Nesterov".
        """
        if self.optimizer == "GD":
            self.tune_single_parameter()
        elif self.optimizer in ("momentum", "Nesterov"):
            self.tune_double_parameter()
        else:
            # BUG FIX: the original *returned* the exception object
            # instead of raising it, so callers never saw the error.
            raise NotImplementedError(self.optimizer)
        self.visualize()
        self.unpacking_key()

    def tune_single_parameter(self):
        """Sweep learning rates only (GD); record each error history."""
        for learning_rate in self.learning_rate:
            key = f"learning_rate: {learning_rate}"
            lstsq = LeastSquare(self.A, self.b, learning_rate,
                                optimize_method=self.optimizer)
            lstsq.solve()
            self.result[key] = lstsq.error

    def tune_double_parameter(self):
        """Sweep the (learning_rate, beta) grid (momentum/Nesterov)."""
        for learning_rate in self.learning_rate:
            for beta in self.beta:
                key = f"learning_rate: {learning_rate} beta: {beta}"
                lstsq = LeastSquare(self.A, self.b, learning_rate,
                                    optimize_method=self.optimizer)
                lstsq.shared.beta = beta  # beta lives on the solver's shared state
                lstsq.solve()
                self.result[key] = lstsq.error

    def visualize(self):
        """Plot up to five runs with the lowest final error; print the best key."""
        self.keys = list()
        self.error_list = list()
        # Rank runs by their final error value, ascending.
        for key, _ in sorted(self.result.items(), key=lambda kv: kv[1][-1]):
            if len(self.keys) >= 5:
                break
            self.keys.append(key)
            self.error_list.append(self.result[key])
        if not self.keys:
            return  # nothing tuned yet; avoid IndexError below
        plt.figure(figsize=(15, 3))
        # BUG FIX: iterate over what was actually collected (may be < 5
        # runs) instead of a hard-coded range(5).
        for i, errors in enumerate(self.error_list):
            plt.subplot(1, 5, i + 1)
            plt.plot(errors)
            plt.title(f"Best-{i+1}")
            # BUG FIX: loglog(basex=..., basey=...) was removed in
            # matplotlib >= 3.3; set the scales explicitly instead.
            plt.xscale("log", base=10)
            plt.yscale("log", base=10)
            plt.xlim(1, 10)
            plt.ylim(1, 1000)
        plt.show()
        print(self.keys[0])

    def cut_nan_in_single_parameter(self):
        """Drop single-parameter runs whose final error diverged to NaN."""
        for learning_rate in self.learning_rate:
            key = f"learning_rate: {learning_rate}"
            value = self.result[key]
            # NaN is the only value unequal to itself, so this keeps
            # finite results and deletes diverged ones.
            if value[-1] != value[-1]:
                del self.result[key]

    def cut_nan_in_double_parameter(self):
        """Drop (lr, beta) runs whose final error diverged to NaN."""
        for learning_rate in self.learning_rate:
            for beta in self.beta:
                key = f"learning_rate: {learning_rate} beta: {beta}"
                value = self.result[key]
                if value[-1] != value[-1]:  # NaN check via self-inequality
                    del self.result[key]

    def unpacking_key(self):
        """Parse the best key string back into floats in ``self.parameter``.

        Keys look like ``"learning_rate: 0.01"`` or
        ``"learning_rate: 0.01 beta: 0.5"``; label tokens fail float()
        and are skipped, numeric tokens are collected in order.
        """
        if not self.keys:
            return  # visualize() found no runs; nothing to unpack
        best_key = self.keys[0]
        for string in best_key.split():
            try:
                self.parameter.append(float(string))
            except ValueError:
                # Label tokens such as "learning_rate:" / "beta:" are expected;
                # a bare except here would have hidden real bugs.
                pass