Skip to content

Commit fddb763

Browse files
added e2e tests for NORMAL and LOG_NORMAL
Signed-off-by: Shashank Mittal <[email protected]>. Also fixes the sigma calculation and parses the new arguments in mnist.py.
1 parent fb60028 commit fddb763

File tree

3 files changed

+47
-10
lines changed

3 files changed

+47
-10
lines changed

examples/v1beta1/hp-tuning/hyperopt-distribution.yaml

+21
Original file line numberDiff line numberDiff line change
@@ -28,6 +28,19 @@ spec:
2828
min: "0.5"
2929
max: "0.9"
3030
distribution: "logUniform"
31+
- name: weight_decay
32+
parameterType: double
33+
feasibleSpace:
34+
min: "0.01"
35+
max: "0.05"
36+
distribution: "normal"
37+
- name: dropout_rate
38+
parameterType: double
39+
feasibleSpace:
40+
min: "0.1"
41+
max: "0.5"
42+
step: "0.001"
43+
distribution: "logNormal"
3144
trialTemplate:
3245
primaryContainerName: training-container
3346
trialParameters:
@@ -37,6 +50,12 @@ spec:
3750
- name: momentum
3851
description: Momentum for the training model
3952
reference: momentum
53+
- name: weightDecay
54+
description: Weight decay for the training model
55+
reference: weight_decay
56+
- name: dropoutRate
57+
description: Dropout rate for the training model
58+
reference: dropout_rate
4059
trialSpec:
4160
apiVersion: batch/v1
4261
kind: Job
@@ -53,6 +72,8 @@ spec:
5372
- "--batch-size=16"
5473
- "--lr=${trialParameters.learningRate}"
5574
- "--momentum=${trialParameters.momentum}"
75+
- "--weight-decay=${trialParameters.weightDecay}"
76+
- "--dropout-rate=${trialParameters.dropoutRate}"
5677
resources:
5778
limits:
5879
memory: "1Gi"

examples/v1beta1/trial-images/pytorch-mnist/mnist.py

+14
Original file line numberDiff line numberDiff line change
@@ -150,6 +150,20 @@ def main():
150150
metavar="M",
151151
help="SGD momentum (default: 0.5)",
152152
)
153+
parser.add_argument(
154+
"--weight-decay",
155+
type=float,
156+
default=0.01,
157+
metavar="WD",
158+
help="Weight decay for regularization (default: 0.01)",
159+
)
160+
parser.add_argument(
161+
"--dropout-rate",
162+
type=float,
163+
default=0.5,
164+
metavar="DR",
165+
help="Dropout rate for the model (default: 0.5)",
166+
)
153167
parser.add_argument(
154168
"--no-cuda", action="store_true", default=False, help="disables CUDA training"
155169
)

pkg/suggestion/v1beta1/hyperopt/base_service.py

+12-10
Original file line numberDiff line numberDiff line change
@@ -93,34 +93,36 @@ def create_hyperopt_domain(self):
9393
param.name, float(param.min), float(param.max)
9494
)
9595
elif param.distribution == api_pb2.NORMAL:
96-
sigma = 1
96+
mu = (float(param.min) + float(param.max)) / 2
97+
sigma = (float(param.max) - float(param.min)) / 6
9798
if param.step:
9899
hyperopt_search_space[param.name] = hyperopt.hp.qnormal(
99100
param.name,
100-
float((float(param.min) + float(param.max)) / 2),
101-
float(sigma),
101+
mu,
102+
sigma,
102103
float(param.step),
103104
)
104105
else:
105106
hyperopt_search_space[param.name] = hyperopt.hp.normal(
106107
param.name,
107-
float((float(param.min) + float(param.max)) / 2),
108-
float(sigma),
108+
mu,
109+
sigma,
109110
)
110111
elif param.distribution == api_pb2.LOG_NORMAL:
111-
sigma = 1
112+
mu = (float(param.min) + float(param.max)) / 2
113+
sigma = (float(param.max) - float(param.min)) / 6
112114
if param.step:
113115
hyperopt_search_space[param.name] = hyperopt.hp.qlognormal(
114116
param.name,
115-
float((float(param.min) + float(param.max)) / 2),
116-
float(sigma),
117+
mu,
118+
sigma,
117119
float(param.step),
118120
)
119121
else:
120122
hyperopt_search_space[param.name] = hyperopt.hp.lognormal(
121123
param.name,
122-
float((float(param.min) + float(param.max)) / 2),
123-
float(sigma),
124+
mu,
125+
sigma,
124126
)
125127
elif param.type == CATEGORICAL or param.type == DISCRETE:
126128
hyperopt_search_space[param.name] = hyperopt.hp.choice(

0 commit comments

Comments (0)