# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""
Example showing how to implement a new Scheduler.
"""
import logging
from pathlib import Path
from typing import Optional, List
import numpy as np
from syne_tune.backend import LocalBackend
from syne_tune.backend.trial_status import Trial
from syne_tune.optimizer.scheduler import (
    TrialScheduler,
    SchedulerDecision,
    TrialSuggestion,
)
from syne_tune import Tuner, StoppingCriterion
from syne_tune.config_space import randint


class SimpleScheduler(TrialScheduler):
    def __init__(self, config_space: dict, metric: str, mode: Optional[str] = None):
        super(SimpleScheduler, self).__init__(config_space=config_space)
        self.metric = metric
        self.mode = mode if mode is not None else "min"
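        # All metric values reported so far, kept sorted so that the rank
        # of a new result can be computed cheaply.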
        self.sorted_results = []

    def _suggest(self, trial_id: int) -> Optional[TrialSuggestion]:
        # Called whenever a worker slot is free to run a trial; here we
        # simply draw a random candidate.
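        # Constant entries without a ``sample`` method (such as ``steps``
        # in the configuration space below) are passed through unchanged.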
        config = {
            k: v.sample() if hasattr(v, "sample") else v
            for k, v in self.config_space.items()
        }
        return TrialSuggestion.start_suggestion(config)

    def on_trial_result(self, trial: Trial, result: dict) -> str:
        # Given a new result, decide whether the trial should stop or continue.
        # Here we implement a naive strategy: stop the trial if its result is
        # worse than 80% of all results seen so far. This is naive, since it
        # does not account for the fact that a trial improves with more steps.
        new_metric = result[self.metric]
        # Insert the new metric value into the sorted list of results
        index = np.searchsorted(self.sorted_results, new_metric)
        self.sorted_results = np.insert(self.sorted_results, index, new_metric)
        normalized_rank = index / float(len(self.sorted_results))
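        # ``index`` counts results strictly better (smaller, for mode "min")
        # than the new one, so a normalized rank close to 1 means the trial
        # currently ranks among the worst.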
        if self.mode == "max":
            normalized_rank = 1 - normalized_rank
        if normalized_rank < 0.8:
            return SchedulerDecision.CONTINUE
        else:
            logging.info(
                f"new result {new_metric} ranks at {normalized_rank * 100:.0f}%, "
                f"stopping the trial as it does not rank in the top 80%"
            )
            return SchedulerDecision.STOP

    def metric_names(self) -> List[str]:
        return [self.metric]
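
    def metric_mode(self) -> str:
        # Not needed to run this example; added on the assumption that
        # callers may query the scheduler for whether the metric is being
        # minimized or maximized, as other Syne Tune schedulers report.
        return self.mode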


if __name__ == "__main__":
    logging.getLogger().setLevel(logging.DEBUG)

    random_seed = 31415927
    max_steps = 100
    n_workers = 4
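
    # Search space: ``steps`` is passed to the training script as a constant,
    # while ``width`` and ``height`` are the hyperparameters being tuned.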
    config_space = {
        "steps": max_steps,
        "width": randint(0, 20),
        "height": randint(-100, 100),
    }
    entry_point = str(
        Path(__file__).parent
        / "training_scripts"
        / "height_example"
        / "train_height.py"
    )
metric = "mean_loss"
# Local back-end
trial_backend = LocalBackend(entry_point=entry_point)
np.random.seed(random_seed)
scheduler = SimpleScheduler(config_space=config_space, metric=metric)
stop_criterion = StoppingCriterion(max_wallclock_time=30)
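
    # The Tuner drives the optimization loop: it asks the scheduler for
    # configurations, runs them on the backend with ``n_workers`` parallel
    # workers, and feeds results back until the stopping criterion is met.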
    tuner = Tuner(
        trial_backend=trial_backend,
        scheduler=scheduler,
        stop_criterion=stop_criterion,
        n_workers=n_workers,
    )

    tuner.run()
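
    # After the run, results can be inspected; by default Syne Tune stores
    # them under ``~/syne-tune/<tuner name>/``, and they can be loaded with
    # ``syne_tune.experiments.load_experiment``, for example.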