diff --git a/pkg/suggestion/v1alpha3/skopt/base_skopt_service.py b/pkg/suggestion/v1alpha3/skopt/base_skopt_service.py
index 64da07146e1..203bce5245e 100644
--- a/pkg/suggestion/v1alpha3/skopt/base_skopt_service.py
+++ b/pkg/suggestion/v1alpha3/skopt/base_skopt_service.py
@@ -5,6 +5,7 @@
 from pkg.suggestion.v1alpha3.internal.search_space import *
 from pkg.suggestion.v1alpha3.internal.trial import *
+import datetime
 
 logger = logging.getLogger("BaseSkoptService")
 
 
@@ -14,27 +15,29 @@ class BaseSkoptService(object):
     """
     Refer to https://github.com/scikit-optimize/scikit-optimize .
     """
 
-    def __init__(self, algorithm_name="skopt-bayesian-optimization",
+    def __init__(self,
                  base_estimator="GP",
                  n_initial_points=10,
                  acq_func="gp_hedge",
                  acq_optimizer="auto",
-                 random_state=None):
+                 random_state=None,
+                 search_space=None):
         self.base_estimator = base_estimator
         self.n_initial_points = n_initial_points
         self.acq_func = acq_func
         self.acq_optimizer = acq_optimizer
         self.random_state = random_state
-        self.algorithm_name = algorithm_name
-
-    def getSuggestions(self, search_space, trials, request_number):
-        """
-        Get the new suggested trials with skopt algorithm.
-        """
+        self.search_space = search_space
+        self.skopt_optimizer = None
+        self.create_optimizer()
+        self.succeeded_trials = 0
+        # List of recorded Trials names
+        self.recorded_trials_names = []
+    def create_optimizer(self):
         skopt_search_space = []
 
-        for param in search_space.params:
+        for param in self.search_space.params:
             if param.type == INTEGER:
                 skopt_search_space.append(skopt.space.Integer(
                     int(param.min), int(param.max), name=param.name))
@@ -45,48 +48,71 @@ def getSuggestions(self, search_space, trials, request_number):
                 skopt_search_space.append(
                     skopt.space.Categorical(param.list, name=param.name))
 
-        if self.algorithm_name != "bayesianoptimization":
-            raise Exception(
-                '"Failed to create the algortihm: {}'.format(self.algorithm_name))
-        skopt_optimizer = skopt.Optimizer(skopt_search_space,
-                                          base_estimator=self.base_estimator,
-                                          n_initial_points=self.n_initial_points,
-                                          acq_func=self.acq_func,
-                                          acq_optimizer=self.acq_optimizer,
-                                          random_state=self.random_state)
+        self.skopt_optimizer = skopt.Optimizer(
+            skopt_search_space,
+            base_estimator=self.base_estimator,
+            n_initial_points=self.n_initial_points,
+            acq_func=self.acq_func,
+            acq_optimizer=self.acq_optimizer,
+            random_state=self.random_state)
+    def getSuggestions(self, trials, request_number):
+        """
+        Get the new suggested trials with skopt algorithm.
+ """ + logger.info("-" * 100 + "\n") + logger.info("New GetSuggestions call\n") skopt_suggested = [] loss_for_skopt = [] - for trial in trials: - trial_assignment = [] - for param in search_space.params: - parameter_value = None - for assignment in trial.assignments: - if assignment.name == param.name: - parameter_value = assignment.value - break - if param.type == INTEGER: - trial_assignment.append(int(parameter_value)) - elif param.type == DOUBLE: - trial_assignment.append(float(parameter_value)) - else: - trial_assignment.append(parameter_value) - skopt_suggested.append(trial_assignment) - - loss_value = float(trial.target_metric.value) - if search_space.goal == MAX_GOAL: - loss_value = -1 * loss_value - loss_for_skopt.append(loss_value) - - if loss_for_skopt != [] and skopt_suggested != []: - skopt_optimizer.tell(skopt_suggested, loss_for_skopt) + if len(trials) > self.succeeded_trials or self.succeeded_trials == 0: + self.succeeded_trials = len(trials) + if self.succeeded_trials != 0: + logger.info("Succeeded Trials changed: {}\n".format(self.succeeded_trials)) + for trial in trials: + if trial.name not in self.recorded_trials_names: + self.recorded_trials_names.append(trial.name) + trial_assignment = [] + for param in self.search_space.params: + parameter_value = None + for assignment in trial.assignments: + if assignment.name == param.name: + parameter_value = assignment.value + break + if param.type == INTEGER: + trial_assignment.append(int(parameter_value)) + elif param.type == DOUBLE: + trial_assignment.append(float(parameter_value)) + else: + trial_assignment.append(parameter_value) + skopt_suggested.append(trial_assignment) + loss_value = float(trial.target_metric.value) + if self.search_space.goal == MAX_GOAL: + loss_value = -1 * loss_value + loss_for_skopt.append(loss_value) + + if loss_for_skopt != [] and skopt_suggested != []: + logger.info("Running Optimizer tell to record observation") + logger.info("Evaluated parameters: {}".format(skopt_suggested)) + logger.info("Objective values: {}\n".format(loss_for_skopt)) + t1 = datetime.datetime.now() + self.skopt_optimizer.tell(skopt_suggested, loss_for_skopt) + logger.info("Optimizer tell method takes {} seconds".format((datetime.datetime.now()-t1).seconds)) + logger.info("List of recorded Trials names: {}\n".format(self.recorded_trials_names)) + + else: + logger.info("Succeeded Trials didn't change: {}\n".format(self.succeeded_trials)) + + logger.info("Running Optimizer ask to query new parameters for Trials\n") return_trial_list = [] for i in range(request_number): - skopt_suggested = skopt_optimizer.ask() + skopt_suggested = self.skopt_optimizer.ask() + logger.info("New suggested parameters for Trial: {}".format(skopt_suggested)) return_trial_list.append( - BaseSkoptService.convert(search_space, skopt_suggested)) + BaseSkoptService.convert(self.search_space, skopt_suggested)) + + logger.info("GetSuggestions return {} new Trials\n\n".format(request_number)) return return_trial_list @staticmethod diff --git a/pkg/suggestion/v1alpha3/skopt_service.py b/pkg/suggestion/v1alpha3/skopt_service.py index 2fcbbb4f289..6094dd2b7d9 100644 --- a/pkg/suggestion/v1alpha3/skopt_service.py +++ b/pkg/suggestion/v1alpha3/skopt_service.py @@ -11,25 +11,35 @@ logger = logging.getLogger("SkoptService") -class SkoptService( - api_pb2_grpc.SuggestionServicer, HealthServicer): +class SkoptService(api_pb2_grpc.SuggestionServicer, HealthServicer): + + def __init__(self): + super(SkoptService, self).__init__() + self.base_service = None + 
+        self.is_first_run = True
+
     def GetSuggestions(self, request, context):
         """
         Main function to provide suggestion.
         """
-        name, config = OptimizerConfiguration.convertAlgorithmSpec(
+        algorithm_name, config = OptimizerConfiguration.convertAlgorithmSpec(
             request.experiment.spec.algorithm)
-        base_service = BaseSkoptService(
-            algorithm_name=name,
-            base_estimator=config.base_estimator,
-            n_initial_points=config.n_initial_points,
-            acq_func=config.acq_func,
-            acq_optimizer=config.acq_optimizer,
-            random_state=config.random_state)
-        search_space = HyperParameterSearchSpace.convert(request.experiment)
+        if algorithm_name != "bayesianoptimization":
+            raise Exception("Failed to create the algorithm: {}".format(algorithm_name))
+
+        if self.is_first_run:
+            search_space = HyperParameterSearchSpace.convert(request.experiment)
+            self.base_service = BaseSkoptService(
+                base_estimator=config.base_estimator,
+                n_initial_points=config.n_initial_points,
+                acq_func=config.acq_func,
+                acq_optimizer=config.acq_optimizer,
+                random_state=config.random_state,
+                search_space=search_space)
+            self.is_first_run = False
+
         trials = Trial.convert(request.trials)
-        new_trials = base_service.getSuggestions(
-            search_space, trials, request.request_number)
+        new_trials = self.base_service.getSuggestions(trials, request.request_number)
         return api_pb2.GetSuggestionsReply(
             parameter_assignments=Assignment.generate(new_trials)
         )
@@ -49,16 +59,16 @@ def __init__(self, base_estimator="GP",
 
     @staticmethod
     def convertAlgorithmSpec(algorithm_spec):
-        optmizer = OptimizerConfiguration()
+        optimizer = OptimizerConfiguration()
         for s in algorithm_spec.algorithm_setting:
             if s.name == "base_estimator":
-                optmizer.base_estimator = s.value
+                optimizer.base_estimator = s.value
             elif s.name == "n_initial_points":
-                optmizer.n_initial_points = int(s.value)
+                optimizer.n_initial_points = int(s.value)
             elif s.name == "acq_func":
-                optmizer.acq_func = s.value
+                optimizer.acq_func = s.value
             elif s.name == "acq_optimizer":
-                optmizer.acq_optimizer = s.value
+                optimizer.acq_optimizer = s.value
             elif s.name == "random_state":
-                optmizer.random_state = int(s.value)
-        return algorithm_spec.algorithm_name, optmizer
+                optimizer.random_state = int(s.value)
+        return algorithm_spec.algorithm_name, optimizer