This repository has been archived by the owner on Sep 18, 2024. It is now read-only.

Update evaluate parameters from GMM #1195

Merged on Jun 25, 2019 (49 commits).

Commits (all by xuehui1991; the diff below shows changes from all commits):
20bf28b  update readme in ga_squad (Sep 20, 2018)
14d8f60  update readme (Sep 20, 2018)
d842121  fix typo (Sep 20, 2018)
0d67525  Update README.md (Sep 20, 2018)
b52854a  Update README.md (Sep 20, 2018)
374491d  Update README.md (Sep 20, 2018)
c805d2a  update readme (Sep 20, 2018)
c825d49  Merge branch 'update_ga_squad' of https://github.com/xuehui1991/nni i… (Sep 20, 2018)
6fdde2c  Merge branch 'master' of https://github.com/Microsoft/nni (Sep 25, 2018)
d41a8b0  Merge branch 'master' of https://github.com/Microsoft/nni (Sep 25, 2018)
cc765b8  Merge branch 'master' into update_ga_squad (Sep 26, 2018)
ef5eba8  Merge branch 'update_ga_squad' of https://github.com/xuehui1991/nni i… (Sep 26, 2018)
8ff4508  Merge branch 'xuehui1991-update_ga_squad' (Sep 26, 2018)
629222f  update tuner docs about Anneal (Dec 4, 2018)
ef74ffa  update (Dec 4, 2018)
22b3add  fix path (Dec 4, 2018)
c97fd11  update reference (Dec 4, 2018)
0afbffd  fix bug in config file (Dec 5, 2018)
7022e6e  Merge remote-tracking branch 'upstream/master' (Dec 14, 2018)
770677c  update nni_arch_overview.png (Dec 14, 2018)
e002331  update (Dec 14, 2018)
d066896  update (Dec 14, 2018)
096f69b  update (Dec 14, 2018)
47218a7  Merge remote-tracking branch 'upstream/master' (Dec 27, 2018)
5b84743  Merge remote-tracking branch 'upstream/master' (Jan 16, 2019)
4d989a6  update home page (Jan 16, 2019)
829ff65  Merge remote-tracking branch 'upstream/master' (Apr 24, 2019)
377ddad  update default value of metis tuner (Apr 24, 2019)
93d5d24  Merge remote-tracking branch 'upstream/master' (May 21, 2019)
4465706  fix broken link in CommunitySharings (May 22, 2019)
47308e1  Merge remote-tracking branch 'upstream/master' (May 28, 2019)
2550f5d  update docs about nested search space (May 28, 2019)
71a93e6  update docs (May 28, 2019)
89e13e4  rename cascding to nested (May 28, 2019)
2c9914d  fix broken link (May 28, 2019)
fb721ab  update (May 28, 2019)
593edb7  update issue link (May 28, 2019)
0d3e6b6  fix typo (May 28, 2019)
31309d0  Merge remote-tracking branch 'upstream/master' (May 28, 2019)
0dd7f42  Merge tag 'v0.8' (Jun 4, 2019)
90dfdde  Merge remote-tracking branch 'upstream/master' (Jun 17, 2019)
be72fa4  Merge remote-tracking branch 'upstream/master' (Jun 24, 2019)
216f1f2  update evaluate parameters from GMM (Jun 24, 2019)
d02fdb6  refine code (Jun 24, 2019)
3089e4e  Merge remote-tracking branch 'upstream/master' (Jun 25, 2019)
62a9354  fix optimized mode bug (Jun 25, 2019)
3e1d927  update import warning (Jun 25, 2019)
3606f14  update warning (Jun 25, 2019)
bd1ab20  update optimized mode (Jun 25, 2019)

Files changed:

2 changes: 1 addition & 1 deletion src/sdk/pynni/nni/bohb_advisor/bohb_advisor.py
@@ -106,7 +106,7 @@ def __init__(self, s, s_max, eta, max_budget, optimize_mode):
         self.s_max = s_max
         self.eta = eta
         self.max_budget = max_budget
-        self.optimize_mode = optimize_mode
+        self.optimize_mode = OptimizeMode(optimize_mode)
 
         self.n = math.ceil((s_max + 1) * eta**s / (s + 1) - _epsilon)
         self.r = max_budget / eta**s
2 changes: 1 addition & 1 deletion src/sdk/pynni/nni/hyperband_advisor/hyperband_advisor.py
@@ -144,7 +144,7 @@ def __init__(self, s, s_max, eta, R, optimize_mode):
         self.configs_perf = []  # [ {id: [seq, acc]}, {}, ... ]
         self.num_configs_to_run = []  # [ n, n, n, ... ]
         self.num_finished_configs = []  # [ n, n, n, ... ]
-        self.optimize_mode = optimize_mode
+        self.optimize_mode = OptimizeMode(optimize_mode)
         self.no_more_trial = False
 
     def is_completed(self):
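Both advisors previously kept optimize_mode as the raw string from the experiment config; constructing OptimizeMode(optimize_mode) validates the value up front and lets later comparisons use enum members instead of string matching (metis_tuner.py below receives the same change). A minimal sketch of the pattern, assuming NNI's OptimizeMode enum maps the 'minimize'/'maximize' strings:

from enum import Enum

class OptimizeMode(Enum):
    # Assumed to mirror NNI's OptimizeMode: the enum value is the string a user
    # writes in the config, so OptimizeMode(<string>) both parses and validates it.
    Minimize = 'minimize'
    Maximize = 'maximize'

mode = OptimizeMode('maximize')
print(mode is OptimizeMode.Maximize)   # True: later code compares enum members
# OptimizeMode('max') would raise ValueError immediately instead of failing later.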
9 changes: 5 additions & 4 deletions src/sdk/pynni/nni/metis_tuner/Regression_GMM/Selection.py
@@ -49,15 +49,16 @@ def selection_r(x_bounds,
                 num_starting_points=100,
                 minimize_constraints_fun=None):
     '''
-    Call selection
+    Select using different types.
     '''
-    minimize_starting_points = [lib_data.rand(x_bounds, x_types)\
-                                for i in range(0, num_starting_points)]
+    minimize_starting_points = clusteringmodel_gmm_good.sample(n_samples=num_starting_points)
 
     outputs = selection(x_bounds, x_types,
                         clusteringmodel_gmm_good,
                         clusteringmodel_gmm_bad,
-                        minimize_starting_points,
+                        minimize_starting_points[0],
                         minimize_constraints_fun)
 
     return outputs
 
 def selection(x_bounds,
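selection_r now draws its minimizer starting points from the fitted model of good configurations instead of uniform random draws via lib_data.rand. The [0] at the call site matters because scikit-learn's GaussianMixture.sample returns a (samples, component_labels) tuple, so only the sample array is forwarded. A small self-contained sketch of that behavior, assuming the clustering model is a scikit-learn GaussianMixture (which is what Regression_GMM.CreateModel appears to produce):

import numpy as np
from sklearn.mixture import GaussianMixture

rng = np.random.RandomState(0)
good_configs = rng.rand(30, 2)                    # stand-in for known good hyperparameter vectors
gmm_good = GaussianMixture(n_components=2, random_state=0).fit(good_configs)

samples, labels = gmm_good.sample(n_samples=100)  # sample() returns (points, component labels)
print(samples.shape)                              # (100, 2): candidate starting points for the minimizer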
64 changes: 40 additions & 24 deletions src/sdk/pynni/nni/metis_tuner/metis_tuner.py
@@ -20,15 +20,15 @@
 
 import copy
 import logging
-import numpy as np
 import os
 import random
 import statistics
 import sys
+import warnings
 from enum import Enum, unique
 from multiprocessing.dummy import Pool as ThreadPool
 
+import numpy as np
 
 import nni.metis_tuner.lib_constraint_summation as lib_constraint_summation
 import nni.metis_tuner.lib_data as lib_data
 import nni.metis_tuner.Regression_GMM.CreateModel as gmm_create_model
@@ -42,8 +42,6 @@
 
 logger = logging.getLogger("Metis_Tuner_AutoML")
-
-
 
 NONE_TYPE = ''
 CONSTRAINT_LOWERBOUND = None
 CONSTRAINT_UPPERBOUND = None
@@ -93,7 +91,7 @@ def __init__(self, optimize_mode="maximize", no_resampling=True, no_candidates=F
         self.space = None
         self.no_resampling = no_resampling
         self.no_candidates = no_candidates
-        self.optimize_mode = optimize_mode
+        self.optimize_mode = OptimizeMode(optimize_mode)
         self.key_order = []
         self.cold_start_num = cold_start_num
         self.selection_num_starting_points = selection_num_starting_points
@@ -254,6 +252,9 @@ def _selection(self, samples_x, samples_y_aggregation, samples_y,
                    threshold_samplessize_resampling=50, no_candidates=False,
                    minimize_starting_points=None, minimize_constraints_fun=None):
 
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore")
+
         next_candidate = None
         candidates = []
         samples_size_all = sum([len(i) for i in samples_y])
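For context on the block added above: warnings.catch_warnings() saves the interpreter's warning-filter state on entry and restores it on exit, so the simplefilter("ignore") only affects code that runs while the with block is active. A standalone sketch of that scoping:

import warnings

with warnings.catch_warnings():
    warnings.simplefilter("ignore")   # filter change is local to this block
    warnings.warn("suppressed")       # not emitted

warnings.warn("visible again")        # emitted normally once the old filters are restored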
@@ -271,13 +272,12 @@ def _selection(self, samples_x, samples_y_aggregation, samples_y,
                                            minimize_constraints_fun=minimize_constraints_fun)
         if not lm_current:
             return None
-
-        if no_candidates is False:
-            candidates.append({'hyperparameter': lm_current['hyperparameter'],
+        logger.info({'hyperparameter': lm_current['hyperparameter'],
                      'expected_mu': lm_current['expected_mu'],
                      'expected_sigma': lm_current['expected_sigma'],
                      'reason': "exploitation_gp"})
 
+        if no_candidates is False:
             # ===== STEP 2: Get recommended configurations for exploration =====
             results_exploration = gp_selection.selection(
                 "lc",
@@ -290,34 +290,48 @@ def _selection(self, samples_x, samples_y_aggregation, samples_y,
 
             if results_exploration is not None:
                 if _num_past_samples(results_exploration['hyperparameter'], samples_x, samples_y) == 0:
-                    candidates.append({'hyperparameter': results_exploration['hyperparameter'],
+                    temp_candidate = {'hyperparameter': results_exploration['hyperparameter'],
                                        'expected_mu': results_exploration['expected_mu'],
                                        'expected_sigma': results_exploration['expected_sigma'],
-                                       'reason': "exploration"})
+                                       'reason': "exploration"}
+                    candidates.append(temp_candidate)
+
                     logger.info("DEBUG: 1 exploration candidate selected\n")
+                    logger.info(temp_candidate)
             else:
                 logger.info("DEBUG: No suitable exploration candidates were")
 
             # ===== STEP 3: Get recommended configurations for exploitation =====
             if samples_size_all >= threshold_samplessize_exploitation:
-                print("Getting candidates for exploitation...\n")
+                logger.info("Getting candidates for exploitation...\n")
                 try:
                     gmm = gmm_create_model.create_model(samples_x, samples_y_aggregation)
-                    results_exploitation = gmm_selection.selection(
-                        x_bounds,
-                        x_types,
-                        gmm['clusteringmodel_good'],
-                        gmm['clusteringmodel_bad'],
-                        minimize_starting_points,
-                        minimize_constraints_fun=minimize_constraints_fun)
+
+                    if ("discrete_int" in x_types) or ("range_int" in x_types):
+                        results_exploitation = gmm_selection.selection(x_bounds, x_types,
+                                                                       gmm['clusteringmodel_good'],
+                                                                       gmm['clusteringmodel_bad'],
+                                                                       minimize_starting_points,
+                                                                       minimize_constraints_fun=minimize_constraints_fun)
+                    else:
+                        # If all parameters are of "range_continuous", let's use GMM to generate random starting points
+                        results_exploitation = gmm_selection.selection_r(x_bounds, x_types,
+                                                                         gmm['clusteringmodel_good'],
+                                                                         gmm['clusteringmodel_bad'],
+                                                                         num_starting_points=self.selection_num_starting_points,
+                                                                         minimize_constraints_fun=minimize_constraints_fun)
 
                     if results_exploitation is not None:
                         if _num_past_samples(results_exploitation['hyperparameter'], samples_x, samples_y) == 0:
-                            candidates.append({'hyperparameter': results_exploitation['hyperparameter'],\
-                                               'expected_mu': results_exploitation['expected_mu'],\
-                                               'expected_sigma': results_exploitation['expected_sigma'],\
-                                               'reason': "exploitation_gmm"})
+                            temp_expected_mu, temp_expected_sigma = gp_prediction.predict(results_exploitation['hyperparameter'], gp_model['model'])
+                            temp_candidate = {'hyperparameter': results_exploitation['hyperparameter'],
+                                              'expected_mu': temp_expected_mu,
+                                              'expected_sigma': temp_expected_sigma,
+                                              'reason': "exploitation_gmm"}
+                            candidates.append(temp_candidate)
+
                             logger.info("DEBUG: 1 exploitation_gmm candidate selected\n")
+                            logger.info(temp_candidate)
                     else:
                         logger.info("DEBUG: No suitable exploitation_gmm candidates were found\n")

Expand All @@ -338,11 +352,13 @@ def _selection(self, samples_x, samples_y_aggregation, samples_y,
if results_outliers is not None:
for results_outlier in results_outliers:
if _num_past_samples(samples_x[results_outlier['samples_idx']], samples_x, samples_y) < max_resampling_per_x:
candidates.append({'hyperparameter': samples_x[results_outlier['samples_idx']],\
temp_candidate = {'hyperparameter': samples_x[results_outlier['samples_idx']],\
'expected_mu': results_outlier['expected_mu'],\
'expected_sigma': results_outlier['expected_sigma'],\
'reason': "resampling"})
'reason': "resampling"}
candidates.append(temp_candidate)
logger.info("DEBUG: %d re-sampling candidates selected\n")
logger.info(temp_candidate)
else:
logger.info("DEBUG: No suitable resampling candidates were found\n")

Expand Down
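Taken together with the Selection.py change, the exploitation step now branches on the search-space types: if any parameter is integer-typed ("discrete_int" or "range_int"), the original selection() path with externally supplied starting points is kept, while an all-"range_continuous" space calls selection_r(), which samples its starting points from the model of good configurations, presumably because GMM samples are continuous and need no snapping to valid integer values. A hypothetical, self-contained helper illustrating that dispatch (the function name and the bounds format are invented for the example, not the tuner's API):

import numpy as np
from sklearn.mixture import GaussianMixture

def propose_starting_points(gmm_good, x_bounds, x_types, num_points, rng):
    # Integer-typed spaces: uniform draws that stay on valid values.
    if ("discrete_int" in x_types) or ("range_int" in x_types):
        return [[rng.randint(low, high + 1) if t in ("discrete_int", "range_int")
                 else rng.uniform(low, high)
                 for (low, high), t in zip(x_bounds, x_types)]
                for _ in range(num_points)]
    # All-continuous space: let the model of good configurations propose the points.
    return gmm_good.sample(n_samples=num_points)[0]

rng = np.random.RandomState(0)
gmm = GaussianMixture(n_components=2, random_state=0).fit(rng.rand(30, 2))
print(propose_starting_points(gmm, [(0.0, 1.0), (1, 8)], ["range_continuous", "range_int"], 3, rng))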