diff --git a/pylintrc b/pylintrc
index 673d8e058c..57ec53011e 100644
--- a/pylintrc
+++ b/pylintrc
@@ -1,30 +1,18 @@
 # Usage:
-# pylint --rcfile=PATH_TO_THIS_FILE PACKAGE_NAME
+# python3 -m pylint --rcfile=PATH_TO_THIS_FILE PACKAGE_NAME
 # or
-# pylint --rcfile=PATH_TO_THIS_FILE SOURCE_FILE.py
+# python3 -m pylint --rcfile=PATH_TO_THIS_FILE SOURCE_FILE.py
 
 [SETTINGS]
 
 max-line-length=140
 
-max-args=5
+max-args=8
 max-locals=15
 max-statements=50
-max-attributes=7
+max-attributes=15
 
 const-naming-style=any
 
-disable=all
-
-enable=F,
-       E,
-       unreachable,
-       duplicate-key,
-       unnecessary-semicolon,
-       global-variable-not-assigned,
-       binary-op-exception,
-       bad-format-string,
-       anomalous-backslash-in-string,
-       bad-open-mode
-
-extension-pkg-whitelist=numpy
\ No newline at end of file
+disable=duplicate-code,
+        super-init-not-called
\ No newline at end of file
diff --git a/src/nni_manager/rest_server/restValidationSchemas.ts b/src/nni_manager/rest_server/restValidationSchemas.ts
index a95c81765c..32e8ef5215 100644
--- a/src/nni_manager/rest_server/restValidationSchemas.ts
+++ b/src/nni_manager/rest_server/restValidationSchemas.ts
@@ -61,7 +61,7 @@ export namespace ValidationSchemas {
             maxExecDuration: joi.number().min(0).required(),
             multiPhase: joi.boolean(),
             tuner: joi.object({
-                builtinTunerName: joi.string().valid('TPE', 'Random', 'Anneal', 'Evolution', 'SMAC', 'BatchTuner'),
+                builtinTunerName: joi.string().valid('TPE', 'Random', 'Anneal', 'Evolution', 'SMAC', 'BatchTuner', 'GridSearch'),
                 codeDir: joi.string(),
                 classFileName: joi.string(),
                 className: joi.string(),
diff --git a/src/sdk/pynni/nni/README.md b/src/sdk/pynni/nni/README.md
index fc3dd20a91..259edf2f77 100644
--- a/src/sdk/pynni/nni/README.md
+++ b/src/sdk/pynni/nni/README.md
@@ -6,6 +6,7 @@ For now, NNI has supported the following tuner algorithms. Note that NNI install
  - Random Search
  - Anneal
  - Naive Evolution
+ - Grid Search
  - SMAC (to install through `nnictl`)
  - ENAS (ongoing)
  - Batch (ongoing)
@@ -46,6 +47,15 @@ Note that SMAC only supports a subset of the types in [search space spec](../../
 
 Batch allows users to simply provide several configurations (i.e., choices of hyper-parameters) for their trial code. After finishing all the configurations, the experiment is done.
 
+**Grid Search**
+
+Grid Search performs an exhaustive search through a manually specified subset of the hyperparameter space defined in the search space file.
+
+Note that the only acceptable types of search space are 'quniform', 'qloguniform' and 'choice':
+
+* Type 'choice' will select one of the options. Note that it can also be nested.
+* Type 'quniform' will receive three values [low, high, q], where [low, high] specifies a range and 'q' specifies the number of values that will be sampled evenly. The first sampled value is 'low', each following value is (high-low)/(q-1) larger than the one before it, and the last sampled value is 'high'. Note that q should be at least 2.
+* Type 'qloguniform' behaves like 'quniform' except that it first changes the range to [log10(low), log10(high)], samples evenly in that space, and then converts the sampled values back.
 
 ## 2. How to use the tuner algorithm in NNI?
 
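As a side note for reviewers, the 'quniform'/'qloguniform' grid rules documented in the README hunk above can be reproduced with a few lines of standalone Python. This sketch is illustrative only and not part of the patch; the helper names `quniform_grid` and `qloguniform_grid` are made up here.

```python
# Standalone illustration of the grid rules described in the README above (not part of the patch).
import numpy as np

def quniform_grid(low, high, q):
    """Return q evenly spaced values from low to high, inclusive of both endpoints."""
    assert q >= 2, "q must be at least 2"
    step = (high - low) / (q - 1)
    return [float(low + step * i) for i in range(q)]

def qloguniform_grid(low, high, q):
    """Same rule as quniform_grid, but spaced evenly in log10 space."""
    return [float(v) for v in np.power(10, quniform_grid(np.log10(low), np.log10(high), q))]

print(quniform_grid(0.0, 1.0, 5))       # [0.0, 0.25, 0.5, 0.75, 1.0]
print(qloguniform_grid(0.001, 0.1, 3))  # approximately [0.001, 0.01, 0.1]
```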
diff --git a/src/sdk/pynni/nni/constants.py b/src/sdk/pynni/nni/constants.py
index 0e4fff8663..5eb27b762d 100644
--- a/src/sdk/pynni/nni/constants.py
+++ b/src/sdk/pynni/nni/constants.py
@@ -25,7 +25,8 @@
     'Evolution': 'nni.evolution_tuner.evolution_tuner',
     'SMAC': 'nni.smac_tuner.smac_tuner',
     'BatchTuner': 'nni.batch_tuner.batch_tuner',
-
+    'GridSearch': 'nni.gridsearch_tuner.gridsearch_tuner',
+
     'Medianstop': 'nni.medianstop_assessor.medianstop_assessor'
 }
 
@@ -36,6 +37,7 @@
     'Evolution': 'EvolutionTuner',
     'SMAC': 'SMACTuner',
     'BatchTuner': 'BatchTuner',
+    'GridSearch': 'GridSearchTuner',
     'Medianstop': 'MedianstopAssessor'
 }
 
diff --git a/src/sdk/pynni/nni/gridsearch_tuner/__init__.py b/src/sdk/pynni/nni/gridsearch_tuner/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/src/sdk/pynni/nni/gridsearch_tuner/gridsearch_tuner.py b/src/sdk/pynni/nni/gridsearch_tuner/gridsearch_tuner.py
new file mode 100644
index 0000000000..18caa523d7
--- /dev/null
+++ b/src/sdk/pynni/nni/gridsearch_tuner/gridsearch_tuner.py
@@ -0,0 +1,143 @@
+# Copyright (c) Microsoft Corporation
+# All rights reserved.
+#
+# MIT License
+#
+# Permission is hereby granted, free of charge,
+# to any person obtaining a copy of this software and associated
+# documentation files (the "Software"), to deal in the Software without restriction,
+# including without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and
+# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
+# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+'''
+gridsearch_tuner.py including:
+    class GridSearchTuner
+'''
+
+import copy
+import numpy as np
+
+import nni
+from nni.tuner import Tuner
+
+TYPE = '_type'
+CHOICE = 'choice'
+VALUE = '_value'
+
+
+class GridSearchTuner(Tuner):
+    '''
+    GridSearchTuner will search all the possible configurations that the user defines in the search space.
+    The only acceptable types of search space are 'quniform', 'qloguniform' and 'choice'.
+
+    Type 'choice' will select one of the options. Note that it can also be nested.
+
+    Type 'quniform' will receive three values [low, high, q], where [low, high] specifies a range and 'q' specifies the number of values that will be sampled evenly.
+    Note that q should be at least 2.
+    It will be sampled in a way that the first sampled value is 'low', and each of the following values is (high-low)/(q-1) larger than the one before it, so the last sampled value is 'high'.
+
+    Type 'qloguniform' behaves like 'quniform' except that it first changes the range to [log10(low), log10(high)],
+    samples evenly in that space, and then converts the sampled values back.
+    '''
+
+    def __init__(self, optimize_mode):
+        self.count = -1
+        self.expanded_search_space = []
+
+    def json2paramater(self, ss_spec):
+        '''
+        generate all possible configs for hyperparameters from hyperparameter space.
+        ss_spec: hyperparameter space
+        '''
+        if isinstance(ss_spec, dict):
+            if '_type' in ss_spec.keys():
+                _type = ss_spec['_type']
+                _value = ss_spec['_value']
+                chosen_params = list()
+                if _type == 'choice':
+                    for value in _value:
+                        choice = self.json2paramater(value)
+                        if isinstance(choice, list):
+                            chosen_params.extend(choice)
+                        else:
+                            chosen_params.append(choice)
+                else:
+                    chosen_params = self.parse_qtype(_type, _value)
+            else:
+                chosen_params = dict()
+                for key in ss_spec.keys():
+                    chosen_params[key] = self.json2paramater(ss_spec[key])
+                return self.expand_parameters(chosen_params)
+        elif isinstance(ss_spec, list):
+            chosen_params = list()
+            for subspec in ss_spec[1:]:
+                choice = self.json2paramater(subspec)
+                if isinstance(choice, list):
+                    chosen_params.extend(choice)
+                else:
+                    chosen_params.append(choice)
+            chosen_params = list(map(lambda v: {ss_spec[0]: v}, chosen_params))
+        else:
+            chosen_params = copy.deepcopy(ss_spec)
+        return chosen_params
+
+    def _parse_quniform(self, param_value):
+        '''parse a quniform parameter and return the list of grid values from low to high, inclusive'''
+        if param_value[2] < 2:
+            raise RuntimeError("The number of values sampled (q) should be at least 2")
+        low, high, count = param_value[0], param_value[1], param_value[2]
+        interval = (high - low) / (count - 1)
+        return [float(low + interval * i) for i in range(count)]
+
+    def parse_qtype(self, param_type, param_value):
+        '''parse type of quniform or qloguniform'''
+        if param_type == 'quniform':
+            return self._parse_quniform(param_value)
+        if param_type == 'qloguniform':
+            param_value[:2] = np.log10(param_value[:2])
+            return list(np.power(10, self._parse_quniform(param_value)))
+
+        raise RuntimeError("Not supported type: %s" % param_type)
+
+    def expand_parameters(self, para):
+        '''
+        Enumerate all possible combinations of all parameters
+        para: {key1: [v11, v12, ...], key2: [v21, v22, ...], ...}
+        return: [{key1: v11, key2: v21, ...}, {key1: v11, key2: v22, ...}, ...]
+        '''
+        if len(para) == 1:
+            for key, values in para.items():
+                return list(map(lambda v: {key: v}, values))
+
+        key = list(para)[0]
+        values = para.pop(key)
+        rest_para = self.expand_parameters(para)
+        ret_para = list()
+        for val in values:
+            for config in rest_para:
+                config[key] = val
+                ret_para.append(copy.deepcopy(config))
+        return ret_para
+
+    def update_search_space(self, search_space):
+        '''
+        Check if the search space is valid and expand it: it may only contain the 'choice' type or other types beginning with the letter 'q'
+        '''
+        self.expanded_search_space = self.json2paramater(search_space)
+
+    def generate_parameters(self, parameter_id):
+        self.count += 1
+        if self.count > len(self.expanded_search_space)-1:
+            raise nni.NoMoreTrialError('no more parameters now.')
+        return self.expanded_search_space[self.count]
+
+    def receive_trial_result(self, parameter_id, parameters, value):
+        pass
diff --git a/tools/nni_cmd/config_schema.py b/tools/nni_cmd/config_schema.py
index 80626037e1..19e36fea98 100644
--- a/tools/nni_cmd/config_schema.py
+++ b/tools/nni_cmd/config_schema.py
@@ -33,7 +33,7 @@
     Optional('multiPhase'): bool,
     'useAnnotation': bool,
     'tuner': Or({
-        'builtinTunerName': Or('TPE', 'Random', 'Anneal', 'Evolution', 'SMAC', 'BatchTuner'),
+        'builtinTunerName': Or('TPE', 'Random', 'Anneal', 'Evolution', 'SMAC', 'BatchTuner', 'GridSearch'),
         'classArgs': {
             'optimize_mode': Or('maximize', 'minimize'),
             Optional('speed'): int
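For reviewers who want to see the expansion end to end, the following rough usage sketch exercises the new tuner directly. It assumes the NNI Python SDK with this patch is importable; the sample search space and the `parameter_id` values are made up for illustration.

```python
# Hypothetical local check of the new GridSearchTuner (not an NNI-documented snippet).
from nni.gridsearch_tuner.gridsearch_tuner import GridSearchTuner

search_space = {
    "batch_size": {"_type": "choice", "_value": [16, 32]},
    "learning_rate": {"_type": "qloguniform", "_value": [0.001, 0.1, 3]},
}

tuner = GridSearchTuner(optimize_mode='maximize')  # optimize_mode is accepted but unused by grid search
tuner.update_search_space(search_space)

# 2 choices x 3 grid points = 6 configurations; a 7th request would raise nni.NoMoreTrialError.
for i in range(6):
    print(tuner.generate_parameters(parameter_id=i))
```

The cross product comes from `expand_parameters`, which enumerates every combination of the per-key value lists produced by `json2paramater`.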