This repository has been archived by the owner on Sep 18, 2024. It is now read-only.

add gridsearch tuner #364

Merged · 12 commits · Nov 16, 2018
24 changes: 6 additions & 18 deletions pylintrc
@@ -1,30 +1,18 @@
# Usage:
# pylint --rcfile=PATH_TO_THIS_FILE PACKAGE_NAME
# python3 -m pylint --rcfile=PATH_TO_THIS_FILE PACKAGE_NAME
# or
# pylint --rcfile=PATH_TO_THIS_FILE SOURCE_FILE.py
# python3 -m pylint --rcfile=PATH_TO_THIS_FILE SOURCE_FILE.py

[SETTINGS]

max-line-length=140

max-args=5
max-args=8
max-locals=15
max-statements=50
max-attributes=7
max-attributes=15

const-naming-style=any

disable=all

enable=F,
E,
unreachable,
duplicate-key,
unnecessary-semicolon,
global-variable-not-assigned,
binary-op-exception,
bad-format-string,
anomalous-backslash-in-string,
bad-open-mode

extension-pkg-whitelist=numpy
disable=duplicate-code,
super-init-not-called
2 changes: 1 addition & 1 deletion src/nni_manager/rest_server/restValidationSchemas.ts
@@ -61,7 +61,7 @@ export namespace ValidationSchemas {
maxExecDuration: joi.number().min(0).required(),
multiPhase: joi.boolean(),
tuner: joi.object({
builtinTunerName: joi.string().valid('TPE', 'Random', 'Anneal', 'Evolution', 'SMAC', 'BatchTuner'),
builtinTunerName: joi.string().valid('TPE', 'Random', 'Anneal', 'Evolution', 'SMAC', 'BatchTuner', 'GridSearch'),
codeDir: joi.string(),
classFileName: joi.string(),
className: joi.string(),
10 changes: 10 additions & 0 deletions src/sdk/pynni/nni/README.md
@@ -6,6 +6,7 @@ For now, NNI has supported the following tuner algorithms. Note that NNI install
- Random Search
- Anneal
- Naive Evolution
- Grid Search
- SMAC (to install through `nnictl`)
- ENAS (ongoing)
- Batch (ongoing)
@@ -46,6 +47,15 @@ Note that SMAC only supports a subset of the types in [search space spec](../../

Batch allows users to simply provide several configurations (i.e., choices of hyper-parameters) for their trial code. The experiment finishes once all of the configurations have been run.

**Grid Search**

Grid Search performs an exhaustive search over the subset of the hyperparameter space specified in the search space file.

Note that the only acceptable types in the search space are 'choice', 'quniform' and 'qloguniform' (see the example after this list):

* Type 'choice' will select one of the options. Note that choices can also be nested.
* Type 'quniform' will receive three values [low, high, q], where [low, high] specifies a range and 'q' specifies the number of values to be sampled evenly from it (q must be at least 2). Sampling starts at 'low', and each subsequent value is (high - low) / (q - 1) larger than the previous one, so the last value is 'high'.
* Type 'qloguniform' behaves like 'quniform' except that it first maps the range to [log10(low), log10(high)], samples evenly in that log space, and then maps the sampled values back.
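As a concrete illustration, here is a minimal sketch of how such a search space is expanded. The parameter names and values are hypothetical, and it assumes the NNI Python SDK added under `src/sdk/pynni` is importable as `nni`:

```python
from nni.gridsearch_tuner.gridsearch_tuner import GridSearchTuner

# A hypothetical search space: one 'quniform' and one 'choice' parameter.
search_space = {
    "dropout_rate": {"_type": "quniform", "_value": [0.1, 0.5, 5]},
    "batch_size": {"_type": "choice", "_value": [16, 32]},
}

tuner = GridSearchTuner(optimize_mode='maximize')
tuner.update_search_space(search_space)

# 'dropout_rate' is sampled with step (0.5 - 0.1) / (5 - 1) = 0.1, i.e.
# [0.1, 0.2, 0.3, 0.4, 0.5] up to floating-point rounding; combined with the
# two 'batch_size' choices, the expanded grid holds 5 * 2 = 10 configurations.
print(len(tuner.expanded_search_space))            # 10
print(tuner.generate_parameters(parameter_id=0))   # {'batch_size': 16, 'dropout_rate': 0.1}
```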

## 2. How to use the tuner algorithm in NNI?

4 changes: 3 additions & 1 deletion src/sdk/pynni/nni/constants.py
@@ -25,7 +25,8 @@
'Evolution': 'nni.evolution_tuner.evolution_tuner',
'SMAC': 'nni.smac_tuner.smac_tuner',
'BatchTuner': 'nni.batch_tuner.batch_tuner',

'GridSearch': 'nni.gridsearch_tuner.gridsearch_tuner',

'Medianstop': 'nni.medianstop_assessor.medianstop_assessor'
}

@@ -36,6 +37,7 @@
'Evolution': 'EvolutionTuner',
'SMAC': 'SMACTuner',
'BatchTuner': 'BatchTuner',
'GridSearch': 'GridSearchTuner',

'Medianstop': 'MedianstopAssessor'
}
0 changes: 0 additions & 0 deletions src/sdk/pynni/nni/gridsearch_tuner/__init__.py
Empty file.
143 changes: 143 additions & 0 deletions src/sdk/pynni/nni/gridsearch_tuner/gridsearch_tuner.py
@@ -0,0 +1,143 @@
# Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge,
# to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
gridsearch_tuner.py including:
class GridSearchTuner
'''

import copy
import numpy as np

import nni
from nni.tuner import Tuner

TYPE = '_type'
CHOICE = 'choice'
VALUE = '_value'


class GridSearchTuner(Tuner):
    '''
    GridSearchTuner will search all the possible configurations that the user defines in the search space.
    The only acceptable types of search space are 'choice', 'quniform' and 'qloguniform'.

    Type 'choice' will select one of the options. Note that it can also be nested.

    Type 'quniform' will receive three values [low, high, q], where [low, high] specifies a range and 'q' specifies
    the number of values to be sampled evenly from it. Note that q should be at least 2.
    Sampling starts at 'low', and each subsequent value is (high - low) / (q - 1) larger than the previous one,
    so the last sampled value is 'high'.

    Type 'qloguniform' behaves like 'quniform' except that it first maps the range to [log10(low), log10(high)],
    samples evenly in that log space, and then maps the sampled values back.
    '''
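    # Example (illustrative): a nested 'choice' such as
    #     {'_type': 'choice', '_value': [1, {'_type': 'quniform', '_value': [2, 4, 3]}]}
    # expands to the candidate list [1, 2.0, 3.0, 4.0], because the inner
    # 'quniform' contributes [2.0, 3.0, 4.0] and is flattened into the outer choice.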

def __init__(self, optimize_mode):
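        # optimize_mode is accepted for interface consistency with other tuners
        # but is not used by grid search.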
self.count = -1
self.expanded_search_space = []

def json2paramater(self, ss_spec):
'''
generate all possible configs for hyperparameters from hyperparameter space.
ss_spec: hyperparameter space
'''
if isinstance(ss_spec, dict):
if '_type' in ss_spec.keys():
_type = ss_spec['_type']
_value = ss_spec['_value']
chosen_params = list()
if _type == 'choice':
for value in _value:
choice = self.json2paramater(value)
if isinstance(choice, list):
chosen_params.extend(choice)
else:
chosen_params.append(choice)
else:
chosen_params = self.parse_qtype(_type, _value)
else:
chosen_params = dict()
for key in ss_spec.keys():
chosen_params[key] = self.json2paramater(ss_spec[key])
return self.expand_parameters(chosen_params)
elif isinstance(ss_spec, list):
chosen_params = list()
for subspec in ss_spec[1:]:
choice = self.json2paramater(subspec)
if isinstance(choice, list):
chosen_params.extend(choice)
else:
chosen_params.append(choice)
chosen_params = list(map(lambda v: {ss_spec[0]: v}, chosen_params))
else:
chosen_params = copy.deepcopy(ss_spec)
return chosen_params

def _parse_quniform(self, param_value):
        '''Parse a 'quniform' parameter [low, high, q] and return the list of q evenly spaced values from low to high.'''
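        # Example (illustrative): param_value = [0, 1, 5] gives an interval of
        # (1 - 0) / (5 - 1) = 0.25 and returns [0.0, 0.25, 0.5, 0.75, 1.0].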
if param_value[2] < 2:
raise RuntimeError("The number of values sampled (q) should be at least 2")
low, high, count = param_value[0], param_value[1], param_value[2]
interval = (high - low) / (count - 1)
return [float(low + interval * i) for i in range(count)]

def parse_qtype(self, param_type, param_value):
'''parse type of quniform or qloguniform'''
if param_type == 'quniform':
return self._parse_quniform(param_value)
if param_type == 'qloguniform':
param_value[:2] = np.log10(param_value[:2])
return list(np.power(10, self._parse_quniform(param_value)))

raise RuntimeError("Not supported type: %s" % param_type)
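    # Example (illustrative): parse_qtype('qloguniform', [1, 100, 3]) maps the
    # range to [log10(1), log10(100)] = [0, 2], samples [0.0, 1.0, 2.0] evenly,
    # and returns [1.0, 10.0, 100.0] after raising 10 to each sampled value.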

def expand_parameters(self, para):
'''
Enumerate all possible combinations of all parameters
para: {key1: [v11, v12, ...], key2: [v21, v22, ...], ...}
        return: [{key1: v11, key2: v21, ...}, {key1: v11, key2: v22, ...}, ...]
'''
if len(para) == 1:
for key, values in para.items():
return list(map(lambda v: {key: v}, values))

key = list(para)[0]
values = para.pop(key)
rest_para = self.expand_parameters(para)
ret_para = list()
for val in values:
for config in rest_para:
config[key] = val
ret_para.append(copy.deepcopy(config))
return ret_para
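    # Example (illustrative): expand_parameters({'a': [1, 2], 'b': ['x']})
    # returns [{'a': 1, 'b': 'x'}, {'a': 2, 'b': 'x'}].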

def update_search_space(self, search_space):
'''
        Check if the search space is valid and expand it: it may only contain the 'choice' type and the q-types ('quniform' and 'qloguniform').
'''
self.expanded_search_space = self.json2paramater(search_space)

def generate_parameters(self, parameter_id):
self.count += 1
if self.count > len(self.expanded_search_space)-1:
raise nni.NoMoreTrialError('no more parameters now.')
return self.expanded_search_space[self.count]

def receive_trial_result(self, parameter_id, parameters, value):
pass
2 changes: 1 addition & 1 deletion tools/nni_cmd/config_schema.py
@@ -33,7 +33,7 @@
Optional('multiPhase'): bool,
'useAnnotation': bool,
'tuner': Or({
'builtinTunerName': Or('TPE', 'Random', 'Anneal', 'Evolution', 'SMAC', 'BatchTuner'),
'builtinTunerName': Or('TPE', 'Random', 'Anneal', 'Evolution', 'SMAC', 'BatchTuner', 'GridSearch'),
'classArgs': {
'optimize_mode': Or('maximize', 'minimize'),
Optional('speed'): int