diff --git a/docs/AnnotationSpec.md b/docs/AnnotationSpec.md
index 5383e3cc24..62d2c60392 100644
--- a/docs/AnnotationSpec.md
+++ b/docs/AnnotationSpec.md
@@ -4,23 +4,26 @@ For good user experience and reduce user effort, we need to design a good annota
 If users use NNI system, they only need to:
 
-    1. Annotation variable in code as:
+    1. Use nni.get_next_parameter() to retrieve hyper-parameters from the Tuner. Before using any other annotation, place the following annotation at the beginning of the trial code:
+    '''@nni.get_next_parameter()'''
+
+    2. Annotate variables in code as:
 
     '''@nni.variable(nni.choice(2,3,5,7),name=self.conv_size)'''
 
-    2. Annotation intermediate in code as:
+    3. Annotate intermediate results in code as:
 
     '''@nni.report_intermediate_result(test_acc)'''
 
-    3. Annotation output in code as:
+    4. Annotate the output in code as:
 
     '''@nni.report_final_result(test_acc)'''
 
-    4. Annotation `function_choice` in code as:
+    5. Annotate `function_choice` in code as:
 
     '''@nni.function_choice(max_pool(h_conv1, self.pool_size),avg_pool(h_conv1, self.pool_size),name=max_pool)'''
 
-In this way, they can easily realize automatic tuning on NNI.
+In this way, they can easily implement automatic tuning on NNI.
 
 For `@nni.variable`, `nni.choice` is the type of search space and there are 10 types to express your search space as follows:
diff --git a/docs/howto_1_WriteTrial.md b/docs/howto_1_WriteTrial.md
index 58e513c9e3..907ff5b72e 100644
--- a/docs/howto_1_WriteTrial.md
+++ b/docs/howto_1_WriteTrial.md
@@ -27,7 +27,7 @@ Refer to [SearchSpaceSpec.md](SearchSpaceSpec.md) to learn more about search spa
 
 2.2 Get predefined parameters
 Use the following code snippet:
 
-    RECEIVED_PARAMS = nni.get_parameters()
+    RECEIVED_PARAMS = nni.get_next_parameter()
 
 to get hyper-parameters' values assigned by tuner. `RECEIVED_PARAMS` is an object, for example:
diff --git a/docs/howto_2_CustomizedTuner.md b/docs/howto_2_CustomizedTuner.md
index 7994a82cad..862df6885d 100644
--- a/docs/howto_2_CustomizedTuner.md
+++ b/docs/howto_2_CustomizedTuner.md
@@ -61,7 +61,7 @@ If the you implement the ```generate_parameters``` like this:
         # your code implements here.
         return {"dropout": 0.3, "learning_rate": 0.4}
 ```
-It's means your Tuner will always generate parameters ```{"dropout": 0.3, "learning_rate": 0.4}```. Then Trial will receive ```{"dropout": 0.3, "learning_rate": 0.4}``` this object will using ```nni.get_parameters()``` API from NNI SDK. After training of Trial, it will send result to Tuner by calling ```nni.report_final_result(0.93)```. Then ```receive_trial_result``` will function will receied these parameters like:
+It means your Tuner will always generate the parameters ```{"dropout": 0.3, "learning_rate": 0.4}```. The Trial will then receive ```{"dropout": 0.3, "learning_rate": 0.4}``` by calling the API ```nni.get_next_parameter()```. Once the trial ends with a result (normally some kind of metric), it can send the result to the Tuner by calling the API ```nni.report_final_result()```, for example ```nni.report_final_result(0.93)```. Then your Tuner's ```receive_trial_result``` function will receive the result like:
 ```
 parameter_id = 82347
 parameters = {"dropout": 0.3, "learning_rate": 0.4}
diff --git a/examples/trials/README.md b/examples/trials/README.md
index cd636e74f9..e78715120c 100644
--- a/examples/trials/README.md
+++ b/examples/trials/README.md
@@ -1,284 +1,284 @@
-# How to write a Trial running on NNI?
- -*Trial receive the hyper-parameter/architecture configure from Tuner, and send intermediate result to Assessor and final result to Tuner.* - -So when user want to write a Trial running on NNI, she/he should: - -**1)Have an original Trial could run**, - -Trial's code could be any machine learning code that could run in local. Here we use ```mnist-keras.py``` as example: - -```python -import argparse -import logging -import keras -import numpy as np -from keras import backend as K -from keras.datasets import mnist -from keras.layers import Conv2D, Dense, Flatten, MaxPooling2D -from keras.models import Sequential - -K.set_image_data_format('channels_last') - -H, W = 28, 28 -NUM_CLASSES = 10 - -def create_mnist_model(hyper_params, input_shape=(H, W, 1), num_classes=NUM_CLASSES): - layers = [ - Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape), - Conv2D(64, (3, 3), activation='relu'), - MaxPooling2D(pool_size=(2, 2)), - Flatten(), - Dense(100, activation='relu'), - Dense(num_classes, activation='softmax') - ] - - model = Sequential(layers) - - if hyper_params['optimizer'] == 'Adam': - optimizer = keras.optimizers.Adam(lr=hyper_params['learning_rate']) - else: - optimizer = keras.optimizers.SGD(lr=hyper_params['learning_rate'], momentum=0.9) - model.compile(loss=keras.losses.categorical_crossentropy, optimizer=optimizer, metrics=['accuracy']) - - return model - -def load_mnist_data(args): - (x_train, y_train), (x_test, y_test) = mnist.load_data() - - x_train = (np.expand_dims(x_train, -1).astype(np.float) / 255.)[:args.num_train] - x_test = (np.expand_dims(x_test, -1).astype(np.float) / 255.)[:args.num_test] - y_train = keras.utils.to_categorical(y_train, NUM_CLASSES)[:args.num_train] - y_test = keras.utils.to_categorical(y_test, NUM_CLASSES)[:args.num_test] - - return x_train, y_train, x_test, y_test - -class SendMetrics(keras.callbacks.Callback): - def on_epoch_end(self, epoch, logs={}): - pass - -def train(args, params): - x_train, y_train, x_test, y_test = load_mnist_data(args) - model = create_mnist_model(params) - - model.fit(x_train, y_train, batch_size=args.batch_size, epochs=args.epochs, verbose=1, - validation_data=(x_test, y_test), callbacks=[SendMetrics()]) - - _, acc = model.evaluate(x_test, y_test, verbose=0) - -def generate_default_params(): - return { - 'optimizer': 'Adam', - 'learning_rate': 0.001 - } - -if __name__ == '__main__': - PARSER = argparse.ArgumentParser() - PARSER.add_argument("--batch_size", type=int, default=200, help="batch size", required=False) - PARSER.add_argument("--epochs", type=int, default=10, help="Train epochs", required=False) - PARSER.add_argument("--num_train", type=int, default=1000, help="Number of train samples to be used, maximum 60000", required=False) - PARSER.add_argument("--num_test", type=int, default=1000, help="Number of test samples to be used, maximum 10000", required=False) - - ARGS, UNKNOWN = PARSER.parse_known_args() - PARAMS = generate_default_params() - train(ARGS, PARAMS) -``` - -**2)Get configure from Tuner** - -User import ```nni``` and use ```nni.get_parameters()``` to recive configure. Please noted **10**, **24** and **25** line in the following code. - - -```python -import argparse -import logging -import keras -import numpy as np -from keras import backend as K -from keras.datasets import mnist -from keras.layers import Conv2D, Dense, Flatten, MaxPooling2D -from keras.models import Sequential - -import nni - -... 
- -if __name__ == '__main__': - PARSER = argparse.ArgumentParser() - PARSER.add_argument("--batch_size", type=int, default=200, help="batch size", required=False) - PARSER.add_argument("--epochs", type=int, default=10, help="Train epochs", required=False) - PARSER.add_argument("--num_train", type=int, default=1000, help="Number of train samples to be used, maximum 60000", required=False) - PARSER.add_argument("--num_test", type=int, default=1000, help="Number of test samples to be used, maximum 10000", required=False) - - ARGS, UNKNOWN = PARSER.parse_known_args() - - PARAMS = generate_default_params() - RECEIVED_PARAMS = nni.get_parameters() - PARAMS.update(RECEIVED_PARAMS) - train(ARGS, PARAMS) -``` - - -**3) Send intermediate result** - -Use ```nni.report_intermediate_result``` to send intermediate result to Assessor. Please noted **5** line in the following code. - - -```python -... - -class SendMetrics(keras.callbacks.Callback): - def on_epoch_end(self, epoch, logs={}): - nni.report_intermediate_result(logs) - -def train(args, params): - x_train, y_train, x_test, y_test = load_mnist_data(args) - model = create_mnist_model(params) - - model.fit(x_train, y_train, batch_size=args.batch_size, epochs=args.epochs, verbose=1, - validation_data=(x_test, y_test), callbacks=[SendMetrics()]) - - _, acc = model.evaluate(x_test, y_test, verbose=0) - -... -``` -**4) Send final result** - -Use ```nni.report_final_result``` to send final result to Trial. Please noted **15** line in the following code. - -```python -... - -class SendMetrics(keras.callbacks.Callback): - def on_epoch_end(self, epoch, logs={}): - nni.report_intermediate_result(logs) - -def train(args, params): - x_train, y_train, x_test, y_test = load_mnist_data(args) - model = create_mnist_model(params) - - model.fit(x_train, y_train, batch_size=args.batch_size, epochs=args.epochs, verbose=1, - validation_data=(x_test, y_test), callbacks=[SendMetrics()]) - - _, acc = model.evaluate(x_test, y_test, verbose=0) - nni.report_final_result(acc) -... 
-``` - -Here is the complete exampe: - - -```python -import argparse -import logging - -import keras -import numpy as np -from keras import backend as K -from keras.datasets import mnist -from keras.layers import Conv2D, Dense, Flatten, MaxPooling2D -from keras.models import Sequential - -import nni - -LOG = logging.getLogger('mnist_keras') -K.set_image_data_format('channels_last') - -H, W = 28, 28 -NUM_CLASSES = 10 - -def create_mnist_model(hyper_params, input_shape=(H, W, 1), num_classes=NUM_CLASSES): - ''' - Create simple convolutional model - ''' - layers = [ - Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape), - Conv2D(64, (3, 3), activation='relu'), - MaxPooling2D(pool_size=(2, 2)), - Flatten(), - Dense(100, activation='relu'), - Dense(num_classes, activation='softmax') - ] - - model = Sequential(layers) - - if hyper_params['optimizer'] == 'Adam': - optimizer = keras.optimizers.Adam(lr=hyper_params['learning_rate']) - else: - optimizer = keras.optimizers.SGD(lr=hyper_params['learning_rate'], momentum=0.9) - model.compile(loss=keras.losses.categorical_crossentropy, optimizer=optimizer, metrics=['accuracy']) - - return model - -def load_mnist_data(args): - ''' - Load MNIST dataset - ''' - (x_train, y_train), (x_test, y_test) = mnist.load_data() - - x_train = (np.expand_dims(x_train, -1).astype(np.float) / 255.)[:args.num_train] - x_test = (np.expand_dims(x_test, -1).astype(np.float) / 255.)[:args.num_test] - y_train = keras.utils.to_categorical(y_train, NUM_CLASSES)[:args.num_train] - y_test = keras.utils.to_categorical(y_test, NUM_CLASSES)[:args.num_test] - - LOG.debug('x_train shape: %s', (x_train.shape,)) - LOG.debug('x_test shape: %s', (x_test.shape,)) - - return x_train, y_train, x_test, y_test - -class SendMetrics(keras.callbacks.Callback): - ''' - Keras callback to send metrics to NNI framework - ''' - def on_epoch_end(self, epoch, logs={}): - ''' - Run on end of each epoch - ''' - LOG.debug(logs) - nni.report_intermediate_result(logs) - -def train(args, params): - ''' - Train model - ''' - x_train, y_train, x_test, y_test = load_mnist_data(args) - model = create_mnist_model(params) - - model.fit(x_train, y_train, batch_size=args.batch_size, epochs=args.epochs, verbose=1, - validation_data=(x_test, y_test), callbacks=[SendMetrics()]) - - _, acc = model.evaluate(x_test, y_test, verbose=0) - LOG.debug('Final result is: %d', acc) - nni.report_final_result(acc) - -def generate_default_params(): - ''' - Generate default hyper parameters - ''' - return { - 'optimizer': 'Adam', - 'learning_rate': 0.001 - } - -if __name__ == '__main__': - PARSER = argparse.ArgumentParser() - PARSER.add_argument("--batch_size", type=int, default=200, help="batch size", required=False) - PARSER.add_argument("--epochs", type=int, default=10, help="Train epochs", required=False) - PARSER.add_argument("--num_train", type=int, default=1000, help="Number of train samples to be used, maximum 60000", required=False) - PARSER.add_argument("--num_test", type=int, default=1000, help="Number of test samples to be used, maximum 10000", required=False) - - ARGS, UNKNOWN = PARSER.parse_known_args() - - try: - # get parameters from tuner - RECEIVED_PARAMS = nni.get_parameters() - LOG.debug(RECEIVED_PARAMS) - PARAMS = generate_default_params() - PARAMS.update(RECEIVED_PARAMS) - # train - train(ARGS, PARAMS) - except Exception as e: - LOG.exception(e) - raise - +# How to write a Trial running on NNI? 
+
+*A Trial receives the hyper-parameter/architecture configuration from the Tuner, and sends intermediate results to the Assessor and the final result to the Tuner.*
+
+So when a user wants to write a Trial running on NNI, she/he should:
+
+**1) Have an original Trial that can run**
+
+The Trial's code could be any machine learning code that can run locally. Here we use ```mnist-keras.py``` as an example:
+
+```python
+import argparse
+import logging
+import keras
+import numpy as np
+from keras import backend as K
+from keras.datasets import mnist
+from keras.layers import Conv2D, Dense, Flatten, MaxPooling2D
+from keras.models import Sequential
+
+K.set_image_data_format('channels_last')
+
+H, W = 28, 28
+NUM_CLASSES = 10
+
+def create_mnist_model(hyper_params, input_shape=(H, W, 1), num_classes=NUM_CLASSES):
+    layers = [
+        Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape),
+        Conv2D(64, (3, 3), activation='relu'),
+        MaxPooling2D(pool_size=(2, 2)),
+        Flatten(),
+        Dense(100, activation='relu'),
+        Dense(num_classes, activation='softmax')
+    ]
+
+    model = Sequential(layers)
+
+    if hyper_params['optimizer'] == 'Adam':
+        optimizer = keras.optimizers.Adam(lr=hyper_params['learning_rate'])
+    else:
+        optimizer = keras.optimizers.SGD(lr=hyper_params['learning_rate'], momentum=0.9)
+    model.compile(loss=keras.losses.categorical_crossentropy, optimizer=optimizer, metrics=['accuracy'])
+
+    return model
+
+def load_mnist_data(args):
+    (x_train, y_train), (x_test, y_test) = mnist.load_data()
+
+    x_train = (np.expand_dims(x_train, -1).astype(np.float) / 255.)[:args.num_train]
+    x_test = (np.expand_dims(x_test, -1).astype(np.float) / 255.)[:args.num_test]
+    y_train = keras.utils.to_categorical(y_train, NUM_CLASSES)[:args.num_train]
+    y_test = keras.utils.to_categorical(y_test, NUM_CLASSES)[:args.num_test]
+
+    return x_train, y_train, x_test, y_test
+
+class SendMetrics(keras.callbacks.Callback):
+    def on_epoch_end(self, epoch, logs={}):
+        pass
+
+def train(args, params):
+    x_train, y_train, x_test, y_test = load_mnist_data(args)
+    model = create_mnist_model(params)
+
+    model.fit(x_train, y_train, batch_size=args.batch_size, epochs=args.epochs, verbose=1,
+              validation_data=(x_test, y_test), callbacks=[SendMetrics()])
+
+    _, acc = model.evaluate(x_test, y_test, verbose=0)
+
+def generate_default_params():
+    return {
+        'optimizer': 'Adam',
+        'learning_rate': 0.001
+    }
+
+if __name__ == '__main__':
+    PARSER = argparse.ArgumentParser()
+    PARSER.add_argument("--batch_size", type=int, default=200, help="batch size", required=False)
+    PARSER.add_argument("--epochs", type=int, default=10, help="Train epochs", required=False)
+    PARSER.add_argument("--num_train", type=int, default=1000, help="Number of train samples to be used, maximum 60000", required=False)
+    PARSER.add_argument("--num_test", type=int, default=1000, help="Number of test samples to be used, maximum 10000", required=False)
+
+    ARGS, UNKNOWN = PARSER.parse_known_args()
+    PARAMS = generate_default_params()
+    train(ARGS, PARAMS)
+```
+
+**2) Get the configuration from the Tuner**
+
+Import ```nni``` and use ```nni.get_next_parameter()``` to receive the configuration. Please note lines **10**, **24** and **25** in the following code.
+
+
+```python
+import argparse
+import logging
+import keras
+import numpy as np
+from keras import backend as K
+from keras.datasets import mnist
+from keras.layers import Conv2D, Dense, Flatten, MaxPooling2D
+from keras.models import Sequential
+
+import nni
+
+...
+
+if __name__ == '__main__':
+    PARSER = argparse.ArgumentParser()
+    PARSER.add_argument("--batch_size", type=int, default=200, help="batch size", required=False)
+    PARSER.add_argument("--epochs", type=int, default=10, help="Train epochs", required=False)
+    PARSER.add_argument("--num_train", type=int, default=1000, help="Number of train samples to be used, maximum 60000", required=False)
+    PARSER.add_argument("--num_test", type=int, default=1000, help="Number of test samples to be used, maximum 10000", required=False)
+
+    ARGS, UNKNOWN = PARSER.parse_known_args()
+
+    PARAMS = generate_default_params()
+    RECEIVED_PARAMS = nni.get_next_parameter()
+    PARAMS.update(RECEIVED_PARAMS)
+    train(ARGS, PARAMS)
+```
+
+
+**3) Send intermediate results**
+
+Use ```nni.report_intermediate_result``` to send intermediate results to the Assessor. Please note line **5** in the following code.
+
+
+```python
+...
+
+class SendMetrics(keras.callbacks.Callback):
+    def on_epoch_end(self, epoch, logs={}):
+        nni.report_intermediate_result(logs)
+
+def train(args, params):
+    x_train, y_train, x_test, y_test = load_mnist_data(args)
+    model = create_mnist_model(params)
+
+    model.fit(x_train, y_train, batch_size=args.batch_size, epochs=args.epochs, verbose=1,
+              validation_data=(x_test, y_test), callbacks=[SendMetrics()])
+
+    _, acc = model.evaluate(x_test, y_test, verbose=0)
+
+...
+```
+**4) Send the final result**
+
+Use ```nni.report_final_result``` to send the final result to the Tuner. Please note line **15** in the following code.
+
+```python
+...
+
+class SendMetrics(keras.callbacks.Callback):
+    def on_epoch_end(self, epoch, logs={}):
+        nni.report_intermediate_result(logs)
+
+def train(args, params):
+    x_train, y_train, x_test, y_test = load_mnist_data(args)
+    model = create_mnist_model(params)
+
+    model.fit(x_train, y_train, batch_size=args.batch_size, epochs=args.epochs, verbose=1,
+              validation_data=(x_test, y_test), callbacks=[SendMetrics()])
+
+    _, acc = model.evaluate(x_test, y_test, verbose=0)
+    nni.report_final_result(acc)
+...
+```
+
+Here is the complete example:
+
+
+```python
+import argparse
+import logging
+
+import keras
+import numpy as np
+from keras import backend as K
+from keras.datasets import mnist
+from keras.layers import Conv2D, Dense, Flatten, MaxPooling2D
+from keras.models import Sequential
+
+import nni
+
+LOG = logging.getLogger('mnist_keras')
+K.set_image_data_format('channels_last')
+
+H, W = 28, 28
+NUM_CLASSES = 10
+
+def create_mnist_model(hyper_params, input_shape=(H, W, 1), num_classes=NUM_CLASSES):
+    '''
+    Create simple convolutional model
+    '''
+    layers = [
+        Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=input_shape),
+        Conv2D(64, (3, 3), activation='relu'),
+        MaxPooling2D(pool_size=(2, 2)),
+        Flatten(),
+        Dense(100, activation='relu'),
+        Dense(num_classes, activation='softmax')
+    ]
+
+    model = Sequential(layers)
+
+    if hyper_params['optimizer'] == 'Adam':
+        optimizer = keras.optimizers.Adam(lr=hyper_params['learning_rate'])
+    else:
+        optimizer = keras.optimizers.SGD(lr=hyper_params['learning_rate'], momentum=0.9)
+    model.compile(loss=keras.losses.categorical_crossentropy, optimizer=optimizer, metrics=['accuracy'])
+
+    return model
+
+def load_mnist_data(args):
+    '''
+    Load MNIST dataset
+    '''
+    (x_train, y_train), (x_test, y_test) = mnist.load_data()
+
+    x_train = (np.expand_dims(x_train, -1).astype(np.float) / 255.)[:args.num_train]
+    x_test = (np.expand_dims(x_test, -1).astype(np.float) / 255.)[:args.num_test]
+    y_train = keras.utils.to_categorical(y_train, NUM_CLASSES)[:args.num_train]
+    y_test = keras.utils.to_categorical(y_test, NUM_CLASSES)[:args.num_test]
+
+    LOG.debug('x_train shape: %s', (x_train.shape,))
+    LOG.debug('x_test shape: %s', (x_test.shape,))
+
+    return x_train, y_train, x_test, y_test
+
+class SendMetrics(keras.callbacks.Callback):
+    '''
+    Keras callback to send metrics to NNI framework
+    '''
+    def on_epoch_end(self, epoch, logs={}):
+        '''
+        Run on end of each epoch
+        '''
+        LOG.debug(logs)
+        nni.report_intermediate_result(logs)
+
+def train(args, params):
+    '''
+    Train model
+    '''
+    x_train, y_train, x_test, y_test = load_mnist_data(args)
+    model = create_mnist_model(params)
+
+    model.fit(x_train, y_train, batch_size=args.batch_size, epochs=args.epochs, verbose=1,
+              validation_data=(x_test, y_test), callbacks=[SendMetrics()])
+
+    _, acc = model.evaluate(x_test, y_test, verbose=0)
+    LOG.debug('Final result is: %g', acc)
+    nni.report_final_result(acc)
+
+def generate_default_params():
+    '''
+    Generate default hyper parameters
+    '''
+    return {
+        'optimizer': 'Adam',
+        'learning_rate': 0.001
+    }
+
+if __name__ == '__main__':
+    PARSER = argparse.ArgumentParser()
+    PARSER.add_argument("--batch_size", type=int, default=200, help="batch size", required=False)
+    PARSER.add_argument("--epochs", type=int, default=10, help="Train epochs", required=False)
+    PARSER.add_argument("--num_train", type=int, default=1000, help="Number of train samples to be used, maximum 60000", required=False)
+    PARSER.add_argument("--num_test", type=int, default=1000, help="Number of test samples to be used, maximum 10000", required=False)
+
+    ARGS, UNKNOWN = PARSER.parse_known_args()
+
+    try:
+        # get parameters from tuner
+        RECEIVED_PARAMS = nni.get_next_parameter()
+        LOG.debug(RECEIVED_PARAMS)
+        PARAMS = generate_default_params()
+        PARAMS.update(RECEIVED_PARAMS)
+        # train
+        train(ARGS, PARAMS)
+    except Exception as e:
+        LOG.exception(e)
+        raise
+
+```
\ No newline at end of file
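Taken together, the four numbered steps in the README collapse into one short call sequence. Below is a minimal, framework-free sketch of that sequence under the renamed API; the metric values are placeholders rather than output from a real model:

```python
import nni

def generate_default_params():
    # fallback values, used for any key the tuner does not supply
    return {'optimizer': 'Adam', 'learning_rate': 0.001}

if __name__ == '__main__':
    PARAMS = generate_default_params()
    # step 2: ask the tuner for this trial's hyper-parameters
    RECEIVED_PARAMS = nni.get_next_parameter()
    if RECEIVED_PARAMS is not None:
        PARAMS.update(RECEIVED_PARAMS)
    for epoch in range(3):
        # step 3: report a per-epoch metric, consumed by the assessor
        nni.report_intermediate_result(0.5 + 0.1 * epoch)
    # step 4: report the final metric, consumed by the tuner
    nni.report_final_result(0.8)
```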
diff --git a/examples/trials/auto-gbdt/main.py b/examples/trials/auto-gbdt/main.py
index 85489a312b..ce8abe4e27 100644
--- a/examples/trials/auto-gbdt/main.py
+++ b/examples/trials/auto-gbdt/main.py
@@ -97,7 +97,7 @@ def run(lgb_train, lgb_eval, params, X_test, y_test):
 
     try:
         # get parameters from tuner
-        RECEIVED_PARAMS = nni.get_parameters()
+        RECEIVED_PARAMS = nni.get_next_parameter()
         LOG.debug(RECEIVED_PARAMS)
         PARAMS = get_default_parameters()
         PARAMS.update(RECEIVED_PARAMS)
diff --git a/examples/trials/ga_squad/trial.py b/examples/trials/ga_squad/trial.py
index b96805c9c7..4dbfdc6b30 100644
--- a/examples/trials/ga_squad/trial.py
+++ b/examples/trials/ga_squad/trial.py
@@ -436,7 +436,7 @@ def load_data():
 
     qp_pairs, dev_qp_pairs = load_data()
     logger.debug('Init finish.')
-    original_params = nni.get_parameters()
+    original_params = nni.get_next_parameter()
     '''
     with open('data.json') as f:
         original_params = json.load(f)
diff --git a/examples/trials/mnist-annotation/mnist.py b/examples/trials/mnist-annotation/mnist.py
index f99a7cd323..69ef283336 100644
--- a/examples/trials/mnist-annotation/mnist.py
+++ b/examples/trials/mnist-annotation/mnist.py
@@ -229,6 +229,7 @@ def generate_defualt_params():
 
 
 if __name__ == '__main__':
+    '''@nni.get_next_parameter()'''
     try:
         main(generate_defualt_params())
     except Exception as exception:
diff --git a/examples/trials/mnist-batch-tune-keras/mnist-keras.py b/examples/trials/mnist-batch-tune-keras/mnist-keras.py
index 87c2114991..133a52b25a 100644
--- a/examples/trials/mnist-batch-tune-keras/mnist-keras.py
+++ b/examples/trials/mnist-batch-tune-keras/mnist-keras.py
@@ -122,7 +122,7 @@ def generate_default_params():
     try:
         # get parameters from tuner
        # RECEIVED_PARAMS = {"optimizer": "Adam", "learning_rate": 0.00001}
-        RECEIVED_PARAMS = nni.get_parameters()
+        RECEIVED_PARAMS = nni.get_next_parameter()
         LOG.debug(RECEIVED_PARAMS)
         PARAMS = generate_default_params()
         PARAMS.update(RECEIVED_PARAMS)
diff --git a/examples/trials/mnist-cascading-search-space/mnist.py b/examples/trials/mnist-cascading-search-space/mnist.py
index bd6dd35a5c..8b4aacd9b9 100644
--- a/examples/trials/mnist-cascading-search-space/mnist.py
+++ b/examples/trials/mnist-cascading-search-space/mnist.py
@@ -149,7 +149,7 @@ def parse_init_json(data):
 
 if __name__ == '__main__':
     try:
         # get parameters form tuner
-        data = nni.get_parameters()
+        data = nni.get_next_parameter()
         logger.debug(data)
         RCV_PARAMS = parse_init_json(data)
diff --git a/examples/trials/mnist-keras/mnist-keras.py b/examples/trials/mnist-keras/mnist-keras.py
index a21d002841..27e26e152b 100644
--- a/examples/trials/mnist-keras/mnist-keras.py
+++ b/examples/trials/mnist-keras/mnist-keras.py
@@ -120,7 +120,7 @@ def generate_default_params():
 
     try:
         # get parameters from tuner
-        RECEIVED_PARAMS = nni.get_parameters()
+        RECEIVED_PARAMS = nni.get_next_parameter()
         LOG.debug(RECEIVED_PARAMS)
         PARAMS = generate_default_params()
         PARAMS.update(RECEIVED_PARAMS)
diff --git a/examples/trials/mnist/mnist.py b/examples/trials/mnist/mnist.py
index 36f4bfe910..d5c6347b5a 100644
--- a/examples/trials/mnist/mnist.py
+++ b/examples/trials/mnist/mnist.py
@@ -219,7 +219,7 @@ def generate_default_params():
 if __name__ == '__main__':
     try:
         # get parameters form tuner
-        RCV_PARAMS = nni.get_parameters()
+        RCV_PARAMS = nni.get_next_parameter()
         logger.debug(RCV_PARAMS)
         # run
         params = generate_default_params()
diff --git a/examples/trials/pytorch_cifar10/main.py b/examples/trials/pytorch_cifar10/main.py
index 1b1ec7b8e1..42e836fb8e 100644
--- a/examples/trials/pytorch_cifar10/main.py
+++ b/examples/trials/pytorch_cifar10/main.py
@@ -175,7 +175,7 @@ def test(epoch):
 
 
 if __name__ == '__main__':
     try:
-        RCV_CONFIG = nni.get_parameters()
+        RCV_CONFIG = nni.get_next_parameter()
         #RCV_CONFIG = {'lr': 0.1, 'optimizer': 'Adam', 'model':'senet18'}
         _logger.debug(RCV_CONFIG)
diff --git a/examples/trials/sklearn/classification/main.py b/examples/trials/sklearn/classification/main.py
index 537849d5bf..92bdd8219d 100644
--- a/examples/trials/sklearn/classification/main.py
+++ b/examples/trials/sklearn/classification/main.py
@@ -71,7 +71,7 @@ def run(X_train, X_test, y_train, y_test, PARAMS):
 
     try:
         # get parameters from tuner
-        RECEIVED_PARAMS = nni.get_parameters()
+        RECEIVED_PARAMS = nni.get_next_parameter()
         LOG.debug(RECEIVED_PARAMS)
         PARAMS = get_default_parameters()
         PARAMS.update(RECEIVED_PARAMS)
diff --git a/examples/trials/sklearn/regression/main.py b/examples/trials/sklearn/regression/main.py
index 0a8876887f..1e290f21df 100644
--- a/examples/trials/sklearn/regression/main.py
+++ b/examples/trials/sklearn/regression/main.py
@@ -90,7 +90,7 @@ def run(X_train, X_test, y_train, y_test, PARAMS):
 
     try:
         # get parameters from tuner
-        RECEIVED_PARAMS = nni.get_parameters()
+        RECEIVED_PARAMS = nni.get_next_parameter()
         LOG.debug(RECEIVED_PARAMS)
         PARAMS = get_default_parameters()
         PARAMS.update(RECEIVED_PARAMS)
diff --git a/src/sdk/pynni/nni/platform/local.py b/src/sdk/pynni/nni/platform/local.py
index e6da1d0126..032c18e71e 100644
--- a/src/sdk/pynni/nni/platform/local.py
+++ b/src/sdk/pynni/nni/platform/local.py
@@ -49,13 +49,18 @@ def request_next_parameter():
     })
     send_metric(metric)
 
-def get_parameters():
+def get_next_parameter():
     global _param_index
     params_file_name = ''
     if _multiphase and (_multiphase == 'true' or _multiphase == 'True'):
         params_file_name = ('parameter_{}.cfg'.format(_param_index), 'parameter.cfg')[_param_index == 0]
     else:
-        params_file_name = 'parameter.cfg'
+        if _param_index > 0:
+            return None
+        elif _param_index == 0:
+            params_file_name = 'parameter.cfg'
+        else:
+            raise AssertionError('_param_index value ({}) should be >= 0'.format(_param_index))
 
     params_filepath = os.path.join(_sysdir, params_file_name)
     if not os.path.isfile(params_filepath):
diff --git a/src/sdk/pynni/nni/platform/standalone.py b/src/sdk/pynni/nni/platform/standalone.py
index 9fa1e947e5..f1236f61ea 100644
--- a/src/sdk/pynni/nni/platform/standalone.py
+++ b/src/sdk/pynni/nni/platform/standalone.py
@@ -22,7 +22,7 @@
 import json_tricks
 
 
-def get_parameters():
+def get_next_parameter():
     pass
 
 def get_sequence_id():
diff --git a/src/sdk/pynni/nni/platform/test.py b/src/sdk/pynni/nni/platform/test.py
index 8f896e09cf..1a87de5e2c 100644
--- a/src/sdk/pynni/nni/platform/test.py
+++ b/src/sdk/pynni/nni/platform/test.py
@@ -29,7 +29,7 @@
 _last_metric = None
 
 
-def get_parameters():
+def get_next_parameter():
     return _params
 
 def send_metric(string):
diff --git a/src/sdk/pynni/nni/smartparam.py b/src/sdk/pynni/nni/smartparam.py
index ca035be575..87ca91b8f8 100644
--- a/src/sdk/pynni/nni/smartparam.py
+++ b/src/sdk/pynni/nni/smartparam.py
@@ -126,4 +126,4 @@ def _get_param(func, name):
     if name is None:
         name = '__line{:d}'.format(lineno)
     key = '{}/{}/{}'.format(module, name, func)
-    return trial.get_parameter(key)
+    return trial.get_current_parameter(key)
diff --git a/src/sdk/pynni/nni/trial.py b/src/sdk/pynni/nni/trial.py
index cbfd85e85a..35d0397795 100644
--- a/src/sdk/pynni/nni/trial.py
+++ b/src/sdk/pynni/nni/trial.py
@@ -26,7 +26,8 @@
 
 
 __all__ = [
-    'get_parameters',
+    'get_next_parameter',
+    'get_current_parameter',
     'report_intermediate_result',
     'report_final_result',
     'get_sequence_id'
@@ -37,15 +38,18 @@
 _sequence_id = platform.get_sequence_id()
 
 
-def get_parameters():
+def get_next_parameter():
     """Returns a set of (hyper-)paremeters generated by Tuner."""
     global _params
-    _params = platform.get_parameters()
+    _params = platform.get_next_parameter()
+    if _params is None:
+        return None
     return _params['parameters']
 
-
-def get_parameter(tag):
-    return get_parameters()[tag]
+def get_current_parameter(tag):
+    if _params is None:
+        return None
+    return _params['parameters'][tag]
 
 def get_sequence_id():
     return _sequence_id
@@ -57,7 +61,7 @@ def report_intermediate_result(metric):
     metric: serializable object.
     """
     global _intermediate_seq
-    assert _params is not None, 'nni.get_parameters() needs to be called before report_intermediate_result'
+    assert _params is not None, 'nni.get_next_parameter() needs to be called before report_intermediate_result'
     metric = json_tricks.dumps({
         'parameter_id': _params['parameter_id'],
         'trial_job_id': env_args.trial_job_id,
@@ -73,7 +77,7 @@ def report_final_result(metric):
     """Reports final result to tuner.
     metric: serializable object.
     """
-    assert _params is not None, 'nni.get_parameters() needs to be called before report_final_result'
+    assert _params is not None, 'nni.get_next_parameter() needs to be called before report_final_result'
     metric = json_tricks.dumps({
         'parameter_id': _params['parameter_id'],
         'trial_job_id': env_args.trial_job_id,
diff --git a/src/sdk/pynni/tests/test_trial.py b/src/sdk/pynni/tests/test_trial.py
index de3bb2b77a..f7f854123b 100644
--- a/src/sdk/pynni/tests/test_trial.py
+++ b/src/sdk/pynni/tests/test_trial.py
@@ -32,8 +32,8 @@ def setUp(self):
         self._trial_params = { 'msg': 'hi', 'x': 123, 'dict': { 'key': 'value', 'y': None } }
         nni.trial._params = { 'parameter_id': 'test_param', 'parameters': self._trial_params }
 
-    def test_get_parameters(self):
-        self.assertEqual(nni.get_parameters(), self._trial_params)
+    def test_get_next_parameter(self):
+        self.assertEqual(nni.get_next_parameter(), self._trial_params)
 
     def test_report_intermediate_result(self):
         nni.report_intermediate_result(123)
diff --git a/test/naive_test/naive_trial.py b/test/naive_test/naive_trial.py
index 1512e9c72c..ce8b14fafe 100644
--- a/test/naive_test/naive_trial.py
+++ b/test/naive_test/naive_trial.py
@@ -2,7 +2,7 @@
 
 import nni
 
-params = nni.get_parameters()
+params = nni.get_next_parameter()
 print('params:', params)
 
 x = params['x']
diff --git a/tools/nni_annotation/code_generator.py b/tools/nni_annotation/code_generator.py
index b1ca3fc87b..215bbf4cde 100644
--- a/tools/nni_annotation/code_generator.py
+++ b/tools/nni_annotation/code_generator.py
@@ -196,8 +196,9 @@ def _visit_string(self, node):
         else:
             return node  # not an annotation, ignore it
 
-        if string.startswith('@nni.report_intermediate_result(') \
-                or string.startswith('@nni.report_final_result('):
+        if string.startswith('@nni.report_intermediate_result(') \
+                or string.startswith('@nni.report_final_result(') \
+                or string.startswith('@nni.get_next_parameter('):
             return parse_annotation(string[1:])  # expand annotation string to code
 
         if string.startswith('@nni.variable(') \
diff --git a/tools/nni_annotation/examples/mnist_with_annotation.py b/tools/nni_annotation/examples/mnist_with_annotation.py
index f1dea8e051..55d09c7c27 100644
--- a/tools/nni_annotation/examples/mnist_with_annotation.py
+++ b/tools/nni_annotation/examples/mnist_with_annotation.py
@@ -247,6 +247,7 @@ def generate_defualt_params():
 
 
 if __name__ == '__main__':
+    """@nni.get_next_parameter()"""
     try:
         main(generate_defualt_params())
     except Exception as exception:
diff --git a/tools/nni_annotation/testcase/annotated/mnist.py b/tools/nni_annotation/testcase/annotated/mnist.py
index edcf118023..c8303f1a2c 100644
--- a/tools/nni_annotation/testcase/annotated/mnist.py
+++ b/tools/nni_annotation/testcase/annotated/mnist.py
@@ -161,6 +161,7 @@ def generate_default_params():
 
 
 if __name__ == '__main__':
+    nni.get_next_parameter()
     try:
         params = generate_default_params()
         logger.debug('params')
diff --git a/tools/nni_annotation/testcase/usercode/mnist.py b/tools/nni_annotation/testcase/usercode/mnist.py
index 55a51db116..d640ae8a19 100644
--- a/tools/nni_annotation/testcase/usercode/mnist.py
+++ b/tools/nni_annotation/testcase/usercode/mnist.py
@@ -198,6 +198,7 @@ def generate_default_params():
 
     #original_params = parse_init_json(FLAGS.init_file_path, {})
     #pipe_interface.set_params_to_env()
+    '''@nni.get_next_parameter()'''
    try:
         params = generate_default_params()
         logger.debug('params')
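For the annotation-based trials touched above, the same call sequence is written as string annotations, with ```'''@nni.get_next_parameter()'''``` placed ahead of every other annotation. A minimal sketch assembled from the annotations documented in `docs/AnnotationSpec.md` follows; ```run_training``` is a hypothetical stand-in for a real training routine:

```python
def run_training(conv_size):
    # hypothetical stand-in for a real training routine
    return 1.0 / conv_size

'''@nni.get_next_parameter()'''
# the annotation above must come before any other NNI annotation

'''@nni.variable(nni.choice(2, 3, 5, 7), name=conv_size)'''
conv_size = 5  # the tuner's choice replaces this default after expansion

test_acc = run_training(conv_size)
'''@nni.report_intermediate_result(test_acc)'''
'''@nni.report_final_result(test_acc)'''
```

As plain Python these annotations are inert string literals; the annotation tool rewrites them into the corresponding API calls, which is exactly the branch added to `tools/nni_annotation/code_generator.py` above for `@nni.get_next_parameter`.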