From e5034036db0debb22e3c80e387fc54dd3d2fcb1d Mon Sep 17 00:00:00 2001 From: Lijiaoa <61399850+Lijiaoa@users.noreply.github.com> Date: Fri, 11 Sep 2020 11:55:44 +0800 Subject: [PATCH 1/5] turn off function no-use-before-define rule (#2880) Co-authored-by: Lijiao --- src/webui/.eslintrc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/webui/.eslintrc b/src/webui/.eslintrc index 5208c1ad4b..548a2811f5 100644 --- a/src/webui/.eslintrc +++ b/src/webui/.eslintrc @@ -22,6 +22,7 @@ "@typescript-eslint/no-namespace": 0, "@typescript-eslint/consistent-type-assertions": 0, "@typescript-eslint/no-inferrable-types": 0, + "@typescript-eslint/no-use-before-define": [2, "nofunc"], "no-inner-declarations": 0, "@typescript-eslint/no-var-requires": 0, "react/display-name": 0 From 7d784d05606d0a5fb37769a3948b85d57daa9f14 Mon Sep 17 00:00:00 2001 From: Yuge Zhang Date: Fri, 11 Sep 2020 14:37:59 +0800 Subject: [PATCH 2/5] Fix pipeline pygments --- azure-pipelines.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 78917d879f..3ca9d08a21 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -27,6 +27,7 @@ jobs: - script: | set -e sudo apt-get install -y pandoc + python3 -m pip install pygments --user --upgrade python3 -m pip install torch==1.5.0+cpu torchvision==0.6.0+cpu -f https://download.pytorch.org/whl/torch_stable.html --user python3 -m pip install tensorflow==2.2.0 --user python3 -m pip install keras==2.4.2 --user From ebca3cec44e32527a5df6f351ea9c540537610b5 Mon Sep 17 00:00:00 2001 From: J-shang <33053116+J-shang@users.noreply.github.com> Date: Mon, 14 Sep 2020 09:29:49 +0800 Subject: [PATCH 3/5] support annotation in python 3.8 (#2881) Co-authored-by: Ning Shang --- tools/nni_annotation/code_generator.py | 31 ++++++++++--------- .../nni_annotation/search_space_generator.py | 10 +++--- .../nni_annotation/specific_code_generator.py | 22 +++++++------ tools/nni_annotation/utils.py | 15 +++++++++ 4 files changed, 49 insertions(+), 29 deletions(-) create mode 100644 tools/nni_annotation/utils.py diff --git a/tools/nni_annotation/code_generator.py b/tools/nni_annotation/code_generator.py index 985f72fcd7..d51cd82542 100644 --- a/tools/nni_annotation/code_generator.py +++ b/tools/nni_annotation/code_generator.py @@ -4,6 +4,7 @@ import ast import astor +from .utils import ast_Num, ast_Str # pylint: disable=unidiomatic-typecheck @@ -37,13 +38,13 @@ def parse_annotation_mutable_layers(code, lineno, nas_mode): for call in value.elts: assert type(call) is ast.Call, 'Element in layer_choice should be function call' call_name = astor.to_source(call).strip() - call_funcs_keys.append(ast.Str(s=call_name)) + call_funcs_keys.append(ast_Str(s=call_name)) call_funcs_values.append(call.func) assert not call.args, 'Number of args without keyword should be zero' kw_args = [] kw_values = [] for kw in call.keywords: - kw_args.append(ast.Str(s=kw.arg)) + kw_args.append(ast_Str(s=kw.arg)) kw_values.append(kw.value) call_kwargs_values.append(ast.Dict(keys=kw_args, values=kw_values)) call_funcs = ast.Dict(keys=call_funcs_keys, values=call_funcs_values) @@ -57,12 +58,12 @@ def parse_annotation_mutable_layers(code, lineno, nas_mode): elif k.id == 'optional_inputs': assert not fields['optional_inputs'], 'Duplicated field: optional_inputs' assert type(value) is ast.List, 'Value of optional_inputs should be a list' - var_names = [ast.Str(s=astor.to_source(var).strip()) for var in value.elts] + var_names = [ast_Str(s=astor.to_source(var).strip()) for var in value.elts] 
optional_inputs = ast.Dict(keys=var_names, values=value.elts) fields['optional_inputs'] = True elif k.id == 'optional_input_size': assert not fields['optional_input_size'], 'Duplicated field: optional_input_size' - assert type(value) is ast.Num or type(value) is ast.List, \ + assert type(value) is ast_Num or type(value) is ast.List, \ 'Value of optional_input_size should be a number or list' optional_input_size = value fields['optional_input_size'] = True @@ -79,8 +80,8 @@ def parse_annotation_mutable_layers(code, lineno, nas_mode): mutable_layer_id = 'mutable_layer_' + str(mutable_layer_cnt) mutable_layer_cnt += 1 target_call_attr = ast.Attribute(value=ast.Name(id='nni', ctx=ast.Load()), attr='mutable_layer', ctx=ast.Load()) - target_call_args = [ast.Str(s=mutable_id), - ast.Str(s=mutable_layer_id), + target_call_args = [ast_Str(s=mutable_id), + ast_Str(s=mutable_layer_id), call_funcs, call_kwargs] if fields['fixed_inputs']: @@ -93,8 +94,8 @@ def parse_annotation_mutable_layers(code, lineno, nas_mode): target_call_args.append(optional_input_size) else: target_call_args.append(ast.Dict(keys=[], values=[])) - target_call_args.append(ast.Num(n=0)) - target_call_args.append(ast.Str(s=nas_mode)) + target_call_args.append(ast_Num(n=0)) + target_call_args.append(ast_Str(s=nas_mode)) if nas_mode in ['enas_mode', 'oneshot_mode', 'darts_mode']: target_call_args.append(ast.Name(id='tensorflow')) target_call = ast.Call(func=target_call_attr, args=target_call_args, keywords=[]) @@ -151,7 +152,7 @@ def parse_nni_variable(code): assert arg.func.value.id == 'nni', 'nni.variable value is not a NNI function' name_str = astor.to_source(name).strip() - keyword_arg = ast.keyword(arg='name', value=ast.Str(s=name_str)) + keyword_arg = ast.keyword(arg='name', value=ast_Str(s=name_str)) arg.keywords.append(keyword_arg) if arg.func.attr == 'choice': convert_args_to_dict(arg) @@ -169,7 +170,7 @@ def parse_nni_function(code): convert_args_to_dict(call, with_lambda=True) name_str = astor.to_source(name).strip() - call.keywords[0].value = ast.Str(s=name_str) + call.keywords[0].value = ast_Str(s=name_str) return call, funcs @@ -180,12 +181,12 @@ def convert_args_to_dict(call, with_lambda=False): """ keys, values = list(), list() for arg in call.args: - if type(arg) in [ast.Str, ast.Num]: + if type(arg) in [ast_Str, ast_Num]: arg_value = arg else: # if arg is not a string or a number, we use its source code as the key arg_value = astor.to_source(arg).strip('\n"') - arg_value = ast.Str(str(arg_value)) + arg_value = ast_Str(str(arg_value)) arg = make_lambda(arg) if with_lambda else arg keys.append(arg_value) values.append(arg) @@ -209,7 +210,7 @@ def test_variable_equal(node1, node2): return False if isinstance(node1, ast.AST): for k, v in vars(node1).items(): - if k in ('lineno', 'col_offset', 'ctx'): + if k in ('lineno', 'col_offset', 'ctx', 'end_lineno', 'end_col_offset'): continue if not test_variable_equal(v, getattr(node2, k)): return False @@ -282,7 +283,7 @@ def visit(self, node): annotation = self.stack[-1] # this is a standalone string, may be an annotation - if type(node) is ast.Expr and type(node.value) is ast.Str: + if type(node) is ast.Expr and type(node.value) is ast_Str: # must not annotate an annotation string assert annotation is None, 'Annotating an annotation' return self._visit_string(node) @@ -306,7 +307,7 @@ def _visit_string(self, node): if string.startswith('@nni.training_update'): expr = parse_annotation(string[1:]) call_node = expr.value - call_node.args.insert(0, ast.Str(s=self.nas_mode)) + 
call_node.args.insert(0, ast_Str(s=self.nas_mode)) return expr if string.startswith('@nni.report_intermediate_result') \ diff --git a/tools/nni_annotation/search_space_generator.py b/tools/nni_annotation/search_space_generator.py index df9e2e498b..0bfe2cb71d 100644 --- a/tools/nni_annotation/search_space_generator.py +++ b/tools/nni_annotation/search_space_generator.py @@ -6,6 +6,8 @@ import astor +from .utils import ast_Num, ast_Str + # pylint: disable=unidiomatic-typecheck @@ -44,7 +46,7 @@ def generate_mutable_layer_search_space(self, args): self.search_space[key]['_value'][mutable_layer] = { 'layer_choice': [k.s for k in args[2].keys], 'optional_inputs': [k.s for k in args[5].keys], - 'optional_input_size': args[6].n if isinstance(args[6], ast.Num) else [args[6].elts[0].n, args[6].elts[1].n] + 'optional_input_size': args[6].n if isinstance(args[6], ast_Num) else [args[6].elts[0].n, args[6].elts[1].n] } def visit_Call(self, node): # pylint: disable=invalid-name @@ -73,7 +75,7 @@ def visit_Call(self, node): # pylint: disable=invalid-name # there is a `name` argument assert len(node.keywords) == 1, 'Smart parameter has keyword argument other than "name"' assert node.keywords[0].arg == 'name', 'Smart paramater\'s keyword argument is not "name"' - assert type(node.keywords[0].value) is ast.Str, 'Smart parameter\'s name must be string literal' + assert type(node.keywords[0].value) is ast_Str, 'Smart parameter\'s name must be string literal' name = node.keywords[0].value.s specified_name = True else: @@ -86,7 +88,7 @@ def visit_Call(self, node): # pylint: disable=invalid-name # we will use keys in the dict as the choices, which is generated by code_generator according to the args given by user assert len(node.args) == 1, 'Smart parameter has arguments other than dict' # check if it is a number or a string and get its value accordingly - args = [key.n if type(key) is ast.Num else key.s for key in node.args[0].keys] + args = [key.n if type(key) is ast_Num else key.s for key in node.args[0].keys] else: # arguments of other functions must be literal number assert all(isinstance(ast.literal_eval(astor.to_source(arg)), numbers.Real) for arg in node.args), \ @@ -95,7 +97,7 @@ def visit_Call(self, node): # pylint: disable=invalid-name key = self.module_name + '/' + name + '/' + func # store key in ast.Call - node.keywords.append(ast.keyword(arg='key', value=ast.Str(s=key))) + node.keywords.append(ast.keyword(arg='key', value=ast_Str(s=key))) if func == 'function_choice': func = 'choice' diff --git a/tools/nni_annotation/specific_code_generator.py b/tools/nni_annotation/specific_code_generator.py index 9eb0e82af8..511f7dba05 100644 --- a/tools/nni_annotation/specific_code_generator.py +++ b/tools/nni_annotation/specific_code_generator.py @@ -5,6 +5,8 @@ import astor from nni_cmd.common_utils import print_warning +from .utils import ast_Num, ast_Str + # pylint: disable=unidiomatic-typecheck para_cfg = None @@ -134,7 +136,7 @@ def parse_nni_variable(code): assert arg.func.value.id == 'nni', 'nni.variable value is not a NNI function' name_str = astor.to_source(name).strip() - keyword_arg = ast.keyword(arg='name', value=ast.Str(s=name_str)) + keyword_arg = ast.keyword(arg='name', value=ast_Str(s=name_str)) arg.keywords.append(keyword_arg) if arg.func.attr == 'choice': convert_args_to_dict(arg) @@ -152,7 +154,7 @@ def parse_nni_function(code): convert_args_to_dict(call, with_lambda=True) name_str = astor.to_source(name).strip() - call.keywords[0].value = ast.Str(s=name_str) + call.keywords[0].value = 
ast_Str(s=name_str) return call, funcs @@ -163,12 +165,12 @@ def convert_args_to_dict(call, with_lambda=False): """ keys, values = list(), list() for arg in call.args: - if type(arg) in [ast.Str, ast.Num]: + if type(arg) in [ast_Str, ast_Num]: arg_value = arg else: # if arg is not a string or a number, we use its source code as the key arg_value = astor.to_source(arg).strip('\n"') - arg_value = ast.Str(str(arg_value)) + arg_value = ast_Str(str(arg_value)) arg = make_lambda(arg) if with_lambda else arg keys.append(arg_value) values.append(arg) @@ -192,7 +194,7 @@ def test_variable_equal(node1, node2): return False if isinstance(node1, ast.AST): for k, v in vars(node1).items(): - if k in ('lineno', 'col_offset', 'ctx'): + if k in ('lineno', 'col_offset', 'ctx', 'end_lineno', 'end_col_offset'): continue if not test_variable_equal(v, getattr(node2, k)): return False @@ -264,7 +266,7 @@ def visit(self, node): annotation = self.stack[-1] # this is a standalone string, may be an annotation - if type(node) is ast.Expr and type(node.value) is ast.Str: + if type(node) is ast.Expr and type(node.value) is ast_Str: # must not annotate an annotation string assert annotation is None, 'Annotating an annotation' return self._visit_string(node) @@ -290,23 +292,23 @@ def _visit_string(self, node): "Please remove this line in the trial code." print_warning(deprecated_message) return ast.Expr(value=ast.Call(func=ast.Name(id='print', ctx=ast.Load()), - args=[ast.Str(s='Get next parameter here...')], keywords=[])) + args=[ast_Str(s='Get next parameter here...')], keywords=[])) if string.startswith('@nni.training_update'): return ast.Expr(value=ast.Call(func=ast.Name(id='print', ctx=ast.Load()), - args=[ast.Str(s='Training update here...')], keywords=[])) + args=[ast_Str(s='Training update here...')], keywords=[])) if string.startswith('@nni.report_intermediate_result'): module = ast.parse(string[1:]) arg = module.body[0].value.args[0] return ast.Expr(value=ast.Call(func=ast.Name(id='print', ctx=ast.Load()), - args=[ast.Str(s='nni.report_intermediate_result: '), arg], keywords=[])) + args=[ast_Str(s='nni.report_intermediate_result: '), arg], keywords=[])) if string.startswith('@nni.report_final_result'): module = ast.parse(string[1:]) arg = module.body[0].value.args[0] return ast.Expr(value=ast.Call(func=ast.Name(id='print', ctx=ast.Load()), - args=[ast.Str(s='nni.report_final_result: '), arg], keywords=[])) + args=[ast_Str(s='nni.report_final_result: '), arg], keywords=[])) if string.startswith('@nni.mutable_layers'): return parse_annotation_mutable_layers(string[1:], node.lineno) diff --git a/tools/nni_annotation/utils.py b/tools/nni_annotation/utils.py new file mode 100644 index 0000000000..04c1a59063 --- /dev/null +++ b/tools/nni_annotation/utils.py @@ -0,0 +1,15 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT license. 
+
+import ast
+from sys import version_info
+
+
+if version_info >= (3, 8):
+    ast_Num = ast_Str = ast_Bytes = ast_NameConstant = ast_Ellipsis = ast.Constant
+else:
+    ast_Num = ast.Num
+    ast_Str = ast.Str
+    ast_Bytes = ast.Bytes
+    ast_NameConstant = ast.NameConstant
+    ast_Ellipsis = ast.Ellipsis

From bc71a0d6409803f227d6e8bc680467719a014746 Mon Sep 17 00:00:00 2001
From: lin bin <756691769@qq.com>
Date: Mon, 14 Sep 2020 13:33:44 +0800
Subject: [PATCH 4/5] Update QuickStart.md (#2884)

---
 docs/en_US/Compressor/QuickStart.md | 29 +++++++++++++++++++++++++++--
 1 file changed, 27 insertions(+), 2 deletions(-)

diff --git a/docs/en_US/Compressor/QuickStart.md b/docs/en_US/Compressor/QuickStart.md
index b47175e832..c40aa00655 100644
--- a/docs/en_US/Compressor/QuickStart.md
+++ b/docs/en_US/Compressor/QuickStart.md
@@ -123,7 +123,7 @@ It means following the algorithm's default setting for compressed operations wit
 
 #### Quantization specific keys
 
-**If you use quantization algorithms, you need to specify more keys. If you use pruning algorithms, you can safely skip these keys**
+Besides the keys explained above, if you use quantization algorithms, you need to specify more keys in `config_list`, which are explained below.
 
 * __quant_types__ : list of string.
 
@@ -148,6 +148,31 @@ when the value is int type, all quantization types share same bits length. eg. 
 }
 ```
 
+The following example shows a more complete `config_list` that uses `op_names` (or `op_types`) to specify the target layers along with the quantization bits for those layers.
+```
+configure_list = [{
+        'quant_types': ['weight'],
+        'quant_bits': 8,
+        'op_names': ['conv1']
+    }, {
+        'quant_types': ['weight'],
+        'quant_bits': 4,
+        'quant_start_step': 0,
+        'op_names': ['conv2']
+    }, {
+        'quant_types': ['weight'],
+        'quant_bits': 3,
+        'op_names': ['fc1']
+    },
+    {
+        'quant_types': ['weight'],
+        'quant_bits': 2,
+        'op_names': ['fc2']
+    }
+]
+```
+In this example, `op_names` selects the target layers by name, and the four listed layers are quantized with different `quant_bits`.
+
 ### APIs for Updating Fine Tuning Status
 
 Some compression algorithms use epochs to control the progress of compression (e.g. [AGP](https://nni.readthedocs.io/en/latest/Compressor/Pruner.html#agp-pruner)), and some algorithms need to do something after every minibatch. Therefore, we provide another two APIs for users to invoke: `pruner.update_epoch(epoch)` and `pruner.step()`.
 
@@ -168,4 +193,4 @@ pruner.export_model(model_path='model.pth')
 pruner.export_model(model_path='model.pth', mask_path='mask.pth', onnx_path='model.onnx', input_shape=[1, 1, 28, 28])
 ```
 
-If you want to really speed up the compressed model, please refer to [NNI model speedup](./ModelSpeedup.md) for details.
\ No newline at end of file
+If you want to really speed up the compressed model, please refer to [NNI model speedup](./ModelSpeedup.md) for details.
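To show how a quantization `config_list` like the one documented in the patch above is actually consumed, here is a minimal sketch. It assumes the `QAT_Quantizer` class from `nni.compression.torch` (the quantizer used elsewhere in NNI's compression examples); the toy model definition and the exact constructor signature are illustrative assumptions, not part of this patch.

```python
# Minimal sketch (assumed API): applying a quantization config_list like the
# QuickStart example above with NNI's QAT quantizer.
import torch.nn as nn
import torch.nn.functional as F
from nni.compression.torch import QAT_Quantizer  # import path assumed for this NNI release


class Net(nn.Module):
    """Toy model whose layer names match the op_names used in the config_list."""
    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(1, 20, 5)
        self.conv2 = nn.Conv2d(20, 50, 5)
        self.fc1 = nn.Linear(4 * 4 * 50, 500)
        self.fc2 = nn.Linear(500, 10)

    def forward(self, x):
        x = F.max_pool2d(F.relu(self.conv1(x)), 2)
        x = F.max_pool2d(F.relu(self.conv2(x)), 2)
        x = x.view(x.size(0), -1)
        return self.fc2(F.relu(self.fc1(x)))


config_list = [
    {'quant_types': ['weight'], 'quant_bits': 8, 'op_names': ['conv1']},
    {'quant_types': ['weight'], 'quant_bits': 4, 'quant_start_step': 0, 'op_names': ['conv2']},
    {'quant_types': ['weight'], 'quant_bits': 3, 'op_names': ['fc1']},
    {'quant_types': ['weight'], 'quant_bits': 2, 'op_names': ['fc2']},
]

model = Net()
# Constructor signature assumed; some NNI releases also expect the optimizer here.
quantizer = QAT_Quantizer(model, config_list)
quantizer.compress()  # wraps the listed layers so fake quantization is applied during fine-tuning
```

After `compress()`, training proceeds as usual and the wrapped layers simulate the reduced-precision weights specified by `quant_bits`.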
From 98a49b188ab1055b3913ea94cd16f8a2d67dc791 Mon Sep 17 00:00:00 2001
From: Junwei Sun <30487595+JunweiSUN@users.noreply.github.com>
Date: Mon, 14 Sep 2020 18:23:01 +0800
Subject: [PATCH 5/5] Update bash-completion (#2857)

---
 deployment/pypi/README.md                     |  4 +-
 .../en_US/CommunitySharings/AutoCompletion.md | 39 ++++++++++++
 .../CommunitySharings/community_sharings.rst  |  1 +
 docs/en_US/Tutorial/Nnictl.md                 | 19 +++++-
 docs/en_US/Tutorial/QuickStart.md             |  3 +
 tools/bash-completion                         | 59 ++++++++++---------
 6 files changed, 94 insertions(+), 31 deletions(-)
 create mode 100644 docs/en_US/CommunitySharings/AutoCompletion.md

diff --git a/deployment/pypi/README.md b/deployment/pypi/README.md
index 636222ee58..ecd1567109 100644
--- a/deployment/pypi/README.md
+++ b/deployment/pypi/README.md
@@ -11,7 +11,7 @@ This is the PyPI build and upload tool for NNI project.
     Ubuntu 16.04 LTS
     make
     wget
-    Python >= 3.5
+    Python >= 3.6
     Pip
     Node.js
     Yarn
@@ -45,7 +45,7 @@ This is the PyPI build and upload tool for NNI project.
     ```
     Windows 10
     powershell
-    Python >= 3.5
+    Python >= 3.6
    Pip
     Yarn
     ```
diff --git a/docs/en_US/CommunitySharings/AutoCompletion.md b/docs/en_US/CommunitySharings/AutoCompletion.md
new file mode 100644
index 0000000000..f7da5dabb2
--- /dev/null
+++ b/docs/en_US/CommunitySharings/AutoCompletion.md
@@ -0,0 +1,39 @@
+# Auto Completion for nnictl Commands
+
+NNI's command line tool __nnictl__ supports auto-completion, i.e., you can complete an nnictl command by pressing the `tab` key.
+
+For example, if the current command is
+```
+nnictl cre
+```
+pressing the `tab` key completes it to
+```
+nnictl create
+```
+
+For now, auto-completion is not enabled by default if you install NNI through `pip`, and it only works on Linux with the bash shell. If you want to enable this feature on your computer, please refer to the following steps:
+
+### Step 1. Download `bash-completion`
+```
+cd ~
+wget https://raw.githubusercontent.com/microsoft/nni/{nni-version}/tools/bash-completion
+```
+Here, {nni-version} should be replaced by the version of NNI, e.g., `master` or `v1.9`. You can also check the latest `bash-completion` script [here](https://github.com/microsoft/nni/blob/master/tools/bash-completion).
+
+### Step 2. Install the script
+If you are running as root and want to install this script for all users:
+```
+install -m644 ~/bash-completion /usr/share/bash-completion/completions/nnictl
+```
+If you just want to install this script for yourself:
+```
+mkdir -p ~/.bash_completion.d
+install -m644 ~/bash-completion ~/.bash_completion.d/nnictl
+echo '[[ -f ~/.bash_completion.d/nnictl ]] && source ~/.bash_completion.d/nnictl' >> ~/.bash_completion
+```
+
+### Step 3. Reopen your terminal
+Reopen your terminal and you should be able to use the auto-completion feature. Enjoy!
+
+### Step 4. Uninstall
+If you want to uninstall this feature, just revert the changes in the steps above.
diff --git a/docs/en_US/CommunitySharings/community_sharings.rst b/docs/en_US/CommunitySharings/community_sharings.rst index 20ab51e24e..7f4635eee0 100644 --- a/docs/en_US/CommunitySharings/community_sharings.rst +++ b/docs/en_US/CommunitySharings/community_sharings.rst @@ -13,3 +13,4 @@ Different from the tutorials and examples in the rest of the document which show Feature Engineering Performance measurement, comparison and analysis Use NNI on Google Colab + Auto Completion for nnictl Commands diff --git a/docs/en_US/Tutorial/Nnictl.md b/docs/en_US/Tutorial/Nnictl.md index adcc4b91e3..28084494a3 100644 --- a/docs/en_US/Tutorial/Nnictl.md +++ b/docs/en_US/Tutorial/Nnictl.md @@ -295,7 +295,7 @@ Debug mode will disable version check function in Trialkeeper. > update experiment's trial num ```bash - nnictl update trialnum --id [experiment_id] --value [trial_num] + nnictl update trialnum [experiment_id] --value [trial_num] ``` @@ -347,7 +347,7 @@ Debug mode will disable version check function in Trialkeeper. > kill trail job ```bash - nnictl trial [trial_id] --experiment [experiment_id] + nnictl trial kill [experiment_id] --trial_id [trial_id] ``` @@ -704,6 +704,21 @@ Debug mode will disable version check function in Trialkeeper. ### Manage webui * __nnictl webui url__ + * Description + + Show an experiment's webui url + + * Usage + + ```bash + nnictl webui url [options] + ``` + + * Options + + |Name, shorthand|Required|Default|Description| + |------|------|------ |------| + |id| False| |Experiment ID| ### Manage tensorboard diff --git a/docs/en_US/Tutorial/QuickStart.md b/docs/en_US/Tutorial/QuickStart.md index 982d3a5826..1265d968b8 100644 --- a/docs/en_US/Tutorial/QuickStart.md +++ b/docs/en_US/Tutorial/QuickStart.md @@ -27,6 +27,9 @@ python -m pip install --upgrade nni ```eval_rst .. Note:: For the system requirements of NNI, please refer to :doc:`Install NNI on Linux & Mac ` or :doc:`Windows `. ``` +### Enable NNI Command-line Auto-Completion (Optional) + +After the installation, you may want to enable the auto-completion feature for __nnictl__ commands. Please refer to this [tutorial](../CommunitySharings/AutoCompletion.md). 
## "Hello World" example on MNIST diff --git a/tools/bash-completion b/tools/bash-completion index 918686b6b0..8b5b8fb433 100644 --- a/tools/bash-completion +++ b/tools/bash-completion @@ -1,7 +1,7 @@ # list of commands/arguments -__nnictl_cmds="create resume view update stop trial experiment platform import export webui config log package tensorboard top" -__nnictl_create_cmds="--config --port --debug --watch" -__nnictl_resume_cmds="--port --debug --watch" +__nnictl_cmds="create resume view update stop trial experiment platform webui config log package tensorboard top ss_gen --version" +__nnictl_create_cmds="--config --port --debug --foreground" +__nnictl_resume_cmds="--port --debug --foreground" __nnictl_view_cmds="--port" __nnictl_update_cmds="searchspace concurrency duration trialnum" __nnictl_update_searchspace_cmds="--filename" @@ -9,30 +9,34 @@ __nnictl_update_concurrency_cmds="--value" __nnictl_update_duration_cmds="--value" __nnictl_update_trialnum_cmds="--value" __nnictl_stop_cmds="--port --all" -__nnictl_trial_cmds="ls kill codegen" +__nnictl_trial_cmds="ls kill" +__nnictl_trial_ls_cmds="--head --tail" __nnictl_trial_kill_cmds="--trial_id" -__nnictl_trial_codegen_cmds="--trial_id" -__nnictl_experiment_cmds="show status list delete" +__nnictl_experiment_cmds="show status list delete export import save load" __nnictl_experiment_list_cmds="--all" __nnictl_experiment_delete_cmds="--all" +__nnictl_experiment_export_cmds="--filename --type --intermediate" +__nnictl_experiment_import_cmds="--filename" +__nnictl_experiment_save_cmds="--path --saveCodeDir" +__nnictl_experiment_load_cmds="--path --codeDir --logDir" __nnictl_platform_cmds="clean" __nnictl_platform_clean_cmds="--config" -__nnictl_import_cmds="--filename" -__nnictl_export_cmds="--type --filename" __nnictl_webui_cmds="url" __nnictl_config_cmds="show" __nnictl_log_cmds="stdout stderr trial" __nnictl_log_stdout_cmds="--tail --head --path" __nnictl_log_stderr_cmds="--tail --head --path" __nnictl_log_trial_cmds="--trial_id" -__nnictl_package_cmds="install show" +__nnictl_package_cmds="install show list uninstall" __nnictl_package_install_cmds="--name" +__nnictl_package_list_cmds="--all" __nnictl_tensorboard_cmds="start stop" __nnictl_tensorboard_start_cmds="--trial_id --port" __nnictl_top_cmds="--time" +__nnictl_ss_gen_cmds="--trial_command --trial_dir --file" # list of commands that accept an experiment ID as second argument -__nnictl_2nd_expid_cmds=" resume view stop import export " +__nnictl_2nd_expid_cmds=" resume view stop top " # list of commands that accept an experiment ID as third argument __nnictl_3rd_expid_cmds=" update trial experiment webui config log tensorboard " @@ -41,10 +45,12 @@ __nnictl_3rd_expid_cmds=" update trial experiment webui config log tensorboard " __nnictl_remain_args() { local ret=${!1} # ret = $__nnictl_xxx_cmds - # for arg in COMP_WORDS[:-1]: - for arg in "${COMP_WORDS[@]::${#COMP_WORDS[@]}-1}"; do - local ret=${ret/$arg/} # remove it from $ret - done + # prevent that "--trial_id" changes to "--_id" in this situation: "nnictl trial kill --trial_id" + if [[ ${ret} != "--trial_id" ]]; then + for arg in "${COMP_WORDS[@]::${#COMP_WORDS[@]}-1}"; do + local ret=${ret/$arg/} # remove it from $ret + done + fi echo $ret } @@ -63,9 +69,8 @@ _nnictl() { local cur=${COMP_WORDS[-1]} local last=${COMP_WORDS[-2]} - if [[ ${#COMP_WORDS[@]} -eq 2 ]]; then - # completing frst argument from __nnictl_cmds + # completing first argument from __nnictl_cmds COMPREPLY=($(compgen -W "$__nnictl_cmds" -- "${COMP_WORDS[1]}")) 
elif [[ ${#COMP_WORDS[@]} -eq 3 ]]; then @@ -79,11 +84,11 @@ _nnictl() COMPREPLY+=($(compgen -W "$experiments" -- $cur)) fi - elif [[ $last != -* || $last == --debug ]]; then + elif [[ $last != -* || $last == --debug || $last == --foreground || $last == --intermediate || $last == --all ]]; then # last argument does not starts with "-", so this one is likely to be "--xxx" local args=__nnictl_${COMP_WORDS[1]}_${COMP_WORDS[2]}_cmds - if [[ $args =~ "-" || -z ${!args} ]]; then - # the second argument starts with "-", use __nnictl_${FirstArg}_cmds + if [[ $args =~ "-" || $__nnictl_2nd_expid_cmds =~ " ${COMP_WORDS[1]} " ]]; then + # the second argument starts with "-" or is an experiment id, use __nnictl_${FirstArg}_cmds local args=__nnictl_${COMP_WORDS[1]}_cmds fi # remove already set arguments from candidates @@ -93,12 +98,12 @@ _nnictl() # if this is 3rd arguments, try adding experiment IDs to candidates if [[ ${#COMP_WORDS[@]} -eq 4 ]]; then if [[ $__nnictl_3rd_expid_cmds =~ " ${COMP_WORDS[1]} " && ${COMP_WORDS[2]} != "list" ]]; then - local experiments=$(ls ~/nni/experiments 2>/dev/null) + local experiments=$(ls ~/nni-experiments 2>/dev/null) COMPREPLY+=($(compgen -W "$experiments" -- $cur)) fi fi - elif [[ ${COMP_WORDS[1]} == "export" ]]; then + elif [[ ${COMP_WORDS[2]} == "export" ]]; then # "export" command is somewhat unique if [[ " --type -t " =~ " $last " ]]; then COMPREPLY=($(compgen -W "json csv" -- $cur)) @@ -117,14 +122,14 @@ _nnictl() __nnictl_complete_extension "$ext" fi - elif [[ " --trial_id -t " =~ " $last " ]]; then + elif [[ " --trial_id -T " =~ " $last " ]]; then # complete trial ID - if [[ -e ${HOME}/nni/experiments/${COMP_WORDS[2]} ]]; then - local trials=$(ls -d ~/nni/experiments/${COMP_WORDS[2]}/trials/* 2>/dev/null | grep -o '[^/]*$') - elif [[ -e "${HOME}/nni/experiments/${COMP_WORDS[3]}" ]]; then - local trials=$(ls -d ~/nni/experiments/${COMP_WORDS[3]}/trials/* 2>/dev/null | grep -o '[^/]*$') + if [[ -e ${HOME}/nni-experiments/${COMP_WORDS[2]} ]]; then + local trials=$(ls -d ~/nni-experiments/${COMP_WORDS[2]}/trials/* 2>/dev/null | grep -o '[^/]*$') + elif [[ -e "${HOME}/nni-experiments/${COMP_WORDS[3]}" ]]; then + local trials=$(ls -d ~/nni-experiments/${COMP_WORDS[3]}/trials/* 2>/dev/null | grep -o '[^/]*$') else - local trials=$(ls -d ~/nni/experiments/*/trials/* 2>/dev/null | grep -o '[^/]*$') + local trials=$(ls -d ~/nni-experiments/*/trials/* 2>/dev/null | grep -o '[^/]*$') fi COMPREPLY=($(compgen -W "$trials" -- $cur))
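The bash-completion changes above close out the series; stepping back to patch 3, the `utils.py` shim it introduces is the key to Python 3.8 support. The sketch below (not part of any patch; names chosen to mirror `tools/nni_annotation/utils.py`) shows why the aliasing is needed: from Python 3.8 on, the parser emits `ast.Constant` nodes, so exact `type(...) is ast.Num` checks in the annotation tools stop matching unless the alias points at `ast.Constant`.

```python
# Minimal sketch of the compatibility pattern from patch 3's utils.py.
import ast
from sys import version_info

if version_info >= (3, 8):
    ast_Num = ast_Str = ast.Constant  # 3.8+ parses literals into ast.Constant
else:
    ast_Num = ast.Num
    ast_Str = ast.Str

node = ast.parse('42').body[0].value  # ast.Constant on 3.8+, ast.Num on older interpreters
assert type(node) is ast_Num          # the exact-type checks used by the annotation code keep working
print(node.n)                         # .n/.s stay readable via the Constant compatibility properties
```

The same reasoning applies to `ast_Str`, `ast_Bytes`, `ast_NameConstant`, and `ast_Ellipsis`, and to node construction: `ast_Str(s=...)` builds an `ast.Constant` on 3.8+ while remaining an `ast.Str` on older versions.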