diff --git a/awscli/__init__.py b/awscli/__init__.py index baa0402c0589..fda8431a68c9 100644 --- a/awscli/__init__.py +++ b/awscli/__init__.py @@ -15,8 +15,9 @@ ---- A Universal Command Line Environment for Amazon Web Services. """ -import os + import importlib.abc +import os import sys __version__ = '2.22.12' @@ -36,10 +37,18 @@ os.environ['AWS_DATA_PATH'] = os.pathsep.join(_awscli_data_path) -SCALAR_TYPES = set([ - 'string', 'float', 'integer', 'long', 'boolean', 'double', - 'blob', 'timestamp' -]) +SCALAR_TYPES = set( + [ + 'string', + 'float', + 'integer', + 'long', + 'boolean', + 'double', + 'blob', + 'timestamp', + ] +) COMPLEX_TYPES = set(['structure', 'map', 'list']) @@ -57,13 +66,14 @@ class TopLevelImportAliasFinder(importlib.abc.MetaPathFinder): Note: That this import alias only comes into affect if anything is imported from the awscli package. """ + _PACKAGES = [ 'botocore', 's3transfer', ] _TARGET_FINDERS = [ 'pyimod02_importers.PyiFrozenImporter', # Pyinstaller injected finder - '_frozen_importlib_external.PathFinder' # Built-in path finder + '_frozen_importlib_external.PathFinder', # Built-in path finder ] def __init__(self, underlying_finder): diff --git a/awscli/__main__.py b/awscli/__main__.py index 7d49ba7f871c..63263a3cb831 100644 --- a/awscli/__main__.py +++ b/awscli/__main__.py @@ -16,6 +16,5 @@ from awscli.clidriver import main - if __name__ == "__main__": sys.exit(main()) diff --git a/awscli/alias.py b/awscli/alias.py index 7c889c0215df..affef7c87c9c 100644 --- a/awscli/alias.py +++ b/awscli/alias.py @@ -15,12 +15,10 @@ import shlex import subprocess -from botocore.configloader import raw_config_parse - -from awscli.compat import compat_shell_quote from awscli.commands import CLICommand +from awscli.compat import compat_shell_quote from awscli.utils import emit_top_level_args_parsed_event - +from botocore.configloader import raw_config_parse LOG = logging.getLogger(__name__) @@ -41,10 +39,13 @@ class InvalidAliasException(Exception): pass -class AliasLoader(object): - def __init__(self, - alias_filename=os.path.expanduser( - os.path.join('~', '.aws', 'cli', 'alias'))): +class AliasLoader: + def __init__( + self, + alias_filename=os.path.expanduser( + os.path.join('~', '.aws', 'cli', 'alias') + ), + ): """Interface for loading and interacting with alias file :param alias_filename: The name of the file to load aliases from. 
@@ -60,8 +61,7 @@ def _build_aliases(self): def _load_aliases(self): parsed = {} if os.path.exists(self._filename): - parsed = raw_config_parse( - self._filename, parse_subsections=False) + parsed = raw_config_parse(self._filename, parse_subsections=False) self._normalize_key_names(parsed) return parsed @@ -94,7 +94,7 @@ def get_aliases(self, command=None): return self._aliases.get(key, {}) -class BaseAliasCommandInjector(object): +class BaseAliasCommandInjector: def __init__(self, alias_loader): self._alias_loader = alias_loader @@ -106,7 +106,8 @@ def _is_external_alias(self, alias_value): def _inject_external_alias(self, alias_name, alias_value, command_table): command_table[alias_name] = ExternalAliasCommand( - alias_name, alias_value) + alias_name, alias_value + ) class AliasCommandInjector(BaseAliasCommandInjector): @@ -119,33 +120,36 @@ def __init__(self, session, alias_loader): :type alias_loader: awscli.alias.AliasLoader :param alias_loader: The alias loader to use """ - super(AliasCommandInjector, self).__init__(alias_loader) + super().__init__(alias_loader) self._session = session def inject_aliases(self, command_table, parser): for alias_name, alias_value in self._get_alias_items(): if self._is_external_alias(alias_value): - self._inject_external_alias(alias_name, alias_value, - command_table) + self._inject_external_alias( + alias_name, alias_value, command_table + ) else: service_alias_cmd_args = [ - alias_name, alias_value, self._session, command_table, - parser + alias_name, + alias_value, + self._session, + command_table, + parser, ] # If the alias name matches something already in the # command table provide the command it is about # to clobber as a possible reference that it will # need to proxy to. if alias_name in command_table: - service_alias_cmd_args.append( - command_table[alias_name]) + service_alias_cmd_args.append(command_table[alias_name]) alias_cmd = ServiceAliasCommand(*service_alias_cmd_args) command_table[alias_name] = alias_cmd class AliasSubCommandInjector(BaseAliasCommandInjector): def __init__(self, alias_loader): - super(AliasSubCommandInjector, self).__init__(alias_loader) + super().__init__(alias_loader) self._global_cmd_driver = None self._global_args_parser = None @@ -153,14 +157,18 @@ def _retrieve_global_args_parser(self): if self._global_args_parser is None: if self._global_cmd_driver is not None: command_table = self._global_cmd_driver.subcommand_table - self._global_args_parser = \ + self._global_args_parser = ( self._global_cmd_driver.create_parser(command_table) + ) return self._global_args_parser - def on_building_command_table(self, command_table, event_name, - command_object, session, **kwargs): - if not isinstance(command_object, CLICommand) and \ - event_name == 'building-command-table.main': + def on_building_command_table( + self, command_table, event_name, command_object, session, **kwargs + ): + if ( + not isinstance(command_object, CLICommand) + and event_name == 'building-command-table.main' + ): self._global_cmd_driver = command_object return # We have to transform the event name to figure out what the @@ -185,14 +193,18 @@ def on_building_command_table(self, command_table, event_name, for alias_name, alias_value in aliases_for_cmd.items(): if self._is_external_alias(alias_value): self._inject_external_alias( - alias_name, alias_value, command_table) + alias_name, alias_value, command_table + ) else: proxied_sub_command = command_table.get(alias_name) command_table[alias_name] = InternalAliasSubCommand( - alias_name, alias_value, 
command_object, + alias_name, + alias_value, + command_object, self._retrieve_global_args_parser(), session=session, - proxied_sub_command=proxied_sub_command) + proxied_sub_command=proxied_sub_command, + ) class BaseAliasCommand(CLICommand): @@ -233,13 +245,10 @@ def lineage(self, value): class BaseInternalAliasCommand(BaseAliasCommand): - UNSUPPORTED_GLOBAL_PARAMETERS = [ - 'debug', - 'profile' - ] + UNSUPPORTED_GLOBAL_PARAMETERS = ['debug', 'profile'] def __init__(self, alias_name, alias_value, session): - super(BaseInternalAliasCommand, self).__init__(alias_name, alias_value) + super().__init__(alias_name, alias_value) self._session = session def _get_alias_args(self): @@ -247,22 +256,25 @@ def _get_alias_args(self): alias_args = shlex.split(self._alias_value) except ValueError as e: raise InvalidAliasException( - 'Value of alias "%s" could not be parsed. ' - 'Received error: %s when parsing:\n%s' % ( - self._alias_name, e, self._alias_value) + f'Value of alias "{self._alias_name}" could not be parsed. ' + f'Received error: {e} when parsing:\n{self._alias_value}' ) alias_args = [arg.strip(os.linesep) for arg in alias_args] LOG.debug( 'Expanded subcommand alias %r with value: %r to: %r', - self._alias_name, self._alias_value, alias_args + self._alias_name, + self._alias_value, + alias_args, ) return alias_args - def _update_parsed_globals(self, arg_parser, parsed_alias_args, - parsed_globals): + def _update_parsed_globals( + self, arg_parser, parsed_alias_args, parsed_globals + ): global_params_to_update = self._get_global_parameters_to_update( - arg_parser, parsed_alias_args) + arg_parser, parsed_alias_args + ) # Emit the top level args parsed event to ensure all possible # customizations that typically get applied are applied to the # global parameters provided in the alias before updating @@ -287,17 +299,24 @@ def _get_global_parameters_to_update(self, arg_parser, parsed_alias_args): if arg_parser.get_default(parsed_param) != value: if parsed_param in self.UNSUPPORTED_GLOBAL_PARAMETERS: raise InvalidAliasException( - 'Global parameter "--%s" detected in alias "%s" ' - 'which is not support in subcommand aliases.' % ( - parsed_param, self._alias_name)) + f'Global parameter "--{parsed_param}" detected in alias "{self._alias_name}" ' + 'which is not support in subcommand aliases.' 
+ ) else: global_params_to_update.append(parsed_param) return global_params_to_update class ServiceAliasCommand(BaseInternalAliasCommand): - def __init__(self, alias_name, alias_value, session, command_table, - parser, shadow_proxy_command=None): + def __init__( + self, + alias_name, + alias_value, + session, + command_table, + parser, + shadow_proxy_command=None, + ): """Command for a `toplevel` subcommand alias :type alias_name: string @@ -328,8 +347,9 @@ def __init__(self, alias_name, alias_value, session, command_table, to this command as opposed to proxy to itself in the command table """ - super(ServiceAliasCommand, self).__init__( - alias_name, alias_value, session) + super().__init__( + alias_name, alias_value, session + ) self._command_table = command_table self._parser = parser self._shadow_proxy_command = shadow_proxy_command @@ -337,15 +357,20 @@ def __init__(self, alias_name, alias_value, session, command_table, def __call__(self, args, parsed_globals): alias_args = self._get_alias_args() parsed_alias_args, remaining = self._parser.parse_known_args( - alias_args) - self._update_parsed_globals(self._parser, parsed_alias_args, - parsed_globals) + alias_args + ) + self._update_parsed_globals( + self._parser, parsed_alias_args, parsed_globals + ) # Take any of the remaining arguments that were not parsed out and # prepend them to the remaining args provided to the alias. remaining.extend(args) LOG.debug( 'Alias %r passing on arguments: %r to %r command', - self._alias_name, remaining, parsed_alias_args.command) + self._alias_name, + remaining, + parsed_alias_args.command, + ) # Pass the update remaining args and global args to the service command # the alias proxied to. command = self._command_table[parsed_alias_args.command] @@ -356,9 +381,9 @@ def __call__(self, args, parsed_globals): # a built-in command. if shadow_name == parsed_alias_args.command: LOG.debug( - 'Using shadowed command object: %s ' - 'for alias: %s', self._shadow_proxy_command, - self._alias_name + 'Using shadowed command object: %s ' 'for alias: %s', + self._shadow_proxy_command, + self._alias_name, ) command = self._shadow_proxy_command return command(remaining, parsed_globals) @@ -382,36 +407,44 @@ def __init__(self, alias_name, alias_value, invoker=subprocess.call): :param invoker: Callable to run arguments of external alias. 
The signature should match that of ``subprocess.call`` """ - super(ExternalAliasCommand, self).__init__(alias_name, alias_value) + super().__init__(alias_name, alias_value) self._invoker = invoker def __call__(self, args, parsed_globals): - command_components = [ - self._alias_value[1:] - ] + command_components = [self._alias_value[1:]] command_components.extend(compat_shell_quote(a) for a in args) command = ' '.join(command_components) LOG.debug( 'Using external alias %r with value: %r to run: %r', - self._alias_name, self._alias_value, command) + self._alias_name, + self._alias_value, + command, + ) return self._invoker(command, shell=True) class InternalAliasSubCommand(BaseInternalAliasCommand): - - def __init__(self, alias_name, alias_value, command_object, - global_args_parser, session, - proxied_sub_command=None): - super(InternalAliasSubCommand, self).__init__( - alias_name, alias_value, session) + def __init__( + self, + alias_name, + alias_value, + command_object, + global_args_parser, + session, + proxied_sub_command=None, + ): + super().__init__( + alias_name, alias_value, session + ) self._command_object = command_object self._global_args_parser = global_args_parser self._proxied_sub_command = proxied_sub_command def _process_global_args(self, arg_parser, alias_args, parsed_globals): globally_parseable_args = [parsed_globals.command] + alias_args - alias_globals, remaining = arg_parser\ - .parse_known_args(globally_parseable_args) + alias_globals, remaining = arg_parser.parse_known_args( + globally_parseable_args + ) self._update_parsed_globals(arg_parser, alias_globals, parsed_globals) return remaining @@ -429,7 +462,8 @@ def __call__(self, args, parsed_globals): # embedded as part of the alias value (i.e defined in the alias file) alias_args = self._get_alias_args() cmd_specific_args = self._process_global_args( - self._global_args_parser, alias_args, parsed_globals) + self._global_args_parser, alias_args, parsed_globals + ) cmd_specific_args.extend(args) if self._proxied_sub_command is not None: # If we overwrote an existing command, we just delegate to that @@ -438,8 +472,10 @@ def __call__(self, args, parsed_globals): # command so we remove that value before delegating to the # proxied command. cmd_specific_args = cmd_specific_args[1:] - LOG.debug("Delegating to proxy sub-command with new alias " - "args: %s", alias_args) + LOG.debug( + "Delegating to proxy sub-command with new alias " "args: %s", + alias_args, + ) return self._proxied_sub_command(cmd_specific_args, parsed_globals) else: return self._command_object(cmd_specific_args, parsed_globals) diff --git a/awscli/argparser.py b/awscli/argparser.py index 31bdfa1ccb06..5037443323c1 100644 --- a/awscli/argparser.py +++ b/awscli/argparser.py @@ -15,7 +15,6 @@ import sys from difflib import get_close_matches - HELP_BLURB = ( "To see help text, you can run:\n" "\n" @@ -25,7 +24,7 @@ ) USAGE = ( "aws [options] <command> <subcommand> [<subcommand> ...]
[parameters]\n" - "%s" % HELP_BLURB + f"{HELP_BLURB}" ) @@ -40,9 +39,10 @@ class CommandAction(argparse.Action): are dynamically retrieved from the keys of the referenced command table """ + def __init__(self, option_strings, dest, command_table, **kwargs): self.command_table = command_table - super(CommandAction, self).__init__( + super().__init__( option_strings, dest, choices=self.choices, **kwargs ) @@ -78,21 +78,23 @@ def _check_value(self, action, value): # converted value must be one of the choices (if specified) if action.choices is not None and value not in action.choices: msg = ['Invalid choice, valid choices are:\n'] - for i in range(len(action.choices))[::self.ChoicesPerLine]: + for i in range(len(action.choices))[:: self.ChoicesPerLine]: current = [] - for choice in action.choices[i:i+self.ChoicesPerLine]: + for choice in action.choices[i : i + self.ChoicesPerLine]: current.append('%-40s' % choice) msg.append(' | '.join(current)) possible = get_close_matches(value, action.choices, cutoff=0.8) if possible: - extra = ['\n\nInvalid choice: %r, maybe you meant:\n' % value] + extra = [f'\n\nInvalid choice: {value!r}, maybe you meant:\n'] for word in possible: - extra.append(' * %s' % word) + extra.append(f' * {word}') msg.extend(extra) raise argparse.ArgumentError(action, '\n'.join(msg)) def parse_known_args(self, args, namespace=None): - parsed, remaining = super(CLIArgParser, self).parse_known_args(args, namespace) + parsed, remaining = super().parse_known_args( + args, namespace + ) terminal_encoding = getattr(sys.stdin, 'encoding', 'utf-8') if terminal_encoding is None: # In some cases, sys.stdin won't have an encoding set, @@ -131,48 +133,60 @@ def error(self, message): class MainArgParser(CLIArgParser): Formatter = argparse.RawTextHelpFormatter - def __init__(self, command_table, version_string, - description, argument_table, prog=None): - super(MainArgParser, self).__init__( + def __init__( + self, + command_table, + version_string, + description, + argument_table, + prog=None, + ): + super().__init__( formatter_class=self.Formatter, add_help=False, conflict_handler='resolve', description=description, usage=USAGE, - prog=prog) + prog=prog, + ) self._build(command_table, version_string, argument_table) def _create_choice_help(self, choices): help_str = '' for choice in sorted(choices): - help_str += '* %s\n' % choice + help_str += f'* {choice}\n' return help_str def _build(self, command_table, version_string, argument_table): for argument_name in argument_table: argument = argument_table[argument_name] argument.add_to_parser(self) - self.add_argument('--version', action="version", - version=version_string, - help='Display the version of this tool') - self.add_argument('command', action=CommandAction, - command_table=command_table) + self.add_argument( + '--version', + action="version", + version=version_string, + help='Display the version of this tool', + ) + self.add_argument( + 'command', action=CommandAction, command_table=command_table + ) class ServiceArgParser(CLIArgParser): - def __init__(self, operations_table, service_name): - super(ServiceArgParser, self).__init__( + super().__init__( formatter_class=argparse.RawTextHelpFormatter, add_help=False, conflict_handler='resolve', - usage=USAGE) + usage=USAGE, + ) self._build(operations_table) self._service_name = service_name def _build(self, operations_table): - self.add_argument('operation', action=CommandAction, - command_table=operations_table) + self.add_argument( + 'operation', action=CommandAction, 
command_table=operations_table + ) class ArgTableArgParser(CLIArgParser): @@ -182,11 +196,12 @@ def __init__(self, argument_table, command_table=None): # command_table is an optional subcommand_table. If it's passed # in, then we'll update the argparse to parse a 'subcommand' argument # and populate the choices field with the command table keys. - super(ArgTableArgParser, self).__init__( + super().__init__( formatter_class=self.Formatter, add_help=False, usage=USAGE, - conflict_handler='resolve') + conflict_handler='resolve', + ) if command_table is None: command_table = {} self._build(argument_table, command_table) @@ -196,8 +211,12 @@ def _build(self, argument_table, command_table): argument = argument_table[arg_name] argument.add_to_parser(self) if command_table: - self.add_argument('subcommand', action=CommandAction, - command_table=command_table, nargs='?') + self.add_argument( + 'subcommand', + action=CommandAction, + command_table=command_table, + nargs='?', + ) def parse_known_args(self, args, namespace=None): if len(args) == 1 and args[0] == 'help': @@ -205,8 +224,9 @@ def parse_known_args(self, args, namespace=None): namespace.help = 'help' return namespace, [] else: - return super(ArgTableArgParser, self).parse_known_args( - args, namespace) + return super().parse_known_args( + args, namespace + ) class SubCommandArgParser(ArgTableArgParser): @@ -219,8 +239,7 @@ class SubCommandArgParser(ArgTableArgParser): """ def parse_known_args(self, args, namespace=None): - parsed_args, remaining = super( - SubCommandArgParser, self).parse_known_args(args, namespace) + parsed_args, remaining = super().parse_known_args(args, namespace) if getattr(parsed_args, 'subcommand', None) is not None: new_args = self._remove_subcommand(args, parsed_args) return new_args, parsed_args.subcommand @@ -256,14 +275,17 @@ def _build(self, argument_table, command_table): # fail if any of the required args aren't provided. We don't # want to mutate the arg table that's provided to us, so we # make a copy of it and then set all the required to not required. - non_required_arg_table = self._non_required_arg_table( - argument_table) + non_required_arg_table = self._non_required_arg_table(argument_table) for arg_name in non_required_arg_table: argument = non_required_arg_table[arg_name] argument.add_to_parser(self) if command_table: - self.add_argument('subcommand', action=CommandAction, - command_table=command_table, nargs='?') + self.add_argument( + 'subcommand', + action=CommandAction, + command_table=command_table, + nargs='?', + ) def _non_required_arg_table(self, argument_table): arg_table_copy = {} diff --git a/awscli/argprocess.py b/awscli/argprocess.py index 74d0ed9011d7..3498adfdaab1 100644 --- a/awscli/argprocess.py +++ b/awscli/argprocess.py @@ -11,17 +11,17 @@ # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
"""Module for processing CLI args.""" -import os -import logging -from botocore.compat import OrderedDict, json +import logging +import os -from awscli import SCALAR_TYPES, COMPLEX_TYPES -from awscli import shorthand +from awscli import COMPLEX_TYPES, SCALAR_TYPES, shorthand from awscli.utils import ( - find_service_and_method_in_event_name, is_document_type, - is_document_type_container + find_service_and_method_in_event_name, + is_document_type, + is_document_type_container, ) +from botocore.compat import OrderedDict, json from botocore.utils import is_json_value_header LOG = logging.getLogger('awscli.argprocess') @@ -40,9 +40,8 @@ def __init__(self, cli_name, message): :param message: The error message to display to the user. """ - full_message = ("Error parsing parameter '%s': %s" % - (cli_name, message)) - super(ParamError, self).__init__(full_message) + full_message = f"Error parsing parameter '{cli_name}': {message}" + super().__init__(full_message) self.cli_name = cli_name self.message = message @@ -54,17 +53,17 @@ class ParamSyntaxError(Exception): class ParamUnknownKeyError(Exception): def __init__(self, key, valid_keys): valid_keys = ', '.join(valid_keys) - full_message = ( - "Unknown key '%s', valid choices " - "are: %s" % (key, valid_keys)) - super(ParamUnknownKeyError, self).__init__(full_message) + full_message = f"Unknown key '{key}', valid choices " f"are: {valid_keys}" + super().__init__(full_message) class TooComplexError(Exception): pass -def unpack_argument(session, service_name, operation_name, cli_argument, value): +def unpack_argument( + session, service_name, operation_name, cli_argument, value +): """ Unpack an argument's value from the commandline. This is part one of a two step process in handling commandline arguments. Emits the load-cli-arg @@ -76,11 +75,12 @@ def unpack_argument(session, service_name, operation_name, cli_argument, value): param_name = getattr(cli_argument, 'name', 'anonymous') value_override = session.emit_first_non_none_response( - 'load-cli-arg.%s.%s.%s' % (service_name, - operation_name, - param_name), - param=cli_argument, value=value, service_name=service_name, - operation_name=operation_name) + f'load-cli-arg.{service_name}.{operation_name}.{param_name}', + param=cli_argument, + value=value, + service_name=service_name, + operation_name=operation_name, + ) if value_override is not None: value = value_override @@ -102,8 +102,10 @@ def _detect_shape_structure(param, stack): if param.type_name in SCALAR_TYPES: return 'scalar' elif param.type_name == 'structure': - sub_types = [_detect_shape_structure(p, stack) - for p in param.members.values()] + sub_types = [ + _detect_shape_structure(p, stack) + for p in param.members.values() + ] # We're distinguishing between structure(scalar) # and structure(scalars), because for the case of # a single scalar in a structure we can simplify @@ -113,14 +115,14 @@ def _detect_shape_structure(param, stack): elif len(sub_types) > 1 and all(p == 'scalar' for p in sub_types): return 'structure(scalars)' else: - return 'structure(%s)' % ', '.join(sorted(set(sub_types))) + return 'structure({})'.format(', '.join(sorted(set(sub_types)))) elif param.type_name == 'list': - return 'list-%s' % _detect_shape_structure(param.member, stack) + return f'list-{_detect_shape_structure(param.member, stack)}' elif param.type_name == 'map': if param.value.type_name in SCALAR_TYPES: return 'map-scalar' else: - return 'map-%s' % _detect_shape_structure(param.value, stack) + return f'map-{_detect_shape_structure(param.value, 
stack)}' finally: stack.pop() @@ -141,29 +143,31 @@ def unpack_cli_arg(cli_argument, value): :return: The "unpacked" argument than can be sent to the `Operation` object in python. """ - return _unpack_cli_arg(cli_argument.argument_model, value, - cli_argument.cli_name) + return _unpack_cli_arg( + cli_argument.argument_model, value, cli_argument.cli_name + ) def _special_type(model): # check if model is jsonvalue header and that value is serializable - if model.serialization.get('jsonvalue') and \ - model.serialization.get('location') == 'header' and \ - model.type_name == 'string': + if ( + model.serialization.get('jsonvalue') + and model.serialization.get('location') == 'header' + and model.type_name == 'string' + ): return True return False def _unpack_cli_arg(argument_model, value, cli_name): - if is_json_value_header(argument_model) or \ - is_document_type(argument_model): + if is_json_value_header(argument_model) or is_document_type( + argument_model + ): return _unpack_json_cli_arg(argument_model, value, cli_name) elif argument_model.type_name in SCALAR_TYPES: - return unpack_scalar_cli_arg( - argument_model, value, cli_name) + return unpack_scalar_cli_arg(argument_model, value, cli_name) elif argument_model.type_name in COMPLEX_TYPES: - return _unpack_complex_cli_arg( - argument_model, value, cli_name) + return _unpack_complex_cli_arg(argument_model, value, cli_name) else: return str(value) @@ -173,8 +177,8 @@ def _unpack_json_cli_arg(argument_model, value, cli_name): return json.loads(value, object_pairs_hook=OrderedDict) except ValueError as e: raise ParamError( - cli_name, "Invalid JSON: %s\nJSON received: %s" - % (e, value)) + cli_name, f"Invalid JSON: {e}\nJSON received: {value}" + ) def _unpack_complex_cli_arg(argument_model, value, cli_name): @@ -182,7 +186,7 @@ def _unpack_complex_cli_arg(argument_model, value, cli_name): if type_name == 'structure' or type_name == 'map': if value.lstrip()[0] == '{': return _unpack_json_cli_arg(argument_model, value, cli_name) - raise ParamError(cli_name, "Invalid JSON:\n%s" % value) + raise ParamError(cli_name, f"Invalid JSON:\n{value}") elif type_name == 'list': if isinstance(value, str): if value.lstrip()[0] == '[': @@ -198,9 +202,10 @@ def _unpack_complex_cli_arg(argument_model, value, cli_name): # 2. It's possible this is a list of json objects: # --filters '{"Name": ..}' '{"Name": ...}' member_shape_model = argument_model.member - return [_unpack_cli_arg(member_shape_model, v, cli_name) - for v in value] - except (ValueError, TypeError) as e: + return [ + _unpack_cli_arg(member_shape_model, v, cli_name) for v in value + ] + except (ValueError, TypeError): # The list params don't have a name/cli_name attached to them # so they will have bad error messages. We're going to # attach the parent parameter to this error message to provide @@ -211,13 +216,21 @@ def _unpack_complex_cli_arg(argument_model, value, cli_name): def unpack_scalar_cli_arg(argument_model, value, cli_name=''): # Note the cli_name is used strictly for error reporting. 
It's # not required to use unpack_scalar_cli_arg - if argument_model.type_name == 'integer' or argument_model.type_name == 'long': + if ( + argument_model.type_name == 'integer' + or argument_model.type_name == 'long' + ): return int(value) - elif argument_model.type_name == 'float' or argument_model.type_name == 'double': + elif ( + argument_model.type_name == 'float' + or argument_model.type_name == 'double' + ): # TODO: losing precision on double types return float(value) - elif argument_model.type_name == 'blob' and \ - argument_model.serialization.get('streaming'): + elif ( + argument_model.type_name == 'blob' + and argument_model.serialization.get('streaming') + ): file_path = os.path.expandvars(value) file_path = os.path.expanduser(file_path) if not os.path.isfile(file_path): @@ -256,8 +269,7 @@ def _is_complex_shape(model): return True -class ParamShorthand(object): - +class ParamShorthand: def _uses_old_list_case(self, command_name, operation_name, argument_name): """ Determines whether a given operation for a service needs to use the @@ -265,27 +277,24 @@ def _uses_old_list_case(self, command_name, operation_name, argument_name): a single member. """ cases = { - 'firehose': { - 'put-record-batch': ['records'] - }, + 'firehose': {'put-record-batch': ['records']}, 'workspaces': { 'reboot-workspaces': ['reboot-workspace-requests'], 'rebuild-workspaces': ['rebuild-workspace-requests'], - 'terminate-workspaces': ['terminate-workspace-requests'] + 'terminate-workspaces': ['terminate-workspace-requests'], }, 'elb': { 'remove-tags': ['tags'], 'describe-instance-health': ['instances'], 'deregister-instances-from-load-balancer': ['instances'], - 'register-instances-with-load-balancer': ['instances'] - } + 'register-instances-with-load-balancer': ['instances'], + }, } cases = cases.get(command_name, {}).get(operation_name, []) return argument_name in cases class ParamShorthandParser(ParamShorthand): - def __init__(self): self._parser = shorthand.ShorthandParser() self._visitor = shorthand.BackCompatVisitor() @@ -321,18 +330,21 @@ def __call__(self, cli_argument, value, event_name, **kwargs): if not self._should_parse_as_shorthand(cli_argument, value): return else: - command_name, operation_name = \ + command_name, operation_name = ( find_service_and_method_in_event_name(event_name) + ) return self._parse_as_shorthand( - cli_argument, value, command_name, operation_name) + cli_argument, value, command_name, operation_name + ) - def _parse_as_shorthand(self, cli_argument, value, command_name, - operation_name): + def _parse_as_shorthand( + self, cli_argument, value, command_name, operation_name + ): try: - LOG.debug("Parsing param %s as shorthand", - cli_argument.cli_name) + LOG.debug("Parsing param %s as shorthand", cli_argument.cli_name) handled_value = self._handle_special_cases( - cli_argument, value, command_name, operation_name) + cli_argument, value, command_name, operation_name + ) if handled_value is not None: return handled_value if isinstance(value, list): @@ -357,15 +369,20 @@ def _parse_as_shorthand(self, cli_argument, value, command_name, raise ParamError(cli_argument.cli_name, str(e)) return parsed - def _handle_special_cases(self, cli_argument, value, command_name, - operation_name): + def _handle_special_cases( + self, cli_argument, value, command_name, operation_name + ): # We need to handle a few special cases that the previous # parser handled in order to stay backwards compatible. 
model = cli_argument.argument_model - if model.type_name == 'list' and \ - model.member.type_name == 'structure' and \ - len(model.member.members) == 1 and \ - self._uses_old_list_case(command_name, operation_name, cli_argument.name): + if ( + model.type_name == 'list' + and model.member.type_name == 'structure' + and len(model.member.members) == 1 + and self._uses_old_list_case( + command_name, operation_name, cli_argument.name + ) + ): # First special case is handling a list of structures # of a single element such as: # @@ -378,11 +395,13 @@ def _handle_special_cases(self, cli_argument, value, command_name, key_name = list(model.member.members.keys())[0] new_values = [{key_name: v} for v in value] return new_values - elif model.type_name == 'structure' and \ - len(model.members) == 1 and \ - 'Value' in model.members and \ - model.members['Value'].type_name == 'string' and \ - '=' not in value: + elif ( + model.type_name == 'structure' + and len(model.members) == 1 + and 'Value' in model.members + and model.members['Value'].type_name == 'string' + and '=' not in value + ): # Second special case is where a structure of a single # value whose member name is "Value" can be specified # as: @@ -401,9 +420,13 @@ def _should_parse_as_shorthand(self, cli_argument, value): else: check_val = value if isinstance(check_val, str) and check_val.strip().startswith( - ('[', '{')): - LOG.debug("Param %s looks like JSON, not considered for " - "param shorthand.", cli_argument.py_name) + ('[', '{') + ): + LOG.debug( + "Param %s looks like JSON, not considered for " + "param shorthand.", + cli_argument.py_name, + ) return False model = cli_argument.argument_model return _supports_shorthand_syntax(model) @@ -421,8 +444,9 @@ def supports_shorthand(self, argument_model): return _supports_shorthand_syntax(argument_model) return False - def generate_shorthand_example(self, cli_argument, command_name, - operation_name): + def generate_shorthand_example( + self, cli_argument, command_name, operation_name + ): """Generate documentation for a CLI argument. :type cli_argument: awscli.arguments.BaseCLIArgument @@ -437,7 +461,8 @@ def generate_shorthand_example(self, cli_argument, command_name, """ docstring = self._handle_special_cases( - cli_argument, command_name, operation_name) + cli_argument, command_name, operation_name + ) if docstring is self._DONT_DOC: return None elif docstring: @@ -455,24 +480,30 @@ def generate_shorthand_example(self, cli_argument, command_name, except TooComplexError: return '' - def _handle_special_cases(self, cli_argument, command_name, operation_name): + def _handle_special_cases( + self, cli_argument, command_name, operation_name + ): model = cli_argument.argument_model - if model.type_name == 'list' and \ - model.member.type_name == 'structure' and \ - len(model.member.members) == 1 and \ - self._uses_old_list_case( - command_name, operation_name, cli_argument.name): + if ( + model.type_name == 'list' + and model.member.type_name == 'structure' + and len(model.member.members) == 1 + and self._uses_old_list_case( + command_name, operation_name, cli_argument.name + ) + ): member_name = list(model.member.members)[0] # Handle special case where the min/max is exactly one. 
metadata = model.metadata if metadata.get('min') == 1 and metadata.get('max') == 1: - return '%s %s1' % (cli_argument.cli_name, member_name) - return '%s %s1 %s2 %s3' % (cli_argument.cli_name, member_name, - member_name, member_name) - elif model.type_name == 'structure' and \ - len(model.members) == 1 and \ - 'Value' in model.members and \ - model.members['Value'].type_name == 'string': + return f'{cli_argument.cli_name} {member_name}1' + return f'{cli_argument.cli_name} {member_name}1 {member_name}2 {member_name}3' + elif ( + model.type_name == 'structure' + and len(model.members) == 1 + and 'Value' in model.members + and model.members['Value'].type_name == 'string' + ): return self._DONT_DOC return '' @@ -496,20 +527,20 @@ def _list_docs(self, argument_model, stack): finally: stack.pop() if list_member.type_name in COMPLEX_TYPES or len(stack) > 1: - return '[%s,%s]' % (element_docs, element_docs) + return f'[{element_docs},{element_docs}]' else: - return '%s,%s' % (element_docs, element_docs) + return f'{element_docs},{element_docs}' def _map_docs(self, argument_model, stack): k = argument_model.key value_docs = self._shorthand_docs(argument_model.value, stack) - start = 'KeyName1=%s,KeyName2=%s' % (value_docs, value_docs) + start = f'KeyName1={value_docs},KeyName2={value_docs}' if k.enum and not stack: start += '\n\nWhere valid key names are:\n' for enum in k.enum: - start += ' %s\n' % enum + start += f' {enum}\n' elif stack: - start = '{%s}' % start + start = f'{{{start}}}' return start def _structure_docs(self, argument_model, stack): @@ -521,7 +552,7 @@ def _structure_docs(self, argument_model, stack): inner_part = ','.join(parts) if not stack: return inner_part - return '{%s}' % inner_part + return f'{{{inner_part}}}' def _member_docs(self, name, shape, stack): if stack.count(shape.name) > 0: @@ -531,4 +562,4 @@ def _member_docs(self, name, shape, stack): value_doc = self._shorthand_docs(shape, stack) finally: stack.pop() - return '%s=%s' % (name, value_doc) + return f'{name}={value_doc}' diff --git a/awscli/arguments.py b/awscli/arguments.py index 4cb1b291d267..5e13e6f97ec5 100644 --- a/awscli/arguments.py +++ b/awscli/arguments.py @@ -36,15 +36,13 @@ user input and maps the input value to several API parameters. """ -import logging -from botocore import xform_name -from botocore.hooks import first_non_none_response +import logging from awscli.argprocess import unpack_cli_arg from awscli.schema import SchemaTransformer -from botocore import model - +from botocore import model, xform_name +from botocore.hooks import first_non_none_response LOG = logging.getLogger('awscli.arguments') @@ -66,7 +64,7 @@ def create_argument_model_from_schema(schema): return arg_shape -class BaseCLIArgument(object): +class BaseCLIArgument: """Interface for CLI argument. 
This class represents the interface used for representing CLI @@ -203,11 +201,24 @@ class CustomArgument(BaseCLIArgument): """ - def __init__(self, name, help_text='', dest=None, default=None, - action=None, required=None, choices=None, nargs=None, - cli_type_name=None, group_name=None, positional_arg=False, - no_paramfile=False, argument_model=None, synopsis='', - const=None): + def __init__( + self, + name, + help_text='', + dest=None, + default=None, + action=None, + required=None, + choices=None, + nargs=None, + cli_type_name=None, + group_name=None, + positional_arg=False, + no_paramfile=False, + argument_model=None, + synopsis='', + const=None, + ): self._name = name self._help = help_text self._dest = dest @@ -235,8 +246,10 @@ def __init__(self, name, help_text='', dest=None, default=None, # If the top level element is a list then set nargs to # accept multiple values seperated by a space. - if self.argument_model is not None and \ - self.argument_model.type_name == 'list': + if ( + self.argument_model is not None + and self.argument_model.type_name == 'list' + ): self._nargs = '+' def _create_scalar_argument_model(self): @@ -337,9 +350,7 @@ def nargs(self): class CLIArgument(BaseCLIArgument): - """Represents a CLI argument that maps to a service parameter. - - """ + """Represents a CLI argument that maps to a service parameter.""" TYPE_MAP = { 'structure': str, @@ -352,12 +363,18 @@ class CLIArgument(BaseCLIArgument): 'long': int, 'boolean': bool, 'double': float, - 'blob': str + 'blob': str, } - def __init__(self, name, argument_model, operation_model, - event_emitter, is_required=False, - serialized_name=None): + def __init__( + self, + name, + argument_model, + operation_model, + event_emitter, + is_required=False, + serialized_name=None, + ): """ :type name: str @@ -433,7 +450,8 @@ def add_to_parser(self, parser): cli_name, help=self.documentation, type=self.cli_type, - required=self.required) + required=self.required, + ) def add_to_params(self, parameters, value): if value is None: @@ -451,16 +469,23 @@ def add_to_params(self, parameters, value): # below. Sometimes this can be more complicated, and subclasses # can customize as they need. unpacked = self._unpack_argument(value) - LOG.debug('Unpacked value of %r for parameter "%s": %r', value, - self.py_name, unpacked) + LOG.debug( + 'Unpacked value of %r for parameter "%s": %r', + value, + self.py_name, + unpacked, + ) parameters[self._serialized_name] = unpacked def _unpack_argument(self, value): service_name = self._operation_model.service_model.service_name operation_name = xform_name(self._operation_model.name, '-') - override = self._emit_first_response('process-cli-arg.%s.%s' % ( - service_name, operation_name), param=self.argument_model, - cli_argument=self, value=value) + override = self._emit_first_response( + f'process-cli-arg.{service_name}.{operation_name}', + param=self.argument_model, + cli_argument=self, + value=value, + ) if override is not None: # A plugin supplied an alternate conversion, # use it instead. 
@@ -478,17 +503,18 @@ def _emit_first_response(self, name, **kwargs): class ListArgument(CLIArgument): - @property def nargs(self): return '*' def add_to_parser(self, parser): cli_name = self.cli_name - parser.add_argument(cli_name, - nargs=self.nargs, - type=self.cli_type, - required=self.required) + parser.add_argument( + cli_name, + nargs=self.nargs, + type=self.cli_type, + required=self.required, + ) class BooleanArgument(CLIArgument): @@ -508,17 +534,27 @@ class BooleanArgument(CLIArgument): """ - def __init__(self, name, argument_model, operation_model, - event_emitter, - is_required=False, action='store_true', dest=None, - group_name=None, default=None, - serialized_name=None): - super(BooleanArgument, self).__init__(name, - argument_model, - operation_model, - event_emitter, - is_required, - serialized_name=serialized_name) + def __init__( + self, + name, + argument_model, + operation_model, + event_emitter, + is_required=False, + action='store_true', + dest=None, + group_name=None, + default=None, + serialized_name=None, + ): + super().__init__( + name, + argument_model, + operation_model, + event_emitter, + is_required, + serialized_name=serialized_name, + ) self._mutex_group = None self._action = action if dest is None: @@ -547,20 +583,27 @@ def add_to_arg_table(self, argument_table): # ourselves for the negative service. We then insert both into the # arg table. argument_table[self.name] = self - negative_name = 'no-%s' % self.name + negative_name = f'no-{self.name}' negative_version = self.__class__( - negative_name, self.argument_model, - self._operation_model, self._event_emitter, - action='store_false', dest=self._destination, - group_name=self.group_name, serialized_name=self._serialized_name) + negative_name, + self.argument_model, + self._operation_model, + self._event_emitter, + action='store_false', + dest=self._destination, + group_name=self.group_name, + serialized_name=self._serialized_name, + ) argument_table[negative_name] = negative_version def add_to_parser(self, parser): - parser.add_argument(self.cli_name, - help=self.documentation, - action=self._action, - default=self._default, - dest=self._destination) + parser.add_argument( + self.cli_name, + help=self.documentation, + action=self._action, + default=self._default, + dest=self._destination, + ) @property def group_name(self): diff --git a/awscli/clidocs.py b/awscli/clidocs.py index 9d8fb9d59159..d379a98f8c4f 100644 --- a/awscli/clidocs.py +++ b/awscli/clidocs.py @@ -13,30 +13,33 @@ import logging import os import re -from botocore import xform_name -from botocore.model import StringShape -from botocore.utils import is_json_value_header from awscli import SCALAR_TYPES from awscli.argprocess import ParamShorthandDocGen from awscli.bcdoc.docevents import DOC_EVENTS from awscli.topictags import TopicTagDB from awscli.utils import ( - find_service_and_method_in_event_name, is_document_type, - operation_uses_document_types, is_streaming_blob_type, - is_tagged_union_type + find_service_and_method_in_event_name, + is_document_type, + is_streaming_blob_type, + is_tagged_union_type, + operation_uses_document_types, ) +from botocore import xform_name +from botocore.model import StringShape +from botocore.utils import is_json_value_header LOG = logging.getLogger(__name__) -EXAMPLES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), - 'examples') +EXAMPLES_DIR = os.path.join( + os.path.dirname(os.path.abspath(__file__)), 'examples' +) GLOBAL_OPTIONS_FILE = os.path.join(EXAMPLES_DIR, 'global_options.rst') 
-GLOBAL_OPTIONS_SYNOPSIS_FILE = os.path.join(EXAMPLES_DIR, - 'global_synopsis.rst') - +GLOBAL_OPTIONS_SYNOPSIS_FILE = os.path.join( + EXAMPLES_DIR, 'global_synopsis.rst' +) -class CLIDocumentEventHandler(object): +class CLIDocumentEventHandler: def __init__(self, help_command): self.help_command = help_command self.register(help_command.session, help_command.event_class) @@ -91,9 +94,11 @@ def unregister(self): handler method will be unregistered for the all events of that type for the specified ``event_class``. """ - self._map_handlers(self.help_command.session, - self.help_command.event_class, - self.help_command.session.unregister) + self._map_handlers( + self.help_command.session, + self.help_command.event_class, + self.help_command.session.unregister, + ) # These are default doc handlers that apply in the general case. @@ -108,7 +113,7 @@ def doc_breadcrumbs(self, help_command, **kwargs): doc.write(' . ') full_cmd_list.append(cmd) full_cmd_name = ' '.join(full_cmd_list) - doc.write(':ref:`%s <cli:%s>`' % (cmd, full_cmd_name)) + doc.write(f':ref:`{cmd} <cli:{full_cmd_name}>`') doc.write(' ]') def doc_title(self, help_command, **kwargs): @@ -117,7 +122,7 @@ reference = help_command.event_class.replace('.', ' ') if reference != 'aws': reference = 'aws ' + reference - doc.writeln('.. _cli:%s:' % reference) + doc.writeln(f'.. _cli:{reference}:') doc.style.h1(help_command.name) def doc_description(self, help_command, **kwargs): @@ -131,7 +136,7 @@ doc_synopsis_start(self, help_command, **kwargs): doc = help_command.doc doc.style.h2('Synopsis') doc.style.start_codeblock() - doc.writeln('%s' % help_command.name) + doc.writeln(f'{help_command.name}') def doc_synopsis_option(self, arg_name, help_command, **kwargs): doc = help_command.doc @@ -141,17 +146,19 @@ def doc_synopsis_option(self, arg_name, help_command, **kwargs): # This arg is already documented so we can move on. return option_str = ' | '.join( - [a.cli_name for a in - self._arg_groups[argument.group_name]]) + [a.cli_name for a in self._arg_groups[argument.group_name]] + ) self._documented_arg_groups.append(argument.group_name) elif argument.cli_name.startswith('--'): - option_str = '%s <value>' % argument.cli_name + option_str = f'{argument.cli_name} <value>' else: - option_str = '<%s>' % argument.cli_name + option_str = f'<{argument.cli_name}>' - if not (argument.required - or getattr(argument, '_DOCUMENT_AS_REQUIRED', False)): - option_str = '[%s]' % option_str - doc.writeln('%s' % option_str) + if not ( + argument.required + or getattr(argument, '_DOCUMENT_AS_REQUIRED', False) + ): + option_str = f'[{option_str}]' + doc.writeln(f'{option_str}') def doc_synopsis_end(self, help_command, **kwargs): doc = help_command.doc @@ -177,13 +184,22 @@ def doc_option(self, arg_name, help_command, **kwargs): # This arg is already documented so we can move on.
return name = ' | '.join( - ['``%s``' % a.cli_name for a in - self._arg_groups[argument.group_name]]) + [ + f'``{a.cli_name}``' + for a in self._arg_groups[argument.group_name] + ] + ) self._documented_arg_groups.append(argument.group_name) else: - name = '``%s``' % argument.cli_name - doc.write('%s (%s)\n' % (name, self._get_argument_type_name( - argument.argument_model, argument.cli_type_name))) + name = f'``{argument.cli_name}``' + doc.write( + '{} ({})\n'.format( + name, + self._get_argument_type_name( + argument.argument_model, argument.cli_type_name + ), + ) + ) doc.style.indent() doc.include_doc_string(argument.documentation) if is_streaming_blob_type(argument.argument_model): @@ -210,8 +226,7 @@ def doc_relateditem(self, help_command, related_item, **kwargs): doc = help_command.doc doc.write('* ') doc.style.sphinx_reference_label( - label='cli:%s' % related_item, - text=related_item + label=f'cli:{related_item}', text=related_item ) doc.write('\n') @@ -223,7 +238,7 @@ def _document_enums(self, model, doc): doc.write('Possible values:') doc.style.start_ul() for enum in model.enum: - doc.style.li('``%s``' % enum) + doc.style.li(f'``{enum}``') doc.style.end_ul() def _document_nested_structure(self, model, doc): @@ -231,8 +246,9 @@ def _document_nested_structure(self, model, doc): member_type_name = getattr(model, 'type_name', None) if member_type_name == 'structure': for member_name, member_shape in model.members.items(): - self._doc_member(doc, member_name, member_shape, - stack=[model.name]) + self._doc_member( + doc, member_name, member_shape, stack=[model.name] + ) elif member_type_name == 'list': self._doc_member(doc, '', model.member, stack=[model.name]) elif member_type_name == 'map': @@ -253,19 +269,19 @@ def _doc_member(self, doc, member_name, member_shape, stack): return stack.append(member_shape.name) try: - self._do_doc_member(doc, member_name, - member_shape, stack) + self._do_doc_member(doc, member_name, member_shape, stack) finally: stack.pop() def _do_doc_member(self, doc, member_name, member_shape, stack): docs = member_shape.documentation type_name = self._get_argument_type_name( - member_shape, member_shape.type_name) + member_shape, member_shape.type_name + ) if member_name: - doc.write('%s -> (%s)' % (member_name, type_name)) + doc.write(f'{member_name} -> ({type_name})') else: - doc.write('(%s)' % type_name) + doc.write(f'({type_name})') doc.style.indent() doc.style.new_paragraph() doc.include_doc_string(docs) @@ -290,26 +306,27 @@ def _do_doc_member(self, doc, member_name, member_shape, stack): def _add_streaming_blob_note(self, doc): doc.style.start_note() - msg = ("This argument is of type: streaming blob. " - "Its value must be the path to a file " - "(e.g. ``path/to/file``) and must **not** " - "be prefixed with ``file://`` or ``fileb://``") + msg = ( + "This argument is of type: streaming blob. " + "Its value must be the path to a file " + "(e.g. ``path/to/file``) and must **not** " + "be prefixed with ``file://`` or ``fileb://``" + ) doc.writeln(msg) doc.style.end_note() def _add_tagged_union_note(self, shape, doc): doc.style.start_note() - members_str = ", ".join( - [f'``{key}``' for key in shape.members.keys()] + members_str = ", ".join([f'``{key}``' for key in shape.members.keys()]) + msg = ( + "This is a Tagged Union structure. Only one of the " + f"following top level keys can be set: {members_str}." ) - msg = ("This is a Tagged Union structure. 
Only one of the " - f"following top level keys can be set: {members_str}.") doc.writeln(msg) doc.style.end_note() class ProviderDocumentEventHandler(CLIDocumentEventHandler): - def doc_breadcrumbs(self, help_command, event_name, **kwargs): pass @@ -339,12 +356,11 @@ def doc_subitems_start(self, help_command, **kwargs): def doc_subitem(self, command_name, help_command, **kwargs): doc = help_command.doc - file_name = '%s/index' % command_name + file_name = f'{command_name}/index' doc.style.tocitem(command_name, file_name=file_name) class ServiceDocumentEventHandler(CLIDocumentEventHandler): - # A service document has no synopsis. def doc_synopsis_start(self, help_command, **kwargs): pass @@ -390,15 +406,14 @@ def doc_subitem(self, command_name, help_command, **kwargs): # If the subcommand table has commands in it, # direct the subitem to the command's index because # it has more subcommands to be documented. - if (len(subcommand_table) > 0): - file_name = '%s/index' % command_name + if len(subcommand_table) > 0: + file_name = f'{command_name}/index' doc.style.tocitem(command_name, file_name=file_name) else: doc.style.tocitem(command_name) class OperationDocumentEventHandler(CLIDocumentEventHandler): - AWS_DOC_BASE = 'https://docs.aws.amazon.com/goto/WebAPI' def doc_description(self, help_command, **kwargs): @@ -409,7 +424,6 @@ def doc_description(self, help_command, **kwargs): self._add_webapi_crosslink(help_command) self._add_note_for_document_types_if_used(help_command) - def _add_webapi_crosslink(self, help_command): doc = help_command.doc operation_model = help_command.obj @@ -422,8 +436,7 @@ def _add_webapi_crosslink(self, help_command): return doc.style.new_paragraph() doc.write("See also: ") - link = '%s/%s/%s' % (self.AWS_DOC_BASE, service_uid, - operation_model.name) + link = f'{self.AWS_DOC_BASE}/{service_uid}/{operation_model.name}' doc.style.external_link(title="AWS API Documentation", link=link) doc.writeln('') @@ -431,27 +444,29 @@ def _add_note_for_document_types_if_used(self, help_command): if operation_uses_document_types(help_command.obj): help_command.doc.style.new_paragraph() help_command.doc.writeln( - '``%s`` uses document type values. Document types follow the ' + f'``{help_command.name}`` uses document type values. Document types follow the ' 'JSON data model where valid values are: strings, numbers, ' 'booleans, null, arrays, and objects. For command input, ' 'options and nested parameters that are labeled with the type ' '``document`` must be provided as JSON. Shorthand syntax does ' - 'not support document types.' % help_command.name + 'not support document types.' ) - def _json_example_value_name(self, argument_model, include_enum_values=True): + def _json_example_value_name( + self, argument_model, include_enum_values=True + ): # If include_enum_values is True, then the valid enum values # are included as the sample JSON value. 
if isinstance(argument_model, StringShape): if argument_model.enum and include_enum_values: choices = argument_model.enum - return '|'.join(['"%s"' % c for c in choices]) + return '|'.join([f'"{c}"' for c in choices]) else: return '"string"' elif argument_model.type_name == 'boolean': return 'true|false' else: - return '%s' % argument_model.type_name + return f'{argument_model.type_name}' def _json_example(self, doc, argument_model, stack): if argument_model.name in stack: @@ -471,7 +486,9 @@ def _do_json_example(self, doc, argument_model, stack): if argument_model.type_name == 'list': doc.write('[') if argument_model.member.type_name in SCALAR_TYPES: - doc.write('%s, ...' % self._json_example_value_name(argument_model.member)) + doc.write( + f'{self._json_example_value_name(argument_model.member)}, ...' + ) else: doc.style.indent() doc.style.new_line() @@ -485,7 +502,7 @@ def _do_json_example(self, doc, argument_model, stack): doc.write('{') doc.style.indent() key_string = self._json_example_value_name(argument_model.key) - doc.write('%s: ' % key_string) + doc.write(f'{key_string}: ') if argument_model.value.type_name in SCALAR_TYPES: doc.write(self._json_example_value_name(argument_model.value)) else: @@ -514,16 +531,17 @@ def _doc_input_structure_members(self, doc, argument_model, stack): member_model = members[member_name] member_type_name = member_model.type_name if member_type_name in SCALAR_TYPES: - doc.write('"%s": %s' % (member_name, - self._json_example_value_name(member_model))) + doc.write( + f'"{member_name}": {self._json_example_value_name(member_model)}' + ) elif member_type_name == 'structure': - doc.write('"%s": ' % member_name) + doc.write(f'"{member_name}": ') self._json_example(doc, member_model, stack) elif member_type_name == 'map': - doc.write('"%s": ' % member_name) + doc.write(f'"{member_name}": ') self._json_example(doc, member_model, stack) elif member_type_name == 'list': - doc.write('"%s": ' % member_name) + doc.write(f'"{member_name}": ') self._json_example(doc, member_model, stack) if i < len(members) - 1: doc.write(',') @@ -533,8 +551,9 @@ def _doc_input_structure_members(self, doc, argument_model, stack): doc.write('}') def doc_option_example(self, arg_name, help_command, event_name, **kwargs): - service_id, operation_name = \ - find_service_and_method_in_event_name(event_name) + service_id, operation_name = find_service_and_method_in_event_name( + event_name + ) doc = help_command.doc cli_argument = help_command.arg_table[arg_name] if cli_argument.group_name in self._arg_groups: @@ -546,7 +565,8 @@ def doc_option_example(self, arg_name, help_command, event_name, **kwargs): docgen = ParamShorthandDocGen() if docgen.supports_shorthand(cli_argument.argument_model): example_shorthand_syntax = docgen.generate_shorthand_example( - cli_argument, service_id, operation_name) + cli_argument, service_id, operation_name + ) if example_shorthand_syntax is None: # If the shorthand syntax returns a value of None, # this indicates to us that there is no example @@ -560,8 +580,11 @@ def doc_option_example(self, arg_name, help_command, event_name, **kwargs): for example_line in example_shorthand_syntax.splitlines(): doc.writeln(example_line) doc.style.end_codeblock() - if argument_model is not None and argument_model.type_name == 'list' and \ - argument_model.member.type_name in SCALAR_TYPES: + if ( + argument_model is not None + and argument_model.type_name == 'list' + and argument_model.member.type_name in SCALAR_TYPES + ): # A list of scalars is special. 
While you *can* use # JSON ( ["foo", "bar", "baz"] ), you can also just # use the argparse behavior of space separated lists. @@ -572,8 +595,9 @@ def doc_option_example(self, arg_name, help_command, event_name, **kwargs): doc.write('Syntax') doc.style.start_codeblock() example_type = self._json_example_value_name( - member, include_enum_values=False) - doc.write('%s %s ...' % (example_type, example_type)) + member, include_enum_values=False + ) + doc.write(f'{example_type} {example_type} ...') if isinstance(member, StringShape) and member.enum: # If we have enum values, we can tell the user # exactly what valid values they can provide. @@ -592,7 +616,7 @@ def _write_valid_enums(self, doc, enum_values): doc.style.new_paragraph() doc.write("Where valid values are:\n") for value in enum_values: - doc.write(" %s\n" % value) + doc.write(f" {value}\n") doc.write("\n") def doc_output(self, help_command, event_name, **kwargs): @@ -614,7 +638,8 @@ class TopicListerDocumentEventHandler(CLIDocumentEventHandler): 'the list of topics from the command line, run ``aws help topics``. ' 'To access a specific topic from the command line, run ' '``aws help [topicname]``, where ``topicname`` is the name of the ' - 'topic as it appears in the output from ``aws help topics``.') + 'topic as it appears in the output from ``aws help topics``.' + ) def __init__(self, help_command): self.help_command = help_command @@ -633,8 +658,8 @@ def doc_title(self, help_command, **kwargs): doc = help_command.doc doc.style.new_paragraph() doc.style.link_target_definition( - refname='cli:aws help %s' % self.help_command.name, - link='') + refname=f'cli:aws help {self.help_command.name}', link='' + ) doc.style.h1('AWS CLI Topic Guide') def doc_description(self, help_command, **kwargs): @@ -674,13 +699,13 @@ def doc_subitems_start(self, help_command, **kwargs): # each category. for topic_name in sorted(categories[category_name]): description = self._topic_tag_db.get_tag_single_value( - topic_name, 'description') + topic_name, 'description' + ) doc.write('* ') doc.style.sphinx_reference_label( - label='cli:aws help %s' % topic_name, - text=topic_name + label=f'cli:aws help {topic_name}', text=topic_name ) - doc.write(': %s\n' % description) + doc.write(f': {description}\n') # Add a hidden toctree to make sure everything is connected in # the document. doc.style.hidden_toctree() @@ -689,7 +714,6 @@ def doc_subitems_start(self, help_command, **kwargs): class TopicDocumentEventHandler(TopicListerDocumentEventHandler): - def doc_breadcrumbs(self, help_command, **kwargs): doc = help_command.doc if doc.target != 'man': @@ -697,8 +721,7 @@ def doc_breadcrumbs(self, help_command, **kwargs): doc.style.sphinx_reference_label(label='cli:aws', text='aws') doc.write(' . 
') doc.style.sphinx_reference_label( - label='cli:aws help topics', - text='topics' + label='cli:aws help topics', text='topics' ) doc.write(' ]') @@ -706,22 +729,24 @@ def doc_title(self, help_command, **kwargs): doc = help_command.doc doc.style.new_paragraph() doc.style.link_target_definition( - refname='cli:aws help %s' % self.help_command.name, - link='') + refname=f'cli:aws help {self.help_command.name}', link='' + ) title = self._topic_tag_db.get_tag_single_value( - help_command.name, 'title') + help_command.name, 'title' + ) doc.style.h1(title) def doc_description(self, help_command, **kwargs): doc = help_command.doc - topic_filename = os.path.join(self._topic_tag_db.topic_dir, - help_command.name + '.rst') + topic_filename = os.path.join( + self._topic_tag_db.topic_dir, help_command.name + '.rst' + ) contents = self._remove_tags_from_content(topic_filename) doc.writeln(contents) doc.style.new_paragraph() def _remove_tags_from_content(self, filename): - with open(filename, 'r') as f: + with open(filename) as f: lines = f.readlines() content_begin_index = 0 @@ -759,7 +784,8 @@ def doc_global_options(self): for arg in help_command.arg_table: argument = help_command.arg_table.get(arg) help_command.doc.writeln( - f"``{argument.cli_name}`` ({argument.cli_type_name})") + f"``{argument.cli_name}`` ({argument.cli_type_name})" + ) help_command.doc.style.indent() help_command.doc.style.new_paragraph() help_command.doc.include_doc_string(argument.documentation) diff --git a/awscli/clidriver.py b/awscli/clidriver.py index 5e6460182ef0..30938e9c524a 100644 --- a/awscli/clidriver.py +++ b/awscli/clidriver.py @@ -10,69 +10,81 @@ # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
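# Illustrative sketch only, not part of this change: how the entry points
# reorganized below are typically driven. It assumes create_clidriver() and
# CLIDriver.main() keep the signatures shown in this diff; the command list is
# a hypothetical example.
from awscli.clidriver import create_clidriver

driver = create_clidriver()
rc = driver.main(['ec2', 'describe-instances', '--output', 'json'])
print('exit code:', rc)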
+import copy import json +import logging import os import platform -import sys -import copy -import logging import re +import sys import distro -import botocore.session -from botocore import xform_name -from botocore.compat import copy_kwargs, OrderedDict -from botocore.history import get_global_history_recorder -from botocore.configprovider import InstanceVarProvider -from botocore.configprovider import EnvironmentProvider -from botocore.configprovider import ScopedConfigProvider -from botocore.configprovider import ConstantProvider -from botocore.configprovider import ChainProvider +import botocore.session from awscli import __version__ +from awscli.alias import AliasCommandInjector, AliasLoader +from awscli.argparser import ( + ArgTableArgParser, + FirstPassGlobalArgParser, + MainArgParser, + ServiceArgParser, + SubCommandArgParser, +) +from awscli.argprocess import unpack_argument +from awscli.arguments import ( + BooleanArgument, + CLIArgument, + CustomArgument, + ListArgument, + UnknownArgumentError, +) +from awscli.autoprompt.core import AutoPromptDriver +from awscli.commands import CLICommand from awscli.compat import ( - default_pager, get_stderr_text_writer, get_stdout_text_writer + default_pager, + get_stderr_text_writer, + get_stdout_text_writer, +) +from awscli.constants import PARAM_VALIDATION_ERROR_RC +from awscli.errorhandler import ( + construct_cli_error_handlers_chain, + construct_entry_point_handlers_chain, ) from awscli.formatter import get_formatter -from awscli.plugin import load_plugins -from awscli.commands import CLICommand -from awscli.argparser import MainArgParser -from awscli.argparser import FirstPassGlobalArgParser -from awscli.argparser import ServiceArgParser -from awscli.argparser import ArgTableArgParser -from awscli.argparser import SubCommandArgParser -from awscli.help import ProviderHelpCommand -from awscli.help import ServiceHelpCommand -from awscli.help import OperationHelpCommand -from awscli.arguments import CustomArgument -from awscli.arguments import ListArgument -from awscli.arguments import BooleanArgument -from awscli.arguments import CLIArgument -from awscli.arguments import UnknownArgumentError -from awscli.argprocess import unpack_argument -from awscli.alias import AliasLoader -from awscli.alias import AliasCommandInjector +from awscli.help import ( + OperationHelpCommand, + ProviderHelpCommand, + ServiceHelpCommand, +) from awscli.logger import ( - set_stream_logger, remove_stream_logger, enable_crt_logging, disable_crt_logging, + enable_crt_logging, + remove_stream_logger, + set_stream_logger, ) +from awscli.plugin import load_plugins from awscli.utils import ( + IMDSRegionProvider, + OutputStreamFactory, + add_command_lineage_to_user_agent_extra, add_metadata_component_to_user_agent_extra, - add_command_lineage_to_user_agent_extra + emit_top_level_args_parsed_event, ) -from awscli.utils import emit_top_level_args_parsed_event -from awscli.utils import OutputStreamFactory -from awscli.utils import IMDSRegionProvider -from awscli.constants import PARAM_VALIDATION_ERROR_RC -from awscli.autoprompt.core import AutoPromptDriver -from awscli.errorhandler import ( - construct_cli_error_handlers_chain, construct_entry_point_handlers_chain +from botocore import xform_name +from botocore.compat import OrderedDict, copy_kwargs +from botocore.configprovider import ( + ChainProvider, + ConstantProvider, + EnvironmentProvider, + InstanceVarProvider, + ScopedConfigProvider, ) - +from botocore.history import get_global_history_recorder LOG = 
logging.getLogger('awscli.clidriver') LOG_FORMAT = ( - '%(asctime)s - %(threadName)s - %(name)s - %(levelname)s - %(message)s') + '%(asctime)s - %(threadName)s - %(name)s - %(levelname)s - %(message)s' +) HISTORY_RECORDER = get_global_history_recorder() METADATA_FILENAME = 'metadata.json' # Don't remove this line. The idna encoding @@ -84,7 +96,7 @@ # the encodings.idna is imported and registered in the codecs registry, # which will stop the LookupErrors from happening. # See: https://bugs.python.org/issue29288 -u''.encode('idna') +''.encode('idna') def main(): @@ -99,19 +111,21 @@ def create_clidriver(args=None): debug = args.debug session = botocore.session.Session() _set_user_agent_for_session(session) - load_plugins(session.full_config.get('plugins', {}), - event_hooks=session.get_component('event_emitter')) + load_plugins( + session.full_config.get('plugins', {}), + event_hooks=session.get_component('event_emitter'), + ) error_handlers_chain = construct_cli_error_handlers_chain() - driver = CLIDriver(session=session, - error_handler=error_handlers_chain, - debug=debug) + driver = CLIDriver( + session=session, error_handler=error_handlers_chain, debug=debug + ) return driver def _get_distribution_source(): metadata_file = os.path.join( os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data'), - METADATA_FILENAME + METADATA_FILENAME, ) metadata = {} if os.path.isfile(metadata_file): @@ -133,7 +147,7 @@ def _get_linux_distribution(): linux_distribution = distro.id() version = distro.major_version() if version: - linux_distribution += '.%s' % version + linux_distribution += f'.{version}' except Exception: pass return linux_distribution @@ -141,9 +155,7 @@ def _get_linux_distribution(): def _add_distribution_source_to_user_agent(session): add_metadata_component_to_user_agent_extra( - session, - 'installer', - _get_distribution_source() + session, 'installer', _get_distribution_source() ) @@ -151,7 +163,7 @@ def _add_linux_distribution_to_user_agent(session): if linux_distribution := _get_distribution(): add_metadata_component_to_user_agent_extra( session, - 'distrib', + 'distrib', linux_distribution, ) @@ -166,9 +178,7 @@ def _set_user_agent_for_session(session): def no_pager_handler(session, parsed_args, **kwargs): if parsed_args.no_cli_pager: config_store = session.get_component('config_store') - config_store.set_config_provider( - 'pager', ConstantProvider(value=None) - ) + config_store.set_config_provider('pager', ConstantProvider(value=None)) class AWSCLIEntryPoint: @@ -184,7 +194,7 @@ def main(self, args): return self._error_handler.handle_exception( e, stdout=get_stdout_text_writer(), - stderr=get_stderr_text_writer() + stderr=get_stderr_text_writer(), ) HISTORY_RECORDER.record('CLI_RC', rc, 'CLI') @@ -222,10 +232,8 @@ def _do_main(self, args): return rc -class CLIDriver(object): - - def __init__(self, session=None, error_handler=None, - debug=False): +class CLIDriver: + def __init__(self, session=None, error_handler=None, debug=False): if session is None: self.session = botocore.session.get_session() _set_user_agent_for_session(self.session) @@ -245,32 +253,24 @@ def __init__(self, session=None, error_handler=None, def _update_config_chain(self): config_store = self.session.get_component('config_store') config_store.set_config_provider( - 'region', - self._construct_cli_region_chain() + 'region', self._construct_cli_region_chain() ) config_store.set_config_provider( - 'output', - self._construct_cli_output_chain() + 'output', self._construct_cli_output_chain() ) 
config_store.set_config_provider( - 'pager', - self._construct_cli_pager_chain() + 'pager', self._construct_cli_pager_chain() ) config_store.set_config_provider( - 'cli_binary_format', - self._construct_cli_binary_format_chain() + 'cli_binary_format', self._construct_cli_binary_format_chain() ) config_store.set_config_provider( - 'cli_auto_prompt', - self._construct_cli_auto_prompt_chain() + 'cli_auto_prompt', self._construct_cli_auto_prompt_chain() ) def _construct_cli_region_chain(self): providers = [ - InstanceVarProvider( - instance_var='region', - session=self.session - ), + InstanceVarProvider(instance_var='region', session=self.session), EnvironmentProvider( name='AWS_REGION', env=os.environ, @@ -344,8 +344,7 @@ def _construct_cli_auto_prompt_chain(self): env=os.environ, ), ScopedConfigProvider( - config_var_name='cli_auto_prompt', - session=self.session + config_var_name='cli_auto_prompt', session=self.session ), ConstantProvider(value='off'), ] @@ -390,24 +389,27 @@ def _build_command_table(self): """ command_table = self._build_builtin_commands(self.session) - self.session.emit('building-command-table.main', - command_table=command_table, - session=self.session, - command_object=self) + self.session.emit( + 'building-command-table.main', + command_table=command_table, + session=self.session, + command_object=self, + ) return command_table def _build_builtin_commands(self, session): commands = OrderedDict() services = session.get_available_services() for service_name in services: - commands[service_name] = ServiceCommand(cli_name=service_name, - session=self.session, - service_name=service_name) + commands[service_name] = ServiceCommand( + cli_name=service_name, + session=self.session, + service_name=service_name, + ) return commands def _add_aliases(self, command_table, parser): - injector = AliasCommandInjector( - self.session, self.alias_loader) + injector = AliasCommandInjector(self.session, self.alias_loader) injector.inject_aliases(command_table, parser) def _build_argument_table(self): @@ -420,29 +422,36 @@ def _build_argument_table(self): cli_argument.add_to_arg_table(argument_table) # Then the final step is to send out an event so handlers # can add extra arguments or modify existing arguments. 
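# Hedged aside, not part of the patch: the ChainProvider built from the
# providers listed in _construct_cli_region_chain() above resolves a value by
# asking each provider in order and returning the first non-None answer.
from botocore.configprovider import ChainProvider, ConstantProvider

chain = ChainProvider(
    providers=[
        ConstantProvider(value=None),         # e.g. instance variable unset
        ConstantProvider(value='us-west-2'),  # e.g. found in env or config
        ConstantProvider(value='us-east-1'),  # fallback, never reached here
    ]
)
assert chain.provide() == 'us-west-2'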
- self.session.emit('building-top-level-params', - session=self.session, - argument_table=argument_table, - driver=self) + self.session.emit( + 'building-top-level-params', + session=self.session, + argument_table=argument_table, + driver=self, + ) return argument_table def _create_cli_argument(self, option_name, option_params): return CustomArgument( - option_name, help_text=option_params.get('help', ''), + option_name, + help_text=option_params.get('help', ''), dest=option_params.get('dest'), default=option_params.get('default'), action=option_params.get('action'), required=option_params.get('required'), choices=option_params.get('choices'), - cli_type_name=option_params.get('type')) + cli_type_name=option_params.get('type'), + ) def create_help_command(self): cli_data = self._get_cli_data() - return ProviderHelpCommand(self.session, self._get_command_table(), - self._get_argument_table(), - cli_data.get('description', None), - cli_data.get('synopsis', None), - cli_data.get('help_usage', None)) + return ProviderHelpCommand( + self.session, + self._get_command_table(), + self._get_argument_table(), + cli_data.get('description', None), + cli_data.get('synopsis', None), + cli_data.get('help_usage', None), + ) def _cli_version(self): version_string = ( @@ -452,8 +461,10 @@ def _cli_version(self): ) if 'AWS_EXECUTION_ENV' in os.environ: - version_string += f' exec-env/{os.environ.get("AWS_EXECUTION_ENV")}' - + version_string += ( + f' exec-env/{os.environ.get("AWS_EXECUTION_ENV")}' + ) + version_string += f' {_get_distribution_source()}/{platform.machine()}' if linux_distribution := _get_distribution(): @@ -466,10 +477,12 @@ def create_parser(self, command_table): command_table['help'] = self.create_help_command() cli_data = self._get_cli_data() parser = MainArgParser( - command_table, self._cli_version(), + command_table, + self._cli_version(), cli_data.get('description', None), self._get_argument_table(), - prog="aws") + prog="aws", + ) return parser def main(self, args=None): @@ -493,8 +506,7 @@ def main(self, args=None): parsed_args, remaining = parser.parse_known_args(args) self._handle_top_level_args(parsed_args) self._emit_session_event(parsed_args) - HISTORY_RECORDER.record( - 'CLI_VERSION', self._cli_version(), 'CLI') + HISTORY_RECORDER.record('CLI_VERSION', self._cli_version(), 'CLI') HISTORY_RECORDER.record('CLI_ARGUMENTS', args, 'CLI') return command_table[parsed_args.command](remaining, parsed_args) except BaseException as e: @@ -506,7 +518,7 @@ def main(self, args=None): return self._error_handler.handle_exception( e, stdout=get_stdout_text_writer(), - stderr=get_stderr_text_writer() + stderr=get_stderr_text_writer(), ) def _emit_session_event(self, parsed_args): @@ -516,8 +528,10 @@ def _emit_session_event(self, parsed_args): # session components to be reset (such as session.profile = foo) # then all the prior registered components would be removed. 
self.session.emit( - 'session-initialized', session=self.session, - parsed_args=parsed_args) + 'session-initialized', + session=self.session, + parsed_args=parsed_args, + ) def _show_error(self, msg): LOG.debug(msg, exc_info=True) @@ -536,8 +550,9 @@ def _set_logging(self, debug): loggers_list = ['botocore', 'awscli', 's3transfer', 'urllib3'] if debug: for logger_name in loggers_list: - set_stream_logger(logger_name, logging.DEBUG, - format_string=LOG_FORMAT) + set_stream_logger( + logger_name, logging.DEBUG, format_string=LOG_FORMAT + ) enable_crt_logging() LOG.debug("CLI version: %s", self._cli_version()) LOG.debug("Arguments entered to CLI: %s", sys.argv[1:]) @@ -548,12 +563,10 @@ def _set_logging(self, debug): for logger_name in loggers_list: remove_stream_logger(logger_name) disable_crt_logging() - set_stream_logger(logger_name='awscli', - log_level=logging.ERROR) + set_stream_logger(logger_name='awscli', log_level=logging.ERROR) class ServiceCommand(CLICommand): - """A service command for the CLI. For example, ``aws ec2 ...`` we'd create a ServiceCommand @@ -621,7 +634,8 @@ def _get_command_table(self): def _get_service_model(self): if self._service_model is None: self._service_model = self.session.get_service_model( - self._service_name) + self._service_name + ) return self._service_model def __call__(self, args, parsed_globals): @@ -646,10 +660,12 @@ def _create_command_table(self): operation_model=operation_model, operation_caller=CLIOperationCaller(self.session), ) - self.session.emit('building-command-table.%s' % self._name, - command_table=command_table, - session=self.session, - command_object=self) + self.session.emit( + f'building-command-table.{self._name}', + command_table=command_table, + session=self.session, + command_object=self, + ) self._add_lineage(command_table) return command_table @@ -660,23 +676,25 @@ def _add_lineage(self, command_table): def create_help_command(self): command_table = self._get_command_table() - return ServiceHelpCommand(session=self.session, - obj=self._get_service_model(), - command_table=command_table, - arg_table=None, - event_class='.'.join(self.lineage_names), - name=self._name) + return ServiceHelpCommand( + session=self.session, + obj=self._get_service_model(), + command_table=command_table, + arg_table=None, + event_class='.'.join(self.lineage_names), + name=self._name, + ) def create_parser(self): command_table = self._get_command_table() # Also add a 'help' command. command_table['help'] = self.create_help_command() return ServiceArgParser( - operations_table=command_table, service_name=self._name) - + operations_table=command_table, service_name=self._name + ) -class ServiceOperation(object): +class ServiceOperation: """A single operation of a service. 
This class represents a single operation for a service, for @@ -690,8 +708,9 @@ class ServiceOperation(object): } DEFAULT_ARG_CLASS = CLIArgument - def __init__(self, name, parent_name, operation_caller, - operation_model, session): + def __init__( + self, name, parent_name, operation_caller, operation_model, session + ): """ :type name: str @@ -750,7 +769,7 @@ def _build_subcommand_table(self): subcommand_table = OrderedDict() full_name = '_'.join([c.name for c in self.lineage]) self._session.emit( - 'building-command-table.%s' % full_name, + f'building-command-table.{full_name}', command_table=subcommand_table, session=self._session, command_object=self, @@ -780,18 +799,23 @@ def _parse_potential_subcommand(self, args, subcommand_table): def __call__(self, args, parsed_globals): # Once we know we're trying to call a particular operation # of a service we can go ahead and load the parameters. - event = 'before-building-argument-table-parser.%s.%s' % \ - (self._parent_name, self._name) - self._emit(event, argument_table=self.arg_table, args=args, - session=self._session) + event = f'before-building-argument-table-parser.{self._parent_name}.{self._name}' + self._emit( + event, + argument_table=self.arg_table, + args=args, + session=self._session, + ) subcommand_table = self.subcommand_table maybe_parsed_subcommand = self._parse_potential_subcommand( - args, subcommand_table) + args, subcommand_table + ) if maybe_parsed_subcommand is not None: new_args, subcommand_name = maybe_parsed_subcommand return subcommand_table[subcommand_name](new_args, parsed_globals) operation_parser = self._create_operation_parser( - self.arg_table, subcommand_table) + self.arg_table, subcommand_table + ) self._add_help(operation_parser) parsed_args, remaining = operation_parser.parse_known_args(args) if parsed_args.help == 'help': @@ -801,20 +825,21 @@ def __call__(self, args, parsed_globals): remaining.append(parsed_args.help) if remaining: raise UnknownArgumentError( - "Unknown options: %s" % ', '.join(remaining)) - event = 'operation-args-parsed.%s.%s' % (self._parent_name, - self._name) - self._emit(event, parsed_args=parsed_args, - parsed_globals=parsed_globals) + "Unknown options: {}".format(', '.join(remaining)) + ) + event = f'operation-args-parsed.{self._parent_name}.{self._name}' + self._emit( + event, parsed_args=parsed_args, parsed_globals=parsed_globals + ) call_parameters = self._build_call_parameters( - parsed_args, self.arg_table) - event = 'calling-command.%s.%s' % (self._parent_name, - self._name) + parsed_args, self.arg_table + ) + event = f'calling-command.{self._parent_name}.{self._name}' override = self._emit_first_non_none_response( event, call_parameters=call_parameters, parsed_args=parsed_args, - parsed_globals=parsed_globals + parsed_globals=parsed_globals, ) # There are two possible values for override. 
It can be some type # of exception that will be raised if detected or it can represent @@ -837,14 +862,18 @@ def __call__(self, args, parsed_globals): return self._operation_caller.invoke( self._operation_model.service_model.service_name, self._operation_model.name, - call_parameters, parsed_globals) + call_parameters, + parsed_globals, + ) def create_help_command(self): return OperationHelpCommand( self._session, operation_model=self._operation_model, arg_table=self.arg_table, - name=self._name, event_class='.'.join(self.lineage_names)) + name=self._name, + event_class='.'.join(self.lineage_names), + ) def _add_help(self, parser): # The 'help' output is processed a little differently from @@ -874,8 +903,9 @@ def _unpack_arg(self, cli_argument, value): service_name = self._operation_model.service_model.endpoint_prefix operation_name = xform_name(self._name, '-') - return unpack_argument(session, service_name, operation_name, - cli_argument, value) + return unpack_argument( + session, service_name, operation_name, cli_argument, value + ) def _create_argument_table(self): argument_table = OrderedDict() @@ -887,8 +917,9 @@ def _create_argument_table(self): arg_dict = input_shape.members for arg_name, arg_shape in arg_dict.items(): cli_arg_name = xform_name(arg_name, '-') - arg_class = self.ARG_TYPES.get(arg_shape.type_name, - self.DEFAULT_ARG_CLASS) + arg_class = self.ARG_TYPES.get( + arg_shape.type_name, self.DEFAULT_ARG_CLASS + ) is_token = arg_shape.metadata.get('idempotencyToken', False) is_required = arg_name in required_arguments and not is_token event_emitter = self._session.get_component('event_emitter') @@ -898,34 +929,36 @@ def _create_argument_table(self): is_required=is_required, operation_model=self._operation_model, serialized_name=arg_name, - event_emitter=event_emitter) + event_emitter=event_emitter, + ) arg_object.add_to_arg_table(argument_table) LOG.debug(argument_table) - self._emit('building-argument-table.%s.%s' % (self._parent_name, - self._name), - operation_model=self._operation_model, - session=self._session, - command=self, - argument_table=argument_table) + self._emit( + f'building-argument-table.{self._parent_name}.{self._name}', + operation_model=self._operation_model, + session=self._session, + command=self, + argument_table=argument_table, + ) return argument_table def _emit(self, name, **kwargs): return self._session.emit(name, **kwargs) def _emit_first_non_none_response(self, name, **kwargs): - return self._session.emit_first_non_none_response( - name, **kwargs) + return self._session.emit_first_non_none_response(name, **kwargs) def _create_operation_parser(self, arg_table, subcommand_table): parser = ArgTableArgParser(arg_table, subcommand_table) return parser def _add_customization_to_user_agent(self): - add_command_lineage_to_user_agent_extra(self._session, self.lineage_names) - + add_command_lineage_to_user_agent_extra( + self._session, self.lineage_names + ) -class CLIOperationCaller(object): +class CLIOperationCaller: """Call an AWS operation and format the response.""" def __init__(self, session): @@ -957,27 +990,31 @@ def invoke(self, service_name, operation_name, parameters, parsed_globals): """ client = self._session.create_client( - service_name, region_name=parsed_globals.region, + service_name, + region_name=parsed_globals.region, endpoint_url=parsed_globals.endpoint_url, - verify=parsed_globals.verify_ssl) + verify=parsed_globals.verify_ssl, + ) response = self._make_client_call( - client, operation_name, parameters, parsed_globals) + client, 
operation_name, parameters, parsed_globals + ) self._display_response(operation_name, response, parsed_globals) return 0 - def _make_client_call(self, client, operation_name, parameters, - parsed_globals): + def _make_client_call( + self, client, operation_name, parameters, parsed_globals + ): py_operation_name = xform_name(operation_name) if client.can_paginate(py_operation_name) and parsed_globals.paginate: paginator = client.get_paginator(py_operation_name) response = paginator.paginate(**parameters) else: response = getattr(client, xform_name(operation_name))( - **parameters) + **parameters + ) return response - def _display_response(self, command_name, response, - parsed_globals): + def _display_response(self, command_name, response, parsed_globals): output = parsed_globals.output if output is None: output = self._session.get_config_variable('output') diff --git a/awscli/commands.py b/awscli/commands.py index c0c9b4477ed2..09951fe3dc4b 100644 --- a/awscli/commands.py +++ b/awscli/commands.py @@ -12,8 +12,7 @@ # language governing permissions and limitations under the License. -class CLICommand(object): - +class CLICommand: """Interface for a CLI command. This class represents a top level CLI command diff --git a/awscli/compat.py b/awscli/compat.py index b6ae89818c90..887ed1cce7bc 100644 --- a/awscli/compat.py +++ b/awscli/compat.py @@ -6,31 +6,31 @@ # http://aws.amazon.com/apache2.0/ +import collections.abc as collections_abc +import contextlib +import io +import locale +import os +import os.path +import platform +import queue +import re +import shlex +import signal + # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. import sys -import re -import shlex -import os -import os.path -import platform +import urllib.parse as urlparse import zipfile -import signal -import contextlib -import collections.abc as collections_abc -import locale -import queue -import io -from urllib.request import urlopen from configparser import RawConfigParser from functools import partial -import urllib.parse as urlparse from urllib.error import URLError +from urllib.request import urlopen -from botocore.compat import six -from botocore.compat import OrderedDict +from botocore.compat import OrderedDict, six # Backwards compatible definitions from six PY3 = sys.version_info[0] == 3 @@ -46,6 +46,7 @@ # package the files in a zip container. try: import zlib + ZIP_COMPRESSION_MODE = zipfile.ZIP_DEFLATED except ImportError: ZIP_COMPRESSION_MODE = zipfile.ZIP_STORED @@ -74,14 +75,12 @@ class StdinMissingError(Exception): def __init__(self): - message = ( - 'stdin is required for this operation, but is not available.' - ) - super(StdinMissingError, self).__init__(message) + message = 'stdin is required for this operation, but is not available.' + super().__init__(message) -class NonTranslatedStdout(object): - """ This context manager sets the line-end translation mode for stdout. +class NonTranslatedStdout: + """This context manager sets the line-end translation mode for stdout. It is deliberately set to binary mode so that `\r` does not get added to the line ending. 
This can be useful when printing commands where a @@ -91,13 +90,16 @@ class NonTranslatedStdout(object): def __enter__(self): if sys.platform == "win32": import msvcrt - self.previous_mode = msvcrt.setmode(sys.stdout.fileno(), - os.O_BINARY) + + self.previous_mode = msvcrt.setmode( + sys.stdout.fileno(), os.O_BINARY + ) return sys.stdout def __exit__(self, type, value, traceback): if sys.platform == "win32": import msvcrt + msvcrt.setmode(sys.stdout.fileno(), self.previous_mode) @@ -106,7 +108,7 @@ def ensure_text_type(s): return s if isinstance(s, bytes): return s.decode('utf-8') - raise ValueError("Expected str, unicode or bytes, received %s." % type(s)) + raise ValueError(f"Expected str, unicode or bytes, received {type(s)}.") def get_binary_stdin(): @@ -280,7 +282,7 @@ def _windows_shell_quote(s): if ' ' in new_s or '\t' in new_s: # If there are any spaces or tabs then the string needs to be double # quoted. - return '"%s"' % new_s + return f'"{new_s}"' return new_s @@ -330,12 +332,12 @@ def ignore_user_entered_signals(): from platform import linux_distribution except ImportError: _UNIXCONFDIR = '/etc' - def _dist_try_harder(distname, version, id): - """ Tries some special tricks to get the distribution - information in case the default method fails. - Currently supports older SuSE Linux, Caldera OpenLinux and - Slackware Linux distributions. + def _dist_try_harder(distname, version, id): + """Tries some special tricks to get the distribution + information in case the default method fails. + Currently supports older SuSE Linux, Caldera OpenLinux and + Slackware Linux distributions. """ if os.path.exists('/var/adm/inst-log/info'): # SuSE Linux stores distribution information in that file @@ -367,7 +369,7 @@ def _dist_try_harder(distname, version, id): if os.path.isdir('/usr/lib/setup'): # Check for slackware version tag file (thanks to Greg Andruk) verfiles = os.listdir('/usr/lib/setup') - for n in range(len(verfiles)-1, -1, -1): + for n in range(len(verfiles) - 1, -1, -1): if verfiles[n][:14] != 'slack-version-': del verfiles[n] if verfiles: @@ -379,14 +381,13 @@ def _dist_try_harder(distname, version, id): return distname, version, id _release_filename = re.compile(r'(\w+)[-_](release|version)', re.ASCII) - _lsb_release_version = re.compile(r'(.+)' - r' release ' - r'([\d.]+)' - r'[^(]*(?:\((.+)\))?', re.ASCII) - _release_version = re.compile(r'([^0-9]+)' - r'(?: release )?' - r'([\d.]+)' - r'[^(]*(?:\((.+)\))?', re.ASCII) + _lsb_release_version = re.compile( + r'(.+)' r' release ' r'([\d.]+)' r'[^(]*(?:\((.+)\))?', re.ASCII + ) + _release_version = re.compile( + r'([^0-9]+)' r'(?: release )?' r'([\d.]+)' r'[^(]*(?:\((.+)\))?', + re.ASCII, + ) # See also http://www.novell.com/coolsolutions/feature/11251.html # and http://linuxmafia.com/faq/Admin/release-files.html @@ -394,12 +395,24 @@ def _dist_try_harder(distname, version, id): # and http://www.die.net/doc/linux/man/man1/lsb_release.1.html _supported_dists = ( - 'SuSE', 'debian', 'fedora', 'redhat', 'centos', - 'mandrake', 'mandriva', 'rocks', 'slackware', 'yellowdog', 'gentoo', - 'UnitedLinux', 'turbolinux', 'arch', 'mageia') + 'SuSE', + 'debian', + 'fedora', + 'redhat', + 'centos', + 'mandrake', + 'mandriva', + 'rocks', + 'slackware', + 'yellowdog', + 'gentoo', + 'UnitedLinux', + 'turbolinux', + 'arch', + 'mageia', + ) def _parse_release_file(firstline): - # Default to empty 'version' and 'id' strings. Both defaults are used # when 'firstline' is empty. 'id' defaults to empty when an id can not # be deduced. 
@@ -429,34 +442,39 @@ def _parse_release_file(firstline): _release_file_re = re.compile(r"(?:DISTRIB_RELEASE\s*=)\s*(.*)", re.I) _codename_file_re = re.compile(r"(?:DISTRIB_CODENAME\s*=)\s*(.*)", re.I) - def linux_distribution(distname='', version='', id='', - supported_dists=_supported_dists, - full_distribution_name=1): - return _linux_distribution(distname, version, id, supported_dists, - full_distribution_name) - - def _linux_distribution(distname, version, id, supported_dists, - full_distribution_name): - - """ Tries to determine the name of the Linux OS distribution name. - The function first looks for a distribution release file in - /etc and then reverts to _dist_try_harder() in case no - suitable files are found. - supported_dists may be given to define the set of Linux - distributions to look for. It defaults to a list of currently - supported Linux distributions identified by their release file - name. - If full_distribution_name is true (default), the full - distribution read from the OS is returned. Otherwise the short - name taken from supported_dists is used. - Returns a tuple (distname, version, id) which default to the - args given as parameters. + def linux_distribution( + distname='', + version='', + id='', + supported_dists=_supported_dists, + full_distribution_name=1, + ): + return _linux_distribution( + distname, version, id, supported_dists, full_distribution_name + ) + + def _linux_distribution( + distname, version, id, supported_dists, full_distribution_name + ): + """Tries to determine the name of the Linux OS distribution name. + The function first looks for a distribution release file in + /etc and then reverts to _dist_try_harder() in case no + suitable files are found. + supported_dists may be given to define the set of Linux + distributions to look for. It defaults to a list of currently + supported Linux distributions identified by their release file + name. + If full_distribution_name is true (default), the full + distribution read from the OS is returned. Otherwise the short + name taken from supported_dists is used. + Returns a tuple (distname, version, id) which default to the + args given as parameters. """ # check for the Debian/Ubuntu /etc/lsb-release file first, needed so # that the distribution doesn't get identified as Debian. 
# https://bugs.python.org/issue9514 try: - with open("/etc/lsb-release", "r") as etclsbrel: + with open("/etc/lsb-release") as etclsbrel: for line in etclsbrel: m = _distributor_id_file_re.search(line) if m: @@ -469,8 +487,8 @@ def _linux_distribution(distname, version, id, supported_dists, _u_id = m.group(1).strip() if _u_distname and _u_version: return (_u_distname, _u_version, _u_id) - except (EnvironmentError, UnboundLocalError): - pass + except (OSError, UnboundLocalError): + pass try: etc = os.listdir(_UNIXCONFDIR) @@ -489,8 +507,11 @@ def _linux_distribution(distname, version, id, supported_dists, return _dist_try_harder(distname, version, id) # Read the first line - with open(os.path.join(_UNIXCONFDIR, file), 'r', - encoding='utf-8', errors='surrogateescape') as f: + with open( + os.path.join(_UNIXCONFDIR, file), + encoding='utf-8', + errors='surrogateescape', + ) as f: firstline = f.readline() _distname, _version, _id = _parse_release_file(firstline) diff --git a/awscli/errorhandler.py b/awscli/errorhandler.py index 198f908ad23e..7ac60221191a 100644 --- a/awscli/errorhandler.py +++ b/awscli/errorhandler.py @@ -13,24 +13,25 @@ import logging import signal -from botocore.exceptions import ( - NoRegionError, NoCredentialsError, ClientError, - ParamValidationError as BotocoreParamValidationError, -) - +from awscli.argparser import USAGE, ArgParseException from awscli.argprocess import ParamError, ParamSyntaxError from awscli.arguments import UnknownArgumentError -from awscli.argparser import ArgParseException, USAGE +from awscli.autoprompt.factory import PrompterKeyboardInterrupt from awscli.constants import ( - PARAM_VALIDATION_ERROR_RC, CONFIGURATION_ERROR_RC, CLIENT_ERROR_RC, - GENERAL_ERROR_RC + CLIENT_ERROR_RC, + CONFIGURATION_ERROR_RC, + GENERAL_ERROR_RC, + PARAM_VALIDATION_ERROR_RC, ) -from awscli.utils import PagerInitializationException -from awscli.autoprompt.factory import PrompterKeyboardInterrupt from awscli.customizations.exceptions import ( - ParamValidationError, ConfigurationError + ConfigurationError, + ParamValidationError, +) +from awscli.utils import PagerInitializationException +from botocore.exceptions import ClientError, NoCredentialsError, NoRegionError +from botocore.exceptions import ( + ParamValidationError as BotocoreParamValidationError, ) - LOG = logging.getLogger(__name__) @@ -40,7 +41,7 @@ def construct_entry_point_handlers_chain(): ParamValidationErrorsHandler(), PrompterInterruptExceptionHandler(), InterruptExceptionHandler(), - GeneralExceptionHandler() + GeneralExceptionHandler(), ] return ChainedExceptionHandler(exception_handlers=handlers) @@ -55,7 +56,7 @@ def construct_cli_error_handlers_chain(): PagerErrorHandler(), InterruptExceptionHandler(), ClientErrorHandler(), - GeneralExceptionHandler() + GeneralExceptionHandler(), ] return ChainedExceptionHandler(exception_handlers=handlers) @@ -84,8 +85,11 @@ def _do_handle_exception(self, exception, stdout, stderr): class ParamValidationErrorsHandler(FilteredExceptionHandler): EXCEPTIONS_TO_HANDLE = ( - ParamError, ParamSyntaxError, ArgParseException, - ParamValidationError, BotocoreParamValidationError + ParamError, + ParamSyntaxError, + ArgParseException, + ParamValidationError, + BotocoreParamValidationError, ) RC = PARAM_VALIDATION_ERROR_RC @@ -108,7 +112,9 @@ class ConfigurationErrorHandler(FilteredExceptionHandler): class NoRegionErrorHandler(FilteredExceptionHandler): EXCEPTIONS_TO_HANDLE = NoRegionError RC = CONFIGURATION_ERROR_RC - MESSAGE = '%s You can also configure your region by running 
"aws configure".' + MESSAGE = ( + '%s You can also configure your region by running "aws configure".' + ) class NoCredentialsErrorHandler(FilteredExceptionHandler): diff --git a/awscli/formatter.py b/awscli/formatter.py index 2fbd9a20cbec..d0d191f210b4 100644 --- a/awscli/formatter.py +++ b/awscli/formatter.py @@ -13,16 +13,14 @@ import logging from datetime import datetime -from botocore.compat import json -from botocore.utils import set_value_from_jmespath -from botocore.paginate import PageIterator from ruamel.yaml import YAML -from awscli.table import MultiTable, Styler, ColorizedStyler -from awscli import text -from awscli import compat +from awscli import compat, text +from awscli.table import ColorizedStyler, MultiTable, Styler from awscli.utils import json_encoder - +from botocore.compat import json +from botocore.paginate import PageIterator +from botocore.utils import set_value_from_jmespath LOG = logging.getLogger(__name__) @@ -31,7 +29,7 @@ def is_response_paginated(response): return isinstance(response, PageIterator) -class Formatter(object): +class Formatter: def __init__(self, args): self._args = args @@ -60,7 +58,7 @@ def _get_default_stream(self): def _flush_stream(self, stream): try: stream.flush() - except IOError: + except OSError: pass @@ -78,10 +76,11 @@ def __call__(self, command_name, response, stream=None): else: response_data = response response_data = self._get_transformed_response_for_output( - response_data) + response_data + ) try: self._format_response(command_name, response_data, stream) - except IOError as e: + except OSError: # If the reading end of our stdout stream has closed the file # we can just exit. pass @@ -92,19 +91,23 @@ def __call__(self, command_name, response, stream=None): class JSONFormatter(FullyBufferedFormatter): - def _format_response(self, command_name, response, stream): # For operations that have no response body (e.g. s3 put-object) # the response will be an empty string. We don't want to print # that out to the user but other "falsey" values like an empty # dictionary should be printed. if response != {}: - json.dump(response, stream, indent=4, default=json_encoder, - ensure_ascii=False) + json.dump( + response, + stream, + indent=4, + default=json_encoder, + ensure_ascii=False, + ) stream.write('\n') -class YAMLDumper(object): +class YAMLDumper: def __init__(self): self._yaml = YAML(typ='safe') # Encoding is set to None because we handle the encoding by @@ -136,7 +139,7 @@ def _is_json_scalar(self, value): class YAMLFormatter(FullyBufferedFormatter): def __init__(self, args, yaml_dumper=None): - super(YAMLFormatter, self).__init__(args) + super().__init__(args) self._yaml_dumper = yaml_dumper if yaml_dumper is None: self._yaml_dumper = YAMLDumper() @@ -149,7 +152,7 @@ def _format_response(self, command_name, response, stream): class StreamedYAMLFormatter(Formatter): def __init__(self, args, yaml_dumper=None): - super(StreamedYAMLFormatter, self).__init__(args) + super().__init__(args) self._yaml_dumper = yaml_dumper if yaml_dumper is None: self._yaml_dumper = YAMLDumper() @@ -165,7 +168,7 @@ def __call__(self, command_name, response, stream=None): # response. We go with the latter so we can reuse our YAML # dumper self._yaml_dumper.dump([response], stream) - except IOError: + except OSError: # If the reading end of our stdout stream has closed the file # we can just exit. 
return @@ -178,7 +181,8 @@ def __call__(self, command_name, response, stream=None): def _get_response_stream(self, response): if is_response_paginated(response): return compat.imap( - self._get_transformed_response_for_output, response) + self._get_transformed_response_for_output, response + ) else: output = self._get_transformed_response_for_output(response) if output == {}: @@ -196,27 +200,31 @@ class TableFormatter(FullyBufferedFormatter): using the output definition from the model. """ + def __init__(self, args, table=None): - super(TableFormatter, self).__init__(args) + super().__init__(args) if args.color == 'auto': - self.table = MultiTable(initial_section=False, - column_separator='|') + self.table = MultiTable( + initial_section=False, column_separator='|' + ) elif args.color == 'off': styler = Styler() - self.table = MultiTable(initial_section=False, - column_separator='|', styler=styler) + self.table = MultiTable( + initial_section=False, column_separator='|', styler=styler + ) elif args.color == 'on': styler = ColorizedStyler() - self.table = MultiTable(initial_section=False, - column_separator='|', styler=styler) + self.table = MultiTable( + initial_section=False, column_separator='|', styler=styler + ) else: - raise ValueError("Unknown color option: %s" % args.color) + raise ValueError(f"Unknown color option: {args.color}") def _format_response(self, command_name, response, stream): if self._build_table(command_name, response): try: self.table.render(stream) - except IOError: + except OSError: # If they're piping stdout to another process which exits before # we're done writing all of our output, we'll get an error about a # closed pipe which we can safely ignore. @@ -257,8 +265,9 @@ def _build_sub_table_from_dict(self, current, indent_level): self.table.add_row_header(headers) self.table.add_row([current[k] for k in headers]) for remaining in more: - self._build_table(remaining, current[remaining], - indent_level=indent_level + 1) + self._build_table( + remaining, current[remaining], indent_level=indent_level + 1 + ) def _build_sub_table_from_list(self, current, indent_level, title): headers, more = self._group_scalar_keys_from_list(current) @@ -266,8 +275,7 @@ def _build_sub_table_from_list(self, current, indent_level, title): first = True for element in current: if not first and more: - self.table.new_section(title, - indent_level=indent_level) + self.table.new_section(title, indent_level=indent_level) self.table.add_row_header(headers) first = False # Use .get() to account for the fact that sometimes an element @@ -278,8 +286,11 @@ def _build_sub_table_from_list(self, current, indent_level, title): # be in every single element of the list, so we need to # check this condition before recursing. 
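# Why the IOError / EnvironmentError -> OSError substitutions in this file and
# in compat.py are behavior-preserving on Python 3: both old names are plain
# aliases of OSError, so the rewritten except clauses catch exactly the same
# exceptions as before.
assert IOError is OSError
assert EnvironmentError is OSError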
if remaining in element: - self._build_table(remaining, element[remaining], - indent_level=indent_level + 1) + self._build_table( + remaining, + element[remaining], + indent_level=indent_level + 1, + ) def _scalar_type(self, element): return not isinstance(element, (list, dict)) @@ -315,7 +326,6 @@ def _group_scalar_keys(self, current): class TextFormatter(Formatter): - def __call__(self, command_name, response, stream=None): if stream is None: stream = self._get_default_stream() @@ -331,9 +341,7 @@ def __call__(self, command_name, response, stream=None): for result_key in result_keys: data = result_key.search(page) set_value_from_jmespath( - current, - result_key.expression, - data + current, result_key.expression, data ) self._format_response(current, stream) if response.resume_token: @@ -341,7 +349,8 @@ def __call__(self, command_name, response, stream=None): # if they want. self._format_response( {'NextToken': {'NextToken': response.resume_token}}, - stream) + stream, + ) else: self._remove_request_id(response) self._format_response(response, stream) @@ -368,6 +377,6 @@ def _format_response(self, response, stream): def get_formatter(format_type, args): if format_type not in CLI_OUTPUT_FORMATS: - raise ValueError("Unknown output type: %s" % format_type) + raise ValueError(f"Unknown output type: {format_type}") format_type_cls = CLI_OUTPUT_FORMATS[format_type] return format_type_cls(args) diff --git a/awscli/handlers.py b/awscli/handlers.py index 4cbaa6ea6472..ef3abe07fc3a 100644 --- a/awscli/handlers.py +++ b/awscli/handlers.py @@ -16,93 +16,121 @@ registered with the event system. """ + +from awscli.alias import register_alias_commands from awscli.argprocess import ParamShorthandParser -from awscli.customizations.ec2instanceconnect import register_ec2_instance_connect_commands -from awscli.paramfile import register_uri_param_handler from awscli.clidriver import no_pager_handler from awscli.customizations import datapipeline from awscli.customizations.addexamples import add_examples from awscli.customizations.argrename import register_arg_renames from awscli.customizations.assumerole import register_assume_role_provider from awscli.customizations.awslambda import register_lambda_create_function +from awscli.customizations.binaryformat import add_binary_formatter from awscli.customizations.cliinput import register_cli_input_args -from awscli.customizations.cloudformation import initialize as cloudformation_init +from awscli.customizations.cloudformation import ( + initialize as cloudformation_init, +) from awscli.customizations.cloudfront import register as register_cloudfront from awscli.customizations.cloudsearch import initialize as cloudsearch_init from awscli.customizations.cloudsearchdomain import register_cloudsearchdomain from awscli.customizations.cloudtrail import initialize as cloudtrail_init from awscli.customizations.codeartifact import register_codeartifact_commands from awscli.customizations.codecommit import initialize as codecommit_init -from awscli.customizations.codedeploy.codedeploy import initialize as \ - codedeploy_init +from awscli.customizations.codedeploy.codedeploy import ( + initialize as codedeploy_init, +) from awscli.customizations.configservice.getstatus import register_get_status -from awscli.customizations.configservice.putconfigurationrecorder import \ - register_modify_put_configuration_recorder -from awscli.customizations.configservice.rename_cmd import \ - register_rename_config +from awscli.customizations.configservice.putconfigurationrecorder 
import ( + register_modify_put_configuration_recorder, +) +from awscli.customizations.configservice.rename_cmd import ( + register_rename_config, +) from awscli.customizations.configservice.subscribe import register_subscribe from awscli.customizations.configure.configure import register_configure_cmd +from awscli.customizations.devcommands import register_dev_commands +from awscli.customizations.dlm.dlm import dlm_initialize +from awscli.customizations.dsql import register_dsql_customizations from awscli.customizations.dynamodb.ddb import register_ddb -from awscli.customizations.dynamodb.paginatorfix import \ - register_dynamodb_paginator_fix -from awscli.customizations.history import register_history_mode -from awscli.customizations.history import register_history_commands +from awscli.customizations.dynamodb.paginatorfix import ( + register_dynamodb_paginator_fix, +) from awscli.customizations.ec2.addcount import register_count_events from awscli.customizations.ec2.bundleinstance import register_bundleinstance from awscli.customizations.ec2.decryptpassword import ec2_add_priv_launch_key +from awscli.customizations.ec2.paginate import register_ec2_page_size_injector from awscli.customizations.ec2.protocolarg import register_protocol_args from awscli.customizations.ec2.runinstances import register_runinstances from awscli.customizations.ec2.secgroupsimplify import register_secgroup -from awscli.customizations.ec2.paginate import register_ec2_page_size_injector +from awscli.customizations.ec2instanceconnect import ( + register_ec2_instance_connect_commands, +) from awscli.customizations.ecr import register_ecr_commands from awscli.customizations.ecr_public import register_ecr_public_commands -from awscli.customizations.emr.emr import emr_initialize -from awscli.customizations.emrcontainers import \ - initialize as emrcontainers_initialize -from awscli.customizations.eks import initialize as eks_initialize from awscli.customizations.ecs import initialize as ecs_initialize +from awscli.customizations.eks import initialize as eks_initialize +from awscli.customizations.emr.emr import emr_initialize +from awscli.customizations.emrcontainers import ( + initialize as emrcontainers_initialize, +) from awscli.customizations.gamelift import register_gamelift_commands -from awscli.customizations.generatecliskeleton import \ - register_generate_cli_skeleton +from awscli.customizations.generatecliskeleton import ( + register_generate_cli_skeleton, +) from awscli.customizations.globalargs import register_parse_global_args +from awscli.customizations.history import ( + register_history_commands, + register_history_mode, +) from awscli.customizations.iamvirtmfa import IAMVMFAWrapper -from awscli.customizations.iot import register_create_keys_and_cert_arguments -from awscli.customizations.iot import register_create_keys_from_csr_arguments +from awscli.customizations.iot import ( + register_create_keys_and_cert_arguments, + register_create_keys_from_csr_arguments, +) from awscli.customizations.iot_data import register_custom_endpoint_note +from awscli.customizations.kinesis import ( + register_kinesis_list_streams_pagination_backcompat, +) from awscli.customizations.kms import register_fix_kms_create_grant_docs -from awscli.customizations.dlm.dlm import dlm_initialize +from awscli.customizations.lightsail import initialize as lightsail_initialize +from awscli.customizations.logs import register_logs_commands from awscli.customizations.opsworks import initialize as opsworks_init +from 
awscli.customizations.opsworkscm import register_alias_opsworks_cm from awscli.customizations.paginate import register_pagination from awscli.customizations.putmetricdata import register_put_metric_data -from awscli.customizations.rds import register_rds_modify_split -from awscli.customizations.rds import register_add_generate_db_auth_token -from awscli.customizations.dsql import register_dsql_customizations -from awscli.customizations.rekognition import register_rekognition_detect_labels +from awscli.customizations.quicksight import ( + register_quicksight_asset_bundle_customizations, +) +from awscli.customizations.rds import ( + register_add_generate_db_auth_token, + register_rds_modify_split, +) +from awscli.customizations.rekognition import ( + register_rekognition_detect_labels, +) from awscli.customizations.removals import register_removals from awscli.customizations.route53 import register_create_hosted_zone_doc_fix from awscli.customizations.s3.s3 import s3_plugin_initialize from awscli.customizations.s3errormsg import register_s3_error_msg -from awscli.customizations.timestampformat import register_timestamp_format +from awscli.customizations.s3events import ( + register_document_expires_string, + register_event_stream_arg, +) +from awscli.customizations.servicecatalog import ( + register_servicecatalog_commands, +) from awscli.customizations.sessendemail import register_ses_send_email +from awscli.customizations.sessionmanager import register_ssm_session from awscli.customizations.sso import register_sso_commands from awscli.customizations.streamingoutputarg import add_streaming_output_arg -from awscli.customizations.translate import register_translate_import_terminology +from awscli.customizations.timestampformat import register_timestamp_format from awscli.customizations.toplevelbool import register_bool_params +from awscli.customizations.translate import ( + register_translate_import_terminology, +) from awscli.customizations.waiters import register_add_waiters -from awscli.customizations.opsworkscm import register_alias_opsworks_cm -from awscli.customizations.servicecatalog import register_servicecatalog_commands -from awscli.customizations.s3events import register_event_stream_arg, register_document_expires_string -from awscli.customizations.sessionmanager import register_ssm_session -from awscli.customizations.logs import register_logs_commands -from awscli.customizations.devcommands import register_dev_commands from awscli.customizations.wizard.commands import register_wizard_commands -from awscli.customizations.binaryformat import add_binary_formatter -from awscli.customizations.lightsail import initialize as lightsail_initialize -from awscli.alias import register_alias_commands -from awscli.customizations.kinesis import \ - register_kinesis_list_streams_pagination_backcompat -from awscli.customizations.quicksight import \ - register_quicksight_asset_bundle_customizations +from awscli.paramfile import register_uri_param_handler def awscli_initialize(event_handlers): @@ -114,23 +142,25 @@ def awscli_initialize(event_handlers): # The s3 error mesage needs to registered before the # generic error handler. register_s3_error_msg(event_handlers) -# # The following will get fired for every option we are -# # documenting. It will attempt to add an example_fn on to -# # the parameter object if the parameter supports shorthand -# # syntax. The documentation event handlers will then use -# # the examplefn to generate the sample shorthand syntax -# # in the docs. 
Registering here should ensure that this -# # handler gets called first but it still feels a bit brittle. -# event_handlers.register('doc-option-example.*.*.*', -# param_shorthand.add_example_fn) - event_handlers.register('doc-examples.*.*', - add_examples) + # # The following will get fired for every option we are + # # documenting. It will attempt to add an example_fn on to + # # the parameter object if the parameter supports shorthand + # # syntax. The documentation event handlers will then use + # # the examplefn to generate the sample shorthand syntax + # # in the docs. Registering here should ensure that this + # # handler gets called first but it still feels a bit brittle. + # event_handlers.register('doc-option-example.*.*.*', + # param_shorthand.add_example_fn) + event_handlers.register('doc-examples.*.*', add_examples) register_cli_input_args(event_handlers) - event_handlers.register('building-argument-table.*', - add_streaming_output_arg) + event_handlers.register( + 'building-argument-table.*', add_streaming_output_arg + ) register_count_events(event_handlers) - event_handlers.register('building-argument-table.ec2.get-password-data', - ec2_add_priv_launch_key) + event_handlers.register( + 'building-argument-table.ec2.get-password-data', + ec2_add_priv_launch_key, + ) register_parse_global_args(event_handlers) register_pagination(event_handlers) register_secgroup(event_handlers) @@ -179,10 +209,12 @@ def awscli_initialize(event_handlers): register_custom_endpoint_note(event_handlers) event_handlers.register( 'building-argument-table.iot.create-keys-and-certificate', - register_create_keys_and_cert_arguments) + register_create_keys_and_cert_arguments, + ) event_handlers.register( 'building-argument-table.iot.create-certificate-from-csr', - register_create_keys_from_csr_arguments) + register_create_keys_from_csr_arguments, + ) register_cloudfront(event_handlers) register_gamelift_commands(event_handlers) register_ec2_page_size_injector(event_handlers) diff --git a/awscli/help.py b/awscli/help.py index 1ce8571f3aec..777b3f7dbcd0 100644 --- a/awscli/help.py +++ b/awscli/help.py @@ -12,35 +12,37 @@ # language governing permissions and limitations under the License. 
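# Hedged sketch of the registration pattern used throughout awscli_initialize()
# above, assuming botocore's HierarchicalEmitter keeps its register()/emit()
# API; the real CLI registers against the session's event emitter instead of a
# bare one, and the handler below is hypothetical.
from botocore.hooks import HierarchicalEmitter

def announce(argument_table=None, **kwargs):
    print('fired:', kwargs.get('event_name'))

emitter = HierarchicalEmitter()
emitter.register('building-argument-table.ec2.get-password-data', announce)
emitter.emit(
    'building-argument-table.ec2.get-password-data', argument_table={}
)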
import logging import os -import sys import platform import shlex -from subprocess import Popen, PIPE +import sys +from subprocess import PIPE, Popen from docutils.core import publish_string from docutils.writers import manpage -from awscli.clidocs import ProviderDocumentEventHandler -from awscli.clidocs import ServiceDocumentEventHandler -from awscli.clidocs import OperationDocumentEventHandler -from awscli.clidocs import TopicListerDocumentEventHandler -from awscli.clidocs import TopicDocumentEventHandler +from awscli.argparser import ArgTableArgParser +from awscli.argprocess import ParamShorthandParser from awscli.bcdoc import docevents from awscli.bcdoc.restdoc import ReSTDocument from awscli.bcdoc.textwriter import TextWriter -from awscli.argprocess import ParamShorthandParser -from awscli.argparser import ArgTableArgParser +from awscli.clidocs import ( + OperationDocumentEventHandler, + ProviderDocumentEventHandler, + ServiceDocumentEventHandler, + TopicDocumentEventHandler, + TopicListerDocumentEventHandler, +) from awscli.topictags import TopicTagDB from awscli.utils import ignore_ctrl_c - LOG = logging.getLogger('awscli.help') class ExecutableNotFoundError(Exception): def __init__(self, executable_name): - super(ExecutableNotFoundError, self).__init__( - 'Could not find executable named "%s"' % executable_name) + super().__init__( + f'Could not find executable named "{executable_name}"' + ) def get_renderer(): @@ -54,7 +56,7 @@ def get_renderer(): return PosixHelpRenderer() -class PagingHelpRenderer(object): +class PagingHelpRenderer: """ Interface for a help renderer. @@ -62,6 +64,7 @@ class PagingHelpRenderer(object): a particular platform. """ + def __init__(self, output_stream=sys.stdout): self.output_stream = output_stream @@ -118,7 +121,8 @@ def _convert_doc_content(self, contents): settings_overrides = self._DEFAULT_DOCUTILS_SETTINGS_OVERRIDES.copy() settings_overrides["report_level"] = 3 man_contents = publish_string( - contents, writer=manpage.Writer(), + contents, + writer=manpage.Writer(), settings_overrides=self._DEFAULT_DOCUTILS_SETTINGS_OVERRIDES, ) if self._exists_on_path('groff'): @@ -135,8 +139,9 @@ def _convert_doc_content(self, contents): def _send_output_to_pager(self, output): cmdline = self.get_pager_cmdline() if not self._exists_on_path(cmdline[0]): - LOG.debug("Pager '%s' not found in PATH, printing raw help." % - cmdline[0]) + LOG.debug( + f"Pager '{cmdline[0]}' not found in PATH, printing raw help." + ) self.output_stream.write(output.decode('utf-8') + "\n") self.output_stream.flush() return @@ -159,8 +164,12 @@ def _send_output_to_pager(self, output): def _exists_on_path(self, name): # Since we're only dealing with POSIX systems, we can # ignore things like PATHEXT. 
- return any([os.path.exists(os.path.join(p, name)) - for p in os.environ.get('PATH', '').split(os.pathsep)]) + return any( + [ + os.path.exists(os.path.join(p, name)) + for p in os.environ.get('PATH', '').split(os.pathsep) + ] + ) class WindowsHelpRenderer(PagingHelpRenderer): @@ -170,7 +179,8 @@ class WindowsHelpRenderer(PagingHelpRenderer): def _convert_doc_content(self, contents): text_output = publish_string( - contents, writer=TextWriter(), + contents, + writer=TextWriter(), settings_overrides=self._DEFAULT_DOCUTILS_SETTINGS_OVERRIDES, ) return text_output @@ -182,7 +192,7 @@ def _popen(self, *args, **kwargs): return Popen(*args, **kwargs) -class HelpCommand(object): +class HelpCommand: """ HelpCommand Interface --------------------- @@ -280,8 +290,9 @@ def __call__(self, args, parsed_globals): subcommand_parser = ArgTableArgParser({}, self.subcommand_table) parsed, remaining = subcommand_parser.parse_known_args(args) if getattr(parsed, 'subcommand', None) is not None: - return self.subcommand_table[parsed.subcommand](remaining, - parsed_globals) + return self.subcommand_table[parsed.subcommand]( + remaining, parsed_globals + ) # Create an event handler for a Provider Document instance = self.EventHandlerClass(self) @@ -299,12 +310,13 @@ class ProviderHelpCommand(HelpCommand): This is what is called when ``aws help`` is run. """ + EventHandlerClass = ProviderDocumentEventHandler - def __init__(self, session, command_table, arg_table, - description, synopsis, usage): - HelpCommand.__init__(self, session, None, - command_table, arg_table) + def __init__( + self, session, command_table, arg_table, description, synopsis, usage + ): + HelpCommand.__init__(self, session, None, command_table, arg_table) self.description = description self.synopsis = synopsis self.help_usage = usage @@ -353,10 +365,12 @@ class ServiceHelpCommand(HelpCommand): EventHandlerClass = ServiceDocumentEventHandler - def __init__(self, session, obj, command_table, arg_table, name, - event_class): - super(ServiceHelpCommand, self).__init__(session, obj, command_table, - arg_table) + def __init__( + self, session, obj, command_table, arg_table, name, event_class + ): + super().__init__( + session, obj, command_table, arg_table + ) self._name = name self._event_class = event_class @@ -376,10 +390,10 @@ class OperationHelpCommand(HelpCommand): e.g. ``aws ec2 describe-instances help``. 
""" + EventHandlerClass = OperationDocumentEventHandler - def __init__(self, session, operation_model, arg_table, name, - event_class): + def __init__(self, session, operation_model, arg_table, name, event_class): HelpCommand.__init__(self, session, operation_model, None, arg_table) self.param_shorthand = ParamShorthandParser() self._name = name @@ -398,7 +412,7 @@ class TopicListerCommand(HelpCommand): EventHandlerClass = TopicListerDocumentEventHandler def __init__(self, session): - super(TopicListerCommand, self).__init__(session, None, {}, {}) + super().__init__(session, None, {}, {}) @property def event_class(self): @@ -413,7 +427,7 @@ class TopicHelpCommand(HelpCommand): EventHandlerClass = TopicDocumentEventHandler def __init__(self, session, topic_name): - super(TopicHelpCommand, self).__init__(session, None, {}, {}) + super().__init__(session, None, {}, {}) self._topic_name = topic_name @property diff --git a/awscli/logger.py b/awscli/logger.py index 38b16fe7bbe3..e0f818b0d009 100644 --- a/awscli/logger.py +++ b/awscli/logger.py @@ -17,8 +17,7 @@ LOG_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s' -def set_stream_logger(logger_name, log_level, stream=None, - format_string=None): +def set_stream_logger(logger_name, log_level, stream=None, format_string=None): """ Convenience method to configure a stream logger. diff --git a/awscli/paramfile.py b/awscli/paramfile.py index da5307f214f6..92d9a0a809f3 100644 --- a/awscli/paramfile.py +++ b/awscli/paramfile.py @@ -10,12 +10,12 @@ # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. +import copy import logging import os -import copy -from awscli.compat import compat_open from awscli import argprocess +from awscli.compat import compat_open logger = logging.getLogger(__name__) @@ -30,7 +30,7 @@ def register_uri_param_handler(session, **kwargs): session.register('load-cli-arg', handler) -class URIArgumentHandler(object): +class URIArgumentHandler: def __init__(self, prefixes): self._prefixes = prefixes @@ -77,21 +77,23 @@ def get_paramfile(path, cases): def get_file(prefix, path, mode): - file_path = os.path.expandvars(os.path.expanduser(path[len(prefix):])) + file_path = os.path.expandvars(os.path.expanduser(path[len(prefix) :])) try: with compat_open(file_path, mode) as f: return f.read() except UnicodeDecodeError: raise ResourceLoadingError( - 'Unable to load paramfile (%s), text contents could ' + f'Unable to load paramfile ({file_path}), text contents could ' 'not be decoded. If this is a binary file, please use the ' - 'fileb:// prefix instead of the file:// prefix.' % file_path) - except (OSError, IOError) as e: - raise ResourceLoadingError('Unable to load paramfile %s: %s' % ( - path, e)) + 'fileb:// prefix instead of the file:// prefix.' + ) + except OSError as e: + raise ResourceLoadingError( + f'Unable to load paramfile {path}: {e}' + ) LOCAL_PREFIX_MAP = { 'file://': (get_file, {'mode': 'r'}), 'fileb://': (get_file, {'mode': 'rb'}), -} \ No newline at end of file +} diff --git a/awscli/plugin.py b/awscli/plugin.py index 1c2331ae1cbe..46a26a4fc1a7 100644 --- a/awscli/plugin.py +++ b/awscli/plugin.py @@ -10,9 +10,9 @@ # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
-import sys -import os import logging +import os +import sys from botocore.hooks import HierarchicalEmitter @@ -80,6 +80,9 @@ def _import_plugins(plugin_mapping): def _add_plugin_path_to_sys_path(plugin_path): for dirname in plugin_path.split(os.pathsep): - log.debug("Adding additional path from cli_legacy_plugin_path " - "configuration: %s", dirname) + log.debug( + "Adding additional path from cli_legacy_plugin_path " + "configuration: %s", + dirname, + ) sys.path.append(dirname) diff --git a/awscli/schema.py b/awscli/schema.py index 17ec6ba416cd..faa676fbe116 100644 --- a/awscli/schema.py +++ b/awscli/schema.py @@ -17,7 +17,7 @@ class ParameterRequiredError(ValueError): pass -class SchemaTransformer(object): +class SchemaTransformer: """ Transforms a custom argument parameter schema into an internal model representation so that it can be treated like a normal @@ -63,6 +63,7 @@ class SchemaTransformer(object): $ aws foo bar --baz arg1=Value1,arg2=5 arg1=Value2 """ + JSON_SCHEMA_TO_AWS_TYPES = { 'object': 'structure', 'array': 'list', @@ -116,7 +117,8 @@ def _transform_structure(self, schema, shapes): for key, value in schema['properties'].items(): current_type_name = self._json_schema_to_aws_type(value) current_shape_name = self._shape_namer.new_shape_name( - current_type_name) + current_type_name + ) members[key] = {'shape': current_shape_name} if value.get('required', False): required_members.append(key) @@ -161,11 +163,11 @@ def _json_schema_to_aws_type(self, schema): return self.JSON_SCHEMA_TO_AWS_TYPES.get(type_name, type_name) -class ShapeNameGenerator(object): +class ShapeNameGenerator: def __init__(self): self._name_cache = defaultdict(int) def new_shape_name(self, type_name): self._name_cache[type_name] += 1 current_index = self._name_cache[type_name] - return '%sType%s' % (type_name.capitalize(), current_index) + return f'{type_name.capitalize()}Type{current_index}' diff --git a/awscli/shorthand.py b/awscli/shorthand.py index 7443dba4a141..77f3a1d6e705 100644 --- a/awscli/shorthand.py +++ b/awscli/shorthand.py @@ -38,6 +38,7 @@ ``BackCompatVisitor`` class. 
""" + import re import string @@ -47,7 +48,7 @@ _EOF = object() -class _NamedRegex(object): +class _NamedRegex: def __init__(self, name, regex_str): self.name = name self.regex = re.compile(regex_str, re.UNICODE) @@ -57,28 +58,27 @@ def match(self, value): class ShorthandParseError(Exception): - def _error_location(self): consumed, remaining, num_spaces = self.value, '', self.index - if '\n' in self.value[:self.index]: + if '\n' in self.value[: self.index]: # If there's newlines in the consumed expression, we want # to make sure we're only counting the spaces # from the last newline: # foo=bar,\n # bar==baz # ^ - last_newline = self.value[:self.index].rindex('\n') + last_newline = self.value[: self.index].rindex('\n') num_spaces = self.index - last_newline - 1 - if '\n' in self.value[self.index:]: + if '\n' in self.value[self.index :]: # If there's newline in the remaining, divide value # into consumed and remainig # foo==bar,\n # ^ # bar=baz - next_newline = self.index + self.value[self.index:].index('\n') + next_newline = self.index + self.value[self.index :].index('\n') consumed = self.value[:next_newline] remaining = self.value[next_newline:] - return '%s\n%s%s' % (consumed, (' ' * num_spaces) + '^', remaining) + return '{}\n{}{}'.format(consumed, (' ' * num_spaces) + '^', remaining) class ShorthandParseSyntaxError(ShorthandParseError): @@ -88,13 +88,10 @@ def __init__(self, value, expected, actual, index): self.actual = actual self.index = index msg = self._construct_msg() - super(ShorthandParseSyntaxError, self).__init__(msg) + super().__init__(msg) def _construct_msg(self): - msg = ( - "Expected: '%s', received: '%s' for input:\n" - "%s" - ) % (self.expected, self.actual, self._error_location()) + msg = (f"Expected: '{self.expected}', received: '{self.actual}' for input:\n" f"{self._error_location()}") return msg @@ -104,14 +101,14 @@ def __init__(self, key, value, index): self.value = value self.index = index msg = self._construct_msg() - super(DuplicateKeyInObjectError, self).__init__(msg) + super().__init__(msg) def _construct_msg(self): msg = ( - "Second instance of key \"%s\" encountered for input:\n%s\n" + f"Second instance of key \"{self.key}\" encountered for input:\n{self._error_location()}\n" "This is often because there is a preceding \",\" instead of a " "space." - ) % (self.key, self._error_location()) + ) return msg @@ -119,7 +116,7 @@ class DocumentTypesNotSupportedError(Exception): pass -class ShorthandParser(object): +class ShorthandParser: """Parses shorthand syntax in the CLI. 
Note that this parser does not rely on any JSON models to control @@ -135,20 +132,14 @@ class ShorthandParser(object): _ESCAPED_COMMA = '(\\\\,)' _FIRST_VALUE = _NamedRegex( 'first', - u'({escaped_comma}|[{start_word}])' - u'({escaped_comma}|[{follow_chars}])*'.format( - escaped_comma=_ESCAPED_COMMA, - start_word=_START_WORD, - follow_chars=_FIRST_FOLLOW_CHARS, - )) + f'({_ESCAPED_COMMA}|[{_START_WORD}])' + f'({_ESCAPED_COMMA}|[{_FIRST_FOLLOW_CHARS}])*', + ) _SECOND_VALUE = _NamedRegex( 'second', - u'({escaped_comma}|[{start_word}])' - u'({escaped_comma}|[{follow_chars}])*'.format( - escaped_comma=_ESCAPED_COMMA, - start_word=_START_WORD, - follow_chars=_SECOND_FOLLOW_CHARS, - )) + f'({_ESCAPED_COMMA}|[{_START_WORD}])' + f'({_ESCAPED_COMMA}|[{_SECOND_FOLLOW_CHARS}])*', + ) def __init__(self): self._tokens = [] @@ -213,7 +204,7 @@ def _key(self): if self._current() not in valid_chars: break self._index += 1 - return self._input_value[start:self._index] + return self._input_value[start : self._index] def _values(self): # values = csv-list / explicit-list / hash-literal @@ -275,11 +266,15 @@ def _csv_value(self): return csv_list def _value(self): - result = self._FIRST_VALUE.match(self._input_value[self._index:]) + result = self._FIRST_VALUE.match(self._input_value[self._index :]) if result is not None: consumed = self._consume_matched_regex(result) processed = consumed.replace('\\,', ',').rstrip() - return self._resolve_paramfiles(processed) if self._should_resolve_paramfiles else processed + return ( + self._resolve_paramfiles(processed) + if self._should_resolve_paramfiles + else processed + ) return '' def _explicit_list(self): @@ -339,18 +334,26 @@ def _single_quoted_value(self): # val-escaped-single = %x20-26 / %x28-7F / escaped-escape / # (escape single-quote) processed = self._consume_quoted(self._SINGLE_QUOTED, escaped_char="'") - return self._resolve_paramfiles(processed) if self._should_resolve_paramfiles else processed + return ( + self._resolve_paramfiles(processed) + if self._should_resolve_paramfiles + else processed + ) def _consume_quoted(self, regex, escaped_char=None): value = self._must_consume_regex(regex)[1:-1] if escaped_char is not None: - value = value.replace("\\%s" % escaped_char, escaped_char) + value = value.replace(f"\\{escaped_char}", escaped_char) value = value.replace("\\\\", "\\") return value def _double_quoted_value(self): processed = self._consume_quoted(self._DOUBLE_QUOTED, escaped_char='"') - return self._resolve_paramfiles(processed) if self._should_resolve_paramfiles else processed + return ( + self._resolve_paramfiles(processed) + if self._should_resolve_paramfiles + else processed + ) def _second_value(self): if self._current() == "'": @@ -360,7 +363,11 @@ def _second_value(self): else: consumed = self._must_consume_regex(self._SECOND_VALUE) processed = consumed.replace('\\,', ',').rstrip() - return self._resolve_paramfiles(processed) if self._should_resolve_paramfiles else processed + return ( + self._resolve_paramfiles(processed) + if self._should_resolve_paramfiles + else processed + ) def _resolve_paramfiles(self, val): if (paramfile := get_paramfile(val, LOCAL_PREFIX_MAP)) is not None: @@ -371,27 +378,30 @@ def _expect(self, char, consume_whitespace=False): if consume_whitespace: self._consume_whitespace() if self._index >= len(self._input_value): - raise ShorthandParseSyntaxError(self._input_value, char, - 'EOF', self._index) + raise ShorthandParseSyntaxError( + self._input_value, char, 'EOF', self._index + ) actual = 
self._input_value[self._index] if actual != char: - raise ShorthandParseSyntaxError(self._input_value, char, - actual, self._index) + raise ShorthandParseSyntaxError( + self._input_value, char, actual, self._index + ) self._index += 1 if consume_whitespace: self._consume_whitespace() def _must_consume_regex(self, regex): - result = regex.match(self._input_value[self._index:]) + result = regex.match(self._input_value[self._index :]) if result is not None: return self._consume_matched_regex(result) - raise ShorthandParseSyntaxError(self._input_value, '<%s>' % regex.name, - '', self._index) + raise ShorthandParseSyntaxError( + self._input_value, f'<{regex.name}>', '', self._index + ) def _consume_matched_regex(self, result): start, end = result.span() - v = self._input_value[self._index+start:self._index+end] - self._index += (end - start) + v = self._input_value[self._index + start : self._index + end] + self._index += end - start return v def _current(self): @@ -413,21 +423,23 @@ def _consume_whitespace(self): self._index += 1 -class ModelVisitor(object): +class ModelVisitor: def visit(self, params, model): self._visit({}, model, '', params) def _visit(self, parent, shape, name, value): - method = getattr(self, '_visit_%s' % shape.type_name, - self._visit_scalar) + method = getattr( + self, f'_visit_{shape.type_name}', self._visit_scalar + ) method(parent, shape, name, value) def _visit_structure(self, parent, shape, name, value): if not isinstance(value, dict): return for member_name, member_shape in shape.members.items(): - self._visit(value, member_shape, member_name, - value.get(member_name)) + self._visit( + value, member_shape, member_name, value.get(member_name) + ) def _visit_list(self, parent, shape, name, value): if not isinstance(value, list): @@ -453,8 +465,9 @@ def _visit_structure(self, parent, shape, name, value): return for member_name, member_shape in shape.members.items(): try: - self._visit(value, member_shape, member_name, - value.get(member_name)) + self._visit( + value, member_shape, member_name, value.get(member_name) + ) except DocumentTypesNotSupportedError: # Catch and propagate the document type error to a better # error message as when the original error is thrown there is @@ -463,7 +476,7 @@ def _visit_structure(self, parent, shape, name, value): raise ShorthandParseError( 'Shorthand syntax does not support document types. Use ' 'JSON input for top-level argument to specify nested ' - 'parameter: %s' % member_name + f'parameter: {member_name}' ) def _visit_list(self, parent, shape, name, value): @@ -473,8 +486,9 @@ def _visit_list(self, parent, shape, name, value): if value is not None: parent[name] = [value] else: - return super(BackCompatVisitor, self)._visit_list( - parent, shape, name, value) + return super()._visit_list( + parent, shape, name, value + ) def _visit_scalar(self, parent, shape, name, value): if value is None: diff --git a/awscli/table.py b/awscli/table.py index 8ebfc454d0ed..a60c9b0a8544 100644 --- a/awscli/table.py +++ b/awscli/table.py @@ -6,19 +6,19 @@ # http://aws.amazon.com/apache2.0/ +import struct + # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. 
import sys -import struct import unicodedata import colorama from awscli.utils import is_a_tty - # `autoreset` allows us to not have to sent reset sequences for every # string. `strip` lets us preserve color when redirecting. COLORAMA_KWARGS = { @@ -35,28 +35,32 @@ def get_text_length(text): # * F(Fullwidth) # * W(Wide) text = str(text) - return sum(2 if unicodedata.east_asian_width(char) in 'WFA' else 1 - for char in text) + return sum( + 2 if unicodedata.east_asian_width(char) in 'WFA' else 1 + for char in text + ) def determine_terminal_width(default_width=80): # If we can't detect the terminal width, the default_width is returned. try: - from termios import TIOCGWINSZ from fcntl import ioctl + from termios import TIOCGWINSZ except ImportError: return default_width try: - height, width = struct.unpack('hhhh', ioctl(sys.stdout, - TIOCGWINSZ, '\000' * 8))[0:2] + height, width = struct.unpack( + 'hhhh', ioctl(sys.stdout, TIOCGWINSZ, '\000' * 8) + )[0:2] except Exception: return default_width else: return width -def center_text(text, length=80, left_edge='|', right_edge='|', - text_length=None): +def center_text( + text, length=80, left_edge='|', right_edge='|', text_length=None +): """Center text with specified edge chars. You can pass in the length of the text as an arg, otherwise it is computed @@ -77,15 +81,24 @@ def center_text(text, length=80, left_edge='|', right_edge='|', return final -def align_left(text, length, left_edge='|', right_edge='|', text_length=None, - left_padding=2): +def align_left( + text, + length, + left_edge='|', + right_edge='|', + text_length=None, + left_padding=2, +): """Left align text.""" # postcondition: get_text_length(returned_text) == length if text_length is None: text_length = get_text_length(text) computed_length = ( - text_length + left_padding + \ - get_text_length(left_edge) + get_text_length(right_edge)) + text_length + + left_padding + + get_text_length(left_edge) + + get_text_length(right_edge) + ) if length - computed_length >= 0: padding = left_padding else: @@ -125,9 +138,10 @@ def convert_to_vertical_table(sections): sections[i] = new_section -class IndentedStream(object): - def __init__(self, stream, indent_level, left_indent_char='|', - right_indent_char='|'): +class IndentedStream: + def __init__( + self, stream, indent_level, left_indent_char='|', right_indent_char='|' + ): self._stream = stream self._indent_level = indent_level self._left_indent_char = left_indent_char @@ -146,7 +160,7 @@ def __getattr__(self, attr): return getattr(self._stream, attr) -class Styler(object): +class Styler: def style_title(self, text): return text @@ -167,25 +181,39 @@ def __init__(self): def style_title(self, text): # Originally bold + underline return text - #return colorama.Style.BOLD + text + colorama.Style.RESET_ALL + # return colorama.Style.BOLD + text + colorama.Style.RESET_ALL def style_header_column(self, text): # Originally underline return text def style_row_element(self, text): - return (colorama.Style.BRIGHT + colorama.Fore.BLUE + - text + colorama.Style.RESET_ALL) + return ( + colorama.Style.BRIGHT + + colorama.Fore.BLUE + + text + + colorama.Style.RESET_ALL + ) def style_indentation_char(self, text): - return (colorama.Style.DIM + colorama.Fore.YELLOW + - text + colorama.Style.RESET_ALL) - - -class MultiTable(object): - def __init__(self, terminal_width=None, initial_section=True, - column_separator='|', terminal=None, - styler=None, auto_reformat=True): + return ( + colorama.Style.DIM + + colorama.Fore.YELLOW + + text + + 
colorama.Style.RESET_ALL + ) + + +class MultiTable: + def __init__( + self, + terminal_width=None, + initial_section=True, + column_separator='|', + terminal=None, + styler=None, + auto_reformat=True, + ): self._auto_reformat = auto_reformat if initial_section: self._current_section = Section() @@ -238,16 +266,22 @@ def _determine_conversion_needed(self, max_width): return self._auto_reformat def _calculate_max_width(self): - max_width = max(s.total_width(padding=4, with_border=True, - outer_padding=s.indent_level) - for s in self._sections) + max_width = max( + s.total_width( + padding=4, with_border=True, outer_padding=s.indent_level + ) + for s in self._sections + ) return max_width def _render_section(self, section, max_width, stream): - stream = IndentedStream(stream, section.indent_level, - self._styler.style_indentation_char('|'), - self._styler.style_indentation_char('|')) - max_width -= (section.indent_level * 2) + stream = IndentedStream( + stream, + section.indent_level, + self._styler.style_indentation_char('|'), + self._styler.style_indentation_char('|'), + ) + max_width -= section.indent_level * 2 self._render_title(section, max_width, stream) self._render_column_titles(section, max_width, stream) self._render_rows(section, max_width, stream) @@ -258,8 +292,12 @@ def _render_title(self, section, max_width, stream): # bottom_border: ---------------------------- if section.title: title = self._styler.style_title(section.title) - stream.write(center_text(title, max_width, '|', '|', - get_text_length(section.title)) + '\n') + stream.write( + center_text( + title, max_width, '|', '|', get_text_length(section.title) + ) + + '\n' + ) if not section.headers and not section.rows: stream.write('+%s+' % ('-' * (max_width - 2)) + '\n') @@ -268,8 +306,9 @@ def _render_column_titles(self, section, max_width, stream): return # In order to render the column titles we need to know # the width of each of the columns. - widths = section.calculate_column_widths(padding=4, - max_width=max_width) + widths = section.calculate_column_widths( + padding=4, max_width=max_width + ) # TODO: Built a list instead of +=, it's more efficient. 
current = '' length_so_far = 0 @@ -283,9 +322,13 @@ def _render_column_titles(self, section, max_width, stream): first = False else: left_edge = '' - current += center_text(text=stylized_header, length=width, - left_edge=left_edge, right_edge='|', - text_length=get_text_length(header)) + current += center_text( + text=stylized_header, + length=width, + left_edge=left_edge, + right_edge='|', + text_length=get_text_length(header), + ) length_so_far += width self._write_line_break(stream, widths) stream.write(current + '\n') @@ -307,8 +350,9 @@ def _write_line_break(self, stream, widths): def _render_rows(self, section, max_width, stream): if not section.rows: return - widths = section.calculate_column_widths(padding=4, - max_width=max_width) + widths = section.calculate_column_widths( + padding=4, max_width=max_width + ) if not widths: return self._write_line_break(stream, widths) @@ -325,16 +369,19 @@ def _render_rows(self, section, max_width, stream): else: left_edge = '' stylized = self._styler.style_row_element(element) - current += align_left(text=stylized, length=width, - left_edge=left_edge, - right_edge=self._column_separator, - text_length=get_text_length(element)) + current += align_left( + text=stylized, + length=width, + left_edge=left_edge, + right_edge=self._column_separator, + text_length=get_text_length(element), + ) length_so_far += width stream.write(current + '\n') self._write_line_break(stream, widths) -class Section(object): +class Section: def __init__(self): self.title = '' self.headers = [] @@ -344,8 +391,9 @@ def __init__(self): self._max_widths = [] def __repr__(self): - return ("Section(title=%s, headers=%s, indent_level=%s, num_rows=%s)" % - (self.title, self.headers, self.indent_level, len(self.rows))) + return ( + f"Section(title={self.title}, headers={self.headers}, indent_level={self.indent_level}, num_rows={len(self.rows)})" + ) def calculate_column_widths(self, padding=0, max_width=None): # postcondition: sum(widths) == max_width @@ -385,8 +433,13 @@ def total_width(self, padding=0, with_border=False, outer_padding=0): if with_border: total += border_padding total += outer_padding + outer_padding - return max(get_text_length(self.title) + border_padding + outer_padding + - outer_padding, total) + return max( + get_text_length(self.title) + + border_padding + + outer_padding + + outer_padding, + total, + ) def add_title(self, title): self.title = title @@ -404,8 +457,10 @@ def add_row(self, row): if self._num_cols is None: self._num_cols = len(row) if len(row) != self._num_cols: - raise ValueError("Row should have %s elements, instead " - "it has %s" % (self._num_cols, len(row))) + raise ValueError( + f"Row should have {self._num_cols} elements, instead " + f"it has {len(row)}" + ) row = self._format_row(row) self.rows.append(row) self._update_max_widths(row) @@ -418,4 +473,6 @@ def _update_max_widths(self, row): self._max_widths = [get_text_length(el) for el in row] else: for i, el in enumerate(row): - self._max_widths[i] = max(get_text_length(el), self._max_widths[i]) + self._max_widths[i] = max( + get_text_length(el), self._max_widths[i] + ) diff --git a/awscli/testutils.py b/awscli/testutils.py index 5153fa960559..a02ee9f8af76 100644 --- a/awscli/testutils.py +++ b/awscli/testutils.py @@ -19,37 +19,34 @@ advantage of all the testing utilities we provide. 
""" -import os -import sys + +import binascii +import contextlib import copy -import shutil -import time import json import logging -import tempfile -import platform -import contextlib -import binascii import math +import os +import platform +import shutil +import sys +import tempfile +import time +import unittest from pprint import pformat -from subprocess import Popen, PIPE +from subprocess import PIPE, Popen from unittest import mock -import unittest - -from awscli.compat import BytesIO, StringIO from ruamel.yaml import YAML -from botocore.session import Session -from botocore.exceptions import ClientError -from botocore.exceptions import WaiterError -import botocore.loaders -from botocore.awsrequest import AWSResponse - import awscli.clidriver -from awscli.plugin import load_plugins +import botocore.loaders from awscli.clidriver import CLIDriver - +from awscli.compat import BytesIO, StringIO +from awscli.plugin import load_plugins +from botocore.awsrequest import AWSResponse +from botocore.exceptions import ClientError, WaiterError +from botocore.session import Session _LOADER = botocore.loaders.Loader() INTEG_LOG = logging.getLogger('awscli.tests.integration') @@ -66,9 +63,12 @@ def test_some_non_windows_stuff(self): self.assertEqual(...) """ + def decorator(func): return unittest.skipIf( - platform.system() not in ['Darwin', 'Linux'], reason)(func) + platform.system() not in ['Darwin', 'Linux'], reason + )(func) + return decorator @@ -82,8 +82,10 @@ def test_some_windows_stuff(self): self.assertEqual(...) """ + def decorator(func): return unittest.skipIf(platform.system() != 'Windows', reason)(func) + return decorator @@ -101,6 +103,7 @@ def create_clidriver(): def get_aws_cmd(): global AWS_CMD import awscli + if AWS_CMD is None: # Try /bin/aws repo_root = os.path.dirname(os.path.abspath(awscli.__file__)) @@ -108,10 +111,12 @@ def get_aws_cmd(): if not os.path.isfile(aws_cmd): aws_cmd = _search_path_for_cmd('aws') if aws_cmd is None: - raise ValueError('Could not find "aws" executable. Either ' - 'make sure it is on your PATH, or you can ' - 'explicitly set this value using ' - '"set_aws_cmd()"') + raise ValueError( + 'Could not find "aws" executable. Either ' + 'make sure it is on your PATH, or you can ' + 'explicitly set this value using ' + '"set_aws_cmd()"' + ) AWS_CMD = aws_cmd return AWS_CMD @@ -142,7 +147,7 @@ def temporary_file(mode): """ temporary_directory = tempfile.mkdtemp() - basename = 'tmpfile-%s' % str(random_chars(8)) + basename = f'tmpfile-{str(random_chars(8))}' full_filename = os.path.join(temporary_directory, basename) open(full_filename, 'w').close() try: @@ -197,15 +202,12 @@ def create_dir_bucket(session, name=None, location=None): params = { 'Bucket': bucket_name, 'CreateBucketConfiguration': { - 'Location': { - 'Type': 'AvailabilityZone', - 'Name': az - }, + 'Location': {'Type': 'AvailabilityZone', 'Name': az}, 'Bucket': { 'Type': 'Directory', - 'DataRedundancy': 'SingleAvailabilityZone' - } - } + 'DataRedundancy': 'SingleAvailabilityZone', + }, + }, } try: client.create_bucket(**params) @@ -249,6 +251,7 @@ class BaseCLIDriverTest(unittest.TestCase): This will load all the default plugins as well so it will simulate the behavior the user will see. 
""" + def setUp(self): self.environ = { 'AWS_DATA_PATH': os.environ['AWS_DATA_PATH'], @@ -268,35 +271,39 @@ def tearDown(self): class BaseAWSHelpOutputTest(BaseCLIDriverTest): def setUp(self): - super(BaseAWSHelpOutputTest, self).setUp() + super().setUp() self.renderer_patch = mock.patch('awscli.help.get_renderer') self.renderer_mock = self.renderer_patch.start() self.renderer = CapturedRenderer() self.renderer_mock.return_value = self.renderer def tearDown(self): - super(BaseAWSHelpOutputTest, self).tearDown() + super().tearDown() self.renderer_patch.stop() def assert_contains(self, contains): if contains not in self.renderer.rendered_contents: - self.fail("The expected contents:\n%s\nwere not in the " - "actual rendered contents:\n%s" % ( - contains, self.renderer.rendered_contents)) + self.fail( + f"The expected contents:\n{contains}\nwere not in the " + f"actual rendered contents:\n{self.renderer.rendered_contents}" + ) def assert_contains_with_count(self, contains, count): r_count = self.renderer.rendered_contents.count(contains) if r_count != count: - self.fail("The expected contents:\n%s\n, with the " - "count:\n%d\nwere not in the actual rendered " - " contents:\n%s\nwith count:\n%d" % ( - contains, count, self.renderer.rendered_contents, r_count)) + self.fail( + "The expected contents:\n%s\n, with the " + "count:\n%d\nwere not in the actual rendered " + " contents:\n%s\nwith count:\n%d" + % (contains, count, self.renderer.rendered_contents, r_count) + ) def assert_not_contains(self, contents): if contents in self.renderer.rendered_contents: - self.fail("The contents:\n%s\nwere not suppose to be in the " - "actual rendered contents:\n%s" % ( - contents, self.renderer.rendered_contents)) + self.fail( + f"The contents:\n{contents}\nwere not suppose to be in the " + f"actual rendered contents:\n{self.renderer.rendered_contents}" + ) def assert_text_order(self, *args, **kwargs): # First we need to find where the SYNOPSIS section starts. 
@@ -309,15 +316,18 @@ def assert_text_order(self, *args, **kwargs): previous = arg_indices[0] for i, index in enumerate(arg_indices[1:], 1): if index == -1: - self.fail('The string %r was not found in the contents: %s' - % (args[index], contents)) + self.fail( + f'The string {args[index]!r} was not found in the contents: {contents}' + ) if index < previous: - self.fail('The string %r came before %r, but was suppose to come ' - 'after it.\n%s' % (args[i], args[i - 1], contents)) + self.fail( + f'The string {args[i]!r} came before {args[i - 1]!r}, but was suppose to come ' + f'after it.\n{contents}' + ) previous = index -class CapturedRenderer(object): +class CapturedRenderer: def __init__(self): self.rendered_contents = '' @@ -325,7 +335,7 @@ def render(self, contents): self.rendered_contents = contents.decode('utf-8') -class CapturedOutput(object): +class CapturedOutput: def __init__(self, stdout, stderr): self.stdout = stdout self.stderr = stderr @@ -388,7 +398,9 @@ def setUp(self): self.http_response = AWSResponse(None, 200, {}, None) self.error_http_response = AWSResponse(None, 400, {}, None) self.parsed_response = {} - self.make_request_patch = mock.patch('botocore.endpoint.Endpoint.make_request') + self.make_request_patch = mock.patch( + 'botocore.endpoint.Endpoint.make_request' + ) self.make_request_is_patched = False self.operations_called = [] self.parsed_responses = None @@ -424,7 +436,10 @@ def patch_make_request(self): if self.parsed_responses is not None: make_request_patch.side_effect = self._request_patch_side_effect else: - make_request_patch.return_value = (self.http_response, self.parsed_response) + make_request_patch.return_value = ( + self.http_response, + self.parsed_response, + ) self.make_request_is_patched = True def _request_patch_side_effect(self, *args, **kwargs): @@ -436,8 +451,14 @@ def _request_patch_side_effect(self, *args, **kwargs): http_response = self.error_http_response return http_response, parsed_response - def assert_params_for_cmd(self, cmd, params=None, expected_rc=0, - stderr_contains=None, ignore_params=None): + def assert_params_for_cmd( + self, + cmd, + params=None, + expected_rc=0, + stderr_contains=None, + ignore_params=None, + ): stdout, stderr, rc = self.run_cmd(cmd, expected_rc) if stderr_contains is not None: self.assertIn(stderr_contains, stderr) @@ -451,11 +472,12 @@ def assert_params_for_cmd(self, cmd, params=None, expected_rc=0, except KeyError: pass if params != last_kwargs: - self.fail("Actual params did not match expected params.\n" - "Expected:\n\n" - "%s\n" - "Actual:\n\n%s\n" % ( - pformat(params), pformat(last_kwargs))) + self.fail( + "Actual params did not match expected params.\n" + "Expected:\n\n" + f"{pformat(params)}\n" + f"Actual:\n\n{pformat(last_kwargs)}\n" + ) return stdout, stderr, rc def before_parameter_build(self, params, model, **kwargs): @@ -468,7 +490,8 @@ def run_cmd(self, cmd, expected_rc=0): event_emitter = self.driver.session.get_component('event_emitter') event_emitter.register('before-call', self.before_call) event_emitter.register_first( - 'before-parameter-build.*.*', self.before_parameter_build) + 'before-parameter-build.*.*', self.before_parameter_build + ) if not isinstance(cmd, list): cmdlist = cmd.split() else: @@ -478,10 +501,11 @@ def run_cmd(self, cmd, expected_rc=0): stderr = captured.stderr.getvalue() stdout = captured.stdout.getvalue() self.assertEqual( - rc, expected_rc, - "Unexpected rc (expected: %s, actual: %s) for command: %s\n" - "stdout:\n%sstderr:\n%s" % ( - expected_rc, rc, cmd, stdout, 
stderr)) + rc, + expected_rc, + f"Unexpected rc (expected: {expected_rc}, actual: {rc}) for command: {cmd}\n" + f"stdout:\n{stdout}stderr:\n{stderr}", + ) return stdout, stderr, rc @@ -492,7 +516,7 @@ def setUp(self): 'AWS_DEFAULT_REGION': 'us-east-1', 'AWS_ACCESS_KEY_ID': 'access_key', 'AWS_SECRET_ACCESS_KEY': 'secret_key', - 'AWS_CONFIG_FILE': '' + 'AWS_CONFIG_FILE': '', } self.environ_patch = mock.patch('os.environ', self.environ) self.environ_patch.start() @@ -502,7 +526,6 @@ def setUp(self): self.driver = create_clidriver() self.entry_point = awscli.clidriver.AWSCLIEntryPoint(self.driver) - def tearDown(self): self.environ_patch.stop() if self.send_is_patched: @@ -514,9 +537,9 @@ def patch_send(self, status_code=200, headers={}, content=b''): self.send_patch.stop() self.send_is_patched = False send_patch = self.send_patch.start() - send_patch.return_value = mock.Mock(status_code=status_code, - headers=headers, - content=content) + send_patch.return_value = mock.Mock( + status_code=status_code, headers=headers, content=content + ) self.send_is_patched = True def run_cmd(self, cmd, expected_rc=0): @@ -532,14 +555,15 @@ def run_cmd(self, cmd, expected_rc=0): stderr = captured.stderr.getvalue() stdout = captured.stdout.getvalue() self.assertEqual( - rc, expected_rc, - "Unexpected rc (expected: %s, actual: %s) for command: %s\n" - "stdout:\n%sstderr:\n%s" % ( - expected_rc, rc, cmd, stdout, stderr)) + rc, + expected_rc, + f"Unexpected rc (expected: {expected_rc}, actual: {rc}) for command: {cmd}\n" + f"stdout:\n{stdout}stderr:\n{stderr}", + ) return stdout, stderr, rc -class FileCreator(object): +class FileCreator: def __init__(self): self.rootdir = tempfile.mkdtemp() @@ -547,8 +571,9 @@ def remove_all(self): if os.path.exists(self.rootdir): shutil.rmtree(self.rootdir) - def create_file(self, filename, contents, mtime=None, mode='w', - encoding=None): + def create_file( + self, filename, contents, mtime=None, mode='w', encoding=None + ): """Creates a file in a tmpdir ``filename`` should be a relative path, e.g. "foo/bar/baz.txt" @@ -611,7 +636,7 @@ class ProcessTerminatedError(Exception): pass -class Result(object): +class Result: def __init__(self, rc, stdout, stderr, memory_usage=None): self.rc = rc self.stdout = stdout @@ -638,8 +663,14 @@ def _escape_quotes(command): return command -def aws(command, collect_memory=False, env_vars=None, - wait_for_finish=True, input_data=None, input_file=None): +def aws( + command, + collect_memory=False, + env_vars=None, + wait_for_finish=True, + input_data=None, + input_file=None, +): """Run an aws command. 
This help function abstracts the differences of running the "aws" @@ -676,8 +707,8 @@ def aws(command, collect_memory=False, env_vars=None, if 'AWS_TEST_COMMAND' in os.environ: aws_command = os.environ['AWS_TEST_COMMAND'] else: - aws_command = 'python %s' % get_aws_cmd() - full_command = '%s %s' % (aws_command, command) + aws_command = f'python {get_aws_cmd()}' + full_command = f'{aws_command} {command}' stdout_encoding = get_stdout_encoding() INTEG_LOG.debug("Running command: %s", full_command) env = os.environ.copy() @@ -687,8 +718,14 @@ def aws(command, collect_memory=False, env_vars=None, env = env_vars if input_file is None: input_file = PIPE - process = Popen(full_command, stdout=PIPE, stderr=PIPE, stdin=input_file, - shell=True, env=env) + process = Popen( + full_command, + stdout=PIPE, + stderr=PIPE, + stdin=input_file, + shell=True, + env=env, + ) if not wait_for_finish: return process memory = None @@ -699,10 +736,12 @@ def aws(command, collect_memory=False, env_vars=None, stdout, stderr = process.communicate(**kwargs) else: stdout, stderr, memory = _wait_and_collect_mem(process) - return Result(process.returncode, - stdout.decode(stdout_encoding), - stderr.decode(stdout_encoding), - memory) + return Result( + process.returncode, + stdout.decode(stdout_encoding), + stderr.decode(stdout_encoding), + memory, + ) def get_stdout_encoding(): @@ -720,8 +759,8 @@ def _wait_and_collect_mem(process): get_memory = _get_memory_with_ps else: raise ValueError( - "Can't collect memory for process on platform %s." % - platform.system()) + f"Can't collect memory for process on platform {platform.system()}." + ) memory = [] while process.poll() is None: try: @@ -758,6 +797,7 @@ class BaseS3CLICommand(unittest.TestCase): and more streamlined. """ + _PUT_HEAD_SHARED_EXTRAS = [ 'SSECustomerAlgorithm', 'SSECustomerKey', @@ -803,8 +843,10 @@ def assert_key_contents_equal(self, bucket, key, expected_contents): # without necessarily printing the actual contents. 
self.assertEqual(len(actual_contents), len(expected_contents)) if actual_contents != expected_contents: - self.fail("Contents for %s/%s do not match (but they " - "have the same length)" % (bucket, key)) + self.fail( + f"Contents for {bucket}/{key} do not match (but they " + "have the same length)" + ) def delete_public_access_block(self, bucket_name): client = self.create_client_for_bucket(bucket_name) @@ -825,10 +867,7 @@ def create_bucket(self, name=None, region=None): def put_object(self, bucket_name, key_name, contents='', extra_args=None): client = self.create_client_for_bucket(bucket_name) - call_args = { - 'Bucket': bucket_name, - 'Key': key_name, 'Body': contents - } + call_args = {'Bucket': bucket_name, 'Key': key_name, 'Body': contents} if extra_args is not None: call_args.update(extra_args) response = client.put_object(**call_args) @@ -836,7 +875,8 @@ def put_object(self, bucket_name, key_name, contents='', extra_args=None): extra_head_params = {} if extra_args: extra_head_params = dict( - (k, v) for (k, v) in extra_args.items() + (k, v) + for (k, v) in extra_args.items() if k in self._PUT_HEAD_SHARED_EXTRAS ) self.wait_until_key_exists( @@ -881,7 +921,7 @@ def remove_all_objects(self, bucket_name): def delete_key(self, bucket_name, key_name): client = self.create_client_for_bucket(bucket_name) - response = client.delete_object(Bucket=bucket_name, Key=key_name) + client.delete_object(Bucket=bucket_name, Key=key_name) def get_key_contents(self, bucket_name, key_name): self.wait_until_key_exists(bucket_name, key_name) @@ -893,7 +933,8 @@ def wait_bucket_exists(self, bucket_name, min_successes=3): client = self.create_client_for_bucket(bucket_name) waiter = client.get_waiter('bucket_exists') consistency_waiter = ConsistencyWaiter( - min_successes=min_successes, delay_initial_poll=True) + min_successes=min_successes, delay_initial_poll=True + ) consistency_waiter.wait( lambda: waiter.wait(Bucket=bucket_name) is None ) @@ -911,7 +952,8 @@ def bucket_not_exists(self, bucket_name): def key_exists(self, bucket_name, key_name, min_successes=3): try: self.wait_until_key_exists( - bucket_name, key_name, min_successes=min_successes) + bucket_name, key_name, min_successes=min_successes + ) return True except (ClientError, WaiterError): return False @@ -919,7 +961,8 @@ def key_exists(self, bucket_name, key_name, min_successes=3): def key_not_exists(self, bucket_name, key_name, min_successes=3): try: self.wait_until_key_not_exists( - bucket_name, key_name, min_successes=min_successes) + bucket_name, key_name, min_successes=min_successes + ) return True except (ClientError, WaiterError): return False @@ -937,18 +980,28 @@ def head_object(self, bucket_name, key_name): response = client.head_object(Bucket=bucket_name, Key=key_name) return response - def wait_until_key_exists(self, bucket_name, key_name, extra_params=None, - min_successes=3): - self._wait_for_key(bucket_name, key_name, extra_params, - min_successes, exists=True) + def wait_until_key_exists( + self, bucket_name, key_name, extra_params=None, min_successes=3 + ): + self._wait_for_key( + bucket_name, key_name, extra_params, min_successes, exists=True + ) - def wait_until_key_not_exists(self, bucket_name, key_name, extra_params=None, - min_successes=3): - self._wait_for_key(bucket_name, key_name, extra_params, - min_successes, exists=False) + def wait_until_key_not_exists( + self, bucket_name, key_name, extra_params=None, min_successes=3 + ): + self._wait_for_key( + bucket_name, key_name, extra_params, min_successes, exists=False + ) 
- def _wait_for_key(self, bucket_name, key_name, extra_params=None, - min_successes=3, exists=True): + def _wait_for_key( + self, + bucket_name, + key_name, + extra_params=None, + min_successes=3, + exists=True, + ): client = self.create_client_for_bucket(bucket_name) if exists: waiter = client.get_waiter('object_exists') @@ -962,8 +1015,10 @@ def _wait_for_key(self, bucket_name, key_name, extra_params=None, def assert_no_errors(self, p): self.assertEqual( - p.rc, 0, - "Non zero rc (%s) received: %s" % (p.rc, p.stdout + p.stderr)) + p.rc, + 0, + f"Non zero rc ({p.rc}) received: {p.stdout + p.stderr}", + ) self.assertNotIn("Error:", p.stderr) self.assertNotIn("failed:", p.stderr) self.assertNotIn("client error", p.stderr) @@ -975,7 +1030,7 @@ def fileno(self): return 0 -class EventCaptureHandler(object): +class EventCaptureHandler: def __init__(self, handler=None): self._handler = handler self._called = False @@ -994,7 +1049,7 @@ class ConsistencyWaiterException(Exception): pass -class ConsistencyWaiter(object): +class ConsistencyWaiter: """ A waiter class for some check to reach a consistent state. @@ -1010,8 +1065,14 @@ class ConsistencyWaiter(object): :param delay: The number of seconds to delay the next API call after a failed check call. Default of 5 seconds. """ - def __init__(self, min_successes=1, max_attempts=20, delay=5, - delay_initial_poll=False): + + def __init__( + self, + min_successes=1, + max_attempts=20, + delay=5, + delay_initial_poll=False, + ): self.min_successes = min_successes self.max_attempts = max_attempts self.delay = delay @@ -1048,4 +1109,4 @@ def wait(self, check, *args, **kwargs): def _fail_message(self, attempts, successes): format_args = (attempts, successes) - return 'Failed after %s attempts, only had %s successes' % format_args + return 'Failed after {} attempts, only had {} successes'.format(*format_args) diff --git a/awscli/text.py b/awscli/text.py index a5bd0090829e..6b915b0f8f2b 100644 --- a/awscli/text.py +++ b/awscli/text.py @@ -34,15 +34,18 @@ def _format_list(item, identifier, stream): if any(isinstance(el, dict) for el in item): all_keys = _all_scalar_keys(item) for element in item: - _format_text(element, stream=stream, identifier=identifier, - scalar_keys=all_keys) + _format_text( + element, + stream=stream, + identifier=identifier, + scalar_keys=all_keys, + ) elif any(isinstance(el, list) for el in item): scalar_elements, non_scalars = _partition_list(item) if scalar_elements: _format_scalar_list(scalar_elements, identifier, stream) for non_scalar in non_scalars: - _format_text(non_scalar, stream=stream, - identifier=identifier) + _format_text(non_scalar, stream=stream, identifier=identifier) else: _format_scalar_list(item, identifier, stream) @@ -61,8 +64,7 @@ def _partition_list(item): def _format_scalar_list(elements, identifier, stream): if identifier is not None: for item in elements: - stream.write('%s\t%s\n' % (identifier.upper(), - item)) + stream.write(f'{identifier.upper()}\t{item}\n') else: # For a bare list, just print the contents. 
stream.write('\t'.join([str(item) for item in elements])) @@ -77,8 +79,7 @@ def _format_dict(scalar_keys, item, identifier, stream): stream.write('\t'.join(scalars)) stream.write('\n') for new_identifier, non_scalar in non_scalars: - _format_text(item=non_scalar, stream=stream, - identifier=new_identifier) + _format_text(item=non_scalar, stream=stream, identifier=new_identifier) def _all_scalar_keys(list_of_dicts): diff --git a/awscli/topictags.py b/awscli/topictags.py index 93d281b8add2..fee934bd6924 100644 --- a/awscli/topictags.py +++ b/awscli/topictags.py @@ -19,12 +19,13 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # -import os import json +import os + import docutils.core -class TopicTagDB(object): +class TopicTagDB: """This class acts like a database for the tags of all available topics. A tag is an element in a topic reStructured text file that contains @@ -67,19 +68,25 @@ class TopicTagDB(object): that all tag values for a specific tag of a specific topic are unique. """ - VALID_TAGS = ['category', 'description', 'title', 'related topic', - 'related command'] + VALID_TAGS = [ + 'category', + 'description', + 'title', + 'related topic', + 'related command', + ] # The default directory to look for topics. TOPIC_DIR = os.path.join( - os.path.dirname( - os.path.abspath(__file__)), 'topics') + os.path.dirname(os.path.abspath(__file__)), 'topics' + ) # The default JSON index to load. JSON_INDEX = os.path.join(TOPIC_DIR, 'topic-tags.json') - def __init__(self, tag_dictionary=None, index_file=JSON_INDEX, - topic_dir=TOPIC_DIR): + def __init__( + self, tag_dictionary=None, index_file=JSON_INDEX, topic_dir=TOPIC_DIR + ): """ :param index_file: The path to a specific JSON index to load. If nothing is specified it will default to the default JSON @@ -121,7 +128,7 @@ def valid_tags(self): def load_json_index(self): """Loads a JSON file into the tag dictionary.""" - with open(self.index_file, 'r') as f: + with open(self.index_file) as f: self._tag_dictionary = json.load(f) def save_to_json_index(self): @@ -156,7 +163,7 @@ def scan(self, topic_files): :param topic_files: A list of paths to topics to scan into memory. """ for topic_file in topic_files: - with open(topic_file, 'r') as f: + with open(topic_file) as f: # Parse out the name of the topic topic_name = self._find_topic_name(topic_file) # Add the topic to the dictionary if it does not exist @@ -164,7 +171,8 @@ def scan(self, topic_files): topic_content = f.read() # Record the tags and the values self._add_tag_and_values_from_content( - topic_name, topic_content) + topic_name, topic_content + ) def _find_topic_name(self, topic_src_file): # Get the name of each of these files @@ -193,8 +201,7 @@ def _add_tag_and_values_from_content(self, topic_name, content): self._add_tag_to_dict(topic_name, tag, tag_values) else: raise ValueError( - "Tag %s found under topic %s is not supported." - % (tag, topic_name) + f"Tag {tag} found under topic {topic_name} is not supported." ) def _add_topic_name_to_dict(self, topic_name): @@ -259,9 +266,9 @@ def query(self, tag, values=None): # no value constraints are provided or if the tag value # falls in the allowed tag values. 
if values is None or tag_value in values: - self._add_key_values(query_dict, - key=tag_value, - values=[topic_name]) + self._add_key_values( + query_dict, key=tag_value, values=[topic_name] + ) return query_dict def get_tag_value(self, topic_name, tag, default_value=None): @@ -288,8 +295,8 @@ def get_tag_single_value(self, topic_name, tag): if value is not None: if len(value) != 1: raise ValueError( - 'Tag %s for topic %s has value %s. Expected a single ' - 'element in list.' % (tag, topic_name, value) + f'Tag {tag} for topic {topic_name} has value {value}. Expected a single ' + 'element in list.' ) value = value[0] return value diff --git a/awscli/utils.py b/awscli/utils.py index c8424bba997b..d9b615c386eb 100644 --- a/awscli/utils.py +++ b/awscli/utils.py @@ -10,24 +10,28 @@ # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express or implied. See the License for the specific # language governing permissions and limitations under the License. +import contextlib import csv -import signal import datetime -import contextlib +import logging import os import re +import signal import sys -from subprocess import Popen, PIPE -import logging +from subprocess import PIPE, Popen -from awscli.compat import get_stdout_text_writer -from awscli.compat import get_popen_kwargs_for_pager_cmd -from awscli.compat import StringIO -from botocore.useragent import UserAgentComponent -from botocore.utils import resolve_imds_endpoint_mode -from botocore.utils import IMDSFetcher -from botocore.utils import BadIMDSRequestError +from awscli.compat import ( + StringIO, + get_popen_kwargs_for_pager_cmd, + get_stdout_text_writer, +) from botocore.configprovider import BaseProvider +from botocore.useragent import UserAgentComponent +from botocore.utils import ( + BadIMDSRequestError, + IMDSFetcher, + resolve_imds_endpoint_mode, +) logger = logging.getLogger(__name__) @@ -128,12 +132,15 @@ def _get_fetcher(self): def _create_fetcher(self): metadata_timeout = self._session.get_config_variable( - 'metadata_service_timeout') + 'metadata_service_timeout' + ) metadata_num_attempts = self._session.get_config_variable( - 'metadata_service_num_attempts') + 'metadata_service_num_attempts' + ) imds_config = { 'ec2_metadata_service_endpoint': self._session.get_config_variable( - 'ec2_metadata_service_endpoint'), + 'ec2_metadata_service_endpoint' + ), 'ec2_metadata_service_endpoint_mode': resolve_imds_endpoint_mode( self._session ), @@ -175,13 +182,14 @@ def retrieve_region(self): logger.debug( "Max number of attempts exceeded (%s) when " "attempting to retrieve data from metadata service.", - self._num_attempts + self._num_attempts, ) except BadIMDSRequestError as e: logger.debug( "Failed to retrieve a region from IMDS. 
" "Region detection may not be supported from this endpoint: " - "%s", e.request.url + "%s", + e.request.url, ) return None @@ -190,7 +198,7 @@ def _get_region(self): response = self._get_request( url_path=self._URL_PATH, retry_func=self._default_retry, - token=token + token=token, ) availability_zone = response.text region = availability_zone[:-1] @@ -219,7 +227,7 @@ def _split_with_quotes(value): try: parts = list(csv.reader(StringIO(value), escapechar='\\'))[0] except csv.Error: - raise ValueError("Bad csv value: %s" % value) + raise ValueError(f"Bad csv value: {value}") iter_parts = iter(parts) new_parts = [] for part in iter_parts: @@ -229,16 +237,19 @@ def _split_with_quotes(value): # Find an opening list bracket list_start = part.find('=[') - if list_start >= 0 and value.find(']') != -1 and \ - (quote_char is None or part.find(quote_char) > list_start): + if ( + list_start >= 0 + and value.find(']') != -1 + and (quote_char is None or part.find(quote_char) > list_start) + ): # This is a list, eat all the items until the end if ']' in part: # Short circuit for only one item new_chunk = part else: new_chunk = _eat_items(value, iter_parts, part, ']') - list_items = _split_with_quotes(new_chunk[list_start + 2:-1]) - new_chunk = new_chunk[:list_start + 1] + ','.join(list_items) + list_items = _split_with_quotes(new_chunk[list_start + 2 : -1]) + new_chunk = new_chunk[: list_start + 1] + ','.join(list_items) new_parts.append(new_chunk) continue elif quote_char is None: @@ -334,8 +345,11 @@ def is_document_type_container(shape): def is_streaming_blob_type(shape): """Check if the shape is a streaming blob type.""" - return (shape and shape.type_name == 'blob' and - shape.serialization.get('streaming', False)) + return ( + shape + and shape.type_name == 'blob' + and shape.serialization.get('streaming', False) + ) def is_tagged_union_type(shape): @@ -373,27 +387,27 @@ def ignore_ctrl_c(): def emit_top_level_args_parsed_event(session, args): - session.emit( - 'top-level-args-parsed', parsed_args=args, session=session) + session.emit('top-level-args-parsed', parsed_args=args, session=session) def is_a_tty(): try: return os.isatty(sys.stdout.fileno()) - except Exception as e: + except Exception: return False def is_stdin_a_tty(): try: return os.isatty(sys.stdin.fileno()) - except Exception as e: + except Exception: return False -class OutputStreamFactory(object): - def __init__(self, session, popen=None, environ=None, - default_less_flags='FRX'): +class OutputStreamFactory: + def __init__( + self, session, popen=None, environ=None, default_less_flags='FRX' + ): self._session = session self._popen = popen if popen is None: @@ -417,7 +431,7 @@ def get_pager_stream(self, preferred_pager=None): process = LazyPager(self._popen, **popen_kwargs) try: yield process.stdin - except IOError: + except OSError: # Ignore IOError since this can commonly be raised when a pager # is closed abruptly and causes a broken pipe. 
pass @@ -490,7 +504,7 @@ def dump_yaml_to_str(yaml, data): return stream.getvalue() -class ShapeWalker(object): +class ShapeWalker: def walk(self, shape, visitor): """Walk through and visit shapes for introspection @@ -510,7 +524,7 @@ def _walk(self, shape, visitor, stack): if shape.name in stack: return stack.append(shape.name) - getattr(self, '_walk_%s' % shape.type_name, self._default_scalar_walk)( + getattr(self, f'_walk_{shape.type_name}', self._default_scalar_walk)( shape, visitor, stack ) stack.pop() @@ -535,14 +549,16 @@ def _do_shape_visit(self, shape, visitor): visitor.visit_shape(shape) -class BaseShapeVisitor(object): +class BaseShapeVisitor: """Visit shape encountered by ShapeWalker""" + def visit_shape(self, shape): pass class ShapeRecordingVisitor(BaseShapeVisitor): """Record shapes visited by ShapeWalker""" + def __init__(self): self.visited = [] @@ -558,12 +574,13 @@ def add_component_to_user_agent_extra(session, component): def add_metadata_component_to_user_agent_extra(session, name, value=None): add_component_to_user_agent_extra( - session, - UserAgentComponent("md", name, value) + session, UserAgentComponent("md", name, value) ) def add_command_lineage_to_user_agent_extra(session, lineage): # Only add a command lineage if one is not already present in the user agent extra. if not re.search(r'md\/command#[\w\.]*', session.user_agent_extra): - add_metadata_component_to_user_agent_extra(session, "command", ".".join(lineage)) + add_metadata_component_to_user_agent_extra( + session, "command", ".".join(lineage) + )