diff --git a/.ci/polish/cli.py b/.ci/polish/cli.py index 550ddee5fb..362c398a5c 100755 --- a/.ci/polish/cli.py +++ b/.ci/polish/cli.py @@ -114,23 +114,23 @@ def launch(expression, code, use_calculations, use_calcfunctions, sleep, timeout valid, error = validate(expression) if not valid: - click.echo("the expression '{}' is invalid: {}".format(expression, error)) + click.echo(f"the expression '{expression}' is invalid: {error}") sys.exit(1) - filename = 'polish_{}.py'.format(str(uuid.uuid4().hex)) + filename = f'polish_{str(uuid.uuid4().hex)}.py' evaluated = evaluate(expression, modulo) outlines, stack = generate_outlines(expression) outlines_string = format_outlines(outlines, use_calculations, use_calcfunctions) write_workchain(outlines_string, filename=filename) - click.echo('Expression: {}'.format(expression)) + click.echo(f'Expression: {expression}') if not dry_run: try: - workchain_module = 'polish_workchains.{}'.format(filename.replace('.py', '')) + workchain_module = f"polish_workchains.{filename.replace('.py', '')}" workchains = importlib.import_module(workchain_module) except ImportError: - click.echo('could not import the {} module'.format(workchain_module)) + click.echo(f'could not import the {workchain_module} module') sys.exit(1) inputs = {'modulo': Int(modulo), 'operands': Str(' '.join(stack))} @@ -153,8 +153,7 @@ def launch(expression, code, use_calculations, use_calcfunctions, sleep, timeout if timed_out: click.secho('Failed: ', fg='red', bold=True, nl=False) click.secho( - 'the workchain<{}> did not finish in time and the operation timed out'.format(workchain.pk), - bold=True + f'the workchain<{workchain.pk}> did not finish in time and the operation timed out', bold=True ) sys.exit(1) @@ -162,17 +161,17 @@ def launch(expression, code, use_calculations, use_calcfunctions, sleep, timeout result = workchain.outputs.result except AttributeError: click.secho('Failed: ', fg='red', bold=True, nl=False) - click.secho('the workchain<{}> did not return a result output node'.format(workchain.pk), bold=True) + click.secho(f'the workchain<{workchain.pk}> did not return a result output node', bold=True) sys.exit(1) else: results, workchain = run_get_node(workchains.Polish00WorkChain, **inputs) result = results['result'] - click.echo('Evaluated : {}'.format(evaluated)) + click.echo(f'Evaluated : {evaluated}') if not dry_run: - click.echo('Workchain : {} <{}>'.format(result, workchain.pk)) + click.echo(f'Workchain : {result} <{workchain.pk}>') if result != evaluated: click.secho('Failed: ', fg='red', bold=True, nl=False) diff --git a/.ci/polish/lib/expression.py b/.ci/polish/lib/expression.py index e982de2b4c..11d66a63a1 100644 --- a/.ci/polish/lib/expression.py +++ b/.ci/polish/lib/expression.py @@ -73,7 +73,7 @@ def validate(expression): try: symbols = expression.split() except ValueError as exception: - return False, 'failed to split the expression into symbols: {}'.format(exception) + return False, f'failed to split the expression into symbols: {exception}' while len(symbols) > 1: try: @@ -85,19 +85,19 @@ def validate(expression): try: operand = int(operand) except ValueError: - return False, 'the operand {} is not a valid integer'.format(operand) + return False, f'the operand {operand} is not a valid integer' if operator not in OPERATORS.keys(): - return False, 'the operator {} is not supported'.format(operator) + return False, f'the operator {operator} is not supported' if OPERATORS[operator] is operators.pow and operand < 0: - return False, 'a negative operand {} was found for the ^ 
operator, which is not allowed'.format(operand) + return False, f'a negative operand {operand} was found for the ^ operator, which is not allowed' # At this point the symbols list should only contain the initial operand try: operand = int(symbols.pop()) except ValueError: - return False, 'the operand {} is not a valid integer'.format(operand) + return False, f'the operand {operand} is not a valid integer' if symbols: return False, 'incorrect number of symbols found, should contain N operands followed by (N - 1) operators' diff --git a/.ci/polish/lib/workchain.py b/.ci/polish/lib/workchain.py index 74b3939e3b..e20a9a3308 100644 --- a/.ci/polish/lib/workchain.py +++ b/.ci/polish/lib/workchain.py @@ -231,15 +231,15 @@ def write_workchain(outlines, directory=None, filename=None): outline_string = '' for subline in outline.split('\n'): - outline_string += '\t\t\t{}\n'.format(subline) + outline_string += f'\t\t\t{subline}\n' if counter == len(outlines) - 1: child_class = None else: - child_class = 'Polish{:02d}WorkChain'.format(counter + 1) + child_class = f'Polish{counter + 1:02d}WorkChain' subs = { - 'class_name': 'Polish{:02d}WorkChain'.format(counter), + 'class_name': f'Polish{counter:02d}WorkChain', 'child_class': child_class, 'outline': outline_string, } diff --git a/.ci/test_daemon.py b/.ci/test_daemon.py index 85668da6c0..e6638b91f0 100644 --- a/.ci/test_daemon.py +++ b/.ci/test_daemon.py @@ -37,14 +37,14 @@ def print_daemon_log(): daemon_client = get_daemon_client() daemon_log = daemon_client.daemon_log_file - print("Output of 'cat {}':".format(daemon_log)) + print(f"Output of 'cat {daemon_log}':") try: print(subprocess.check_output( - ['cat', '{}'.format(daemon_log)], + ['cat', f'{daemon_log}'], stderr=subprocess.STDOUT, )) except subprocess.CalledProcessError as exception: - print('Note: the command failed, message: {}'.format(exception)) + print(f'Note: the command failed, message: {exception}') def jobs_have_finished(pks): @@ -55,21 +55,21 @@ def jobs_have_finished(pks): for node in node_list: if not node.is_terminated: - print('not terminated: {} [{}]'.format(node.pk, node.process_state)) - print('{}/{} finished'.format(num_finished, len(finished_list))) + print(f'not terminated: {node.pk} [{node.process_state}]') + print(f'{num_finished}/{len(finished_list)} finished') return False not in finished_list def print_report(pk): """Print the process report for given pk.""" - print("Output of 'verdi process report {}':".format(pk)) + print(f"Output of 'verdi process report {pk}':") try: print(subprocess.check_output( - ['verdi', 'process', 'report', '{}'.format(pk)], + ['verdi', 'process', 'report', f'{pk}'], stderr=subprocess.STDOUT, )) except subprocess.CalledProcessError as exception: - print('Note: the command failed, message: {}'.format(exception)) + print(f'Note: the command failed, message: {exception}') def validate_calculations(expected_results): @@ -79,18 +79,14 @@ def validate_calculations(expected_results): for pk, expected_dict in expected_results.items(): calc = load_node(pk) if not calc.is_finished_ok: - print( - 'Calculation<{}> not finished ok: process_state<{}> exit_status<{}>'.format( - pk, calc.process_state, calc.exit_status - ) - ) + print(f'Calc<{pk}> not finished ok: process_state<{calc.process_state}> exit_status<{calc.exit_status}>') print_report(pk) valid = False try: actual_dict = calc.outputs.output_parameters.get_dict() except exceptions.NotExistent: - print('Could not retrieve `output_parameters` node for Calculation<{}>'.format(pk)) + print(f'Could not 
retrieve `output_parameters` node for Calculation<{pk}>') print_report(pk) valid = False @@ -101,7 +97,7 @@ def validate_calculations(expected_results): pass if actual_dict != expected_dict: - print('* UNEXPECTED VALUE {} for calc pk={}: I expected {}'.format(actual_dict, pk, expected_dict)) + print(f'* UNEXPECTED VALUE {actual_dict} for calc pk={pk}: I expected {expected_dict}') valid = False return valid @@ -166,7 +162,7 @@ def validate_cached(cached_calcs): valid = False if '_aiida_cached_from' not in calc.extras or calc.get_hash() != calc.get_extra('_aiida_hash'): - print('Cached calculation<{}> has invalid hash'.format(calc.pk)) + print(f'Cached calculation<{calc.pk}> has invalid hash') print_report(calc.pk) valid = False @@ -176,7 +172,7 @@ def validate_cached(cached_calcs): files_cached = calc.list_object_names() if not files_cached: - print('Cached calculation <{}> does not have any raw inputs files'.format(calc.pk)) + print(f'Cached calculation <{calc.pk}> does not have any raw inputs files') print_report(calc.pk) valid = False if not files_original: @@ -204,7 +200,7 @@ def launch_calculation(code, counter, inputval): """ process, inputs, expected_result = create_calculation_process(code=code, inputval=inputval) calc = submit(process, **inputs) - print('[{}] launched calculation {}, pk={}'.format(counter, calc.uuid, calc.pk)) + print(f'[{counter}] launched calculation {calc.uuid}, pk={calc.pk}') return calc, expected_result @@ -214,7 +210,7 @@ def run_calculation(code, counter, inputval): """ process, inputs, expected_result = create_calculation_process(code=code, inputval=inputval) _, calc = run.get_node(process, **inputs) - print('[{}] ran calculation {}, pk={}'.format(counter, calc.uuid, calc.pk)) + print(f'[{counter}] ran calculation {calc.uuid}, pk={calc.pk}') return calc, expected_result @@ -361,14 +357,14 @@ def main(): run_multiply_add_workchain() # Submitting the Calculations the new way directly through the launchers - print('Submitting {} calculations to the daemon'.format(NUMBER_CALCULATIONS)) + print(f'Submitting {NUMBER_CALCULATIONS} calculations to the daemon') for counter in range(1, NUMBER_CALCULATIONS + 1): inputval = counter calc, expected_result = launch_calculation(code=code_doubler, counter=counter, inputval=inputval) expected_results_calculations[calc.pk] = expected_result # Submitting the Workchains - print('Submitting {} workchains to the daemon'.format(NUMBER_WORKCHAINS)) + print(f'Submitting {NUMBER_WORKCHAINS} workchains to the daemon') for index in range(NUMBER_WORKCHAINS): inp = Int(index) _, node = run.get_node(NestedWorkChain, inp=inp) @@ -435,7 +431,7 @@ def main(): # that the test machine is shut down because there is no output print('#' * 78) - print('####### TIME ELAPSED: {} s'.format(time.time() - start_time)) + print(f'####### TIME ELAPSED: {time.time() - start_time} s') print('#' * 78) print("Output of 'verdi process list -a':") try: @@ -444,7 +440,7 @@ def main(): stderr=subprocess.STDOUT, )) except subprocess.CalledProcessError as exception: - print('Note: the command failed, message: {}'.format(exception)) + print(f'Note: the command failed, message: {exception}') print("Output of 'verdi daemon status':") try: @@ -453,7 +449,7 @@ def main(): stderr=subprocess.STDOUT, )) except subprocess.CalledProcessError as exception: - print('Note: the command failed, message: {}'.format(exception)) + print(f'Note: the command failed, message: {exception}') if jobs_have_finished(pks): print('Calculation terminated its execution') @@ -463,7 +459,7 @@ def 
main(): if exited_with_timeout: print_daemon_log() print('') - print('Timeout!! Calculation did not complete after {} seconds'.format(TIMEOUTSECS)) + print(f'Timeout!! Calculation did not complete after {TIMEOUTSECS} seconds') sys.exit(2) else: # Launch the same calculations but with caching enabled -- these should be FINISHED immediately diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7ceff0877c..85c21172de 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,6 +8,15 @@ repos: - id: mixed-line-ending - id: trailing-whitespace +- repo: https://github.com/ikamensh/flynt/ + rev: '0.55' + hooks: + - id: flynt + args: [ + '--line-length=120', + '--fail-on-change', + ] + - repo: https://github.com/pre-commit/mirrors-yapf rev: v0.30.0 hooks: diff --git a/aiida/__init__.py b/aiida/__init__.py index 80ca4b8be8..3d1e6956ea 100644 --- a/aiida/__init__.py +++ b/aiida/__init__.py @@ -122,10 +122,10 @@ def _get_raw_file_header(): :return: default AiiDA source file header :rtype: str """ - return """This file has been created with AiiDA v. {} + return f"""This file has been created with AiiDA v. {__version__} If you use AiiDA for publication purposes, please cite: -{} -""".format(__version__, __paper__) +{__paper__} +""" def get_file_header(comment_char='# '): @@ -143,4 +143,4 @@ def get_file_header(comment_char='# '): :rtype: str """ lines = _get_raw_file_header().splitlines() - return '\n'.join('{}{}'.format(comment_char, line) for line in lines) + return '\n'.join(f'{comment_char}{line}' for line in lines) diff --git a/aiida/backends/__init__.py b/aiida/backends/__init__.py index 4a60f9ce8a..81095dac98 100644 --- a/aiida/backends/__init__.py +++ b/aiida/backends/__init__.py @@ -27,4 +27,4 @@ def get_backend_manager(backend): from aiida.backends.sqlalchemy.manager import SqlaBackendManager return SqlaBackendManager() - raise Exception('unknown backend type `{}`'.format(backend)) + raise Exception(f'unknown backend type `{backend}`') diff --git a/aiida/backends/djsite/db/migrations/0015_invalidating_node_hash.py b/aiida/backends/djsite/db/migrations/0015_invalidating_node_hash.py index 03f579d557..aa06e10476 100644 --- a/aiida/backends/djsite/db/migrations/0015_invalidating_node_hash.py +++ b/aiida/backends/djsite/db/migrations/0015_invalidating_node_hash.py @@ -31,8 +31,8 @@ class Migration(migrations.Migration): operations = [ migrations.RunSQL( - """ DELETE FROM db_dbextra WHERE key='""" + _HASH_EXTRA_KEY + """';""", - reverse_sql=""" DELETE FROM db_dbextra WHERE key='""" + _HASH_EXTRA_KEY + """';""" + f" DELETE FROM db_dbextra WHERE key='{_HASH_EXTRA_KEY}';", + reverse_sql=f" DELETE FROM db_dbextra WHERE key='{_HASH_EXTRA_KEY}';" ), upgrade_schema_version(REVISION, DOWN_REVISION) ] diff --git a/aiida/backends/djsite/db/migrations/0024_dblog_update.py b/aiida/backends/djsite/db/migrations/0024_dblog_update.py index 0f1580cd6f..daf92ec6b2 100644 --- a/aiida/backends/djsite/db/migrations/0024_dblog_update.py +++ b/aiida/backends/djsite/db/migrations/0024_dblog_update.py @@ -181,7 +181,7 @@ def export_and_clean_workflow_logs(apps, schema_editor): # If delete_on_close is False, we are running for the user and add additional message of file location if not delete_on_close: - click.echo('Exported legacy workflow logs to {}'.format(filename)) + click.echo(f'Exported legacy workflow logs to {filename}') # Now delete the records DbLog.objects.filter(objname__startswith=leg_workflow_prefix).delete() @@ -205,7 +205,7 @@ def export_and_clean_workflow_logs(apps, 
schema_editor): # If delete_on_close is False, we are running for the user and add additional message of file location if not delete_on_close: - click.echo('Exported unexpected entity logs to {}'.format(filename)) + click.echo(f'Exported unexpected entity logs to {filename}') # Now delete the records DbLog.objects.exclude(objname__startswith=node_prefix).exclude(objname__startswith=leg_workflow_prefix).delete() diff --git a/aiida/backends/djsite/db/migrations/0032_remove_legacy_workflows.py b/aiida/backends/djsite/db/migrations/0032_remove_legacy_workflows.py index cf16c65fe9..71ceb5b2d6 100644 --- a/aiida/backends/djsite/db/migrations/0032_remove_legacy_workflows.py +++ b/aiida/backends/djsite/db/migrations/0032_remove_legacy_workflows.py @@ -68,7 +68,7 @@ def export_workflow_data(apps, _): # If delete_on_close is False, we are running for the user and add additional message of file location if not delete_on_close: - echo.echo_info('Exported workflow data to {}'.format(filename)) + echo.echo_info(f'Exported workflow data to {filename}') class Migration(migrations.Migration): diff --git a/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py b/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py index 991d8bc8aa..aa93a255c8 100644 --- a/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py +++ b/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py @@ -127,7 +127,7 @@ def attributes_to_dict(attr_list): try: tmp_d = select_from_key(a.key, d) except ValueError: - echo.echo_error("Couldn't transfer attribute {} with key {} for dbnode {}".format(a.id, a.key, a.dbnode_id)) + echo.echo_error(f"Couldn't transfer attribute {a.id} with key {a.key} for dbnode {a.dbnode_id}") error = True continue key = a.key.split('.')[-1] diff --git a/aiida/backends/djsite/db/migrations/0039_reset_hash.py b/aiida/backends/djsite/db/migrations/0039_reset_hash.py index 374351c5fc..caad4d48d4 100644 --- a/aiida/backends/djsite/db/migrations/0039_reset_hash.py +++ b/aiida/backends/djsite/db/migrations/0039_reset_hash.py @@ -41,8 +41,8 @@ class Migration(migrations.Migration): operations = [ migrations.RunPython(notify_user, reverse_code=notify_user), migrations.RunSQL( - """UPDATE db_dbnode SET extras = extras #- '{""" + _HASH_EXTRA_KEY + """}'::text[];""", - reverse_sql="""UPDATE db_dbnode SET extras = extras #- '{""" + _HASH_EXTRA_KEY + """}'::text[];""" + f"UPDATE db_dbnode SET extras = extras #- '{{{_HASH_EXTRA_KEY}}}'::text[];", + reverse_sql=f"UPDATE db_dbnode SET extras = extras #- '{{{_HASH_EXTRA_KEY}}}'::text[];" ), upgrade_schema_version(REVISION, DOWN_REVISION) ] diff --git a/aiida/backends/djsite/db/migrations/__init__.py b/aiida/backends/djsite/db/migrations/__init__.py index b361f04156..da2065cbaf 100644 --- a/aiida/backends/djsite/db/migrations/__init__.py +++ b/aiida/backends/djsite/db/migrations/__init__.py @@ -75,9 +75,7 @@ def current_schema_version(): """Migrate the current schema version.""" # Have to use this ugly way of importing because the django migration # files start with numbers which are not a valid package name - latest_migration = __import__( - 'aiida.backends.djsite.db.migrations.{}'.format(LATEST_MIGRATION), fromlist=['REVISION'] - ) + latest_migration = __import__(f'aiida.backends.djsite.db.migrations.{LATEST_MIGRATION}', fromlist=['REVISION']) return latest_migration.REVISION @@ -103,9 +101,7 @@ def _deserialize_basic_type(mainitem): if mainitem['datatype'] == 'txt': return mainitem['tval'] raise 
TypeError( - "Expected one of the following types: 'none', 'bool', 'int', 'float', 'txt', got {}".format( - mainitem['datatype'] - ) + f"Expected one of the following types: 'none', 'bool', 'int', 'float', 'txt', got {mainitem['datatype']}" ) @@ -121,14 +117,14 @@ def deserialize_list(mainitem, subitems, sep, original_class, original_pk, lesse firstlevelsubdict = {k: v for k, v in subitems.items() if sep not in k} # For checking, I verify the expected values - expected_set = {'{:d}'.format(i) for i in range(mainitem['ival'])} + expected_set = {f'{i:d}' for i in range(mainitem['ival'])} received_set = set(firstlevelsubdict.keys()) # If there are more entries than expected, but all expected # ones are there, I just issue an error but I do not stop. if not expected_set.issubset(received_set): if (original_class is not None and original_class._subspecifier_field_name is not None): - subspecifier_string = '{}={} and '.format(original_class._subspecifier_field_name, original_pk) + subspecifier_string = f'{original_class._subspecifier_field_name}={original_pk} and ' else: subspecifier_string = '' if original_class is None: @@ -142,7 +138,7 @@ def deserialize_list(mainitem, subitems, sep, original_class, original_pk, lesse ) if expected_set != received_set: if (original_class is not None and original_class._subspecifier_field_name is not None): - subspecifier_string = '{}={} and '.format(original_class._subspecifier_field_name, original_pk) + subspecifier_string = f'{original_class._subspecifier_field_name}={original_pk} and ' else: subspecifier_string = '' @@ -167,7 +163,7 @@ def deserialize_list(mainitem, subitems, sep, original_class, original_pk, lesse ) # And then I put them in a list - retlist = [tempdict['{:d}'.format(i)] for i in range(mainitem['ival'])] + retlist = [tempdict[f'{i:d}'] for i in range(mainitem['ival'])] return retlist @@ -183,7 +179,7 @@ def deserialize_dict(mainitem, subitems, sep, original_class, original_pk, lesse if len(firstlevelsubdict) != mainitem['ival']: if (original_class is not None and original_class._subspecifier_field_name is not None): - subspecifier_string = '{}={} and '.format(original_class._subspecifier_field_name, original_pk) + subspecifier_string = f'{original_class._subspecifier_field_name}={original_pk} and ' else: subspecifier_string = '' if original_class is None: @@ -275,7 +271,7 @@ def _deserialize_attribute(mainitem, subitems, sep, original_class=None, origina except ValueError: raise DeserializationException('Error in the content of the json field') from ValueError else: - raise DeserializationException("The type field '{}' is not recognized".format(mainitem['datatype'])) + raise DeserializationException(f"The type field '{mainitem['datatype']}' is not recognized") def deserialize_attributes(data, sep, original_class=None, original_pk=None): @@ -326,10 +322,7 @@ def deserialize_attributes(data, sep, original_class=None, original_pk=None): # without mainitmes. 
lone_subitems = set(found_subitems.keys()) - set(found_mainitems.keys()) if lone_subitems: - raise DeserializationException( - 'Missing base keys for the following ' - 'items: {}'.format(','.join(lone_subitems)) - ) + raise DeserializationException(f"Missing base keys for the following items: {','.join(lone_subitems)}") # For each zero-level entity, I call the _deserialize_attribute function retval = {} @@ -426,7 +419,7 @@ def get_value_for_node(self, dbnode, key): try: attr = cls.objects.get(dbnode=dbnode_node, key=key) except ObjectDoesNotExist: - raise AttributeError('{} with key {} for node {} not found in db'.format(cls.__name__, key, dbnode.pk)) \ + raise AttributeError(f'{cls.__name__} with key {key} for node {dbnode.pk} not found in db') \ from ObjectDoesNotExist return self.getvalue(attr) @@ -435,7 +428,7 @@ def getvalue(self, attr): """This can be called on a given row and will get the corresponding value, casting it correctly. """ try: if attr.datatype == 'list' or attr.datatype == 'dict': - prefix = '{}{}'.format(attr.key, self._sep) + prefix = f'{attr.key}{self._sep}' prefix_len = len(prefix) dballsubvalues = self._model_class.objects.filter( key__startswith=prefix, **self.subspecifiers_dict(attr) @@ -443,7 +436,7 @@ def getvalue(self, attr): # Strip the FULL prefix and replace it with the simple # "attr" prefix data = { - 'attr.{}'.format(_[0][prefix_len:]): { + f'attr.{_[0][prefix_len:]}': { 'datatype': _[1], 'tval': _[2], 'fval': _[3], @@ -570,9 +563,7 @@ def del_value(self, key, only_children=False, subspecifier_value=None): if self._subspecifier_field_name is None: if subspecifier_value is not None: raise ValueError( - 'You cannot specify a subspecifier value for ' - 'class {} because it has no subspecifiers' - ''.format(cls.__name__) + f'You cannot specify a subspecifier value for class {cls.__name__} because it has no subspecifiers' ) subspecifiers_dict = {} else: @@ -583,7 +574,7 @@ def del_value(self, key, only_children=False, subspecifier_value=None): ) subspecifiers_dict = {self._subspecifier_field_name: subspecifier_value} - query = Q(key__startswith='{parentkey}{sep}'.format(parentkey=key, sep=self._sep), **subspecifiers_dict) + query = Q(key__startswith=f'{key}{self._sep}', **subspecifiers_dict) if not only_children: query.add(Q(key=key, **subspecifiers_dict), Q.OR) @@ -733,9 +724,7 @@ def create_value(self, key, value, subspecifier_value=None, other_attribs=None): if self._subspecifier_field_name is None: if subspecifier_value is not None: raise ValueError( - 'You cannot specify a subspecifier value for ' - 'class {} because it has no subspecifiers' - ''.format(cls.__name__) + f'You cannot specify a subspecifier value for class {cls.__name__} because it has no subspecifiers' ) new_entry = cls(key=key, **other_attribs) else: @@ -778,9 +767,7 @@ def create_value(self, key, value, subspecifier_value=None, other_attribs=None): # expect no concurrency) # NOTE: I do not pass other_attribs list_to_return.extend( - self.create_value( - key=('{}{}{:d}'.format(key, self._sep, i)), value=subv, subspecifier_value=subspecifier_value - ) + self.create_value(key=f'{key}{self._sep}{i:d}', value=subv, subspecifier_value=subspecifier_value) ) elif isinstance(value, dict): @@ -796,17 +783,14 @@ def create_value(self, key, value, subspecifier_value=None, other_attribs=None): # expect no concurrency) # NOTE: I do not pass other_attribs list_to_return.extend( - self.create_value( - key='{}{}{}'.format(key, self._sep, subk), value=subv, subspecifier_value=subspecifier_value - ) + 
self.create_value(key=f'{key}{self._sep}{subk}', value=subv, subspecifier_value=subspecifier_value) ) else: try: jsondata = json.dumps(value) except TypeError: raise ValueError( - 'Unable to store the value: it must be either a basic datatype, or json-serializable: {}'. - format(value) + f'Unable to store the value: it must be either a basic datatype, or json-serializable: {value}' ) from TypeError new_entry.datatype = 'json' diff --git a/aiida/backends/djsite/db/models.py b/aiida/backends/djsite/db/models.py index 6a239e2a2c..3ccfc33c2a 100644 --- a/aiida/backends/djsite/db/models.py +++ b/aiida/backends/djsite/db/models.py @@ -154,8 +154,8 @@ def __str__(self): simplename = self.get_simple_name(invalid_result='Unknown') # node pk + type if self.label: - return '{} node [{}]: {}'.format(simplename, self.pk, self.label) - return '{} node [{}]'.format(simplename, self.pk) + return f'{simplename} node [{self.pk}]: {self.label}' + return f'{simplename} node [{self.pk}]' class DbLink(m.Model): @@ -190,7 +190,7 @@ class DbSetting(m.Model): time = m.DateTimeField(auto_now=True, editable=False) def __str__(self): - return "'{}'={}".format(self.key, self.getvalue()) + return f"'{self.key}'={self.getvalue()}" @classmethod def set_value(cls, key, value, other_attribs=None, stop_if_existing=False): @@ -261,7 +261,7 @@ class Meta: unique_together = (('label', 'type_string'),) def __str__(self): - return ''.format(self.type_string, self.label) + return f'' class DbComputer(m.Model): @@ -305,7 +305,7 @@ class DbComputer(m.Model): metadata = JSONField(default=dict) def __str__(self): - return '{} ({})'.format(self.name, self.hostname) + return f'{self.name} ({self.hostname})' class DbAuthInfo(m.Model): @@ -330,8 +330,8 @@ class Meta: def __str__(self): if self.enabled: - return 'DB authorization info for {} on {}'.format(self.aiidauser.email, self.dbcomputer.name) - return 'DB authorization info for {} on {} [DISABLED]'.format(self.aiidauser.email, self.dbcomputer.name) + return f'DB authorization info for {self.aiidauser.email} on {self.dbcomputer.name}' + return f'DB authorization info for {self.aiidauser.email} on {self.dbcomputer.name} [DISABLED]' class DbComment(m.Model): @@ -363,7 +363,7 @@ class DbLog(m.Model): metadata = JSONField(default=dict) def __str__(self): - return 'DbLog: {} for node {}: {}'.format(self.levelname, self.dbnode.id, self.message) + return f'DbLog: {self.levelname} for node {self.dbnode.id}: {self.message}' @contextlib.contextmanager diff --git a/aiida/backends/djsite/manager.py b/aiida/backends/djsite/manager.py index c80ea0389d..81c12c2dfe 100644 --- a/aiida/backends/djsite/manager.py +++ b/aiida/backends/djsite/manager.py @@ -169,7 +169,7 @@ def get(self, key): setting = DbSetting.objects.filter(key=key).first() if setting is None: - raise NotExistent('setting `{}` does not exist'.format(key)) + raise NotExistent(f'setting `{key}` does not exist') return Setting(setting.key, setting.val, setting.description, setting.time) @@ -205,4 +205,4 @@ def delete(self, key): try: DbSetting.del_value(key=key) except KeyError: - raise NotExistent('setting `{}` does not exist'.format(key)) from KeyError + raise NotExistent(f'setting `{key}` does not exist') from KeyError diff --git a/aiida/backends/djsite/settings.py b/aiida/backends/djsite/settings.py index 10cd8db73a..f24053acdd 100644 --- a/aiida/backends/djsite/settings.py +++ b/aiida/backends/djsite/settings.py @@ -19,21 +19,21 @@ try: PROFILE = get_profile() except exceptions.MissingConfigurationError as exception: - raise 
exceptions.MissingConfigurationError('the configuration could not be loaded: {}'.format(exception)) + raise exceptions.MissingConfigurationError(f'the configuration could not be loaded: {exception}') if PROFILE is None: raise exceptions.ProfileConfigurationError('no profile has been loaded') if PROFILE.database_backend != 'django': raise exceptions.ProfileConfigurationError( - 'incommensurate database backend `{}` for profile `{}`'.format(PROFILE.database_backend, PROFILE.name) + f'incommensurate database backend `{PROFILE.database_backend}` for profile `{PROFILE.name}`' ) PROFILE_CONF = PROFILE.dictionary DATABASES = { 'default': { - 'ENGINE': 'django.db.backends.' + PROFILE.database_engine, + 'ENGINE': f'django.db.backends.{PROFILE.database_engine}', 'NAME': PROFILE.database_name, 'PORT': PROFILE.database_port, 'HOST': PROFILE.database_hostname, diff --git a/aiida/backends/general/abstractqueries.py b/aiida/backends/general/abstractqueries.py index 6e3b56812e..cdf4f9baec 100644 --- a/aiida/backends/general/abstractqueries.py +++ b/aiida/backends/general/abstractqueries.py @@ -31,15 +31,15 @@ def get_duplicate_uuids(self, table): :return: list of tuples of (id, uuid) of rows with duplicate UUIDs :rtype list: """ - query = """ - SELECT s.id, s.uuid FROM (SELECT *, COUNT(*) OVER(PARTITION BY uuid) AS c FROM {}) + query = f""" + SELECT s.id, s.uuid FROM (SELECT *, COUNT(*) OVER(PARTITION BY uuid) AS c FROM {table}) AS s WHERE c > 1 - """.format(table) + """ return self._backend.execute_raw(query) def apply_new_uuid_mapping(self, table, mapping): for pk, uuid in mapping.items(): - query = """UPDATE {table:} SET uuid = '{uuid:}' WHERE id = {pk:}""".format(table=table, uuid=uuid, pk=pk) + query = f"""UPDATE {table} SET uuid = '{uuid}' WHERE id = {pk}""" with self._backend.cursor() as cursor: cursor.execute(query) diff --git a/aiida/backends/general/migrations/utils.py b/aiida/backends/general/migrations/utils.py index 40f2de4fde..fd1e8c69dc 100644 --- a/aiida/backends/general/migrations/utils.py +++ b/aiida/backends/general/migrations/utils.py @@ -85,7 +85,7 @@ def get_numpy_array_absolute_path(uuid, name): :param name: the name of the numpy array :return: the absolute path of the numpy array file """ - return os.path.join(get_node_repository_sub_folder(uuid), name + '.npy') + return os.path.join(get_node_repository_sub_folder(uuid), f'{name}.npy') def store_numpy_array_in_repository(uuid, name, array): diff --git a/aiida/backends/sqlalchemy/manager.py b/aiida/backends/sqlalchemy/manager.py index ab69457ee1..3d4b92fdaf 100644 --- a/aiida/backends/sqlalchemy/manager.py +++ b/aiida/backends/sqlalchemy/manager.py @@ -179,7 +179,7 @@ def get(self, key): try: setting = get_scoped_session().query(DbSetting).filter_by(key=key).one() except NoResultFound: - raise NotExistent('setting `{}` does not exist'.format(key)) from NoResultFound + raise NotExistent(f'setting `{key}` does not exist') from NoResultFound return Setting(key, setting.getvalue(), setting.description, setting.time) @@ -215,4 +215,4 @@ def delete(self, key): setting = get_scoped_session().query(DbSetting).filter_by(key=key).one() setting.delete() except NoResultFound: - raise NotExistent('setting `{}` does not exist'.format(key)) from NoResultFound + raise NotExistent(f'setting `{key}` does not exist') from NoResultFound diff --git a/aiida/backends/sqlalchemy/migrations/versions/041a79fc615f_dblog_cleaning.py b/aiida/backends/sqlalchemy/migrations/versions/041a79fc615f_dblog_cleaning.py index b5b46f1ba3..33e45372b3 100644 --- 
a/aiida/backends/sqlalchemy/migrations/versions/041a79fc615f_dblog_cleaning.py +++ b/aiida/backends/sqlalchemy/migrations/versions/041a79fc615f_dblog_cleaning.py @@ -178,7 +178,7 @@ def export_and_clean_workflow_logs(connection): # If delete_on_close is False, we are running for the user and add additional message of file location if not delete_on_close: - click.echo('Exported legacy workflow logs to {}'.format(filename)) + click.echo(f'Exported legacy workflow logs to {filename}') # Now delete the records connection.execute( @@ -203,7 +203,7 @@ def export_and_clean_workflow_logs(connection): # If delete_on_close is False, we are running for the user and add additional message of file location if not delete_on_close: - click.echo('Exported unexpected entity logs to {}'.format(filename)) + click.echo(f'Exported unexpected entity logs to {filename}') # Now delete the records connection.execute( diff --git a/aiida/backends/sqlalchemy/migrations/versions/12536798d4d3_trajectory_symbols_to_attribute.py b/aiida/backends/sqlalchemy/migrations/versions/12536798d4d3_trajectory_symbols_to_attribute.py index 70c331faa1..d53ec44ce3 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/12536798d4d3_trajectory_symbols_to_attribute.py +++ b/aiida/backends/sqlalchemy/migrations/versions/12536798d4d3_trajectory_symbols_to_attribute.py @@ -69,4 +69,4 @@ def downgrade(): for pk, _ in nodes: connection.execute( - text("""UPDATE db_dbnode SET attributes = attributes #- '{{symbols}}' WHERE id = {}""".format(pk))) + text(f"""UPDATE db_dbnode SET attributes = attributes #- '{{symbols}}' WHERE id = {pk}""")) diff --git a/aiida/backends/sqlalchemy/migrations/versions/1b8ed3425af9_remove_legacy_workflows.py b/aiida/backends/sqlalchemy/migrations/versions/1b8ed3425af9_remove_legacy_workflows.py index 27d898b654..f5daf0bac6 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/1b8ed3425af9_remove_legacy_workflows.py +++ b/aiida/backends/sqlalchemy/migrations/versions/1b8ed3425af9_remove_legacy_workflows.py @@ -47,7 +47,7 @@ def json_serializer(obj): if isinstance(obj, (datetime, date)): return obj.isoformat() - raise TypeError('Type %s not serializable' % type(obj)) + raise TypeError(f'Type {type(obj)} not serializable') def export_workflow_data(connection): @@ -91,7 +91,7 @@ def export_workflow_data(connection): # If delete_on_close is False, we are running for the user and add additional message of file location if not delete_on_close: - echo.echo_info('Exported workflow data to {}'.format(filename)) + echo.echo_info(f'Exported workflow data to {filename}') def upgrade(): diff --git a/aiida/backends/sqlalchemy/migrations/versions/37f3d4882837_make_all_uuid_columns_unique.py b/aiida/backends/sqlalchemy/migrations/versions/37f3d4882837_make_all_uuid_columns_unique.py index bc43767eb1..8974df9d88 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/37f3d4882837_make_all_uuid_columns_unique.py +++ b/aiida/backends/sqlalchemy/migrations/versions/37f3d4882837_make_all_uuid_columns_unique.py @@ -43,14 +43,13 @@ def verify_uuid_uniqueness(table): from aiida.common.exceptions import IntegrityError query = text( - 'SELECT s.id, s.uuid FROM (SELECT *, COUNT(*) OVER(PARTITION BY uuid) AS c FROM {}) AS s WHERE c > 1'. 
- format(table) + f'SELECT s.id, s.uuid FROM (SELECT *, COUNT(*) OVER(PARTITION BY uuid) AS c FROM {table}) AS s WHERE c > 1' ) conn = op.get_bind() duplicates = conn.execute(query).fetchall() if duplicates: - command = '`verdi database integrity detect-duplicate-uuid {table}`'.format(table=table) + command = f'`verdi database integrity detect-duplicate-uuid {table}`' raise IntegrityError( 'Your table "{}"" contains entries with duplicate UUIDS.\nRun {} ' 'to return to a consistent state'.format(table, command) @@ -61,10 +60,10 @@ def upgrade(): for table in tables: verify_uuid_uniqueness(table) - op.create_unique_constraint(table + '_uuid_key', table, ['uuid']) + op.create_unique_constraint(f'{table}_uuid_key', table, ['uuid']) def downgrade(): for table in tables: - op.drop_constraint(table + '_uuid_key', table) + op.drop_constraint(f'{table}_uuid_key', table) diff --git a/aiida/backends/sqlalchemy/migrations/versions/5d4d844852b6_invalidating_node_hash.py b/aiida/backends/sqlalchemy/migrations/versions/5d4d844852b6_invalidating_node_hash.py index 225b972616..b74006ca7a 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/5d4d844852b6_invalidating_node_hash.py +++ b/aiida/backends/sqlalchemy/migrations/versions/5d4d844852b6_invalidating_node_hash.py @@ -36,7 +36,7 @@ def upgrade(): conn = op.get_bind() # pylint: disable=no-member # Invalidate all the hashes - statement = text("""UPDATE db_dbnode SET extras = extras #- '{""" + _HASH_EXTRA_KEY + """}'::text[];""") + statement = text(f"UPDATE db_dbnode SET extras = extras #- '{{{_HASH_EXTRA_KEY}}}'::text[];") conn.execute(statement) @@ -45,5 +45,5 @@ def downgrade(): conn = op.get_bind() # pylint: disable=no-member # Invalidate all the hashes - statement = text("""UPDATE db_dbnode SET extras = extras #- '{""" + _HASH_EXTRA_KEY + """}'::text[];""") + statement = text(f"UPDATE db_dbnode SET extras = extras #- '{{{_HASH_EXTRA_KEY}}}'::text[];") conn.execute(statement) diff --git a/aiida/backends/sqlalchemy/migrations/versions/62fe0d36de90_add_node_uuid_unique_constraint.py b/aiida/backends/sqlalchemy/migrations/versions/62fe0d36de90_add_node_uuid_unique_constraint.py index 81336fb16f..cbc893457a 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/62fe0d36de90_add_node_uuid_unique_constraint.py +++ b/aiida/backends/sqlalchemy/migrations/versions/62fe0d36de90_add_node_uuid_unique_constraint.py @@ -43,7 +43,7 @@ def verify_node_uuid_uniqueness(): if duplicates: table = 'db_dbnode' - command = '`verdi database integrity detect-duplicate-uuid {table}`'.format(table=table) + command = f'`verdi database integrity detect-duplicate-uuid {table}`' raise IntegrityError( 'Your table "{}" contains entries with duplicate UUIDS.\nRun {} ' 'to return to a consistent state'.format(table, command) diff --git a/aiida/backends/sqlalchemy/migrations/versions/ce56d84bcc35_delete_trajectory_symbols_array.py b/aiida/backends/sqlalchemy/migrations/versions/ce56d84bcc35_delete_trajectory_symbols_array.py index 682e4371ad..765a4eaa6a 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/ce56d84bcc35_delete_trajectory_symbols_array.py +++ b/aiida/backends/sqlalchemy/migrations/versions/ce56d84bcc35_delete_trajectory_symbols_array.py @@ -48,7 +48,7 @@ def upgrade(): for pk, uuid in nodes: connection.execute( - text("""UPDATE db_dbnode SET attributes = attributes #- '{{array|symbols}}' WHERE id = {}""".format(pk))) + text(f"""UPDATE db_dbnode SET attributes = attributes #- '{{array|symbols}}' WHERE id = {pk}""")) 
utils.delete_numpy_array_from_repository(uuid, 'symbols') diff --git a/aiida/backends/sqlalchemy/migrations/versions/e797afa09270_reset_hash.py b/aiida/backends/sqlalchemy/migrations/versions/e797afa09270_reset_hash.py index a6d05ea985..c327275e31 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/e797afa09270_reset_hash.py +++ b/aiida/backends/sqlalchemy/migrations/versions/e797afa09270_reset_hash.py @@ -41,7 +41,7 @@ def drop_hashes(conn): # pylint: disable=unused-argument if n_nodes > 0: echo.echo_warning('Invalidating the hashes of all nodes. Please run "verdi rehash".', bold=True) - statement = text("""UPDATE db_dbnode SET extras = extras #- '{""" + _HASH_EXTRA_KEY + """}'::text[];""") + statement = text(f"UPDATE db_dbnode SET extras = extras #- '{{{_HASH_EXTRA_KEY}}}'::text[];") conn.execute(statement) diff --git a/aiida/backends/sqlalchemy/migrations/versions/ea2f50e7f615_dblog_create_uuid_column.py b/aiida/backends/sqlalchemy/migrations/versions/ea2f50e7f615_dblog_create_uuid_column.py index 3d34308429..f2d9af01c1 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/ea2f50e7f615_dblog_create_uuid_column.py +++ b/aiida/backends/sqlalchemy/migrations/versions/ea2f50e7f615_dblog_create_uuid_column.py @@ -48,10 +48,10 @@ def set_new_uuid(connection): # Create the key/value pairs key_values = ','.join("({}, '{}')".format(curr_id, curr_uuid) for curr_id, curr_uuid in zip(ids, uuids)) - update_stm = """ + update_stm = f""" UPDATE db_dblog as t SET uuid = uuid(c.uuid) - from (values {}) as c(id, uuid) where c.id = t.id""".format(key_values) + from (values {key_values}) as c(id, uuid) where c.id = t.id""" connection.execute(update_stm) diff --git a/aiida/backends/sqlalchemy/models/authinfo.py b/aiida/backends/sqlalchemy/models/authinfo.py index 30518d40ef..c92872ab9d 100644 --- a/aiida/backends/sqlalchemy/models/authinfo.py +++ b/aiida/backends/sqlalchemy/models/authinfo.py @@ -49,5 +49,5 @@ def __init__(self, *args, **kwargs): def __str__(self): if self.enabled: - return 'DB authorization info for {} on {}'.format(self.aiidauser.email, self.dbcomputer.name) - return 'DB authorization info for {} on {} [DISABLED]'.format(self.aiidauser.email, self.dbcomputer.name) + return f'DB authorization info for {self.aiidauser.email} on {self.dbcomputer.name}' + return f'DB authorization info for {self.aiidauser.email} on {self.dbcomputer.name} [DISABLED]' diff --git a/aiida/backends/sqlalchemy/models/computer.py b/aiida/backends/sqlalchemy/models/computer.py index e53f052ebf..200638cced 100644 --- a/aiida/backends/sqlalchemy/models/computer.py +++ b/aiida/backends/sqlalchemy/models/computer.py @@ -46,4 +46,4 @@ def pk(self): return self.id def __str__(self): - return '{} ({})'.format(self.name, self.hostname) + return f'{self.name} ({self.hostname})' diff --git a/aiida/backends/sqlalchemy/models/group.py b/aiida/backends/sqlalchemy/models/group.py index 22d422968f..d60518fcb4 100644 --- a/aiida/backends/sqlalchemy/models/group.py +++ b/aiida/backends/sqlalchemy/models/group.py @@ -64,4 +64,4 @@ def pk(self): return self.id def __str__(self): - return ''.format(self.type_string, self.label) + return f'' diff --git a/aiida/backends/sqlalchemy/models/log.py b/aiida/backends/sqlalchemy/models/log.py index c4a653593b..2bd5afff86 100644 --- a/aiida/backends/sqlalchemy/models/log.py +++ b/aiida/backends/sqlalchemy/models/log.py @@ -55,4 +55,4 @@ def __init__(self, time, loggername, levelname, dbnode_id, **kwargs): self.dbnode_id = dbnode_id def __str__(self): - return 'DbLog: {} for node {}: 
{}'.format(self.levelname, self.dbnode.id, self.message) + return f'DbLog: {self.levelname} for node {self.dbnode.id}: {self.message}' diff --git a/aiida/backends/sqlalchemy/models/node.py b/aiida/backends/sqlalchemy/models/node.py index 48d0cae220..efe18bc979 100644 --- a/aiida/backends/sqlalchemy/models/node.py +++ b/aiida/backends/sqlalchemy/models/node.py @@ -140,8 +140,8 @@ def __str__(self): simplename = self.get_simple_name(invalid_result='Unknown') # node pk + type if self.label: - return '{} node [{}]: {}'.format(simplename, self.pk, self.label) - return '{} node [{}]'.format(simplename, self.pk) + return f'{simplename} node [{self.pk}]: {self.label}' + return f'{simplename} node [{self.pk}]' class DbLink(Base): diff --git a/aiida/backends/sqlalchemy/models/settings.py b/aiida/backends/sqlalchemy/models/settings.py index eac5433a28..7ce85c034c 100644 --- a/aiida/backends/sqlalchemy/models/settings.py +++ b/aiida/backends/sqlalchemy/models/settings.py @@ -36,7 +36,7 @@ class DbSetting(Base): time = Column(DateTime(timezone=True), default=UTC, onupdate=timezone.now) def __str__(self): - return "'{}'={}".format(self.key, self.getvalue()) + return f"'{self.key}'={self.getvalue()}" @classmethod def set_value(cls, key, value, other_attribs=None, stop_if_existing=False): diff --git a/aiida/backends/testbase.py b/aiida/backends/testbase.py index ed18f27566..68d8aa107c 100644 --- a/aiida/backends/testbase.py +++ b/aiida/backends/testbase.py @@ -26,7 +26,7 @@ def check_if_tests_can_run(): """Verify that the currently loaded profile is a test profile, otherwise raise `TestsNotAllowedError`.""" profile = configuration.PROFILE if not profile.is_test_profile: - raise TestsNotAllowedError('currently loaded profile {} is not a valid test profile'.format(profile.name)) + raise TestsNotAllowedError(f'currently loaded profile {profile.name} is not a valid test profile') class AiidaTestCase(unittest.TestCase): diff --git a/aiida/backends/utils.py b/aiida/backends/utils.py index ebea81ab48..f258d9e621 100644 --- a/aiida/backends/utils.py +++ b/aiida/backends/utils.py @@ -55,6 +55,6 @@ def delete_nodes_and_connections(pks): elif configuration.PROFILE.database_backend == BACKEND_SQLA: from aiida.backends.sqlalchemy.utils import delete_nodes_and_connections_sqla as delete_nodes_backend else: - raise Exception('unknown backend {}'.format(configuration.PROFILE.database_backend)) + raise Exception(f'unknown backend {configuration.PROFILE.database_backend}') delete_nodes_backend(pks) diff --git a/aiida/calculations/arithmetic/add.py b/aiida/calculations/arithmetic/add.py index 6fccfe8d49..77a9434708 100644 --- a/aiida/calculations/arithmetic/add.py +++ b/aiida/calculations/arithmetic/add.py @@ -50,7 +50,7 @@ def prepare_for_submission(self, folder: Folder) -> CalcInfo: :returns: the `CalcInfo` instance """ with folder.open(self.options.input_filename, 'w', encoding='utf8') as handle: - handle.write('echo $(({x} + {y}))\n'.format(x=self.inputs.x.value, y=self.inputs.y.value)) + handle.write(f'echo $(({self.inputs.x.value} + {self.inputs.y.value}))\n') codeinfo = CodeInfo() codeinfo.code_uuid = self.inputs.code.uuid diff --git a/aiida/calculations/templatereplacer.py b/aiida/calculations/templatereplacer.py index f84e715b10..4cde1a125e 100644 --- a/aiida/calculations/templatereplacer.py +++ b/aiida/calculations/templatereplacer.py @@ -113,12 +113,12 @@ def prepare_for_submission(self, folder): if template: raise exceptions.InputValidationError( - 'The following keys could not be used in the template node: 
{}'.format(template.keys())) + f'The following keys could not be used in the template node: {template.keys()}') try: validate_list_of_string_tuples(files_to_copy, tuple_length=2) except ValidationError as exc: - raise exceptions.InputValidationError('invalid file_to_copy format: {}'.format(exc)) + raise exceptions.InputValidationError(f'invalid file_to_copy format: {exc}') local_copy_list = [] remote_copy_list = [] diff --git a/aiida/cmdline/commands/cmd_calcjob.py b/aiida/cmdline/commands/cmd_calcjob.py index 7a59444558..94bb585cce 100644 --- a/aiida/cmdline/commands/cmd_calcjob.py +++ b/aiida/cmdline/commands/cmd_calcjob.py @@ -68,7 +68,7 @@ def calcjob_res(calcjob, fmt, keys): try: result = {k: results[k] for k in keys} except KeyError as exc: - echo.echo_critical("key '{}' was not found in the results dictionary".format(exc.args[0])) + echo.echo_critical(f"key '{exc.args[0]}' was not found in the results dictionary") else: result = results @@ -118,7 +118,7 @@ def calcjob_inputcat(calcjob, path): # It can happen if the output is redirected, for example, to `head`. if exception.errno != errno.EPIPE: # Incorrect path or file not readable - echo.echo_critical('Could not open output path "{}". Exception: {}'.format(path, exception)) + echo.echo_critical(f'Could not open output path "{path}". Exception: {exception}') @verdi_calcjob.command('outputcat') @@ -170,7 +170,7 @@ def calcjob_outputcat(calcjob, path): # It can happen if the output is redirected, for example, to `head`. if exception.errno != errno.EPIPE: # Incorrect path or file not readable - echo.echo_critical('Could not open output path "{}". Exception: {}'.format(path, exception)) + echo.echo_critical(f'Could not open output path "{path}". Exception: {exception}') @verdi_calcjob.command('inputls') @@ -191,7 +191,7 @@ def calcjob_inputls(calcjob, path, color): try: list_repository_contents(calcjob, path, color) except FileNotFoundError: - echo.echo_critical('the path `{}` does not exist for the given node'.format(path)) + echo.echo_critical(f'the path `{path}` does not exist for the given node') @verdi_calcjob.command('outputls') @@ -218,7 +218,7 @@ def calcjob_outputls(calcjob, path, color): try: list_repository_contents(retrieved, path, color) except FileNotFoundError: - echo.echo_critical('the path `{}` does not exist for the given node'.format(path)) + echo.echo_critical(f'the path `{path}` does not exist for the given node') @verdi_calcjob.command('cleanworkdir') @@ -255,7 +255,7 @@ def calcjob_cleanworkdir(calcjobs, past_days, older_than, computers, force): if not force: path_count = sum([len(paths) for computer, paths in path_mapping.items()]) - warning = 'Are you sure you want to clean the work directory of {} calcjobs?'.format(path_count) + warning = f'Are you sure you want to clean the work directory of {path_count} calcjobs?' 
click.confirm(warning, abort=True) user = orm.User.objects.get_default() @@ -271,4 +271,4 @@ def calcjob_cleanworkdir(calcjobs, past_days, older_than, computers, force): clean_remote(transport, path) counter += 1 - echo.echo_success('{} remote folders cleaned on {}'.format(counter, computer.label)) + echo.echo_success(f'{counter} remote folders cleaned on {computer.label}') diff --git a/aiida/cmdline/commands/cmd_code.py b/aiida/cmdline/commands/cmd_code.py index 80cfb943e4..caf2f7e46c 100644 --- a/aiida/cmdline/commands/cmd_code.py +++ b/aiida/cmdline/commands/cmd_code.py @@ -88,15 +88,15 @@ def setup_code(non_interactive, **kwargs): try: code = code_builder.new() except InputValidationError as exception: - echo.echo_critical('invalid inputs: {}'.format(exception)) + echo.echo_critical(f'invalid inputs: {exception}') try: code.store() code.reveal() except ValidationError as exception: - echo.echo_critical('Unable to store the Code: {}'.format(exception)) + echo.echo_critical(f'Unable to store the Code: {exception}') - echo.echo_success('Code<{}> {} created'.format(code.pk, code.full_label)) + echo.echo_success(f'Code<{code.pk}> {code.full_label} created') @verdi_code.command('duplicate') @@ -138,9 +138,9 @@ def code_duplicate(ctx, code, non_interactive, **kwargs): new_code.store() new_code.reveal() except ValidationError as exception: - echo.echo_critical('Unable to store the Code: {}'.format(exception)) + echo.echo_critical(f'Unable to store the Code: {exception}') - echo.echo_success('Code<{}> {} created'.format(new_code.pk, new_code.full_label)) + echo.echo_success(f'Code<{new_code.pk}> {new_code.full_label} created') @verdi_code.command() @@ -211,7 +211,7 @@ def hide(codes): """Hide one or more codes from `verdi code list`.""" for code in codes: code.hide() - echo.echo_success('Code<{}> {} hidden'.format(code.pk, code.full_label)) + echo.echo_success(f'Code<{code.pk}> {code.full_label} hidden') @verdi_code.command() @@ -221,7 +221,7 @@ def reveal(codes): """Reveal one or more hidden codes in `verdi code list`.""" for code in codes: code.reveal() - echo.echo_success('Code<{}> {} revealed'.format(code.pk, code.full_label)) + echo.echo_success(f'Code<{code.pk}> {code.full_label} revealed') @verdi_code.command() @@ -235,9 +235,9 @@ def relabel(code, label): try: code.relabel(label) except InputValidationError as exception: - echo.echo_critical('invalid code label: {}'.format(exception)) + echo.echo_critical(f'invalid code label: {exception}') else: - echo.echo_success('Code<{}> relabeled from {} to {}'.format(code.pk, old_label, code.full_label)) + echo.echo_success(f'Code<{code.pk}> relabeled from {old_label} to {code.full_label}') @verdi_code.command('list') @@ -272,7 +272,7 @@ def code_list(computer, input_plugin, all_entries, all_users, show_owner): '!has_key': Code.HIDDEN_KEY } }, { - 'extras.{}'.format(Code.HIDDEN_KEY): { + f'extras.{Code.HIDDEN_KEY}': { '==': False } }] @@ -346,11 +346,11 @@ def print_list_res(qb_query, show_owner): return if show_owner: - owner_string = ' ({})'.format(useremail) + owner_string = f' ({useremail})' else: owner_string = '' if computername is None: computernamestring = '' else: - computernamestring = '@{}'.format(computername) - echo.echo('* pk {} - {}{}{}'.format(pk, label, computernamestring, owner_string)) + computernamestring = f'@{computername}' + echo.echo(f'* pk {pk} - {label}{computernamestring}{owner_string}') diff --git a/aiida/cmdline/commands/cmd_computer.py b/aiida/cmdline/commands/cmd_computer.py index b7a2e09c79..3d83d66bd6 100644 --- 
a/aiida/cmdline/commands/cmd_computer.py +++ b/aiida/cmdline/commands/cmd_computer.py @@ -55,7 +55,7 @@ def _computer_test_get_jobs(transport, scheduler, authinfo): # pylint: disable= :return: tuple of boolean indicating success or failure and an optional string message """ found_jobs = scheduler.get_jobs(as_dict=True) - return True, '{} jobs found in the queue'.format(len(found_jobs)) + return True, f'{len(found_jobs)} jobs found in the queue' def _computer_test_no_unexpected_output(transport, scheduler, authinfo): # pylint: disable=unused-argument @@ -73,7 +73,7 @@ def _computer_test_no_unexpected_output(transport, scheduler, authinfo): # pyli # Execute a command that should not return any error retval, stdout, stderr = transport.exec_command_wait('echo -n') if retval != 0: - return False, 'The command `echo -n` returned a non-zero return code ({})'.format(retval) + return False, f'The command `echo -n` returned a non-zero return code ({retval})' template = """ We detected some spurious output in the {} when connecting to the computer, as shown between the bars @@ -121,7 +121,7 @@ def _computer_create_temp_file(transport, scheduler, authinfo): # pylint: disab import datetime import os - file_content = "Test from 'verdi computer test' on {}".format(datetime.datetime.now().isoformat()) + file_content = f"Test from 'verdi computer test' on {datetime.datetime.now().isoformat()}" workdir = authinfo.get_workdir().format(username=transport.whoami()) try: @@ -138,7 +138,7 @@ def _computer_create_temp_file(transport, scheduler, authinfo): # pylint: disab transport.putfile(tempf.name, remote_file_path) if not transport.path_exists(remote_file_path): - return False, 'failed to create the file `{}` on the remote'.format(remote_file_path) + return False, f'failed to create the file `{remote_file_path}` on the remote' handle, destfile = tempfile.mkstemp() os.close(handle) @@ -150,8 +150,8 @@ def _computer_create_temp_file(transport, scheduler, authinfo): # pylint: disab if read_string != file_content: message = 'retrieved file content is different from what was expected' - message += '\n Expected: {}'.format(file_content) - message += '\n Retrieved: {}'.format(read_string) + message += f'\n Expected: {file_content}' + message += f'\n Retrieved: {read_string}' return False, message finally: @@ -228,17 +228,17 @@ def computer_setup(ctx, non_interactive, **kwargs): try: computer = computer_builder.new() except (ComputerBuilder.ComputerValidationError, ValidationError) as e: - echo.echo_critical('{}: {}'.format(type(e).__name__, e)) + echo.echo_critical(f'{type(e).__name__}: {e}') try: computer.store() except ValidationError as err: - echo.echo_critical('unable to store the computer: {}. Exiting...'.format(err)) + echo.echo_critical(f'unable to store the computer: {err}. 
Exiting...') else: - echo.echo_success('Computer<{}> {} created'.format(computer.pk, computer.label)) + echo.echo_success(f'Computer<{computer.pk}> {computer.label} created') echo.echo_info('Note: before the computer can be used, it has to be configured with the command:') - echo.echo_info(' verdi computer configure {} {}'.format(computer.transport_type, computer.label)) + echo.echo_info(f' verdi computer configure {computer.transport_type} {computer.label}') @verdi_computer.command('duplicate') @@ -263,7 +263,7 @@ def computer_duplicate(ctx, computer, non_interactive, **kwargs): from aiida.orm.utils.builders.computer import ComputerBuilder if kwargs['label'] in get_computer_names(): - echo.echo_critical('A computer called {} already exists'.format(kwargs['label'])) + echo.echo_critical(f"A computer called {kwargs['label']} already exists") kwargs['transport'] = kwargs['transport'].name kwargs['scheduler'] = kwargs['scheduler'].name @@ -276,22 +276,22 @@ def computer_duplicate(ctx, computer, non_interactive, **kwargs): try: computer = computer_builder.new() except (ComputerBuilder.ComputerValidationError, ValidationError) as e: - echo.echo_critical('{}: {}'.format(type(e).__name__, e)) + echo.echo_critical(f'{type(e).__name__}: {e}') else: - echo.echo_success('stored computer {}<{}>'.format(computer.label, computer.pk)) + echo.echo_success(f'stored computer {computer.label}<{computer.pk}>') try: computer.store() except ValidationError as err: - echo.echo_critical('unable to store the computer: {}. Exiting...'.format(err)) + echo.echo_critical(f'unable to store the computer: {err}. Exiting...') else: - echo.echo_success('Computer<{}> {} created'.format(computer.pk, computer.label)) + echo.echo_success(f'Computer<{computer.pk}> {computer.label} created') is_configured = computer.is_user_configured(orm.User.objects.get_default()) if not is_configured: echo.echo_info('Note: before the computer can be used, it has to be configured with the command:') - echo.echo_info(' verdi computer configure {} {}'.format(computer.transport_type, computer.label)) + echo.echo_info(f' verdi computer configure {computer.transport_type} {computer.label}') @verdi_computer.command('enable') @@ -305,17 +305,13 @@ def computer_enable(computer, user): try: authinfo = computer.get_authinfo(user) except NotExistent: - echo.echo_critical( - "User with email '{}' is not configured for computer '{}' yet.".format(user.email, computer.label) - ) + echo.echo_critical(f"User with email '{user.email}' is not configured for computer '{computer.label}' yet.") if not authinfo.enabled: authinfo.enabled = True - echo.echo_info("Computer '{}' enabled for user {}.".format(computer.label, user.get_full_name())) + echo.echo_info(f"Computer '{computer.label}' enabled for user {user.get_full_name()}.") else: - echo.echo_info( - "Computer '{}' was already enabled for user {} {}.".format(computer.label, user.first_name, user.last_name) - ) + echo.echo_info(f"Computer '{computer.label}' was already enabled for user {user.first_name} {user.last_name}.") @verdi_computer.command('disable') @@ -331,19 +327,13 @@ def computer_disable(computer, user): try: authinfo = computer.get_authinfo(user) except NotExistent: - echo.echo_critical( - "User with email '{}' is not configured for computer '{}' yet.".format(user.email, computer.label) - ) + echo.echo_critical(f"User with email '{user.email}' is not configured for computer '{computer.label}' yet.") if authinfo.enabled: authinfo.enabled = False - echo.echo_info("Computer '{}' disabled for user 
{}.".format(computer.label, user.get_full_name())) + echo.echo_info(f"Computer '{computer.label}' disabled for user {user.get_full_name()}.") else: - echo.echo_info( - "Computer '{}' was already disabled for user {} {}.".format( - computer.label, user.first_name, user.last_name - ) - ) + echo.echo_info(f"Computer '{computer.label}' was already disabled for user {user.first_name} {user.last_name}.") @verdi_computer.command('list') @@ -419,13 +409,13 @@ def computer_relabel(computer, label): computer.label = label computer.store() except ValidationError as error: - echo.echo_critical('Invalid input! {}'.format(error)) + echo.echo_critical(f'Invalid input! {error}') except UniquenessError as error: echo.echo_critical( - "Uniqueness error encountered! Probably a computer with label '{}' already exists: {}".format(label, error) + f"Uniqueness error encountered! Probably a computer with label '{label}' already exists: {error}" ) - echo.echo_success("Computer '{}' relabeled to '{}'".format(old_label, label)) + echo.echo_success(f"Computer '{old_label}' relabeled to '{label}'") @verdi_computer.command('test') @@ -452,15 +442,15 @@ def computer_test(user, print_traceback, computer): if user is None: user = orm.User.objects.get_default() - echo.echo_info('Testing computer<{}> for user<{}>...'.format(computer.label, user.email)) + echo.echo_info(f'Testing computer<{computer.label}> for user<{user.email}>...') try: authinfo = computer.get_authinfo(user) except NotExistent: - echo.echo_critical('Computer<{}> is not yet configured for user<{}>'.format(computer.label, user.email)) + echo.echo_critical(f'Computer<{computer.label}> is not yet configured for user<{user.email}>') if not authinfo.enabled: - echo.echo_warning('Computer<{}> is disabled for user<{}>'.format(computer.label, user.email)) + echo.echo_warning(f'Computer<{computer.label}> is disabled for user<{user.email}>') click.confirm('Do you really want to test it?', abort=True) scheduler = authinfo.computer.get_scheduler() @@ -489,13 +479,13 @@ def computer_test(user, print_traceback, computer): for test, test_label in tests.items(): - echo.echo('* {}... '.format(test_label), nl=False) + echo.echo(f'* {test_label}... ', nl=False) num_tests += 1 try: success, message = test(transport=transport, scheduler=scheduler, authinfo=authinfo) except Exception as exception: # pylint:disable=broad-except success = False - message = '{}: {}'.format(exception.__class__.__name__, str(exception)) + message = f'{exception.__class__.__name__}: {str(exception)}' if print_traceback: message += '\n Full traceback:\n' @@ -518,9 +508,9 @@ def computer_test(user, print_traceback, computer): echo.echo_highlight('[OK]', color='success') if num_failures: - echo.echo_warning('{} out of {} tests failed'.format(num_failures, num_tests)) + echo.echo_warning(f'{num_failures} out of {num_tests} tests failed') else: - echo.echo_success('all {} tests succeeded'.format(num_tests)) + echo.echo_success(f'all {num_tests} tests succeeded') except Exception as exception: # pylint:disable=broad-except echo.echo_highlight('[FAILED]: ', color='error', nl=False) @@ -533,7 +523,7 @@ def computer_test(user, print_traceback, computer): message += '\n Use the `--print-traceback` option to see the full traceback.' 
echo.echo(message) - echo.echo_warning('{} out of {} tests failed'.format(1, num_tests)) + echo.echo_warning(f'{1} out of {num_tests} tests failed') @verdi_computer.command('delete') @@ -555,7 +545,7 @@ def computer_delete(computer): except InvalidOperation as error: echo.echo_critical(str(error)) - echo.echo_success("Computer '{}' deleted.".format(label)) + echo.echo_success(f"Computer '{label}' deleted.") @verdi_computer.group('configure') @@ -594,14 +584,15 @@ def computer_config_show(computer, user, defaults, as_option_string): t_opt = transport_cls.auth_options[option.name] if config.get(option.name) or config.get(option.name) is False: if t_opt.get('switch'): - option_value = option.opts[-1] if config.get(option.name - ) else '--no-{}'.format(option.name.replace('_', '-')) + option_value = option.opts[-1] if config.get( + option.name + ) else f"--no-{option.name.replace('_', '-')}" elif t_opt.get('is_flag'): is_default = config.get(option.name ) == transport_cli.transport_option_default(option.name, computer) option_value = option.opts[-1] if is_default else '' else: - option_value = '{}={}'.format(option.opts[-1], option.type(config[option.name])) + option_value = f'{option.opts[-1]}={option.type(config[option.name])}' option_items.append(option_value) opt_string = ' '.join(option_items) echo.echo(escape_for_bash(opt_string)) @@ -609,9 +600,9 @@ def computer_config_show(computer, user, defaults, as_option_string): table = [] for name in transport_cls.get_valid_auth_params(): if name in config: - table.append(('* ' + name, config[name])) + table.append((f'* {name}', config[name])) else: - table.append(('* ' + name, '-')) + table.append((f'* {name}', '-')) echo.echo(tabulate.tabulate(table, tablefmt='plain')) diff --git a/aiida/cmdline/commands/cmd_config.py b/aiida/cmdline/commands/cmd_config.py index 8e978b869e..5eb87cf376 100644 --- a/aiida/cmdline/commands/cmd_config.py +++ b/aiida/cmdline/commands/cmd_config.py @@ -32,22 +32,22 @@ def verdi_config(ctx, option, value, globally, unset): # Define the string that determines the scope: for specific profile or globally scope = profile.name if (not globally and profile) else None - scope_text = 'for {}'.format(profile.name) if (not globally and profile) else 'globally' + scope_text = f'for {profile.name}' if (not globally and profile) else 'globally' # Unset the specified option if unset: config.unset_option(option.name, scope=scope) config.store() - echo.echo_success('{} unset {}'.format(option.name, scope_text)) + echo.echo_success(f'{option.name} unset {scope_text}') # Get the specified option elif value is None: option_value = config.get_option(option.name, scope=scope, default=False) if option_value: - echo.echo('{}'.format(option_value)) + echo.echo(f'{option_value}') # Set the specified option else: config.set_option(option.name, value, scope=scope) config.store() - echo.echo_success('{} set to {} {}'.format(option.name, value, scope_text)) + echo.echo_success(f'{option.name} set to {value} {scope_text}') diff --git a/aiida/cmdline/commands/cmd_daemon.py b/aiida/cmdline/commands/cmd_daemon.py index 94e71dfc80..faf720e436 100644 --- a/aiida/cmdline/commands/cmd_daemon.py +++ b/aiida/cmdline/commands/cmd_daemon.py @@ -31,10 +31,10 @@ def validate_daemon_workers(ctx, param, value): # pylint: disable=unused-argume value = ctx.obj.config.get_option('daemon.default_workers', ctx.obj.profile.name) if not isinstance(value, int): - raise click.BadParameter('{} is not an integer'.format(value)) + raise click.BadParameter(f'{value} is not 
an integer') if value <= 0: - raise click.BadParameter('{} is not a positive non-zero integer'.format(value)) + raise click.BadParameter(f'{value} is not a positive non-zero integer') return value @@ -102,7 +102,7 @@ def status(all_profiles): client = get_daemon_client(profile.name) delete_stale_pid_file(client) click.secho('Profile: ', fg='red', bold=True, nl=False) - click.secho('{}'.format(profile.name), bold=True) + click.secho(f'{profile.name}', bold=True) result = get_daemon_status(client) echo.echo(result) daemons_running.append(client.is_daemon_running) @@ -169,7 +169,7 @@ def stop(no_wait, all_profiles): client = get_daemon_client(profile.name) click.secho('Profile: ', fg='red', bold=True, nl=False) - click.secho('{}'.format(profile.name), bold=True) + click.secho(f'{profile.name}', bold=True) if not client.is_daemon_running: echo.echo('Daemon was not running') diff --git a/aiida/cmdline/commands/cmd_data/cmd_bands.py b/aiida/cmdline/commands/cmd_data/cmd_bands.py index 18a72c7009..cdee152e39 100644 --- a/aiida/cmdline/commands/cmd_data/cmd_bands.py +++ b/aiida/cmdline/commands/cmd_data/cmd_bands.py @@ -78,7 +78,7 @@ def bands_list(elements, elements_exclusive, raw, formula_mode, past_days, group echo.echo(tabulate(bands_list_data, tablefmt='plain')) else: echo.echo(tabulate(bands_list_data, headers='firstrow')) - echo.echo('\nTotal results: {}\n'.format(counter)) + echo.echo(f'\nTotal results: {counter}\n') @bands.command('show') @@ -88,9 +88,9 @@ def bands_list(elements, elements_exclusive, raw, formula_mode, past_days, group def bands_show(data, fmt): """Visualize BandsData objects.""" try: - show_function = getattr(cmd_show, '_show_{}'.format(fmt)) + show_function = getattr(cmd_show, f'_show_{fmt}') except AttributeError: - echo.echo_critical('visualization format {} is not supported'.format(fmt)) + echo.echo_critical(f'visualization format {fmt} is not supported') show_function(fmt, data) diff --git a/aiida/cmdline/commands/cmd_data/cmd_cif.py b/aiida/cmdline/commands/cmd_data/cmd_cif.py index b835a67cb2..6317b13970 100644 --- a/aiida/cmdline/commands/cmd_data/cmd_cif.py +++ b/aiida/cmdline/commands/cmd_data/cmd_cif.py @@ -67,7 +67,7 @@ def cif_list(raw, formula_mode, past_days, groups, all_users): echo.echo(tabulate(cif_list_data, tablefmt='plain')) else: echo.echo(tabulate(cif_list_data, headers='firstrow')) - echo.echo('\nTotal results: {}\n'.format(counter)) + echo.echo(f'\nTotal results: {counter}\n') @cif.command('show') @@ -77,9 +77,9 @@ def cif_list(raw, formula_mode, past_days, groups, all_users): def cif_show(data, fmt): """Visualize CifData objects.""" try: - show_function = getattr(cmd_show, '_show_{}'.format(fmt)) + show_function = getattr(cmd_show, f'_show_{fmt}') except AttributeError: - echo.echo_critical('visualization format {} is not supported'.format(fmt)) + echo.echo_critical(f'visualization format {fmt} is not supported') show_function(fmt, data) @@ -93,7 +93,7 @@ def cif_content(data): try: echo.echo(node.get_content()) except IOError as exception: - echo.echo_warning('could not read the content for CifData<{}>: {}'.format(node.pk, str(exception))) + echo.echo_warning(f'could not read the content for CifData<{node.pk}>: {str(exception)}') @cif.command('export') @@ -122,6 +122,6 @@ def cif_import(filename): try: node, _ = CifData.get_or_create(filename) - echo.echo_success('imported {}'.format(str(node))) + echo.echo_success(f'imported {str(node)}') except ValueError as err: echo.echo_critical(err) diff --git 
a/aiida/cmdline/commands/cmd_data/cmd_export.py b/aiida/cmdline/commands/cmd_data/cmd_export.py index 371a7f2d8b..6752f5274d 100644 --- a/aiida/cmdline/commands/cmd_data/cmd_export.py +++ b/aiida/cmdline/commands/cmd_data/cmd_export.py @@ -101,7 +101,7 @@ def data_export(node, output_fname, fileformat, other_args=None, overwrite=False try: node.export(output_fname, fileformat=fileformat, overwrite=overwrite, **other_args) except OSError as err: - echo.echo_critical('OSError while exporting file:\n{}'.format(err)) + echo.echo_critical(f'OSError while exporting file:\n{err}') else: filetext, extra_files = node._exportcontent(fileformat, main_file_name=output_fname, **other_args) if extra_files: @@ -115,7 +115,5 @@ def data_export(node, output_fname, fileformat, other_args=None, overwrite=False # This typically occurs for parameters that are passed down to the # methods in, e.g., BandsData, but they are not accepted echo.echo_critical( - 'TypeError, perhaps a parameter is not ' - 'supported by the specific format?\nError ' - 'message: {}'.format(err) + f'TypeError, perhaps a parameter is not supported by the specific format?\nError message: {err}' ) diff --git a/aiida/cmdline/commands/cmd_data/cmd_remote.py b/aiida/cmdline/commands/cmd_data/cmd_remote.py index 6b992697a7..9b2eef2770 100644 --- a/aiida/cmdline/commands/cmd_data/cmd_remote.py +++ b/aiida/cmdline/commands/cmd_data/cmd_remote.py @@ -39,8 +39,7 @@ def remote_ls(ls_long, path, datum): content = datum.listdir_withattributes(path=path) except (IOError, OSError) as err: echo.echo_critical( - 'Unable to access the remote folder or file, check if it exists.\n' - 'Original error: {}'.format(str(err)) + f'Unable to access the remote folder or file, check if it exists.\nOriginal error: {str(err)}' ) for metadata in content: if ls_long: @@ -71,7 +70,7 @@ def remote_cat(datum, path): with open(tmpf.name, encoding='utf8') as fhandle: sys.stdout.write(fhandle.read()) except IOError as err: - echo.echo_critical('{}: {}'.format(err.errno, str(err))) + echo.echo_critical(f'{err.errno}: {str(err)}') try: os.remove(tmpf.name) @@ -85,6 +84,6 @@ def remote_cat(datum, path): def remote_show(datum): """Show information for a RemoteData object.""" click.echo('- Remote computer name:') - click.echo(' {}'.format(datum.computer.label)) + click.echo(f' {datum.computer.label}') click.echo('- Remote folder full path:') - click.echo(' {}'.format(datum.get_remote_path())) + click.echo(f' {datum.get_remote_path()}') diff --git a/aiida/cmdline/commands/cmd_data/cmd_show.py b/aiida/cmdline/commands/cmd_data/cmd_show.py index 51cdbca9b1..9dd17868b3 100644 --- a/aiida/cmdline/commands/cmd_data/cmd_show.py +++ b/aiida/cmdline/commands/cmd_data/cmd_show.py @@ -66,7 +66,7 @@ def _show_jmol(exec_name, trajectory_list, **kwargs): subprocess.check_output([exec_name, handle.name]) except subprocess.CalledProcessError: # The program died: just print a message - echo.echo_info('the call to {} ended with an error.'.format(exec_name)) + echo.echo_info(f'the call to {exec_name} ended with an error.') except OSError as err: if err.errno == 2: echo.echo_critical( @@ -97,7 +97,7 @@ def _show_xcrysden(exec_name, object_list, **kwargs): subprocess.check_output([exec_name, '--xsf', tmpf.name]) except subprocess.CalledProcessError: # The program died: just print a message - echo.echo_info('the call to {} ended with an error.'.format(exec_name)) + echo.echo_info(f'the call to {exec_name} ended with an error.') except OSError as err: if err.errno == 2: echo.echo_critical( @@ -159,7 
+159,7 @@ def _show_vesta(exec_name, structure_list): subprocess.check_output([exec_name, tmpf.name]) except subprocess.CalledProcessError: # The program died: just print a message - echo.echo_info('the call to {} ended with an error.'.format(exec_name)) + echo.echo_info(f'the call to {exec_name} ended with an error.') except OSError as err: if err.errno == 2: echo.echo_critical( @@ -190,7 +190,7 @@ def _show_vmd(exec_name, structure_list): subprocess.check_output([exec_name, tmpf.name]) except subprocess.CalledProcessError: # The program died: just print a message - echo.echo_info('the call to {} ended with an error.'.format(exec_name)) + echo.echo_info(f'the call to {exec_name} ended with an error.') except OSError as err: if err.errno == 2: echo.echo_critical( @@ -230,10 +230,10 @@ def _show_xmgrace(exec_name, list_bands): try: subprocess.check_output([exec_name] + [f.name for f in list_files]) except subprocess.CalledProcessError: - print('Note: the call to {} ended with an error.'.format(exec_name)) + print(f'Note: the call to {exec_name} ended with an error.') except OSError as err: if err.errno == 2: - print("No executable '{}' found. Add to the path, or try with an absolute path.".format(exec_name)) + print(f"No executable '{exec_name}' found. Add to the path, or try with an absolute path.") sys.exit(1) else: raise diff --git a/aiida/cmdline/commands/cmd_data/cmd_singlefile.py b/aiida/cmdline/commands/cmd_data/cmd_singlefile.py index 82e9709fb4..3fb203a23c 100644 --- a/aiida/cmdline/commands/cmd_data/cmd_singlefile.py +++ b/aiida/cmdline/commands/cmd_data/cmd_singlefile.py @@ -27,4 +27,4 @@ def singlefile_content(datum): try: echo.echo(datum.get_content()) except (IOError, OSError) as exception: - echo.echo_critical('could not read the content for SinglefileData<{}>: {}'.format(datum.pk, str(exception))) + echo.echo_critical(f'could not read the content for SinglefileData<{datum.pk}>: {str(exception)}') diff --git a/aiida/cmdline/commands/cmd_data/cmd_structure.py b/aiida/cmdline/commands/cmd_data/cmd_structure.py index a2e91949bd..0faee68e30 100644 --- a/aiida/cmdline/commands/cmd_data/cmd_structure.py +++ b/aiida/cmdline/commands/cmd_data/cmd_structure.py @@ -34,15 +34,11 @@ def _store_structure(new_structure, dry_run): """ if dry_run: echo.echo( - ' Successfully imported structure {} (not storing it, dry-run requested)'.format( - new_structure.get_formula() - ) + f' Successfully imported structure {new_structure.get_formula()} (not storing it, dry-run requested)' ) else: new_structure.store() - echo.echo( - ' Successfully imported structure {} (PK = {})'.format(new_structure.get_formula(), new_structure.pk) - ) + echo.echo(f' Successfully imported structure {new_structure.get_formula()} (PK = {new_structure.pk})') @verdi_data.group('structure') @@ -118,7 +114,7 @@ def structure_list(elements, raw, formula_mode, past_days, groups, all_users): echo.echo(tabulate(struct_list_data, tablefmt='plain')) else: echo.echo(tabulate(struct_list_data, headers='firstrow')) - echo.echo('\nTotal results: {}\n'.format(counter)) + echo.echo(f'\nTotal results: {counter}\n') @structure.command('show') @@ -128,9 +124,9 @@ def structure_list(elements, raw, formula_mode, past_days, groups, all_users): def structure_show(data, fmt): """Visualize StructureData objects.""" try: - show_function = getattr(cmd_show, '_show_{}'.format(fmt)) + show_function = getattr(cmd_show, f'_show_{fmt}') except AttributeError: - echo.echo_critical('visualization format {} is not supported'.format(fmt)) + 
echo.echo_critical(f'visualization format {fmt} is not supported') show_function(fmt, data) diff --git a/aiida/cmdline/commands/cmd_data/cmd_trajectory.py b/aiida/cmdline/commands/cmd_data/cmd_trajectory.py index 07ee4a63d0..5e1e779a78 100644 --- a/aiida/cmdline/commands/cmd_data/cmd_trajectory.py +++ b/aiida/cmdline/commands/cmd_data/cmd_trajectory.py @@ -60,7 +60,7 @@ def trajectory_list(raw, past_days, groups, all_users): echo.echo(tabulate(struct_list_data, tablefmt='plain')) else: echo.echo(tabulate(struct_list_data, headers='firstrow')) - echo.echo('\nTotal results: {}\n'.format(counter)) + echo.echo(f'\nTotal results: {counter}\n') @trajectory.command('show') @@ -71,9 +71,9 @@ def trajectory_list(raw, past_days, groups, all_users): def trajectory_show(data, fmt): """Visualize a trajectory.""" try: - show_function = getattr(cmd_show, '_show_{}'.format(fmt)) + show_function = getattr(cmd_show, f'_show_{fmt}') except AttributeError: - echo.echo_critical('visualization format {} is not supported'.format(fmt)) + echo.echo_critical(f'visualization format {fmt} is not supported') show_function(fmt, data) diff --git a/aiida/cmdline/commands/cmd_data/cmd_upf.py b/aiida/cmdline/commands/cmd_data/cmd_upf.py index 745f4af7a2..d133384077 100644 --- a/aiida/cmdline/commands/cmd_data/cmd_upf.py +++ b/aiida/cmdline/commands/cmd_data/cmd_upf.py @@ -44,7 +44,7 @@ def upf_uploadfamily(folder, group_label, group_description, stop_if_existing): """ from aiida.orm.nodes.data.upf import upload_upf_family files_found, files_uploaded = upload_upf_family(folder, group_label, group_description, stop_if_existing) - echo.echo_success('UPF files found: {}. New files uploaded: {}'.format(files_found, files_uploaded)) + echo.echo_success(f'UPF files found: {files_found}. New files uploaded: {files_uploaded}') @upf.command('listfamilies') @@ -82,11 +82,11 @@ def upf_listfamilies(elements, with_description): query.append(UpfData, project=['id'], with_group='thisgroup') if with_description: - description_string = ': {}'.format(group_desc) + description_string = f': {group_desc}' else: description_string = '' - echo.echo_success('* {} [{} pseudos]{}'.format(group_label, query.count(), description_string)) + echo.echo_success(f'* {group_label} [{query.count()} pseudos]{description_string}') else: echo.echo_warning('No valid UPF pseudopotential family found.') @@ -102,7 +102,7 @@ def upf_exportfamily(folder, group): Call without parameters to get some help. 
""" if group.is_empty: - echo.echo_critical('Group<{}> contains no pseudos'.format(group.label)) + echo.echo_critical(f'Group<{group.label}> contains no pseudos') for node in group.nodes: dest_path = os.path.join(folder, node.filename) @@ -110,7 +110,7 @@ def upf_exportfamily(folder, group): with open(dest_path, 'w', encoding='utf8') as handle: handle.write(node.get_content()) else: - echo.echo_warning('File {} is already present in the destination folder'.format(node.filename)) + echo.echo_warning(f'File {node.filename} is already present in the destination folder') @upf.command('import') @@ -123,7 +123,7 @@ def upf_import(filename): from aiida.orm import UpfData node, _ = UpfData.get_or_create(filename) - echo.echo_success('Imported: {}'.format(node)) + echo.echo_success(f'Imported: {node}') @upf.command('export') diff --git a/aiida/cmdline/commands/cmd_database.py b/aiida/cmdline/commands/cmd_database.py index 7ca1dabf3d..c486ea038f 100644 --- a/aiida/cmdline/commands/cmd_database.py +++ b/aiida/cmdline/commands/cmd_database.py @@ -111,7 +111,7 @@ def detect_duplicate_uuid(table, apply_patch): try: messages = deduplicate_uuids(table=table, dry_run=not apply_patch) except Exception as exception: # pylint: disable=broad-except - echo.echo_critical('integrity check failed: {}'.format(str(exception))) + echo.echo_critical(f'integrity check failed: {str(exception)}') else: for message in messages: echo.echo_info(message) @@ -141,7 +141,7 @@ def detect_invalid_links(): if result: integrity_violated = True - echo.echo_warning('{}:\n'.format(check.message)) + echo.echo_warning(f'{check.message}:\n') echo.echo(tabulate(result, headers=check.headers)) if not integrity_violated: @@ -169,7 +169,7 @@ def detect_invalid_nodes(): if result: integrity_violated = True - echo.echo_warning('{}:\n'.format(check.message)) + echo.echo_warning(f'{check.message}:\n') echo.echo(tabulate(result, headers=check.headers)) if not integrity_violated: diff --git a/aiida/cmdline/commands/cmd_devel.py b/aiida/cmdline/commands/cmd_devel.py index 40d9972ad2..381fd46045 100644 --- a/aiida/cmdline/commands/cmd_devel.py +++ b/aiida/cmdline/commands/cmd_devel.py @@ -58,11 +58,11 @@ def devel_check_undesired_imports(): for modulename in ['seekpath', 'CifFile', 'ase', 'pymatgen', 'spglib', 'pymysql']: if modulename in sys.modules: - echo.echo_warning('Detected loaded module "{}"'.format(modulename)) + echo.echo_warning(f'Detected loaded module "{modulename}"') loaded_modules += 1 if loaded_modules > 0: - echo.echo_critical('Detected {} unwanted modules'.format(loaded_modules)) + echo.echo_critical(f'Detected {loaded_modules} unwanted modules') echo.echo_success('no issues detected') diff --git a/aiida/cmdline/commands/cmd_export.py b/aiida/cmdline/commands/cmd_export.py index 21596007c4..dad847def6 100644 --- a/aiida/cmdline/commands/cmd_export.py +++ b/aiida/cmdline/commands/cmd_export.py @@ -56,7 +56,7 @@ def inspect(archive, version, data, meta_data): data.extend(sorted([(k.capitalize(), v) for k, v in archive_object.get_data_statistics().items()])) echo.echo(tabulate.tabulate(data)) except CorruptArchive as exception: - echo.echo_critical('corrupt archive: {}'.format(exception)) + echo.echo_critical(f'corrupt archive: {exception}') @verdi_export.command('create') @@ -135,9 +135,9 @@ def create( try: export(entities, filename=output_file, file_format=export_format, **kwargs) except ArchiveExportError as exception: - echo.echo_critical('failed to write the archive file. 
Exception: {}'.format(exception)) + echo.echo_critical(f'failed to write the archive file. Exception: {exception}') else: - echo.echo_success('wrote the export archive file to {}'.format(output_file)) + echo.echo_success(f'wrote the export archive file to {output_file}') @verdi_export.command('migrate') @@ -199,11 +199,11 @@ def migrate(input_file, output_file, force, silent, in_place, archive_format, ve with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle: metadata = json.load(fhandle) except IOError: - echo.echo_critical('export archive does not contain the required file {}'.format(fhandle.filename)) + echo.echo_critical(f'export archive does not contain the required file {fhandle.filename}') old_version = migration.verify_metadata_version(metadata) if version <= old_version: - echo.echo_success('nothing to be done - archive already at version {} >= {}'.format(old_version, version)) + echo.echo_success(f'nothing to be done - archive already at version {old_version} >= {version}') return try: @@ -236,4 +236,4 @@ def migrate(input_file, output_file, force, silent, in_place, archive_format, ve tempdir.cleanup() if not silent: - echo.echo_success('migrated the archive from version {} to {}'.format(old_version, new_version)) + echo.echo_success(f'migrated the archive from version {old_version} to {new_version}') diff --git a/aiida/cmdline/commands/cmd_group.py b/aiida/cmdline/commands/cmd_group.py index 327c1ccc36..ee02c7005c 100644 --- a/aiida/cmdline/commands/cmd_group.py +++ b/aiida/cmdline/commands/cmd_group.py @@ -30,7 +30,7 @@ def verdi_group(): def group_add_nodes(group, force, nodes): """Add nodes to the a group.""" if not force: - click.confirm('Do you really want to add {} nodes to Group<{}>?'.format(len(nodes), group.label), abort=True) + click.confirm(f'Do you really want to add {len(nodes)} nodes to Group<{group.label}>?', abort=True) group.add_nodes(nodes) @@ -44,9 +44,9 @@ def group_add_nodes(group, force, nodes): def group_remove_nodes(group, nodes, clear, force): """Remove nodes from a group.""" if clear: - message = 'Do you really want to remove ALL the nodes from Group<{}>?'.format(group.label) + message = f'Do you really want to remove ALL the nodes from Group<{group.label}>?' else: - message = 'Do you really want to remove {} nodes from Group<{}>?'.format(len(nodes), group.label) + message = f'Do you really want to remove {len(nodes)} nodes from Group<{group.label}>?' 
if not force: click.confirm(message, abort=True) @@ -81,13 +81,13 @@ def group_delete(group, clear, force): )) if not force: - click.confirm('Are you sure to delete Group<{}>?'.format(label), abort=True) + click.confirm(f'Are you sure to delete Group<{label}>?', abort=True) if clear: group.clear() orm.Group.objects.delete(group.pk) - echo.echo_success('Group<{}> deleted.'.format(label)) + echo.echo_success(f'Group<{label}> deleted.') @verdi_group.command('relabel') @@ -99,9 +99,9 @@ def group_relabel(group, label): try: group.label = label except UniquenessError as exception: - echo.echo_critical('Error: {}.'.format(exception)) + echo.echo_critical(f'Error: {exception}.') else: - echo.echo_success('Label changed to {}'.format(label)) + echo.echo_success(f'Label changed to {label}') @verdi_group.command('description') @@ -115,7 +115,7 @@ def group_description(group, description): """ if description: group.description = description - echo.echo_success('Changed the description of Group<{}>'.format(group.label)) + echo.echo_success(f'Changed the description of Group<{group.label}>') else: echo.echo(group.description) @@ -269,11 +269,11 @@ def group_list( # Query for specific group names filters['or'] = [] if startswith: - filters['or'].append({'label': {'like': '{}%'.format(escape_for_sql_like(startswith))}}) + filters['or'].append({'label': {'like': f'{escape_for_sql_like(startswith)}%'}}) if endswith: - filters['or'].append({'label': {'like': '%{}'.format(escape_for_sql_like(endswith))}}) + filters['or'].append({'label': {'like': f'%{escape_for_sql_like(endswith)}'}}) if contains: - filters['or'].append({'label': {'like': '%{}%'.format(escape_for_sql_like(contains))}}) + filters['or'].append({'label': {'like': f'%{escape_for_sql_like(contains)}%'}}) builder.append(orm.Group, filters=filters, tag='group', project='*') @@ -338,9 +338,9 @@ def group_create(group_label): group, created = orm.Group.objects.get_or_create(label=group_label) if created: - echo.echo_success("Group created with PK = {} and name '{}'".format(group.id, group.label)) + echo.echo_success(f"Group created with PK = {group.id} and name '{group.label}'") else: - echo.echo_info("Group '{}' already exists, PK = {}".format(group.label, group.id)) + echo.echo_info(f"Group '{group.label}' already exists, PK = {group.id}") @verdi_group.command('copy') @@ -358,12 +358,12 @@ def group_copy(source_group, destination_group): # Issue warning if destination group is not empty and get user confirmation to continue if not created and not dest_group.is_empty: - echo.echo_warning('Destination group<{}> already exists and is not empty.'.format(dest_group.label)) + echo.echo_warning(f'Destination group<{dest_group.label}> already exists and is not empty.') click.confirm('Do you wish to continue anyway?', abort=True) # Copy nodes dest_group.add_nodes(list(source_group.nodes)) - echo.echo_success('Nodes copied from group<{}> to group<{}>'.format(source_group.label, dest_group.label)) + echo.echo_success(f'Nodes copied from group<{source_group.label}> to group<{dest_group.label}>') @verdi_group.group('path') diff --git a/aiida/cmdline/commands/cmd_help.py b/aiida/cmdline/commands/cmd_help.py index 5503a43774..5eca57b24c 100644 --- a/aiida/cmdline/commands/cmd_help.py +++ b/aiida/cmdline/commands/cmd_help.py @@ -29,7 +29,7 @@ def verdi_help(ctx, command): if not cmd: # we should never end up here since verdi.get_command(...) 
gives # suggestions if the command could not be found and calls click.fail - echo.echo_critical("command '{}' not found".format(command)) + echo.echo_critical(f"command '{command}' not found") cmdctx = click.Context(cmd, info_name=cmd.name, parent=ctx.parent) diff --git a/aiida/cmdline/commands/cmd_import.py b/aiida/cmdline/commands/cmd_import.py index 4435d41ff2..e53ad2b0a1 100644 --- a/aiida/cmdline/commands/cmd_import.py +++ b/aiida/cmdline/commands/cmd_import.py @@ -55,7 +55,7 @@ def _echo_error( # pylint: disable=unused-argument IMPORT_LOGGER.debug('%s', traceback.format_exc()) - exception = '{}: {}'.format(raised_exception.__class__.__name__, str(raised_exception)) + exception = f'{raised_exception.__class__.__name__}: {str(raised_exception)}' echo.echo_error(message) echo.echo(exception) @@ -95,7 +95,7 @@ def _try_import(migration_performed, file_to_import, archive, group, migration, expected_keys = ['extras_mode_existing', 'extras_mode_new', 'comment_mode'] for key in expected_keys: if key not in kwargs: - raise ValueError("{} needed for utility function '{}' to use in 'import_data'".format(key, '_try_import')) + raise ValueError(f"{key} needed for utility function '_try_import' to use in 'import_data'") # Initialization migrate_archive = False @@ -106,7 +106,7 @@ def _try_import(migration_performed, file_to_import, archive, group, migration, if migration_performed: # Migration has been performed, something is still wrong _echo_error( - '{} has been migrated, but it still cannot be imported'.format(archive), + f'{archive} has been migrated, but it still cannot be imported', non_interactive=non_interactive, raised_exception=exception, **kwargs @@ -130,13 +130,13 @@ def _try_import(migration_performed, file_to_import, archive, group, migration, echo.echo_critical(str(exception)) except Exception as exception: _echo_error( - 'an exception occurred while importing the archive {}'.format(archive), + f'an exception occurred while importing the archive {archive}', non_interactive=non_interactive, raised_exception=exception, **kwargs ) else: - echo.echo_success('imported archive {}'.format(archive)) + echo.echo_success(f'imported archive {archive}') return migrate_archive @@ -159,7 +159,7 @@ def _migrate_archive(ctx, temp_folder, file_to_import, archive, non_interactive, from aiida.cmdline.commands.cmd_export import migrate # Echo start - echo.echo_info('migrating archive {}'.format(archive)) + echo.echo_info(f'migrating archive {archive}') # Initialization temp_out_file = 'migrated_importfile.aiida' @@ -262,17 +262,17 @@ def cmd_import( if webpages is not None: for webpage in webpages: try: - echo.echo_info('retrieving archive URLS from {}'.format(webpage)) + echo.echo_info(f'retrieving archive URLS from {webpage}') urls = get_valid_import_links(webpage) except Exception as exception: _echo_error( - 'an exception occurred while trying to discover archives at URL {}'.format(webpage), + f'an exception occurred while trying to discover archives at URL {webpage}', non_interactive=non_interactive, more_archives=webpage != webpages[-1] or archives_file or archives_url, raised_exception=exception ) else: - echo.echo_success('{} archive URLs discovered and added'.format(len(urls))) + echo.echo_success(f'{len(urls)} archive URLs discovered and added') archives_url += urls # Preliminary sanity check @@ -295,7 +295,7 @@ def cmd_import( # Import local archives for archive in archives_file: - echo.echo_info('importing archive {}'.format(archive)) + echo.echo_info(f'importing archive {archive}') # 
Initialization import_opts['archive'] = archive @@ -318,12 +318,12 @@ def cmd_import( import_opts['archive'] = archive import_opts['more_archives'] = archive != archives_url[-1] - echo.echo_info('downloading archive {}'.format(archive)) + echo.echo_info(f'downloading archive {archive}') try: response = urllib.request.urlopen(archive) except Exception as exception: - _echo_error('downloading archive {} failed'.format(archive), raised_exception=exception, **import_opts) + _echo_error(f'downloading archive {archive} failed', raised_exception=exception, **import_opts) with SandboxFolder() as temp_folder: temp_file = 'importfile.tar.gz' diff --git a/aiida/cmdline/commands/cmd_node.py b/aiida/cmdline/commands/cmd_node.py index 8c0e4b12ab..3f73c106ff 100644 --- a/aiida/cmdline/commands/cmd_node.py +++ b/aiida/cmdline/commands/cmd_node.py @@ -52,7 +52,7 @@ def repo_cat(node, relative_path): # The sepcial case is breakon pipe error, which is usually OK. if exception.errno != errno.EPIPE: # Incorrect path or file not readable - echo.echo_critical('failed to get the content of file `{}`: {}'.format(relative_path, exception)) + echo.echo_critical(f'failed to get the content of file `{relative_path}`: {exception}') @verdi_node_repo.command('ls') @@ -67,7 +67,7 @@ def repo_ls(node, relative_path, color): try: list_repository_contents(node, relative_path, color) except FileNotFoundError: - echo.echo_critical('the path `{}` does not exist for the given node'.format(relative_path)) + echo.echo_critical(f'the path `{relative_path}` does not exist for the given node') @verdi_node_repo.command('dump') @@ -91,7 +91,7 @@ def repo_dump(node, output_directory): try: output_directory.mkdir(parents=True, exist_ok=False) except FileExistsError: - echo.echo_critical('Invalid value for "OUTPUT_DIRECTORY": Path "{}" exists.'.format(output_directory)) + echo.echo_critical(f'Invalid value for "OUTPUT_DIRECTORY": Path "{output_directory}" exists.') def _copy_tree(key, output_dir): # pylint: disable=too-many-branches """ @@ -101,7 +101,7 @@ def _copy_tree(key, output_dir): # pylint: disable=too-many-branches for file in node.list_objects(key): # Not using os.path.join here, because this is the "path" # in the AiiDA node, not an actual OS - level path. - file_key = file.name if not key else key + '/' + file.name + file_key = file.name if not key else f'{key}/{file.name}' if file.file_type == FileType.DIRECTORY: new_out_dir = output_dir / file.name assert not new_out_dir.exists() @@ -144,13 +144,13 @@ def node_label(nodes, label, raw, force): else: if not force: - warning = 'Are you sure you want to set the label for {} nodes?'.format(len(nodes)) + warning = f'Are you sure you want to set the label for {len(nodes)} nodes?' click.confirm(warning, abort=True) for node in nodes: node.label = label - echo.echo_success("Set label '{}' for {} nodes".format(label, len(nodes))) + echo.echo_success(f"Set label '{label}' for {len(nodes)} nodes") @verdi_node.command('description') @@ -178,13 +178,13 @@ def node_description(nodes, description, force, raw): else: if not force: - warning = 'Are you sure you want to set the description for {} nodes?'.format(len(nodes)) + warning = f'Are you sure you want to set the description for {len(nodes)} nodes?' 
click.confirm(warning, abort=True) for node in nodes: node.description = description - echo.echo_success('Set description for {} nodes'.format(len(nodes))) + echo.echo_success(f'Set description for {len(nodes)} nodes') @verdi_node.command('show') @@ -214,9 +214,9 @@ def node_show(nodes, print_groups): echo.echo('#### GROUPS:') if qb.count() == 0: - echo.echo('Node {} does not belong to any group'.format(node.pk)) + echo.echo(f'Node {node.pk} does not belong to any group') else: - echo.echo('Node {} belongs to the following groups:'.format(node.pk)) + echo.echo(f'Node {node.pk} belongs to the following groups:') res = qb.iterdict() table = [(gr['groups']['id'], gr['groups']['label'], gr['groups']['type_string']) for gr in res] table.sort() @@ -248,7 +248,7 @@ def echo_node_dict(nodes, keys, fmt, identifier, raw, use_attrs=True): if raw: all_nodes.append({id_name: id_value, dict_name: node_dict}) else: - echo.echo('{}: {}'.format(id_name, id_value), bold=True) + echo.echo(f'{id_name}: {id_value}', bold=True) echo.echo_dictionary(node_dict, fmt=fmt) if raw: echo.echo_dictionary(all_nodes, fmt=fmt) @@ -374,7 +374,7 @@ def rehash(nodes, entry_point, force): for node, in iter_hash: node.rehash() - echo.echo_success('{} nodes re-hashed.'.format(num_nodes)) + echo.echo_success(f'{num_nodes} nodes re-hashed.') @verdi_node.group('graph') @@ -447,9 +447,9 @@ def graph_generate( print_func = echo.echo_info if verbose else None link_types = {'all': (), 'logic': ('input_work', 'return'), 'data': ('input_calc', 'create')}[link_types] - echo.echo_info('Initiating graphviz engine: {}'.format(engine)) + echo.echo_info(f'Initiating graphviz engine: {engine}') graph = Graph(engine=engine, node_id_type=identifier) - echo.echo_info('Recursing ancestors, max depth={}'.format(ancestor_depth)) + echo.echo_info(f'Recursing ancestors, max depth={ancestor_depth}') graph.recurse_ancestors( root_node, @@ -460,7 +460,7 @@ def graph_generate( highlight_classes=highlight_classes, print_func=print_func ) - echo.echo_info('Recursing descendants, max depth={}'.format(descendant_depth)) + echo.echo_info(f'Recursing descendants, max depth={descendant_depth}') graph.recurse_descendants( root_node, depth=descendant_depth, @@ -471,10 +471,10 @@ def graph_generate( print_func=print_func ) output_file_name = graph.graphviz.render( - filename='{}.{}'.format(root_node.pk, engine), format=output_format, view=show, cleanup=True + filename=f'{root_node.pk}.{engine}', format=output_format, view=show, cleanup=True ) - echo.echo_success('Output file: {}'.format(output_file_name)) + echo.echo_success(f'Output file: {output_file_name}') @verdi_node.group('comment') @@ -494,7 +494,7 @@ def comment_add(nodes, content): for node in nodes: node.add_comment(content) - echo.echo_success('comment added to {} nodes'.format(len(nodes))) + echo.echo_success(f'comment added to {len(nodes)} nodes') @verdi_comment.command('update') @@ -508,14 +508,14 @@ def comment_update(comment_id, content): try: comment = Comment.objects.get(id=comment_id) except (exceptions.NotExistent, exceptions.MultipleObjectsError): - echo.echo_critical('comment<{}> not found'.format(comment_id)) + echo.echo_critical(f'comment<{comment_id}> not found') if content is None: content = multi_line_input.edit_comment(comment.content) comment.set_content(content) - echo.echo_success('comment<{}> updated'.format(comment_id)) + echo.echo_success(f'comment<{comment_id}> updated') @verdi_comment.command('show') @@ -525,7 +525,7 @@ def comment_update(comment_id, content): def 
comment_show(user, nodes): """Show the comments of one or multiple nodes.""" for node in nodes: - msg = '* Comments for Node<{}>'.format(node.pk) + msg = f'* Comments for Node<{node.pk}>' echo.echo('*' * len(msg)) echo.echo(msg) echo.echo('*' * len(msg)) @@ -537,18 +537,18 @@ def comment_show(user, nodes): if not comments: valid_users = ', '.join(set(comment.user.email for comment in all_comments)) - echo.echo_warning('no comments found for user {}'.format(user)) - echo.echo_info('valid users found for Node<{}>: {}'.format(node.pk, valid_users)) + echo.echo_warning(f'no comments found for user {user}') + echo.echo_info(f'valid users found for Node<{node.pk}>: {valid_users}') else: comments = all_comments for comment in comments: comment_msg = [ - 'Comment<{}> for Node<{}> by {}'.format(comment.id, node.pk, comment.user.email), - 'Created on {}'.format(timezone.localtime(comment.ctime).strftime('%Y-%m-%d %H:%M')), - 'Last modified on {}'.format(timezone.localtime(comment.mtime).strftime('%Y-%m-%d %H:%M')), - '\n{}\n'.format(comment.content), + f'Comment<{comment.id}> for Node<{node.pk}> by {comment.user.email}', + f"Created on {timezone.localtime(comment.ctime).strftime('%Y-%m-%d %H:%M')}", + f"Last modified on {timezone.localtime(comment.mtime).strftime('%Y-%m-%d %H:%M')}", + f'\n{comment.content}\n', ] echo.echo('\n'.join(comment_msg)) @@ -565,11 +565,11 @@ def comment_remove(force, comment): from aiida.orm.comments import Comment if not force: - click.confirm('Are you sure you want to remove comment<{}>'.format(comment), abort=True) + click.confirm(f'Are you sure you want to remove comment<{comment}>', abort=True) try: Comment.objects.delete(comment) except exceptions.NotExistent as exception: - echo.echo_critical('failed to remove comment<{}>: {}'.format(comment, exception)) + echo.echo_critical(f'failed to remove comment<{comment}>: {exception}') else: - echo.echo_success('removed comment<{}>'.format(comment)) + echo.echo_success(f'removed comment<{comment}>') diff --git a/aiida/cmdline/commands/cmd_plugin.py b/aiida/cmdline/commands/cmd_plugin.py index e28ae82df7..ec93f887f1 100644 --- a/aiida/cmdline/commands/cmd_plugin.py +++ b/aiida/cmdline/commands/cmd_plugin.py @@ -36,7 +36,7 @@ def plugin_list(entry_point_group, entry_point): if entry_point_group is None: echo.echo_info('Available entry point groups:') for group in sorted(ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP.keys()): - echo.echo('* {}'.format(group)) + echo.echo(f'* {group}') echo.echo('') echo.echo_info('Pass one of the groups as an additional argument to show the registered plugins') @@ -54,15 +54,15 @@ def plugin_list(entry_point_group, entry_point): else: echo.echo(str(plugin.get_description())) except AttributeError: - echo.echo_error('No description available for {}'.format(entry_point)) + echo.echo_error(f'No description available for {entry_point}') else: entry_points = get_entry_point_names(entry_point_group) if entry_points: - echo.echo('Registered entry points for {}:'.format(entry_point_group)) + echo.echo(f'Registered entry points for {entry_point_group}:') for registered_entry_point in entry_points: - echo.echo('* {}'.format(registered_entry_point)) + echo.echo(f'* {registered_entry_point}') echo.echo('') echo.echo_info('Pass the entry point as an argument to display detailed information') else: - echo.echo_error('No plugins found for group {}'.format(entry_point_group)) + echo.echo_error(f'No plugins found for group {entry_point_group}') diff --git a/aiida/cmdline/commands/cmd_process.py 
b/aiida/cmdline/commands/cmd_process.py index 16fd660f60..04513ad94a 100644 --- a/aiida/cmdline/commands/cmd_process.py +++ b/aiida/cmdline/commands/cmd_process.py @@ -75,7 +75,7 @@ def process_list( else: tabulated = tabulate(projected, headers=headers) echo.echo(tabulated) - echo.echo('\nTotal results: {}\n'.format(len(projected))) + echo.echo(f'\nTotal results: {len(projected)}\n') print_last_process_state_change() # Second query to get active process count # Currently this is slow but will be fixed wiith issue #2770 @@ -109,7 +109,7 @@ def process_call_root(processes): caller = process.caller if caller is None: - echo.echo('No callers found for Process<{}>'.format(process.pk)) + echo.echo(f'No callers found for Process<{process.pk}>') continue while True: @@ -120,7 +120,7 @@ def process_call_root(processes): caller = next_caller - echo.echo('{}'.format(caller.pk)) + echo.echo(f'{caller.pk}') @verdi_process.command('report') @@ -150,7 +150,7 @@ def process_report(processes, levelname, indent_size, max_depth): elif isinstance(process, (CalcFunctionNode, WorkFunctionNode)): echo.echo(get_process_function_report(process)) else: - echo.echo('Nothing to show for node type {}'.format(process.__class__)) + echo.echo(f'Nothing to show for node type {process.__class__}') @verdi_process.command('status') @@ -179,13 +179,13 @@ def process_kill(processes, timeout, wait): for process in processes: if process.is_terminated: - echo.echo_error('Process<{}> is already terminated'.format(process.pk)) + echo.echo_error(f'Process<{process.pk}> is already terminated') continue try: future = controller.kill_process(process.pk, msg='Killed through `verdi process kill`') except communications.UnroutableError: - echo.echo_error('Process<{}> is unreachable'.format(process.pk)) + echo.echo_error(f'Process<{process.pk}> is unreachable') else: futures[future] = process @@ -217,13 +217,13 @@ def process_pause(processes, all_entries, timeout, wait): for process in processes: if process.is_terminated: - echo.echo_error('Process<{}> is already terminated'.format(process.pk)) + echo.echo_error(f'Process<{process.pk}> is already terminated') continue try: future = controller.pause_process(process.pk, msg='Paused through `verdi process pause`') except communications.UnroutableError: - echo.echo_error('Process<{}> is unreachable'.format(process.pk)) + echo.echo_error(f'Process<{process.pk}> is unreachable') else: futures[future] = process @@ -254,13 +254,13 @@ def process_play(processes, all_entries, timeout, wait): for process in processes: if process.is_terminated: - echo.echo_error('Process<{}> is already terminated'.format(process.pk)) + echo.echo_error(f'Process<{process.pk}> is already terminated') continue try: future = controller.play_process(process.pk) except communications.UnroutableError: - echo.echo_error('Process<{}> is unreachable'.format(process.pk)) + echo.echo_error(f'Process<{process.pk}> is unreachable') else: futures[future] = process @@ -284,7 +284,7 @@ def _print(communicator, body, sender, subject, correlation_id): # pylint: disa if correlation_id is None: correlation_id = '--' - echo.echo('Process<{}> [{}|{}]: {}'.format(sender, subject, correlation_id, body)) + echo.echo(f'Process<{sender}> [{subject}|{correlation_id}]: {body}') communicator = get_manager().get_communicator() echo.echo_info('watching for broadcasted messages, press CTRL+C to stop...') @@ -292,7 +292,7 @@ def _print(communicator, body, sender, subject, correlation_id): # pylint: disa for process in processes: if 
process.is_terminated: - echo.echo_error('Process<{}> is already terminated'.format(process.pk)) + echo.echo_error(f'Process<{process.pk}> is already terminated') continue communicator.add_broadcast_subscriber(BroadcastFilter(_print, sender=process.pk)) @@ -349,24 +349,22 @@ def process_actions(futures_map, infinitive, present, past, wait=False, timeout= try: result = future.result() except CommunicationTimeout: - echo.echo_error('call to {} Process<{}> timed out'.format(infinitive, process.pk)) + echo.echo_error(f'call to {infinitive} Process<{process.pk}> timed out') except Exception as exception: # pylint: disable=broad-except - echo.echo_error('failed to {} Process<{}>: {}'.format(infinitive, process.pk, exception)) + echo.echo_error(f'failed to {infinitive} Process<{process.pk}>: {exception}') else: if result is True: - echo.echo_success('{} Process<{}>'.format(past, process.pk)) + echo.echo_success(f'{past} Process<{process.pk}>') elif result is False: - echo.echo_error('problem {} Process<{}>'.format(present, process.pk)) + echo.echo_error(f'problem {present} Process<{process.pk}>') elif isinstance(result, kiwipy.Future): - echo.echo_success('scheduled {} Process<{}>'.format(infinitive, process.pk)) + echo.echo_success(f'scheduled {infinitive} Process<{process.pk}>') scheduled[result] = process else: - echo.echo_error( - 'got unexpected response when {} Process<{}>: {}'.format(present, process.pk, result) - ) + echo.echo_error(f'got unexpected response when {present} Process<{process.pk}>: {result}') if wait and scheduled: - echo.echo_info('waiting for process(es) {}'.format(','.join([str(proc.pk) for proc in scheduled.values()]))) + echo.echo_info(f"waiting for process(es) {','.join([str(proc.pk) for proc in scheduled.values()])}") for future in futures.as_completed(scheduled.keys(), timeout=timeout): process = scheduled[future] @@ -374,16 +372,14 @@ def process_actions(futures_map, infinitive, present, past, wait=False, timeout= try: result = future.result() except Exception as exception: # pylint: disable=broad-except - echo.echo_error('failed to {} Process<{}>: {}'.format(infinitive, process.pk, exception)) + echo.echo_error(f'failed to {infinitive} Process<{process.pk}>: {exception}') else: if result is True: - echo.echo_success('{} Process<{}>'.format(past, process.pk)) + echo.echo_success(f'{past} Process<{process.pk}>') elif result is False: - echo.echo_error('problem {} Process<{}>'.format(present, process.pk)) + echo.echo_error(f'problem {present} Process<{process.pk}>') else: - echo.echo_error( - 'got unexpected response when {} Process<{}>: {}'.format(present, process.pk, result) - ) + echo.echo_error(f'got unexpected response when {present} Process<{process.pk}>: {result}') except futures.TimeoutError: - echo.echo_error('timed out trying to {} processes {}'.format(infinitive, futures_map.values())) + echo.echo_error(f'timed out trying to {infinitive} processes {futures_map.values()}') diff --git a/aiida/cmdline/commands/cmd_profile.py b/aiida/cmdline/commands/cmd_profile.py index 6ac1671237..90dbf79439 100644 --- a/aiida/cmdline/commands/cmd_profile.py +++ b/aiida/cmdline/commands/cmd_profile.py @@ -35,10 +35,10 @@ def profile_list(): # to be able to see the configuration directory, for instance for those who have set `AIIDA_PATH`. This way # they can at least verify that it is correctly set. 
from aiida.manage.configuration.settings import AIIDA_CONFIG_FOLDER - echo.echo_info('configuration folder: {}'.format(AIIDA_CONFIG_FOLDER)) + echo.echo_info(f'configuration folder: {AIIDA_CONFIG_FOLDER}') echo.echo_critical(str(exception)) else: - echo.echo_info('configuration folder: {}'.format(config.dirpath)) + echo.echo_info(f'configuration folder: {config.dirpath}') if not config.profiles: echo.echo_warning('no profiles configured: run `verdi setup` to create one') @@ -56,7 +56,7 @@ def profile_show(profile): if profile is None: echo.echo_critical('no profile to show') - echo.echo_info('Profile: {}'.format(profile.name)) + echo.echo_info(f'Profile: {profile.name}') data = sorted([(k.lower(), v) for k, v in profile.dictionary.items()]) echo.echo(tabulate.tabulate(data)) @@ -71,7 +71,7 @@ def profile_setdefault(profile): echo.echo_critical(str(exception)) config.set_default_profile(profile.name, overwrite=True).store() - echo.echo_success('{} set as default profile'.format(profile.name)) + echo.echo_success(f'{profile.name} set as default profile') @verdi_profile.command('delete') @@ -103,7 +103,7 @@ def profile_delete(force, include_config, include_db, include_repository, profil from aiida.manage.configuration.setup import delete_profile for profile in profiles: - echo.echo_info("Deleting profile '{}'".format(profile.name)) + echo.echo_info(f"Deleting profile '{profile.name}'") delete_profile( profile, non_interactive=force, diff --git a/aiida/cmdline/commands/cmd_run.py b/aiida/cmdline/commands/cmd_run.py index d46b6f984c..727bfe1286 100644 --- a/aiida/cmdline/commands/cmd_run.py +++ b/aiida/cmdline/commands/cmd_run.py @@ -47,7 +47,7 @@ def validate_entrypoint_string(ctx, param, value): # pylint: disable=unused-arg try: autogroup.Autogroup.validate(value) except Exception as exc: - raise click.BadParameter(str(exc) + ' ({})'.format(value)) + raise click.BadParameter(f'{str(exc)} ({value})') return value @@ -106,7 +106,7 @@ def run(scriptname, varargs, auto_group, auto_group_label_prefix, group_name, ex # Dynamically load modules (the same of verdi shell) - but in globals_dict, not in the current environment for app_mod, model_name, alias in DEFAULT_MODULES_LIST: - globals_dict['{}'.format(alias)] = getattr(__import__(app_mod, {}, {}, model_name), model_name) + globals_dict[f'{alias}'] = getattr(__import__(app_mod, {}, {}, model_name), model_name) if group_name: warnings.warn('--group-name is deprecated, use `--auto-group-label-prefix` instead', AiidaDeprecationWarning) # pylint: disable=no-member @@ -137,7 +137,7 @@ def run(scriptname, varargs, auto_group, auto_group_label_prefix, group_name, ex # Here we use a standard open and not open, as exec will later fail if passed a unicode type string. 
handle = open(scriptname, 'r') except IOError: - echo.echo_critical("Unable to load file '{}'".format(scriptname)) + echo.echo_critical(f"Unable to load file '{scriptname}'") else: try: # Must add also argv[0] diff --git a/aiida/cmdline/commands/cmd_setup.py b/aiida/cmdline/commands/cmd_setup.py index 3fb5159b13..28b34effdd 100644 --- a/aiida/cmdline/commands/cmd_setup.py +++ b/aiida/cmdline/commands/cmd_setup.py @@ -64,7 +64,7 @@ def setup( profile.broker_host = broker_host profile.broker_port = broker_port profile.broker_virtual_host = broker_virtual_host - profile.repository_uri = 'file://' + repository + profile.repository_uri = f'file://{repository}' config = get_config() @@ -74,7 +74,7 @@ def setup( # Load the profile load_profile(profile.name) - echo.echo_success('created new profile `{}`.'.format(profile.name)) + echo.echo_success(f'created new profile `{profile.name}`.') # Migrate the database echo.echo_info('migrating the database.') @@ -84,7 +84,7 @@ def setup( backend.migrate() except Exception as exception: # pylint: disable=broad-except echo.echo_critical( - 'database migration failed, probably because connection details are incorrect:\n{}'.format(exception) + f'database migration failed, probably because connection details are incorrect:\n{exception}' ) else: echo.echo_success('database migration completed.') diff --git a/aiida/cmdline/commands/cmd_status.py b/aiida/cmdline/commands/cmd_status.py index e021e85d4b..a5599ee630 100644 --- a/aiida/cmdline/commands/cmd_status.py +++ b/aiida/cmdline/commands/cmd_status.py @@ -74,7 +74,7 @@ def verdi_status(print_traceback, no_rmq): try: profile = manager.get_profile() - print_status(ServiceStatus.UP, 'profile', 'On profile {}'.format(profile.name)) + print_status(ServiceStatus.UP, 'profile', f'On profile {profile.name}') except Exception as exc: message = 'Unable to read AiiDA profile' print_status(ServiceStatus.ERROR, 'profile', message, exception=exc, print_traceback=print_traceback) @@ -115,11 +115,11 @@ def verdi_status(print_traceback, no_rmq): comm = manager.create_communicator(with_orm=False) comm.stop() except Exception as exc: - message = 'Unable to connect to rabbitmq with URL: {}'.format(profile.get_rmq_url()) + message = f'Unable to connect to rabbitmq with URL: {profile.get_rmq_url()}' print_status(ServiceStatus.ERROR, 'rabbitmq', message, exception=exc, print_traceback=print_traceback) exit_code = ExitCode.CRITICAL else: - print_status(ServiceStatus.UP, 'rabbitmq', 'Connected as {}'.format(profile.get_rmq_url())) + print_status(ServiceStatus.UP, 'rabbitmq', f'Connected as {profile.get_rmq_url()}') # Getting the daemon status try: @@ -152,11 +152,11 @@ def print_status(status, service, msg='', exception=None, print_traceback=False) :param msg: message string """ symbol = STATUS_SYMBOLS[status] - click.secho(' {} '.format(symbol['string']), fg=symbol['color'], nl=False) - click.secho('{:12s} {}'.format(service + ':', msg)) + click.secho(f" {symbol['string']} ", fg=symbol['color'], nl=False) + click.secho(f"{service + ':':12s} {msg}") if exception is not None: - echo.echo_error('{}: {}'.format(type(exception).__name__, exception)) + echo.echo_error(f'{type(exception).__name__}: {exception}') if print_traceback: import traceback diff --git a/aiida/cmdline/commands/cmd_user.py b/aiida/cmdline/commands/cmd_user.py index cd20475c14..3240ce61f1 100644 --- a/aiida/cmdline/commands/cmd_user.py +++ b/aiida/cmdline/commands/cmd_user.py @@ -127,7 +127,7 @@ def user_configure(ctx, user, first_name, last_name, institution, 
set_default): user.store() - echo.echo_success('{} successfully {}'.format(user.email, action)) + echo.echo_success(f'{user.email} successfully {action}') if set_default: ctx.invoke(user_set_default, user=user) @@ -140,4 +140,4 @@ def user_configure(ctx, user, first_name, last_name, institution, set_default): def user_set_default(ctx, user): """Set a user as the default user for the profile.""" set_default_user(ctx.obj.profile, user) - echo.echo_success('set `{}` as the new default user for profile `{}`'.format(user.email, ctx.obj.profile.name)) + echo.echo_success(f'set `{user.email}` as the new default user for profile `{ctx.obj.profile.name}`') diff --git a/aiida/cmdline/commands/cmd_verdi.py b/aiida/cmdline/commands/cmd_verdi.py index 6645f2e858..e9cebf5229 100644 --- a/aiida/cmdline/commands/cmd_verdi.py +++ b/aiida/cmdline/commands/cmd_verdi.py @@ -77,7 +77,7 @@ def get_command(self, ctx, cmd_name): '{matches}'.format(cmd=cmd_name, matches='\n'.join('\t{}'.format(m) for m in sorted(matches))) ) else: - ctx.fail("'{cmd}' is not a verdi command.\n\nNo similar commands found.".format(cmd=cmd_name)) + ctx.fail(f"'{cmd_name}' is not a verdi command.\n\nNo similar commands found.") return None diff --git a/aiida/cmdline/params/options/__init__.py b/aiida/cmdline/params/options/__init__.py index b10b32327f..16ae5eb95c 100644 --- a/aiida/cmdline/params/options/__init__.py +++ b/aiida/cmdline/params/options/__init__.py @@ -79,7 +79,7 @@ def decorator(command): if traversal_rule.toggleable: option_name = name.replace('_', '-') option_label = '--{option_name}/--no-{option_name}'.format(option_name=option_name) - help_string = 'Whether to expand the node set by following {}.'.format(TRAVERSAL_RULE_HELP_STRING[name]) + help_string = f'Whether to expand the node set by following {TRAVERSAL_RULE_HELP_STRING[name]}.' click.option(option_label, default=traversal_rule.default, show_default=True, help=help_string)(command) return command diff --git a/aiida/cmdline/params/options/commands/computer.py b/aiida/cmdline/params/options/commands/computer.py index 0a98049b4f..4411d58cda 100644 --- a/aiida/cmdline/params/options/commands/computer.py +++ b/aiida/cmdline/params/options/commands/computer.py @@ -30,7 +30,7 @@ def should_call_default_mpiprocs_per_machine(ctx): # pylint: disable=invalid-na try: scheduler_cls = scheduler_ep.load() except ImportError: - raise ImportError("Unable to load the '{}' scheduler".format(scheduler_ep.name)) + raise ImportError(f"Unable to load the '{scheduler_ep.name}' scheduler") else: raise ValidationError( 'The should_call_... 
function should always be run (and prompted) AFTER asking for a scheduler' diff --git a/aiida/cmdline/params/options/commands/setup.py index a1d49f737f..b600c73e83 100644 --- a/aiida/cmdline/params/options/commands/setup.py +++ b/aiida/cmdline/params/options/commands/setup.py @@ -31,7 +31,7 @@ def validate_profile_parameter(ctx): """ option = 'profile' if option not in ctx.params or ctx.params[option] is None or not isinstance(ctx.params[option], Profile): - raise click.BadParameter('specifying the name of the profile is required', param_hint='"--{}"'.format(option)) + raise click.BadParameter('specifying the name of the profile is required', param_hint=f'"--{option}"') def get_profile_attribute_default(attribute_tuple, ctx): @@ -96,7 +96,7 @@ def get_quicksetup_database_name(ctx, param, value): # pylint: disable=unused-a config = get_config() profile = ctx.params['profile'].name config_hash = hashlib.md5(config.dirpath.encode('utf-8')).hexdigest() - database_name = '{profile}_{user}_{hash}'.format(profile=profile, user=getpass.getuser(), hash=config_hash) + database_name = f'{profile}_{getpass.getuser()}_{config_hash}' return database_name @@ -115,7 +115,7 @@ def get_quicksetup_username(ctx, param, value): # pylint: disable=unused-argume config = get_config() config_hash = hashlib.md5(config.dirpath.encode('utf-8')).hexdigest() - username = 'aiida_qs_{user}_{hash}'.format(user=getpass.getuser(), hash=config_hash) + username = f'aiida_qs_{getpass.getuser()}_{config_hash}' return username diff --git a/aiida/cmdline/params/options/config.py b/aiida/cmdline/params/options/config.py index 2cfcbd79bc..9ab5d82278 100644 --- a/aiida/cmdline/params/options/config.py +++ b/aiida/cmdline/params/options/config.py @@ -35,7 +35,7 @@ class ConfigFileOption(OverridableOption): @click.option('computer_name') @CONFIG_FILE(help='Configuration file for computer_setup') def computer_setup(computer_name): - click.echo("Setting up computer {}".format(computername)) + click.echo(f"Setting up computer {computer_name}") computer_setup --config config.yml diff --git a/aiida/cmdline/params/options/interactive.py b/aiida/cmdline/params/options/interactive.py index 38f50f5e27..beee3a92c2 100644 --- a/aiida/cmdline/params/options/interactive.py +++ b/aiida/cmdline/params/options/interactive.py @@ -42,7 +42,7 @@ class InteractiveOption(ConditionalOption): @click.command() @click.option('label', prompt='Label', cls=InteractiveOption) def foo(label): - click.echo('Labeling with label: {}'.format(label)) + click.echo(f'Labeling with label: {label}') """ PROMPT_COLOR = 'yellow' @@ -71,9 +71,7 @@ def __init__(self, param_decls=None, switch=None, prompt_fn=None, contextual_def # I do it after calling super so e.g.
'self.name' is defined if not self._prompt: raise TypeError( - "Interactive options need to have a prompt specified, but '{}' does not have a prompt defined".format( - self.name - ) + f"Interactive options need to have a prompt specified, but '{self.name}' does not have a prompt defined" ) # other kwargs @@ -151,7 +149,7 @@ def format_help_message(self): gives a list of possibilities for parameter types that support completion """ - msg = self.help or 'Expecting {}'.format(self.type.name) + msg = self.help or f'Expecting {self.type.name}' choices = getattr(self.type, 'complete', lambda x, y: [])(None, '') if choices: choice_table = [] @@ -160,7 +158,7 @@ def format_help_message(self): if isinstance(choice, tuple): choice_table.append('\t{:<12} {}'.format(*choice)) else: - choice_table.append('\t{:<12}'.format(choice)) + choice_table.append(f'\t{choice:<12}') msg += '\n'.join(choice_table) return msg @@ -201,8 +199,8 @@ def safely_convert(self, value, param, ctx): def simple_prompt_loop(self, ctx, param, value): """Prompt until successful conversion. dispatch control sequences.""" if not hasattr(ctx, 'prompt_loop_info_printed'): - echo.echo_info('enter "{}" for help'.format(self.CHARACTER_PROMPT_HELP)) - echo.echo_info('enter "{}" to ignore the default and set no value'.format(self.CHARACTER_IGNORE_DEFAULT)) + echo.echo_info(f'enter "{self.CHARACTER_PROMPT_HELP}" for help') + echo.echo_info(f'enter "{self.CHARACTER_IGNORE_DEFAULT}" to ignore the default and set no value') ctx.prompt_loop_info_printed = True while 1: diff --git a/aiida/cmdline/params/types/choice.py b/aiida/cmdline/params/types/choice.py index b1ccce62e0..92d5894eb3 100644 --- a/aiida/cmdline/params/types/choice.py +++ b/aiida/cmdline/params/types/choice.py @@ -26,7 +26,7 @@ class LazyChoice(click.ParamType): def __init__(self, get_choices): if not callable(get_choices): - raise TypeError("Must pass a callable, got '{}'".format(get_choices)) + raise TypeError(f"Must pass a callable, got '{get_choices}'") super().__init__() self._get_choices = get_choices @@ -62,4 +62,4 @@ def __repr__(self): if self.__click_choice is None: return 'LazyChoice(UNINITIALISED)' - return 'LazyChoice(%r)' % list(self.choices) + return f'LazyChoice({list(self.choices)!r})' diff --git a/aiida/cmdline/params/types/computer.py b/aiida/cmdline/params/types/computer.py index bd41a03cd3..3767a6142e 100644 --- a/aiida/cmdline/params/types/computer.py +++ b/aiida/cmdline/params/types/computer.py @@ -55,7 +55,7 @@ def convert(self, value, param, ctx): if newval is None: return None if not newval.startswith('#!'): - self.fail('The shebang line should start with the two caracters #!, it is instead: {}'.format(newval)) + self.fail(f'The shebang line should start with the two characters #!, it is instead: {newval}') return newval def __repr__(self): @@ -87,7 +87,7 @@ def convert(self, value, param, ctx): try: job_resource_keys = scheduler_ep.load().job_resource_class.get_valid_keys() except ImportError: - self.fail("Unable to load the '{}' scheduler".format(scheduler_ep.name)) + self.fail(f"Unable to load the '{scheduler_ep.name}' scheduler") else: self.fail( 'Scheduler not specified for this computer!
The mpirun-command must always be asked ' @@ -101,8 +101,8 @@ def convert(self, value, param, ctx): try: newval.format(**subst) except KeyError as exc: - self.fail("In workdir there is an unknown replacement field '{}'".format(exc.args[0])) + self.fail(f"In workdir there is an unknown replacement field '{exc.args[0]}'") except ValueError as exc: - self.fail("Error in the string: '{}'".format(exc)) + self.fail(f"Error in the string: '{exc}'") return newval diff --git a/aiida/cmdline/params/types/config.py b/aiida/cmdline/params/types/config.py index 9af822d654..3927092f58 100644 --- a/aiida/cmdline/params/types/config.py +++ b/aiida/cmdline/params/types/config.py @@ -23,7 +23,7 @@ def convert(self, value, param, ctx): from aiida.manage.configuration.options import get_option, get_option_names if value not in get_option_names(): - raise click.BadParameter('{} is not a valid configuration option'.format(value)) + raise click.BadParameter(f'{value} is not a valid configuration option') return get_option(value) diff --git a/aiida/cmdline/params/types/identifier.py b/aiida/cmdline/params/types/identifier.py index 94deaf21a4..513ee2a82b 100644 --- a/aiida/cmdline/params/types/identifier.py +++ b/aiida/cmdline/params/types/identifier.py @@ -59,7 +59,7 @@ def __init__(self, sub_classes=None): try: entry_point = get_entry_point_from_string(entry_point_string) except (ValueError, exceptions.EntryPointError) as exception: - raise ValueError('{} is not a valid entry point string: {}'.format(entry_point_string, exception)) + raise ValueError(f'{entry_point_string} is not a valid entry point string: {exception}') else: self._entry_points.append(entry_point) @@ -107,7 +107,7 @@ def convert(self, value, param, ctx): try: sub_class = entry_point.load() except ImportError as exception: - raise RuntimeError('failed to load the entry point {}: {}'.format(entry_point, exception)) + raise RuntimeError(f'failed to load the entry point {entry_point}: {exception}') if not issubclass(sub_class, loader.orm_base_class): raise RuntimeError( diff --git a/aiida/cmdline/params/types/multiple.py b/aiida/cmdline/params/types/multiple.py index 9e959bcf9a..733ce7dcd4 100644 --- a/aiida/cmdline/params/types/multiple.py +++ b/aiida/cmdline/params/types/multiple.py @@ -23,9 +23,9 @@ def __init__(self, param_type): self._param_type = param_type if hasattr(param_type, 'name'): - self.name = '{}...'.format(param_type.name) + self.name = f'{param_type.name}...' else: - self.name = '{}...'.format(param_type.__name__.upper()) + self.name = f'{param_type.__name__.upper()}...' 
def get_metavar(self, param): try: @@ -37,4 +37,4 @@ def convert(self, value, param, ctx): try: return tuple([self._param_type(entry) for entry in value]) except ValueError: - self.fail('could not convert {} into type {}'.format(value, self._param_type)) + self.fail(f'could not convert {value} into type {self._param_type}') diff --git a/aiida/cmdline/params/types/path.py b/aiida/cmdline/params/types/path.py index 55b2b08166..daeb5ae115 100644 --- a/aiida/cmdline/params/types/path.py +++ b/aiida/cmdline/params/types/path.py @@ -23,7 +23,7 @@ def _check_timeout_seconds(timeout_seconds): try: timeout_seconds = int(timeout_seconds) except ValueError: - raise TypeError('timeout_seconds should be an integer but got: {}'.format(type(timeout_seconds))) + raise TypeError(f'timeout_seconds should be an integer but got: {type(timeout_seconds)}') if timeout_seconds < 0 or timeout_seconds > 60: raise ValueError('timeout_seconds needs to be in the range [0;60].') diff --git a/aiida/cmdline/params/types/plugin.py b/aiida/cmdline/params/types/plugin.py index 607e0c2a35..387e5127a5 100644 --- a/aiida/cmdline/params/types/plugin.py +++ b/aiida/cmdline/params/types/plugin.py @@ -64,7 +64,7 @@ def __init__(self, group=None, load=False, *args, **kwargs): grp = ENTRY_POINT_GROUP_PREFIX + grp if grp not in valid_entry_point_groups: - raise ValueError('entry point group {} is not recognized'.format(grp)) + raise ValueError(f'entry point group {grp} is not recognized') groups.append(grp) @@ -189,7 +189,7 @@ def get_entry_point_from_string(self, entry_point_string): group = matching_groups[0] else: - ValueError('invalid entry point string format: {}'.format(entry_point_string)) + raise ValueError(f'invalid entry point string format: {entry_point_string}') try: entry_point = get_entry_point(group, name) diff --git a/aiida/cmdline/params/types/profile.py b/aiida/cmdline/params/types/profile.py index 6c3902bad6..61d5737f9c 100644 --- a/aiida/cmdline/params/types/profile.py +++ b/aiida/cmdline/params/types/profile.py @@ -44,7 +44,7 @@ def convert(self, value, param, ctx): profile = Profile(value, {}) else: if self._cannot_exist: - self.fail(str('the profile `{}` already exists'.format(value))) + self.fail(f'the profile `{value}` already exists') if self._load_profile: load_profile(profile.name) diff --git a/aiida/cmdline/params/types/strings.py b/aiida/cmdline/params/types/strings.py index b681766094..63abbcd599 100644 --- a/aiida/cmdline/params/types/strings.py +++ b/aiida/cmdline/params/types/strings.py @@ -49,7 +49,7 @@ class LabelStringType(NonEmptyStringParamType): def convert(self, value, param, ctx): newval = super().convert(value, param, ctx) - if not re.match('^[{}]*$'.format(self.ALPHABET), newval): + if not re.match(f'^[{self.ALPHABET}]*$', newval): self.fail('Please use only alphanumeric characters, dashes, underscores or dots') return newval diff --git a/aiida/cmdline/params/types/user.py b/aiida/cmdline/params/types/user.py index 561cb2531d..71a1c4eaab 100644 --- a/aiida/cmdline/params/types/user.py +++ b/aiida/cmdline/params/types/user.py @@ -35,10 +35,10 @@ def convert(self, value, param, ctx): if self._create: return orm.User(email=value) - self.fail("User '{}' not found".format(value), param, ctx) + self.fail(f"User '{value}' not found", param, ctx) if len(results) > 1: - self.fail("Multiple users found with email '{}': {}".format(value, results)) + self.fail(f"Multiple users found with email '{value}': {results}") return results[0] diff --git a/aiida/cmdline/utils/ascii_vis.py
b/aiida/cmdline/utils/ascii_vis.py index 9872e3abe2..f7fa03d78e 100644 --- a/aiida/cmdline/utils/ascii_vis.py +++ b/aiida/cmdline/utils/ascii_vis.py @@ -38,7 +38,7 @@ def print_node_tree(cls, node, max_depth, follow_links=()): echo.echo(get_node_summary(node)) - tree_string = '({});'.format(cls._build_tree(node, max_depth=max_depth, follow_links=follow_links)) + tree_string = f'({cls._build_tree(node, max_depth=max_depth, follow_links=follow_links)});' tmp = Tree(tree_string, format=1) echo.echo(tmp.get_ascii(show_internal=True)) @@ -69,7 +69,7 @@ def _build_tree(cls, node, show_pk=True, max_depth=None, follow_links=(), depth= lab = node.__class__.__name__ if show_pk: - lab += ' [{}]'.format(node.pk) + lab += f' [{node.pk}]' out_values.append(lab) @@ -154,7 +154,7 @@ def get_ascii_tree(node, node_label=None, show_pk=True, max_depth=1, follow_link warnings.warn('function is deprecated and will be removed in `aiida-core==2.0.0`.', AiidaDeprecationWarning) # pylint: disable=no-member from ete3 import Tree tree_string = build_tree(node, node_label, show_pk, max_depth, follow_links_of_type, descend) - tree = Tree('({});'.format(tree_string), format=1) + tree = Tree(f'({tree_string});', format=1) return tree.get_ascii(show_internal=True) @@ -206,7 +206,7 @@ def build_tree(node, node_label=None, show_pk=True, max_depth=1, follow_links_of ) if relatives: - out_values.append('({})'.format(', '.join(relatives))) + out_values.append(f"({', '.join(relatives)})") out_values.append(_generate_node_label(node, node_label, show_pk)) @@ -252,7 +252,7 @@ def _generate_node_label(node, node_attr, show_pk): label = node.__class__.__name__ if show_pk: - label += ' [{}]'.format(node.pk) + label += f' [{node.pk}]' return label @@ -262,19 +262,19 @@ def calc_info(node): from aiida.orm import ProcessNode, WorkChainNode if not isinstance(node, ProcessNode): - raise TypeError('Unknown type: {}'.format(type(node))) + raise TypeError(f'Unknown type: {type(node)}') process_label = node.process_label process_state = node.process_state.value.capitalize() exit_status = node.exit_status if exit_status is not None: - string = '{}<{}> {} [{}]'.format(process_label, node.pk, process_state, exit_status) + string = f'{process_label}<{node.pk}> {process_state} [{exit_status}]' else: - string = '{}<{}> {}'.format(process_label, node.pk, process_state) + string = f'{process_label}<{node.pk}> {process_state}' if isinstance(node, WorkChainNode) and node.stepper_state_info: - string += ' [{}]'.format(node.stepper_state_info) + string += f' [{node.stepper_state_info}]' return string @@ -315,20 +315,20 @@ def format_tree_descending(tree, prefix='', pos=-1): if pos == -1: pre = '' elif pos == 0: - pre = '{}{}'.format(prefix, TREE_FIRST_ENTRY) + pre = f'{prefix}{TREE_FIRST_ENTRY}' elif pos == 1: - pre = '{}{}'.format(prefix, TREE_MIDDLE_ENTRY) + pre = f'{prefix}{TREE_MIDDLE_ENTRY}' else: - pre = '{}{}'.format(prefix, TREE_LAST_ENTRY) - text.append('{}{}'.format(pre, info)) + pre = f'{prefix}{TREE_LAST_ENTRY}' + text.append(f'{pre}{info}') if isinstance(tree, tuple): _, value = tree num_entries = len(value) if pos in [-1, 2]: - new_prefix = '{} '.format(prefix) + new_prefix = f'{prefix} ' else: - new_prefix = '{}\u2502 '.format(prefix) + new_prefix = f'{prefix}│ ' for i, entry in enumerate(value): if i == num_entries - 1: pos = 2 diff --git a/aiida/cmdline/utils/common.py b/aiida/cmdline/utils/common.py index 4a6d240a8a..04bdecec0b 100644 --- a/aiida/cmdline/utils/common.py +++ b/aiida/cmdline/utils/common.py @@ -24,7 +24,7 @@ def 
get_env_with_venv_bin(): config = get_config() currenv = os.environ.copy() - currenv['PATH'] = os.path.dirname(sys.executable) + ':' + currenv['PATH'] + currenv['PATH'] = f"{os.path.dirname(sys.executable)}:{currenv['PATH']}" currenv['AIIDA_PATH'] = config.dirpath currenv['PYTHONUNBUFFERED'] = 'True' @@ -70,7 +70,7 @@ def print_last_process_state_change(process_type=None): timedelta = timezone.delta(timestamp, timezone.now()) formatted = format_local_time(timestamp, format_str='at %H:%M:%S on %Y-%m-%d') relative = str_timedelta(timedelta, negative_to_zero=True, max_num_fields=1) - echo_info('last time an entry changed state: {} ({})'.format(relative, formatted)) + echo_info(f'last time an entry changed state: {relative} ({formatted})') if not client.is_daemon_running: echo_warning('the daemon is not running', bold=True) @@ -99,11 +99,11 @@ def get_node_summary(node): process_state_string = process_state.value.capitalize() if process_state == ProcessState.FINISHED and node.exit_message: - table.append(['state', '{} [{}] {}'.format(process_state_string, node.exit_status, node.exit_message)]) + table.append(['state', f'{process_state_string} [{node.exit_status}] {node.exit_message}']) elif process_state == ProcessState.FINISHED: - table.append(['state', '{} [{}]'.format(process_state_string, node.exit_status)]) + table.append(['state', f'{process_state_string} [{node.exit_status}]']) elif process_state == ProcessState.EXCEPTED: - table.append(['state', '{} <{}>'.format(process_state_string, node.exception)]) + table.append(['state', f'{process_state_string} <{node.exception}>']) else: table.append(['state', process_state_string]) @@ -123,7 +123,7 @@ def get_node_summary(node): pass else: if computer is not None: - table.append(['computer', '[{}] {}'.format(node.computer.pk, node.computer.label)]) + table.append(['computer', f'[{node.computer.pk}] {node.computer.label}']) return tabulate(table, headers=table_headers) @@ -148,29 +148,27 @@ def get_node_info(node, include_summary=True): nodes_output = node.get_outgoing(link_type=(LinkType.CREATE, LinkType.RETURN)) if nodes_input: - result += '\n' + format_nested_links(nodes_input.nested(), headers=['Inputs', 'PK', 'Type']) + result += f"\n{format_nested_links(nodes_input.nested(), headers=['Inputs', 'PK', 'Type'])}" if nodes_output: - result += '\n' + format_nested_links(nodes_output.nested(), headers=['Outputs', 'PK', 'Type']) + result += f"\n{format_nested_links(nodes_output.nested(), headers=['Outputs', 'PK', 'Type'])}" if nodes_caller: - result += '\n' + format_flat_links( - sorted(nodes_caller.all(), key=lambda x: x.node.ctime), headers=['Caller', 'PK', 'Type'] - ) + links = sorted(nodes_caller.all(), key=lambda x: x.node.ctime) + result += f"\n{format_flat_links(links, headers=['Caller', 'PK', 'Type'])}" if nodes_called: - result += '\n' + format_flat_links( - sorted(nodes_called.all(), key=lambda x: x.node.ctime), headers=['Called', 'PK', 'Type'] - ) + links = sorted(nodes_called.all(), key=lambda x: x.node.ctime) + result += f"\n{format_flat_links(links, headers=['Called', 'PK', 'Type'])}" log_messages = orm.Log.objects.get_logs_for(node) if log_messages: table = [] table_headers = ['Log messages'] - table.append(['There are {} log messages for this calculation'.format(len(log_messages))]) - table.append(["Run 'verdi process report {}' to see them".format(node.pk)]) - result += '\n\n{}'.format(tabulate(table, headers=table_headers)) + table.append([f'There are {len(log_messages)} log messages for this calculation']) + table.append([f"Run 
'verdi process report {node.pk}' to see them"]) + result += f'\n\n{tabulate(table, headers=table_headers)}' return result @@ -187,7 +185,7 @@ def format_flat_links(links, headers): for link_triple in links: table.append([link_triple.link_label, link_triple.node.pk, link_triple.node.get_attribute('process_label', '')]) - result = '\n{}'.format(tabulate(table, headers=headers)) + result = f'\n{tabulate(table, headers=headers)}' return result @@ -220,9 +218,9 @@ def format_recursive(links, depth=0): table = [] for depth, label, pk, class_name in format_recursive(links): - table.append(['{indent}{label}'.format(indent=' ' * (depth * indent_size), label=label), pk, class_name]) + table.append([f"{' ' * (depth * indent_size)}{label}", pk, class_name]) - result = '\n{}'.format(tabulate(table, headers=headers)) + result = f'\n{tabulate(table, headers=headers)}' tb.PRESERVE_WHITESPACE = False return result @@ -247,39 +245,37 @@ def get_calcjob_report(calcjob): report = [] if calcjob_state == CalcJobState.WITHSCHEDULER: - state_string = '{}, scheduler state: {}'.format( - calcjob_state, scheduler_state if scheduler_state else '(unknown)' - ) + state_string = f"{calcjob_state}, scheduler state: {scheduler_state if scheduler_state else '(unknown)'}" else: - state_string = '{}'.format(calcjob_state) + state_string = f'{calcjob_state}' - label_string = ' [{}]'.format(calcjob.label) if calcjob.label else '' + label_string = f' [{calcjob.label}]' if calcjob.label else '' - report.append('*** {}{}: {}'.format(calcjob.pk, label_string, state_string)) + report.append(f'*** {calcjob.pk}{label_string}: {state_string}') if scheduler_out is None: report.append('*** Scheduler output: N/A') elif scheduler_out: - report.append('*** Scheduler output:\n{}'.format(scheduler_out)) + report.append(f'*** Scheduler output:\n{scheduler_out}') else: report.append('*** (empty scheduler output file)') if scheduler_err is None: report.append('*** Scheduler errors: N/A') elif scheduler_err: - report.append('*** Scheduler errors:\n{}'.format(scheduler_err)) + report.append(f'*** Scheduler errors:\n{scheduler_err}') else: report.append('*** (empty scheduler errors file)') if log_messages: - report.append('*** {} LOG MESSAGES:'.format(len(log_messages))) + report.append(f'*** {len(log_messages)} LOG MESSAGES:') else: report.append('*** 0 LOG MESSAGES') for log in log_messages: - report.append('+-> {} at {}'.format(log.levelname, log.time)) + report.append(f'+-> {log.levelname} at {log.time}') for message in log.message.splitlines(): - report.append(' | {}'.format(message)) + report.append(f' | {message}') return '\n'.join(report) @@ -296,7 +292,7 @@ def get_process_function_report(node): report = [] for log in orm.Log.objects.get_logs_for(node): - report.append('{time:%Y-%m-%d %H:%M:%S} [{id}]: {msg}'.format(id=log.id, msg=log.message, time=log.time)) + report.append(f'{log.time:%Y-%m-%d %H:%M:%S} [{log.id}]: {log.message}') return '\n'.join(report) @@ -396,7 +392,7 @@ def print_process_info(process): click.secho('Description:\n', fg='red', bold=True) for line in docstring: - click.echo('\t' + line.lstrip()) + click.echo(f' {line.lstrip()}') click.echo() print_process_spec(process.spec()) @@ -425,7 +421,7 @@ def build_entries(ports): valid_types = ', '.join([valid_type.__name__ for valid_type in valid_types if valid_type is not None]) required = 'required' if port.required else 'optional' info = port.help if port.help is not None else '' - info = info[:75] + ' ...' if len(info) > 75 else info + info = f'{info[:75]} ...' 
if len(info) > 75 else info result.append([name, required, valid_types, info]) return result @@ -513,5 +509,5 @@ def check_worker_load(active_slots): percent_load = (active_slots / available_slots) if percent_load > warning_threshold: echo.echo('') # New line - echo.echo_warning('{:.0f}% of the available daemon worker slots have been used!'.format(percent_load * 100)) + echo.echo_warning(f'{percent_load * 100:.0f}% of the available daemon worker slots have been used!') echo.echo_warning("Increase the number of workers with 'verdi daemon incr'.\n") diff --git a/aiida/cmdline/utils/daemon.py b/aiida/cmdline/utils/daemon.py index 57792f8bf1..552501ee39 100644 --- a/aiida/cmdline/utils/daemon.py +++ b/aiida/cmdline/utils/daemon.py @@ -124,8 +124,7 @@ class StartCircusNotFound(Exception): raise StartCircusNotFound() # Also this is a case in which the process is not there anymore except (psutil.AccessDenied, psutil.NoSuchProcess, StartCircusNotFound): echo.echo_warning( - 'Deleted apparently stale daemon PID file as its associated process<{}> does not exist anymore'. - format(pid) + f'Deleted apparently stale daemon PID file as its associated process<{pid}> does not exist anymore' ) if os.path.isfile(client.circus_pid_file): os.remove(client.circus_pid_file) diff --git a/aiida/cmdline/utils/echo.py b/aiida/cmdline/utils/echo.py index 806afc60b1..7a7c3210cf 100644 --- a/aiida/cmdline/utils/echo.py +++ b/aiida/cmdline/utils/echo.py @@ -174,7 +174,7 @@ def echo_formatted_list(collection, attributes, sort=None, highlight=None, hide= else: entries = collection - template = '{symbol}' + ' {}' * len(attributes) + template = f"{{symbol}}{' {}' * len(attributes)}" for entry in entries: if hide and hide(entry): @@ -199,7 +199,7 @@ def default_jsondump(data): if isinstance(data, datetime.datetime): return timezone.localtime(data).strftime('%Y-%m-%dT%H:%M:%S.%f%z') - raise TypeError(repr(data) + ' is not JSON serializable') + raise TypeError(f'{repr(data)} is not JSON serializable') return json.dumps(dictionary, indent=4, sort_keys=True, default=default_jsondump) @@ -219,7 +219,7 @@ def echo_dictionary(dictionary, fmt='json+date'): format_function = VALID_DICT_FORMATS_MAPPING[fmt] except KeyError: formats = ', '.join(VALID_DICT_FORMATS_MAPPING.keys()) - raise ValueError('Unrecognised printing format. Valid formats are: {}'.format(formats)) + raise ValueError(f'Unrecognised printing format. 
Valid formats are: {formats}') echo(format_function(dictionary)) diff --git a/aiida/cmdline/utils/multi_line_input.py b/aiida/cmdline/utils/multi_line_input.py index 97c3e440de..23d0d3c969 100644 --- a/aiida/cmdline/utils/multi_line_input.py +++ b/aiida/cmdline/utils/multi_line_input.py @@ -29,7 +29,7 @@ def edit_multiline_template(template_name, comment_marker='#=', extension=None, if content: # Remove all comments, which are all lines that start with the comment marker - value = re.sub(r'(^' + re.escape(comment_marker) + '.*$\n)+', '', content, flags=re.M).strip() + value = re.sub(f'(^{re.escape(comment_marker)}.*$\n)+', '', content, flags=re.M).strip() return value diff --git a/aiida/cmdline/utils/query/formatting.py b/aiida/cmdline/utils/query/formatting.py index 4fe5ad285d..36cbb3835e 100644 --- a/aiida/cmdline/utils/query/formatting.py +++ b/aiida/cmdline/utils/query/formatting.py @@ -49,9 +49,9 @@ def format_state(process_state, paused=None, exit_status=None): symbol = '\u00B7' # middle dot if process_state == 'finished' and exit_status is not None: - return '{} {} [{}]'.format(symbol, format_process_state(process_state), exit_status) + return f'{symbol} {format_process_state(process_state)} [{exit_status}]' - return '{} {}'.format(symbol, format_process_state(process_state)) + return f'{symbol} {format_process_state(process_state)}' def format_process_state(process_state): @@ -61,7 +61,7 @@ def format_process_state(process_state): :param process_state: the process state :return: string representation of process state """ - return '{}'.format(process_state.capitalize() if process_state else None) + return f'{process_state.capitalize() if process_state else None}' def format_sealed(sealed): diff --git a/aiida/cmdline/utils/query/mapping.py b/aiida/cmdline/utils/query/mapping.py index f469c75dfb..17d46a8b43 100644 --- a/aiida/cmdline/utils/query/mapping.py +++ b/aiida/cmdline/utils/query/mapping.py @@ -83,14 +83,14 @@ def __init__(self, projections, projection_labels=None, projection_attributes=No self._valid_projections = projections - sealed_key = 'attributes.{}'.format(Sealable.SEALED_KEY) - job_state_key = 'attributes.{}'.format('state') - scheduler_state_key = 'attributes.{}'.format('scheduler_state') - process_paused_key = 'attributes.{}'.format(ProcessNode.PROCESS_PAUSED_KEY) - process_label_key = 'attributes.{}'.format(ProcessNode.PROCESS_LABEL_KEY) - process_state_key = 'attributes.{}'.format(ProcessNode.PROCESS_STATE_KEY) - process_status_key = 'attributes.{}'.format(ProcessNode.PROCESS_STATUS_KEY) - exit_status_key = 'attributes.{}'.format(ProcessNode.EXIT_STATUS_KEY) + sealed_key = f'attributes.{Sealable.SEALED_KEY}' + job_state_key = 'attributes.state' + scheduler_state_key = 'attributes.scheduler_state' + process_paused_key = f'attributes.{ProcessNode.PROCESS_PAUSED_KEY}' + process_label_key = f'attributes.{ProcessNode.PROCESS_LABEL_KEY}' + process_state_key = f'attributes.{ProcessNode.PROCESS_STATE_KEY}' + process_status_key = f'attributes.{ProcessNode.PROCESS_STATUS_KEY}' + exit_status_key = f'attributes.{ProcessNode.EXIT_STATUS_KEY}' default_labels = {'pk': 'PK', 'uuid': 'UUID', 'ctime': 'Created', 'mtime': 'Modified', 'state': 'Process State'} @@ -123,21 +123,21 @@ def __init__(self, projections, projection_labels=None, projection_attributes=No if projection_labels is not None: for projection, label in projection_labels.items(): if projection not in self.valid_projections: - raise ValueError('{} is not a valid projection'.format(projection)) + raise 
ValueError(f'{projection} is not a valid projection') else: default_labels[projection] = label if projection_attributes is not None: for projection, attribute in projection_attributes.items(): if projection not in self.valid_projections: - raise ValueError('{} is not a valid projection'.format(projection)) + raise ValueError(f'{projection} is not a valid projection') else: default_attributes[projection] = attribute if projection_formatters is not None: for projection, formatter in projection_formatters.items(): if projection not in self.valid_projections: - raise ValueError('{} is not a valid projection'.format(projection)) + raise ValueError(f'{projection} is not a valid projection') else: default_formatters[projection] = formatter diff --git a/aiida/common/escaping.py b/aiida/common/escaping.py index fa17d09e33..dbec8ba545 100644 --- a/aiida/common/escaping.py +++ b/aiida/common/escaping.py @@ -41,7 +41,7 @@ def escape_for_bash(str_to_escape): str_to_escape = str(str_to_escape) escaped_quotes = str_to_escape.replace("'", """'"'"'""") - return "'{}'".format(escaped_quotes) + return f"'{escaped_quotes}'" # Mapping of "SQL" tokens into corresponding regex expressions @@ -126,7 +126,7 @@ def tokenizer(string, tokens_to_apply): # regex symbols like $ ^ [ ] etc return re.escape(string) - return '^{}$'.format(tokenizer(sql_pattern, tokens_to_apply=[token_pair[0] for token_pair in SQL_TO_REGEX_TOKENS])) + return f'^{tokenizer(sql_pattern, tokens_to_apply=[token_pair[0] for token_pair in SQL_TO_REGEX_TOKENS])}$' def sql_string_match(string, pattern): diff --git a/aiida/common/extendeddicts.py b/aiida/common/extendeddicts.py index 616d907f12..9a3c3ee3b8 100644 --- a/aiida/common/extendeddicts.py +++ b/aiida/common/extendeddicts.py @@ -40,7 +40,7 @@ def __init__(self, dictionary=None): def __repr__(self): """Representation of the object.""" - return '%s(%s)' % (self.__class__.__name__, dict.__repr__(self)) + return f'{self.__class__.__name__}({dict.__repr__(self)})' def __getattr__(self, attr): """Read a key as an attribute. @@ -50,7 +50,7 @@ def __getattr__(self, attr): try: return self[attr] except KeyError: - errmsg = "'{}' object has no attribute '{}'".format(self.__class__.__name__, attr) + errmsg = f"'{self.__class__.__name__}' object has no attribute '{attr}'" raise AttributeError(errmsg) def __setattr__(self, attr, value): @@ -59,8 +59,7 @@ def __setattr__(self, attr, value): self[attr] = value except KeyError: raise AttributeError( - "AttributeError: '{}' is not a valid attribute of the object " - "'{}'".format(attr, self.__class__.__name__) + f"AttributeError: '{attr}' is not a valid attribute of the object '{self.__class__.__name__}'" ) def __delattr__(self, attr): @@ -71,7 +70,7 @@ def __delattr__(self, attr): try: del self[attr] except KeyError: - errmsg = "'{}' object has no attribute '{}'".format(self.__class__.__name__, attr) + errmsg = f"'{self.__class__.__name__}' object has no attribute '{attr}'" raise AttributeError(errmsg) def __deepcopy__(self, memo=None): @@ -114,7 +113,7 @@ def __init__(self, init=None): for key in init: if key not in self._valid_fields: - errmsg = "'{}' is not a valid key for object '{}'".format(key, self.__class__.__name__) + errmsg = f"'{key}' is not a valid key for object '{self.__class__.__name__}'" raise KeyError(errmsg) super().__init__(init) @@ -123,7 +122,7 @@ def __setitem__(self, item, value): Set a key as an attribute. 
""" if item not in self._valid_fields: - errmsg = "'{}' is not a valid key for object '{}'".format(item, self.__class__.__name__) + errmsg = f"'{item}' is not a valid key for object '{self.__class__.__name__}'" raise KeyError(errmsg) super().__setitem__(item, value) @@ -210,14 +209,12 @@ def validate(self): for key in self.get_default_fields(): # I get the attribute starting with validate_ and containing the name of the key # I set a dummy function if there is no validate_KEY function defined - validator = getattr(self, 'validate_{}'.format(key), lambda value: None) + validator = getattr(self, f'validate_{key}', lambda value: None) if callable(validator): try: validator(self[key]) except Exception as exc: - raise exceptions.ValidationError( - "Invalid value for key '{}' [{}]: {}".format(key, exc.__class__.__name__, exc) - ) + raise exceptions.ValidationError(f"Invalid value for key '{key}' [{exc.__class__.__name__}]: {exc}") def __setattr__(self, attr, value): """ diff --git a/aiida/common/folders.py b/aiida/common/folders.py index 79b6eb3db5..df185faa79 100644 --- a/aiida/common/folders.py +++ b/aiida/common/folders.py @@ -188,7 +188,7 @@ def insert_path(self, src, dest_name=None, overwrite=True): # This automatically overwrites files shutil.copyfile(src, dest_abs_path) else: - raise IOError('destination already exists: {}'.format(os.path.join(dest_abs_path))) + raise IOError(f'destination already exists: {os.path.join(dest_abs_path)}') else: shutil.copyfile(src, dest_abs_path) elif os.path.isdir(src): @@ -201,7 +201,7 @@ def insert_path(self, src, dest_name=None, overwrite=True): # This automatically overwrites files shutil.copytree(src, dest_abs_path) else: - raise IOError('destination already exists: {}'.format(os.path.join(dest_abs_path))) + raise IOError(f'destination already exists: {os.path.join(dest_abs_path)}') else: shutil.copytree(src, dest_abs_path) else: @@ -262,12 +262,12 @@ def get_abs_path(self, relpath, check_existence=False): dest_abs_path = os.path.join(self.abspath, relpath) if not os.path.commonprefix([dest_abs_path, self.folder_limit]) == self.folder_limit: - errstr = "You didn't specify a valid filename: {}".format(relpath) + errstr = f"You didn't specify a valid filename: {relpath}" raise ValueError(errstr) if check_existence: if not os.path.exists(dest_abs_path): - raise OSError('{} does not exist within the folder {}'.format(relpath, self.abspath)) + raise OSError(f'{relpath} does not exist within the folder {self.abspath}') return dest_abs_path @@ -366,7 +366,7 @@ def replace_with_folder(self, srcdir, move=False, overwrite=False): if overwrite: self.erase() elif self.exists(): - raise IOError('Location {} already exists, and overwrite is set to False'.format(self.abspath)) + raise IOError(f'Location {self.abspath} already exists, and overwrite is set to False') # Create parent dir, if needed, with the right mode pardir = os.path.dirname(self.abspath) @@ -466,7 +466,7 @@ def __init__(self, basepath=CALC_JOB_DRY_RUN_BASE_PATH): while True: counter += 1 - subfolder_path = os.path.join(self.abspath, '{}-{:05d}'.format(subfolder_basename, counter)) + subfolder_path = os.path.join(self.abspath, f'{subfolder_basename}-{counter:05d}') try: os.mkdir(subfolder_path) @@ -500,10 +500,7 @@ def __init__(self, section, uuid, subfolder=os.curdir): Pass the uuid as a string. """ if section not in VALID_SECTIONS: - retstr = ( - "Repository section '{}' not allowed. 
" - 'Valid sections are: {}'.format(section, ','.join(VALID_SECTIONS)) - ) + retstr = (f"Repository section '{section}' not allowed. Valid sections are: {','.join(VALID_SECTIONS)}") raise ValueError(retstr) self._section = section self._uuid = uuid diff --git a/aiida/common/hashing.py b/aiida/common/hashing.py index f10ef0e786..02d7c6e95d 100644 --- a/aiida/common/hashing.py +++ b/aiida/common/hashing.py @@ -68,7 +68,7 @@ def get_random_string(length=12, allowed_chars='abcdefghijklmnopqrstuvwxyzABCDEF # time a random string is required. This may change the # properties of the chosen random sequence slightly, but this # is better than absolute predictability. - random.seed(hashlib.sha256(('%s%s%s' % (random.getstate(), time.time(), HASHING_KEY)).encode('utf-8')).digest()) + random.seed(hashlib.sha256(f'{random.getstate()}{time.time()}{HASHING_KEY}'.encode('utf-8')).digest()) return ''.join(random.choice(allowed_chars) for i in range(length)) @@ -123,7 +123,7 @@ def _make_hash(object_to_hash, **_): Implementation of the ``make_hash`` function. The hash is created as a 28 byte integer, and only later converted to a string. """ - raise ValueError('Value of type {} cannot be hashed'.format(type(object_to_hash))) + raise ValueError(f'Value of type {type(object_to_hash)} cannot be hashed') def _single_digest(obj_type, obj_bytes=b''): @@ -217,7 +217,7 @@ def _(val, **kwargs): @_make_hash.register(numbers.Integral) def _(val, **kwargs): """get the hash of the little-endian signed long long representation of the integer""" - return [_single_digest('int', '{}'.format(val).encode('utf-8'))] + return [_single_digest('int', f'{val}'.encode('utf-8'))] @_make_hash.register(bool) @@ -288,5 +288,5 @@ def float_to_text(value, sig): :param value: the float value to convert :param sig: choose how many digits after the comma should be output """ - fmt = '{{:.{}g}}'.format(sig) + fmt = f'{{:.{sig}g}}' return fmt.format(value) diff --git a/aiida/common/lang.py b/aiida/common/lang.py index bb8503532a..f2bb8906f6 100644 --- a/aiida/common/lang.py +++ b/aiida/common/lang.py @@ -38,7 +38,7 @@ def type_check(what, of_type, msg=None, allow_none=False): if not isinstance(what, of_type): if msg is None: - msg = "Got object of type '{}', expecting '{}'".format(type(what), of_type) + msg = f"Got object of type '{type(what)}', expecting '{of_type}'" raise TypeError(msg) return what @@ -62,7 +62,7 @@ def wrapped_fn(self, *args, **kwargs): # pylint: disable=missing-docstring try: getattr(super(), func.__name__) except AttributeError: - raise RuntimeError('Function {} does not override a superclass method'.format(func)) + raise RuntimeError(f'Function {func} does not override a superclass method') return func(self, *args, **kwargs) else: diff --git a/aiida/common/links.py b/aiida/common/links.py index 414278dd12..7e8b1fcb7b 100644 --- a/aiida/common/links.py +++ b/aiida/common/links.py @@ -106,7 +106,7 @@ def validate_link_label(link_label): """ import re - message = 'invalid link label `{}`: should be string type but is instead: {}'.format(link_label, type(link_label)) + message = f'invalid link label `{link_label}`: should be string type but is instead: {type(link_label)}' type_check(link_label, str, message) allowed_character_set = '[a-zA-Z0-9_]' diff --git a/aiida/common/timezone.py b/aiida/common/timezone.py index 0727e2c68c..a44bad40c0 100644 --- a/aiida/common/timezone.py +++ b/aiida/common/timezone.py @@ -73,7 +73,7 @@ def make_aware(value, timezone=None, is_dst=None): return timezone.localize(value, is_dst=is_dst) if 
is_aware(value): - raise ValueError('make_aware expects a naive datetime, got %s' % value) + raise ValueError(f'make_aware expects a naive datetime, got {value}') # This may be wrong around DST changes! # See http://pytz.sourceforge.net/#localized-times-and-date-arithmetic diff --git a/aiida/common/utils.py b/aiida/common/utils.py index ef9092a4fa..244c9b994a 100644 --- a/aiida/common/utils.py +++ b/aiida/common/utils.py @@ -85,7 +85,7 @@ def get_unique_filename(filename, list_of_filenames): # Not optimized, but for the moment this should be fast enough append_int = 1 while True: - new_filename = '{:s}-{:d}{:s}'.format(basename, append_int, ext) + new_filename = f'{basename:s}-{append_int:d}{ext:s}' if new_filename not in list_of_filenames: break append_int += 1 @@ -157,7 +157,7 @@ def str_timedelta(dt, max_num_fields=3, short=False, negative_to_zero=False): # # Return the resulting string, appending a suitable string if the time # is negative - return '{}{}'.format(raw_string, negative_string) + return f'{raw_string}{negative_string}' def get_class_string(obj): @@ -168,9 +168,9 @@ def get_class_string(obj): It works both for classes and for class instances. """ if inspect.isclass(obj): - return '{}.{}'.format(obj.__module__, obj.__name__) + return f'{obj.__module__}.{obj.__name__}' - return '{}.{}'.format(obj.__module__, obj.__class__.__name__) + return f'{obj.__module__}.{obj.__class__.__name__}' def get_object_from_string(class_string): @@ -264,7 +264,7 @@ def are_dir_trees_equal(dir1, dir2): if not res: return False, msg - return True, 'The given directories ({} and {}) are equal'.format(dir1, dir2) + return True, f'The given directories ({dir1} and {dir2}) are equal' class Prettifier: @@ -419,9 +419,7 @@ def __init__(self, format): # pylint: disable=redefined-builtin try: self._prettifier_f = self.prettifiers[format] # pylint: disable=unsubscriptable-object except KeyError: - raise ValueError( - 'Unknown prettifier format {}; valid formats: {}'.format(format, ', '.join(self.get_prettifiers())) - ) + raise ValueError(f"Unknown prettifier format {format}; valid formats: {', '.join(self.get_prettifiers())}") def prettify(self, label): """ @@ -582,4 +580,4 @@ def result(self, raise_error=Exception): def raise_errors(self, raise_cls): if not self.success(): - raise raise_cls('The following errors were encountered: {}'.format(self.errors)) + raise raise_cls(f'The following errors were encountered: {self.errors}') diff --git a/aiida/engine/daemon/client.py b/aiida/engine/daemon/client.py index cc1cac79e0..32d96466bf 100644 --- a/aiida/engine/daemon/client.py +++ b/aiida/engine/daemon/client.py @@ -98,7 +98,7 @@ def cmd_string(self): "Unable to find 'verdi' in the path. 
Make sure that you are working " "in a virtual environment, or that at least the 'verdi' executable is on the PATH" ) - return '{} -p {} devel run_daemon'.format(VERDI_BIN, self.profile.name) + return f'{VERDI_BIN} -p {self.profile.name} devel run_daemon' @property def loglevel(self): @@ -254,7 +254,7 @@ def get_controller_endpoint(self): elif self._ENDPOINT_PROTOCOL == ControllerProtocol.TCP: endpoint = self.get_tcp_endpoint(self.get_circus_port()) else: - raise ValueError('invalid controller protocol {}'.format(self._ENDPOINT_PROTOCOL)) + raise ValueError(f'invalid controller protocol {self._ENDPOINT_PROTOCOL}') return endpoint @@ -270,7 +270,7 @@ def get_pubsub_endpoint(self): elif self._ENDPOINT_PROTOCOL == ControllerProtocol.TCP: endpoint = self.get_tcp_endpoint() else: - raise ValueError('invalid controller protocol {}'.format(self._ENDPOINT_PROTOCOL)) + raise ValueError(f'invalid controller protocol {self._ENDPOINT_PROTOCOL}') return endpoint @@ -286,7 +286,7 @@ def get_stats_endpoint(self): elif self._ENDPOINT_PROTOCOL == ControllerProtocol.TCP: endpoint = self.get_tcp_endpoint() else: - raise ValueError('invalid controller protocol {}'.format(self._ENDPOINT_PROTOCOL)) + raise ValueError(f'invalid controller protocol {self._ENDPOINT_PROTOCOL}') return endpoint diff --git a/aiida/engine/daemon/execmanager.py b/aiida/engine/daemon/execmanager.py index d823ea194f..3a9d0137da 100644 --- a/aiida/engine/daemon/execmanager.py +++ b/aiida/engine/daemon/execmanager.py @@ -47,7 +47,7 @@ def upload_calculation(node, transport, calc_info, folder, inputs=None, dry_run= # chance to perform the state transition. Upon reloading this calculation, it will re-attempt the upload. link_label = 'remote_folder' if node.get_outgoing(RemoteData, link_label_filter=link_label).first(): - execlogger.warning('CalcJobNode<{}> already has a `{}` output: skipping upload'.format(node.pk, link_label)) + execlogger.warning(f'CalcJobNode<{node.pk}> already has a `{link_label}` output: skipping upload') return calc_info computer = node.computer @@ -118,9 +118,7 @@ def upload_calculation(node, transport, calc_info, folder, inputs=None, dry_run= path_lost_found = os.path.join(remote_working_directory, REMOTE_WORK_DIRECTORY_LOST_FOUND) path_target = os.path.join(path_lost_found, calc_info.uuid) logger.warning( - 'tried to create path {} but it already exists, moving the entire folder to {}'.format( - path_existing, path_target - ) + f'tried to create path {path_existing} but it already exists, moving the entire folder to {path_target}' ) # Make sure the lost+found directory exists, then copy the existing folder there and delete the original @@ -162,7 +160,7 @@ def upload_calculation(node, transport, calc_info, folder, inputs=None, dry_run= provenance_exclude_list = calc_info.provenance_exclude_list or [] for uuid, filename, target in local_copy_list: - logger.debug('[submission of calculation {}] copying local file/folder to {}'.format(node.uuid, target)) + logger.debug(f'[submission of calculation {node.uuid}] copying local file/folder to {target}') def find_data_node(inputs, uuid): """Find and return the node with the given UUID from a nested mapping of input nodes. 
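# --- Illustrative aside, not part of the patch: the str.format -> f-string equivalences applied
# throughout this changeset, demonstrated on made-up values. The names below (pk, label, load, sig)
# are placeholders for illustration only, not identifiers from aiida-core.
pk, label, load, sig = 42, 'relax', 0.876, 3
assert 'Node<{}> [{}]'.format(pk, label) == f'Node<{pk}> [{label}]'  # positional arguments are inlined
assert '{p}_{l}'.format(p=pk, l=label) == f'{pk}_{label}'  # keyword arguments become the expressions themselves
assert '{:.0f}%'.format(load * 100) == f'{load * 100:.0f}%'  # format specs carry over unchanged
assert 'LazyChoice(%r)' % label == f'LazyChoice({label!r})'  # %r maps onto the !r conversion
assert '{{:.{}g}}'.format(sig) == f'{{:.{sig}g}}'  # literal braces stay doubled when building a format string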
@@ -190,7 +188,7 @@ def find_data_node(inputs, uuid): data_node = find_data_node(inputs, uuid) if data_node is None: - logger.warning('failed to load Node<{}> specified in the `local_copy_list`'.format(uuid)) + logger.warning(f'failed to load Node<{uuid}> specified in the `local_copy_list`') else: dirname = os.path.dirname(target) if dirname: @@ -203,7 +201,7 @@ def find_data_node(inputs, uuid): # In a dry_run, the working directory is the raw input folder, which will already contain these resources if not dry_run: for filename in folder.get_content_list(): - logger.debug('[submission of calculation {}] copying file/folder {}...'.format(node.pk, filename)) + logger.debug(f'[submission of calculation {node.pk}] copying file/folder {filename}...') transport.put(folder.get_abs_path(filename), filename) for (remote_computer_uuid, remote_abs_path, dest_rel_path) in remote_copy_list: @@ -244,8 +242,7 @@ def find_data_node(inputs, uuid): raise else: raise IOError( - 'It is not possible to create a symlink between two different machines for ' - 'calculation {}'.format(node.pk) + f'It is not possible to create a symlink between two different machines for calculation {node.pk}' ) else: @@ -335,8 +332,8 @@ def retrieve_calculation(calculation, transport, retrieved_temporary_folder): logger_extra = get_dblogger_extra(calculation) workdir = calculation.get_remote_workdir() - execlogger.debug('Retrieving calc {}'.format(calculation.pk), extra=logger_extra) - execlogger.debug('[retrieval of calc {}] chdir {}'.format(calculation.pk, workdir), extra=logger_extra) + execlogger.debug(f'Retrieving calc {calculation.pk}', extra=logger_extra) + execlogger.debug(f'[retrieval of calc {calculation.pk}] chdir {workdir}', extra=logger_extra) # If the calculation already has a `retrieved` folder, simply return. 
The retrieval was apparently already completed # before, which can happen if the daemon is restarted and it shuts down after retrieving but before getting the @@ -344,7 +341,7 @@ def retrieve_calculation(calculation, transport, retrieved_temporary_folder): link_label = calculation.link_label_retrieved if calculation.get_outgoing(FolderData, link_label_filter=link_label).first(): execlogger.warning( - 'CalcJobNode<{}> already has a `{}` output folder: skipping retrieval'.format(calculation.pk, link_label) + f'CalcJobNode<{calculation.pk}> already has a `{link_label}` output folder: skipping retrieval' ) return @@ -377,15 +374,13 @@ def retrieve_calculation(calculation, transport, retrieved_temporary_folder): # Log the files that were retrieved in the temporary folder for filename in os.listdir(retrieved_temporary_folder): execlogger.debug( - "[retrieval of calc {}] Retrieved temporary file or folder '{}'".format(calculation.pk, filename), + f"[retrieval of calc {calculation.pk}] Retrieved temporary file or folder '{filename}'", extra=logger_extra ) # Store everything execlogger.debug( - '[retrieval of calc {}] ' - 'Storing retrieved_files={}'.format(calculation.pk, retrieved_files.pk), - extra=logger_extra + f'[retrieval of calc {calculation.pk}] Storing retrieved_files={retrieved_files.pk}', extra=logger_extra ) retrieved_files.store() @@ -419,7 +414,7 @@ def kill_calculation(calculation, transport): # If the job is returned it is still running and the kill really failed, so we raise if job is not None and job.job_state != JobState.DONE: - raise exceptions.RemoteOperationError('scheduler.kill({}) was unsuccessful'.format(job_id)) + raise exceptions.RemoteOperationError(f'scheduler.kill({job_id}) was unsuccessful') else: execlogger.warning('scheduler.kill() failed but job<{%s}> no longer seems to be running regardless', job_id) @@ -451,10 +446,7 @@ def _retrieve_singlefiles(job, transport, folder, retrieve_file_list, logger_ext singlefiles.append(singlefile) for fil in singlefiles: - execlogger.debug( - '[retrieval of calc {}] ' - 'Storing retrieved_singlefile={}'.format(job.pk, fil.pk), extra=logger_extra - ) + execlogger.debug(f'[retrieval of calc {job.pk}] Storing retrieved_singlefile={fil.pk}', extra=logger_extra) fil.store() @@ -510,7 +502,5 @@ def retrieve_files_from_list(calculation, transport, folder, retrieve_list): local_names = [os.path.split(item)[1]] for rem, loc in zip(remote_names, local_names): - transport.logger.debug( - "[retrieval of calc {}] Trying to retrieve remote item '{}'".format(calculation.pk, rem) - ) + transport.logger.debug(f"[retrieval of calc {calculation.pk}] Trying to retrieve remote item '{rem}'") transport.get(rem, os.path.join(folder, loc), ignore_nonexisting=True) diff --git a/aiida/engine/persistence.py b/aiida/engine/persistence.py index 33823cd931..5aedd9d386 100644 --- a/aiida/engine/persistence.py +++ b/aiida/engine/persistence.py @@ -41,12 +41,12 @@ def load_object(self, identifier): try: module = importlib.import_module(module) except ImportError: - raise ImportError("module '{}' from identifier '{}' could not be loaded".format(module, identifier)) + raise ImportError(f"module '{module}' from identifier '{identifier}' could not be loaded") try: return getattr(module, name) except AttributeError: - raise ImportError("object '{}' from identifier '{}' could not be loaded".format(name, identifier)) + raise ImportError(f"object '{name}' from identifier '{identifier}' could not be loaded") def get_object_loader(): @@ -80,16 +80,12 @@ def 
save_checkpoint(self, process, tag=None): bundle = plumpy.Bundle(process, plumpy.LoadSaveContext(loader=get_object_loader())) except ImportError: # Couldn't create the bundle - raise plumpy.PersistenceError( - "Failed to create a bundle for '{}': {}".format(process, traceback.format_exc()) - ) + raise plumpy.PersistenceError(f"Failed to create a bundle for '{process}': {traceback.format_exc()}") try: process.node.set_checkpoint(serialize.serialize(bundle)) except Exception: - raise plumpy.PersistenceError( - "Failed to store a checkpoint for '{}': {}".format(process, traceback.format_exc()) - ) + raise plumpy.PersistenceError(f"Failed to store a checkpoint for '{process}': {traceback.format_exc()}") return bundle @@ -111,21 +107,17 @@ def load_checkpoint(self, pid, tag=None): try: calculation = load_node(pid) except (MultipleObjectsError, NotExistent): - raise plumpy.PersistenceError( - 'Failed to load the node for process<{}>: {}'.format(pid, traceback.format_exc()) - ) + raise plumpy.PersistenceError(f'Failed to load the node for process<{pid}>: {traceback.format_exc()}') checkpoint = calculation.checkpoint if checkpoint is None: - raise plumpy.PersistenceError('Calculation<{}> does not have a saved checkpoint'.format(calculation.pk)) + raise plumpy.PersistenceError(f'Calculation<{calculation.pk}> does not have a saved checkpoint') try: bundle = serialize.deserialize(checkpoint) except Exception: - raise plumpy.PersistenceError( - 'Failed to load the checkpoint for process<{}>: {}'.format(pid, traceback.format_exc()) - ) + raise plumpy.PersistenceError(f'Failed to load the checkpoint for process<{pid}>: {traceback.format_exc()}') return bundle diff --git a/aiida/engine/processes/builder.py b/aiida/engine/processes/builder.py index b5e9f2c6c3..ce5d4cf5e4 100644 --- a/aiida/engine/processes/builder.py +++ b/aiida/engine/processes/builder.py @@ -81,12 +81,12 @@ def __setattr__(self, attr, value): port = self._port_namespace[attr] except KeyError: if not self._port_namespace.dynamic: - raise AttributeError('Unknown builder parameter: {}'.format(attr)) + raise AttributeError(f'Unknown builder parameter: {attr}') else: value = port.serialize(value) validation_error = port.validate(value) if validation_error: - raise ValueError('invalid attribute value {}'.format(validation_error.message)) + raise ValueError(f'invalid attribute value {validation_error.message}') self._data[attr] = value @@ -125,7 +125,7 @@ def _update(self, *args, **kwds): :type kwds: dict """ if len(args) > 1: - raise TypeError('update expected at most 1 arguments, got %d' % len(args)) + raise TypeError(f'update expected at most 1 arguments, got {int(len(args))}') if args: for key, value in args[0].items(): diff --git a/aiida/engine/processes/calcjobs/calcjob.py b/aiida/engine/processes/calcjobs/calcjob.py index 39dd934868..2c7c640af9 100644 --- a/aiida/engine/processes/calcjobs/calcjob.py +++ b/aiida/engine/processes/calcjobs/calcjob.py @@ -53,10 +53,10 @@ def validate_calc_job(inputs, ctx): # pylint: disable=too-many-return-statement return 'no computer has been specified in `metadata.computer` nor via `code`.' 
if computer_from_code and not computer_from_code.is_stored: - return 'the Computer<{}> is not stored'.format(computer_from_code) + return f'the Computer<{computer_from_code}> is not stored' if computer_from_metadata and not computer_from_metadata.is_stored: - return 'the Computer<{}> is not stored'.format(computer_from_metadata) + return f'the Computer<{computer_from_metadata}> is not stored' if computer_from_code and computer_from_metadata and computer_from_code.uuid != computer_from_metadata.uuid: return ( @@ -87,7 +87,7 @@ def validate_calc_job(inputs, ctx): # pylint: disable=too-many-return-statement try: scheduler.validate_resources(**resources) except ValueError as exception: - return 'input `metadata.options.resources` is not valid for the `{}` scheduler: {}'.format(scheduler, exception) + return f'input `metadata.options.resources` is not valid for the `{scheduler}` scheduler: {exception}' def validate_parser(parser_name, _): @@ -101,7 +101,7 @@ def validate_parser(parser_name, _): try: ParserFactory(parser_name) except exceptions.EntryPointError as exception: - return 'invalid parser specified: {}'.format(exception) + return f'invalid parser specified: {exception}' class CalcJob(Process): @@ -137,7 +137,7 @@ def define(cls, spec: CalcJobProcessSpec): help='When set to `True` will prepare the calculation job for submission but not actually launch it.') spec.input('metadata.computer', valid_type=orm.Computer, required=False, help='When using a "local" code, set the computer on which the calculation should be run.') - spec.input_namespace('{}.{}'.format(spec.metadata_key, spec.options_key), required=False) + spec.input_namespace(f'{spec.metadata_key}.{spec.options_key}', required=False) spec.input('metadata.options.input_filename', valid_type=str, required=False, help='Filename to which the input for the code that is to be run is written.') spec.input('metadata.options.output_filename', valid_type=str, required=False, @@ -303,8 +303,8 @@ def parse(self, retrieved_temporary_folder=None): # If an exit code is returned by the scheduler output parser, we log it and set it on the node. This will # allow the actual `Parser` implementation, if defined in the inputs, to inspect it and decide to keep it, # or override it with a more specific exit code, if applicable. 
- args = (exit_code_scheduler.status, exit_code_scheduler.message) - self.logger.warning('scheduler parser returned exit code<{}>: {}'.format(*args)) + msg = f'scheduler parser returned exit code<{exit_code_scheduler.status}>: {exit_code_scheduler.message}' + self.logger.warning(msg) self.node.set_exit_status(exit_code_scheduler.status) self.node.set_exit_message(exit_code_scheduler.message) @@ -315,8 +315,8 @@ def parse(self, retrieved_temporary_folder=None): shutil.rmtree(retrieved_temporary_folder, ignore_errors=True) if exit_code_retrieved is not None and exit_code_retrieved.status > 0: - args = (exit_code_retrieved.status, exit_code_retrieved.message) - self.logger.warning('output parser returned exit code<{}>: {}'.format(*args)) + msg = f'output parser returned exit code<{exit_code_retrieved.status}>: {exit_code_retrieved.message}' + self.logger.warning(msg) # The final exit code is that of the scheduler, unless the output parser returned one if exit_code_retrieved is not None: @@ -348,13 +348,13 @@ def parse_scheduler_output(self, retrieved): scheduler_stderr = retrieved.get_object_content(filename_stderr) except FileNotFoundError: scheduler_stderr = None - self.logger.warning('could not parse scheduler output: the `{}` file is missing'.format(filename_stderr)) + self.logger.warning(f'could not parse scheduler output: the `{filename_stderr}` file is missing') try: scheduler_stdout = retrieved.get_object_content(filename_stdout) except FileNotFoundError: scheduler_stdout = None - self.logger.warning('could not parse scheduler output: the `{}` file is missing'.format(filename_stdout)) + self.logger.warning(f'could not parse scheduler output: the `{filename_stdout}` file is missing') # Only attempt to call the scheduler parser if all three resources of information are available if any(entry is None for entry in [detailed_job_info, scheduler_stderr, scheduler_stdout]): @@ -363,10 +363,10 @@ def parse_scheduler_output(self, retrieved): try: exit_code = scheduler.parse_output(detailed_job_info, scheduler_stdout, scheduler_stderr) except exceptions.FeatureNotAvailable: - self.logger.info('`{}` does not implement scheduler output parsing'.format(scheduler.__class__.__name__)) + self.logger.info(f'`{scheduler.__class__.__name__}` does not implement scheduler output parsing') return except Exception as exception: # pylint: disable=broad-except - self.logger.error('the `parse_output` method of the scheduler excepted: {}'.format(exception)) + self.logger.error(f'the `parse_output` method of the scheduler excepted: {exception}') return if exit_code is not None and not isinstance(exit_code, ExitCode): @@ -394,7 +394,7 @@ def parse_retrieved_output(self, retrieved_temporary_folder=None): try: self.out(link_label, node) except ValueError as exception: - self.logger.error('invalid value {} specified with label {}: {}'.format(node, link_label, exception)) + self.logger.error(f'invalid value {node} specified with label {link_label}: {exception}') exit_code = self.exit_codes.ERROR_INVALID_OUTPUT # pylint: disable=no-member break @@ -452,7 +452,7 @@ def presubmit(self, folder): job_tmpl.rerunnable = False job_tmpl.job_environment = {} # 'email', 'email_on_started', 'email_on_terminated', - job_tmpl.job_name = 'aiida-{}'.format(self.node.pk) + job_tmpl.job_name = f'aiida-{self.node.pk}' job_tmpl.sched_output_path = self.options.scheduler_stdout if self.options.scheduler_stderr == self.options.scheduler_stdout: job_tmpl.sched_join_files = True @@ -613,15 +613,13 @@ def presubmit(self, folder): try: 
validate_list_of_string_tuples(local_copy_list, tuple_length=3) except ValidationError as exc: - raise PluginInternalError('[presubmission of calc {}] ' - 'local_copy_list format problem: {}'.format(this_pk, exc)) + raise PluginInternalError(f'[presubmission of calc {this_pk}] local_copy_list format problem: {exc}') remote_copy_list = calc_info.remote_copy_list try: validate_list_of_string_tuples(remote_copy_list, tuple_length=3) except ValidationError as exc: - raise PluginInternalError('[presubmission of calc {}] ' - 'remote_copy_list format problem: {}'.format(this_pk, exc)) + raise PluginInternalError(f'[presubmission of calc {this_pk}] remote_copy_list format problem: {exc}') for (remote_computer_uuid, _, dest_rel_path) in remote_copy_list: try: diff --git a/aiida/engine/processes/calcjobs/manager.py b/aiida/engine/processes/calcjobs/manager.py index 5f40eae886..c6a2adfc96 100644 --- a/aiida/engine/processes/calcjobs/manager.py +++ b/aiida/engine/processes/calcjobs/manager.py @@ -108,7 +108,7 @@ def _get_jobs_from_scheduler(self): # Update the last update time and clear the jobs cache self._last_updated = time.time() jobs_cache = {} - self.logger.info('AuthInfo<{}>: successfully retrieved status of active jobs'.format(self._authinfo.pk)) + self.logger.info(f'AuthInfo<{self._authinfo.pk}>: successfully retrieved status of active jobs') for job_id, job_info in scheduler_response.items(): jobs_cache[job_id] = job_info diff --git a/aiida/engine/processes/calcjobs/tasks.py b/aiida/engine/processes/calcjobs/tasks.py index c0958e02d5..0de3d8a8b7 100644 --- a/aiida/engine/processes/calcjobs/tasks.py +++ b/aiida/engine/processes/calcjobs/tasks.py @@ -60,7 +60,7 @@ def task_upload_job(process, transport_queue, cancellable): node = process.node if node.get_state() == CalcJobState.SUBMITTING: - logger.warning('CalcJob<{}> already marked as SUBMITTING, skipping task_update_job'.format(node.pk)) + logger.warning(f'CalcJob<{node.pk}> already marked as SUBMITTING, skipping task_update_job') raise Return initial_interval = TRANSPORT_TASK_RETRY_INITIAL_INTERVAL @@ -85,7 +85,7 @@ def do_upload(): raise Return try: - logger.info('scheduled request to upload CalcJob<{}>'.format(node.pk)) + logger.info(f'scheduled request to upload CalcJob<{node.pk}>') ignore_exceptions = (plumpy.CancelledError, PreSubmitException) result = yield exponential_backoff_retry( do_upload, initial_interval, max_attempts, logger=node.logger, ignore_exceptions=ignore_exceptions @@ -95,10 +95,10 @@ def do_upload(): except plumpy.CancelledError: pass except Exception: - logger.warning('uploading CalcJob<{}> failed'.format(node.pk)) - raise TransportTaskException('upload_calculation failed {} times consecutively'.format(max_attempts)) + logger.warning(f'uploading CalcJob<{node.pk}> failed') + raise TransportTaskException(f'upload_calculation failed {max_attempts} times consecutively') else: - logger.info('uploading CalcJob<{}> successful'.format(node.pk)) + logger.info(f'uploading CalcJob<{node.pk}> successful') node.set_state(CalcJobState.SUBMITTING) raise Return(result) @@ -121,7 +121,7 @@ def task_submit_job(node, transport_queue, cancellable): """ if node.get_state() == CalcJobState.WITHSCHEDULER: assert node.get_job_id() is not None, 'job is WITHSCHEDULER, however, it does not have a job id' - logger.warning('CalcJob<{}> already marked as WITHSCHEDULER, skipping task_submit_job'.format(node.pk)) + logger.warning(f'CalcJob<{node.pk}> already marked as WITHSCHEDULER, skipping task_submit_job') raise Return(node.get_job_id()) 
initial_interval = TRANSPORT_TASK_RETRY_INITIAL_INTERVAL @@ -136,17 +136,17 @@ def do_submit(): raise Return(execmanager.submit_calculation(node, transport)) try: - logger.info('scheduled request to submit CalcJob<{}>'.format(node.pk)) + logger.info(f'scheduled request to submit CalcJob<{node.pk}>') result = yield exponential_backoff_retry( do_submit, initial_interval, max_attempts, logger=node.logger, ignore_exceptions=plumpy.Interruption ) except plumpy.Interruption: pass except Exception: - logger.warning('submitting CalcJob<{}> failed'.format(node.pk)) - raise TransportTaskException('submit_calculation failed {} times consecutively'.format(max_attempts)) + logger.warning(f'submitting CalcJob<{node.pk}> failed') + raise TransportTaskException(f'submit_calculation failed {max_attempts} times consecutively') else: - logger.info('submitting CalcJob<{}> successful'.format(node.pk)) + logger.info(f'submitting CalcJob<{node.pk}> successful') node.set_state(CalcJobState.WITHSCHEDULER) raise Return(result) @@ -169,7 +169,7 @@ def task_update_job(node, job_manager, cancellable): :raises: Return containing True if the tasks was successfully completed, False otherwise """ if node.get_state() == CalcJobState.RETRIEVING: - logger.warning('CalcJob<{}> already marked as RETRIEVING, skipping task_update_job'.format(node.pk)) + logger.warning(f'CalcJob<{node.pk}> already marked as RETRIEVING, skipping task_update_job') raise Return(True) initial_interval = TRANSPORT_TASK_RETRY_INITIAL_INTERVAL @@ -196,17 +196,17 @@ def do_update(): raise Return(job_done) try: - logger.info('scheduled request to update CalcJob<{}>'.format(node.pk)) + logger.info(f'scheduled request to update CalcJob<{node.pk}>') job_done = yield exponential_backoff_retry( do_update, initial_interval, max_attempts, logger=node.logger, ignore_exceptions=plumpy.Interruption ) except plumpy.Interruption: raise except Exception: - logger.warning('updating CalcJob<{}> failed'.format(node.pk)) - raise TransportTaskException('update_calculation failed {} times consecutively'.format(max_attempts)) + logger.warning(f'updating CalcJob<{node.pk}> failed') + raise TransportTaskException(f'update_calculation failed {max_attempts} times consecutively') else: - logger.info('updating CalcJob<{}> successful'.format(node.pk)) + logger.info(f'updating CalcJob<{node.pk}> successful') if job_done: node.set_state(CalcJobState.RETRIEVING) @@ -230,7 +230,7 @@ def task_retrieve_job(node, transport_queue, retrieved_temporary_folder, cancell :raises: TransportTaskException if after the maximum number of retries the transport task still excepted """ if node.get_state() == CalcJobState.PARSING: - logger.warning('CalcJob<{}> already marked as PARSING, skipping task_retrieve_job'.format(node.pk)) + logger.warning(f'CalcJob<{node.pk}> already marked as PARSING, skipping task_retrieve_job') raise Return initial_interval = TRANSPORT_TASK_RETRY_INITIAL_INTERVAL @@ -252,7 +252,7 @@ def do_retrieve(): try: detailed_job_info = scheduler.get_detailed_job_info(node.get_job_id()) except FeatureNotAvailable: - logger.info('detailed job info not available for scheduler of CalcJob<{}>'.format(node.pk)) + logger.info(f'detailed job info not available for scheduler of CalcJob<{node.pk}>') node.set_detailed_job_info(None) else: node.set_detailed_job_info(detailed_job_info) @@ -260,18 +260,18 @@ def do_retrieve(): raise Return(execmanager.retrieve_calculation(node, transport, retrieved_temporary_folder)) try: - logger.info('scheduled request to retrieve CalcJob<{}>'.format(node.pk)) + 
logger.info(f'scheduled request to retrieve CalcJob<{node.pk}>') yield exponential_backoff_retry( do_retrieve, initial_interval, max_attempts, logger=node.logger, ignore_exceptions=plumpy.Interruption ) except plumpy.Interruption: raise except Exception: - logger.warning('retrieving CalcJob<{}> failed'.format(node.pk)) - raise TransportTaskException('retrieve_calculation failed {} times consecutively'.format(max_attempts)) + logger.warning(f'retrieving CalcJob<{node.pk}> failed') + raise TransportTaskException(f'retrieve_calculation failed {max_attempts} times consecutively') else: node.set_state(CalcJobState.PARSING) - logger.info('retrieving CalcJob<{}> successful'.format(node.pk)) + logger.info(f'retrieving CalcJob<{node.pk}> successful') raise Return @@ -295,7 +295,7 @@ def task_kill_job(node, transport_queue, cancellable): max_attempts = TRANSPORT_TASK_MAXIMUM_ATTEMTPS if node.get_state() in [CalcJobState.UPLOADING, CalcJobState.SUBMITTING]: - logger.warning('CalcJob<{}> killed, it was in the {} state'.format(node.pk, node.get_state())) + logger.warning(f'CalcJob<{node.pk}> killed, it was in the {node.get_state()} state') raise Return(True) authinfo = node.computer.get_authinfo(node.user) @@ -307,15 +307,15 @@ def do_kill(): raise Return(execmanager.kill_calculation(node, transport)) try: - logger.info('scheduled request to kill CalcJob<{}>'.format(node.pk)) + logger.info(f'scheduled request to kill CalcJob<{node.pk}>') result = yield exponential_backoff_retry(do_kill, initial_interval, max_attempts, logger=node.logger) except plumpy.Interruption: raise except Exception: - logger.warning('killing CalcJob<{}> failed'.format(node.pk)) - raise TransportTaskException('kill_calculation failed {} times consecutively'.format(max_attempts)) + logger.warning(f'killing CalcJob<{node.pk}> failed') + raise TransportTaskException(f'kill_calculation failed {max_attempts} times consecutively') else: - logger.info('killing CalcJob<{}> successful'.format(node.pk)) + logger.info(f'killing CalcJob<{node.pk}> successful') node.set_scheduler_state(JobState.DONE) raise Return(result) @@ -344,7 +344,7 @@ def execute(self): transport_queue = self.process.runner.transport command = self.data - process_status = 'Waiting for transport task: {}'.format(command) + process_status = f'Waiting for transport task: {command}' try: @@ -364,7 +364,7 @@ def execute(self): while not job_done: scheduler_state = node.get_scheduler_state() scheduler_state_string = scheduler_state.name if scheduler_state else 'UNKNOWN' - process_status = 'Monitoring scheduler: job state {}'.format(scheduler_state_string) + process_status = f'Monitoring scheduler: job state {scheduler_state_string}' node.set_process_status(process_status) job_done = yield self._launch_task(task_update_job, node, self.process.runner.job_manager) @@ -381,7 +381,7 @@ def execute(self): raise RuntimeError('Unknown waiting command') except TransportTaskException as exception: - raise plumpy.PauseInterruption('Pausing after failed transport task: {}'.format(exception)) + raise plumpy.PauseInterruption(f'Pausing after failed transport task: {exception}') except plumpy.KillInterruption: yield self._launch_task(task_kill_job, node, transport_queue) self._killing.set_result(True) @@ -390,7 +390,7 @@ def execute(self): node.set_process_status(None) raise except (plumpy.Interruption, plumpy.CancelledError): - node.set_process_status('Transport task {} was interrupted'.format(command)) + node.set_process_status(f'Transport task {command} was interrupted') raise finally: # 
If we were trying to kill but we didn't deal with it, make sure it's set here diff --git a/aiida/engine/processes/functions.py b/aiida/engine/processes/functions.py index 8c5ea9bcca..ff1f7dc6ad 100644 --- a/aiida/engine/processes/functions.py +++ b/aiida/engine/processes/functions.py @@ -126,7 +126,7 @@ def run_get_node(*args, **kwargs): # If any kwargs remain, the spec should be dynamic, so we raise if it isn't if kwargs and not process_class.spec().inputs.dynamic: - raise ValueError('{} does not support these kwargs: {}'.format(function.__name__, kwargs.keys())) + raise ValueError(f'{function.__name__} does not support these kwargs: {kwargs.keys()}') process = process_class(inputs=inputs, runner=runner) @@ -303,7 +303,7 @@ def validate_inputs(cls, *args, **kwargs): # pylint: disable=unused-argument # the input link to be completely lost. if cls.spec().inputs.dynamic and nargs > nparameters: name = cls._func.__name__ - raise TypeError('{}() takes {} positional arguments but {} were given'.format(name, nparameters, nargs)) + raise TypeError(f'{name}() takes {nparameters} positional arguments but {nargs} were given') @classmethod def create_inputs(cls, *args, **kwargs): diff --git a/aiida/engine/processes/futures.py b/aiida/engine/processes/futures.py index 3d4c9fc1e2..e98f25c64f 100644 --- a/aiida/engine/processes/futures.py +++ b/aiida/engine/processes/futures.py @@ -51,7 +51,7 @@ def __init__(self, pk, loop=None, poll_interval=None, communicator=None): if self._communicator is not None: broadcast_filter = kiwipy.BroadcastFilter(lambda *args, **kwargs: self.set_result(node), sender=pk) for state in [ProcessState.FINISHED, ProcessState.KILLED, ProcessState.EXCEPTED]: - broadcast_filter.add_subject_filter('state_changed.*.{}'.format(state.value)) + broadcast_filter.add_subject_filter(f'state_changed.*.{state.value}') self._broadcast_identifier = self._communicator.add_broadcast_subscriber(broadcast_filter) # Start polling diff --git a/aiida/engine/processes/ports.py b/aiida/engine/processes/ports.py index 1340ba9b0d..1613d2169d 100644 --- a/aiida/engine/processes/ports.py +++ b/aiida/engine/processes/ports.py @@ -119,7 +119,7 @@ def get_description(self): :returns: a dictionary of the stringified InputPort attributes """ description = super().get_description() - description['non_db'] = '{}'.format(self.non_db) + description['non_db'] = f'{self.non_db}' return description @@ -186,14 +186,14 @@ def validate_port_name(port_name): try: validate_link_label(port_name) except ValueError as exception: - raise ValueError('invalid port name `{}`: {}'.format(port_name, exception)) + raise ValueError(f'invalid port name `{port_name}`: {exception}') # Following regexes will match all groups of consecutive underscores where each group will be of the form # `('___', '_')`, where the first element is the matched group of consecutive underscores. consecutive_underscores = [match[0] for match in re.findall(r'((_)\2+)', port_name)] if any([len(entry) > PORT_NAME_MAX_CONSECUTIVE_UNDERSCORES for entry in consecutive_underscores]): - raise ValueError('invalid port name `{}`: more than two consecutive underscores'.format(port_name)) + raise ValueError(f'invalid port name `{port_name}`: more than two consecutive underscores') def serialize(self, mapping, breadcrumbs=()): """Serialize the given mapping onto this `Portnamespace`. 
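A brief illustration of the consecutive-underscore check in `validate_port_name` above: the regex captures every run of two or more underscores, and the name is rejected when any run is longer than the allowed maximum. This is a minimal, self-contained sketch that assumes the maximum is two, as the "more than two consecutive underscores" error message implies:

import re

PORT_NAME_MAX_CONSECUTIVE_UNDERSCORES = 2  # assumed here; implied by the error message above

def has_too_many_consecutive_underscores(port_name):
    # re.findall returns tuples like ('___', '_'); the first element is the whole run of underscores.
    runs = [match[0] for match in re.findall(r'((_)\2+)', port_name)]
    return any(len(run) > PORT_NAME_MAX_CONSECUTIVE_UNDERSCORES for run in runs)

assert not has_too_many_consecutive_underscores('some__port')  # two in a row: accepted
assert has_too_many_consecutive_underscores('some___port')     # three in a row: rejected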
@@ -213,7 +213,7 @@ def serialize(self, mapping, breadcrumbs=()): if not isinstance(mapping, collections.Mapping): port = breadcrumbs_to_port(breadcrumbs) - raise TypeError('port namespace `{}` received `{}` instead of a dictionary'.format(port, type(mapping))) + raise TypeError(f'port namespace `{port}` received `{type(mapping)}` instead of a dictionary') result = {} diff --git a/aiida/engine/processes/process.py b/aiida/engine/processes/process.py index 7c98663333..ae8cf25c7a 100644 --- a/aiida/engine/processes/process.py +++ b/aiida/engine/processes/process.py @@ -59,13 +59,13 @@ def define(cls, spec): # yapf: disable super().define(spec) spec.input_namespace(spec.metadata_key, required=False, non_db=True) - spec.input('{}.store_provenance'.format(spec.metadata_key), valid_type=bool, default=True, + spec.input(f'{spec.metadata_key}.store_provenance', valid_type=bool, default=True, help='If set to `False` provenance will not be stored in the database.') - spec.input('{}.description'.format(spec.metadata_key), valid_type=str, required=False, + spec.input(f'{spec.metadata_key}.description', valid_type=str, required=False, help='Description to set on the process node.') - spec.input('{}.label'.format(spec.metadata_key), valid_type=str, required=False, + spec.input(f'{spec.metadata_key}.label', valid_type=str, required=False, help='Label to set on the process node.') - spec.input('{}.call_link_label'.format(spec.metadata_key), valid_type=str, default='CALL', + spec.input(f'{spec.metadata_key}.call_link_label', valid_type=str, default='CALL', help='The label to use for the `CALL` link if the process is called by another process.') spec.exit_code(1, 'ERROR_UNSPECIFIED', message='The process has failed with an unspecified error.') spec.exit_code(2, 'ERROR_LEGACY_FAILURE', message='The process failed with legacy failure mode.') @@ -245,7 +245,7 @@ def load_instance_state(self, saved_state, load_context): else: self._pid = self._create_and_setup_db_record() - self.node.logger.info('Loaded process<{}> from saved state'.format(self.node.pk)) + self.node.logger.info(f'Loaded process<{self.node.pk}> from saved state') def kill(self, msg=None): """ @@ -256,7 +256,7 @@ def kill(self, msg=None): :rtype: bool """ - self.node.logger.info('Request to kill Process<{}>'.format(self.node.pk)) + self.node.logger.info(f'Request to kill Process<{self.node.pk}>') had_been_terminated = self.has_terminated() @@ -267,7 +267,7 @@ def kill(self, msg=None): killing = [] for child in self.node.called: try: - result = self.runner.controller.kill_process(child.pk, 'Killed by parent<{}>'.format(self.node.pk)) + result = self.runner.controller.kill_process(child.pk, f'Killed by parent<{self.node.pk}>') if isinstance(result, plumpy.Future): killing.append(result) except ConnectionClosed: @@ -426,7 +426,7 @@ def on_output_emitting(self, output_port, value): # Note that `PortNamespaces` should be able to receive non `Data` types such as a normal dictionary if isinstance(output_port, OutputPort) and not isinstance(value, orm.Data): - raise TypeError('Processes can only return `orm.Data` instances as output, got {}'.format(value.__class__)) + raise TypeError(f'Processes can only return `orm.Data` instances as output, got {value.__class__}') def set_status(self, status): """ @@ -489,7 +489,7 @@ def build_process_type(cls): # If no entry point was found, default to fully qualified path name if process_type is None: - return '{}.{}'.format(class_module, class_name) + return f'{class_module}.{class_name}' return process_type @@ 
-507,7 +507,7 @@ def report(self, msg, *args, **kwargs): :param kwargs: kwargs to pass to the log call :type kwargs: dict """ - message = '[{}|{}|{}]: {}'.format(self.node.pk, self.__class__.__name__, inspect.stack()[1][3], msg) + message = f'[{self.node.pk}|{self.__class__.__name__}|{inspect.stack()[1][3]}]: {msg}' self.logger.log(LOG_LEVEL_REPORT, message, *args, **kwargs) def _create_and_setup_db_record(self): @@ -525,7 +525,7 @@ def _create_and_setup_db_record(self): if self.node.is_finished_ok: self._state = ProcessState.FINISHED for entry in self.node.get_outgoing(link_type=LinkType.RETURN): - if entry.link_label.endswith('_{pk}'.format(pk=entry.node.pk)): + if entry.link_label.endswith(f'_{entry.node.pk}'): continue self.out(entry.link_label, entry.node) # This is needed for CalcJob. In that case, the outputs are @@ -650,7 +650,7 @@ def _setup_metadata(self): for option_name, option_value in metadata.items(): self.node.set_option(option_name, option_value) else: - raise RuntimeError('unsupported metadata key: {}'.format(name)) + raise RuntimeError(f'unsupported metadata key: {name}') def _setup_inputs(self): """Create the links between the input nodes and the ProcessNode that represents this process.""" @@ -809,7 +809,7 @@ def exposed_inputs(self, process_class, namespace=None, agglomerate=True): try: port_namespace = self.spec().inputs.get_port(sub_namespace) except KeyError: - raise ValueError('this process does not contain the "{}" input namespace'.format(sub_namespace)) + raise ValueError(f'this process does not contain the "{sub_namespace}" input namespace') # Get the list of ports that were exposed for the given Process class in the current sub_namespace exposed_inputs_list = self.spec()._exposed_inputs[sub_namespace][process_class] # pylint: disable=protected-access @@ -920,7 +920,7 @@ def get_query_string_from_process_type_string(process_type_string): # pylint: d :rtype: str """ if ':' in process_type_string: - return process_type_string + '.' + return f'{process_type_string}.' path = process_type_string.rsplit('.', 2)[0] - return path + '.' + return f'{path}.' 
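For context on `get_query_string_from_process_type_string`, converted in the hunk just above: it returns a prefix ending in a period that can be used to match process type strings. Entry point strings (those containing ':') are kept whole, while fully qualified path names have their last two dot-separated components dropped. A small sketch of that behaviour; the helper body mirrors the converted code, but the example inputs are purely illustrative:

def get_query_string_from_process_type_string(process_type_string):
    # Entry point strings are used as-is, with a trailing period appended.
    if ':' in process_type_string:
        return f'{process_type_string}.'
    # Fully qualified path names drop their last two components before appending the period.
    path = process_type_string.rsplit('.', 2)[0]
    return f'{path}.'

print(get_query_string_from_process_type_string('aiida.calculations:arithmetic.add'))     # hypothetical input
# aiida.calculations:arithmetic.add.
print(get_query_string_from_process_type_string('some_package.some_module.SomeProcess'))  # hypothetical input
# some_package.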
diff --git a/aiida/engine/processes/process_spec.py b/aiida/engine/processes/process_spec.py index a3344ed8c3..334a8e0794 100644 --- a/aiida/engine/processes/process_spec.py +++ b/aiida/engine/processes/process_spec.py @@ -60,19 +60,19 @@ def exit_code(self, status, label, message, invalidates_cache=False): with this exit code will not be considered for caching """ if not isinstance(status, int): - raise TypeError('status should be of integer type and not of {}'.format(type(status))) + raise TypeError(f'status should be of integer type and not of {type(status)}') if status < 0: - raise ValueError('status should be a positive integer, received {}'.format(type(status))) + raise ValueError(f'status should be a positive integer, received {type(status)}') if not isinstance(label, str): - raise TypeError('label should be of str type and not of {}'.format(type(label))) + raise TypeError(f'label should be of str type and not of {type(label)}') if not isinstance(message, str): - raise TypeError('message should be of str type and not of {}'.format(type(message))) + raise TypeError(f'message should be of str type and not of {type(message)}') if not isinstance(invalidates_cache, bool): - raise TypeError('invalidates_cache should be of type bool and not of {}'.format(type(invalidates_cache))) + raise TypeError(f'invalidates_cache should be of type bool and not of {type(invalidates_cache)}') self._exit_codes[label] = ExitCode(status, message, invalidates_cache=invalidates_cache) @@ -95,16 +95,14 @@ def default_output_node(self, port_name): from aiida.orm import Dict if port_name not in self.outputs: - raise ValueError('{} is not a registered output port'.format(port_name)) + raise ValueError(f'{port_name} is not a registered output port') valid_type_port = self.outputs[port_name].valid_type valid_type_required = Dict if valid_type_port is not valid_type_required: raise ValueError( - 'the valid type of a default output has to be a {} but it is {}'.format( - valid_type_port, valid_type_required - ) + f'the valid type of a default output has to be a {valid_type_port} but it is {valid_type_required}' ) self._default_output_node = port_name diff --git a/aiida/engine/processes/workchains/awaitable.py b/aiida/engine/processes/workchains/awaitable.py index e02f28d7b2..fee97be995 100644 --- a/aiida/engine/processes/workchains/awaitable.py +++ b/aiida/engine/processes/workchains/awaitable.py @@ -52,7 +52,7 @@ def construct_awaitable(target): if isinstance(target, ProcessNode): awaitable_target = AwaitableTarget.PROCESS else: - raise ValueError('invalid class for awaitable target: {}'.format(type(target))) + raise ValueError(f'invalid class for awaitable target: {type(target)}') awaitable = Awaitable( **{ diff --git a/aiida/engine/processes/workchains/restart.py b/aiida/engine/processes/workchains/restart.py index 35b65022dd..7bf5d368bd 100644 --- a/aiida/engine/processes/workchains/restart.py +++ b/aiida/engine/processes/workchains/restart.py @@ -40,13 +40,13 @@ def validate_handler_overrides(process_class, handler_overrides, ctx): # pylint for handler, override in handler_overrides.get_dict().items(): if not isinstance(handler, str): - return 'The key `{}` is not a string.'.format(handler) + return f'The key `{handler}` is not a string.' 
if not process_class.is_process_handler(handler): - return 'The key `{}` is not a process handler of {}'.format(handler, process_class) + return f'The key `{handler}` is not a process handler of {process_class}' if not isinstance(override, bool): - return 'The value of key `{}` is not a boolean.'.format(handler) + return f'The value of key `{handler}` is not a boolean.' class BaseRestartWorkChain(WorkChain): @@ -154,7 +154,7 @@ def run_process(self): raise AttributeError('no process input dictionary was defined in `self.ctx.inputs`') # Set the `CALL` link label - unwrapped_inputs.setdefault('metadata', {})['call_link_label'] = 'iteration_{:02d}'.format(self.ctx.iteration) + unwrapped_inputs.setdefault('metadata', {})['call_link_label'] = f'iteration_{self.ctx.iteration:02d}' inputs = self._wrap_bare_dict_inputs(self._process_class.spec().inputs, unwrapped_inputs) node = self.submit(self._process_class, **inputs) @@ -166,7 +166,7 @@ def run_process(self): considered_handlers.append([]) self.node.set_extra(self._considered_handlers_extra, considered_handlers) - self.report('launching {}<{}> iteration #{}'.format(self.ctx.process_name, node.pk, self.ctx.iteration)) + self.report(f'launching {self.ctx.process_name}<{node.pk}> iteration #{self.ctx.iteration}') return ToContext(children=append_(node)) @@ -215,7 +215,7 @@ def inspect_process(self): # pylint: disable=too-many-branches if report is not None and not isinstance(report, ProcessHandlerReport): name = handler.__name__ - raise RuntimeError('handler `{}` returned a value that is not a ProcessHandlerReport'.format(name)) + raise RuntimeError(f'handler `{name}` returned a value that is not a ProcessHandlerReport') # If an actual report was returned, save it so it is not overridden by next handler returning `None` if report: @@ -272,7 +272,7 @@ def results(self): self.inputs.max_iterations.value, self.ctx.process_name, node.pk)) return self.exit_codes.ERROR_MAXIMUM_ITERATIONS_EXCEEDED # pylint: disable=no-member - self.report('work chain completed after {} iterations'.format(self.ctx.iteration)) + self.report(f'work chain completed after {self.ctx.iteration} iterations') for name, port in self.spec().outputs.items(): @@ -280,8 +280,7 @@ def results(self): output = node.get_outgoing(link_label_filter=name).one().node except ValueError: if port.required: - self.report("required output '{}' was not an output of {}<{}>".format( - name, self.ctx.process_name, node.pk)) + self.report(f"required output '{name}' was not an output of {self.ctx.process_name}<{node.pk}>") else: self.out(name, output) @@ -332,7 +331,7 @@ def on_terminated(self): pass if cleaned_calcs: - self.report('cleaned remote folders of calculations: {}'.format(' '.join(cleaned_calcs))) + self.report(f"cleaned remote folders of calculations: {' '.join(cleaned_calcs)}") def _wrap_bare_dict_inputs(self, port_namespace, inputs): """Wrap bare dictionaries in `inputs` in a `Dict` node if dictated by the corresponding inputs portnamespace. 
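Two details of the conversions in this file may be worth spelling out: format specifications such as `{:02d}` carry over into f-strings unchanged, and when the interpolated expression itself contains quotes the surrounding quotes have to differ (on Python versions before 3.12), which is why some converted lines switch from single to double quotes. A minimal, self-contained sketch:

iteration = 3
cleaned_calcs = ['1234', '5678']

# The format spec moves after the expression but is otherwise identical to the str.format() form.
assert 'iteration_{:02d}'.format(iteration) == f'iteration_{iteration:02d}' == 'iteration_03'

# Before Python 3.12 the expression may not reuse the f-string's own quote character,
# hence the double quotes around the converted report message.
assert f"cleaned remote folders of calculations: {' '.join(cleaned_calcs)}" == 'cleaned remote folders of calculations: 1234 5678'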
diff --git a/aiida/engine/processes/workchains/utils.py b/aiida/engine/processes/workchains/utils.py index 94fd826e61..53dceb3a60 100644 --- a/aiida/engine/processes/workchains/utils.py +++ b/aiida/engine/processes/workchains/utils.py @@ -87,7 +87,7 @@ def process_handler(wrapped=None, *, priority=0, exit_codes=None, enabled=True): handler_args = getfullargspec(wrapped)[0] if len(handler_args) != 2: - raise TypeError('process handler `{}` has invalid signature: should be (self, node)'.format(wrapped.__name__)) + raise TypeError(f'process handler `{wrapped.__name__}` has invalid signature: should be (self, node)') wrapped.decorator = process_handler wrapped.priority = priority diff --git a/aiida/engine/processes/workchains/workchain.py b/aiida/engine/processes/workchains/workchain.py index 5ed5f1aa13..f0c1f96541 100644 --- a/aiida/engine/processes/workchains/workchain.py +++ b/aiida/engine/processes/workchains/workchain.py @@ -133,7 +133,7 @@ def insert_awaitable(self, awaitable): elif awaitable.action == AwaitableAction.APPEND: self.ctx.setdefault(awaitable.key, []).append(awaitable) else: - assert 'Unknown awaitable action: {}'.format(awaitable.action) + assert f'Unknown awaitable action: {awaitable.action}' self._update_process_status() @@ -156,9 +156,9 @@ def resolve_awaitable(self, awaitable, value): container[index] = value break else: - assert 'Awaitable `{} was not found in `ctx.{}`'.format(awaitable.pk, awaitable.pk) + assert f'Awaitable `{awaitable.pk} was not found in `ctx.{awaitable.pk}`' else: - assert 'Unknown awaitable action: {}'.format(awaitable.action) + assert f'Unknown awaitable action: {awaitable.action}' awaitable.resolved = True @@ -178,7 +178,7 @@ def to_context(self, **kwargs): def _update_process_status(self): """Set the process status with a message accounting the current sub processes that we are waiting for.""" if self._awaitables: - status = 'Waiting for child processes: {}'.format(', '.join([str(_.pk) for _ in self._awaitables])) + status = f"Waiting for child processes: {', '.join([str(_.pk) for _ in self._awaitables])}" self.node.set_process_status(status) else: self.node.set_process_status(None) @@ -273,7 +273,7 @@ def action_awaitables(self): callback = functools.partial(self._run_task, self.on_process_finished, awaitable) self.runner.call_on_process_finish(awaitable.pk, callback) else: - assert "invalid awaitable target '{}'".format(awaitable.target) + assert f"invalid awaitable target '{awaitable.target}'" def on_process_finished(self, awaitable): """Callback function called by the runner when the process instance identified by pk is completed. 
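One behavioural note on the `assert` statements touched in the hunks above: an assert on a non-empty string always passes, so lines such as `assert f'Unknown awaitable action: {awaitable.action}'` are effectively no-ops both before and after the conversion; the change is purely cosmetic there. A tiny illustration:

action = 'unknown-action'  # hypothetical value

# A non-empty string is truthy, so this assertion can never fail.
assert f'Unknown awaitable action: {action}'

# A branch that should actually fail would need an explicit raise instead, e.g.:
# raise AssertionError(f'Unknown awaitable action: {action}')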
@@ -288,7 +288,7 @@ def on_process_finished(self, awaitable): try: node = load_node(awaitable.pk) except (exceptions.MultipleObjectsError, exceptions.NotExistent): - raise ValueError('provided pk<{}> could not be resolved to a valid Node instance'.format(awaitable.pk)) + raise ValueError(f'provided pk<{awaitable.pk}> could not be resolved to a valid Node instance') if awaitable.outputs: value = {entry.link_label: entry.node for entry in node.get_outgoing()} diff --git a/aiida/engine/runners.py b/aiida/engine/runners.py index b5020967dc..a0c43ed6d2 100644 --- a/aiida/engine/runners.py +++ b/aiida/engine/runners.py @@ -305,7 +305,7 @@ def inline_callback(event, *args, **kwargs): # pylint: disable=unused-argument broadcast_filter = kiwipy.BroadcastFilter(functools.partial(inline_callback, event), sender=pk) for state in [ProcessState.FINISHED, ProcessState.KILLED, ProcessState.EXCEPTED]: - broadcast_filter.add_subject_filter('state_changed.*.{}'.format(state.value)) + broadcast_filter.add_subject_filter(f'state_changed.*.{state.value}') LOGGER.info('adding subscriber for broadcasts of %d', pk) self._communicator.add_broadcast_subscriber(broadcast_filter, subscriber_identifier) diff --git a/aiida/engine/utils.py b/aiida/engine/utils.py index 446e554b7f..823330dc82 100644 --- a/aiida/engine/utils.py +++ b/aiida/engine/utils.py @@ -52,7 +52,7 @@ def instantiate_process(runner, process, *args, **inputs): elif issubclass(process, Process): process_class = process else: - raise ValueError('invalid process {}, needs to be Process or ProcessBuilder'.format(type(process))) + raise ValueError(f'invalid process {type(process)}, needs to be Process or ProcessBuilder') process = process_class(runner=runner, inputs=inputs) @@ -88,7 +88,7 @@ def with_interrupt(self, yieldable): wait_iterator = gen.WaitIterator(yieldable, self) result = yield wait_iterator.next() # pylint: disable=stop-iteration-return if not wait_iterator.current_index == 0: - raise RuntimeError("This interruptible future had it's result set unexpectedly to {}".format(result)) + raise RuntimeError(f"This interruptible future had it's result set unexpectedly to {result}") result = yield [yieldable, self][0] raise gen.Return(result) @@ -251,7 +251,7 @@ def set_process_state_change_timestamp(process): # This will only occur for testing, as in general users cannot launch plain Process classes return else: - raise ValueError('unsupported calculation node type {}'.format(type(process.node))) + raise ValueError(f'unsupported calculation node type {type(process.node)}') key = PROCESS_STATE_CHANGE_KEY.format(process_type) description = PROCESS_STATE_CHANGE_DESCRIPTION.format(process_type) @@ -261,7 +261,7 @@ def set_process_state_change_timestamp(process): manager = get_manager() manager.get_backend_manager().get_settings_manager().set(key, value, description) except UniquenessError as exception: - process.logger.debug('could not update the {} setting because of a UniquenessError: {}'.format(key, exception)) + process.logger.debug(f'could not update the {key} setting because of a UniquenessError: {exception}') def get_process_state_change_timestamp(process_type=None): @@ -282,7 +282,7 @@ def get_process_state_change_timestamp(process_type=None): valid_process_types = ['calculation', 'work'] if process_type is not None and process_type not in valid_process_types: - raise ValueError('invalid value for process_type, valid values are {}'.format(', '.join(valid_process_types))) + raise ValueError(f"invalid value for process_type, valid values are {', 
'.join(valid_process_types)}") if process_type is None: process_types = valid_process_types diff --git a/aiida/manage/backup/backup_base.py b/aiida/manage/backup/backup_base.py index 13007be262..a643699a4c 100644 --- a/aiida/manage/backup/backup_base.py +++ b/aiida/manage/backup/backup_base.py @@ -92,7 +92,7 @@ def _read_backup_info_from_file(self, backup_info_file_name): backup_variables = json.load(backup_info_file) except ValueError: self._logger.error('Could not parse file %s', backup_info_file_name) - raise BackupError('Could not parse file ' + backup_info_file_name) + raise BackupError(f'Could not parse file {backup_info_file_name}') self._read_backup_info_from_dict(backup_variables) @@ -370,7 +370,7 @@ def _extract_parent_dirs(given_rel_dir, parent_dir_set): temp_path = '' for sub_path in sub_paths: - temp_path += sub_path + '/' + temp_path += f'{sub_path}/' parent_dir_set.add(temp_path) return parent_dir_set diff --git a/aiida/manage/backup/backup_setup.py b/aiida/manage/backup/backup_setup.py index 0a041aa93c..264e6b1ac2 100644 --- a/aiida/manage/backup/backup_setup.py +++ b/aiida/manage/backup/backup_setup.py @@ -35,7 +35,7 @@ class BackupSetup: def __init__(self): # The backup directory names - self._conf_backup_folder_rel = 'backup_{}'.format(configuration.PROFILE.name) + self._conf_backup_folder_rel = f'backup_{configuration.PROFILE.name}' self._file_backup_folder_rel = 'backup_dest' # The backup configuration file (& template) names @@ -99,7 +99,7 @@ def create_dir(self, question, dir_path): final_path = utils.query_string(question, dir_path) if not os.path.exists(final_path): - if utils.query_yes_no("The path {} doesn't exist. Should it be created?".format(final_path), 'yes'): + if utils.query_yes_no(f"The path {final_path} doesn't exist. Should it be created?", 'yes'): try: os.makedirs(final_path) except OSError: @@ -205,34 +205,34 @@ def run(self): # If the backup parameters are configured manually else: sys.stdout.write( - 'Please rename the file {} '.format(self._backup_info_tmpl_filename) + - 'found in {} to '.format(conf_backup_folder_abs) + '{} and '.format(self._backup_info_filename) + + f'Please rename the file {self._backup_info_tmpl_filename} ' + + f'found in {conf_backup_folder_abs} to ' + f'{self._backup_info_filename} and ' + 'change the backup parameters accordingly.\n' ) sys.stdout.write( 'Please adapt the startup script accordingly to point to the ' + 'correct backup configuration file. 
For the moment, it points ' + - 'to {}\n'.format(os.path.join(conf_backup_folder_abs, self._backup_info_filename)) + f'to {os.path.join(conf_backup_folder_abs, self._backup_info_filename)}\n' ) script_content = \ -u"""#!/usr/bin/env python +f"""#!/usr/bin/env python import logging from aiida.manage.configuration import load_profile -load_profile(profile='{}') +load_profile(profile='{configuration.PROFILE.name}') from aiida.manage.backup.backup_general import Backup # Create the backup instance -backup_inst = Backup(backup_info_filepath="{}", additional_back_time_mins=2) +backup_inst = Backup(backup_info_filepath="{final_conf_filepath}", additional_back_time_mins=2) # Define the backup logging level backup_inst._logger.setLevel(logging.INFO) # Start the backup backup_inst.run() -""".format(configuration.PROFILE.name, final_conf_filepath) +""" # Script full path script_path = os.path.join(conf_backup_folder_abs, self._script_filename) diff --git a/aiida/manage/backup/backup_utils.py b/aiida/manage/backup/backup_utils.py index e74ac907ba..b00b1c7320 100644 --- a/aiida/manage/backup/backup_utils.py +++ b/aiida/manage/backup/backup_utils.py @@ -48,12 +48,12 @@ def ask_question(question, reply_type, allow_none_as_answer=True): raise ValueError # If it is not parsable... except ValueError: - sys.stdout.write('The given value could not be parsed. Type expected: {}\n'.format(reply_type)) + sys.stdout.write(f'The given value could not be parsed. Type expected: {reply_type}\n') # If the timestamp could not have been parsed, # ask again the same question. continue - if query_yes_no('{} was parsed. Is it correct?'.format(final_answer), default='yes'): + if query_yes_no(f'{final_answer} was parsed. Is it correct?', default='yes'): break return final_answer @@ -76,7 +76,7 @@ def query_yes_no(question, default='yes'): elif default == 'no': prompt = ' [y/N] ' else: - raise ValueError("invalid default answer: '%s'" % default) + raise ValueError(f"invalid default answer: '{default}'") while True: choice = input(question + prompt).lower() @@ -109,7 +109,7 @@ def query_string(question, default): if default is None or not default: prompt = '' else: - prompt = ' [{}]'.format(default) + prompt = f' [{default}]' while True: reply = input(question + prompt) diff --git a/aiida/manage/caching.py b/aiida/manage/caching.py index 9b7f1d427d..6e81cf4bff 100644 --- a/aiida/manage/caching.py +++ b/aiida/manage/caching.py @@ -65,7 +65,7 @@ def _get_config(config_file): # Validate configuration for key in config: if key not in DEFAULT_CONFIG: - raise exceptions.ConfigurationError("Configuration error: Invalid key '{}' in cache_config.yml".format(key)) + raise exceptions.ConfigurationError(f"Configuration error: Invalid key '{key}' in cache_config.yml") # Add defaults where key is either completely missing or specifies no values in which case it will be `None` for key, default_config in DEFAULT_CONFIG.items(): @@ -262,13 +262,12 @@ def _validate_identifier_pattern(*, identifier): :param identifier: Process type string, or a pattern with '*' wildcard that matches it. 
:type identifier: str """ - common_error_msg = "Invalid identifier pattern '{}': ".format(identifier) + common_error_msg = f"Invalid identifier pattern '{identifier}': " assert ENTRY_POINT_STRING_SEPARATOR not in '.*' # The logic of this function depends on this # Check if it can be an entry point string if identifier.count(ENTRY_POINT_STRING_SEPARATOR) > 1: raise ValueError( - common_error_msg + - "Can contain at most one entry point string separator '{}'".format(ENTRY_POINT_STRING_SEPARATOR) + f"{common_error_msg}Can contain at most one entry point string separator '{ENTRY_POINT_STRING_SEPARATOR}'" ) # If there is one separator, it must be an entry point string. # Check if the left hand side is a matching pattern @@ -302,10 +301,10 @@ def _validate_identifier_pattern(*, identifier): if not identifier_part.replace('*', 'a').isidentifier(): raise ValueError( common_error_msg + - "Identifier part '{}' can not match a fully qualified Python name.".format(identifier_part) + f"Identifier part '{identifier_part}' can not match a fully qualified Python name." ) else: if not identifier_part.isidentifier(): - raise ValueError(common_error_msg + "'{}' is not a valid Python identifier.".format(identifier_part)) + raise ValueError(f"{common_error_msg}'{identifier_part}' is not a valid Python identifier.") if keyword.iskeyword(identifier_part): - raise ValueError(common_error_msg + "'{}' is a reserved Python keyword.".format(identifier_part)) + raise ValueError(f"{common_error_msg}'{identifier_part}' is a reserved Python keyword.") diff --git a/aiida/manage/configuration/__init__.py b/aiida/manage/configuration/__init__.py index 2b5618f394..942b9ac5c2 100644 --- a/aiida/manage/configuration/__init__.py +++ b/aiida/manage/configuration/__init__.py @@ -94,12 +94,12 @@ def load_config(create=False): filepath = get_config_path() if not os.path.isfile(filepath) and not create: - raise exceptions.MissingConfigurationError('configuration file {} does not exist'.format(filepath)) + raise exceptions.MissingConfigurationError(f'configuration file {filepath} does not exist') try: config = Config.from_file(filepath) except ValueError: - raise exceptions.ConfigurationError('configuration file {} contains invalid JSON'.format(filepath)) + raise exceptions.ConfigurationError(f'configuration file {filepath} contains invalid JSON') return config diff --git a/aiida/manage/configuration/config.py b/aiida/manage/configuration/config.py index 9961ba57dd..c3faff006c 100644 --- a/aiida/manage/configuration/config.py +++ b/aiida/manage/configuration/config.py @@ -53,9 +53,9 @@ def from_file(cls, filepath): else: # If the configuration file needs to be migrated first create a specific backup so it can easily be reverted if config_needs_migrating(config): - echo.echo_warning('current configuration file `{}` is outdated and will be migrated'.format(filepath)) + echo.echo_warning(f'current configuration file `{filepath}` is outdated and will be migrated') filepath_backup = cls._backup(filepath) - echo.echo_warning('original backed up to `{}`'.format(filepath_backup)) + echo.echo_warning(f'original backed up to `{filepath_backup}`') config = Config(filepath, check_and_migrate_config(config)) config.store() @@ -75,7 +75,7 @@ def _backup(cls, filepath): # Keep generating a new backup filename based on the current time until it does not exist while not filepath_backup or os.path.isfile(filepath_backup): - filepath_backup = '{}.{}'.format(filepath, timezone.now().strftime('%Y%m%d-%H%M%S.%f')) + filepath_backup = 
f"{filepath}.{timezone.now().strftime('%Y%m%d-%H%M%S.%f')}" shutil.copy(filepath, filepath_backup) @@ -105,7 +105,7 @@ def __init__(self, filepath, config): if unknown_keys: keys = ', '.join(unknown_keys) - self.handle_invalid('encountered unknown keys [{}] in `{}` which have been removed'.format(keys, filepath)) + self.handle_invalid(f'encountered unknown keys [{keys}] in `{filepath}` which have been removed') try: self._options = config[self.KEY_OPTIONS] @@ -119,7 +119,7 @@ def __init__(self, filepath, config): for name, config_profile in config.get(self.KEY_PROFILES, {}).items(): if Profile.contains_unknown_keys(config_profile): - self.handle_invalid('encountered unknown keys in profile `{}` which have been removed'.format(name)) + self.handle_invalid(f'encountered unknown keys in profile `{name}` which have been removed') self._profiles[name] = Profile(name, config_profile, from_config=True) def __eq__(self, other): @@ -140,7 +140,7 @@ def handle_invalid(self, message): from aiida.cmdline.utils import echo filepath_backup = self._backup(self.filepath) echo.echo_warning(message) - echo.echo_warning('backup of the original config file written to: `{}`'.format(filepath_backup)) + echo.echo_warning(f'backup of the original config file written to: `{filepath_backup}`') @property def dictionary(self): @@ -235,7 +235,7 @@ def validate_profile(self, name): from aiida.common import exceptions if name not in self.profile_names: - raise exceptions.ProfileConfigurationError('profile `{}` does not exist'.format(name)) + raise exceptions.ProfileConfigurationError(f'profile `{name}` does not exist') def get_profile(self, name=None): """Return the profile for the given name or the default one if not specified. @@ -247,7 +247,7 @@ def get_profile(self, name=None): if not name and not self.default_profile_name: raise exceptions.ProfileConfigurationError( - 'no default profile defined: {}\n{}'.format(self._default_profile, self.dictionary) + f'no default profile defined: {self._default_profile}\n{self.dictionary}' ) if not name: diff --git a/aiida/manage/configuration/options.py b/aiida/manage/configuration/options.py index e388ce162a..176bb5c713 100644 --- a/aiida/manage/configuration/options.py +++ b/aiida/manage/configuration/options.py @@ -198,7 +198,7 @@ def get_option(option_name): try: option = Option(option_name, **CONFIG_OPTIONS[option_name]) except KeyError: - raise ValueError('the option {} does not exist'.format(option_name)) + raise ValueError(f'the option {option_name} does not exist') else: return option @@ -229,7 +229,7 @@ def parse_option(option_name, option_value): elif option_value.strip().lower() in ['1', 'true', 't']: value = True else: - raise ValueError('option {} expects a boolean value'.format(option.name)) + raise ValueError(f'option {option.name} expects a boolean value') else: value = bool(option_value) elif option.valid_type == 'string': @@ -239,7 +239,7 @@ def parse_option(option_name, option_value): elif option.valid_type == 'list_of_str': value = option_value.split() else: - raise NotImplementedError('Type string {} not implemented yet'.format(option.valid_type)) + raise NotImplementedError(f'Type string {option.valid_type} not implemented yet') if option.valid_values is not None: if value not in option.valid_values: diff --git a/aiida/manage/configuration/profile.py b/aiida/manage/configuration/profile.py index 4cba1f0c7a..e8276d3f06 100644 --- a/aiida/manage/configuration/profile.py +++ b/aiida/manage/configuration/profile.py @@ -83,7 +83,7 @@ def 
contains_unknown_keys(cls, dictionary): def __init__(self, name, attributes, from_config=False): if not isinstance(attributes, collections.abc.Mapping): - raise TypeError('attributes should be a mapping but is {}'.format(type(attributes))) + raise TypeError(f'attributes should be a mapping but is {type(attributes)}') self._name = name self._attributes = {} @@ -95,9 +95,7 @@ def __init__(self, name, attributes, from_config=False): except KeyError: from aiida.cmdline.utils import echo echo.echo_warning( - 'removed unsupported key `{}` with value `{}` from profile `{}`'.format( - internal_key, value, name - ) + f'removed unsupported key `{internal_key}` with value `{value}` from profile `{name}`' ) continue setattr(self, internal_key, value) @@ -359,7 +357,7 @@ def configure_repository(self): except OSError as exception: if exception.errno != errno.EEXIST: raise exceptions.ConfigurationError( - 'could not create the configured repository `{}`: {}'.format(self.repository_path, str(exception)) + f'could not create the configured repository `{self.repository_path}`: {str(exception)}' ) @property diff --git a/aiida/manage/configuration/settings.py b/aiida/manage/configuration/settings.py index 2614b0670b..54d1ab5037 100644 --- a/aiida/manage/configuration/settings.py +++ b/aiida/manage/configuration/settings.py @@ -60,7 +60,7 @@ def create_directory(path): os.makedirs(path) except OSError as exception: if exception.errno != errno.EEXIST: - raise ConfigurationError("could not create the '{}' configuration directory".format(path)) + raise ConfigurationError(f"could not create the '{path}' configuration directory") def set_configuration_directory(): diff --git a/aiida/manage/configuration/setup.py b/aiida/manage/configuration/setup.py index b84ce7d71b..ff04aa4630 100644 --- a/aiida/manage/configuration/setup.py +++ b/aiida/manage/configuration/setup.py @@ -29,18 +29,18 @@ def delete_repository(profile, non_interactive=True): repo_path = os.path.expanduser(repo_path) if not os.path.isabs(repo_path): - echo.echo_info("Associated file repository '{}' does not exist.".format(repo_path)) + echo.echo_info(f"Associated file repository '{repo_path}' does not exist.") return if not os.path.isdir(repo_path): - echo.echo_info("Associated file repository '{}' is not a directory.".format(repo_path)) + echo.echo_info(f"Associated file repository '{repo_path}' is not a directory.") return if non_interactive or click.confirm( "Delete associated file repository '{}'?\n" 'WARNING: All data will be lost.'.format(repo_path) ): - echo.echo_info("Deleting directory '{}'.".format(repo_path)) + echo.echo_info(f"Deleting directory '{repo_path}'.") import shutil shutil.rmtree(repo_path) @@ -68,12 +68,12 @@ def delete_db(profile, non_interactive=True, verbose=False): database_name = profile.database_name if not postgres.db_exists(database_name): - echo.echo_info("Associated database '{}' does not exist.".format(database_name)) + echo.echo_info(f"Associated database '{database_name}' does not exist.") elif non_interactive or click.confirm( "Delete associated database '{}'?\n" 'WARNING: All data will be lost.'.format(database_name) ): - echo.echo_info("Deleting database '{}'.".format(database_name)) + echo.echo_info(f"Deleting database '{database_name}'.") postgres.drop_db(database_name) user = profile.database_username @@ -81,14 +81,14 @@ def delete_db(profile, non_interactive=True, verbose=False): users = [available_profile.database_username for available_profile in config.profiles] if not postgres.dbuser_exists(user): - 
echo.echo_info("Associated database user '{}' does not exist.".format(user)) + echo.echo_info(f"Associated database user '{user}' does not exist.") elif users.count(user) > 1: echo.echo_info( "Associated database user '{}' is used by other profiles " 'and will not be deleted.'.format(user) ) - elif non_interactive or click.confirm("Delete database user '{}'?".format(user)): - echo.echo_info("Deleting user '{}'.".format(user)) + elif non_interactive or click.confirm(f"Delete database user '{user}'?"): + echo.echo_info(f"Deleting user '{user}'.") postgres.drop_dbuser(user) @@ -107,7 +107,7 @@ def delete_from_config(profile, non_interactive=True): "Delete configuration for profile '{}'?\n" 'WARNING: Permanently removes profile from the list of AiiDA profiles.'.format(profile.name) ): - echo.echo_info("Deleting configuration for profile '{}'.".format(profile.name)) + echo.echo_info(f"Deleting configuration for profile '{profile.name}'.") config = get_config() config.remove_profile(profile.name) config.store() diff --git a/aiida/manage/database/delete/nodes.py b/aiida/manage/database/delete/nodes.py index b0eb7d1023..47860be84b 100644 --- a/aiida/manage/database/delete/nodes.py +++ b/aiida/manage/database/delete/nodes.py @@ -60,7 +60,7 @@ def delete_nodes(pks, verbosity=0, dry_run=False, force=False, **kwargs): try: load_node(pk) except exceptions.NotExistent: - echo.echo_warning('warning: node with pk<{}> does not exist, skipping'.format(pk)) + echo.echo_warning(f'warning: node with pk<{pk}> does not exist, skipping') else: starting_pks.append(pk) @@ -84,13 +84,13 @@ def delete_nodes(pks, verbosity=0, dry_run=False, force=False, **kwargs): 'in': pks_set_to_delete }}, project=('uuid', 'id', 'node_type', 'label') ) - echo.echo('The nodes I {} delete:'.format('would' if dry_run else 'will')) + echo.echo(f"The nodes I {'would' if dry_run else 'will'} delete:") for uuid, pk, type_string, label in builder.iterall(): try: short_type_string = type_string.split('.')[-2] except IndexError: short_type_string = type_string - echo.echo(' {} {} {} {}'.format(uuid, pk, short_type_string, label)) + echo.echo(f' {uuid} {pk} {short_type_string} {label}') if dry_run: if verbosity > 0: @@ -101,7 +101,7 @@ def delete_nodes(pks, verbosity=0, dry_run=False, force=False, **kwargs): if force: pass else: - echo.echo_warning('YOU ARE ABOUT TO DELETE {} NODES! THIS CANNOT BE UNDONE!'.format(len(pks_set_to_delete))) + echo.echo_warning(f'YOU ARE ABOUT TO DELETE {len(pks_set_to_delete)} NODES! 
THIS CANNOT BE UNDONE!') if not click.confirm('Shall I continue?'): echo.echo('Exiting without deleting') return diff --git a/aiida/manage/database/integrity/__init__.py b/aiida/manage/database/integrity/__init__.py index 2c53a6d803..796a9a7213 100644 --- a/aiida/manage/database/integrity/__init__.py +++ b/aiida/manage/database/integrity/__init__.py @@ -45,8 +45,8 @@ def write_database_integrity_violation(results, headers, reason_message, action_ ) ) - handle.write('# {}\n'.format(datetime.utcnow().isoformat())) - handle.write('# Violation reason: {}\n'.format(reason_message)) - handle.write('# Performed action: {}\n'.format(action_message)) + handle.write(f'# {datetime.utcnow().isoformat()}\n') + handle.write(f'# Violation reason: {reason_message}\n') + handle.write(f'# Performed action: {action_message}\n') handle.write('\n') handle.write(tabulate(results, headers)) diff --git a/aiida/manage/database/integrity/duplicate_uuid.py b/aiida/manage/database/integrity/duplicate_uuid.py index a17b2edde3..de581b2341 100644 --- a/aiida/manage/database/integrity/duplicate_uuid.py +++ b/aiida/manage/database/integrity/duplicate_uuid.py @@ -74,7 +74,7 @@ def deduplicate_uuids(table=None, dry_run=True): from aiida.orm.utils._repository import Repository if table not in TABLES_UUID_DEDUPLICATION: - raise ValueError('invalid table {}: choose from {}'.format(table, ', '.join(TABLES_UUID_DEDUPLICATION))) + raise ValueError(f"invalid table {table}: choose from {', '.join(TABLES_UUID_DEDUPLICATION)}") mapping = defaultdict(list) @@ -99,9 +99,9 @@ def deduplicate_uuids(table=None, dry_run=True): mapping_new_uuid[pk] = uuid_new if dry_run: - messages.append('would update UUID of {} row<{}> from {} to {}'.format(table, pk, uuid_ref, uuid_new)) + messages.append(f'would update UUID of {table} row<{pk}> from {uuid_ref} to {uuid_new}') else: - messages.append('updated UUID of {} row<{}> from {} to {}'.format(table, pk, uuid_ref, uuid_new)) + messages.append(f'updated UUID of {table} row<{pk}> from {uuid_ref} to {uuid_new}') repo_ref = Repository(uuid_ref, True, 'path') repo_new = Repository(uuid_new, False, 'path') repo_new.put_object_from_tree(repo_ref._get_base_folder().abspath) # pylint: disable=protected-access diff --git a/aiida/manage/database/integrity/plugins.py b/aiida/manage/database/integrity/plugins.py index eb1b4726df..764a287e73 100644 --- a/aiida/manage/database/integrity/plugins.py +++ b/aiida/manage/database/integrity/plugins.py @@ -130,13 +130,9 @@ class of `JobCalculation`, would get `calculation.job.quantumespresso.pw.PwCalcu inferred_entry_point_name = '.'.join(plugin_parts) if inferred_entry_point_name in entry_point_names: - entry_point_string = '{entry_point_group}:{entry_point_name}'.format( - entry_point_group=entry_point_group, entry_point_name=inferred_entry_point_name - ) + entry_point_string = f'{entry_point_group}:{inferred_entry_point_name}' elif inferred_entry_point_name: - entry_point_string = '{plugin_name}.{plugin_class}'.format( - plugin_name=inferred_entry_point_name, plugin_class=plugin_class - ) + entry_point_string = f'{inferred_entry_point_name}.{plugin_class}' else: # If there is no inferred entry point name, i.e. 
there is no module name, use an empty string as fall back # This should only be the case for the type string `calculation.job.JobCalculation.` diff --git a/aiida/manage/database/integrity/sql/nodes.py b/aiida/manage/database/integrity/sql/nodes.py index 099c7bf098..8a520f987f 100644 --- a/aiida/manage/database/integrity/sql/nodes.py +++ b/aiida/manage/database/integrity/sql/nodes.py @@ -37,13 +37,13 @@ def format_type_string_regex(node_class): type_parts = type_string.split('.')[:-2] # 'nodes.process.calculation.' - type_string_regex = '{}.'.format('.'.join(type_parts)) + type_string_regex = f"{'.'.join(type_parts)}." return type_string_regex VALID_NODE_BASE_CLASSES = [Data, CalculationNode, WorkflowNode] -VALID_NODE_TYPE_STRING = '({})%'.format('|'.join([format_type_string_regex(cls) for cls in VALID_NODE_BASE_CLASSES])) +VALID_NODE_TYPE_STRING = f"({'|'.join([format_type_string_regex(cls) for cls in VALID_NODE_BASE_CLASSES])})%" SELECT_NODES_WITH_INVALID_TYPE = """ SELECT node.id, node.uuid, node.node_type diff --git a/aiida/manage/external/postgres.py b/aiida/manage/external/postgres.py index 5bd8c5c541..c4faff90c1 100644 --- a/aiida/manage/external/postgres.py +++ b/aiida/manage/external/postgres.py @@ -118,7 +118,7 @@ def check_dbuser(self, dbuser): """ create = True while create and self.dbuser_exists(dbuser): - echo.echo_info('Database user "{}" already exists!'.format(dbuser)) + echo.echo_info(f'Database user "{dbuser}" already exists!') if not click.confirm('Use it? '): dbuser = click.prompt('New database user name: ', type=str, default=dbuser) else: @@ -165,7 +165,7 @@ def check_db(self, dbname): """ create = True while create and self.db_exists(dbname): - echo.echo_info('database {} already exists!'.format(dbname)) + echo.echo_info(f'database {dbname} already exists!') if not click.confirm('Use it (make sure it is not used by another profile)?'): dbname = click.prompt('new name', type=str, default=dbname) else: @@ -216,8 +216,8 @@ def manual_setup_instructions(dbuser, dbname): 'Run the following commands as a UNIX user with access to PostgreSQL (Ubuntu: $ sudo su postgres):', '', '\t$ psql template1', - '\t==> ' + _CREATE_USER_COMMAND.format(dbuser, dbpass), - '\t==> ' + _CREATE_DB_COMMAND.format(dbname, dbuser), - '\t==> ' + _GRANT_PRIV_COMMAND.format(dbname, dbuser), + f' ==> {_CREATE_USER_COMMAND.format(dbuser, dbpass)}', + f' ==> {_CREATE_DB_COMMAND.format(dbname, dbuser)}', + f' ==> {_GRANT_PRIV_COMMAND.format(dbname, dbuser)}', ]) return instructions diff --git a/aiida/manage/external/rmq.py b/aiida/manage/external/rmq.py index 51ffe1272c..f8ea55ec3c 100644 --- a/aiida/manage/external/rmq.py +++ b/aiida/manage/external/rmq.py @@ -72,7 +72,7 @@ def get_rmq_url(protocol=None, username=None, password=None, host=None, port=Non invalid = set(kwargs.keys()).difference(BROKER_VALID_PARAMETERS) if invalid: - raise ValueError('invalid URL parameters specified in the keyword arguments: {}'.format(', '.join(invalid))) + raise ValueError(f"invalid URL parameters specified in the keyword arguments: {', '.join(invalid)}") if 'heartbeat' not in kwargs: kwargs['heartbeat'] = BROKER_DEFAULTS.heartbeat @@ -92,7 +92,7 @@ def get_rmq_url(protocol=None, username=None, password=None, host=None, port=Non # The virtual host is optional but if it is specified it needs to start with a forward slash. If the virtual host # itself contains forward slashes, they need to be encoded. 
if path and not path.startswith('/'): - path = '/' + path + path = f'/{path}' return urlunparse((scheme, netloc, path, parameters, query, fragment)) @@ -103,7 +103,7 @@ def get_launch_queue_name(prefix=None): :returns: launch queue name """ if prefix is not None: - return '{}.{}'.format(prefix, _LAUNCH_QUEUE) + return f'{prefix}.{_LAUNCH_QUEUE}' return _LAUNCH_QUEUE @@ -113,7 +113,7 @@ def get_message_exchange_name(prefix): :returns: message exchange name """ - return '{}.{}'.format(prefix, _MESSAGE_EXCHANGE) + return f'{prefix}.{_MESSAGE_EXCHANGE}' def get_task_exchange_name(prefix): @@ -121,7 +121,7 @@ def get_task_exchange_name(prefix): :returns: task exchange name """ - return '{}.{}'.format(prefix, _TASK_EXCHANGE) + return f'{prefix}.{_TASK_EXCHANGE}' def _store_inputs(inputs): diff --git a/aiida/manage/tests/__init__.py b/aiida/manage/tests/__init__.py index c5486326a0..c3a6822dc2 100644 --- a/aiida/manage/tests/__init__.py +++ b/aiida/manage/tests/__init__.py @@ -278,7 +278,7 @@ def profile_dictionary(self): 'broker_host': self.profile_info.get('broker_host'), 'broker_port': self.profile_info.get('broker_port'), 'broker_virtual_host': self.profile_info.get('broker_virtual_host'), - 'repository_uri': 'file://' + self.repo, + 'repository_uri': f'file://{self.repo}', } return dictionary @@ -290,7 +290,7 @@ def create_db_cluster(self): if self.pg_cluster is not None: raise TestManagerError( - 'Running temporary postgresql cluster detected.' + 'Use destroy_all() before creating a new cluster.' + 'Running temporary postgresql cluster detected.Use destroy_all() before creating a new cluster.' ) self.pg_cluster = PGTest(**self._pgtest) self.dbinfo.update(self.pg_cluster.dsn) @@ -367,7 +367,7 @@ def backend(self, backend): valid_backends = [BACKEND_DJANGO, BACKEND_SQLA] if backend not in valid_backends: - raise ValueError('invalid backend {}, must be one of {}'.format(backend, valid_backends)) + raise ValueError(f'invalid backend {backend}, must be one of {valid_backends}') self.profile_info['backend'] = backend @property @@ -485,7 +485,7 @@ def get_test_backend_name(): if backend_res in (BACKEND_DJANGO, BACKEND_SQLA): return backend_res - raise ValueError("Unknown backend '{}' read from AIIDA_TEST_BACKEND environment variable".format(backend_res)) + raise ValueError(f"Unknown backend '{backend_res}' read from AIIDA_TEST_BACKEND environment variable") def get_test_profile_name(): diff --git a/aiida/manage/tests/pytest_fixtures.py b/aiida/manage/tests/pytest_fixtures.py index 00a151e66a..2512546bc4 100644 --- a/aiida/manage/tests/pytest_fixtures.py +++ b/aiida/manage/tests/pytest_fixtures.py @@ -161,7 +161,7 @@ def get_code(entry_point, executable, computer=aiida_localhost, label=None, prep executable_path = shutil.which(executable) if not executable_path: - raise ValueError('The executable "{}" was not found in the $PATH.'.format(executable)) + raise ValueError(f'The executable "{executable}" was not found in the $PATH.') code = Code(input_plugin_name=entry_point, remote_computer_exec=[computer, executable_path]) code.label = label diff --git a/aiida/orm/authinfos.py b/aiida/orm/authinfos.py index d3bd980f66..ddb2203e32 100644 --- a/aiida/orm/authinfos.py +++ b/aiida/orm/authinfos.py @@ -50,9 +50,9 @@ def __init__(self, computer, user, backend=None): def __str__(self): if self.enabled: - return 'AuthInfo for {} on {}'.format(self.user.email, self.computer.label) + return f'AuthInfo for {self.user.email} on {self.computer.label}' - return 'AuthInfo for {} on {} 
[DISABLED]'.format(self.user.email, self.computer.label) + return f'AuthInfo for {self.user.email} on {self.computer.label} [DISABLED]' @property def enabled(self): @@ -143,8 +143,6 @@ def get_transport(self): try: transport_class = TransportFactory(transport_type) except exceptions.EntryPointError as exception: - raise exceptions.ConfigurationError( - 'transport type `{}` could not be loaded: {}'.format(transport_type, exception) - ) + raise exceptions.ConfigurationError(f'transport type `{transport_type}` could not be loaded: {exception}') return transport_class(machine=computer.hostname, **self.get_auth_params()) diff --git a/aiida/orm/autogroup.py b/aiida/orm/autogroup.py index 06e83185e3..0a9ba72358 100644 --- a/aiida/orm/autogroup.py +++ b/aiida/orm/autogroup.py @@ -47,7 +47,7 @@ def __init__(self): self._include = None now = timezone.now() - default_label_prefix = 'Verdi autogroup on ' + now.strftime('%Y-%m-%d %H:%M:%S') + default_label_prefix = f"Verdi autogroup on {now.strftime('%Y-%m-%d %H:%M:%S')}" self._group_label_prefix = default_label_prefix self._group_label = None # Actual group label, set by `get_or_create_group` @@ -61,12 +61,11 @@ def validate(strings): pieces = string.split(':') if len(pieces) != 2: raise exceptions.ValidationError( - "'{}' is not a valid include/exclude filter, must contain two parts split by a colon". - format(string) + f"'{string}' is not a valid include/exclude filter, must contain two parts split by a colon" ) if pieces[0] not in valid_prefixes: raise exceptions.ValidationError( - "'{}' has an invalid prefix, must be among: {}".format(string, sorted(valid_prefixes)) + f"'{string}' has an invalid prefix, must be among: {sorted(valid_prefixes)}" ) def get_exclude(self): @@ -238,7 +237,7 @@ def get_or_create_group(self): } }, { 'label': { - 'like': escape_for_sql_like(label_prefix + '_') + '%' + 'like': f"{escape_for_sql_like(f'{label_prefix}_')}%" } }] }, @@ -264,7 +263,7 @@ def get_or_create_group(self): while True: try: - label = label_prefix if counter == 0 else '{}_{}'.format(label_prefix, counter) + label = label_prefix if counter == 0 else f'{label_prefix}_{counter}' group = AutoGroup(label=label).store() self._group_label = group.label except exceptions.IntegrityError: diff --git a/aiida/orm/computers.py b/aiida/orm/computers.py index 3662edf71a..4e83ac05c4 100644 --- a/aiida/orm/computers.py +++ b/aiida/orm/computers.py @@ -126,10 +126,10 @@ def __init__( # pylint: disable=too-many-arguments self.set_workdir(workdir) def __repr__(self): - return '<{}: {}>'.format(self.__class__.__name__, str(self)) + return f'<{self.__class__.__name__}: {str(self)}>' def __str__(self): - return '{} ({}), pk: {}'.format(self.label, self.hostname, self.pk) + return f'{self.label} ({self.hostname}), pk: {self.pk}' @property def full_text_info(self): @@ -143,19 +143,19 @@ def full_text_info(self): """ warnings.warn('this property is deprecated', AiidaDeprecationWarning) # pylint: disable=no-member ret_lines = [] - ret_lines.append('Computer name: {}'.format(self.label)) - ret_lines.append(' * PK: {}'.format(self.pk)) - ret_lines.append(' * UUID: {}'.format(self.uuid)) - ret_lines.append(' * Description: {}'.format(self.description)) - ret_lines.append(' * Hostname: {}'.format(self.hostname)) - ret_lines.append(' * Transport type: {}'.format(self.transport_type)) - ret_lines.append(' * Scheduler type: {}'.format(self.scheduler_type)) - ret_lines.append(' * Work directory: {}'.format(self.get_workdir())) - ret_lines.append(' * Shebang: 
{}'.format(self.get_shebang())) - ret_lines.append(' * mpirun command: {}'.format(' '.join(self.get_mpirun_command()))) + ret_lines.append(f'Computer name: {self.label}') + ret_lines.append(f' * PK: {self.pk}') + ret_lines.append(f' * UUID: {self.uuid}') + ret_lines.append(f' * Description: {self.description}') + ret_lines.append(f' * Hostname: {self.hostname}') + ret_lines.append(f' * Transport type: {self.transport_type}') + ret_lines.append(f' * Scheduler type: {self.scheduler_type}') + ret_lines.append(f' * Work directory: {self.get_workdir()}') + ret_lines.append(f' * Shebang: {self.get_shebang()}') + ret_lines.append(f" * mpirun command: {' '.join(self.get_mpirun_command())}") def_cpus_machine = self.get_default_mpiprocs_per_machine() if def_cpus_machine is not None: - ret_lines.append(' * Default number of cpus per machine: {}'.format(def_cpus_machine)) + ret_lines.append(f' * Default number of cpus per machine: {def_cpus_machine}') # pylint: disable=fixme # TODO: Put back following line when we port Node to new backend system # ret_lines.append(" * Used by: {} nodes".format(len(self._dbcomputer.dbnodes.all()))) @@ -163,13 +163,13 @@ def full_text_info(self): ret_lines.append(' * prepend text:') if self.get_prepend_text().strip(): for line in self.get_prepend_text().split('\n'): - ret_lines.append(' {}'.format(line)) + ret_lines.append(f' {line}') else: ret_lines.append(' # No prepend text.') ret_lines.append(' * append text:') if self.get_append_text().strip(): for line in self.get_append_text().split('\n'): - ret_lines.append(' {}'.format(line)) + ret_lines.append(f' {line}') else: ret_lines.append(' # No append text.') @@ -245,9 +245,9 @@ def _workdir_validator(cls, workdir): try: convertedwd = workdir.format(username='test') except KeyError as exc: - raise exceptions.ValidationError('In workdir there is an unknown replacement field {}'.format(exc.args[0])) + raise exceptions.ValidationError(f'In workdir there is an unknown replacement field {exc.args[0]}') except ValueError as exc: - raise exceptions.ValidationError("Error in the string: '{}'".format(exc)) + raise exceptions.ValidationError(f"Error in the string: '{exc}'") if not os.path.isabs(convertedwd): raise exceptions.ValidationError('The workdir must be an absolute path') @@ -272,9 +272,9 @@ def _mpirun_command_validator(self, mpirun_cmd): for arg in mpirun_cmd: arg.format(**subst) except KeyError as exc: - raise exceptions.ValidationError('In workdir there is an unknown replacement field {}'.format(exc.args[0])) + raise exceptions.ValidationError(f'In workdir there is an unknown replacement field {exc.args[0]}') except ValueError as exc: - raise exceptions.ValidationError("Error in the string: '{}'".format(exc)) + raise exceptions.ValidationError(f"Error in the string: '{exc}'") def validate(self): """ @@ -446,7 +446,7 @@ def delete_property(self, name, raise_exception=True): self.metadata = olddata except KeyError: if raise_exception: - raise AttributeError("'{}' property not found".format(name)) + raise AttributeError(f"'{name}' property not found") def set_property(self, name, value): """ @@ -477,7 +477,7 @@ def get_property(self, name, *args): return olddata[name] except KeyError: if not args: - raise AttributeError("'{}' property not found".format(name)) + raise AttributeError(f"'{name}' property not found") return args[0] def get_prepend_text(self): @@ -570,9 +570,9 @@ def set_shebang(self, val): :param str val: A valid shebang line """ if not isinstance(val, str): - raise ValueError('{} is invalid. 
Input has to be a string'.format(val)) + raise ValueError(f'{val} is invalid. Input has to be a string') if not val.startswith('#!'): - raise ValueError('{} is invalid. A shebang line has to start with #!'.format(val)) + raise ValueError(f'{val} is invalid. A shebang line has to start with #!') metadata = self.metadata metadata['shebang'] = val self.metadata = metadata @@ -656,7 +656,7 @@ def get_transport_class(self): return TransportFactory(self.transport_type) except exceptions.EntryPointError as exception: raise exceptions.ConfigurationError( - 'No transport found for {} [type {}], message: {}'.format(self.label, self.transport_type, exception) + f'No transport found for {self.label} [type {self.transport_type}], message: {exception}' ) def get_scheduler(self): @@ -672,7 +672,7 @@ def get_scheduler(self): return scheduler_class() except exceptions.EntryPointError as exception: raise exceptions.ConfigurationError( - 'No scheduler found for {} [type {}], message: {}'.format(self.label, self.scheduler_type, exception) + f'No scheduler found for {self.label} [type {self.scheduler_type}], message: {exception}' ) def configure(self, user=None, **kwargs): @@ -692,11 +692,7 @@ def configure(self, user=None, **kwargs): if not set(kwargs.keys()).issubset(valid_keys): invalid_keys = [key for key in kwargs if key not in valid_keys] - raise ValueError( - '{transport}: received invalid authentication parameter(s) "{invalid}"'.format( - transport=transport_cls, invalid=invalid_keys - ) - ) + raise ValueError(f'{transport_cls}: received invalid authentication parameter(s) "{invalid_keys}"') try: authinfo = self.get_authinfo(user) diff --git a/aiida/orm/convert.py b/aiida/orm/convert.py index d6b577773b..8c3e0e40e9 100644 --- a/aiida/orm/convert.py +++ b/aiida/orm/convert.py @@ -18,9 +18,7 @@ @singledispatch def get_orm_entity(backend_entity): - raise TypeError( - 'No corresponding AiiDA ORM class exists for backend instance {}'.format(backend_entity.__class__.__name__) - ) + raise TypeError(f'No corresponding AiiDA ORM class exists for backend instance {backend_entity.__class__.__name__}') @get_orm_entity.register(Mapping) diff --git a/aiida/orm/groups.py b/aiida/orm/groups.py index 47846520ea..2313c62f0f 100644 --- a/aiida/orm/groups.py +++ b/aiida/orm/groups.py @@ -38,7 +38,7 @@ def load_group_class(type_string): try: group_class = load_entry_point('aiida.groups', type_string) except EntryPointError: - message = 'could not load entry point `{}`, falling back onto `Group` base class.'.format(type_string) + message = f'could not load entry point `{type_string}`, falling back onto `Group` base class.' warnings.warn(message) # pylint: disable=no-member group_class = Group @@ -57,7 +57,7 @@ def __new__(cls, name, bases, namespace, **kwargs): if entry_point_group is None or entry_point_group != 'aiida.groups': newcls._type_string = None - message = 'no registered entry point for `{}` so its instances will not be storable.'.format(name) + message = f'no registered entry point for `{name}` so its instances will not be storable.' 
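
The `aiida/orm/autogroup.py` hunk above nests one f-string inside another: `f"{escape_for_sql_like(f'{label_prefix}_')}%"`. This is legal as long as the inner literal uses the other quote character. A minimal sketch of the equivalence, using a stand-in for `escape_for_sql_like` since only the string assembly matters here:

    def escape_for_sql_like(value):  # stand-in for the real helper, for this illustration only
        return value.replace('%', r'\%').replace('_', r'\_')

    label_prefix = 'Verdi autogroup on 2020-01-01 12:00:00'  # example value
    old_style = escape_for_sql_like(label_prefix + '_') + '%'
    new_style = f"{escape_for_sql_like(f'{label_prefix}_')}%"
    assert old_style == new_style
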
warnings.warn(message) # pylint: disable=no-member else: newcls._type_string = entry_point.name # pylint: disable=protected-access @@ -162,13 +162,13 @@ def __init__(self, label=None, user=None, description='', type_string=None, back super().__init__(model) def __repr__(self): - return '<{}: {}>'.format(self.__class__.__name__, str(self)) + return f'<{self.__class__.__name__}: {str(self)}>' def __str__(self): if self.type_string: - return '"{}" [type {}], of user {}'.format(self.label, self.type_string, self.user.email) + return f'"{self.label}" [type {self.type_string}], of user {self.user.email}' - return '"{}" [user-defined], of user {}'.format(self.label, self.user.email) + return f'"{self.label}" [user-defined], of user {self.user.email}' def store(self): """Verify that the group is allowed to be stored, which is the case along as `type_string` is set.""" diff --git a/aiida/orm/implementation/django/authinfos.py b/aiida/orm/implementation/django/authinfos.py index 87e0ff6910..c8da8d5eb0 100644 --- a/aiida/orm/implementation/django/authinfos.py +++ b/aiida/orm/implementation/django/authinfos.py @@ -134,7 +134,7 @@ def delete(self, pk): try: DbAuthInfo.objects.get(pk=pk).delete() except ObjectDoesNotExist: - raise exceptions.NotExistent('AuthInfo<{}> does not exist'.format(pk)) + raise exceptions.NotExistent(f'AuthInfo<{pk}> does not exist') def get(self, computer, user): """Return an entry from the collection that is configured for the given computer and user @@ -151,12 +151,10 @@ def get(self, computer, user): try: authinfo = DbAuthInfo.objects.get(dbcomputer=computer.id, aiidauser=user.id) except ObjectDoesNotExist: - raise exceptions.NotExistent( - 'User<{}> has no configuration for Computer<{}>'.format(user.email, computer.name) - ) + raise exceptions.NotExistent(f'User<{user.email}> has no configuration for Computer<{computer.name}>') except MultipleObjectsReturned: raise exceptions.MultipleObjectsError( - 'User<{}> has multiple configurations for Computer<{}>'.format(user.email, computer.name) + f'User<{user.email}> has multiple configurations for Computer<{computer.name}>' ) else: return self.from_dbmodel(authinfo) diff --git a/aiida/orm/implementation/django/comments.py b/aiida/orm/implementation/django/comments.py index 8d1ceac867..abdcf798ab 100644 --- a/aiida/orm/implementation/django/comments.py +++ b/aiida/orm/implementation/django/comments.py @@ -51,11 +51,11 @@ def __init__(self, backend, node, user, content=None, ctime=None, mtime=None): } if ctime: - lang.type_check(ctime, datetime, 'the given ctime is of type {}'.format(type(ctime))) + lang.type_check(ctime, datetime, f'the given ctime is of type {type(ctime)}') arguments['ctime'] = ctime if mtime: - lang.type_check(mtime, datetime, 'the given mtime is of type {}'.format(type(mtime))) + lang.type_check(mtime, datetime, f'the given mtime is of type {type(mtime)}') arguments['mtime'] = mtime self._dbmodel = ModelWrapper(models.DbComment(**arguments), auto_flush=self._auto_flush) @@ -134,7 +134,7 @@ def delete(self, comment_id): try: models.DbComment.objects.get(id=comment_id).delete() except ObjectDoesNotExist: - raise exceptions.NotExistent("Comment with id '{}' not found".format(comment_id)) + raise exceptions.NotExistent(f"Comment with id '{comment_id}' not found") def delete_all(self): """ @@ -147,7 +147,7 @@ def delete_all(self): with transaction.atomic(): models.DbComment.objects.all().delete() except Exception as exc: - raise exceptions.IntegrityError('Could not delete all Comments. 
Full exception: {}'.format(exc)) + raise exceptions.IntegrityError(f'Could not delete all Comments. Full exception: {exc}') def delete_many(self, filters): """ diff --git a/aiida/orm/implementation/django/convert.py b/aiida/orm/implementation/django/convert.py index 2b99d115e3..12caeee63d 100644 --- a/aiida/orm/implementation/django/convert.py +++ b/aiida/orm/implementation/django/convert.py @@ -28,7 +28,7 @@ def get_backend_entity(dbmodel, backend): # pylint: disable=unused-argument :param dbmodel: the db model instance """ raise TypeError( - 'No corresponding AiiDA backend class exists for the DbModel instance {}'.format(dbmodel.__class__.__name__) + f'No corresponding AiiDA backend class exists for the DbModel instance {dbmodel.__class__.__name__}' ) diff --git a/aiida/orm/implementation/django/entities.py b/aiida/orm/implementation/django/entities.py index 4e17518edc..3faa363a3c 100644 --- a/aiida/orm/implementation/django/entities.py +++ b/aiida/orm/implementation/django/entities.py @@ -62,7 +62,7 @@ def get_dbmodel_attribute_name(cls, attr_name): if hasattr(cls.MODEL_CLASS, attr_name): return attr_name - raise ValueError("Unknown attribute '{}'".format(attr_name)) + raise ValueError(f"Unknown attribute '{attr_name}'") def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) diff --git a/aiida/orm/implementation/django/groups.py b/aiida/orm/implementation/django/groups.py index a5658b80a7..7220425aaa 100644 --- a/aiida/orm/implementation/django/groups.py +++ b/aiida/orm/implementation/django/groups.py @@ -158,7 +158,7 @@ def add_nodes(self, nodes, **kwargs): for node in nodes: if not isinstance(node, DjangoNode): - raise TypeError('invalid type {}, has to be {}'.format(type(node), DjangoNode)) + raise TypeError(f'invalid type {type(node)}, has to be {DjangoNode}') if not node.is_stored: raise ValueError('At least one of the provided nodes is unstored, stopping...') @@ -177,7 +177,7 @@ def remove_nodes(self, nodes): for node in nodes: if not isinstance(node, DjangoNode): - raise TypeError('invalid type {}, has to be {}'.format(type(node), DjangoNode)) + raise TypeError(f'invalid type {type(node)}, has to be {DjangoNode}') if not node.is_stored: raise ValueError('At least one of the provided nodes is unstored, stopping...') @@ -249,7 +249,7 @@ def query( queryobject &= Q(user=user.id) if label_filters is not None: - label_filters_list = {'name__' + key: value for (key, value) in label_filters.items() if value} + label_filters_list = {f'name__{key}': value for (key, value) in label_filters.items() if value} queryobject &= Q(**label_filters_list) groups_pk = set(models.DbGroup.objects.filter(queryobject, **kwargs).values_list('pk', flat=True)) @@ -266,7 +266,7 @@ def query( base_query_dict = models.DbAttribute.get_query_dict(value) # prepend to the key the right django string to SQL-join # on the right table - query_dict = {'dbnodes__dbattributes__{}'.format(k2): v2 for k2, v2 in base_query_dict.items()} + query_dict = {f'dbnodes__dbattributes__{k2}': v2 for k2, v2 in base_query_dict.items()} # I narrow down the list of groups. 
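
Both `convert.py` modules touched above rely on the same `functools.singledispatch` idiom: the undecorated base function is the fallback and raises `TypeError` naming the offending class, while per-model converters are attached with `register`. A self-contained sketch of that idiom (the `DbDummy` class is hypothetical, not one of AiiDA's models):

    from functools import singledispatch

    class DbDummy:  # hypothetical backend model for illustration
        pass

    @singledispatch
    def get_backend_entity(dbmodel):
        # Fallback: no converter registered for this type
        raise TypeError(f'No corresponding backend class exists for {dbmodel.__class__.__name__}')

    @get_backend_entity.register(DbDummy)
    def _(dbmodel):
        return f'wrapped {dbmodel.__class__.__name__}'

    print(get_backend_entity(DbDummy()))  # wrapped DbDummy
    # get_backend_entity(42) would raise TypeError mentioning 'int'
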
# I had to do it in this way, with multiple DB hits and diff --git a/aiida/orm/implementation/django/logs.py b/aiida/orm/implementation/django/logs.py index 1ddc79934b..7b3b725c2c 100644 --- a/aiida/orm/implementation/django/logs.py +++ b/aiida/orm/implementation/django/logs.py @@ -107,7 +107,7 @@ def delete(self, log_id): try: models.DbLog.objects.get(id=log_id).delete() except ObjectDoesNotExist: - raise exceptions.NotExistent("Log with id '{}' not found".format(log_id)) + raise exceptions.NotExistent(f"Log with id '{log_id}' not found") def delete_all(self): """ @@ -120,7 +120,7 @@ def delete_all(self): with transaction.atomic(): models.DbLog.objects.all().delete() except Exception as exc: - raise exceptions.IntegrityError('Could not delete all Logs. Full exception: {}'.format(exc)) + raise exceptions.IntegrityError(f'Could not delete all Logs. Full exception: {exc}') def delete_many(self, filters): """ diff --git a/aiida/orm/implementation/django/nodes.py b/aiida/orm/implementation/django/nodes.py index a37666cb70..d8f527e5fd 100644 --- a/aiida/orm/implementation/django/nodes.py +++ b/aiida/orm/implementation/django/nodes.py @@ -71,15 +71,15 @@ def __init__( type_check(user, DjangoUser) if computer: - type_check(computer, DjangoComputer, 'computer is of type {}'.format(type(computer))) + type_check(computer, DjangoComputer, f'computer is of type {type(computer)}') arguments['dbcomputer'] = computer.dbmodel if ctime: - type_check(ctime, datetime, 'the given ctime is of type {}'.format(type(ctime))) + type_check(ctime, datetime, f'the given ctime is of type {type(ctime)}') arguments['ctime'] = ctime if mtime: - type_check(mtime, datetime, 'the given mtime is of type {}'.format(type(mtime))) + type_check(mtime, datetime, f'the given mtime is of type {type(mtime)}') arguments['mtime'] = mtime self._dbmodel = dj_utils.ModelWrapper(models.DbNode(**arguments)) @@ -182,7 +182,7 @@ def _add_link(self, source, link_type, link_label): transaction.savepoint_commit(savepoint_id) except IntegrityError as exception: transaction.savepoint_rollback(savepoint_id) - raise exceptions.UniquenessError('failed to create the link: {}'.format(exception)) from exception + raise exceptions.UniquenessError(f'failed to create the link: {exception}') from exception def clean_values(self): self._dbmodel.attributes = clean_value(self._dbmodel.attributes) @@ -230,7 +230,7 @@ def get(self, pk): try: return self.ENTITY_CLASS.from_dbmodel(models.DbNode.objects.get(pk=pk), self.backend) except ObjectDoesNotExist: - raise exceptions.NotExistent("Node with pk '{}' not found".format(pk)) from ObjectDoesNotExist + raise exceptions.NotExistent(f"Node with pk '{pk}' not found") from ObjectDoesNotExist def delete(self, pk): """Remove a Node entry from the collection with the given id @@ -240,4 +240,4 @@ def delete(self, pk): try: models.DbNode.objects.filter(pk=pk).delete() # pylint: disable=no-member except ObjectDoesNotExist: - raise exceptions.NotExistent("Node with pk '{}' not found".format(pk)) from ObjectDoesNotExist + raise exceptions.NotExistent(f"Node with pk '{pk}' not found") from ObjectDoesNotExist diff --git a/aiida/orm/implementation/django/querybuilder.py b/aiida/orm/implementation/django/querybuilder.py index 04ef270546..578610f24a 100644 --- a/aiida/orm/implementation/django/querybuilder.py +++ b/aiida/orm/implementation/django/querybuilder.py @@ -31,7 +31,7 @@ def compile(element, compiler, **_kw): # pylint: disable=function-redefined, re """ Get length of array defined in a JSONB column """ - return 
'jsonb_array_length(%s)' % compiler.process(element.clauses) + return f'jsonb_array_length({compiler.process(element.clauses)})' class array_length(FunctionElement): # pylint: disable=invalid-name @@ -43,7 +43,7 @@ def compile(element, compiler, **_kw): # pylint: disable=function-redefined """ Get length of array defined in a JSONB column """ - return 'array_length(%s)' % compiler.process(element.clauses) + return f'array_length({compiler.process(element.clauses)})' class jsonb_typeof(FunctionElement): # pylint: disable=invalid-name @@ -55,7 +55,7 @@ def compile(element, compiler, **_kw): # pylint: disable=function-redefined """ Get length of array defined in a JSONB column """ - return 'jsonb_typeof(%s)' % compiler.process(element.clauses) + return f'jsonb_typeof({compiler.process(element.clauses)})' class DjangoQueryBuilder(BackendQueryBuilder): @@ -194,10 +194,7 @@ def get_filter_expr(self, operator, value, attr_key, is_attribute, alias=None, c raise InputValidationError('You have to give an integer when comparing to a length') elif operator in ('like', 'ilike'): if not isinstance(value, str): - raise InputValidationError( - 'Value for operator {} has to be a string (you gave {})' - ''.format(operator, value) - ) + raise InputValidationError(f'Value for operator {operator} has to be a string (you gave {value})') elif operator == 'in': try: value_type_set = set(type(i) for i in value) @@ -206,7 +203,7 @@ def get_filter_expr(self, operator, value, attr_key, is_attribute, alias=None, c if not value_type_set: raise InputValidationError('Value for operator `in` is an empty list') if len(value_type_set) > 1: - raise InputValidationError('Value for operator `in` contains more than one type: {}'.format(value)) + raise InputValidationError(f'Value for operator `in` contains more than one type: {value}') elif operator in ('and', 'or'): expressions_for_this_path = [] for filter_operation_dict in value: @@ -274,7 +271,7 @@ def cast_according_to_type(path_in_json, value): type_filter = jsonb_typeof(path_in_json) == 'null' casted_entity = path_in_json.astext.cast(JSONB) # BOOLEANS? else: - raise TypeError('Unknown type {}'.format(type(value))) + raise TypeError(f'Unknown type {type(value)}') return type_filter, casted_entity if column is None: @@ -301,10 +298,7 @@ def cast_according_to_type(path_in_json, value): # Possible types are object, array, string, number, boolean, and null. 
valid_types = ('object', 'array', 'string', 'number', 'boolean', 'null') if value not in valid_types: - raise InputValidationError( - 'value {} for of_type is not among valid types\n' - '{}'.format(value, valid_types) - ) + raise InputValidationError(f'value {value} for of_type is not among valid types\n{valid_types}') expr = jsonb_typeof(database_entity) == value elif operator == 'like': type_filter, casted_entity = cast_according_to_type(database_entity, value) @@ -336,7 +330,7 @@ def cast_according_to_type(path_in_json, value): ], else_=False) else: - raise InputValidationError('Unknown operator {} for filters in JSON field'.format(operator)) + raise InputValidationError(f'Unknown operator {operator} for filters in JSON field') return expr @staticmethod @@ -350,6 +344,6 @@ def get_column_names(self, alias): """ # pylint: disable=protected-access return [ - str(c).replace(alias._aliased_insp.class_.table.name + '.', '') + str(c).replace(f'{alias._aliased_insp.class_.table.name}.', '') for c in alias._aliased_insp.class_.table._columns._all_columns ] diff --git a/aiida/orm/implementation/entities.py b/aiida/orm/implementation/entities.py index a8cdfadafe..1729755ce0 100644 --- a/aiida/orm/implementation/entities.py +++ b/aiida/orm/implementation/entities.py @@ -153,7 +153,7 @@ def get_attribute(self, key): try: return self._dbmodel.attributes[key] except KeyError as exception: - raise AttributeError('attribute `{}` does not exist'.format(exception)) from exception + raise AttributeError(f'attribute `{exception}` does not exist') from exception def get_attribute_many(self, keys): """Return the values of multiple attributes. @@ -173,7 +173,7 @@ def get_attribute_many(self, keys): try: return [self.get_attribute(key) for key in keys] except KeyError as exception: - raise AttributeError('attribute `{}` does not exist'.format(exception)) from exception + raise AttributeError(f'attribute `{exception}` does not exist') from exception def set_attribute(self, key, value): """Set an attribute to the given value. @@ -233,7 +233,7 @@ def delete_attribute(self, key): try: self._dbmodel.attributes.pop(key) except KeyError as exception: - raise AttributeError('attribute `{}` does not exist'.format(exception)) from exception + raise AttributeError(f'attribute `{exception}` does not exist') from exception else: self._flush_if_stored({'attributes'}) @@ -246,7 +246,7 @@ def delete_attribute_many(self, keys): non_existing_keys = [key for key in keys if key not in self._dbmodel.attributes] if non_existing_keys: - raise AttributeError('attributes `{}` do not exist'.format(', '.join(non_existing_keys))) + raise AttributeError(f"attributes `{', '.join(non_existing_keys)}` do not exist") for key in keys: self.dbmodel.attributes.pop(key) @@ -322,7 +322,7 @@ def get_extra(self, key): try: return self._dbmodel.extras[key] except KeyError as exception: - raise AttributeError('extra `{}` does not exist'.format(exception)) from exception + raise AttributeError(f'extra `{exception}` does not exist') from exception def get_extra_many(self, keys): """Return the values of multiple extras. 
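
The `jsonb_array_length`, `array_length` and `jsonb_typeof` compiler hooks converted in the Django query-builder hunks above (and again in the SQLAlchemy backend further down) follow SQLAlchemy's custom-construct recipe: subclass `FunctionElement` and register a `@compiles` hook that emits the raw SQL, now assembled with an f-string. A minimal sketch of the recipe, assuming a standard SQLAlchemy installation:

    from sqlalchemy.ext.compiler import compiles
    from sqlalchemy.sql.expression import FunctionElement

    class jsonb_array_length(FunctionElement):  # pylint: disable=invalid-name
        name = 'jsonb_array_length'

    @compiles(jsonb_array_length)
    def compile_jsonb_array_length(element, compiler, **_kw):
        # Render the wrapped clauses inside the PostgreSQL function call
        return f'jsonb_array_length({compiler.process(element.clauses)})'
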
@@ -398,7 +398,7 @@ def delete_extra(self, key): try: self._dbmodel.extras.pop(key) except KeyError as exception: - raise AttributeError('extra `{}` does not exist'.format(exception)) from exception + raise AttributeError(f'extra `{exception}` does not exist') from exception else: self._flush_if_stored({'extras'}) @@ -411,7 +411,7 @@ def delete_extra_many(self, keys): non_existing_keys = [key for key in keys if key not in self._dbmodel.extras] if non_existing_keys: - raise AttributeError('extras `{}` do not exist'.format(', '.join(non_existing_keys))) + raise AttributeError(f"extras `{', '.join(non_existing_keys)}` do not exist") for key in keys: self.dbmodel.extras.pop(key) diff --git a/aiida/orm/implementation/groups.py b/aiida/orm/implementation/groups.py index b4e7fbf2b9..d1ab920eb4 100644 --- a/aiida/orm/implementation/groups.py +++ b/aiida/orm/implementation/groups.py @@ -165,7 +165,7 @@ def add_nodes(self, nodes, **kwargs): # pylint: disable=unused-argument raise TypeError('nodes has to be a list or tuple') if any([not isinstance(node, BackendNode) for node in nodes]): - raise TypeError('nodes have to be of type {}'.format(BackendNode)) + raise TypeError(f'nodes have to be of type {BackendNode}') def remove_nodes(self, nodes): """Remove a set of nodes from the group. @@ -181,16 +181,16 @@ def remove_nodes(self, nodes): raise TypeError('nodes has to be a list or tuple') if any([not isinstance(node, BackendNode) for node in nodes]): - raise TypeError('nodes have to be of type {}'.format(BackendNode)) + raise TypeError(f'nodes have to be of type {BackendNode}') def __repr__(self): - return '<{}: {}>'.format(self.__class__.__name__, str(self)) + return f'<{self.__class__.__name__}: {str(self)}>' def __str__(self): if self.type_string: - return '"{}" [type {}], of user {}'.format(self.label, self.type_string, self.user.email) + return f'"{self.label}" [type {self.type_string}], of user {self.user.email}' - return '"{}" [user-defined], of user {}'.format(self.label, self.user.email) + return f'"{self.label}" [user-defined], of user {self.user.email}' class BackendGroupCollection(BackendCollection[BackendGroup]): @@ -260,9 +260,9 @@ def get(self, **filters): """ results = self.query(**filters) if len(results) > 1: - raise exceptions.MultipleObjectsError("Found multiple groups matching criteria '{}'".format(filters)) + raise exceptions.MultipleObjectsError(f"Found multiple groups matching criteria '{filters}'") if not results: - raise exceptions.NotExistent("No group bound matching criteria '{}'".format(filters)) + raise exceptions.NotExistent(f"No group bound matching criteria '{filters}'") return results[0] @abc.abstractmethod diff --git a/aiida/orm/implementation/querybuilder.py b/aiida/orm/implementation/querybuilder.py index 9d026e13fb..41be182278 100644 --- a/aiida/orm/implementation/querybuilder.py +++ b/aiida/orm/implementation/querybuilder.py @@ -199,7 +199,7 @@ def get_filter_expr_from_column(cls, operator, value, column): from sqlalchemy.types import String if not isinstance(column, (Cast, InstrumentedAttribute, QueryableAttribute, Label, ColumnClause)): - raise TypeError('column ({}) {} is not a valid column'.format(type(column), column)) + raise TypeError(f'column ({type(column)}) {column} is not a valid column') database_entity = column if operator == '==': expr = database_entity == value @@ -220,7 +220,7 @@ def get_filter_expr_from_column(cls, operator, value, column): elif operator == 'in': expr = database_entity.in_(value) else: - raise InputValidationError('Unknown 
operator {} for filters on columns'.format(operator)) + raise InputValidationError(f'Unknown operator {operator} for filters on columns') return expr def get_projectable_attribute(self, alias, column_name, attrpath, cast=None, **kwargs): @@ -244,7 +244,7 @@ def get_projectable_attribute(self, alias, column_name, attrpath, cast=None, **k elif cast == 'd': entity = entity.astext.cast(DateTime) else: - raise InputValidationError('Unkown casting key {}'.format(cast)) + raise InputValidationError(f'Unkown casting key {cast}') return entity def get_aiida_res(self, res): @@ -310,7 +310,7 @@ def iterall(self, query, batch_size, tag_to_index_dict): """ try: if not tag_to_index_dict: - raise Exception('Got an empty dictionary: {}'.format(tag_to_index_dict)) + raise Exception(f'Got an empty dictionary: {tag_to_index_dict}') results = query.yield_per(batch_size) diff --git a/aiida/orm/implementation/sqlalchemy/authinfos.py b/aiida/orm/implementation/sqlalchemy/authinfos.py index c68b0e03f0..0c07f9245e 100644 --- a/aiida/orm/implementation/sqlalchemy/authinfos.py +++ b/aiida/orm/implementation/sqlalchemy/authinfos.py @@ -131,7 +131,7 @@ def delete(self, pk): session.query(DbAuthInfo).filter_by(id=pk).one().delete() session.commit() except NoResultFound: - raise exceptions.NotExistent('AuthInfo<{}> does not exist'.format(pk)) + raise exceptions.NotExistent(f'AuthInfo<{pk}> does not exist') def get(self, computer, user): """Return an entry from the collection that is configured for the given computer and user @@ -150,12 +150,10 @@ def get(self, computer, user): try: authinfo = session.query(DbAuthInfo).filter_by(dbcomputer_id=computer.id, aiidauser_id=user.id).one() except NoResultFound: - raise exceptions.NotExistent( - 'User<{}> has no configuration for Computer<{}>'.format(user.email, computer.name) - ) + raise exceptions.NotExistent(f'User<{user.email}> has no configuration for Computer<{computer.name}>') except MultipleResultsFound: raise exceptions.MultipleObjectsError( - 'User<{}> has multiple configurations for Computer<{}>'.format(user.email, computer.name) + f'User<{user.email}> has multiple configurations for Computer<{computer.name}>' ) else: return self.from_dbmodel(authinfo) diff --git a/aiida/orm/implementation/sqlalchemy/comments.py b/aiida/orm/implementation/sqlalchemy/comments.py index ddc7a80d98..d97df9aea7 100644 --- a/aiida/orm/implementation/sqlalchemy/comments.py +++ b/aiida/orm/implementation/sqlalchemy/comments.py @@ -51,11 +51,11 @@ def __init__(self, backend, node, user, content=None, ctime=None, mtime=None): } if ctime: - lang.type_check(ctime, datetime, 'the given ctime is of type {}'.format(type(ctime))) + lang.type_check(ctime, datetime, f'the given ctime is of type {type(ctime)}') arguments['ctime'] = ctime if mtime: - lang.type_check(mtime, datetime, 'the given mtime is of type {}'.format(type(mtime))) + lang.type_check(mtime, datetime, f'the given mtime is of type {type(mtime)}') arguments['mtime'] = mtime self._dbmodel = utils.ModelWrapper(models.DbComment(**arguments)) @@ -134,7 +134,7 @@ def delete(self, comment_id): session.commit() except NoResultFound: session.rollback() - raise exceptions.NotExistent("Comment with id '{}' not found".format(comment_id)) + raise exceptions.NotExistent(f"Comment with id '{comment_id}' not found") def delete_all(self): """ @@ -149,7 +149,7 @@ def delete_all(self): session.commit() except Exception as exc: session.rollback() - raise exceptions.IntegrityError('Could not delete all Comments. 
Full exception: {}'.format(exc)) + raise exceptions.IntegrityError(f'Could not delete all Comments. Full exception: {exc}') def delete_many(self, filters): """ diff --git a/aiida/orm/implementation/sqlalchemy/convert.py b/aiida/orm/implementation/sqlalchemy/convert.py index 9dc43ae1f8..5190cf3fa5 100644 --- a/aiida/orm/implementation/sqlalchemy/convert.py +++ b/aiida/orm/implementation/sqlalchemy/convert.py @@ -37,9 +37,7 @@ def get_backend_entity(dbmodel, backend): # pylint: disable=unused-argument """ Default get_backend_entity """ - raise TypeError( - "No corresponding AiiDA backend class exists for the model class '{}'".format(dbmodel.__class__.__name__) - ) + raise TypeError(f"No corresponding AiiDA backend class exists for the model class '{dbmodel.__class__.__name__}'") ################################ diff --git a/aiida/orm/implementation/sqlalchemy/entities.py b/aiida/orm/implementation/sqlalchemy/entities.py index 67e3043c63..1fda976fc3 100644 --- a/aiida/orm/implementation/sqlalchemy/entities.py +++ b/aiida/orm/implementation/sqlalchemy/entities.py @@ -60,7 +60,7 @@ def get_dbmodel_attribute_name(cls, attr_name): if hasattr(cls.MODEL_CLASS, attr_name): return attr_name - raise ValueError("Unknown attribute '{}'".format(attr_name)) + raise ValueError(f"Unknown attribute '{attr_name}'") def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) diff --git a/aiida/orm/implementation/sqlalchemy/groups.py b/aiida/orm/implementation/sqlalchemy/groups.py index 0864c7d3b1..8cc831b850 100644 --- a/aiida/orm/implementation/sqlalchemy/groups.py +++ b/aiida/orm/implementation/sqlalchemy/groups.py @@ -70,7 +70,7 @@ def label(self, label): try: self._dbmodel.save() except Exception: - raise UniquenessError('a group of the same type with the label {} already exists'.format(label)) \ + raise UniquenessError(f'a group of the same type with the label {label} already exists') \ from Exception @property @@ -194,7 +194,7 @@ def add_nodes(self, nodes, **kwargs): def check_node(given_node): """ Check if given node is of correct type and stored """ if not isinstance(given_node, SqlaNode): - raise TypeError('invalid type {}, has to be {}'.format(type(given_node), SqlaNode)) + raise TypeError(f'invalid type {type(given_node)}, has to be {SqlaNode}') if not given_node.is_stored: raise ValueError('At least one of the provided nodes is unstored, stopping...') @@ -252,7 +252,7 @@ def remove_nodes(self, nodes, **kwargs): def check_node(node): if not isinstance(node, SqlaNode): - raise TypeError('invalid type {}, has to be {}'.format(type(node), SqlaNode)) + raise TypeError(f'invalid type {type(node)}, has to be {SqlaNode}') if node.id is None: raise ValueError('At least one of the provided nodes is unstored, stopping...') @@ -349,11 +349,11 @@ def query( if not value: continue if key == 'startswith': - filters.append(DbGroup.label.like('{}%'.format(value))) + filters.append(DbGroup.label.like(f'{value}%')) elif key == 'endswith': - filters.append(DbGroup.label.like('%{}'.format(value))) + filters.append(DbGroup.label.like(f'%{value}')) elif key == 'contains': - filters.append(DbGroup.label.like('%{}%'.format(value))) + filters.append(DbGroup.label.like(f'%{value}%')) if node_attributes: _LOGGER.warning("SQLA query doesn't support node attribute filters, ignoring '%s'", node_attributes) diff --git a/aiida/orm/implementation/sqlalchemy/logs.py b/aiida/orm/implementation/sqlalchemy/logs.py index 96155ee43b..2723e68e02 100644 --- a/aiida/orm/implementation/sqlalchemy/logs.py +++ 
b/aiida/orm/implementation/sqlalchemy/logs.py @@ -115,7 +115,7 @@ def delete(self, log_id): session.commit() except NoResultFound: session.rollback() - raise exceptions.NotExistent("Log with id '{}' not found".format(log_id)) + raise exceptions.NotExistent(f"Log with id '{log_id}' not found") def delete_all(self): """ @@ -130,7 +130,7 @@ def delete_all(self): session.commit() except Exception as exc: session.rollback() - raise exceptions.IntegrityError('Could not delete all Logs. Full exception: {}'.format(exc)) + raise exceptions.IntegrityError(f'Could not delete all Logs. Full exception: {exc}') def delete_many(self, filters): """ diff --git a/aiida/orm/implementation/sqlalchemy/nodes.py b/aiida/orm/implementation/sqlalchemy/nodes.py index 8b857c746d..e565292904 100644 --- a/aiida/orm/implementation/sqlalchemy/nodes.py +++ b/aiida/orm/implementation/sqlalchemy/nodes.py @@ -71,15 +71,15 @@ def __init__( type_check(user, SqlaUser) if computer: - type_check(computer, SqlaComputer, 'computer is of type {}'.format(type(computer))) + type_check(computer, SqlaComputer, f'computer is of type {type(computer)}') arguments['dbcomputer'] = computer.dbmodel if ctime: - type_check(ctime, datetime, 'the given ctime is of type {}'.format(type(ctime))) + type_check(ctime, datetime, f'the given ctime is of type {type(ctime)}') arguments['ctime'] = ctime if mtime: - type_check(mtime, datetime, 'the given mtime is of type {}'.format(type(mtime))) + type_check(mtime, datetime, f'the given mtime is of type {type(mtime)}') arguments['mtime'] = mtime self._dbmodel = sqla_utils.ModelWrapper(models.DbNode(**arguments)) @@ -186,7 +186,7 @@ def _add_link(self, source, link_type, link_label): link = DbLink(input_id=source.id, output_id=self.id, label=link_label, type=link_type.value) session.add(link) except SQLAlchemyError as exception: - raise exceptions.UniquenessError('failed to create the link: {}'.format(exception)) from exception + raise exceptions.UniquenessError(f'failed to create the link: {exception}') from exception def clean_values(self): self._dbmodel.attributes = clean_value(self._dbmodel.attributes) @@ -235,7 +235,7 @@ def get(self, pk): try: return self.ENTITY_CLASS.from_dbmodel(session.query(models.DbNode).filter_by(id=pk).one(), self.backend) except NoResultFound: - raise exceptions.NotExistent("Node with pk '{}' not found".format(pk)) from NoResultFound + raise exceptions.NotExistent(f"Node with pk '{pk}' not found") from NoResultFound def delete(self, pk): """Remove a Node entry from the collection with the given id @@ -248,4 +248,4 @@ def delete(self, pk): session.query(models.DbNode).filter_by(id=pk).one().delete() session.commit() except NoResultFound: - raise exceptions.NotExistent("Node with pk '{}' not found".format(pk)) from NoResultFound + raise exceptions.NotExistent(f"Node with pk '{pk}' not found") from NoResultFound diff --git a/aiida/orm/implementation/sqlalchemy/querybuilder.py b/aiida/orm/implementation/sqlalchemy/querybuilder.py index 64c49a3f35..e2d66e623f 100644 --- a/aiida/orm/implementation/sqlalchemy/querybuilder.py +++ b/aiida/orm/implementation/sqlalchemy/querybuilder.py @@ -29,7 +29,7 @@ def compile(element, compiler, **_kw): # pylint: disable=function-redefined, re """ Get length of array defined in a JSONB column """ - return 'jsonb_array_length(%s)' % compiler.process(element.clauses) + return f'jsonb_array_length({compiler.process(element.clauses)})' class array_length(FunctionElement): # pylint: disable=invalid-name @@ -41,7 +41,7 @@ def compile(element, compiler, 
**_kw): # pylint: disable=function-redefined """ Get length of array defined in a JSONB column """ - return 'array_length(%s)' % compiler.process(element.clauses) + return f'array_length({compiler.process(element.clauses)})' class jsonb_typeof(FunctionElement): # pylint: disable=invalid-name @@ -53,7 +53,7 @@ def compile(element, compiler, **_kw): # pylint: disable=function-redefined """ Get length of array defined in a JSONB column """ - return 'jsonb_typeof(%s)' % compiler.process(element.clauses) + return f'jsonb_typeof({compiler.process(element.clauses)})' class SqlaQueryBuilder(BackendQueryBuilder): @@ -127,7 +127,7 @@ def modify_expansions(self, alias, expansions): # The update of expansions makes sense only when AliasedClass is provided if hasattr(alias, '_sa_class_manager'): if '_metadata' in expansions: - raise NotExistent("_metadata doesn't exist for {}. Please try metadata.".format(alias)) + raise NotExistent(f"_metadata doesn't exist for {alias}. Please try metadata.") return self.get_corresponding_properties(alias.__tablename__, expansions, self.outer_to_inner_schema) @@ -223,10 +223,7 @@ def get_filter_expr(self, operator, value, attr_key, is_attribute, alias=None, c raise InputValidationError('You have to give an integer when comparing to a length') elif operator in ('like', 'ilike'): if not isinstance(value, str): - raise InputValidationError( - 'Value for operator {} has to be a string (you gave {})' - ''.format(operator, value) - ) + raise InputValidationError(f'Value for operator {operator} has to be a string (you gave {value})') elif operator == 'in': try: @@ -236,7 +233,7 @@ def get_filter_expr(self, operator, value, attr_key, is_attribute, alias=None, c if not value_type_set: raise InputValidationError('Value for operator `in` is an empty list') if len(value_type_set) > 1: - raise InputValidationError('Value for operator `in` contains more than one type: {}'.format(value)) + raise InputValidationError(f'Value for operator `in` contains more than one type: {value}') elif operator in ('and', 'or'): expressions_for_this_path = [] for filter_operation_dict in value: @@ -298,7 +295,7 @@ def cast_according_to_type(path_in_json, value): type_filter = jsonb_typeof(path_in_json) == 'null' casted_entity = path_in_json.astext.cast(JSONB) # BOOLEANS? else: - raise TypeError('Unknown type {}'.format(type(value))) + raise TypeError(f'Unknown type {type(value)}') return type_filter, casted_entity if column is None: @@ -325,10 +322,7 @@ def cast_according_to_type(path_in_json, value): # Possible types are object, array, string, number, boolean, and null. valid_types = ('object', 'array', 'string', 'number', 'boolean', 'null') if value not in valid_types: - raise InputValidationError( - 'value {} for of_type is not among valid types\n' - '{}'.format(value, valid_types) - ) + raise InputValidationError(f'value {value} for of_type is not among valid types\n{valid_types}') expr = jsonb_typeof(database_entity) == value elif operator == 'like': type_filter, casted_entity = cast_according_to_type(database_entity, value) @@ -360,7 +354,7 @@ def cast_according_to_type(path_in_json, value): ], else_=False) else: - raise InputValidationError('Unknown operator {} for filters in JSON field'.format(operator)) + raise InputValidationError(f'Unknown operator {operator} for filters in JSON field') return expr @staticmethod @@ -372,4 +366,4 @@ def get_column_names(self, alias): """ Given the backend specific alias, return the column names that correspond to the aliased table. 
""" - return [str(c).replace(alias.__table__.name + '.', '') for c in alias.__table__.columns] + return [str(c).replace(f'{alias.__table__.name}.', '') for c in alias.__table__.columns] diff --git a/aiida/orm/implementation/utils.py b/aiida/orm/implementation/utils.py index 3641eff954..2964cf6865 100644 --- a/aiida/orm/implementation/utils.py +++ b/aiida/orm/implementation/utils.py @@ -33,7 +33,7 @@ def validate_attribute_extra_key(key): if FIELD_SEPARATOR in key: raise exceptions.ValidationError( - 'key for attributes or extras cannot contain the character `{}`'.format(FIELD_SEPARATOR) + f'key for attributes or extras cannot contain the character `{FIELD_SEPARATOR}`' ) @@ -78,7 +78,7 @@ def clean_builtin(val): # This is for float-like types, like ``numpy.float128`` that are not json-serializable # Note that `numbers.Real` also match booleans but they are already returned above if isinstance(val, numbers.Real): - string_representation = '{{:.{}g}}'.format(AIIDA_FLOAT_PRECISION).format(val) + string_representation = f'{{:.{AIIDA_FLOAT_PRECISION}g}}'.format(val) new_val = float(string_representation) if 'e' in string_representation and new_val.is_integer(): # This is indeed often quite unexpected, because it is going to change the type of the data @@ -94,7 +94,7 @@ def clean_builtin(val): return new_val # Anything else we do not understand and we refuse - raise exceptions.ValidationError('type `{}` is not supported as it is not json-serializable'.format(type(val))) + raise exceptions.ValidationError(f'type `{type(val)}` is not supported as it is not json-serializable') if isinstance(value, BaseType): return clean_builtin(value.value) diff --git a/aiida/orm/nodes/data/array/array.py b/aiida/orm/nodes/data/array/array.py index 5b87234bef..b4d00079d4 100644 --- a/aiida/orm/nodes/data/array/array.py +++ b/aiida/orm/nodes/data/array/array.py @@ -43,14 +43,14 @@ def delete_array(self, name): :param name: The name of the array to delete from the node. """ - fname = '{}.npy'.format(name) + fname = f'{name}.npy' if fname not in self.list_object_names(): - raise KeyError("Array with name '{}' not found in node pk= {}".format(name, self.pk)) + raise KeyError(f"Array with name '{name}' not found in node pk= {self.pk}") # remove both file and attribute self.delete_object(fname) try: - self.delete_attribute('{}{}'.format(self.array_prefix, name)) + self.delete_attribute(f'{self.array_prefix}{name}') except (KeyError, AttributeError): # Should not happen, but do not crash if for some reason the property was not set. pass @@ -86,7 +86,7 @@ def get_shape(self, name): :param name: The name of the array. 
""" - return tuple(self.get_attribute('{}{}'.format(self.array_prefix, name))) + return tuple(self.get_attribute(f'{self.array_prefix}{name}')) def get_iterarrays(self): """ @@ -108,10 +108,10 @@ def get_array(self, name): def get_array_from_file(self, name): """Return the array stored in a .npy file""" - filename = '{}.npy'.format(name) + filename = f'{name}.npy' if filename not in self.list_object_names(): - raise KeyError('Array with name `{}` not found in ArrayData<{}>'.format(name, self.pk)) + raise KeyError(f'Array with name `{name}` not found in ArrayData<{self.pk}>') # Open a handle in binary read mode as the arrays are written as binary files as well with self.open(filename, mode='rb') as handle: @@ -169,10 +169,10 @@ def set_array(self, name, array): handle.seek(0) # Write the numpy array to the repository, keeping the byte representation - self.put_object_from_filelike(handle, '{}.npy'.format(name), mode='wb', encoding=None) + self.put_object_from_filelike(handle, f'{name}.npy', mode='wb', encoding=None) # Store the array name and shape for querying purposes - self.set_attribute('{}{}'.format(self.array_prefix, name), list(array.shape)) + self.set_attribute(f'{self.array_prefix}{name}', list(array.shape)) def _validate(self): """ @@ -188,7 +188,6 @@ def _validate(self): if set(files) != set(properties): raise ValidationError( - 'Mismatch of files and properties for ArrayData' - ' node (pk= {}): {} vs. {}'.format(self.pk, files, properties) + f'Mismatch of files and properties for ArrayData node (pk= {self.pk}): {files} vs. {properties}' ) super()._validate() diff --git a/aiida/orm/nodes/data/array/bands.py b/aiida/orm/nodes/data/array/bands.py index 1c3b64777d..21ba303707 100644 --- a/aiida/orm/nodes/data/array/bands.py +++ b/aiida/orm/nodes/data/array/bands.py @@ -28,13 +28,13 @@ def prepare_header_comment(uuid, plot_info, comment_char='#'): filetext = [] filetext += get_file_header(comment_char='').splitlines() filetext.append('') - filetext.append('Dumped from BandsData UUID={}'.format(uuid)) + filetext.append(f'Dumped from BandsData UUID={uuid}') filetext.append('\tpoints\tbands') filetext.append('\t{}\t{}'.format(*plot_info['y'].shape)) filetext.append('') filetext.append('\tlabel\tpoint') for label in plot_info['raw_labels']: - filetext.append('\t{}\t{:.8f}'.format(label[1], label[0])) + filetext.append(f'\t{label[1]}\t{label[0]:.8f}') return '\n'.join('{} {}'.format(comment_char, line) for line in filetext) @@ -280,8 +280,7 @@ def _validate_bands_occupations(self, bands, occupations=None, labels=None): the_occupations = numpy.array(occupations) if the_occupations.shape != the_bands.shape: raise ValueError( - 'Shape of occupations {} different from shape' - 'shape of bands {}'.format(the_occupations.shape, the_bands.shape) + f'Shape of occupations {the_occupations.shape} different from shapeshape of bands {the_bands.shape}' ) if not the_bands.dtype.type == numpy.float64: @@ -298,7 +297,7 @@ def _validate_bands_occupations(self, bands, occupations=None, labels=None): try: [float(_) for _ in x.flatten() if _ is not None] except (TypeError, ValueError): - raise ValueError('The {} array can only contain float or None values'.format(msg)) + raise ValueError(f'The {msg} array can only contain float or None values') # check the labels if labels is not None: @@ -582,10 +581,10 @@ def _prepare_agr_batch(self, main_file_name='', comments=True, prettify_format=N if comments: batch.append(prepare_header_comment(self.uuid, plot_info, comment_char='#')) - batch.append('READ XY 
"{}"'.format(dat_filename)) + batch.append(f'READ XY "{dat_filename}"') # axis limits - batch.append('world {}, {}, {}, {}'.format(x_min_lim, y_min_lim, x_max_lim, y_max_lim)) + batch.append(f'world {x_min_lim}, {y_min_lim}, {x_max_lim}, {y_max_lim}') # axis label batch.append('yaxis label "Dispersion"') @@ -593,11 +592,11 @@ def _prepare_agr_batch(self, main_file_name='', comments=True, prettify_format=N # axis ticks batch.append('xaxis tick place both') batch.append('xaxis tick spec type both') - batch.append('xaxis tick spec {}'.format(len(labels))) + batch.append(f'xaxis tick spec {len(labels)}') # set the name of the special points for index, label in enumerate(labels): - batch.append('xaxis tick major {}, {}'.format(index, label[0])) - batch.append('xaxis ticklabel {}, "{}"'.format(index, label[1])) + batch.append(f'xaxis tick major {index}, {label[0]}') + batch.append(f'xaxis ticklabel {index}, "{label[1]}"') batch.append('xaxis tick major color 7') batch.append('xaxis tick major grid on') @@ -612,8 +611,8 @@ def _prepare_agr_batch(self, main_file_name='', comments=True, prettify_format=N # set color and linewidths of bands for index in range(num_bands): - batch.append('s{} line color 1'.format(index)) - batch.append('s{} linewidth 1'.format(index)) + batch.append(f's{index} line color 1') + batch.append(f's{index} linewidth 1') batch_data = '\n'.join(batch) + '\n' extra_files = {dat_filename: raw_data} @@ -639,7 +638,7 @@ def _prepare_dat_multicolumn(self, main_file_name='', comments=True): # pylint: return_text.append(prepare_header_comment(self.uuid, plot_info, comment_char='#')) for i in zip(x, bands): - line = ['{:.8f}'.format(i[0])] + ['{:.8f}'.format(j) for j in i[1]] + line = [f'{i[0]:.8f}'] + [f'{j:.8f}' for j in i[1]] return_text.append('\t'.join(line)) return ('\n'.join(return_text) + '\n').encode('utf-8'), {} @@ -664,7 +663,7 @@ def _prepare_dat_blocks(self, main_file_name='', comments=True): # pylint: disa for band in numpy.transpose(bands): for i in zip(x, band): - line = ['{:.8f}'.format(i[0]), '{:.8f}'.format(i[1])] + line = [f'{i[0]:.8f}', f'{i[1]:.8f}'] return_text.append('\t'.join(line)) return_text.append('') return_text.append('') @@ -778,7 +777,7 @@ def _matplotlib_get_dict( all_data['tick_labels'] = tick_labels all_data['legend_text'] = legend all_data['legend_text2'] = legend2 - all_data['yaxis_label'] = 'Dispersion ({})'.format(self.units) + all_data['yaxis_label'] = f'Dispersion ({self.units})' all_data['title'] = title if comments: all_data['comment'] = prepare_header_comment(self.uuid, plot_info, comment_char='#') @@ -797,7 +796,7 @@ def _matplotlib_get_dict( for key, value in kwargs.items(): if key not in valid_additional_keywords: - raise TypeError("_matplotlib_get_dict() got an unexpected keyword argument '{}'".format(key)) + raise TypeError(f"_matplotlib_get_dict() got an unexpected keyword argument '{key}'") all_data[key] = value return all_data @@ -1054,17 +1053,17 @@ def _prepare_gnuplot( # Actual logic script.append('set termopt enhanced') # Properly deals with e.g. 
subscripts script.append('set encoding utf8') # To deal with Greek letters - script.append('set xtics ({})'.format(xtics_string)) + script.append(f'set xtics ({xtics_string})') script.append('unset key') - script.append('set yrange [{}:{}]'.format(y_min_lim, y_max_lim)) - script.append('set ylabel "{}"'.format('Dispersion ({})'.format(self.units))) + script.append(f'set yrange [{y_min_lim}:{y_max_lim}]') + script.append(f"set ylabel \"Dispersion ({self.units})\"") if title: script.append('set title "{}"'.format(title.replace('"', '\"'))) # Plot, escaping filename if len(x) > 1: - script.append('set xrange [{}:{}]'.format(x_min_lim, x_max_lim)) + script.append(f'set xrange [{x_min_lim}:{x_max_lim}]') script.append('set grid xtics lt 1 lc rgb "#888888"') script.append('plot "{}" with l lc rgb "#000000"'.format(os.path.basename(dat_filename).replace('"', '\"'))) else: @@ -1133,9 +1132,9 @@ def _prepare_agr( import math # load the x and y of every set if color_number > MAX_NUM_AGR_COLORS: - raise ValueError('Color number is too high (should be less than {})'.format(MAX_NUM_AGR_COLORS)) + raise ValueError(f'Color number is too high (should be less than {MAX_NUM_AGR_COLORS})') if color_number2 > MAX_NUM_AGR_COLORS: - raise ValueError('Color number 2 is too high (should be less than {})'.format(MAX_NUM_AGR_COLORS)) + raise ValueError(f'Color number 2 is too high (should be less than {MAX_NUM_AGR_COLORS})') bands = plot_info['y'] x = plot_info['x'] @@ -1170,7 +1169,7 @@ def _prepare_agr( for band in the_bands: this_set = '' for i in zip(x, band): - line = '{:.8f}'.format(i[0]) + '\t' + '{:.8f}'.format(i[1]) + '\n' + line = f'{i[0]:.8f}' + '\t' + f'{i[1]:.8f}' + '\n' this_set += line all_sets.append(this_set) @@ -1195,7 +1194,7 @@ def _prepare_agr( y_min_lim=y_min_lim, x_max_lim=x_max_lim, y_max_lim=y_max_lim, - yaxislabel='Dispersion ({})'.format(units), + yaxislabel=f'Dispersion ({units})', xticks_template=xticks, set_descriptions=set_descriptions, ytick_spacing=ytick_spacing, diff --git a/aiida/orm/nodes/data/array/kpoints.py b/aiida/orm/nodes/data/array/kpoints.py index 68e91c7b40..e0709a3692 100644 --- a/aiida/orm/nodes/data/array/kpoints.py +++ b/aiida/orm/nodes/data/array/kpoints.py @@ -47,7 +47,7 @@ def get_description(self): ) except AttributeError: try: - return '(Path of {} kpts)'.format(len(self.get_kpoints())) + return f'(Path of {len(self.get_kpoints())} kpts)' except OSError: return self.node_type @@ -370,10 +370,10 @@ def _validate_kpoints_weights(self, kpoints, weights): # replace by singletons kpoints = kpoints.reshape(kpoints.shape[0], 1) else: - raise ValueError('kpoints must be a list of lists in {}D case'.format(self._dimension)) + raise ValueError(f'kpoints must be a list of lists in {self._dimension}D case') if kpoints.dtype != numpy.dtype(numpy.float): - raise ValueError('kpoints must be an array of type floats. Found instead {}'.format(kpoints.dtype)) + raise ValueError(f'kpoints must be an array of type floats. Found instead {kpoints.dtype}') if kpoints.shape[1] < self._dimension: raise ValueError( @@ -384,9 +384,9 @@ def _validate_kpoints_weights(self, kpoints, weights): if weights is not None: weights = numpy.array(weights) if weights.shape[0] != kpoints.shape[0]: - raise ValueError('Found {} weights but {} kpoints'.format(weights.shape[0], kpoints.shape[0])) + raise ValueError(f'Found {weights.shape[0]} weights but {kpoints.shape[0]} kpoints') if weights.dtype != numpy.dtype(numpy.float): - raise ValueError('weights must be an array of type floats. 
Found instead {}'.format(weights.dtype)) + raise ValueError(f'weights must be an array of type floats. Found instead {weights.dtype}') return kpoints, weights @@ -436,10 +436,7 @@ def set_kpoints(self, kpoints, cartesian=False, labels=None, weights=None, fill_ fill_values = [fill_values] * (3 - the_kpoints.shape[1]) if len(fill_values) < 3 - the_kpoints.shape[1]: - raise ValueError( - 'fill_values should be either a scalar or a ' - 'length-{} list'.format(3 - the_kpoints.shape[1]) - ) + raise ValueError(f'fill_values should be either a scalar or a length-{3 - the_kpoints.shape[1]} list') else: tmp_kpoints = numpy.zeros((the_kpoints.shape[0], 0)) i_kpts = 0 diff --git a/aiida/orm/nodes/data/array/projection.py b/aiida/orm/nodes/data/array/projection.py index 9e81fc8c5d..87b1f8bd08 100644 --- a/aiida/orm/nodes/data/array/projection.py +++ b/aiida/orm/nodes/data/array/projection.py @@ -129,8 +129,8 @@ def get_pdos(self, **kwargs): """ retrieve_indices, all_orbitals = self._find_orbitals_and_indices(**kwargs) out_list = [( - all_orbitals[i], self.get_array('pdos_{}'.format(self._from_index_to_arrayname(i))), - self.get_array('energy_{}'.format(self._from_index_to_arrayname(i))) + all_orbitals[i], self.get_array(f'pdos_{self._from_index_to_arrayname(i)}'), + self.get_array(f'energy_{self._from_index_to_arrayname(i)}') ) for i in retrieve_indices] return out_list @@ -145,8 +145,9 @@ def get_projections(self, **kwargs): """ retrieve_indices, all_orbitals = self._find_orbitals_and_indices(**kwargs) - out_list = [(all_orbitals[i], self.get_array('proj_{}'.format(self._from_index_to_arrayname(i)))) - for i in retrieve_indices] + out_list = [ + (all_orbitals[i], self.get_array(f'proj_{self._from_index_to_arrayname(i)}')) for i in retrieve_indices + ] return out_list @staticmethod @@ -154,7 +155,7 @@ def _from_index_to_arrayname(index): """ Used internally to determine the array names. 
""" - return 'array_{}'.format(index) + return f'array_{index}' def set_projectiondata( self, @@ -218,12 +219,9 @@ def array_list_checker(array_list, array_name, orb_length): a failure """ if not all([isinstance(_, np.ndarray) for _ in array_list]): - raise exceptions.ValidationError('{} was not composed entirely of ndarrays'.format(array_name)) + raise exceptions.ValidationError(f'{array_name} was not composed entirely of ndarrays') if len(array_list) != orb_length: - raise exceptions.ValidationError( - '{} did not have the same length as the ' - 'list of orbitals'.format(array_name) - ) + raise exceptions.ValidationError(f'{array_name} did not have the same length as the list of orbitals') ############## list_of_orbitals = single_to_list(list_of_orbitals) @@ -245,7 +243,7 @@ def array_list_checker(array_list, array_name, orb_length): try: orbital_type = orbital_dict.pop('_orbital_type') except KeyError: - raise exceptions.ValidationError('No _orbital_type key found in dictionary: {}'.format(orbital_dict)) + raise exceptions.ValidationError(f'No _orbital_type key found in dictionary: {orbital_dict}') cls = OrbitalFactory(orbital_type) test_orbital = cls(**orbital_dict) list_of_orbital_dicts.append(test_orbital.get_orbital_dict()) @@ -260,7 +258,7 @@ def array_list_checker(array_list, array_name, orb_length): array_name = self._from_index_to_arrayname(i) if bands_check: self._check_projections_bands(this_projection) - self.set_array('proj_{}'.format(array_name), this_projection) + self.set_array(f'proj_{array_name}', this_projection) # verifies and sets both pdos and energy if list_of_pdos: @@ -274,8 +272,8 @@ def array_list_checker(array_list, array_name, orb_length): array_name = self._from_index_to_arrayname(i) if bands_check: self._check_projections_bands(this_projection) - self.set_array('pdos_{}'.format(array_name), this_pdos) - self.set_array('energy_{}'.format(array_name), this_energy) + self.set_array(f'pdos_{array_name}', this_pdos) + self.set_array(f'energy_{array_name}', this_energy) # verifies and sets the tags if tags is not None: diff --git a/aiida/orm/nodes/data/array/trajectory.py b/aiida/orm/nodes/data/array/trajectory.py index 4ef6903351..ce43a4452e 100644 --- a/aiida/orm/nodes/data/array/trajectory.py +++ b/aiida/orm/nodes/data/array/trajectory.py @@ -206,8 +206,7 @@ def _validate(self): # Should catch TypeErrors, ValueErrors, and KeyErrors for missing arrays except Exception as exception: raise ValidationError( - 'The TrajectoryData did not validate. ' - 'Error: {} with message {}'.format(type(exception).__name__, exception) + f'The TrajectoryData did not validate. Error: {type(exception).__name__} with message {exception}' ) @property @@ -315,7 +314,7 @@ def get_index_from_stepid(self, stepid): try: return numpy.where(self.get_stepids() == stepid)[0][0] except IndexError: - raise ValueError('{} not among the stepids'.format(stepid)) + raise ValueError(f'{stepid} not among the stepids') def get_step_data(self, index): r""" @@ -340,10 +339,7 @@ def get_step_data(self, index): :raises KeyError: if you did not store the trajectory yet. 
""" if index >= self.numsteps: - raise IndexError( - 'You have only {} steps, but you are looking beyond' - ' (index={})'.format(self.numsteps, index) - ) + raise IndexError(f'You have only {self.numsteps} steps, but you are looking beyond (index={index})') vel = self.get_velocities() if vel is not None: @@ -426,7 +422,7 @@ def _prepare_xsf(self, index=None, main_file_name=''): # pylint: disable=unused indices = list(range(self.numsteps)) if index is not None: indices = [index] - return_string = 'ANIMSTEPS {}\nCRYSTAL\n'.format(len(indices)) + return_string = f'ANIMSTEPS {len(indices)}\nCRYSTAL\n' # Do the checks once and for all here: structure = self.get_step_structure(index=0) if structure.is_alloy or structure.has_vacancies: @@ -440,15 +436,15 @@ def _prepare_xsf(self, index=None, main_file_name=''): # pylint: disable=unused nat = len(symbols) for idx in indices: - return_string += 'PRIMVEC {}\n'.format(idx + 1) + return_string += f'PRIMVEC {idx + 1}\n' for cell_vector in cells[idx]: return_string += ' '.join(['{:18.5f}'.format(i) for i in cell_vector]) return_string += '\n' - return_string += 'PRIMCOORD {}\n'.format(idx + 1) - return_string += '{} 1\n'.format(nat) + return_string += f'PRIMCOORD {idx + 1}\n' + return_string += f'{nat} 1\n' for atn, pos in zip(atomic_numbers_list, positions[idx]): try: - return_string += '{} {:18.10f} {:18.10f} {:18.10f}\n'.format(atn, pos[0], pos[1], pos[2]) + return_string += f'{atn} {pos[0]:18.10f} {pos[1]:18.10f} {pos[2]:18.10f}\n' except: print(atn, pos) raise @@ -636,9 +632,9 @@ def show_mpl_pos(self, **kwargs): # pylint: disable=too-many-locals elif colors == 'cpk': from ase.data.colors import cpk_colors as colors else: - raise InputValidationError('Unknown color spec {}'.format(colors)) + raise InputValidationError(f'Unknown color spec {colors}') if kwargs: - raise InputValidationError('Unrecognized keyword {}'.format(kwargs.keys())) + raise InputValidationError(f'Unrecognized keyword {kwargs.keys()}') if element_list is None: # If not all elements are allowed @@ -879,7 +875,7 @@ def plot_positions_XYZ( # pylint: disable=too-many-arguments,too-many-locals,in plt.xlim(*tlim) ax3 = fig.add_subplot(gridspec[2]) plt.ylabel(r'Z Position $\left[{}\right]$'.format(positions_unit)) - plt.xlabel('Time [{}]'.format(times_unit)) + plt.xlabel(f'Time [{times_unit}]') plt.xlim(*tlim) sparse_indices = np.linspace(*index_range, num=label_sparsity, dtype=int) diff --git a/aiida/orm/nodes/data/array/xy.py b/aiida/orm/nodes/data/array/xy.py index ecc0b3ee8f..db1fc9ec5d 100644 --- a/aiida/orm/nodes/data/array/xy.py +++ b/aiida/orm/nodes/data/array/xy.py @@ -99,12 +99,8 @@ def set_y(self, y_arrays, y_names, y_units): for num, (y_array, y_name, y_unit) in enumerate(zip(y_arrays, y_names, y_units)): self._arrayandname_validator(y_array, y_name, y_unit) if np.shape(y_array) != np.shape(x_array): - raise InputValidationError( - 'y_array {} did not have the ' - 'same shape has the x_array!' 
- ''.format(y_name) - ) - self.set_array('y_array_{}'.format(num), y_array) + raise InputValidationError(f'y_array {y_name} did not have the same shape has the x_array!') + self.set_array(f'y_array_{num}', y_array) # if the y_arrays pass the initial validation, sets each self.set_attribute('y_names', y_names) @@ -146,7 +142,7 @@ def get_y(self): y_arrays = [] try: for i in range(len(y_names)): - y_arrays += [self.get_array('y_array_{}'.format(i))] + y_arrays += [self.get_array(f'y_array_{i}')] except (KeyError, AttributeError): - raise NotExistent('Could not retrieve array associated with y array {}'.format(y_names[i])) + raise NotExistent(f'Could not retrieve array associated with y array {y_names[i]}') return list(zip(y_names, y_arrays, y_units)) diff --git a/aiida/orm/nodes/data/base.py b/aiida/orm/nodes/data/base.py index 001fb306fc..86858e14ce 100644 --- a/aiida/orm/nodes/data/base.py +++ b/aiida/orm/nodes/data/base.py @@ -18,7 +18,7 @@ @singledispatch def to_aiida_type(value): """Turns basic Python types (str, int, float, bool) into the corresponding AiiDA types.""" - raise TypeError('Cannot convert value of type {} to AiiDA type.'.format(type(value))) + raise TypeError(f'Cannot convert value of type {type(value)} to AiiDA type.') class BaseType(Data): @@ -48,7 +48,7 @@ def value(self, value): self.set_attribute('value', self._type(value)) # pylint: disable=no-member def __str__(self): - return super().__str__() + ' value: {}'.format(self.value) + return f'{super().__str__()} value: {self.value}' def __eq__(self, other): if isinstance(other, BaseType): diff --git a/aiida/orm/nodes/data/cif.py b/aiida/orm/nodes/data/cif.py index 963341a59f..1d058f3ad2 100644 --- a/aiida/orm/nodes/data/cif.py +++ b/aiida/orm/nodes/data/cif.py @@ -137,7 +137,7 @@ def pycifrw_from_cif(datablocks, loops=None, names=None): import CifFile from CifFile import CifBlock except ImportError as exc: - raise ImportError(str(exc) + '. You need to install the PyCifRW package.') + raise ImportError(f'{str(exc)}. 
You need to install the PyCifRW package.') if loops is None: loops = dict() @@ -151,9 +151,7 @@ def pycifrw_from_cif(datablocks, loops=None, names=None): if names and len(names) < len(datablocks): raise ValueError( - 'Not enough names supplied for ' - 'datablocks: {} (names) < ' - '{} (datablocks)'.format(len(names), len(datablocks)) + f'Not enough names supplied for datablocks: {len(names)} (names) < {len(datablocks)} (datablocks)' ) for i, values in enumerate(datablocks): name = str(i) @@ -294,7 +292,7 @@ def __init__( for left, right in CifData._SET_INCOMPATIBILITIES: if args[left] is not None and args[right] is not None: - raise ValueError('cannot pass {} and {} at the same time'.format(left, right)) + raise ValueError(f'cannot pass {left} and {right} at the same time') super().__init__(file, filename=filename, **kwargs) self.set_scan_type(scan_type or CifData._SCAN_TYPE_DEFAULT) @@ -540,7 +538,7 @@ def set_scan_type(self, scan_type): if scan_type in CifData._SCAN_TYPES: self.set_attribute('scan_type', scan_type) else: - raise ValueError('Got unknown scan_type {}'.format(scan_type)) + raise ValueError(f'Got unknown scan_type {scan_type}') def set_parse_policy(self, parse_policy): """ @@ -552,7 +550,7 @@ def set_parse_policy(self, parse_policy): if parse_policy in CifData._PARSE_POLICIES: self.set_attribute('parse_policy', parse_policy) else: - raise ValueError('Got unknown parse_policy {}'.format(parse_policy)) + raise ValueError(f'Got unknown parse_policy {parse_policy}') def get_formulae(self, mode='sum', custom_tags=None): """ @@ -563,7 +561,7 @@ def get_formulae(self, mode='sum', custom_tags=None): """ # note: If formulae are not None, they could be returned # directly (but the function is very cheap anyhow). - formula_tags = ['_chemical_formula_{}'.format(mode)] + formula_tags = [f'_chemical_formula_{mode}'] if custom_tags: if not isinstance(custom_tags, (list, tuple)): custom_tags = [custom_tags] @@ -770,9 +768,9 @@ def get_structure(self, converter='pymatgen', store=False, **kwargs): parameters = Dict(dict=kwargs) try: - convert_function = getattr(cif_tools, '_get_aiida_structure_{}_inline'.format(converter)) + convert_function = getattr(cif_tools, f'_get_aiida_structure_{converter}_inline') except AttributeError: - raise ValueError("No such converter '{}' available".format(converter)) + raise ValueError(f"No such converter '{converter}' available") result = convert_function(cif=self, parameters=parameters, metadata={'store_provenance': store}) @@ -821,4 +819,4 @@ def _validate(self): raise ValidationError("attribute 'md5' not set.") md5 = self.generate_md5() if attr_md5 != md5: - raise ValidationError("Attribute 'md5' says '{}' but '{}' was parsed instead.".format(attr_md5, md5)) + raise ValidationError(f"Attribute 'md5' says '{attr_md5}' but '{md5}' was parsed instead.") diff --git a/aiida/orm/nodes/data/code.py b/aiida/orm/nodes/data/code.py index d39ec9602f..d96924a5cf 100644 --- a/aiida/orm/nodes/data/code.py +++ b/aiida/orm/nodes/data/code.py @@ -98,7 +98,7 @@ def set_files(self, files): def __str__(self): local_str = 'Local' if self.is_local() else 'Remote' computer_str = self.computer.label - return "{} code '{}' on {}, pk: {}, uuid: {}".format(local_str, self.label, computer_str, self.pk, self.uuid) + return f"{local_str} code '{self.label}' on {computer_str}, pk: {self.pk}, uuid: {self.uuid}" def get_computer_name(self): """Get label of this code's computer. @@ -118,7 +118,7 @@ def full_label(self): Returns label of the form @. 
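Aside: a hedged sketch of the `getattr` dispatch pattern used by `get_structure` above, where an f-string assembles the converter function name and a missing attribute becomes a friendly `ValueError`. The `_Converters` class and its single method are placeholders, not the real `cif_tools` module.

class _Converters:
    """Placeholder standing in for the cif_tools module used above."""

    @staticmethod
    def _get_structure_ase_inline(cif):
        return f'converted {cif} with ase'


def get_structure(cif, converter='ase'):
    try:
        convert = getattr(_Converters, f'_get_structure_{converter}_inline')
    except AttributeError:
        raise ValueError(f"No such converter '{converter}' available")
    return convert(cif)


print(get_structure('example.cif'))    # -> converted example.cif with ase
# get_structure('example.cif', 'foo')  # -> ValueError: No such converter 'foo' available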
""" - return '{}@{}'.format(self.label, self.get_computer_label()) + return f'{self.label}@{self.get_computer_label()}' @property def label(self): @@ -151,7 +151,7 @@ def relabel(self, new_label, raise_error=True): Will remove raise_error in `v2.0.0`. Use `try/except` instead. """ # pylint: disable=unused-argument - suffix = '@{}'.format(self.computer.label) + suffix = f'@{self.computer.label}' if new_label.endswith(suffix): new_label = new_label[:-len(suffix)] @@ -162,7 +162,7 @@ def get_description(self): :return: string description of this Code instance """ - return '{}'.format(self.description) + return f'{self.description}' @classmethod def get_code_helper(cls, label, machinename=None): @@ -184,11 +184,11 @@ def get_code_helper(cls, label, machinename=None): query.append(Computer, filters={'name': machinename}, with_node='code') if query.count() == 0: - raise NotExistent("'{}' is not a valid code name.".format(label)) + raise NotExistent(f"'{label}' is not a valid code name.") elif query.count() > 1: codes = query.all(flat=True) - retstr = ("There are multiple codes with label '{}', having IDs: ".format(label)) - retstr += ', '.join(sorted([str(c.pk) for c in codes])) + '.\n' + retstr = f"There are multiple codes with label '{label}', having IDs: " + retstr += f"{', '.join(sorted([str(c.pk) for c in codes]))}.\n" retstr += ('Relabel them (using their ID), or refer to them with their ID.') raise MultipleObjectsError(retstr) else: @@ -217,9 +217,9 @@ def get(cls, pk=None, label=None, machinename=None): try: return load_code(pk=code_int) except exceptions.NotExistent: - raise ValueError('{} is not valid code pk'.format(pk)) + raise ValueError(f'{pk} is not valid code pk') except exceptions.MultipleObjectsError: - raise exceptions.MultipleObjectsError("More than one code in the DB with pk='{}'!".format(pk)) + raise exceptions.MultipleObjectsError(f"More than one code in the DB with pk='{pk}'!") # check if label (and machinename) is provided elif label is not None: @@ -258,9 +258,9 @@ def get_from_string(cls, code_string): try: return cls.get_code_helper(label, machinename) except NotExistent: - raise NotExistent('{} could not be resolved to a valid code label'.format(code_string)) + raise NotExistent(f'{code_string} could not be resolved to a valid code label') except MultipleObjectsError: - raise MultipleObjectsError('{} could not be uniquely resolved'.format(code_string)) + raise MultipleObjectsError(f'{code_string} could not be uniquely resolved') @classmethod def list_for_plugin(cls, plugin, labels=True): @@ -464,7 +464,7 @@ def get_execname(self): For remote codes, it is the absolute path to the executable. 
""" if self.is_local(): - return './{}'.format(self.get_local_executable()) + return f'./{self.get_local_executable()}' return self.get_remote_exec_path() @@ -489,7 +489,7 @@ def get_builder(self): try: process_class = CalculationFactory(plugin_name) except exceptions.EntryPointError: - raise exceptions.EntryPointError('the calculation entry point `{}` could not be loaded'.format(plugin_name)) + raise exceptions.EntryPointError(f'the calculation entry point `{plugin_name}` could not be loaded') builder = process_class.get_builder() builder.code = self diff --git a/aiida/orm/nodes/data/data.py b/aiida/orm/nodes/data/data.py index 0660bddb6c..872192b48e 100644 --- a/aiida/orm/nodes/data/data.py +++ b/aiida/orm/nodes/data/data.py @@ -105,7 +105,7 @@ def source(self, source): raise ValueError('Source must be supplied as a dictionary') unknown_attrs = tuple(set(source.keys()) - set(self._source_attributes)) if unknown_attrs: - raise KeyError('Unknown source parameters: {}'.format(', '.join(unknown_attrs))) + raise KeyError(f"Unknown source parameters: {', '.join(unknown_attrs)}") self.set_attribute('source', source) @@ -190,7 +190,7 @@ def export(self, path, fileformat=None, overwrite=False, **kwargs): raise ValueError('Path not recognized') if os.path.exists(path) and not overwrite: - raise OSError('A file was already found at {}'.format(path)) + raise OSError(f'A file was already found at {path}') if fileformat is None: extension = os.path.splitext(path)[1] @@ -211,10 +211,10 @@ def export(self, path, fileformat=None, overwrite=False, **kwargs): if not overwrite: for fname in extra_files: if os.path.exists(fname): - raise OSError('The file {} already exists, stopping.'.format(fname)) + raise OSError(f'The file {fname} already exists, stopping.') if os.path.exists(path): - raise OSError('The file {} already exists, stopping.'.format(path)) + raise OSError(f'The file {path} already exists, stopping.') for additional_fname, additional_fcontent in extra_files.items(): retlist.append(additional_fname) diff --git a/aiida/orm/nodes/data/list.py b/aiida/orm/nodes/data/list.py index 5f656d4930..5057ab63c0 100644 --- a/aiida/orm/nodes/data/list.py +++ b/aiida/orm/nodes/data/list.py @@ -44,7 +44,7 @@ def __len__(self): return len(self.get_list()) def __str__(self): - return super().__str__() + ' value: {}'.format(self.get_list()) + return f'{super().__str__()} value: {self.get_list()}' def __eq__(self, other): try: diff --git a/aiida/orm/nodes/data/orbital.py b/aiida/orm/nodes/data/orbital.py index cc97cc2ae2..92a663b17d 100644 --- a/aiida/orm/nodes/data/orbital.py +++ b/aiida/orm/nodes/data/orbital.py @@ -57,7 +57,7 @@ def get_orbitals(self, **kwargs): try: orbital_type = orbital_dict.pop('_orbital_type') except KeyError: - raise ValidationError('No _orbital_type found in: {}'.format(orbital_dict)) + raise ValidationError(f'No _orbital_type found in: {orbital_dict}') cls = OrbitalFactory(orbital_type) orbital = cls(**orbital_dict) @@ -80,7 +80,7 @@ def set_orbitals(self, orbitals): try: _orbital_type = orbital_dict['_orbital_type'] except KeyError: - raise InputValidationError('No _orbital_type found in: {}'.format(orbital_dict)) + raise InputValidationError(f'No _orbital_type found in: {orbital_dict}') orbital_dicts.append(orbital_dict) self.set_attribute('orbital_dicts', orbital_dicts) diff --git a/aiida/orm/nodes/data/singlefile.py b/aiida/orm/nodes/data/singlefile.py index ef6040c52c..17c03663ed 100644 --- a/aiida/orm/nodes/data/singlefile.py +++ b/aiida/orm/nodes/data/singlefile.py @@ -37,7 +37,7 
@@ def __init__(self, file, filename=None, **kwargs): # 'filename' argument was added to 'set_file' after 1.0.0. if 'filename' not in inspect.getfullargspec(self.set_file)[0]: warnings.warn( # pylint: disable=no-member - "Method '{}.set_file' does not support the 'filename' argument. ".format(type(self).__name__) + + f"Method '{type(self).__name__}.set_file' does not support the 'filename' argument. " + 'This will raise an exception in AiiDA 2.0.', AiidaDeprecationWarning ) @@ -107,10 +107,10 @@ def set_file(self, file, filename=None): key = os.path.basename(file) if not os.path.isabs(file): - raise ValueError('path `{}` is not absolute'.format(file)) + raise ValueError(f'path `{file}` is not absolute') if not os.path.isfile(file): - raise ValueError('path `{}` does not correspond to an existing file'.format(file)) + raise ValueError(f'path `{file}` does not correspond to an existing file') else: is_filelike = True try: @@ -152,5 +152,5 @@ def _validate(self): if [filename] != objects: raise exceptions.ValidationError( - 'respository files {} do not match the `filename` attribute {}.'.format(objects, filename) + f'respository files {objects} do not match the `filename` attribute {filename}.' ) diff --git a/aiida/orm/nodes/data/structure.py b/aiida/orm/nodes/data/structure.py index 4f4e179571..14bcd18881 100644 --- a/aiida/orm/nodes/data/structure.py +++ b/aiida/orm/nodes/data/structure.py @@ -244,8 +244,7 @@ def validate_symbols_tuple(symbols_tuple): valid = all(is_valid_symbol(sym) for sym in symbols_tuple) if not valid: raise UnsupportedSpeciesError( - 'At least one element of the symbol list {} has ' - 'not been recognized.'.format(symbols_tuple) + f'At least one element of the symbol list {symbols_tuple} has not been recognized.' ) @@ -315,13 +314,11 @@ def get_formula_from_symbol_list(_list, separator=''): multiplicity_str = str(elem[0]) if isinstance(elem[1], str): - list_str.append('{}{}'.format(elem[1], multiplicity_str)) + list_str.append(f'{elem[1]}{multiplicity_str}') elif elem[0] > 1: - list_str.append( - '({}){}'.format(get_formula_from_symbol_list(elem[1], separator=separator), multiplicity_str) - ) + list_str.append(f'({get_formula_from_symbol_list(elem[1], separator=separator)}){multiplicity_str}') else: - list_str.append('{}{}'.format(get_formula_from_symbol_list(elem[1], separator=separator), multiplicity_str)) + list_str.append(f'{get_formula_from_symbol_list(elem[1], separator=separator)}{multiplicity_str}') return separator.join(list_str) @@ -557,10 +554,10 @@ def get_symbols_string(symbols, weights): pieces = [] for symbol, weight in zip(symbols, weights): - pieces.append('{}{:4.2f}'.format(symbol, weight)) + pieces.append(f'{symbol}{weight:4.2f}') if has_vacancies(weights): - pieces.append('X{:4.2f}'.format(1. 
- sum(weights))) - return '{{{}}}'.format(''.join(sorted(pieces))) + pieces.append(f'X{1.0 - sum(weights):4.2f}') + return f"{{{''.join(sorted(pieces))}}}" def has_vacancies(weights): @@ -694,7 +691,7 @@ def atom_kinds_to_html(atom_kind): weight = element[1] if element[1] != '' else None if weight is not None: - html_formula_pieces.append(species + '' + weight + '') + html_formula_pieces.append(f'{species}{weight}') else: html_formula_pieces.append(species) @@ -743,7 +740,7 @@ def __init__( for left, right in self._set_incompatibilities: if args[left] is not None and args[right] is not None: - raise ValueError('cannot pass {} and {} at the same time'.format(left, right)) + raise ValueError(f'cannot pass {left} and {right} at the same time') super().__init__(**kwargs) @@ -799,7 +796,7 @@ def get_dimensionality(self): elif dim == 3: retdict['value'] = np.dot(cell[0], np.cross(cell[1], cell[2])) else: - raise ValueError('Dimensionality {} must be <= 3'.format(dim)) + raise ValueError(f'Dimensionality {dim} must be <= 3') return retdict @@ -826,9 +823,9 @@ def set_pymatgen(self, obj, **kwargs): """ typestr = type(obj).__name__ try: - func = getattr(self, 'set_pymatgen_{}'.format(typestr.lower())) + func = getattr(self, f'set_pymatgen_{typestr.lower()}') except AttributeError: - raise AttributeError("Converter for '{}' to AiiDA structure does not exist".format(typestr)) + raise AttributeError(f"Converter for '{typestr}' to AiiDA structure does not exist") func(obj, **kwargs) def set_pymatgen_molecule(self, mol, margin=5): @@ -943,18 +940,18 @@ def _validate(self): try: _get_valid_cell(self.cell) except ValueError as exc: - raise ValidationError('Invalid cell: {}'.format(exc)) + raise ValidationError(f'Invalid cell: {exc}') try: get_valid_pbc(self.pbc) except ValueError as exc: - raise ValidationError('Invalid periodic boundary conditions: {}'.format(exc)) + raise ValidationError(f'Invalid periodic boundary conditions: {exc}') try: # This will try to create the kinds objects kinds = self.kinds except ValueError as exc: - raise ValidationError('Unable to validate the kinds: {}'.format(exc)) + raise ValidationError(f'Unable to validate the kinds: {exc}') from collections import Counter @@ -970,20 +967,16 @@ def _validate(self): # This will try to create the sites objects sites = self.sites except ValueError as exc: - raise ValidationError('Unable to validate the sites: {}'.format(exc)) + raise ValidationError(f'Unable to validate the sites: {exc}') for site in sites: if site.kind_name not in [k.name for k in kinds]: - raise ValidationError( - 'A site has kind {}, but no specie with that name exists' - ''.format(site.kind_name) - ) + raise ValidationError(f'A site has kind {site.kind_name}, but no specie with that name exists') kinds_without_sites = (set(k.name for k in kinds) - set(s.kind_name for s in sites)) if kinds_without_sites: raise ValidationError( - 'The following kinds are defined, but there ' - 'are no sites with that kind: {}'.format(list(kinds_without_sites)) + f'The following kinds are defined, but there are no sites with that kind: {list(kinds_without_sites)}' ) def _prepare_xsf(self, main_file_name=''): # pylint: disable=unused-argument @@ -1000,11 +993,11 @@ def _prepare_xsf(self, main_file_name=''): # pylint: disable=unused-argument return_string += ' '.join(['%18.10f' % i for i in cell_vector]) return_string += '\n' return_string += 'PRIMCOORD 1\n' - return_string += '%d 1\n' % len(sites) + return_string += f'{int(len(sites))} 1\n' for site in sites: # I checked above that it is 
not an alloy, therefore I take the # first symbol - return_string += '%s ' % _atomic_numbers[self.get_kind(site.kind_name).symbols[0]] + return_string += f'{_atomic_numbers[self.get_kind(site.kind_name).symbols[0]]} ' return_string += '%18.10f %18.10f %18.10f\n' % tuple(site.position) return return_string.encode('utf-8'), {} @@ -1093,7 +1086,7 @@ def _prepare_xyz(self, main_file_name=''): # pylint: disable=unused-argument sites = self.sites cell = self.cell - return_list = ['{}'.format(len(sites))] + return_list = [f'{len(sites)}'] return_list.append( 'Lattice="{} {} {} {} {} {} {} {} {}" pbc="{} {} {}"'.format( cell[0][0], cell[0][1], cell[0][2], cell[1][0], cell[1][1], cell[1][2], cell[2][0], cell[2][1], @@ -1356,7 +1349,7 @@ def append_kind(self, kind): new_kind = Kind(kind=kind) # So we make a copy if kind.name in [k.name for k in self.kinds]: - raise ValueError('A kind with the same name ({}) already exists.'.format(kind.name)) + raise ValueError(f'A kind with the same name ({kind.name}) already exists.') # If here, no exceptions have been raised, so I add the site. self.attributes.setdefault('kinds', []).append(new_kind.get_raw()) @@ -1469,7 +1462,7 @@ def append_atom(self, **kwargs): simplename = kind.name counter = 1 while kind.name in existing_names: - kind.name = '{}{}'.format(simplename, counter) + kind.name = f'{simplename}{counter}' counter += 1 self.append_kind(kind) else: # 'name' was specified @@ -1568,7 +1561,7 @@ def get_kind(self, kind_name): try: return kinds_dict[kind_name] except KeyError: - raise ValueError("Kind name '{}' unknown".format(kind_name)) + raise ValueError(f"Kind name '{kind_name}' unknown") def get_kind_names(self): """ @@ -1657,10 +1650,10 @@ def reset_sites_positions(self, new_positions, conserve_particle=True): try: this_pos = [float(j) for j in new_positions[i]] except ValueError: - raise ValueError('Expecting a list of floats. Found instead {}'.format(new_positions[i])) + raise ValueError(f'Expecting a list of floats. Found instead {new_positions[i]}') if len(this_pos) != 3: - raise ValueError('Expecting a list of lists of length 3. found instead {}'.format(len(this_pos))) + raise ValueError(f'Expecting a list of lists of length 3. found instead {len(this_pos)}') # now append this Site to the new_site list. 
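Aside on the triple-brace expression in `get_symbols_string` above: in an f-string, doubled braces are emitted literally and only the inner pair delimits the expression, so the pieces end up wrapped in literal curly brackets. The sample symbol/weight pieces below are made up.

pieces = ['Ba0.80', 'X0.20']  # made-up symbol/weight pieces
label = f"{{{''.join(sorted(pieces))}}}"
assert label == '{Ba0.80X0.20}'

# The pre-conversion str.format() spelling produces the same string:
assert '{{{}}}'.format(''.join(sorted(pieces))) == label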
new_site = Site(site=self.sites[i]) # So we make a copy @@ -1787,9 +1780,9 @@ def get_cif(self, converter='ase', store=False, **kwargs): param = Dict(dict=kwargs) try: - conv_f = getattr(structure_tools, '_get_cif_{}_inline'.format(converter)) + conv_f = getattr(structure_tools, f'_get_cif_{converter}_inline') except AttributeError: - raise ValueError("No such converter '{}' available".format(converter)) + raise ValueError(f"No such converter '{converter}' available") ret_dict = conv_f(struct=self, parameters=param, metadata={'store_provenance': store}) return ret_dict['cif'] @@ -1903,7 +1896,7 @@ def _get_object_pymatgen_structure(self, **kwargs): additional_kwargs['site_properties'] = {'kind_name': self.get_site_kindnames()} if kwargs: - raise ValueError('Unrecognized parameters passed to pymatgen converter: {}'.format(kwargs.keys())) + raise ValueError(f'Unrecognized parameters passed to pymatgen converter: {kwargs.keys()}') positions = [list(x.position) for x in self.sites] return Structure(self.cell, species, positions, coords_are_cartesian=True, **additional_kwargs) @@ -1924,7 +1917,7 @@ def _get_object_pymatgen_molecule(self, **kwargs): from pymatgen import Molecule if kwargs: - raise ValueError('Unrecognized parameters passed to pymatgen converter: {}'.format(kwargs.keys())) + raise ValueError(f'Unrecognized parameters passed to pymatgen converter: {kwargs.keys()}') species = [] for site in self.sites: @@ -2060,7 +2053,7 @@ def __init__(self, **kwargs): except KeyError: self.set_automatic_kind_name() if kwargs: - raise ValueError('Unrecognized parameters passed to Kind constructor: {}'.format(kwargs.keys())) + raise ValueError(f'Unrecognized parameters passed to Kind constructor: {kwargs.keys()}') def get_raw(self): """ @@ -2129,7 +2122,7 @@ def set_automatic_kind_name(self, tag=None): if tag is None: self.name = name_string else: - self.name = '{}{}'.format(name_string, tag) + self.name = f'{name_string}{tag}' def compare_with(self, other_kind): """ @@ -2158,20 +2151,18 @@ def compare_with(self, other_kind): for i in range(len(self.symbols)): if self.symbols[i] != other_kind.symbols[i]: return ( - False, 'Symbol at position {:d} are different ' - '({} vs. {})'.format(i + 1, self.symbols[i], other_kind.symbols[i]) + False, f'Symbol at position {i + 1:d} are different ({self.symbols[i]} vs. {other_kind.symbols[i]})' ) # Check weights (assuming length of weights and of symbols have same # length, which should be always true for i in range(len(self.weights)): if self.weights[i] != other_kind.weights[i]: return ( - False, 'Weight at position {:d} are different ' - '({} vs. {})'.format(i + 1, self.weights[i], other_kind.weights[i]) + False, f'Weight at position {i + 1:d} are different ({self.weights[i]} vs. {other_kind.weights[i]})' ) # Check masses if abs(self.mass - other_kind.mass) > _MASS_THRESHOLD: - return (False, 'Masses are different ({} vs. {})'.format(self.mass, other_kind.mass)) + return (False, f'Masses are different ({self.mass} vs. 
{other_kind.mass})') if self._internal_tag != other_kind._internal_tag: # pylint: disable=protected-access return ( @@ -2252,7 +2243,7 @@ def symbol(self): if len(self._symbols) == 1: return self._symbols[0] - raise ValueError('This kind has more than one symbol (it is an alloy): {}'.format(self._symbols)) + raise ValueError(f'This kind has more than one symbol (it is an alloy): {self._symbols}') @property def symbols(self): @@ -2320,11 +2311,11 @@ def has_vacancies(self): return has_vacancies(self._weights) def __repr__(self): - return '<{}: {}>'.format(self.__class__.__name__, str(self)) + return f'<{self.__class__.__name__}: {str(self)}>' def __str__(self): symbol = self.get_symbols_string() - return "name '{}', symbol '{}'".format(self.name, symbol) + return f"name '{self.name}', symbol '{symbol}'" class Site: @@ -2363,7 +2354,7 @@ def __init__(self, **kwargs): self.kind_name = raw['kind_name'] self.position = raw['position'] except KeyError as exc: - raise ValueError('Invalid raw object, it does not contain any key {}'.format(exc.args[0])) + raise ValueError(f'Invalid raw object, it does not contain any key {exc.args[0]}') except TypeError: raise ValueError('Invalid raw object, it is not a dictionary') @@ -2372,9 +2363,9 @@ def __init__(self, **kwargs): self.kind_name = kwargs.pop('kind_name') self.position = kwargs.pop('position') except KeyError as exc: - raise ValueError('You need to specify {}'.format(exc.args[0])) + raise ValueError(f'You need to specify {exc.args[0]}') if kwargs: - raise ValueError('Unrecognized parameters: {}'.format(kwargs.keys)) + raise ValueError(f'Unrecognized parameters: {kwargs.keys}') def get_raw(self): """ @@ -2449,7 +2440,7 @@ def get_ase(self, kinds): found = True break if not found: - raise ValueError("No kind '{}' has been found in the list of kinds".format(self.kind_name)) + raise ValueError(f"No kind '{self.kind_name}' has been found in the list of kinds") if kind.is_alloy or kind.has_vacancies: raise ValueError('Cannot convert to ASE if the kind represents an alloy or it has vacancies.') @@ -2499,10 +2490,10 @@ def position(self, value): self._position = internal_pos def __repr__(self): - return '<{}: {}>'.format(self.__class__.__name__, str(self)) + return f'<{self.__class__.__name__}: {str(self)}>' def __str__(self): - return "kind name '{}' @ {},{},{}".format(self.kind_name, self.position[0], self.position[1], self.position[2]) + return f"kind name '{self.kind_name}' @ {self.position[0]},{self.position[1]},{self.position[2]}" # get_structuredata_from_qeinput has been moved to: diff --git a/aiida/orm/nodes/data/upf.py b/aiida/orm/nodes/data/upf.py index 8b7a8c5c0e..0c2a481b75 100644 --- a/aiida/orm/nodes/data/upf.py +++ b/aiida/orm/nodes/data/upf.py @@ -56,7 +56,7 @@ def get_pseudos_from_structure(structure, family_name): if isinstance(node, UpfData): if node.element in family_pseudos: raise MultipleObjectsError( - 'More than one UPF for element {} found in family {}'.format(node.element, family_name) + f'More than one UPF for element {node.element} found in family {family_name}' ) family_pseudos[node.element] = node @@ -64,7 +64,7 @@ def get_pseudos_from_structure(structure, family_name): try: pseudo_list[kind.name] = family_pseudos[kind.symbol] except KeyError: - raise NotExistent('No UPF for element {} found in family {}'.format(kind.symbol, family_name)) + raise NotExistent(f'No UPF for element {kind.symbol} found in family {family_name}') return pseudo_list @@ -131,11 +131,7 @@ def upload_upf_family(folder, group_label, group_description, 
stop_if_existing=T pseudo_and_created.append((pseudo, created)) else: if stop_if_existing: - raise ValueError( - 'A UPF with identical MD5 to ' - ' {} cannot be added with stop_if_existing' - ''.format(filename) - ) + raise ValueError(f'A UPF with identical MD5 to {filename} cannot be added with stop_if_existing') existing_upf = existing_upf[0] pseudo_and_created.append((existing_upf, False)) @@ -157,7 +153,7 @@ def upload_upf_family(folder, group_label, group_description, stop_if_existing=T if not len(elements_names) == len(set(elements_names)): duplicates = {x for x in elements_names if elements_names.count(x) > 1} duplicates_string = ', '.join(i for i in duplicates) - raise UniquenessError('More than one UPF found for the elements: ' + duplicates_string + '.') + raise UniquenessError(f'More than one UPF found for the elements: {duplicates_string}.') # At this point, save the group, if still unstored if group_created: @@ -168,9 +164,9 @@ def upload_upf_family(folder, group_label, group_description, stop_if_existing=T if created: pseudo.store() - AIIDA_LOGGER.debug('New node {} created for file {}'.format(pseudo.uuid, pseudo.filename)) + AIIDA_LOGGER.debug(f'New node {pseudo.uuid} created for file {pseudo.filename}') else: - AIIDA_LOGGER.debug('Reusing node {} for file {}'.format(pseudo.uuid, pseudo.filename)) + AIIDA_LOGGER.debug(f'Reusing node {pseudo.uuid} for file {pseudo.filename}') # Add elements to the group all togetehr group.add_nodes([pseudo for pseudo, created in pseudo_and_created]) @@ -207,9 +203,9 @@ def parse_upf(fname, check_filename=True): match = REGEX_UPF_VERSION.search(upf_contents) if match: version = match.group('version') - AIIDA_LOGGER.debug('Version found: {} for file {}'.format(version, fname)) + AIIDA_LOGGER.debug(f'Version found: {version} for file {fname}') else: - AIIDA_LOGGER.debug('Assuming version 1 for file {}'.format(fname)) + AIIDA_LOGGER.debug(f'Assuming version 1 for file {fname}') version = '1' parsed_data['version'] = version @@ -218,10 +214,7 @@ def parse_upf(fname, check_filename=True): except ValueError: # If the version string does not contain a dot, fallback # to version 1 - AIIDA_LOGGER.debug( - 'Falling back to version 1 for file {}, ' - "version string '{}' unrecognized".format(fname, version) - ) + AIIDA_LOGGER.debug(f'Falling back to version 1 for file {fname} version string {version} unrecognized') version_major = 1 element = None @@ -235,10 +228,10 @@ def parse_upf(fname, check_filename=True): element = match.group('element_name') if element is None: - raise ParsingError('Unable to find the element of UPF {}'.format(fname)) + raise ParsingError(f'Unable to find the element of UPF {fname}') element = element.capitalize() if element not in _valid_symbols: - raise ParsingError('Unknown element symbol {} for file {}'.format(element, fname)) + raise ParsingError(f'Unknown element symbol {element} for file {fname}') if check_filename: if not os.path.basename(fname).lower().startswith(element.lower()): raise ParsingError( @@ -322,7 +315,7 @@ def store(self, *args, **kwargs): # pylint: disable=signature-differs try: element = parsed_data['element'] except KeyError: - raise ParsingError('Could not parse the element from the UPF file {}'.format(self.filename)) + raise ParsingError(f'Could not parse the element from the UPF file {self.filename}') self.set_attribute('element', str(element)) self.set_attribute('md5', md5) @@ -364,7 +357,7 @@ def set_file(self, file, filename=None): try: element = parsed_data['element'] except KeyError: - raise 
ParsingError("No 'element' parsed in the UPF file {}; unable to store".format(self.filename)) + raise ParsingError(f"No 'element' parsed in the UPF file {self.filename}; unable to store") super().set_file(file, filename=filename) @@ -414,7 +407,7 @@ def _validate(self): try: element = parsed_data['element'] except KeyError: - raise ValidationError("No 'element' could be parsed in the UPF {}".format(self.filename)) + raise ValidationError(f"No 'element' could be parsed in the UPF {self.filename}") try: attr_element = self.get_attribute('element') @@ -427,12 +420,10 @@ def _validate(self): raise ValidationError("attribute 'md5' not set.") if attr_element != element: - raise ValidationError( - "Attribute 'element' says '{}' but '{}' was parsed instead.".format(attr_element, element) - ) + raise ValidationError(f"Attribute 'element' says '{attr_element}' but '{element}' was parsed instead.") if attr_md5 != md5: - raise ValidationError("Attribute 'md5' says '{}' but '{}' was parsed instead.".format(attr_md5, md5)) + raise ValidationError(f"Attribute 'md5' says '{attr_md5}' but '{md5}' was parsed instead.") def _prepare_upf(self, main_file_name=''): """ diff --git a/aiida/orm/nodes/node.py b/aiida/orm/nodes/node.py index f43db44332..3f4e12b13a 100644 --- a/aiida/orm/nodes/node.py +++ b/aiida/orm/nodes/node.py @@ -114,14 +114,10 @@ def delete(self, node_id): return if node.get_incoming().all(): - raise exceptions.InvalidOperation( - 'cannot delete Node<{}> because it has incoming links'.format(node.pk) - ) + raise exceptions.InvalidOperation(f'cannot delete Node<{node.pk}> because it has incoming links') if node.get_outgoing().all(): - raise exceptions.InvalidOperation( - 'cannot delete Node<{}> because it has outgoing links'.format(node.pk) - ) + raise exceptions.InvalidOperation(f'cannot delete Node<{node.pk}> because it has outgoing links') repository = node._repository # pylint: disable=protected-access self._backend.nodes.delete(node_id) @@ -174,13 +170,13 @@ def __init__(self, backend=None, user=None, computer=None, **kwargs): super().__init__(backend_entity) def __repr__(self): - return '<{}: {}>'.format(self.__class__.__name__, str(self)) + return f'<{self.__class__.__name__}: {str(self)}>' def __str__(self): if not self.is_stored: - return 'uuid: {} (unstored)'.format(self.uuid) + return f'uuid: {self.uuid} (unstored)' - return 'uuid: {} (pk: {})'.format(self.uuid, self.pk) + return f'uuid: {self.uuid} (pk: {self.pk})' def __copy__(self): """Copying a Node is not supported in general, but only for the Data sub class.""" @@ -227,7 +223,7 @@ def validate_storability(self): raise exceptions.StoringNotAllowed(self._unstorable_message) if not is_registered_entry_point(self.__module__, self.__class__.__name__, groups=('aiida.node', 'aiida.data')): - msg = 'class `{}:{}` does not have registered entry point'.format(self.__module__, self.__class__.__name__) + msg = f'class `{self.__module__}:{self.__class__.__name__}` does not have registered entry point' raise exceptions.StoringNotAllowed(msg) @classproperty @@ -767,8 +763,8 @@ def validate_outgoing(self, target, link_type, link_label): # pylint: disable=u :raise TypeError: if `target` is not a Node instance or `link_type` is not a `LinkType` enum :raise ValueError: if the proposed link is invalid """ - type_check(link_type, LinkType, 'link_type should be a LinkType enum but got: {}'.format(type(link_type))) - type_check(target, Node, 'target should be a `Node` instance but got: {}'.format(type(target))) + type_check(link_type, LinkType, 
f'link_type should be a LinkType enum but got: {type(link_type)}') + type_check(target, Node, f'target should be a `Node` instance but got: {type(target)}') def _add_incoming_cache(self, source, link_type, link_label): """Add an incoming link to the cache. @@ -784,7 +780,7 @@ def _add_incoming_cache(self, source, link_type, link_label): link_triple = LinkTriple(source, link_type, link_label) if link_triple in self._incoming_cache: - raise exceptions.UniquenessError('the link triple {} is already present in the cache'.format(link_triple)) + raise exceptions.UniquenessError(f'the link triple {link_triple} is already present in the cache') self._incoming_cache.append(link_triple) @@ -806,7 +802,7 @@ def get_stored_link_triples( link_type = (link_type,) if link_type and not all([isinstance(t, LinkType) for t in link_type]): - raise TypeError('link_type should be a LinkType or tuple of LinkType: got {}'.format(link_type)) + raise TypeError(f'link_type should be a LinkType or tuple of LinkType: got {link_type}') node_class = node_class or Node node_filters = {'id': {'==': self.id}} @@ -870,7 +866,7 @@ def get_incoming(self, node_class=None, link_type=(), link_label_filter=None, on if link_triple in link_triples: raise exceptions.InternalError( - 'Node<{}> has both a stored and cached link triple {}'.format(self.pk, link_triple) + f'Node<{self.pk}> has both a stored and cached link triple {link_triple}' ) if not link_type or link_triple.link_type in link_type: @@ -916,7 +912,7 @@ def store_all(self, with_transaction=True, use_cache=None): ) if self.is_stored: - raise exceptions.ModificationNotAllowed('Node<{}> is already stored'.format(self.id)) + raise exceptions.ModificationNotAllowed(f'Node<{self.id}> is already stored') # For each node of a cached incoming link, check that all its incoming links are stored for link_triple in self._incoming_cache: @@ -1009,7 +1005,7 @@ def verify_are_parents_stored(self): for link_triple in self._incoming_cache: if not link_triple.node.is_stored: raise exceptions.ModificationNotAllowed( - 'Cannot store because source node of link triple {} is not stored'.format(link_triple) + f'Cannot store because source node of link triple {link_triple} is not stored' ) def _store_from_cache(self, cache_node, with_transaction): diff --git a/aiida/orm/nodes/process/calculation/calcjob.py b/aiida/orm/nodes/process/calculation/calcjob.py index 0d32d26459..311ccf8a6b 100644 --- a/aiida/orm/nodes/process/calculation/calcjob.py +++ b/aiida/orm/nodes/process/calculation/calcjob.py @@ -70,7 +70,7 @@ def tools(self): except exceptions.EntryPointError as exception: self._tools = CalculationTools(self) self.logger.warning( - 'could not load the calculation tools entry point {}: {}'.format(entry_point.name, exception) + f'could not load the calculation tools entry point {entry_point.name}: {exception}' ) return self._tools @@ -230,7 +230,7 @@ def set_state(self, state): :raise: ValueError if state is invalid """ if not isinstance(state, CalcJobState): - raise ValueError('{} is not a valid CalcJobState'.format(state)) + raise ValueError(f'{state} is not a valid CalcJobState') self.set_attribute(self.CALC_JOB_STATE_KEY, state.value) @@ -273,7 +273,7 @@ def _validate_retrieval_directive(directives): # Otherwise, it has to be a tuple of length three with specific requirements if not isinstance(directive, (tuple, list)) or len(directive) != 3: - raise ValueError('invalid directive, not a list or tuple of length three: {}'.format(directive)) + raise ValueError(f'invalid directive, not a list 
or tuple of length three: {directive}') if not isinstance(directive[0], str): raise ValueError('invalid directive, first element has to be a string representing remote path') @@ -385,7 +385,7 @@ def set_scheduler_state(self, state): from aiida.schedulers.datastructures import JobState if not isinstance(state, JobState): - raise ValueError('scheduler state should be an instance of JobState, got: {}'.format(state)) + raise ValueError(f'scheduler state should be an instance of JobState, got: {state}') self.set_attribute(self.SCHEDULER_STATE_KEY, state.value) self.set_attribute(self.SCHEDULER_LAST_CHECK_TIME_KEY, timezone.datetime_to_isoformat(timezone.now())) diff --git a/aiida/orm/nodes/process/process.py b/aiida/orm/nodes/process/process.py index 890645a1d0..e78b6a4b8d 100644 --- a/aiida/orm/nodes/process/process.py +++ b/aiida/orm/nodes/process/process.py @@ -51,9 +51,9 @@ class ProcessNode(Sealable, Node): def __str__(self): base = super().__str__() if self.process_type: - return '{} ({})'.format(base, self.process_type) + return f'{base} ({self.process_type})' - return '{}'.format(base) + return f'{base}' @classproperty def _updatable_attributes(cls): @@ -105,7 +105,7 @@ def process_class(self): from aiida.plugins.entry_point import load_entry_point_from_string if not self.process_type: - raise ValueError('no process type for CalcJobNode<{}>: cannot recreate process class'.format(self.pk)) + raise ValueError(f'no process type for CalcJobNode<{self.pk}>: cannot recreate process class') try: process_class = load_entry_point_from_string(self.process_type) @@ -123,9 +123,7 @@ def process_class(self): process_class = getattr(module, class_name) except (ValueError, ImportError): raise ValueError( - 'could not load process class CalcJobNode<{}> given its `process_type`: {}'.format( - self.pk, self.process_type - ) + f'could not load process class CalcJobNode<{self.pk}> given its `process_type`: {self.process_type}' ) return process_class @@ -306,7 +304,7 @@ def set_exit_status(self, status): status = status.value if not isinstance(status, int): - raise ValueError('exit status has to be an integer, got {}'.format(status)) + raise ValueError(f'exit status has to be an integer, got {status}') return self.set_attribute(self.EXIT_STATUS_KEY, status) @@ -329,7 +327,7 @@ def set_exit_message(self, message): return None if not isinstance(message, str): - raise ValueError('exit message has to be a string type, got {}'.format(type(message))) + raise ValueError(f'exit message has to be a string type, got {type(message)}') return self.set_attribute(self.EXIT_MESSAGE_KEY, message) @@ -354,7 +352,7 @@ def set_exception(self, exception): :param exception: the exception message """ if not isinstance(exception, str): - raise ValueError('exception message has to be a string type, got {}'.format(type(exception))) + raise ValueError(f'exception message has to be a string type, got {type(exception)}') return self.set_attribute(self.EXCEPTION_KEY, exception) @@ -481,9 +479,7 @@ def is_valid_cache(self): try: process_class = self.process_class except ValueError as exc: - self.logger.warning( - "Not considering {} for caching, '{!r}' when accessing its process class.".format(self, exc) - ) + self.logger.warning(f"Not considering {self} for caching, '{exc!r}' when accessing its process class.") return False # For process functions, the `process_class` does not have an # is_valid_cache attribute diff --git a/aiida/orm/querybuilder.py b/aiida/orm/querybuilder.py index 5ac0c2f682..dab51feee5 100644 --- 
a/aiida/orm/querybuilder.py +++ b/aiida/orm/querybuilder.py @@ -147,7 +147,7 @@ def get_querybuilder_classifiers_from_cls(cls, query): # pylint: disable=invali ormclass = query.Node else: - raise InputValidationError('I do not know what to do with {}'.format(cls)) + raise InputValidationError(f'I do not know what to do with {cls}') if ormclass == query.Node: is_valid_node_type_string(classifiers['ormclass_type_string'], raise_on_false=True) @@ -212,7 +212,7 @@ def get_node_type_filter(classifiers, subclassing): else: # Note: the query_type_string always ends with a dot. This ensures that "like {str}%" matches *only* # the query type string - filters = {'like': '{}%'.format(escape_for_sql_like(get_query_type_from_type_string(value)))} + filters = {'like': f'{escape_for_sql_like(get_query_type_from_type_string(value))}%'} return filters @@ -307,7 +307,7 @@ def get_group_type_filter(classifiers, subclassing): # possible so we perform the switch here in code. if value == 'core': value = '' - filters = {'like': '{}%'.format(escape_for_sql_like(value))} + filters = {'like': f'{escape_for_sql_like(value)}%'} return filters @@ -486,7 +486,7 @@ def __str__(self): elif engine.startswith('postgre'): from sqlalchemy.dialects import postgresql as mydialect else: - raise ConfigurationError('Unknown DB engine: {}'.format(engine)) + raise ConfigurationError(f'Unknown DB engine: {engine}') que = self.get_query() return str(que.statement.compile(compile_kwargs={'literal_binds': True}, dialect=mydialect.dialect())) @@ -574,7 +574,7 @@ def get_tag_from_type(classifiers): basetag = get_tag_from_type(classifiers) tags_used = self.tag_to_alias_map.keys() for i in range(1, 100): - tag = '{}_{}'.format(basetag, i) + tag = f'{basetag}_{i}' if tag not in tags_used: return tag @@ -660,9 +660,7 @@ def append( # the class or the type (not both) if cls is not None and entity_type is not None: - raise InputValidationError( - 'You cannot specify both a class ({}) and a entity_type ({})'.format(cls, entity_type) - ) + raise InputValidationError(f'You cannot specify both a class ({cls}) and a entity_type ({entity_type})') if cls is None and entity_type is None: raise InputValidationError('You need to specify at least a class or a entity_type') @@ -672,18 +670,18 @@ def append( if isinstance(cls, (tuple, list, set)): for sub_cls in cls: if not inspect_isclass(sub_cls): - raise InputValidationError("{} was passed with kw 'cls', but is not a class".format(sub_cls)) + raise InputValidationError(f"{sub_cls} was passed with kw 'cls', but is not a class") else: if not inspect_isclass(cls): - raise InputValidationError("{} was passed with kw 'cls', but is not a class".format(cls)) + raise InputValidationError(f"{cls} was passed with kw 'cls', but is not a class") elif entity_type is not None: if isinstance(entity_type, (tuple, list, set)): for sub_type in entity_type: if not isinstance(sub_type, str): - raise InputValidationError('{} was passed as entity_type, but is not a string'.format(sub_type)) + raise InputValidationError(f'{sub_type} was passed as entity_type, but is not a string') else: if not isinstance(entity_type, str): - raise InputValidationError('{} was passed as entity_type, but is not a string'.format(entity_type)) + raise InputValidationError(f'{entity_type} was passed as entity_type, but is not a string') ormclass, classifiers = self._get_ormclass(cls, entity_type) @@ -692,12 +690,10 @@ def append( if tag: if self._EDGE_TAG_DELIM in tag: raise InputValidationError( - 'tag cannot contain {}\n' - 'since this is used 
as a delimiter for links' - ''.format(self._EDGE_TAG_DELIM) + f'tag cannot contain {self._EDGE_TAG_DELIM}\nsince this is used as a delimiter for links' ) if tag in self.tag_to_alias_map.keys(): - raise InputValidationError('This tag ({}) is already in use'.format(tag)) + raise InputValidationError(f'This tag ({tag}) is already in use') else: tag = self._get_unique_tag(classifiers) @@ -828,9 +824,7 @@ def append( joining_value = self._path[-abs(val)]['tag'] except IndexError as exc: raise InputValidationError( - 'You have specified a non-existent entity with\n' - 'direction={}\n' - '{}\n'.format(joining_value, exc) + f'You have specified a non-existent entity with\ndirection={joining_value}\n{exc}\n' ) else: joining_value = self._get_tag_from_specification(val) @@ -861,7 +855,7 @@ def append( edge_tag = edge_destination_tag + self._EDGE_TAG_DELIM + tag else: if edge_tag in self.tag_to_alias_map.keys(): - raise InputValidationError('The tag {} is already in use'.format(edge_tag)) + raise InputValidationError(f'The tag {edge_tag} is already in use') if self._debug: print('I have chosen', edge_tag) @@ -982,9 +976,7 @@ def order_by(self, order_by): pass else: raise InputValidationError( - 'Cannot deal with input to order_by {}\n' - 'of type{}' - '\n'.format(item_to_order_by, type(item_to_order_by)) + f'Cannot deal with input to order_by {item_to_order_by}\nof type{type(item_to_order_by)}\n' ) for entityname, orderspec in item_to_order_by.items(): # if somebody specifies eg {'node':{'id':'asc'}} @@ -1057,7 +1049,7 @@ def _process_filters(filters): if isinstance(value, entities.Entity): # Convert to be the id of the joined entity because we can't query # for the object instance directly - processed_filters['{}_id'.format(key)] = value.id + processed_filters[f'{key}_id'] = value.id else: processed_filters[key] = value @@ -1184,21 +1176,18 @@ def add_projection(self, tag_spec, projection_spec): elif isinstance(projection, str): _thisprojection = {projection: {}} else: - raise InputValidationError('Cannot deal with projection specification {}\n'.format(projection)) + raise InputValidationError(f'Cannot deal with projection specification {projection}\n') for spec in _thisprojection.values(): if not isinstance(spec, dict): raise InputValidationError( - '\nThe value of a key-value pair in a projection\n' - 'has to be a dictionary\n' - 'You gave: {}\n' - ''.format(spec) + f'\nThe value of a key-value pair in a projection\nhas to be a dictionary\nYou gave: {spec}\n' ) for key, val in spec.items(): if key not in self._VALID_PROJECTION_KEYS: - raise InputValidationError('{} is not a valid key {}'.format(key, self._VALID_PROJECTION_KEYS)) + raise InputValidationError(f'{key} is not a valid key {self._VALID_PROJECTION_KEYS}') if not isinstance(val, str): - raise InputValidationError('{} has to be a string'.format(val)) + raise InputValidationError(f'{val} has to be a string') _projections.append(_thisprojection) if self._debug: print(' projections have become:', _projections) @@ -1245,7 +1234,7 @@ def _add_to_projections(self, alias, projectable_entity_name, cast=None, func=No elif func == 'count': entity_to_project = sa_func.count(entity_to_project) else: - raise InputValidationError('\nInvalid function specification {}'.format(func)) + raise InputValidationError(f'\nInvalid function specification {func}') self._query = self._query.add_columns(entity_to_project) def _build_projections(self, tag, items_to_project=None): @@ -1289,8 +1278,7 @@ def _get_tag_from_specification(self, specification): tag = 
specification else: raise InputValidationError( - 'tag {} is not among my known tags\n' - 'My tags are: {}'.format(specification, self.tag_to_alias_map.keys()) + f'tag {specification} is not among my known tags\nMy tags are: {self.tag_to_alias_map.keys()}' ) else: if specification in self._cls_to_tag_map.keys(): @@ -1809,9 +1797,7 @@ def _get_connecting_node(self, index, joining_keyword=None, joining_value=None, func = self._get_function_map()[calling_entity][joining_keyword] except KeyError: raise InputValidationError( - "'{}' is not a valid joining keyword for a '{}' type entity".format( - joining_keyword, calling_entity - ) + f"'{joining_keyword}' is not a valid joining keyword for a '{calling_entity}' type entity" ) if isinstance(joining_value, int): @@ -2132,7 +2118,7 @@ def inject_query(self, query): """ from sqlalchemy.orm import Query if not isinstance(query, Query): - raise InputValidationError('{} must be a subclass of {}'.format(query, Query)) + raise InputValidationError(f'{query} must be a subclass of {Query}') self._query = query self._injected = True diff --git a/aiida/orm/users.py b/aiida/orm/users.py index 47d714bb62..c7c3b3565c 100644 --- a/aiida/orm/users.py +++ b/aiida/orm/users.py @@ -139,13 +139,13 @@ def get_full_name(self): :return: the user full name """ if self.first_name and self.last_name: - full_name = '{} {} ({})'.format(self.first_name, self.last_name, self.email) + full_name = f'{self.first_name} {self.last_name} ({self.email})' elif self.first_name: - full_name = '{} ({})'.format(self.first_name, self.email) + full_name = f'{self.first_name} ({self.email})' elif self.last_name: - full_name = '{} ({})'.format(self.last_name, self.email) + full_name = f'{self.last_name} ({self.email})' else: - full_name = '{}'.format(self.email) + full_name = f'{self.email}' return full_name diff --git a/aiida/orm/utils/__init__.py b/aiida/orm/utils/__init__.py index 7d0ce1ecac..f703884d0e 100644 --- a/aiida/orm/utils/__init__.py +++ b/aiida/orm/utils/__init__.py @@ -38,7 +38,7 @@ def load_entity( from aiida.orm.utils.loaders import OrmEntityLoader, IdentifierType if entity_loader is None or not issubclass(entity_loader, OrmEntityLoader): - raise TypeError('entity_loader should be a sub class of {}'.format(type(OrmEntityLoader))) + raise TypeError(f'entity_loader should be a sub class of {type(OrmEntityLoader)}') inputs_provided = [value is not None for value in (identifier, pk, uuid, label)].count(True) diff --git a/aiida/orm/utils/_repository.py b/aiida/orm/utils/_repository.py index aca52a0c08..7b4c400acf 100644 --- a/aiida/orm/utils/_repository.py +++ b/aiida/orm/utils/_repository.py @@ -126,7 +126,7 @@ def get_object(self, key): if os.path.isfile(filepath): return File(filename, FileType.FILE) - raise IOError('object {} does not exist'.format(key)) + raise IOError(f'object {key} does not exist') def get_object_content(self, key, mode='r'): """Return the content of a object identified by key. 
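Aside: a sketch of the "escape, then append %" pattern behind the LIKE filters built in the querybuilder changes earlier in this patch. The `escape_for_sql_like` helper below is a stand-in with the usual wildcard-escaping behaviour, not AiiDA's implementation, and the node type string is just an example.

def escape_for_sql_like(value: str) -> str:
    # Stand-in: escape SQL LIKE wildcards so they match literally.
    return value.replace('\\', '\\\\').replace('%', '\\%').replace('_', '\\_')

node_type = 'data.dict.Dict.'  # example type string ending in a dot
filters = {'like': f'{escape_for_sql_like(node_type)}%'}
assert filters == {'like': 'data.dict.Dict.%'}  # trailing % matches any suffix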
diff --git a/aiida/orm/utils/builders/code.py b/aiida/orm/utils/builders/code.py index 0d31560f8c..935806b3dd 100644 --- a/aiida/orm/utils/builders/code.py +++ b/aiida/orm/utils/builders/code.py @@ -71,7 +71,7 @@ def new(self): # Complain if there are keys that are passed but not used if passed_keys - used: raise self.CodeValidationError( - 'Unknown parameters passed to the CodeBuilder: {}'.format(', '.join(sorted(passed_keys - used))) + f"Unknown parameters passed to the CodeBuilder: {', '.join(sorted(passed_keys - used))}" ) return code @@ -120,7 +120,7 @@ def __getattr__(self, key): try: return self._code_spec[key] except KeyError: - raise KeyError("Attribute '{}' not set".format(key)) + raise KeyError(f"Attribute '{key}' not set") return None def _get(self, key): @@ -170,7 +170,7 @@ def validate_code_type(self): """Make sure the code type is set correctly""" if self._get('code_type') and self.code_type not in self.CodeType: raise self.CodeValidationError( - 'invalid code type: must be one of {}, not {}'.format(list(self.CodeType), self.code_type) + f'invalid code type: must be one of {list(self.CodeType)}, not {self.code_type}' ) def validate_upload(self): @@ -182,7 +182,7 @@ def validate_upload(self): if self._get('remote_abs_path'): messages.append('invalid option for store-and-upload code: "remote_abs_path"') if messages: - raise self.CodeValidationError('{}'.format(messages)) + raise self.CodeValidationError(f'{messages}') def validate_installed(self): """If the code is on-computer, catch invalid store-and-upload attributes""" @@ -193,7 +193,7 @@ def validate_installed(self): if self._get('code_rel_path'): messages.append('invalid options for on-computer code: "code_rel_path"') if messages: - raise self.CodeValidationError('{}'.format(messages)) + raise self.CodeValidationError(f'{messages}') class CodeValidationError(Exception): """ @@ -211,7 +211,7 @@ def __str__(self): return self.msg def __repr__(self): - return ''.format(self) + return f'' def is_local(self): """Analogous to Code.is_local()""" diff --git a/aiida/orm/utils/builders/computer.py b/aiida/orm/utils/builders/computer.py index a9acbafee5..8abb3f3742 100644 --- a/aiida/orm/utils/builders/computer.py +++ b/aiida/orm/utils/builders/computer.py @@ -108,7 +108,7 @@ def new(self): # Complain if there are keys that are passed but not used if passed_keys - used: raise self.ComputerValidationError( - 'Unknown parameters passed to the ComputerBuilder: {}'.format(', '.join(sorted(passed_keys - used))) + f"Unknown parameters passed to the ComputerBuilder: {', '.join(sorted(passed_keys - used))}" ) return computer @@ -119,7 +119,7 @@ def __getattr__(self, key): try: return self._computer_spec[key] except KeyError: - raise self.ComputerValidationError(key + ' not set') + raise self.ComputerValidationError(f'{key} not set') return None def _get(self, key): @@ -172,4 +172,4 @@ def __str__(self): return self.msg def __repr__(self): - return ''.format(self) + return f'' diff --git a/aiida/orm/utils/calcjob.py b/aiida/orm/utils/calcjob.py index 984d3dbe43..9c61542ac1 100644 --- a/aiida/orm/utils/calcjob.py +++ b/aiida/orm/utils/calcjob.py @@ -52,18 +52,18 @@ def _load_results(self): try: process_class = self._node.process_class except ValueError as exception: - raise ValueError('cannot load results because process class cannot be loaded: {}'.format(exception)) + raise ValueError(f'cannot load results because process class cannot be loaded: {exception}') process_spec = process_class.spec() default_output_node_label = 
process_spec.default_output_node if default_output_node_label is None: - raise ValueError('cannot load results as {} does not specify a default output node'.format(process_class)) + raise ValueError(f'cannot load results as {process_class} does not specify a default output node') try: default_output_node = self.node.get_outgoing().get_node_by_label(default_output_node_label) except exceptions.NotExistent as exception: - raise ValueError('cannot load results as the default node could not be retrieved: {}'.format(exception)) + raise ValueError(f'cannot load results as the default node could not be retrieved: {exception}') self._result_node = default_output_node self._results = default_output_node.get_dict() @@ -100,7 +100,7 @@ def __getattr__(self, name): except ValueError as exception: raise AttributeError from exception except KeyError: - raise AttributeError("Default result node<{}> does not contain key '{}'".format(self._result_node.pk, name)) + raise AttributeError(f"Default result node<{self._result_node.pk}> does not contain key '{name}'") def __getitem__(self, name): """Return an attribute from the results dictionary. @@ -114,4 +114,4 @@ def __getitem__(self, name): except ValueError as exception: raise KeyError from exception except KeyError: - raise KeyError("Default result node<{}> does not contain key '{}'".format(self._result_node.pk, name)) + raise KeyError(f"Default result node<{self._result_node.pk}> does not contain key '{name}'") diff --git a/aiida/orm/utils/links.py b/aiida/orm/utils/links.py index a5fbc08c83..155d1cac29 100644 --- a/aiida/orm/utils/links.py +++ b/aiida/orm/utils/links.py @@ -116,9 +116,9 @@ def validate_link(source, target, link_type, link_label): from aiida.common.links import LinkType, validate_link_label from aiida.orm import Node, Data, CalculationNode, WorkflowNode - type_check(link_type, LinkType, 'link_type should be a LinkType enum but got: {}'.format(type(link_type))) - type_check(source, Node, 'source should be a `Node` but got: {}'.format(type(source))) - type_check(target, Node, 'target should be a `Node` but got: {}'.format(type(target))) + type_check(link_type, LinkType, f'link_type should be a LinkType enum but got: {type(link_type)}') + type_check(source, Node, f'source should be a `Node` but got: {type(source)}') + type_check(target, Node, f'target should be a `Node` but got: {type(target)}') if source.uuid is None or target.uuid is None: raise ValueError('source or target node does not have a UUID') @@ -129,7 +129,7 @@ def validate_link(source, target, link_type, link_label): try: validate_link_label(link_label) except ValueError as exception: - raise ValueError('invalid link label `{}`: {}'.format(link_label, exception)) + raise ValueError(f'invalid link label `{link_label}`: {exception}') # For each link type, define a tuple that defines the valid types for the source and target node, as well as # the outdegree and indegree character. 
If the degree is `unique` that means that there can only be a single @@ -148,7 +148,7 @@ def validate_link(source, target, link_type, link_label): type_source, type_target, outdegree, indegree = link_mapping[link_type] if not isinstance(source, type_source) or not isinstance(target, type_target): - raise ValueError('cannot add a {} link from {} to {}'.format(link_type, type(source), type(target))) + raise ValueError(f'cannot add a {link_type} link from {type(source)} to {type(target)}') if outdegree == 'unique_triple' or indegree == 'unique_triple': # For a `unique_triple` degree we just have to check if an identical triple already exist, either in the cache @@ -157,13 +157,12 @@ def validate_link(source, target, link_type, link_label): # If the outdegree is `unique` there cannot already be any other outgoing link of that type if outdegree == 'unique' and source.get_outgoing(link_type=link_type, only_uuid=True).all(): - raise ValueError('node<{}> already has an outgoing {} link'.format(source.uuid, link_type)) + raise ValueError(f'node<{source.uuid}> already has an outgoing {link_type} link') # If the outdegree is `unique_pair`, then the link labels for outgoing links of this type should be unique elif outdegree == 'unique_pair' and source.get_outgoing( link_type=link_type, only_uuid=True, link_label_filter=link_label).all(): - raise ValueError('node<{}> already has an outgoing {} link with label "{}"'.format( - source.uuid, link_type, link_label)) + raise ValueError(f'node<{source.uuid}> already has an outgoing {link_type} link with label "{link_label}"') # If the outdegree is `unique_triple`, then the link triples of link type, link label and target should be unique elif outdegree == 'unique_triple' and duplicate_link_triple: @@ -172,13 +171,12 @@ def validate_link(source, target, link_type, link_label): # If the indegree is `unique` there cannot already be any other incoming links of that type if indegree == 'unique' and target.get_incoming(link_type=link_type, only_uuid=True).all(): - raise ValueError('node<{}> already has an incoming {} link'.format(target.uuid, link_type)) + raise ValueError(f'node<{target.uuid}> already has an incoming {link_type} link') # If the indegree is `unique_pair`, then the link labels for incoming links of this type should be unique elif indegree == 'unique_pair' and target.get_incoming( link_type=link_type, link_label_filter=link_label, only_uuid=True).all(): - raise ValueError('node<{}> already has an incoming {} link with label "{}"'.format( - target.uuid, link_type, link_label)) + raise ValueError(f'node<{target.uuid}> already has an incoming {link_type} link with label "{link_label}"') # If the indegree is `unique_triple`, then the link triples of link type, link label and source should be unique elif indegree == 'unique_triple' and duplicate_link_triple: @@ -295,11 +293,11 @@ def get_node_by_label(self, label): matching_entry = entry.node else: raise exceptions.MultipleObjectsError( - 'more than one neighbor with the label {} found'.format(label) + f'more than one neighbor with the label {label} found' ) if matching_entry is None: - raise exceptions.NotExistent('no neighbor with the label {} found'.format(label)) + raise exceptions.NotExistent(f'no neighbor with the label {label} found') return matching_entry @@ -332,7 +330,7 @@ def nested(self, sort=True): # Insert the node at the given port name if port_name in current_namespace: - raise KeyError("duplicate label '{}' in namespace '{}'".format(port_name, '.'.join(port_namespaces))) + raise 
KeyError(f"duplicate label '{port_name}' in namespace '{'.'.join(port_namespaces)}'") current_namespace[port_name] = entry.node diff --git a/aiida/orm/utils/loaders.py b/aiida/orm/utils/loaders.py index ecf08e6215..92646f2227 100644 --- a/aiida/orm/utils/loaders.py +++ b/aiida/orm/utils/loaders.py @@ -42,7 +42,7 @@ def get_loader(orm_class): if issubclass(orm_class, Node): return NodeEntityLoader - raise ValueError('no OrmEntityLoader available for {}'.format(orm_class)) + raise ValueError(f'no OrmEntityLoader available for {orm_class}') class IdentifierType(Enum): @@ -123,7 +123,7 @@ def _get_query_builder_uuid_identifier(cls, identifier, classes, query_with_dash if query_with_dashes: for dash_pos in [20, 16, 12, 8]: if len(uuid) > dash_pos: - uuid = '{}-{}'.format(uuid[:dash_pos], uuid[dash_pos:]) + uuid = f'{uuid[:dash_pos]}-{uuid[dash_pos:]}' builder = QueryBuilder() builder.append(cls=classes, tag='entity', project=['*']) @@ -132,7 +132,7 @@ def _get_query_builder_uuid_identifier(cls, identifier, classes, query_with_dash try: UUID(uuid) except ValueError: - builder.add_filter('entity', {'uuid': {'like': '{}%'.format(uuid)}}) + builder.add_filter('entity', {'uuid': {'like': f'{uuid}%'}}) else: builder.add_filter('entity', {'uuid': uuid}) @@ -212,10 +212,10 @@ def load_entity(cls, identifier, identifier_type=None, sub_classes=None, query_w try: entity = builder.one()[0] except MultipleObjectsError: - error = 'multiple {} entries found with {}<{}>'.format(classes, identifier_type, identifier) + error = f'multiple {classes} entries found with {identifier_type}<{identifier}>' raise MultipleObjectsError(error) except NotExistent as exception: - error = 'no {} found with {}<{}>: {}'.format(classes, identifier_type, identifier, exception) + error = f'no {classes} found with {identifier_type}<{identifier}>: {exception}' raise NotExistent(error) return entity @@ -237,11 +237,11 @@ def get_query_classes(cls, sub_classes=None): return (cls.orm_base_class,) if not isinstance(sub_classes, tuple): - raise TypeError('sub_classes should be a tuple: {}'.format(sub_classes)) + raise TypeError(f'sub_classes should be a tuple: {sub_classes}') for sub_class in sub_classes: if not issubclass(sub_class, cls.orm_base_class): - raise ValueError('{} is not a sub class of the base orm class {}'.format(sub_class, cls.orm_base_class)) + raise ValueError(f'{sub_class} is not a sub class of the base orm class {cls.orm_base_class}') return sub_classes @@ -339,7 +339,7 @@ def _get_query_builder_label_identifier(cls, identifier, classes, operator='==', from aiida.common.escaping import escape_for_sql_like if operator == 'like': - identifier = escape_for_sql_like(identifier) + '%' + identifier = f'{escape_for_sql_like(identifier)}%' builder = QueryBuilder() builder.append(cls=classes, tag='process', project=project, filters={'label': {operator: identifier}}) @@ -379,7 +379,7 @@ def _get_query_builder_label_identifier(cls, identifier, classes, operator='==', from aiida.common.escaping import escape_for_sql_like if operator == 'like': - identifier = escape_for_sql_like(identifier) + '%' + identifier = f'{escape_for_sql_like(identifier)}%' builder = QueryBuilder() builder.append(cls=classes, tag='calculation', project=project, filters={'label': {operator: identifier}}) @@ -419,7 +419,7 @@ def _get_query_builder_label_identifier(cls, identifier, classes, operator='==', from aiida.common.escaping import escape_for_sql_like if operator == 'like': - identifier = escape_for_sql_like(identifier) + '%' + identifier = 
f'{escape_for_sql_like(identifier)}%' builder = QueryBuilder() builder.append(cls=classes, tag='workflow', project=project, filters={'label': {operator: identifier}}) @@ -465,7 +465,7 @@ def _get_query_builder_label_identifier(cls, identifier, classes, operator='==', raise ValueError('the identifier needs to be a string') if operator == 'like': - identifier = escape_for_sql_like(identifier) + '%' + identifier = f'{escape_for_sql_like(identifier)}%' builder = QueryBuilder() builder.append(cls=classes, tag='code', project=project, filters={'label': {operator: identifier}}) @@ -508,7 +508,7 @@ def _get_query_builder_label_identifier(cls, identifier, classes, operator='==', from aiida.common.escaping import escape_for_sql_like if operator == 'like': - identifier = escape_for_sql_like(identifier) + '%' + identifier = f'{escape_for_sql_like(identifier)}%' builder = QueryBuilder() builder.append(cls=classes, tag='computer', project=project, filters={'name': {operator: identifier}}) @@ -548,7 +548,7 @@ def _get_query_builder_label_identifier(cls, identifier, classes, operator='==', from aiida.common.escaping import escape_for_sql_like if operator == 'like': - identifier = escape_for_sql_like(identifier) + '%' + identifier = f'{escape_for_sql_like(identifier)}%' builder = QueryBuilder() builder.append(cls=classes, tag='calculation', project=project, filters={'label': {operator: identifier}}) @@ -588,7 +588,7 @@ def _get_query_builder_label_identifier(cls, identifier, classes, operator='==', from aiida.common.escaping import escape_for_sql_like if operator == 'like': - identifier = escape_for_sql_like(identifier) + '%' + identifier = f'{escape_for_sql_like(identifier)}%' builder = QueryBuilder() builder.append(cls=classes, tag='group', project=project, filters={'label': {operator: identifier}}) @@ -628,7 +628,7 @@ def _get_query_builder_label_identifier(cls, identifier, classes, operator='==', from aiida.common.escaping import escape_for_sql_like if operator == 'like': - identifier = escape_for_sql_like(identifier) + '%' + identifier = f'{escape_for_sql_like(identifier)}%' builder = QueryBuilder() builder.append(cls=classes, tag='node', project=project, filters={'label': {operator: identifier}}) diff --git a/aiida/orm/utils/managers.py b/aiida/orm/utils/managers.py index 1ea42f6219..10068a6467 100644 --- a/aiida/orm/utils/managers.py +++ b/aiida/orm/utils/managers.py @@ -85,11 +85,8 @@ def __getattr__(self, name): # Note: in order for TAB-completion to work, we need to raise an exception that also inherits from # `AttributeError`, so that `getattr(node.inputs, 'some_label', some_default)` returns `some_default`. # Otherwise, the exception is not caught by `getattr` and is propagated, instead of returning the default. - raise NotExistentAttributeError( - "Node '{}' does not have an {}put with link label '{}'".format( - self._node.pk, 'in' if self._incoming else 'out', name - ) - ) + prefix = 'input' if self._incoming else 'output' + raise NotExistentAttributeError(f"Node<{self._node.pk}> does not have an {prefix} with link label '{name}'") def __getitem__(self, name): """ @@ -103,20 +100,16 @@ def __getitem__(self, name): # Note: in order for this class to behave as a dictionary, we raise an exception that also inherits from # `KeyError` - in this way, users can use the standard construct `try/except KeyError` and this will behave # like a standard dictionary. 
- raise NotExistentKeyError( - "Node '{}' does not have an {}put with link label '{}'".format( - self._node.pk, 'in' if self._incoming else 'out', name - ) - ) + prefix = 'input' if self._incoming else 'output' + raise NotExistentKeyError(f"Node<{self._node.pk}> does not have an {prefix} with link label '{name}'") def __str__(self): """Return a string representation of the manager""" - return 'Manager for {} {} links for node pk={}'.format( - 'incoming' if self._incoming else 'outgoing', self._link_type.value.upper(), self._node.pk - ) + prefix = 'incoming' if self._incoming else 'outgoing' + return f'Manager for {prefix} {self._link_type.value.upper()} links for node pk={self._node.pk}' def __repr__(self): - return '<{}: {}>'.format(self.__class__.__name__, str(self)) + return f'<{self.__class__.__name__}: {str(self)}>' class AttributeManager: diff --git a/aiida/orm/utils/mixins.py b/aiida/orm/utils/mixins.py index e65a744675..3c12048fb6 100644 --- a/aiida/orm/utils/mixins.py +++ b/aiida/orm/utils/mixins.py @@ -176,7 +176,7 @@ def set_attribute(self, key, value): raise exceptions.ModificationNotAllowed('attributes of a sealed node are immutable') if self.is_stored and key not in self._updatable_attributes: # pylint: disable=unsupported-membership-test - raise exceptions.ModificationNotAllowed('`{}` is not an updatable attribute'.format(key)) + raise exceptions.ModificationNotAllowed(f'`{key}` is not an updatable attribute') self.backend_entity.set_attribute(key, value) @@ -193,6 +193,6 @@ def delete_attribute(self, key): raise exceptions.ModificationNotAllowed('attributes of a sealed node are immutable') if self.is_stored and key not in self._updatable_attributes: # pylint: disable=unsupported-membership-test - raise exceptions.ModificationNotAllowed('`{}` is not an updatable attribute'.format(key)) + raise exceptions.ModificationNotAllowed(f'`{key}` is not an updatable attribute') self.backend_entity.delete_attribute(key) diff --git a/aiida/orm/utils/node.py b/aiida/orm/utils/node.py index 337ea59ce5..8d01d2e154 100644 --- a/aiida/orm/utils/node.py +++ b/aiida/orm/utils/node.py @@ -41,7 +41,7 @@ def load_node_class(type_string): return Data if not type_string.endswith('.'): - raise exceptions.DbContentError('The type string `{}` is invalid'.format(type_string)) + raise exceptions.DbContentError(f'The type string `{type_string}` is invalid') try: base_path = type_string.rsplit('.', 2)[0] @@ -71,7 +71,7 @@ def load_node_class(type_string): # node then would fail miserably. This is now no longer allowed, but we need a fallback for existing cases, which # should be rare. We fallback on `Data` and not `Node` because bare node instances are also not storable and so the # logic of the ORM is not well defined for a loaded instance of the base `Node` class. - warnings.warn('unknown type string `{}`, falling back onto `Data` class'.format(type_string)) # pylint: disable=no-member + warnings.warn(f'unknown type string `{type_string}`, falling back onto `Data` class') # pylint: disable=no-member return Data @@ -92,11 +92,11 @@ def get_type_string_from_class(class_module, class_name): # If we can reverse engineer an entry point group and name, we're dealing with an external class if group and entry_point: module_base_path = ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP[group] - type_string = '{}.{}.{}.'.format(module_base_path, entry_point.name, class_name) + type_string = f'{module_base_path}.{entry_point.name}.{class_name}.' 
# Otherwise we are dealing with an internal class else: - type_string = '{}.{}.'.format(class_module, class_name) + type_string = f'{class_module}.{class_name}.' prefixes = ('aiida.orm.nodes.',) @@ -129,7 +129,7 @@ def is_valid_node_type_string(type_string, raise_on_false=False): # as well as the usual type strings like 'data.parameter.ParameterData.' if type_string.count('.') == 1 or not type_string.endswith('.'): if raise_on_false: - raise exceptions.DbContentError('The type string {} is invalid'.format(type_string)) + raise exceptions.DbContentError(f'The type string {type_string} is invalid') return False return True @@ -150,7 +150,7 @@ def get_query_type_from_type_string(type_string): return '' type_path = type_string.rsplit('.', 2)[0] - type_string = type_path + '.' + type_string = f'{type_path}.' return type_string @@ -160,7 +160,7 @@ class AbstractNodeMeta(ABCMeta): def __new__(cls, name, bases, namespace, **kwargs): newcls = ABCMeta.__new__(cls, name, bases, namespace, **kwargs) # pylint: disable=too-many-function-args - newcls._logger = logging.getLogger('{}.{}'.format(namespace['__module__'], name)) + newcls._logger = logging.getLogger(f"{namespace['__module__']}.{name}") # Set the plugin type string and query type string based on the plugin type string newcls._plugin_type_string = get_type_string_from_class(namespace['__module__'], name) # pylint: disable=protected-access diff --git a/aiida/orm/utils/serialize.py b/aiida/orm/utils/serialize.py index 23aa68348b..ae1ebf49dc 100644 --- a/aiida/orm/utils/serialize.py +++ b/aiida/orm/utils/serialize.py @@ -40,8 +40,8 @@ def represent_node(dumper, node): :return: the representation """ if not node.is_stored: - raise ValueError('node {}<{}> cannot be represented because it is not stored'.format(type(node), node.uuid)) - return dumper.represent_scalar(_NODE_TAG, '%s' % node.uuid) + raise ValueError(f'node {type(node)}<{node.uuid}> cannot be represented because it is not stored') + return dumper.represent_scalar(_NODE_TAG, f'{node.uuid}') def node_constructor(loader, node): @@ -65,8 +65,8 @@ def represent_group(dumper, group): :return: the representation """ if not group.is_stored: - raise ValueError('group {} cannot be represented because it is not stored'.format(group)) - return dumper.represent_scalar(_GROUP_TAG, '%s' % group.uuid) + raise ValueError(f'group {group} cannot be represented because it is not stored') + return dumper.represent_scalar(_GROUP_TAG, f'{group.uuid}') def group_constructor(loader, group): @@ -90,8 +90,8 @@ def represent_computer(dumper, computer): :return: the representation """ if not computer.is_stored: - raise ValueError('computer {} cannot be represented because it is not stored'.format(computer)) - return dumper.represent_scalar(_COMPUTER_TAG, '%s' % computer.uuid) + raise ValueError(f'computer {computer} cannot be represented because it is not stored') + return dumper.represent_scalar(_COMPUTER_TAG, f'{computer.uuid}') def computer_constructor(loader, computer): diff --git a/aiida/parsers/parser.py b/aiida/parsers/parser.py index 627ac452c7..e74f91e435 100644 --- a/aiida/parsers/parser.py +++ b/aiida/parsers/parser.py @@ -79,7 +79,7 @@ def out(self, link_label, node): :raises aiida.common.ModificationNotAllowed: if an output node was already registered with the same link label """ if link_label in self._outputs: - raise exceptions.ModificationNotAllowed('the output {} already exists'.format(link_label)) + raise exceptions.ModificationNotAllowed(f'the output {link_label} already exists') 
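# A minimal sketch (assumed usage, hypothetical names) of the duplicate-label guard touched by
# the hunk above; only the Parser.out(link_label, node) signature comes from this file, the
# Dict payload and profile setup are illustrative assumptions:
from aiida import load_profile
from aiida.common import exceptions
from aiida.orm import Dict

load_profile()  # assumes a configured AiiDA profile is available

def register_twice(parser):
    """Registering the same link label twice should raise ModificationNotAllowed."""
    parser.out('results', Dict(dict={'value': 1}))  # first registration succeeds
    try:
        parser.out('results', Dict(dict={'value': 2}))  # duplicate label
    except exceptions.ModificationNotAllowed as exc:
        print(exc)  # -> 'the output results already exists'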
self._outputs[link_label] = node def get_outputs_for_parsing(self): diff --git a/aiida/parsers/plugins/templatereplacer/doubler.py b/aiida/parsers/plugins/templatereplacer/doubler.py index 9b66f22d51..45b54eeb7d 100644 --- a/aiida/parsers/plugins/templatereplacer/doubler.py +++ b/aiida/parsers/plugins/templatereplacer/doubler.py @@ -31,7 +31,7 @@ def parse(self, **kwargs): with output_folder.open(output_file, 'r') as handle: result = self.parse_stdout(handle) except (OSError, IOError): - self.logger.exception('unable to parse the output for CalcJobNode<{}>'.format(self.node.pk)) + self.logger.exception(f'unable to parse the output for CalcJobNode<{self.node.pk}>') return self.exit_codes.ERROR_READING_OUTPUT_FILE output_dict = {'value': result, 'retrieved_temporary_files': []} diff --git a/aiida/plugins/entry_point.py b/aiida/plugins/entry_point.py index 46e4bf3c7e..a8b40a9049 100644 --- a/aiida/plugins/entry_point.py +++ b/aiida/plugins/entry_point.py @@ -108,11 +108,11 @@ def format_entry_point_string(group, name, fmt=EntryPointFormat.FULL): raise TypeError('fmt should be an instance of EntryPointFormat') if fmt == EntryPointFormat.FULL: - return '{}{}{}'.format(group, ENTRY_POINT_STRING_SEPARATOR, name) + return f'{group}{ENTRY_POINT_STRING_SEPARATOR}{name}' if fmt == EntryPointFormat.PARTIAL: - return '{}{}{}'.format(group[len(ENTRY_POINT_GROUP_PREFIX):], ENTRY_POINT_STRING_SEPARATOR, name) + return f'{group[len(ENTRY_POINT_GROUP_PREFIX):]}{ENTRY_POINT_STRING_SEPARATOR}{name}' if fmt == EntryPointFormat.MINIMAL: - return '{}'.format(name) + return f'{name}' raise ValueError('invalid EntryPointFormat') @@ -204,7 +204,7 @@ def load_entry_point(group, name): try: loaded_entry_point = entry_point.load() except ImportError: - raise LoadingEntryPointError("Failed to load entry point '{}':\n{}".format(name, traceback.format_exc())) + raise LoadingEntryPointError(f"Failed to load entry point '{name}':\n{traceback.format_exc()}") return loaded_entry_point diff --git a/aiida/plugins/utils.py b/aiida/plugins/utils.py index 5b5f131226..b2c427a533 100644 --- a/aiida/plugins/utils.py +++ b/aiida/plugins/utils.py @@ -60,13 +60,13 @@ def get_version_info(self, plugin): parent_module_name = plugin.__module__.split('.')[0] parent_module = import_module(parent_module_name) except (AttributeError, IndexError, ImportError): - self.logger.debug('could not determine the top level module for plugin: {}'.format(plugin)) + self.logger.debug(f'could not determine the top level module for plugin: {plugin}') return self._cache[plugin] try: version_plugin = parent_module.__version__ except AttributeError: - self.logger.debug('parent module does not define `__version__` attribute for plugin: {}'.format(plugin)) + self.logger.debug(f'parent module does not define `__version__` attribute for plugin: {plugin}') return self._cache[plugin] self._cache[plugin][KEY_VERSION_ROOT][KEY_VERSION_PLUGIN] = version_plugin diff --git a/aiida/restapi/common/identifiers.py b/aiida/restapi/common/identifiers.py index 4fd1d5ff5d..5173a4ed25 100644 --- a/aiida/restapi/common/identifiers.py +++ b/aiida/restapi/common/identifiers.py @@ -53,15 +53,11 @@ def validate_full_type(full_type): if FULL_TYPE_CONCATENATOR not in full_type: raise ValueError( - 'full type `{}` does not include the required concatenator symbol `{}`.'.format( - full_type, FULL_TYPE_CONCATENATOR - ) + f'full type `{full_type}` does not include the required concatenator symbol `{FULL_TYPE_CONCATENATOR}`.' 
) elif full_type.count(FULL_TYPE_CONCATENATOR) > 1: raise ValueError( - 'full type `{}` includes the concatenator symbol `{}` more than once.'.format( - full_type, FULL_TYPE_CONCATENATOR - ) + f'full type `{full_type}` includes the concatenator symbol `{FULL_TYPE_CONCATENATOR}` more than once.' ) @@ -78,7 +74,7 @@ def construct_full_type(node_type, process_type): if process_type is None: process_type = '' - return '{}{}{}'.format(node_type, FULL_TYPE_CONCATENATOR, process_type) + return f'{node_type}{FULL_TYPE_CONCATENATOR}{process_type}' def get_full_type_filters(full_type): @@ -96,10 +92,10 @@ def get_full_type_filters(full_type): for entry in (node_type, process_type): if entry.count(LIKE_OPERATOR_CHARACTER) > 1: - raise ValueError('full type component `{}` contained more than one like-operator character'.format(entry)) + raise ValueError(f'full type component `{entry}` contained more than one like-operator character') if LIKE_OPERATOR_CHARACTER in entry and entry[-1] != LIKE_OPERATOR_CHARACTER: - raise ValueError('like-operator character in full type component `{}` is not at the end'.format(entry)) + raise ValueError(f'like-operator character in full type component `{entry}` is not at the end') if LIKE_OPERATOR_CHARACTER in node_type: # Remove the trailing `LIKE_OPERATOR_CHARACTER`, escape the string and reattach the character @@ -144,7 +140,7 @@ def load_entry_point_from_full_type(full_type): try: return load_entry_point_from_string(process_type) except EntryPointError: - raise EntryPointError('could not load entry point `{}`'.format(process_type)) + raise EntryPointError(f'could not load entry point `{process_type}`') elif node_type.startswith(data_prefix): @@ -154,7 +150,7 @@ def load_entry_point_from_full_type(full_type): try: return load_entry_point('aiida.data', entry_point_name) except EntryPointError: - raise EntryPointError('could not load entry point `{}`'.format(process_type)) + raise EntryPointError(f'could not load entry point `{process_type}`') # Here we are dealing with a `ProcessNode` with a `process_type` that is not an entry point string. # Which means it is most likely a full module path (the fallback option) and we cannot necessarily load the @@ -233,7 +229,7 @@ def _infer_full_type(self, full_type): full_type = full_type_template.format(plugin_name=plugin_name) return full_type - full_type += '.{}{}'.format(LIKE_OPERATOR_CHARACTER, FULL_TYPE_CONCATENATOR) + full_type += f'.{LIKE_OPERATOR_CHARACTER}{FULL_TYPE_CONCATENATOR}' if full_type.startswith('process.'): full_type += LIKE_OPERATOR_CHARACTER @@ -290,7 +286,7 @@ def create_namespace(self, name, **kwargs): :raises: ValueError if any sub namespace is occupied by a non-Namespace port """ if not isinstance(name, str): - raise ValueError('name has to be a string type, not {}'.format(type(name))) + raise ValueError(f'name has to be a string type, not {type(name)}') if not name: raise ValueError('name cannot be an empty string') @@ -298,7 +294,7 @@ def create_namespace(self, name, **kwargs): namespace = name.split(self.namespace_separator) port_name = namespace.pop(0) - path = '{}{}{}'.format(self._path, self.namespace_separator, port_name) + path = f'{self._path}{self.namespace_separator}{port_name}' # If this is True, the (sub) port namespace does not yet exist, so we create it if port_name not in self: @@ -323,7 +319,7 @@ def create_namespace(self, name, **kwargs): # namespace is the "concrete" version of the namespace, so we add the leaf version to the namespace. 
elif not self[port_name].is_leaf and not namespace: kwargs['is_leaf'] = True - self[port_name][port_name] = self.__class__(port_name, path='{}.{}'.format(path, port_name), **kwargs) + self[port_name][port_name] = self.__class__(port_name, path=f'{path}.{port_name}', **kwargs) # If there is still `namespace` left, we create the next namespace if namespace: diff --git a/aiida/restapi/common/utils.py b/aiida/restapi/common/utils.py index 8303b7648e..f0ed01f6cc 100644 --- a/aiida/restapi/common/utils.py +++ b/aiida/restapi/common/utils.py @@ -137,7 +137,7 @@ def strip_api_prefix(self, path): if path.startswith(self.prefix): return path[len(self.prefix):] - raise ValidationError('path has to start with {}'.format(self.prefix)) + raise ValidationError(f'path has to start with {self.prefix}') @staticmethod def split_path(path): @@ -219,7 +219,7 @@ def parse_path(self, path_string, parse_pk_uuid=None): query_type = path.pop(0) elif path[0] in ['repo']: path.pop(0) - query_type = 'repo_' + path.pop(0) + query_type = f'repo_{path.pop(0)}' if not path: return (resource_type, page, node_id, query_type) @@ -313,8 +313,7 @@ def paginate(self, page, perpage, total_count): # and next page if page > last_page or page < 1: raise RestInputValidationError( - 'Non existent page requested. The ' - 'page range is [{} : {}]'.format(first_page, last_page) + f'Non existent page requested. The page range is [{first_page} : {last_page}]' ) limit = perpage @@ -383,8 +382,7 @@ def split_url(url): def make_rel_url(rel, page): new_path_elems = path_elems + ['page', str(page)] - return '<' + '/'.join(new_path_elems) + \ - question_mark + query_string + '>; rel={}, '.format(rel) + return f"<{'/'.join(new_path_elems)}{question_mark}{query_string}>; rel={rel}, " ## Setting non-mandatory parameters # set links to related pages @@ -705,7 +703,7 @@ def parse_query_string(self, query_string): ## Define grammar # key types - key = Word(alphas + '_', alphanums + '_') + key = Word(f'{alphas}_', f'{alphanums}_') # operators operator = ( Literal('=like=') | Literal('=ilike=') | Literal('=in=') | Literal('=notin=') | Literal('=') | @@ -842,7 +840,7 @@ def list_routes(): continue methods = ','.join(rule.methods) - line = urllib.parse.unquote('{:15s} {:20s} {}'.format(rule.endpoint, methods, rule)) + line = urllib.parse.unquote(f'{rule.endpoint:15s} {methods:20s} {rule}') output.append(line) return sorted(set(output)) diff --git a/aiida/restapi/resources.py b/aiida/restapi/resources.py index 1856064d1e..06a6a41228 100644 --- a/aiida/restapi/resources.py +++ b/aiida/restapi/resources.py @@ -122,7 +122,7 @@ def _load_and_verify(self, node_id=None): if not isinstance(node, self.trans._aiida_class): # pylint: disable=protected-access,isinstance-second-argument-not-valid-type raise RestInputValidationError( - 'node {} is not of the required type {}'.format(node_id, self.trans._aiida_class) # pylint: disable=protected-access + f'node {node_id} is not of the required type {self.trans._aiida_class}' # pylint: disable=protected-access ) return node @@ -318,7 +318,7 @@ def get(self, id=None, page=None): # pylint: disable=redefined-builtin,invalid- if query_type == 'repo_contents' and results: response = make_response(results) response.headers['content-type'] = 'application/octet-stream' - response.headers['Content-Disposition'] = 'attachment; filename="{}"'.format(filename) + response.headers['Content-Disposition'] = f'attachment; filename="{filename}"' return response if query_type == 'download' and download not in ['false', 'False', False] and 
results: @@ -341,8 +341,8 @@ def get(self, id=None, page=None): # pylint: disable=redefined-builtin,invalid- if not isinstance(attributes_filter, list): attributes_filter = [attributes_filter] for attr in attributes_filter: - node['attributes'][str(attr)] = node['attributes.' + str(attr)] - del node['attributes.' + str(attr)] + node['attributes'][str(attr)] = node[f'attributes.{str(attr)}'] + del node[f'attributes.{str(attr)}'] if extras_filter is not None and extras: for node in results['nodes']: @@ -350,8 +350,8 @@ def get(self, id=None, page=None): # pylint: disable=redefined-builtin,invalid- if not isinstance(extras_filter, list): extras_filter = [extras_filter] for extra in extras_filter: - node['extras'][str(extra)] = node['extras.' + str(extra)] - del node['extras.' + str(extra)] + node['extras'][str(extra)] = node[f'extras.{str(extra)}'] + del node[f'extras.{str(extra)}'] ## Build response data = dict( diff --git a/aiida/restapi/run_api.py b/aiida/restapi/run_api.py index ba6f91f157..c9ba63fc82 100755 --- a/aiida/restapi/run_api.py +++ b/aiida/restapi/run_api.py @@ -59,7 +59,7 @@ def run_api(flask_app=api_classes.App, flask_api=api_classes.AiidaApi, **kwargs) if hookup: # Run app through built-in werkzeug server - print(' * REST API running on http://{}:{}{}'.format(hostname, port, API_CONFIG['PREFIX'])) + print(f" * REST API running on http://{hostname}:{port}{API_CONFIG['PREFIX']}") api.app.run(debug=debug, host=hostname, port=int(port), threaded=True) else: @@ -90,7 +90,7 @@ def configure_api(flask_app=api_classes.App, flask_api=api_classes.AiidaApi, **k wsgi_profile = kwargs.pop('wsgi_profile', CLI_DEFAULTS['WSGI_PROFILE']) if kwargs: - raise ValueError('Unknown keyword arguments: {}'.format(kwargs)) + raise ValueError(f'Unknown keyword arguments: {kwargs}') # Import the configuration file spec = importlib.util.spec_from_file_location(os.path.join(config, 'config'), os.path.join(config, 'config.py')) diff --git a/aiida/restapi/translator/base.py b/aiida/restapi/translator/base.py index a9ad35cce4..578ccc3708 100644 --- a/aiida/restapi/translator/base.py +++ b/aiida/restapi/translator/base.py @@ -322,7 +322,7 @@ def set_query( if not isinstance(attributes_filter, list): attributes_filter = [attributes_filter] for attr in attributes_filter: - default_projections.append('attributes.' + str(attr)) + default_projections.append(f'attributes.{str(attr)}') elif attributes is not None and attributes is not False: raise RestValidationError('The attributes filter is false by default and can only be set to true.') @@ -334,7 +334,7 @@ def set_query( if not isinstance(extras_filter, list): extras_filter = [extras_filter] for extra in extras_filter: - default_projections.append('extras.' + str(extra)) + default_projections.append(f'extras.{str(extra)}') elif extras is not None and extras is not False: raise RestValidationError('The extras filter is false by default and can only be set to true.') @@ -376,7 +376,7 @@ def set_limit_offset(self, limit=None, offset=None): except ValueError: raise InputValidationError('Limit value must be an integer') if limit > self.limit_default: - raise RestValidationError('Limit and perpage cannot be bigger than {}'.format(self.limit_default)) + raise RestValidationError(f'Limit and perpage cannot be bigger than {self.limit_default}') else: limit = self.limit_default @@ -419,8 +419,8 @@ def get_formatted_result(self, label): # Note: In code cleanup and design change, remove this node dependant part # from base class and move it to node translator. 
if self._result_type in ['with_outgoing', 'with_incoming']: - tmp['link_type'] = res[self.__label__ + '--' + label]['type'] - tmp['link_label'] = res[self.__label__ + '--' + label]['label'] + tmp['link_type'] = res[f'{self.__label__}--{label}']['type'] + tmp['link_label'] = res[f'{self.__label__}--{label}']['label'] results.append(tmp) # TODO think how to make it less hardcoded diff --git a/aiida/restapi/translator/nodes/data/__init__.py b/aiida/restapi/translator/nodes/data/__init__.py index a2c0761d0d..9ec3031c3b 100644 --- a/aiida/restapi/translator/nodes/data/__init__.py +++ b/aiida/restapi/translator/nodes/data/__init__.py @@ -56,7 +56,7 @@ def get_downloadable_data(node, download_format=None): try: response['filename'] = node.filename except AttributeError: - response['filename'] = node.uuid + '.' + download_format + response['filename'] = f'{node.uuid}.{download_format}' except LicensingException as exc: response['status'] = 500 response['data'] = str(exc) diff --git a/aiida/restapi/translator/nodes/node.py b/aiida/restapi/translator/nodes/node.py index 1a77aa8762..1cb86b402e 100644 --- a/aiida/restapi/translator/nodes/node.py +++ b/aiida/restapi/translator/nodes/node.py @@ -115,11 +115,11 @@ def set_query_type( self._content_type = 'repo_contents' self._filename = filename else: - raise InputValidationError('invalid result/content value: {}'.format(query_type)) + raise InputValidationError(f'invalid result/content value: {query_type}') # Add input/output relation to the query help if self._result_type != self.__label__: - edge_tag = self.__label__ + '--' + self._result_type + edge_tag = f'{self.__label__}--{self._result_type}' self._query_help['path'].append({ 'cls': self._aiida_class, 'tag': self._result_type, @@ -342,10 +342,10 @@ def _get_subclasses(self, parent=None, parent_class=None, recursive=True): if is_pkg: app_module = imp.load_package(full_path_base, full_path_base) else: - full_path = full_path_base + '.py' + full_path = f'{full_path_base}.py' # I could use load_module but it takes lots of arguments, # then I use load_source - app_module = imp.load_source('rst' + name, full_path) + app_module = imp.load_source(f'rst{name}', full_path) # Go through the content of the module if not is_pkg: @@ -401,7 +401,7 @@ def get_all_download_formats(full_type=None): try: node_cls = load_entry_point_from_full_type(full_type) except (TypeError, ValueError): - raise RestInputValidationError('The full type {} is invalid.'.format(full_type)) + raise RestInputValidationError(f'The full type {full_type} is invalid.') except EntryPointError: raise RestFeatureNotAvailable('The download formats for this node type are not available.') @@ -469,7 +469,7 @@ def get_repo_list(node, filename=''): try: flist = node.list_objects(filename) except IOError: - raise RestInputValidationError('{} is not a directory in this repository'.format(filename)) + raise RestInputValidationError(f'{filename} is not a directory in this repository') response = [] for fobj in flist: response.append({'name': fobj.name, 'type': fobj.file_type.name}) @@ -506,7 +506,7 @@ def get_comments(node): response.append({ 'created_time': cobj.ctime, 'modified_time': cobj.mtime, - 'user': cobj.user.first_name + ' ' + cobj.user.last_name, + 'user': f'{cobj.user.first_name} {cobj.user.last_name}', 'message': cobj.content }) return response diff --git a/aiida/schedulers/datastructures.py b/aiida/schedulers/datastructures.py index 757dd816ac..188f1264b5 100644 --- a/aiida/schedulers/datastructures.py +++ 
b/aiida/schedulers/datastructures.py @@ -108,7 +108,7 @@ def validate_resources(cls, **kwargs): def is_greater_equal_one(parameter): value = getattr(resources, parameter, None) if value is not None and value < 1: - raise ValueError('`{}` must be greater than or equal to one.'.format(parameter)) + raise ValueError(f'`{parameter}` must be greater than or equal to one.') # Validate that all fields are valid integers if they are specified, otherwise initialize them to `None` for parameter in list(cls._default_fields) + ['tot_num_mpiprocs']: @@ -119,10 +119,10 @@ def is_greater_equal_one(parameter): try: setattr(resources, parameter, int(value)) except ValueError: - raise ValueError('`{}` must be an integer when specified'.format(parameter)) + raise ValueError(f'`{parameter}` must be an integer when specified') if kwargs: - raise ValueError('these parameters were not recognized: {}'.format(', '.join(list(kwargs.keys())))) + raise ValueError(f"these parameters were not recognized: {', '.join(list(kwargs.keys()))}") # At least two of the following parameters need to be defined as non-zero if [resources.num_machines, resources.num_mpiprocs_per_machine, resources.tot_num_mpiprocs].count(None) > 1: @@ -207,7 +207,7 @@ def validate_resources(cls, **kwargs): raise ValueError('`tot_num_mpiprocs` must be greater than or equal to one.') if kwargs: - raise ValueError('these parameters were not recognized: {}'.format(', '.join(list(kwargs.keys())))) + raise ValueError(f"these parameters were not recognized: {', '.join(list(kwargs.keys()))}") return resources @@ -440,7 +440,7 @@ class JobInfo(DefaultFieldsAttributeDict): # pylint: disable=too-many-instance- def _serialize_job_state(job_state): """Return the serialized value of the JobState instance.""" if not isinstance(job_state, JobState): - raise TypeError('invalid type for value {}, should be an instance of `JobState`'.format(job_state)) + raise TypeError(f'invalid type for value {job_state}, should be an instance of `JobState`') return job_state.value @@ -510,7 +510,7 @@ def serialize_field(cls, value, field_type): if field_type is None: return value - serializer_method = getattr(cls, '_serialize_{}'.format(field_type)) + serializer_method = getattr(cls, f'_serialize_{field_type}') return serializer_method(value) @@ -525,7 +525,7 @@ def deserialize_field(cls, value, field_type): if field_type is None: return value - deserializer_method = getattr(cls, '_deserialize_{}'.format(field_type)) + deserializer_method = getattr(cls, f'_deserialize_{field_type}') return deserializer_method(value) diff --git a/aiida/schedulers/plugins/direct.py b/aiida/schedulers/plugins/direct.py index 5a8c77d973..f353f10c0e 100644 --- a/aiida/schedulers/plugins/direct.py +++ b/aiida/schedulers/plugins/direct.py @@ -96,10 +96,10 @@ def _get_joblist_command(self, jobs=None, user=None): if jobs: if isinstance(jobs, str): - command += ' {}'.format(escape_for_bash(jobs)) + command += f' {escape_for_bash(jobs)}' else: try: - command += ' {}'.format(' '.join(escape_for_bash(job) for job in jobs if job)) + command += f" {' '.join(escape_for_bash(job) for job in jobs if job)}" except TypeError: raise TypeError("If provided, the 'jobs' variable must be a string or a list of strings") @@ -123,7 +123,7 @@ def _get_submit_script_header(self, job_tmpl): # Redirecting script output on the correct files # Should be one of the first commands if job_tmpl.sched_output_path: - lines.append('exec > {}'.format(job_tmpl.sched_output_path)) + lines.append(f'exec > {job_tmpl.sched_output_path}') if 
job_tmpl.sched_join_files: # TODO: manual says: # pylint: disable=fixme @@ -136,7 +136,7 @@ def _get_submit_script_header(self, job_tmpl): self.logger.info('sched_join_files is True, but sched_error_path is set; ignoring sched_error_path') else: if job_tmpl.sched_error_path: - lines.append('exec 2> {}'.format(job_tmpl.sched_error_path)) + lines.append(f'exec 2> {job_tmpl.sched_error_path}') else: # To avoid automatic join of files lines.append('exec 2>&1') @@ -152,7 +152,7 @@ def _get_submit_script_header(self, job_tmpl): "a positive integer (in kB)! It is instead '{}'" ''.format((job_tmpl.max_memory_kb)) ) - lines.append('ulimit -v {}'.format(virtual_memory_kb)) + lines.append(f'ulimit -v {virtual_memory_kb}') if not job_tmpl.import_sys_environment: lines.append('env --ignore-environment \\') @@ -171,7 +171,7 @@ def _get_submit_script_header(self, job_tmpl): if not isinstance(job_tmpl.job_environment, dict): raise ValueError('If you provide job_environment, it must be a dictionary') for key, value in job_tmpl.job_environment.items(): - lines.append('export {}={}'.format(key.strip(), escape_for_bash(value))) + lines.append(f'export {key.strip()}={escape_for_bash(value)}') lines.append('# ENVIRONMENT VARIABLES END ###') lines.append(empty_line) @@ -204,9 +204,9 @@ def _get_submit_command(self, submit_script): directory. IMPORTANT: submit_script should be already escaped. """ - submit_command = 'bash -e {} > /dev/null 2>&1 & echo $!'.format(submit_script) + submit_command = f'bash -e {submit_script} > /dev/null 2>&1 & echo $!' - self.logger.info('submitting with: ' + submit_command) + self.logger.info(f'submitting with: {submit_command}') return submit_command @@ -255,7 +255,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): try: job_state_string = job[1][0] # I just check the first character except IndexError: - self.logger.debug("No 'job_state' field for job id {}".format(this_job.job_id)) + self.logger.debug(f"No 'job_state' field for job id {this_job.job_id}") this_job.job_state = JobState.UNDETERMINED else: try: @@ -272,7 +272,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): # I strip the part after the @: is this always ok? 
this_job.job_owner = job[2] except KeyError: - self.logger.debug("No 'job_owner' field for job id {}".format(this_job.job_id)) + self.logger.debug(f"No 'job_owner' field for job id {this_job.job_id}") try: this_job.wallclock_time_seconds = self._convert_time(job[3]) @@ -280,7 +280,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): # May not have started yet pass except ValueError: - self.logger.warning("Error parsing 'resources_used.walltime' for job id {}".format(this_job.job_id)) + self.logger.warning(f"Error parsing 'resources_used.walltime' for job id {this_job.job_id}") # I append to the list of jobs to return job_list.append(this_job) @@ -323,7 +323,7 @@ def _convert_time(self, string): pieces = re.split('[:.]', string) if len(pieces) != 3: - self.logger.warning('Wrong number of pieces (expected 3) for time string {}'.format(string)) + self.logger.warning(f'Wrong number of pieces (expected 3) for time string {string}') raise ValueError('Wrong number of pieces for time string.') days = 0 @@ -338,7 +338,7 @@ def _convert_time(self, string): if hours < 0: raise ValueError except ValueError: - self.logger.warning('Not a valid number of hours: {}'.format(pieces[0])) + self.logger.warning(f'Not a valid number of hours: {pieces[0]}') raise ValueError('Not a valid number of hours.') try: @@ -346,7 +346,7 @@ def _convert_time(self, string): if mins < 0: raise ValueError except ValueError: - self.logger.warning('Not a valid number of minutes: {}'.format(pieces[1])) + self.logger.warning(f'Not a valid number of minutes: {pieces[1]}') raise ValueError('Not a valid number of minutes.') try: @@ -354,7 +354,7 @@ def _convert_time(self, string): if secs < 0: raise ValueError except ValueError: - self.logger.warning('Not a valid number of seconds: {}'.format(pieces[2])) + self.logger.warning(f'Not a valid number of seconds: {pieces[2]}') raise ValueError('Not a valid number of seconds.') return days * 86400 + hours * 3600 + mins * 60 + secs @@ -369,30 +369,17 @@ def _parse_submit_output(self, retval, stdout, stderr): Return a string with the JobID. """ if retval != 0: - self.logger.error( - 'Error in _parse_submit_output: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) - raise SchedulerError( - 'Error during submission, retval={}\n' - 'stdout={}\nstderr={}'.format(retval, stdout, stderr) - ) + self.logger.error(f'Error in _parse_submit_output: retval={retval}; stdout={stdout}; stderr={stderr}') + raise SchedulerError(f'Error during submission, retval={retval}\nstdout={stdout}\nstderr={stderr}') if stderr.strip(): self.logger.warning( - 'in _parse_submit_output for {}: ' - 'there was some text in stderr: {}'.format(str(self.transport), stderr) + f'in _parse_submit_output for {str(self.transport)}: there was some text in stderr: {stderr}' ) if not stdout.strip(): - self.logger.debug( - 'Unable to get the PID: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) - raise SchedulerError( - 'Unable to get the PID: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) + self.logger.debug(f'Unable to get the PID: retval={retval}; stdout={stdout}; stderr={stderr}') + raise SchedulerError(f'Unable to get the PID: retval={retval}; stdout={stdout}; stderr={stderr}') return stdout.strip() @@ -400,9 +387,9 @@ def _get_kill_command(self, jobid): """ Return the command to kill the job with specified jobid. 
""" - submit_command = 'kill {}'.format(jobid) + submit_command = f'kill {jobid}' - self.logger.info('killing job {}'.format(jobid)) + self.logger.info(f'killing job {jobid}') return submit_command @@ -415,22 +402,17 @@ def _parse_kill_output(self, retval, stdout, stderr): :return: True if everything seems ok, False otherwise. """ if retval != 0: - self.logger.error( - 'Error in _parse_kill_output: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) + self.logger.error(f'Error in _parse_kill_output: retval={retval}; stdout={stdout}; stderr={stderr}') return False if stderr.strip(): self.logger.warning( - 'in _parse_kill_output for {}: ' - 'there was some text in stderr: {}'.format(str(self.transport), stderr) + f'in _parse_kill_output for {str(self.transport)}: there was some text in stderr: {stderr}' ) if stdout.strip(): self.logger.warning( - 'in _parse_kill_output for {}: ' - 'there was some text in stdout: {}'.format(str(self.transport), stdout) + f'in _parse_kill_output for {str(self.transport)}: there was some text in stdout: {stdout}' ) return True diff --git a/aiida/schedulers/plugins/lsf.py b/aiida/schedulers/plugins/lsf.py index 66c2e1fd92..1240c5e0b1 100644 --- a/aiida/schedulers/plugins/lsf.py +++ b/aiida/schedulers/plugins/lsf.py @@ -252,15 +252,13 @@ def _get_joblist_command(self, jobs=None, user=None): # I add the environment variable SLURM_TIME_FORMAT in front to be # sure to get the times in 'standard' format - command = [ - 'bjobs', '-noheader', "-o '{} delimiter=\"{}\"'".format(' '.join(self._joblist_fields), _FIELD_SEPARATOR) - ] + command = ['bjobs', '-noheader', f"-o '{' '.join(self._joblist_fields)} delimiter=\"{_FIELD_SEPARATOR}\"'"] if user and jobs: raise FeatureNotAvailable('Cannot query by user and job(s) in LSF') if user: - command.append('-u{}'.format(user)) + command.append(f'-u{user}') if jobs: joblist = [] @@ -273,7 +271,7 @@ def _get_joblist_command(self, jobs=None, user=None): command.append(' '.join(joblist)) comm = ' '.join(command) - self.logger.debug('bjobs command: {}'.format(comm)) + self.logger.debug(f'bjobs command: {comm}') return comm def _get_detailed_job_info_command(self, job_id): @@ -283,7 +281,7 @@ def _get_detailed_job_info_command(self, job_id): The output text is just retrieved, and returned for logging purposes. """ - return 'bjobs -l {}'.format(escape_for_bash(job_id)) + return f'bjobs -l {escape_for_bash(job_id)}' def _get_submit_script_header(self, job_tmpl): """ @@ -313,7 +311,7 @@ def _get_submit_script_header(self, job_tmpl): if job_tmpl.email: # If not specified, but email events are set, SLURM # sends the mail to the job owner by default - lines.append('#BSUB -u {}'.format(job_tmpl.email)) + lines.append(f'#BSUB -u {job_tmpl.email}') if job_tmpl.email_on_started: lines.append('#BSUB -B') @@ -333,22 +331,22 @@ def _get_submit_script_header(self, job_tmpl): # prepend a 'j' (for 'job') before the string if the string # is now empty or does not start with a valid character if not job_title or (job_title[0] not in string.ascii_letters + string.digits): - job_title = 'j' + job_title + job_title = f'j{job_title}' # Truncate to the first 128 characters # Nothing is done if the string is shorter. 
job_title = job_title[:128] - lines.append('#BSUB -J "{}"'.format(job_title)) + lines.append(f'#BSUB -J "{job_title}"') if not job_tmpl.import_sys_environment: self.logger.warning('LSF scheduler cannot ignore the user environment') if job_tmpl.sched_output_path: - lines.append('#BSUB -o {}'.format(job_tmpl.sched_output_path)) + lines.append(f'#BSUB -o {job_tmpl.sched_output_path}') sched_error_path = getattr(job_tmpl, 'sched_error_path', None) if job_tmpl.sched_join_files: - sched_error_path = '{}_'.format(job_tmpl.sched_output_path) + sched_error_path = f'{job_tmpl.sched_output_path}_' self.logger.warning( 'LSF scheduler does not support joining ' 'the standard output and standard error ' @@ -357,10 +355,10 @@ def _get_submit_script_header(self, job_tmpl): ) if sched_error_path: - lines.append('#BSUB -e {}'.format(job_tmpl.sched_error_path)) + lines.append(f'#BSUB -e {job_tmpl.sched_error_path}') if job_tmpl.queue_name: - lines.append('#BSUB -q {}'.format(job_tmpl.queue_name)) + lines.append(f'#BSUB -q {job_tmpl.queue_name}') if job_tmpl.priority: # Specifies user-assigned job priority that orders all jobs @@ -369,18 +367,18 @@ def _get_submit_script_header(self, job_tmpl): # (configured in lsb.params, displayed by "bparams -l"). # Jobs are scheduled based first on their queue priority first, then # job priority, and lastly in first-come first-served order. - lines.append('#BSUB -sp {}'.format(job_tmpl.priority)) + lines.append(f'#BSUB -sp {job_tmpl.priority}') if not job_tmpl.job_resource: raise ValueError('Job resources (as the tot_num_mpiprocs) are required for the LSF scheduler plugin') - lines.append('#BSUB -n {}'.format(job_tmpl.job_resource.get_tot_num_mpiprocs())) + lines.append(f'#BSUB -n {job_tmpl.job_resource.get_tot_num_mpiprocs()}') # Note: make sure that PARALLEL_SCHED_BY_SLOT=Y is NOT # defined in lsb.params (you can check with the output of bparams -l). 
# Note: the -n option of bsub can also contain a maximum number of # procs to be used if job_tmpl.job_resource.parallel_env: - lines.append('#BSUB -m "{}"'.format(job_tmpl.job_resource.parallel_env)) + lines.append(f'#BSUB -m "{job_tmpl.job_resource.parallel_env}"') if job_tmpl.max_wallclock_seconds is not None: # ABS_RUNLIMIT=Y should be set, in lsb.params (check with bparams -l) @@ -398,7 +396,7 @@ def _get_submit_script_header(self, job_tmpl): # The double negation results in the ceiling rather than the floor # of the division minutes = -(-(tot_secs % 3600) // 60) - lines.append('#BSUB -W {:02d}:{:02d}'.format(hours, minutes)) + lines.append(f'#BSUB -W {hours:02d}:{minutes:02d}') # TODO: check if this is the memory per node # pylint: disable=fixme if job_tmpl.max_memory_kb: @@ -414,7 +412,7 @@ def _get_submit_script_header(self, job_tmpl): ) # The -M option sets a per-process (soft) memory limit for all the # processes that belong to this job - lines.append('#BSUB -M {}'.format(virtual_memory_kb)) + lines.append(f'#BSUB -M {virtual_memory_kb}') if job_tmpl.custom_scheduler_commands: lines.append(job_tmpl.custom_scheduler_commands) @@ -430,7 +428,7 @@ def _get_submit_script_header(self, job_tmpl): if not isinstance(job_tmpl.job_environment, dict): raise ValueError('If you provide job_environment, it must be a dictionary') for key, value in job_tmpl.job_environment.items(): - lines.append('export {}={}'.format(key.strip(), escape_for_bash(value))) + lines.append(f'export {key.strip()}={escape_for_bash(value)}') lines.append('# ENVIRONMENT VARIABLES END ###') lines.append(empty_line) @@ -480,9 +478,9 @@ def _get_submit_command(self, submit_script): directory. IMPORTANT: submit_script should be already escaped. """ - submit_command = 'bsub < {}'.format(submit_script) + submit_command = f'bsub < {submit_script}' - self.logger.info('submitting with: ' + submit_command) + self.logger.info(f'submitting with: {submit_command}') return submit_command @@ -507,14 +505,9 @@ def _parse_joblist_output(self, retval, stdout, stderr): num_fields = len(self._joblist_fields) if retval != 0: - self.logger.warning( - 'Error in _parse_joblist_output: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) + self.logger.warning(f'Error in _parse_joblist_output: retval={retval}; stdout={stdout}; stderr={stderr}') raise SchedulerError( - 'Error during parsing joblist output, ' - 'retval={}\n' - 'stdout={}\nstderr={}'.format(retval, stdout, stderr) + f'Error during parsing joblist output, retval={retval}\nstdout={stdout}\nstderr={stderr}' ) # will contain raw data parsed from output: only lines with the @@ -534,7 +527,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): if len(job) != num_fields: # I skip this calculation # (I don't append anything to job_list before continuing) - self.logger.error("Wrong line length in squeue output! '{}'".format(job)) + self.logger.error(f"Wrong line length in squeue output! '{job}'") continue this_job = JobInfo() @@ -571,16 +564,14 @@ def _parse_joblist_output(self, retval, stdout, stderr): this_job.num_machines = int(number_nodes) except ValueError: self.logger.warning( - 'The number of allocated nodes is not ' - 'an integer ({}) for job id {}!'.format(number_nodes, this_job.job_id) + f'The number of allocated nodes is not an integer ({number_nodes}) for job id {this_job.job_id}!' 
) try: this_job.num_mpiprocs = int(number_cpus) except ValueError: self.logger.warning( - 'The number of allocated cores is not ' - 'an integer ({}) for job id {}!'.format(number_cpus, this_job.job_id) + f'The number of allocated cores is not an integer ({number_cpus}) for job id {this_job.job_id}!' ) # ALLOCATED NODES HERE @@ -622,18 +613,18 @@ def _parse_joblist_output(self, retval, stdout, stderr): this_job.requested_wallclock_time_seconds = requested_walltime.total_seconds() # pylint: disable=invalid-name except (TypeError, ValueError): - self.logger.warning('Error parsing the time limit for job id {}'.format(this_job.job_id)) + self.logger.warning(f'Error parsing the time limit for job id {this_job.job_id}') try: psd_percent_complete = float(percent_complete.strip(' L').strip('%')) this_job.wallclock_time_seconds = requested_walltime.total_seconds() * psd_percent_complete / 100. except ValueError: - self.logger.warning('Error parsing the time used for job id {}'.format(this_job.job_id)) + self.logger.warning(f'Error parsing the time used for job id {this_job.job_id}') try: this_job.submission_time = psd_submission_time except ValueError: - self.logger.warning('Error parsing submission time for job id {}'.format(this_job.job_id)) + self.logger.warning(f'Error parsing submission time for job id {this_job.job_id}') this_job.title = job_name @@ -668,30 +659,21 @@ def _parse_submit_output(self, retval, stdout, stderr): Return a string with the JobID. """ if retval != 0: - self.logger.error( - 'Error in _parse_submit_output: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) - raise SchedulerError( - 'Error during submission, retval={}\n' - 'stdout={}\nstderr={}'.format(retval, stdout, stderr) - ) + self.logger.error(f'Error in _parse_submit_output: retval={retval}; stdout={stdout}; stderr={stderr}') + raise SchedulerError(f'Error during submission, retval={retval}\nstdout={stdout}\nstderr={stderr}') try: - transport_string = ' for {}'.format(self.transport) + transport_string = f' for {self.transport}' except SchedulerError: transport_string = '' if stderr.strip(): - self.logger.warning( - 'in _parse_submit_output{}: ' - 'there was some text in stderr: {}'.format(transport_string, stderr) - ) + self.logger.warning(f'in _parse_submit_output{transport_string}: there was some text in stderr: {stderr}') try: return stdout.strip().split('Job <')[1].split('>')[0] except IndexError: - raise SchedulerParsingError('Cannot parse submission output: {}'.format(stdout)) + raise SchedulerParsingError(f'Cannot parse submission output: {stdout}') def _parse_time_string(self, string, fmt='%b %d %H:%M'): """ @@ -706,16 +688,16 @@ def _parse_time_string(self, string, fmt='%b %d %H:%M'): # The year is not specified. I have to add it, and I set it to the # current year. This is actually not correct, if we are close # new year... we should ask the scheduler also the year. 
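# A minimal standalone sketch of the year-prepending parse implemented in the lines that
# follow, assuming the same strptime formats; the function name is illustrative only.
# LSF's bjobs prints times such as 'Mar 15 10:30' without a year, so the current year is
# prepended before parsing, and a variant with a trailing ' L' is tried as a fallback.
import datetime

def parse_lsf_time(string, fmt='%b %d %H:%M'):
    actual_string = f'{datetime.datetime.now().year} {string}'
    actual_fmt = f'%Y {fmt}'
    try:
        return datetime.datetime.strptime(actual_string, actual_fmt)
    except ValueError:
        # some LSF time fields carry a literal trailing ' L'
        return datetime.datetime.strptime(actual_string, f'{actual_fmt} L')

# parse_lsf_time('Mar 15 10:30')   -> March 15, 10:30 of the current year
# parse_lsf_time('Mar 15 10:30 L') -> same, accepting the trailing ' L'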
- actual_string = '{} {}'.format(datetime.datetime.now().year, string) - actual_fmt = '%Y {}'.format(fmt) + actual_string = f'{datetime.datetime.now().year} {string}' + actual_fmt = f'%Y {fmt}' try: try: thetime = datetime.datetime.strptime(actual_string, actual_fmt) except ValueError: - thetime = datetime.datetime.strptime(actual_string, '{} L'.format(actual_fmt)) + thetime = datetime.datetime.strptime(actual_string, f'{actual_fmt} L') except Exception as exc: - self.logger.debug('Unable to parse time string {}, the message was {}'.format(string, exc)) + self.logger.debug(f'Unable to parse time string {string}, the message was {exc}') raise ValueError('Problem parsing the time string.') return thetime @@ -724,8 +706,8 @@ def _get_kill_command(self, jobid): """ Return the command to kill the job with specified jobid. """ - submit_command = 'bkill {}'.format(jobid) - self.logger.info('killing job {}'.format(jobid)) + submit_command = f'bkill {jobid}' + self.logger.info(f'killing job {jobid}') return submit_command def _parse_kill_output(self, retval, stdout, stderr): @@ -736,33 +718,23 @@ def _parse_kill_output(self, retval, stdout, stderr): """ if retval == 255: self.logger.error( - 'Error in _parse_kill_output: retval={} (Job already finished); ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) + f'Error in _parse_kill_output: retval={retval} (Job already finished); stdout={stdout}; stderr={stderr}' ) return False if retval != 0: - self.logger.error( - 'Error in _parse_kill_output: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) + self.logger.error(f'Error in _parse_kill_output: retval={retval}; stdout={stdout}; stderr={stderr}') return False try: - transport_string = ' for {}'.format(self.transport) + transport_string = f' for {self.transport}' except SchedulerError: transport_string = '' if stderr.strip(): - self.logger.warning( - 'in _parse_kill_output{}: ' - 'there was some text in stderr: {}'.format(transport_string, stderr) - ) + self.logger.warning(f'in _parse_kill_output{transport_string}: there was some text in stderr: {stderr}') if stdout.strip(): - self.logger.warning( - 'in _parse_kill_output{}: ' - 'there was some text in stdout: {}'.format(transport_string, stdout) - ) + self.logger.warning(f'in _parse_kill_output{transport_string}: there was some text in stdout: {stdout}') return True diff --git a/aiida/schedulers/plugins/pbsbaseclasses.py b/aiida/schedulers/plugins/pbsbaseclasses.py index 9fe8f10926..e8f2d3d5b7 100644 --- a/aiida/schedulers/plugins/pbsbaseclasses.py +++ b/aiida/schedulers/plugins/pbsbaseclasses.py @@ -20,7 +20,7 @@ # This maps PbsPro status letters to our own status list -## List of states from the man page of qstat +# List of states from the man page of qstat # B Array job has at least one subjob running. # E Job is exiting after having run. # F Job is finished. @@ -34,7 +34,7 @@ # W Job is waiting for its submitter-assigned start time to be reached. # X Subjob has completed execution or has been deleted. -## These are instead the states from PBS/Torque v.2.4.16 (from Ubuntu) +# These are instead the states from PBS/Torque v.2.4.16 (from Ubuntu) # C - Job is completed after having run [different from above, but not clashing] # E - Job is exiting after having run. [same as above] # H - Job is held. 
[same as above] @@ -150,19 +150,19 @@ def _get_joblist_command(self, jobs=None, user=None): raise FeatureNotAvailable('Cannot query by user and job(s) in PBS') if user: - command.append('-u{}'.format(user)) + command.append(f'-u{user}') if jobs: if isinstance(jobs, str): - command.append('{}'.format(escape_for_bash(jobs))) + command.append(f'{escape_for_bash(jobs)}') else: try: - command.append('{}'.format(' '.join(escape_for_bash(j) for j in jobs))) + command.append(f"{' '.join(escape_for_bash(j) for j in jobs)}") except TypeError: raise TypeError("If provided, the 'jobs' variable must be a string or an iterable of strings") comm = ' '.join(command) - _LOGGER.debug('qstat command: {}'.format(comm)) + _LOGGER.debug(f'qstat command: {comm}') return comm def _get_detailed_job_info_command(self, job_id): @@ -172,7 +172,7 @@ def _get_detailed_job_info_command(self, job_id): The output text is just retrieved, and returned for logging purposes. """ - return 'tracejob -v {}'.format(escape_for_bash(job_id)) + return f'tracejob -v {escape_for_bash(job_id)}' def _get_submit_script_header(self, job_tmpl): """ @@ -202,7 +202,7 @@ def _get_submit_script_header(self, job_tmpl): if job_tmpl.email: # If not specified, but email events are set, PBSPro # sends the mail to the job owner by default - lines.append('#PBS -M {}'.format(job_tmpl.email)) + lines.append(f'#PBS -M {job_tmpl.email}') email_events = '' if job_tmpl.email_on_started: @@ -210,7 +210,7 @@ def _get_submit_script_header(self, job_tmpl): if job_tmpl.email_on_terminated: email_events += 'ea' if email_events: - lines.append('#PBS -m {}'.format(email_events)) + lines.append(f'#PBS -m {email_events}') if not job_tmpl.email: _LOGGER.info( 'Email triggers provided to PBSPro script for job,' @@ -236,19 +236,19 @@ def _get_submit_script_header(self, job_tmpl): # prepend a 'j' (for 'job') before the string if the string # is now empty or does not start with a valid charachter if not job_title or (job_title[0] not in string.ascii_letters + string.digits): - job_title = 'j' + job_title + job_title = f'j{job_title}' # Truncate to the first 15 characters # Nothing is done if the string is shorter. job_title = job_title[:15] - lines.append('#PBS -N {}'.format(job_title)) + lines.append(f'#PBS -N {job_title}') if job_tmpl.import_sys_environment: lines.append('#PBS -V') if job_tmpl.sched_output_path: - lines.append('#PBS -o {}'.format(job_tmpl.sched_output_path)) + lines.append(f'#PBS -o {job_tmpl.sched_output_path}') if job_tmpl.sched_join_files: # from qsub man page: @@ -265,13 +265,13 @@ def _get_submit_script_header(self, job_tmpl): ) else: if job_tmpl.sched_error_path: - lines.append('#PBS -e {}'.format(job_tmpl.sched_error_path)) + lines.append(f'#PBS -e {job_tmpl.sched_error_path}') if job_tmpl.queue_name: - lines.append('#PBS -q {}'.format(job_tmpl.queue_name)) + lines.append(f'#PBS -q {job_tmpl.queue_name}') if job_tmpl.account: - lines.append('#PBS -A {}'.format(job_tmpl.account)) + lines.append(f'#PBS -A {job_tmpl.account}') if job_tmpl.priority: # Priority of the job. Format: host-dependent integer. Default: @@ -279,7 +279,7 @@ def _get_submit_script_header(self, job_tmpl): # attribute to priority. # TODO: Here I expect that priority is passed in the correct PBSPro # pylint: disable=fixme # format. To fix. 
- lines.append('#PBS -p {}'.format(job_tmpl.priority)) + lines.append(f'#PBS -p {job_tmpl.priority}') if not job_tmpl.job_resource: raise ValueError('Job resources (as the num_machines) are required for the PBSPro scheduler plugin') @@ -309,7 +309,7 @@ def _get_submit_script_header(self, job_tmpl): if not isinstance(job_tmpl.job_environment, dict): raise ValueError('If you provide job_environment, it must be a dictionary') for key, value in job_tmpl.job_environment.items(): - lines.append('export {}={}'.format(key.strip(), escape_for_bash(value))) + lines.append(f'export {key.strip()}={escape_for_bash(value)}') lines.append('# ENVIRONMENT VARIABLES END ###') lines.append(empty_line) @@ -329,9 +329,9 @@ def _get_submit_command(self, submit_script): directory. IMPORTANT: submit_script should be already escaped. """ - submit_command = 'qsub {}'.format(submit_script) + submit_command = f'qsub {submit_script}' - _LOGGER.info('submitting with: {}'.format(submit_command)) + _LOGGER.info(f'submitting with: {submit_command}') return submit_command @@ -369,15 +369,9 @@ def _parse_joblist_output(self, retval, stdout, stderr): l for l in stderr.split('\n') if 'Unknown Job Id' not in l and 'Job has finished' not in l ) if filtered_stderr.strip(): - _LOGGER.warning( - 'Warning in _parse_joblist_output, non-empty ' - "(filtered) stderr='{}'".format(filtered_stderr) - ) + _LOGGER.warning(f"Warning in _parse_joblist_output, non-empty (filtered) stderr='{filtered_stderr}'") if retval != 0: - raise SchedulerError( - 'Error during qstat parsing, retval={}\n' - 'stdout={}\nstderr={}'.format(retval, stdout, stderr) - ) + raise SchedulerError(f'Error during qstat parsing, retval={retval}\nstdout={stdout}\nstderr={stderr}') jobdata_raw = [] # will contain raw data parsed from qstat output # Get raw data and split in lines @@ -410,8 +404,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): # stripping the TAB if not jobdata_raw[-1]['lines']: raise SchedulerParsingError( - 'Line {} is the first line of the job, but it ' - 'starts with a TAB! ({})'.format(line_num, line) + f'Line {line_num} is the first line of the job, but it starts with a TAB! ({line})' ) jobdata_raw[-1]['lines'][-1] += line[1:] else: @@ -422,7 +415,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): ## 'Variable_List', for instance, can have ## newlines if they are included... # I do a ## workaround - jobdata_raw[-1]['lines'][-1] += '\n{}'.format(line) + jobdata_raw[-1]['lines'][-1] += f'\n{line}' jobdata_raw[-1]['warning_lines_idx'].append(len(jobdata_raw[-1]['lines']) - 1) # Create dictionary and parse specific fields @@ -436,7 +429,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): # There are lines without equals sign: this is bad if lines_without_equals_sign: # Should I only warn? - _LOGGER.error('There are lines without equals sign! {}'.format(lines_without_equals_sign)) + _LOGGER.error(f'There are lines without equals sign! 
{lines_without_equals_sign}') raise SchedulerParsingError('There are lines without equals sign.') raw_data = { @@ -470,7 +463,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): try: this_job.title = raw_data['job_name'] except KeyError: - _LOGGER.debug("No 'job_name' field for job id {}".format(this_job.job_id)) + _LOGGER.debug(f"No 'job_name' field for job id {this_job.job_id}") try: this_job.annotation = raw_data['comment'] @@ -485,19 +478,16 @@ def _parse_joblist_output(self, retval, stdout, stderr): try: this_job.job_state = self._map_status[job_state_string] except KeyError: - _LOGGER.warning( - "Unrecognized job_state '{}' for job " - 'id {}'.format(job_state_string, this_job.job_id) - ) + _LOGGER.warning(f"Unrecognized job_state '{job_state_string}' for job id {this_job.job_id}") this_job.job_state = JobState.UNDETERMINED except KeyError: - _LOGGER.debug("No 'job_state' field for job id {}".format(this_job.job_id)) + _LOGGER.debug(f"No 'job_state' field for job id {this_job.job_id}") this_job.job_state = JobState.UNDETERMINED try: this_job.job_substate = raw_data['substate'] except KeyError: - _LOGGER.debug("No 'substate' field for job id {}".format(this_job.job_id)) + _LOGGER.debug(f"No 'substate' field for job id {this_job.job_id}") try: exec_hosts = raw_data['exec_host'].split('+') @@ -527,79 +517,73 @@ def _parse_joblist_output(self, retval, stdout, stderr): node.num_cpus = int(data[1]) else: raise ValueError( - 'Wrong number of pieces: {} ' - 'instead of 1 or 2 in exec_hosts: ' - '{}'.format(len(data), exec_hosts) + f'Wrong number of pieces: {len(data)} instead of 1 or 2 in exec_hosts: {exec_hosts}' ) exec_host_list.append(node) this_job.allocated_machines = exec_host_list except Exception as exc: # pylint: disable=broad-except _LOGGER.debug( - 'Problem parsing the node names, I ' - 'got Exception {} with message {}; ' - 'exec_hosts was {}'.format(str(type(exc)), exc, exec_hosts) + f'Problem parsing the node names, I got Exception {str(type(exc))} with message {exc}; ' + f'exec_hosts was {exec_hosts}' ) try: # I strip the part after the @: is this always ok? this_job.job_owner = raw_data['job_owner'].split('@')[0] except KeyError: - _LOGGER.debug("No 'job_owner' field for job id {}".format(this_job.job_id)) + _LOGGER.debug(f"No 'job_owner' field for job id {this_job.job_id}") try: this_job.num_cpus = int(raw_data['resource_list.ncpus']) # TODO: understand if this is the correct field also for multithreaded (OpenMP) jobs. # pylint: disable=fixme except KeyError: - _LOGGER.debug("No 'resource_list.ncpus' field for job id {}".format(this_job.job_id)) + _LOGGER.debug(f"No 'resource_list.ncpus' field for job id {this_job.job_id}") except ValueError: _LOGGER.warning( - "'resource_list.ncpus' is not an integer " - '({}) for job id {}!'.format(raw_data['resource_list.ncpus'], this_job.job_id) + f"'resource_list.ncpus' is not an integer " + f"({raw_data['resource_list.ncpus']}) for job id {this_job.job_id}!" ) try: this_job.num_mpiprocs = int(raw_data['resource_list.mpiprocs']) # TODO: understand if this is the correct field also for multithreaded (OpenMP) jobs. 
# pylint: disable=fixme except KeyError: - _LOGGER.debug("No 'resource_list.mpiprocs' field for job id {}".format(this_job.job_id)) + _LOGGER.debug(f"No 'resource_list.mpiprocs' field for job id {this_job.job_id}") except ValueError: _LOGGER.warning( - "'resource_list.mpiprocs' is not an integer " - '({}) for job id {}!'.format(raw_data['resource_list.mpiprocs'], this_job.job_id) + f"'resource_list.mpiprocs' is not an integer " + f"({raw_data['resource_list.mpiprocs']}) for job id {this_job.job_id}!" ) try: this_job.num_machines = int(raw_data['resource_list.nodect']) except KeyError: - _LOGGER.debug("No 'resource_list.nodect' field for job id {}".format(this_job.job_id)) + _LOGGER.debug(f"No 'resource_list.nodect' field for job id {this_job.job_id}") except ValueError: _LOGGER.warning( - "'resource_list.nodect' is not an integer " - '({}) for job id {}!'.format(raw_data['resource_list.nodect'], this_job.job_id) + f"'resource_list.nodect' is not an integer " + f"({raw_data['resource_list.nodect']}) for job id {this_job.job_id}!" ) # Double check of redundant info if (this_job.allocated_machines is not None and this_job.num_machines is not None): if len(set(machine.name for machine in this_job.allocated_machines)) != this_job.num_machines: _LOGGER.error( - 'The length of the list of allocated ' - 'nodes ({}) is different from the ' - 'expected number of nodes ({})!'.format( - len(this_job.allocated_machines), this_job.num_machines - ) + f'The length of the list of allocated nodes ({len(this_job.allocated_machines)}) is different ' + f'from the expected number of nodes ({this_job.num_machines})!' ) try: this_job.queue_name = raw_data['queue'] except KeyError: - _LOGGER.debug("No 'queue' field for job id {}".format(this_job.job_id)) + _LOGGER.debug(f"No 'queue' field for job id {this_job.job_id}") try: this_job.requested_wallclock_time = (self._convert_time(raw_data['resource_list.walltime'])) # pylint: disable=invalid-name except KeyError: - _LOGGER.debug("No 'resource_list.walltime' field for job id {}".format(this_job.job_id)) + _LOGGER.debug(f"No 'resource_list.walltime' field for job id {this_job.job_id}") except ValueError: - _LOGGER.warning("Error parsing 'resource_list.walltime' for job id {}".format(this_job.job_id)) + _LOGGER.warning(f"Error parsing 'resource_list.walltime' for job id {this_job.job_id}") try: this_job.wallclock_time_seconds = (self._convert_time(raw_data['resources_used.walltime'])) @@ -607,7 +591,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): # May not have started yet pass except ValueError: - _LOGGER.warning("Error parsing 'resources_used.walltime' for job id {}".format(this_job.job_id)) + _LOGGER.warning(f"Error parsing 'resources_used.walltime' for job id {this_job.job_id}") try: this_job.cpu_time = (self._convert_time(raw_data['resources_used.cput'])) @@ -615,7 +599,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): # May not have started yet pass except ValueError: - _LOGGER.warning("Error parsing 'resources_used.cput' for job id {}".format(this_job.job_id)) + _LOGGER.warning(f"Error parsing 'resources_used.cput' for job id {this_job.job_id}") # # ctime: The time that the job was created @@ -629,9 +613,9 @@ def _parse_joblist_output(self, retval, stdout, stderr): try: this_job.submission_time = self._parse_time_string(raw_data['ctime']) except KeyError: - _LOGGER.debug("No 'ctime' field for job id {}".format(this_job.job_id)) + _LOGGER.debug(f"No 'ctime' field for job id {this_job.job_id}") except ValueError: - _LOGGER.warning("Error 
parsing 'ctime' for job id {}".format(this_job.job_id)) + _LOGGER.warning(f"Error parsing 'ctime' for job id {this_job.job_id}") try: this_job.dispatch_time = self._parse_time_string(raw_data['stime']) @@ -639,7 +623,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): # The job may not have been started yet pass except ValueError: - _LOGGER.warning("Error parsing 'stime' for job id {}".format(this_job.job_id)) + _LOGGER.warning(f"Error parsing 'stime' for job id {this_job.job_id}") # TODO: see if we want to set also finish_time for finished jobs, if there are any # pylint: disable=fixme @@ -658,7 +642,7 @@ def _convert_time(string): """ pieces = string.split(':') if len(pieces) != 3: - _LOGGER.warning('Wrong number of pieces (expected 3) for time string {}'.format(string)) + _LOGGER.warning(f'Wrong number of pieces (expected 3) for time string {string}') raise ValueError('Wrong number of pieces for time string.') try: @@ -666,7 +650,7 @@ def _convert_time(string): if hours < 0: raise ValueError except ValueError: - _LOGGER.warning('Not a valid number of hours: {}'.format(pieces[0])) + _LOGGER.warning(f'Not a valid number of hours: {pieces[0]}') raise ValueError('Not a valid number of hours.') try: @@ -674,7 +658,7 @@ def _convert_time(string): if mins < 0: raise ValueError except ValueError: - _LOGGER.warning('Not a valid number of minutes: {}'.format(pieces[1])) + _LOGGER.warning(f'Not a valid number of minutes: {pieces[1]}') raise ValueError('Not a valid number of minutes.') try: @@ -682,7 +666,7 @@ def _convert_time(string): if secs < 0: raise ValueError except ValueError: - _LOGGER.warning('Not a valid number of seconds: {}'.format(pieces[2])) + _LOGGER.warning(f'Not a valid number of seconds: {pieces[2]}') raise ValueError('Not a valid number of seconds.') return hours * 3600 + mins * 60 + secs @@ -699,7 +683,7 @@ def _parse_time_string(string, fmt='%a %b %d %H:%M:%S %Y'): try: time_struct = time.strptime(string, fmt) except Exception as exc: - _LOGGER.debug('Unable to parse time string {}, the message was {}'.format(string, exc)) + _LOGGER.debug(f'Unable to parse time string {string}, the message was {exc}') raise ValueError('Problem parsing the time string.') # I convert from a time_struct to a datetime object going through @@ -717,17 +701,11 @@ def _parse_submit_output(self, retval, stdout, stderr): Return a string with the JobID. """ if retval != 0: - _LOGGER.error( - 'Error in _parse_submit_output: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) - raise SchedulerError( - 'Error during submission, retval={}\n' - 'stdout={}\nstderr={}'.format(retval, stdout, stderr) - ) + _LOGGER.error(f'Error in _parse_submit_output: retval={retval}; stdout={stdout}; stderr={stderr}') + raise SchedulerError(f'Error during submission, retval={retval}; stdout={stdout}; stderr={stderr}') if stderr.strip(): - _LOGGER.warning('in _parse_submit_output there was some text in stderr: {}'.format(stderr)) + _LOGGER.warning(f'in _parse_submit_output there was some text in stderr: {stderr}') return stdout.strip() @@ -735,9 +713,9 @@ def _get_kill_command(self, jobid): """ Return the command to kill the job with specified jobid. """ - submit_command = 'qdel {}'.format(jobid) + submit_command = f'qdel {jobid}' - _LOGGER.info('killing job {}'.format(jobid)) + _LOGGER.info(f'killing job {jobid}') return submit_command @@ -750,16 +728,13 @@ def _parse_kill_output(self, retval, stdout, stderr): :return: True if everything seems ok, False otherwise. 
""" if retval != 0: - _LOGGER.error( - 'Error in _parse_kill_output: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) + _LOGGER.error(f'Error in _parse_kill_output: retval={retval}; stdout={stdout}; stderr={stderr}') return False if stderr.strip(): - _LOGGER.warning('in _parse_kill_output there was some text in stderr: {}'.format(stderr)) + _LOGGER.warning(f'in _parse_kill_output there was some text in stderr: {stderr}') if stdout.strip(): - _LOGGER.warning('in _parse_kill_output there was some text in stdout: {}'.format(stdout)) + _LOGGER.warning(f'in _parse_kill_output there was some text in stdout: {stdout}') return True diff --git a/aiida/schedulers/plugins/pbspro.py b/aiida/schedulers/plugins/pbspro.py index fa044b789b..fc03e48cac 100644 --- a/aiida/schedulers/plugins/pbspro.py +++ b/aiida/schedulers/plugins/pbspro.py @@ -61,11 +61,11 @@ def _get_resource_lines( return_lines = [] - select_string = 'select={}'.format(num_machines) + select_string = f'select={num_machines}' if num_mpiprocs_per_machine: - select_string += ':mpiprocs={}'.format(num_mpiprocs_per_machine) + select_string += f':mpiprocs={num_mpiprocs_per_machine}' if num_cores_per_machine: - select_string += ':ppn={}'.format(num_cores_per_machine) + select_string += f':ppn={num_cores_per_machine}' if max_wallclock_seconds is not None: try: @@ -82,7 +82,7 @@ def _get_resource_lines( tot_minutes = tot_secs % 3600 minutes = tot_minutes // 60 seconds = tot_minutes % 60 - return_lines.append('#PBS -l walltime={:02d}:{:02d}:{:02d}'.format(hours, minutes, seconds)) + return_lines.append(f'#PBS -l walltime={hours:02d}:{minutes:02d}:{seconds:02d}') if max_memory_kb: try: @@ -95,7 +95,7 @@ def _get_resource_lines( "a positive integer (in kB)! It is instead '{}'" ''.format((max_memory_kb)) ) - select_string += ':mem={}kb'.format(virtual_memory_kb) + select_string += f':mem={virtual_memory_kb}kb' - return_lines.append('#PBS -l {}'.format(select_string)) + return_lines.append(f'#PBS -l {select_string}') return return_lines diff --git a/aiida/schedulers/plugins/sge.py b/aiida/schedulers/plugins/sge.py index ade7595c12..85e55eb89c 100644 --- a/aiida/schedulers/plugins/sge.py +++ b/aiida/schedulers/plugins/sge.py @@ -123,17 +123,17 @@ def _get_joblist_command(self, jobs=None, user=None): command = 'qstat -ext -urg -xml ' if user: - command += '-u {}'.format(str(user)) + command += f'-u {str(user)}' else: # All users if no user is specified command += "-u '*'" - self.logger.debug('qstat command: {}'.format(command)) + self.logger.debug(f'qstat command: {command}') return command # raise NotImplementedError def _get_detailed_job_info_command(self, job_id): - command = 'qacct -j {}'.format(escape_for_bash(job_id)) + command = f'qacct -j {escape_for_bash(job_id)}' return command def _get_submit_script_header(self, job_tmpl): @@ -156,7 +156,7 @@ def _get_submit_script_header(self, job_tmpl): # SGE provides flags for wd and cwd if job_tmpl.working_directory: - lines.append('#$ -wd {}'.format(job_tmpl.working_directory)) + lines.append(f'#$ -wd {job_tmpl.working_directory}') else: lines.append('#$ -cwd') @@ -165,16 +165,16 @@ def _get_submit_script_header(self, job_tmpl): if job_tmpl.submit_as_hold: # if isinstance(job_tmpl.submit_as_hold, str): - lines.append('#$ -h {}'.format(job_tmpl.submit_as_hold)) + lines.append(f'#$ -h {job_tmpl.submit_as_hold}') if job_tmpl.rerunnable: # if isinstance(job_tmpl.rerunnable, str): - lines.append('#$ -r {}'.format(job_tmpl.rerunnable)) + lines.append(f'#$ -r {job_tmpl.rerunnable}') if 
job_tmpl.email: # If not specified, but email events are set, PBSPro # sends the mail to the job owner by default - lines.append('#$ -M {}'.format(job_tmpl.email)) + lines.append(f'#$ -M {job_tmpl.email}') email_events = '' if job_tmpl.email_on_started: @@ -182,7 +182,7 @@ def _get_submit_script_header(self, job_tmpl): if job_tmpl.email_on_terminated: email_events += 'ea' if email_events: - lines.append('#$ -m {}'.format(email_events)) + lines.append(f'#$ -m {email_events}') if not job_tmpl.email: self.logger.info( 'Email triggers provided to SGE script for job,' @@ -204,15 +204,15 @@ def _get_submit_script_header(self, job_tmpl): # (the first symbol cannot be digit, at least in some versions # of the scheduler) if not job_title or (job_title[0] not in string.ascii_letters): - job_title = 'j' + job_title + job_title = f'j{job_title}' - lines.append('#$ -N {}'.format(job_tmpl.job_name)) + lines.append(f'#$ -N {job_tmpl.job_name}') if job_tmpl.import_sys_environment: lines.append('#$ -V') if job_tmpl.sched_output_path: - lines.append('#$ -o {}'.format(job_tmpl.sched_output_path)) + lines.append(f'#$ -o {job_tmpl.sched_output_path}') if job_tmpl.sched_join_files: # from qsub man page: @@ -227,26 +227,24 @@ def _get_submit_script_header(self, job_tmpl): ) else: if job_tmpl.sched_error_path: - lines.append('#$ -e {}'.format(job_tmpl.sched_error_path)) + lines.append(f'#$ -e {job_tmpl.sched_error_path}') if job_tmpl.queue_name: - lines.append('#$ -q {}'.format(job_tmpl.queue_name)) + lines.append(f'#$ -q {job_tmpl.queue_name}') if job_tmpl.account: - lines.append('#$ -P {}'.format(job_tmpl.account)) + lines.append(f'#$ -P {job_tmpl.account}') if job_tmpl.priority: # Priority of the job. Format: host-dependent integer. Default: # zero. Range: [-1023, +1024]. Sets job's Priority # attribute to priority. - lines.append('#$ -p {}'.format(job_tmpl.priority)) + lines.append(f'#$ -p {job_tmpl.priority}') if not job_tmpl.job_resource: raise ValueError('Job resources (as the tot_num_mpiprocs) are required for the SGE scheduler plugin') # Setting up the parallel environment - lines.append('#$ -pe {} {}'. \ - format(str(job_tmpl.job_resource.parallel_env), \ - int(job_tmpl.job_resource.tot_num_mpiprocs))) + lines.append(f'#$ -pe {str(job_tmpl.job_resource.parallel_env)} {int(job_tmpl.job_resource.tot_num_mpiprocs)}') if job_tmpl.max_wallclock_seconds is not None: try: @@ -263,7 +261,7 @@ def _get_submit_script_header(self, job_tmpl): tot_minutes = tot_secs % 3600 minutes = tot_minutes // 60 seconds = tot_minutes % 60 - lines.append('#$ -l h_rt={:02d}:{:02d}:{:02d}'.format(hours, minutes, seconds)) + lines.append(f'#$ -l h_rt={hours:02d}:{minutes:02d}:{seconds:02d}') if job_tmpl.custom_scheduler_commands: lines.append(job_tmpl.custom_scheduler_commands) @@ -280,7 +278,7 @@ def _get_submit_script_header(self, job_tmpl): if not isinstance(job_tmpl.job_environment, dict): raise ValueError('If you provide job_environment, it must be a dictionary') for key, value in job_tmpl.job_environment.items(): - lines.append('export {}={}'.format(key.strip(), escape_for_bash(value))) + lines.append(f'export {key.strip()}={escape_for_bash(value)}') lines.append('# ENVIRONMENT VARIABLES END ###') lines.append(empty_line) @@ -296,39 +294,31 @@ def _get_submit_command(self, submit_script): IMPORTANT: submit_script should be already escaped. 
""" # pylint: disable=too-many-statements,too-many-branches - submit_command = 'qsub -terse {}'.format(submit_script) + submit_command = f'qsub -terse {submit_script}' - self.logger.info('submitting with: ' + submit_command) + self.logger.info(f'submitting with: {submit_command}') return submit_command def _parse_joblist_output(self, retval, stdout, stderr): # pylint: disable=too-many-statements,too-many-branches if retval != 0: - self.logger.error( - 'Error in _parse_joblist_output: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) - raise SchedulerError('Error during joblist retrieval, retval={}'. \ - format(retval)) + self.logger.error(f'Error in _parse_joblist_output: retval={retval}; stdout={stdout}; stderr={stderr}') + raise SchedulerError(f'Error during joblist retrieval, retval={retval}') if stderr.strip(): self.logger.warning( - 'in _parse_joblist_output for {}: ' - 'there was some text in stderr: {}'.format(str(self.transport), stderr) + f'in _parse_joblist_output for {str(self.transport)}: there was some text in stderr: {stderr}' ) if stdout: try: xmldata = xml.dom.minidom.parseString(stdout) except xml.parsers.expat.ExpatError: - self.logger.error('in sge._parse_joblist_output: xml parsing of stdout failed: {}'.format(stdout)) + self.logger.error(f'in sge._parse_joblist_output: xml parsing of stdout failed: {stdout}') raise SchedulerParsingError('Error during joblist retrieval, xml parsing of stdout failed') else: - self.logger.error( - 'Error in sge._parse_joblist_output: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) + self.logger.error(f'Error in sge._parse_joblist_output: retval={retval}; stdout={stdout}; stderr={stderr}') raise SchedulerError('Error during joblist retrieval, no stdout produced') try: @@ -337,18 +327,13 @@ def _parse_joblist_output(self, retval, stdout, stderr): tag_names_sec = [elem.tagName for elem in second_childs \ if elem.nodeType == 1] if 'queue_info' not in tag_names_sec: - self.logger.error('Error in sge._parse_joblist_output: ' - 'no queue_info: {}'. \ - format(stdout)) + self.logger.error(f'Error in sge._parse_joblist_output: no queue_info: {stdout}') raise SchedulerError if 'job_info' not in tag_names_sec: - self.logger.error('Error in sge._parse_joblist_output: ' - 'no job_info: {}'. 
\ - format(stdout)) + self.logger.error(f'Error in sge._parse_joblist_output: no job_info: {stdout}') raise SchedulerError except SchedulerError: - self.logger.error('Error in sge._parse_joblist_output: stdout={}' \ - .format(stdout)) + self.logger.error(f'Error in sge._parse_joblist_output: stdout={stdout}') raise SchedulerError( 'Error during xml processing, of stdout:' "There is no 'job_info' or no 'queue_info'" @@ -356,8 +341,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): ) # If something weird happens while firstChild, pop, etc: except Exception: - self.logger.error('Error in sge._parse_joblist_output: stdout={}' \ - .format(stdout)) + self.logger.error(f'Error in sge._parse_joblist_output: stdout={stdout}') raise SchedulerError('Error during xml processing, of stdout') jobs = list(first_child.getElementsByTagName('job_list')) @@ -378,9 +362,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): if not this_job.job_id: raise SchedulerError except SchedulerError: - self.logger.error('Error in sge._parse_joblist_output:' - 'no job id is given, stdout={}' \ - .format(stdout)) + self.logger.error(f'Error in sge._parse_joblist_output:no job id is given, stdout={stdout}') raise SchedulerError('Error in sge._parse_joblist_output: no job id is given') except IndexError: self.logger.error("No 'job_number' given for job index {} in " @@ -409,14 +391,14 @@ def _parse_joblist_output(self, retval, stdout, stderr): element_child = job_element.childNodes.pop(0) this_job.job_owner = str(element_child.data).strip() except IndexError: - self.logger.warning("No 'job_owner' field for job id {}".format(this_job.job_id)) + self.logger.warning(f"No 'job_owner' field for job id {this_job.job_id}") try: job_element = job.getElementsByTagName('JB_name').pop(0) element_child = job_element.childNodes.pop(0) this_job.title = str(element_child.data).strip() except IndexError: - self.logger.warning("No 'title' field for job id {}".format(this_job.job_id)) + self.logger.warning(f"No 'title' field for job id {this_job.job_id}") try: job_element = job.getElementsByTagName('queue_name').pop(0) @@ -424,7 +406,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): this_job.queue_name = str(element_child.data).strip() except IndexError: if this_job.job_state == JobState.RUNNING: - self.logger.warning("No 'queue_name' field for job id {}".format(this_job.job_id)) + self.logger.warning(f"No 'queue_name' field for job id {this_job.job_id}") try: job_element = job.getElementsByTagName('JB_submission_time').pop(0) @@ -434,8 +416,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): this_job.submission_time = self._parse_time_string(time_string) except ValueError: self.logger.warning( - "Error parsing 'JB_submission_time' " - "for job id {} ('{}')".format(this_job.job_id, time_string) + f"Error parsing 'JB_submission_time' for job id {this_job.job_id} ('{time_string}')" ) except IndexError: try: @@ -446,8 +427,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): this_job.dispatch_time = self._parse_time_string(time_string) except ValueError: self.logger.warning( - "Error parsing 'JAT_start_time'" - "for job id {} ('{}')".format(this_job.job_id, time_string) + f"Error parsing 'JAT_start_time'for job id {this_job.job_id} ('{time_string}')" ) except IndexError: self.logger.warning( @@ -463,7 +443,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): element_child = job_element.childNodes.pop(0) this_job.num_mpiprocs = str(element_child.data).strip() except IndexError: - 
self.logger.warning("No 'slots' field for job id {}".format(this_job.job_id)) + self.logger.warning(f"No 'slots' field for job id {this_job.job_id}") joblist.append(this_job) # self.logger.debug("joblist final: {}".format(joblist)) @@ -479,19 +459,12 @@ def _parse_submit_output(self, retval, stdout, stderr): Return a string with the JobID. """ if retval != 0: - self.logger.error( - 'Error in _parse_submit_output: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) - raise SchedulerError( - 'Error during submission, retval={}\n' - 'stdout={}\nstderr={}'.format(retval, stdout, stderr) - ) + self.logger.error(f'Error in _parse_submit_output: retval={retval}; stdout={stdout}; stderr={stderr}') + raise SchedulerError(f'Error during submission, retval={retval}\nstdout={stdout}\nstderr={stderr}') if stderr.strip(): self.logger.warning( - 'in _parse_submit_output for {}: ' - 'there was some text in stderr: {}'.format(str(self.transport), stderr) + f'in _parse_submit_output for {str(self.transport)}: there was some text in stderr: {stderr}' ) return stdout.strip() @@ -508,7 +481,7 @@ def _parse_time_string(self, string, fmt='%Y-%m-%dT%H:%M:%S'): try: time_struct = time.strptime(string, fmt) except Exception as exc: - self.logger.debug('Unable to parse time string {}, the message was {}'.format(string, exc)) + self.logger.debug(f'Unable to parse time string {string}, the message was {exc}') raise ValueError('Problem parsing the time string.') # I convert from a time_struct to a datetime object going through @@ -520,9 +493,9 @@ def _get_kill_command(self, jobid): """ Return the command to kill the job with specified jobid. """ - submit_command = 'qdel {}'.format(jobid) + submit_command = f'qdel {jobid}' - self.logger.info('killing job {}'.format(jobid)) + self.logger.info(f'killing job {jobid}') return submit_command @@ -535,22 +508,17 @@ def _parse_kill_output(self, retval, stdout, stderr): :return: True if everything seems ok, False otherwise. 
""" if retval != 0: - self.logger.error( - 'Error in _parse_kill_output: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) + self.logger.error(f'Error in _parse_kill_output: retval={retval}; stdout={stdout}; stderr={stderr}') return False if stderr.strip(): self.logger.warning( - 'in _parse_kill_output for {}: ' - 'there was some text in stderr: {}'.format(str(self.transport), stderr) + f'in _parse_kill_output for {str(self.transport)}: there was some text in stderr: {stderr}' ) if stdout.strip(): self.logger.info( - 'in _parse_kill_output for {}: ' - 'there was some text in stdout: {}'.format(str(self.transport), stdout) + f'in _parse_kill_output for {str(self.transport)}: there was some text in stdout: {stdout}' ) return True diff --git a/aiida/schedulers/plugins/slurm.py b/aiida/schedulers/plugins/slurm.py index f420ddba54..6263d6512f 100644 --- a/aiida/schedulers/plugins/slurm.py +++ b/aiida/schedulers/plugins/slurm.py @@ -198,14 +198,14 @@ def _get_joblist_command(self, jobs=None, user=None): # sure to get the times in 'standard' format command = [ "SLURM_TIME_FORMAT='standard'", 'squeue', '--noheader', - "-o '{}'".format(_FIELD_SEPARATOR.join(_[0] for _ in self.fields)) + f"-o '{_FIELD_SEPARATOR.join(_[0] for _ in self.fields)}'" ] if user and jobs: raise FeatureNotAvailable('Cannot query by user and job(s) in SLURM') if user: - command.append('-u{}'.format(user)) + command.append(f'-u{user}') if jobs: joblist = [] @@ -231,10 +231,10 @@ def _get_joblist_command(self, jobs=None, user=None): if len(joblist) == 1: joblist += [joblist[0]] - command.append('--jobs={}'.format(','.join(joblist))) + command.append(f"--jobs={','.join(joblist)}") comm = ' '.join(command) - self.logger.debug('squeue command: {}'.format(comm)) + self.logger.debug(f'squeue command: {comm}') return comm def _get_detailed_job_info_command(self, job_id): @@ -247,7 +247,7 @@ def _get_detailed_job_info_command(self, job_id): the end. """ fields = ','.join(self._detailed_job_info_fields) - return 'sacct --format={} --parsable --jobs={}'.format(fields, job_id) + return f'sacct --format={fields} --parsable --jobs={job_id}' def _get_submit_script_header(self, job_tmpl): """ @@ -276,7 +276,7 @@ def _get_submit_script_header(self, job_tmpl): if job_tmpl.email: # If not specified, but email events are set, SLURM # sends the mail to the job owner by default - lines.append('#SBATCH --mail-user={}'.format(job_tmpl.email)) + lines.append(f'#SBATCH --mail-user={job_tmpl.email}') if job_tmpl.email_on_started: lines.append('#SBATCH --mail-type=BEGIN') @@ -297,19 +297,19 @@ def _get_submit_script_header(self, job_tmpl): # prepend a 'j' (for 'job') before the string if the string # is now empty or does not start with a valid charachter if not job_title or (job_title[0] not in string.ascii_letters + string.digits): - job_title = 'j' + job_title + job_title = f'j{job_title}' # Truncate to the first 128 characters # Nothing is done if the string is shorter. 
job_title = job_title[:128] - lines.append('#SBATCH --job-name="{}"'.format(job_title)) + lines.append(f'#SBATCH --job-name="{job_title}"') if job_tmpl.import_sys_environment: lines.append('#SBATCH --get-user-env') if job_tmpl.sched_output_path: - lines.append('#SBATCH --output={}'.format(job_tmpl.sched_output_path)) + lines.append(f'#SBATCH --output={job_tmpl.sched_output_path}') if job_tmpl.sched_join_files: # TODO: manual says: # pylint: disable=fixme @@ -325,36 +325,36 @@ def _get_submit_script_header(self, job_tmpl): ) else: if job_tmpl.sched_error_path: - lines.append('#SBATCH --error={}'.format(job_tmpl.sched_error_path)) + lines.append(f'#SBATCH --error={job_tmpl.sched_error_path}') else: # To avoid automatic join of files lines.append('#SBATCH --error=slurm-%j.err') if job_tmpl.queue_name: - lines.append('#SBATCH --partition={}'.format(job_tmpl.queue_name)) + lines.append(f'#SBATCH --partition={job_tmpl.queue_name}') if job_tmpl.account: - lines.append('#SBATCH --account={}'.format(job_tmpl.account)) + lines.append(f'#SBATCH --account={job_tmpl.account}') if job_tmpl.qos: - lines.append('#SBATCH --qos={}'.format(job_tmpl.qos)) + lines.append(f'#SBATCH --qos={job_tmpl.qos}') if job_tmpl.priority: # Run the job with an adjusted scheduling priority within SLURM. # With no adjustment value the scheduling priority is decreased by # 100. The adjustment range is from -10000 (highest priority) to # 10000 (lowest priority). - lines.append('#SBATCH --nice={}'.format(job_tmpl.priority)) + lines.append(f'#SBATCH --nice={job_tmpl.priority}') if not job_tmpl.job_resource: raise ValueError('Job resources (as the num_machines) are required for the SLURM scheduler plugin') - lines.append('#SBATCH --nodes={}'.format(job_tmpl.job_resource.num_machines)) + lines.append(f'#SBATCH --nodes={job_tmpl.job_resource.num_machines}') if job_tmpl.job_resource.num_mpiprocs_per_machine: - lines.append('#SBATCH --ntasks-per-node={}'.format(job_tmpl.job_resource.num_mpiprocs_per_machine)) + lines.append(f'#SBATCH --ntasks-per-node={job_tmpl.job_resource.num_mpiprocs_per_machine}') if job_tmpl.job_resource.num_cores_per_mpiproc: - lines.append('#SBATCH --cpus-per-task={}'.format(job_tmpl.job_resource.num_cores_per_mpiproc)) + lines.append(f'#SBATCH --cpus-per-task={job_tmpl.job_resource.num_cores_per_mpiproc}') if job_tmpl.max_wallclock_seconds is not None: try: @@ -374,9 +374,9 @@ def _get_submit_script_header(self, job_tmpl): minutes = tot_minutes // 60 seconds = tot_minutes % 60 if days == 0: - lines.append('#SBATCH --time={:02d}:{:02d}:{:02d}'.format(hours, minutes, seconds)) + lines.append(f'#SBATCH --time={hours:02d}:{minutes:02d}:{seconds:02d}') else: - lines.append('#SBATCH --time={:d}-{:02d}:{:02d}:{:02d}'.format(days, hours, minutes, seconds)) + lines.append(f'#SBATCH --time={days:d}-{hours:02d}:{minutes:02d}:{seconds:02d}') # It is the memory per node, not per cpu! if job_tmpl.max_memory_kb: @@ -392,7 +392,7 @@ def _get_submit_script_header(self, job_tmpl): ) # --mem: Specify the real memory required per node in MegaBytes. # --mem and --mem-per-cpu are mutually exclusive. 
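# A minimal sketch of the arithmetic behind the two directives around this point: the
# --time value assembled above and the --mem value (kB -> MB via integer division)
# appended just below. The days/hours split is assumed from context, since it sits
# outside the lines shown in this hunk; the helper name is illustrative only.
def sbatch_time_and_mem(tot_secs, max_memory_kb):
    days, rest = divmod(tot_secs, 86400)
    hours, rest = divmod(rest, 3600)
    minutes, seconds = divmod(rest, 60)
    if days == 0:
        time_line = f'#SBATCH --time={hours:02d}:{minutes:02d}:{seconds:02d}'
    else:
        time_line = f'#SBATCH --time={days:d}-{hours:02d}:{minutes:02d}:{seconds:02d}'
    mem_line = f'#SBATCH --mem={max_memory_kb // 1024}'  # --mem is interpreted in MB
    return time_line, mem_line

# sbatch_time_and_mem(93784, 4194304) -> ('#SBATCH --time=1-02:03:04', '#SBATCH --mem=4096')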
- lines.append('#SBATCH --mem={}'.format(virtual_memory_kb // 1024)) + lines.append(f'#SBATCH --mem={virtual_memory_kb // 1024}') if job_tmpl.custom_scheduler_commands: lines.append(job_tmpl.custom_scheduler_commands) @@ -409,7 +409,7 @@ def _get_submit_script_header(self, job_tmpl): if not isinstance(job_tmpl.job_environment, dict): raise ValueError('If you provide job_environment, it must be a dictionary') for key, value in job_tmpl.job_environment.items(): - lines.append('export {}={}'.format(key.strip(), escape_for_bash(value))) + lines.append(f'export {key.strip()}={escape_for_bash(value)}') lines.append('# ENVIRONMENT VARIABLES END ###') lines.append(empty_line) @@ -426,9 +426,9 @@ def _get_submit_command(self, submit_script): directory. IMPORTANT: submit_script should be already escaped. """ - submit_command = 'sbatch {}'.format(submit_script) + submit_command = f'sbatch {submit_script}' - self.logger.info('submitting with: ' + submit_command) + self.logger.info(f'submitting with: {submit_command}') return submit_command @@ -442,25 +442,16 @@ def _parse_submit_output(self, retval, stdout, stderr): Return a string with the JobID. """ if retval != 0: - self.logger.error( - 'Error in _parse_submit_output: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) - raise SchedulerError( - 'Error during submission, retval={}\n' - 'stdout={}\nstderr={}'.format(retval, stdout, stderr) - ) + self.logger.error(f'Error in _parse_submit_output: retval={retval}; stdout={stdout}; stderr={stderr}') + raise SchedulerError(f'Error during submission, retval={retval}\nstdout={stdout}\nstderr={stderr}') try: - transport_string = ' for {}'.format(self.transport) + transport_string = f' for {self.transport}' except SchedulerError: transport_string = '' if stderr.strip(): - self.logger.warning( - 'in _parse_submit_output{}: ' - 'there was some text in stderr: {}'.format(transport_string, stderr) - ) + self.logger.warning(f'in _parse_submit_output{transport_string}: there was some text in stderr: {stderr}') # I check for a valid string in the output. # See comments near the regexp above. @@ -470,10 +461,7 @@ def _parse_submit_output(self, retval, stdout, stderr): if match: return match.group('jobid') # If I am here, no valid line could be found. - self.logger.error( - 'in _parse_submit_output{}: ' - 'unable to find the job id: {}'.format(transport_string, stdout) - ) + self.logger.error(f'in _parse_submit_output{transport_string}: unable to find the job id: {stdout}') raise SchedulerError( 'Error during submission, could not retrieve the jobID from ' 'sbatch output; see log for more info.' @@ -502,15 +490,13 @@ def _parse_joblist_output(self, retval, stdout, stderr): # See discussion in _get_joblist_command on how we ensure that AiiDA can expect exit code 0 here. 
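# A minimal sketch of the job-id extraction performed above, assuming the usual
# 'Submitted batch job <id>' line printed by sbatch. The simplified pattern below is a
# stand-in for the module-level regexp referenced in the code, which is not shown in
# this hunk; the names here are illustrative only.
import re

_SUBMITTED_PATTERN = re.compile(r'[Ss]ubmitted batch job\s+(?P<jobid>\d+)')

def extract_slurm_jobid(stdout):
    for line in stdout.split('\n'):
        match = _SUBMITTED_PATTERN.search(line)
        if match:
            return match.group('jobid')
    return None

# extract_slurm_jobid('Submitted batch job 184207\n') -> '184207'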
if retval != 0: raise SchedulerError( - """squeue returned exit code {} (_parse_joblist_output function) -stdout='{}' -stderr='{}'""".format(retval, stdout.strip(), stderr.strip()) + f"""squeue returned exit code {retval} (_parse_joblist_output function) +stdout='{stdout.strip()}' +stderr='{stderr.strip()}'""" ) if stderr.strip(): self.logger.warning( - "squeue returned exit code 0 (_parse_joblist_output function) but non-empty stderr='{}'".format( - stderr.strip() - ) + f"squeue returned exit code 0 (_parse_joblist_output function) but non-empty stderr='{stderr.strip()}'" ) # will contain raw data parsed from output: only lines with the @@ -537,7 +523,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): except KeyError: # I skip this calculation if I couldn't find this basic info # (I don't append anything to job_list before continuing) - self.logger.error("Wrong line length in squeue output! '{}'".format(job)) + self.logger.error(f"Wrong line length in squeue output! '{job}'") continue try: @@ -623,7 +609,7 @@ def _parse_joblist_output(self, retval, stdout, stderr): walltime = (self._convert_time(thisjob_dict['time_limit'])) this_job.requested_wallclock_time_seconds = walltime # pylint: disable=invalid-name except ValueError: - self.logger.warning('Error parsing the time limit for job id {}'.format(this_job.job_id)) + self.logger.warning(f'Error parsing the time limit for job id {this_job.job_id}') # Only if it is RUNNING; otherwise it is not meaningful, # and may be not set (in my test, it is set to zero) @@ -631,17 +617,17 @@ def _parse_joblist_output(self, retval, stdout, stderr): try: this_job.wallclock_time_seconds = (self._convert_time(thisjob_dict['time_used'])) except ValueError: - self.logger.warning('Error parsing time_used for job id {}'.format(this_job.job_id)) + self.logger.warning(f'Error parsing time_used for job id {this_job.job_id}') try: this_job.dispatch_time = self._parse_time_string(thisjob_dict['dispatch_time']) except ValueError: - self.logger.warning('Error parsing dispatch_time for job id {}'.format(this_job.job_id)) + self.logger.warning(f'Error parsing dispatch_time for job id {this_job.job_id}') try: this_job.submission_time = self._parse_time_string(thisjob_dict['submission_time']) except ValueError: - self.logger.warning('Error parsing submission_time for job id {}'.format(this_job.job_id)) + self.logger.warning(f'Error parsing submission_time for job id {this_job.job_id}') this_job.title = thisjob_dict['job_name'] @@ -679,7 +665,7 @@ def _convert_time(self, string): groups = _TIME_REGEXP.match(string) if groups is None: - self.logger.warning("Unrecognized format for time string '{}'".format(string)) + self.logger.warning(f"Unrecognized format for time string '{string}'") raise ValueError('Unrecognized format for time string.') groupdict = groups.groupdict() @@ -702,7 +688,7 @@ def _parse_time_string(self, string, fmt='%Y-%m-%dT%H:%M:%S'): try: time_struct = time.strptime(string, fmt) except Exception as exc: - self.logger.debug('Unable to parse time string {}, the message was {}'.format(string, exc)) + self.logger.debug(f'Unable to parse time string {string}, the message was {exc}') raise ValueError('Problem parsing the time string.') # I convert from a time_struct to a datetime object going through @@ -714,9 +700,9 @@ def _get_kill_command(self, jobid): """ Return the command to kill the job with specified jobid. 
""" - submit_command = 'scancel {}'.format(jobid) + submit_command = f'scancel {jobid}' - self.logger.info('killing job {}'.format(jobid)) + self.logger.info(f'killing job {jobid}') return submit_command @@ -729,28 +715,19 @@ def _parse_kill_output(self, retval, stdout, stderr): :return: True if everything seems ok, False otherwise. """ if retval != 0: - self.logger.error( - 'Error in _parse_kill_output: retval={}; ' - 'stdout={}; stderr={}'.format(retval, stdout, stderr) - ) + self.logger.error(f'Error in _parse_kill_output: retval={retval}; stdout={stdout}; stderr={stderr}') return False try: - transport_string = ' for {}'.format(self.transport) + transport_string = f' for {self.transport}' except SchedulerError: transport_string = '' if stderr.strip(): - self.logger.warning( - 'in _parse_kill_output{}: ' - 'there was some text in stderr: {}'.format(transport_string, stderr) - ) + self.logger.warning(f'in _parse_kill_output{transport_string}: there was some text in stderr: {stderr}') if stdout.strip(): - self.logger.warning( - 'in _parse_kill_output{}: ' - 'there was some text in stdout: {}'.format(transport_string, stdout) - ) + self.logger.warning(f'in _parse_kill_output{transport_string}: there was some text in stdout: {stdout}') return True diff --git a/aiida/schedulers/plugins/torque.py b/aiida/schedulers/plugins/torque.py index f5a5c34e5a..5d6ff49566 100644 --- a/aiida/schedulers/plugins/torque.py +++ b/aiida/schedulers/plugins/torque.py @@ -52,13 +52,13 @@ def _get_resource_lines( """ return_lines = [] - select_string = 'nodes={}'.format(num_machines) + select_string = f'nodes={num_machines}' if num_cores_per_machine: - select_string += ':ppn={}'.format(num_cores_per_machine) + select_string += f':ppn={num_cores_per_machine}' elif num_mpiprocs_per_machine: # if num_cores_per_machine is not defined then use # num_mpiprocs_per_machine - select_string += ':ppn={}'.format(num_mpiprocs_per_machine) + select_string += f':ppn={num_mpiprocs_per_machine}' if max_wallclock_seconds is not None: try: @@ -77,7 +77,7 @@ def _get_resource_lines( seconds = tot_minutes % 60 # There is always something before, at least the total # # of nodes - select_string += (',walltime={:02d}:{:02d}:{:02d}'.format(hours, minutes, seconds)) + select_string += f',walltime={hours:02d}:{minutes:02d}:{seconds:02d}' if max_memory_kb: try: @@ -92,7 +92,7 @@ def _get_resource_lines( ) # There is always something before, at least the total # # of nodes - select_string += ',mem={}kb'.format(virtual_memory_kb) + select_string += f',mem={virtual_memory_kb}kb' - return_lines.append('#PBS -l {}'.format(select_string)) + return_lines.append(f'#PBS -l {select_string}') return return_lines diff --git a/aiida/schedulers/scheduler.py b/aiida/schedulers/scheduler.py index 193b75bb61..b8e0e278b9 100644 --- a/aiida/schedulers/scheduler.py +++ b/aiida/schedulers/scheduler.py @@ -111,7 +111,7 @@ def get_feature(self, feature_name): try: return self._features[feature_name] except KeyError: - raise NotImplementedError('Feature {} not implemented for this scheduler'.format(feature_name)) + raise NotImplementedError(f'Feature {feature_name} not implemented for this scheduler') @property def logger(self): @@ -163,7 +163,7 @@ def get_submit_script(self, job_tmpl): elif job_tmpl.shebang is None: script_lines.append('#!/bin/bash') else: - raise ValueError('Invalid shebang set: {}'.format(job_tmpl.shebang)) + raise ValueError(f'Invalid shebang set: {job_tmpl.shebang}') script_lines.append(self._get_submit_script_header(job_tmpl)) 
script_lines.append(empty_line) @@ -220,20 +220,20 @@ def _get_run_line(self, codes_info, codes_run_mode): command_to_exec_list.append(escape_for_bash(arg)) command_to_exec = ' '.join(command_to_exec_list) - stdin_str = '< {}'.format(escape_for_bash(code_info.stdin_name)) if code_info.stdin_name else '' - stdout_str = '> {}'.format(escape_for_bash(code_info.stdout_name)) if code_info.stdout_name else '' + stdin_str = f'< {escape_for_bash(code_info.stdin_name)}' if code_info.stdin_name else '' + stdout_str = f'> {escape_for_bash(code_info.stdout_name)}' if code_info.stdout_name else '' join_files = code_info.join_files if join_files: stderr_str = '2>&1' else: - stderr_str = '2> {}'.format(escape_for_bash(code_info.stderr_name)) if code_info.stderr_name else '' + stderr_str = f'2> {escape_for_bash(code_info.stderr_name)}' if code_info.stderr_name else '' - output_string = ('{} {} {} {}'.format(command_to_exec, stdin_str, stdout_str, stderr_str)) + output_string = f'{command_to_exec} {stdin_str} {stdout_str} {stderr_str}' list_of_runlines.append(output_string) - self.logger.debug('_get_run_line output: {}'.format(list_of_runlines)) + self.logger.debug(f'_get_run_line output: {list_of_runlines}') if codes_run_mode == CodeRunMode.PARALLEL: list_of_runlines.append('wait\n') @@ -310,14 +310,14 @@ def get_detailed_jobinfo(self, jobid): with self.transport: retval, stdout, stderr = self.transport.exec_command_wait(command) - return """Detailed jobinfo obtained with command '{}' -Return Code: {} + return f"""Detailed jobinfo obtained with command '{command}' +Return Code: {retval} ------------------------------------------------------------- stdout: -{} +{stdout} stderr: -{} -""".format(command, retval, stdout, stderr) +{stderr} +""" @abc.abstractmethod def _parse_joblist_output(self, retval, stdout, stderr): @@ -425,4 +425,4 @@ def parse_output(self, detailed_job_info, stdout, stderr): :param stderr: string with the output written by the scheduler to stderr :return: None or an instance of `aiida.engine.processes.exit_code.ExitCode` """ - raise exceptions.FeatureNotAvailable('output parsing is not available for `{}`'.format(self.__class__.__name__)) + raise exceptions.FeatureNotAvailable(f'output parsing is not available for `{self.__class__.__name__}`') diff --git a/aiida/sphinxext/process.py b/aiida/sphinxext/process.py index 73c879c059..04e62cfa1f 100644 --- a/aiida/sphinxext/process.py +++ b/aiida/sphinxext/process.py @@ -72,15 +72,13 @@ def initialize(self): self.class_name = self.arguments[0].split('(')[0] self.module_name = self.options['module'] - self.process_name = self.module_name + '.' 
+ self.class_name + self.process_name = f'{self.module_name}.{self.class_name}' self.process = get_object_from_string(self.process_name) try: self.process_spec = self.process.spec() except Exception as exc: - raise RuntimeError( - "Error while building the spec for process '{}': '{!r}.'".format(self.process_name, exc) - ) from exc + raise RuntimeError(f"Error while building the spec for process '{self.process_name}': '{exc!r}.'") from exc def build_node_tree(self): """Returns the docutils node tree.""" @@ -93,7 +91,7 @@ def build_signature(self): """Returns the signature of the process.""" signature = addnodes.desc_signature(first=False, fullname=self.signature) signature += addnodes.desc_annotation(text=self.annotation) - signature += addnodes.desc_addname(text=self.module_name + '.') + signature += addnodes.desc_addname(text=f'{self.module_name}.') signature += addnodes.desc_name(text=self.class_name) return signature @@ -192,7 +190,7 @@ def format_valid_types(valid_type): return valid_type.__name__ except AttributeError: try: - return '(' + ', '.join(v.__name__ for v in valid_type) + ')' + return f"({', '.join(v.__name__ for v in valid_type)})" except (AttributeError, TypeError): return str(valid_type) diff --git a/aiida/tools/data/array/kpoints/__init__.py b/aiida/tools/data/array/kpoints/__init__.py index 21330687a4..59c40e53f6 100644 --- a/aiida/tools/data/array/kpoints/__init__.py +++ b/aiida/tools/data/array/kpoints/__init__.py @@ -44,7 +44,7 @@ def get_kpoints_path(structure, method='seekpath', **kwargs): :returns: dictionary as described above in the docstring """ if method not in _GET_KPOINTS_PATH_METHODS.keys(): - raise ValueError("the method '{}' is not implemented".format(method)) + raise ValueError(f"the method '{method}' is not implemented") method = _GET_KPOINTS_PATH_METHODS[method] @@ -80,7 +80,7 @@ def get_explicit_kpoints_path(structure, method='seekpath', **kwargs): :returns: dictionary as described above in the docstring """ if method not in _GET_EXPLICIT_KPOINTS_PATH_METHODS.keys(): - raise ValueError("the method '{}' is not implemented".format(method)) + raise ValueError(f"the method '{method}' is not implemented") method = _GET_EXPLICIT_KPOINTS_PATH_METHODS[method] @@ -118,7 +118,7 @@ def _seekpath_get_kpoints_path(structure, **kwargs): unknown_args = set(kwargs).difference(recognized_args) if unknown_args: - raise ValueError('unknown arguments {}'.format(unknown_args)) + raise ValueError(f'unknown arguments {unknown_args}') return seekpath.get_kpoints_path(structure, kwargs) @@ -158,7 +158,7 @@ def _seekpath_get_explicit_kpoints_path(structure, **kwargs): unknown_args = set(kwargs).difference(recognized_args) if unknown_args: - raise ValueError('unknown arguments {}'.format(unknown_args)) + raise ValueError(f'unknown arguments {unknown_args}') return seekpath.get_explicit_kpoints_path(structure, kwargs) @@ -178,7 +178,7 @@ def _legacy_get_kpoints_path(structure, **kwargs): args_unknown = set(kwargs).difference(args_recognized) if args_unknown: - raise ValueError('unknown arguments {}'.format(args_unknown)) + raise ValueError(f'unknown arguments {args_unknown}') point_coords, path, bravais_info = legacy.get_kpoints_path(cell=structure.cell, pbc=structure.pbc, **kwargs) @@ -210,7 +210,7 @@ def _legacy_get_explicit_kpoints_path(structure, **kwargs): args_unknown = set(kwargs).difference(args_recognized) if args_unknown: - raise ValueError('unknown arguments {}'.format(args_unknown)) + raise ValueError(f'unknown arguments {args_unknown}') point_coords, path, 
bravais_info, explicit_kpoints, labels = legacy.get_explicit_kpoints_path( # pylint: disable=unbalanced-tuple-unpacking cell=structure.cell, pbc=structure.pbc, **kwargs diff --git a/aiida/tools/data/array/kpoints/legacy.py b/aiida/tools/data/array/kpoints/legacy.py index d217e83282..1b09eef128 100644 --- a/aiida/tools/data/array/kpoints/legacy.py +++ b/aiida/tools/data/array/kpoints/legacy.py @@ -343,7 +343,7 @@ def _num_points_from_coordinates(path, point_coordinates, kpoint_distance=None): labels = [(0, path[0][0])] assert all([_.is_integer() for _ in num_points if isinstance(_, (float, numpy.float64))] - ), 'Could not determine number of points as a whole number. num_points={}'.format(num_points) + ), f'Could not determine number of points as a whole number. num_points={num_points}' num_points = [int(_) for _ in num_points] for count_piece, i in enumerate(path): @@ -1112,8 +1112,8 @@ def a_are_equals(a, b): permutation = [0, 1, 2] bravais_info = { - 'short_name': '{}D'.format(dimension), - 'extended_name': '{}D'.format(dimension), + 'short_name': f'{dimension}D', + 'extended_name': f'{dimension}D', 'index': 1, 'permutation': permutation, } diff --git a/aiida/tools/data/cif.py b/aiida/tools/data/cif.py index a2eb83c69f..8cd02baa60 100644 --- a/aiida/tools/data/cif.py +++ b/aiida/tools/data/cif.py @@ -63,12 +63,12 @@ def symop_string_from_symop_matrix_tr(matrix, tr=(0, 0, 0), eps=0): elif matrix[i][j] < -eps: sign = '-' if sign: - parts[i] = format('{}{}{}'.format(parts[i], sign, axes[j])) + parts[i] = format(f'{parts[i]}{sign}{axes[j]}') if tr[i] < -eps or tr[i] > eps: sign = '+' if tr[i] < -eps: sign = '-' - parts[i] = format('{}{}{}'.format(parts[i], sign, abs(tr[i]))) + parts[i] = format(f'{parts[i]}{sign}{abs(tr[i])}') parts[i] = re.sub(r'^\+', '', parts[i]) return ','.join(parts) diff --git a/aiida/tools/data/orbital/orbital.py b/aiida/tools/data/orbital/orbital.py index f975414ae7..d08c603160 100644 --- a/aiida/tools/data/orbital/orbital.py +++ b/aiida/tools/data/orbital/orbital.py @@ -110,7 +110,7 @@ def __init__(self, **kwargs): self.set_orbital_dict(kwargs) def __repr__(self): - return '<{}: {}>'.format(self.__class__.__name__, str(self)) + return f'<{self.__class__.__name__}: {str(self)}>' def _validate_keys(self, input_dict): """ @@ -141,12 +141,12 @@ def _validate_keys(self, input_dict): try: value = input_dict.pop(name) except KeyError: - raise ValidationError("Missing required parameter '{}'".format(name)) + raise ValidationError(f"Missing required parameter '{name}'") # This might raise ValidationError try: value = validator(value) except ValidationError as exc: - raise exc.__class__("Error validating '{}': {}".format(name, str(exc))) + raise exc.__class__(f"Error validating '{name}': {str(exc)}") validated_dict[name] = value for name, validator, default_value in self._base_fields_optional: @@ -158,11 +158,11 @@ def _validate_keys(self, input_dict): try: value = validator(value) except ValidationError as exc: - raise exc.__class__("Error validating '{}': {}".format(name, str(exc))) + raise exc.__class__(f"Error validating '{name}': {str(exc)}") validated_dict[name] = value if input_dict: - raise ValidationError('Unknown keys: {}'.format(list(input_dict.keys()))) + raise ValidationError(f'Unknown keys: {list(input_dict.keys())}') return validated_dict def set_orbital_dict(self, init_dict): diff --git a/aiida/tools/data/orbital/realhydrogen.py b/aiida/tools/data/orbital/realhydrogen.py index ede67e9cde..a02728e800 100644 --- a/aiida/tools/data/orbital/realhydrogen.py +++ 
b/aiida/tools/data/orbital/realhydrogen.py @@ -329,7 +329,7 @@ def _validate_keys(self, input_dict): accepted_range = [0, -angular_momentum] if magnetic_number < min(accepted_range) or magnetic_number > max(accepted_range): raise ValidationError( - 'the magnetic number must be in the range [{}, {}]'.format(min(accepted_range), max(accepted_range)) + f'the magnetic number must be in the range [{min(accepted_range)}, {max(accepted_range)}]' ) # Check if it is a known combination @@ -355,8 +355,7 @@ def get_name_from_quantum_numbers(cls, angular_momentum, magnetic_number=None): ] if not orbital_name: raise InputValidationError( - 'No orbital name corresponding to the ' - 'angular_momentum {} could be found'.format(angular_momentum) + f'No orbital name corresponding to the angular_momentum {angular_momentum} could be found' ) if magnetic_number is not None: # finds angular momentum @@ -368,9 +367,7 @@ def get_name_from_quantum_numbers(cls, angular_momentum, magnetic_number=None): if not orbital_name: raise InputValidationError( - 'No orbital name corresponding to ' - 'the magnetic_number {} could be ' - 'found'.format(magnetic_number) + f'No orbital name corresponding to the magnetic_number {magnetic_number} could be found' ) return orbital_name[0] diff --git a/aiida/tools/data/structure/__init__.py b/aiida/tools/data/structure/__init__.py index d308d04373..c9a33a7c9c 100644 --- a/aiida/tools/data/structure/__init__.py +++ b/aiida/tools/data/structure/__init__.py @@ -135,8 +135,7 @@ def spglib_tuple_to_structure(structure_tuple, kind_info=None, kinds=None): # p symbols = [elements[num]['symbol'] for num in numbers] except KeyError as exc: raise ValueError( - 'You did not pass kind_info, but at least one number ' - 'is not a valid Z number: {}'.format(exc.args[0]) + f'You did not pass kind_info, but at least one number is not a valid Z number: {exc.args[0]}' ) _kind_info = {elements[num]['symbol']: num for num in set(numbers)} @@ -153,12 +152,12 @@ def spglib_tuple_to_structure(structure_tuple, kind_info=None, kinds=None): # p try: mapping_to_kinds = {num: _kinds_dict[kindname] for num, kindname in mapping_num_kindname.items()} except KeyError as exc: - raise ValueError("Unable to find '{}' in the kinds list".format(exc.args[0])) + raise ValueError(f"Unable to find '{exc.args[0]}' in the kinds list") try: site_kinds = [mapping_to_kinds[num] for num in numbers] except KeyError as exc: - raise ValueError('Unable to find kind in kind_info for number {}'.format(exc.args[0])) + raise ValueError(f'Unable to find kind in kind_info for number {exc.args[0]}') structure = StructureData(cell=cell) for k in _kinds: @@ -207,18 +206,14 @@ def __next__(self): # pylint: disable=missing-docstring else: # otherwise we got too less entries raise TypeError( - 'Number of atom entries ({}) is smaller than the number of atoms ({})'.format( - self._catom, self._natoms - ) + f'Number of atom entries ({self._catom}) is smaller than the number of atoms ({self._natoms})' ) self._catom += 1 if self._catom > self._natoms: raise TypeError( - 'Number of atom entries ({}) is larger than the number of atoms ({})'.format( - self._catom, self._natoms - ) + f'Number of atom entries ({self._catom}) is larger than the number of atoms ({self._natoms})' ) return (match.group('sym'), (float(match.group('x')), float(match.group('y')), float(match.group('z')))) diff --git a/aiida/tools/dbimporters/plugins/cod.py b/aiida/tools/dbimporters/plugins/cod.py index 12f35e0c42..0dce3a4bb3 100644 --- a/aiida/tools/dbimporters/plugins/cod.py +++ 
b/aiida/tools/dbimporters/plugins/cod.py @@ -23,8 +23,8 @@ def _int_clause(self, key, alias, values): """ for value in values: if not isinstance(value, int) and not isinstance(value, str): - raise ValueError("incorrect value for keyword '" + alias + "' only integers and strings are accepted") - return key + ' IN (' + ', '.join(str(int(i)) for i in values) + ')' + raise ValueError(f"incorrect value for keyword '{alias}' only integers and strings are accepted") + return f"{key} IN ({', '.join(str(int(i)) for i in values)})" def _str_exact_clause(self, key, alias, values): """ @@ -33,11 +33,11 @@ def _str_exact_clause(self, key, alias, values): clause_parts = [] for value in values: if not isinstance(value, int) and not isinstance(value, str): - raise ValueError("incorrect value for keyword '" + alias + "' only integers and strings are accepted") + raise ValueError(f"incorrect value for keyword '{alias}' only integers and strings are accepted") if isinstance(value, int): value = str(value) - clause_parts.append("'" + value + "'") - return key + ' IN (' + ', '.join(clause_parts) + ')' + clause_parts.append(f"'{value}'") + return f"{key} IN ({', '.join(clause_parts)})" def _str_exact_or_none_clause(self, key, alias, values): """ @@ -51,9 +51,9 @@ def _str_exact_or_none_clause(self, key, alias, values): values_now.append(value) if values_now: clause = self._str_exact_clause(key, alias, values_now) - return '{} OR {} IS NULL'.format(clause, key) + return f'{clause} OR {key} IS NULL' - return '{} IS NULL'.format(key) + return f'{key} IS NULL' return self._str_exact_clause(key, alias, values) @@ -63,8 +63,8 @@ def _formula_clause(self, key, alias, values): """ for value in values: if not isinstance(value, str): - raise ValueError("incorrect value for keyword '" + alias + "' only strings are accepted") - return self._str_exact_clause(key, alias, ['- {} -'.format(f) for f in values]) + raise ValueError(f"incorrect value for keyword '{alias}' only strings are accepted") + return self._str_exact_clause(key, alias, [f'- {f} -' for f in values]) def _str_fuzzy_clause(self, key, alias, values): """ @@ -73,10 +73,10 @@ def _str_fuzzy_clause(self, key, alias, values): clause_parts = [] for value in values: if not isinstance(value, int) and not isinstance(value, str): - raise ValueError("incorrect value for keyword '" + alias + "' only integers and strings are accepted") + raise ValueError(f"incorrect value for keyword '{alias}' only integers and strings are accepted") if isinstance(value, int): value = str(value) - clause_parts.append(key + " LIKE '%" + value + "%'") + clause_parts.append(f"{key} LIKE '%{value}%'") return ' OR '.join(clause_parts) def _composition_clause(self, _, alias, values): @@ -86,8 +86,8 @@ def _composition_clause(self, _, alias, values): clause_parts = [] for value in values: if not isinstance(value, str): - raise ValueError("incorrect value for keyword '" + alias + "' only strings are accepted") - clause_parts.append("formula REGEXP ' " + value + "[0-9 ]'") + raise ValueError(f"incorrect value for keyword '{alias}' only strings are accepted") + clause_parts.append(f"formula REGEXP ' {value}[0-9 ]'") return ' AND '.join(clause_parts) def _double_clause(self, key, alias, values, precision): @@ -96,7 +96,7 @@ def _double_clause(self, key, alias, values, precision): """ for value in values: if not isinstance(value, int) and not isinstance(value, float): - raise ValueError("incorrect value for keyword '" + alias + "' only integers and floats are accepted") + raise ValueError(f"incorrect 
value for keyword '{alias}' only integers and floats are accepted") return ' OR '.join('{} BETWEEN {} AND {}'.format(key, d - precision, d + precision) for d in values) length_precision = 0.001 @@ -187,12 +187,12 @@ def query_sql(self, **kwargs): values = kwargs.pop(key) if not isinstance(values, list): values = [values] - sql_parts.append('(' + self._keywords[key][1](self, self._keywords[key][0], key, values) + ')') + sql_parts.append(f'({self._keywords[key][1](self, self._keywords[key][0], key, values)})') if kwargs: - raise NotImplementedError('following keyword(s) are not implemented: {}'.format(', '.join(kwargs.keys()))) + raise NotImplementedError(f"following keyword(s) are not implemented: {', '.join(kwargs.keys())}") - return 'SELECT file, svnrevision FROM data WHERE ' + ' AND '.join(sql_parts) + return f"SELECT file, svnrevision FROM data WHERE {' AND '.join(sql_parts)}" def query(self, **kwargs): """ @@ -292,10 +292,10 @@ def _get_url(self, result_dict): :param result_dict: dictionary, describing an entry in the results. """ - url = self._base_url + result_dict['id'] + '.cif' + url = f"{self._base_url + result_dict['id']}.cif" if 'svnrevision' in result_dict and \ result_dict['svnrevision'] is not None: - return '{}@{}'.format(url, result_dict['svnrevision']) + return f"{url}@{result_dict['svnrevision']}" return url diff --git a/aiida/tools/dbimporters/plugins/icsd.py b/aiida/tools/dbimporters/plugins/icsd.py index 2584a41081..524ceef9a1 100644 --- a/aiida/tools/dbimporters/plugins/icsd.py +++ b/aiida/tools/dbimporters/plugins/icsd.py @@ -101,7 +101,7 @@ def _int_clause(self, key, alias, values): for value in values: if not isinstance(value, int) and not isinstance(value, str): raise ValueError("incorrect value for keyword '" + alias + ' only integers and strings are accepted') - return '{} IN ({})'.format(key, ', '.join(str(int(i)) for i in values)) + return f"{key} IN ({', '.join(str(int(i)) for i in values)})" def _str_exact_clause(self, key, alias, values): """ @@ -378,7 +378,7 @@ def _query_sql_db(self, **kwargs): for key, value in kwargs.items(): if not isinstance(value, list): value = [value] - sql_where_query.append('({})'.format(self.keywords_db[key][1](self, self.keywords_db[key][0], key, value))) + sql_where_query.append(f'({self.keywords_db[key][1](self, self.keywords_db[key][0], key, value)})') if 'crystal_system' in kwargs: # to query another table than the main one, add LEFT JOIN in front of WHERE sql_query = 'LEFT JOIN space_group ON space_group.sgr=icsd.sgr LEFT '\ 'JOIN space_group_number ON '\ @@ -419,7 +419,7 @@ def _queryweb(self, **kwargs): else: self.actual_args[realname] = newv except KeyError as exc: - raise TypeError("ICSDImporter got an unexpected keyword argument '{}'".format(exc.args[0])) + raise TypeError(f"ICSDImporter got an unexpected keyword argument '{exc.args[0]}'") url_values = urlencode(self.actual_args) query_url = self.db_parameters['urladd'] + url_values @@ -538,7 +538,7 @@ def query_db_version(self): sql_from_query = 'FROM icsd.icsd_database_information ' self._connect_db() - query_statement = '{}{}'.format(sql_select_query, sql_from_query) + query_statement = f'{sql_select_query}{sql_from_query}' self.cursor.execute(query_statement) self.db.commit() diff --git a/aiida/tools/dbimporters/plugins/materialsproject.py b/aiida/tools/dbimporters/plugins/materialsproject.py index 0f09d3b324..f4d1ced5c3 100644 --- a/aiida/tools/dbimporters/plugins/materialsproject.py +++ b/aiida/tools/dbimporters/plugins/materialsproject.py @@ -115,7 +115,7 @@ 
def query(self, **kwargs): properties = self._properties if properties != 'structure': - raise ValueError('Unsupported properties: {}'.format(properties)) + raise ValueError(f'Unsupported properties: {properties}') results = [] properties_list = ['material_id', 'cif'] diff --git a/aiida/tools/dbimporters/plugins/mpds.py b/aiida/tools/dbimporters/plugins/mpds.py index a5be35e9c4..6b4d733177 100644 --- a/aiida/tools/dbimporters/plugins/mpds.py +++ b/aiida/tools/dbimporters/plugins/mpds.py @@ -166,7 +166,7 @@ def query(self, query, collection=None): # pylint: disable=arguments-differ search_results = MpdsSearchResults(results, return_class=MpdsCifEntry) else: - raise ValueError('Unsupported collection: {}'.format(collection)) + raise ValueError(f'Unsupported collection: {collection}') return search_results @@ -247,14 +247,14 @@ def get_response_content(response, fmt=DEFAULT_API_FORMAT): :raises ValueError: HTTP response 200 contained non zero error message """ if not response.ok: - raise RuntimeError('HTTP[{}] request failed: {}'.format(response.status_code, response.text)) + raise RuntimeError(f'HTTP[{response.status_code}] request failed: {response.text}') if fmt == ApiFormat.JSON: content = response.json() error = content.get('error', None) if error is not None: - raise ValueError('Got error response: {}'.format(error)) + raise ValueError(f'Got error response: {error}') return content diff --git a/aiida/tools/dbimporters/plugins/mpod.py b/aiida/tools/dbimporters/plugins/mpod.py index 139dcf963e..d9ef9a450d 100644 --- a/aiida/tools/dbimporters/plugins/mpod.py +++ b/aiida/tools/dbimporters/plugins/mpod.py @@ -22,8 +22,8 @@ def _str_clause(self, key, alias, values): Returns part of HTTP GET query for querying string fields. """ if not isinstance(values, str) and not isinstance(values, int): - raise ValueError("incorrect value for keyword '" + alias + "' -- only strings and integers are accepted") - return '{}={}'.format(key, values) + raise ValueError(f"incorrect value for keyword '{alias}' -- only strings and integers are accepted") + return f'{key}={values}' _keywords = { 'phase_name': ['phase_name', _str_clause], @@ -60,14 +60,14 @@ def query_get(self, **kwargs): get_parts.append(self._keywords[key][1](self, self._keywords[key][0], key, values)) if kwargs: - raise NotImplementedError('following keyword(s) are not implemented: {}'.format(', '.join(kwargs.keys()))) + raise NotImplementedError(f"following keyword(s) are not implemented: {', '.join(kwargs.keys())}") queries = [] for element in elements: clauses = [self._str_clause('formula', 'element', element)] - queries.append(self._query_url + '?' + '&'.join(get_parts + clauses)) + queries.append(f"{self._query_url}?{'&'.join(get_parts + clauses)}") if not queries: - queries.append(self._query_url + '?' + '&'.join(get_parts)) + queries.append(f"{self._query_url}?{'&'.join(get_parts)}") return queries @@ -102,7 +102,7 @@ def setup_db(self, query_url=None, **kwargs): # pylint: disable=arguments-diffe self._query_url = query_url if kwargs: - raise NotImplementedError('following keyword(s) are not implemented: {}'.format(', '.join(kwargs.keys()))) + raise NotImplementedError(f"following keyword(s) are not implemented: {', '.join(kwargs.keys())}") def get_supported_keywords(self): """ @@ -141,7 +141,7 @@ def _get_url(self, result_dict): :param result_dict: dictionary, describing an entry in the results. 
""" - return self._base_url + result_dict['id'] + '.mpod' + return f"{self._base_url + result_dict['id']}.mpod" class MpodEntry(CifEntry): # pylint: disable=abstract-method diff --git a/aiida/tools/dbimporters/plugins/nninc.py b/aiida/tools/dbimporters/plugins/nninc.py index ce2e724f47..6ddb4f2bf3 100644 --- a/aiida/tools/dbimporters/plugins/nninc.py +++ b/aiida/tools/dbimporters/plugins/nninc.py @@ -26,7 +26,7 @@ def _str_clause(self, key, alias, values): "incorrect value for keyword '{}' -- only " 'strings and integers are accepted'.format(alias) ) - return '{}={}'.format(key, values) + return f'{key}={values}' _keywords = { 'xc_approximation': ['frmxcprox', _str_clause], @@ -54,9 +54,9 @@ def query_get(self, **kwargs): get_parts.append(self._keywords[key][1](self, self._keywords[key][0], key, values)) if kwargs: - raise NotImplementedError('following keyword(s) are not implemented: {}'.format(', '.join(kwargs.keys()))) + raise NotImplementedError(f"following keyword(s) are not implemented: {', '.join(kwargs.keys())}") - return self._query_url + '?' + '&'.join(get_parts) + return f"{self._query_url}?{'&'.join(get_parts)}" def query(self, **kwargs): """ @@ -81,7 +81,7 @@ def query(self, **kwargs): results_now = set() for psp in results: for element in elements: - if psp.startswith('{}.'.format(element)): + if psp.startswith(f'{element}.'): results_now = results_now | set([psp]) results = list(results_now) @@ -138,7 +138,7 @@ def _get_url(self, result_dict): :param result_dict: dictionary, describing an entry in the results. """ - return self._base_url + result_dict['id'] + '.UPF' + return f"{self._base_url + result_dict['id']}.UPF" class NnincEntry(UpfEntry): diff --git a/aiida/tools/dbimporters/plugins/oqmd.py b/aiida/tools/dbimporters/plugins/oqmd.py index 5af78fe127..81db833813 100644 --- a/aiida/tools/dbimporters/plugins/oqmd.py +++ b/aiida/tools/dbimporters/plugins/oqmd.py @@ -22,8 +22,8 @@ def _str_clause(self, key, alias, values): Returns part of HTTP GET query for querying string fields. 
""" if not isinstance(values, str) and not isinstance(values, int): - raise ValueError("incorrect value for keyword '" + alias + "' -- only strings and integers are accepted") - return '{}={}'.format(key, values) + raise ValueError(f"incorrect value for keyword '{alias}' -- only strings and integers are accepted") + return f'{key}={values}' _keywords = {'element': ['element', None]} @@ -43,7 +43,7 @@ def query_get(self, **kwargs): if not isinstance(elements, list): elements = [elements] - return '{}/materials/composition/{}'.format(self._query_url, ''.join(elements)) + return f"{self._query_url}/materials/composition/{''.join(elements)}" def query(self, **kwargs): """ @@ -62,7 +62,7 @@ def query(self, **kwargs): results = [] for entry in entries: - response = urlopen('{}{}'.format(self._query_url, entry)).read() + response = urlopen(f'{self._query_url}{entry}').read() structures = re.findall(r'/materials/export/conventional/cif/(\d+)', response) for struct in structures: results.append({'id': struct}) @@ -77,7 +77,7 @@ def setup_db(self, query_url=None, **kwargs): # pylint: disable=arguments-diffe self._query_url = query_url if kwargs: - raise NotImplementedError('following keyword(s) are not implemented: {}'.format(', '.join(kwargs.keys()))) + raise NotImplementedError(f"following keyword(s) are not implemented: {', '.join(kwargs.keys())}") def get_supported_keywords(self): """ diff --git a/aiida/tools/dbimporters/plugins/pcod.py b/aiida/tools/dbimporters/plugins/pcod.py index 4550ab5634..79b586060b 100644 --- a/aiida/tools/dbimporters/plugins/pcod.py +++ b/aiida/tools/dbimporters/plugins/pcod.py @@ -50,11 +50,11 @@ def query_sql(self, **kwargs): values = kwargs.pop(key) if not isinstance(values, list): values = [values] - sql_parts.append('(' + self._keywords[key][1](self, self._keywords[key][0], key, values) + ')') + sql_parts.append(f'({self._keywords[key][1](self, self._keywords[key][0], key, values)})') if kwargs: - raise NotImplementedError('following keyword(s) are not implemented: {}'.format(', '.join(kwargs.keys()))) + raise NotImplementedError(f"following keyword(s) are not implemented: {', '.join(kwargs.keys())}") - return 'SELECT file FROM data WHERE ' + ' AND '.join(sql_parts) + return f"SELECT file FROM data WHERE {' AND '.join(sql_parts)}" def query(self, **kwargs): """ @@ -94,7 +94,7 @@ def _get_url(self, result_dict): :param result_dict: dictionary, describing an entry in the results. 
""" - return self._base_url + result_dict['id'][0] + '/' + result_dict['id'][0:3] + '/' + result_dict['id'] + '.cif' + return f"{self._base_url + result_dict['id'][0]}/{result_dict['id'][0:3]}/{result_dict['id']}.cif" class PcodEntry(CodEntry): # pylint: disable=abstract-method diff --git a/aiida/tools/graph/age_entities.py b/aiida/tools/graph/age_entities.py index 7d69d9540b..8af45ade93 100644 --- a/aiida/tools/graph/age_entities.py +++ b/aiida/tools/graph/age_entities.py @@ -127,7 +127,7 @@ def __len__(self): return len(self.keyset) def __repr__(self): - return '{{{}}}'.format(','.join(map(str, self.keyset))) + return f"{{{','.join(map(str, self.keyset))}}}" def __eq__(self, other): return self.keyset == other.keyset @@ -177,7 +177,7 @@ def __init__(self, aiida_cls): """ super().__init__() if not aiida_cls in VALID_ENTITY_CLASSES: - raise TypeError('aiida_cls has to be among:{}'.format(VALID_ENTITY_CLASSES)) + raise TypeError(f'aiida_cls has to be among:{VALID_ENTITY_CLASSES}') self._aiida_cls = aiida_cls self.keyset = set() self._identifier = 'id' @@ -254,7 +254,7 @@ def __init__(self, aiida_cls_to, aiida_cls_from): super().__init__() for aiida_cls in (aiida_cls_to, aiida_cls_from): if not aiida_cls in VALID_ENTITY_CLASSES: - raise TypeError('aiida_cls has to be among:{}'.format(VALID_ENTITY_CLASSES)) + raise TypeError(f'aiida_cls has to be among:{VALID_ENTITY_CLASSES}') self._aiida_cls_to = aiida_cls_to self._aiida_cls_from = aiida_cls_from self.keyset = set() @@ -270,11 +270,9 @@ def __init__(self, aiida_cls_to, aiida_cls_from): self._edge_identifiers = (('nodes', 'id'), ('groups', 'id')) self._edge_namedtuple = GroupNodeEdge else: - raise TypeError( - 'Unexpted types aiida_cls_from={} and aiida_cls_to={}'.format(aiida_cls_from, aiida_cls_to) - ) + raise TypeError(f'Unexpted types aiida_cls_from={aiida_cls_from} and aiida_cls_to={aiida_cls_to}') else: - raise TypeError('Unexpted types aiida_cls_from={} and aiida_cls_to={}'.format(aiida_cls_from, aiida_cls_to)) + raise TypeError(f'Unexpted types aiida_cls_from={aiida_cls_from} and aiida_cls_to={aiida_cls_to}') def _check_self_and_other(self, other): if not isinstance(other, DirectedEdgeSet): @@ -289,11 +287,11 @@ def _check_self_and_other(self, other): def _check_input_for_set(self, input_for_set): if not isinstance(input_for_set, tuple): - raise TypeError('value for `input_for_set` {} is not a tuple'.format(input_for_set)) + raise TypeError(f'value for `input_for_set` {input_for_set} is not a tuple') if len(input_for_set) != len(self._edge_identifiers): inputs_len = len(input_for_set) inside_len = len(self._edge_identifiers) - raise ValueError('tuple passed has len = {}, but there are {} identifiers'.format(inputs_len, inside_len)) + raise ValueError(f'tuple passed has len = {inputs_len}, but there are {inside_len} identifiers') return input_for_set def get_template(self): @@ -354,7 +352,7 @@ def get_check_set_entity_set(input_object, keyword, aiida_class): if isinstance(input_object, AiidaEntitySet): if input_object.aiida_cls is aiida_class: return input_object - raise TypeError('{} has to have {} as aiida_cls'.format(keyword, aiida_class)) + raise TypeError(f'{keyword} has to have {aiida_class} as aiida_cls') else: raise ValueError( @@ -368,13 +366,13 @@ def get_check_set_directed_edge_set(var, keyword, cls_from, cls_to): return DirectedEdgeSet(aiida_cls_to=cls_to, aiida_cls_from=cls_from) if isinstance(var, DirectedEdgeSet): if var.aiida_cls_from is not cls_from: - raise TypeError('{} has to have {} as 
aiida_cls_from'.format(keyword, cls_from)) + raise TypeError(f'{keyword} has to have {cls_from} as aiida_cls_from') elif var.aiida_cls_to is not cls_to: - raise TypeError('{} has to have {} as aiida_cls_to'.format(keyword, cls_to)) + raise TypeError(f'{keyword} has to have {cls_to} as aiida_cls_to') else: return var else: - raise TypeError('{} has to be an instance of DirectedEdgeSet'.format(keyword)) + raise TypeError(f'{keyword} has to be an instance of DirectedEdgeSet') nodes = get_check_set_entity_set(nodes, 'nodes', orm.Node) groups = get_check_set_entity_set(groups, 'groups', orm.Group) @@ -451,8 +449,8 @@ def __ne__(self, other): def __repr__(self): ret_str = '' for key, val in self._dict.items(): - ret_str += ' ' + key + ': ' - ret_str += str(val) + '\n' + ret_str += f' {key}: ' + ret_str += f'{str(val)}\n' return ret_str def empty(self): diff --git a/aiida/tools/graph/age_rules.py b/aiida/tools/graph/age_rules.py index 09f373a901..90982bc43b 100644 --- a/aiida/tools/graph/age_rules.py +++ b/aiida/tools/graph/age_rules.py @@ -89,7 +89,7 @@ def get_spec_from_path(queryhelp, idx): elif queryhelp['path'][idx]['entity_type'].startswith(GROUP_ENTITY_TYPE_PREFIX): result = 'groups' else: - raise Exception('not understood entity from ( {} )'.format(queryhelp['path'][idx]['entity_type'])) + raise Exception(f"not understood entity from ( {queryhelp['path'][idx]['entity_type']} )") return result queryhelp = querybuilder.queryhelp @@ -200,7 +200,7 @@ def _init_run(self, operational_set): # For now I can only specify edge_identifiers as 'edge', ie. project on the edge # itself, or by the entity_from, entity_to keyword, ie. groups or nodes. # One could think of other keywords... - raise ValueError('This tag ({}) is not known'.format(tag)) + raise ValueError(f'This tag ({tag}) is not known') self._edge_keys.append((actual_tag, projection)) projections[actual_tag].append(projection) diff --git a/aiida/tools/graph/graph_traversers.py b/aiida/tools/graph/graph_traversers.py index 793e845f73..cee4e9e52a 100644 --- a/aiida/tools/graph/graph_traversers.py +++ b/aiida/tools/graph/graph_traversers.py @@ -109,9 +109,7 @@ def validate_traversal_rules(ruleset=GraphTraversalRules.DEFAULT, **kwargs): if not isinstance(ruleset, GraphTraversalRules): raise TypeError( - 'ruleset input must be of type aiida.common.links.GraphTraversalRules\ninstead, it is: {}'.format( - type(ruleset) - ) + f'ruleset input must be of type aiida.common.links.GraphTraversalRules\ninstead, it is: {type(ruleset)}' ) rules_applied = {} @@ -125,12 +123,12 @@ def validate_traversal_rules(ruleset=GraphTraversalRules.DEFAULT, **kwargs): if name in kwargs: if not rule.toggleable: - raise ValueError('input rule {} is not toggleable for ruleset {}'.format(name, ruleset)) + raise ValueError(f'input rule {name} is not toggleable for ruleset {ruleset}') follow = kwargs.pop(name) if not isinstance(follow, bool): - raise ValueError('the value of rule {} must be boolean, but it is: {}'.format(name, follow)) + raise ValueError(f'the value of rule {name} must be boolean, but it is: {follow}') if follow: @@ -139,14 +137,12 @@ def validate_traversal_rules(ruleset=GraphTraversalRules.DEFAULT, **kwargs): elif rule.direction == 'backward': links_backward.append(rule.link_type) else: - raise exceptions.InternalError( - 'unrecognized direction `{}` for graph traversal rule'.format(rule.direction) - ) + raise exceptions.InternalError(f'unrecognized direction `{rule.direction}` for graph traversal rule') rules_applied[name] = follow if kwargs: - 
error_message = 'unrecognized keywords: {}'.format(', '.join(kwargs.keys())) + error_message = f"unrecognized keywords: {', '.join(kwargs.keys())}" raise exceptions.ValidationError(error_message) valid_output = { @@ -197,19 +193,19 @@ def traverse_graph(starting_pks, max_iterations=None, get_links=False, links_for linktype_list = [] for linktype in links_forward: if not isinstance(linktype, LinkType): - raise TypeError('links_forward should contain links, but one of them is: {}'.format(type(linktype))) + raise TypeError(f'links_forward should contain links, but one of them is: {type(linktype)}') linktype_list.append(linktype.value) filters_forwards = {'type': {'in': linktype_list}} linktype_list = [] for linktype in links_backward: if not isinstance(linktype, LinkType): - raise TypeError('links_backward should contain links, but one of them is: {}'.format(type(linktype))) + raise TypeError(f'links_backward should contain links, but one of them is: {type(linktype)}') linktype_list.append(linktype.value) filters_backwards = {'type': {'in': linktype_list}} if not isinstance(starting_pks, (list, set, tuple)): - raise TypeError('starting_pks must be of type list, set or tuple\ninstead, it is {}'.format(type(starting_pks))) + raise TypeError(f'starting_pks must be of type list, set or tuple\ninstead, it is {type(starting_pks)}') if not starting_pks: if get_links: @@ -219,7 +215,7 @@ def traverse_graph(starting_pks, max_iterations=None, get_links=False, links_for return output if any([not isinstance(pk, int) for pk in starting_pks]): - raise TypeError('one of the starting_pks is not of type int:\n {}'.format(starting_pks)) + raise TypeError(f'one of the starting_pks is not of type int:\n {starting_pks}') operational_set = set(starting_pks) query_nodes = orm.QueryBuilder() @@ -228,7 +224,7 @@ def traverse_graph(starting_pks, max_iterations=None, get_links=False, links_for missing_pks = operational_set.difference(existing_pks) if missing_pks: raise exceptions.NotExistent( - 'The following pks are not in the database and must be pruned before this call: {}'.format(missing_pks) + f'The following pks are not in the database and must be pruned before this call: {missing_pks}' ) rules = [] diff --git a/aiida/tools/groups/paths.py b/aiida/tools/groups/paths.py index 40505e987d..4c57300285 100644 --- a/aiida/tools/groups/paths.py +++ b/aiida/tools/groups/paths.py @@ -30,7 +30,7 @@ class GroupNotFoundError(Exception): """An exception raised when a path does not have an associated group.""" def __init__(self, grouppath): - msg = 'No such group: {}'.format(grouppath.path) + msg = f'No such group: {grouppath.path}' super().__init__(msg) @@ -38,7 +38,7 @@ class GroupNotUniqueError(Exception): """An exception raised when a path has multiple associated groups.""" def __init__(self, grouppath): - msg = 'The path is not unique: {}'.format(grouppath.path) + msg = f'The path is not unique: {grouppath.path}' super().__init__(msg) @@ -46,7 +46,7 @@ class NoGroupsInPathError(Exception): """An exception raised when a path has multiple associated groups.""" def __init__(self, grouppath): - msg = 'The path does not contain any descendant groups: {}'.format(grouppath.path) + msg = f'The path does not contain any descendant groups: {grouppath.path}' super().__init__(msg) @@ -69,7 +69,7 @@ def __init__(self, path: str = '', cls: orm.groups.GroupMeta = orm.Group, warn_i """ if not issubclass(cls, orm.Group): - raise TypeError('cls must a subclass of Group: {}'.format(cls)) + raise TypeError(f'cls must a subclass of Group: 
{cls}') self._delimiter = '/' self._cls = cls @@ -82,14 +82,14 @@ def _validate_path(self, path): if path == self._delimiter: return '' if self._delimiter * 2 in path: - raise InvalidPath("The path may not contain a duplicate delimiter '{}': {}".format(self._delimiter, path)) + raise InvalidPath(f"The path may not contain a duplicate delimiter '{self._delimiter}': {path}") if (path.startswith(self._delimiter) or path.endswith(self._delimiter)): - raise InvalidPath("The path may not start/end with the delimiter '{}': {}".format(self._delimiter, path)) + raise InvalidPath(f"The path may not start/end with the delimiter '{self._delimiter}': {path}") return path def __repr__(self) -> str: """Represent the instantiated class.""" - return "{}('{}', cls='{}')".format(self.__class__.__name__, self.path, self.cls) + return f"{self.__class__.__name__}('{self.path}', cls='{self.cls}')" def __eq__(self, other: Any) -> bool: """Compare equality of path and ``Group`` subclass to another ``GroupPath`` object.""" @@ -142,7 +142,7 @@ def parent(self) -> Optional['GroupPath']: def __truediv__(self, path: str) -> 'GroupPath': """Return a child ``GroupPath``, with a new path formed by appending ``path`` to the current path.""" if not isinstance(path, str): - raise TypeError('path is not a string: {}'.format(path)) + raise TypeError(f'path is not a string: {path}') path = self._validate_path(path) child = GroupPath( path=self.path + self.delimiter + path if self.path else path, @@ -204,7 +204,7 @@ def children(self) -> Iterator['GroupPath']: query = orm.QueryBuilder() filters = {} if self.path: - filters['label'] = {'like': self.path + self.delimiter + '%'} + filters['label'] = {'like': f'{self.path + self.delimiter}%'} query.append(self.cls, subclassing=False, filters=filters, project='label') if query.count() == 0 and self.is_virtual: raise NoGroupsInPathError(self) @@ -221,7 +221,7 @@ def children(self) -> Iterator['GroupPath']: yield GroupPath(path=path_string, cls=self.cls, warn_invalid_child=self._warn_invalid_child) except InvalidPath: if self._warn_invalid_child: - warnings.warn('invalid path encountered: {}'.format(path_string)) # pylint: disable=no-member + warnings.warn(f'invalid path encountered: {path_string}') # pylint: disable=no-member def __iter__(self) -> Iterator['GroupPath']: """Iterate through all (direct) children of this path.""" @@ -264,7 +264,7 @@ def walk_nodes( query = orm.QueryBuilder() group_filters = {} if self.path: - group_filters['label'] = {'or': [{'==': self.path}, {'like': self.path + self.delimiter + '%'}]} + group_filters['label'] = {'or': [{'==': self.path}, {'like': f'{self.path + self.delimiter}%'}]} query.append(self.cls, subclassing=False, filters=group_filters, project='label', tag='group') query.append( orm.Node if node_class is None else node_class, @@ -300,7 +300,7 @@ def __init__(self, group_path: GroupPath) -> None: def __repr__(self) -> str: """Represent the instantiated class.""" - return "{}('{}', type='{}')".format(self.__class__.__name__, self._group_path.path, self._group_path.cls) + return f"{self.__class__.__name__}('{self._group_path.path}', type='{self._group_path.cls}')" def __call__(self) -> GroupPath: """Return the ``GroupPath``.""" diff --git a/aiida/tools/importexport/common/archive.py b/aiida/tools/importexport/common/archive.py index f330bff117..004330b0a2 100644 --- a/aiida/tools/importexport/common/archive.py +++ b/aiida/tools/importexport/common/archive.py @@ -90,7 +90,7 @@ def unpack(self): raise CorruptArchive('unrecognized archive format') 
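As an aside (not part of the changeset): in the GroupPath hunks above, the LIKE pattern is written as an f-string whose single placeholder still contains a concatenation, `f'{self.path + self.delimiter}%'`. Splitting it into two placeholders produces the same string, so the choice is purely stylistic. A minimal standalone sketch, with invented values for `path` and `delimiter`:

# Both spellings of the LIKE pattern used in GroupPath.children() yield the
# same string; the values below are placeholders, not taken from the diff.
path = 'some/group'
delimiter = '/'

like_concat = f'{path + delimiter}%'    # concatenation inside one placeholder
like_separate = f'{path}{delimiter}%'   # one placeholder per value

assert like_concat == like_separate == 'some/group/%'
print(like_concat)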
if not self.folder.get_content_list(): - raise ContentNotExistent('the provided archive {} is empty'.format(self.filepath)) + raise ContentNotExistent(f'the provided archive {self.filepath} is empty') self._unpacked = True @@ -233,7 +233,7 @@ def update_description(path, refresh: bool = False): (path, description) = os.path.split(path) while description == '': (path, description) = os.path.split(path) - description = 'EXTRACTING: {}'.format(description) + description = f'EXTRACTING: {description}' progress_bar = get_progress_bar() progress_bar.set_description_str(description, refresh=refresh) @@ -275,7 +275,7 @@ def get_file_iterator(file_handle, folderpath, silent=True, **kwargs): # pylint else: file_handle.extract(path=folderpath, member=json_file) except KeyError: - raise CorruptArchive('required file `{}` is not included'.format(json_file)) + raise CorruptArchive(f'required file `{json_file}` is not included') close_progress_bar(leave=False) if file_format == 'tar': @@ -371,12 +371,12 @@ def extract_tar(infile, folder, nodes_export_subfolder=None, **kwargs): for member in file_iterator: if member.isdev(): # safety: skip if character device, block device or FIFO - print('WARNING, device found inside the import file: {}'.format(member.name), file=sys.stderr) + print(f'WARNING, device found inside the import file: {member.name}', file=sys.stderr) continue if member.issym() or member.islnk(): # safety: in export, I set dereference=True therefore # there should be no symbolic or hard links. - print('WARNING, symlink found inside the import file: {}'.format(member.name), file=sys.stderr) + print(f'WARNING, symlink found inside the import file: {member.name}', file=sys.stderr) continue # Check that we are only exporting nodes within the subfolder! # TODO: better check such that there are no .. in the diff --git a/aiida/tools/importexport/common/progress_bar.py b/aiida/tools/importexport/common/progress_bar.py index be942a3553..3c91294df3 100644 --- a/aiida/tools/importexport/common/progress_bar.py +++ b/aiida/tools/importexport/common/progress_bar.py @@ -67,8 +67,7 @@ def get_progress_bar(iterable=None, total=None, leave=None, **kwargs): setattr(PROGRESS_BAR, attribute, value) except AttributeError: raise ProgressBarError( - 'The given attribute {} either can not be set or does not exist for the progress bar.'. - format(attribute) + f'The given attribute {attribute} either can not be set or does not exist for the progress bar.' 
) return PROGRESS_BAR diff --git a/aiida/tools/importexport/common/utils.py b/aiida/tools/importexport/common/utils.py index 0aef11888a..6b67fe8966 100644 --- a/aiida/tools/importexport/common/utils.py +++ b/aiida/tools/importexport/common/utils.py @@ -70,7 +70,7 @@ def handle_starttag(self, tag, attrs): if tag == 'a': for key, value in attrs: if key == 'href': - if (self.filter_extension is None or value.endswith('.{}'.format(self.filter_extension))): + if (self.filter_extension is None or value.endswith(f'.{self.filter_extension}')): self.links.append(value) def get_links(self): diff --git a/aiida/tools/importexport/dbexport/__init__.py b/aiida/tools/importexport/dbexport/__init__.py index d6646024c6..5f10fc28b2 100644 --- a/aiida/tools/importexport/dbexport/__init__.py +++ b/aiida/tools/importexport/dbexport/__init__.py @@ -135,7 +135,7 @@ def export( filename = 'export_data.aiida' if not overwrite and os.path.exists(filename): - raise exceptions.ArchiveExportError("The output file '{}' already exists".format(filename)) + raise exceptions.ArchiveExportError(f"The output file '{filename}' already exists") if silent: logging.disable(level=logging.CRITICAL) @@ -391,7 +391,7 @@ def export_tree( entities_starting_set[COMPUTER_ENTITY_NAME].add(entry.uuid) else: raise exceptions.ArchiveExportError( - 'I was given {} ({}), which is not a Node, Computer, or Group instance'.format(entry, type(entry)) + f'I was given {entry} ({type(entry)}), which is not a Node, Computer, or Group instance' ) # Add all the nodes contained within the specified groups @@ -491,7 +491,7 @@ def export_tree( entries_to_add = dict() for given_entity in given_entities: - progress_bar.set_description_str(pbar_base_str + ' - {}s'.format(given_entity), refresh=False) + progress_bar.set_description_str(f'{pbar_base_str} - {given_entity}s', refresh=False) progress_bar.update() project_cols = ['id'] @@ -555,7 +555,7 @@ def export_tree( entity_separator = '_' for entity_name, partial_query in entries_to_add.items(): - progress_bar.set_description_str('Exporting {}s'.format(entity_name), refresh=False) + progress_bar.set_description_str(f'Exporting {entity_name}s', refresh=False) progress_bar.update() foreign_fields = {k: v for k, v in all_fields_info[entity_name].items() if 'requires' in v} @@ -595,7 +595,7 @@ def export_tree( EXPORT_LOGGER.log(msg='Nothing to store, exiting...', level=LOG_LEVEL_REPORT) return EXPORT_LOGGER.log( - msg='Exporting a total of {} database entries, of which {} are Nodes.'.format(model_data, len(all_node_pks)), + msg=f'Exporting a total of {model_data} database entries, of which {len(all_node_pks)} are Nodes.', level=LOG_LEVEL_REPORT ) @@ -710,7 +710,7 @@ def export_tree( for uuid in all_node_uuids: sharded_uuid = export_shard_uuid(uuid) - progress_bar.set_description_str(pbar_base_str + 'UUID={}'.format(uuid.split('-')[0]), refresh=False) + progress_bar.set_description_str(f"{pbar_base_str}UUID={uuid.split('-')[0]}", refresh=False) progress_bar.update() # Important to set create=False, otherwise creates twice a subfolder. Maybe this is a bug of insert_path? 
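For reference (illustrative only, not part of the diff): the export loop above shortens each node UUID to its first hyphen-separated block when it updates the progress-bar description, via `f"{pbar_base_str}UUID={uuid.split('-')[0]}"`. A rough standalone equivalent, where the value of `pbar_base_str` is a made-up placeholder:

import uuid

pbar_base_str = 'Exporting repository - '        # placeholder prefix
node_uuid = str(uuid.uuid4())                    # random UUID, e.g. '1c9067f4-...'
# Keep only the first hyphen-separated segment for a compact description.
description = f"{pbar_base_str}UUID={node_uuid.split('-')[0]}"
print(description)                               # something like 'Exporting repository - UUID=1c9067f4'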
@@ -720,7 +720,7 @@ def export_tree( src = RepositoryFolder(section=Repository._section_name, uuid=uuid) # pylint: disable=protected-access if not src.exists(): raise exceptions.ArchiveExportError( - 'Unable to find the repository folder for Node with UUID={} in the local repository'.format(uuid) + f'Unable to find the repository folder for Node with UUID={uuid} in the local repository' ) # In this way, I copy the content of the folder, and not the folder itself diff --git a/aiida/tools/importexport/dbexport/utils.py b/aiida/tools/importexport/dbexport/utils.py index 84306c218a..38b0c7ba62 100644 --- a/aiida/tools/importexport/dbexport/utils.py +++ b/aiida/tools/importexport/dbexport/utils.py @@ -131,10 +131,7 @@ def check_licenses(node_licenses, allowed_licenses, forbidden_licenses): raise LicensingException except LicensingException: raise LicensingException( - 'Node {} is licensed ' - 'under {} license, which ' - 'is not in the list of ' - 'allowed licenses'.format(pk, license_) + f'Node {pk} is licensed under {license_} license, which is not in the list of allowed licenses' ) if forbidden_licenses is not None: try: @@ -149,10 +146,7 @@ def check_licenses(node_licenses, allowed_licenses, forbidden_licenses): raise LicensingException except LicensingException: raise LicensingException( - 'Node {} is licensed ' - 'under {} license, which ' - 'is in the list of ' - 'forbidden licenses'.format(pk, license_) + f'Node {pk} is licensed under {license_} license, which is in the list of forbidden licenses' ) @@ -293,7 +287,7 @@ def summary(file_format, outfile, **kwargs): parameters = [['Archive', outfile], ['Format', file_format], ['Export version', EXPORT_VERSION]] - result = '\n{}'.format(tabulate(parameters, headers=['EXPORT', ''])) + result = f"\n{tabulate(parameters, headers=['EXPORT', ''])}" include_comments = kwargs.get('include_comments', True) include_logs = kwargs.get('include_logs', True) @@ -303,13 +297,13 @@ def summary(file_format, outfile, **kwargs): call_reversed = kwargs.get('call_reversed', False) inclusions = [['Include Comments', include_comments], ['Include Logs', include_logs]] - result += '\n\n{}'.format(tabulate(inclusions, headers=['Inclusion rules', ''])) + result += f"\n\n{tabulate(inclusions, headers=['Inclusion rules', ''])}" traversal_rules = [['Follow INPUT Links forwards', input_forward], ['Follow CREATE Links backwards', create_reversed], ['Follow RETURN Links backwards', return_reversed], ['Follow CALL Links backwards', call_reversed]] - result += '\n\n{}\n'.format(tabulate(traversal_rules, headers=['Traversal rules', ''])) + result += f"\n\n{tabulate(traversal_rules, headers=['Traversal rules', ''])}\n" EXPORT_LOGGER.log(msg=result, level=LOG_LEVEL_REPORT) @@ -327,9 +321,9 @@ def deprecated_parameters(old, new): """ if old.get('value', None) is not None: if new.get('value', None) is not None: - message = '`{}` is deprecated, the supplied `{}` input will be used'.format(old['name'], new['name']) + message = f"`{old['name']}` is deprecated, the supplied `{new['name']}` input will be used" else: - message = '`{}` is deprecated, please use `{}` instead'.format(old['name'], new['name']) + message = f"`{old['name']}` is deprecated, please use `{new['name']}` instead" new['value'] = old['value'] warnings.warn(message, AiidaDeprecationWarning) # pylint: disable=no-member diff --git a/aiida/tools/importexport/dbexport/zip.py b/aiida/tools/importexport/dbexport/zip.py index 5a155ba50a..bb3753eed5 100644 --- a/aiida/tools/importexport/dbexport/zip.py +++ 
b/aiida/tools/importexport/dbexport/zip.py @@ -132,7 +132,7 @@ def insert_path(self, src, dest_name=None, overwrite=True): raise ValueError('src must be an absolute path in insert_file') if not overwrite and self.exists(base_filename): - raise IOError('destination already exists: {}'.format(base_filename)) + raise IOError(f'destination already exists: {base_filename}') if os.path.isdir(src): for dirpath, dirnames, filenames in os.walk(src): diff --git a/aiida/tools/importexport/dbimport/__init__.py b/aiida/tools/importexport/dbimport/__init__.py index 5e5a44c20c..6b3c95261b 100644 --- a/aiida/tools/importexport/dbimport/__init__.py +++ b/aiida/tools/importexport/dbimport/__init__.py @@ -78,4 +78,4 @@ def import_data(in_path, group=None, silent=False, **kwargs): return import_data_dj(in_path, group=group, silent=silent, **kwargs) # else - raise ArchiveImportError('Unknown backend: {}'.format(backend)) + raise ArchiveImportError(f'Unknown backend: {backend}') diff --git a/aiida/tools/importexport/dbimport/backends/django/__init__.py b/aiida/tools/importexport/dbimport/backends/django/__init__.py index 7d1854bb4a..49e58dfa2c 100644 --- a/aiida/tools/importexport/dbimport/backends/django/__init__.py +++ b/aiida/tools/importexport/dbimport/backends/django/__init__.py @@ -143,7 +143,7 @@ def import_data_dj( ) if not folder.get_content_list(): - raise exceptions.CorruptArchive('The provided file/folder ({}) is empty'.format(in_path)) + raise exceptions.CorruptArchive(f'The provided file/folder ({in_path}) is empty') try: with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle: metadata = json.load(fhandle) @@ -151,9 +151,7 @@ def import_data_dj( with open(folder.get_abs_path('data.json'), 'r', encoding='utf8') as fhandle: data = json.load(fhandle) except IOError as error: - raise exceptions.CorruptArchive( - 'Unable to find the file {} in the import file or folder'.format(error.filename) - ) + raise exceptions.CorruptArchive(f'Unable to find the file {error.filename} in the import file or folder') ###################### # PRELIMINARY CHECKS # @@ -205,7 +203,7 @@ def import_data_dj( for import_field_name in metadata['all_fields_info']: if import_field_name not in model_order: raise exceptions.ImportValidationError( - "You are trying to import an unknown model '{}'!".format(import_field_name) + f"You are trying to import an unknown model '{import_field_name}'!" ) for idx, model_name in enumerate(model_order): @@ -219,7 +217,7 @@ def import_data_dj( for dependency in dependencies: if dependency not in model_order[:idx]: raise exceptions.ArchiveImportError( - 'Model {} requires {} but would be loaded first; stopping...'.format(model_name, dependency) + f'Model {model_name} requires {dependency} but would be loaded first; stopping...' 
) ################################################### @@ -284,7 +282,7 @@ def import_data_dj( relevant_db_entries = {} if import_unique_ids: relevant_db_entries_result = model.objects.filter( - **{'{}__in'.format(unique_identifier): import_unique_ids} + **{f'{unique_identifier}__in': import_unique_ids} ) # Note: UUIDs need to be converted to strings @@ -353,8 +351,8 @@ def import_data_dj( # I import data from the given model for model_name in model_order: # Progress bar initialization - Model - pbar_base_str = '{}s - '.format(model_name) - progress_bar.set_description_str(pbar_base_str + 'Initializing', refresh=True) + pbar_base_str = f'{model_name}s - ' + progress_bar.set_description_str(f'{pbar_base_str}Initializing', refresh=True) cls_signature = entity_names_to_signatures[model_name] model = get_object_from_string(cls_signature) @@ -365,7 +363,7 @@ def import_data_dj( if existing_entries[model_name]: # Progress bar update - Model progress_bar.set_description_str( - pbar_base_str + '{} existing entries'.format(len(existing_entries[model_name])), refresh=True + f'{pbar_base_str}{len(existing_entries[model_name])} existing entries', refresh=True ) for import_entry_pk, entry_data in existing_entries[model_name].items(): @@ -406,7 +404,7 @@ def import_data_dj( if new_entries[model_name]: # Progress bar update - Model progress_bar.set_description_str( - pbar_base_str + '{} new entries'.format(len(new_entries[model_name])), refresh=True + f'{pbar_base_str}{len(new_entries[model_name])} new entries', refresh=True ) for import_entry_pk, entry_data in new_entries[model_name].items(): @@ -434,7 +432,7 @@ def import_data_dj( # Progress bar initialization - Node progress_bar.update() - pbar_node_base_str = pbar_base_str + 'UUID={} - '.format(import_entry_uuid.split('-')[0]) + pbar_node_base_str = f"{pbar_base_str}UUID={import_entry_uuid.split('-')[0]} - " # Before storing entries in the DB, I store the files (if these are nodes). # Note: only for new entries! 
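A small side note on the hunk above: the f-string is used to build a dynamic Django field lookup, `f'{unique_identifier}__in'`, which is then splatted into `filter(**{...})`. The sketch below shows just the dictionary construction, with no Django involved; `'uuid'` and the pk list are example values, and `fake_filter` is a stand-in for `Model.objects.filter`:

# Dynamic keyword built at run time from an f-string key, then unpacked with **.
unique_identifier = 'uuid'
import_unique_ids = ['a1', 'b2', 'c3']

lookup = {f'{unique_identifier}__in': import_unique_ids}
assert lookup == {'uuid__in': ['a1', 'b2', 'c3']}

def fake_filter(**kwargs):
    """Stand-in for Model.objects.filter: just echoes the keyword it received."""
    return kwargs

print(fake_filter(**lookup))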
@@ -449,32 +447,32 @@ def import_data_dj( destdir = RepositoryFolder(section=Repository._section_name, uuid=import_entry_uuid) # Replace the folder, possibly destroying existing previous folders, and move the files # (faster if we are on the same filesystem, and in any case the source is a SandboxFolder) - progress_bar.set_description_str(pbar_node_base_str + 'Repository', refresh=True) + progress_bar.set_description_str(f'{pbar_node_base_str}Repository', refresh=True) destdir.replace_with_folder(subfolder.abspath, move=True, overwrite=True) # For DbNodes, we also have to store its attributes IMPORT_LOGGER.debug('STORING NEW NODE ATTRIBUTES...') - progress_bar.set_description_str(pbar_node_base_str + 'Attributes', refresh=True) + progress_bar.set_description_str(f'{pbar_node_base_str}Attributes', refresh=True) # Get attributes from import file try: object_.attributes = data['node_attributes'][str(import_entry_pk)] except KeyError: raise exceptions.CorruptArchive( - 'Unable to find attribute info for Node with UUID={}'.format(import_entry_uuid) + f'Unable to find attribute info for Node with UUID={import_entry_uuid}' ) # For DbNodes, we also have to store its extras if extras_mode_new == 'import': IMPORT_LOGGER.debug('STORING NEW NODE EXTRAS...') - progress_bar.set_description_str(pbar_node_base_str + 'Extras', refresh=True) + progress_bar.set_description_str(f'{pbar_node_base_str}Extras', refresh=True) # Get extras from import file try: extras = data['node_extras'][str(import_entry_pk)] except KeyError: raise exceptions.CorruptArchive( - 'Unable to find extra info for Node with UUID={}'.format(import_entry_uuid) + f'Unable to find extra info for Node with UUID={import_entry_uuid}' ) # TODO: remove when aiida extras will be moved somewhere else # from here @@ -504,8 +502,8 @@ def import_data_dj( import_entry_pk = import_existing_entry_pks[import_entry_uuid] # Progress bar initialization - Node - pbar_node_base_str = pbar_base_str + 'UUID={} - '.format(import_entry_uuid.split('-')[0]) - progress_bar.set_description_str(pbar_node_base_str + 'Extras', refresh=False) + pbar_node_base_str = f"{pbar_base_str}UUID={import_entry_uuid.split('-')[0]} - " + progress_bar.set_description_str(f'{pbar_node_base_str}Extras', refresh=False) progress_bar.update() # Get extras from import file @@ -513,7 +511,7 @@ def import_data_dj( extras = data['node_extras'][str(import_entry_pk)] except KeyError: raise exceptions.CorruptArchive( - 'Unable to find extra info for Node with UUID={}'.format(import_entry_uuid) + f'Unable to find extra info for Node with UUID={import_entry_uuid}' ) old_extras = node.extras.copy() @@ -534,7 +532,7 @@ def import_data_dj( # Update progress bar with new non-Node entries progress_bar.update(n=len(existing_entries[model_name]) + len(new_entries[model_name])) - progress_bar.set_description_str(pbar_base_str + 'Storing', refresh=True) + progress_bar.set_description_str(f'{pbar_base_str}Storing', refresh=True) # If there is an mtime in the field, disable the automatic update # to keep the mtime that we have set here @@ -546,11 +544,9 @@ def import_data_dj( model.objects.bulk_create(objects_to_create, batch_size=batch_size) # Get back the just-saved entries - just_saved_queryset = model.objects.filter( - **{ - '{}__in'.format(unique_identifier): import_new_entry_pks.keys() - } - ).values_list(unique_identifier, 'pk') + just_saved_queryset = model.objects.filter(**{ + f'{unique_identifier}__in': import_new_entry_pks.keys() + }).values_list(unique_identifier, 'pk') # note: convert uuids 
from type UUID to strings just_saved = {str(key): value for key, value in just_saved_queryset} @@ -563,7 +559,7 @@ def import_data_dj( ret_dict[model_name] = {'new': [], 'existing': []} ret_dict[model_name]['new'].append((import_entry_pk, new_pk)) - IMPORT_LOGGER.debug('New %s: %s (%s->%s)' % (model_name, unique_id, import_entry_pk, new_pk)) + IMPORT_LOGGER.debug(f'New {model_name}: {unique_id} ({import_entry_pk}->{new_pk})') IMPORT_LOGGER.debug('STORING NODE LINKS...') import_links = data['links_uuid'] @@ -596,7 +592,7 @@ def import_data_dj( for link in import_links: # Check for dangling Links within the, supposed, self-consistent archive - progress_bar.set_description_str(pbar_base_str + 'label={}'.format(link['label']), refresh=False) + progress_bar.set_description_str(f"{pbar_base_str}label={link['label']}", refresh=False) progress_bar.update() try: @@ -619,7 +615,7 @@ def import_data_dj( try: validate_link_label(link['label']) except ValueError as why: - raise exceptions.ImportValidationError('Error during Link label validation: {}'.format(why)) + raise exceptions.ImportValidationError(f'Error during Link label validation: {why}') source = models.DbNode.objects.get(id=in_id) target = models.DbNode.objects.get(id=out_id) @@ -633,20 +629,20 @@ def import_data_dj( # Check if source Node is a valid type if not source.node_type.startswith(type_source): raise exceptions.ImportValidationError( - 'Cannot add a {} link from {} to {}'.format(link_type, source.node_type, target.node_type) + f'Cannot add a {link_type} link from {source.node_type} to {target.node_type}' ) # Check if target Node is a valid type if not target.node_type.startswith(type_target): raise exceptions.ImportValidationError( - 'Cannot add a {} link from {} to {}'.format(link_type, source.node_type, target.node_type) + f'Cannot add a {link_type} link from {source.node_type} to {target.node_type}' ) # If the outdegree is `unique` there cannot already be any other outgoing link of that type, # i.e., the source Node may not have a LinkType of current LinkType, going out, existing already. if outdegree == 'unique' and (in_id, link['type']) in existing_outgoing_unique: raise exceptions.ImportValidationError( - 'Node<{}> already has an outgoing {} link'.format(source.uuid, link_type) + f'Node<{source.uuid}> already has an outgoing {link_type} link' ) # If the outdegree is `unique_pair`, @@ -656,16 +652,14 @@ def import_data_dj( elif outdegree == 'unique_pair' and \ (in_id, link['label'], link['type']) in existing_outgoing_unique_pair: raise exceptions.ImportValidationError( - 'Node<{}> already has an outgoing {} link with label "{}"'.format( - source.uuid, link_type, link['label'] - ) + f"Node<{source.uuid}> already has an outgoing {link_type} link with label \"{link['label']}\"" ) # If the indegree is `unique` there cannot already be any other incoming links of that type, # i.e., the target Node may not have a LinkType of current LinkType, coming in, existing already. 
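A quoting note on the link-validation messages above (the values below are invented): the replacement f-strings subscript a dict inside the braces, so the expression part uses single quotes while the literal double quotes in the message are backslash-escaped outside the braces. On Python versions before 3.12 a backslash may not appear inside the `{...}` expression itself, which is why the escapes stay in the text part.

# Minimal sketch of the quoting pattern; none of these values come from the diff.
link = {'label': 'result', 'type': 'create'}
source_uuid = '1234abcd'
link_type = 'CREATE'

message = f"Node<{source_uuid}> already has an outgoing {link_type} link with label \"{link['label']}\""
print(message)   # Node<1234abcd> already has an outgoing CREATE link with label "result"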
if indegree == 'unique' and (out_id, link['type']) in existing_incoming_unique: raise exceptions.ImportValidationError( - 'Node<{}> already has an incoming {} link'.format(target.uuid, link_type) + f'Node<{target.uuid}> already has an incoming {link_type} link' ) # If the indegree is `unique_pair`, @@ -675,9 +669,7 @@ def import_data_dj( elif indegree == 'unique_pair' and \ (out_id, link['label'], link['type']) in existing_incoming_unique_pair: raise exceptions.ImportValidationError( - 'Node<{}> already has an incoming {} link with label "{}"'.format( - target.uuid, link_type, link['label'] - ) + f"Node<{target.uuid}> already has an incoming {link_type} link with label \"{link['label']}\"" ) # New link @@ -715,7 +707,7 @@ def import_data_dj( # TODO: cache these to avoid too many queries group_ = models.DbGroup.objects.get(uuid=groupuuid) - progress_bar.set_description_str(pbar_base_str + 'label={}'.format(group_.label), refresh=False) + progress_bar.set_description_str(f'{pbar_base_str}label={group_.label}', refresh=False) progress_bar.update() nodes_to_store = [foreign_ids_reverse_mappings[NODE_ENTITY_NAME][node_uuid] for node_uuid in groupnodes] @@ -743,7 +735,7 @@ def import_data_dj( while Group.objects.find(filters={'label': group_label}): counter += 1 - group_label = '{}_{}'.format(basename, counter) + group_label = f'{basename}_{counter}' if counter == 100: raise exceptions.ImportUniquenessError( diff --git a/aiida/tools/importexport/dbimport/backends/sqla/__init__.py b/aiida/tools/importexport/dbimport/backends/sqla/__init__.py index ecf1f3429e..21a4cea423 100644 --- a/aiida/tools/importexport/dbimport/backends/sqla/__init__.py +++ b/aiida/tools/importexport/dbimport/backends/sqla/__init__.py @@ -147,7 +147,7 @@ def import_data_sqla( ) if not folder.get_content_list(): - raise exceptions.CorruptArchive('The provided file/folder ({}) is empty'.format(in_path)) + raise exceptions.CorruptArchive(f'The provided file/folder ({in_path}) is empty') try: IMPORT_LOGGER.debug('CACHING metadata.json') with open(folder.get_abs_path('metadata.json'), encoding='utf8') as fhandle: @@ -157,9 +157,7 @@ def import_data_sqla( with open(folder.get_abs_path('data.json'), encoding='utf8') as fhandle: data = json.load(fhandle) except IOError as error: - raise exceptions.CorruptArchive( - 'Unable to find the file {} in the import file or folder'.format(error.filename) - ) + raise exceptions.CorruptArchive(f'Unable to find the file {error.filename} in the import file or folder') ###################### # PRELIMINARY CHECKS # @@ -217,7 +215,7 @@ def import_data_sqla( for import_field_name in metadata['all_fields_info']: if import_field_name not in entity_order: raise exceptions.ImportValidationError( - "You are trying to import an unknown model '{}'!".format(import_field_name) + f"You are trying to import an unknown model '{import_field_name}'!" ) for idx, entity_name in enumerate(entity_order): @@ -232,7 +230,7 @@ def import_data_sqla( for dependency in dependencies: if dependency not in entity_order[:idx]: raise exceptions.ArchiveImportError( - 'Entity {} requires {} but would be loaded first; stopping...'.format(entity_name, dependency) + f'Entity {entity_name} requires {dependency} but would be loaded first; stopping...' 
) ################################################### @@ -402,14 +400,14 @@ def import_data_sqla( progress_bar = get_progress_bar(total=reset_progress_bar['total'], disable=silent) progress_bar.n = reset_progress_bar['n'] reset_progress_bar = {} - pbar_base_str = '{}s - '.format(entity_name) - progress_bar.set_description_str(pbar_base_str + 'Initializing', refresh=True) + pbar_base_str = f'{entity_name}s - ' + progress_bar.set_description_str(f'{pbar_base_str}Initializing', refresh=True) # EXISTING ENTRIES if existing_entries[entity_name]: # Progress bar update - Model progress_bar.set_description_str( - pbar_base_str + '{} existing entries'.format(len(existing_entries[entity_name])), refresh=True + f'{pbar_base_str}{len(existing_entries[entity_name])} existing entries', refresh=True ) for import_entry_pk, entry_data in existing_entries[entity_name].items(): @@ -451,7 +449,7 @@ def import_data_sqla( if new_entries[entity_name]: # Progress bar update - Model progress_bar.set_description_str( - pbar_base_str + '{} new entries'.format(len(new_entries[entity_name])), refresh=True + f'{pbar_base_str}{len(new_entries[entity_name])} new entries', refresh=True ) for import_entry_pk, entry_data in new_entries[entity_name].items(): @@ -504,7 +502,7 @@ def import_data_sqla( # Progress bar initialization - Node progress_bar.update() - pbar_node_base_str = pbar_base_str + 'UUID={} - '.format(import_entry_uuid.split('-')[0]) + pbar_node_base_str = f"{pbar_base_str}UUID={import_entry_uuid.split('-')[0]} - " # Before storing entries in the DB, I store the files (if these are nodes). # Note: only for new entries! @@ -519,32 +517,32 @@ def import_data_sqla( destdir = RepositoryFolder(section=Repository._section_name, uuid=import_entry_uuid) # Replace the folder, possibly destroying existing previous folders, and move the files # (faster if we are on the same filesystem, and in any case the source is a SandboxFolder) - progress_bar.set_description_str(pbar_node_base_str + 'Repository', refresh=True) + progress_bar.set_description_str(f'{pbar_node_base_str}Repository', refresh=True) destdir.replace_with_folder(subfolder.abspath, move=True, overwrite=True) # For Nodes, we also have to store Attributes! 
IMPORT_LOGGER.debug('STORING NEW NODE ATTRIBUTES...') - progress_bar.set_description_str(pbar_node_base_str + 'Attributes', refresh=True) + progress_bar.set_description_str(f'{pbar_node_base_str}Attributes', refresh=True) # Get attributes from import file try: object_.attributes = data['node_attributes'][str(import_entry_pk)] except KeyError: raise exceptions.CorruptArchive( - 'Unable to find attribute info for Node with UUID={}'.format(import_entry_uuid) + f'Unable to find attribute info for Node with UUID={import_entry_uuid}' ) # For DbNodes, we also have to store extras if extras_mode_new == 'import': IMPORT_LOGGER.debug('STORING NEW NODE EXTRAS...') - progress_bar.set_description_str(pbar_node_base_str + 'Extras', refresh=True) + progress_bar.set_description_str(f'{pbar_node_base_str}Extras', refresh=True) # Get extras from import file try: extras = data['node_extras'][str(import_entry_pk)] except KeyError: raise exceptions.CorruptArchive( - 'Unable to find extra info for Node with UUID={}'.format(import_entry_uuid) + f'Unable to find extra info for Node with UUID={import_entry_uuid}' ) # TODO: remove when aiida extras will be moved somewhere else # from here @@ -573,8 +571,8 @@ def import_data_sqla( import_entry_pk = import_existing_entry_pks[import_entry_uuid] # Progress bar initialization - Node - pbar_node_base_str = pbar_base_str + 'UUID={} - '.format(import_entry_uuid.split('-')[0]) - progress_bar.set_description_str(pbar_node_base_str + 'Extras', refresh=False) + pbar_node_base_str = f"{pbar_base_str}UUID={import_entry_uuid.split('-')[0]} - " + progress_bar.set_description_str(f'{pbar_node_base_str}Extras', refresh=False) progress_bar.update() # Get extras from import file @@ -582,7 +580,7 @@ def import_data_sqla( extras = data['node_extras'][str(import_entry_pk)] except KeyError: raise exceptions.CorruptArchive( - 'Unable to find extra info for Node with UUID={}'.format(import_entry_uuid) + f'Unable to find extra info for Node with UUID={import_entry_uuid}' ) old_extras = node.extras.copy() @@ -602,7 +600,7 @@ def import_data_sqla( # Update progress bar with new non-Node entries progress_bar.update(n=len(existing_entries[entity_name]) + len(new_entries[entity_name])) - progress_bar.set_description_str(pbar_base_str + 'Storing', refresh=True) + progress_bar.set_description_str(f'{pbar_base_str}Storing', refresh=True) # Store them all in once; However, the PK are not set in this way... 
if objects_to_create: @@ -631,7 +629,7 @@ def import_data_sqla( just_saved.update({entry[0]: entry[1]}) - progress_bar.set_description_str(pbar_base_str + 'Done!', refresh=True) + progress_bar.set_description_str(f'{pbar_base_str}Done!', refresh=True) # Now I have the PKs, print the info # Moreover, add newly created Nodes to foreign_ids_reverse_mappings @@ -657,7 +655,7 @@ def import_data_sqla( for link in import_links: # Check for dangling Links within the, supposed, self-consistent archive - progress_bar.set_description_str(pbar_base_str + 'label={}'.format(link['label']), refresh=False) + progress_bar.set_description_str(f"{pbar_base_str}label={link['label']}", refresh=False) progress_bar.update() try: @@ -684,7 +682,7 @@ def import_data_sqla( try: validate_link(source, target, link_type, link['label']) except ValueError as why: - raise exceptions.ImportValidationError('Error occurred during Link validation: {}'.format(why)) + raise exceptions.ImportValidationError(f'Error occurred during Link validation: {why}') # New link session.add(DbLink(input_id=in_id, output_id=out_id, label=link['label'], type=link['type'])) @@ -707,7 +705,7 @@ def import_data_sqla( qb_group = QueryBuilder().append(Group, filters={'uuid': {'==': groupuuid}}) group_ = qb_group.first()[0] - progress_bar.set_description_str(pbar_base_str + 'label={}'.format(group_.label), refresh=False) + progress_bar.set_description_str(f'{pbar_base_str}label={group_.label}', refresh=False) progress_bar.update() nodes_ids_to_add = [ @@ -740,7 +738,7 @@ def import_data_sqla( group_label = basename while session.query(DbGroup).filter(DbGroup.label == group_label).count() > 0: counter += 1 - group_label = '{}_{}'.format(basename, counter) + group_label = f'{basename}_{counter}' if counter == 100: raise exceptions.ImportUniquenessError( diff --git a/aiida/tools/importexport/dbimport/utils.py b/aiida/tools/importexport/dbimport/utils.py index d25aab0cc0..f062066e5b 100644 --- a/aiida/tools/importexport/dbimport/utils.py +++ b/aiida/tools/importexport/dbimport/utils.py @@ -38,7 +38,7 @@ def merge_comment(incoming_comment, comment_mode): # Get existing Comment's 'mtime' and 'content' builder = QueryBuilder().append(Comment, filters={'uuid': incoming_uuid}, project=['mtime', 'content']) if builder.count() != 1: - raise exceptions.ImportValidationError('Multiple Comments with the same UUID: {}'.format(incoming_uuid)) + raise exceptions.ImportValidationError(f'Multiple Comments with the same UUID: {incoming_uuid}') builder = builder.all() existing_mtime = builder[0][0] @@ -94,12 +94,10 @@ def merge_extras(old_extras, new_extras, mode): """ if not isinstance(mode, str): raise exceptions.ImportValidationError( - "Parameter 'mode' should be of string type, you provided '{}' type".format(type(mode)) + f"Parameter 'mode' should be of string type, you provided '{type(mode)}' type" ) elif not len(mode) == 3: - raise exceptions.ImportValidationError( - "Parameter 'mode' should be a 3-letter string, you provided: '{}'".format(mode) - ) + raise exceptions.ImportValidationError(f"Parameter 'mode' should be a 3-letter string, you provided: '{mode}'") old_keys = set(old_extras.keys()) new_keys = set(new_extras.keys()) @@ -149,7 +147,7 @@ def merge_extras(old_extras, new_extras, mode): final_extras[key] = old_extras[key] elif mode[0] != 'n': raise exceptions.ImportValidationError( - "Unknown first letter of the update extras mode: '{}'. Should be either 'k' or 'n'".format(mode) + f"Unknown first letter of the update extras mode: '{mode}'. 
Should be either 'k' or 'n'" ) if mode[1] == 'c': @@ -157,7 +155,7 @@ def merge_extras(old_extras, new_extras, mode): final_extras[key] = new_extras[key] elif mode[1] != 'n': raise exceptions.ImportValidationError( - "Unknown second letter of the update extras mode: '{}'. Should be either 'c' or 'n'".format(mode) + f"Unknown second letter of the update extras mode: '{mode}'. Should be either 'c' or 'n'" ) if mode[2] == 'u': @@ -180,7 +178,7 @@ def merge_extras(old_extras, new_extras, mode): final_extras[key] = old_extras[key] elif mode[2] != 'd': raise exceptions.ImportValidationError( - "Unknown third letter of the update extras mode: '{}'. Should be one of 'u'/'l'/'a'/'d'".format(mode) + f"Unknown third letter of the update extras mode: '{mode}'. Should be one of 'u'/'l'/'a'/'d'" ) return final_extras @@ -213,7 +211,7 @@ def deserialize_attributes(attributes_data, conversion_data): if conversion_data == 'date': ret_data = datetime.datetime.strptime(attributes_data, '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=pytz.utc) else: - raise exceptions.ArchiveImportError("Unknown convert_type '{}'".format(conversion_data)) + raise exceptions.ArchiveImportError(f"Unknown convert_type '{conversion_data}'") return ret_data @@ -223,7 +221,7 @@ def deserialize_field(key, value, fields_info, import_unique_ids_mappings, forei try: field_info = fields_info[key] except KeyError: - raise exceptions.ArchiveImportError("Unknown field '{}'".format(key)) + raise exceptions.ArchiveImportError(f"Unknown field '{key}'") if key in ('id', 'pk'): raise exceptions.ImportValidationError('ID or PK explicitly passed!') @@ -244,23 +242,23 @@ def deserialize_field(key, value, fields_info, import_unique_ids_mappings, forei # I store it in the FIELDNAME_id variable, that directly stores the # PK in the remote table, rather than requiring to create Model # instances for the foreign relations - return ('{}_id'.format(key), foreign_ids_reverse_mappings[requires][unique_id]) + return (f'{key}_id', foreign_ids_reverse_mappings[requires][unique_id]) # else - return ('{}_id'.format(key), None) + return (f'{key}_id', None) def start_summary(archive, comment_mode, extras_mode_new, extras_mode_existing): """Print starting summary for import""" archive = os.path.basename(archive) - result = '\n{}'.format(tabulate([['Archive', archive]], headers=['IMPORT', ''])) + result = f"\n{tabulate([['Archive', archive]], headers=['IMPORT', ''])}" parameters = [ ['Comment rules', comment_mode], ['New Node Extras rules', extras_mode_new], ['Existing Node Extras rules', extras_mode_existing], ] - result += '\n\n{}'.format(tabulate(parameters, headers=['Parameters', ''])) + result += f"\n\n{tabulate(parameters, headers=['Parameters', ''])}" IMPORT_LOGGER.log(msg=result, level=LOG_LEVEL_REPORT) @@ -279,11 +277,11 @@ def result_summary(results, import_group_label): for model in results: value = [] if results[model].get('new', None): - value.append('{} new'.format(len(results[model]['new']))) + value.append(f"{len(results[model]['new'])} new") if results[model].get('existing', None): - value.append('{} existing'.format(len(results[model]['existing']))) + value.append(f"{len(results[model]['existing'])} existing") - parameters.extend([[param, val] for param, val in zip(['{}(s)'.format(model)], value)]) + parameters.extend([[param, val] for param, val in zip([f'{model}(s)'], value)]) if title: - IMPORT_LOGGER.log(msg='\n{}\n'.format(tabulate(parameters, headers=[title, ''])), level=LOG_LEVEL_REPORT) + IMPORT_LOGGER.log(msg=f"\n{tabulate(parameters, 
headers=[title, ''])}\n", level=LOG_LEVEL_REPORT) diff --git a/aiida/tools/importexport/migration/__init__.py b/aiida/tools/importexport/migration/__init__.py index fa23050277..c823baf5f3 100644 --- a/aiida/tools/importexport/migration/__init__.py +++ b/aiida/tools/importexport/migration/__init__.py @@ -58,7 +58,7 @@ def migrate_recursively(metadata, data, folder, version=EXPORT_VERSION): elif old_version in MIGRATE_FUNCTIONS: MIGRATE_FUNCTIONS[old_version](metadata, data, folder) else: - raise ArchiveMigrationError('Cannot migrate from version {}'.format(old_version)) + raise ArchiveMigrationError(f'Cannot migrate from version {old_version}') except ValueError as exception: raise ArchiveMigrationError(exception) except DanglingLinkError: diff --git a/aiida/tools/importexport/migration/utils.py b/aiida/tools/importexport/migration/utils.py index a1c5b7d2c9..5474b59398 100644 --- a/aiida/tools/importexport/migration/utils.py +++ b/aiida/tools/importexport/migration/utils.py @@ -30,7 +30,7 @@ def verify_metadata_version(metadata, version=None): if metadata_version != version: raise exceptions.MigrationValidationError( - 'expected export file with version {} but found version {}'.format(version, metadata_version) + f'expected export file with version {version} but found version {metadata_version}' ) return None @@ -47,7 +47,7 @@ def update_metadata(metadata, version): old_version = metadata['export_version'] conversion_info = metadata.get('conversion_info', []) - conversion_message = 'Converted from version {} to {} with AiiDA v{}'.format(old_version, version, get_version()) + conversion_message = f'Converted from version {old_version} to {version} with AiiDA v{get_version()}' conversion_info.append(conversion_message) metadata['aiida_version'] = get_version() diff --git a/aiida/tools/importexport/migration/v01_to_v02.py b/aiida/tools/importexport/migration/v01_to_v02.py index 2b044d37f6..a40d9a263a 100644 --- a/aiida/tools/importexport/migration/v01_to_v02.py +++ b/aiida/tools/importexport/migration/v01_to_v02.py @@ -33,7 +33,7 @@ def migrate_v1_to_v2(metadata, data, *args): def get_new_string(old_string): """Replace the old module prefix with the new.""" if old_string.startswith(old_start): - return '{}{}'.format(new_start, old_string[len(old_start):]) + return f'{new_start}{old_string[len(old_start):]}' return old_string diff --git a/aiida/tools/importexport/migration/v02_to_v03.py b/aiida/tools/importexport/migration/v02_to_v03.py index 37e5279fad..d6a1e2ec61 100644 --- a/aiida/tools/importexport/migration/v02_to_v03.py +++ b/aiida/tools/importexport/migration/v02_to_v03.py @@ -92,7 +92,7 @@ class NodeType(enum.Enum): input_type = NodeType(mapping[link['input']]) output_type = NodeType(mapping[link['output']]) except KeyError: - raise DanglingLinkError('Unknown node UUID {} or {}'.format(link['input'], link['output'])) + raise DanglingLinkError(f"Unknown node UUID {link['input']} or {link['output']}") # The following table demonstrates the logic for inferring the link type # (CODE, DATA) -> (WORK, CALC) : INPUT diff --git a/aiida/tools/importexport/migration/v03_to_v04.py b/aiida/tools/importexport/migration/v03_to_v04.py index 32745f73f7..804dc1e343 100644 --- a/aiida/tools/importexport/migration/v03_to_v04.py +++ b/aiida/tools/importexport/migration/v03_to_v04.py @@ -45,7 +45,7 @@ def migration_base_data_plugin_type_string(data): for content in data['export_data'].get('Node', {}).values(): if content.get('type', '').startswith('data.base.'): type_str = 
content['type'].replace('data.base.', '') - type_str = 'data.' + type_str.lower() + type_str + type_str = f'data.{type_str.lower()}{type_str}' content['type'] = type_str @@ -83,8 +83,8 @@ def migration_add_node_uuid_unique_constraint(data): unique_uuids = set(all_uuids) if len(all_uuids) != len(unique_uuids): echo.echo_critical( - """{}s with exactly the same UUID found, cannot proceed further. Please contact AiiDA - developers: http://www.aiida.net/mailing-list/ to help you resolve this issue.""".format(entry_type) + f"""{entry_type}s with exactly the same UUID found, cannot proceed further. Please contact AiiDA + developers: http://www.aiida.net/mailing-list/ to help you resolve this issue.""" ) diff --git a/aiida/tools/importexport/migration/v05_to_v06.py b/aiida/tools/importexport/migration/v05_to_v06.py index f2e4311448..5d9a8e6e1c 100644 --- a/aiida/tools/importexport/migration/v05_to_v06.py +++ b/aiida/tools/importexport/migration/v05_to_v06.py @@ -57,9 +57,9 @@ def migrate_deserialized_datetime(data, conversion): # `data.astimezone(pytz.utc).strftime('%Y-%m-%dT%H:%M:%S.%f') # Note that this first converted the datetime to UTC but then dropped the information from the string. # Since we know that all strings will be UTC, here we are simply reattaching that information. - ret_data = data + '+00:00' + ret_data = f'{data}+00:00' else: - raise ArchiveMigrationError("Unknown convert_type '{}'".format(conversion)) + raise ArchiveMigrationError(f"Unknown convert_type '{conversion}'") return ret_data diff --git a/aiida/tools/importexport/migration/v06_to_v07.py b/aiida/tools/importexport/migration/v06_to_v07.py index 0a5ee81f2b..0efde846e6 100644 --- a/aiida/tools/importexport/migration/v06_to_v07.py +++ b/aiida/tools/importexport/migration/v06_to_v07.py @@ -82,7 +82,7 @@ def migration_data_migration_legacy_process_attributes(data): for attr in attrs_to_remove: content.pop(attr, None) except KeyError as exc: - raise CorruptArchive('Your export archive is corrupt! Org. exception: {}'.format(exc)) + raise CorruptArchive(f'Your export archive is corrupt! Org. exception: {exc}') if illegal_cases: headers = ['UUID/PK', 'process_state'] diff --git a/aiida/tools/ipython/ipython_magics.py b/aiida/tools/ipython/ipython_magics.py index f279cfa512..8ae158a726 100644 --- a/aiida/tools/ipython/ipython_magics.py +++ b/aiida/tools/ipython/ipython_magics.py @@ -87,7 +87,7 @@ def aiida(self, line='', local_ns=None): else: profile = load_profile() - self.current_state = 'Loaded AiiDA DB environment - profile name: {}.'.format(profile.name) + self.current_state = f'Loaded AiiDA DB environment - profile name: {profile.name}.' 
user_ns = get_start_namespace() for key, value in user_ns.items(): @@ -126,7 +126,7 @@ def _repr_latex_(self): if self.is_warning: latex = '\\emph{%s}\n' % self.current_state else: - latex = '%s\n' % self.current_state + latex = f'{self.current_state}\n' return latex @@ -139,7 +139,7 @@ def _repr_pretty_(self, pretty_print, cycle): warning_str = '** ' else: warning_str = '' - text = '%s%s\n' % (warning_str, self.current_state) + text = f'{warning_str}{self.current_state}\n' pretty_print.text(text) diff --git a/aiida/tools/visualization/graph.py b/aiida/tools/visualization/graph.py index 362d069e22..648ea9cf11 100644 --- a/aiida/tools/visualization/graph.py +++ b/aiida/tools/visualization/graph.py @@ -69,7 +69,7 @@ def default_link_styles(link_pair, add_label, add_type): elif add_type and not add_label: style['label'] = link_pair.link_type.name elif add_label and add_type: - style['label'] = '{}\n{}'.format(link_pair.link_type.name, link_pair.link_label) + style['label'] = f'{link_pair.link_type.name}\n{link_pair.link_label}' return style @@ -213,19 +213,19 @@ def default_node_sublabels(node): class_node_type = node.class_node_type if class_node_type == 'data.int.Int.': - sublabel = 'value: {}'.format(node.get_attribute('value', '')) + sublabel = f"value: {node.get_attribute('value', '')}" elif class_node_type == 'data.float.Float.': - sublabel = 'value: {}'.format(node.get_attribute('value', '')) + sublabel = f"value: {node.get_attribute('value', '')}" elif class_node_type == 'data.str.Str.': - sublabel = '{}'.format(node.get_attribute('value', '')) + sublabel = f"{node.get_attribute('value', '')}" elif class_node_type == 'data.bool.Bool.': - sublabel = '{}'.format(node.get_attribute('value', '')) + sublabel = f"{node.get_attribute('value', '')}" elif class_node_type == 'data.code.Code.': - sublabel = '{}@{}'.format(os.path.basename(node.get_execname()), node.computer.label) + sublabel = f'{os.path.basename(node.get_execname())}@{node.computer.label}' elif class_node_type == 'data.singlefile.SinglefileData.': sublabel = node.filename elif class_node_type == 'data.remote.RemoteData.': - sublabel = '@{}'.format(node.computer.label) + sublabel = f'@{node.computer.label}' elif class_node_type == 'data.structure.StructureData.': sublabel = node.get_formula() elif class_node_type == 'data.cif.CifData.': @@ -238,13 +238,13 @@ def default_node_sublabels(node): sublabel_lines.append(', '.join(sg_numbers)) sublabel = '; '.join(sublabel_lines) elif class_node_type == 'data.upf.UpfData.': - sublabel = '{}'.format(node.get_attribute('element', '')) + sublabel = f"{node.get_attribute('element', '')}" elif isinstance(node, orm.ProcessNode): sublabel = [] if node.process_state is not None: - sublabel.append('State: {}'.format(node.process_state.value)) + sublabel.append(f'State: {node.process_state.value}') if node.exit_status is not None: - sublabel.append('Exit Code: {}'.format(node.exit_status)) + sublabel.append(f'Exit Code: {node.exit_status}') sublabel = '\n'.join(sublabel) else: sublabel = node.get_description() @@ -260,20 +260,20 @@ def get_node_id_label(node, id_type): return node.uuid.split('-')[0] if id_type == 'label': return node.label - raise ValueError('node_id_type not recognised: {}'.format(id_type)) + raise ValueError(f'node_id_type not recognised: {id_type}') def _get_node_label(node, id_type='pk'): """return a label text of node and the return format is ' ()'.""" if isinstance(node, orm.Data): - label = '{} ({})'.format(node.__class__.__name__, get_node_id_label(node, id_type)) + label 
= f'{node.__class__.__name__} ({get_node_id_label(node, id_type)})' elif isinstance(node, orm.ProcessNode): label = '{} ({})'.format( node.__class__.__name__ if node.process_label is None else node.process_label, get_node_id_label(node, id_type) ) else: - raise TypeError('Unknown type: {}'.format(type(node))) + raise TypeError(f'Unknown type: {type(node)}') return label @@ -324,7 +324,7 @@ def _add_graphviz_node( # coerce node style values to strings, required by graphviz node_style = {k: str(v) for k, v in node_style.items()} - return graph.node('N{}'.format(node.pk), **node_style) + return graph.node(f'N{node.pk}', **node_style) def _add_graphviz_edge(graph, in_node, out_node, style=None): @@ -343,7 +343,7 @@ def _add_graphviz_edge(graph, in_node, out_node, style=None): # coerce node style values to strings style = {k: str(v) for k, v in style.items()} - return graph.edge('N{}'.format(in_node.pk), 'N{}'.format(out_node.pk), **style) + return graph.edge(f'N{in_node.pk}', f'N{out_node.pk}', **style) class Graph: @@ -489,10 +489,10 @@ def add_edge(self, in_node, out_node, link_pair=None, style=None, overwrite=Fals """ in_node = self._load_node(in_node) if in_node.pk not in self._nodes: - raise AssertionError('in_node pk={} must have already been added to the graph'.format(in_node.pk)) + raise AssertionError(f'in_node pk={in_node.pk} must have already been added to the graph') out_node = self._load_node(out_node) if out_node.pk not in self._nodes: - raise AssertionError('out_node pk={} must have already been added to the graph'.format(out_node.pk)) + raise AssertionError(f'out_node pk={out_node.pk} must have already been added to the graph') if (in_node.pk, out_node.pk, link_pair) in self._edges and not overwrite: return @@ -528,8 +528,7 @@ def add_incoming(self, node, link_types=(), annotate_links=None, return_pks=True """ if annotate_links not in [None, False, 'label', 'type', 'both']: raise ValueError( - 'annotate_links must be one of False, "label", "type" or "both"\ninstead, it is: {}'. - format(annotate_links) + f'annotate_links must be one of False, "label", "type" or "both"\ninstead, it is: {annotate_links}' ) # incoming nodes are found traversing backwards @@ -585,8 +584,7 @@ def add_outgoing(self, node, link_types=(), annotate_links=None, return_pks=True """ if annotate_links not in [None, False, 'label', 'type', 'both']: raise ValueError( - 'annotate_links must be one of False, "label", "type" or "both"\ninstead, it is: {}'. - format(annotate_links) + f'annotate_links must be one of False, "label", "type" or "both"\ninstead, it is: {annotate_links}' ) # outgoing nodes are found traversing forwards diff --git a/aiida/transports/cli.py b/aiida/transports/cli.py index eba3ac0c76..f4d9951d50 100644 --- a/aiida/transports/cli.py +++ b/aiida/transports/cli.py @@ -28,9 +28,7 @@ def match_comp_transport(ctx, param, computer, transport_type): """Check the computer argument against the transport type.""" if computer.transport_type != transport_type: echo.echo_critical( - 'Computer {} has transport of type "{}", not {}!'.format( - computer.label, computer.transport_type, transport_type - ) + f'Computer {computer.label} has transport of type "{computer.transport_type}", not {transport_type}!' 
) return computer @@ -42,12 +40,12 @@ def configure_computer_main(computer, user, **kwargs): user = user or orm.User.objects.get_default() - echo.echo_info('Configuring computer {} for user {}.'.format(computer.label, user.email)) + echo.echo_info(f'Configuring computer {computer.label} for user {user.email}.') if user.email != get_manager().get_profile().default_user: echo.echo_info('Configuring different user, defaults may not be appropriate.') computer.configure(user=user, **kwargs) - echo.echo_success('{} successfully configured for {}'.format(computer.label, user.email)) + echo.echo_success(f'{computer.label} successfully configured for {user.email}') def common_params(command_func): @@ -60,7 +58,7 @@ def common_params(command_func): def transport_option_default(name, computer): """Determine the default value for an auth_param key.""" transport_cls = computer.get_transport_class() - suggester_name = '_get_{}_suggestion_string'.format(name) + suggester_name = f'_get_{name}_suggestion_string' members = dict(inspect.getmembers(transport_cls)) suggester = members.get(suggester_name, None) default = None @@ -110,7 +108,7 @@ def create_option(name, spec): spec = deepcopy(spec) name_dashed = name.replace('_', '-') - option_name = '--{}'.format(name_dashed) + option_name = f'--{name_dashed}' existing_option = spec.pop('option', None) if spec.pop('switch', False): @@ -155,7 +153,7 @@ def apply_options(func): def create_configure_cmd(transport_type): """Create verdi computer configure subcommand for a transport type.""" - help_text = """Configure COMPUTER for {} transport.""".format(transport_type) + help_text = f"""Configure COMPUTER for {transport_type} transport.""" # pylint: disable=unused-argument def transport_configure_command(computer, user, non_interactive, **kwargs): diff --git a/aiida/transports/plugins/local.py b/aiida/transports/plugins/local.py index 1a563e1f21..9f8b96a8c9 100644 --- a/aiida/transports/plugins/local.py +++ b/aiida/transports/plugins/local.py @@ -90,7 +90,7 @@ def __str__(self): """ Return a description as a string. 
""" - return 'local [{}]'.format('OPEN' if self._is_open else 'CLOSED') + return f"local [{'OPEN' if self._is_open else 'CLOSED'}]" @property def curdir(self): @@ -111,9 +111,9 @@ def chdir(self, path): """ new_path = os.path.join(self.curdir, path) if not os.path.isdir(new_path): - raise IOError("'{}' is not a valid directory".format(new_path)) + raise IOError(f"'{new_path}' is not a valid directory") elif not os.access(new_path, os.R_OK): - raise IOError("Do not have read permission to '{}'".format(new_path)) + raise IOError(f"Do not have read permission to '{new_path}'") self._internal_dir = os.path.normpath(new_path) @@ -298,7 +298,7 @@ def put(self, localpath, remotepath, *args, **kwargs): if ignore_nonexisting: pass else: - raise OSError('The local path {} does not exist'.format(localpath)) + raise OSError(f'The local path {localpath} does not exist') def putfile(self, localpath, remotepath, *args, **kwargs): """ @@ -459,7 +459,7 @@ def get(self, remotepath, localpath, *args, **kwargs): if ignore_nonexisting: pass else: - raise IOError('The remote path {} does not exist'.format(remotepath)) + raise IOError(f'The remote path {remotepath} does not exist') def getfile(self, remotepath, localpath, *args, **kwargs): """ @@ -515,7 +515,7 @@ def gettree(self, remotepath, localpath, *args, **kwargs): raise ValueError('Localpaths must be an absolute path') if not self.isdir(remotepath): - raise IOError('Input remotepath is not a folder: {}'.format(remotepath)) + raise IOError(f'Input remotepath is not a folder: {remotepath}') if os.path.exists(localpath) and not overwrite: raise OSError("Can't overwrite existing files") @@ -743,7 +743,7 @@ def _exec_command_internal(self, command, **kwargs): # pylint: disable=unused-a # Note: The outer shell will eat one level of escaping, while # 'bash -l -c ...' will eat another. Thus, we need to escape again. 
- bash_commmand = self._bash_command_str + '-c ' + bash_commmand = f'{self._bash_command_str}-c ' command = bash_commmand + escape_for_bash(command) @@ -805,7 +805,7 @@ def gotocomputer_command(self, remotedir): :param str remotedir: the full path of the remote directory """ connect_string = self._gotocomputer_string(remotedir) - cmd = 'bash -c {}'.format(connect_string) + cmd = f'bash -c {connect_string}' return cmd def rename(self, oldpath, newpath): @@ -819,13 +819,13 @@ def rename(self, oldpath, newpath): :raises ValueError: if src/dst is not a valid string """ if not oldpath: - raise ValueError('Source {} is not a valid string'.format(oldpath)) + raise ValueError(f'Source {oldpath} is not a valid string') if not newpath: - raise ValueError('Destination {} is not a valid string'.format(newpath)) + raise ValueError(f'Destination {newpath} is not a valid string') if not os.path.exists(oldpath): - raise IOError('Source {} does not exist'.format(oldpath)) + raise IOError(f'Source {oldpath} does not exist') if not os.path.exists(newpath): - raise IOError('Destination {} does not exist'.format(newpath)) + raise IOError(f'Destination {newpath} does not exist') shutil.move(oldpath, newpath) @@ -855,7 +855,7 @@ def symlink(self, remotesource, remotedestination): try: os.symlink(remotesource, os.path.join(self.curdir, remotedestination)) except OSError: - raise OSError('!!: {}, {}, {}'.format(remotesource, self.curdir, remotedestination)) + raise OSError(f'!!: {remotesource}, {self.curdir}, {remotedestination}') def path_exists(self, path): """ diff --git a/aiida/transports/plugins/ssh.py b/aiida/transports/plugins/ssh.py index 0ffd7b06e1..f5e1ca021f 100644 --- a/aiida/transports/plugins/ssh.py +++ b/aiida/transports/plugins/ssh.py @@ -437,8 +437,8 @@ def open(self): self._client.connect(self._machine, **connection_arguments) except Exception as exc: self.logger.error( - "Error connecting to '{}' through SSH: ".format(self._machine) + - '[{}] {}, '.format(self.__class__.__name__, exc) + 'connect_args were: {}'.format(self._connect_args) + f"Error connecting to '{self._machine}' through SSH: " + f'[{self.__class__.__name__}] {exc}, ' + + f'connect_args were: {self._connect_args}' ) raise @@ -501,17 +501,17 @@ def __str__(self): """ conn_info = self._machine try: - conn_info = '{}@{}'.format(self._connect_args['username'], conn_info) + conn_info = f"{self._connect_args['username']}@{conn_info}" except KeyError: # No username explicitly defined: ignore pass try: - conn_info += ':{}'.format(self._connect_args['port']) + conn_info += f":{self._connect_args['port']}" except KeyError: # No port explicitly defined: ignore pass - return '{} [{}]'.format('OPEN' if self._is_open else 'CLOSED', conn_info) + return f"{'OPEN' if self._is_open else 'CLOSED'} [{conn_info}]" def chdir(self, path): """ @@ -672,21 +672,18 @@ def rmtree(self, path): rm_flags = '-r -f' # if in input I give an invalid object raise ValueError if not path: - raise ValueError('Input to rmtree() must be a non empty string. ' + 'Found instead %s as path' % path) + raise ValueError('Input to rmtree() must be a non empty string. 
' + f'Found instead {path} as path') - command = '{} {} {}'.format(rm_exe, rm_flags, escape_for_bash(path)) + command = f'{rm_exe} {rm_flags} {escape_for_bash(path)}' retval, stdout, stderr = self.exec_command_wait(command) if retval == 0: if stderr.strip(): - self.logger.warning('There was nonempty stderr in the rm command: {}'.format(stderr)) + self.logger.warning(f'There was nonempty stderr in the rm command: {stderr}') return True - self.logger.error( - "Problem executing rm. Exit code: {}, stdout: '{}', " - "stderr: '{}'".format(retval, stdout, stderr) - ) - raise IOError('Error while executing rm. Exit code: {}'.format(retval)) + self.logger.error(f"Problem executing rm. Exit code: {retval}, stdout: '{stdout}', stderr: '{stderr}'") + raise IOError(f'Error while executing rm. Exit code: {retval}') def rmdir(self, path): """ @@ -814,7 +811,7 @@ def put(self, localpath, remotepath, callback=None, dereference=True, overwrite= self.putfile(localpath, remotepath, callback, dereference, overwrite) else: if not ignore_nonexisting: - raise OSError('The local path {} does not exist'.format(localpath)) + raise OSError(f'The local path {localpath} does not exist') def putfile(self, localpath, remotepath, callback=None, dereference=True, overwrite=True): # pylint: disable=arguments-differ """ @@ -870,7 +867,7 @@ def puttree(self, localpath, remotepath, callback=None, dereference=True, overwr raise OSError('The localpath does not exists') if not os.path.isdir(localpath): - raise ValueError('Input localpath is not a folder: {}'.format(localpath)) + raise ValueError(f'Input localpath is not a folder: {localpath}') if not remotepath: raise IOError('remotepath must be a non empty string') @@ -967,7 +964,7 @@ def get(self, remotepath, localpath, callback=None, dereference=True, overwrite= if ignore_nonexisting: pass else: - raise IOError('The remote path {} does not exist'.format(remotepath)) + raise IOError(f'The remote path {remotepath} does not exist') def getfile(self, remotepath, localpath, callback=None, dereference=True, overwrite=True): # pylint: disable=arguments-differ """ @@ -1028,7 +1025,7 @@ def gettree(self, remotepath, localpath, callback=None, dereference=True, overwr raise ValueError('Localpaths must be an absolute path') if not self.isdir(remotepath): - raise IOError('Input remotepath is not a folder: {}'.format(localpath)) + raise IOError(f'Input remotepath is not a folder: {localpath}') if os.path.exists(localpath) and not overwrite: raise OSError("Can't overwrite existing files") @@ -1105,13 +1102,13 @@ def copy(self, remotesource, remotedestination, dereference=False, recursive=Tru # if in input I give an invalid object raise ValueError if not remotesource: raise ValueError( - 'Input to copy() must be a non empty string. ' + 'Found instead %s as remotesource' % remotesource + 'Input to copy() must be a non empty string. ' + f'Found instead {remotesource} as remotesource' ) if not remotedestination: raise ValueError( 'Input to copy() must be a non empty string. 
' + - 'Found instead %s as remotedestination' % remotedestination + f'Found instead {remotedestination} as remotedestination' ) if self.has_magic(remotedestination): @@ -1133,13 +1130,13 @@ def copy(self, remotesource, remotedestination, dereference=False, recursive=Tru def _exec_cp(self, cp_exe, cp_flags, src, dst): """Execute the ``cp`` command on the remote machine.""" # to simplify writing the above copy function - command = '{} {} {} {}'.format(cp_exe, cp_flags, escape_for_bash(src), escape_for_bash(dst)) + command = f'{cp_exe} {cp_flags} {escape_for_bash(src)} {escape_for_bash(dst)}' retval, stdout, stderr = self.exec_command_wait(command) if retval == 0: if stderr.strip(): - self.logger.warning('There was nonempty stderr in the cp command: {}'.format(stderr)) + self.logger.warning(f'There was nonempty stderr in the cp command: {stderr}') else: self.logger.error( "Problem executing cp. Exit code: {}, stdout: '{}', " @@ -1207,15 +1204,15 @@ def rename(self, oldpath, newpath): :raises ValueError: if sroldpathc/newpath is not a valid string """ if not oldpath: - raise ValueError('Source {} is not a valid string'.format(oldpath)) + raise ValueError(f'Source {oldpath} is not a valid string') if not newpath: - raise ValueError('Destination {} is not a valid string'.format(newpath)) + raise ValueError(f'Destination {newpath} is not a valid string') if not self.isfile(oldpath): if not self.isdir(oldpath): - raise IOError('Source {} does not exist'.format(oldpath)) + raise IOError(f'Source {oldpath} does not exist') if not self.isfile(newpath): if not self.isdir(newpath): - raise IOError('Destination {} does not exist'.format(newpath)) + raise IOError(f'Destination {newpath} does not exist') return self.sftp.rename(oldpath, newpath) @@ -1231,10 +1228,7 @@ def isfile(self, path): return False try: self.logger.debug( - "stat for path '{}' ('{}'): {} [{}]".format( - path, self.normalize(path), self.stat(path), - self.stat(path).st_mode - ) + f"stat for path '{path}' ('{self.normalize(path)}'): {self.stat(path)} [{self.stat(path).st_mode}]" ) return S_ISREG(self.stat(path).st_mode) except IOError as exc: @@ -1270,14 +1264,11 @@ def _exec_command_internal(self, command, combine_stderr=False, bufsize=-1): # if self.getcwd() is not None: escaped_folder = escape_for_bash(self.getcwd()) - command_to_execute = ( - 'cd {escaped_folder} && ' - '{real_command}'.format(escaped_folder=escaped_folder, real_command=command) - ) + command_to_execute = (f'cd {escaped_folder} && {command}') else: command_to_execute = command - self.logger.debug('Command to be executed: {}'.format(command_to_execute[:self._MAX_EXEC_COMMAND_LOG_SIZE])) + self.logger.debug(f'Command to be executed: {command_to_execute[:self._MAX_EXEC_COMMAND_LOG_SIZE]}') # Note: The default shell will eat one level of escaping, while # 'bash -l -c ...' will eat another. Thus, we need to escape again. 
@@ -1340,22 +1331,18 @@ def gotocomputer_command(self, remotedir): """ further_params = [] if 'username' in self._connect_args: - further_params.append('-l {}'.format(escape_for_bash(self._connect_args['username']))) + further_params.append(f"-l {escape_for_bash(self._connect_args['username'])}") if 'port' in self._connect_args and self._connect_args['port']: - further_params.append('-p {}'.format(self._connect_args['port'])) + further_params.append(f"-p {self._connect_args['port']}") if 'key_filename' in self._connect_args and self._connect_args['key_filename']: - further_params.append('-i {}'.format(escape_for_bash(self._connect_args['key_filename']))) + further_params.append(f"-i {escape_for_bash(self._connect_args['key_filename'])}") further_params_str = ' '.join(further_params) connect_string = self._gotocomputer_string(remotedir) - cmd = 'ssh -t {machine} {further_params} {connect_string}'.format( - further_params=further_params_str, - machine=self._machine, - connect_string=connect_string, - ) + cmd = f'ssh -t {self._machine} {further_params_str} {connect_string}' return cmd def _symlink(self, source, dest): diff --git a/aiida/transports/transport.py b/aiida/transports/transport.py index b98c11b480..54bcf7f9d1 100644 --- a/aiida/transports/transport.py +++ b/aiida/transports/transport.py @@ -31,7 +31,7 @@ def validate_positive_number(ctx, param, value): # pylint: disable=unused-argum """ if not isinstance(value, (int, float)) or value < 0: from click import BadParameter - raise BadParameter('{} is not a valid positive number'.format(value)) + raise BadParameter(f'{value} is not a valid positive number') class Transport(abc.ABC): @@ -151,7 +151,7 @@ def close(self): raise NotImplementedError def __repr__(self): - return '<{}: {}>'.format(self.__class__.__name__, str(self)) + return f'<{self.__class__.__name__}: {str(self)}>' # redefine this in each subclass def __str__(self): @@ -707,14 +707,11 @@ def whoami(self): retval, username, stderr = self.exec_command_wait(command) if retval == 0: if stderr.strip(): - self.logger.warning('There was nonempty stderr in the whoami command: {}'.format(stderr)) + self.logger.warning(f'There was nonempty stderr in the whoami command: {stderr}') return username.strip() - self.logger.error( - "Problem executing whoami. Exit code: {}, stdout: '{}', " - "stderr: '{}'".format(retval, username, stderr) - ) - raise IOError('Error while executing whoami. Exit code: {}'.format(retval)) + self.logger.error(f"Problem executing whoami. Exit code: {retval}, stdout: '{username}', stderr: '{stderr}'") + raise IOError(f'Error while executing whoami. 
Exit code: {retval}') def path_exists(self, path): """ diff --git a/docs/source/topics/calculations/include/snippets/calcjobs/arithmetic_add_spec_prepare_for_submission.py b/docs/source/topics/calculations/include/snippets/calcjobs/arithmetic_add_spec_prepare_for_submission.py index 731e75acbb..e87d8c85f5 100644 --- a/docs/source/topics/calculations/include/snippets/calcjobs/arithmetic_add_spec_prepare_for_submission.py +++ b/docs/source/topics/calculations/include/snippets/calcjobs/arithmetic_add_spec_prepare_for_submission.py @@ -23,7 +23,7 @@ def prepare_for_submission(self, folder): # Write the input file based on the inputs that were passed with folder.open(self.options.input_filename, 'w', encoding='utf8') as handle: - handle.write('{} {}\n'.format(input_x.value, input_y.value)) + handle.write(f'{input_x.value} {input_y.value}\n') codeinfo = CodeInfo() codeinfo.code_uuid = self.inputs.code.uuid diff --git a/docs/source/topics/workflows/include/snippets/workchains/run_workchain_submit_parallel.py b/docs/source/topics/workflows/include/snippets/workchains/run_workchain_submit_parallel.py index e7403671e2..08ca9a644c 100644 --- a/docs/source/topics/workflows/include/snippets/workchains/run_workchain_submit_parallel.py +++ b/docs/source/topics/workflows/include/snippets/workchains/run_workchain_submit_parallel.py @@ -15,10 +15,10 @@ def define(cls, spec): def submit_workchains(self): for i in range(3): future = self.submit(SomeWorkChain) - key = 'workchain_{}'.format(i) + key = f'workchain_{i}' self.to_context(**{key: future}) def inspect_workchains(self): for i in range(3): - key = 'workchain_{}'.format(i) + key = f'workchain_{i}' assert self.ctx[key].is_finished_ok diff --git a/pyproject.toml b/pyproject.toml index f90f7d4b7d..c9383102be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ disable = [ "import-outside-toplevel", "inconsistent-return-statements", "locally-disabled", - "logging-format-interpolation", + "logging-fstring-interpolation", "missing-class-docstring", "missing-function-docstring", "no-else-raise", diff --git a/tests/backends/aiida_django/migrations/test_migrations_0037_attributes_extras_settings_json.py b/tests/backends/aiida_django/migrations/test_migrations_0037_attributes_extras_settings_json.py index 1f1c0312c7..a0f98cb9af 100644 --- a/tests/backends/aiida_django/migrations/test_migrations_0037_attributes_extras_settings_json.py +++ b/tests/backends/aiida_django/migrations/test_migrations_0037_attributes_extras_settings_json.py @@ -412,9 +412,7 @@ def create_value(cls, key, value, subspecifier_value=None, other_attribs={}): if cls._subspecifier_field_name is None: if subspecifier_value is not None: raise ValueError( - 'You cannot specify a subspecifier value for ' - 'class {} because it has no subspecifiers' - ''.format(cls.__name__) + f'You cannot specify a subspecifier value for class {cls.__name__} because it has no subspecifiers' ) if issubclass(cls, DbAttributeFunctionality): new_entry = db_attribute_base_model(key=key, **other_attribs) @@ -508,9 +506,7 @@ def create_value(cls, key, value, subspecifier_value=None, other_attribs={}): # expect no concurrency) # NOTE: I do not pass other_attribs list_to_return.extend( - cls.create_value( - key=('{}{}{:d}'.format(key, cls._sep, i)), value=subv, subspecifier_value=subspecifier_value - ) + cls.create_value(key=f'{key}{cls._sep}{i:d}', value=subv, subspecifier_value=subspecifier_value) ) elif isinstance(value, dict): @@ -530,17 +526,14 @@ def create_value(cls, key, value, subspecifier_value=None, 
other_attribs={}): # expect no concurrency) # NOTE: I do not pass other_attribs list_to_return.extend( - cls.create_value( - key='{}{}{}'.format(key, cls._sep, subk), value=subv, subspecifier_value=subspecifier_value - ) + cls.create_value(key=f'{key}{cls._sep}{subk}', value=subv, subspecifier_value=subspecifier_value) ) else: try: jsondata = json.dumps(value) except TypeError: raise ValueError( - 'Unable to store the value: it must be either a basic datatype, or json-serializable: {}'. - format(value) + f'Unable to store the value: it must be either a basic datatype, or json-serializable: {value}' ) new_entry.datatype = 'json' diff --git a/tests/backends/aiida_django/migrations/test_migrations_common.py b/tests/backends/aiida_django/migrations/test_migrations_common.py index 43f4f03b3d..0861006009 100644 --- a/tests/backends/aiida_django/migrations/test_migrations_common.py +++ b/tests/backends/aiida_django/migrations/test_migrations_common.py @@ -41,7 +41,7 @@ def setUp(self): self.current_autogroup = autogroup.CURRENT_AUTOGROUP autogroup.CURRENT_AUTOGROUP = None assert self.migrate_from and self.migrate_to, \ - "TestCase '{}' must define migrate_from and migrate_to properties".format(type(self).__name__) + f"TestCase '{type(self).__name__}' must define migrate_from and migrate_to properties" self.migrate_from = [(self.app, self.migrate_from)] self.migrate_to = [(self.app, self.migrate_to)] executor = MigrationExecutor(connection) diff --git a/tests/backends/aiida_django/migrations/test_migrations_many.py b/tests/backends/aiida_django/migrations/test_migrations_many.py index e260b1e406..2145bac9a3 100644 --- a/tests/backends/aiida_django/migrations/test_migrations_many.py +++ b/tests/backends/aiida_django/migrations/test_migrations_many.py @@ -52,7 +52,7 @@ def set_node_array(self, node, name, array): :param array: The numpy array to store. 
""" utils.store_numpy_array_in_repository(node.uuid, name, array) - self.set_attribute(node, 'array|{}'.format(name), list(array.shape)) + self.set_attribute(node, f'array|{name}', list(array.shape)) class TestNoMigrations(AiidaTestCase): diff --git a/tests/backends/aiida_sqlalchemy/test_migrations.py b/tests/backends/aiida_sqlalchemy/test_migrations.py index 1cbe698bc8..6b621b4763 100644 --- a/tests/backends/aiida_sqlalchemy/test_migrations.py +++ b/tests/backends/aiida_sqlalchemy/test_migrations.py @@ -59,7 +59,7 @@ def setUp(self): self.current_autogroup = autogroup.CURRENT_AUTOGROUP autogroup.CURRENT_AUTOGROUP = None assert self.migrate_from and self.migrate_to, \ - "TestCase '{}' must define migrate_from and migrate_to properties".format(type(self).__name__) + f"TestCase '{type(self).__name__}' must define migrate_from and migrate_to properties" try: self.migrate_db_down(self.migrate_from) @@ -202,7 +202,7 @@ def set_node_array(node, name, array): attributes = node.attributes if attributes is None: attributes = {} - attributes['array|{}'.format(name)] = list(array.shape) + attributes[f'array|{name}'] = list(array.shape) node.attributes = attributes flag_modified(node, 'attributes') @@ -347,7 +347,7 @@ def setUpBeforeMigration(self): try: session = Session(connection.engine) - user = DbUser(email='{}@aiida.net'.format(self.id())) + user = DbUser(email=f'{self.id()}@aiida.net') session.add(user) session.commit() @@ -447,7 +447,7 @@ def setUpBeforeMigration(self): with self.get_session() as session: try: - default_user = DbUser(email='{}@aiida.net'.format(self.id())) + default_user = DbUser(email=f'{self.id()}@aiida.net') session.add(default_user) session.commit() @@ -533,7 +533,7 @@ def setUpBeforeMigration(self): try: session = Session(connection.engine) - user = DbUser(email='{}@aiida.net'.format(self.id())) + user = DbUser(email=f'{self.id()}@aiida.net') session.add(user) session.commit() @@ -657,7 +657,7 @@ def setUpBeforeMigration(self): try: session = Session(connection.engine) - user = DbUser(email='{}@aiida.net'.format(self.id())) + user = DbUser(email=f'{self.id()}@aiida.net') session.add(user) session.commit() @@ -898,7 +898,7 @@ def setUpBeforeMigration(self): try: session = Session(connection.engine) - user = DbUser(email='{}@aiida.net'.format(self.id())) + user = DbUser(email=f'{self.id()}@aiida.net') session.add(user) session.commit() @@ -1007,7 +1007,7 @@ def setUpBeforeMigration(self): try: session = Session(connection.engine) - user = DbUser(email='{}@aiida.net'.format(self.id())) + user = DbUser(email=f'{self.id()}@aiida.net') session.add(user) session.commit() @@ -1064,7 +1064,7 @@ def setUpBeforeMigration(self): try: session = Session(connection.engine) - user = DbUser(email='{}@aiida.net'.format(self.id())) + user = DbUser(email=f'{self.id()}@aiida.net') session.add(user) session.commit() @@ -1129,7 +1129,7 @@ def setUpBeforeMigration(self): try: session = Session(connection.engine) - user = DbUser(email='{}@aiida.net'.format(self.id())) + user = DbUser(email=f'{self.id()}@aiida.net') session.add(user) session.commit() @@ -1188,7 +1188,7 @@ def setUpBeforeMigration(self): try: session = Session(connection.engine) - user = DbUser(email='{}@aiida.net'.format(self.id())) + user = DbUser(email=f'{self.id()}@aiida.net') session.add(user) session.commit() @@ -1242,7 +1242,7 @@ def setUpBeforeMigration(self): try: session = Session(connection.engine) - user = DbUser(email='{}@aiida.net'.format(self.id())) + user = DbUser(email=f'{self.id()}@aiida.net') session.add(user) 
session.commit() @@ -1293,7 +1293,7 @@ def setUpBeforeMigration(self): try: session = Session(connection.engine) - user = DbUser(email='{}@aiida.net'.format(self.id())) + user = DbUser(email=f'{self.id()}@aiida.net') session.add(user) session.commit() @@ -1358,7 +1358,7 @@ def setUpBeforeMigration(self): try: session = Session(connection.engine) - user = DbUser(email='{}@aiida.net'.format(self.id())) + user = DbUser(email=f'{self.id()}@aiida.net') session.add(user) session.commit() @@ -1410,7 +1410,7 @@ def setUpBeforeMigration(self): try: session = Session(connection.engine) - user = DbUser(email='{}@aiida.net'.format(self.id())) + user = DbUser(email=f'{self.id()}@aiida.net') session.add(user) session.commit() @@ -1511,7 +1511,7 @@ def setUpBeforeMigration(self): try: session = Session(connection.engine) - user = DbUser(email='{}@aiida.net'.format(self.id())) + user = DbUser(email=f'{self.id()}@aiida.net') session.add(user) session.commit() @@ -1606,7 +1606,7 @@ def setUpBeforeMigration(self): try: session = Session(connection.engine) - user = DbUser(email='{}@aiida.net'.format(self.id())) + user = DbUser(email=f'{self.id()}@aiida.net') session.add(user) session.commit() @@ -1657,7 +1657,7 @@ def setUpBeforeMigration(self): with self.get_session() as session: try: - default_user = DbUser(email='{}@aiida.net'.format(self.id())) + default_user = DbUser(email=f'{self.id()}@aiida.net') session.add(default_user) session.commit() @@ -1723,7 +1723,7 @@ def setUpBeforeMigration(self): with self.get_session() as session: try: - default_user = DbUser(email='{}@aiida.net'.format(self.id())) + default_user = DbUser(email=f'{self.id()}@aiida.net') session.add(default_user) session.commit() diff --git a/tests/backends/aiida_sqlalchemy/test_nodes.py b/tests/backends/aiida_sqlalchemy/test_nodes.py index 2ace41401d..0b18b6f652 100644 --- a/tests/backends/aiida_sqlalchemy/test_nodes.py +++ b/tests/backends/aiida_sqlalchemy/test_nodes.py @@ -111,7 +111,7 @@ def test_multiple_node_creation(self): backend = self.backend # Get the automatic user - dbuser = backend.users.create('{}@aiida.net'.format(self.id())).store().dbmodel + dbuser = backend.users.create(f'{self.id()}@aiida.net').store().dbmodel # Create a new node but don't add it to the session node_uuid = get_new_uuid() DbNode(user=dbuser, uuid=node_uuid, node_type=None) @@ -138,11 +138,11 @@ def test_multiple_node_creation(self): # Query the session before commit res = session.query(DbNode.uuid).filter(DbNode.uuid == node_uuid).all() - self.assertEqual(len(res), 1, 'There should be a node in the session/DB with the UUID {}'.format(node_uuid)) + self.assertEqual(len(res), 1, f'There should be a node in the session/DB with the UUID {node_uuid}') # Commit the transaction session.commit() # Check again that the node is in the db res = session.query(DbNode.uuid).filter(DbNode.uuid == node_uuid).all() - self.assertEqual(len(res), 1, 'There should be a node in the session/DB with the UUID {}'.format(node_uuid)) + self.assertEqual(len(res), 1, f'There should be a node in the session/DB with the UUID {node_uuid}') diff --git a/tests/backends/aiida_sqlalchemy/test_session.py b/tests/backends/aiida_sqlalchemy/test_session.py index 2a91e6c45d..28473017f1 100644 --- a/tests/backends/aiida_sqlalchemy/test_session.py +++ b/tests/backends/aiida_sqlalchemy/test_session.py @@ -164,9 +164,7 @@ def test_node_access_with_sessions(self): def check_attrs_match(name): node_attr = getattr(node, name) dbnode_attr = getattr(dbnode_reloaded, name) - self.assertEqual( - node_attr, 
dbnode_attr, "Values of '{}' don't match ({} != {})".format(name, node_attr, dbnode_attr) - ) + self.assertEqual(node_attr, dbnode_attr, f"Values of '{name}' don't match ({node_attr} != {dbnode_attr})") def do_value_checks(attr_name, original, changed): try: diff --git a/tests/backends/aiida_sqlalchemy/test_utils.py b/tests/backends/aiida_sqlalchemy/test_utils.py index 63484bb567..92380d9d36 100644 --- a/tests/backends/aiida_sqlalchemy/test_utils.py +++ b/tests/backends/aiida_sqlalchemy/test_utils.py @@ -58,7 +58,7 @@ def database_exists(url): try: if engine.dialect.name == 'postgresql': - text = "SELECT 1 FROM pg_database WHERE datname='%s'" % database + text = f"SELECT 1 FROM pg_database WHERE datname='{database}'" return bool(engine.execute(text).scalar()) raise Exception('Only PostgreSQL is supported.') @@ -98,7 +98,7 @@ def create_database(url, encoding='utf8'): from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT engine.raw_connection().set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) - text = "CREATE DATABASE {0} ENCODING '{1}'".format(quote(engine, database), encoding) + text = f"CREATE DATABASE {quote(engine, database)} ENCODING '{encoding}'" engine.execute(text) diff --git a/tests/benchmark/test_engine.py b/tests/benchmark/test_engine.py index 1f5d6a038b..77009e5c96 100644 --- a/tests/benchmark/test_engine.py +++ b/tests/benchmark/test_engine.py @@ -55,7 +55,7 @@ class WorkchainLoopWcSerial(WorkchainLoop): def run_task(self): future = self.submit(WorkchainLoop, iterations=Int(1)) - return ToContext(**{'wkchain' + str(self.ctx.counter): future}) + return ToContext(**{f'wkchain{str(self.ctx.counter)}': future}) class WorkchainLoopWcThreaded(WorkchainLoop): @@ -68,7 +68,7 @@ def init_loop(self): def run_task(self): context = { - 'wkchain' + str(i): self.submit(WorkchainLoop, iterations=Int(1)) + f'wkchain{str(i)}': self.submit(WorkchainLoop, iterations=Int(1)) for i in range(self.inputs.iterations.value) } return ToContext(**context) @@ -102,7 +102,7 @@ def run_task(self): 'y': Int(2), 'code': self.inputs.code, } - futures['addition' + str(i)] = self.submit(ArithmeticAddCalculation, **inputs) + futures[f'addition{str(i)}'] = self.submit(ArithmeticAddCalculation, **inputs) return ToContext(**futures) diff --git a/tests/benchmark/test_importexport.py b/tests/benchmark/test_importexport.py index d81f8a6e9c..603343d7c8 100644 --- a/tests/benchmark/test_importexport.py +++ b/tests/benchmark/test_importexport.py @@ -39,7 +39,7 @@ def recursive_provenance(in_node, depth, breadth, num_objects=0): out_node = Dict(dict={str(i): i for i in range(10)}) for idx in range(num_objects): - out_node.put_object_from_filelike(StringIO('a' * 10000), 'key' + str(idx)) + out_node.put_object_from_filelike(StringIO('a' * 10000), f'key{str(idx)}') out_node.add_incoming(calcfunc, link_type=LinkType.CREATE, link_label='output') out_node.store() diff --git a/tests/calculations/arithmetic/test_add.py b/tests/calculations/arithmetic/test_add.py index 59a1f6178b..f976945d75 100644 --- a/tests/calculations/arithmetic/test_add.py +++ b/tests/calculations/arithmetic/test_add.py @@ -40,7 +40,7 @@ def test_add_default(fixture_sandbox, aiida_localhost, generate_calc_job): with fixture_sandbox.open(options['input_filename'].default) as handle: input_written = handle.read() - assert input_written == 'echo $(({} + {}))\n'.format(inputs['x'].value, inputs['y'].value) + assert input_written == f"echo $(({inputs['x'].value} + {inputs['y'].value}))\n" @pytest.mark.usefixtures('clear_database_before_test') diff --git 
a/tests/calculations/test_templatereplacer.py b/tests/calculations/test_templatereplacer.py index ff700ff169..3330a3ae46 100644 --- a/tests/calculations/test_templatereplacer.py +++ b/tests/calculations/test_templatereplacer.py @@ -67,7 +67,7 @@ def test_base_template(fixture_sandbox, aiida_localhost, generate_calc_job): # Check the content of the generated script with fixture_sandbox.open(inputs['template']['input_file_name']) as handle: input_written = handle.read() - assert input_written == 'echo $(({} + {}))'.format(inputs['parameters']['x'], inputs['parameters']['y']) + assert input_written == f"echo $(({inputs['parameters']['x']} + {inputs['parameters']['y']}))" @pytest.mark.usefixtures('clear_database_before_test') diff --git a/tests/cmdline/commands/test_calcjob.py b/tests/cmdline/commands/test_calcjob.py index 3ee3a833f2..b62485a790 100644 --- a/tests/cmdline/commands/test_calcjob.py +++ b/tests/cmdline/commands/test_calcjob.py @@ -270,11 +270,10 @@ def test_calcjob_inoutputcat_old(self): # Make sure add_job does not specify options 'input_filename' and 'output_filename' self.assertIsNone( - add_job.get_option('input_filename'), msg="'input_filename' should not be an option for {}".format(add_job) + add_job.get_option('input_filename'), msg=f"'input_filename' should not be an option for {add_job}" ) self.assertIsNone( - add_job.get_option('output_filename'), - msg="'output_filename' should not be an option for {}".format(add_job) + add_job.get_option('output_filename'), msg=f"'output_filename' should not be an option for {add_job}" ) # Run `verdi calcjob inputcat add_job` diff --git a/tests/cmdline/commands/test_code.py b/tests/cmdline/commands/test_code.py index bda2992d40..d30362cd65 100644 --- a/tests/cmdline/commands/test_code.py +++ b/tests/cmdline/commands/test_code.py @@ -49,25 +49,23 @@ def test_noninteractive_remote(self): """Test non-interactive remote code setup.""" label = 'noninteractive_remote' options = [ - '--non-interactive', '--label={}'.format(label), '--description=description', - '--input-plugin=arithmetic.add', '--on-computer', '--computer={}'.format(self.computer.label), - '--remote-abs-path=/remote/abs/path' + '--non-interactive', f'--label={label}', '--description=description', '--input-plugin=arithmetic.add', + '--on-computer', f'--computer={self.computer.label}', '--remote-abs-path=/remote/abs/path' ] result = self.cli_runner.invoke(setup_code, options) self.assertClickResultNoException(result) - self.assertIsInstance(orm.Code.get_from_string('{}@{}'.format(label, self.computer.label)), orm.Code) + self.assertIsInstance(orm.Code.get_from_string(f'{label}@{self.computer.label}'), orm.Code) def test_noninteractive_upload(self): """Test non-interactive code setup.""" label = 'noninteractive_upload' options = [ - '--non-interactive', '--label={}'.format(label), '--description=description', - '--input-plugin=arithmetic.add', '--store-in-db', '--code-folder={}'.format(self.this_folder), - '--code-rel-path={}'.format(self.this_file) + '--non-interactive', f'--label={label}', '--description=description', '--input-plugin=arithmetic.add', + '--store-in-db', f'--code-folder={self.this_folder}', f'--code-rel-path={self.this_file}' ] result = self.cli_runner.invoke(setup_code, options) self.assertClickResultNoException(result) - self.assertIsInstance(orm.Code.get_from_string('{}'.format(label)), orm.Code) + self.assertIsInstance(orm.Code.get_from_string(f'{label}'), orm.Code) def test_from_config(self): """Test setting up a code from a config file. 
@@ -96,7 +94,7 @@ def test_from_config(self): ['--non-interactive', '--config', os.path.realpath(handle.name)] ) self.assertClickResultNoException(result) - self.assertIsInstance(orm.Code.get_from_string('{}'.format(label)), orm.Code) + self.assertIsInstance(orm.Code.get_from_string(f'{label}'), orm.Code) # url label = 'noninteractive_config_url' @@ -108,7 +106,7 @@ def test_from_config(self): result = self.cli_runner.invoke(setup_code, ['--non-interactive', '--config', fake_url]) self.assertClickResultNoException(result) - self.assertIsInstance(orm.Code.get_from_string('{}'.format(label)), orm.Code) + self.assertIsInstance(orm.Code.get_from_string(f'{label}'), orm.Code) class TestVerdiCodeCommands(AiidaTestCase): @@ -189,7 +187,7 @@ def test_code_list(self): code.label = 'code2' code.store() - options = ['-A', '-a', '-o', '--input-plugin=arithmetic.add', '--computer={}'.format(self.computer.label)] + options = ['-A', '-a', '-o', '--input-plugin=arithmetic.add', f'--computer={self.computer.label}'] result = self.cli_runner.invoke(code_list, options) self.assertIsNone(result.exception, result.output) self.assertTrue(str(self.code.pk) in result.output, 'PK of first code should be included') @@ -218,7 +216,7 @@ def test_code_show(self): def test_code_duplicate_non_interactive(self): """Test code duplication non-interactive.""" label = 'code_duplicate_noninteractive' - result = self.cli_runner.invoke(code_duplicate, ['--non-interactive', '--label=' + label, str(self.code.pk)]) + result = self.cli_runner.invoke(code_duplicate, ['--non-interactive', f'--label={label}', str(self.code.pk)]) self.assertIsNone(result.exception, result.output) new_code = orm.Code.get_from_string(label) @@ -246,7 +244,7 @@ def test_interactive_remote(clear_database_before_test, aiida_localhost, non_int user_input = '\n'.join([label, 'description', 'arithmetic.add', 'yes', aiida_localhost.label, '/remote/abs/path']) result = CliRunner().invoke(setup_code, input=user_input) assert result.exception is None - assert isinstance(orm.Code.get_from_string('{}@{}'.format(label, aiida_localhost.label)), orm.Code) + assert isinstance(orm.Code.get_from_string(f'{label}@{aiida_localhost.label}'), orm.Code) @pytest.mark.parametrize('non_interactive_editor', ('sleep 1; vim -cwq',), indirect=True) @@ -258,7 +256,7 @@ def test_interactive_upload(clear_database_before_test, aiida_localhost, non_int user_input = '\n'.join([label, 'description', 'arithmetic.add', 'no', dirname, basename]) result = CliRunner().invoke(setup_code, input=user_input) assert result.exception is None - assert isinstance(orm.Code.get_from_string('{}'.format(label)), orm.Code) + assert isinstance(orm.Code.get_from_string(f'{label}'), orm.Code) @pytest.mark.parametrize('non_interactive_editor', ('sleep 1; vim -cwq',), indirect=True) @@ -270,14 +268,14 @@ def test_mixed(clear_database_before_test, aiida_localhost, non_interactive_edit user_input = '\n'.join([label, 'arithmetic.add', aiida_localhost.label]) result = CliRunner().invoke(setup_code, options, input=user_input) assert result.exception is None - assert isinstance(Code.get_from_string('{}@{}'.format(label, aiida_localhost.label)), Code) + assert isinstance(Code.get_from_string(f'{label}@{aiida_localhost.label}'), Code) @pytest.mark.parametrize('non_interactive_editor', ('sleep 1; vim -cwq',), indirect=True) def test_code_duplicate_interactive(clear_database_before_test, aiida_local_code_factory, non_interactive_editor): """Test code duplication interactive.""" label = 'code_duplicate_interactive' - 
user_input = label + '\n\n\n\n\n\n' + user_input = f'{label}\n\n\n\n\n\n' code = aiida_local_code_factory('arithmetic.add', '/bin/cat', label='code') result = CliRunner().invoke(code_duplicate, [str(code.pk)], input=user_input) assert result.exception is None, result.exception @@ -295,7 +293,7 @@ def test_code_duplicate_ignore(clear_database_before_test, aiida_local_code_fact Regression test for: https://github.com/aiidateam/aiida-core/issues/3770 """ label = 'code_duplicate_interactive' - user_input = label + '\n!\n\n\n\n\n' + user_input = f'{label}\n!\n\n\n\n\n' code = aiida_local_code_factory('arithmetic.add', '/bin/cat', label='code') result = CliRunner().invoke(code_duplicate, [str(code.pk)], input=user_input, catch_exceptions=False) assert result.exception is None, result.exception diff --git a/tests/cmdline/commands/test_comment.py b/tests/cmdline/commands/test_comment.py index 23a1fa3458..e72b7dae58 100644 --- a/tests/cmdline/commands/test_comment.py +++ b/tests/cmdline/commands/test_comment.py @@ -35,7 +35,7 @@ def test_comment_show(self): def test_comment_add(self): """Test adding a comment.""" - options = ['-N', str(self.node.pk), '--', '{}'.format(COMMENT)] + options = ['-N', str(self.node.pk), '--', f'{COMMENT}'] result = self.cli_runner.invoke(cmd_comment.add, options, catch_exceptions=False) self.assertEqual(result.exit_code, 0) diff --git a/tests/cmdline/commands/test_computer.py b/tests/cmdline/commands/test_computer.py index 975b9bec40..57b8767524 100644 --- a/tests/cmdline/commands/test_computer.py +++ b/tests/cmdline/commands/test_computer.py @@ -75,9 +75,9 @@ def generate_setup_options(ordereddict): options = [] for key, value in ordereddict.items(): if value is None: - options.append('--{}'.format(key)) + options.append(f'--{key}') else: - options.append('--{}={}'.format(key, value)) + options.append(f'--{key}={value}') return options @@ -144,7 +144,7 @@ def test_mixed(self): options = generate_setup_options(non_interactive_options_dict) result = self.cli_runner.invoke(computer_setup, options, input=user_input) - self.assertIsNone(result.exception, msg='There was an unexpected exception. Output: {}'.format(result.output)) + self.assertIsNone(result.exception, msg=f'There was an unexpected exception. 
Output: {result.output}') new_computer = orm.Computer.objects.get(label=label) self.assertIsInstance(new_computer, orm.Computer) @@ -286,12 +286,12 @@ def test_noninteractive_from_config(self): label = 'noninteractive_config' with tempfile.NamedTemporaryFile('w') as handle: - handle.write("""--- -label: {l} + handle.write(f"""--- +label: {label} hostname: myhost transport: local scheduler: direct -""".format(l=label)) +""") handle.flush() options = ['--non-interactive', '--config', os.path.realpath(handle.name)] @@ -431,9 +431,9 @@ def test_local_from_config(self): interval = 20 with tempfile.NamedTemporaryFile('w') as handle: - handle.write("""--- + handle.write(f"""--- safe_interval: {interval} -""".format(interval=interval)) +""") handle.flush() options = ['local', computer.label, '--config', os.path.realpath(handle.name)] @@ -479,7 +479,7 @@ def test_ssh_ni_username(self): comp.store() username = 'TEST' - options = ['ssh', comp.label, '--non-interactive', '--username={}'.format(username), '--safe-interval', '1'] + options = ['ssh', comp.label, '--non-interactive', f'--username={username}', '--safe-interval', '1'] result = self.cli_runner.invoke(computer_configure, options, catch_exceptions=False) self.assertTrue(comp.is_user_configured(self.user), msg=result.output) self.assertEqual( @@ -685,7 +685,7 @@ def test_computer_duplicate_interactive(self): os.environ['VISUAL'] = 'sleep 1; vim -cwq' os.environ['EDITOR'] = 'sleep 1; vim -cwq' label = 'computer_duplicate_interactive' - user_input = label + '\n\n\n\n\n\n\n\n\nN' + user_input = f'{label}\n\n\n\n\n\n\n\n\nN' result = self.cli_runner.invoke( computer_duplicate, [str(self.comp.pk)], input=user_input, catch_exceptions=False ) @@ -708,7 +708,7 @@ def test_computer_duplicate_non_interactive(self): label = 'computer_duplicate_noninteractive' result = self.cli_runner.invoke( computer_duplicate, - ['--non-interactive', '--label=' + label, str(self.comp.pk)] + ['--non-interactive', f'--label={label}', str(self.comp.pk)] ) self.assertIsNone(result.exception, result.output) @@ -737,7 +737,7 @@ def test_interactive(clear_database_before_test, aiida_localhost, non_interactiv user_input = '\n'.join(generate_setup_options_interactive(options_dict)) result = CliRunner().invoke(computer_setup, input=user_input) - assert result.exception is None, 'There was an unexpected exception. Output: {}'.format(result.output) + assert result.exception is None, f'There was an unexpected exception. 
Output: {result.output}' new_computer = orm.Computer.objects.get(label=label) assert isinstance(new_computer, orm.Computer) diff --git a/tests/cmdline/commands/test_config.py b/tests/cmdline/commands/test_config.py index e0dbd33a11..5aec1ed3c5 100644 --- a/tests/cmdline/commands/test_config.py +++ b/tests/cmdline/commands/test_config.py @@ -73,7 +73,7 @@ def test_config_unset_option(self): options = ['config', option_name, '--unset'] result = self.cli_runner.invoke(cmd_verdi.verdi, options) self.assertClickSuccess(result) - self.assertIn('{} unset'.format(option_name), result.output.strip()) + self.assertIn(f'{option_name} unset', result.output.strip()) options = ['config', option_name] result = self.cli_runner.invoke(cmd_verdi.verdi, options) diff --git a/tests/cmdline/commands/test_data.py b/tests/cmdline/commands/test_data.py index 7d9609e327..c280f4bbc5 100644 --- a/tests/cmdline/commands/test_data.py +++ b/tests/cmdline/commands/test_data.py @@ -50,7 +50,7 @@ def data_export_test(self, datatype, ids, supported_formats): } if datatype is None or datatype not in datatype_mapping.keys(): - raise Exception('The listing of the objects {} is not supported'.format(datatype)) + raise Exception(f'The listing of the objects {datatype} is not supported') export_cmd = datatype_mapping[datatype] @@ -63,10 +63,7 @@ def data_export_test(self, datatype, ids, supported_formats): for frmt in supported_formats: options = [flag, frmt, str(ids[self.NODE_ID_STR])] res = self.cli_runner.invoke(export_cmd, options, catch_exceptions=False) - self.assertEqual( - res.exit_code, 0, 'The command did not finish ' - 'correctly. Output:\n{}'.format(res.output) - ) + self.assertEqual(res.exit_code, 0, f'The command did not finish correctly. Output:\n{res.output}') # Check that the output to file flags work correctly: # -o, --output @@ -77,10 +74,7 @@ def data_export_test(self, datatype, ids, supported_formats): filepath = os.path.join(tmpd, 'output_file.txt') options = [flag, filepath, str(ids[self.NODE_ID_STR])] res = self.cli_runner.invoke(export_cmd, options, catch_exceptions=False) - self.assertEqual( - res.exit_code, 0, 'The command should finish correctly.' - 'Output:\n{}'.format(res.output) - ) + self.assertEqual(res.exit_code, 0, f'The command should finish correctly. Output:\n{res.output}') # Try to export it again. It should fail because the # file exists @@ -91,10 +85,7 @@ def data_export_test(self, datatype, ids, supported_formats): # existing files options = [flag, filepath, '-f', str(ids[self.NODE_ID_STR])] res = self.cli_runner.invoke(export_cmd, options, catch_exceptions=False) - self.assertEqual( - res.exit_code, 0, 'The command should finish correctly.' 
- 'Output: {}'.format(res.output) - ) + self.assertEqual(res.exit_code, 0, f'The command should finish correctly. Output: {res.output}') finally: shutil.rmtree(tmpd) @@ -127,7 +118,7 @@ def data_listing_test(self, datatype, search_string, ids): } if datatype is None or datatype not in datatype_mapping.keys(): - raise Exception('The listing of the objects {} is not supported'.format(datatype)) + raise Exception(f'The listing of the objects {datatype} is not supported') listing_cmd = datatype_mapping[datatype] @@ -136,9 +127,7 @@ def data_listing_test(self, datatype, search_string, ids): # Check that the normal listing works as expected res = self.cli_runner.invoke(listing_cmd, [], catch_exceptions=False) - self.assertIn( - search_string_bytes, res.stdout_bytes, 'The string {} was not found in the listing'.format(search_string) - ) + self.assertIn(search_string_bytes, res.stdout_bytes, f'The string {search_string} was not found in the listing') # Check that the past days filter works as expected past_days_flags = ['-p', '--past-days'] @@ -146,15 +135,13 @@ def data_listing_test(self, datatype, search_string, ids): options = [flag, '1'] res = self.cli_runner.invoke(listing_cmd, options, catch_exceptions=False) self.assertIn( - search_string_bytes, res.stdout_bytes, - 'The string {} was not found in the listing'.format(search_string) + search_string_bytes, res.stdout_bytes, f'The string {search_string} was not found in the listing' ) options = [flag, '0'] res = self.cli_runner.invoke(listing_cmd, options, catch_exceptions=False) self.assertNotIn( - search_string_bytes, res.stdout_bytes, - 'A not expected string {} was found in the listing'.format(search_string) + search_string_bytes, res.stdout_bytes, f'A not expected string {search_string} was found in the listing' ) # Check that the group filter works as expected @@ -214,7 +201,7 @@ def test_reachable(self): subcommands = ['array', 'bands', 'cif', 'dict', 'remote', 'structure', 'trajectory', 'upf'] for sub_cmd in subcommands: output = sp.check_output(['verdi', 'data', sub_cmd, '--help']) - self.assertIn(b'Usage:', output, 'Sub-command verdi data {} --help failed.'.format(sub_cmd)) + self.assertIn(b'Usage:', output, f'Sub-command verdi data {sub_cmd} --help failed.') class TestVerdiDataArray(AiidaTestCase): @@ -911,7 +898,7 @@ def upload_family(self): def test_uploadfamilyhelp(self): output = sp.check_output(['verdi', 'data', 'upf', 'uploadfamily', '--help']) - self.assertIn(b'Usage:', output, 'Sub-command verdi data upf uploadfamily --help failed: {}'.format(output)) + self.assertIn(b'Usage:', output, f'Sub-command verdi data upf uploadfamily --help failed: {output}') def test_uploadfamily(self): self.upload_family() @@ -934,7 +921,7 @@ def test_exportfamily(self): output = sp.check_output(['ls', path]) self.assertIn( b'Ba.pbesol-spn-rrkjus_psl.0.2.3-tot-pslib030.UPF', output, - 'Sub-command verdi data upf exportfamily --help failed: {}'.format(output) + f'Sub-command verdi data upf exportfamily --help failed: {output}' ) self.assertIn( b'O.pbesol-n-rrkjus_psl.0.1-tested-pslib030.UPF', output, diff --git a/tests/cmdline/commands/test_export.py b/tests/cmdline/commands/test_export.py index 9a404bc3f3..bccb5e7bb8 100644 --- a/tests/cmdline/commands/test_export.py +++ b/tests/cmdline/commands/test_export.py @@ -64,7 +64,7 @@ def setUpClass(cls, *args, **kwargs): # Utility helper cls.fixture_archive = 'export/migrate' - cls.newest_archive = 'export_v{}_simple.aiida'.format(EXPORT_VERSION) + cls.newest_archive = 
f'export_v{EXPORT_VERSION}_simple.aiida' cls.penultimate_archive = 'export_v0.6_simple.aiida' @classmethod @@ -145,7 +145,7 @@ def test_migrate_versions_old(self): """Migrating archives with a version older than the current should work.""" archives = [] for version in range(1, int(EXPORT_VERSION.split('.')[-1]) - 1): - archives.append('export_v0.{}_simple.aiida'.format(version)) + archives.append(f'export_v0.{version}_simple.aiida') for archive in archives: @@ -269,7 +269,7 @@ def test_inspect(self): """Test the functionality of `verdi export inspect`.""" archives = [] for version in range(1, int(EXPORT_VERSION.split('.')[-1])): - archives.append(('export_v0.{}_simple.aiida'.format(version), '0.{}'.format(version))) + archives.append((f'export_v0.{version}_simple.aiida', f'0.{version}')) for archive, version_number in archives: diff --git a/tests/cmdline/commands/test_graph.py b/tests/cmdline/commands/test_graph.py index 82c07d565c..322a6f5730 100644 --- a/tests/cmdline/commands/test_graph.py +++ b/tests/cmdline/commands/test_graph.py @@ -66,7 +66,7 @@ def test_generate_graph(self): """ # Get a PK of a node which exists root_node = str(self.node.pk) - filename = root_node + '.dot.pdf' + filename = f'{root_node}.dot.pdf' options = [root_node] try: result = self.cli_runner.invoke(cmd_graph.generate, options) @@ -86,7 +86,7 @@ def test_catch_bad_pk(self): # Forbidden pk for root_node in ['xyz', '-5', '3.14']: options = [root_node] - filename = root_node + '.dot.pdf' + filename = f'{root_node}.dot.pdf' try: result = self.cli_runner.invoke(cmd_graph.generate, options) self.assertIsNotNone(result.exception) @@ -105,7 +105,7 @@ def test_catch_bad_pk(self): pass # Make sure verdi graph rejects this non-existant pk try: - filename = str(root_node) + '.dot.pdf' + filename = f'{str(root_node)}.dot.pdf' options = [str(root_node)] result = self.cli_runner.invoke(cmd_graph.generate, options) self.assertIsNotNone(result.exception) @@ -120,7 +120,7 @@ def test_check_recursion_flags(self): positive ints """ root_node = str(self.node.pk) - filename = root_node + '.dot.pdf' + filename = f'{root_node}.dot.pdf' # Test that the options don't fail for opt in ['-a', '--ancestor-depth', '-d', '--descendant-depth']: @@ -159,7 +159,7 @@ def test_check_io_flags(self): Test the input and output flags work. """ root_node = str(self.node.pk) - filename = root_node + '.dot.pdf' + filename = f'{root_node}.dot.pdf' for flag in ['-i', '--process-in', '-o', '--process-out']: options = [flag, root_node] @@ -183,7 +183,7 @@ def test_output_format(self): # we just use the built-ins dot and canon as a minimal check that # the option works. After all, this test is for the cmdline. for fileformat in ['pdf', 'png']: - filename = root_node + '.dot.' 
+ fileformat + filename = f'{root_node}.dot.{fileformat}' options = [option, fileformat, root_node] try: result = self.cli_runner.invoke(cmd_graph.generate, options) @@ -197,7 +197,7 @@ def test_node_id_label_format(self): Test that the node id label format can be specified """ root_node = str(self.node.pk) - filename = root_node + '.dot.pdf' + filename = f'{root_node}.dot.pdf' for id_label_type in ['uuid', 'pk', 'label']: options = ['--identifier', id_label_type, root_node] diff --git a/tests/cmdline/commands/test_group.py b/tests/cmdline/commands/test_group.py index 73eb67a9c2..38c99042c8 100644 --- a/tests/cmdline/commands/test_group.py +++ b/tests/cmdline/commands/test_group.py @@ -278,8 +278,7 @@ def test_copy_existing_group(self): result = self.cli_runner.invoke(cmd_group.group_copy, options) self.assertClickResultNoException(result) self.assertIn( - 'Success: Nodes copied from group<{}> to group<{}>'.format(source_label, dest_label), result.output, - result.exception + f'Success: Nodes copied from group<{source_label}> to group<{dest_label}>', result.output, result.exception ) # Check destination group exists with source group's nodes @@ -292,7 +291,7 @@ def test_copy_existing_group(self): result = self.cli_runner.invoke(cmd_group.group_copy, options) self.assertIsNotNone(result.exception, result.output) self.assertIn( - 'Warning: Destination group<{}> already exists and is not empty.'.format(dest_label), result.output, + f'Warning: Destination group<{dest_label}> already exists and is not empty.', result.output, result.exception ) diff --git a/tests/cmdline/commands/test_group_ls.py b/tests/cmdline/commands/test_group_ls.py index d1982d56b5..f74f4379cf 100644 --- a/tests/cmdline/commands/test_group_ls.py +++ b/tests/cmdline/commands/test_group_ls.py @@ -23,7 +23,7 @@ def setup_groups(clear_database_before_test): """Setup some groups for testing.""" for label in ['a', 'a/b', 'a/c/d', 'a/c/e/g', 'a/f']: group, _ = orm.Group.objects.get_or_create(label) - group.description = 'A description of {}'.format(label) + group.description = f'A description of {label}' orm.UpfFamily.objects.get_or_create('a/x') yield diff --git a/tests/cmdline/commands/test_import.py b/tests/cmdline/commands/test_import.py index cd1668aded..ab9ca46745 100644 --- a/tests/cmdline/commands/test_import.py +++ b/tests/cmdline/commands/test_import.py @@ -32,7 +32,7 @@ def setUpClass(cls, *args, **kwargs): cls.url_path = 'https://raw.githubusercontent.com/aiidateam/aiida-core/' \ '0599dabf0887bee172a04f308307e99e3c3f3ff2/aiida/backends/tests/fixtures/export/migrate/' cls.archive_path = 'export/migrate' - cls.newest_archive = 'export_v{}_simple.aiida'.format(EXPORT_VERSION) + cls.newest_archive = f'export_v{EXPORT_VERSION}_simple.aiida' def setUp(self): self.cli_runner = CliRunner() @@ -103,9 +103,7 @@ def test_import_to_group(self): self.assertEqual( group.count(), nodes_in_group, - msg='The Group count should not have changed from {}. Instead it is now {}'.format( - nodes_in_group, group.count() - ) + msg=f'The Group count should not have changed from {nodes_in_group}. 
Instead it is now {group.count()}' ) # Invoke `verdi import` again with new archive, making sure Group count is upped @@ -131,7 +129,7 @@ def test_import_make_new_group(self): # Check Group does not already exist group_search = Group.objects.find(filters={'label': group_label}) self.assertEqual( - len(group_search), 0, msg="A Group with label '{}' already exists, this shouldn't be.".format(group_label) + len(group_search), 0, msg=f"A Group with label '{group_label}' already exists, this shouldn't be." ) # Invoke `verdi import`, making sure there are no exceptions @@ -157,7 +155,7 @@ def test_comment_mode(self): self.assertIsNone(result.exception, result.output) self.assertTrue( any([re.fullmatch(r'Comment rules[\s]*{}'.format(mode), line) for line in result.output.split('\n')]), - msg='Mode: {}. Output: {}'.format(mode, result.output) + msg=f'Mode: {mode}. Output: {result.output}' ) self.assertEqual(result.exit_code, 0, result.output) @@ -167,7 +165,7 @@ def test_import_old_local_archives(self): """ archives = [] for version in range(1, int(EXPORT_VERSION.split('.')[-1]) - 1): - archives.append(('export_v0.{}_simple.aiida'.format(version), '0.{}'.format(version))) + archives.append((f'export_v0.{version}_simple.aiida', f'0.{version}')) for archive, version in archives: options = [get_archive_file(archive, filepath=self.archive_path)] @@ -176,7 +174,7 @@ def test_import_old_local_archives(self): self.assertIsNone(result.exception, msg=result.output) self.assertEqual(result.exit_code, 0, msg=result.output) self.assertIn(version, result.output, msg=result.exception) - self.assertIn('Success: imported archive {}'.format(options[0]), result.output, msg=result.exception) + self.assertIn(f'Success: imported archive {options[0]}', result.output, msg=result.exception) def test_import_old_url_archives(self): """ Test import of old URL archives @@ -191,7 +189,7 @@ def test_import_old_url_archives(self): self.assertIsNone(result.exception, msg=result.output) self.assertEqual(result.exit_code, 0, msg=result.output) self.assertIn(version, result.output, msg=result.exception) - self.assertIn('Success: imported archive {}'.format(options[0]), result.output, msg=result.exception) + self.assertIn(f'Success: imported archive {options[0]}', result.output, msg=result.exception) def test_import_url_and_local_archives(self): """Test import of both a remote and local archive""" @@ -217,7 +215,7 @@ def test_import_url_timeout(self): with self.assertRaises(BadParameter) as cmd_exc: test_timeout_path(timeout_url) - error_message = 'Path "{}" could not be reached within 0 s.'.format(timeout_url) + error_message = f'Path "{timeout_url}" could not be reached within 0 s.' self.assertIn(error_message, str(cmd_exc.exception), str(cmd_exc.exception)) def test_raise_malformed_url(self): @@ -240,8 +238,8 @@ def test_non_interactive_and_migration(self): `migration` = False, `non_interactive` = True, Expected: No query, no migrate """ archive = get_archive_file('export_v0.1_simple.aiida', filepath=self.archive_path) - confirm_message = 'Do you want to try and migrate {} to the newest export file version?'.format(archive) - success_message = 'Success: imported archive {}'.format(archive) + confirm_message = f'Do you want to try and migrate {archive} to the newest export file version?' 
+ success_message = f'Success: imported archive {archive}' # Import "normally", but explicitly specifying `--migration`, make sure confirm message is present # `migration` = True (default), `non_interactive` = False (default), Expected: Query user, migrate diff --git a/tests/cmdline/commands/test_node.py b/tests/cmdline/commands/test_node.py index c91fc6dc00..a99de1b532 100644 --- a/tests/cmdline/commands/test_node.py +++ b/tests/cmdline/commands/test_node.py @@ -309,7 +309,7 @@ def test_generate_graph(self): """ # Get a PK of a node which exists root_node = str(self.node.pk) - filename = root_node + '.dot.pdf' + filename = f'{root_node}.dot.pdf' options = [root_node] try: result = self.cli_runner.invoke(cmd_node.graph_generate, options) @@ -329,7 +329,7 @@ def test_catch_bad_pk(self): # Forbidden pk for root_node in ['xyz', '-5', '3.14']: options = [root_node] - filename = root_node + '.dot.pdf' + filename = f'{root_node}.dot.pdf' try: result = self.cli_runner.invoke(cmd_node.graph_generate, options) self.assertIsNotNone(result.exception) @@ -348,7 +348,7 @@ def test_catch_bad_pk(self): pass # Make sure verdi graph rejects this non-existant pk try: - filename = str(root_node) + '.dot.pdf' + filename = f'{str(root_node)}.dot.pdf' options = [str(root_node)] result = self.cli_runner.invoke(cmd_node.graph_generate, options) self.assertIsNotNone(result.exception) @@ -363,7 +363,7 @@ def test_check_recursion_flags(self): positive ints """ root_node = str(self.node.pk) - filename = root_node + '.dot.pdf' + filename = f'{root_node}.dot.pdf' # Test that the options don't fail for opt in ['-a', '--ancestor-depth', '-d', '--descendant-depth']: @@ -402,7 +402,7 @@ def test_check_io_flags(self): Test the input and output flags work. """ root_node = str(self.node.pk) - filename = root_node + '.dot.pdf' + filename = f'{root_node}.dot.pdf' for flag in ['-i', '--process-in', '-o', '--process-out']: options = [flag, root_node] @@ -426,7 +426,7 @@ def test_output_format(self): # we just use the built-ins dot and canon as a minimal check that # the option works. After all, this test is for the cmdline. for fileformat in ['pdf', 'png']: - filename = root_node + '.dot.' 
+ fileformat + filename = f'{root_node}.dot.{fileformat}' options = [option, fileformat, root_node] try: result = self.cli_runner.invoke(cmd_node.graph_generate, options) @@ -440,7 +440,7 @@ def test_node_id_label_format(self): Test that the node id label format can be specified """ root_node = str(self.node.pk) - filename = root_node + '.dot.pdf' + filename = f'{root_node}.dot.pdf' for id_label_type in ['uuid', 'pk', 'label']: options = ['--identifier', id_label_type, root_node] @@ -479,7 +479,7 @@ def test_comment_show(self): def test_comment_add(self): """Test adding a comment.""" - options = ['-N', str(self.node.pk), '--', '{}'.format(COMMENT)] + options = ['-N', str(self.node.pk), '--', f'{COMMENT}'] result = self.cli_runner.invoke(cmd_node.comment_add, options, catch_exceptions=False) self.assertEqual(result.exit_code, 0) @@ -522,7 +522,7 @@ def test_rehash_interactive_yes(self): options = [] # no option, will ask in the prompt result = self.cli_runner.invoke(cmd_node.rehash, options, input='y') self.assertClickResultNoException(result) - self.assertTrue('{} nodes'.format(expected_node_count) in result.output) + self.assertTrue(f'{expected_node_count} nodes' in result.output) def test_rehash_interactive_no(self): """Passing no options and answering 'N' to the command will abort the command.""" @@ -537,7 +537,7 @@ def test_rehash(self): options = ['-f'] # force, so no questions are asked result = self.cli_runner.invoke(cmd_node.rehash, options) self.assertClickResultNoException(result) - self.assertTrue('{} nodes'.format(expected_node_count) in result.output) + self.assertTrue(f'{expected_node_count} nodes' in result.output) def test_rehash_bool(self): """Limiting the queryset by defining an entry point, in this case bool, should limit nodes to 2.""" @@ -545,7 +545,7 @@ def test_rehash_bool(self): options = ['-f', '-e', 'aiida.data:bool'] result = self.cli_runner.invoke(cmd_node.rehash, options) self.assertClickResultNoException(result) - self.assertTrue('{} nodes'.format(expected_node_count) in result.output) + self.assertTrue(f'{expected_node_count} nodes' in result.output) def test_rehash_float(self): """Limiting the queryset by defining an entry point, in this case float, should limit nodes to 1.""" @@ -553,7 +553,7 @@ def test_rehash_float(self): options = ['-f', '-e', 'aiida.data:float'] result = self.cli_runner.invoke(cmd_node.rehash, options) self.assertClickResultNoException(result) - self.assertTrue('{} nodes'.format(expected_node_count) in result.output) + self.assertTrue(f'{expected_node_count} nodes' in result.output) def test_rehash_int(self): """Limiting the queryset by defining an entry point, in this case int, should limit nodes to 1.""" @@ -561,7 +561,7 @@ def test_rehash_int(self): options = ['-f', '-e', 'aiida.data:int'] result = self.cli_runner.invoke(cmd_node.rehash, options) self.assertClickResultNoException(result) - self.assertTrue('{} nodes'.format(expected_node_count) in result.output) + self.assertTrue(f'{expected_node_count} nodes' in result.output) def test_rehash_explicit_pk(self): """Limiting the queryset by defining explicit identifiers, should limit nodes to 2 in this example.""" @@ -569,7 +569,7 @@ def test_rehash_explicit_pk(self): options = ['-f', str(self.node_bool_true.pk), str(self.node_float.uuid)] result = self.cli_runner.invoke(cmd_node.rehash, options) self.assertClickResultNoException(result) - self.assertTrue('{} nodes'.format(expected_node_count) in result.output) + self.assertTrue(f'{expected_node_count} nodes' in result.output) def 
test_rehash_explicit_pk_and_entry_point(self): """Limiting the queryset by defining explicit identifiers and entry point, should limit nodes to 1.""" @@ -577,7 +577,7 @@ def test_rehash_explicit_pk_and_entry_point(self): options = ['-f', '-e', 'aiida.data:bool', str(self.node_bool_true.pk), str(self.node_float.uuid)] result = self.cli_runner.invoke(cmd_node.rehash, options) self.assertClickResultNoException(result) - self.assertTrue('{} nodes'.format(expected_node_count) in result.output) + self.assertTrue(f'{expected_node_count} nodes' in result.output) def test_rehash_entry_point_no_matches(self): """Limiting the queryset by defining explicit entry point, with no nodes should exit with non-zero status.""" diff --git a/tests/cmdline/commands/test_profile.py b/tests/cmdline/commands/test_profile.py index fc774e2cce..120b09d762 100644 --- a/tests/cmdline/commands/test_profile.py +++ b/tests/cmdline/commands/test_profile.py @@ -74,8 +74,8 @@ def test_list(self): result = self.cli_runner.invoke(cmd_profile.profile_list) self.assertClickSuccess(result) - self.assertIn('Info: configuration folder: ' + self.config.dirpath, result.output) - self.assertIn('* {}'.format(self.profile_list[0]), result.output) + self.assertIn(f'Info: configuration folder: {self.config.dirpath}', result.output) + self.assertIn(f'* {self.profile_list[0]}', result.output) self.assertIn(self.profile_list[1], result.output) @with_temporary_config_instance @@ -89,8 +89,8 @@ def test_setdefault(self): result = self.cli_runner.invoke(cmd_profile.profile_list) self.assertClickSuccess(result) - self.assertIn('Info: configuration folder: ' + self.config.dirpath, result.output) - self.assertIn('* {}'.format(self.profile_list[1]), result.output) + self.assertIn(f'Info: configuration folder: {self.config.dirpath}', result.output) + self.assertIn(f'* {self.profile_list[1]}', result.output) self.assertClickSuccess(result) @with_temporary_config_instance diff --git a/tests/cmdline/commands/test_rehash.py b/tests/cmdline/commands/test_rehash.py index 0a89c1708d..7735952252 100644 --- a/tests/cmdline/commands/test_rehash.py +++ b/tests/cmdline/commands/test_rehash.py @@ -38,7 +38,7 @@ def test_rehash_interactive_yes(self): options = [] # no option, will ask in the prompt result = self.cli_runner.invoke(cmd_rehash.rehash, options, input='y') self.assertClickResultNoException(result) - self.assertTrue('{} nodes'.format(expected_node_count) in result.output) + self.assertTrue(f'{expected_node_count} nodes' in result.output) def test_rehash_interactive_no(self): """Passing no options and answering 'N' to the command will abort the command.""" @@ -53,7 +53,7 @@ def test_rehash(self): options = ['-f'] # force, so no questions are asked result = self.cli_runner.invoke(cmd_rehash.rehash, options) self.assertClickResultNoException(result) - self.assertTrue('{} nodes'.format(expected_node_count) in result.output) + self.assertTrue(f'{expected_node_count} nodes' in result.output) def test_rehash_bool(self): """Limiting the queryset by defining an entry point, in this case bool, should limit nodes to 2.""" @@ -61,7 +61,7 @@ def test_rehash_bool(self): options = ['-f', '-e', 'aiida.data:bool'] result = self.cli_runner.invoke(cmd_rehash.rehash, options) self.assertClickResultNoException(result) - self.assertTrue('{} nodes'.format(expected_node_count) in result.output) + self.assertTrue(f'{expected_node_count} nodes' in result.output) def test_rehash_float(self): """Limiting the queryset by defining an entry point, in this case float, should limit 
nodes to 1.""" @@ -69,7 +69,7 @@ def test_rehash_float(self): options = ['-f', '-e', 'aiida.data:float'] result = self.cli_runner.invoke(cmd_rehash.rehash, options) self.assertClickResultNoException(result) - self.assertTrue('{} nodes'.format(expected_node_count) in result.output) + self.assertTrue(f'{expected_node_count} nodes' in result.output) def test_rehash_int(self): """Limiting the queryset by defining an entry point, in this case int, should limit nodes to 1.""" @@ -77,7 +77,7 @@ def test_rehash_int(self): options = ['-f', '-e', 'aiida.data:int'] result = self.cli_runner.invoke(cmd_rehash.rehash, options) self.assertClickResultNoException(result) - self.assertTrue('{} nodes'.format(expected_node_count) in result.output) + self.assertTrue(f'{expected_node_count} nodes' in result.output) def test_rehash_explicit_pk(self): """Limiting the queryset by defining explicit identifiers, should limit nodes to 2 in this example.""" @@ -85,7 +85,7 @@ def test_rehash_explicit_pk(self): options = ['-f', str(self.node_bool_true.pk), str(self.node_float.uuid)] result = self.cli_runner.invoke(cmd_rehash.rehash, options) self.assertClickResultNoException(result) - self.assertTrue('{} nodes'.format(expected_node_count) in result.output) + self.assertTrue(f'{expected_node_count} nodes' in result.output) def test_rehash_explicit_pk_and_entry_point(self): """Limiting the queryset by defining explicit identifiers and entry point, should limit nodes to 1.""" @@ -93,7 +93,7 @@ def test_rehash_explicit_pk_and_entry_point(self): options = ['-f', '-e', 'aiida.data:bool', str(self.node_bool_true.pk), str(self.node_float.uuid)] result = self.cli_runner.invoke(cmd_rehash.rehash, options) self.assertClickResultNoException(result) - self.assertTrue('{} nodes'.format(expected_node_count) in result.output) + self.assertTrue(f'{expected_node_count} nodes' in result.output) def test_rehash_entry_point_no_matches(self): """Limiting the queryset by defining explicit entry point, with no nodes should exit with non-zero status.""" diff --git a/tests/cmdline/commands/test_run.py b/tests/cmdline/commands/test_run.py index 595ed2e131..b151f49639 100644 --- a/tests/cmdline/commands/test_run.py +++ b/tests/cmdline/commands/test_run.py @@ -437,5 +437,5 @@ def test_legacy_autogroup_name(self): ) self.assertEqual( all_auto_groups[0][0].label, group_label, - 'The auto group label is "{}" instead of "{}"'.format(all_auto_groups[0][0].label, group_label) + f'The auto group label is "{all_auto_groups[0][0].label}" instead of "{group_label}"' ) diff --git a/tests/cmdline/commands/test_setup.py b/tests/cmdline/commands/test_setup.py index c7c5fb8d18..d48061e3ee 100644 --- a/tests/cmdline/commands/test_setup.py +++ b/tests/cmdline/commands/test_setup.py @@ -87,13 +87,13 @@ def test_quicksetup_from_config_file(self): with tempfile.NamedTemporaryFile('w') as handle: handle.write( - """--- + f"""--- profile: testing first_name: Leopold last_name: Talirz institution: EPFL -db_backend: {} -email: 123@234.de""".format(self.backend) +db_backend: {self.backend} +email: 123@234.de""" ) handle.flush() result = self.cli_runner.invoke(cmd_setup.quicksetup, ['--config', os.path.realpath(handle.name)]) diff --git a/tests/cmdline/params/options/test_conditional.py b/tests/cmdline/params/options/test_conditional.py index 94c300eff8..f266288aac 100644 --- a/tests/cmdline/params/options/test_conditional.py +++ b/tests/cmdline/params/options/test_conditional.py @@ -110,7 +110,7 @@ def cmd(a_or_b, opt_a, opt_b): """test command for scenario a-or-b""" # 
pylint: disable=unused-argument - click.echo('{} / {}'.format(opt_a, opt_b)) + click.echo(f'{opt_a} / {opt_b}') runner = CliRunner() return runner, cmd @@ -183,7 +183,7 @@ def setup_flag_cond(**kwargs): def cmd(flag, opt_a): """A command with a flag and customizable options that depend on it.""" # pylint: disable=unused-argument - click.echo('{}'.format(opt_a)) + click.echo(f'{opt_a}') return cmd diff --git a/tests/cmdline/params/options/test_interactive.py b/tests/cmdline/params/options/test_interactive.py index bca18a7c7a..6b36db8f0c 100644 --- a/tests/cmdline/params/options/test_interactive.py +++ b/tests/cmdline/params/options/test_interactive.py @@ -44,7 +44,7 @@ def validate_positive_number(ctx, param, value): # pylint: disable=unused-argum """ if not isinstance(value, (int, float)) or value < 0: from click import BadParameter - raise BadParameter('{} is not a valid positive number'.format(value)) + raise BadParameter(f'{value} is not a valid positive number') class InteractiveOptionTest(unittest.TestCase): @@ -74,7 +74,7 @@ def prompt_output(self, cli_input, converted=None): """Return expected output of simple_command, given a commandline cli_input string.""" # pylint: disable=no-self-use - return 'Opt: {}\n{}\n'.format(cli_input, converted or cli_input) + return f'Opt: {cli_input}\n{converted or cli_input}\n' def test_callback_prompt_twice(self): """ @@ -160,7 +160,7 @@ def test_prompt_simple(self): for ptype, cli_input, output in params: cmd = self.simple_command(type=ptype, help='help msg') runner = CliRunner() - result = runner.invoke(cmd, [], input='\n?\n{}\n'.format(cli_input)) + result = runner.invoke(cmd, [], input=f'\n?\n{cli_input}\n') expected_1 = 'Opt: \nOpt: ?\n' expected_2 = 'help msg\n' expected_2 += self.prompt_output(cli_input, output) @@ -182,7 +182,7 @@ def test_prompt_complex(self): for ptype, cli_input in params: cmd = self.simple_command(type=ptype, help='help msg') runner = CliRunner() - result = runner.invoke(cmd, [], input='\n?\n{}\n'.format(cli_input)) + result = runner.invoke(cmd, [], input=f'\n?\n{cli_input}\n') expected_1 = 'Opt: \nOpt: ?\n' expected_2 = 'help msg\n' expected_2 += self.strip_line(self.prompt_output(cli_input)) diff --git a/tests/cmdline/params/types/test_calculation.py b/tests/cmdline/params/types/test_calculation.py index d91950a779..4dfe3caa3a 100644 --- a/tests/cmdline/params/types/test_calculation.py +++ b/tests/cmdline/params/types/test_calculation.py @@ -45,7 +45,7 @@ def test_get_by_id(self): """ Verify that using the ID will retrieve the correct entity """ - identifier = '{}'.format(self.entity_01.pk) + identifier = f'{self.entity_01.pk}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) @@ -53,7 +53,7 @@ def test_get_by_uuid(self): """ Verify that using the UUID will retrieve the correct entity """ - identifier = '{}'.format(self.entity_01.uuid) + identifier = f'{self.entity_01.uuid}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) @@ -61,7 +61,7 @@ def test_get_by_label(self): """ Verify that using the LABEL will retrieve the correct entity """ - identifier = '{}'.format(self.entity_01.label) + identifier = f'{self.entity_01.label}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) @@ -72,11 +72,11 @@ def test_ambiguous_label_pk(self): Verify that using an ambiguous identifier gives precedence to the ID interpretation Appending the special ambiguity breaker character 
will force the identifier to be treated as a LABEL """ - identifier = '{}'.format(self.entity_02.label) + identifier = f'{self.entity_02.label}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) - identifier = '{}{}'.format(self.entity_02.label, OrmEntityLoader.label_ambiguity_breaker) + identifier = f'{self.entity_02.label}{OrmEntityLoader.label_ambiguity_breaker}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_02.uuid) @@ -87,10 +87,10 @@ def test_ambiguous_label_uuid(self): Verify that using an ambiguous identifier gives precedence to the UUID interpretation Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL """ - identifier = '{}'.format(self.entity_03.label) + identifier = f'{self.entity_03.label}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) - identifier = '{}{}'.format(self.entity_03.label, OrmEntityLoader.label_ambiguity_breaker) + identifier = f'{self.entity_03.label}{OrmEntityLoader.label_ambiguity_breaker}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_03.uuid) diff --git a/tests/cmdline/params/types/test_code.py b/tests/cmdline/params/types/test_code.py index a3fd64e954..e14cad413e 100644 --- a/tests/cmdline/params/types/test_code.py +++ b/tests/cmdline/params/types/test_code.py @@ -45,7 +45,7 @@ def setup_codes(clear_database_before_test, aiida_localhost): def test_get_by_id(setup_codes, parameter_type): """Verify that using the ID will retrieve the correct entity.""" entity_01, entity_02, entity_03 = setup_codes - identifier = '{}'.format(entity_01.pk) + identifier = f'{entity_01.pk}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_01.uuid @@ -53,7 +53,7 @@ def test_get_by_id(setup_codes, parameter_type): def test_get_by_uuid(setup_codes, parameter_type): """Verify that using the UUID will retrieve the correct entity.""" entity_01, entity_02, entity_03 = setup_codes - identifier = '{}'.format(entity_01.uuid) + identifier = f'{entity_01.uuid}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_01.uuid @@ -61,7 +61,7 @@ def test_get_by_uuid(setup_codes, parameter_type): def test_get_by_label(setup_codes, parameter_type): """Verify that using the LABEL will retrieve the correct entity.""" entity_01, entity_02, entity_03 = setup_codes - identifier = '{}'.format(entity_01.label) + identifier = f'{entity_01.label}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_01.uuid @@ -69,7 +69,7 @@ def test_get_by_label(setup_codes, parameter_type): def test_get_by_fullname(setup_codes, parameter_type): """Verify that using the LABEL@machinename will retrieve the correct entity.""" entity_01, entity_02, entity_03 = setup_codes - identifier = '{}@{}'.format(entity_01.label, entity_01.computer.label) + identifier = f'{entity_01.label}@{entity_01.computer.label}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_01.uuid @@ -81,11 +81,11 @@ def test_ambiguous_label_pk(setup_codes, parameter_type): Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL """ entity_01, entity_02, entity_03 = setup_codes - identifier = '{}'.format(entity_02.label) + identifier = f'{entity_02.label}' result = parameter_type.convert(identifier, None, None) assert result.uuid == 
entity_01.uuid - identifier = '{}{}'.format(entity_02.label, OrmEntityLoader.label_ambiguity_breaker) + identifier = f'{entity_02.label}{OrmEntityLoader.label_ambiguity_breaker}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_02.uuid @@ -97,11 +97,11 @@ def test_ambiguous_label_uuid(setup_codes, parameter_type): Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL """ entity_01, entity_02, entity_03 = setup_codes - identifier = '{}'.format(entity_03.label) + identifier = f'{entity_03.label}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_01.uuid - identifier = '{}{}'.format(entity_03.label, OrmEntityLoader.label_ambiguity_breaker) + identifier = f'{entity_03.label}{OrmEntityLoader.label_ambiguity_breaker}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_03.uuid @@ -110,12 +110,12 @@ def test_entry_point_validation(setup_codes): """Verify that when an `entry_point` is defined in the constructor, it is respected in the validation.""" entity_01, entity_02, entity_03 = setup_codes parameter_type = CodeParamType(entry_point='arithmetic.add') - identifier = '{}'.format(entity_02.pk) + identifier = f'{entity_02.pk}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_02.uuid with pytest.raises(click.BadParameter): - identifier = '{}'.format(entity_03.pk) + identifier = f'{entity_03.pk}' result = parameter_type.convert(identifier, None, None) diff --git a/tests/cmdline/params/types/test_computer.py b/tests/cmdline/params/types/test_computer.py index f365e9684f..e776e1586d 100644 --- a/tests/cmdline/params/types/test_computer.py +++ b/tests/cmdline/params/types/test_computer.py @@ -44,7 +44,7 @@ def test_get_by_id(self): """ Verify that using the ID will retrieve the correct entity """ - identifier = '{}'.format(self.entity_01.pk) + identifier = f'{self.entity_01.pk}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) @@ -52,7 +52,7 @@ def test_get_by_uuid(self): """ Verify that using the UUID will retrieve the correct entity """ - identifier = '{}'.format(self.entity_01.uuid) + identifier = f'{self.entity_01.uuid}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) @@ -60,7 +60,7 @@ def test_get_by_label(self): """ Verify that using the LABEL will retrieve the correct entity """ - identifier = '{}'.format(self.entity_01.label) + identifier = f'{self.entity_01.label}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) @@ -71,11 +71,11 @@ def test_ambiguous_label_pk(self): Verify that using an ambiguous identifier gives precedence to the ID interpretation Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL """ - identifier = '{}'.format(self.entity_02.label) + identifier = f'{self.entity_02.label}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) - identifier = '{}{}'.format(self.entity_02.label, OrmEntityLoader.label_ambiguity_breaker) + identifier = f'{self.entity_02.label}{OrmEntityLoader.label_ambiguity_breaker}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_02.uuid) @@ -86,10 +86,10 @@ def test_ambiguous_label_uuid(self): Verify that using an ambiguous identifier gives precedence to the UUID 
interpretation Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL """ - identifier = '{}'.format(self.entity_03.label) + identifier = f'{self.entity_03.label}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) - identifier = '{}{}'.format(self.entity_03.label, OrmEntityLoader.label_ambiguity_breaker) + identifier = f'{self.entity_03.label}{OrmEntityLoader.label_ambiguity_breaker}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_03.uuid) diff --git a/tests/cmdline/params/types/test_data.py b/tests/cmdline/params/types/test_data.py index a1f469e092..e541574bbf 100644 --- a/tests/cmdline/params/types/test_data.py +++ b/tests/cmdline/params/types/test_data.py @@ -41,7 +41,7 @@ def test_get_by_id(self): """ Verify that using the ID will retrieve the correct entity """ - identifier = '{}'.format(self.entity_01.pk) + identifier = f'{self.entity_01.pk}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) @@ -49,7 +49,7 @@ def test_get_by_uuid(self): """ Verify that using the UUID will retrieve the correct entity """ - identifier = '{}'.format(self.entity_01.uuid) + identifier = f'{self.entity_01.uuid}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) @@ -57,7 +57,7 @@ def test_get_by_label(self): """ Verify that using the LABEL will retrieve the correct entity """ - identifier = '{}'.format(self.entity_01.label) + identifier = f'{self.entity_01.label}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) @@ -68,11 +68,11 @@ def test_ambiguous_label_pk(self): Verify that using an ambiguous identifier gives precedence to the ID interpretation Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL """ - identifier = '{}'.format(self.entity_02.label) + identifier = f'{self.entity_02.label}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) - identifier = '{}{}'.format(self.entity_02.label, OrmEntityLoader.label_ambiguity_breaker) + identifier = f'{self.entity_02.label}{OrmEntityLoader.label_ambiguity_breaker}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_02.uuid) @@ -83,10 +83,10 @@ def test_ambiguous_label_uuid(self): Verify that using an ambiguous identifier gives precedence to the UUID interpretation Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL """ - identifier = '{}'.format(self.entity_03.label) + identifier = f'{self.entity_03.label}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) - identifier = '{}{}'.format(self.entity_03.label, OrmEntityLoader.label_ambiguity_breaker) + identifier = f'{self.entity_03.label}{OrmEntityLoader.label_ambiguity_breaker}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_03.uuid) diff --git a/tests/cmdline/params/types/test_group.py b/tests/cmdline/params/types/test_group.py index 722c2d3fe7..f6e02dd4c5 100644 --- a/tests/cmdline/params/types/test_group.py +++ b/tests/cmdline/params/types/test_group.py @@ -40,7 +40,7 @@ def setup_groups(clear_database_before_test): def test_get_by_id(setup_groups, parameter_type): """Verify that using the ID will retrieve the correct 
entity.""" entity_01, entity_02, entity_03 = setup_groups - identifier = '{}'.format(entity_01.pk) + identifier = f'{entity_01.pk}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_01.uuid @@ -48,7 +48,7 @@ def test_get_by_id(setup_groups, parameter_type): def test_get_by_uuid(setup_groups, parameter_type): """Verify that using the UUID will retrieve the correct entity.""" entity_01, entity_02, entity_03 = setup_groups - identifier = '{}'.format(entity_01.uuid) + identifier = f'{entity_01.uuid}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_01.uuid @@ -56,7 +56,7 @@ def test_get_by_uuid(setup_groups, parameter_type): def test_get_by_label(setup_groups, parameter_type): """Verify that using the LABEL will retrieve the correct entity.""" entity_01, entity_02, entity_03 = setup_groups - identifier = '{}'.format(entity_01.label) + identifier = f'{entity_01.label}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_01.uuid @@ -68,11 +68,11 @@ def test_ambiguous_label_pk(setup_groups, parameter_type): breaker character will force the identifier to be treated as a LABEL. """ entity_01, entity_02, entity_03 = setup_groups - identifier = '{}'.format(entity_02.label) + identifier = f'{entity_02.label}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_01.uuid - identifier = '{}{}'.format(entity_02.label, OrmEntityLoader.label_ambiguity_breaker) + identifier = f'{entity_02.label}{OrmEntityLoader.label_ambiguity_breaker}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_02.uuid @@ -84,11 +84,11 @@ def test_ambiguous_label_uuid(setup_groups, parameter_type): ambiguity breaker character will force the identifier to be treated as a LABEL. 
""" entity_01, entity_02, entity_03 = setup_groups - identifier = '{}'.format(entity_03.label) + identifier = f'{entity_03.label}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_01.uuid - identifier = '{}{}'.format(entity_03.label, OrmEntityLoader.label_ambiguity_breaker) + identifier = f'{entity_03.label}{OrmEntityLoader.label_ambiguity_breaker}' result = parameter_type.convert(identifier, None, None) assert result.uuid == entity_03.uuid diff --git a/tests/cmdline/params/types/test_node.py b/tests/cmdline/params/types/test_node.py index 91e33527a1..ecd3b53d72 100644 --- a/tests/cmdline/params/types/test_node.py +++ b/tests/cmdline/params/types/test_node.py @@ -40,7 +40,7 @@ def test_get_by_id(self): """ Verify that using the ID will retrieve the correct entity """ - identifier = '{}'.format(self.entity_01.pk) + identifier = f'{self.entity_01.pk}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) @@ -48,7 +48,7 @@ def test_get_by_uuid(self): """ Verify that using the UUID will retrieve the correct entity """ - identifier = '{}'.format(self.entity_01.uuid) + identifier = f'{self.entity_01.uuid}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) @@ -56,7 +56,7 @@ def test_get_by_label(self): """ Verify that using the LABEL will retrieve the correct entity """ - identifier = '{}'.format(self.entity_01.label) + identifier = f'{self.entity_01.label}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) @@ -67,11 +67,11 @@ def test_ambiguous_label_pk(self): Verify that using an ambiguous identifier gives precedence to the ID interpretation Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL """ - identifier = '{}'.format(self.entity_02.label) + identifier = f'{self.entity_02.label}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) - identifier = '{}{}'.format(self.entity_02.label, OrmEntityLoader.label_ambiguity_breaker) + identifier = f'{self.entity_02.label}{OrmEntityLoader.label_ambiguity_breaker}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_02.uuid) @@ -82,10 +82,10 @@ def test_ambiguous_label_uuid(self): Verify that using an ambiguous identifier gives precedence to the UUID interpretation Appending the special ambiguity breaker character will force the identifier to be treated as a LABEL """ - identifier = '{}'.format(self.entity_03.label) + identifier = f'{self.entity_03.label}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_01.uuid) - identifier = '{}{}'.format(self.entity_03.label, OrmEntityLoader.label_ambiguity_breaker) + identifier = f'{self.entity_03.label}{OrmEntityLoader.label_ambiguity_breaker}' result = self.param.convert(identifier, None, None) self.assertEqual(result.uuid, self.entity_03.uuid) diff --git a/tests/cmdline/utils/test_multiline.py b/tests/cmdline/utils/test_multiline.py index fb7cc168f7..0073c731ce 100644 --- a/tests/cmdline/utils/test_multiline.py +++ b/tests/cmdline/utils/test_multiline.py @@ -26,4 +26,4 @@ def test_new_comment(non_interactive_editor): def test_edit_comment(non_interactive_editor): old_comment = 'OldComment' new_comment = edit_comment(old_cmt=old_comment) - assert new_comment == old_comment + 'Test' + assert new_comment == f'{old_comment}Test' diff --git 
a/tests/common/test_utils.py b/tests/common/test_utils.py index ff5abce66f..ea93d7eb04 100644 --- a/tests/common/test_utils.py +++ b/tests/common/test_utils.py @@ -224,10 +224,10 @@ def test_patterns(self): for sample in match_true: self.assertTrue( escaping.sql_string_match(string=sample, pattern=pattern), - "String '{}' should have matched pattern '{}'".format(sample, pattern) + f"String '{sample}' should have matched pattern '{pattern}'" ) for sample in match_false: self.assertFalse( escaping.sql_string_match(string=sample, pattern=pattern), - "String '{}' should not have matched pattern '{}'".format(sample, pattern) + f"String '{sample}' should not have matched pattern '{pattern}'" ) diff --git a/tests/conftest.py b/tests/conftest.py index fa9876bdff..0dae835ee6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -47,15 +47,15 @@ def edit_file(self, filename): environ = None try: process = subprocess.Popen( - '{} {}'.format(editor, filename), # This is the line that we change removing `shlex_quote` + f'{editor} {filename}', # This is the line that we change removing `shlex_quote` env=environ, shell=True, ) exit_code = process.wait() if exit_code != 0: - raise click.ClickException('{}: Editing failed!'.format(editor)) + raise click.ClickException(f'{editor}: Editing failed!') except OSError as exception: - raise click.ClickException('{}: Editing failed: {}'.format(editor, exception)) + raise click.ClickException(f'{editor}: Editing failed: {exception}') with patch.object(Editor, 'edit_file', edit_file): yield @@ -212,7 +212,7 @@ def _create_profile(name, **kwargs): 'database_name': kwargs.pop('database_name', name), 'database_username': kwargs.pop('database_username', 'user'), 'database_password': kwargs.pop('database_password', 'pass'), - 'repository_uri': 'file:///' + os.path.join(repository_dirpath, 'repository_' + name), + 'repository_uri': f"file:///{os.path.join(repository_dirpath, f'repository_{name}')}", } return Profile(name, profile_dictionary) diff --git a/tests/engine/test_calc_job.py b/tests/engine/test_calc_job.py index 94d18b6efe..08429c9a56 100644 --- a/tests/engine/test_calc_job.py +++ b/tests/engine/test_calc_job.py @@ -400,8 +400,8 @@ def test_parse_insufficient_data(process): logs = [log.message for log in orm.Log.objects.get_logs_for(process.node)] expected_logs = [ 'could not parse scheduler output: the `detailed_job_info` attribute is missing', - 'could not parse scheduler output: the `{}` file is missing'.format(filename_stderr), - 'could not parse scheduler output: the `{}` file is missing'.format(filename_stdout) + f'could not parse scheduler output: the `{filename_stderr}` file is missing', + f'could not parse scheduler output: the `{filename_stdout}` file is missing' ] for log in expected_logs: @@ -487,7 +487,7 @@ def raise_exception(*args, **kwargs): monkeypatch.setattr(DirectScheduler, 'parse_output', raise_exception) process.parse() logs = [log.message for log in orm.Log.objects.get_logs_for(process.node)] - expected_logs = ['the `parse_output` method of the scheduler excepted: {}'.format(msg)] + expected_logs = [f'the `parse_output` method of the scheduler excepted: {msg}'] for log in expected_logs: assert log in logs diff --git a/tests/engine/test_ports.py b/tests/engine/test_ports.py index 2a89d265d4..d4cd6d6246 100644 --- a/tests/engine/test_ports.py +++ b/tests/engine/test_ports.py @@ -91,7 +91,7 @@ def test_serialize_type_check(self): port_namespace = PortNamespace(base_namespace) port_namespace.create_port_namespace(nested_namespace) - with 
self.assertRaisesRegex(TypeError, '.*{}.*{}.*'.format(base_namespace, nested_namespace)): + with self.assertRaisesRegex(TypeError, f'.*{base_namespace}.*{nested_namespace}.*'): port_namespace.serialize({'some': {'nested': {'namespace': {Dict()}}}}) def test_lambda_default(self): diff --git a/tests/engine/test_process.py b/tests/engine/test_process.py index 3405c94a85..22f2c0391e 100644 --- a/tests/engine/test_process.py +++ b/tests/engine/test_process.py @@ -263,7 +263,7 @@ def test_process_type_with_entry_point(self): process_class = CalculationFactory(entry_point) process = process_class(inputs=inputs) - expected_process_type = 'aiida.calculations:{}'.format(entry_point) + expected_process_type = f'aiida.calculations:{entry_point}' self.assertEqual(process.node.process_type, expected_process_type) # Verify that process_class on the calculation node returns the original entry point class @@ -276,7 +276,7 @@ def test_process_type_without_entry_point(self): qualified class name """ process = test_processes.DummyProcess() - expected_process_type = '{}.{}'.format(process.__class__.__module__, process.__class__.__name__) + expected_process_type = f'{process.__class__.__module__}.{process.__class__.__name__}' self.assertEqual(process.node.process_type, expected_process_type) # Verify that process_class on the calculation node returns the original entry point class diff --git a/tests/engine/test_process_function.py b/tests/engine/test_process_function.py index 548b6a1dae..bbfbb39852 100644 --- a/tests/engine/test_process_function.py +++ b/tests/engine/test_process_function.py @@ -151,7 +151,7 @@ def test_process_state(self): def test_process_type(self): """Test that the process type correctly contains the module and name of original decorated function.""" _, node = self.function_defaults.run_get_node() - process_type = '{}.{}'.format(self.function_defaults.__module__, self.function_defaults.__name__) + process_type = f'{self.function_defaults.__module__}.{self.function_defaults.__name__}' self.assertEqual(node.process_type, process_type) def test_exit_status(self): diff --git a/tests/engine/test_work_chain.py b/tests/engine/test_work_chain.py index f66ce82656..040176894d 100644 --- a/tests/engine/test_work_chain.py +++ b/tests/engine/test_work_chain.py @@ -329,21 +329,21 @@ def test_run(self): # Check the steps that should have been run for step, finished in Wf.finished_steps.items(): if step not in ['step3', 'step4', 'is_b']: - self.assertTrue(finished, 'Step {} was not called by workflow'.format(step)) + self.assertTrue(finished, f'Step {step} was not called by workflow') # Try the elif(..) part finished_steps = launch.run(Wf, value=B, n=three) # Check the steps that should have been run for step, finished in finished_steps.items(): if step not in ['is_a', 'step2', 'step4']: - self.assertTrue(finished, 'Step {} was not called by workflow'.format(step)) + self.assertTrue(finished, f'Step {step} was not called by workflow') # Try the else... 
part finished_steps = launch.run(Wf, value=C, n=three) # Check the steps that should have been run for step, finished in finished_steps.items(): if step not in ['is_a', 'step2', 'is_b', 'step3']: - self.assertTrue(finished, 'Step {} was not called by workflow'.format(step)) + self.assertTrue(finished, f'Step {step} was not called by workflow') def test_incorrect_outline(self): @@ -507,21 +507,21 @@ def test_checkpointing(self): # Check the steps that should have been run for step, finished in finished_steps.items(): if step not in ['step3', 'step4', 'is_b']: - self.assertTrue(finished, 'Step {} was not called by workflow'.format(step)) + self.assertTrue(finished, f'Step {step} was not called by workflow') # Try the elif(..) part finished_steps = self._run_with_checkpoints(Wf, inputs={'value': B, 'n': three}) # Check the steps that should have been run for step, finished in finished_steps.items(): if step not in ['is_a', 'step2', 'step4']: - self.assertTrue(finished, 'Step {} was not called by workflow'.format(step)) + self.assertTrue(finished, f'Step {step} was not called by workflow') # Try the else... part finished_steps = self._run_with_checkpoints(Wf, inputs={'value': C, 'n': three}) # Check the steps that should have been run for step, finished in finished_steps.items(): if step not in ['is_a', 'step2', 'is_b', 'step3']: - self.assertTrue(finished, 'Step {} was not called by workflow'.format(step)) + self.assertTrue(finished, f'Step {step} was not called by workflow') def test_return(self): @@ -1446,6 +1446,4 @@ def test_unique_default_inputs(self): # Trying to load one of the inputs through the UUID should fail, # as both `child_one.a` and `child_two.a` should have the same UUID. node = load_node(uuid=node.get_incoming().get_node_by_label('child_one__a').uuid) - self.assertEqual( - len(uuids), len(nodes), 'Only {} unique UUIDS for {} input nodes'.format(len(uuids), len(nodes)) - ) + self.assertEqual(len(uuids), len(nodes), f'Only {len(uuids)} unique UUIDS for {len(nodes)} input nodes') diff --git a/tests/manage/backup/test_backup_script.py b/tests/manage/backup/test_backup_script.py index 6d780c31ed..0304dc4cad 100644 --- a/tests/manage/backup/test_backup_script.py +++ b/tests/manage/backup/test_backup_script.py @@ -191,8 +191,8 @@ def check_full_deserialization_serialization(self, input_string, backup_inst): self.assertEqual( input_variables, target_variables, - 'The test string {} did not succeed'.format(input_string) + ' the serialization deserialization test.\n' + - 'Input variables: {}\n'.format(input_variables) + 'Output variables: {}\n'.format(target_variables) + f'The test string {input_string} did not succeed' + ' the serialization deserialization test.\n' + + f'Input variables: {input_variables}\n' + f'Output variables: {target_variables}\n' ) def test_full_deserialization_serialization_1(self): @@ -248,19 +248,17 @@ def test_timezone_addition_and_dir_correction(self): self.assertIsNotNone( self._backup_setup_inst._oldest_object_bk.tzinfo, - 'Timezone info should not be none (timestamp: {}).'.format(self._backup_setup_inst._oldest_object_bk) + f'Timezone info should not be none (timestamp: {self._backup_setup_inst._oldest_object_bk}).' ) self.assertIsNotNone( self._backup_setup_inst._end_date_of_backup.tzinfo, - 'Timezone info should not be none (timestamp: {}).'.format(self._backup_setup_inst._end_date_of_backup) + f'Timezone info should not be none (timestamp: {self._backup_setup_inst._end_date_of_backup}).' 
) self.assertIsNotNone( self._backup_setup_inst._internal_end_date_of_backup.tzinfo, - 'Timezone info should not be none (timestamp: {}).'.format( - self._backup_setup_inst._internal_end_date_of_backup - ) + f'Timezone info should not be none (timestamp: {self._backup_setup_inst._internal_end_date_of_backup}).' ) # The destination directory of the _backup_setup_inst @@ -351,7 +349,7 @@ def fill_repo(self): def create_backup_scripts(self, tmp_folder): """Utility function to create backup scripts""" - backup_full_path = '{}/{}/{}/'.format(tmp_folder, self._aiida_rel_path, self._backup_rel_path) + backup_full_path = f'{tmp_folder}/{self._aiida_rel_path}/{self._backup_rel_path}/' # The predefined answers for the setup script ac = utils.ArrayCounter() answers = [ diff --git a/tests/manage/backup/test_backup_setup_script.py b/tests/manage/backup/test_backup_setup_script.py index e6436dd986..0ffa6718f7 100644 --- a/tests/manage/backup/test_backup_setup_script.py +++ b/tests/manage/backup/test_backup_setup_script.py @@ -111,8 +111,7 @@ def test_full_backup_setup_script(self): backup_conf_records is not None and len(backup_conf_records) == 4 and 'backup_dest' in backup_conf_records and 'backup_info.json.tmpl' in backup_conf_records and 'start_backup.py' in backup_conf_records and 'backup_info.json' in backup_conf_records, - 'The created backup folder does not have the expected files. It contains: {}.' - ''.format(backup_conf_records) + f'The created backup folder does not have the expected files. It contains: {backup_conf_records}.' ) # Check the content of the main backup configuration file diff --git a/tests/manage/configuration/test_config.py b/tests/manage/configuration/test_config.py index e9bb5e620b..a4e47eadb9 100644 --- a/tests/manage/configuration/test_config.py +++ b/tests/manage/configuration/test_config.py @@ -74,7 +74,7 @@ def test_environment_variable_set_single_path_without_config_folder(self): # py directory = tempfile.mkdtemp() # Set the environment variable and call configuration initialization - env_variable = '{}'.format(directory) + env_variable = f'{directory}' os.environ[settings.DEFAULT_AIIDA_PATH_VARIABLE] = env_variable settings.set_configuration_directory() @@ -93,7 +93,7 @@ def test_environment_variable_set_single_path_with_config_folder(self): # pylin os.makedirs(os.path.join(directory, settings.DEFAULT_CONFIG_DIR_NAME)) # Set the environment variable and call configuration initialization - env_variable = '{}'.format(directory) + env_variable = f'{directory}' os.environ[settings.DEFAULT_AIIDA_PATH_VARIABLE] = env_variable settings.set_configuration_directory() @@ -116,7 +116,7 @@ def test_environment_variable_path_including_config_folder(self): # pylint: dis directory = tempfile.mkdtemp() # Set the environment variable with a path that include base folder name and call config initialization - env_variable = '{}'.format(os.path.join(directory, settings.DEFAULT_CONFIG_DIR_NAME)) + env_variable = f'{os.path.join(directory, settings.DEFAULT_CONFIG_DIR_NAME)}' os.environ[settings.DEFAULT_AIIDA_PATH_VARIABLE] = env_variable settings.set_configuration_directory() @@ -136,7 +136,7 @@ def test_environment_variable_set_multiple_path(self): # pylint: disable=invali directory_c = tempfile.mkdtemp() # Set the environment variable to contain three paths and call configuration initialization - env_variable = '{}:{}:{}'.format(directory_a, directory_b, directory_c) + env_variable = f'{directory_a}:{directory_b}:{directory_c}' os.environ[settings.DEFAULT_AIIDA_PATH_VARIABLE] = 
env_variable settings.set_configuration_directory() diff --git a/tests/manage/configuration/test_profile.py b/tests/manage/configuration/test_profile.py index 9599d27407..f60140b2d6 100644 --- a/tests/manage/configuration/test_profile.py +++ b/tests/manage/configuration/test_profile.py @@ -35,7 +35,7 @@ def setUpClass(cls, *args, **kwargs): 'database_hostname': 'localhost', 'database_username': 'user', 'database_password': 'pass', - 'repository_uri': 'file:///' + os.path.join('/some/path', 'repository_' + cls.profile_name), + 'repository_uri': f"file:///{os.path.join('/some/path', f'repository_{cls.profile_name}')}", } cls.profile = Profile(cls.profile_name, cls.profile_dictionary) diff --git a/tests/manage/external/test_rmq.py b/tests/manage/external/test_rmq.py index 5c27e6962f..761dcfb355 100644 --- a/tests/manage/external/test_rmq.py +++ b/tests/manage/external/test_rmq.py @@ -31,7 +31,7 @@ def test_get_rmq_url(args, kwargs, expected): url = rmq.get_rmq_url(*args, **kwargs) assert url.startswith(expected) for key, value in kwargs.items(): - assert '{}={}'.format(key, value) in url + assert f'{key}={value}' in url else: with pytest.raises(expected): rmq.get_rmq_url(*args, **kwargs) diff --git a/tests/orm/implementation/test_comments.py b/tests/orm/implementation/test_comments.py index 7bacd6bc1b..1f5eaf3f33 100644 --- a/tests/orm/implementation/test_comments.py +++ b/tests/orm/implementation/test_comments.py @@ -62,7 +62,7 @@ def test_creation(self): # Store the comment.ctime before the store as a reference now = timezone.now() comment_ctime_before_store = comment.ctime - self.assertTrue(now > comment.ctime, '{} is not smaller than now {}'.format(comment.ctime, now)) + self.assertTrue(now > comment.ctime, f'{comment.ctime} is not smaller than now {now}') comment.store() comment_ctime = comment.ctime @@ -71,7 +71,7 @@ def test_creation(self): # The comment.ctime should have been unchanged, but the comment.mtime should have changed self.assertEqual(comment.ctime, comment_ctime_before_store) self.assertIsNotNone(comment.mtime) - self.assertTrue(now < comment.mtime, '{} is not larger than now {}'.format(comment.mtime, now)) + self.assertTrue(now < comment.mtime, f'{comment.mtime} is not larger than now {now}') # After storing self.assertTrue(isinstance(comment.id, int)) @@ -328,15 +328,14 @@ def test_deleting_non_existent_entities(self): # delete_many should return an empty list deleted_entities = self.backend.comments.delete_many(filters={'id': id_}) self.assertEqual( - deleted_entities, [], - msg='No entities should have been deleted, since Comment id {} does not exist'.format(id_) + deleted_entities, [], msg=f'No entities should have been deleted, since Comment id {id_} does not exist' ) # Try to delete non-existing Comment - using delete # NotExistent should be raised, since no entities are found with self.assertRaises(exceptions.NotExistent) as exc: self.backend.comments.delete(comment_id=id_) - self.assertIn("Comment with id '{}' not found".format(id_), str(exc.exception)) + self.assertIn(f"Comment with id '{id_}' not found", str(exc.exception)) # Try to delete existing and non-existing Comment - using delete_many # delete_many should return a list that *only* includes the existing Comment @@ -344,7 +343,7 @@ def test_deleting_non_existent_entities(self): deleted_entities = self.backend.comments.delete_many(filters=filters) self.assertEqual([comment_id], deleted_entities, - msg='Only Comment id {} should be returned from delete_many'.format(comment_id)) + msg=f'Only Comment id 
{comment_id} should be returned from delete_many') # Make sure the existing Comment was deleted builder = orm.QueryBuilder().append(orm.Comment, filters={'uuid': comment_uuid}) diff --git a/tests/orm/implementation/test_groups.py b/tests/orm/implementation/test_groups.py index 3e19e88cc0..c4c45b0ecb 100644 --- a/tests/orm/implementation/test_groups.py +++ b/tests/orm/implementation/test_groups.py @@ -118,7 +118,7 @@ def test_add_nodes_skip_orm_batch(): # Add nodes to groups using different batch size. Check in the end the correct addition. batch_sizes = (1, 3, 10, 1000) for batch_size in batch_sizes: - group = orm.Group(label='test_batches_' + str(batch_size)).store() + group = orm.Group(label=f'test_batches_{str(batch_size)}').store() group.backend_entity.add_nodes(nodes, skip_orm=True, batch_size=batch_size) assert set(_.pk for _ in nodes) == set(_.pk for _ in group.nodes) diff --git a/tests/orm/implementation/test_logs.py b/tests/orm/implementation/test_logs.py index 54057cfee4..28a159a098 100644 --- a/tests/orm/implementation/test_logs.py +++ b/tests/orm/implementation/test_logs.py @@ -167,7 +167,7 @@ def test_delete_many_ids(self): self.assertEqual( count_logs_found, len(log_uuids), - msg='There should be {} Logs, instead {} Log(s) was/were found'.format(len(log_uuids), count_logs_found) + msg=f'There should be {len(log_uuids)} Logs, instead {count_logs_found} Log(s) was/were found' ) # Delete last two logs (log2, log3) @@ -198,7 +198,7 @@ def test_delete_many_dbnode_id(self): self.assertEqual( count_logs_found, len(log_uuids), - msg='There should be {} Logs, instead {} Log(s) was/were found'.format(len(log_uuids), count_logs_found) + msg=f'There should be {len(log_uuids)} Logs, instead {count_logs_found} Log(s) was/were found' ) # Delete logs for self.node @@ -279,23 +279,20 @@ def test_deleting_non_existent_entities(self): # delete_many should return an empty list deleted_entities = self.backend.logs.delete_many(filters={'id': id_}) self.assertEqual( - deleted_entities, [], - msg='No entities should have been deleted, since Log id {} does not exist'.format(id_) + deleted_entities, [], msg=f'No entities should have been deleted, since Log id {id_} does not exist' ) # Try to delete non-existing Log - using delete # NotExistent should be raised, since no entities are found with self.assertRaises(exceptions.NotExistent) as exc: self.backend.logs.delete(log_id=id_) - self.assertIn("Log with id '{}' not found".format(id_), str(exc.exception)) + self.assertIn(f"Log with id '{id_}' not found", str(exc.exception)) # Try to delete existing and non-existing Log - using delete_many # delete_many should return a list that *only* includes the existing Logs filters = {'id': {'in': [id_, log_id]}} deleted_entities = self.backend.logs.delete_many(filters=filters) - self.assertEqual([log_id], - deleted_entities, - msg='Only Log id {} should be returned from delete_many'.format(log_id)) + self.assertEqual([log_id], deleted_entities, msg=f'Only Log id {log_id} should be returned from delete_many') # Make sure the existing Log was deleted builder = orm.QueryBuilder().append(orm.Log, filters={'uuid': log_uuid}) diff --git a/tests/orm/implementation/test_nodes.py b/tests/orm/implementation/test_nodes.py index c6086d01f6..5653026dfc 100644 --- a/tests/orm/implementation/test_nodes.py +++ b/tests/orm/implementation/test_nodes.py @@ -66,7 +66,7 @@ def test_creation(self): # Store the node.ctime before the store as a reference now = timezone.now() node_ctime_before_store = node.ctime - self.assertTrue(now > 
node.ctime, '{} is not smaller than now {}'.format(node.ctime, now)) + self.assertTrue(now > node.ctime, f'{node.ctime} is not smaller than now {now}') node.store() node_ctime = node.ctime @@ -75,7 +75,7 @@ def test_creation(self): # The node.ctime should have been unchanged, but the node.mtime should have changed self.assertEqual(node.ctime, node_ctime_before_store) self.assertIsNotNone(node.mtime) - self.assertTrue(now < node.mtime, '{} is not larger than now {}'.format(node.mtime, now)) + self.assertTrue(now < node.mtime, f'{node.mtime} is not larger than now {now}') # After storing self.assertTrue(isinstance(node.id, int)) diff --git a/tests/orm/implementation/test_utils.py b/tests/orm/implementation/test_utils.py index d04ca4e3b6..d1444d98a4 100644 --- a/tests/orm/implementation/test_utils.py +++ b/tests/orm/implementation/test_utils.py @@ -21,7 +21,7 @@ class TestOrmImplementationUtils(AiidaTestCase): def test_invalid_attribute_extra_key(self): """Test supplying an invalid key to the `validate_attribute_extra_key` method.""" non_string_key = 5 - field_separator_key = 'invalid' + FIELD_SEPARATOR + 'key' + field_separator_key = f'invalid{FIELD_SEPARATOR}key' with self.assertRaises(exceptions.ValidationError): validate_attribute_extra_key(non_string_key) diff --git a/tests/orm/test_autogroups.py b/tests/orm/test_autogroups.py index 23ca495af9..e75915395f 100644 --- a/tests/orm/test_autogroups.py +++ b/tests/orm/test_autogroups.py @@ -33,51 +33,51 @@ def test_get_or_create(self): expected_label = label_prefix self.assertEqual( group.label, expected_label, - "The auto-group should be labelled '{}', it is instead '{}'".format(expected_label, group.label) + f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" ) # Second group (only one with no suffix existing) autogroup = Autogroup() autogroup.set_group_label_prefix(label_prefix) group = autogroup.get_or_create_group() - expected_label = label_prefix + '_1' + expected_label = f'{label_prefix}_1' self.assertEqual( group.label, expected_label, - "The auto-group should be labelled '{}', it is instead '{}'".format(expected_label, group.label) + f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" ) # Second group (only one suffix _1 existing) autogroup = Autogroup() autogroup.set_group_label_prefix(label_prefix) group = autogroup.get_or_create_group() - expected_label = label_prefix + '_2' + expected_label = f'{label_prefix}_2' self.assertEqual( group.label, expected_label, - "The auto-group should be labelled '{}', it is instead '{}'".format(expected_label, group.label) + f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" ) # I create a group with a large integer suffix (9) - AutoGroup(label='{}_9'.format(label_prefix)).store() + AutoGroup(label=f'{label_prefix}_9').store() # The next autogroup should become number 10 autogroup = Autogroup() autogroup.set_group_label_prefix(label_prefix) group = autogroup.get_or_create_group() - expected_label = label_prefix + '_10' + expected_label = f'{label_prefix}_10' self.assertEqual( group.label, expected_label, - "The auto-group should be labelled '{}', it is instead '{}'".format(expected_label, group.label) + f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" ) # I create a group with a non-integer suffix (15a), it should be ignored - AutoGroup(label='{}_15b'.format(label_prefix)).store() + AutoGroup(label=f'{label_prefix}_15b').store() # The next autogroup should 
become number 11 autogroup = Autogroup() autogroup.set_group_label_prefix(label_prefix) group = autogroup.get_or_create_group() - expected_label = label_prefix + '_11' + expected_label = f'{label_prefix}_11' self.assertEqual( group.label, expected_label, - "The auto-group should be labelled '{}', it is instead '{}'".format(expected_label, group.label) + f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" ) def test_get_or_create_invalid_prefix(self): @@ -86,7 +86,7 @@ def test_get_or_create_invalid_prefix(self): label_prefix = 'new_test_prefix_TestAutogroup' # I create a group with the same prefix, but followed by non-underscore # characters. These should be ignored in the logic. - AutoGroup(label='{}xx'.format(label_prefix)).store() + AutoGroup(label=f'{label_prefix}xx').store() # Check that there are no groups to begin with queryb = QueryBuilder().append(AutoGroup, filters={'label': label_prefix}) @@ -101,15 +101,15 @@ def test_get_or_create_invalid_prefix(self): expected_label = label_prefix self.assertEqual( group.label, expected_label, - "The auto-group should be labelled '{}', it is instead '{}'".format(expected_label, group.label) + f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" ) # Second group (only one with no suffix existing) autogroup = Autogroup() autogroup.set_group_label_prefix(label_prefix) group = autogroup.get_or_create_group() - expected_label = label_prefix + '_1' + expected_label = f'{label_prefix}_1' self.assertEqual( group.label, expected_label, - "The auto-group should be labelled '{}', it is instead '{}'".format(expected_label, group.label) + f"The auto-group should be labelled '{expected_label}', it is instead '{group.label}'" ) diff --git a/tests/orm/test_logs.py b/tests/orm/test_logs.py index a668caf1de..5798bbcda1 100644 --- a/tests/orm/test_logs.py +++ b/tests/orm/test_logs.py @@ -239,7 +239,7 @@ def test_db_log_handler(self): self.assertEqual(logs[0].message, message) # Launching a second log message ensuring that both messages are correctly stored - message2 = message + ' - Second message' + message2 = f'{message} - Second message' node.logger.critical(message2) order_by = [OrderSpecifier('time', ASCENDING)] diff --git a/tests/orm/test_querybuilder.py b/tests/orm/test_querybuilder.py index 11fbcbdf24..8c320bdb6d 100644 --- a/tests/orm/test_querybuilder.py +++ b/tests/orm/test_querybuilder.py @@ -834,34 +834,30 @@ def test_attribute_type(self): # Here I am testing which values contain a number 1. 
# Both 1 and 1.0 are legitimate values if ask for either 1 or 1.0 for val in (1.0, 1): - qb = orm.QueryBuilder().append(orm.Node, filters={'attributes.{}'.format(key): val}, project='uuid') + qb = orm.QueryBuilder().append(orm.Node, filters={f'attributes.{key}': val}, project='uuid') res = [str(_) for _, in qb.all()] self.assertEqual(set(res), set((n_float.uuid, n_int.uuid))) - qb = orm.QueryBuilder().append(orm.Node, filters={'attributes.{}'.format(key): {'>': 0.5}}, project='uuid') + qb = orm.QueryBuilder().append(orm.Node, filters={f'attributes.{key}': {'>': 0.5}}, project='uuid') res = [str(_) for _, in qb.all()] self.assertEqual(set(res), set((n_float.uuid, n_int.uuid))) - qb = orm.QueryBuilder().append(orm.Node, filters={'attributes.{}'.format(key): {'<': 1.5}}, project='uuid') + qb = orm.QueryBuilder().append(orm.Node, filters={f'attributes.{key}': {'<': 1.5}}, project='uuid') res = [str(_) for _, in qb.all()] self.assertEqual(set(res), set((n_float.uuid, n_int.uuid))) # Now I am testing the boolean value: - qb = orm.QueryBuilder().append(orm.Node, filters={'attributes.{}'.format(key): True}, project='uuid') + qb = orm.QueryBuilder().append(orm.Node, filters={f'attributes.{key}': True}, project='uuid') res = [str(_) for _, in qb.all()] self.assertEqual(set(res), set((n_bool.uuid,))) - qb = orm.QueryBuilder().append(orm.Node, filters={'attributes.{}'.format(key): {'like': '%n%'}}, project='uuid') + qb = orm.QueryBuilder().append(orm.Node, filters={f'attributes.{key}': {'like': '%n%'}}, project='uuid') res = [str(_) for _, in qb.all()] self.assertEqual(set(res), set((n_str2.uuid,))) - qb = orm.QueryBuilder().append( - orm.Node, filters={'attributes.{}'.format(key): { - 'ilike': 'On%' - }}, project='uuid' - ) + qb = orm.QueryBuilder().append(orm.Node, filters={f'attributes.{key}': {'ilike': 'On%'}}, project='uuid') res = [str(_) for _, in qb.all()] self.assertEqual(set(res), set((n_str2.uuid,))) - qb = orm.QueryBuilder().append(orm.Node, filters={'attributes.{}'.format(key): {'like': '1'}}, project='uuid') + qb = orm.QueryBuilder().append(orm.Node, filters={f'attributes.{key}': {'like': '1'}}, project='uuid') res = [str(_) for _, in qb.all()] self.assertEqual(set(res), set((n_str.uuid,))) - qb = orm.QueryBuilder().append(orm.Node, filters={'attributes.{}'.format(key): {'==': '1'}}, project='uuid') + qb = orm.QueryBuilder().append(orm.Node, filters={f'attributes.{key}': {'==': '1'}}, project='uuid') res = [str(_) for _, in qb.all()] self.assertEqual(set(res), set((n_str.uuid,))) if configuration.PROFILE.database_backend == 'sqlalchemy': @@ -869,11 +865,7 @@ def test_attribute_type(self): # so I exclude. Not the nicest way, But I would like to keep this piece # of code because of the initialization part, that would need to be # duplicated or wrapped otherwise. 
- qb = orm.QueryBuilder().append( - orm.Node, filters={'attributes.{}'.format(key): { - 'of_length': 3 - }}, project='uuid' - ) + qb = orm.QueryBuilder().append(orm.Node, filters={f'attributes.{key}': {'of_length': 3}}, project='uuid') res = [str(_) for _, in qb.all()] self.assertEqual(set(res), set((n_arr.uuid,))) @@ -987,7 +979,7 @@ def test_joins2(self): students = [orm.Data() for i in range(10)] advisors = [orm.CalculationNode() for i in range(3)] for i, a in enumerate(advisors): - a.label = 'advisor {}'.format(i) + a.label = f'advisor {i}' a.set_attribute('advisor_id', i) for n in advisors + students: @@ -995,15 +987,15 @@ def test_joins2(self): # advisor 0 get student 0, 1 for i in (0, 1): - students[i].add_incoming(advisors[0], link_type=LinkType.CREATE, link_label='is_advisor_{}'.format(i)) + students[i].add_incoming(advisors[0], link_type=LinkType.CREATE, link_label=f'is_advisor_{i}') # advisor 1 get student 3, 4 for i in (3, 4): - students[i].add_incoming(advisors[1], link_type=LinkType.CREATE, link_label='is_advisor_{}'.format(i)) + students[i].add_incoming(advisors[1], link_type=LinkType.CREATE, link_label=f'is_advisor_{i}') # advisor 2 get student 5, 6, 7 for i in (5, 6, 7): - students[i].add_incoming(advisors[2], link_type=LinkType.CREATE, link_label='is_advisor_{}'.format(i)) + students[i].add_incoming(advisors[2], link_type=LinkType.CREATE, link_label=f'is_advisor_{i}') # let's add a differnt relationship than advisor: students[9].add_incoming(advisors[2], link_type=LinkType.CREATE, link_label='lover') @@ -1332,7 +1324,7 @@ def test_len_results(self): # adding 5 links going out: for inode in range(5): output_node = orm.Data().store() - output_node.add_incoming(parent, link_type=LinkType.CREATE, link_label='link_{}'.format(inode)) + output_node.add_incoming(parent, link_type=LinkType.CREATE, link_label=f'link_{inode}') for projection in ('id', '*'): qb = orm.QueryBuilder() qb.append(orm.CalculationNode, filters={'id': parent.id}, tag='parent', project=projection) diff --git a/tests/parsers/test_parser.py b/tests/parsers/test_parser.py index 8e1ac45ccb..934a7c8230 100644 --- a/tests/parsers/test_parser.py +++ b/tests/parsers/test_parser.py @@ -103,7 +103,7 @@ def test_parse_from_node(self): node.store() retrieved = orm.FolderData() - retrieved.put_object_from_filelike(io.StringIO('{}'.format(summed)), output_filename) + retrieved.put_object_from_filelike(io.StringIO(f'{summed}'), output_filename) retrieved.store() retrieved.add_incoming(node, link_type=LinkType.CREATE, link_label='retrieved') diff --git a/tests/plugins/test_utils.py b/tests/plugins/test_utils.py index f0da7b2e4c..c39a56704e 100644 --- a/tests/plugins/test_utils.py +++ b/tests/plugins/test_utils.py @@ -30,7 +30,7 @@ def create_dynamic_plugin_module(plugin, plugin_version, add_module_to_sys=True, import uuid # Create a new module with a unique name and add the `plugin` and `plugin_version` as attributes - module_name = 'TestModule{}'.format(str(uuid.uuid4())[:5]) + module_name = f'TestModule{str(uuid.uuid4())[:5]}' dynamic_module = types.ModuleType(module_name, 'Dynamically created module for testing purposes') setattr(plugin, '__module__', dynamic_module.__name__) # pylint: disable=no-member setattr(dynamic_module, plugin.__name__, plugin) diff --git a/tests/restapi/conftest.py b/tests/restapi/conftest.py index f9a1945ce4..d4ae82bae0 100644 --- a/tests/restapi/conftest.py +++ b/tests/restapi/conftest.py @@ -44,9 +44,7 @@ def _restapi_server(restapi=None): def server_url(): from aiida.restapi.common.config 
import CLI_DEFAULTS, API_CONFIG - return 'http://{hostname}:{port}{api}'.format( - hostname=CLI_DEFAULTS['HOST_NAME'], port=CLI_DEFAULTS['PORT'], api=API_CONFIG['PREFIX'] - ) + return f"http://{CLI_DEFAULTS['HOST_NAME']}:{CLI_DEFAULTS['PORT']}{API_CONFIG['PREFIX']}" @pytest.fixture diff --git a/tests/restapi/test_identifiers.py b/tests/restapi/test_identifiers.py index 92ed4eb9d0..8116a47741 100644 --- a/tests/restapi/test_identifiers.py +++ b/tests/restapi/test_identifiers.py @@ -39,35 +39,23 @@ def test_get_full_type_filters(self): ) with self.assertRaises(ValueError): - get_full_type_filters( - 'not_at_{like}_the_end{concat}process_type'.format( - like=LIKE_OPERATOR_CHARACTER, concat=FULL_TYPE_CONCATENATOR - ) - ) + get_full_type_filters(f'not_at_{LIKE_OPERATOR_CHARACTER}_the_end{FULL_TYPE_CONCATENATOR}process_type') with self.assertRaises(ValueError): - get_full_type_filters( - 'node_type{concat}not_at_{like}_the_end'.format( - like=LIKE_OPERATOR_CHARACTER, concat=FULL_TYPE_CONCATENATOR - ) - ) + get_full_type_filters(f'node_type{FULL_TYPE_CONCATENATOR}not_at_{LIKE_OPERATOR_CHARACTER}_the_end') # Equals on both - filters = get_full_type_filters('node_type{concat}process_type'.format(concat=FULL_TYPE_CONCATENATOR)) + filters = get_full_type_filters(f'node_type{FULL_TYPE_CONCATENATOR}process_type') self.assertEqual(filters['node_type'], 'node\\_type') self.assertEqual(filters['process_type'], 'process\\_type') # Like on `node_type` - filters = get_full_type_filters( - 'node_type{like}{concat}process_type'.format(like=LIKE_OPERATOR_CHARACTER, concat=FULL_TYPE_CONCATENATOR) - ) + filters = get_full_type_filters(f'node_type{LIKE_OPERATOR_CHARACTER}{FULL_TYPE_CONCATENATOR}process_type') self.assertEqual(filters['node_type'], {'like': 'node\\_type%'}) self.assertEqual(filters['process_type'], 'process\\_type') # Like on `process_type` - filters = get_full_type_filters( - 'node_type{concat}process_type{like}'.format(like=LIKE_OPERATOR_CHARACTER, concat=FULL_TYPE_CONCATENATOR) - ) + filters = get_full_type_filters(f'node_type{FULL_TYPE_CONCATENATOR}process_type{LIKE_OPERATOR_CHARACTER}') self.assertEqual(filters['node_type'], 'node\\_type') self.assertEqual(filters['process_type'], {'like': 'process\\_type%'}) diff --git a/tests/restapi/test_routes.py b/tests/restapi/test_routes.py index 30dc4af061..13ffe0c417 100644 --- a/tests/restapi/test_routes.py +++ b/tests/restapi/test_routes.py @@ -265,7 +265,7 @@ def compare_extra_response_data(self, node_type, url, response, uuid=None): self.assertEqual(response['path'], path) self.assertEqual(response['id'], uuid) self.assertEqual(response['query_string'], query_string) - self.assertEqual(response['url'], 'http://localhost' + url) + self.assertEqual(response['url'], f'http://localhost{url}') self.assertEqual(response['url_root'], 'http://localhost/') # node details and list with limit, offset, page, perpage @@ -348,7 +348,7 @@ def test_server(self): """ Test that /server endpoint returns AiiDA version """ - url = self.get_url_prefix() + '/server' + url = f'{self.get_url_prefix()}/server' from aiida import __version__ with self.app.test_client() as client: @@ -362,7 +362,7 @@ def test_cors_headers(self): """ Test that REST API sets cross-origin resource sharing headers """ - url = self.get_url_prefix() + '/server' + url = f'{self.get_url_prefix()}/server' with self.app.test_client() as client: response = client.get(url) @@ -377,7 +377,7 @@ def test_computers_details(self): """ node_uuid = self.get_dummy_data()['computers'][1]['uuid'] 
RESTApiTestCase.process_test( - self, 'computers', '/computers/' + str(node_uuid), expected_list_ids=[1], uuid=node_uuid + self, 'computers', f'/computers/{str(node_uuid)}', expected_list_ids=[1], uuid=node_uuid ) def test_computers_list(self): @@ -490,7 +490,7 @@ def test_computers_filter_id1(self): """ node_pk = self.get_dummy_data()['computers'][1]['id'] - RESTApiTestCase.process_test(self, 'computers', '/computers?id=' + str(node_pk), expected_list_ids=[1]) + RESTApiTestCase.process_test(self, 'computers', f'/computers?id={str(node_pk)}', expected_list_ids=[1]) def test_computers_filter_id2(self): """ @@ -499,7 +499,7 @@ def test_computers_filter_id2(self): """ node_pk = self.get_dummy_data()['computers'][1]['id'] RESTApiTestCase.process_test( - self, 'computers', '/computers?id>' + str(node_pk) + '&orderby=+id', expected_range=[2, None] + self, 'computers', f'/computers?id>{str(node_pk)}&orderby=+id', expected_range=[2, None] ) def test_computers_filter_pk(self): @@ -508,7 +508,7 @@ def test_computers_filter_pk(self): list (e.g. id=1) """ node_pk = self.get_dummy_data()['computers'][1]['id'] - RESTApiTestCase.process_test(self, 'computers', '/computers?pk=' + str(node_pk), expected_list_ids=[1]) + RESTApiTestCase.process_test(self, 'computers', f'/computers?pk={str(node_pk)}', expected_list_ids=[1]) def test_computers_filter_name(self): """ @@ -563,7 +563,7 @@ def test_computers_orderby_name_asc(self): """ node_pk = self.get_dummy_data()['computers'][0]['id'] RESTApiTestCase.process_test( - self, 'computers', '/computers?pk>' + str(node_pk) + '&orderby=name', expected_list_ids=[1, 2, 3, 4] + self, 'computers', f'/computers?pk>{str(node_pk)}&orderby=name', expected_list_ids=[1, 2, 3, 4] ) def test_computers_orderby_name_asc_sign(self): @@ -573,7 +573,7 @@ def test_computers_orderby_name_asc_sign(self): """ node_pk = self.get_dummy_data()['computers'][0]['id'] RESTApiTestCase.process_test( - self, 'computers', '/computers?pk>' + str(node_pk) + '&orderby=+name', expected_list_ids=[1, 2, 3, 4] + self, 'computers', f'/computers?pk>{str(node_pk)}&orderby=+name', expected_list_ids=[1, 2, 3, 4] ) def test_computers_orderby_name_desc(self): @@ -583,7 +583,7 @@ def test_computers_orderby_name_desc(self): """ node_pk = self.get_dummy_data()['computers'][0]['id'] RESTApiTestCase.process_test( - self, 'computers', '/computers?pk>' + str(node_pk) + '&orderby=-name', expected_list_ids=[4, 3, 2, 1] + self, 'computers', f'/computers?pk>{str(node_pk)}&orderby=-name', expected_list_ids=[4, 3, 2, 1] ) def test_computers_orderby_scheduler_type_asc(self): @@ -595,7 +595,7 @@ def test_computers_orderby_scheduler_type_asc(self): RESTApiTestCase.process_test( self, 'computers', - '/computers?transport_type="ssh"&pk>' + str(node_pk) + '&orderby=scheduler_type', + f"/computers?transport_type=\"ssh\"&pk>{str(node_pk)}&orderby=scheduler_type", expected_list_ids=[1, 4, 2] ) @@ -608,7 +608,7 @@ def test_comp_orderby_scheduler_ascsign(self): RESTApiTestCase.process_test( self, 'computers', - '/computers?transport_type="ssh"&pk>' + str(node_pk) + '&orderby=+scheduler_type', + f"/computers?transport_type=\"ssh\"&pk>{str(node_pk)}&orderby=+scheduler_type", expected_list_ids=[1, 4, 2] ) @@ -621,7 +621,7 @@ def test_computers_orderby_schedulertype_desc(self): RESTApiTestCase.process_test( self, 'computers', - '/computers?pk>' + str(node_pk) + '&transport_type="ssh"&orderby=-scheduler_type', + f"/computers?pk>{str(node_pk)}&transport_type=\"ssh\"&orderby=-scheduler_type", expected_list_ids=[2, 4, 1] ) @@ -636,7 +636,7 @@ 
def test_computers_orderby_mixed1(self): RESTApiTestCase.process_test( self, 'computers', - '/computers?pk>' + str(node_pk) + '&orderby=transport_type,id', + f'/computers?pk>{str(node_pk)}&orderby=transport_type,id', expected_list_ids=[3, 1, 2, 4] ) @@ -650,7 +650,7 @@ def test_computers_orderby_mixed2(self): RESTApiTestCase.process_test( self, 'computers', - '/computers?pk>' + str(node_pk) + '&orderby=-scheduler_type,name', + f'/computers?pk>{str(node_pk)}&orderby=-scheduler_type,name', expected_list_ids=[2, 3, 4, 1] ) @@ -690,7 +690,7 @@ def test_computers_filter_mixed1(self): """ node_pk = self.get_dummy_data()['computers'][0]['id'] RESTApiTestCase.process_test( - self, 'computers', '/computers?id>' + str(node_pk) + '&hostname="test1.epfl.ch"', expected_list_ids=[1] + self, 'computers', f"/computers?id>{str(node_pk)}&hostname=\"test1.epfl.ch\"", expected_list_ids=[1] ) def test_computers_filter_mixed2(self): @@ -702,7 +702,7 @@ def test_computers_filter_mixed2(self): RESTApiTestCase.process_test( self, 'computers', - '/computers?id>' + str(node_pk) + '&hostname="test3.epfl.ch"&transport_type="ssh"', + f"/computers?id>{str(node_pk)}&hostname=\"test3.epfl.ch\"&transport_type=\"ssh\"", empty_list=True ) @@ -713,7 +713,7 @@ def test_computers_mixed1(self): """ node_pk = self.get_dummy_data()['computers'][0]['id'] RESTApiTestCase.process_test( - self, 'computers', '/computers?id>' + str(node_pk) + '&limit=2&offset=3&orderby=+id', expected_list_ids=[4] + self, 'computers', f'/computers?id>{str(node_pk)}&limit=2&offset=3&orderby=+id', expected_list_ids=[4] ) def test_computers_mixed2(self): @@ -722,10 +722,7 @@ def test_computers_mixed2(self): """ node_pk = self.get_dummy_data()['computers'][0]['id'] RESTApiTestCase.process_test( - self, - 'computers', - '/computers/page/2?id>' + str(node_pk) + '&perpage=2&orderby=+id', - expected_list_ids=[3, 4] + self, 'computers', f'/computers/page/2?id>{str(node_pk)}&perpage=2&orderby=+id', expected_list_ids=[3, 4] ) def test_computers_mixed3(self): @@ -736,7 +733,7 @@ def test_computers_mixed3(self): RESTApiTestCase.process_test( self, 'computers', - '/computers?id>=' + str(node_pk) + '&transport_type="ssh"&orderby=-id&limit=2', + f"/computers?id>={str(node_pk)}&transport_type=\"ssh\"&orderby=-id&limit=2", expected_list_ids=[4, 2] ) @@ -755,7 +752,7 @@ def test_calculation_retrieved_inputs(self): Get the list of given calculation retrieved_inputs """ node_uuid = self.get_dummy_data()['calculations'][1]['uuid'] - url = self.get_url_prefix() + '/calcjobs/' + str(node_uuid) + '/input_files' + url = f'{self.get_url_prefix()}/calcjobs/{str(node_uuid)}/input_files' with self.app.test_client() as client: response_value = client.get(url) response = json.loads(response_value.data) @@ -766,7 +763,7 @@ def test_calculation_retrieved_outputs(self): Get the list of given calculation retrieved_outputs """ node_uuid = self.get_dummy_data()['calculations'][1]['uuid'] - url = self.get_url_prefix() + '/calcjobs/' + str(node_uuid) + '/output_files' + url = f'{self.get_url_prefix()}/calcjobs/{str(node_uuid)}/output_files' with self.app.test_client() as client: response_value = client.get(url) response = json.loads(response_value.data) @@ -780,7 +777,7 @@ def test_calculation_inputs(self): node_uuid = self.get_dummy_data()['calculations'][1]['uuid'] self.process_test( 'nodes', - '/nodes/' + str(node_uuid) + '/links/incoming?orderby=id', + f'/nodes/{str(node_uuid)}/links/incoming?orderby=id', expected_list_ids=[5, 3], uuid=node_uuid, result_node_type='data', @@ -794,7 +791,7 
@@ def test_calculation_input_filters(self): node_uuid = self.get_dummy_data()['calculations'][1]['uuid'] self.process_test( 'nodes', - '/nodes/' + str(node_uuid) + '/links/incoming?node_type="data.dict.Dict."', + f"/nodes/{str(node_uuid)}/links/incoming?node_type=\"data.dict.Dict.\"", expected_list_ids=[3], uuid=node_uuid, result_node_type='data', @@ -806,7 +803,7 @@ def test_calculation_iotree(self): Get filtered incoming list for given calculations """ node_uuid = self.get_dummy_data()['calculations'][1]['uuid'] - url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '/links/tree?in_limit=1&out_limit=1' + url = f'{self.get_url_prefix()}/nodes/{str(node_uuid)}/links/tree?in_limit=1&out_limit=1' with self.app.test_client() as client: response_value = client.get(url) response = json.loads(response_value.data) @@ -836,7 +833,7 @@ def test_calculation_attributes(self): }, } node_uuid = self.get_dummy_data()['calculations'][1]['uuid'] - url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '/contents/attributes' + url = f'{self.get_url_prefix()}/nodes/{str(node_uuid)}/contents/attributes' with self.app.test_client() as client: rv_obj = client.get(url) response = json.loads(rv_obj.data) @@ -849,7 +846,7 @@ def test_contents_attributes_filter(self): Get list of calculation attributes with filter attributes_filter """ node_uuid = self.get_dummy_data()['calculations'][1]['uuid'] - url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '/contents/attributes?attributes_filter="attr1"' + url = f"{self.get_url_prefix()}/nodes/{str(node_uuid)}/contents/attributes?attributes_filter=\"attr1\"" with self.app.test_client() as client: rv_obj = client.get(url) response = json.loads(rv_obj.data) @@ -871,7 +868,7 @@ def test_calculation_attributes_filter(self): }, } node_uuid = self.get_dummy_data()['calculations'][1]['uuid'] - url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '?attributes=true' + url = f'{self.get_url_prefix()}/nodes/{str(node_uuid)}?attributes=true' with self.app.test_client() as client: response_value = client.get(url) response = json.loads(response_value.data) @@ -884,7 +881,7 @@ def test_calculation_extras_filter(self): """ extras = {'extra1': False, 'extra2': 'extra_info'} node_uuid = self.get_dummy_data()['calculations'][1]['uuid'] - url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '?extras=true&extras_filter=extra1,extra2' + url = f'{self.get_url_prefix()}/nodes/{str(node_uuid)}?extras=true&extras_filter=extra1,extra2' with self.app.test_client() as client: response_value = client.get(url) response = json.loads(response_value.data) @@ -898,7 +895,7 @@ def test_structure_attributes_filter(self): """ cell = [[2., 0., 0.], [0., 2., 0.], [0., 0., 2.]] node_uuid = self.get_dummy_data()['structuredata'][0]['uuid'] - url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '?attributes=true&attributes_filter=cell' + url = f'{self.get_url_prefix()}/nodes/{str(node_uuid)}?attributes=true&attributes_filter=cell' with self.app.test_client() as client: rv_obj = client.get(url) response = json.loads(rv_obj.data) @@ -911,7 +908,7 @@ def test_node_attributes_filter_pagination(self): returned as a dictionary when pagination is set """ expected_attributes = ['resources', 'cell'] - url = self.get_url_prefix() + '/nodes/page/1?perpage=10&attributes=true&attributes_filter=resources,cell' + url = f'{self.get_url_prefix()}/nodes/page/1?perpage=10&attributes=true&attributes_filter=resources,cell' with self.app.test_client() as client: response_value = client.get(url) 
response = json.loads(response_value.data) @@ -931,7 +928,7 @@ def test_node_single_attributes_filter(self): only this attribute is returned as a dictionary when pagination is set """ expected_attribute = ['resources'] - url = self.get_url_prefix() + '/nodes/page/1?perpage=10&attributes=true&attributes_filter=resources' + url = f'{self.get_url_prefix()}/nodes/page/1?perpage=10&attributes=true&attributes_filter=resources' with self.app.test_client() as client: response_value = client.get(url) response = json.loads(response_value.data) @@ -946,7 +943,7 @@ def test_node_extras_filter_pagination(self): returned as a dictionary when pagination is set """ expected_extras = ['extra1', 'extra2'] - url = self.get_url_prefix() + '/nodes/page/1?perpage=10&extras=true&extras_filter=extra1,extra2' + url = f'{self.get_url_prefix()}/nodes/page/1?perpage=10&extras=true&extras_filter=extra1,extra2' with self.app.test_client() as client: response_value = client.get(url) response = json.loads(response_value.data) @@ -966,7 +963,7 @@ def test_node_single_extras_filter(self): only this extra is returned as a dictionary when pagination is set """ expected_extra = ['extra2'] - url = self.get_url_prefix() + '/nodes/page/1?perpage=10&extras=true&extras_filter=extra2' + url = f'{self.get_url_prefix()}/nodes/page/1?perpage=10&extras=true&extras_filter=extra2' with self.app.test_client() as client: response_value = client.get(url) response = json.loads(response_value.data) @@ -984,7 +981,7 @@ def test_nodes_full_type_filter(self): if calc['node_type'] == 'process.calculation.calcjob.CalcJobNode.': expected_node_uuids.append(calc['uuid']) - url = self.get_url_prefix() + '/nodes/' + '?full_type="process.calculation.calcjob.CalcJobNode.|"' + url = f"{self.get_url_prefix()}/nodes/?full_type=\"process.calculation.calcjob.CalcJobNode.|\"" with self.app.test_client() as client: rv_obj = client.get(url) response = json.loads(rv_obj.data) @@ -997,7 +994,7 @@ def test_structure_derived_properties(self): Get the list of give calculation incoming """ node_uuid = self.get_dummy_data()['structuredata'][0]['uuid'] - url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '/contents/derived_properties' + url = f'{self.get_url_prefix()}/nodes/{str(node_uuid)}/contents/derived_properties' with self.app.test_client() as client: rv_obj = client.get(url) response = json.loads(rv_obj.data) @@ -1019,7 +1016,7 @@ def test_structure_download(self): from aiida.orm import load_node node_uuid = self.get_dummy_data()['structuredata'][0]['uuid'] - url = self.get_url_prefix() + '/nodes/' + node_uuid + '/download?download_format=xsf' + url = f'{self.get_url_prefix()}/nodes/{node_uuid}/download?download_format=xsf' with self.app.test_client() as client: rv_obj = client.get(url) structure_data = load_node(node_uuid)._exportcontent('xsf')[0] # pylint: disable=protected-access @@ -1032,7 +1029,7 @@ def test_cif(self): from aiida.orm import load_node node_uuid = self.get_dummy_data()['cifdata'][0]['uuid'] - url = self.get_url_prefix() + '/nodes/' + node_uuid + '/download?download_format=cif' + url = f'{self.get_url_prefix()}/nodes/{node_uuid}/download?download_format=cif' with self.app.test_client() as client: rv_obj = client.get(url) cif = load_node(node_uuid)._prepare_cif()[0] # pylint: disable=protected-access @@ -1044,7 +1041,7 @@ def test_projectable_properties(self): test projectable_properties endpoint """ for nodetype in ['nodes', 'processes', 'computers', 'users', 'groups']: - url = self.get_url_prefix() + '/' + nodetype + 
'/projectable_properties' + url = f'{self.get_url_prefix()}/{nodetype}/projectable_properties' with self.app.test_client() as client: rv_obj = client.get(url) response = json.loads(rv_obj.data) @@ -1067,7 +1064,7 @@ def test_node_namespace(self): """ Test the rest api call to get list of available node namespace """ - url = self.get_url_prefix() + '/nodes/full_types' + url = f'{self.get_url_prefix()}/nodes/full_types' with self.app.test_client() as client: rv_obj = client.get(url) response = json.loads(rv_obj.data) @@ -1082,7 +1079,7 @@ def test_comments(self): Get the node comments """ node_uuid = self.get_dummy_data()['structuredata'][0]['uuid'] - url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '/contents/comments' + url = f'{self.get_url_prefix()}/nodes/{str(node_uuid)}/contents/comments' with self.app.test_client() as client: rv_obj = client.get(url) response = json.loads(rv_obj.data)['data']['comments'] @@ -1098,13 +1095,13 @@ def test_repo(self): from aiida.orm import load_node node_uuid = self.get_dummy_data()['calculations'][1]['uuid'] - url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '/repo/list?filename="calcjob_inputs"' + url = f"{self.get_url_prefix()}/nodes/{str(node_uuid)}/repo/list?filename=\"calcjob_inputs\"" with self.app.test_client() as client: response_value = client.get(url) response = json.loads(response_value.data) self.assertEqual(response['data']['repo_list'], [{'type': 'FILE', 'name': 'aiida.in'}]) - url = self.get_url_prefix() + '/nodes/' + str(node_uuid) + '/repo/contents?filename="calcjob_inputs/aiida.in"' + url = f"{self.get_url_prefix()}/nodes/{str(node_uuid)}/repo/contents?filename=\"calcjob_inputs/aiida.in\"" with self.app.test_client() as client: response_obj = client.get(url) input_file = load_node(node_uuid).get_object_content('calcjob_inputs/aiida.in', mode='rb') @@ -1115,7 +1112,7 @@ def test_process_report(self): Test process report """ node_uuid = self.get_dummy_data()['calculations'][1]['uuid'] - url = self.get_url_prefix() + '/processes/' + str(node_uuid) + '/report' + url = f'{self.get_url_prefix()}/processes/{str(node_uuid)}/report' with self.app.test_client() as client: response_value = client.get(url) response = json.loads(response_value.data) @@ -1132,7 +1129,7 @@ def test_download_formats(self): """ test for download format endpoint """ - url = self.get_url_prefix() + '/nodes/download_formats' + url = f'{self.get_url_prefix()}/nodes/download_formats' with self.app.test_client() as client: response_value = client.get(url) response = json.loads(response_value.data) diff --git a/tests/restapi/test_threaded_restapi.py b/tests/restapi/test_threaded_restapi.py index 41109a6336..02af3839ef 100644 --- a/tests/restapi/test_threaded_restapi.py +++ b/tests/restapi/test_threaded_restapi.py @@ -36,19 +36,19 @@ def test_run_threaded_server(restapi_server, server_url, aiida_localhost): server_thread.start() for _ in range(NO_OF_REQUESTS): - response = requests.get(server_url + '/computers/{}'.format(computer_id), timeout=10) + response = requests.get(f'{server_url}/computers/{computer_id}', timeout=10) assert response.status_code == 200 try: response_json = response.json() except ValueError: - pytest.fail('Could not turn response into JSON. Response: {}'.format(response.raw)) + pytest.fail(f'Could not turn response into JSON. Response: {response.raw}') else: assert 'data' in response_json except Exception as exc: # pylint: disable=broad-except - pytest.fail('Something went terribly wrong! 
Exception: {}'.format(repr(exc))) + pytest.fail(f'Something went terribly wrong! Exception: {repr(exc)}') finally: server.shutdown() @@ -100,13 +100,13 @@ def __init__(self, app=None, **kwargs): server_thread.start() for _ in range(NO_OF_REQUESTS): - requests.get(server_url + '/computers_no_close_session/{}'.format(computer_id), timeout=10) - pytest.fail('{} requests were not enough to raise a SQLAlchemy TimeoutError!'.format(NO_OF_REQUESTS)) + requests.get(f'{server_url}/computers_no_close_session/{computer_id}', timeout=10) + pytest.fail(f'{NO_OF_REQUESTS} requests were not enough to raise a SQLAlchemy TimeoutError!') except (requests.exceptions.ConnectionError, OSError): pass except Exception as exc: # pylint: disable=broad-except - pytest.fail('Something went terribly wrong! Exception: {}'.format(repr(exc))) + pytest.fail(f'Something went terribly wrong! Exception: {repr(exc)}') finally: server.shutdown() diff --git a/tests/schedulers/test_lsf.py b/tests/schedulers/test_lsf.py index 9eb6564f2a..7fc5fb80f8 100644 --- a/tests/schedulers/test_lsf.py +++ b/tests/schedulers/test_lsf.py @@ -145,7 +145,7 @@ def test_submit_script(self): self.assertTrue('#BSUB -W 24:00' in submit_script_text) self.assertTrue('#BSUB -n 2' in submit_script_text) - self.assertTrue("'mpirun' '-np' '2' 'pw.x' '-npool' '1'" + " < 'aiida.in'" in submit_script_text) + self.assertTrue("'mpirun' '-np' '2' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text) def test_submit_script_with_num_machines(self): """ diff --git a/tests/schedulers/test_pbspro.py b/tests/schedulers/test_pbspro.py index a75718ae18..9b365cdebd 100644 --- a/tests/schedulers/test_pbspro.py +++ b/tests/schedulers/test_pbspro.py @@ -918,8 +918,7 @@ def test_submit_script(self): self.assertTrue(submit_script_text.startswith('#!/bin/bash -l')) self.assertTrue('#PBS -l walltime=24:00:00' in submit_script_text) self.assertTrue('#PBS -l select=1' in submit_script_text) - self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + \ - " < 'aiida.in'" in submit_script_text) + self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text) def test_submit_script_bad_shebang(self): """ @@ -979,7 +978,7 @@ def test_submit_script_with_num_cores_per_machine(self): self.assertTrue('#PBS -l select=1:mpiprocs=2' in submit_script_text) # Note: here 'num_cores_per_machine' should NOT override the mpiprocs - self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + " < 'aiida.in'" in submit_script_text) + self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text) def test_submit_script_with_num_cores_per_mpiproc(self): """ @@ -1012,7 +1011,7 @@ def test_submit_script_with_num_cores_per_mpiproc(self): self.assertTrue('#PBS -l select=1:mpiprocs=1:ppn=24' in submit_script_text) # Note: here 'num_cores_per_machine' should NOT override the mpiprocs - self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + " < 'aiida.in'" in submit_script_text) + self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text) def test_submit_script_with_num_cores_per_machine_and_mpiproc1(self): """ @@ -1046,7 +1045,7 @@ def test_submit_script_with_num_cores_per_machine_and_mpiproc1(self): self.assertTrue('#PBS -l select=1:mpiprocs=1:ppn=24' in submit_script_text) # Note: here 'num_cores_per_machine' should NOT override the mpiprocs - self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + " < 'aiida.in'" in submit_script_text) + self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 
'aiida.in'" in submit_script_text) def test_submit_script_with_num_cores_per_machine_and_mpiproc2(self): """ diff --git a/tests/schedulers/test_sge.py b/tests/schedulers/test_sge.py index c0957d57b7..2c5fa680b9 100644 --- a/tests/schedulers/test_sge.py +++ b/tests/schedulers/test_sge.py @@ -350,7 +350,7 @@ def _parse_time_string(string, fmt='%Y-%m-%dT%H:%M:%S'): try: time_struct = time.strptime(string, fmt) except Exception as exc: - raise ValueError('Unable to parse time string {}, the message was {}'.format(string, exc)) + raise ValueError(f'Unable to parse time string {string}, the message was {exc}') # I convert from a time_struct to a datetime object going through # the seconds since epoch, as suggested on stackoverflow: diff --git a/tests/schedulers/test_slurm.py b/tests/schedulers/test_slurm.py index 0970344a39..9c3b0eeebe 100644 --- a/tests/schedulers/test_slurm.py +++ b/tests/schedulers/test_slurm.py @@ -200,7 +200,7 @@ def test_submit_script(self): self.assertTrue('#SBATCH --time=1-00:00:00' in submit_script_text) self.assertTrue('#SBATCH --nodes=1' in submit_script_text) - self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + " < 'aiida.in'" in submit_script_text) + self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text) def test_submit_script_bad_shebang(self): """Test that first line of submit script is as expected.""" @@ -258,7 +258,7 @@ def test_submit_script_with_num_cores_per_machine(self): # pylint: disable=inva self.assertTrue('#SBATCH --ntasks-per-node=2' in submit_script_text) self.assertTrue('#SBATCH --cpus-per-task=12' in submit_script_text) - self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + " < 'aiida.in'" in submit_script_text) + self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text) def test_submit_script_with_num_cores_per_mpiproc(self): # pylint: disable=invalid-name """ @@ -290,7 +290,7 @@ def test_submit_script_with_num_cores_per_mpiproc(self): # pylint: disable=inva self.assertTrue('#SBATCH --ntasks-per-node=1' in submit_script_text) self.assertTrue('#SBATCH --cpus-per-task=24' in submit_script_text) - self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + " < 'aiida.in'" in submit_script_text) + self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text) def test_submit_script_with_num_cores_per_machine_and_mpiproc1(self): # pylint: disable=invalid-name """ @@ -325,7 +325,7 @@ def test_submit_script_with_num_cores_per_machine_and_mpiproc1(self): # pylint: self.assertTrue('#SBATCH --ntasks-per-node=1' in submit_script_text) self.assertTrue('#SBATCH --cpus-per-task=24' in submit_script_text) - self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + " < 'aiida.in'" in submit_script_text) + self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text) def test_submit_script_with_num_cores_per_machine_and_mpiproc2(self): # pylint: disable=invalid-name """ @@ -403,7 +403,7 @@ def test_parse_output_invalid(detailed_job_info, expected): def test_parse_output_valid(): """Test `SlurmScheduler.parse_output` for valid arguments.""" number_of_fields = len(SlurmScheduler._detailed_job_info_fields) # pylint: disable=protected-access - detailed_job_info = {'stdout': 'Header\n{}'.format('|' * number_of_fields)} + detailed_job_info = {'stdout': f"Header\n{'|' * number_of_fields}"} scheduler = SlurmScheduler() assert scheduler.parse_output(detailed_job_info, '', '') is None diff --git 
a/tests/schedulers/test_torque.py b/tests/schedulers/test_torque.py index 775e358e3e..381f4f95e9 100644 --- a/tests/schedulers/test_torque.py +++ b/tests/schedulers/test_torque.py @@ -893,7 +893,7 @@ def test_submit_script(self): self.assertTrue('#PBS -r n' in submit_script_text) self.assertTrue(submit_script_text.startswith('#!/bin/bash')) self.assertTrue('#PBS -l nodes=1:ppn=1,walltime=24:00:00' in submit_script_text) - self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + " < 'aiida.in'" in submit_script_text) + self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text) def test_submit_script_with_num_cores_per_machine(self): """ @@ -923,7 +923,7 @@ def test_submit_script_with_num_cores_per_machine(self): self.assertTrue('#PBS -r n' in submit_script_text) self.assertTrue(submit_script_text.startswith('#!/bin/bash')) self.assertTrue('#PBS -l nodes=1:ppn=24,walltime=24:00:00' in submit_script_text) - self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + " < 'aiida.in'" in submit_script_text) + self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text) def test_submit_script_with_num_cores_per_mpiproc(self): """ @@ -953,7 +953,7 @@ def test_submit_script_with_num_cores_per_mpiproc(self): self.assertTrue('#PBS -r n' in submit_script_text) self.assertTrue(submit_script_text.startswith('#!/bin/bash')) self.assertTrue('#PBS -l nodes=1:ppn=24,walltime=24:00:00' in submit_script_text) - self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + " < 'aiida.in'" in submit_script_text) + self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text) def test_submit_script_with_num_cores_per_machine_and_mpiproc1(self): """ @@ -985,7 +985,7 @@ def test_submit_script_with_num_cores_per_machine_and_mpiproc1(self): self.assertTrue('#PBS -r n' in submit_script_text) self.assertTrue(submit_script_text.startswith('#!/bin/bash')) self.assertTrue('#PBS -l nodes=1:ppn=24,walltime=24:00:00' in submit_script_text) - self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1'" + " < 'aiida.in'" in submit_script_text) + self.assertTrue("'mpirun' '-np' '23' 'pw.x' '-npool' '1' < 'aiida.in'" in submit_script_text) def test_submit_script_with_num_cores_per_machine_and_mpiproc2(self): """ diff --git a/tests/test_dbimporters.py b/tests/test_dbimporters.py index 086ba53b31..f004f72642 100644 --- a/tests/test_dbimporters.py +++ b/tests/test_dbimporters.py @@ -258,7 +258,7 @@ def test_upfentry_creation(self): entry = results.at(0) path_pseudos = os.path.join(STATIC_DIR, 'pseudos') - with open(os.path.join(path_pseudos, '{}.UPF'.format(upf)), 'r', encoding='utf8') as fpntr: + with open(os.path.join(path_pseudos, f'{upf}.UPF'), 'r', encoding='utf8') as fpntr: entry._contents = fpntr.read() # pylint: disable=protected-access upfnode = entry.get_upf_node() diff --git a/tests/test_nodes.py b/tests/test_nodes.py index 0f7dc23835..f032336afb 100644 --- a/tests/test_nodes.py +++ b/tests/test_nodes.py @@ -180,7 +180,7 @@ class TestQueryWithAiidaObjects(AiidaTestCase): def test_with_subclasses(self): from aiida.plugins import DataFactory - extra_name = self.__class__.__name__ + '/test_with_subclasses' + extra_name = f'{self.__class__.__name__}/test_with_subclasses' Dict = DataFactory('dict') @@ -1027,7 +1027,7 @@ def test_code_loading_from_string(self): self.assertEqual(q_code_1.get_remote_exec_path(), code1.get_remote_exec_path()) # Test that the code2 can be loaded correctly with its label - q_code_2 = 
orm.Code.get_from_string(code2.label + '@' + self.computer.label) # pylint: disable=no-member + q_code_2 = orm.Code.get_from_string(f'{code2.label}@{self.computer.label}') # pylint: disable=no-member self.assertEqual(q_code_2.id, code2.id) self.assertEqual(q_code_2.label, code2.label) self.assertEqual(q_code_2.get_remote_exec_path(), code2.get_remote_exec_path()) @@ -2024,9 +2024,9 @@ def _create_long_graph(total_calcs): for ii in range(total_calcs): new_calc = orm.CalculationNode() new_data = orm.Data().store() - new_calc.add_incoming(old_data, link_type=LinkType.INPUT_CALC, link_label='inp' + str(ii)) + new_calc.add_incoming(old_data, link_type=LinkType.INPUT_CALC, link_label=f'inp{str(ii)}') new_calc.store() - new_data.add_incoming(new_calc, link_type=LinkType.CREATE, link_label='out' + str(ii)) + new_data.add_incoming(new_calc, link_type=LinkType.CREATE, link_label=f'out{str(ii)}') node_list.append(new_calc) node_list.append(new_data) old_data = new_data diff --git a/tests/tools/dbimporters/test_icsd.py b/tests/tools/dbimporters/test_icsd.py index 0cd2cd7248..cc3185691d 100644 --- a/tests/tools/dbimporters/test_icsd.py +++ b/tests/tools/dbimporters/test_icsd.py @@ -74,7 +74,7 @@ def test_server(self): """ Test Icsd intranet webinterface """ - urllib.request.urlopen(self.server + 'icsd/').read() + urllib.request.urlopen(f'{self.server}icsd/').read() def test_mysqldb(self): """ diff --git a/tests/tools/graph/test_age.py b/tests/tools/graph/test_age.py index 538087c7d7..369040036e 100644 --- a/tests/tools/graph/test_age.py +++ b/tests/tools/graph/test_age.py @@ -58,7 +58,7 @@ def create_tree(max_depth=3, branching=3, starting_cls=orm.Data): for label_id in range(branching): new_node = current_class() - new_node.add_incoming(previous_node, link_type=current_links, link_label='link{}'.format(label_id)) + new_node.add_incoming(previous_node, link_type=current_links, link_label=f'link{label_id}') new_node.store() current_nodes.append(new_node) @@ -554,7 +554,7 @@ def test_groups(self): groups = [] for idx in range(total_groups): - new_group = orm.Group(label='group-{}'.format(idx)).store() + new_group = orm.Group(label=f'group-{idx}').store() groups.append(new_group) nodes = [] diff --git a/tests/tools/groups/test_paths.py b/tests/tools/groups/test_paths.py index 75e445bb97..ba9061b156 100644 --- a/tests/tools/groups/test_paths.py +++ b/tests/tools/groups/test_paths.py @@ -20,7 +20,7 @@ def setup_groups(clear_database_before_test): """Setup some groups for testing.""" for label in ['a', 'a/b', 'a/c/d', 'a/c/e/g', 'a/f']: group, _ = orm.Group.objects.get_or_create(label) - group.description = 'A description of {}'.format(label) + group.description = f'A description of {label}' yield diff --git a/tests/tools/importexport/migration/test_migration.py b/tests/tools/importexport/migration/test_migration.py index 4c470dd1ab..0320a6b386 100644 --- a/tests/tools/importexport/migration/test_migration.py +++ b/tests/tools/importexport/migration/test_migration.py @@ -91,7 +91,7 @@ def test_migrate_recursively(self): with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle: metadata = jsonload(fhandle) except IOError: - raise NotExistent('export archive does not contain the required file {}'.format(fhandle.filename)) + raise NotExistent(f'export archive does not contain the required file {fhandle.filename}') verify_metadata_version(metadata, version='0.1') @@ -169,7 +169,7 @@ def test_wrong_versions(self): self.assertNotIn( version, legal_versions, - msg="'{}' was not expected to 
be a legal version, legal version: {}".format(version, legal_versions) + msg=f"'{version}' was not expected to be a legal version, legal version: {legal_versions}" ) # Make sure migrate_recursively throws an ArchiveMigrationError @@ -219,9 +219,7 @@ def test_v02_to_newest(self, temp_dir): self.assertEqual( builder.count(), self.struct_count, - msg='There should be {} StructureData, instead {} were/was found'.format( - self.struct_count, builder.count() - ) + msg=f'There should be {self.struct_count} StructureData, instead {builder.count()} were/was found' ) for structures in builder.all(): structure = structures[0] @@ -232,7 +230,7 @@ def test_v02_to_newest(self, temp_dir): for [kinds] in builder.iterall(): self.assertEqual(len(kinds), len(self.known_kinds)) for kind in kinds: - self.assertIn(kind, self.known_kinds, msg="Kind '{}' not found in: {}".format(kind, self.known_kinds)) + self.assertIn(kind, self.known_kinds, msg=f"Kind '{kind}' not found in: {self.known_kinds}") # Check that there is a StructureData that is an input of a CalculationNode builder = orm.QueryBuilder() @@ -274,9 +272,7 @@ def test_v03_to_newest(self, temp_dir): self.assertEqual( builder.count(), self.struct_count, - msg='There should be {} StructureData, instead {} were/was found'.format( - self.struct_count, builder.count() - ) + msg=f'There should be {self.struct_count} StructureData, instead {builder.count()} were/was found' ) for structures in builder.all(): structure = structures[0] @@ -287,7 +283,7 @@ def test_v03_to_newest(self, temp_dir): for [kinds] in builder.iterall(): self.assertEqual(len(kinds), len(self.known_kinds)) for kind in kinds: - self.assertIn(kind, self.known_kinds, msg="Kind '{}' not found in: {}".format(kind, self.known_kinds)) + self.assertIn(kind, self.known_kinds, msg=f"Kind '{kind}' not found in: {self.known_kinds}") # Check that there is a StructureData that is an input of a CalculationNode builder = orm.QueryBuilder() @@ -329,9 +325,7 @@ def test_v04_to_newest(self, temp_dir): self.assertEqual( builder.count(), self.struct_count, - msg='There should be {} StructureData, instead {} were/was found'.format( - self.struct_count, builder.count() - ) + msg=f'There should be {self.struct_count} StructureData, instead {builder.count()} were/was found' ) for structures in builder.all(): structure = structures[0] @@ -342,7 +336,7 @@ def test_v04_to_newest(self, temp_dir): for [kinds] in builder.iterall(): self.assertEqual(len(kinds), len(self.known_kinds)) for kind in kinds: - self.assertIn(kind, self.known_kinds, msg="Kind '{}' not found in: {}".format(kind, self.known_kinds)) + self.assertIn(kind, self.known_kinds, msg=f"Kind '{kind}' not found in: {self.known_kinds}") # Check that there is a StructureData that is an input of a CalculationNode builder = orm.QueryBuilder() @@ -384,9 +378,7 @@ def test_v05_to_newest(self, temp_dir): self.assertEqual( builder.count(), self.struct_count, - msg='There should be {} StructureData, instead {} were/was found'.format( - self.struct_count, builder.count() - ) + msg=f'There should be {self.struct_count} StructureData, instead {builder.count()} were/was found' ) for structures in builder.all(): structure = structures[0] @@ -397,7 +389,7 @@ def test_v05_to_newest(self, temp_dir): for [kinds] in builder.iterall(): self.assertEqual(len(kinds), len(self.known_kinds)) for kind in kinds: - self.assertIn(kind, self.known_kinds, msg="Kind '{}' not found in: {}".format(kind, self.known_kinds)) + self.assertIn(kind, self.known_kinds, msg=f"Kind '{kind}' not found 
in: {self.known_kinds}") # Check that there is a StructureData that is an input of a CalculationNode builder = orm.QueryBuilder() @@ -439,9 +431,7 @@ def test_v06_to_newest(self, temp_dir): self.assertEqual( builder.count(), self.struct_count, - msg='There should be {} StructureData, instead {} were/was found'.format( - self.struct_count, builder.count() - ) + msg=f'There should be {self.struct_count} StructureData, instead {builder.count()} were/was found' ) for structures in builder.all(): structure = structures[0] @@ -452,7 +442,7 @@ def test_v06_to_newest(self, temp_dir): for [kinds] in builder.iterall(): self.assertEqual(len(kinds), len(self.known_kinds)) for kind in kinds: - self.assertIn(kind, self.known_kinds, msg="Kind '{}' not found in: {}".format(kind, self.known_kinds)) + self.assertIn(kind, self.known_kinds, msg=f"Kind '{kind}' not found in: {self.known_kinds}") # Check that there is a StructureData that is an input of a CalculationNode builder = orm.QueryBuilder() @@ -494,9 +484,7 @@ def test_v07_to_newest(self, temp_dir): self.assertEqual( builder.count(), self.struct_count, - msg='There should be {} StructureData, instead {} were/was found'.format( - self.struct_count, builder.count() - ) + msg=f'There should be {self.struct_count} StructureData, instead {builder.count()} were/was found' ) for structures in builder.all(): structure = structures[0] @@ -507,7 +495,7 @@ def test_v07_to_newest(self, temp_dir): for [kinds] in builder.iterall(): self.assertEqual(len(kinds), len(self.known_kinds)) for kind in kinds: - self.assertIn(kind, self.known_kinds, msg="Kind '{}' not found in: {}".format(kind, self.known_kinds)) + self.assertIn(kind, self.known_kinds, msg=f"Kind '{kind}' not found in: {self.known_kinds}") # Check that there is a StructureData that is an input of a CalculationNode builder = orm.QueryBuilder() diff --git a/tests/tools/importexport/migration/test_migration_array.py b/tests/tools/importexport/migration/test_migration_array.py index 76106666b5..e162e4de9c 100644 --- a/tests/tools/importexport/migration/test_migration_array.py +++ b/tests/tools/importexport/migration/test_migration_array.py @@ -30,11 +30,11 @@ def migration_data(request): """For a given tuple of two subsequent versions and corresponding migration method, return metadata and data.""" version_old, version_new, migration_method = request.param - filepath_archive = 'export_v{}_simple.aiida'.format(version_new) + filepath_archive = f'export_v{version_new}_simple.aiida' metadata_new, data_new = get_json_files(filepath_archive, filepath='export/migrate') verify_metadata_version(metadata_new, version=version_new) - filepath_archive = get_archive_file('export_v{}_simple.aiida'.format(version_old), filepath='export/migrate') + filepath_archive = get_archive_file(f'export_v{version_old}_simple.aiida', filepath='export/migrate') with Archive(filepath_archive) as archive: metadata_old = copy.deepcopy(archive.meta_data) @@ -63,7 +63,7 @@ def test_migrations(migration_data): # Assert conversion message in `metadata.json` is correct and then remove it for later assertions metadata_new.pop('conversion_info') - message = 'Converted from version {} to {} with AiiDA v{}'.format(version_old, version_new, get_version()) + message = f'Converted from version {version_old} to {version_new} with AiiDA v{get_version()}' assert metadata_old.pop('conversion_info')[-1] == message, 'Conversion message after migration is wrong' assert metadata_old == metadata_new diff --git 
a/tests/tools/importexport/migration/test_v02_to_v03.py b/tests/tools/importexport/migration/test_v02_to_v03.py index 8a0a0c0cc1..cdb81df6b4 100644 --- a/tests/tools/importexport/migration/test_v02_to_v03.py +++ b/tests/tools/importexport/migration/test_v02_to_v03.py @@ -25,7 +25,7 @@ def test_migrate_external(self): # Check link types legal_link_types = {'unspecified', 'createlink', 'returnlink', 'inputlink', 'calllink'} for link in data['links_uuid']: - self.assertIn('type', link, msg="key 'type' was not added to link: {}".format(link)) + self.assertIn('type', link, msg=f"key 'type' was not added to link: {link}") self.assertIn(link['type'], legal_link_types) # Check entity names @@ -35,9 +35,7 @@ def test_migrate_external(self): self.assertIn( entity, legal_entity_names, - msg="'{}' should now be equal to anyone of these: {}, but is not".format( - entity, legal_entity_names - ) + msg=f"'{entity}' should now be equal to anyone of these: {legal_entity_names}, but is not" ) if field == 'all_fields_info': @@ -55,7 +53,7 @@ def test_migrate_external(self): self.assertIn( entity, legal_entity_names, - msg="'{}' should now be equal to anyone of these: {}, but is not".format(entity, legal_entity_names) + msg=f"'{entity}' should now be equal to anyone of these: {legal_entity_names}, but is not" ) def test_compare_migration_with_aiida_made(self): @@ -125,7 +123,7 @@ def test_compare_migration_with_aiida_made(self): self.assertListEqual( sorted(details['migrated']), sorted(details['made']), - msg='Number of {}-entities differ, see diff for details'.format(entity) + msg=f'Number of {entity}-entities differ, see diff for details' ) fields = {'export_data', 'groups_uuid', 'node_attributes_conversion', 'node_attributes'} @@ -133,7 +131,7 @@ def test_compare_migration_with_aiida_made(self): self.assertEqual( len(data_v2[field]), len(data_v3[field]), - msg='Number of entities in {} differs for the export files'.format(field) + msg=f'Number of entities in {field} differs for the export files' ) number_of_links_v2 = { diff --git a/tests/tools/importexport/migration/test_v03_to_v04.py b/tests/tools/importexport/migration/test_v03_to_v04.py index 63a7f151b0..a3529b383f 100644 --- a/tests/tools/importexport/migration/test_v03_to_v04.py +++ b/tests/tools/importexport/migration/test_v03_to_v04.py @@ -47,7 +47,7 @@ def test_migrate_external(self): with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle: metadata = jsonload(fhandle) except IOError: - raise NotExistent('export archive does not contain the required file {}'.format(fhandle.filename)) + raise NotExistent(f'export archive does not contain the required file {fhandle.filename}') verify_metadata_version(metadata, version='0.3') @@ -75,12 +75,10 @@ def test_migrate_external(self): for change in new_node_attrs: # data.json for node in data['export_data']['Node'].values(): - self.assertIn(change, node, msg="'{}' not found for {}".format(change, node)) + self.assertIn(change, node, msg=f"'{change}' not found for {node}") # metadata.json self.assertIn( - change, - metadata['all_fields_info']['Node'], - msg="'{}' not found in metadata.json for Node".format(change) + change, metadata['all_fields_info']['Node'], msg=f"'{change}' not found in metadata.json for Node" ) # Check Node types @@ -95,14 +93,12 @@ def test_migrate_external(self): self.assertIn( node['node_type'], legal_node_types, - msg='{} is not a legal node_type. 
Legal node types: {}'.format(node['node_type'], legal_node_types) + msg=f"{node['node_type']} is not a legal node_type. Legal node types: {legal_node_types}" ) self.assertIn( node['process_type'], legal_process_types, - msg='{} is not a legal process_type. Legal process types: {}'.format( - node['process_type'], legal_node_types - ) + msg=f"{node['process_type']} is not a legal process_type. Legal process types: {legal_node_types}" ) # Check links @@ -117,7 +113,7 @@ def test_migrate_external(self): for link in data['links_uuid']: self.assertIn(link['type'], legal_link_types) for link in illegal_links: - self.assertNotIn(link, data['links_uuid'], msg='{} should not be in the migrated export file'.format(link)) + self.assertNotIn(link, data['links_uuid'], msg=f'{link} should not be in the migrated export file') # Check Groups # There is one Group in the export file, it is a user group @@ -126,14 +122,14 @@ def test_migrate_external(self): for attr in updated_attrs: # data.json for group in data['export_data']['Group'].values(): - self.assertIn(attr, group, msg='{} not found in Group {}'.format(attr, group)) + self.assertIn(attr, group, msg=f'{attr} not found in Group {group}') self.assertIn( group['type_string'], legal_group_type, - msg='{} is not a legal Group type_string'.format(group['type_string']) + msg=f"{group['type_string']} is not a legal Group type_string" ) # metadata.json - self.assertIn(attr, metadata['all_fields_info']['Group'], msg='{} not found in metadata.json'.format(attr)) + self.assertIn(attr, metadata['all_fields_info']['Group'], msg=f'{attr} not found in metadata.json') # Check node_attributes* calcjob_nodes = [] @@ -154,7 +150,7 @@ def test_migrate_external(self): self.assertIn( attr, data[field][node_id], - msg="Updated attribute name '{}' not found in {} for node_id: {}".format(attr, field, node_id) + msg=f"Updated attribute name '{attr}' not found in {field} for node_id: {node_id}" ) for old, new in optional_updated_calcjob_attrs.items(): self.assertNotIn( @@ -168,7 +164,7 @@ def test_migrate_external(self): self.assertIn( attr, data[field][node_id], - msg="Updated attribute name '{}' not found in {} for node_id: {}".format(attr, field, node_id) + msg=f"Updated attribute name '{attr}' not found in {field} for node_id: {node_id}" ) # Check TrajectoryData @@ -197,13 +193,13 @@ def test_migrate_external(self): # data.json for computer in data['export_data']['Computer'].values(): self.assertNotIn( - attr, computer, msg="'{}' should have been removed from Computer {}".format(attr, computer['name']) + attr, computer, msg=f"'{attr}' should have been removed from Computer {computer['name']}" ) # metadata.json self.assertNotIn( attr, metadata['all_fields_info']['Computer'], - msg="'{}' should have been removed from Computer in metadata.json".format(attr) + msg=f"'{attr}' should have been removed from Computer in metadata.json" ) # Check new entities @@ -211,7 +207,7 @@ def test_migrate_external(self): fields = {'all_fields_info', 'unique_identifiers'} for entity in new_entities: for field in fields: - self.assertIn(entity, metadata[field], msg='{} not found in {} in metadata.json'.format(entity, field)) + self.assertIn(entity, metadata[field], msg=f'{entity} not found in {field} in metadata.json') # Check extras # Dicts with key, vales equal to node_id, {} should be present @@ -219,7 +215,7 @@ def test_migrate_external(self): attrs_count = len(data['node_attributes']) new_fields = {'node_extras', 'node_extras_conversion'} for field in new_fields: - self.assertIn(field, 
list(data.keys()), msg="New field '{}' not found in data.json".format(field)) + self.assertIn(field, list(data.keys()), msg=f"New field '{field}' not found in data.json") self.assertEqual( len(data[field]), attrs_count, @@ -253,7 +249,7 @@ def test_compare_migration_with_aiida_made(self): with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle: metadata_v3 = jsonload(fhandle) except IOError: - raise NotExistent('export archive does not contain the required file {}'.format(fhandle.filename)) + raise NotExistent(f'export archive does not contain the required file {fhandle.filename}') # Migrate to v0.4 migrate_v3_to_v4(metadata_v3, data_v3, folder) @@ -309,7 +305,7 @@ def test_compare_migration_with_aiida_made(self): self.assertListEqual( sorted(details['migrated']), sorted(details['made']), - msg='Number of {}-entities differ, see diff for details'.format(entity) + msg=f'Number of {entity}-entities differ, see diff for details' ) fields = { @@ -324,7 +320,7 @@ def test_compare_migration_with_aiida_made(self): self.assertEqual( len(data_v3[field]), len(data_v4[field]) - correction, - msg='Number of entities in {} differs for the export files'.format(field) + msg=f'Number of entities in {field} differs for the export files' ) number_of_links_v3 = { @@ -389,7 +385,7 @@ def test_illegal_create_links(self): with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle: metadata = jsonload(fhandle) except IOError: - raise NotExistent('export archive does not contain the required file {}'.format(fhandle.filename)) + raise NotExistent(f'export archive does not contain the required file {fhandle.filename}') # Check illegal create links are present in org. export file links_count = len(data['links_uuid']) @@ -419,9 +415,7 @@ def test_illegal_create_links(self): self.assertEqual( len(data['links_uuid']), links_count_migrated, - msg='{} links were expected, instead {} was/were found'.format( - links_count_migrated, len(data['links_uuid']) - ) + msg=f"{links_count_migrated} links were expected, instead {len(data['links_uuid'])} was/were found" ) workfunc_uuids = { @@ -434,5 +428,5 @@ def test_illegal_create_links(self): if link['input'] in workfunc_uuids and link['type'] == 'create': violations.append(link) self.assertEqual( - len(violations), 0, msg='0 illegal links were expected, instead {} was/were found'.format(len(violations)) + len(violations), 0, msg=f'0 illegal links were expected, instead {len(violations)} was/were found' ) diff --git a/tests/tools/importexport/migration/test_v04_to_v05.py b/tests/tools/importexport/migration/test_v04_to_v05.py index 664d1f495a..c94dd528cb 100644 --- a/tests/tools/importexport/migration/test_v04_to_v05.py +++ b/tests/tools/importexport/migration/test_v04_to_v05.py @@ -26,20 +26,20 @@ def test_migrate_external(self): for change in removed_computer_attrs: # data.json for computer in data['export_data']['Computer'].values(): - self.assertNotIn(change, computer, msg="'{}' unexpectedly found for {}".format(change, computer)) + self.assertNotIn(change, computer, msg=f"'{change}' unexpectedly found for {computer}") # metadata.json self.assertNotIn( change, metadata['all_fields_info']['Computer'], - msg="'{}' unexpectedly found in metadata.json for Computer".format(change) + msg=f"'{change}' unexpectedly found in metadata.json for Computer" ) for change in removed_node_attrs: # data.json for node in data['export_data']['Node'].values(): - self.assertNotIn(change, node, msg="'{}' unexpectedly found for {}".format(change, node)) 
+ self.assertNotIn(change, node, msg=f"'{change}' unexpectedly found for {node}") # metadata.json self.assertNotIn( change, metadata['all_fields_info']['Node'], - msg="'{}' unexpectedly found in metadata.json for Node".format(change) + msg=f"'{change}' unexpectedly found in metadata.json for Node" ) diff --git a/tests/tools/importexport/migration/test_v05_to_v06.py b/tests/tools/importexport/migration/test_v05_to_v06.py index 08f1490ded..9f30dffa48 100644 --- a/tests/tools/importexport/migration/test_v05_to_v06.py +++ b/tests/tools/importexport/migration/test_v05_to_v06.py @@ -26,7 +26,7 @@ def test_migrate_external(self): # Explicitly check that conversion dictionaries were removed illegal_data_dicts = {'node_attributes_conversion', 'node_extras_conversion'} for dict_ in illegal_data_dicts: - self.assertNotIn(dict_, data, msg="dictionary '{}' should have been removed from data.json".format(dict_)) + self.assertNotIn(dict_, data, msg=f"dictionary '{dict_}' should have been removed from data.json") def test_migrate_v5_to_v6_calc_states(self): """Test the data migration of legacy `JobCalcState` attributes. @@ -85,7 +85,7 @@ def test_migrate_v5_to_v6_datetime(self): continue serialized_original = values['scheduler_lastchecktime'] - msg = 'the serialized datetime before migration should not contain a plus: {}'.format(serialized_original) + msg = f'the serialized datetime before migration should not contain a plus: {serialized_original}' self.assertTrue('+' not in serialized_original, msg=msg) # Migrate to v0.6 @@ -93,7 +93,7 @@ def test_migrate_v5_to_v6_datetime(self): verify_metadata_version(metadata, version='0.6') serialized_migrated = data['node_attributes'][key]['scheduler_lastchecktime'] - self.assertEqual(serialized_migrated, serialized_original + '+00:00') + self.assertEqual(serialized_migrated, f'{serialized_original}+00:00') break else: diff --git a/tests/tools/importexport/migration/test_v06_to_v07.py b/tests/tools/importexport/migration/test_v06_to_v07.py index 34f2f10d87..aa64135125 100644 --- a/tests/tools/importexport/migration/test_v06_to_v07.py +++ b/tests/tools/importexport/migration/test_v06_to_v07.py @@ -30,14 +30,12 @@ def test_migrate_external(self): self.assertNotIn( attr, attrs, - msg="key '{}' should have been removed from attributes for Node ".format(attr, node_pk) + msg=f"key '{attr}' should have been removed from attributes for Node <{node_pk}>" ) # Check new attributes were added successfully for attr in new_attrs: - self.assertIn( - attr, attrs, msg="key '{}' was not added to attributes for Node ".format(attr, node_pk) - ) + self.assertIn(attr, attrs, msg=f"key '{attr}' was not added to attributes for Node <{node_pk}>") self.assertEqual( attrs[attr], new_attrs[attr], @@ -53,7 +51,7 @@ def test_migrate_external(self): self.assertNotIn( entity, metadata[dict_], - msg="key '{}' should have been removed from '{}' in metadata.json".format(entity, dict_) + msg=f"key '{entity}' should have been removed from '{dict_}' in metadata.json" ) def test_migration_0040_corrupt_archive(self): diff --git a/tests/tools/importexport/migration/test_v08_to_v09.py b/tests/tools/importexport/migration/test_v08_to_v09.py index 4cc3e43d36..357716e625 100644 --- a/tests/tools/importexport/migration/test_v08_to_v09.py +++ b/tests/tools/importexport/migration/test_v08_to_v09.py @@ -22,7 +22,7 @@ def test_migrate_external(self): for attributes in data.get('export_data', {}).get('Group', {}).values(): if attributes['type_string'] not in ['core', 'core.upf', 'core.import', 'core.auto']: - raise
AssertionError('encountered illegal type string `{}`'.format(attributes['type_string'])) + raise AssertionError(f"encountered illegal type string `{attributes['type_string']}`") def test_migration_dbgroup_type_string(self): """Test the `migration_dbgroup_type_string` function directly.""" diff --git a/tests/tools/importexport/orm/test_computers.py b/tests/tools/importexport/orm/test_computers.py index 059cbe9184..1ca0c6d84e 100644 --- a/tests/tools/importexport/orm/test_computers.py +++ b/tests/tools/importexport/orm/test_computers.py @@ -111,10 +111,7 @@ def test_same_computer_import(self, temp_dir): # did not change. builder = orm.QueryBuilder() builder.append(orm.Computer, project=['name', 'uuid', 'id']) - self.assertEqual( - builder.count(), 1, 'Found {} computers' - 'but only one computer should be found.'.format(builder.count()) - ) + self.assertEqual(builder.count(), 1, f'Found {builder.count()} computers but only one computer should be found.') self.assertEqual(str(builder.first()[0]), comp_name, 'The computer name is not correct.') self.assertEqual(str(builder.first()[1]), comp_uuid, 'The computer uuid is not correct.') self.assertEqual(builder.first()[2], comp_id, 'The computer id is not correct.') @@ -156,7 +153,7 @@ def test_same_computer_different_name_import(self, temp_dir): export([calc1], filename=filename1, silent=True) # Rename the computer - comp1.label = comp1_name + '_updated' + comp1.label = f'{comp1_name}_updated' # Store a second calculation calc2_label = 'calc2' @@ -207,10 +204,7 @@ def test_same_computer_different_name_import(self, temp_dir): # did not change. builder = orm.QueryBuilder() builder.append(orm.Computer, project=['name']) - self.assertEqual( - builder.count(), 1, 'Found {} computers' - 'but only one computer should be found.'.format(builder.count()) - ) + self.assertEqual(builder.count(), 1, f'Found {builder.count()} computers but only one computer should be found.') self.assertEqual(str(builder.first()[0]), comp1_name, 'The computer name is not correct.') @with_temp_dir diff --git a/tests/tools/importexport/orm/test_groups.py b/tests/tools/importexport/orm/test_groups.py index b2462a2135..2591a57050 100644 --- a/tests/tools/importexport/orm/test_groups.py +++ b/tests/tools/importexport/orm/test_groups.py @@ -153,11 +153,11 @@ def test_group_import_existing(self, temp_dir): import_data(filename, silent=True) # The import should have created a new group with a suffix # I check for this: - builder = orm.QueryBuilder().append(orm.Group, filters={'label': {'like': grouplabel + '%'}}) + builder = orm.QueryBuilder().append(orm.Group, filters={'label': {'like': f'{grouplabel}%'}}) self.assertEqual(builder.count(), 2) # Now I check for the group having one member, and whether the name is different: builder = orm.QueryBuilder() - builder.append(orm.Group, filters={'label': {'like': grouplabel + '%'}}, tag='g', project='label') + builder.append(orm.Group, filters={'label': {'like': f'{grouplabel}%'}}, tag='g', project='label') builder.append(orm.StructureData, with_group='g') self.assertEqual(builder.count(), 1) # I check that the group name was changed: @@ -165,7 +165,7 @@ # I import another name, the group should not be imported again import_data(filename, silent=True) builder = orm.QueryBuilder() - builder.append(orm.Group, filters={'label': {'like': grouplabel + '%'}}) + builder.append(orm.Group, filters={'label': {'like': f'{grouplabel}%'}}) self.assertEqual(builder.count(), 2) @with_temp_dir @@ -205,8 -7
@@ def test_import_to_group(self, temp_dir): self.assertEqual( builder.count(), 1, - msg='There should be exactly one Group with label {}. ' - 'Instead {} was found.'.format(group_label, builder.count()) + msg=f'There should be exactly one Group with label {group_label}. Instead {builder.count()} was found.' ) imported_group = load_group(builder.all()[0][0]) self.assertEqual(imported_group.uuid, group_uuid) diff --git a/tests/tools/importexport/orm/test_links.py b/tests/tools/importexport/orm/test_links.py index d636b8c710..9eddd5a7c0 100644 --- a/tests/tools/importexport/orm/test_links.py +++ b/tests/tools/importexport/orm/test_links.py @@ -343,9 +343,7 @@ def test_high_level_workflow_links(self, temp_dir): ) self.assertEqual( - builder.count(), - 13, - msg='Failed with c1={}, c2={}, w1={}, w2={}'.format(calcs[0], calcs[1], works[0], works[1]) + builder.count(), 13, msg=f'Failed with c1={calcs[0]}, c2={calcs[1]}, w1={works[0]}, w2={works[1]}' ) export_links = builder.all() @@ -364,7 +362,7 @@ def test_high_level_workflow_links(self, temp_dir): self.assertSetEqual( set(export_set), set(import_set), - msg='Failed with c1={}, c2={}, w1={}, w2={}'.format(calcs[0], calcs[1], works[0], works[1]) + msg=f'Failed with c1={calcs[0]}, c2={calcs[1]}, w1={works[0]}, w2={works[1]}' ) @staticmethod @@ -404,7 +402,7 @@ def link_flags_import_helper(self, test_data): ) ) for node_uuid in builder.iterall(): - self.assertIn(node_uuid[0], expected_nodes[node_type], msg='Failed for test: "{}"'.format(test)) + self.assertIn(node_uuid[0], expected_nodes[node_type], msg=f'Failed for test: "{test}"') def link_flags_export_helper(self, name, all_nodes, temp_dir, nodes_to_export, flags, expected_nodes): # pylint: disable=too-many-arguments """Helper function""" @@ -429,30 +427,26 @@ def link_flags_export_helper(self, name, all_nodes, temp_dir, nodes_to_export, f expected_nodes.append(expected_nodes_uuid) ret = { - '{}_follow_none'.format(name): ( - os.path.join(temp_dir, '{}_none.aiida'.format(name)), { - calc_flag: False, - work_flag: False - }, expected_nodes[0] - ), - '{}_follow_only_calc'.format(name): ( - os.path.join(temp_dir, '{}_calc.aiida'.format(name)), { - calc_flag: True, - work_flag: False - }, expected_nodes[1] - ), - '{}_follow_only_work'.format(name): ( - os.path.join(temp_dir, '{}_work.aiida'.format(name)), { - calc_flag: False, - work_flag: True - }, expected_nodes[2] - ), - '{}_follow_only_all'.format(name): ( - os.path.join(temp_dir, '{}_all.aiida'.format(name)), { - calc_flag: True, - work_flag: True - }, expected_nodes[3] - ) + f'{name}_follow_none': + (os.path.join(temp_dir, f'{name}_none.aiida'), { + calc_flag: False, + work_flag: False + }, expected_nodes[0]), + f'{name}_follow_only_calc': + (os.path.join(temp_dir, f'{name}_calc.aiida'), { + calc_flag: True, + work_flag: False + }, expected_nodes[1]), + f'{name}_follow_only_work': + (os.path.join(temp_dir, f'{name}_work.aiida'), { + calc_flag: False, + work_flag: True + }, expected_nodes[2]), + f'{name}_follow_only_all': + (os.path.join(temp_dir, f'{name}_all.aiida'), { + calc_flag: True, + work_flag: True + }, expected_nodes[3]) } self.prepare_link_flags_export(nodes_to_export, ret) @@ -654,7 +648,7 @@ def test_dangling_link_to_existing_db_node(self, temp_dir): self.assertEqual( builder.count(), 1, - msg='There should be a single CalculationNode, instead {} has been found'.format(builder.count()) + msg=f'There should be a single CalculationNode, instead {builder.count()} has been found' ) self.assertEqual(builder.all()[0][0], calc_uuid) @@ 
-665,9 +659,7 @@ def test_dangling_link_to_existing_db_node(self, temp_dir): import_data(filename, ignore_unknown_nodes=True, silent=True) builder = orm.QueryBuilder().append(orm.StructureData, project='uuid') self.assertEqual( - builder.count(), - 1, - msg='There should be a single StructureData, instead {} has been found'.format(builder.count()) + builder.count(), 1, msg=f'There should be a single StructureData, instead {builder.count()} has been found' ) self.assertEqual(builder.all()[0][0], struct_uuid) @@ -703,14 +695,12 @@ def test_multiple_post_return_links(self, temp_dir): # pylint: disable=too-many no_of_work = orm.QueryBuilder().append(orm.WorkflowNode).count() self.assertEqual( - no_of_work, 0, msg='{} WorkflowNode(s) was/were found, however, none should be present'.format(no_of_work) + no_of_work, 0, msg=f'{no_of_work} WorkflowNode(s) was/were found, however, none should be present' ) nodes = orm.QueryBuilder().append(orm.Node, project='uuid') self.assertEqual( - nodes.count(), - 2, - msg='{} Node(s) was/were found, however, exactly two should be present'.format(no_of_work) + nodes.count(), 2, msg=f'{no_of_work} Node(s) was/were found, however, exactly two should be present' ) for node in nodes.iterall(): self.assertIn(node[0], [data_uuid, calc_uuid]) @@ -733,16 +723,12 @@ def test_multiple_post_return_links(self, temp_dir): # pylint: disable=too-many no_of_work = orm.QueryBuilder().append(orm.WorkflowNode).count() self.assertEqual( - no_of_work, - 1, - msg='{} WorkflowNode(s) was/were found, however, exactly one should be present'.format(no_of_work) + no_of_work, 1, msg=f'{no_of_work} WorkflowNode(s) was/were found, however, exactly one should be present' ) nodes = orm.QueryBuilder().append(orm.Node, project='uuid') self.assertEqual( - nodes.count(), - 3, - msg='{} Node(s) was/were found, however, exactly three should be present'.format(no_of_work) + nodes.count(), 3, msg=f'{no_of_work} Node(s) was/were found, however, exactly three should be present' ) for node in nodes.iterall(): self.assertIn(node[0], [data_uuid, calc_uuid, work_uuid]) @@ -751,7 +737,6 @@ def test_multiple_post_return_links(self, temp_dir): # pylint: disable=too-many self.assertEqual( len(links), 2, - msg='Exactly two Links are expected, instead {} were found ' - '(in, out, label, type): {}'.format(len(links), links) + msg=f'Exactly two Links are expected, instead {len(links)} were found (in, out, label, type): {links}' ) self.assertListEqual(sorted(links), sorted(before_links)) diff --git a/tests/tools/importexport/test_complex.py b/tests/tools/importexport/test_complex.py index 08a11ef145..211968548c 100644 --- a/tests/tools/importexport/test_complex.py +++ b/tests/tools/importexport/test_complex.py @@ -100,7 +100,7 @@ def test_complex_graph_import_export(self, temp_dir): try: orm.load_node(uuid) except NotExistent: - self.fail('Node with UUID {} and label {} was not found.'.format(uuid, label)) + self.fail(f'Node with UUID {uuid} and label {label} was not found.') @with_temp_dir def test_reexport(self, temp_dir): @@ -170,7 +170,7 @@ def get_hash_from_db_content(grouplabel): param = orm.Dict(dict=trial_dict) param.label = str(datetime.now()) - param.description = 'd_' + str(datetime.now()) + param.description = f'd_{str(datetime.now())}' param.store() calc = orm.CalculationNode() # setting also trial dict as attributes, but randomizing the keys) @@ -196,7 +196,7 @@ def get_hash_from_db_content(grouplabel): # I export and reimport 3 times in a row: for i in range(3): # Always new filename: - filename = 
os.path.join(temp_dir, 'export-{}.aiida'.format(i)) + filename = os.path.join(temp_dir, f'export-{i}.aiida') # Loading the group from the string group = orm.Group.get(label=grouplabel) # exporting based on all members of the group diff --git a/tests/tools/importexport/test_prov_redesign.py b/tests/tools/importexport/test_prov_redesign.py index 85b945caa1..801657624f 100644 --- a/tests/tools/importexport/test_prov_redesign.py +++ b/tests/tools/importexport/test_prov_redesign.py @@ -37,7 +37,7 @@ def test_base_data_type_change(self, temp_dir): test_content = ('Hello', 6, -1.2399834e12, False) test_types = () for node_type in ['str', 'int', 'float', 'bool']: - add_type = ('data.{}.{}.'.format(node_type, node_type.capitalize()),) + add_type = (f'data.{node_type}.{node_type.capitalize()}.',) test_types = test_types.__add__(add_type) # List of nodes to be exported @@ -75,7 +75,7 @@ def test_base_data_type_change(self, temp_dir): # Check value/content self.assertEqual(base.value, refval) # Check type - msg = "type of node ('{}') is not updated according to db schema v0.4".format(base.node_type) + msg = f"type of node ('{base.node_type}') is not updated according to db schema v0.4" self.assertEqual(base.node_type, reftype, msg=msg) # List @@ -83,7 +83,7 @@ def test_base_data_type_change(self, temp_dir): self.assertEqual(list_value, refval) # Check List type - msg = "type of node ('{}') is not updated according to db schema v0.4".format(nlist.node_type) + msg = f"type of node ('{nlist.node_type}') is not updated according to db schema v0.4" self.assertEqual(nlist.node_type, 'data.list.List.', msg=msg) @with_temp_dir diff --git a/tests/tools/importexport/test_specific_import.py b/tests/tools/importexport/test_specific_import.py index df709fd9fb..cd86ede1c1 100644 --- a/tests/tools/importexport/test_specific_import.py +++ b/tests/tools/importexport/test_specific_import.py @@ -200,9 +200,7 @@ def test_missing_node_repo_folder_export(self, temp_dir): with self.assertRaises(exceptions.ArchiveExportError) as exc: export([node], filename=filename, silent=True) - self.assertIn( - 'Unable to find the repository folder for Node with UUID={}'.format(node_uuid), str(exc.exception) - ) + self.assertIn(f'Unable to find the repository folder for Node with UUID={node_uuid}', str(exc.exception)) self.assertFalse(os.path.exists(filename), msg='The export file should not exist') @with_temp_dir @@ -262,9 +260,7 @@ def test_missing_node_repo_folder_import(self, temp_dir): with self.assertRaises(exceptions.CorruptArchive) as exc: import_data(filename_corrupt, silent=True) - self.assertIn( - 'Unable to find the repository folder for Node with UUID={}'.format(node_uuid), str(exc.exception) - ) + self.assertIn(f'Unable to find the repository folder for Node with UUID={node_uuid}', str(exc.exception)) @with_temp_dir def test_empty_repo_folder_export(self, temp_dir): @@ -287,9 +283,7 @@ def test_empty_repo_folder_export(self, temp_dir): shutil.rmtree(abspath_filename, ignore_errors=False) self.assertFalse( node_repo.get_content_list(), - msg='Repository folder should be empty, instead the following was found: {}'.format( - node_repo.get_content_list() - ) + msg=f'Repository folder should be empty, instead the following was found: {node_repo.get_content_list()}' ) archive_variants = { @@ -305,7 +299,7 @@ def test_empty_repo_folder_export(self, temp_dir): for variant, filename in archive_variants.items(): self.reset_database() node_count = orm.QueryBuilder().append(orm.Dict, project='uuid').count() - 
self.assertEqual(node_count, 0, msg='After DB reset {} Dict Nodes was (wrongly) found'.format(node_count)) + self.assertEqual(node_count, 0, msg=f'After DB reset {node_count} Dict Nodes was (wrongly) found') import_data(filename, silent=True) builder = orm.QueryBuilder().append(orm.Dict, project='uuid') diff --git a/tests/transports/test_all_plugins.py b/tests/transports/test_all_plugins.py index 9a90aac453..2920126cd2 100644 --- a/tests/transports/test_all_plugins.py +++ b/tests/transports/test_all_plugins.py @@ -53,14 +53,14 @@ def get_all_custom_transports(): try: test_modules.remove(thisbasename) except IndexError: - print('Warning, this module ({}) was not found!'.format(thisbasename)) + print(f'Warning, this module ({thisbasename}) was not found!') all_custom_transports = {} for module in test_modules: module = importlib.import_module('.'.join([modulename, module])) custom_transport = module.__dict__.get('plugin_transport', None) if custom_transport is None: - print('Define the plugin_transport variable inside the {} module!'.format(module)) + print(f'Define the plugin_transport variable inside the {module} module!') else: all_custom_transports[module] = custom_transport @@ -99,7 +99,7 @@ def test_all_plugins(self): messages = ['*** At least one test for a subplugin failed. See below ***', ''] for exc in exceptions: - messages.append("*** [For plugin {}]: Exception '{}': {}".format(exc[2], type(exc[0]).__name__, exc[0])) + messages.append(f"*** [For plugin {exc[2]}]: Exception '{type(exc[0]).__name__}': {exc[0]}") messages.append(exc[1]) raise exception_to_raise('\n'.join(messages)) @@ -385,7 +385,7 @@ def test_dir_copy(self, custom_transport): directory += random.choice(string.ascii_uppercase + string.digits) transport.mkdir(directory) - dest_directory = directory + '_copy' + dest_directory = f'{directory}_copy' transport.copy(directory, dest_directory) with self.assertRaises(ValueError): diff --git a/tests/utils/archives.py b/tests/utils/archives.py index f9964263ef..a81f0dd561 100644 --- a/tests/utils/archives.py +++ b/tests/utils/archives.py @@ -61,7 +61,7 @@ def get_archive_file(archive, filepath=None, external_module=None): if not os.path.isfile(dirpath_archive): dirpath_parent = os.path.dirname(dirpath_archive) - raise ValueError('archive {} does not exist in the archives directory {}'.format(archive, dirpath_parent)) + raise ValueError(f'archive {archive} does not exist in the archives directory {dirpath_parent}') return dirpath_archive @@ -106,7 +106,7 @@ def get_json_files(archive, silent=True, filepath=None, external_module=None): with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle: metadata = json.load(fhandle) except IOError: - raise NotExistent('export archive does not contain the required file {}'.format(fhandle.filename)) + raise NotExistent(f'export archive does not contain the required file {fhandle.filename}') # Return metadata.json and data.json return metadata, data @@ -137,7 +137,7 @@ def migrate_archive(input_file, output_file, silent=True): with open(folder.get_abs_path('metadata.json'), 'r', encoding='utf8') as fhandle: metadata = json.load(fhandle) except IOError: - raise NotExistent('export archive does not contain the required file {}'.format(fhandle.filename)) + raise NotExistent(f'export archive does not contain the required file {fhandle.filename}') # Migrate migrate_recursively(metadata, data, folder) diff --git a/tests/utils/configuration.py b/tests/utils/configuration.py index 6838aecd47..ae922cef12 100644 --- 
a/tests/utils/configuration.py +++ b/tests/utils/configuration.py @@ -37,7 +37,7 @@ def create_mock_profile(name, repository_dirpath=None, **kwargs): 'database_name': kwargs.pop('database_name', name), 'database_username': kwargs.pop('database_username', 'user'), 'database_password': kwargs.pop('database_password', 'pass'), - 'repository_uri': 'file:///' + os.path.join(repository_dirpath, 'repository_' + name), + 'repository_uri': f"file:///{os.path.join(repository_dirpath, f'repository_{name}')}", } return Profile(name, profile_dictionary) diff --git a/utils/dependency_management.py b/utils/dependency_management.py index 5fa2d9154e..14f30c5280 100755 --- a/utils/dependency_management.py +++ b/utils/dependency_management.py @@ -45,7 +45,7 @@ def _load_setup_cfg(): with open(ROOT / 'setup.json') as setup_json_file: return json.load(setup_json_file) except json.decoder.JSONDecodeError as error: # pylint: disable=no-member - raise DependencySpecificationError("Error while parsing 'setup.json' file: {}".format(error)) + raise DependencySpecificationError(f"Error while parsing 'setup.json' file: {error}") except FileNotFoundError: raise DependencySpecificationError("The 'setup.json' file is missing!") @@ -56,7 +56,7 @@ def _load_environment_yml(): with open(ROOT / 'environment.yml') as file: return yaml.load(file, Loader=yaml.SafeLoader) except yaml.error.YAMLError as error: - raise DependencySpecificationError("Error while parsing 'environment.yml':\n{}".format(error)) + raise DependencySpecificationError(f"Error while parsing 'environment.yml':\n{error}") except FileNotFoundError as error: raise DependencySpecificationError(str(error)) @@ -222,12 +222,12 @@ def validate_environment_yml(): # pylint: disable=too-many-branches 'conda-forge', 'defaults' ], "channels should be 'conda-forge', 'defaults'." except AssertionError as error: - raise DependencySpecificationError("Error in 'environment.yml': {}".format(error)) + raise DependencySpecificationError(f"Error in 'environment.yml': {error}") try: conda_dependencies = {Requirement.parse(d) for d in environment_yml['dependencies']} except TypeError as error: - raise DependencySpecificationError("Error while parsing requirements from 'environment_yml': {}".format(error)) + raise DependencySpecificationError(f"Error while parsing requirements from 'environment_yml': {error}") # Attempt to find the specification of Python among the 'environment.yml' dependencies. for dependency in conda_dependencies: @@ -242,9 +242,7 @@ def validate_environment_yml(): # pylint: disable=too-many-branches for spec in conda_python_dependency.specifier: expected_classifier = 'Programming Language :: Python :: ' + spec.version if expected_classifier not in setup_cfg['classifiers']: - raise DependencySpecificationError( - "Trove classifier '{}' missing from 'setup.json'.".format(expected_classifier) - ) + raise DependencySpecificationError(f"Trove classifier '{expected_classifier}' missing from 'setup.json'.") # The Python version should be specified as supported in 'setup.json'. if not any(spec.version >= other_spec.version for other_spec in python_requires.specifier): @@ -254,7 +252,7 @@ def validate_environment_yml(): # pylint: disable=too-many-branches break else: - raise DependencySpecificationError("Missing specifier: '{}'.".format(conda_python_dependency)) + raise DependencySpecificationError(f"Missing specifier: '{conda_python_dependency}'.") # Check that all requirements specified in the setup.json file are found in the # conda environment specification. 
@@ -265,7 +263,7 @@ def validate_environment_yml(): # pylint: disable=too-many-branches try: conda_dependencies.remove(_setuptools_to_conda(req)) except KeyError: - raise DependencySpecificationError("Requirement '{}' not specified in 'environment.yml'.".format(req)) + raise DependencySpecificationError(f"Requirement '{req}' not specified in 'environment.yml'.") # The only dependency left should be the one for Python itself, which is not part of # the install_requirements for setuptools. @@ -301,7 +299,7 @@ def validate_pyproject_toml(): pyproject_requires = [Requirement.parse(r) for r in pyproject['build-system']['requires']] if reentry_requirement not in pyproject_requires: - raise DependencySpecificationError("Missing requirement '{}' in 'pyproject.toml'.".format(reentry_requirement)) + raise DependencySpecificationError(f"Missing requirement '{reentry_requirement}' in 'pyproject.toml'.") click.secho('Pyproject.toml dependency specification is consistent.', fg='green') diff --git a/utils/validate_consistency.py b/utils/validate_consistency.py index 696f87366f..b69f6fedda 100644 --- a/utils/validate_consistency.py +++ b/utils/validate_consistency.py @@ -123,7 +123,7 @@ def replace_block_in_file(filepath, block_start_marker, block_end_marker, block) try: index_start, index_end = determine_block_positions(lines, block_start_marker, block_end_marker) except RuntimeError as exception: - raise RuntimeError('problem rewriting file `{}`:: {}'.format(filepath, exception)) + raise RuntimeError(f'problem rewriting file `{filepath}`:: {exception}') lines = replace_line_block(lines, block, index_start, index_end) @@ -158,22 +158,22 @@ def validate_verdi_documentation(): # Generate the new block with the command help strings header = 'Commands' message = 'Below is a list with all available subcommands.' - block = ['{}\n{}\n{}\n\n'.format(header, '=' * len(header), message)] + block = [f"{header}\n{'=' * len(header)}\n{message}\n\n"] for name, command in sorted(verdi.commands.items()): ctx = click.Context(command, terminal_width=width) - header_label = '.. _reference:command-line:verdi-{name:}:'.format(name=name) - header_string = '``verdi {name:}``'.format(name=name) + header_label = f'.. _reference:command-line:verdi-{name}:' + header_string = f'``verdi {name}``' header_underline = '-' * len(header_string) - block.append(header_label + '\n\n') - block.append(header_string + '\n') - block.append(header_underline + '\n\n') + block.append(f'{header_label}\n\n') + block.append(f'{header_string}\n') + block.append(f'{header_underline}\n\n') block.append('.. code:: console\n\n') # Mark the beginning of a literal block for line in ctx.get_help().split('\n'): if line: - block.append(' {}\n'.format(line)) + block.append(f' {line}\n') else: block.append('\n') block.append('\n\n') @@ -203,9 +203,9 @@ def validate_version(): setup_content = get_setup_json() if version != setup_content['version']: click.echo('Version number mismatch detected:') - click.echo("Version number in '{}': {}".format(FILENAME_SETUP_JSON, setup_content['version'])) - click.echo("Version number in '{}/__init__.py': {}".format('aiida', version)) - click.echo("Updating version in '{}' to: {}".format(FILENAME_SETUP_JSON, version)) + click.echo(f"Version number in '{FILENAME_SETUP_JSON}': {setup_content['version']}") + click.echo(f"Version number in 'aiida/__init__.py': {version}") + click.echo(f"Updating version in '{FILENAME_SETUP_JSON}' to: {version}") setup_content['version'] = version write_setup_json(setup_content)