Replace old format string interpolation with f-strings (#4400)
Since Python 3.5 is no longer supported, string interpolation with `str.format()` can now be replaced by f-strings, introduced in Python 3.6, which are more readable, require fewer characters and are more efficient.
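
For illustration, a minimal before/after of the kind of conversion applied throughout (the variable names are made up and not taken from the code base):

    pk, state = 42, 'finished'

    # Old style: interpolation via str.format()
    message = 'node<{}> is in state {}'.format(pk, state)

    # New style: f-string (Python 3.6+), shorter and interpolated inline
    message = f'node<{pk}> is in state {state}'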

Note that `pylint` issues a warning when f-strings are used for log messages, just as it does for messages interpolated with `str.format()`. The reasoning is that this is slightly inefficient, because the string is always interpolated even if the log record is discarded, and that by not passing the formatting parameters as arguments the available metadata is reduced. I feel these inefficiencies are premature optimizations: they are minimal and do not outweigh the improved readability and maintainability of f-strings. That is why the `pylint` config is updated to ignore the warning `logging-fstring-interpolation`, which replaces the previously ignored `logging-format-interpolation`.
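
For context, a minimal sketch of the two logging styles the warning distinguishes (the logger and message are made up):

    import logging

    logger = logging.getLogger(__name__)
    pk = 42

    # Lazy %-style formatting preferred by pylint: the message is only
    # interpolated if the record is actually emitted.
    logger.info('processing node<%s>', pk)

    # f-string: interpolated eagerly, even if the INFO level is filtered out,
    # and the value of `pk` is no longer passed along as a separate argument.
    logger.info(f'processing node<{pk}>')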

The majority of the conversions were done automatically with the conversion tool `flynt`, which is also added as a pre-commit hook. It is placed before the `yapf` step so that, since `flynt` touches formatting, `yapf` gets a chance to check the result.
sphuber authored Sep 30, 2020
1 parent 4544bc4 commit 02248cf
Showing 346 changed files with 2,152 additions and 2,625 deletions.
19 changes: 9 additions & 10 deletions .ci/polish/cli.py
@@ -114,23 +114,23 @@ def launch(expression, code, use_calculations, use_calcfunctions, sleep, timeout
valid, error = validate(expression)

if not valid:
click.echo("the expression '{}' is invalid: {}".format(expression, error))
click.echo(f"the expression '{expression}' is invalid: {error}")
sys.exit(1)

filename = 'polish_{}.py'.format(str(uuid.uuid4().hex))
filename = f'polish_{str(uuid.uuid4().hex)}.py'
evaluated = evaluate(expression, modulo)
outlines, stack = generate_outlines(expression)
outlines_string = format_outlines(outlines, use_calculations, use_calcfunctions)
write_workchain(outlines_string, filename=filename)

click.echo('Expression: {}'.format(expression))
click.echo(f'Expression: {expression}')

if not dry_run:
try:
workchain_module = 'polish_workchains.{}'.format(filename.replace('.py', ''))
workchain_module = f"polish_workchains.{filename.replace('.py', '')}"
workchains = importlib.import_module(workchain_module)
except ImportError:
click.echo('could not import the {} module'.format(workchain_module))
click.echo(f'could not import the {workchain_module} module')
sys.exit(1)

inputs = {'modulo': Int(modulo), 'operands': Str(' '.join(stack))}
@@ -153,26 +153,25 @@ def launch(expression, code, use_calculations, use_calcfunctions, sleep, timeout
if timed_out:
click.secho('Failed: ', fg='red', bold=True, nl=False)
click.secho(
'the workchain<{}> did not finish in time and the operation timed out'.format(workchain.pk),
bold=True
f'the workchain<{workchain.pk}> did not finish in time and the operation timed out', bold=True
)
sys.exit(1)

try:
result = workchain.outputs.result
except AttributeError:
click.secho('Failed: ', fg='red', bold=True, nl=False)
click.secho('the workchain<{}> did not return a result output node'.format(workchain.pk), bold=True)
click.secho(f'the workchain<{workchain.pk}> did not return a result output node', bold=True)
sys.exit(1)

else:
results, workchain = run_get_node(workchains.Polish00WorkChain, **inputs)
result = results['result']

click.echo('Evaluated : {}'.format(evaluated))
click.echo(f'Evaluated : {evaluated}')

if not dry_run:
click.echo('Workchain : {} <{}>'.format(result, workchain.pk))
click.echo(f'Workchain : {result} <{workchain.pk}>')

if result != evaluated:
click.secho('Failed: ', fg='red', bold=True, nl=False)
10 changes: 5 additions & 5 deletions .ci/polish/lib/expression.py
@@ -73,7 +73,7 @@ def validate(expression):
try:
symbols = expression.split()
except ValueError as exception:
return False, 'failed to split the expression into symbols: {}'.format(exception)
return False, f'failed to split the expression into symbols: {exception}'

while len(symbols) > 1:
try:
@@ -85,19 +85,19 @@ def validate(expression):
try:
operand = int(operand)
except ValueError:
return False, 'the operand {} is not a valid integer'.format(operand)
return False, f'the operand {operand} is not a valid integer'

if operator not in OPERATORS.keys():
return False, 'the operator {} is not supported'.format(operator)
return False, f'the operator {operator} is not supported'

if OPERATORS[operator] is operators.pow and operand < 0:
return False, 'a negative operand {} was found for the ^ operator, which is not allowed'.format(operand)
return False, f'a negative operand {operand} was found for the ^ operator, which is not allowed'

# At this point the symbols list should only contain the initial operand
try:
operand = int(symbols.pop())
except ValueError:
return False, 'the operand {} is not a valid integer'.format(operand)
return False, f'the operand {operand} is not a valid integer'

if symbols:
return False, 'incorrect number of symbols found, should contain N operands followed by (N - 1) operators'
6 changes: 3 additions & 3 deletions .ci/polish/lib/workchain.py
@@ -231,15 +231,15 @@ def write_workchain(outlines, directory=None, filename=None):

outline_string = ''
for subline in outline.split('\n'):
outline_string += '\t\t\t{}\n'.format(subline)
outline_string += f'\t\t\t{subline}\n'

if counter == len(outlines) - 1:
child_class = None
else:
child_class = 'Polish{:02d}WorkChain'.format(counter + 1)
child_class = f'Polish{counter + 1:02d}WorkChain'

subs = {
'class_name': 'Polish{:02d}WorkChain'.format(counter),
'class_name': f'Polish{counter:02d}WorkChain',
'child_class': child_class,
'outline': outline_string,
}
46 changes: 21 additions & 25 deletions .ci/test_daemon.py
@@ -37,14 +37,14 @@ def print_daemon_log():
daemon_client = get_daemon_client()
daemon_log = daemon_client.daemon_log_file

print("Output of 'cat {}':".format(daemon_log))
print(f"Output of 'cat {daemon_log}':")
try:
print(subprocess.check_output(
['cat', '{}'.format(daemon_log)],
['cat', f'{daemon_log}'],
stderr=subprocess.STDOUT,
))
except subprocess.CalledProcessError as exception:
print('Note: the command failed, message: {}'.format(exception))
print(f'Note: the command failed, message: {exception}')


def jobs_have_finished(pks):
@@ -55,21 +55,21 @@ def jobs_have_finished(pks):

for node in node_list:
if not node.is_terminated:
print('not terminated: {} [{}]'.format(node.pk, node.process_state))
print('{}/{} finished'.format(num_finished, len(finished_list)))
print(f'not terminated: {node.pk} [{node.process_state}]')
print(f'{num_finished}/{len(finished_list)} finished')
return False not in finished_list


def print_report(pk):
"""Print the process report for given pk."""
print("Output of 'verdi process report {}':".format(pk))
print(f"Output of 'verdi process report {pk}':")
try:
print(subprocess.check_output(
['verdi', 'process', 'report', '{}'.format(pk)],
['verdi', 'process', 'report', f'{pk}'],
stderr=subprocess.STDOUT,
))
except subprocess.CalledProcessError as exception:
print('Note: the command failed, message: {}'.format(exception))
print(f'Note: the command failed, message: {exception}')


def validate_calculations(expected_results):
@@ -79,18 +79,14 @@ def validate_calculations(expected_results):
for pk, expected_dict in expected_results.items():
calc = load_node(pk)
if not calc.is_finished_ok:
print(
'Calculation<{}> not finished ok: process_state<{}> exit_status<{}>'.format(
pk, calc.process_state, calc.exit_status
)
)
print(f'Calc<{pk}> not finished ok: process_state<{calc.process_state}> exit_status<{calc.exit_status}>')
print_report(pk)
valid = False

try:
actual_dict = calc.outputs.output_parameters.get_dict()
except exceptions.NotExistent:
print('Could not retrieve `output_parameters` node for Calculation<{}>'.format(pk))
print(f'Could not retrieve `output_parameters` node for Calculation<{pk}>')
print_report(pk)
valid = False

@@ -101,7 +97,7 @@ def validate_calculations(expected_results):
pass

if actual_dict != expected_dict:
print('* UNEXPECTED VALUE {} for calc pk={}: I expected {}'.format(actual_dict, pk, expected_dict))
print(f'* UNEXPECTED VALUE {actual_dict} for calc pk={pk}: I expected {expected_dict}')
valid = False

return valid
@@ -166,7 +162,7 @@ def validate_cached(cached_calcs):
valid = False

if '_aiida_cached_from' not in calc.extras or calc.get_hash() != calc.get_extra('_aiida_hash'):
print('Cached calculation<{}> has invalid hash'.format(calc.pk))
print(f'Cached calculation<{calc.pk}> has invalid hash')
print_report(calc.pk)
valid = False

@@ -176,7 +172,7 @@ def validate_cached(cached_calcs):
files_cached = calc.list_object_names()

if not files_cached:
print('Cached calculation <{}> does not have any raw inputs files'.format(calc.pk))
print(f'Cached calculation <{calc.pk}> does not have any raw inputs files')
print_report(calc.pk)
valid = False
if not files_original:
@@ -204,7 +200,7 @@ def launch_calculation(code, counter, inputval):
"""
process, inputs, expected_result = create_calculation_process(code=code, inputval=inputval)
calc = submit(process, **inputs)
print('[{}] launched calculation {}, pk={}'.format(counter, calc.uuid, calc.pk))
print(f'[{counter}] launched calculation {calc.uuid}, pk={calc.pk}')
return calc, expected_result


@@ -214,7 +210,7 @@ def run_calculation(code, counter, inputval):
"""
process, inputs, expected_result = create_calculation_process(code=code, inputval=inputval)
_, calc = run.get_node(process, **inputs)
print('[{}] ran calculation {}, pk={}'.format(counter, calc.uuid, calc.pk))
print(f'[{counter}] ran calculation {calc.uuid}, pk={calc.pk}')
return calc, expected_result


@@ -361,14 +357,14 @@ def main():
run_multiply_add_workchain()

# Submitting the Calculations the new way directly through the launchers
print('Submitting {} calculations to the daemon'.format(NUMBER_CALCULATIONS))
print(f'Submitting {NUMBER_CALCULATIONS} calculations to the daemon')
for counter in range(1, NUMBER_CALCULATIONS + 1):
inputval = counter
calc, expected_result = launch_calculation(code=code_doubler, counter=counter, inputval=inputval)
expected_results_calculations[calc.pk] = expected_result

# Submitting the Workchains
print('Submitting {} workchains to the daemon'.format(NUMBER_WORKCHAINS))
print(f'Submitting {NUMBER_WORKCHAINS} workchains to the daemon')
for index in range(NUMBER_WORKCHAINS):
inp = Int(index)
_, node = run.get_node(NestedWorkChain, inp=inp)
@@ -435,7 +431,7 @@ def main():
# that the test machine is shut down because there is no output

print('#' * 78)
print('####### TIME ELAPSED: {} s'.format(time.time() - start_time))
print(f'####### TIME ELAPSED: {time.time() - start_time} s')
print('#' * 78)
print("Output of 'verdi process list -a':")
try:
@@ -444,7 +440,7 @@ def main():
stderr=subprocess.STDOUT,
))
except subprocess.CalledProcessError as exception:
print('Note: the command failed, message: {}'.format(exception))
print(f'Note: the command failed, message: {exception}')

print("Output of 'verdi daemon status':")
try:
@@ -453,7 +449,7 @@ def main():
stderr=subprocess.STDOUT,
))
except subprocess.CalledProcessError as exception:
print('Note: the command failed, message: {}'.format(exception))
print(f'Note: the command failed, message: {exception}')

if jobs_have_finished(pks):
print('Calculation terminated its execution')
@@ -463,7 +459,7 @@ def main():
if exited_with_timeout:
print_daemon_log()
print('')
print('Timeout!! Calculation did not complete after {} seconds'.format(TIMEOUTSECS))
print(f'Timeout!! Calculation did not complete after {TIMEOUTSECS} seconds')
sys.exit(2)
else:
# Launch the same calculations but with caching enabled -- these should be FINISHED immediately
9 changes: 9 additions & 0 deletions .pre-commit-config.yaml
@@ -8,6 +8,15 @@ repos:
- id: mixed-line-ending
- id: trailing-whitespace

- repo: https://github.com/ikamensh/flynt/
rev: '0.55'
hooks:
- id: flynt
args: [
'--line-length=120',
'--fail-on-change',
]

- repo: https://github.com/pre-commit/mirrors-yapf
rev: v0.30.0
hooks:
8 changes: 4 additions & 4 deletions aiida/__init__.py
@@ -122,10 +122,10 @@ def _get_raw_file_header():
:return: default AiiDA source file header
:rtype: str
"""
return """This file has been created with AiiDA v. {}
return f"""This file has been created with AiiDA v. {__version__}
If you use AiiDA for publication purposes, please cite:
{}
""".format(__version__, __paper__)
{__paper__}
"""


def get_file_header(comment_char='# '):
@@ -143,4 +143,4 @@ def get_file_header(comment_char='# '):
:rtype: str
"""
lines = _get_raw_file_header().splitlines()
return '\n'.join('{}{}'.format(comment_char, line) for line in lines)
return '\n'.join(f'{comment_char}{line}' for line in lines)
2 changes: 1 addition & 1 deletion aiida/backends/__init__.py
@@ -27,4 +27,4 @@ def get_backend_manager(backend):
from aiida.backends.sqlalchemy.manager import SqlaBackendManager
return SqlaBackendManager()

raise Exception('unknown backend type `{}`'.format(backend))
raise Exception(f'unknown backend type `{backend}`')
@@ -31,8 +31,8 @@ class Migration(migrations.Migration):

operations = [
migrations.RunSQL(
""" DELETE FROM db_dbextra WHERE key='""" + _HASH_EXTRA_KEY + """';""",
reverse_sql=""" DELETE FROM db_dbextra WHERE key='""" + _HASH_EXTRA_KEY + """';"""
f" DELETE FROM db_dbextra WHERE key='{_HASH_EXTRA_KEY}';",
reverse_sql=f" DELETE FROM db_dbextra WHERE key='{_HASH_EXTRA_KEY}';"
),
upgrade_schema_version(REVISION, DOWN_REVISION)
]
4 changes: 2 additions & 2 deletions aiida/backends/djsite/db/migrations/0024_dblog_update.py
@@ -181,7 +181,7 @@ def export_and_clean_workflow_logs(apps, schema_editor):

# If delete_on_close is False, we are running for the user and add additional message of file location
if not delete_on_close:
click.echo('Exported legacy workflow logs to {}'.format(filename))
click.echo(f'Exported legacy workflow logs to {filename}')

# Now delete the records
DbLog.objects.filter(objname__startswith=leg_workflow_prefix).delete()
@@ -205,7 +205,7 @@ def export_and_clean_workflow_logs(apps, schema_editor):

# If delete_on_close is False, we are running for the user and add additional message of file location
if not delete_on_close:
click.echo('Exported unexpected entity logs to {}'.format(filename))
click.echo(f'Exported unexpected entity logs to {filename}')

# Now delete the records
DbLog.objects.exclude(objname__startswith=node_prefix).exclude(objname__startswith=leg_workflow_prefix).delete()
@@ -68,7 +68,7 @@ def export_workflow_data(apps, _):

# If delete_on_close is False, we are running for the user and add additional message of file location
if not delete_on_close:
echo.echo_info('Exported workflow data to {}'.format(filename))
echo.echo_info(f'Exported workflow data to {filename}')


class Migration(migrations.Migration):
@@ -127,7 +127,7 @@ def attributes_to_dict(attr_list):
try:
tmp_d = select_from_key(a.key, d)
except ValueError:
echo.echo_error("Couldn't transfer attribute {} with key {} for dbnode {}".format(a.id, a.key, a.dbnode_id))
echo.echo_error(f"Couldn't transfer attribute {a.id} with key {a.key} for dbnode {a.dbnode_id}")
error = True
continue
key = a.key.split('.')[-1]
4 changes: 2 additions & 2 deletions aiida/backends/djsite/db/migrations/0039_reset_hash.py
@@ -41,8 +41,8 @@ class Migration(migrations.Migration):
operations = [
migrations.RunPython(notify_user, reverse_code=notify_user),
migrations.RunSQL(
"""UPDATE db_dbnode SET extras = extras #- '{""" + _HASH_EXTRA_KEY + """}'::text[];""",
reverse_sql="""UPDATE db_dbnode SET extras = extras #- '{""" + _HASH_EXTRA_KEY + """}'::text[];"""
f"UPDATE db_dbnode SET extras = extras #- '{{{_HASH_EXTRA_KEY}}}'::text[];",
reverse_sql=f"UPDATE db_dbnode SET extras = extras #- '{{{_HASH_EXTRA_KEY}}}'::text[];"
),
upgrade_schema_version(REVISION, DOWN_REVISION)
]
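
For reference, literal braces inside an f-string are written as doubled braces, which is why the `0039_reset_hash` hunk above triples the braces around the hash key. A minimal sketch (the key value is an assumption for illustration):

    # Assumed value of _HASH_EXTRA_KEY, for illustration only.
    key = '_aiida_hash'

    # Doubled braces yield literal braces; only the innermost pair interpolates.
    sql = f"UPDATE db_dbnode SET extras = extras #- '{{{key}}}'::text[];"

    print(sql)  # UPDATE db_dbnode SET extras = extras #- '{_aiida_hash}'::text[];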