From bc8bd0a5e88e3cd4bed5d10484a4696165b203cb Mon Sep 17 00:00:00 2001 From: Spyros Zoupanos Date: Fri, 3 May 2019 10:13:54 +0200 Subject: [PATCH 01/11] Migrate node attributes and extras to JSONB for Django backend With Django adding support for the Postgres JSONB field, the old custom implementation of the entity-attribute-value (EAV) model, used for the attributes and extras of the `DbNode` table, has been made obsolete. Here we change the `DbNode` model of the Django backend to use the `JSONB` type directly, and migrations are added to change the schema and migrate the existing data. The `DbSetting` table also used the custom EAV model and has likewise been migrated to use a JSONB field instead. The hard-coded dummy model that the query builder used to map the Django database models onto SqlAlchemy variants has been removed and replaced by Aldjemy models that are generated on the fly. --- .../0037_attributes_extras_settings_json.py | 283 +++++ .../backends/djsite/db/migrations/__init__.py | 31 +- aiida/backends/djsite/db/models.py | 1101 ++--------------- .../djsite/db/subtests/migrations/__init__.py | 9 + ...ns_0034_attributes_extras_settings_json.py | 630 ++++++++++ .../migrations/test_migrations_common.py | 111 ++ .../test_migrations_many.py} | 237 ++-- .../djsite/db/subtests/test_generic.py | 34 - .../backends/djsite/db/subtests/test_nodes.py | 7 +- .../backends/djsite/db/subtests/test_query.py | 29 +- aiida/backends/djsite/globalsettings.py | 35 +- aiida/backends/djsite/queries.py | 45 +- aiida/backends/djsite/settings.py | 16 +- aiida/backends/djsite/utils.py | 10 +- aiida/backends/sqlalchemy/globalsettings.py | 14 +- aiida/backends/sqlalchemy/models/computer.py | 4 + aiida/backends/sqlalchemy/models/node.py | 4 +- aiida/backends/sqlalchemy/models/utils.py | 74 -- aiida/backends/sqlalchemy/tests/test_nodes.py | 2 - aiida/backends/sqlalchemy/utils.py | 43 +- aiida/backends/testbase.py | 16 +- aiida/backends/testimplbase.py | 27 +- aiida/backends/tests/__init__.py | 5 +- .../backends/tests/test_export_and_import.py | 22 +- aiida/backends/tests/test_nodes.py | 20 +- aiida/backends/tests/test_query.py | 96 +- aiida/backends/utils.py | 99 ++ aiida/orm/implementation/django/convert.py | 51 +- .../orm/implementation/django/dummy_model.py | 235 ---- aiida/orm/implementation/django/groups.py | 1 + aiida/orm/implementation/django/nodes.py | 49 +- .../orm/implementation/django/querybuilder.py | 416 ++++--- aiida/orm/implementation/querybuilder.py | 47 +- aiida/orm/implementation/sqlalchemy/nodes.py | 15 +- .../implementation/sqlalchemy/querybuilder.py | 63 +- aiida/orm/implementation/sqlalchemy/utils.py | 15 +- aiida/orm/implementation/utils.py | 28 + aiida/orm/importexport.py | 174 +-- aiida/orm/querybuilder.py | 79 +- docs/source/nitpick-exceptions | 11 + 40 files changed, 2181 insertions(+), 2007 deletions(-) create mode 100644 aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py create mode 100644 aiida/backends/djsite/db/subtests/migrations/__init__.py create mode 100644 aiida/backends/djsite/db/subtests/migrations/test_migrations_0034_attributes_extras_settings_json.py create mode 100644 aiida/backends/djsite/db/subtests/migrations/test_migrations_common.py rename aiida/backends/djsite/db/subtests/{test_migrations.py => migrations/test_migrations_many.py} (82%) delete mode 100644 aiida/backends/sqlalchemy/models/utils.py delete mode 100644 aiida/orm/implementation/django/dummy_model.py create mode 100644 aiida/orm/implementation/utils.py diff --git 
a/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py b/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py new file mode 100644 index 0000000000..347784305a --- /dev/null +++ b/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name,import-error,no-name-in-module,too-few-public-methods,no-member +"""Adding JSONB fields for Node.attributes and Node.extras""" +from __future__ import absolute_import +from __future__ import print_function + +import math + +import click +import django.contrib.postgres.fields.jsonb +from django.db import migrations, models +from django.db import transaction + +from aiida.backends.djsite.db.migrations import upgrade_schema_version +from aiida.backends.utils import datetime_to_isoformat +from aiida.cmdline.utils import echo + +REVISION = '1.0.37' +DOWN_REVISION = '1.0.36' + +# Nodes are processed in groups of the following size +group_size = 1000 + + +def lazy_bulk_fetch(max_obj, max_count, fetch_func, start=0): + counter = start + while counter < max_count: + yield fetch_func()[counter:counter + max_obj] + counter += max_obj + + +def transition_attributes_extras(apps, _): + """ Migrate the DbAttribute and DbExtra tables into the attributes and extras columns of DbNode. """ + db_node_model = apps.get_model('db', 'DbNode') + + with transaction.atomic(): + total_node_no = db_node_model.objects.count() + + if total_node_no == 0: + return + + with click.progressbar(label='Updating attributes and extras', length=total_node_no, show_pos=True) as pr_bar: + fetcher = lazy_bulk_fetch(group_size, total_node_no, db_node_model.objects.all) + error = False + + for batch in fetcher: + for curr_dbnode in batch: + + # Migrating attributes + dbattrs = list(curr_dbnode.dbattributes.all()) + attrs, err_ = attributes_to_dict(sorted(dbattrs, key=lambda a: a.key)) + error |= err_ + curr_dbnode.attributes = attrs + + # Migrating extras + dbextr = list(curr_dbnode.dbextras.all()) + extr, err_ = attributes_to_dict(sorted(dbextr, key=lambda a: a.key)) + error |= err_ + curr_dbnode.extras = extr + + # Saving the result + curr_dbnode.save() + pr_bar.update(1) + + if error: + raise Exception("There have been some errors during the migration") + + +def transition_settings(apps, _): + """ Migrate the DbSetting EAV val into the JSONB val column of the same table. 
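+ For instance, an EAV row with datatype='float' and fval=1.5 ends up as val=1.5, a datatype='date' row has its dval serialized to an ISO 8601 string via datetime_to_isoformat, and NaN floats are stored as the string 'NaN', since JSON has no NaN literal. 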
""" + db_setting_model = apps.get_model('db', 'DbSetting') + + with transaction.atomic(): + total_settings_no = db_setting_model.objects.count() + + if total_settings_no == 0: + return + + with click.progressbar(label='Updating settings', length=total_settings_no, show_pos=True) as pr_bar: + fetcher = lazy_bulk_fetch(group_size, total_settings_no, db_setting_model.objects.all) + error = False + + for batch in fetcher: + for curr_dbsetting in batch: + + # Migrating dbsetting.val + dt = curr_dbsetting.datatype + val = None + if dt == "txt": + val = curr_dbsetting.tval + elif dt == "float": + val = curr_dbsetting.fval + if math.isnan(val): + val = 'NaN' + elif dt == "int": + val = curr_dbsetting.ival + elif dt == "bool": + val = curr_dbsetting.bval + elif dt == "date": + val = datetime_to_isoformat(curr_dbsetting.dval) + + curr_dbsetting.val = val + + # Saving the result + curr_dbsetting.save() + pr_bar.update(1) + + if error: + raise Exception("There has been some errors during the migration") + + +def attributes_to_dict(attr_list): + """ + Transform the attributes of a node into a dictionary. It assumes the key + are ordered alphabetically, and that they all belong to the same node. + """ + d = {} + + error = False + for a in attr_list: + try: + tmp_d = select_from_key(a.key, d) + except ValueError: + echo.echo_critical("Couldn't transfer attribute {} with key {} for dbnode {}".format( + a.id, a.key, a.dbnode_id)) + error = True + continue + key = a.key.split('.')[-1] + + if key.isdigit(): + key = int(key) + + dt = a.datatype + + if dt == "dict": + tmp_d[key] = {} + elif dt == "list": + tmp_d[key] = [None] * a.ival + else: + val = None + if dt == "txt": + val = a.tval + elif dt == "float": + val = a.fval + if math.isnan(val): + val = 'NaN' + elif dt == "int": + val = a.ival + elif dt == "bool": + val = a.bval + elif dt == "date": + val = datetime_to_isoformat(a.dval) + + tmp_d[key] = val + + return d, error + + +def select_from_key(key, d): + """ + Return element of the dict to do the insertion on. If it is foo.1.bar, it + will return d["foo"][1]. If it is only foo, it will return d directly. + """ + path = key.split('.')[:-1] + + tmp_d = d + for p in path: + if p.isdigit(): + tmp_d = tmp_d[int(p)] + else: + tmp_d = tmp_d[p] + + return tmp_d + + +class Migration(migrations.Migration): + """ + This migration changes Django backend to support the JSONB fields. + It is a schema migration that removes the DbAttribute and DbExtra + tables and their reference to the DbNode tables and adds the + corresponding JSONB columns to the DbNode table. + It is also a data migration that transforms and adds the data of + the DbAttribute and DbExtra tables to the JSONB columns to the + DbNode table. 
+ """ + + dependencies = [ + ('db', '0036_drop_computer_transport_params'), + ] + + operations = [ + # ############################################ + # Migration of the Attribute and Extras tables + # ############################################ + + # Create the DbNode.attributes JSONB and DbNode.extras JSONB fields + migrations.AddField( + model_name='dbnode', + name='attributes', + field=django.contrib.postgres.fields.jsonb.JSONField(default=None, null=True), + ), + migrations.AddField( + model_name='dbnode', + name='extras', + field=django.contrib.postgres.fields.jsonb.JSONField(default=None, null=True), + ), + # Migrate the data from the DbAttribute table to the JSONB field + migrations.RunPython(transition_attributes_extras, reverse_code=migrations.RunPython.noop), + migrations.AlterUniqueTogether( + name='dbattribute', + unique_together=set([]), + ), + # Delete the DbAttribute table + migrations.DeleteModel(name='DbAttribute',), + migrations.AlterUniqueTogether( + name='dbextra', + unique_together=set([]), + ), + # Delete the DbExtra table + migrations.DeleteModel(name='DbExtra',), + + # ############################### + # Migration of the Settings table + + # ############################### + # Create the DbSetting.val JSONB field + migrations.AddField( + model_name='dbsetting', + name='val', + field=django.contrib.postgres.fields.jsonb.JSONField(default=None, null=True), + ), + # Migrate the data from the DbSetting EAV to the JSONB val field + migrations.RunPython(transition_settings, reverse_code=migrations.RunPython.noop), + + # Delete the tval, fval, ival, bval, dval + migrations.RemoveField( + model_name='dbsetting', + name='tval', + ), + migrations.RemoveField( + model_name='dbsetting', + name='fval', + ), + migrations.RemoveField( + model_name='dbsetting', + name='ival', + ), + migrations.RemoveField( + model_name='dbsetting', + name='bval', + ), + migrations.RemoveField( + model_name='dbsetting', + name='dval', + ), + migrations.RemoveField( + model_name='dbsetting', + name='datatype', + ), + migrations.AlterField( + model_name='dbsetting', + name='key', + field=models.TextField(), + ), + migrations.AlterUniqueTogether( + name='dbsetting', + unique_together=set([]), + ), + migrations.AlterField( + model_name='dbsetting', + name='key', + field=models.CharField(max_length=1024, db_index=True, unique=True), + ), + upgrade_schema_version(REVISION, DOWN_REVISION), + ] diff --git a/aiida/backends/djsite/db/migrations/__init__.py b/aiida/backends/djsite/db/migrations/__init__.py index 29be557b63..c8451f0a25 100644 --- a/aiida/backends/djsite/db/migrations/__init__.py +++ b/aiida/backends/djsite/db/migrations/__init__.py @@ -16,18 +16,39 @@ from django.core.exceptions import ObjectDoesNotExist from aiida.common.exceptions import AiidaException, DbContentError from six.moves import range +from aiida.backends.djsite.utils import SCHEMA_VERSION_DB_KEY, SCHEMA_VERSION_DB_DESCRIPTION class DeserializationException(AiidaException): pass -LATEST_MIGRATION = '0036_drop_computer_transport_params' +LATEST_MIGRATION = '0037_attributes_extras_settings_json' def _update_schema_version(version, apps, schema_editor): - from aiida.backends.djsite.utils import set_db_schema_version - set_db_schema_version(version) + """ + The update schema uses the current models (and checks if the value is stored in EAV mode or JSONB) + to avoid to use the DbSettings schema that may change (as it changed with the migration of the + settings table to JSONB) + """ + db_setting_model = apps.get_model('db', 
'DbSetting') + res = db_setting_model.objects.filter(key=SCHEMA_VERSION_DB_KEY).first() + # If there is no schema record, create one + if res is None: + res = db_setting_model() + res.key = SCHEMA_VERSION_DB_KEY + res.description = SCHEMA_VERSION_DB_DESCRIPTION + + # If it stores the value in EAV format, set the value in the tval field + if hasattr(res, 'tval'): + res.tval = str(version) + # Otherwise set it in the val (JSONB) field + else: + res.val = str(version) + + # Store the final result + res.save() def upgrade_schema_version(up_revision, down_revision): @@ -54,7 +75,7 @@ def current_schema_version(): # This was done because: # 1) The DbAttribute object loaded with apps.get_model() does not provide the class methods # 2) When the django model changes the migration will continue to work -# 3) If we defined in the migration a new class with these methodds as an extension of the DbAttribute class, +# 3) If we defined in the migration a new class with these methods as an extension of the DbAttribute class, # django detects a change in the model and creates a new migration @@ -94,7 +115,7 @@ def _deserialize_attribute(mainitem, subitems, sep, original_class=None, from the number declared in the ival field). :return: the deserialized value - :raise aiida.backends.djsite.db.models.DeserializationException: if an error occurs + :raise aiida.backends.djsite.db.migrations.DeserializationException: if an error occurs """ from aiida.common import json from aiida.common.timezone import ( diff --git a/aiida/backends/djsite/db/models.py b/aiida/backends/djsite/db/models.py index e6cb080517..b9465aa409 --- a/aiida/backends/djsite/db/models.py +++ b/aiida/backends/djsite/db/models.py @@ -7,24 +7,26 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### +from __future__ import absolute_import from __future__ import division from __future__ import print_function -from __future__ import absolute_import import contextlib import six from six.moves import range -from django.db import models as m + from django.contrib.postgres.fields import JSONField -from django.utils.encoding import python_2_unicode_compatible -from django.core.exceptions import ObjectDoesNotExist +from django.db import models as m from django.db.models.query import QuerySet +from django.utils.encoding import python_2_unicode_compatible +from pytz import UTC +import aiida.backends.djsite.db.migrations as migrations +from aiida.backends.djsite.settings import AUTH_USER_MODEL +from aiida.backends.utils import datetime_to_isoformat, isoformat_to_datetime from aiida.common import timezone from aiida.common.utils import get_new_uuid -from aiida.common.exceptions import DbContentError -import aiida.backends.djsite.db.migrations as migrations -from aiida.backends.utils import AIIDA_ATTRIBUTE_SEP # This variable identifies the schema version of this file. # Every time you change the schema below in *ANY* way, REMEMBER TO CHANGE @@ -130,10 +132,84 @@ class DbNode(m.Model): # to it. 
dbcomputer = m.ForeignKey('DbComputer', null=True, on_delete=m.PROTECT, related_name='dbnodes') + # JSON Attributes + attributes = JSONField(default=None, null=True) + # JSON Extras + extras = JSONField(default=None, null=True) + objects = m.Manager() # Return aiida Node instances or their subclasses instead of DbNode instances aiidaobjects = AiidaObjectManager() + def __init__(self, *args, **kwargs): + super(DbNode, self).__init__(*args, **kwargs) + + if self.attributes is None: + self.attributes = dict() + else: + self.attributes = datetime_to_isoformat(self.attributes) + + if self.extras is None: + self.extras = dict() + else: + self.extras = datetime_to_isoformat(self.extras) + + def set_attribute(self, key, value): + DbNode._set_attr(self.attributes, key, value) + self.save() + + def reset_attributes(self, attributes): + self.attributes = dict() + self.set_attributes(attributes) + + def set_attributes(self, attributes): + for key, value in attributes.items(): + DbNode._set_attr(self.attributes, key, value) + self.save() + + def set_extra(self, key, value): + DbNode._set_attr(self.extras, key, value) + self.save() + + def set_extras(self, extras): + for key, value in extras.items(): + DbNode._set_attr(self.extras, key, value) + self.save() + + def reset_extras(self, new_extras): + self.extras.clear() + self.extras.update(new_extras) + self.save() + + def del_attribute(self, key): + DbNode._del_attr(self.attributes, key) + self.save() + + def del_extra(self, key): + DbNode._del_attr(self.extras, key) + self.save() + + def get_attributes(self): + return isoformat_to_datetime(self.attributes) + + def get_extras(self): + return isoformat_to_datetime(self.extras) + + @staticmethod + def _set_attr(d, key, value): + if '.' in key: + raise ValueError("We don't know how to treat keys with dots in them yet") + # This is important in order to properly handle datetime objects + d[key] = datetime_to_isoformat(value) + + @staticmethod + def _del_attr(d, key): + if '.' in key: + raise ValueError("We don't know how to treat keys with dots in them yet") + if key not in d: + raise AttributeError("Key {} does not exist".format(key)) + del d[key] + def get_simple_name(self, invalid_result=None): """ Return a string with the last part of the type name. @@ -145,7 +221,7 @@ def get_simple_name(self, invalid_result=None): :param invalid_result: The value to be returned if the node type is not recognized. """ - thistype = self.type + thistype = self.node_type # Fix for base class if thistype == "": thistype = "node.Node." @@ -155,20 +231,6 @@ def get_simple_name(self, invalid_result=None): thistype = thistype[:-1] # Strip final dot return thistype.rpartition('.')[2] - @property - def attributes(self): - """ - Return all attributes of the given node as a single dictionary. - """ - return DbAttribute.get_all_values_for_node(self) - - @property - def extras(self): - """ - Return all extras of the given node as a single dictionary. 
- """ - return DbExtra.get_all_values_for_node(self) - def __str__(self): simplename = self.get_simple_name(invalid_result="Unknown") # node pk + type @@ -200,987 +262,62 @@ def __str__(self): self.output.pk, ) -attrdatatype_choice = ( - ('float', 'float'), - ('int', 'int'), - ('txt', 'txt'), - ('bool', 'bool'), - ('date', 'date'), - ('json', 'json'), - ('dict', 'dict'), - ('list', 'list'), - ('none', 'none')) - -from aiida.common.exceptions import AiidaException - - -class DeserializationException(AiidaException): - pass - - -def _deserialize_attribute(mainitem, subitems, sep, original_class=None, - original_pk=None, lesserrors=False): - """ - Deserialize a single attribute. - - :param mainitem: the main item (either the attribute itself for base - types (None, string, ...) or the main item for lists and dicts. - Must contain the 'key' key and also the following keys: - datatype, tval, fval, ival, bval, dval. - NOTE that a type check is not performed! tval is expected to be a string, - dval a date, etc. - :param subitems: must be a dictionary of dictionaries. In the top-level dictionary, - the key must be the key of the attribute, stripped of all prefixes - (i.e., if the mainitem has key 'a.b' and we pass subitems - 'a.b.0', 'a.b.1', 'a.b.1.c', their keys must be '0', '1', '1.c'). - It must be None if the value is not iterable (int, str, - float, ...). - It is an empty dictionary if there are no subitems. - :param sep: a string, the separator between subfields (to separate the - name of a dictionary from the keys it contains, for instance) - :param original_class: if these elements come from a specific subclass - of DbMultipleValueAttributeBaseClass, pass here the class (note: the class, - not the instance!). This is used only in case the wrong number of elements - is found in the raw data, to print a more meaningful message (if the class - has a dbnode associated to it) - :param original_pk: if the elements come from a specific subclass - of DbMultipleValueAttributeBaseClass that has a dbnode associated to it, - pass here the PK integer. This is used only in case the wrong number - of elements is found in the raw data, to print a more meaningful message - :param lesserrors: If set to True, in some cases where the content of the - DB is not consistent but data is still recoverable, - it will just log the message rather than raising - an exception (e.g. if the number of elements of a dictionary is different - from the number declared in the ival field). 
- - :return: the deserialized value - :raise aiida.backends.djsite.db.models.DeserializationException: if an error occurs - """ - from aiida.common import json - from aiida.common.timezone import ( - is_naive, make_aware, get_current_timezone) - - from aiida.common import AIIDA_LOGGER - - if mainitem['datatype'] == 'none': - if subitems: - raise DeserializationException("'{}' is of a base type, " - "but has subitems!".format(mainitem.key)) - return None - elif mainitem['datatype'] == 'bool': - if subitems: - raise DeserializationException("'{}' is of a base type, " - "but has subitems!".format(mainitem.key)) - return mainitem['bval'] - elif mainitem['datatype'] == 'int': - if subitems: - raise DeserializationException("'{}' is of a base type, " - "but has subitems!".format(mainitem.key)) - return mainitem['ival'] - elif mainitem['datatype'] == 'float': - if subitems: - raise DeserializationException("'{}' is of a base type, " - "but has subitems!".format(mainitem.key)) - return mainitem['fval'] - elif mainitem['datatype'] == 'txt': - if subitems: - raise DeserializationException("'{}' is of a base type, " - "but has subitems!".format(mainitem.key)) - return mainitem['tval'] - elif mainitem['datatype'] == 'date': - if subitems: - raise DeserializationException("'{}' is of a base type, " - "but has subitems!".format(mainitem.key)) - if is_naive(mainitem['dval']): - return make_aware(mainitem['dval'], get_current_timezone()) - else: - return mainitem['dval'] - - elif mainitem['datatype'] == 'list': - # subitems contains all subitems, here I store only those of - # deepness 1, i.e. if I have subitems '0', '1' and '1.c' I - # store only '0' and '1' - firstlevelsubdict = {k: v for k, v in subitems.items() - if sep not in k} - - # For checking, I verify the expected values - expected_set = set(["{:d}".format(i) - for i in range(mainitem['ival'])]) - received_set = set(firstlevelsubdict.keys()) - # If there are more entries than expected, but all expected - # ones are there, I just issue an error but I do not stop. 
- - if not expected_set.issubset(received_set): - if (original_class is not None and original_class._subspecifier_field_name is not None): - subspecifier_string = "{}={} and ".format( - original_class._subspecifier_field_name, - original_pk) - else: - subspecifier_string = "" - if original_class is None: - sourcestr = "the data passed" - else: - sourcestr = original_class.__name__ - - raise DeserializationException("Wrong list elements stored in {} for " - "{}key='{}' ({} vs {})".format( - sourcestr, - subspecifier_string, - mainitem['key'], expected_set, received_set)) - if expected_set != received_set: - if (original_class is not None and - original_class._subspecifier_field_name is not None): - subspecifier_string = "{}={} and ".format( - original_class._subspecifier_field_name, - original_pk) - else: - subspecifier_string = "" - if original_class is None: - sourcestr = "the data passed" - else: - sourcestr = original_class.__name__ - - msg = ("Wrong list elements stored in {} for " - "{}key='{}' ({} vs {})".format( - sourcestr, - subspecifier_string, - mainitem['key'], expected_set, received_set)) - if lesserrors: - AIIDA_LOGGER.error(msg) - else: - raise DeserializationException(msg) - - # I get the values in memory as a dictionary - tempdict = {} - for firstsubk, firstsubv in firstlevelsubdict.items(): - # I call recursively the same function to get subitems - newsubitems = {k[len(firstsubk) + len(sep):]: v - for k, v in subitems.items() - if k.startswith(firstsubk + sep)} - tempdict[firstsubk] = _deserialize_attribute(mainitem=firstsubv, - subitems=newsubitems, sep=sep, original_class=original_class, - original_pk=original_pk) - - # And then I put them in a list - retlist = [tempdict["{:d}".format(i)] for i in range(mainitem['ival'])] - return retlist - elif mainitem['datatype'] == 'dict': - # subitems contains all subitems, here I store only those of - # deepness 1, i.e. 
if I have subitems '0', '1' and '1.c' I - # store only '0' and '1' - firstlevelsubdict = {k: v for k, v in subitems.items() - if sep not in k} - - if len(firstlevelsubdict) != mainitem['ival']: - if (original_class is not None and - original_class._subspecifier_field_name is not None): - subspecifier_string = "{}={} and ".format( - original_class._subspecifier_field_name, - original_pk) - else: - subspecifier_string = "" - if original_class is None: - sourcestr = "the data passed" - else: - sourcestr = original_class.__name__ - - msg = ("Wrong dict length stored in {} for " - "{}key='{}' ({} vs {})".format( - sourcestr, - subspecifier_string, - mainitem['key'], len(firstlevelsubdict), - mainitem['ival'])) - if lesserrors: - AIIDA_LOGGER.error(msg) - else: - raise DeserializationException(msg) - - # I get the values in memory as a dictionary - tempdict = {} - for firstsubk, firstsubv in firstlevelsubdict.items(): - # I call recursively the same function to get subitems - newsubitems = {k[len(firstsubk) + len(sep):]: v - for k, v in subitems.items() - if k.startswith(firstsubk + sep)} - tempdict[firstsubk] = _deserialize_attribute(mainitem=firstsubv, - subitems=newsubitems, sep=sep, original_class=original_class, - original_pk=original_pk) - - return tempdict - elif mainitem['datatype'] == 'json': - try: - return json.loads(mainitem['tval']) - except ValueError: - raise DeserializationException("Error in the content of the json field") - else: - raise DeserializationException("The type field '{}' is not recognized".format( - mainitem['datatype'])) - - -def deserialize_attributes(data, sep, original_class=None, original_pk=None): - """ - Deserialize the attributes from the format internally stored in the DB - to the actual format (dictionaries, lists, integers, ... - - :param data: must be a dictionary of dictionaries. In the top-level dictionary, - the key must be the key of the attribute. The value must be a dictionary - with the following keys: datatype, tval, fval, ival, bval, dval. Other - keys are ignored. - NOTE that a type check is not performed! tval is expected to be a string, - dval a date, etc. - :param sep: a string, the separator between subfields (to separate the - name of a dictionary from the keys it contains, for instance) - :param original_class: if these elements come from a specific subclass - of DbMultipleValueAttributeBaseClass, pass here the class (note: the class, - not the instance!). This is used only in case the wrong number of elements - is found in the raw data, to print a more meaningful message (if the class - has a dbnode associated to it) - :param original_pk: if the elements come from a specific subclass - of DbMultipleValueAttributeBaseClass that has a dbnode associated to it, - pass here the PK integer. This is used only in case the wrong number - of elements is found in the raw data, to print a more meaningful message - - :return: a dictionary, where for each entry the corresponding value is - returned, deserialized back to lists, dictionaries, etc. 
- Example: if ``data = {'a': {'datatype': "list", "ival": 2, ...}, - 'a.0': {'datatype': "int", "ival": 2, ...}, - 'a.1': {'datatype': "txt", "tval": "yy"}]``, - it will return ``{"a": [2, "yy"]}`` - """ - from collections import defaultdict - - # I group results by zero-level entity - found_mainitems = {} - found_subitems = defaultdict(dict) - for mainkey, descriptiondict in data.items(): - prefix, thissep, postfix = mainkey.partition(sep) - if thissep: - found_subitems[prefix][postfix] = {k: v for k, v - in descriptiondict.items() if k != "key"} - else: - mainitem = descriptiondict.copy() - mainitem['key'] = prefix - found_mainitems[prefix] = mainitem - - # There can be mainitems without subitems, but there should not be subitems - # without mainitmes. - lone_subitems = set(found_subitems.keys()) - set(found_mainitems.keys()) - if lone_subitems: - raise DeserializationException("Missing base keys for the following " - "items: {}".format(",".join(lone_subitems))) - - # For each zero-level entity, I call the _deserialize_attribute function - retval = {} - for k, v in found_mainitems.items(): - # Note: found_subitems[k] will return an empty dictionary it the - # key does not exist, as it is a defaultdict - retval[k] = _deserialize_attribute(mainitem=v, - subitems=found_subitems[k], sep=sep, original_class=original_class, - original_pk=original_pk) - - return retval - - -class DbMultipleValueAttributeBaseClass(m.Model): +@python_2_unicode_compatible +class DbSetting(m.Model): """ - Abstract base class for tables storing attribute + value data, of - different data types (without any association to a Node). + This will store generic settings that should be database-wide. """ - key = m.CharField(max_length=1024, db_index=True, blank=False) - datatype = m.CharField(max_length=10, - default='none', - choices=attrdatatype_choice, db_index=True) - tval = m.TextField(default='', blank=True) - fval = m.FloatField(default=None, null=True) - ival = m.IntegerField(default=None, null=True) - bval = m.NullBooleanField(default=None, null=True) - dval = m.DateTimeField(default=None, null=True) - - # separator for subfields - _sep = AIIDA_ATTRIBUTE_SEP - - class Meta: - abstract = True - unique_together = (('key',),) - - # There are no subspecifiers. If instead you want to group attributes - # (e.g. by node, as it is done in the DbAttributeBaseClass), specify here - # the field name - _subspecifier_field_name = None - - @property - def subspecifiers_dict(self): - """ - Return a dict to narrow down the query to only those matching also the - subspecifier. - """ - if self._subspecifier_field_name is None: - return {} - else: - return {self._subspecifier_field_name: - getattr(self, self._subspecifier_field_name)} - - @property - def subspecifier_pk(self): - """ - Return the subspecifier PK in the database (or None, if no - subspecifier should be used) - """ - if self._subspecifier_field_name is None: - return None - else: - return getattr(self, self._subspecifier_field_name).pk - - @classmethod - def validate_key(cls, key): - """ - Validate the key string to check if it is valid (e.g., if it does not - contain the separator symbol.). 
+ key = m.CharField(max_length=1024, db_index=True, blank=False, unique=True) + val = JSONField(default=None, null=True) + # I also add a description field for the variables + description = m.TextField(blank=True) + # Modification time of this attribute + time = m.DateTimeField(auto_now=True, editable=False) - :return: None if the key is valid - :raise aiida.common.ValidationError: if the key is not valid - """ - from aiida.backends.utils import validate_attribute_key - return validate_attribute_key(key) + def __str__(self): + return "'{}'={}".format(self.key, self.getvalue()) @classmethod def set_value(cls, key, value, with_transaction=True, subspecifier_value=None, other_attribs={}, stop_if_existing=False): - """ - Set a new value in the DB, possibly associated to the given subspecifier. - - :note: This method also stored directly in the DB. - - :param key: a string with the key to create (must be a level-0 - attribute, that is it cannot contain the separator cls._sep). - :param value: the value to store (a basic data type or a list or a dict) - :param subspecifier_value: must be None if this class has no - subspecifier set (e.g., the DbSetting class). - Must be the value of the subspecifier (e.g., the dbnode) for classes - that define it (e.g. DbAttribute and DbExtra) - :param with_transaction: True if you want this function to be managed - with transactions. Set to False if you already have a manual - management of transactions in the block where you are calling this - function (useful for speed improvements to avoid recursive - transactions) - :param other_attribs: a dictionary of other parameters, to store - only on the level-zero attribute (e.g. for description in DbSetting). - :param stop_if_existing: if True, it will stop with an - UniquenessError exception if the new entry would violate an - uniqueness constraint in the DB (same key, or same key+node, - depending on the specific subclass). Otherwise, it will - first delete the old value, if existent. The use with True is - useful if you want to use a given attribute as a "locking" value, - e.g. to avoid to perform an action twice on the same node. - Note that, if you are using transactions, you may get the error - only when the transaction is committed. - """ - from django.db import transaction - - cls.validate_key(key) - - try: - if with_transaction: - sid = transaction.savepoint() - - # create_value returns a list of nodes to store - to_store = cls.create_value(key, value, - subspecifier_value=subspecifier_value, - other_attribs=other_attribs) - - if to_store: - if not stop_if_existing: - # Delete the olf values if stop_if_existing is False, - # otherwise don't delete them and hope they don't - # exist. If they exist, I'll get an UniquenessError - - ## NOTE! Be careful in case the extra/attribute to - ## store is not a simple attribute but a list or dict: - ## like this, it should be ok because if we are - ## overwriting an entry it will stop anyway to avoid - ## to overwrite the main entry, but otherwise - ## there is the risk that trailing pieces remain - ## so in general it is good to recursively clean - ## all sub-items. - cls.del_value(key, - subspecifier_value=subspecifier_value) - cls.objects.bulk_create(to_store) - - if with_transaction: - transaction.savepoint_commit(sid) - except BaseException as exc: # All exceptions including CTRL+C, ... 
- from django.db.utils import IntegrityError - from aiida.common.exceptions import UniquenessError - - if with_transaction: - transaction.savepoint_rollback(sid) - if isinstance(exc, IntegrityError) and stop_if_existing: - raise UniquenessError("Impossible to create the required " - "entry " - "in table '{}', " - "another entry already exists and the creation would " - "violate an uniqueness constraint.\nFurther details: " - "{}".format(cls.__name__, exc)) - raise - - @classmethod - def create_value(cls, key, value, subspecifier_value=None, - other_attribs={}): - """ - Create a new list of attributes, without storing them, associated - with the current key/value pair (and to the given subspecifier, - e.g. the DbNode for DbAttributes and DbExtras). - - :note: No hits are done on the DB, in particular no check is done - on the existence of the given nodes. - - :param key: a string with the key to create (can contain the - separator cls._sep if this is a sub-attribute: indeed, this - function calls itself recursively) - :param value: the value to store (a basic data type or a list or a dict) - :param subspecifier_value: must be None if this class has no - subspecifier set (e.g., the DbSetting class). - Must be the value of the subspecifier (e.g., the dbnode) for classes - that define it (e.g. DbAttribute and DbExtra) - :param other_attribs: a dictionary of other parameters, to store - only on the level-zero attribute (e.g. for description in DbSetting). - - :return: always a list of class instances; it is the user - responsibility to store such entries (typically with a Django - bulk_create() call). - """ - import datetime - from aiida.common import json - from aiida.common.timezone import is_naive, make_aware, get_current_timezone - - if cls._subspecifier_field_name is None: - if subspecifier_value is not None: - raise ValueError("You cannot specify a subspecifier value for " - "class {} because it has no subspecifiers" - "".format(cls.__name__)) - new_entry = cls(key=key, **other_attribs) - else: - if subspecifier_value is None: - raise ValueError("You also have to specify a subspecifier value " - "for class {} (the {})".format(cls.__name__, - cls._subspecifier_field_name)) - further_params = other_attribs.copy() - further_params.update({cls._subspecifier_field_name: - subspecifier_value}) - new_entry = cls(key=key, **further_params) - - list_to_return = [new_entry] - - if value is None: - new_entry.datatype = 'none' - new_entry.bval = None - new_entry.tval = '' - new_entry.ival = None - new_entry.fval = None - new_entry.dval = None - - elif isinstance(value, bool): - new_entry.datatype = 'bool' - new_entry.bval = value - new_entry.tval = '' - new_entry.ival = None - new_entry.fval = None - new_entry.dval = None - - elif isinstance(value, six.integer_types): - new_entry.datatype = 'int' - new_entry.ival = value - new_entry.tval = '' - new_entry.bval = None - new_entry.fval = None - new_entry.dval = None - - elif isinstance(value, float): - new_entry.datatype = 'float' - new_entry.fval = value - new_entry.tval = '' - new_entry.ival = None - new_entry.bval = None - new_entry.dval = None - - elif isinstance(value, six.string_types): - new_entry.datatype = 'txt' - new_entry.tval = value - new_entry.bval = None - new_entry.ival = None - new_entry.fval = None - new_entry.dval = None - - elif isinstance(value, datetime.datetime): - - # current timezone is taken from the settings file of django - if is_naive(value): - value_to_set = make_aware(value, get_current_timezone()) - else: - value_to_set = 
value - - new_entry.datatype = 'date' - # TODO: time-aware and time-naive datetime objects, see - # https://docs.djangoproject.com/en/dev/topics/i18n/timezones/#naive-and-aware-datetime-objects - new_entry.dval = value_to_set - new_entry.tval = '' - new_entry.bval = None - new_entry.ival = None - new_entry.fval = None - - elif isinstance(value, (list, tuple)): - - new_entry.datatype = 'list' - new_entry.dval = None - new_entry.tval = '' - new_entry.bval = None - new_entry.ival = len(value) - new_entry.fval = None - - for i, subv in enumerate(value): - # I do not need get_or_create here, because - # above I deleted all children (and I - # expect no concurrency) - # NOTE: I do not pass other_attribs - list_to_return.extend(cls.create_value( - key=("{}{}{:d}".format(key, cls._sep, i)), - value=subv, - subspecifier_value=subspecifier_value)) - - elif isinstance(value, dict): - - new_entry.datatype = 'dict' - new_entry.dval = None - new_entry.tval = '' - new_entry.bval = None - new_entry.ival = len(value) - new_entry.fval = None - - for subk, subv in value.items(): - cls.validate_key(subk) - - # I do not need get_or_create here, because - # above I deleted all children (and I - # expect no concurrency) - # NOTE: I do not pass other_attribs - list_to_return.extend(cls.create_value( - key="{}{}{}".format(key, cls._sep, subk), - value=subv, - subspecifier_value=subspecifier_value)) + setting = DbSetting.objects.filter(key=key).first() + if setting is not None: + if stop_if_existing: + return else: - try: - jsondata = json.dumps(value) - except TypeError: - raise ValueError("Unable to store the value: it must be either a basic datatype, or json-serializable: {}".format(value)) - - new_entry.datatype = 'json' - new_entry.tval = jsondata - new_entry.bval = None - new_entry.ival = None - new_entry.fval = None + setting = cls() - return list_to_return - - @classmethod - def get_query_dict(cls, value): - """ - Return a dictionary that can be used in a django filter to query - for a specific value. This takes care of checking the type of the - input parameter 'value' and to convert it to the right query. - - :param value: The value that should be queried. Note: can only be - base datatype, not a list or dict. For those, query directly for - one of the sub-elements. - - :todo: see if we want to give the possibility to query for the existence - of a (possibly empty) dictionary or list, of for their length. - - :note: this will of course not find a data if this was stored in the - DB as a serialized JSON. - - :return: a dictionary to be used in the django .filter() method. - For instance, if 'value' is a string, it will return the dictionary - ``{'datatype': 'txt', 'tval': value}``. 
- - :raise: ValueError if value is not of a base datatype (string, integer, - float, bool, None, or date) - """ - import datetime - from aiida.common.timezone import ( - is_naive, make_aware, get_current_timezone) - - if value is None: - return {'datatype': 'none'} - elif isinstance(value, bool): - return {'datatype': 'bool', 'bval': value} - elif isinstance(value, six.integer_types): - return {'datatype': 'int', 'ival': value} - elif isinstance(value, float): - return {'datatype': 'float', 'fval': value} - elif isinstance(value, six.string_types): - return {'datatype': 'txt', 'tval': value} - elif isinstance(value, datetime.datetime): - # current timezone is taken from the settings file of django - if is_naive(value): - value_to_set = make_aware(value, get_current_timezone()) - else: - value_to_set = value - return {'datatype': 'date', 'dval': value_to_set} - elif isinstance(value, list): - raise ValueError("Lists are not supported for getting the " - "query_dict") - elif isinstance(value, dict): - raise ValueError("Dicts are not supported for getting the " - "query_dict") - else: - raise ValueError("Unsupported type for getting the " - "query_dict, it is {}".format(str(type(value)))) + setting.key = key + setting.val = datetime_to_isoformat(value) + setting.time = timezone.datetime.now(tz=UTC) + if "description" in other_attribs.keys(): + setting.description = other_attribs["description"] + setting.save() def getvalue(self): """ - This can be called on a given row and will get the corresponding value, - casting it correctly. - """ - try: - if self.datatype == 'list' or self.datatype == 'dict': - prefix = "{}{}".format(self.key, self._sep) - prefix_len = len(prefix) - dballsubvalues = self.__class__.objects.filter( - key__startswith=prefix, - **self.subspecifiers_dict).values_list('key', - 'datatype', 'tval', 'fval', - 'ival', 'bval', 'dval') - # Strip the FULL prefix and replace it with the simple - # "attr" prefix - data = {"attr.{}".format(_[0][prefix_len:]): { - "datatype": _[1], - "tval": _[2], - "fval": _[3], - "ival": _[4], - "bval": _[5], - "dval": _[6], - } for _ in dballsubvalues - } - # for _ in dballsubvalues} - # Append also the item itself - data["attr"] = { - # Replace the key (which may contain the separator) with the - # simple "attr" key. In any case I do not need to return it! - "key": "attr", - "datatype": self.datatype, - "tval": self.tval, - "fval": self.fval, - "ival": self.ival, - "bval": self.bval, - "dval": self.dval} - return deserialize_attributes(data, sep=self._sep, - original_class=self.__class__, - original_pk=self.subspecifier_pk)['attr'] - else: - data = {"attr": { - # Replace the key (which may contain the separator) with the - # simple "attr" key. In any case I do not need to return it! - "key": "attr", - "datatype": self.datatype, - "tval": self.tval, - "fval": self.fval, - "ival": self.ival, - "bval": self.bval, - "dval": self.dval}} - - return deserialize_attributes(data, sep=self._sep, - original_class=self.__class__, - original_pk=self.subspecifier_pk)['attr'] - except DeserializationException as exc: - exc = DbContentError(exc) - exc.original_exception = exc - raise exc - - @classmethod - def del_value(cls, key, only_children=False, subspecifier_value=None): - """ - Delete a value associated with the given key (if existing). - - :note: No exceptions are raised if no entry is found. - - :param key: the key to delete. Can contain the separator cls._sep if - you want to delete a subkey. 
- :param only_children: if True, delete only children and not the - entry itself. - :param subspecifier_value: must be None if this class has no - subspecifier set (e.g., the DbSetting class). - Must be the value of the subspecifier (e.g., the dbnode) for classes - that define it (e.g. DbAttribute and DbExtra) - """ - from django.db.models import Q - - if cls._subspecifier_field_name is None: - if subspecifier_value is not None: - raise ValueError("You cannot specify a subspecifier value for " - "class {} because it has no subspecifiers" - "".format(cls.__name__)) - subspecifiers_dict = {} - else: - if subspecifier_value is None: - raise ValueError("You also have to specify a subspecifier value " - "for class {} (the {})".format(cls.__name__, - cls._subspecifier_field_name)) - subspecifiers_dict = {cls._subspecifier_field_name: - subspecifier_value} - - query = Q(key__startswith="{parentkey}{sep}".format( - parentkey=key, sep=cls._sep), - **subspecifiers_dict) - - if not only_children: - query.add(Q(key=key, **subspecifiers_dict), Q.OR) - - cls.objects.filter(query).delete() - - -@python_2_unicode_compatible -class DbAttributeBaseClass(DbMultipleValueAttributeBaseClass): - """ - Abstract base class for tables storing element-attribute-value data. - Element is the dbnode; attribute is the key name. - Value is the specific value to store. - - This table had different SQL columns to store different types of data, and - a datatype field to know the actual datatype. - - Moreover, this class unpacks dictionaries and lists when possible, so that - it is possible to query inside recursive lists and dicts. - """ - # In this way, the related name for the DbAttribute inherited class will be - # 'dbattributes' and for 'dbextra' will be 'dbextras' - # Moreover, automatically destroy attributes and extras if the parent - # node is deleted - dbnode = m.ForeignKey('DbNode', related_name='%(class)ss', on_delete=m.CASCADE) - # max_length is required by MySql to have indexes and unique constraints - - _subspecifier_field_name = 'dbnode' - - class Meta: - unique_together = (("dbnode", "key")) - abstract = True - - @classmethod - def list_all_node_elements(cls, dbnode): - """ - Return a django queryset with the attributes of the given node, - only at deepness level zero (i.e., keys not containing the separator). + This can be called on a given row and will get the corresponding value. """ - from django.db.models import Q + return isoformat_to_datetime(self.val) - # This node, and does not contain the separator - # (=> show only level-zero entries) - query = Q(dbnode=dbnode) & ~Q(key__contains=cls._sep) - return cls.objects.filter(query) - - @classmethod - def get_value_for_node(cls, dbnode, key): + def get_description(self): """ - Get an attribute from the database for the given dbnode. - - :return: the value stored in the Db table, correctly converted - to the right type. - :raise AttributeError: if no key is found for the given dbnode + This can be called on a given row and will get the corresponding + description. """ - try: - attr = cls.objects.get(dbnode=dbnode, key=key) - except ObjectDoesNotExist: - raise AttributeError("{} with key {} for node {} not found " - "in db".format(cls.__name__, key, dbnode.pk)) - return attr.getvalue() - - @classmethod - def get_all_values_for_node(cls, dbnode): - """ - Return a dictionary with all attributes for the given dbnode. - - :return: a dictionary where each key is a level-0 attribute - stored in the Db table, correctly converted - to the right type. 
- """ - return cls.get_all_values_for_nodepk(dbnode.pk) - - @classmethod - def get_all_values_for_nodepk(cls, dbnodepk): - """ - Return a dictionary with all attributes for the dbnode with given PK. - - :return: a dictionary where each key is a level-0 attribute - stored in the Db table, correctly converted - to the right type. - """ - dballsubvalues = cls.objects.filter(dbnode__id=dbnodepk).values_list( - 'key', 'datatype', 'tval', 'fval', - 'ival', 'bval', 'dval') - - data = {_[0]: { - "datatype": _[1], - "tval": _[2], - "fval": _[3], - "ival": _[4], - "bval": _[5], - "dval": _[6], - } for _ in dballsubvalues - } - try: - return deserialize_attributes(data, sep=cls._sep, - original_class=cls, - original_pk=dbnodepk) - except DeserializationException as exc: - exc = DbContentError(exc) - exc.original_exception = exc - raise exc - - @classmethod - def reset_values_for_node(cls, dbnode, attributes, with_transaction=True, - return_not_store=False): - from django.db import transaction - - # cls.validate_key(key) - - nodes_to_store = [] - - try: - if with_transaction: - sid = transaction.savepoint() - - if isinstance(dbnode, six.integer_types): - dbnode_node = DbNode(id=dbnode) - else: - dbnode_node = dbnode - - # create_value returns a list of nodes to store - for k, v in attributes.items(): - nodes_to_store.extend( - cls.create_value(k, v, - subspecifier_value=dbnode_node, - )) - - if return_not_store: - return nodes_to_store - else: - # Reset. For set, use also a filter for key__in=attributes.keys() - cls.objects.filter(dbnode=dbnode_node).delete() - - if nodes_to_store: - cls.objects.bulk_create(nodes_to_store) - - if with_transaction: - transaction.savepoint_commit(sid) - except: - if with_transaction: - transaction.savepoint_rollback(sid) - raise + return self.description @classmethod - def set_value_for_node(cls, dbnode, key, value, with_transaction=True, - stop_if_existing=False): - """ - This is the raw-level method that accesses the DB. No checks are done - to prevent the user from (re)setting a valid key. - To be used only internally. - - :todo: there may be some error on concurrent write; - not checked in this unlucky case! - - :param dbnode: the dbnode for which the attribute should be stored; - in an integer is passed, this is used as the PK of the dbnode, - without any further check (for speed reasons) - :param key: the key of the attribute to store; must be a level-zero - attribute (i.e., no separators in the key) - :param value: the value of the attribute to store - :param with_transaction: if True (default), do this within a transaction, - so that nothing gets stored if a subitem cannot be created. - Otherwise, if this parameter is False, no transaction management - is performed. - :param stop_if_existing: if True, it will stop with an - UniquenessError exception if the key already exists - for the given node. Otherwise, it will - first delete the old value, if existent. The use with True is - useful if you want to use a given attribute as a "locking" value, - e.g. to avoid to perform an action twice on the same node. - Note that, if you are using transactions, you may get the error - only when the transaction is committed. - - :raise ValueError: if the key contains the separator symbol used - internally to unpack dictionaries and lists (defined in cls._sep). 
- """ - if isinstance(dbnode, six.integer_types): - dbnode_node = DbNode(id=dbnode) + def del_value(cls, key, only_children=False, subspecifier_value=None): + setting = DbSetting.objects.filter(key=key).first() + if setting is not None: + setting.val = None + setting.time = timezone.datetime.utcnow() + setting.save() else: - dbnode_node = dbnode - - cls.set_value(key, value, with_transaction=with_transaction, - subspecifier_value=dbnode_node, - stop_if_existing=stop_if_existing) - - @classmethod - def del_value_for_node(cls, dbnode, key): - """ - Delete an attribute from the database for the given dbnode. - - :note: no exception is raised if no attribute with the given key is - found in the DB. - - :param dbnode: the dbnode for which you want to delete the key. - :param key: the key to delete. - """ - cls.del_value(key, subspecifier_value=dbnode) - - @classmethod - def has_key(cls, dbnode, key): - """ - Return True if the given dbnode has an attribute with the given key, - False otherwise. - """ - return bool(cls.objects.filter(dbnode=dbnode, key=key)) - - def __str__(self): - return "[{} ({})].{} ({})".format( - self.dbnode.get_simple_name(invalid_result="Unknown node"), - self.dbnode.pk, - self.key, - self.datatype, ) - - -@python_2_unicode_compatible -class DbSetting(DbMultipleValueAttributeBaseClass): - """ - This will store generic settings that should be database-wide. - """ - # I also add a description field for the variables - description = m.TextField(blank=True) - # Modification time of this attribute - time = m.DateTimeField(auto_now=True, editable=False) - - def __str__(self): - return "'{}'={}".format(self.key, self.getvalue()) - - -class DbAttribute(DbAttributeBaseClass): - """ - This table stores attributes that uniquely define the content of the - node. Therefore, their modification corrupts the data. - """ - pass - - -class DbExtra(DbAttributeBaseClass): - """ - This table stores extra data, still in the key-value format, - that the user can attach to a node. - Therefore, their modification simply changes the user-defined data, - but does not corrupt the node (it will still be loadable without errors). - Could be useful to add "duplicate" information for easier querying, or - for tagging nodes. - """ - pass + raise KeyError() @python_2_unicode_compatible diff --git a/aiida/backends/djsite/db/subtests/migrations/__init__.py b/aiida/backends/djsite/db/subtests/migrations/__init__.py new file mode 100644 index 0000000000..a7e3fad50c --- /dev/null +++ b/aiida/backends/djsite/db/subtests/migrations/__init__.py @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. 
# + # + # The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # + # For further information on the license, see the LICENSE.txt file # + # For further information please visit http://www.aiida.net # +########################################################################### diff --git a/aiida/backends/djsite/db/subtests/migrations/test_migrations_0034_attributes_extras_settings_json.py b/aiida/backends/djsite/db/subtests/migrations/test_migrations_0034_attributes_extras_settings_json.py new file mode 100644 index 0000000000..20d6fd1f05 --- /dev/null +++ b/aiida/backends/djsite/db/subtests/migrations/test_migrations_0034_attributes_extras_settings_json.py @@ -0,0 +1,630 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=import-error,no-name-in-module,invalid-name +""" +Tests for the migrations of the attributes, extras and settings from EAV to JSONB +Migration 0037_attributes_extras_settings_json +""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import copy + +import six +from six.moves import range + +from dateutil.parser import parse +from django.db import transaction +from django.utils.encoding import python_2_unicode_compatible + +from aiida.backends.djsite.db.subtests.migrations.test_migrations_common import TestMigrations +from aiida.backends.utils import isoformat_to_datetime + +# The following sample dictionary can be used for the conversion test of attributes and extras +SAMPLE_DICT = { + 'bool': True, + 'integer': 12, + 'float': 26.2, + 'string': "a string", + 'dict': { + "a": "b", + "sublist": [1, 2, 3], + "subdict": { + "c": "d" + } + }, + 'list': [1, True, "ggg", { + 'h': 'j' + }, [9, 8, 7]], +} + +# The following base classes contain just the model declarations for DbAttribute +# and DbExtra and are needed by the methods of +# DbAttributeFunctionality and DbExtraFunctionality used for the deserialization +# of attribute and extra dictionaries +db_attribute_base_model = None +db_extra_base_model = None + + +class TestAttributesExtrasToJSONMigrationSimple(TestMigrations): + """ + A "simple" test for the attributes and extras migration from EAV to JSONB. + It stores a sample dictionary using the EAV serialization methods of AiiDA Django + for the attributes and extras. Then the test checks that they are correctly + converted to JSONB. 
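+ The sample dictionary deliberately contains nested dicts and lists, which the EAV schema stores as separate rows with dotted keys (e.g. 'dict.sublist.0'), so the migration has to reassemble them into a single JSON document. 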
+ """ + migrate_from = '0036_drop_computer_transport_params' + migrate_to = '0037_attributes_extras_settings_json' + + # In the following dictionary we store the generated nodes (ids, attributes and extras) + # The correct migration of these nodes will be checked at the test + nodes_to_verify = dict() + + def setUpBeforeMigration(self): + global db_attribute_base_model, db_extra_base_model # pylint: disable=global-statement + + db_node_model = self.apps.get_model('db', 'DbNode') + db_computer_model = self.apps.get_model('db', 'DbComputer') + # The following base models are initialized here since the model at this point + # it has the corresponding EAV tables + db_attribute_base_model = self.apps.get_model('db', 'DbAttribute') + db_extra_base_model = self.apps.get_model('db', 'DbExtra') + + computer = db_computer_model( + name='localhost_migration', + hostname='localhost', + transport_type='local', + scheduler_type='pbspro', + metadata={"workdir": "/tmp/aiida"}) + computer.save() + + node = db_node_model(node_type='data.Data.', dbcomputer_id=computer.id, user_id=self.default_user.id) + node.save() + + for key, value in SAMPLE_DICT.items(): + DbAttributeFunctionality.set_value_for_node(node, key, value) + + for key, value in SAMPLE_DICT.items(): + DbExtraFunctionality.set_value_for_node(node, key, value) + + self.nodes_to_verify[node.id] = dict() + self.nodes_to_verify[node.id]['attr'] = copy.deepcopy(SAMPLE_DICT) + self.nodes_to_verify[node.id]['extr'] = copy.deepcopy(SAMPLE_DICT) + + def test_attributes_extras_migration(self): + """Verify that the attributes and extras were migrated correctly""" + db_node_model = self.apps.get_model('db', 'DbNode') + for curr_dbnode in db_node_model.objects.all(): + self.assertEqual(curr_dbnode.attributes, self.nodes_to_verify[curr_dbnode.id]['attr']) + self.assertEqual(curr_dbnode.extras, self.nodes_to_verify[curr_dbnode.id]['extr']) + + +class TestAttributesExtrasToJSONMigrationManyNodes(TestMigrations): + """ + This test comparing to the previous one (TestAttributesExtrasToJSONMigrationSimple), it + creates several nodes with different atributes and extras and checks their correct + migration one-by-one. 
+ """ + migrate_from = '0036_drop_computer_transport_params' + migrate_to = '0037_attributes_extras_settings_json' + + # In the following dictionary we store the generated nodes (ids, attributes and extras) + # The correct migration of these nodes will be checked at the test + nodes_to_verify = dict() + + # Number of nodes to create + nodes_no_to_create = 20 + + def setUpBeforeMigration(self): + global db_attribute_base_model, db_extra_base_model # pylint: disable=global-statement + + db_node_model = self.apps.get_model('db', 'DbNode') + db_computer_model = self.apps.get_model('db', 'DbComputer') + # The following base models are initialized here since the model at this point + # it has the corresponding EAV tables + db_attribute_base_model = self.apps.get_model('db', 'DbAttribute') + db_extra_base_model = self.apps.get_model('db', 'DbExtra') + + computer = db_computer_model( + name='localhost_migration', + hostname='localhost', + transport_type='local', + scheduler_type='pbspro', + metadata={"workdir": "/tmp/aiida"}) + computer.save() + + with transaction.atomic(): + for _ in range(self.nodes_no_to_create): + node = db_node_model(node_type='data.Data.', dbcomputer_id=computer.id, user_id=self.default_user.id) + node.save() + + attr_copy = copy.deepcopy(SAMPLE_DICT) + attr_copy['type_of_json'] = 'attr' + attr_copy['node_id'] = node.id + + # Setting the attributes as it used to be set (with the same methods) + for key in attr_copy.keys(): + DbAttributeFunctionality.set_value_for_node(node, key, attr_copy[key]) + + extr_copy = copy.deepcopy(SAMPLE_DICT) + extr_copy['type_of_json'] = 'extr' + extr_copy['node_id'] = node.id + + # Setting the extras as it used to be set (with the same methods) + for key in extr_copy.keys(): + DbExtraFunctionality.set_value_for_node(node, key, extr_copy[key]) + + self.nodes_to_verify[node.id] = dict() + self.nodes_to_verify[node.id]['attr'] = attr_copy + self.nodes_to_verify[node.id]['extr'] = extr_copy + + def test_attributes_extras_migration_many(self): + """Verify that the attributes and extras were migrated correctly""" + db_node_model = self.apps.get_model('db', 'DbNode') + for curr_dbnode in db_node_model.objects.all(): + self.assertEqual(curr_dbnode.attributes, self.nodes_to_verify[curr_dbnode.id]['attr']) + self.assertEqual(curr_dbnode.extras, self.nodes_to_verify[curr_dbnode.id]['extr']) + + +class TestSettingsToJSONMigration(TestMigrations): + """ + This test checks the correct migration of the settings. Setting records were used as an + example from a typical settings table of Django EAV. 
+ """ + migrate_from = '0036_drop_computer_transport_params' + migrate_to = '0037_attributes_extras_settings_json' + + # The settings to create and verify + settings_info = dict() + + def setUpBeforeMigration(self): + db_setting_model = self.apps.get_model('db', 'DbSetting') + + self.settings_info['2daemon|task_stop|updater2'] = dict( + key='2daemon|task_stop|updater2', + datatype='date', + dval='2018-07-27 15:12:24.382552+02', + description='The last time the daemon finished to run ' + 'the task \'updater\' (updater)') + self.settings_info['2daemon|task_start|updater2'] = dict( + key='2daemon|task_start|updater2', + datatype='date', + dval='2018-07-27 15:12:45.264863+02', + description='The last time the daemon started to run ' + 'the task \'updater\' (updater)') + self.settings_info['2db|backend2'] = dict( + key='2db|backend2', + datatype='txt', + tval='django', + description='The backend used to communicate with the database.') + self.settings_info['2daemon|user2'] = dict( + key='2daemon|user2', + datatype='txt', + tval='aiida@theossrv5.epfl.ch', + description='The only user that is allowed to run the AiiDA daemon on ' + 'this DB instance') + self.settings_info['2db|schemaversion2'] = dict( + key='2db|schemaversion2', + datatype='txt', + tval=' 1.0.8', + description='The version of the schema used in this database.') + + with transaction.atomic(): + for setting_info in self.settings_info.values(): + setting = db_setting_model(**setting_info) + setting.save() + + def test_settings_migration(self): + """Verify that the settings were migrated correctly""" + db_setting_model = self.apps.get_model('db', 'DbSetting') + for curr_setting in db_setting_model.objects.filter(key__in=self.settings_info.keys()).all(): + curr_setting_info = self.settings_info[curr_setting.key] + self.assertEqual(curr_setting.description, curr_setting_info['description']) + if curr_setting_info['datatype'] == 'txt': + self.assertEqual(curr_setting.val, curr_setting_info['tval']) + elif curr_setting_info['datatype'] == 'date': + self.assertEqual(isoformat_to_datetime(curr_setting.val), parse(curr_setting_info['dval'])) + + def tearDown(self): + """ + Deletion of settings - this is needed because settings are not deleted by the + typical test cleanup methods. + """ + db_setting_model = self.apps.get_model('db', 'DbSetting') + db_setting_model.objects.filter(key__in=self.settings_info.keys()).delete() + super(TestSettingsToJSONMigration, self).tearDown() + + +# pylint: disable=no-init, old-style-class, too-few-public-methods, dangerous-default-value, too-many-statements +# pylint: disable= no-else-return, too-many-arguments, too-many-branches, fixme +class DbMultipleValueAttributeBaseClass(): + """ + Abstract base class for tables storing attribute + value data, of + different data types (without any association to a Node). + """ + # separator for subfields + _sep = '.' # The AIIDA_ATTRIBUTE_SEP + + class Meta: + abstract = True + unique_together = (('key',),) + + # There are no subspecifiers. If instead you want to group attributes + # (e.g. 
by node, as it is done in the DbAttributeBaseClass), specify here
+    # the field name.
+    _subspecifier_field_name = None
+
+    @property
+    def subspecifier_pk(self):
+        """
+        Return the subspecifier PK in the database (or None, if no
+        subspecifier should be used).
+        """
+        if self._subspecifier_field_name is None:
+            return None
+        else:
+            return getattr(self, self._subspecifier_field_name).pk
+
+    @classmethod
+    def validate_key(cls, key):
+        """
+        Validate the key string to check if it is valid (e.g., that it does not
+        contain the separator symbol).
+
+        :return: None if the key is valid
+        :raise aiida.common.ValidationError: if the key is not valid
+        """
+        from aiida.backends.utils import validate_attribute_key
+        return validate_attribute_key(key)
+
+    @classmethod
+    def set_value(cls,
+                  key,
+                  value,
+                  with_transaction=True,
+                  subspecifier_value=None,
+                  other_attribs={},
+                  stop_if_existing=False):
+        """
+        Set a new value in the DB, possibly associated to the given subspecifier.
+
+        :note: This method also stores directly in the DB.
+
+        :param key: a string with the key to create (must be a level-0
+          attribute, that is, it cannot contain the separator cls._sep).
+        :param value: the value to store (a basic data type or a list or a dict)
+        :param subspecifier_value: must be None if this class has no
+          subspecifier set (e.g., the DbSetting class).
+          Must be the value of the subspecifier (e.g., the dbnode) for classes
+          that define it (e.g. DbAttribute and DbExtra)
+        :param with_transaction: True if you want this function to be managed
+          with transactions. Set to False if you already have a manual
+          management of transactions in the block where you are calling this
+          function (useful for speed improvements to avoid recursive
+          transactions)
+        :param other_attribs: a dictionary of other parameters, to store
+          only on the level-zero attribute (e.g. for description in DbSetting).
+        :param stop_if_existing: if True, it will stop with a
+          UniquenessError exception if the new entry would violate a
+          uniqueness constraint in the DB (same key, or same key+node,
+          depending on the specific subclass). Otherwise, it will
+          first delete the old value, if existent. Using True is
+          useful if you want to use a given attribute as a "locking" value,
+          e.g. to avoid performing an action twice on the same node.
+          Note that, if you are using transactions, you may get the error
+          only when the transaction is committed.
+        """
+        cls.validate_key(key)
+
+        try:
+            if with_transaction:
+                sid = transaction.savepoint()
+
+            # create_value returns a list of nodes to store
+            to_store = cls.create_value(key, value, subspecifier_value=subspecifier_value, other_attribs=other_attribs)
+
+            if to_store:
+                # if not stop_if_existing:
+                #     # Delete the old values if stop_if_existing is False,
+                #     # otherwise don't delete them and hope they don't
+                #     # exist. If they exist, I'll get a UniquenessError
+                #
+                #     ## NOTE! Be careful in case the extra/attribute to
+                #     ## store is not a simple attribute but a list or dict:
+                #     ## like this, it should be ok because if we are
+                #     ## overwriting an entry it will stop anyway to avoid
+                #     ## to overwrite the main entry, but otherwise
+                #     ## there is the risk that trailing pieces remain
+                #     ## so in general it is good to recursively clean
+                #     ## all sub-items.
+                #     cls.del_value(key,
+                #                   subspecifier_value=subspecifier_value)
+                for my_obj in to_store:
+                    my_obj.save()
+
+                # cls.objects.bulk_create(to_store)
+
+            if with_transaction:
+                transaction.savepoint_commit(sid)
+        except BaseException as exc:  # All exceptions including CTRL+C, ...
+            from django.db.utils import IntegrityError
+            from aiida.common.exceptions import UniquenessError
+
+            if with_transaction:
+                transaction.savepoint_rollback(sid)
+            if isinstance(exc, IntegrityError) and stop_if_existing:
+                raise UniquenessError("Impossible to create the required "
+                                      "entry "
+                                      "in table '{}', "
+                                      "another entry already exists and the creation would "
+                                      "violate a uniqueness constraint.\nFurther details: "
+                                      "{}".format(cls.__name__, exc))
+            raise
+
+    @classmethod
+    def create_value(cls, key, value, subspecifier_value=None, other_attribs={}):
+        """
+        Create a new list of attributes, without storing them, associated
+        with the current key/value pair (and to the given subspecifier,
+        e.g. the DbNode for DbAttributes and DbExtras).
+
+        :note: No queries are made to the DB; in particular, no check is done
+          on the existence of the given nodes.
+
+        :param key: a string with the key to create (can contain the
+          separator cls._sep if this is a sub-attribute: indeed, this
+          function calls itself recursively)
+        :param value: the value to store (a basic data type or a list or a dict)
+        :param subspecifier_value: must be None if this class has no
+          subspecifier set (e.g., the DbSetting class).
+          Must be the value of the subspecifier (e.g., the dbnode) for classes
+          that define it (e.g. DbAttribute and DbExtra)
+        :param other_attribs: a dictionary of other parameters, to store
+          only on the level-zero attribute (e.g. for description in DbSetting).
+
+        :return: always a list of class instances; it is the user's
+          responsibility to store such entries (typically with a Django
+          bulk_create() call).
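+
+        Illustrative example (hypothetical call, repr simplified)::
+
+            create_value('opts', {'a': 1})
+            # -> [<entry key='opts'   datatype='dict' ival=1>,
+            #     <entry key='opts.a' datatype='int'  ival=1>]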
+ """ + import datetime + + from aiida.common import json + from aiida.common.timezone import is_naive, make_aware, get_current_timezone + + if cls._subspecifier_field_name is None: + if subspecifier_value is not None: + raise ValueError("You cannot specify a subspecifier value for " + "class {} because it has no subspecifiers" + "".format(cls.__name__)) + if issubclass(cls, DbAttributeFunctionality): + new_entry = db_attribute_base_model(key=key, **other_attribs) + else: + new_entry = db_extra_base_model(key=key, **other_attribs) + else: + if subspecifier_value is None: + raise ValueError("You also have to specify a subspecifier value " + "for class {} (the {})".format(cls.__name__, cls._subspecifier_field_name)) + further_params = other_attribs.copy() + further_params.update({cls._subspecifier_field_name: subspecifier_value}) + # new_entry = cls(key=key, **further_params) + if issubclass(cls, DbAttributeFunctionality): + new_entry = db_attribute_base_model(key=key, **further_params) + else: + new_entry = db_extra_base_model(key=key, **further_params) + + list_to_return = [new_entry] + + if value is None: + new_entry.datatype = 'none' + new_entry.bval = None + new_entry.tval = '' + new_entry.ival = None + new_entry.fval = None + new_entry.dval = None + + elif isinstance(value, bool): + new_entry.datatype = 'bool' + new_entry.bval = value + new_entry.tval = '' + new_entry.ival = None + new_entry.fval = None + new_entry.dval = None + + elif isinstance(value, six.integer_types): + new_entry.datatype = 'int' + new_entry.ival = value + new_entry.tval = '' + new_entry.bval = None + new_entry.fval = None + new_entry.dval = None + + elif isinstance(value, float): + new_entry.datatype = 'float' + new_entry.fval = value + new_entry.tval = '' + new_entry.ival = None + new_entry.bval = None + new_entry.dval = None + + elif isinstance(value, six.string_types): + new_entry.datatype = 'txt' + new_entry.tval = value + new_entry.bval = None + new_entry.ival = None + new_entry.fval = None + new_entry.dval = None + + elif isinstance(value, datetime.datetime): + + # current timezone is taken from the settings file of django + if is_naive(value): + value_to_set = make_aware(value, get_current_timezone()) + else: + value_to_set = value + + new_entry.datatype = 'date' + # TODO: time-aware and time-naive datetime objects, see + # https://docs.djangoproject.com/en/dev/topics/i18n/timezones/#naive-and-aware-datetime-objects + new_entry.dval = value_to_set + new_entry.tval = '' + new_entry.bval = None + new_entry.ival = None + new_entry.fval = None + + elif isinstance(value, (list, tuple)): + + new_entry.datatype = 'list' + new_entry.dval = None + new_entry.tval = '' + new_entry.bval = None + new_entry.ival = len(value) + new_entry.fval = None + + for i, subv in enumerate(value): + # I do not need get_or_create here, because + # above I deleted all children (and I + # expect no concurrency) + # NOTE: I do not pass other_attribs + list_to_return.extend( + cls.create_value( + key=("{}{}{:d}".format(key, cls._sep, i)), value=subv, subspecifier_value=subspecifier_value)) + + elif isinstance(value, dict): + + new_entry.datatype = 'dict' + new_entry.dval = None + new_entry.tval = '' + new_entry.bval = None + new_entry.ival = len(value) + new_entry.fval = None + + for subk, subv in value.items(): + cls.validate_key(subk) + + # I do not need get_or_create here, because + # above I deleted all children (and I + # expect no concurrency) + # NOTE: I do not pass other_attribs + list_to_return.extend( + cls.create_value( + 
key="{}{}{}".format(key, cls._sep, subk), value=subv, subspecifier_value=subspecifier_value)) + else: + try: + jsondata = json.dumps(value) + except TypeError: + raise ValueError( + "Unable to store the value: it must be either a basic datatype, or json-serializable: {}".format( + value)) + + new_entry.datatype = 'json' + new_entry.tval = jsondata + new_entry.bval = None + new_entry.ival = None + new_entry.fval = None + + return list_to_return + + +@python_2_unicode_compatible # pylint: disable=no-init +class DbAttributeBaseClass(DbMultipleValueAttributeBaseClass): + """ + Abstract base class for tables storing element-attribute-value data. + Element is the dbnode; attribute is the key name. + Value is the specific value to store. + + This table had different SQL columns to store different types of data, and + a datatype field to know the actual datatype. + + Moreover, this class unpacks dictionaries and lists when possible, so that + it is possible to query inside recursive lists and dicts. + """ + + # In this way, the related name for the DbAttribute inherited class will be + # 'dbattributes' and for 'dbextra' will be 'dbextras' + # Moreover, automatically destroy attributes and extras if the parent + # node is deleted + # dbnode = m.ForeignKey('DbNode', related_name='%(class)ss', on_delete=m.CASCADE) + # max_length is required by MySql to have indexes and unique constraints + + _subspecifier_field_name = 'dbnode' + + @classmethod + def set_value_for_node(cls, dbnode, key, value, with_transaction=True, stop_if_existing=False): + """ + This is the raw-level method that accesses the DB. No checks are done + to prevent the user from (re)setting a valid key. + To be used only internally. + + :todo: there may be some error on concurrent write; + not checked in this unlucky case! + + :param dbnode: the dbnode for which the attribute should be stored; + if an integer is passed, it will raise, since this functionality is not + supported in the models for the migrations. + :param key: the key of the attribute to store; must be a level-zero + attribute (i.e., no separators in the key) + :param value: the value of the attribute to store + :param with_transaction: if True (default), do this within a transaction, + so that nothing gets stored if a subitem cannot be created. + Otherwise, if this parameter is False, no transaction management + is performed. + :param stop_if_existing: if True, it will stop with an + UniquenessError exception if the key already exists + for the given node. Otherwise, it will + first delete the old value, if existent. The use with True is + useful if you want to use a given attribute as a "locking" value, + e.g. to avoid to perform an action twice on the same node. + Note that, if you are using transactions, you may get the error + only when the transaction is committed. + + :raise ValueError: if the key contains the separator symbol used + internally to unpack dictionaries and lists (defined in cls._sep). 
+ """ + if isinstance(dbnode, six.integer_types): + raise ValueError("Integers (the dbnode pk) are not supported as input.") + else: + dbnode_node = dbnode + + cls.set_value( + key, + value, + with_transaction=with_transaction, + subspecifier_value=dbnode_node, + stop_if_existing=stop_if_existing) + + def __str__(self): + # pylint: disable=no-member + return "[{} ({})].{} ({})".format( + self.dbnode.get_simple_name(invalid_result="Unknown node"), + self.dbnode.pk, + self.key, + self.datatype, + ) + + +class DbAttributeFunctionality(DbAttributeBaseClass): # pylint: disable=no-init + """ + This class defines all the methods that are needed for the correct + deserialization of given attribute dictionaries to the EAV table. + It is a stripped-down Django EAV schema to the absolutely necessary + methods for this deserialization. + """ + pass # pylint: disable=unnecessary-pass + + +class DbExtraFunctionality(DbAttributeBaseClass): # pylint: disable=no-init + """ + This class defines all the methods that are needed for the correct + deserialization of given extras dictionaries to the EAV table. + It is a stripped-down Django EAV schema to the absolutely necessary + methods for this deserialization. + """ + pass # pylint: disable=unnecessary-pass diff --git a/aiida/backends/djsite/db/subtests/migrations/test_migrations_common.py b/aiida/backends/djsite/db/subtests/migrations/test_migrations_common.py new file mode 100644 index 0000000000..ad6de1fc5e --- /dev/null +++ b/aiida/backends/djsite/db/subtests/migrations/test_migrations_common.py @@ -0,0 +1,111 @@ +# -*- coding: utf-8 -*- +########################################################################### +# Copyright (c), The AiiDA team. All rights reserved. # +# This file is part of the AiiDA code. # +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=import-error,no-name-in-module,invalid-name +""" The basic functionality for the migration tests""" +from __future__ import division +from __future__ import print_function +from __future__ import absolute_import + +from django.apps import apps +from django.db.migrations.executor import MigrationExecutor +from django.db import connection + +from aiida.backends.testbase import AiidaTestCase + + +class TestMigrations(AiidaTestCase): + """ + This is the common test class that is used by all migration tests. It migrates to a given + migration point, allows you to set up the database & AiiDA at that point with the necessary + data and migrates then to the final migration point. + In the end it forwards the database at the final migration (as it should be and found before + the migration tests). 
+ """ + + @property + def app(self): + return apps.get_containing_app_config(type(self).__module__).name.split('.')[-1] + + migrate_from = None + migrate_to = None + + def setUp(self): + """Go to a specific schema version before running tests.""" + from aiida.backends import sqlalchemy as sa + from aiida.orm import autogroup + + self.current_autogroup = autogroup.current_autogroup + autogroup.current_autogroup = None + assert self.migrate_from and self.migrate_to, \ + "TestCase '{}' must define migrate_from and migrate_to properties".format(type(self).__name__) + self.migrate_from = [(self.app, self.migrate_from)] + self.migrate_to = [(self.app, self.migrate_to)] + executor = MigrationExecutor(connection) + self.apps = executor.loader.project_state(self.migrate_from).apps + self.schema_editor = connection.schema_editor() + + # Reset session for the migration + sa.get_scoped_session().close() + # Reverse to the original migration + executor.migrate(self.migrate_from) + # Reset session after the migration + sa.get_scoped_session().close() + + self.DbNode = self.apps.get_model('db', 'DbNode') + self.DbUser = self.apps.get_model('db', 'DbUser') + self.DbUser.objects.all().delete() + self.default_user = self.DbUser(1, 'aiida@localhost') + self.default_user.save() + + try: + self.setUpBeforeMigration() + # Run the migration to test + executor = MigrationExecutor(connection) + executor.loader.build_graph() + + # Reset session for the migration + sa.get_scoped_session().close() + executor.migrate(self.migrate_to) + # Reset session after the migration + sa.get_scoped_session().close() + + self.apps = executor.loader.project_state(self.migrate_to).apps + except Exception: + # Bring back the DB to the correct state if this setup part fails + import traceback + traceback.print_stack() + self._revert_database_schema() + raise + + def tearDown(self): + """At the end make sure we go back to the latest schema version.""" + from aiida.orm import autogroup + self._revert_database_schema() + autogroup.current_autogroup = self.current_autogroup + + def setUpBeforeMigration(self): + """Anything to do before running the migrations, which should be implemented in test subclasses.""" + + def _revert_database_schema(self): + """Bring back the DB to the correct state.""" + from ...migrations import LATEST_MIGRATION + from aiida.backends import sqlalchemy as sa + + self.migrate_to = [(self.app, LATEST_MIGRATION)] + + # Reset session for the migration + sa.get_scoped_session().close() + executor = MigrationExecutor(connection) + executor.migrate(self.migrate_to) + # Reset session after the migration + sa.get_scoped_session().close() + + def load_node(self, pk): + return self.DbNode.objects.get(pk=pk) diff --git a/aiida/backends/djsite/db/subtests/test_migrations.py b/aiida/backends/djsite/db/subtests/migrations/test_migrations_many.py similarity index 82% rename from aiida/backends/djsite/db/subtests/test_migrations.py rename to aiida/backends/djsite/db/subtests/migrations/test_migrations_many.py index 96a10e261e..b5fce5b0d2 100644 --- a/aiida/backends/djsite/db/subtests/test_migrations.py +++ b/aiida/backends/djsite/db/subtests/migrations/test_migrations_many.py @@ -7,6 +7,11 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### +# pylint: disable=invalid-name, import-error, no-name-in-module +""" +This file contains the majority of the migration tests that are 
too short to +go to a separate file. +""" from __future__ import division from __future__ import print_function from __future__ import absolute_import @@ -15,81 +20,12 @@ import numpy -from django.apps import apps -from django.db.migrations.executor import MigrationExecutor -from django.db import connection - from aiida.backends.testbase import AiidaTestCase from aiida.backends.djsite.db.migrations import ModelModifierV0025 from aiida.backends.general.migrations import utils from aiida.common.exceptions import IntegrityError from aiida.manage.database.integrity.duplicate_uuid import deduplicate_uuids, verify_uuid_uniqueness - - -class TestMigrations(AiidaTestCase): - - @property - def app(self): - return apps.get_containing_app_config(type(self).__module__).name.split('.')[-1] - - migrate_from = None - migrate_to = None - - def setUp(self): - """Go to a specific schema version before running tests.""" - from aiida.orm import autogroup - - self.current_autogroup = autogroup.current_autogroup - autogroup.current_autogroup = None - assert self.migrate_from and self.migrate_to, \ - "TestCase '{}' must define migrate_from and migrate_to properties".format(type(self).__name__) - self.migrate_from = [(self.app, self.migrate_from)] - self.migrate_to = [(self.app, self.migrate_to)] - executor = MigrationExecutor(connection) - self.apps = executor.loader.project_state(self.migrate_from).apps - self.schema_editor = connection.schema_editor() - - # Reverse to the original migration - executor.migrate(self.migrate_from) - - self.DbNode = self.apps.get_model('db', 'DbNode') - self.DbUser = self.apps.get_model('db', 'DbUser') - self.default_user = self.DbUser(1, 'aiida@localhost') - self.default_user.save() - - try: - self.setUpBeforeMigration() - # Run the migration to test - executor = MigrationExecutor(connection) - executor.loader.build_graph() - executor.migrate(self.migrate_to) - - self.apps = executor.loader.project_state(self.migrate_to).apps - except Exception: - # Bring back the DB to the correct state if this setup part fails - import traceback - traceback.print_stack() - self._revert_database_schema() - raise - - def tearDown(self): - """At the end make sure we go back to the latest schema version.""" - from aiida.orm import autogroup - self._revert_database_schema() - autogroup.current_autogroup = self.current_autogroup - - def setUpBeforeMigration(self): - """Anything to do before running the migrations, which should be implemented in test subclasses.""" - - def _revert_database_schema(self): - """Bring back the DB to the correct state.""" - from ..migrations import LATEST_MIGRATION - self.migrate_to = [(self.app, LATEST_MIGRATION)] - executor = MigrationExecutor(connection) - executor.migrate(self.migrate_to) - - def load_node(self, pk): - return self.DbNode.objects.get(pk=pk) +from .test_migrations_common import TestMigrations class TestMigrationsModelModifierV0025(TestMigrations): @@ -100,7 +36,7 @@ def set_attribute(self, node, key, value): modifier = ModelModifierV0025(self.apps, DbAttribute) modifier.set_value_for_node(node.pk, key, value) - def get_attribute(self, node, key, default=None): + def get_attribute(self, node, key, default=None): # pylint: disable=missing-docstring DbAttribute = self.apps.get_model('db', 'DbAttribute') modifier = ModelModifierV0025(self.apps, DbAttribute) try: @@ -108,7 +44,8 @@ def get_attribute(self, node, key, default=None): except AttributeError: return default - def get_node_array(self, node, name): + @staticmethod + def get_node_array(node, name): 
return utils.load_numpy_array_from_repository(node.uuid, name) def set_node_array(self, node, name, array): @@ -124,11 +61,13 @@ def set_node_array(self, node, name, array): class TestNoMigrations(AiidaTestCase): + """Verify that no django migrations remain.""" - def test_no_remaining_migrations(self): - """Verify that no django migrations remain. - - Equivalent to python manage.py makemigrations --check""" + def test_no_remaining_migrations(self): # pylint: disable=no-self-use + """ + Verify that no django migrations remain. + Equivalent to python manage.py makemigrations --check + """ from django.core.management import call_command @@ -137,6 +76,7 @@ def test_no_remaining_migrations(self): class TestDuplicateNodeUuidMigration(TestMigrations): + """Test the migration that verifies that there are no duplicate UUIDs""" migrate_from = '0013_django_1_8' migrate_to = '0014_add_node_uuid_unique_constraint' @@ -160,13 +100,13 @@ def setUpBeforeMigration(self): self.nodes_boolean.append(node_bool) self.nodes_integer.append(node_int) - for i in range(self.n_bool_duplicates): + for _ in range(self.n_bool_duplicates): node = self.DbNode(type='data.bool.Bool.', user_id=self.default_user.id, uuid=node_bool.uuid) node.save() utils.put_object_from_string(node.uuid, self.file_name, self.file_content) self.nodes_boolean.append(node) - for i in range(self.n_int_duplicates): + for _ in range(self.n_int_duplicates): node = self.DbNode(type='data.int.Int.', user_id=self.default_user.id, uuid=node_int.uuid) node.save() utils.put_object_from_string(node.uuid, self.file_name, self.file_content) @@ -199,6 +139,11 @@ def test_deduplicated_uuids(self): class TestUuidMigration(TestMigrations): + """ + This test class checks the migration 0018_django_1_11 which switches from the django_extensions + UUID field to the native UUIDField of django 1.11. It also introduces unique constraints + on all uuid columns (previously existed only on dbnode). + """ migrate_from = '0017_drop_dbcalcstate' migrate_to = '0018_django_1_11' @@ -217,6 +162,10 @@ def test_uuid_untouched(self): class TestGroupRenamingMigration(TestMigrations): + """ + This test class checks the migration 0022_dbgroup_type_string_change_content which updates the + type_string column of the groups. + """ migrate_from = '0021_dbgroup_name_to_label_type_to_type_string' migrate_to = '0022_dbgroup_type_string_change_content' @@ -231,7 +180,8 @@ def setUpBeforeMigration(self): self.group_user_pk = group_user.pk # test data.upf group type_string: 'data.upf.family' -> 'data.upf' - group_data_upf = DbGroup(label='test_data_upf_group', user_id=self.default_user.id, type_string='data.upf.family') + group_data_upf = DbGroup( + label='test_data_upf_group', user_id=self.default_user.id, type_string='data.upf.family') group_data_upf.save() self.group_data_upf_pk = group_data_upf.pk @@ -246,6 +196,7 @@ def setUpBeforeMigration(self): self.group_autorun_pk = group_autorun.pk def test_group_string_update(self): + """ Test that the type_string were updated correctly """ DbGroup = self.apps.get_model('db', 'DbGroup') # test user group type_string: '' -> 'user' @@ -266,6 +217,10 @@ def test_group_string_update(self): class TestCalcAttributeKeysMigration(TestMigrationsModelModifierV0025): + """ + This test class checks that the migration 0023_calc_job_option_attribute_keys works as expected + which migrates CalcJobNode attributes for metadata options whose key changed. 
+ """ migrate_from = '0022_dbgroup_type_string_change_content' migrate_to = '0023_calc_job_option_attribute_keys' @@ -313,31 +268,39 @@ def test_attribute_key_changes(self): self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PROCESS_LABEL_NEW), self.process_label) self.assertEqual(self.get_attribute(self.node_calc, self.KEY_RESOURCES_NEW), self.resources) - self.assertEqual(self.get_attribute(self.node_calc, self.KEY_ENVIRONMENT_VARIABLES_NEW), self.environment_variables) + self.assertEqual( + self.get_attribute(self.node_calc, self.KEY_ENVIRONMENT_VARIABLES_NEW), self.environment_variables) self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PARSER_NAME_NEW), self.parser_name) self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PROCESS_LABEL_OLD, default=NOT_FOUND), NOT_FOUND) self.assertEqual(self.get_attribute(self.node_calc, self.KEY_RESOURCES_OLD, default=NOT_FOUND), NOT_FOUND) - self.assertEqual(self.get_attribute(self.node_calc, self.KEY_ENVIRONMENT_VARIABLES_OLD, default=NOT_FOUND), NOT_FOUND) + self.assertEqual( + self.get_attribute(self.node_calc, self.KEY_ENVIRONMENT_VARIABLES_OLD, default=NOT_FOUND), NOT_FOUND) self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PARSER_NAME_OLD, default=NOT_FOUND), NOT_FOUND) # The following node should not be migrated even if its attributes have the matching keys because # the node is not a ProcessNode self.assertEqual(self.get_attribute(self.node_other, self.KEY_PROCESS_LABEL_OLD), self.process_label) self.assertEqual(self.get_attribute(self.node_other, self.KEY_RESOURCES_OLD), self.resources) - self.assertEqual(self.get_attribute(self.node_other, self.KEY_ENVIRONMENT_VARIABLES_OLD), self.environment_variables) + self.assertEqual( + self.get_attribute(self.node_other, self.KEY_ENVIRONMENT_VARIABLES_OLD), self.environment_variables) self.assertEqual(self.get_attribute(self.node_other, self.KEY_PARSER_NAME_OLD), self.parser_name) self.assertEqual(self.get_attribute(self.node_other, self.KEY_PROCESS_LABEL_NEW, default=NOT_FOUND), NOT_FOUND) self.assertEqual(self.get_attribute(self.node_other, self.KEY_RESOURCES_NEW, default=NOT_FOUND), NOT_FOUND) - self.assertEqual(self.get_attribute(self.node_other, self.KEY_ENVIRONMENT_VARIABLES_NEW, default=NOT_FOUND), NOT_FOUND) + self.assertEqual( + self.get_attribute(self.node_other, self.KEY_ENVIRONMENT_VARIABLES_NEW, default=NOT_FOUND), NOT_FOUND) self.assertEqual(self.get_attribute(self.node_other, self.KEY_PARSER_NAME_NEW, default=NOT_FOUND), NOT_FOUND) class TestDbLogMigrationRecordCleaning(TestMigrations): + """ + This test class checks that the migration 0024_dblog_update works as expected. 
+ That migration updates of the DbLog table and adds uuids + """ migrate_from = '0023_calc_job_option_attribute_keys' migrate_to = '0024_dblog_update' - def setUpBeforeMigration(self): + def setUpBeforeMigration(self): # pylint: disable=too-many-locals import json import importlib from aiida.backends.sqlalchemy.utils import dumps_json @@ -448,22 +411,20 @@ def setUpBeforeMigration(self): ) # Getting the serialized Dict logs - param_data = DbLog.objects.filter(objpk=param.pk).filter(objname='something.else.').values( - *update_024.values_to_export)[:1] + param_data = DbLog.objects.filter(objpk=param.pk).filter( + objname='something.else.').values(*update_024.values_to_export)[:1] serialized_param_data = dumps_json(list(param_data)) # Getting the serialized logs for the unknown entity logs (as the export migration fuction # provides them) - this should coincide to the above serialized_unknown_exp_logs = update_024.get_serialized_unknown_entity_logs(self.schema_editor) # Getting their number unknown_exp_logs_number = update_024.get_unknown_entity_log_number(self.schema_editor) - self.to_check['Dict'] = (serialized_param_data, serialized_unknown_exp_logs, - unknown_exp_logs_number) + self.to_check['Dict'] = (serialized_param_data, serialized_unknown_exp_logs, unknown_exp_logs_number) # Getting the serialized legacy workflow logs - leg_wf = DbLog.objects.filter( - objpk=leg_workf.pk).filter( + leg_wf = DbLog.objects.filter(objpk=leg_workf.pk).filter( objname='aiida.workflows.user.topologicalworkflows.topo.TopologicalWorkflow').values( - *update_024.values_to_export)[:1] + *update_024.values_to_export)[:1] serialized_leg_wf_logs = dumps_json(list(leg_wf)) # Getting the serialized logs for the legacy workflow logs (as the export migration function # provides them) - this should coincide to the above @@ -472,9 +433,7 @@ def setUpBeforeMigration(self): self.to_check['WorkflowNode'] = (serialized_leg_wf_logs, serialized_leg_wf_exp_logs, eg_wf_exp_logs_number) # Getting the serialized logs that don't correspond to a DbNode record - logs_no_node = DbLog.objects.filter( - id__in=[log_5.id, log_6.id]).values( - *update_024.values_to_export) + logs_no_node = DbLog.objects.filter(id__in=[log_5.id, log_6.id]).values(*update_024.values_to_export) serialized_logs_no_node = dumps_json(list(logs_no_node)) # Getting the serialized logs that don't correspond to a node (as the export migration function # provides them) - this should coincide to the above @@ -526,8 +485,9 @@ def test_dblog_correct_export_of_logs(self): self.assertEqual(self.to_check['WorkflowNode'][0], self.to_check['WorkflowNode'][1]) self.assertEqual(self.to_check['WorkflowNode'][2], 1) - self.assertEqual(sorted(list(json.loads(self.to_check['NoNode'][0])), key=lambda k: k['id']), - sorted(list(json.loads(self.to_check['NoNode'][1])), key=lambda k: k['id'])) + self.assertEqual( + sorted(list(json.loads(self.to_check['NoNode'][0])), key=lambda k: k['id']), + sorted(list(json.loads(self.to_check['NoNode'][1])), key=lambda k: k['id'])) self.assertEqual(self.to_check['NoNode'][2], 2) def test_dblog_unique_uuids(self): @@ -559,6 +519,7 @@ class TestDbLogMigrationBackward(TestMigrations): """ Check that backward migrations work also for the DbLog migration(s). 
""" + migrate_from = '0024_dblog_update' migrate_to = '0023_calc_job_option_attribute_keys' @@ -619,26 +580,36 @@ def test_objpk_objname(self): DbLog = self.apps.get_model('db', 'DbLog') # Check that only two log records exist with the correct objpk objname - for log_pk in self.to_check.keys(): + for log_pk, to_check_value in self.to_check.items(): log_entry = DbLog.objects.filter(pk=log_pk)[:1].get() - log_dbnode_id, type = self.to_check[log_pk] - self.assertEqual(log_dbnode_id, log_entry.objpk, - "The dbnode_id ({}) of the 0024 schema version should be identical to the objpk ({}) of " - "the 0023 schema version.".format(log_dbnode_id, log_entry.objpk)) - self.assertEqual(type, log_entry.objname, - "The type ({}) of the linked node of the 0024 schema version should be identical to the " - "objname ({}) of the 0023 schema version.".format(type, log_entry.objname)) - self.assertEqual(log_dbnode_id, json.loads(log_entry.metadata)['objpk'], - "The dbnode_id ({}) of the 0024 schema version should be identical to the objpk ({}) of " - "the 0023 schema version stored in the metadata.".format( - log_dbnode_id, json.loads(log_entry.metadata)['objpk'])) - self.assertEqual(type, json.loads(log_entry.metadata)['objname'], - "The type ({}) of the linked node of the 0024 schema version should be identical to the " - "objname ({}) of the 0023 schema version stored in the metadata.".format( - type, json.loads(log_entry.metadata)['objname'])) + log_dbnode_id, node_type = to_check_value + self.assertEqual( + log_dbnode_id, log_entry.objpk, + "The dbnode_id ({}) of the 0024 schema version should be identical to the objpk ({}) of " + "the 0023 schema version.".format(log_dbnode_id, log_entry.objpk)) + self.assertEqual( + node_type, log_entry.objname, + "The type ({}) of the linked node of the 0024 schema version should be identical to the " + "objname ({}) of the 0023 schema version.".format(node_type, log_entry.objname)) + self.assertEqual( + log_dbnode_id, + json.loads(log_entry.metadata)['objpk'], + "The dbnode_id ({}) of the 0024 schema version should be identical to the objpk ({}) of " + "the 0023 schema version stored in the metadata.".format(log_dbnode_id, + json.loads(log_entry.metadata)['objpk'])) + self.assertEqual( + node_type, + json.loads(log_entry.metadata)['objname'], + "The type ({}) of the linked node of the 0024 schema version should be identical to the " + "objname ({}) of the 0023 schema version stored in the metadata.".format( + node_type, + json.loads(log_entry.metadata)['objname'])) class TestDataMoveWithinNodeMigration(TestMigrations): + """ + Check that backward migrations work also for the DbLog migration(s). + """ migrate_from = '0024_dblog_update' migrate_to = '0025_move_data_within_node_module' @@ -658,16 +629,22 @@ def test_data_type_string(self): class TestTrajectoryDataMigration(TestMigrationsModelModifierV0025): + """ + This test class checks that the migrations 0026_trajectory_symbols_to_attribute and + 0027_delete_trajectory_symbols_array work as expected. + These are data migrations for `TrajectoryData` nodes where symbol lists are moved + from repository array to attributes. 
+ """ migrate_from = '0025_move_data_within_node_module' migrate_to = '0027_delete_trajectory_symbols_array' stepids = numpy.array([60, 70]) times = stepids * 0.01 - positions = numpy.array( - [[[0., 0., 0.], [0.5, 0.5, 0.5], [1.5, 1.5, 1.5]], [[0., 0., 0.], [0.5, 0.5, 0.5], [1.5, 1.5, 1.5]]]) - velocities = numpy.array( - [[[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]], [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [-0.5, -0.5, -0.5]]]) + positions = numpy.array([[[0., 0., 0.], [0.5, 0.5, 0.5], [1.5, 1.5, 1.5]], + [[0., 0., 0.], [0.5, 0.5, 0.5], [1.5, 1.5, 1.5]]]) + velocities = numpy.array([[[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]], + [[0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [-0.5, -0.5, -0.5]]]) cells = numpy.array([[[2., 0., 0.], [0., 2., 0.], [0., 0., 2.]], [[3., 0., 0.], [0., 3., 0.], [0., 0., 3.]]]) def setUpBeforeMigration(self): @@ -683,6 +660,7 @@ def setUpBeforeMigration(self): self.set_node_array(self.node, 'velocities', self.velocities) def test_trajectory_symbols(self): + """ Check that the trajectories are migrated correctly """ node = self.load_node(self.node.id) self.assertSequenceEqual(self.get_attribute(node, 'symbols'), ['H', 'O', 'C']) self.assertSequenceEqual(self.get_node_array(node, 'velocities').tolist(), self.velocities.tolist()) @@ -692,6 +670,12 @@ def test_trajectory_symbols(self): class TestNodePrefixRemovalMigration(TestMigrations): + """ + This test class checks that the migration 0028_remove_node_prefix works as expected. + + That is the final data migration for `Nodes` after `aiida.orm.nodes` reorganization + was finalized to remove the `node.` prefix + """ migrate_from = '0027_delete_trajectory_symbols_array' migrate_to = '0028_remove_node_prefix' @@ -712,6 +696,11 @@ def test_data_node_type_string(self): class TestParameterDataToDictMigration(TestMigrations): + """ + This test class checks that the migration 0029_rename_parameter_data_to_dict works as expected. + + This is a data migration for the renaming of `ParameterData` to `Dict`. + """ migrate_from = '0028_remove_node_prefix' migrate_to = '0029_rename_parameter_data_to_dict' @@ -726,7 +715,12 @@ def test_data_node_type_string(self): self.assertEqual(node.type, 'data.dict.Dict.') -class TestTextFieldToJSONFieldMigration(TestMigrations): +class TestTextFieldToJSONFieldMigration(TestMigrations): # pylint: disable=too-many-instance-attributes + """ + This test class checks that the migration 0033_replace_text_field_with_json_field works as expected. + + That migration replaces the use of text fields to store JSON data with builtin JSONFields. 
+ """ migrate_from = '0032_remove_legacy_workflows' migrate_to = '0033_replace_text_field_with_json_field' @@ -742,7 +736,14 @@ def setUpBeforeMigration(self): self.node = self.DbNode(node_type="node.process.calculation.CalculationNode.", user_id=self.default_user.id) self.node.save() - self.computer_metadata = {'shebang': '#!/bin/bash', 'workdir': '/scratch/', 'append_text': '', 'prepend_text': '', 'mpirun_command': ['mpirun', '-np', '{tot_num_mpiprocs}'], 'default_mpiprocs_per_machine': 1} + self.computer_metadata = { + 'shebang': '#!/bin/bash', + 'workdir': '/scratch/', + 'append_text': '', + 'prepend_text': '', + 'mpirun_command': ['mpirun', '-np', '{tot_num_mpiprocs}'], + 'default_mpiprocs_per_machine': 1 + } self.computer_kwargs = { 'name': 'localhost_testing', 'hostname': 'localhost', diff --git a/aiida/backends/djsite/db/subtests/test_generic.py b/aiida/backends/djsite/db/subtests/test_generic.py index 7c35fa01c2..cdb5c6ab9f 100644 --- a/aiida/backends/djsite/db/subtests/test_generic.py +++ b/aiida/backends/djsite/db/subtests/test_generic.py @@ -133,37 +133,3 @@ def test_creation_from_dbgroup(self): self.assertEquals(g.pk, gcopy.pk) self.assertEquals(g.uuid, gcopy.uuid) - - -class TestDbExtrasDjango(AiidaTestCase): - """Test DbAttributes.""" - - def test_replacement_1(self): - from aiida.backends.djsite.db.models import DbExtra - - n1 = Data().store() - n2 = Data().store() - - DbExtra.set_value_for_node(n1.backend_entity.dbmodel, "pippo", [1, 2, 'a']) - DbExtra.set_value_for_node(n1.backend_entity.dbmodel, "pippobis", [5, 6, 'c']) - DbExtra.set_value_for_node(n2.backend_entity.dbmodel, "pippo2", [3, 4, 'b']) - - self.assertEquals(n1.extras, {'pippo': [1, 2, 'a'], - 'pippobis': [5, 6, 'c'], - '_aiida_hash': n1.get_hash() - }) - self.assertEquals(n2.extras, {'pippo2': [3, 4, 'b'], - '_aiida_hash': n2.get_hash() - }) - - new_attrs = {"newval1": "v", "newval2": [1, {"c": "d", "e": 2}]} - - DbExtra.reset_values_for_node(n1.backend_entity.dbmodel, attributes=new_attrs) - self.assertEquals(n1.extras, new_attrs) - self.assertEquals(n2.extras, {'pippo2': [3, 4, 'b'], '_aiida_hash': n2.get_hash()}) - - DbExtra.del_value_for_node(n1.backend_entity.dbmodel, key='newval2') - del new_attrs['newval2'] - self.assertEquals(n1.extras, new_attrs) - # Also check that other nodes were not damaged - self.assertEquals(n2.extras, {'pippo2': [3, 4, 'b'], '_aiida_hash': n2.get_hash()}) diff --git a/aiida/backends/djsite/db/subtests/test_nodes.py b/aiida/backends/djsite/db/subtests/test_nodes.py index 05ffa2a2e2..4e9980ddb7 100644 --- a/aiida/backends/djsite/db/subtests/test_nodes.py +++ b/aiida/backends/djsite/db/subtests/test_nodes.py @@ -15,12 +15,12 @@ from __future__ import print_function from __future__ import absolute_import from aiida.backends.testbase import AiidaTestCase -from aiida.common.links import LinkType -from aiida.orm import Data, Node -from aiida.orm import CalculationNode +from aiida.orm import Data +from unittest import skip class TestNodeBasicDjango(AiidaTestCase): + @skip("This seems not to be needed. It should be a more general tests for both backends.") def test_replace_extras_2(self): """ This is a Django specific test which checks (manually) that, @@ -73,6 +73,7 @@ def test_replace_extras_2(self): self.assertEquals(len(DbExtra.objects.filter( dbnode=a.backend_entity.dbmodel, key__startswith=('dict' + DbExtra._sep))), 0) + @skip("This seems not to be needed. 
It should be a more general tests for both backends.") def test_attrs_and_extras_wrong_keyname(self): """ Attribute keys cannot include the separator symbol in the key diff --git a/aiida/backends/djsite/db/subtests/test_query.py b/aiida/backends/djsite/db/subtests/test_query.py index 0188b20990..62cd6c30fb 100644 --- a/aiida/backends/djsite/db/subtests/test_query.py +++ b/aiida/backends/djsite/db/subtests/test_query.py @@ -11,18 +11,17 @@ from __future__ import print_function from __future__ import absolute_import from aiida.backends.testbase import AiidaTestCase +from unittest import skip class TestQueryBuilderDjango(AiidaTestCase): + @skip("This test passes but we should see it is still valid under Django JSONB") def test_clsf_django(self): """ This tests the classifications of the QueryBuilder u. the django backend. """ - from aiida.orm.implementation.django.dummy_model import ( - DbNode, DbUser, DbComputer, - DbGroup, - ) + import aiida.backends.djsite.db.models as djmodels from aiida.common.exceptions import DbContentError from aiida.orm import QueryBuilder, Group, Node, Computer, Data, StructureData qb = QueryBuilder() @@ -39,44 +38,44 @@ def test_clsf_django(self): qb._get_ormclass(None, 'data.structure.StructureData.'), ): self.assertEqual(classifiers['ormclass_type_string'], 'data.structure.StructureData.') - self.assertTrue(issubclass(cls, DbNode)) + self.assertTrue(issubclass(cls, djmodels.DbNode.sa)) for cls, classifiers in ( - qb._get_ormclass(DbNode, None), + qb._get_ormclass(djmodels.DbNode.sa, None), ): self.assertEqual(classifiers['ormclass_type_string'], Node._plugin_type_string) - self.assertTrue(issubclass(cls, DbNode)) + self.assertTrue(issubclass(cls, djmodels.DbNode.sa)) for cls, classifiers in ( - qb._get_ormclass(DbGroup, None), + qb._get_ormclass(djmodels.DbGroup.sa, None), qb._get_ormclass(Group, None), qb._get_ormclass(None, 'group'), qb._get_ormclass(None, 'Group'), ): self.assertEqual(classifiers['ormclass_type_string'], 'group') - self.assertTrue(issubclass(cls, DbGroup)) + self.assertTrue(issubclass(cls, djmodels.DbGroup.sa)) for cls, classifiers in ( - qb._get_ormclass(DbUser, None), - qb._get_ormclass(DbUser, None), + qb._get_ormclass(djmodels.DbUser.sa, None), + qb._get_ormclass(djmodels.DbUser.sa, None), qb._get_ormclass(None, "user"), qb._get_ormclass(None, "User"), ): self.assertEqual(classifiers['ormclass_type_string'], 'user') - self.assertTrue(issubclass(cls, DbUser)) + self.assertTrue(issubclass(cls, djmodels.DbUser.sa)) for cls, classifiers in ( - qb._get_ormclass(DbComputer, None), + qb._get_ormclass(djmodels.DbComputer.sa, None), qb._get_ormclass(Computer, None), qb._get_ormclass(None, 'computer'), qb._get_ormclass(None, 'Computer'), ): self.assertEqual(classifiers['ormclass_type_string'], 'computer') - self.assertTrue(issubclass(cls, DbComputer)) + self.assertTrue(issubclass(cls, djmodels.DbComputer.sa)) for cls, classifiers in ( qb._get_ormclass(Data, None), qb._get_ormclass(None, 'data.Data.'), ): self.assertEqual(classifiers['ormclass_type_string'], Data._plugin_type_string) - self.assertTrue(issubclass(cls, DbNode)) + self.assertTrue(issubclass(cls, djmodels.DbNode.sa)) diff --git a/aiida/backends/djsite/globalsettings.py b/aiida/backends/djsite/globalsettings.py index f0d527a1fa..2bc75ddaac 100644 --- a/aiida/backends/djsite/globalsettings.py +++ b/aiida/backends/djsite/globalsettings.py @@ -14,24 +14,23 @@ from __future__ import division from __future__ import print_function from __future__ import absolute_import -from django.db import 
IntegrityError -from aiida.common.exceptions import UniquenessError +from aiida.backends.djsite.db.models import DbSetting +from aiida.backends.utils import validate_attribute_key +from aiida.backends.utils import get_value_of_sub_field +from aiida.common.exceptions import NotExistent def set_global_setting(key, value, description=None): """ Set a global setting in the DbSetting table (therefore, stored at the DB level). """ - from aiida.backends.djsite.db.models import DbSetting - # Before storing, validate the key - DbSetting.validate_key(key) + validate_attribute_key(key) - # This also saves in the DB - try: - DbSetting.set_value(key, value, other_attribs={"description": description}) - except IntegrityError as exception: - raise UniquenessError(exception) + other_attribs = dict() + if description is not None: + other_attribs["description"] = description + DbSetting.set_value(key, value, other_attribs=other_attribs) def del_global_setting(key): @@ -42,16 +41,12 @@ def del_global_setting(key): :raise KeyError: if the setting does not exist in the DB """ from aiida.backends.djsite.db.models import DbSetting - from django.core.exceptions import ObjectDoesNotExist try: - setting = DbSetting.objects.get(key=key) - except ObjectDoesNotExist: + DbSetting.del_value(key=key) + except KeyError: raise KeyError("No global setting with key={}".format(key)) - # This does not raise exceptions - DbSetting.del_value(key=key) - def get_global_setting(key): """ @@ -61,14 +56,16 @@ def get_global_setting(key): :raise KeyError: if the setting does not exist in the DB """ from aiida.backends.djsite.db.models import DbSetting - from django.core.exceptions import ObjectDoesNotExist # Check first that the table exists table_check_test() try: - return DbSetting.objects.get(key=key).getvalue() - except ObjectDoesNotExist: + res = get_value_of_sub_field(key, lambda given_key: DbSetting.objects.filter(key=given_key).first().getvalue()) + if res is None: + raise NotExistent + return res + except NotExistent: raise KeyError("No global setting with key={}".format(key)) diff --git a/aiida/backends/djsite/queries.py b/aiida/backends/djsite/queries.py index 2563ebe3cf..512a577f41 100644 --- a/aiida/backends/djsite/queries.py +++ b/aiida/backends/djsite/queries.py @@ -11,8 +11,6 @@ from __future__ import absolute_import from __future__ import print_function -from contextlib import contextmanager - from six.moves import zip from aiida.backends.general.abstractqueries import AbstractQueryManager @@ -47,23 +45,24 @@ def get_creation_statistics( an integer with the number of nodes created that day. 
""" import sqlalchemy as sa - from aiida.orm.implementation.django import dummy_model + import aiida.backends.djsite.db.models as djmodels + from aiida.orm.implementation.django.querybuilder import DjangoQueryBuilder # Get the session (uses internally aldjemy - so, sqlalchemy) also for the Djsite backend - s = dummy_model.get_aldjemy_session() + s = DjangoQueryBuilder.get_session() retdict = {} - total_query = s.query(dummy_model.DbNode) - types_query = s.query(dummy_model.DbNode.node_type.label('typestring'), - sa.func.count(dummy_model.DbNode.id)) - stat_query = s.query(sa.func.date_trunc('day', dummy_model.DbNode.ctime).label('cday'), - sa.func.count(dummy_model.DbNode.id)) + total_query = s.query(djmodels.DbNode.sa) + types_query = s.query(djmodels.DbNode.sa.node_type.label('typestring'), + sa.func.count(djmodels.DbNode.sa.id)) + stat_query = s.query(sa.func.date_trunc('day', djmodels.DbNode.sa.ctime).label('cday'), + sa.func.count(djmodels.DbNode.sa.id)) if user_pk is not None: - total_query = total_query.filter(dummy_model.DbNode.user_id == user_pk) - types_query = types_query.filter(dummy_model.DbNode.user_id == user_pk) - stat_query = stat_query.filter(dummy_model.DbNode.user_id == user_pk) + total_query = total_query.filter(djmodels.DbNode.sa.user_id == user_pk) + types_query = types_query.filter(djmodels.DbNode.sa.user_id == user_pk) + stat_query = stat_query.filter(djmodels.DbNode.sa.user_id == user_pk) # Total number of nodes retdict["total"] = total_query.count() @@ -115,7 +114,6 @@ def get_bands_and_parents_structure(self, args): """ Returns bands and closest parent structure """ - from collections import defaultdict from django.db.models import Q from aiida.backends.djsite.db import models from aiida.common.utils import grouper @@ -156,25 +154,10 @@ def get_bands_and_parents_structure(self, args): struc_pks = [structure_dict[pk] for pk in pks] # query for the attributes needed for the structure formula - attr_query = Q(key__startswith='kinds') | Q(key__startswith='sites') - attrs = models.DbAttribute.objects.filter(attr_query, - dbnode__in=struc_pks).values_list( - 'dbnode__pk', 'key', 'datatype', 'tval', 'fval', - 'ival', 'bval', 'dval') - - results = defaultdict(dict) - for attr in attrs: - results[attr[0]][attr[1]] = {"datatype": attr[2], - "tval": attr[3], - "fval": attr[4], - "ival": attr[5], - "bval": attr[6], - "dval": attr[7]} - # organize all of it in a dictionary + res_attr = models.DbNode.objects.filter(id__in=struc_pks).values_list('id', 'attributes') deser_data = {} - for k in results: - deser_data[k] = models.deserialize_attributes(results[k], - sep=models.DbAttribute._sep) + for rattr in res_attr: + deser_data[rattr[0]] = rattr[1] # prepare the printout for ((bid, blabel, bdate), struc_pk) in zip(this_chunk, struc_pks): diff --git a/aiida/backends/djsite/settings.py b/aiida/backends/djsite/settings.py index f5b9683a9c..9b785951d4 100644 --- a/aiida/backends/djsite/settings.py +++ b/aiida/backends/djsite/settings.py @@ -7,15 +7,17 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -""" -Django settings for the AiiDA project. -""" +# pylint: disable=import-error, no-name-in-module +""" Django settings for the AiiDA project. 
""" +from __future__ import absolute_import from __future__ import division from __future__ import print_function -from __future__ import absolute_import import sys + import os +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.dialects.postgresql import UUID from aiida.common import exceptions from aiida.common.timezone import get_current_timezone @@ -113,4 +115,10 @@ 'django.contrib.auth', 'django.contrib.contenttypes', 'aiida.backends.djsite.db', + 'aldjemy', ] + +ALDJEMY_DATA_TYPES = { + 'UUIDField': lambda field: UUID(), + 'JSONField': lambda field: JSONB(), +} diff --git a/aiida/backends/djsite/utils.py b/aiida/backends/djsite/utils.py index cc41cc181e..a974d50b21 100644 --- a/aiida/backends/djsite/utils.py +++ b/aiida/backends/djsite/utils.py @@ -7,7 +7,7 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -# pylint: disable=no-name-in-module, no-member, import-error +# pylint: disable=no-name-in-module,no-member,import-error """Utility functions specific to the Django backend.""" from __future__ import division from __future__ import print_function @@ -16,6 +16,9 @@ import os import django +SCHEMA_VERSION_DB_KEY = 'db|schemaversion' +SCHEMA_VERSION_DB_DESCRIPTION = "The version of the schema used in this database." + def load_dbenv(profile): """Load the database environment and ensure that the code and database schema versions are compatible. @@ -101,8 +104,7 @@ def set_db_schema_version(version): you are doing. """ from aiida.backends.utils import set_global_setting - return set_global_setting( - 'db|schemaversion', version, description="The version of the schema used in this database.") + return set_global_setting(SCHEMA_VERSION_DB_KEY, version, description=SCHEMA_VERSION_DB_DESCRIPTION) def get_db_schema_version(): @@ -112,7 +114,7 @@ def get_db_schema_version(): """ from aiida.backends.utils import get_global_setting try: - return get_global_setting('db|schemaversion') + return get_global_setting(SCHEMA_VERSION_DB_KEY) except KeyError: return None diff --git a/aiida/backends/sqlalchemy/globalsettings.py b/aiida/backends/sqlalchemy/globalsettings.py index e5b88c9c7a..f276e3ab38 100644 --- a/aiida/backends/sqlalchemy/globalsettings.py +++ b/aiida/backends/sqlalchemy/globalsettings.py @@ -18,6 +18,7 @@ from aiida.backends.sqlalchemy.models.settings import DbSetting from sqlalchemy.orm.exc import NoResultFound from aiida.backends.sqlalchemy import get_scoped_session +from aiida.backends.utils import validate_attribute_key def set_global_setting(key, value, description=None): @@ -25,7 +26,13 @@ def set_global_setting(key, value, description=None): Set a global setting in the DbSetting table (therefore, stored at the DB level). 
""" - DbSetting.set_value(key, value, other_attribs={"description": description}) + # Before storing, validate the key + validate_attribute_key(key) + + other_attribs = dict() + if description is not None: + other_attribs["description"] = description + DbSetting.set_value(key, value, other_attribs=other_attribs) def del_global_setting(key): @@ -49,7 +56,7 @@ def get_global_setting(key): :raise KeyError: if the setting does not exist in the DB """ - from aiida.backends.sqlalchemy.models.utils import get_value_of_sub_field + from aiida.backends.utils import get_value_of_sub_field # Check first that the table exists table_check_test() @@ -68,7 +75,7 @@ def get_global_setting_description(key): DB, or raise a KeyError if the setting is not present in the DB or the table doesn't exist. """ - from aiida.backends.sqlalchemy.models.utils import validate_key + from aiida.backends.utils import validate_key # Check first that the table exists table_check_test() @@ -87,7 +94,6 @@ def table_check_test(): it rainses a KeyError. """ from sqlalchemy.engine import reflection - from aiida.backends import sqlalchemy as sa inspector = reflection.Inspector.from_engine(get_scoped_session().bind) if 'db_dbsetting' not in inspector.get_table_names(): raise KeyError("No table found") diff --git a/aiida/backends/sqlalchemy/models/computer.py b/aiida/backends/sqlalchemy/models/computer.py index 3ba2a73ae5..62cb28b701 100644 --- a/aiida/backends/sqlalchemy/models/computer.py +++ b/aiida/backends/sqlalchemy/models/computer.py @@ -37,6 +37,10 @@ def __init__(self, *args, **kwargs): # TODO SP: it's supposed to be nullable, but there is a NOT constraint inside the DB. self.description = "" + # If someone passes metadata in **kwargs we change it to _metadata + if 'metadata' in kwargs.keys(): + kwargs['_metadata'] = kwargs.pop('metadata') + super(DbComputer, self).__init__(*args, **kwargs) @property diff --git a/aiida/backends/sqlalchemy/models/node.py b/aiida/backends/sqlalchemy/models/node.py index f9758e2d67..25ad815c5c 100644 --- a/aiida/backends/sqlalchemy/models/node.py +++ b/aiida/backends/sqlalchemy/models/node.py @@ -133,7 +133,7 @@ def get_simple_name(self, invalid_result=None): thistype = thistype[:-1] # Strip final dot return thistype.rpartition('.')[2] - def set_attr(self, key, value): + def set_attribute(self, key, value): DbNode._set_attr(self.attributes, key, value) flag_modified(self, "attributes") self.save() @@ -165,7 +165,7 @@ def reset_extras(self, new_extras): flag_modified(self, "extras") self.save() - def del_attr(self, key): + def del_attribute(self, key): DbNode._del_attr(self.attributes, key) flag_modified(self, "attributes") self.save() diff --git a/aiida/backends/sqlalchemy/models/utils.py b/aiida/backends/sqlalchemy/models/utils.py deleted file mode 100644 index 4ad6471f7c..0000000000 --- a/aiida/backends/sqlalchemy/models/utils.py +++ /dev/null @@ -1,74 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. 
# -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### - -from __future__ import division -from __future__ import print_function -from __future__ import absolute_import - -import six - -from aiida.common.exceptions import ValidationError -from aiida.common.exceptions import NotExistent - -# The separator for sub-fields (for JSON stored values).Keys are not allowed -# to contain the separator even if the -_sep = "." - - -def validate_key(key): - """ - Validate the key string to check if it is valid (e.g., if it does not - contain the separator symbol.). - - :return: None if the key is valid - :raise aiida.common.ValidationError: if the key is not valid - """ - if not isinstance(key, six.string_types): - raise ValidationError("The key must be a string.") - if not key: - raise ValidationError("The key cannot be an empty string.") - if _sep in key: - raise ValidationError("The separator symbol '{}' cannot be present " - "in the key of this field.".format(_sep)) - - -def get_value_of_sub_field(key, original_get_value): - """ - Get the value that corresponds to sub-fields of dictionaries stored in a - JSON. For example, if there is a dictionary {'b': 'c'} stored as value of - the key 'a' - value 'a' - :param key: The key that can be simple, a string, or complex, a set of keys - separated by the separator value. - :param original_get_value: The function that should be called to get the - original value (which can be a dictionary too). - :return: The value that correspond to the complex (or not) key. - :raise aiida.common.NotExistent: If the key doesn't correspond to a value - """ - keys = list() - if _sep in key: - keys.extend(key.split(_sep)) - else: - keys.append(key) - - if len(keys) == 1: - return original_get_value(keys[0]) - else: - try: - curr_val = original_get_value(keys[0]) - curr_pos = 1 - while curr_pos < len(keys): - curr_val = curr_val[keys[curr_pos]] - curr_pos += 1 - - return curr_val - except TypeError as KeyError: - raise NotExistent("The sub-field {} doesn't correspond " - "to a value.".format(key)) diff --git a/aiida/backends/sqlalchemy/tests/test_nodes.py b/aiida/backends/sqlalchemy/tests/test_nodes.py index 6e1ea46fe8..37695da6a5 100644 --- a/aiida/backends/sqlalchemy/tests/test_nodes.py +++ b/aiida/backends/sqlalchemy/tests/test_nodes.py @@ -62,8 +62,6 @@ def test_load_nodes(self): Test for load_node() function. 
""" from aiida.orm import load_node - from aiida.common.exceptions import NotExistent - import aiida.backends.sqlalchemy from aiida.backends.sqlalchemy import get_scoped_session a = Data() diff --git a/aiida/backends/sqlalchemy/utils.py b/aiida/backends/sqlalchemy/utils.py index 09f7d79d4e..a77da9187e 100644 --- a/aiida/backends/sqlalchemy/utils.py +++ b/aiida/backends/sqlalchemy/utils.py @@ -15,17 +15,13 @@ json_dumps = json.dumps json_loads = json.loads -import datetime -import re - -import six from alembic import command from alembic.config import Config from alembic.runtime.environment import EnvironmentContext from alembic.script import ScriptDirectory -from dateutil import parser from aiida.backends import sqlalchemy as sa +from aiida.backends.utils import isoformat_to_datetime, datetime_to_isoformat ALEMBIC_FILENAME = "alembic.ini" ALEMBIC_REL_PATH = "migrations" @@ -81,48 +77,15 @@ def dumps_json(d): """ Transforms all datetime object into isoformat and then returns the JSON """ - - def f(v): - if isinstance(v, list): - return [f(_) for _ in v] - elif isinstance(v, dict): - return dict((key, f(val)) for key, val in v.items()) - elif isinstance(v, datetime.datetime): - return v.isoformat() - return v - - return json_dumps(f(d)) - - -date_reg = re.compile(r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+(\+\d{2}:\d{2})?$') + return json_dumps(datetime_to_isoformat(d)) def loads_json(s): """ Loads the json and try to parse each basestring as a datetime object """ - ret = json_loads(s) - - def f(d): - if isinstance(d, list): - for i, val in enumerate(d): - d[i] = f(val) - return d - elif isinstance(d, dict): - for k, v in d.items(): - d[k] = f(v) - return d - elif isinstance(d, six.string_types): - if date_reg.match(d): - try: - return parser.parse(d) - except (ValueError, TypeError): - return d - return d - return d - - return f(ret) + return isoformat_to_datetime(ret) # XXX the code here isn't different from the one use in Django. We may be able diff --git a/aiida/backends/testbase.py b/aiida/backends/testbase.py index 651396257d..5b8280e686 100644 --- a/aiida/backends/testbase.py +++ b/aiida/backends/testbase.py @@ -111,7 +111,21 @@ def reset_database(self): @classmethod def insert_data(cls): - cls.__backend_instance.insert_data() + """ + This method setups the database (by creating a default user) and + inserts default data into the database (which is for the moment a + default computer). + """ + cls.create_user() + cls.create_computer() + + @classmethod + def create_user(cls): + cls.__backend_instance.create_user() + + @classmethod + def create_computer(cls): + cls.__backend_instance.create_computer() @classmethod def clean_db(cls): diff --git a/aiida/backends/testimplbase.py b/aiida/backends/testimplbase.py index 222404361c..5627f23ae8 100644 --- a/aiida/backends/testimplbase.py +++ b/aiida/backends/testimplbase.py @@ -71,11 +71,27 @@ def clean_db(self): """ def insert_data(self): + pass + + def create_user(self): """ - This method inserts default data into the database. + This method creates and stores the default user. It has the same effect + as the verdi setup. """ from aiida.manage.configuration import get_config + self.user_email = get_config().current_profile.default_user + # Since the default user is needed for many operations in AiiDA, it is not deleted by clean_db. + # In principle, it should therefore always exist - if not we create it anyhow. 
+        try:
+            self.user = orm.User.objects.get(email=self.user_email)
+        except exceptions.NotExistent:
+            self.user = orm.User(email=self.user_email).store()
+
+    def create_computer(self):
+        """
+        This method creates and stores a computer.
+        """
        self.computer = orm.Computer(
            name='localhost',
            hostname='localhost',
@@ -85,15 +101,6 @@ def insert_data(self):
            backend=self.backend
        ).store()

-        self.user_email = get_config().current_profile.default_user
-
-        # Since the default user is needed for many operations in AiiDA, it is not deleted by clean_db.
-        # In principle, it should therefore always exist - if not we create it anyhow.
-        try:
-            self.user = orm.User.objects.get(email=self.user_email)
-        except exceptions.NotExistent:
-            self.user = orm.User(email=self.user_email).store()
-
    def get_computer(self):
        """
        An ORM Computer object present in the DB
diff --git a/aiida/backends/tests/__init__.py b/aiida/backends/tests/__init__.py
index 7850f8a579..71fe0de9b7 100644
--- a/aiida/backends/tests/__init__.py
+++ b/aiida/backends/tests/__init__.py
@@ -21,7 +21,10 @@
    BACKEND_DJANGO: {
        'generic': ['aiida.backends.djsite.db.subtests.test_generic'],
        'nodes': ['aiida.backends.djsite.db.subtests.test_nodes'],
-        'migrations': ['aiida.backends.djsite.db.subtests.test_migrations'],
+        'migrations': [
+            'aiida.backends.djsite.db.subtests.migrations.test_migrations_many',
+            'aiida.backends.djsite.db.subtests.migrations.test_migrations_0034_attributes_extras_settings_json'
+        ],
        'query': ['aiida.backends.djsite.db.subtests.test_query'],
    },
    BACKEND_SQLA: {
diff --git a/aiida/backends/tests/test_export_and_import.py b/aiida/backends/tests/test_export_and_import.py
index 900d30e5a7..9dcc93f413 100644
--- a/aiida/backends/tests/test_export_and_import.py
+++ b/aiida/backends/tests/test_export_and_import.py
@@ -73,6 +73,7 @@ def test_simple_import(self):

        # Clean the database and verify there are no nodes left
        self.clean_db()
+        self.create_user()
        self.assertEqual(orm.QueryBuilder().append(orm.Node).count(), 0)

        # After importing we should have the original number of nodes again
@@ -136,6 +137,7 @@ def test_cycle_structure_data(self):

        # Clean the database and verify there are no nodes left
        self.clean_db()
+        self.create_user()
        self.assertEqual(orm.QueryBuilder().append(orm.Node).count(), 0)

        # After importing we should have the original number of nodes again
@@ -198,6 +200,7 @@ def test_base_data_nodes(self, temp_dir):
        export(nodes, outfile=filename, silent=True)
        # cleaning:
        self.clean_db()
+        self.create_user()
        # Importing back the data:
        import_data(filename, silent=True)
        # Checking whether values are preserved:
@@ -234,6 +237,7 @@ def test_calc_of_structuredata(self, temp_dir):
        export([calc], outfile=filename, silent=True)

        self.clean_db()
+        self.create_user()

        # NOTE: it is better to load new nodes by uuid, rather than assuming
        # that they will have the first 3 pks. In fact, a recommended policy in
@@ -417,6 +421,7 @@ def test_nodes_belonging_to_different_users(self, temp_dir):
        export([sd3], outfile=filename, silent=True)

        self.clean_db()
+        self.create_user()

        import_data(filename, silent=True)

        # Check that the imported nodes are correctly imported and that
@@ -849,6 +854,7 @@ def test_complex_graph_import_export(self, temp_dir):
        export([fd1], outfile=filename, silent=True)

        self.clean_db()
+        self.create_user()

        import_data(filename, silent=True, ignore_unknown_nodes=True)

@@ -963,6 +969,7 @@ def get_hash_from_db_content(grouplabel):
            export([g] + [n for n in g.nodes], outfile=filename, silent=True)
            # cleaning the DB!
self.clean_db() + self.create_user() # reimporting the data from the file import_data(filename, silent=True, ignore_unknown_nodes=True) # creating the hash from db content @@ -1026,6 +1033,7 @@ def test_same_computer_import(self, temp_dir): # Clean the local database self.clean_db() + self.create_user() # Check that there are no computers builder = orm.QueryBuilder() @@ -1127,6 +1135,7 @@ def test_same_computer_different_name_import(self, temp_dir): # Clean the local database self.clean_db() + self.create_user() # Check that there are no computers builder = orm.QueryBuilder() @@ -1232,6 +1241,7 @@ def test_different_computer_same_name_import(self, temp_dir): # Clean the local database self.clean_db() + self.create_user() # Check that there are no computers builder = orm.QueryBuilder() @@ -1294,17 +1304,16 @@ def test_correct_import_of_computer_json_params(self, temp_dir): # Clean the local database self.clean_db() + self.create_user() # Import the data import_data(filename1, silent=True) builder = orm.QueryBuilder() - builder.append(orm.Computer, project=['_metadata'], tag="comp") + builder.append(orm.Computer, project=['metadata'], tag="comp") self.assertEqual(builder.count(), 1, "Expected only one computer") res = builder.dict()[0] - self.assertEqual(res['comp']['_metadata'], - comp1_metadata, - "Not the expected metadata were found") + self.assertEqual(res['comp']['metadata'], comp1_metadata, "Not the expected metadata were found") def test_import_of_django_sqla_export_file(self): """Check that sqla import manages to import the django export file correctly""" @@ -1325,13 +1334,13 @@ def test_import_of_django_sqla_export_file(self): # Check that we got the correct metadata # Make sure to exclude the default computer builder = orm.QueryBuilder() - builder.append(orm.Computer, project=['_metadata'], tag="comp", + builder.append(orm.Computer, project=['metadata'], tag="comp", filters={'name': {'!==': self.computer.name}}) self.assertEqual(builder.count(), 1, "Expected only one computer") res = builder.dict()[0] - self.assertEqual(res['comp']['_metadata'], comp1_metadata) + self.assertEqual(res['comp']['metadata'], comp1_metadata) class TestLinks(AiidaTestCase): @@ -1391,6 +1400,7 @@ def test_links_to_unknown_nodes(self, temp_dir): tar.add(unpack.abspath, arcname="") self.clean_db() + self.create_user() with self.assertRaises(ValueError): import_data(filename, silent=True) diff --git a/aiida/backends/tests/test_nodes.py b/aiida/backends/tests/test_nodes.py index 03bada715f..7f91300091 100644 --- a/aiida/backends/tests/test_nodes.py +++ b/aiida/backends/tests/test_nodes.py @@ -489,14 +489,14 @@ def test_store_object(self): a = orm.Data() a.set_attribute('object', object(), clean=False) - # django raises ValueError + # django raises TypeError # sqlalchemy raises StatementError - with self.assertRaises((ValueError, StatementError)): + with self.assertRaises((TypeError, StatementError)): a.store() b = orm.Data() b.set_attribute('object_list', [object(), object()], clean=False) - with self.assertRaises((ValueError, StatementError)): + with self.assertRaises((TypeError, StatementError)): # objects are not json-serializable b.store() @@ -902,10 +902,9 @@ def test_settings_methods(self): self.assertEqual(get_global_setting_description('aaa'), "pippo") self.assertEqual(get_global_setting('aaa.b'), 'c') - # The following is disabled because it is not supported in SQLAlchemy + # The following is disabled because it is not supported in JSONB # Only top level elements can have descriptions # 
self.assertEqual(get_global_setting_description('aaa.b'), "")
-
        del_global_setting('aaa')

        with self.assertRaises(KeyError):
@@ -914,6 +913,17 @@
            get_global_setting('aaa.b')

        with self.assertRaises(KeyError):
            get_global_setting('aaa')

+        set_global_setting(key="bbb", value={'c': 'd1'}, description="pippo2")
+        self.assertEqual(get_global_setting('bbb'), {'c': 'd1'})
+        self.assertEqual(get_global_setting('bbb.c'), 'd1')
+        self.assertEqual(get_global_setting_description('bbb'), "pippo2")
+        set_global_setting(key="bbb", value={'c': 'd2'})
+        self.assertEqual(get_global_setting('bbb'), {'c': 'd2'})
+        self.assertEqual(get_global_setting('bbb.c'), 'd2')
+        self.assertEqual(get_global_setting_description('bbb'), "pippo2")
+
+        del_global_setting('bbb')
+
    def test_attr_listing(self):
        """
        Checks that the list of attributes and extras is ok.
diff --git a/aiida/backends/tests/test_query.py b/aiida/backends/tests/test_query.py
index 2381b16d50..766fe199c2 100644
--- a/aiida/backends/tests/test_query.py
+++ b/aiida/backends/tests/test_query.py
@@ -502,7 +502,7 @@ def test_append_validation(self):
        # pylint: disable=protected-access
        self.assertTrue('s' not in qb._projections)
        self.assertTrue('s' not in qb._filters)
-        self.assertTrue('s' not in qb._tag_to_alias_map)
+        self.assertTrue('s' not in qb.tag_to_alias_map)
        self.assertTrue(len(qb._path) == 0)
        self.assertTrue(orm.StructureData not in qb._cls_to_tag_map)
        # So this should work now:
@@ -614,7 +614,7 @@ def test_computer_json(self):  # pylint: disable=no-self-use
        # a JSON field (in both backends).
        qb = orm.QueryBuilder()
        qb.append(orm.CalculationNode, project=['id'], tag='calc')
-        qb.append(orm.Computer, project=['id', '_metadata'], outerjoin=True, with_node='calc')
+        qb.append(orm.Computer, project=['id', 'metadata'], outerjoin=True, with_node='calc')
        qb.all()
@@ -711,6 +711,8 @@ class QueryBuilderDateTimeAttribute(AiidaTestCase):

    @unittest.skipIf(configuration.PROFILE.database_backend == u'sqlalchemy',
                     "SQLA doesn't have full datetime support in attributes")
+    @unittest.skipIf(configuration.PROFILE.database_backend == u'django',
+                     "Django JSONB doesn't have full datetime support in attributes")
    def test_date(self):
        from aiida.common import timezone
        from datetime import timedelta
@@ -907,6 +909,53 @@ def test_joins3_user_group(self):
        self.assertEqual(qb.count(), 1, "The expected user that owns the "
                                        "selected group was not found.")

+    def test_joins_group_node(self):
+        """
+        This test checks that querying for the nodes that belong to a group works correctly (using the QueryBuilder).
+        This is important for the Django backend, which uses aldjemy for the Django-to-SQLAlchemy schema translation.
+        Since this is not a backend-specific test (even if it mainly exercises the querying of the Django backend
+        with the QueryBuilder), we keep it with the general tests (run by both backends).
+ """ + new_email = "newuser@new.n2" + user = orm.User(email=new_email).store() + + # Create a group that belongs to that user + group = orm.Group(label="node_group_2") + group.user = user + group.store() + + # Create nodes and add them to the created group + n1 = orm.Data() + n1.label = 'node1' + n1.set_attribute('foo', ['hello', 'goodbye']) + n1.store() + + n2 = orm.CalculationNode() + n2.label = 'node2' + n2.set_attribute('foo', 1) + n2.store() + + n3 = orm.Data() + n3.label = 'node3' + n3.set_attribute('foo', 1.0000) # Stored as fval + n3.store() + + n4 = orm.CalculationNode() + n4.label = 'node4' + n4.set_attribute('foo', 'bar') + n4.store() + + group.add_nodes([n1, n2, n3, n4]) + + # Check that the nodes are in the group + qb = orm.QueryBuilder() + qb.append(orm.Node, tag='node', project=['id']) + qb.append(orm.Group, with_node='node', filters={'id': {'==': group.id}}) + self.assertEqual(qb.count(), 4, "There should be 4 nodes in the group") + id_res = [_ for [_] in qb.all()] + for curr_id in [n1.id, n2.id, n3.id, n4.id]: + self.assertIn(curr_id, id_res) + class QueryBuilderPath(AiidaTestCase): @@ -1221,3 +1270,46 @@ def store_and_add(n, statistics): } self.assertEqual(new_db_statistics, expected_db_statistics) + + +class TestDoubleStar(AiidaTestCase): + """ + In this test class we check if QueryBuilder returns the correct results + when double star is provided as projection. + """ + + def test_statistics_default_class(self): + + # The expected result + # pylint: disable=no-member + expected_dict = { + u'description': self.computer.description, + u'scheduler_type': self.computer.get_scheduler_type(), + u'hostname': self.computer.hostname, + u'uuid': self.computer.uuid, + u'name': self.computer.name, + u'transport_type': self.computer.get_transport_type(), + u'id': self.computer.id, + u'metadata': self.computer.get_metadata(), + } + + qb = orm.QueryBuilder() + qb.append(orm.Computer, project=['**']) + # We expect one result + self.assertEqual(qb.count(), 1) + + # Get the one result record and check that the returned + # data are correct + res = list(qb.dict()[0].values())[0] + self.assertDictEqual(res, expected_dict) + + # Ask the same query as above using queryhelp + qh = {'project': {'computer': ['**']}, 'path': [{'tag': 'computer', 'cls': orm.Computer}]} + qb = orm.QueryBuilder(**qh) + # We expect one result + self.assertEqual(qb.count(), 1) + + # Get the one result record and check that the returned + # data are correct + res = list(qb.dict()[0].values())[0] + self.assertDictEqual(res, expected_dict) diff --git a/aiida/backends/utils.py b/aiida/backends/utils.py index df891a7602..4f8ac57696 100644 --- a/aiida/backends/utils.py +++ b/aiida/backends/utils.py @@ -13,10 +13,14 @@ from __future__ import absolute_import import six +import datetime +import re from aiida.backends import BACKEND_SQLA, BACKEND_DJANGO from aiida.common.exceptions import ConfigurationError from aiida.manage import configuration +from dateutil import parser +from aiida.common.exceptions import ValidationError, NotExistent AIIDA_ATTRIBUTE_SEP = '.' @@ -140,3 +144,98 @@ def delete_nodes_and_connections(pks): raise Exception("unknown backend {}".format(configuration.PROFILE.database_backend)) delete_nodes_backend(pks) + + +def datetime_to_isoformat(v): + """ + Transforms all datetime object into isoformat and then returns the final object. 
+ """ + if isinstance(v, list): + return [datetime_to_isoformat(_) for _ in v] + elif isinstance(v, dict): + return dict((key, datetime_to_isoformat(val)) for key, val in v.items()) + elif isinstance(v, datetime.datetime): + return v.isoformat() + return v + + +date_reg = re.compile(r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+(\+\d{2}:\d{2})?$') + + +def isoformat_to_datetime(d): + """ + Parses each basestring as a datetime object and if it suceeds, converts it. + """ + if isinstance(d, list): + for i, val in enumerate(d): + d[i] = isoformat_to_datetime(val) + return d + elif isinstance(d, dict): + for k, v in d.items(): + d[k] = isoformat_to_datetime(v) + return d + elif isinstance(d, six.string_types): + if date_reg.match(d): + try: + return parser.parse(d) + except (ValueError, TypeError): + return d + return d + return d + + +# The separator for sub-fields (for JSON stored values).Keys are not allowed +# to contain the separator even if the +_sep = "." + + +def validate_key(key): + """ + Validate the key string to check if it is valid (e.g., if it does not + contain the separator symbol.). + + :return: None if the key is valid + :raise aiida.common.ValidationError: if the key is not valid + """ + if not isinstance(key, six.string_types): + raise ValidationError("The key must be a string.") + if not key: + raise ValidationError("The key cannot be an empty string.") + if _sep in key: + raise ValidationError("The separator symbol '{}' cannot be present " + "in the key of this field.".format(_sep)) + + +def get_value_of_sub_field(key, original_get_value): + """ + Get the value that corresponds to sub-fields of dictionaries stored in a + JSON. For example, if there is a dictionary {'b': 'c'} stored as value of + the key 'a' + value 'a' + :param key: The key that can be simple, a string, or complex, a set of keys + separated by the separator value. + :param original_get_value: The function that should be called to get the + original value (which can be a dictionary too). + :return: The value that correspond to the complex (or not) key. 
+    :raise aiida.common.NotExistent: If the key doesn't correspond to a value
+    """
+    keys = list()
+    if _sep in key:
+        keys.extend(key.split(_sep))
+    else:
+        keys.append(key)
+
+    if len(keys) == 1:
+        return original_get_value(keys[0])
+    else:
+        try:
+            curr_val = original_get_value(keys[0])
+            curr_pos = 1
+            while curr_pos < len(keys):
+                curr_val = curr_val[keys[curr_pos]]
+                curr_pos += 1
+
+            return curr_val
+        except (KeyError, TypeError):
+            raise NotExistent("The sub-field {} doesn't correspond "
+                              "to a value.".format(key))
diff --git a/aiida/orm/implementation/django/convert.py b/aiida/orm/implementation/django/convert.py
index f203990a1b..e7e483b14b 100644
--- a/aiida/orm/implementation/django/convert.py
+++ b/aiida/orm/implementation/django/convert.py
@@ -22,8 +22,7 @@

 # pylint: disable=cyclic-import

-from aiida.backends.djsite.db import models
-from aiida.orm.implementation.django import dummy_model as dummy_models
+import aiida.backends.djsite.db.models as djmodels

 __all__ = ('get_backend_entity',)
@@ -39,7 +38,7 @@ def get_backend_entity(dbmodel, backend):  # pylint: disable=unused-argument
        dbmodel.__class__.__name__))


-@get_backend_entity.register(models.DbUser)
+@get_backend_entity.register(djmodels.DbUser)
 def _(dbmodel, backend):
    """
    get_backend_entity for Django DbUser
@@ -48,7 +47,7 @@ def _(dbmodel, backend):
    return users.DjangoUser.from_dbmodel(dbmodel, backend)


-@get_backend_entity.register(models.DbGroup)
+@get_backend_entity.register(djmodels.DbGroup)
 def _(dbmodel, backend):
    """
    get_backend_entity for Django DbGroup
@@ -57,7 +56,7 @@ def _(dbmodel, backend):
    return groups.DjangoGroup.from_dbmodel(dbmodel, backend)


-@get_backend_entity.register(models.DbComputer)
+@get_backend_entity.register(djmodels.DbComputer)
 def _(dbmodel, backend):
    """
    get_backend_entity for Django DbGroup
@@ -66,7 +65,7 @@ def _(dbmodel, backend):
    return computers.DjangoComputer.from_dbmodel(dbmodel, backend)


-@get_backend_entity.register(models.DbNode)
+@get_backend_entity.register(djmodels.DbNode)
 def _(dbmodel, backend):
    """
    get_backend_entity for Django DbNode. It will return an ORM instance since
@@ -76,7 +75,7 @@ def _(dbmodel, backend):
    return nodes.DjangoNode.from_dbmodel(dbmodel, backend)


-@get_backend_entity.register(models.DbAuthInfo)
+@get_backend_entity.register(djmodels.DbAuthInfo)
 def _(dbmodel, backend):
    """
    get_backend_entity for Django DbAuthInfo
@@ -85,26 +84,26 @@ def _(dbmodel, backend):
    return authinfos.DjangoAuthInfo.from_dbmodel(dbmodel, backend)


-@get_backend_entity.register(models.DbComment)
+@get_backend_entity.register(djmodels.DbComment)
 def _(dbmodel, backend):
    from . import comments
    return comments.DjangoComment.from_dbmodel(dbmodel, backend)


-@get_backend_entity.register(models.DbLog)
+@get_backend_entity.register(djmodels.DbLog)
 def _(dbmodel, backend):
    from . import logs
    return logs.DjangoLog.from_dbmodel(dbmodel, backend)


-@get_backend_entity.register(dummy_models.DbUser)
+@get_backend_entity.register(djmodels.DbUser.sa)
 def _(dbmodel, backend):
    """
    get_backend_entity for DummyModel DbUser.
    DummyModel instances are created when QueryBuilder queries the Django backend.
    """
    from . import users
-    djuser_instance = models.DbUser(
+    djuser_instance = djmodels.DbUser(
        id=dbmodel.id,
        email=dbmodel.email,
        first_name=dbmodel.first_name,
@@ -113,14 +112,14 @@ def _(dbmodel, backend):
    return users.DjangoUser.from_dbmodel(djuser_instance, backend)


-@get_backend_entity.register(dummy_models.DbGroup)
+@get_backend_entity.register(djmodels.DbGroup.sa)
 def _(dbmodel, backend):
    """
    get_backend_entity for DummyModel DbGroup.
    DummyModel instances are created when QueryBuilder queries the Django backend.
    """
    from . import groups
-    djgroup_instance = models.DbGroup(
+    djgroup_instance = djmodels.DbGroup(
        id=dbmodel.id,
        type_string=dbmodel.type_string,
        uuid=dbmodel.uuid,
@@ -132,14 +131,14 @@ def _(dbmodel, backend):
    return groups.DjangoGroup.from_dbmodel(djgroup_instance, backend)


-@get_backend_entity.register(dummy_models.DbComputer)
+@get_backend_entity.register(djmodels.DbComputer.sa)
 def _(dbmodel, backend):
    """
    get_backend_entity for DummyModel DbComputer.
    DummyModel instances are created when QueryBuilder queries the Django backend.
    """
    from . import computers
-    djcomputer_instance = models.DbComputer(
+    djcomputer_instance = djmodels.DbComputer(
        id=dbmodel.id,
        uuid=dbmodel.uuid,
        name=dbmodel.name,
@@ -147,17 +146,17 @@ def _(dbmodel, backend):
        description=dbmodel.description,
        transport_type=dbmodel.transport_type,
        scheduler_type=dbmodel.scheduler_type,
-        metadata=dbmodel._metadata)  # pylint: disable=protected-access
+        metadata=dbmodel.metadata)
    return computers.DjangoComputer.from_dbmodel(djcomputer_instance, backend)


-@get_backend_entity.register(dummy_models.DbNode)
+@get_backend_entity.register(djmodels.DbNode.sa)
 def _(dbmodel, backend):
    """
    get_backend_entity for DummyModel DbNode.
    DummyModel instances are created when QueryBuilder queries the Django backend.
    """
-    djnode_instance = models.DbNode(
+    djnode_instance = djmodels.DbNode(
        id=dbmodel.id,
        node_type=dbmodel.node_type,
        process_type=dbmodel.process_type,
@@ -173,31 +172,31 @@ def _(dbmodel, backend):
    return nodes.DjangoNode.from_dbmodel(djnode_instance, backend)


-@get_backend_entity.register(dummy_models.DbAuthInfo)
+@get_backend_entity.register(djmodels.DbAuthInfo.sa)
 def _(dbmodel, backend):
    """
    get_backend_entity for DummyModel DbAuthInfo.
    DummyModel instances are created when QueryBuilder queries the Django backend.
    """
    from . import authinfos
-    djauthinfo_instance = models.DbAuthInfo(
+    djauthinfo_instance = djmodels.DbAuthInfo(
        id=dbmodel.id,
        aiidauser_id=dbmodel.aiidauser_id,
        dbcomputer_id=dbmodel.dbcomputer_id,
-        metadata=dbmodel._metadata,  # pylint: disable=protected-access
+        metadata=dbmodel.metadata,
        auth_params=dbmodel.auth_params,
        enabled=dbmodel.enabled,
    )
    return authinfos.DjangoAuthInfo.from_dbmodel(djauthinfo_instance, backend)


-@get_backend_entity.register(dummy_models.DbComment)
+@get_backend_entity.register(djmodels.DbComment.sa)
 def _(dbmodel, backend):
    """
    Convert a dbcomment to the backend entity
    """
    from . import comments
-    djcomment = models.DbComment(
+    djcomment = djmodels.DbComment(
        id=dbmodel.id,
        uuid=dbmodel.uuid,
        dbnode_id=dbmodel.dbnode_id,
@@ -208,19 +207,19 @@ def _(dbmodel, backend):
    return comments.DjangoComment.from_dbmodel(djcomment, backend)


-@get_backend_entity.register(dummy_models.DbLog)
+@get_backend_entity.register(djmodels.DbLog.sa)
 def _(dbmodel, backend):
    """
    Convert a dblog to the backend entity
    """
    from . import logs
-    djlog = models.DbLog(
+    djlog = djmodels.DbLog(
        id=dbmodel.id,
        time=dbmodel.time,
        loggername=dbmodel.loggername,
        levelname=dbmodel.levelname,
        dbnode_id=dbmodel.dbnode_id,
        message=dbmodel.message,
-        metadata=dbmodel._metadata  # pylint: disable=protected-access
+        metadata=dbmodel.metadata
    )
    return logs.DjangoLog.from_dbmodel(djlog, backend)
diff --git a/aiida/orm/implementation/django/dummy_model.py b/aiida/orm/implementation/django/dummy_model.py
deleted file mode 100644
index 00adf287e7..0000000000
--- a/aiida/orm/implementation/django/dummy_model.py
+++ /dev/null
@@ -1,235 +0,0 @@
-# -*- coding: utf-8 -*-
-###########################################################################
-# Copyright (c), The AiiDA team. All rights reserved.                     #
-# This file is part of the AiiDA code.                                    #
-#                                                                         #
-# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core #
-# For further information on the license, see the LICENSE.txt file       #
-# For further information please visit http://www.aiida.net              #
-###########################################################################
-"""
-The dummy model encodes the model defined by django in backends.djsite
-using SQLAlchemy.
-This is done to query the database with more performant ORM of SA.
-"""
-
-from __future__ import division
-from __future__ import print_function
-from __future__ import absolute_import
-
-# pylint: disable=no-name-in-module, import-error, invalid-name
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy import (Column, Table, ForeignKey, UniqueConstraint)
-
-from sqlalchemy.types import (
-    Integer,
-    String,
-    DateTime,
-    Float,
-    Boolean,
-    Text,
-)
-from sqlalchemy.orm import (relationship, backref, sessionmaker)
-
-from sqlalchemy.dialects.postgresql import UUID
-
-# MISC
-from aiida.common import timezone
-from aiida.common.utils import get_new_uuid
-
-Base = declarative_base()
-
-# pylint: disable=missing-docstring, too-few-public-methods
-
-
-class DbLink(Base):
-    __tablename__ = "db_dblink"
-    id = Column(Integer, primary_key=True)
-    input_id = Column(Integer, ForeignKey('db_dbnode.id', deferrable=True, initially="DEFERRED"))
-    output_id = Column(Integer, ForeignKey('db_dbnode.id', ondelete="CASCADE", deferrable=True, initially="DEFERRED"))
-    type = Column(String(255))
-    input = relationship("DbNode", primaryjoin="DbLink.input_id == DbNode.id")
-    output = relationship("DbNode", primaryjoin="DbLink.output_id == DbNode.id")
-    label = Column(String(255), index=True, nullable=False)
-
-
-class DbAttribute(Base):
-    __tablename__ = "db_dbattribute"
-    id = Column(Integer, primary_key=True)
-    dbnode_id = Column(Integer, ForeignKey('db_dbnode.id'))
-    key = Column(String(255))
-    datatype = Column(String(10))
-    tval = Column(String, default='')
-    fval = Column(Float, default=None, nullable=True)
-    ival = Column(Integer, default=None, nullable=True)
-    bval = Column(Boolean, default=None, nullable=True)
-    dval = Column(DateTime, default=None, nullable=True)
-
-
-class DbExtra(Base):
-    __tablename__ = "db_dbextra"
-    id = Column(Integer, primary_key=True)
-    dbnode_id = Column(Integer, ForeignKey('db_dbnode.id'))
-    key = Column(String(255))
-    datatype = Column(String(10))
-    tval = Column(String, default='')
-    fval = Column(Float, default=None, nullable=True)
-    ival = Column(Integer, default=None, nullable=True)
-    bval = Column(Boolean, default=None, nullable=True)
-    dval = Column(DateTime, default=None, nullable=True)
-
-
-class DbComputer(Base):
-    __tablename__ = "db_dbcomputer"
"db_dbcomputer" - - id = Column(Integer, primary_key=True) - - uuid = Column(UUID(as_uuid=True), default=get_new_uuid) - name = Column(String(255), unique=True, nullable=False) - hostname = Column(String(255)) - - description = Column(Text, nullable=True) - - scheduler_type = Column(String(255)) - transport_type = Column(String(255)) - - _metadata = Column('metadata', String(255), default="{}") - - -class DbUser(Base): - __tablename__ = "db_dbuser" - - id = Column(Integer, primary_key=True) - email = Column(String(254), unique=True, index=True) - first_name = Column(String(254), nullable=True) - last_name = Column(String(254), nullable=True) - institution = Column(String(254), nullable=True) - - -table_groups_nodes = Table( - 'db_dbgroup_dbnodes', Base.metadata, Column('id', Integer, primary_key=True), - Column('dbnode_id', Integer, ForeignKey('db_dbnode.id', deferrable=True, initially="DEFERRED")), - Column('dbgroup_id', Integer, ForeignKey('db_dbgroup.id', deferrable=True, initially="DEFERRED"))) - - -class DbGroup(Base): - __tablename__ = "db_dbgroup" - - id = Column(Integer, primary_key=True) - - uuid = Column(UUID(as_uuid=True), default=get_new_uuid) - label = Column(String(255), index=True) - - type_string = Column(String(255), default="", index=True) - - time = Column(DateTime(timezone=True), default=timezone.now) - description = Column(Text, nullable=True) - - user_id = Column(Integer, ForeignKey('db_dbuser.id', ondelete='CASCADE', deferrable=True, initially="DEFERRED")) - user = relationship('DbUser', backref=backref('dbgroups', cascade='merge')) - - dbnodes = relationship('DbNode', secondary=table_groups_nodes, backref="dbgroups", lazy='dynamic') - - __table_args__ = (UniqueConstraint('label', 'type_string'),) - - def __str__(self): - return ''.format(self.type_string, self.label) - - -class DbNode(Base): - __tablename__ = "db_dbnode" - id = Column(Integer, primary_key=True) - uuid = Column(UUID(as_uuid=True), default=get_new_uuid) - node_type = Column(String(255), index=True) - process_type = Column(String(255), index=True) - label = Column(String(255), index=True, nullable=True) - description = Column(Text(), nullable=True) - ctime = Column(DateTime(timezone=True), default=timezone.now) - mtime = Column(DateTime(timezone=True), default=timezone.now) - dbcomputer_id = Column( - Integer, ForeignKey('db_dbcomputer.id', deferrable=True, initially="DEFERRED"), nullable=True) - dbcomputer = relationship('DbComputer', backref=backref('dbnodes', passive_deletes=True)) - user_id = Column(Integer, ForeignKey('db_dbuser.id', deferrable=True, initially="DEFERRED"), nullable=False) - user = relationship('DbUser', backref='dbnodes') - - attributes = relationship('DbAttribute', uselist=True, backref='dbnode') - extras = relationship('DbExtra', uselist=True, backref='dbnode') - - outputs = relationship( - "DbNode", - secondary="db_dblink", - primaryjoin="DbNode.id == DbLink.input_id", - secondaryjoin="DbNode.id == DbLink.output_id", - backref=backref("inputs", passive_deletes=True), - passive_deletes=True) - - -class DbAuthInfo(Base): - __tablename__ = "db_dbauthinfo" - - id = Column(Integer, primary_key=True) - - aiidauser_id = Column(Integer, ForeignKey( - 'db_dbuser.id', ondelete='CASCADE', deferrable=True, initially="DEFERRED")) - aiidauser = relationship('DbUser', backref=backref('dbauthinfo', cascade='merge')) - dbcomputer_id = Column(Integer, - ForeignKey('db_dbcomputer.id', ondelete='CASCADE', deferrable=True, initially="DEFERRED")) - dbcomputer = relationship('DbComputer', 
backref=backref('dbauthinfo', passive_deletes=True)) - _metadata = Column('metadata', String(255), default="{}") - auth_params = Column('auth_params', String(255), default="{}") - - enabled = Column(Boolean, default=True) - - __table_args__ = (UniqueConstraint("aiidauser_id", "dbcomputer_id"),) - - -class DbLog(Base): - __tablename__ = "db_dblog" - - id = Column(Integer, primary_key=True) - - uuid = Column(UUID(as_uuid=True), default=get_new_uuid) - - time = Column(DateTime(timezone=True), default=timezone.now) - loggername = Column(String(255), index=True) - levelname = Column(String(255), index=True) - - dbnode_id = Column(Integer, ForeignKey('db_dbnode.id', deferrable=True, initially="DEFERRED"), nullable=True) - dbnode = relationship('DbNode', backref=backref('dblogs', passive_deletes=True)) - - message = Column(Text(), nullable=True) - _metadata = Column('metadata', String(255), default="{}") - - -class DbComment(Base): - __tablename__ = "db_dbcomment" - - id = Column(Integer, primary_key=True) - uuid = Column(UUID(as_uuid=True), default=get_new_uuid) - dbnode_id = Column(Integer, ForeignKey('db_dbnode.id', ondelete="CASCADE", deferrable=True, initially="DEFERRED")) - - ctime = Column(DateTime(timezone=True), default=timezone.now) - mtime = Column(DateTime(timezone=True), default=timezone.now, onupdate=timezone.now) - - user_id = Column(Integer, ForeignKey('db_dbuser.id', ondelete="CASCADE", deferrable=True, initially="DEFERRED")) - content = Column(Text, nullable=True) - - dbnode = relationship('DbNode', backref='dbcomments') - user = relationship("DbUser") - - -def get_aldjemy_session(): - """ - Use aldjemy to make a session - - .. note: - Use only in this case. In normal production mode - it is safer make session explictly because it is more robust - """ - from aldjemy.core import get_engine - engine = get_engine() - _Session = sessionmaker(bind=engine) - return _Session() - - -session = get_aldjemy_session() diff --git a/aiida/orm/implementation/django/groups.py b/aiida/orm/implementation/django/groups.py index 12c1931e21..e7e33ea321 100644 --- a/aiida/orm/implementation/django/groups.py +++ b/aiida/orm/implementation/django/groups.py @@ -7,6 +7,7 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### +# pylint: disable=no-member """Django Group entity""" from __future__ import division from __future__ import print_function diff --git a/aiida/orm/implementation/django/nodes.py b/aiida/orm/implementation/django/nodes.py index 77d07271c8..5077d1379a 100644 --- a/aiida/orm/implementation/django/nodes.py +++ b/aiida/orm/implementation/django/nodes.py @@ -23,7 +23,8 @@ from .. import BackendNode, BackendNodeCollection from . import entities -from . import utils +from . import utils as dj_utils +from .. 
import utils as gen_utils from .computers import DjangoComputer from .users import DjangoUser @@ -34,8 +35,6 @@ class DjangoNode(entities.DjangoModelEntity[models.DbNode], BackendNode): # pylint: disable=too-many-public-methods MODEL_CLASS = models.DbNode - ATTRIBUTE_CLASS = models.DbAttribute - EXTRA_CLASS = models.DbExtra LINK_CLASS = models.DbLink def __init__(self, @@ -84,7 +83,7 @@ def __init__(self, type_check(mtime, datetime, 'the given mtime is of type {}'.format(type(mtime))) arguments['mtime'] = mtime - self._dbmodel = utils.ModelWrapper(models.DbNode(**arguments)) + self._dbmodel = dj_utils.ModelWrapper(models.DbNode(**arguments)) def clone(self): """Return an unstored clone of ourselves. @@ -102,7 +101,7 @@ def clone(self): clone = self.__class__.__new__(self.__class__) # pylint: disable=no-value-for-parameter clone.__init__(self.backend, self.node_type, self.user) - clone._dbmodel = utils.ModelWrapper(models.DbNode(**arguments)) # pylint: disable=protected-access + clone._dbmodel = dj_utils.ModelWrapper(models.DbNode(**arguments)) # pylint: disable=protected-access return clone @property @@ -156,9 +155,9 @@ def get_attribute(self, key): :raises AttributeError: if the attribute does not exist """ try: - return self.ATTRIBUTE_CLASS.get_value_for_node(dbnode=self.dbmodel, key=key) - except AttributeError: - raise AttributeError('Attribute `{}` does not exist'.format(key)) + return gen_utils.get_attr(self.dbmodel.get_attributes(), key) + except (KeyError, IndexError): + raise AttributeError("Attribute '{}' does not exist".format(key)) def get_attributes(self, keys): """Return a set of attributes. @@ -175,7 +174,7 @@ def set_attribute(self, key, value): :param key: name of the attribute :param value: value of the attribute """ - self.ATTRIBUTE_CLASS.set_value_for_node(self.dbmodel, key, value) + self.dbmodel.set_attribute(key, value) def set_attributes(self, attributes): """Set attributes. @@ -185,7 +184,7 @@ def set_attributes(self, attributes): :param attributes: the new attributes to set """ for key, value in attributes.items(): - self.ATTRIBUTE_CLASS.set_value_for_node(self.dbmodel, key, value) + self.dbmodel.set_attribute(key, value) def reset_attributes(self, attributes): """Reset the attributes. @@ -194,7 +193,7 @@ def reset_attributes(self, attributes): :param attributes: the new attributes to set """ - self.ATTRIBUTE_CLASS.reset_values_for_node(self.dbmodel, attributes) + self.dbmodel.reset_attributes(attributes) def delete_attribute(self, key): """Delete an attribute. @@ -202,10 +201,7 @@ def delete_attribute(self, key): :param key: name of the attribute :raises AttributeError: if the attribute does not exist """ - if not self.ATTRIBUTE_CLASS.has_key(self.dbmodel, key): - raise AttributeError('Attribute `{}` does not exist'.format(key)) - - self.ATTRIBUTE_CLASS.del_value_for_node(self.dbmodel, key) + self.dbmodel.del_attribute(key) def delete_attributes(self, keys): """Delete multiple attributes. 
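# A minimal sketch of the `gen_utils.get_attr` helper used in get_attribute/get_extra above.
# This is an assumption for illustration: the actual helper lives in the new
# aiida/orm/implementation/utils.py, whose content is not part of this hunk. The idea is to
# walk the plain attributes dictionary along a dotted key, treating numeric parts as list
# indices, and to let KeyError/IndexError propagate so the callers can translate them into
# AttributeError.
def get_attr(attrs, key):
    """Return the value under a (possibly dotted) key from a plain dictionary."""
    value = attrs
    for part in key.split('.'):
        if isinstance(value, (list, tuple)):
            value = value[int(part)]  # may raise IndexError for an out-of-range index
        else:
            value = value[part]  # may raise KeyError for a missing key
    return value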
@@ -246,8 +242,8 @@ def get_extra(self, key): :raises AttributeError: if the extra does not exist """ try: - return self.EXTRA_CLASS.get_value_for_node(dbnode=self.dbmodel, key=key) - except AttributeError: + return gen_utils.get_attr(self.dbmodel.extras, key) + except (KeyError, AttributeError): raise AttributeError('Extra `{}` does not exist'.format(key)) def get_extras(self, keys): @@ -265,7 +261,7 @@ def set_extra(self, key, value): :param key: name of the extra :param value: value of the extra """ - self.EXTRA_CLASS.set_value_for_node(self.dbmodel, key, value) + self.dbmodel.set_extra(key, value) def set_extras(self, extras): """Set extras. @@ -274,8 +270,7 @@ def set_extras(self, extras): :param extras: the new extras to set """ - for key, value in extras.items(): - self.EXTRA_CLASS.set_value_for_node(self.dbmodel, key, value) + self.dbmodel.set_extras(extras) def reset_extras(self, extras): """Reset the extras. @@ -284,7 +279,7 @@ def reset_extras(self, extras): :param extras: the new extras to set """ - raise NotImplementedError + self.dbmodel.reset_extras(extras) def delete_extra(self, key): """Delete an extra. @@ -292,10 +287,7 @@ def delete_extra(self, key): :param key: name of the extra :raises AttributeError: if the extra does not exist """ - if not self.EXTRA_CLASS.has_key(self.dbmodel, key): - raise AttributeError('Extra `{}` does not exist'.format(key)) - - self.EXTRA_CLASS.del_value_for_node(self.dbmodel, key) + self.dbmodel.del_extra(key) def delete_extras(self, keys): """Delete multiple extras. @@ -317,7 +309,7 @@ def extras_items(self): :return: an iterator with extra key value pairs """ - for key, value in self._dbmodel.extras.items(): + for key, value in self.dbmodel.extras.items(): yield key, value def extras_keys(self): @@ -325,7 +317,7 @@ def extras_keys(self): :return: an iterator with extras keys """ - for key in self._dbmodel.extras.keys(): + for key in self.dbmodel.extras.keys(): yield key def add_incoming(self, source, link_type, link_label): @@ -383,7 +375,8 @@ def store(self, attributes=None, links=None, with_transaction=True): self.dbmodel.save() if attributes: - self.ATTRIBUTE_CLASS.reset_values_for_node(self.dbmodel, attributes, with_transaction=False) + for key, value in attributes.items(): + self.dbmodel.set_attribute(key, value) if links: for link_triple in links: diff --git a/aiida/orm/implementation/django/querybuilder.py b/aiida/orm/implementation/django/querybuilder.py index d597a027a3..99064f790e 100644 --- a/aiida/orm/implementation/django/querybuilder.py +++ b/aiida/orm/implementation/django/querybuilder.py @@ -9,66 +9,111 @@ ########################################################################### """Django query builder""" +from __future__ import absolute_import from __future__ import division from __future__ import print_function -from __future__ import absolute_import -from datetime import datetime import uuid -import six +from datetime import datetime +import six +from aldjemy import core # Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed # pylint: disable=no-name-in-module, import-error -from sqlalchemy import and_, or_, not_, select, exists, case -from sqlalchemy.types import Float, String -from sqlalchemy.orm import aliased -from sqlalchemy.sql.expression import cast -from aiida.orm.implementation.querybuilder import BackendQueryBuilder +from sqlalchemy_utils.types.choice import Choice +from sqlalchemy import and_, or_, not_, case +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.ext.compiler import 
compiles
+from sqlalchemy.sql.expression import FunctionElement
+from sqlalchemy.types import Integer, Float, Boolean, DateTime
+
+import aiida.backends.djsite.db.models as djmodels
 from aiida.common.exceptions import InputValidationError
-from aiida.orm.implementation.django import dummy_model
-from aiida.backends.djsite.db.models import DbAttribute, DbExtra, ObjectDoesNotExist
+from aiida.orm.implementation.querybuilder import BackendQueryBuilder
+
+
+class jsonb_array_length(FunctionElement):  # pylint: disable=invalid-name
+    # pylint: disable=too-few-public-methods
+    name = 'jsonb_array_len'
+
+
+@compiles(jsonb_array_length)
+def compile(element, compiler, **_kw):  # pylint: disable=function-redefined, redefined-builtin
+    """
+    Get length of array defined in a JSONB column
+    """
+    return "jsonb_array_length(%s)" % compiler.process(element.clauses)
+
+
+class array_length(FunctionElement):  # pylint: disable=invalid-name
+    # pylint: disable=too-few-public-methods
+    name = 'array_len'
+
+
+@compiles(array_length)
+def compile(element, compiler, **_kw):  # pylint: disable=function-redefined
+    """
+    Get length of a postgres array
+    """
+    return "array_length(%s)" % compiler.process(element.clauses)
+
+
+class jsonb_typeof(FunctionElement):  # pylint: disable=invalid-name
+    # pylint: disable=too-few-public-methods
+    name = 'jsonb_typeof'
+
+
+@compiles(jsonb_typeof)
+def compile(element, compiler, **_kw):  # pylint: disable=function-redefined
+    """
+    Get the type of a value stored in a JSONB column
+    """
+    return "jsonb_typeof(%s)" % compiler.process(element.clauses)


 class DjangoQueryBuilder(BackendQueryBuilder):
    """Django query builder"""

-    # pylint: disable=too-many-public-methods
+    # pylint: disable=too-many-public-methods,no-member
+
+    def __init__(self, backend):
+        BackendQueryBuilder.__init__(self, backend)

    @property
    def Node(self):
-        return dummy_model.DbNode
+        return djmodels.DbNode.sa

    @property
    def Link(self):
-        return dummy_model.DbLink
+        return djmodels.DbLink.sa

    @property
    def Computer(self):
-        return dummy_model.DbComputer
+        return djmodels.DbComputer.sa

    @property
    def User(self):
-        return dummy_model.DbUser
+        return djmodels.DbUser.sa

    @property
    def Group(self):
-        return dummy_model.DbGroup
+        return djmodels.DbGroup.sa

    @property
    def AuthInfo(self):
-        return dummy_model.DbAuthInfo
+        return djmodels.DbAuthInfo.sa

    @property
    def Comment(self):
-        return dummy_model.DbComment
+        return djmodels.DbComment.sa

    @property
    def Log(self):
-        return dummy_model.DbLog
+        return djmodels.DbLog.sa

    @property
    def table_groups_nodes(self):
-        return dummy_model.table_groups_nodes
+        return core.Cache.meta.tables['db_dbgroup_dbnodes']

    def get_filter_expr(self, operator, value, attr_key, is_attribute, alias=None, column=None, column_name=None):
        """
@@ -146,6 +191,7 @@ def get_filter_expr(self, operator, value, attr_key, is_attribute, alias=None, c
            }  # id is not 2
        """
        # pylint: disable=too-many-branches,too-many-arguments

        expr = None

        if operator.startswith('~'):
@@ -201,195 +247,159 @@ def get_filter_expr(self, operator, value, attr_key, is_attribute, alias=None, c
            return not_(expr)
        return expr

-    def get_session(self):
-        return dummy_model.get_aldjemy_session()
-        # return dummy_model.session
+    @staticmethod
+    def get_session():
+        from aiida.manage.configuration import get_config
+        from aiida.backends.sqlalchemy import reset_session
+        from aiida.backends.sqlalchemy import get_scoped_session
+
+        if get_scoped_session() is None:
+            config = get_config()
+            profile = config.current_profile
+            reset_session(profile)
+
+        return get_scoped_session()

    def modify_expansions(self, alias, expansions):
        """
-        For the Django schema, we have as additional expansions 'attributes' and 'extras'
+        For Django, there are no additional expansions for now, so
+        we return the expansions unchanged
        """
-        if issubclass(alias._sa_class_manager.class_, self.Node):  # pylint: disable=protected-access
-            expansions.append("attributes")
-            expansions.append("extras")
-        elif issubclass(alias._sa_class_manager.class_, self.Computer):  # pylint: disable=protected-access
-            try:
-                expansions.remove('metadata')
-                expansions.append('_metadata')
-            except KeyError:
-                pass
-
        return expansions

    def get_filter_expr_from_attributes(self, operator, value, attr_key, column=None, column_name=None, alias=None):
-        # pylint: disable=too-many-statements, too-many-branches, too-many-arguments, too-many-locals
-
-        def get_attribute_db_column(mapped_class, dtype, castas=None):
-            """Get the attribute column"""
-            if dtype == 't':
-                mapped_entity = mapped_class.tval
-                additional_type_constraint = None
-            elif dtype == 'b':
-                mapped_entity = mapped_class.bval
-                additional_type_constraint = None
-            elif dtype == 'f':
-                mapped_entity = mapped_class.fval
-                additional_type_constraint = None
-            elif dtype == 'i':
-                mapped_entity = mapped_class.ival
-                # IN the schema, we also have dicts and lists storing something in the
-                # ival column, namely the length. I need to check explicitly whether
-                # this is meant to be an integer!
-                additional_type_constraint = mapped_class.datatype == 'int'
-            elif dtype == 'd':
-                mapped_entity = mapped_class.dval
-                additional_type_constraint = None
+        # Too many everything!
+        # pylint: disable=too-many-branches, too-many-arguments, too-many-statements
+
+        def cast_according_to_type(path_in_json, value):
+            """Cast the value according to the type"""
+            if isinstance(value, bool):
+                type_filter = jsonb_typeof(path_in_json) == 'boolean'
+                casted_entity = path_in_json.astext.cast(Boolean)
+            elif isinstance(value, (int, float)):
+                type_filter = jsonb_typeof(path_in_json) == 'number'
+                casted_entity = path_in_json.astext.cast(Float)
+            elif isinstance(value, dict):
+                type_filter = jsonb_typeof(path_in_json) == 'object'
+                casted_entity = path_in_json.astext.cast(JSONB)
+            elif isinstance(value, (list, tuple)):
+                type_filter = jsonb_typeof(path_in_json) == 'array'
+                casted_entity = path_in_json.astext.cast(JSONB)
+            elif isinstance(value, six.string_types):
+                type_filter = jsonb_typeof(path_in_json) == 'string'
+                casted_entity = path_in_json.astext
+            elif value is None:
+                type_filter = jsonb_typeof(path_in_json) == 'null'
+                casted_entity = path_in_json.astext.cast(JSONB)
+            elif isinstance(value, datetime):
+                # The type filter here checks whether this attribute stores
+                # a string, combined with a filter checking whether that string
+                # is compatible with a datetime (using a regex). Open questions:
+                #   - What about historical values (BC, or before 1000AD)??
+                #  - Different ways to represent the timezone
+
+                type_filter = jsonb_typeof(path_in_json) == 'string'
+                regex_filter = path_in_json.astext.op("SIMILAR TO")(
+                    r"\d\d\d\d-[0-1]\d-[0-3]\dT[0-2]\d:[0-5]\d:\d\d\.\d+((\+|\-)\d\d:\d\d)?")
+                type_filter = and_(type_filter, regex_filter)
+                casted_entity = path_in_json.cast(DateTime)
             else:
-                raise InputValidationError("I don't know what to do with dtype {}".format(dtype))
-            if castas == 't':
-                mapped_entity = cast(mapped_entity, String)
-            elif castas == 'f':
-                mapped_entity = cast(mapped_entity, Float)
-
-            return mapped_entity, additional_type_constraint
-
-        if column:
-            mapped_class = column.prop.mapper.class_
-        else:
-            column = getattr(alias, column_name)
-            mapped_class = column.prop.mapper.class_
-        # Ok, so we have an attribute key here.
-        # Unless cast is specified, will try to infer my self where the value
-        # is stored
-        # Datetime -> dval
-        # bool -> bval
-        # string -> tval
-        # integer -> ival, fval (cast ival to float)
-        # float -> ival, fval (cast ival to float)
-        # If the user specified of_type ??
-        # That is basically a query for where the value is sitting
-        # (which db_column in the dbattribtues)
-        # If the user specified in what to cast, he wants an operation to
-        # be performed to cast the value to a different type
-        if isinstance(value, (list, tuple)):
-            value_type_set = set(type(i) for i in value)
-            if len(value_type_set) > 1:
-                raise InputValidationError('{} contains more than one type'.format(value))
-            elif not value_type_set:
-                raise InputValidationError('Given list is empty, cannot determine type')
-            else:
-                value_to_consider = value[0]
-        else:
-            value_to_consider = value
-
-        # First cases, I maybe need not do anything but just count the
-        # number of entries
-        if operator in ('of_length', 'shorter', 'longer'):
-            raise NotImplementedError("Filtering by lengths of arrays or lists is not implemented\n"
-                                      "in the Django-Backend")
+                raise TypeError('Unknown type {}'.format(type(value)))
+            return type_filter, casted_entity
+
+        if column is None:
+            column = self.get_column(column_name, alias)
+
+        database_entity = column[tuple(attr_key)]
+        if operator == '==':
+            type_filter, casted_entity = cast_according_to_type(database_entity, value)
+            expr = case([(type_filter, casted_entity == value)], else_=False)
+        elif operator == '>':
+            type_filter, casted_entity = cast_according_to_type(database_entity, value)
+            expr = case([(type_filter, casted_entity > value)], else_=False)
+        elif operator == '<':
+            type_filter, casted_entity = cast_according_to_type(database_entity, value)
+            expr = case([(type_filter, casted_entity < value)], else_=False)
+        elif operator in ('>=', '=>'):
+            type_filter, casted_entity = cast_according_to_type(database_entity, value)
+            expr = case([(type_filter, casted_entity >= value)], else_=False)
+        elif operator in ('<=', '=<'):
+            type_filter, casted_entity = cast_according_to_type(database_entity, value)
+            expr = case([(type_filter, casted_entity <= value)], else_=False)
         elif operator == 'of_type':
-            raise NotImplementedError("Filtering by type is not implemented\n"
-                                      "in the Django-Backend")
+            # http://www.postgresql.org/docs/9.5/static/functions-json.html
+            # Possible types are object, array, string, number, boolean, and null.
+            valid_types = ('object', 'array', 'string', 'number', 'boolean', 'null')
+            if value not in valid_types:
+                raise InputValidationError("value {} for of_type is not among valid types\n"
+                                           "{}".format(value, valid_types))
+            expr = jsonb_typeof(database_entity) == value
+        elif operator == 'like':
+            type_filter, casted_entity = cast_according_to_type(database_entity, value)
+            expr = case([(type_filter, casted_entity.like(value))], else_=False)
+        elif operator == 'ilike':
+            type_filter, casted_entity = cast_according_to_type(database_entity, value)
+            expr = case([(type_filter, casted_entity.ilike(value))], else_=False)
+        elif operator == 'in':
+            type_filter, casted_entity = cast_according_to_type(database_entity, value[0])
+            expr = case([(type_filter, casted_entity.in_(value))], else_=False)
         elif operator == 'contains':
-            raise NotImplementedError("Contains is not implemented in the Django-backend")
-
+            expr = database_entity.cast(JSONB).contains(value)
         elif operator == 'has_key':
-            if issubclass(mapped_class, dummy_model.DbAttribute):
-                expr = alias.attributes.any(mapped_class.key == '.'.join(attr_key + [value]))
-            elif issubclass(mapped_class, dummy_model.DbExtra):
-                expr = alias.extras.any(mapped_class.key == '.'.join(attr_key + [value]))
-            else:
-                raise TypeError("I was given {} as an attribute base class".format(mapped_class))
-
+            expr = database_entity.cast(JSONB).has_key(value)  # noqa
+        elif operator == 'of_length':
+            expr = case(
+                [(jsonb_typeof(database_entity) == 'array', jsonb_array_length(database_entity.cast(JSONB)) == value)],
+                else_=False)
+
+        elif operator == 'longer':
+            expr = case(
+                [(jsonb_typeof(database_entity) == 'array', jsonb_array_length(database_entity.cast(JSONB)) > value)],
+                else_=False)
+        elif operator == 'shorter':
+            expr = case(
+                [(jsonb_typeof(database_entity) == 'array', jsonb_array_length(database_entity.cast(JSONB)) < value)],
+                else_=False)
         else:
-            types_n_casts = []
-            if isinstance(value_to_consider, six.string_types):
-                types_n_casts.append(('t', None))
-            elif isinstance(value_to_consider, bool):
-                types_n_casts.append(('b', None))
-            elif isinstance(value_to_consider, (int, float)):
-                types_n_casts.append(('f', None))
-                types_n_casts.append(('i', 'f'))
-            elif isinstance(value_to_consider, datetime):
-                types_n_casts.append(('d', None))
-
-            expressions = []
-            for dtype, castas in types_n_casts:
-                attr_column, additional_type_constraint = get_attribute_db_column(mapped_class, dtype, castas=castas)
-                expression_this_typ_cas = self.get_filter_expr(
-                    operator, value, attr_key=[], column=attr_column, is_attribute=False)
-                if additional_type_constraint is not None:
-                    expression_this_typ_cas = and_(expression_this_typ_cas, additional_type_constraint)
-                expressions.append(expression_this_typ_cas)
-
-            actual_attr_key = '.'.join(attr_key)
-            expr = column.any(and_(mapped_class.key == actual_attr_key, or_(*expressions)))
+            raise InputValidationError("Unknown operator {} for filters in JSON field".format(operator))
         return expr
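Because every comparison is wrapped in a `case` over `jsonb_typeof`, a filter can only match rows whose attribute actually holds a value of the compared JSON type. From the user-facing `QueryBuilder` this is transparent; for example:

    from aiida.orm import QueryBuilder, Node
    qb = QueryBuilder()
    # matches only nodes whose 'energy' attribute is stored as a JSON number
    qb.append(Node, filters={'attributes.energy': {'<': 0.0}})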
     def get_projectable_attribute(self, alias, column_name, attrpath, cast=None, **kwargs):  # pylint: disable=redefined-outer-name
-        """Get the list of projectable attributes"""
-        if cast is not None:
-            raise NotImplementedError("Casting is not implemented in the Django backend")
-        if not attrpath:
-            # If the user with Django backend wants all the attributes or all
-            # the extras, I will select as entity the ID of the node.
-            # in get_aiida_res, this is transformed to the dictionary of attributes.
-            if column_name in ('attributes', 'extras'):
-                entity = alias.id
-            else:
-                raise NotImplementedError("Whatever you asked for ({}) is not implemented".format(column_name))
+        """
+        :returns: An attribute stored in the JSON field of the given column
+        """
+        entity = self.get_column(column_name, alias)[attrpath]
+        if cast is None:
+            pass  # no cast requested: return the raw JSONB entity
+        elif cast == 'f':
+            entity = entity.astext.cast(Float)
+        elif cast == 'i':
+            entity = entity.astext.cast(Integer)
+        elif cast == 'b':
+            entity = entity.astext.cast(Boolean)
+        elif cast == 't':
+            entity = entity.astext
+        elif cast == 'j':
+            entity = entity.astext.cast(JSONB)
+        elif cast == 'd':
+            entity = entity.astext.cast(DateTime)
         else:
-            aliased_attributes = aliased(getattr(alias, column_name).prop.mapper.class_)
-
-            if not issubclass(alias._aliased_insp.class_, self.Node):  # pylint: disable=protected-access
-                NotImplementedError("Other classes than Nodes are not implemented yet")
-
-            attrkey = '.'.join(attrpath)
-
-            exists_stmt = exists(
-                select([1], correlate=True).select_from(aliased_attributes).where(
-                    and_(aliased_attributes.key == attrkey, aliased_attributes.dbnode_id == alias.id)))
-
-            select_stmt = select([aliased_attributes.id], correlate=True).select_from(aliased_attributes).where(
-                and_(aliased_attributes.key == attrkey, aliased_attributes.dbnode_id == alias.id)).label('miao')
-
-            entity = case([
-                (exists_stmt, select_stmt),
-            ], else_=None)
-
+            raise InputValidationError("Unknown casting key {}".format(cast))
         return entity

     def get_aiida_res(self, key, res):
         """
-        Some instance returned by ORM (django or SA) need to be converted to Aiida instances (eg nodes)
+        Some instances returned by the ORM (Django or SQLAlchemy) need to be
+        converted to AiiDA instances (e.g. nodes). Choice fields
+        (sqlalchemy_utils) are converted to their value
+
+        :param key: The key
         :param res: the result returned by the query
-        :param key: the key that this entry would be return with
        :returns: an aiida-compatible instance
         """
-        if key.startswith('attributes.'):
-            # If you want a specific attributes, that key was stored in res.
-            # So I call the getvalue method to expand into a dictionary
-            try:
-                returnval = DbAttribute.objects.get(id=res).getvalue()
-            except ObjectDoesNotExist:
-                # If the object does not exist, return None.
This is consistent - # with SQLAlchemy inside the JSON - returnval = None - elif key.startswith('extras.'): - # Same as attributes - try: - returnval = DbExtra.objects.get(id=res).getvalue() - except ObjectDoesNotExist: - returnval = None - elif key == 'attributes': - # If you asked for all attributes, the QB return the ID of the node - # I use DbAttribute.get_all_values_for_nodepk - # to get the dictionary - return DbAttribute.get_all_values_for_nodepk(res) - elif key == 'extras': - # same as attributes - return DbExtra.get_all_values_for_nodepk(res) + if isinstance(res, Choice): + returnval = res.value elif isinstance(res, uuid.UUID): returnval = six.text_type(res) else: @@ -453,10 +463,14 @@ def iterall(self, query, batch_size, tag_to_index_dict): for colindex, rowitem in enumerate(resultrow) ] - def iterdict(self, query, batch_size, tag_to_projected_entity_dict): + def iterdict(self, query, batch_size, tag_to_projected_properties_dict, tag_to_alias_map): from django.db import transaction - nr_items = sum(len(v) for v in tag_to_projected_entity_dict.values()) + def get_table_name(aliased_class): + """ Returns the table name given an Aliased class based on Aldjemy""" + return aliased_class._aliased_insp._target.table.name # pylint: disable=protected-access + + nr_items = sum(len(v) for v in tag_to_projected_properties_dict.values()) if not nr_items: raise ValueError("Got an empty dictionary") @@ -469,26 +483,42 @@ def iterdict(self, query, batch_size, tag_to_projected_entity_dict): for this_result in results: yield { tag: { - attrkey: self.get_aiida_res(attrkey, this_result[index_in_sql_result]) + self.get_corresponding_property( + get_table_name(tag_to_alias_map[tag]), attrkey, self.inner_to_outer_schema): + self.get_aiida_res(attrkey, this_result[index_in_sql_result]) for attrkey, index_in_sql_result in projected_entities_dict.items() - } for tag, projected_entities_dict in tag_to_projected_entity_dict.items() + } for tag, projected_entities_dict in tag_to_projected_properties_dict.items() } elif nr_items == 1: # I this case, sql returns a list, where each listitem is the result # for one row. Here I am converting it to a list of lists (of length 1) - if [v for entityd in tag_to_projected_entity_dict.values() for v in entityd.keys()] == ['*']: + if [v for entityd in tag_to_projected_properties_dict.values() for v in entityd.keys()] == ['*']: for this_result in results: yield { tag: { - attrkey: self.get_aiida_res(attrkey, this_result) + self.get_corresponding_property( + get_table_name(tag_to_alias_map[tag]), attrkey, self.inner_to_outer_schema): + self.get_aiida_res(attrkey, this_result) for attrkey, position in projected_entities_dict.items() - } for tag, projected_entities_dict in tag_to_projected_entity_dict.items() + } for tag, projected_entities_dict in tag_to_projected_properties_dict.items() } else: for this_result, in results: yield { tag: { - attrkey: self.get_aiida_res(attrkey, this_result) + self.get_corresponding_property( + get_table_name(tag_to_alias_map[tag]), attrkey, self.inner_to_outer_schema): + self.get_aiida_res(attrkey, this_result) for attrkey, position in projected_entities_dict.items() - } for tag, projected_entities_dict in tag_to_projected_entity_dict.items() + } for tag, projected_entities_dict in tag_to_projected_properties_dict.items() } + + def get_column_names(self, alias): + """ + Given the backend specific alias, return the column names that correspond to the aliased table. 
+ """ + # pylint: disable=protected-access + return [ + str(c).replace(alias._aliased_insp.class_.table.name + '.', '') + for c in alias._aliased_insp.class_.table._columns._all_columns + ] diff --git a/aiida/orm/implementation/querybuilder.py b/aiida/orm/implementation/querybuilder.py index 9016e95783..fe5148ea01 100644 --- a/aiida/orm/implementation/querybuilder.py +++ b/aiida/orm/implementation/querybuilder.py @@ -27,6 +27,9 @@ class BackendQueryBuilder(object): # pylint: disable=invalid-name,too-many-public-methods,useless-object-inheritance + outer_to_inner_schema = None + inner_to_outer_schema = None + def __init__(self, backend): """ :param backend: the backend @@ -34,6 +37,8 @@ def __init__(self, backend): from . import backends type_check(backend, backends.Backend) self._backend = backend + self.inner_to_outer_schema = dict() + self.outer_to_inner_schema = dict() @abc.abstractmethod def Node(self): @@ -131,6 +136,39 @@ def get_filter_expr_from_attributes(cls, operator, value, attr_key, column=None, :returns: An instance of sqlalchemy.sql.elements.BinaryExpression """ + @classmethod + def get_corresponding_properties(cls, entity_table, given_properties, mapper): + """ + This method returns a list of updated properties for a given list of properties. + If there is no update for the property, the given property is returned in the list. + """ + if entity_table in mapper.keys(): + res = list() + for given_property in given_properties: + res.append(cls.get_corresponding_property(entity_table, given_property, mapper)) + return res + + return given_properties + + @classmethod + def get_corresponding_property(cls, entity_table, given_property, mapper): + """ + This method returns an updated property for a given a property. + If there is no update for the property, the given property is returned. + """ + try: + # Get the mapping for the specific entity_table + property_mapping = mapper[entity_table] + try: + # Get the mapping for the specific property + return property_mapping[given_property] + except KeyError: + # If there is no mapping, the property remains unchanged + return given_property + except KeyError: + # If it doesn't exist, it means that the given_property remains v + return given_property + @classmethod def get_filter_expr_from_column(cls, operator, value, column): """ @@ -224,11 +262,18 @@ def iterall(self, query, batch_size, tag_to_index_dict): """ @abc.abstractmethod - def iterdict(self, query, batch_size, tag_to_projected_entity_dict): + def iterdict(self, query, batch_size, tag_to_projected_properties_dict, tag_to_alias_map): """ :returns: An iterator over all the results of a list of dictionaries. """ + @abc.abstractmethod + def get_column_names(self, alias): + """ + Return the column names of the given table (alias). + """ + pass + def get_column(self, colname, alias): # pylint: disable=no-self-use """ Return the column for a given projection. diff --git a/aiida/orm/implementation/sqlalchemy/nodes.py b/aiida/orm/implementation/sqlalchemy/nodes.py index eb6be8c534..bc1e7114a7 100644 --- a/aiida/orm/implementation/sqlalchemy/nodes.py +++ b/aiida/orm/implementation/sqlalchemy/nodes.py @@ -24,7 +24,8 @@ from .. import BackendNode, BackendNodeCollection from . import entities -from . import utils +from . import utils as sqla_utils +from .. 
diff --git a/aiida/orm/implementation/sqlalchemy/nodes.py b/aiida/orm/implementation/sqlalchemy/nodes.py
index eb6be8c534..bc1e7114a7 100644
--- a/aiida/orm/implementation/sqlalchemy/nodes.py
+++ b/aiida/orm/implementation/sqlalchemy/nodes.py
@@ -24,7 +24,8 @@

 from .. import BackendNode, BackendNodeCollection
 from . import entities
-from . import utils
+from . import utils as sqla_utils
+from .. import utils as gen_utils
 from .computers import SqlaComputer
 from .users import SqlaUser

@@ -82,7 +83,7 @@ def __init__(self,
             type_check(mtime, datetime, 'the given mtime is of type {}'.format(type(mtime)))
             arguments['mtime'] = mtime

-        self._dbmodel = utils.ModelWrapper(models.DbNode(**arguments))
+        self._dbmodel = sqla_utils.ModelWrapper(models.DbNode(**arguments))

     def clone(self):
         """Return an unstored clone of ourselves.
@@ -102,7 +103,7 @@ def clone(self):
         clone = self.__class__.__new__(self.__class__)  # pylint: disable=no-value-for-parameter
         clone.__init__(self.backend, self.node_type, self.user)
-        clone._dbmodel = sqla_utils.ModelWrapper(models.DbNode(**arguments))  # pylint: disable=protected-access
         return clone

     @property
@@ -156,7 +157,7 @@ def get_attribute(self, key):
         :raises AttributeError: if the attribute does not exist
         """
         try:
-            return utils.get_attr(self._dbmodel.attributes, key)
+            return gen_utils.get_attr(self._dbmodel.attributes, key)
         except (KeyError, IndexError):
             raise AttributeError('Attribute `{}` does not exist'.format(key))

@@ -176,7 +177,7 @@ def set_attribute(self, key, value):
         :param value: value of the attribute
         """
         try:
-            self.dbmodel.set_attr(key, value)
+            self.dbmodel.set_attribute(key, value)
         except Exception:  # pylint: disable=bare-except
             session = get_scoped_session()
             session.rollback()
@@ -217,7 +218,7 @@ def delete_attribute(self, key):
         :raises AttributeError: if the attribute does not exist
         """
         try:
-            self._dbmodel.del_attr(key)
+            self._dbmodel.del_attribute(key)
         except Exception:  # pylint: disable=bare-except
             session = get_scoped_session()
             session.rollback()
@@ -262,7 +263,7 @@ def get_extra(self, key):
         :raises AttributeError: if the extra does not exist
         """
         try:
-            return utils.get_attr(self._dbmodel.extras, key)
+            return gen_utils.get_attr(self._dbmodel.extras, key)
         except (KeyError, IndexError):
             raise AttributeError('Extra `{}` does not exist'.format(key))
diff --git a/aiida/orm/implementation/sqlalchemy/querybuilder.py b/aiida/orm/implementation/sqlalchemy/querybuilder.py
index 4a3d28d2ad..fe649af03e 100644
--- a/aiida/orm/implementation/sqlalchemy/querybuilder.py
+++ b/aiida/orm/implementation/sqlalchemy/querybuilder.py
@@ -25,6 +25,7 @@

 import aiida.backends.sqlalchemy
 from aiida.common.exceptions import InputValidationError
+from aiida.common.exceptions import NotExistent
 from aiida.orm.implementation.querybuilder import BackendQueryBuilder

@@ -75,6 +76,15 @@ class SqlaQueryBuilder(BackendQueryBuilder):

     # pylint: disable=redefined-outer-name, too-many-public-methods

+    def __init__(self, backend):
+        BackendQueryBuilder.__init__(self, backend)
+
+        self.outer_to_inner_schema['db_dbcomputer'] = {'metadata': '_metadata'}
+        self.outer_to_inner_schema['db_dblog'] = {'metadata': '_metadata'}
+
+        self.inner_to_outer_schema['db_dbcomputer'] = {'_metadata': 'metadata'}
+        self.inner_to_outer_schema['db_dblog'] = {'_metadata': 'metadata'}
+
     @property
     def Node(self):
         import aiida.backends.sqlalchemy.models.node
@@ -125,17 +135,16 @@ def get_session(self):

     def modify_expansions(self, alias, expansions):
         """
-        For sqlalchemy, there are no additional expansions for now, so
-        I am returning an empty list
+        In SQLA, the metadata should be changed to _metadata to be in-line with the database schema
         """
         # pylint: disable=protected-access
-        if issubclass(alias._sa_class_manager.class_, self.Computer) or \
-           issubclass(alias._sa_class_manager.class_, self.Log):
-            try:
-                expansions.remove('metadata')
-                expansions.append('_metadata')
-            except KeyError:
-                pass
+        # The following check is added to avoid unnecessary calls to get_inner_property for QB edge queries
+        # The update of expansions makes sense only when AliasedClass is provided
+        if hasattr(alias, '_sa_class_manager'):
+            if '_metadata' in expansions:
+                raise NotExistent("_metadata doesn't exist for {}. Please try metadata.".format(alias))
+
+            return self.get_corresponding_properties(alias.__tablename__, expansions, self.outer_to_inner_schema)

         return expansions

@@ -394,7 +403,7 @@ def get_projectable_attribute(self, alias, column_name, attrpath, cast=None, **k
     def get_aiida_res(self, key, res):
         """
         Some instance returned by ORM (django or SA) need to be converted
-        to Aiida instances (eg nodes). Choice (sqlalchemy_utils)
+        to AiiDA instances (eg nodes). Choice (sqlalchemy_utils)
         will return their value

         :param key: The key
@@ -479,9 +488,13 @@ def iterall(self, query, batch_size, tag_to_index_dict):
             self.get_session().rollback()
             raise

-    def iterdict(self, query, batch_size, tag_to_projected_entity_dict):
+    def iterdict(self, query, batch_size, tag_to_projected_properties_dict, tag_to_alias_map):

-        nr_items = sum(len(v) for v in tag_to_projected_entity_dict.values())
+        def get_table_name(aliased_class):
+            """ Returns the table name given an Aliased class"""
+            return aliased_class.__tablename__
+
+        nr_items = sum(len(v) for v in tag_to_projected_properties_dict.values())
         if not nr_items:
             raise ValueError("Got an empty dictionary")

@@ -493,31 +506,43 @@ def iterdict(self, query, batch_size, tag_to_projected_entity_dict):
                 for this_result in results:
                     yield {
                         tag: {
-                            attrkey: self.get_aiida_res(attrkey, this_result[index_in_sql_result])
+                            self.get_corresponding_property(
+                                get_table_name(tag_to_alias_map[tag]), attrkey, self.inner_to_outer_schema):
+                            self.get_aiida_res(attrkey, this_result[index_in_sql_result])
                             for attrkey, index_in_sql_result in projected_entities_dict.items()
-                        } for tag, projected_entities_dict in tag_to_projected_entity_dict.items()
+                        } for tag, projected_entities_dict in tag_to_projected_properties_dict.items()
                     }
             elif nr_items == 1:
                 # I this case, sql returns a list, where each listitem is the result
                # for one row. Here I am converting it to a list of lists (of length 1)
-                if [v for entityd in tag_to_projected_entity_dict.values() for v in entityd.keys()] == ['*']:
+                if [v for entityd in tag_to_projected_properties_dict.values() for v in entityd.keys()] == ['*']:
                     for this_result in results:
                         yield {
                             tag: {
-                                attrkey: self.get_aiida_res(attrkey, this_result)
+                                self.get_corresponding_property(
+                                    get_table_name(tag_to_alias_map[tag]), attrkey, self.inner_to_outer_schema):
+                                self.get_aiida_res(attrkey, this_result)
                                 for attrkey, position in projected_entities_dict.items()
-                            } for tag, projected_entities_dict in tag_to_projected_entity_dict.items()
+                            } for tag, projected_entities_dict in tag_to_projected_properties_dict.items()
                        }
                 else:
                     for this_result, in results:
                         yield {
                             tag: {
-                                attrkey: self.get_aiida_res(attrkey, this_result)
+                                self.get_corresponding_property(
+                                    get_table_name(tag_to_alias_map[tag]), attrkey, self.inner_to_outer_schema):
+                                self.get_aiida_res(attrkey, this_result)
                                 for attrkey, position in projected_entities_dict.items()
-                            } for tag, projected_entities_dict in tag_to_projected_entity_dict.items()
+                            } for tag, projected_entities_dict in tag_to_projected_properties_dict.items()
                         }
             else:
                 raise ValueError("Got an empty dictionary")
         except Exception:
             self.get_session().rollback()
             raise
+
+    def get_column_names(self, alias):
+        """
+        Given the backend specific alias, return the column names that correspond to the aliased table.
+        """
+        return [str(c).replace(alias.__table__.name + '.', '') for c in alias.__table__.columns]
diff --git a/aiida/orm/implementation/sqlalchemy/utils.py b/aiida/orm/implementation/sqlalchemy/utils.py
index da00cd2062..fd14576f65 100644
--- a/aiida/orm/implementation/sqlalchemy/utils.py
+++ b/aiida/orm/implementation/sqlalchemy/utils.py
@@ -22,7 +22,7 @@

 from aiida.common import exceptions
 from aiida.backends.sqlalchemy import get_scoped_session

-__all__ = ('django_filter', 'get_attr')
+__all__ = ['django_filter']


 class ModelWrapper(object):
@@ -133,19 +133,6 @@ def iter_dict(attrs):
             yield "", attrs


-def get_attr(attrs, key):
-    path = key.split('.')
-
-    d = attrs
-    for p in path:
-        if p.isdigit():
-            p = int(p)
-        # Let it raise the appropriate exception
-        d = d[p]
-
-    return d
-
-
 def _create_op_func(op):

     def f(attr, val):
         return getattr(attr, op)(val)
diff --git a/aiida/orm/implementation/utils.py b/aiida/orm/implementation/utils.py
new file mode 100644
index 0000000000..7f59046c20
--- /dev/null
+++ b/aiida/orm/implementation/utils.py
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core #
+# For further information on the license, see the LICENSE.txt file       #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+# pylint: disable=invalid-name
+""" Utilities used by both backends """
+from __future__ import division

+__all__ = ['get_attr']
+
+
+def get_attr(attrs, key):
+    """ Get the attribute that corresponds to the given key"""
+    path = key.split('.')
+
+    d = attrs
+    for p in path:
+        if p.isdigit():
+            p = int(p)
+        # Let it raise the appropriate exception
+        d = d[p]
+
+    return d
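`get_attr` simply walks a dotted key path through nested dictionaries and lists, treating purely numeric path components as list indices; for example:

    attrs = {'cell': {'vectors': [10.0, 10.5]}}
    get_attr(attrs, 'cell.vectors.1')  # returns 10.5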
# +# # +# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # +# For further information on the license, see the LICENSE.txt file # +# For further information please visit http://www.aiida.net # +########################################################################### +# pylint: disable=invalid-name +""" Utilities used by both backends """ +from __future__ import division + +__all__ = ['get_attr'] + + +def get_attr(attrs, key): + """ Get the attribute that corresponds to the given key""" + path = key.split('.') + + d = attrs + for p in path: + if p.isdigit(): + p = int(p) + # Let it raise the appropriate exception + d = d[p] + + return d diff --git a/aiida/orm/importexport.py b/aiida/orm/importexport.py index 37b5900406..323f9de225 100644 --- a/aiida/orm/importexport.py +++ b/aiida/orm/importexport.py @@ -23,6 +23,7 @@ from aiida.common.utils import export_shard_uuid, get_class_string, grouper, get_new_uuid from aiida.orm import Computer, Group, GroupTypeString, Node, QueryBuilder, User, Log, Comment from aiida.orm.utils.repository import Repository +from aiida.backends.utils import datetime_to_isoformat IMPORTGROUP_TYPE = GroupTypeString.IMPORTGROUP_TYPE.value DUPL_SUFFIX = ' (Imported #{})' @@ -142,6 +143,7 @@ def schema_to_entity_names(class_string): # model fields that can be used for the query of the database in both backends. # These are the names of the fields of the models that belong to the # corresponding entities. + file_fields_to_model_fields = { NODE_ENTITY_NAME: { "dbcomputer": "dbcomputer_id", @@ -150,19 +152,15 @@ def schema_to_entity_names(class_string): GROUP_ENTITY_NAME: { "user": "user_id" }, - COMPUTER_ENTITY_NAME: { - "metadata": "_metadata" - }, + COMPUTER_ENTITY_NAME: {}, LOG_ENTITY_NAME: { - "dbnode": "dbnode_id", - "metadata": "_metadata" + "dbnode": "dbnode_id" }, COMMENT_ENTITY_NAME: { "dbnode": "dbnode_id", "user": "user_id" } } - # As above but the opposite procedure model_fields_to_file_fields = { NODE_ENTITY_NAME: { @@ -173,13 +171,10 @@ def schema_to_entity_names(class_string): GROUP_ENTITY_NAME: { "user_id": "user" }, - COMPUTER_ENTITY_NAME: { - "_metadata": "metadata" - }, + COMPUTER_ENTITY_NAME: {}, USER_ENTITY_NAME: {}, LOG_ENTITY_NAME: { "dbnode_id": "dbnode", - "_metadata": "metadata" }, COMMENT_ENTITY_NAME: { "dbnode_id": "dbnode", @@ -933,43 +928,12 @@ def import_data_dj(in_path, group=None, ignore_unknown_nodes=False, destdir.replace_with_folder(subfolder.abspath, move=True, overwrite=True) - # If there is an mtime in the field, disable the automatic update - # to keep the mtime that we have set here - if 'mtime' in [field.name for field in Model._meta.local_fields]: - with suppress_auto_now([(Model, ['mtime'])]): - # Store them all in once; however, the PK are not set in this way... 
- Model.objects.bulk_create(objects_to_create) - else: - Model.objects.bulk_create(objects_to_create) - - # Get back the just-saved entries - just_saved_queryset = Model.objects.filter( - **{"{}__in".format(unique_identifier): - import_entry_ids.keys()}).values_list(unique_identifier, 'pk') - # note: convert uuids from type UUID to strings - just_saved = {str(k) : v for k, v in just_saved_queryset} - - # Now I have the PKs, print the info - # Moreover, set the foreign_ids_reverse_mappings - for unique_id, new_pk in just_saved.items(): - import_entry_id = import_entry_ids[unique_id] - foreign_ids_reverse_mappings[model_name][unique_id] = new_pk - if model_name not in ret_dict: - ret_dict[model_name] = {'new': [], 'existing': []} - ret_dict[model_name]['new'].append((import_entry_id, - new_pk)) + # For DbNodes, we also have to store its attributes + if not silent: + print("STORING NEW NODE ATTRIBUTES...") - if not silent: - print("NEW %s: %s (%s->%s)" % (model_name, unique_id, - import_entry_id, - new_pk)) + import_entry_id = import_entry_ids[o.uuid] - # For DbNodes, we also have to store its attributes - if model_name == NODE_ENTITY_NAME: - if not silent: - print("STORING NEW NODE ATTRIBUTES...") - for unique_id, new_pk in just_saved.items(): - import_entry_id = import_entry_ids[unique_id] # Get attributes from import file try: attributes = data['node_attributes'][ @@ -984,18 +948,17 @@ def import_data_dj(in_path, group=None, ignore_unknown_nodes=False, # Here I have to deserialize the attributes deserialized_attributes = deserialize_attributes( attributes, attributes_conversion) - models.DbAttribute.reset_values_for_node( - dbnode=new_pk, - attributes=deserialized_attributes, - with_transaction=False) + if deserialized_attributes: + o.attributes = dict() + for k, v in deserialized_attributes.items(): + o.attributes[k] = datetime_to_isoformat(v) - # For DbNodes, we also have to store its extras - if model_name == NODE_ENTITY_NAME: - if extras_mode_new == 'import': - if not silent: - print("STORING NEW NODE EXTRAS...") - for unique_id, new_pk in just_saved.items(): - import_entry_id = import_entry_ids[unique_id] + # For DbNodes, we also have to store its extras + if extras_mode_new == 'import': + if not silent: + print("STORING NEW NODE EXTRAS...") + # for unique_id, new_pk in just_saved.items(): + import_entry_id = import_entry_ids[o.uuid] # Get extras from import file try: extras = data['node_extras'][ @@ -1009,29 +972,33 @@ def import_data_dj(in_path, group=None, ignore_unknown_nodes=False, deserialized_extras = deserialize_attributes(extras, extras_conversion) # TODO: remove when aiida extras will be moved somewhere else # from here - deserialized_extras = {key:value for key, value in deserialized_extras.items() if not - key.startswith('_aiida_')} - if models.DbNode.objects.filter(uuid=unique_id)[0].node_type.endswith('code.Code.'): - deserialized_extras = {key:value for key, value in deserialized_extras.items() if not - key == 'hidden'} + deserialized_extras = {key: value for key, value in deserialized_extras.items() if not + key.startswith('_aiida_')} + if o.node_type.endswith('code.Code.'): + deserialized_extras = {key: value for key, value in deserialized_extras.items() if not + key == 'hidden'} # till here - models.DbExtra.reset_values_for_node( - dbnode=new_pk, - attributes=deserialized_extras, - with_transaction=False) - elif extras_mode_new == 'none': - if not silent: - print("SKIPPING NEW NODE EXTRAS...") - else: - raise ValueError("Unknown extras_mode_new value: {}, should be 
either 'import' or "
-                                         "'none'".format(extras_mode_new))
+                            o.extras = dict()
+                            for k, v in deserialized_extras.items():
+                                o.extras[k] = datetime_to_isoformat(v)
+                    elif extras_mode_new == 'none':
+                        if not silent:
+                            print("SKIPPING NEW NODE EXTRAS...")
+                    else:
+                        raise ValueError("Unknown extras_mode_new value: {}, should be either 'import' or "
+                                         "'none'".format(extras_mode_new))

-                # For the existing DbNodes we may want to choose the import mode
+                # For the existing nodes that are also in the imported list
+                # we also update their extras if necessary
                 if not silent:
                     print("UPDATING EXISTING NODE EXTRAS (mode: {})".format(extras_mode_existing))

-                for import_entry_id, entry_data in existing_entries[model_name].items():
-                    unique_id = entry_data[unique_identifier]
+                uuid_import_pk_match = {entry_data[unique_identifier]: import_entry_id for
+                                        import_entry_id, entry_data in existing_entries[model_name].items()}
+                for db_node in models.DbNode.objects.filter(uuid__in=uuid_import_pk_match).distinct():
+                    unique_id = str(db_node.uuid)
+                    import_entry_id = uuid_import_pk_match[unique_id]
                     existing_entry_id = foreign_ids_reverse_mappings[model_name][unique_id]
                     # Get extras from import file
                     try:
@@ -1045,22 +1012,55 @@ def import_data_dj(in_path, group=None, ignore_unknown_nodes=False,
                                          "for DbNode with UUID = {}".format(unique_id))

                     # Here I have to deserialize the extras
-                    old_extras = models.DbExtra.get_all_values_for_nodepk(existing_entry_id)
+                    old_extras = db_node.extras
                     deserialized_extras = deserialize_attributes(extras, extras_conversion)
                     # TODO: remove when aiida extras will be moved somewhere else
                     # from here
-                    deserialized_extras = {key:value for key, value in deserialized_extras.items() if not
-                                           key.startswith('_aiida_')}
+                    deserialized_extras = {key: value for key, value in deserialized_extras.items() if not
+                                           key.startswith('_aiida_')}
                     if models.DbNode.objects.filter(uuid=unique_id)[0].node_type.endswith('code.Code.'):
-                        deserialized_extras = {key:value for key, value in deserialized_extras.items() if not
-                                               key == 'hidden'}
+                        deserialized_extras = {key: value for key, value in deserialized_extras.items() if not
+                                               key == 'hidden'}
                     # till here
-                    merged_extras = merge_extras(old_extras, deserialized_extras, extras_mode_existing)
+                    db_node.extras = merge_extras(old_extras, deserialized_extras, extras_mode_existing)
+                    db_node.save()

-                    models.DbExtra.reset_values_for_node(
-                        dbnode=existing_entry_id,
-                        attributes=merged_extras,
-                        with_transaction=False)
+                # If there is an mtime in the field, disable the automatic update
+                # to keep the mtime that we have set here
+                if 'mtime' in [field.name for field in Model._meta.local_fields]:
+                    with suppress_auto_now([(Model, ['mtime'])]):
+                        # Store them all in once; however, the PK
+                        # are not set in this way...
+ Model.objects.bulk_create(objects_to_create) + else: + Model.objects.bulk_create(objects_to_create) + + # Get back the just-saved entries + just_saved_queryset = Model.objects.filter( + **{"{}__in".format(unique_identifier): + import_entry_ids.keys()}).values_list(unique_identifier, 'pk') + # note: convert uuids from type UUID to strings + just_saved = { str(k) : v for k,v in just_saved_queryset } + + # Now I have the PKs, print the info + # Moreover, set the foreign_ids_reverse_mappings + for unique_id, new_pk in just_saved.items(): + import_entry_id = import_entry_ids[unique_id] + foreign_ids_reverse_mappings[model_name][unique_id] = new_pk + if model_name not in ret_dict: + ret_dict[model_name] = {'new': [], 'existing': []} + ret_dict[model_name]['new'].append((import_entry_id, + new_pk)) + + if not silent: + print("NEW %s: %s (%s->%s)" % (model_name, unique_id, + import_entry_id, + new_pk)) if not silent: print("STORING NODE LINKS...") @@ -1501,7 +1501,6 @@ def import_data_sqla(in_path, group=None, ignore_unknown_nodes=False, " already exists and I could not create a new one" "".format(orig_label)) - elif entity_name == COMPUTER_ENTITY_NAME: # The following is done for compatibility # reasons in case the export file was generated @@ -1592,6 +1591,8 @@ def import_data_sqla(in_path, group=None, ignore_unknown_nodes=False, # Store all objects for this model in a list, and store them # all in once at the end. objects_to_create = list() + # In the following list we add the objects to be updated + objects_to_update = list() # This is needed later to associate the import entry with the new pk import_entry_ids = dict() @@ -1751,11 +1752,14 @@ def import_data_sqla(in_path, group=None, ignore_unknown_nodes=False, # till here db_node.extras = merge_extras(old_extras, deserialized_extras, extras_mode_existing) flag_modified(db_node, "extras") + objects_to_update.append(db_node) # Store them all in once; However, the PK # are not set in this way... 
if objects_to_create: session.add_all(objects_to_create) + if objects_to_update: + session.add_all(objects_to_update) session.flush() diff --git a/aiida/orm/querybuilder.py b/aiida/orm/querybuilder.py index db97f9f327..9ab239fe37 100644 --- a/aiida/orm/querybuilder.py +++ b/aiida/orm/querybuilder.py @@ -329,7 +329,7 @@ def __init__(self, backend=None, **kwargs): # A dictionary tag:alias of ormclass # redundant but makes life easier - self._tag_to_alias_map = {} + self.tag_to_alias_map = {} # A dictionary tag: filter specification for this alias self._filters = {} @@ -528,7 +528,7 @@ def get_tag_from_type(classifiers): return classifiers['ormclass_type_string'].rstrip('.').split('.')[-1] or "node" basetag = get_tag_from_type(classifiers) - tags_used = self._tag_to_alias_map.keys() + tags_used = self.tag_to_alias_map.keys() for i in range(1, 100): tag = '{}_{}'.format(basetag, i) if tag not in tags_used: @@ -645,7 +645,7 @@ def append(self, "since this is used as a delimiter for links" "".format(self._EDGE_TAG_DELIM)) tag = tag - if tag in self._tag_to_alias_map.keys(): + if tag in self.tag_to_alias_map.keys(): raise InputValidationError("This tag ({}) is already in use".format(tag)) else: tag = self._get_unique_tag(classifiers) @@ -683,14 +683,14 @@ def append(self, # ALIASING ############################## try: - self._tag_to_alias_map[tag] = aliased(ormclass) + self.tag_to_alias_map[tag] = aliased(ormclass) except Exception as e: if self._debug: print("DEBUG: Exception caught in append, cleaning up") print(" ", e) if l_class_added_to_map: self._cls_to_tag_map.pop(cls) - self._tag_to_alias_map.pop(tag, None) + self.tag_to_alias_map.pop(tag, None) raise # FILTERS ###################################### @@ -720,7 +720,7 @@ def append(self, print(" ", e) if l_class_added_to_map: self._cls_to_tag_map.pop(cls) - self._tag_to_alias_map.pop(tag) + self.tag_to_alias_map.pop(tag) self._filters.pop(tag) raise @@ -735,7 +735,7 @@ def append(self, print(" ", e) if l_class_added_to_map: self._cls_to_tag_map.pop(cls) - self._tag_to_alias_map.pop(tag, None) + self.tag_to_alias_map.pop(tag, None) self._filters.pop(tag) self._projections.pop(tag) raise e @@ -793,7 +793,7 @@ def append(self, print(" ", e) if l_class_added_to_map: self._cls_to_tag_map.pop(cls) - self._tag_to_alias_map.pop(tag, None) + self.tag_to_alias_map.pop(tag, None) self._filters.pop(tag) self._projections.pop(tag) # There's not more to clean up here! 
@@ -808,14 +808,14 @@ def append(self, edge_destination_tag = self._get_tag_from_specification(joining_value) edge_tag = edge_destination_tag + self._EDGE_TAG_DELIM + tag else: - if edge_tag in self._tag_to_alias_map.keys(): + if edge_tag in self.tag_to_alias_map.keys(): raise InputValidationError("The tag {} is already in use".format(edge_tag)) if self._debug: print("I have chosen", edge_tag) # My edge is None for now, since this is created on the FLY, # the _tag_to_alias_map will be updated later (in _build) - self._tag_to_alias_map[edge_tag] = None + self.tag_to_alias_map[edge_tag] = None # Filters on links: # Beware, I alway add this entry now, but filtering here might be @@ -836,11 +836,11 @@ def append(self, print(traceback.format_exc()) if l_class_added_to_map: self._cls_to_tag_map.pop(cls) - self._tag_to_alias_map.pop(tag, None) + self.tag_to_alias_map.pop(tag, None) self._filters.pop(tag) self._projections.pop(tag) if edge_tag is not None: - self._tag_to_alias_map.pop(edge_tag, None) + self.tag_to_alias_map.pop(edge_tag, None) self._filters.pop(edge_tag, None) self._projections.pop(edge_tag, None) # There's not more to clean up here! @@ -1161,6 +1161,9 @@ def _add_to_projections(self, alias, projectable_entity_name, cast=None, func=No raise InputValidationError("\nInvalid function specification {}".format(func)) self._query = self._query.add_columns(entity_to_project) + def get_table_columns(self, table_alias): + raise NotImplementedError + def _build_projections(self, tag, items_to_project=None): if items_to_project is None: @@ -1174,27 +1177,29 @@ def _build_projections(self, tag, items_to_project=None): if not items_to_project: return - alias = self._tag_to_alias_map[tag] + alias = self.tag_to_alias_map[tag] - self.tag_to_projected_entity_dict[tag] = {} + self.tag_to_projected_property_dict[tag] = {} for projectable_spec in items_to_project: for projectable_entity_name, extraspec in projectable_spec.items(): + property_names = list() if projectable_entity_name == '**': # Need to expand - entity_names = self._impl.modify_expansions( - alias, [str(c).replace(alias.__table__.name + '.', '') for c in alias.__table__.columns]) + property_names.extend(self._impl.modify_expansions( + alias, self._impl.get_column_names(alias))) + # ~ for s in ('attributes', 'extras'): # ~ try: # ~ entity_names.remove(s) # ~ except ValueError: # ~ pass else: - entity_names = [projectable_entity_name] - for entity_name in entity_names: - self._add_to_projections(alias, entity_name, **extraspec) + property_names.extend(self._impl.modify_expansions(alias, [projectable_entity_name])) - self.tag_to_projected_entity_dict[tag][entity_name] = self.nr_of_projections + for property_name in property_names: + self._add_to_projections(alias, property_name, **extraspec) + self.tag_to_projected_property_dict[tag][property_name] = self.nr_of_projections self.nr_of_projections += 1 def _get_tag_from_specification(self, specification): @@ -1205,11 +1210,11 @@ def _get_tag_from_specification(self, specification): If it is a class, I check if it's in the _cls_to_tag_map! 
""" if isinstance(specification, six.string_types): - if specification in self._tag_to_alias_map.keys(): + if specification in self.tag_to_alias_map.keys(): tag = specification else: raise InputValidationError("tag {} is not among my known tags\n" - "My tags are: {}".format(specification, self._tag_to_alias_map.keys())) + "My tags are: {}".format(specification, self.tag_to_alias_map.keys())) else: if specification in self._cls_to_tag_map.keys(): tag = self._cls_to_tag_map[specification] @@ -1217,7 +1222,7 @@ def _get_tag_from_specification(self, specification): raise InputValidationError("You specified as a class for which I have to find a tag\n" "The classes that I can do this for are:{}\n" "The tags I have are: {}".format(specification, self._cls_to_tag_map.keys(), - self._tag_to_alias_map.keys())) + self.tag_to_alias_map.keys())) return tag def set_debug(self, debug): @@ -1718,12 +1723,12 @@ def _get_connecting_node(self, index, joining_keyword=None, joining_value=None, returnval = (self._aliased_path[joining_value], func) elif isinstance(joining_value, str): try: - returnval = self._tag_to_alias_map[self._get_tag_from_specification(joining_value)], func + returnval = self.tag_to_alias_map[self._get_tag_from_specification(joining_value)], func except KeyError: raise InputValidationError('Key {} is unknown to the types I know about:\n' '{}'.format( self._get_tag_from_specification(joining_value), - self._tag_to_alias_map.keys())) + self.tag_to_alias_map.keys())) return returnval def _get_json_compatible(self, inp): @@ -1814,12 +1819,12 @@ def _build(self): # Starting the query by receiving a session # Every subclass needs to have _get_session and give me the # right session - firstalias = self._tag_to_alias_map[self._path[0]['tag']] + firstalias = self.tag_to_alias_map[self._path[0]['tag']] self._query = self._impl.get_session().query(firstalias) # JOINS ################################ for index, verticespec in enumerate(self._path[1:], start=1): - alias = self._tag_to_alias_map[verticespec['tag']] + alias = self.tag_to_alias_map[verticespec['tag']] # looping through the queryhelp # ~ if index: # There is nothing to join if that is the first table @@ -1842,17 +1847,17 @@ def _build(self): else: aliased_edge = connection_func(toconnectwith, alias, isouterjoin=isouterjoin) if aliased_edge is not None: - self._tag_to_alias_map[edge_tag] = aliased_edge + self.tag_to_alias_map[edge_tag] = aliased_edge ######################### FILTERS ############################## for tag, filter_specs in self._filters.items(): try: - alias = self._tag_to_alias_map[tag] + alias = self.tag_to_alias_map[tag] except KeyError: # TODO Check KeyError before? 
raise InputValidationError('You looked for tag {} among the alias list\n' - 'The tags I know are:\n{}'.format(tag, self._tag_to_alias_map.keys())) + 'The tags I know are:\n{}'.format(tag, self.tag_to_alias_map.keys())) self._query = self._query.filter(self._build_filters(alias, filter_specs)) ######################### PROJECTIONS ########################## @@ -1862,8 +1867,8 @@ def _build(self): # Will be later set to this list: entities = [] - # Mapping between enitites and the tag used/ given by user: - self.tag_to_projected_entity_dict = {} + # Mapping between entities and the tag used/ given by user: + self.tag_to_projected_property_dict = {} self.nr_of_projections = 0 if self._debug: @@ -1897,7 +1902,7 @@ def _build(self): # ORDER ################################ for order_spec in self._order_by: for tag, entities in order_spec.items(): - alias = self._tag_to_alias_map[tag] + alias = self.tag_to_alias_map[tag] for entitydict in entities: for entitytag, entityspec in entitydict.items(): self._build_order(alias, entitytag, entityspec) @@ -1927,7 +1932,7 @@ def _build(self): # Make a list that helps the projection postprocessing self._attrkeys_as_in_sql_result = { - index_in_sql_result: attrkey for tag, projected_entities_dict in self.tag_to_projected_entity_dict.items() + index_in_sql_result: attrkey for tag, projected_entities_dict in self.tag_to_projected_property_dict.items() for attrkey, index_in_sql_result in projected_entities_dict.items() } @@ -1965,7 +1970,7 @@ def build_counterquery(calc_class): tag = node['tag'] requested_cols = [key for item in self._projections[tag] for key in item.keys()] if '*' in requested_cols: - input_alias_list.append(aliased(self._tag_to_alias_map[tag])) + input_alias_list.append(aliased(self.tag_to_alias_map[tag])) counterquery = self._imp._get_session().query(orm_calc_class) if type_spec: @@ -1997,7 +2002,7 @@ def get_alias(self, tag): :returns: the alias given for that vertice """ tag = self._get_tag_from_specification(tag) - return self._tag_to_alias_map[tag] + return self.tag_to_alias_map[tag] def get_used_tags(self, vertices=True, edges=True): """ @@ -2198,7 +2203,7 @@ def iterdict(self, batch_size=100): """ query = self.get_query() - for item in self._impl.iterdict(query, batch_size, self.tag_to_projected_entity_dict): + for item in self._impl.iterdict(query, batch_size, self.tag_to_projected_property_dict, self.tag_to_alias_map): for key, value in item.items(): item[key] = self.get_aiida_entity_res(value) diff --git a/docs/source/nitpick-exceptions b/docs/source/nitpick-exceptions index 8acae34ef2..c5eb7b9d77 100644 --- a/docs/source/nitpick-exceptions +++ b/docs/source/nitpick-exceptions @@ -258,3 +258,14 @@ py:class Collection # psychopg2 py:class psycopg2.extensions.cursor + +# Aldjemy exceptions +py:class aldjemy.orm.DbNode +py:class aldjemy.orm.DbLink +py:class aldjemy.orm.DbComputer +py:class aldjemy.orm.DbUser +py:class aldjemy.orm.DbGroup +py:class aldjemy.orm.DbAuthInfo +py:class aldjemy.orm.DbComment +py:class aldjemy.orm.DbLog +py:class aldjemy.orm.DbSetting From 66ed3835f42e3df2af828180b182894b7921f4a9 Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Wed, 8 May 2019 17:08:10 +0200 Subject: [PATCH 02/11] Refactor the global setting functions to utility class The way to get, set and delete global settings in the `DbSetting` table was implemented by means of free functions. This is refactored into a `SettingsManager` utility class. 
--- .pre-commit-config.yaml | 2 - aiida/backends/djsite/globalsettings.py | 97 ---------- aiida/backends/djsite/utils.py | 89 ++++++++- aiida/backends/sqlalchemy/globalsettings.py | 100 ---------- aiida/backends/sqlalchemy/utils.py | 83 +++++++- .../tests/cmdline/commands/test_process.py | 45 ----- aiida/backends/tests/test_nodes.py | 32 ---- aiida/backends/utils.py | 179 ++++++------------ aiida/engine/utils.py | 13 +- aiida/orm/implementation/querybuilder.py | 1 - 10 files changed, 218 insertions(+), 423 deletions(-) delete mode 100644 aiida/backends/djsite/globalsettings.py delete mode 100644 aiida/backends/sqlalchemy/globalsettings.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2dbc1f5f8d..0e3593a9be 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -34,13 +34,11 @@ aiida/backends/djsite/db/subtests/test_generic.py| aiida/backends/djsite/db/subtests/test_nodes.py| aiida/backends/djsite/db/subtests/test_query.py| - aiida/backends/djsite/globalsettings.py| aiida/backends/djsite/__init__.py| aiida/backends/djsite/manage.py| aiida/backends/djsite/queries.py| aiida/backends/profile.py| aiida/backends/general/abstractqueries.py| - aiida/backends/sqlalchemy/globalsettings.py| aiida/backends/sqlalchemy/__init__.py| aiida/backends/sqlalchemy/migrations/env.py| aiida/backends/sqlalchemy/migrations/versions/0aebbeab274d_base_data_plugin_type_string.py| diff --git a/aiida/backends/djsite/globalsettings.py b/aiida/backends/djsite/globalsettings.py deleted file mode 100644 index 2bc75ddaac..0000000000 --- a/aiida/backends/djsite/globalsettings.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -""" -Functions to manage the global settings stored in the DB (in the DbSettings -table. -""" -from __future__ import division -from __future__ import print_function -from __future__ import absolute_import -from aiida.backends.djsite.db.models import DbSetting -from aiida.backends.utils import validate_attribute_key -from aiida.backends.utils import get_value_of_sub_field -from aiida.common.exceptions import NotExistent - - -def set_global_setting(key, value, description=None): - """ - Set a global setting in the DbSetting table (therefore, stored at the DB level). - """ - # Before storing, validate the key - validate_attribute_key(key) - - other_attribs = dict() - if description is not None: - other_attribs["description"] = description - DbSetting.set_value(key, value, other_attribs=other_attribs) - - -def del_global_setting(key): - """ - Return the value of the given setting, or raise a KeyError if the - setting is not present in the DB. - - :raise KeyError: if the setting does not exist in the DB - """ - from aiida.backends.djsite.db.models import DbSetting - - try: - DbSetting.del_value(key=key) - except KeyError: - raise KeyError("No global setting with key={}".format(key)) - - -def get_global_setting(key): - """ - Return the value of the given setting, or raise a KeyError if the - setting is not present in the DB. 
-
-    :raise KeyError: if the setting does not exist in the DB
-    """
-    from aiida.backends.djsite.db.models import DbSetting
-
-    # Check first that the table exists
-    table_check_test()
-
-    try:
-        res = get_value_of_sub_field(key, lambda given_key: DbSetting.objects.filter(key=given_key).first().getvalue())
-        if res is None:
-            raise NotExistent
-        return res
-    except NotExistent:
-        raise KeyError("No global setting with key={}".format(key))
-
-
-def get_global_setting_description(key):
-    """
-    Return the description for the given setting variable, as stored in the
-    DB, or raise a KeyError if the setting is not present in the DB or the
-    table doesn't exist..
-    """
-    from aiida.backends.djsite.db.models import DbSetting
-    from django.core.exceptions import ObjectDoesNotExist
-
-    # Check first that the table exists
-    table_check_test()
-
-    try:
-        return DbSetting.objects.get(key=key).description
-    except ObjectDoesNotExist:
-        raise KeyError("No global setting with key={}".format(key))
-
-
-def table_check_test():
-    """
-    Checks if the db_setting table exists in the database. If it doesn't exist
-    it rainses a KeyError.
-    """
-    from django.db import connection
-    if 'db_dbsetting' not in connection.introspection.table_names():
-        raise KeyError("No table found")
diff --git a/aiida/backends/djsite/utils.py b/aiida/backends/djsite/utils.py
index a974d50b21..14971cbc74 100644
--- a/aiida/backends/djsite/utils.py
+++ b/aiida/backends/djsite/utils.py
@@ -16,10 +16,78 @@

 import os
 import django

+from aiida.backends.utils import validate_attribute_key, SettingsManager, Setting
+from aiida.common import NotExistent
+
 SCHEMA_VERSION_DB_KEY = 'db|schemaversion'
 SCHEMA_VERSION_DB_DESCRIPTION = "The version of the schema used in this database."


+class DjangoSettingsManager(SettingsManager):
+    """Class to get, set and delete settings from the `DbSettings` table."""
+
+    table_name = 'db_dbsetting'
+
+    def validate_table_existence(self):
+        """Verify that the `DbSetting` table actually exists.
+
+        :raises: `~aiida.common.exceptions.NotExistent` if the settings table does not exist
+        """
+        from django.db import connection
+        if self.table_name not in connection.introspection.table_names():
+            raise NotExistent('the settings table does not exist')
+
+    def get(self, key):
+        """Return the setting with the given key.
+
+        :param key: the key identifying the setting
+        :return: Setting
+        :raises: `~aiida.common.exceptions.NotExistent` if the setting does not exist
+        """
+        from aiida.backends.djsite.db.models import DbSetting
+
+        self.validate_table_existence()
+        setting = DbSetting.objects.filter(key=key).first()
+
+        if setting is None:
+            raise NotExistent('setting `{}` does not exist'.format(key))
+
+        return Setting(setting.key, setting.val, setting.description, setting.time)
+
+    def set(self, key, value, description=None):
+        """Set the setting with the given key to the given value.
+
+        :param key: the key identifying the setting
+        :param value: the value for the setting
+        :param description: optional setting description
+        """
+        from aiida.backends.djsite.db.models import DbSetting
+
+        self.validate_table_existence()
+        validate_attribute_key(key)
+
+        other_attribs = dict()
+        if description is not None:
+            other_attribs['description'] = description
+
+        DbSetting.set_value(key, value, other_attribs=other_attribs)
+
+    def delete(self, key):
+        """Delete the setting with the given key.
+ + :param key: the key identifying the setting + :raises: `~aiida.common.exceptions.NotExistent` if the settings does not exist + """ + from aiida.backends.djsite.db.models import DbSetting + + self.validate_table_existence() + + try: + DbSetting.del_value(key=key) + except KeyError: + raise NotExistent('setting `{}` does not exist'.format(key)) + + def load_dbenv(profile): """Load the database environment and ensure that the code and database schema versions are compatible. @@ -100,11 +168,11 @@ def check_schema_version(profile_name): def set_db_schema_version(version): """ - Set the schema version stored in the DB. Use only if you know what - you are doing. + Set the schema version stored in the DB. Use only if you know what you are doing. """ - from aiida.backends.utils import set_global_setting - return set_global_setting(SCHEMA_VERSION_DB_KEY, version, description=SCHEMA_VERSION_DB_DESCRIPTION) + from aiida.backends.utils import get_settings_manager + manager = get_settings_manager() + return manager.set(SCHEMA_VERSION_DB_KEY, version, description=SCHEMA_VERSION_DB_DESCRIPTION) def get_db_schema_version(): @@ -112,11 +180,16 @@ def get_db_schema_version(): Get the current schema version stored in the DB. Return None if it is not stored. """ - from aiida.backends.utils import get_global_setting + from django.db.utils import ProgrammingError + from aiida.manage.manager import get_manager + backend = get_manager()._load_backend(schema_check=False) # pylint: disable=protected-access + try: - return get_global_setting(SCHEMA_VERSION_DB_KEY) - except KeyError: - return None + result = backend.execute_raw(r"""SELECT tval FROM db_dbsetting WHERE key = 'db|schemaversion';""") + except ProgrammingError: + result = backend.execute_raw(r"""SELECT val FROM db_dbsetting WHERE key = 'db|schemaversion';""") + + return result[0][0] def delete_nodes_and_connections_django(pks_to_delete): # pylint: disable=invalid-name diff --git a/aiida/backends/sqlalchemy/globalsettings.py b/aiida/backends/sqlalchemy/globalsettings.py deleted file mode 100644 index f276e3ab38..0000000000 --- a/aiida/backends/sqlalchemy/globalsettings.py +++ /dev/null @@ -1,100 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -""" -Functions to manage the global settings stored in the DB (in the DbSettings -table. -""" - -from __future__ import division -from __future__ import print_function -from __future__ import absolute_import -from aiida.backends.sqlalchemy.models.settings import DbSetting -from sqlalchemy.orm.exc import NoResultFound -from aiida.backends.sqlalchemy import get_scoped_session -from aiida.backends.utils import validate_attribute_key - - -def set_global_setting(key, value, description=None): - """ - Set a global setting in the DbSetting table (therefore, stored at the DB - level). 
- """ - # Before storing, validate the key - validate_attribute_key(key) - - other_attribs = dict() - if description is not None: - other_attribs["description"] = description - DbSetting.set_value(key, value, other_attribs=other_attribs) - - -def del_global_setting(key): - """ - Return the value of the given setting, or raise a KeyError if the - setting is not present in the DB. - - :raise KeyError: if the setting does not exist in the DB - """ - try: - setting = get_scoped_session().query(DbSetting).filter_by(key=key).one() - setting.delete() - except NoResultFound: - raise KeyError("No global setting with key={}".format(key)) - - -def get_global_setting(key): - """ - Return the value of the given setting, or raise a KeyError if the - setting is not present in the DB. - - :raise KeyError: if the setting does not exist in the DB - """ - from aiida.backends.utils import get_value_of_sub_field - - # Check first that the table exists - table_check_test() - - try: - return get_value_of_sub_field( - key, lambda given_key: get_scoped_session().query(DbSetting).filter_by( - key=given_key).one().getvalue()) - except NoResultFound: - raise KeyError("No global setting with key={}".format(key)) - - -def get_global_setting_description(key): - """ - Return the description for the given setting variable, as stored in the - DB, or raise a KeyError if the setting is not present in the DB or the - table doesn't exist. - """ - from aiida.backends.utils import validate_key - - # Check first that the table exists - table_check_test() - validate_key(key) - - try: - return (get_scoped_session().query(DbSetting).filter_by(key=key). - one().get_description()) - except NoResultFound: - raise KeyError("No global setting with key={}".format(key)) - - -def table_check_test(): - """ - Checks if the db_setting table exists in the database. If it doesn't exist - it rainses a KeyError. - """ - from sqlalchemy.engine import reflection - inspector = reflection.Inspector.from_engine(get_scoped_session().bind) - if 'db_dbsetting' not in inspector.get_table_names(): - raise KeyError("No table found") - diff --git a/aiida/backends/sqlalchemy/utils.py b/aiida/backends/sqlalchemy/utils.py index a77da9187e..8c08f92b75 100644 --- a/aiida/backends/sqlalchemy/utils.py +++ b/aiida/backends/sqlalchemy/utils.py @@ -10,23 +10,90 @@ from __future__ import division from __future__ import absolute_import from __future__ import print_function -from aiida.common import json - -json_dumps = json.dumps -json_loads = json.loads from alembic import command from alembic.config import Config from alembic.runtime.environment import EnvironmentContext from alembic.script import ScriptDirectory +from sqlalchemy.orm.exc import NoResultFound + from aiida.backends import sqlalchemy as sa -from aiida.backends.utils import isoformat_to_datetime, datetime_to_isoformat +from aiida.backends.sqlalchemy import get_scoped_session +from aiida.backends.utils import validate_attribute_key, SettingsManager, Setting, isoformat_to_datetime, datetime_to_isoformat +from aiida.common import NotExistent + ALEMBIC_FILENAME = "alembic.ini" ALEMBIC_REL_PATH = "migrations" +class SqlaSettingsManager(SettingsManager): + """Class to get, set and delete settings from the `DbSettings` table.""" + + table_name = 'db_dbsetting' + + def validate_table_existence(self): + """Verify that the `DbSetting` table actually exists. 
+ + :raises: `~aiida.common.exceptions.NotExistent` if the settings table does not exist + """ + from sqlalchemy.engine import reflection + inspector = reflection.Inspector.from_engine(get_scoped_session().bind) + if self.table_name not in inspector.get_table_names(): + raise NotExistent('the settings table does not exist') + + def get(self, key): + """Return the setting with the given key. + + :param key: the key identifying the setting + :return: Setting + :raises: `~aiida.common.exceptions.NotExistent` if the setting does not exist + """ + from aiida.backends.sqlalchemy.models.settings import DbSetting + self.validate_table_existence() + + try: + setting = get_scoped_session().query(DbSetting).filter_by(key=key).one() + except NoResultFound: + raise NotExistent('setting `{}` does not exist'.format(key)) + + return Setting(key, setting.getvalue(), setting.description, setting.time) + + def set(self, key, value, description=None): + """Set the setting with the given key. + + :param key: the key identifying the setting + :param value: the value for the setting + :param description: optional setting description + """ + from aiida.backends.sqlalchemy.models.settings import DbSetting + self.validate_table_existence() + validate_attribute_key(key) + + other_attribs = dict() + if description is not None: + other_attribs['description'] = description + + DbSetting.set_value(key, value, other_attribs=other_attribs) + + def delete(self, key): + """Delete the setting with the given key. + + :param key: the key identifying the setting + :raises: `~aiida.common.exceptions.NotExistent` if the setting does not exist + """ + from aiida.backends.sqlalchemy.models.settings import DbSetting + self.validate_table_existence() + + try: + setting = get_scoped_session().query(DbSetting).filter_by(key=key).one() + setting.delete() + except NoResultFound: + raise NotExistent('setting `{}` does not exist'.format(key)) + + + def flag_modified(instance, key): """Wrapper around `sqlalchemy.orm.attributes.flag_modified` to correctly dereference utils.ModelWrapper @@ -77,14 +144,16 @@ def dumps_json(d): """ Transforms all datetime object into isoformat and then returns the JSON """ - return json_dumps(datetime_to_isoformat(d)) + from aiida.common import json + return json.dumps(datetime_to_isoformat(d)) def loads_json(s): """ Loads the json and try to parse each basestring as a datetime object """ - ret = json_loads(s) + from aiida.common import json + ret = json.loads(s) return isoformat_to_datetime(ret) diff --git a/aiida/backends/tests/cmdline/commands/test_process.py b/aiida/backends/tests/cmdline/commands/test_process.py index a3aee55ba1..d603200c00 100644 --- a/aiida/backends/tests/cmdline/commands/test_process.py +++ b/aiida/backends/tests/cmdline/commands/test_process.py @@ -245,51 +245,6 @@ def test_list(self): self.assertEqual(len(get_result_lines(result)), 1) self.assertEqual(get_result_lines(result)[0], str(self.calcs[0].pk)) - def test_list_worker_slot_warning(self): - """ - Test that the if the number of used worker process slots exceeds a threshold, - that the warning message is displayed to the user when running `verdi process list` - """ - from aiida.engine import ProcessState - - # Get the number of allowed processes per worker: - from aiida.manage.external.rmq import _RMQ_TASK_PREFETCH_COUNT - limit = int(_RMQ_TASK_PREFETCH_COUNT * 0.9) - - # Create additional active nodes such that we have 90% of the active slot limit - # (including the 6 active nodes created in the class setup fixture) - for _
in six.moves.range(limit - 6): - calc = WorkFunctionNode() - calc.set_process_state(ProcessState.RUNNING) - calc.store() - - # Override the call to the circus client to retrieve the number of workers - # As we don't have a running circus client, this will normally fail, so here we simulate the - # response by redefining the function to get the final value we want. - import aiida.cmdline.utils.common - real_get_num_workers = aiida.cmdline.utils.common.get_num_workers - aiida.cmdline.utils.common.get_num_workers = lambda: 1 - - # Default cmd should not throw the warning as we are below the limit - result = self.cli_runner.invoke(cmd_process.process_list) - self.assertIsNone(result.exception, result.output) - warning_phrase = "of the available daemon worker slots have been used!" - self.assertTrue(all([warning_phrase not in line for line in get_result_lines(result)])) - - # Add one more running node to put us over the limit - calc = WorkFunctionNode() - calc.set_process_state(ProcessState.RUNNING) - calc.store() - - # Now the warning should fire - result = self.cli_runner.invoke(cmd_process.process_list) - self.assertIsNone(result.exception, result.output) - warning_phrase = "% of the available daemon worker slots have been used!" - self.assertTrue(any([warning_phrase in line for line in get_result_lines(result)])) - - # Reset the redifined function - aiida.cmdline.utils.common.get_num_workers = real_get_num_workers - def test_process_show(self): """Test verdi process show""" # We must choose a Node we can store diff --git a/aiida/backends/tests/test_nodes.py b/aiida/backends/tests/test_nodes.py index 7f91300091..9700e5d65c 100644 --- a/aiida/backends/tests/test_nodes.py +++ b/aiida/backends/tests/test_nodes.py @@ -892,38 +892,6 @@ def test_attr_and_extras_multikey(self): # No problem, they are two different nodes n2.set_extra('samename', 1) - def test_settings_methods(self): - from aiida.backends.utils import (get_global_setting_description, get_global_setting, set_global_setting, - del_global_setting) - - set_global_setting(key="aaa", value={'b': 'c'}, description="pippo") - - self.assertEqual(get_global_setting('aaa'), {'b': 'c'}) - self.assertEqual(get_global_setting_description('aaa'), "pippo") - self.assertEqual(get_global_setting('aaa.b'), 'c') - - # The following is disabled because it is not supported in JSONB - # Only top level elements can have descriptions - # self.assertEqual(get_global_setting_description('aaa.b'), "") - del_global_setting('aaa') - - with self.assertRaises(KeyError): - get_global_setting('aaa.b') - - with self.assertRaises(KeyError): - get_global_setting('aaa') - - set_global_setting(key="bbb", value={'c': 'd1'}, description="pippo2") - self.assertEqual(get_global_setting('bbb'), {'c': 'd1'}) - self.assertEqual(get_global_setting('bbb.c'), 'd1') - self.assertEqual(get_global_setting_description('bbb'), "pippo2") - set_global_setting(key="bbb", value={'c': 'd2'}) - self.assertEqual(get_global_setting('bbb'), {'c': 'd2'}) - self.assertEqual(get_global_setting('bbb.c'), 'd2') - self.assertEqual(get_global_setting_description('bbb'), "pippo2") - - del_global_setting('bbb') - def test_attr_listing(self): """ Checks that the list of attributes and extras is ok. 
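For reference, the settings workflow exercised by the removed `test_settings_methods` above maps onto the new API roughly as follows. This is a minimal sketch, not part of the patch itself; it assumes a loaded profile so that the `get_settings_manager` factory introduced in the `aiida/backends/utils.py` diff below can resolve the backend.

    from aiida.backends.utils import get_settings_manager
    from aiida.common import NotExistent

    manager = get_settings_manager()
    manager.set('aaa', {'b': 'c'}, description='pippo')

    # `get` returns a Setting namedtuple with fields (key, value, description, time)
    setting = manager.get('aaa')
    assert setting.value == {'b': 'c'}
    assert setting.description == 'pippo'

    manager.delete('aaa')
    try:
        manager.get('aaa')
    except NotExistent:
        pass  # a missing setting now raises NotExistent instead of KeyError

Sub-key access such as the old `get_global_setting('aaa.b')` has no direct equivalent: the manager returns the whole JSONB value and the caller indexes into it.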
diff --git a/aiida/backends/utils.py b/aiida/backends/utils.py index 4f8ac57696..1d9e7d45c1 100644 --- a/aiida/backends/utils.py +++ b/aiida/backends/utils.py @@ -7,24 +7,70 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### - from __future__ import division from __future__ import print_function from __future__ import absolute_import -import six +import abc +import collections import datetime +import dateutil import re +import six from aiida.backends import BACKEND_SQLA, BACKEND_DJANGO -from aiida.common.exceptions import ConfigurationError +from aiida.common.exceptions import ConfigurationError, ValidationError from aiida.manage import configuration -from dateutil import parser -from aiida.common.exceptions import ValidationError, NotExistent AIIDA_ATTRIBUTE_SEP = '.' +Setting = collections.namedtuple('Setting', ['key', 'value', 'description', 'time']) + + +class SettingsManager(object): + """Class to get, set and delete settings from the `DbSettings` table.""" + + @abc.abstractmethod + def get(self, key): + """Return the setting with the given key. + + :param key: the key identifying the setting + :return: Setting + :raises: `~aiida.common.exceptions.NotExistent` if the setting does not exist + """ + + @abc.abstractmethod + def set(self, key, value, description=None): + """Set the setting with the given key. + + :param key: the key identifying the setting + :param value: the value for the setting + :param description: optional setting description + """ + + @abc.abstractmethod + def delete(self, key): + """Delete the setting with the given key. + + :param key: the key identifying the setting + :raises: `~aiida.common.exceptions.NotExistent` if the setting does not exist + """ + + +def get_settings_manager(): + if configuration.PROFILE.database_backend == BACKEND_DJANGO: + from aiida.backends.djsite.utils import DjangoSettingsManager + manager = DjangoSettingsManager() + elif configuration.PROFILE.database_backend == BACKEND_SQLA: + from aiida.backends.sqlalchemy.utils import SqlaSettingsManager + manager = SqlaSettingsManager() + else: + raise Exception('unknown backend type `{}`'.format(configuration.PROFILE.database_backend)) + + return manager + + def validate_attribute_key(key): """ Validate the key string to check if it is valid (e.g., if it does not @@ -33,16 +79,13 @@ def validate_attribute_key(key): :return: None if the key is valid :raise aiida.common.ValidationError: if the key is not valid """ - from aiida.common.exceptions import ValidationError - if not isinstance(key, six.string_types): raise ValidationError("The key must be a string.") if not key: raise ValidationError("The key cannot be an empty string.") if AIIDA_ATTRIBUTE_SEP in key: raise ValidationError("The separator symbol '{}' cannot be present " - "in the key of attributes, extras, etc.".format( - AIIDA_ATTRIBUTE_SEP)) + "in the key of attributes, extras, etc.".format(AIIDA_ATTRIBUTE_SEP)) def load_dbenv(profile=None, *args, **kwargs): @@ -76,65 +119,6 @@ def _load_dbenv_noschemacheck(profile=None, *args, **kwargs): return to_return -def get_global_setting(key): - if configuration.PROFILE.database_backend == BACKEND_DJANGO: - from aiida.backends.djsite.globalsettings import get_global_setting - elif configuration.PROFILE.database_backend == BACKEND_SQLA: - from aiida.backends.sqlalchemy.globalsettings import get_global_setting - else: - raise
Exception("unknown backend {}".format(configuration.PROFILE.database_backend)) - - return get_global_setting(key) - - -def get_global_setting_description(key): - if configuration.PROFILE.database_backend == BACKEND_DJANGO: - from aiida.backends.djsite.globalsettings import get_global_setting_description - elif configuration.PROFILE.database_backend == BACKEND_SQLA: - from aiida.backends.sqlalchemy.globalsettings import get_global_setting_description - else: - raise Exception("unknown backend {}".format(configuration.PROFILE.database_backend)) - - return get_global_setting_description(key) - - -def get_backend_type(): - """ - Set the schema version stored in the DB. Use only if you know what - you are doing. - """ - return get_global_setting('db|backend') - - -def set_global_setting(key, value, description=None): - if configuration.PROFILE.database_backend == BACKEND_DJANGO: - from aiida.backends.djsite.globalsettings import set_global_setting - elif configuration.PROFILE.database_backend == BACKEND_SQLA: - from aiida.backends.sqlalchemy.globalsettings import set_global_setting - else: - raise Exception("unknown backend {}".format(configuration.PROFILE.database_backend)) - - set_global_setting(key, value, description) - - -def del_global_setting(key): - if configuration.PROFILE.database_backend == BACKEND_DJANGO: - from aiida.backends.djsite.globalsettings import del_global_setting - elif configuration.PROFILE.database_backend == BACKEND_SQLA: - from aiida.backends.sqlalchemy.globalsettings import del_global_setting - else: - raise Exception("unknown backend {}".format(configuration.PROFILE.database_backend)) - - del_global_setting(key) - - -def set_backend_type(backend_name): - """Set the schema version stored in the DB. Use only if you know what you are doing.""" - return set_global_setting( - 'db|backend', backend_name, - description="The backend used to communicate with the database.") - - def delete_nodes_and_connections(pks): if configuration.PROFILE.database_backend == BACKEND_DJANGO: from aiida.backends.djsite.utils import delete_nodes_and_connections_django as delete_nodes_backend @@ -177,65 +161,8 @@ def isoformat_to_datetime(d): elif isinstance(d, six.string_types): if date_reg.match(d): try: - return parser.parse(d) + return dateutil.parser.parse(d) except (ValueError, TypeError): return d return d return d - - -# The separator for sub-fields (for JSON stored values).Keys are not allowed -# to contain the separator even if the -_sep = "." - - -def validate_key(key): - """ - Validate the key string to check if it is valid (e.g., if it does not - contain the separator symbol.). - - :return: None if the key is valid - :raise aiida.common.ValidationError: if the key is not valid - """ - if not isinstance(key, six.string_types): - raise ValidationError("The key must be a string.") - if not key: - raise ValidationError("The key cannot be an empty string.") - if _sep in key: - raise ValidationError("The separator symbol '{}' cannot be present " - "in the key of this field.".format(_sep)) - - -def get_value_of_sub_field(key, original_get_value): - """ - Get the value that corresponds to sub-fields of dictionaries stored in a - JSON. For example, if there is a dictionary {'b': 'c'} stored as value of - the key 'a' - value 'a' - :param key: The key that can be simple, a string, or complex, a set of keys - separated by the separator value. - :param original_get_value: The function that should be called to get the - original value (which can be a dictionary too). 
- :return: The value that correspond to the complex (or not) key. - :raise aiida.common.NotExistent: If the key doesn't correspond to a value - """ - keys = list() - if _sep in key: - keys.extend(key.split(_sep)) - else: - keys.append(key) - - if len(keys) == 1: - return original_get_value(keys[0]) - else: - try: - curr_val = original_get_value(keys[0]) - curr_pos = 1 - while curr_pos < len(keys): - curr_val = curr_val[keys[curr_pos]] - curr_pos += 1 - - return curr_val - except TypeError: - raise NotExistent("The sub-field {} doesn't correspond " - "to a value.".format(key)) diff --git a/aiida/engine/utils.py b/aiida/engine/utils.py index 64f959bad5..5e1ff76fee 100644 --- a/aiida/engine/utils.py +++ b/aiida/engine/utils.py @@ -242,7 +242,7 @@ def set_process_state_change_timestamp(process): :param process: the Process instance that changed its state """ - from aiida.backends.utils import set_global_setting + from aiida.backends.utils import get_settings_manager from aiida.common import timezone from aiida.common.exceptions import UniquenessError from aiida.orm import ProcessNode, CalculationNode, WorkflowNode @@ -262,7 +262,8 @@ def set_process_state_change_timestamp(process): value = timezone.now() try: - set_global_setting(key, value, description) + manager = get_settings_manager() + manager.set(key, value, description) except UniquenessError as exception: process.logger.debug('could not update the {} setting because of a UniquenessError: {}'.format(key, exception)) @@ -277,8 +278,10 @@ def get_process_state_change_timestamp(process_type=None): known process types will be returned. :return: a timestamp or None """ - from aiida.backends.utils import get_global_setting + from aiida.backends.utils import get_settings_manager + from aiida.common.exceptions import NotExistent + manager = get_settings_manager() valid_process_types = ['calculation', 'work'] if process_type is not None and process_type not in valid_process_types: @@ -294,8 +297,8 @@ def get_process_state_change_timestamp(process_type=None): for process_type_key in process_types: key = PROCESS_STATE_CHANGE_KEY.format(process_type_key) try: - timestamps.append(get_global_setting(key)) - except KeyError: + timestamps.append(manager.get(key).time) + except NotExistent: pass if not timestamps: diff --git a/aiida/orm/implementation/querybuilder.py b/aiida/orm/implementation/querybuilder.py index fe5148ea01..ec1371755a 100644 --- a/aiida/orm/implementation/querybuilder.py +++ b/aiida/orm/implementation/querybuilder.py @@ -272,7 +272,6 @@ def get_column_names(self, alias): """ Return the column names of the given table (alias). 
""" - pass def get_column(self, colname, alias): # pylint: disable=no-self-use """ From 2c15dcd8b2285b63c5681d7012b6495d5b102e0f Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Wed, 8 May 2019 17:38:50 +0200 Subject: [PATCH 03/11] Move datetime isoformat serializers to `aiida.common.timezone` --- .../0037_attributes_extras_settings_json.py | 2 +- aiida/backends/djsite/db/models.py | 15 +++-- ...ns_0034_attributes_extras_settings_json.py | 2 +- aiida/backends/djsite/utils.py | 2 +- aiida/backends/sqlalchemy/utils.py | 3 +- aiida/backends/utils.py | 41 ------------ aiida/common/timezone.py | 63 ++++++++++++++++--- aiida/orm/importexport.py | 2 +- 8 files changed, 66 insertions(+), 64 deletions(-) diff --git a/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py b/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py index 347784305a..994003725c 100644 --- a/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py +++ b/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py @@ -20,8 +20,8 @@ from django.db import transaction from aiida.backends.djsite.db.migrations import upgrade_schema_version -from aiida.backends.utils import datetime_to_isoformat from aiida.cmdline.utils import echo +from aiida.common.timezone import datetime_to_isoformat REVISION = '1.0.37' DOWN_REVISION = '1.0.36' diff --git a/aiida/backends/djsite/db/models.py b/aiida/backends/djsite/db/models.py index b9465aa409..a12ca01f92 100644 --- a/aiida/backends/djsite/db/models.py +++ b/aiida/backends/djsite/db/models.py @@ -24,7 +24,6 @@ import aiida.backends.djsite.db.migrations as migrations from aiida.backends.djsite.settings import AUTH_USER_MODEL -from aiida.backends.utils import datetime_to_isoformat, isoformat_to_datetime from aiida.common import timezone from aiida.common.utils import get_new_uuid @@ -147,12 +146,12 @@ def __init__(self, *args, **kwargs): if self.attributes is None: self.attributes = dict() else: - self.attributes = datetime_to_isoformat(self.attributes) + self.attributes = timezone.datetime_to_isoformat(self.attributes) if self.extras is None: self.extras = dict() else: - self.extras = datetime_to_isoformat(self.extras) + self.extras = timezone.datetime_to_isoformat(self.extras) def set_attribute(self, key, value): DbNode._set_attr(self.attributes, key, value) @@ -190,17 +189,17 @@ def del_extra(self, key): self.save() def get_attributes(self): - return isoformat_to_datetime(self.attributes) + return timezone.isoformat_to_datetime(self.attributes) def get_extras(self): - return isoformat_to_datetime(self.extras) + return timezone.isoformat_to_datetime(self.extras) @ staticmethod def _set_attr(d, key, value): if '.' in key: raise ValueError("We don't know how to treat key with dot in it yet") # This is important in order to properly handle datetime objects - d[key] = datetime_to_isoformat(value) + d[key] = timezone.datetime_to_isoformat(value) @ staticmethod def _del_attr(d, key): @@ -290,7 +289,7 @@ def set_value(cls, key, value, with_transaction=True, setting = cls() setting.key = key - setting.val = datetime_to_isoformat(value) + setting.val = timezone.datetime_to_isoformat(value) setting.time = timezone.datetime.now(tz=UTC) if "description" in other_attribs.keys(): setting.description = other_attribs["description"] @@ -300,7 +299,7 @@ def getvalue(self): """ This can be called on a given row and will get the corresponding value. 
""" - return isoformat_to_datetime(self.val) + return timezone.isoformat_to_datetime(self.val) def get_description(self): """ diff --git a/aiida/backends/djsite/db/subtests/migrations/test_migrations_0034_attributes_extras_settings_json.py b/aiida/backends/djsite/db/subtests/migrations/test_migrations_0034_attributes_extras_settings_json.py index 20d6fd1f05..a6c498e985 100644 --- a/aiida/backends/djsite/db/subtests/migrations/test_migrations_0034_attributes_extras_settings_json.py +++ b/aiida/backends/djsite/db/subtests/migrations/test_migrations_0034_attributes_extras_settings_json.py @@ -26,7 +26,7 @@ from django.utils.encoding import python_2_unicode_compatible from aiida.backends.djsite.db.subtests.migrations.test_migrations_common import TestMigrations -from aiida.backends.utils import isoformat_to_datetime +from aiida.common.timezone import isoformat_to_datetime # The following sample dictionary can be used for the conversion test of attributes and extras SAMPLE_DICT = { diff --git a/aiida/backends/djsite/utils.py b/aiida/backends/djsite/utils.py index 14971cbc74..23cd3f499c 100644 --- a/aiida/backends/djsite/utils.py +++ b/aiida/backends/djsite/utils.py @@ -7,7 +7,7 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### -# pylint: disable=no-name-in-module,no-member,import-error +# pylint: disable=no-name-in-module,no-member,import-error,cyclic-import """Utility functions specific to the Django backend.""" from __future__ import division from __future__ import print_function diff --git a/aiida/backends/sqlalchemy/utils.py b/aiida/backends/sqlalchemy/utils.py index 8c08f92b75..176d6d05a3 100644 --- a/aiida/backends/sqlalchemy/utils.py +++ b/aiida/backends/sqlalchemy/utils.py @@ -20,8 +20,9 @@ from aiida.backends import sqlalchemy as sa from aiida.backends.sqlalchemy import get_scoped_session -from aiida.backends.utils import validate_attribute_key, SettingsManager, Setting, isoformat_to_datetime, datetime_to_isoformat +from aiida.backends.utils import validate_attribute_key, SettingsManager, Setting from aiida.common import NotExistent +from aiida.common.timezone import isoformat_to_datetime, datetime_to_isoformat ALEMBIC_FILENAME = "alembic.ini" diff --git a/aiida/backends/utils.py b/aiida/backends/utils.py index 1d9e7d45c1..ff88fc93af 100644 --- a/aiida/backends/utils.py +++ b/aiida/backends/utils.py @@ -13,9 +13,6 @@ import abc import collections -import datetime -import dateutil -import re import six from aiida.backends import BACKEND_SQLA, BACKEND_DJANGO @@ -128,41 +125,3 @@ def delete_nodes_and_connections(pks): raise Exception("unknown backend {}".format(configuration.PROFILE.database_backend)) delete_nodes_backend(pks) - - -def datetime_to_isoformat(v): - """ - Transforms all datetime object into isoformat and then returns the final object. - """ - if isinstance(v, list): - return [datetime_to_isoformat(_) for _ in v] - elif isinstance(v, dict): - return dict((key, datetime_to_isoformat(val)) for key, val in v.items()) - elif isinstance(v, datetime.datetime): - return v.isoformat() - return v - - -date_reg = re.compile(r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+(\+\d{2}:\d{2})?$') - - -def isoformat_to_datetime(d): - """ - Parses each basestring as a datetime object and if it suceeds, converts it. 
- """ - if isinstance(d, list): - for i, val in enumerate(d): - d[i] = isoformat_to_datetime(val) - return d - elif isinstance(d, dict): - for k, v in d.items(): - d[k] = isoformat_to_datetime(v) - return d - elif isinstance(d, six.string_types): - if date_reg.match(d): - try: - return dateutil.parser.parse(d) - except (ValueError, TypeError): - return d - return d - return d diff --git a/aiida/common/timezone.py b/aiida/common/timezone.py index c95de03b68..181668fadc 100644 --- a/aiida/common/timezone.py +++ b/aiida/common/timezone.py @@ -13,6 +13,12 @@ from __future__ import absolute_import from datetime import datetime +import re +import six + +import dateutil + +ISOFORMAT_DATETIME_REGEX = re.compile(r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+(\+\d{2}:\d{2})?$') def get_current_timezone(): @@ -55,11 +61,9 @@ def is_aware(value): def make_aware(value, timezone=None, is_dst=None): - """ - Make the given datetime object timezone aware + """Make the given datetime object timezone aware. - :param value: The datetime to make aware - :type value: :class:`!datetime.datetime` + :param value: datetime object to make aware :param timezone: :param is_dst: :return: @@ -76,19 +80,19 @@ def make_aware(value, timezone=None, is_dst=None): def localtime(value, timezone=None): - """ - Converts an aware datetime.datetime to local time. - Local time is defined by the current time zone, unless another time zone - is specified. + """Converts an aware datetime.datetime to local time. + + Local time is defined by the current time zone, unless another time zone is specified. """ if timezone is None: timezone = get_current_timezone() - # If `value` is naive, astimezone() will raise a ValueError, - # so we don't need to perform a redundant check. + + # If `value` is naive, astimezone() will raise a ValueError, so we don't need to perform a redundant check. value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # This method is available for pytz time zones. value = timezone.normalize(value) + return value @@ -112,3 +116,42 @@ def delta(from_time, to_time=None): to_time_aware = to_time return to_time_aware - from_time_aware + + +def datetime_to_isoformat(value): + """Convert all datetime objects in the given value to string representations in ISO format. + + :param value: a mapping, sequence or single value optionally containing datetime objects + """ + if isinstance(value, list): + return [datetime_to_isoformat(_) for _ in value] + + if isinstance(value, dict): + return dict((key, datetime_to_isoformat(val)) for key, val in value.items()) + + if isinstance(value, datetime): + return value.isoformat() + + return value + + +def isoformat_to_datetime(value): + """Convert all string representations of a datetime in ISO format in the given value to datetime objects. 
+ + :param value: a mapping, sequence or single value optionally containing datetime objects + """ + if isinstance(value, list): + return [isoformat_to_datetime(_) for _ in value] + + if isinstance(value, dict): + return dict((key, isoformat_to_datetime(val)) for key, val in value.items()) + + if isinstance(value, six.string_types): + if ISOFORMAT_DATETIME_REGEX.match(value): + try: + return dateutil.parser.parse(value) + except (ValueError, TypeError): + return value + return value + + return value diff --git a/aiida/orm/importexport.py b/aiida/orm/importexport.py index 323f9de225..f6b6976c18 100644 --- a/aiida/orm/importexport.py +++ b/aiida/orm/importexport.py @@ -20,10 +20,10 @@ from six.moves.html_parser import HTMLParser from distutils.version import StrictVersion from aiida.common import exceptions +from aiida.common.timezone import datetime_to_isoformat from aiida.common.utils import export_shard_uuid, get_class_string, grouper, get_new_uuid from aiida.orm import Computer, Group, GroupTypeString, Node, QueryBuilder, User, Log, Comment from aiida.orm.utils.repository import Repository -from aiida.backends.utils import datetime_to_isoformat IMPORTGROUP_TYPE = GroupTypeString.IMPORTGROUP_TYPE.value DUPL_SUFFIX = ' (Imported #{})' From 3724bafd6103d70e9bb7c0bc5aca6529141c7496 Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Thu, 6 Jun 2019 22:45:57 +0200 Subject: [PATCH 04/11] Fix bug in the Django attributes and extras to JSONB migration The migration of the old attributes and extras from the EAV tables to a JSONB column on the node table used a lazy fetch method to fetch nodes in batches. However, the consecutive fetches did not sort on anything, so the same nodes could be fetched multiple times and, as a result, some nodes were never fetched at all. The attributes and extras of those nodes were therefore never migrated, which caused them to lose their data after the migration. Sorting the lazy fetches by the pk of the nodes guarantees that each and every node is migrated, and exactly once.
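The failure mode is easy to reproduce outside of Django. The toy sketch below (plain Python, no AiiDA imports; all names are illustrative) simulates consecutive OFFSET/LIMIT windows over a result set whose row order may change between queries:

    import random

    rows = list(range(10))  # stand-in for the primary keys of ten nodes

    def fetch_window(offset, limit, ordered):
        # One simulated query: an unordered query may permute the rows each time
        snapshot = sorted(rows) if ordered else random.sample(rows, len(rows))
        return snapshot[offset:offset + limit]

    def fetch_all(ordered):
        seen = []
        for offset in range(0, len(rows), 3):
            seen.extend(fetch_window(offset, 3, ordered))
        return seen

    print(sorted(fetch_all(ordered=True)) == rows)   # True: every pk lands in exactly one window
    print(sorted(fetch_all(ordered=False)) == rows)  # often False: duplicates and misses

A stable sort key makes every window deterministic, which is what the `order_by('id')` added in the hunks below restores.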
--- .../db/migrations/0037_attributes_extras_settings_json.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py b/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py index 994003725c..e1955195ec 100644 --- a/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py +++ b/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py @@ -48,7 +48,7 @@ def transition_attributes_extras(apps, _): return with click.progressbar(label='Updating attributes and extras', length=total_node_no, show_pos=True) as pr_bar: - fetcher = lazy_bulk_fetch(group_size, total_node_no, db_node_model.objects.all) + fetcher = lazy_bulk_fetch(group_size, total_node_no, db_node_model.objects.order_by('id').all) error = False for batch in fetcher: @@ -85,7 +85,7 @@ def transition_settings(apps, _): return with click.progressbar(label='Updating settings', length=total_settings_no, show_pos=True) as pr_bar: - fetcher = lazy_bulk_fetch(group_size, total_settings_no, db_setting_model.objects.all) + fetcher = lazy_bulk_fetch(group_size, total_settings_no, db_setting_model.objects.order_by('id').all) error = False for batch in fetcher: @@ -129,8 +129,7 @@ def attributes_to_dict(attr_list): try: tmp_d = select_from_key(a.key, d) except ValueError: - echo.echo_critical("Couldn't transfer attribute {} with key {} for dbnode {}".format( - a.id, a.key, a.dbnode_id)) + echo.echo_error("Couldn't transfer attribute {} with key {} for dbnode {}".format(a.id, a.key, a.dbnode_id)) error = True continue key = a.key.split('.')[-1] From b060df81e7015183984fb088c4d69ee4b2574614 Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Thu, 20 Jun 2019 08:48:18 +0200 Subject: [PATCH 05/11] Remove all support for datetime objects in attributes/extras Datetime objects were already not supported for SqlAlchemy, since that backend always used JSONB, which does not have native support for this datatype. Now that Django is also migrated to use JSONB, the old support for datetime objects in the EAV implementation needs to be removed. The `clean_value` utility function that pre-processes each value that is entered to be stored in the database now checks a whitelist of acceptable values, which are essentially the json-serializable ones, and raises an exception for anything else that is not recognized. The `aiida.common.timezone` module provides two utility functions, `datetime_to_isoformat` and `isoformat_to_datetime`, to serialize and deserialize datetime objects to and from strings that can be stored in the database.
Co-authored-by: Giovanni Pizzi --- .../djsite/db/migrations/0024_dblog_update.py | 4 +- .../0037_attributes_extras_settings_json.py | 12 +++--- aiida/backends/djsite/db/models.py | 15 +++---- ...s_0037_attributes_extras_settings_json.py} | 23 ++++++----- .../migrations/test_migrations_many.py | 2 +- aiida/backends/general/migrations/utils.py | 30 ++++++++++++++ aiida/backends/sqlalchemy/__init__.py | 4 +- .../versions/041a79fc615f_dblog_cleaning.py | 2 +- .../sqlalchemy/tests/test_migrations.py | 2 +- aiida/backends/sqlalchemy/utils.py | 18 --------- aiida/backends/tests/__init__.py | 2 +- .../tests/cmdline/commands/test_calcjob.py | 3 ++ .../tests/cmdline/commands/test_import.py | 3 ++ .../tests/cmdline/commands/test_process.py | 4 +- .../backends/tests/test_export_and_import.py | 5 --- aiida/backends/tests/test_nodes.py | 23 ----------- .../importexport/migration/test_migration.py | 3 ++ aiida/common/timezone.py | 40 +++---------------- aiida/engine/utils.py | 7 ++-- .../orm/implementation/django/querybuilder.py | 16 +------- .../implementation/sqlalchemy/querybuilder.py | 14 +------ aiida/orm/importexport.py | 29 +------------- .../orm/nodes/process/calculation/calcjob.py | 10 ++++- aiida/orm/utils/node.py | 21 ++++++++-- 24 files changed, 111 insertions(+), 181 deletions(-) rename aiida/backends/djsite/db/subtests/migrations/{test_migrations_0034_attributes_extras_settings_json.py => test_migrations_0037_attributes_extras_settings_json.py} (98%) diff --git a/aiida/backends/djsite/db/migrations/0024_dblog_update.py b/aiida/backends/djsite/db/migrations/0024_dblog_update.py index 533c94757e..08eb7005a2 100644 --- a/aiida/backends/djsite/db/migrations/0024_dblog_update.py +++ b/aiida/backends/djsite/db/migrations/0024_dblog_update.py @@ -24,6 +24,7 @@ # pylint: disable=no-name-in-module,import-error from django.db import migrations, models from aiida.backends.djsite.db.migrations import upgrade_schema_version +from aiida.backends.general.migrations.utils import dumps_json from aiida.common.utils import get_new_uuid from aiida.manage import configuration @@ -74,7 +75,6 @@ def get_logs_with_no_nodes_number(schema_editor): def get_serialized_legacy_workflow_logs(schema_editor): """ Get the serialized log records that correspond to legacy workflows """ - from aiida.backends.sqlalchemy.utils import dumps_json with schema_editor.connection.cursor() as cursor: cursor.execute((""" SELECT db_dblog.id, db_dblog.time, db_dblog.loggername, db_dblog.levelname, db_dblog.objpk, db_dblog.objname, @@ -91,7 +91,6 @@ def get_serialized_legacy_workflow_logs(schema_editor): def get_serialized_unknown_entity_logs(schema_editor): """ Get the serialized log records that correspond to unknown entities """ - from aiida.backends.sqlalchemy.utils import dumps_json with schema_editor.connection.cursor() as cursor: cursor.execute((""" SELECT db_dblog.id, db_dblog.time, db_dblog.loggername, db_dblog.levelname, db_dblog.objpk, db_dblog.objname, @@ -109,7 +108,6 @@ def get_serialized_unknown_entity_logs(schema_editor): def get_serialized_logs_with_no_nodes(schema_editor): """ Get the serialized log records that don't correspond to a node """ - from aiida.backends.sqlalchemy.utils import dumps_json with schema_editor.connection.cursor() as cursor: cursor.execute((""" SELECT db_dblog.id, db_dblog.time, db_dblog.loggername, db_dblog.levelname, db_dblog.objpk, db_dblog.objname, diff --git a/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py 
b/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py index e1955195ec..c5f43f20d5 100644 --- a/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py +++ b/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py @@ -98,8 +98,8 @@ def transition_settings(apps, _): val = curr_dbsetting.tval elif dt == "float": val = curr_dbsetting.fval - if math.isnan(val): - val = 'NaN' + if math.isnan(val) or math.isinf(val): + val = str(val) elif dt == "int": val = curr_dbsetting.ival elif dt == "bool": @@ -134,7 +134,7 @@ def attributes_to_dict(attr_list): continue key = a.key.split('.')[-1] - if key.isdigit(): + if isinstance(tmp_d, (list, tuple)): key = int(key) dt = a.datatype @@ -149,8 +149,8 @@ def attributes_to_dict(attr_list): val = a.tval elif dt == "float": val = a.fval - if math.isnan(val): - val = 'NaN' + if math.isnan(val) or math.isinf(val): + val = str(val) elif dt == "int": val = a.ival elif dt == "bool": @@ -172,7 +172,7 @@ def select_from_key(key, d): tmp_d = d for p in path: - if p.isdigit(): + if isinstance(tmp_d, (list, tuple)): tmp_d = tmp_d[int(p)] else: tmp_d = tmp_d[p] diff --git a/aiida/backends/djsite/db/models.py b/aiida/backends/djsite/db/models.py index a12ca01f92..13ab242b1f 100644 --- a/aiida/backends/djsite/db/models.py +++ b/aiida/backends/djsite/db/models.py @@ -145,13 +145,9 @@ def __init__(self, *args, **kwargs): if self.attributes is None: self.attributes = dict() - else: - self.attributes = timezone.datetime_to_isoformat(self.attributes) if self.extras is None: self.extras = dict() - else: - self.extras = timezone.datetime_to_isoformat(self.extras) def set_attribute(self, key, value): DbNode._set_attr(self.attributes, key, value) @@ -189,17 +185,16 @@ def del_extra(self, key): self.save() def get_attributes(self): - return timezone.isoformat_to_datetime(self.attributes) + return self.attributes def get_extras(self): - return timezone.isoformat_to_datetime(self.extras) + return self.extras @ staticmethod def _set_attr(d, key, value): if '.' in key: raise ValueError("We don't know how to treat key with dot in it yet") - # This is important in order to properly handle datetime objects - d[key] = timezone.datetime_to_isoformat(value) + d[key] = value @ staticmethod def _del_attr(d, key): @@ -289,7 +284,7 @@ def set_value(cls, key, value, with_transaction=True, setting = cls() setting.key = key - setting.val = timezone.datetime_to_isoformat(value) + setting.val = value setting.time = timezone.datetime.now(tz=UTC) if "description" in other_attribs.keys(): setting.description = other_attribs["description"] @@ -299,7 +294,7 @@ def getvalue(self): """ This can be called on a given row and will get the corresponding value. 
""" - return timezone.isoformat_to_datetime(self.val) + return self.val def get_description(self): """ diff --git a/aiida/backends/djsite/db/subtests/migrations/test_migrations_0034_attributes_extras_settings_json.py b/aiida/backends/djsite/db/subtests/migrations/test_migrations_0037_attributes_extras_settings_json.py similarity index 98% rename from aiida/backends/djsite/db/subtests/migrations/test_migrations_0034_attributes_extras_settings_json.py rename to aiida/backends/djsite/db/subtests/migrations/test_migrations_0037_attributes_extras_settings_json.py index a6c498e985..e0c7171b11 100644 --- a/aiida/backends/djsite/db/subtests/migrations/test_migrations_0034_attributes_extras_settings_json.py +++ b/aiida/backends/djsite/db/subtests/migrations/test_migrations_0037_attributes_extras_settings_json.py @@ -21,27 +21,28 @@ import six from six.moves import range -from dateutil.parser import parse from django.db import transaction from django.utils.encoding import python_2_unicode_compatible from aiida.backends.djsite.db.subtests.migrations.test_migrations_common import TestMigrations -from aiida.common.timezone import isoformat_to_datetime # The following sample dictionary can be used for the conversion test of attributes and extras SAMPLE_DICT = { 'bool': True, + '001': 2, + '17': 'string', 'integer': 12, 'float': 26.2, 'string': "a string", 'dict': { - "a": "b", - "sublist": [1, 2, 3], - "subdict": { - "c": "d" + '25': [True, False], + 'a': 'b', + 'sublist': [1, 2, 3], + 'subdict': { + 'c': 'd' } }, - 'list': [1, True, "ggg", { + 'list': [1, True, 'ggg', { 'h': 'j' }, [9, 8, 7]], } @@ -186,18 +187,20 @@ class TestSettingsToJSONMigration(TestMigrations): settings_info = dict() def setUpBeforeMigration(self): + from aiida.common import timezone + db_setting_model = self.apps.get_model('db', 'DbSetting') self.settings_info['2daemon|task_stop|updater2'] = dict( key='2daemon|task_stop|updater2', datatype='date', - dval='2018-07-27 15:12:24.382552+02', + dval=timezone.datetime_to_isoformat(timezone.now()), description='The last time the daemon finished to run ' 'the task \'updater\' (updater)') self.settings_info['2daemon|task_start|updater2'] = dict( key='2daemon|task_start|updater2', datatype='date', - dval='2018-07-27 15:12:45.264863+02', + dval=timezone.datetime_to_isoformat(timezone.now()), description='The last time the daemon started to run ' 'the task \'updater\' (updater)') self.settings_info['2db|backend2'] = dict( @@ -231,7 +234,7 @@ def test_settings_migration(self): if curr_setting_info['datatype'] == 'txt': self.assertEqual(curr_setting.val, curr_setting_info['tval']) elif curr_setting_info['datatype'] == 'date': - self.assertEqual(isoformat_to_datetime(curr_setting.val), parse(curr_setting_info['dval'])) + self.assertEqual(curr_setting.val, curr_setting_info['dval']) def tearDown(self): """ diff --git a/aiida/backends/djsite/db/subtests/migrations/test_migrations_many.py b/aiida/backends/djsite/db/subtests/migrations/test_migrations_many.py index b5fce5b0d2..c98587b1db 100644 --- a/aiida/backends/djsite/db/subtests/migrations/test_migrations_many.py +++ b/aiida/backends/djsite/db/subtests/migrations/test_migrations_many.py @@ -303,7 +303,7 @@ class TestDbLogMigrationRecordCleaning(TestMigrations): def setUpBeforeMigration(self): # pylint: disable=too-many-locals import json import importlib - from aiida.backends.sqlalchemy.utils import dumps_json + from aiida.backends.general.migrations.utils import dumps_json update_024 = 
importlib.import_module('aiida.backends.djsite.db.migrations.0024_dblog_update') diff --git a/aiida/backends/general/migrations/utils.py b/aiida/backends/general/migrations/utils.py index 3e744d28c8..01b1ddf76b 100644 --- a/aiida/backends/general/migrations/utils.py +++ b/aiida/backends/general/migrations/utils.py @@ -12,11 +12,19 @@ from __future__ import division from __future__ import print_function from __future__ import absolute_import + +import datetime import errno import io import os +import re + import numpy +from aiida.common import json + +ISOFORMAT_DATETIME_REGEX = re.compile(r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+(\+\d{2}:\d{2})?$') + def ensure_repository_folder_created(uuid): """Make sure that the repository sub folder for the node with the given UUID exists or create it. @@ -121,3 +129,25 @@ def load_numpy_array_from_repository(uuid, name): """ filepath = get_numpy_array_absolute_path(uuid, name) return numpy.load(filepath) + + +def recursive_datetime_to_isoformat(value): + """Convert all datetime objects in the given value to string representations in ISO format. + + :param value: a mapping, sequence or single value optionally containing datetime objects + """ + if isinstance(value, list): + return [recursive_datetime_to_isoformat(_) for _ in value] + + if isinstance(value, dict): + return dict((key, recursive_datetime_to_isoformat(val)) for key, val in value.items()) + + if isinstance(value, datetime.datetime): + return value.isoformat() + + return value + + +def dumps_json(dictionary): + """Transforms all datetime object into isoformat and then returns the JSON.""" + return json.dumps(recursive_datetime_to_isoformat(dictionary)) diff --git a/aiida/backends/sqlalchemy/__init__.py b/aiida/backends/sqlalchemy/__init__.py index 961e7056c1..6d39192a9d 100644 --- a/aiida/backends/sqlalchemy/__init__.py +++ b/aiida/backends/sqlalchemy/__init__.py @@ -56,8 +56,8 @@ def reset_session(profile=None): :param profile: the profile whose configuration to use to connect to the database """ from multiprocessing.util import register_after_fork + from aiida.common import json from aiida.manage.configuration import get_profile - from .utils import loads_json, dumps_json global ENGINE global SCOPED_SESSION_CLASS @@ -74,6 +74,6 @@ def reset_session(profile=None): port=profile.database_port, name=profile.database_name) - ENGINE = create_engine(engine_url, json_serializer=dumps_json, json_deserializer=loads_json, encoding='utf-8') + ENGINE = create_engine(engine_url, json_serializer=json.dumps, json_deserializer=json.loads, encoding='utf-8') SCOPED_SESSION_CLASS = scoped_session(sessionmaker(bind=ENGINE, expire_on_commit=True)) register_after_fork(ENGINE, recreate_after_fork) diff --git a/aiida/backends/sqlalchemy/migrations/versions/041a79fc615f_dblog_cleaning.py b/aiida/backends/sqlalchemy/migrations/versions/041a79fc615f_dblog_cleaning.py index 320f333c1b..30a45cfd2c 100644 --- a/aiida/backends/sqlalchemy/migrations/versions/041a79fc615f_dblog_cleaning.py +++ b/aiida/backends/sqlalchemy/migrations/versions/041a79fc615f_dblog_cleaning.py @@ -30,7 +30,7 @@ from sqlalchemy.sql import text from alembic import op -from aiida.backends.sqlalchemy.utils import dumps_json +from aiida.backends.general.migrations.utils import dumps_json from aiida.manage import configuration # revision identifiers, used by Alembic. 
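To make the behaviour of the relocated helpers concrete, here is a minimal sketch of `recursive_datetime_to_isoformat` and `dumps_json` from `aiida.backends.general.migrations.utils` as introduced above. It is not part of the patch, and the sample values are illustrative.

    import datetime

    from aiida.backends.general.migrations.utils import dumps_json, recursive_datetime_to_isoformat

    # A nested structure, as a node's attributes might be, containing naive datetimes
    record = {
        'time': datetime.datetime(2019, 6, 20, 8, 48, 18),
        'nested': [{'checked': datetime.datetime(2019, 6, 20)}, 'plain string'],
    }

    converted = recursive_datetime_to_isoformat(record)
    print(converted['time'])       # '2019-06-20T08:48:18'
    print(converted['nested'][0])  # {'checked': '2019-06-20T00:00:00'}
    print(dumps_json(record))      # a JSON string with all datetimes serialized

This keeps migration code independent of the serializers in `aiida.common.timezone`, which after this patch series operate on single values only.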
diff --git a/aiida/backends/sqlalchemy/tests/test_migrations.py b/aiida/backends/sqlalchemy/tests/test_migrations.py index b0c5e2e2b8..a5aeace92c 100644 --- a/aiida/backends/sqlalchemy/tests/test_migrations.py +++ b/aiida/backends/sqlalchemy/tests/test_migrations.py @@ -647,7 +647,7 @@ def setUpBeforeMigration(self): # pylint: disable=too-many-locals,too-many-statements import importlib from sqlalchemy.orm import Session # pylint: disable=import-error,no-name-in-module - from aiida.backends.sqlalchemy.utils import dumps_json + from aiida.backends.general.migrations.utils import dumps_json log_migration = importlib.import_module( 'aiida.backends.sqlalchemy.migrations.versions.041a79fc615f_dblog_cleaning') diff --git a/aiida/backends/sqlalchemy/utils.py b/aiida/backends/sqlalchemy/utils.py index 176d6d05a3..4cc8ec2da8 100644 --- a/aiida/backends/sqlalchemy/utils.py +++ b/aiida/backends/sqlalchemy/utils.py @@ -22,7 +22,6 @@ from aiida.backends.sqlalchemy import get_scoped_session from aiida.backends.utils import validate_attribute_key, SettingsManager, Setting from aiida.common import NotExistent -from aiida.common.timezone import isoformat_to_datetime, datetime_to_isoformat ALEMBIC_FILENAME = "alembic.ini" @@ -141,23 +140,6 @@ def unload_dbenv(): _aiida_autouser_cache = None -def dumps_json(d): - """ - Transforms all datetime object into isoformat and then returns the JSON - """ - from aiida.common import json - return json.dumps(datetime_to_isoformat(d)) - - -def loads_json(s): - """ - Loads the json and try to parse each basestring as a datetime object - """ - from aiida.common import json - ret = json.loads(s) - return isoformat_to_datetime(ret) - - # XXX the code here isn't different from the one use in Django. We may be able # to refactor it in some way def install_tc(session): diff --git a/aiida/backends/tests/__init__.py b/aiida/backends/tests/__init__.py index 71fe0de9b7..8b07ca83ab 100644 --- a/aiida/backends/tests/__init__.py +++ b/aiida/backends/tests/__init__.py @@ -23,7 +23,7 @@ 'nodes': ['aiida.backends.djsite.db.subtests.test_nodes'], 'migrations': [ 'aiida.backends.djsite.db.subtests.migrations.test_migrations_many', - 'aiida.backends.djsite.db.subtests.migrations.test_migrations_0034_attributes_extras_settings_json' + 'aiida.backends.djsite.db.subtests.migrations.test_migrations_0037_attributes_extras_settings_json' ], 'query': ['aiida.backends.djsite.db.subtests.test_query'], }, diff --git a/aiida/backends/tests/cmdline/commands/test_calcjob.py b/aiida/backends/tests/cmdline/commands/test_calcjob.py index 7a505f707c..c3b940e185 100644 --- a/aiida/backends/tests/cmdline/commands/test_calcjob.py +++ b/aiida/backends/tests/cmdline/commands/test_calcjob.py @@ -13,6 +13,8 @@ from __future__ import print_function from __future__ import absolute_import +import unittest + from click.testing import CliRunner from aiida import orm @@ -28,6 +30,7 @@ def get_result_lines(result): return [e for e in result.output.split('\n') if e] +@unittest.skip("Reenable when issue #3056 has been solved (migrate exported files to 0.6)") class TestVerdiCalculation(AiidaTestCase): """Tests for `verdi calcjob`.""" diff --git a/aiida/backends/tests/cmdline/commands/test_import.py b/aiida/backends/tests/cmdline/commands/test_import.py index ac3687a223..2551f15fce 100644 --- a/aiida/backends/tests/cmdline/commands/test_import.py +++ b/aiida/backends/tests/cmdline/commands/test_import.py @@ -12,6 +12,8 @@ from __future__ import print_function from __future__ import absolute_import +import unittest + from 
click.testing import CliRunner from click.exceptions import BadParameter @@ -21,6 +23,7 @@ from aiida.orm import Group +@unittest.skip("Reenable when issue #3056 has been solved (migrate exported files to 0.6)") class TestVerdiImport(AiidaTestCase): """Tests for `verdi import`.""" diff --git a/aiida/backends/tests/cmdline/commands/test_process.py b/aiida/backends/tests/cmdline/commands/test_process.py index d603200c00..75565ea7db 100644 --- a/aiida/backends/tests/cmdline/commands/test_process.py +++ b/aiida/backends/tests/cmdline/commands/test_process.py @@ -395,7 +395,7 @@ def test_list_worker_slot_warning(self): # Default cmd should not throw the warning as we are below the limit result = self.cli_runner.invoke(cmd_process.process_list) - self.assertIsNone(result.exception, result.output) + self.assertClickResultNoException(result) warning_phrase = "of the available daemon worker slots have been used!" self.assertTrue(all([warning_phrase not in line for line in get_result_lines(result)])) @@ -406,7 +406,7 @@ def test_list_worker_slot_warning(self): # Now the warning should fire result = self.cli_runner.invoke(cmd_process.process_list) - self.assertIsNone(result.exception, result.output) + self.assertClickResultNoException(result) warning_phrase = "% of the available daemon worker slots have been used!" self.assertTrue(any([warning_phrase in line for line in get_result_lines(result)])) diff --git a/aiida/backends/tests/test_export_and_import.py b/aiida/backends/tests/test_export_and_import.py index 9dcc93f413..6e7e0625d3 100644 --- a/aiida/backends/tests/test_export_and_import.py +++ b/aiida/backends/tests/test_export_and_import.py @@ -928,11 +928,6 @@ def get_hash_from_db_content(grouplabel): trial_dict.update({str(k): np.random.random() for k in range(10, 20)}) # give some booleans: trial_dict.update({str(k): bool(np.random.randint(1)) for k in range(20, 30)}) - # give some datetime: - trial_dict.update({str(k): datetime( - year=2017, - month=np.random.randint(1, 12), - day=np.random.randint(1, 28)) for k in range(30, 40)}) # give some text: trial_dict.update({str(k): ''.join(random.choice(chars) for _ in range(size)) for k in range(20, 30)}) diff --git a/aiida/backends/tests/test_nodes.py b/aiida/backends/tests/test_nodes.py index 9700e5d65c..1e81f18fe6 100644 --- a/aiida/backends/tests/test_nodes.py +++ b/aiida/backends/tests/test_nodes.py @@ -519,29 +519,6 @@ def test_append_no_side_effects(self): self.assertEquals(a.get_attribute('list'), [1, 2, 3, 4]) self.assertEquals(mylist, [1, 2, 3]) - def test_datetime_attribute(self): - from aiida.common.timezone import (get_current_timezone, is_naive, make_aware, now) - - a = orm.Data() - - date = now() - - a.set_attribute('some_date', date) - a.store() - - retrieved = a.get_attribute('some_date') - - if is_naive(date): - date_to_compare = make_aware(date, get_current_timezone()) - else: - date_to_compare = date - - # Do not compare microseconds (they are not stored in the case of MySQL) - date_to_compare = date_to_compare.replace(microsecond=0) - retrieved = retrieved.replace(microsecond=0) - - self.assertEquals(date_to_compare, retrieved) - def test_attributes_on_clone(self): import copy diff --git a/aiida/backends/tests/tools/importexport/migration/test_migration.py b/aiida/backends/tests/tools/importexport/migration/test_migration.py index eec37cfad8..5d43dbf768 100644 --- a/aiida/backends/tests/tools/importexport/migration/test_migration.py +++ b/aiida/backends/tests/tools/importexport/migration/test_migration.py @@ -13,6 +13,7 @@ from 
__future__ import absolute_import import os +import unittest from aiida import orm from aiida.backends.testbase import AiidaTestCase @@ -196,6 +197,7 @@ def test_migrate_newest_version(self): msg="migrate_recursively should not return anything, " "hence the 'return' should be None, but instead it is {}".format(new_version)) + @unittest.skip("Reenable when issue #3056 has been solved (migrate exported files to 0.6)") @with_temp_dir def test_v02_to_newest(self, temp_dir): """Test migration of exported files from v0.2 to newest export version""" @@ -250,6 +252,7 @@ def test_v02_to_newest(self, temp_dir): builder.append(orm.RemoteData, with_incoming='parent') self.assertGreater(len(builder.all()), 0) + @unittest.skip("Reenable when issue #3056 has been solved (migrate exported files to 0.6)") @with_temp_dir def test_v03_to_newest(self, temp_dir): """Test migration of exported files from v0.3 to newest export version""" diff --git a/aiida/common/timezone.py b/aiida/common/timezone.py index 181668fadc..9918dc5ae7 100644 --- a/aiida/common/timezone.py +++ b/aiida/common/timezone.py @@ -13,13 +13,8 @@ from __future__ import absolute_import from datetime import datetime -import re -import six - import dateutil -ISOFORMAT_DATETIME_REGEX = re.compile(r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+(\+\d{2}:\d{2})?$') - def get_current_timezone(): """Return the current timezone. @@ -119,39 +114,16 @@ def delta(from_time, to_time=None): def datetime_to_isoformat(value): - """Convert all datetime objects in the given value to string representations in ISO format. + """Convert a datetime object to string representations in ISO format. - :param value: a mapping, sequence or single value optionally containing datetime objects + :param value: a datetime object """ - if isinstance(value, list): - return [datetime_to_isoformat(_) for _ in value] - - if isinstance(value, dict): - return dict((key, datetime_to_isoformat(val)) for key, val in value.items()) - - if isinstance(value, datetime): - return value.isoformat() - - return value + return value.isoformat() def isoformat_to_datetime(value): - """Convert all string representations of a datetime in ISO format in the given value to datetime objects. + """Convert string representation of a datetime in ISO format to a datetime object. 
- :param value: a mapping, sequence or single value optionally containing datetime objects + :param value: a ISO format string representation of a datetime object """ - if isinstance(value, list): - return [isoformat_to_datetime(_) for _ in value] - - if isinstance(value, dict): - return dict((key, isoformat_to_datetime(val)) for key, val in value.items()) - - if isinstance(value, six.string_types): - if ISOFORMAT_DATETIME_REGEX.match(value): - try: - return dateutil.parser.parse(value) - except (ValueError, TypeError): - return value - return value - - return value + return dateutil.parser.parse(value) diff --git a/aiida/engine/utils.py b/aiida/engine/utils.py index 5e1ff76fee..6665526764 100644 --- a/aiida/engine/utils.py +++ b/aiida/engine/utils.py @@ -259,7 +259,7 @@ def set_process_state_change_timestamp(process): key = PROCESS_STATE_CHANGE_KEY.format(process_type) description = PROCESS_STATE_CHANGE_DESCRIPTION.format(process_type) - value = timezone.now() + value = timezone.datetime_to_isoformat(timezone.now()) try: manager = get_settings_manager() @@ -279,6 +279,7 @@ def get_process_state_change_timestamp(process_type=None): :return: a timestamp or None """ from aiida.backends.utils import get_settings_manager + from aiida.common import timezone from aiida.common.exceptions import NotExistent manager = get_settings_manager() @@ -297,9 +298,9 @@ def get_process_state_change_timestamp(process_type=None): for process_type_key in process_types: key = PROCESS_STATE_CHANGE_KEY.format(process_type_key) try: - timestamps.append(manager.get(key).time) + timestamps.append(timezone.isoformat_to_datetime(manager.get(key).value)) except NotExistent: - pass + continue if not timestamps: return None diff --git a/aiida/orm/implementation/django/querybuilder.py b/aiida/orm/implementation/django/querybuilder.py index 99064f790e..4be93a3966 100644 --- a/aiida/orm/implementation/django/querybuilder.py +++ b/aiida/orm/implementation/django/querybuilder.py @@ -8,15 +8,13 @@ # For further information please visit http://www.aiida.net # ########################################################################### """Django query builder""" - from __future__ import absolute_import from __future__ import division from __future__ import print_function import uuid -from datetime import datetime - import six + from aldjemy import core # Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed # pylint: disable=no-name-in-module, import-error @@ -291,18 +289,6 @@ def cast_according_to_type(path_in_json, value): elif value is None: type_filter = jsonb_typeof(path_in_json) == 'null' casted_entity = path_in_json.astext.cast(JSONB) # BOOLEANS? - elif isinstance(value, datetime): - # type filter here is filter whether this attributes stores - # a string and a filter whether this string - # is compatible with a datetime (using a regex) - # - What about historical values (BC, or before 1000AD)?? 
- # - Different ways to represent the timezone - - type_filter = jsonb_typeof(path_in_json) == 'string' - regex_filter = path_in_json.astext.op("SIMILAR TO")( - "\d\d\d\d-[0-1]\d-[0-3]\dT[0-2]\d:[0-5]\d:\d\d\.\d+((\+|\-)\d\d:\d\d)?") # pylint: disable=anomalous-backslash-in-string - type_filter = and_(type_filter, regex_filter) - casted_entity = path_in_json.cast(DateTime) else: raise TypeError('Unknown type {}'.format(type(value))) return type_filter, casted_entity diff --git a/aiida/orm/implementation/sqlalchemy/querybuilder.py b/aiida/orm/implementation/sqlalchemy/querybuilder.py index fe649af03e..129f76b10d 100644 --- a/aiida/orm/implementation/sqlalchemy/querybuilder.py +++ b/aiida/orm/implementation/sqlalchemy/querybuilder.py @@ -11,7 +11,7 @@ from __future__ import division from __future__ import print_function from __future__ import absolute_import -from datetime import datetime + import uuid import six @@ -304,18 +304,6 @@ def cast_according_to_type(path_in_json, value): elif value is None: type_filter = jsonb_typeof(path_in_json) == 'null' casted_entity = path_in_json.astext.cast(JSONB) # BOOLEANS? - elif isinstance(value, datetime): - # type filter here is filter whether this attributes stores - # a string and a filter whether this string - # is compatible with a datetime (using a regex) - # - What about historical values (BC, or before 1000AD)?? - # - Different ways to represent the timezone - - type_filter = jsonb_typeof(path_in_json) == 'string' - regex_filter = path_in_json.astext.op("SIMILAR TO")( - "\d\d\d\d-[0-1]\d-[0-3]\dT[0-2]\d:[0-5]\d:\d\d\.\d+((\+|\-)\d\d:\d\d)?") # pylint: disable=anomalous-backslash-in-string - type_filter = and_(type_filter, regex_filter) - casted_entity = path_in_json.cast(DateTime) else: raise TypeError('Unknown type {}'.format(type(value))) return type_filter, casted_entity diff --git a/aiida/orm/importexport.py b/aiida/orm/importexport.py index f6b6976c18..e9ffa39ddf 100644 --- a/aiida/orm/importexport.py +++ b/aiida/orm/importexport.py @@ -20,7 +20,6 @@ from six.moves.html_parser import HTMLParser from distutils.version import StrictVersion from aiida.common import exceptions -from aiida.common.timezone import datetime_to_isoformat from aiida.common.utils import export_shard_uuid, get_class_string, grouper, get_new_uuid from aiida.orm import Computer, Group, GroupTypeString, Node, QueryBuilder, User, Log, Comment from aiida.orm.utils.repository import Repository @@ -946,12 +945,7 @@ def import_data_dj(in_path, group=None, ignore_unknown_nodes=False, "for DbNode with UUID = {}".format(unique_id)) # Here I have to deserialize the attributes - deserialized_attributes = deserialize_attributes( - attributes, attributes_conversion) - if deserialized_attributes: - o.attributes = dict() - for k, v in deserialized_attributes.items(): - o.attributes[k] = datetime_to_isoformat(v) + o.attributes = deserialize_attributes(attributes, attributes_conversion) # For DbNodes, we also have to store its extras if extras_mode_new == 'import': @@ -978,9 +972,7 @@ def import_data_dj(in_path, group=None, ignore_unknown_nodes=False, deserialized_extras = {key: value for key, value in deserialized_extras.items() if not key == 'hidden'} # till here - o.extras = dict() - for k, v in deserialized_extras.items(): - o.extras[k] = datetime_to_isoformat(v) + o.extras = deserialized_extras elif extras_mode_new == 'none': if not silent: print("SKIPPING NEW NODE EXTRAS...") @@ -3084,20 +3076,3 @@ def export(what, outfile='export_data.aiida.tar.gz', overwrite=False, if not 
silent: print("DONE.") - -# Following code: to serialize the date directly when dumping into JSON. -# In our case, it is better to have a finer control on how to parse fields. - -# def default_jsondump(data): -# import datetime -# -# if isinstance(data, datetime.datetime): -# return data.strftime('%Y-%m-%dT%H:%M:%S.%f%z') -# -# raise TypeError(repr(data) + " is not JSON serializable") -# with open('testout.json', 'w') as f: -# json.dump({ -# 'entries': serialized_entries, -# }, -# f, -# default=default_jsondump) diff --git a/aiida/orm/nodes/process/calculation/calcjob.py b/aiida/orm/nodes/process/calculation/calcjob.py index 5d71156708..3b0d471cb4 100644 --- a/aiida/orm/nodes/process/calculation/calcjob.py +++ b/aiida/orm/nodes/process/calculation/calcjob.py @@ -403,7 +403,7 @@ def set_scheduler_state(self, state): raise ValueError('scheduler state should be an instance of JobState, got: {}'.format(state)) self.set_attribute(self.SCHEDULER_STATE_KEY, state.value) - self.set_attribute(self.SCHEDULER_LAST_CHECK_TIME_KEY, timezone.now()) + self.set_attribute(self.SCHEDULER_LAST_CHECK_TIME_KEY, timezone.datetime_to_isoformat(timezone.now())) def get_scheduler_state(self): """Return the status of the calculation according to the cluster scheduler. @@ -424,7 +424,13 @@ def get_scheduler_lastchecktime(self): :return: a datetime object or None """ - return self.get_attribute(self.SCHEDULER_LAST_CHECK_TIME_KEY, None) + from aiida.common import timezone + value = self.get_attribute(self.SCHEDULER_LAST_CHECK_TIME_KEY, None) + + if value is not None: + value = timezone.isoformat_to_datetime(value) + + return value def set_last_job_info(self, last_job_info): """Set the last job info. diff --git a/aiida/orm/utils/node.py b/aiida/orm/utils/node.py index 5096ba77e3..79f8e402c4 100644 --- a/aiida/orm/utils/node.py +++ b/aiida/orm/utils/node.py @@ -176,14 +176,26 @@ def clean_builtin(val): It mainly checks that we don't store NaN or Inf. """ + # This is a whitelist of all the things we understand currently + if val is None or isinstance(val, (bool, six.string_types)): + return val + + # This fixes #2773 - in python3, ``numpy.int64(-1)`` cannot be json-serialized + # Note that `numbers.Integral` also matches booleans but they are already returned above + if isinstance(val, numbers.Integral): + return int(val) + if isinstance(val, numbers.Real) and (math.isnan(val) or math.isinf(val)): # see https://www.postgresql.org/docs/current/static/datatype-json.html#JSON-TYPE-MAPPING-TABLE raise exceptions.ValidationError("nan and inf/-inf can not be serialized to the database") - # This fixes #2773 - in python3, ``numpy.int64(-1)`` cannot be json-serialized - if isinstance(val, numbers.Integral) and not isinstance(val, bool): - val = int(val) - return val + # This is for float-like types, like ``numpy.float128``, that are not json-serializable + # Note that `numbers.Real` also matches booleans but they are already returned above + if isinstance(val, numbers.Real): + return float(val) + + # Anything else we do not understand, so we refuse to store it + raise exceptions.ValidationError('type `{}` is not supported as it is not json-serializable'.format(type(val))) if isinstance(value, BaseType): return clean_builtin(value.value) @@ -191,6 +203,7 @@ def clean_builtin(val): if isinstance(value, Mapping): # Check dictionary before iterables return {k: clean_value(v) for k, v in value.items()} + if (isinstance(value, Iterable) and not isinstance(value, six.string_types)): # list, tuple, ... but not a string # This should also properly take care of dealing with the
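The hunks above replace the implicit datetime handling with explicit conversion at the call sites: datetime objects are serialized to ISO 8601 strings before they are stored in a JSONB attribute, and parsed back only where a caller actually needs a datetime. A minimal sketch of the resulting pattern, assuming a configured AiiDA profile (the `Data` node and the attribute key are purely illustrative):

from aiida import orm
from aiida.common import timezone

node = orm.Data()
# Serialize explicitly before storing; JSONB has no native datetime type
node.set_attribute('last_checked', timezone.datetime_to_isoformat(timezone.now()))
node.store()

# The attribute comes back as a plain ISO 8601 string ...
raw = node.get_attribute('last_checked')
# ... and is parsed back into a datetime object only at the boundary
last_checked = timezone.isoformat_to_datetime(raw)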
From 0f3a335541fbcb77268366b1b0a32794aa48a469 Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Thu, 20 Jun 2019 09:19:21 +0200 Subject: [PATCH 06/11] Remove stored restriction on setting node extras Extras could originally only be set and mutated on stored nodes. The reason for this limitation was that the original implementation of extras in Django used the custom EAV schema to set arbitrary key-value pairs on a node. Since this requires a foreign key to the node for any extra to be set, the corresponding node needed to be stored. Attributes, which used the same mechanism, had this restriction alleviated by an in-memory cache on the node instance while it was not stored. Since this cache was never implemented for extras, it was simply not allowed to set them on unstored nodes. Now that the Django implementation has switched from the custom EAV schema for attributes and extras to a JSONB field, the foreign key restriction no longer exists and the database model instance naturally comes with a cache while it is not stored. Therefore the restriction that a node must be stored before its extras can be set or mutated can now be lifted. Co-authored-by: Giovanni Pizzi
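A minimal sketch of the behaviour this commit enables, assuming a configured AiiDA profile (the `Data` node and the extra names are purely illustrative); before this change the first `set_extra` call below raised `ModificationNotAllowed`:

from aiida import orm

node = orm.Data()
node.set_extra('source', 'manual-entry')  # now allowed on an unstored node
assert node.get_extra('source') == 'manual-entry'

node.store()
node.set_extra('source', 'curated')  # extras remain mutable after storing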
--- aiida/backends/tests/test_nodes.py | 59 ++++++++++++++++-------------- aiida/orm/nodes/node.py | 28 -------------- 2 files changed, 31 insertions(+), 56 deletions(-) diff --git a/aiida/backends/tests/test_nodes.py b/aiida/backends/tests/test_nodes.py index 1e81f18fe6..a071d7aa22 100644 --- a/aiida/backends/tests/test_nodes.py +++ b/aiida/backends/tests/test_nodes.py @@ -811,39 +811,42 @@ def test_attr_with_reload(self): self.assertEquals(self.dictval, b.get_attribute('dict')) self.assertEquals(self.listval, b.get_attribute('list')) - # Reload directly - b = orm.Data.from_backend_entity(a.backend_entity) - self.assertIsNone(a.get_attribute('none')) - self.assertEquals(self.boolval, b.get_attribute('bool')) - self.assertEquals(self.intval, b.get_attribute('integer')) - self.assertEquals(self.floatval, b.get_attribute('float')) - self.assertEquals(self.stringval, b.get_attribute('string')) - self.assertEquals(self.dictval, b.get_attribute('dict')) - self.assertEquals(self.listval, b.get_attribute('list')) - - def test_attr_and_extras(self): + def test_extra_with_reload(self): a = orm.Data() - a.set_attribute('bool', self.boolval) - a.set_attribute('integer', self.intval) - a.set_attribute('float', self.floatval) - a.set_attribute('string', self.stringval) - a.set_attribute('dict', self.dictval) - a.set_attribute('list', self.listval) - - with self.assertRaises(ModificationNotAllowed): - # I did not store, I cannot modify - a.set_extra('bool', 'blablabla') + a.set_extra('none', None) + a.set_extra('bool', self.boolval) + a.set_extra('integer', self.intval) + a.set_extra('float', self.floatval) + a.set_extra('string', self.stringval) + a.set_extra('dict', self.dictval) + a.set_extra('list', self.listval) + + # Check before storing + self.assertEquals(self.boolval, a.get_extra('bool')) + self.assertEquals(self.intval, a.get_extra('integer')) + self.assertEquals(self.floatval, a.get_extra('float')) + self.assertEquals(self.stringval, a.get_extra('string')) + self.assertEquals(self.dictval, a.get_extra('dict')) + self.assertEquals(self.listval, a.get_extra('list')) a.store() - a_string = 'some non-boolean value' - # I now set an extra with the same name of an attr - a.set_extra('bool', a_string) - # and I check that there is no name clash - self.assertEquals(self.boolval, a.get_attribute('bool')) - self.assertEquals(a_string, a.get_extra('bool')) + # Check after storing + self.assertEquals(self.boolval, a.get_extra('bool')) + self.assertEquals(self.intval, a.get_extra('integer')) + self.assertEquals(self.floatval, a.get_extra('float')) + self.assertEquals(self.stringval, a.get_extra('string')) + self.assertEquals(self.dictval, a.get_extra('dict')) + self.assertEquals(self.listval, a.get_extra('list')) - self.assertEquals(a.extras, {'bool': a_string, '_aiida_hash': AnyValue()}) + b = orm.load_node(uuid=a.uuid) + self.assertIsNone(a.get_extra('none')) + self.assertEquals(self.boolval, b.get_extra('bool')) + self.assertEquals(self.intval, b.get_extra('integer')) + self.assertEquals(self.floatval, b.get_extra('float')) + self.assertEquals(self.stringval, b.get_extra('string')) + self.assertEquals(self.dictval, b.get_extra('dict')) + self.assertEquals(self.listval, b.get_extra('list')) def test_get_extras_with_default(self): a = orm.Data() diff --git a/aiida/orm/nodes/node.py b/aiida/orm/nodes/node.py index 31ff295989..929af69d09 100644 --- a/aiida/orm/nodes/node.py +++ b/aiida/orm/nodes/node.py @@ -119,12 +119,6 @@ def __init__(self, backend=None, user=None, computer=None, **kwargs): user = user.backend_entity if user else User.objects(backend).get_default() if user is None: - from aiida.manage.configuration import get_config, get_profile - config = get_config() - profile = get_profile() - print('CONFIG', config.dictionary) - print('PROFILE', profile.dictionary) - print('USERS', [user.email for user in User.objects(backend).all()]) raise ValueError('the user cannot be None') backend_entity = backend.nodes.create( @@ -526,11 +520,7 @@ def set_extra(self, key, value): :param key: name of the extra :param value: value of the extra - :raise aiida.common.ModificationNotAllowed: if the node is not stored """ - if not self.is_stored: - raise exceptions.ModificationNotAllowed('cannot set extras on unstored nodes') - self.backend_entity.set_extra(key, clean_value(value)) def set_extras(self, extras): @@ -540,9 +530,6 @@ def set_extras(self, extras): :param extras: the new extras to set """ - if not self.is_stored: - raise exceptions.ModificationNotAllowed('cannot set extras on unstored nodes') - self.backend_entity.set_extras(clean_value(extras)) def reset_extras(self, extras): @@ -552,12 +539,6 @@ def reset_extras(self, extras): :param extras: the new extras to set """ - if not self.is_stored: - raise exceptions.ModificationNotAllowed('cannot set extras on unstored nodes') - - if not isinstance(extras, dict): - raise TypeError('extras has to be a dictionary') - self.backend_entity.reset_extras(clean_value(extras)) def delete_extra(self, key): @@ -568,9 +549,6 @@ def delete_extra(self, key): :param key: name of the extra :raises AttributeError: if the extra does not exist """ - if not self.is_stored: - raise exceptions.ModificationNotAllowed('cannot delete extras on unstored nodes') - self.backend_entity.delete_extra(key) def delete_extras(self, keys): @@ -579,16 +557,10 @@ def delete_extras(self, keys): :param keys: names of the extras to delete :raises AttributeError: if at least one of the extras does not exist """ - if not self.is_stored: - raise exceptions.ModificationNotAllowed('cannot delete extras on unstored nodes') - self.backend_entity.delete_extras(keys) def clear_extras(self): """Delete all extras.""" - if not self.is_stored: - raise exceptions.ModificationNotAllowed('cannot clear the extras of unstored nodes') - self.backend_entity.clear_extras() def extras_items(self):
From 299e3ee8635874079f5bf2f3227545e0d3ebd465 Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Fri, 21 Jun 2019 16:25:25 +0200 Subject: [PATCH 07/11] Reimplement the `Node` and `BackendNode` attributes and extras interface Now that the Django implementation of the node attributes and extras uses a JSONB field, just like SqlAlchemy, the interface for interacting with these node properties can be homogenized and corrected. The clean division between the front end class `Node` and the back end class `BackendNode` also allows a clear separation of responsibilities: * `BackendNode`: ensures serializability of the values stored in the fields, as they will have to be stored as JSON. The implementation needs to ensure that cleaning of values is done as little as possible and scales linearly when appending to existing values. * `Node`: implements AiiDA-specific business logic, such as the rules for the mutability of attributes and the validity of key names. The use of the `BackendNode` allows us to remove all business logic from the database models, which in turn makes it possible to write exhaustive tests for the interface in a backend-independent way. Co-authored-by: Giovanni Pizzi
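A minimal sketch of the division of responsibilities described above, assuming a configured AiiDA profile (the `Data` node and the key names are purely illustrative):

from aiida import orm
from aiida.common import exceptions

node = orm.Data()

# `Node` enforces the business logic: attribute keys may not contain dots ...
try:
    node.set_attribute('illegal.key', 'value')
except exceptions.ValidationError:
    pass

# ... and attributes become immutable once the node is stored
node.set_attribute('valid_key', 'value')
node.store()
try:
    node.set_attribute('valid_key', 'other')
except exceptions.ModificationNotAllowed:
    pass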
--- .pre-commit-config.yaml | 3 - .../0037_attributes_extras_settings_json.py | 4 +- aiida/backends/djsite/db/models.py | 68 +-- .../backends/djsite/db/subtests/test_nodes.py | 146 ----- .../backends/djsite/db/subtests/test_query.py | 81 --- aiida/backends/sqlalchemy/models/node.py | 68 +-- .../backends/sqlalchemy/tests/test_generic.py | 38 +- aiida/backends/tests/__init__.py | 2 - .../tests/orm/implementation/test_nodes.py | 560 +++++++++++++++++- aiida/backends/tests/orm/node/test_node.py | 337 ++++++++++- aiida/backends/tests/test_dataclasses.py | 30 +- .../backends/tests/test_export_and_import.py | 2 +- aiida/backends/tests/test_nodes.py | 134 +---- aiida/backends/tests/test_query.py | 38 +- aiida/orm/implementation/django/convert.py | 4 +- aiida/orm/implementation/django/nodes.py | 274 ++++++--- .../orm/implementation/django/querybuilder.py | 10 +- aiida/orm/implementation/django/utils.py | 27 +- aiida/orm/implementation/nodes.py | 141 +++-- aiida/orm/implementation/sqlalchemy/nodes.py | 318 ++++++---- aiida/orm/implementation/sqlalchemy/utils.py | 24 +- aiida/orm/nodes/data/array/kpoints.py | 4 +- aiida/orm/nodes/data/data.py | 2 +- aiida/orm/nodes/data/structure.py | 16 +- aiida/orm/nodes/node.py | 340 +++++------ aiida/orm/querybuilder.py | 14 +- aiida/orm/utils/mixins.py | 77 +-- aiida/orm/utils/node.py | 19 +- .../source/developer_guide/core/internals.rst | 36 +- 29 files changed, 1694 insertions(+), 1123 deletions(-) delete mode 100644 aiida/backends/djsite/db/subtests/test_nodes.py delete mode 100644 aiida/backends/djsite/db/subtests/test_query.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0e3593a9be..2c69af7ddb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,10 +30,7 @@ aiida/backends/djsite/db/migrations/0017_drop_dbcalcstate.py| aiida/backends/djsite/db/migrations/__init__.py| aiida/backends/djsite/db/models.py| - aiida/backends/djsite/db/subtests/test_migrations.py| aiida/backends/djsite/db/subtests/test_generic.py| - aiida/backends/djsite/db/subtests/test_nodes.py| - aiida/backends/djsite/db/subtests/test_query.py| aiida/backends/djsite/__init__.py| aiida/backends/djsite/manage.py| aiida/backends/djsite/queries.py| diff --git a/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py b/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py index c5f43f20d5..ae23be72c5 100644 --- a/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py +++ b/aiida/backends/djsite/db/migrations/0037_attributes_extras_settings_json.py @@ -204,12 +204,12 @@ class Migration(migrations.Migration): migrations.AddField( model_name='dbnode', name='attributes', - field=django.contrib.postgres.fields.jsonb.JSONField(default=None, null=True), + field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, null=True), ), migrations.AddField( model_name='dbnode', name='extras', - field=django.contrib.postgres.fields.jsonb.JSONField(default=None, null=True), + field=django.contrib.postgres.fields.jsonb.JSONField(default=dict, null=True), ), # Migrate the data from the DbAttribute table to the JSONB field migrations.RunPython(transition_attributes_extras, reverse_code=migrations.RunPython.noop), diff --git a/aiida/backends/djsite/db/models.py b/aiida/backends/djsite/db/models.py index 13ab242b1f..2032ba25ef 100644 --- a/aiida/backends/djsite/db/models.py +++ b/aiida/backends/djsite/db/models.py @@ -132,78 +132,14 @@ class DbNode(m.Model): dbcomputer = m.ForeignKey('DbComputer', null=True, on_delete=m.PROTECT, related_name='dbnodes') # JSON Attributes - attributes = JSONField(default=None, null=True) + attributes = JSONField(default=dict, null=True) # JSON Extras - extras = JSONField(default=None, null=True) + extras = JSONField(default=dict, null=True) objects = m.Manager() # Return aiida Node instances or their subclasses instead of DbNode instances aiidaobjects = AiidaObjectManager() - def __init__(self, *args, **kwargs): - super(DbNode, self).__init__(*args, **kwargs) - - if self.attributes is None: - self.attributes = dict() - - if self.extras is None: - self.extras = dict() - - def set_attribute(self, key, value): - DbNode._set_attr(self.attributes, key, value) - self.save() - - def reset_attributes(self, attributes): - self.attributes = dict() - self.set_attributes(attributes) - - def set_attributes(self, attributes): - for key, value in attributes.items(): - DbNode._set_attr(self.attributes, key, value) - self.save() - - def set_extra(self, key, value): - DbNode._set_attr(self.extras, key, value) - self.save() - - def set_extras(self, extras): - for key, value in extras.items(): - DbNode._set_attr(self.extras, key, value) - self.save() - - def reset_extras(self, new_extras): - self.extras.clear() - self.extras.update(new_extras) - self.save() - - def del_attribute(self, key): - DbNode._del_attr(self.attributes, key) - self.save() - - def del_extra(self, key): - DbNode._del_attr(self.extras, key) - self.save() - - def get_attributes(self): - return self.attributes - - def get_extras(self): - return self.extras - - @staticmethod - def _set_attr(d, key, value): - if '.' in key: - raise ValueError("We don't know how to treat key with dot in it yet") - d[key] = value - - @staticmethod - def _del_attr(d, key): - if '.' in key: - raise ValueError("We don't know how to treat key with dot in it yet") - if key not in d: - raise AttributeError("Key {} does not exists".format(key)) - del d[key] - def get_simple_name(self, invalid_result=None): """ Return a string with the last part of the type name.
diff --git a/aiida/backends/djsite/db/subtests/test_nodes.py b/aiida/backends/djsite/db/subtests/test_nodes.py deleted file mode 100644 index 4e9980ddb7..0000000000 --- a/aiida/backends/djsite/db/subtests/test_nodes.py +++ /dev/null @@ -1,146 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -""" -Tests for nodes, attributes and links -""" - -from __future__ import division -from __future__ import print_function -from __future__ import absolute_import -from aiida.backends.testbase import AiidaTestCase -from aiida.orm import Data -from unittest import skip - - -class TestNodeBasicDjango(AiidaTestCase): - @skip("This seems not to be needed. It should be a more general tests for both backends.") - def test_replace_extras_2(self): - """ - This is a Django specific test which checks (manually) that, - when replacing list and dict with objects that have no deepness, - no junk is left in the DB (i.e., no 'dict.a', 'list.3.h', ... - """ - from aiida.backends.djsite.db.models import DbExtra - - a = Data().store() - extras_to_set = { - 'bool': True, - 'integer': 12, - 'float': 26.2, - 'string': "a string", - 'dict': {"a": "b", - "sublist": [1, 2, 3], - "subdict": { - "c": "d"}}, - 'list': [1, True, "ggg", {'h': 'j'}, [9, 8, 7]], - } - - # I redefine the keys with more complicated data, and - # changing the data type too - new_extras = { - 'bool': 12, - 'integer': [2, [3], 'a'], - 'float': {'n': 'm', 'x': [1, 'r', {}]}, - 'string': True, - 'dict': 'text', - 'list': 66.3, - } - - for k, v in extras_to_set.items(): - a.set_extra(k, v) - - for k, v in new_extras.items(): - # I delete one by one the keys and check if the operation is - # performed correctly - a.set_extra(k, v) - - # I update extras_to_set with the new entries, and do the comparison - # again - extras_to_set.update(new_extras) - - # Check (manually) that, when replacing list and dict with objects - # that have no deepness, no junk is left in the DB (i.e., no - # 'dict.a', 'list.3.h', ... - self.assertEquals(len(DbExtra.objects.filter( - dbnode=a.backend_entity.dbmodel, key__startswith=('list' + DbExtra._sep))), 0) - self.assertEquals(len(DbExtra.objects.filter( - dbnode=a.backend_entity.dbmodel, key__startswith=('dict' + DbExtra._sep))), 0) - - @skip("This seems not to be needed. 
It should be a more general tests for both backends.") - def test_attrs_and_extras_wrong_keyname(self): - """ - Attribute keys cannot include the separator symbol in the key - """ - from aiida.backends.djsite.db.models import DbAttributeBaseClass - from aiida.common.exceptions import ModificationNotAllowed, ValidationError - - separator = DbAttributeBaseClass._sep - - a = Data().store() - - with self.assertRaises(ModificationNotAllowed): - # Cannot change an attribute on a stored node - a.set_attribute('name' + separator, 'blablabla') - - with self.assertRaises(ValidationError): - # Cannot change an attribute on a stored node - a.set_attribute('name' + separator, 'blablabla', stored_check=False) - - with self.assertRaises(ValidationError): - # Passing an attribute key separator directly in the key is not allowed - a.set_extra('bool' + separator, 'blablabla') - - def test_settings(self): - """ - Test the settings table (similar to Attributes, but without the key. - """ - from aiida.backends.djsite.db import models - from django.db import IntegrityError, transaction - - models.DbSetting.set_value(key='pippo', value=[1, 2, 3]) - - s1 = models.DbSetting.objects.get(key='pippo') - - self.assertEqual(s1.getvalue(), [1, 2, 3]) - - s2 = models.DbSetting(key='pippo') - - sid = transaction.savepoint() - with self.assertRaises(IntegrityError): - # same name... - s2.save() - transaction.savepoint_rollback(sid) - - # Should replace pippo - models.DbSetting.set_value(key='pippo', value="a") - s1 = models.DbSetting.objects.get(key='pippo') - - self.assertEqual(s1.getvalue(), "a") - - def test_load_nodes(self): - """ - """ - from aiida.orm import load_node - - a = Data() - a.store() - self.assertEquals(a.pk, load_node(pk=a.pk).pk) - self.assertEquals(a.pk, load_node(uuid=a.uuid).pk) - - with self.assertRaises(ValueError): - load_node(identifier=a.pk, pk=a.pk) - with self.assertRaises(ValueError): - load_node(pk=a.pk, uuid=a.uuid) - with self.assertRaises(TypeError): - load_node(pk=a.uuid) - with self.assertRaises(TypeError): - load_node(uuid=a.pk) - with self.assertRaises(ValueError): - load_node() diff --git a/aiida/backends/djsite/db/subtests/test_query.py b/aiida/backends/djsite/db/subtests/test_query.py deleted file mode 100644 index 62cd6c30fb..0000000000 --- a/aiida/backends/djsite/db/subtests/test_query.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- -########################################################################### -# Copyright (c), The AiiDA team. All rights reserved. # -# This file is part of the AiiDA code. # -# # -# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core # -# For further information on the license, see the LICENSE.txt file # -# For further information please visit http://www.aiida.net # -########################################################################### -from __future__ import division -from __future__ import print_function -from __future__ import absolute_import -from aiida.backends.testbase import AiidaTestCase -from unittest import skip - - -class TestQueryBuilderDjango(AiidaTestCase): - - @skip("This test passes but we should see it is still valid under Django JSONB") - def test_clsf_django(self): - """ - This tests the classifications of the QueryBuilder u. the django backend. 
- """ - import aiida.backends.djsite.db.models as djmodels - from aiida.common.exceptions import DbContentError - from aiida.orm import QueryBuilder, Group, Node, Computer, Data, StructureData - qb = QueryBuilder() - - with self.assertRaises(DbContentError): - qb._get_ormclass(None, 'data') - with self.assertRaises(DbContentError): - qb._get_ormclass(None, 'data.Data') - with self.assertRaises(DbContentError): - qb._get_ormclass(None, '.') - - for cls, classifiers in ( - qb._get_ormclass(StructureData, None), - qb._get_ormclass(None, 'data.structure.StructureData.'), - ): - self.assertEqual(classifiers['ormclass_type_string'], 'data.structure.StructureData.') - self.assertTrue(issubclass(cls, djmodels.DbNode.sa)) - - for cls, classifiers in ( - qb._get_ormclass(djmodels.DbNode.sa, None), - ): - self.assertEqual(classifiers['ormclass_type_string'], Node._plugin_type_string) - self.assertTrue(issubclass(cls, djmodels.DbNode.sa)) - - for cls, classifiers in ( - qb._get_ormclass(djmodels.DbGroup.sa, None), - qb._get_ormclass(Group, None), - qb._get_ormclass(None, 'group'), - qb._get_ormclass(None, 'Group'), - ): - self.assertEqual(classifiers['ormclass_type_string'], 'group') - self.assertTrue(issubclass(cls, djmodels.DbGroup.sa)) - - for cls, classifiers in ( - qb._get_ormclass(djmodels.DbUser.sa, None), - qb._get_ormclass(djmodels.DbUser.sa, None), - qb._get_ormclass(None, "user"), - qb._get_ormclass(None, "User"), - ): - self.assertEqual(classifiers['ormclass_type_string'], 'user') - self.assertTrue(issubclass(cls, djmodels.DbUser.sa)) - - for cls, classifiers in ( - qb._get_ormclass(djmodels.DbComputer.sa, None), - qb._get_ormclass(Computer, None), - qb._get_ormclass(None, 'computer'), - qb._get_ormclass(None, 'Computer'), - ): - self.assertEqual(classifiers['ormclass_type_string'], 'computer') - self.assertTrue(issubclass(cls, djmodels.DbComputer.sa)) - - for cls, classifiers in ( - qb._get_ormclass(Data, None), - qb._get_ormclass(None, 'data.Data.'), - ): - self.assertEqual(classifiers['ormclass_type_string'], Data._plugin_type_string) - self.assertTrue(issubclass(cls, djmodels.DbNode.sa)) diff --git a/aiida/backends/sqlalchemy/models/node.py b/aiida/backends/sqlalchemy/models/node.py index 25ad815c5c..33408b41ec 100644 --- a/aiida/backends/sqlalchemy/models/node.py +++ b/aiida/backends/sqlalchemy/models/node.py @@ -7,14 +7,14 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### - from __future__ import division from __future__ import print_function from __future__ import absolute_import -from sqlalchemy import ForeignKey, select + +from sqlalchemy import ForeignKey from sqlalchemy.orm import relationship, backref from sqlalchemy.schema import Column -from sqlalchemy.types import Integer, String, Boolean, DateTime, Text +from sqlalchemy.types import Integer, String, DateTime, Text # Specific to PGSQL. 
If needed to be agnostic # http://docs.sqlalchemy.org/en/rel_0_9/core/custom_types.html?highlight=guid#backend-agnostic-guid-type # Or maybe rely on sqlalchemy-utils UUID type @@ -23,7 +23,6 @@ from aiida.common import timezone from aiida.backends.sqlalchemy.models.base import Base from aiida.common.utils import get_new_uuid -from aiida.backends.sqlalchemy.utils import flag_modified class DbNode(Base): @@ -123,7 +122,7 @@ def get_simple_name(self, invalid_result=None): :param invalid_result: The value to be returned if the node type is not recognized. """ - thistype = self.type + thistype = self.node_type # Fix for base class if thistype == "": thistype = "node.Node." @@ -133,65 +132,6 @@ def get_simple_name(self, invalid_result=None): thistype = thistype[:-1] # Strip final dot return thistype.rpartition('.')[2] - def set_attribute(self, key, value): - DbNode._set_attr(self.attributes, key, value) - flag_modified(self, "attributes") - self.save() - - def reset_attributes(self, attributes): - self.attributes = dict() - self.set_attributes(attributes) - - def set_attributes(self, attributes): - for key, value in attributes.items(): - DbNode._set_attr(self.attributes, key, value) - flag_modified(self, "attributes") - self.save() - - def set_extra(self, key, value): - DbNode._set_attr(self.extras, key, value) - flag_modified(self, "extras") - self.save() - - def set_extras(self, extras): - for key, value in extras.items(): - DbNode._set_attr(self.extras, key, value) - flag_modified(self, "extras") - self.save() - - def reset_extras(self, new_extras): - self.extras.clear() - self.extras.update(new_extras) - flag_modified(self, "extras") - self.save() - - def del_attribute(self, key): - DbNode._del_attr(self.attributes, key) - flag_modified(self, "attributes") - self.save() - - def del_extra(self, key): - DbNode._del_attr(self.extras, key) - flag_modified(self, "extras") - self.save() - - @staticmethod - def _set_attr(d, key, value): - if '.' in key: - raise ValueError("We don't know how to treat key with dot in it yet") - - d[key] = value - - @staticmethod - def _del_attr(d, key): - if '.' 
in key: - raise ValueError("We don't know how to treat key with dot in it yet") - - if key not in d: - raise AttributeError("Key {} does not exists".format(key)) - - del d[key] - @property def pk(self): return self.id diff --git a/aiida/backends/sqlalchemy/tests/test_generic.py b/aiida/backends/sqlalchemy/tests/test_generic.py index cd37237c62..1b316a7ef3 100644 --- a/aiida/backends/sqlalchemy/tests/test_generic.py +++ b/aiida/backends/sqlalchemy/tests/test_generic.py @@ -14,10 +14,11 @@ from __future__ import print_function from __future__ import absolute_import -from aiida.backends.testbase import AiidaTestCase -from aiida.orm import Data, Node from six.moves import range +from aiida.backends.testbase import AiidaTestCase +from aiida.orm import Data + class TestComputer(AiidaTestCase): """ @@ -183,36 +184,3 @@ def test_group_batch_size(self): group = Group(name='test_batches_' + str(batch_size)).store() group.backend_entity.add_nodes(nodes, skip_orm=True, batch_size=batch_size) self.assertEqual(set(_.pk for _ in nodes), set(_.pk for _ in group.nodes)) - - -class TestDbExtrasSqla(AiidaTestCase): - """ - Characterized functions - """ - - def test_replacement_1(self): - n1 = Data().store() - n2 = Data().store() - - n1.set_extra("pippo", [1, 2, u'a']) - - n1.set_extra("pippobis", [5, 6, u'c']) - - n2.set_extra("pippo2", [3, 4, u'b']) - - self.assertEqual(n1.extras, - {'pippo': [1, 2, u'a'], 'pippobis': [5, 6, u'c'], '_aiida_hash': n1.get_hash()}) - - self.assertEquals(n2.extras, {'pippo2': [3, 4, 'b'], '_aiida_hash': n2.get_hash()}) - - new_attrs = {"newval1": "v", "newval2": [1, {"c": "d", "e": 2}]} - - n1.reset_extras(new_attrs) - self.assertEquals(n1.extras, new_attrs) - self.assertEquals(n2.extras, {'pippo2': [3, 4, 'b'], '_aiida_hash': n2.get_hash()}) - - n1.delete_extra('newval2') - del new_attrs['newval2'] - self.assertEquals(n1.extras, new_attrs) - # Also check that other nodes were not damaged - self.assertEquals(n2.extras, {'pippo2': [3, 4, 'b'], '_aiida_hash': n2.get_hash()}) diff --git a/aiida/backends/tests/__init__.py b/aiida/backends/tests/__init__.py index 8b07ca83ab..e633a2eaf0 100644 --- a/aiida/backends/tests/__init__.py +++ b/aiida/backends/tests/__init__.py @@ -20,12 +20,10 @@ DB_TEST_LIST = { BACKEND_DJANGO: { 'generic': ['aiida.backends.djsite.db.subtests.test_generic'], - 'nodes': ['aiida.backends.djsite.db.subtests.test_nodes'], 'migrations': [ 'aiida.backends.djsite.db.subtests.migrations.test_migrations_many', 'aiida.backends.djsite.db.subtests.migrations.test_migrations_0037_attributes_extras_settings_json' ], - 'query': ['aiida.backends.djsite.db.subtests.test_query'], }, BACKEND_SQLA: { 'generic': ['aiida.backends.sqlalchemy.tests.test_generic'], diff --git a/aiida/backends/tests/orm/implementation/test_nodes.py b/aiida/backends/tests/orm/implementation/test_nodes.py index b1639665a3..6bdbf45bec 100644 --- a/aiida/backends/tests/orm/implementation/test_nodes.py +++ b/aiida/backends/tests/orm/implementation/test_nodes.py @@ -7,16 +7,19 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### +# pylint: disable=too-many-public-methods """Unit tests for the BackendNode and BackendNodeCollection classes.""" from __future__ import division from __future__ import print_function from __future__ import absolute_import +from collections import OrderedDict from datetime import datetime from uuid import UUID from 
aiida.backends.testbase import AiidaTestCase from aiida.common import timezone +from aiida.common import exceptions class TestBackendNode(AiidaTestCase): @@ -40,6 +43,9 @@ def setUp(self): label=self.node_label, description=self.node_description) + def create_node(self): + return self.backend.nodes.create(node_type=self.node_type, user=self.user) + def test_creation(self): """Test creation of a BackendNode and all its properties.""" node = self.backend.nodes.create( @@ -169,38 +175,554 @@ def test_user_methods(self): self.node.user = new_user self.assertEqual(self.node.user.id, new_user.id) - def test_attributes(self): - """Test the attribute properties of a BackendNode.""" - attribute_name = 'attribute' - attribute_value = 'nobatnight' + def test_get_set_attribute(self): + """Test the `get_attribute` and `set_attribute` methods of a BackendNode.""" + attribute_1_name = 'a' + attribute_2_name = 'b' + attribute_3_name = 'c' + attribute_1_value = '1' + attribute_2_value = '2' + attribute_3_value = '3' with self.assertRaises(AttributeError): - self.node.get_attribute(attribute_name) + self.node.get_attribute(attribute_1_name) + + self.assertFalse(self.node.is_stored) + self.node.set_attribute(attribute_1_name, attribute_1_value) - self.assertEqual(self.node.attributes, dict()) + # Check that the attribute is set, but the node is not stored + self.assertFalse(self.node.is_stored) + self.assertEqual(self.node.get_attribute(attribute_1_name), attribute_1_value) self.node.store() - self.node.set_attribute(attribute_name, attribute_value) - self.assertEqual(self.node.get_attribute(attribute_name), attribute_value) - self.node.delete_attribute(attribute_name) + # Check that the attribute is set, and the node is stored + self.assertTrue(self.node.is_stored) + self.assertEqual(self.node.get_attribute(attribute_1_name), attribute_1_value) + + self.node.set_attribute(attribute_2_name, attribute_2_value) + self.assertEqual(self.node.get_attribute(attribute_1_name), attribute_1_value) + self.assertEqual(self.node.get_attribute(attribute_2_name), attribute_2_value) + + reloaded = self.backend.nodes.get(self.node.pk) + self.assertEqual(self.node.get_attribute(attribute_1_name), attribute_1_value) + self.assertEqual(self.node.get_attribute(attribute_2_name), attribute_2_value) + + reloaded.set_attribute(attribute_3_name, attribute_3_value) + self.assertEqual(reloaded.get_attribute(attribute_1_name), attribute_1_value) + self.assertEqual(reloaded.get_attribute(attribute_2_name), attribute_2_value) + self.assertEqual(reloaded.get_attribute(attribute_3_name), attribute_3_value) + + # Check deletion of a single attribute + reloaded.delete_attribute(attribute_1_name) with self.assertRaises(AttributeError): - self.node.get_attribute(attribute_name) + reloaded.get_attribute(attribute_1_name) - def test_extras(self): - """Test the extra properties of a BackendNode.""" - extra_name = 'extra' - extra_value = 'nobatnight' + self.assertEqual(reloaded.get_attribute(attribute_2_name), attribute_2_value) + self.assertEqual(reloaded.get_attribute(attribute_3_name), attribute_3_value) + + with self.assertRaises(AttributeError): + self.node.get_attribute(attribute_1_name) + + def test_get_set_extras(self): + """Test the `get_extra` and `set_extra` methods of a BackendNode.""" + extra_1_name = 'a' + extra_2_name = 'b' + extra_3_name = 'c' + extra_1_value = '1' + extra_2_value = '2' + extra_3_value = '3' with self.assertRaises(AttributeError): - self.node.get_extra(extra_name) + self.node.get_extra(extra_1_name) + +
self.assertFalse(self.node.is_stored) + self.node.set_extra(extra_1_name, extra_1_value) + + # Check that the extra is set, but the node is not stored + self.assertFalse(self.node.is_stored) + self.assertEqual(self.node.get_extra(extra_1_name), extra_1_value) self.node.store() + # Check that the extra is set, and the node is stored + self.assertTrue(self.node.is_stored) + self.assertEqual(self.node.get_extra(extra_1_name), extra_1_value) + + self.node.set_extra(extra_2_name, extra_2_value) + self.assertEqual(self.node.get_extra(extra_1_name), extra_1_value) + self.assertEqual(self.node.get_extra(extra_2_name), extra_2_value) + + reloaded = self.backend.nodes.get(self.node.pk) + self.assertEqual(self.node.get_extra(extra_1_name), extra_1_value) + self.assertEqual(self.node.get_extra(extra_2_name), extra_2_value) + + reloaded.set_extra(extra_3_name, extra_3_value) + self.assertEqual(reloaded.get_extra(extra_1_name), extra_1_value) + self.assertEqual(reloaded.get_extra(extra_2_name), extra_2_value) + self.assertEqual(reloaded.get_extra(extra_3_name), extra_3_value) + + # Check deletion of a single extra + reloaded.delete_extra(extra_1_name) + + with self.assertRaises(AttributeError): + reloaded.get_extra(extra_1_name) + + self.assertEqual(reloaded.get_extra(extra_2_name), extra_2_value) + self.assertEqual(reloaded.get_extra(extra_3_name), extra_3_value) + + with self.assertRaises(AttributeError): + self.node.get_extra(extra_1_name) + + def test_attributes(self): + """Test the `BackendNode.attributes` property.""" + node = self.create_node() + self.assertEqual(node.attributes, {}) + node.set_attribute('attribute', 'value') + self.assertEqual(node.attributes, {'attribute': 'value'}) + + node.store() + self.assertEqual(node.attributes, {'attribute': 'value'}) + + def test_get_attribute(self): + """Test the `BackendNode.get_attribute` method.""" + node = self.create_node() + + with self.assertRaises(AttributeError): + node.get_attribute('attribute') + + node.set_attribute('attribute', 'value') + self.assertEqual(node.get_attribute('attribute'), 'value') + + node.store() + self.assertEqual(node.get_attribute('attribute'), 'value') + + def test_get_attribute_many(self): + """Test the `BackendNode.get_attribute_many` method.""" + node = self.create_node() + + with self.assertRaises(AttributeError): + node.get_attribute_many(['attribute']) + + node.set_attribute_many({'attribute': 'value', 'another': 'case'}) + + with self.assertRaises(AttributeError): + node.get_attribute_many(['attribute', 'unexisting']) + + self.assertEqual(node.get_attribute_many(['attribute', 'another']), ['value', 'case']) + + node.store() + self.assertEqual(node.get_attribute_many(['attribute', 'another']), ['value', 'case']) + + def test_set_attribute(self): + """Test the `BackendNode.set_attribute` method.""" + node = self.create_node() + + # When not stored, `set_attribute` will not clean values, so the following should be allowed + node.set_attribute('attribute_invalid', object()) + node.set_attribute('attribute_valid', 'value') + + # Calling store should cause the values to be cleaned which should raise + with self.assertRaises(exceptions.ValidationError): + node.store() + + # Replace the original invalid with a valid value + node.set_attribute('attribute_invalid', 'actually valid') + node.store() + self.assertEqual(node.get_attribute_many(['attribute_invalid', 'attribute_valid']),
['actually valid', 'value']) + + # Raises immediately when setting it if already stored + with self.assertRaises(exceptions.ValidationError): + node.set_attribute('attribute', object()) + + def test_set_attribute_many(self): + """Test the `BackendNode.set_attribute_many` method.""" + # Calling `set_attribute_many` on an unstored node + node = self.create_node() + + # When not stored, `set_attribute` will not clean values, so the following should be allowed + node.set_attribute_many({'attribute_invalid': object(), 'attribute_valid': 'value'}) + + # Calling store should cause the values to be cleaned which should raise + with self.assertRaises(exceptions.ValidationError): + node.store() + + # Replace the original invalid with a valid value + node.set_attribute_many({'attribute_invalid': 'actually valid'}) + node.store() + self.assertEqual(node.get_attribute_many(['attribute_invalid', 'attribute_valid']), ['actually valid', 'value']) + + attributes = OrderedDict() + attributes['another_attribute'] = 'value' + attributes['attribute_invalid'] = object() + + # Raises immediately when setting it if already stored + with self.assertRaises(exceptions.ValidationError): + node.set_attribute_many(attributes) + + self.assertTrue('another_attribute' not in node.attributes) + + attributes = {'attribute_one': 1, 'attribute_two': 2} + # Calling `set_attribute_many` on a stored node + node = self.create_node() + node.store() + + node.set_attribute_many(attributes) + self.assertEqual(node.attributes, attributes) + + def test_reset_attributes(self): + """Test the `BackendNode.reset_attributes` method.""" + node = self.create_node() + attributes_before = {'attribute_one': 1, 'attribute_two': 2} + attributes_after = {'attribute_three': 3, 'attribute_four': 4} + + # Reset attributes on an unstored node + node.set_attribute_many(attributes_before) + self.assertEqual(node.attributes, attributes_before) + + node.reset_attributes(attributes_after) + self.assertEqual(node.attributes, attributes_after) + + # Reset attributes on stored node + node = self.create_node() + node.store() + + node.set_attribute_many(attributes_before) + self.assertEqual(node.attributes, attributes_before) + + node.reset_attributes(attributes_after) + self.assertEqual(node.attributes, attributes_after) + + def test_delete_attribute(self): + """Test the `BackendNode.delete_attribute` method.""" + node = self.create_node() + + with self.assertRaises(AttributeError): + node.delete_attribute('notexisting') + + node.set_attribute('attribute', 'value') + node.delete_attribute('attribute') + self.assertEqual(node.attributes, {}) + + # Now for a stored node + node = self.create_node().store() + + with self.assertRaises(AttributeError): + node.delete_attribute('notexisting') + + node.set_attribute('attribute', 'value') + node.delete_attribute('attribute') + self.assertEqual(node.attributes, {}) + + def test_delete_attribute_many(self): + """Test the `BackendNode.delete_attribute_many` method.""" + node = self.create_node() + attributes = {'attribute_one': 1, 'attribute_two': 2} + + with self.assertRaises(AttributeError): + node.delete_attribute_many(['notexisting', 'some']) + + node.set_attribute_many(attributes) + + with self.assertRaises(AttributeError): + node.delete_attribute_many(['attribute_one', 'notexisting']) + + # Because one key failed during delete, none of the attributes should have been deleted + self.assertTrue('attribute_one' in node.attributes) + + # Now delete the keys that actually should exist + 
node.delete_attribute_many(attributes.keys()) + self.assertEqual(node.attributes, {}) + + # Now for a stored node + node = self.create_node().store() + + with self.assertRaises(AttributeError): + node.delete_attribute_many(['notexisting', 'some']) + + node.set_attribute_many(attributes) + + with self.assertRaises(AttributeError): + node.delete_attribute_many(['attribute_one', 'notexisting']) + + # Because one key failed during delete, none of the attributes should have been deleted + self.assertTrue('attribute_one' in node.attributes) + + # Now delete the keys that actually should exist + node.delete_attribute_many(attributes.keys()) + self.assertEqual(node.attributes, {}) + + def test_clear_attributes(self): + """Test the `BackendNode.clear_attributes` method.""" + node = self.create_node() + attributes = {'attribute_one': 1, 'attribute_two': 2} + node.set_attribute_many(attributes) + + self.assertEqual(node.attributes, attributes) + node.clear_attributes() + self.assertEqual(node.attributes, {}) + + # Now for a stored node + node = self.create_node().store() + node.set_attribute_many(attributes) + + self.assertEqual(node.attributes, attributes) + node.clear_attributes() + self.assertEqual(node.attributes, {}) + + def test_attribute_items(self): + """Test the `BackendNode.attributes_items` generator.""" + node = self.create_node() + attributes = {'attribute_one': 1, 'attribute_two': 2} + + node.set_attribute_many(attributes) + self.assertEqual(attributes, dict(node.attributes_items())) + + # Repeat for a stored node + node = self.create_node().store() + attributes = {'attribute_one': 1, 'attribute_two': 2} + + node.set_attribute_many(attributes) + self.assertEqual(attributes, dict(node.attributes_items())) + + def test_attribute_keys(self): + """Test the `BackendNode.attributes_keys` generator.""" + node = self.create_node() + attributes = {'attribute_one': 1, 'attribute_two': 2} + + node.set_attribute_many(attributes) + self.assertEqual(set(attributes), set(node.attributes_keys())) + + # Repeat for a stored node + node = self.create_node().store() + attributes = {'attribute_one': 1, 'attribute_two': 2} + + node.set_attribute_many(attributes) + self.assertEqual(set(attributes), set(node.attributes_keys())) + + def test_extras(self): + """Test the `BackendNode.extras` property.""" + node = self.create_node() + self.assertEqual(node.extras, {}) + node.set_extra('extra', 'value') + self.assertEqual(node.extras, {'extra': 'value'}) + + node.store() + self.assertEqual(node.extras, {'extra': 'value'}) + + def test_get_extra(self): + """Test the `BackendNode.get_extra` method.""" + node = self.create_node() + + with self.assertRaises(AttributeError): + node.get_extra('extra') + + node.set_extra('extra', 'value') + self.assertEqual(node.get_extra('extra'), 'value') + + node.store() + self.assertEqual(node.get_extra('extra'), 'value') + + def test_get_extra_many(self): + """Test the `BackendNode.get_extra_many` method.""" + node = self.create_node() with self.assertRaises(AttributeError): + node.get_extra_many(['extra']) + + node.set_extra_many({'extra': 'value', 'another': 'case'}) + + with self.assertRaises(AttributeError): + node.get_extra_many(['extra', 'unexisting']) + + self.assertEqual(node.get_extra_many(['extra', 'another']), ['value', 'case']) + + node.store() + self.assertEqual(node.get_extra_many(['extra', 'another']), ['value', 'case']) + + def test_set_extra(self): + """Test the `BackendNode.set_extra` method.""" + node = self.create_node() + + # When
not stored, `set_extra` will not clean values, so the following should be allowed + node.set_extra('extra_invalid', object()) + node.set_extra('extra_valid', 'value') + + # Calling store should cause the values to be cleaned which should raise + with self.assertRaises(exceptions.ValidationError): + node.store() + + # Replace the original invalid with a valid value + node.set_extra('extra_invalid', 'actually valid') + node.store() + self.assertEqual(node.get_extra_many(['extra_invalid', 'extra_valid']), ['actually valid', 'value']) + + # Raises immediately when setting it if already stored + with self.assertRaises(exceptions.ValidationError): + node.set_extra('extra', object()) + + def test_set_extra_many(self): + """Test the `BackendNode.set_extra_many` method.""" + # Calling `set_extra_many` on an unstored node + node = self.create_node() + + # When not stored, `set_extra` will not clean values, so the following should be allowed + node.set_extra_many({'extra_invalid': object(), 'extra_valid': 'value'}) + + # Calling store should cause the values to be cleaned which should raise + with self.assertRaises(exceptions.ValidationError): + node.store() + + # Replace the original invalid with a valid value + node.set_extra_many({'extra_invalid': 'actually valid'}) + node.store() + self.assertEqual(node.get_extra_many(['extra_invalid', 'extra_valid']), ['actually valid', 'value']) + + extras = OrderedDict() + extras['another_extra'] = 'value' + extras['extra_invalid'] = object() + + # Raises immediately when setting it if already stored + with self.assertRaises(exceptions.ValidationError): + node.set_extra_many(extras) + + self.assertTrue('another_extra' not in node.extras) + + extras = {'extra_one': 1, 'extra_two': 2} + # Calling `set_extra_many` on a stored node + node = self.create_node() + node.store() + + node.set_extra_many(extras) + self.assertEqual(node.extras, extras) + + def test_reset_extras(self): + """Test the `BackendNode.reset_extras` method.""" + node = self.create_node() + extras_before = {'extra_one': 1, 'extra_two': 2} + extras_after = {'extra_three': 3, 'extra_four': 4} + + # Reset extras on an unstored node + node.set_extra_many(extras_before) + self.assertEqual(node.extras, extras_before) + + node.reset_extras(extras_after) + self.assertEqual(node.extras, extras_after) + + # Reset extras on stored node + node = self.create_node() + node.store() + + node.set_extra_many(extras_before) + self.assertEqual(node.extras, extras_before) + + node.reset_extras(extras_after) + self.assertEqual(node.extras, extras_after) + + def test_delete_extra(self): + """Test the `BackendNode.delete_extra` method.""" + node = self.create_node() + + with self.assertRaises(AttributeError): + node.delete_extra('notexisting') + + node.set_extra('extra', 'value') + node.delete_extra('extra') + self.assertEqual(node.extras, {}) + + # Now for a stored node + node = self.create_node().store() + + with self.assertRaises(AttributeError): + node.delete_extra('notexisting') + + node.set_extra('extra', 'value') + node.delete_extra('extra') + self.assertEqual(node.extras, {}) + + def test_delete_extra_many(self): + """Test the `BackendNode.delete_extra_many` method.""" + node = self.create_node() + extras = {'extra_one': 1, 'extra_two': 2} + + with self.assertRaises(AttributeError): + node.delete_extra_many(['notexisting', 'some']) + + node.set_extra_many(extras) + + with self.assertRaises(AttributeError): + node.delete_extra_many(['extra_one', 'notexisting']) + + # Because one key failed during delete, none of 
the extras should have been deleted + self.assertTrue('extra_one' in node.extras) + + # Now delete the keys that actually should exist + node.delete_extra_many(extras.keys()) + self.assertEqual(node.extras, {}) + + # Now for a stored node + node = self.create_node().store() + + with self.assertRaises(AttributeError): + node.delete_extra_many(['notexisting', 'some']) + + node.set_extra_many(extras) + + with self.assertRaises(AttributeError): + node.delete_extra_many(['extra_one', 'notexisting']) + + # Because one key failed during delete, none of the extras should have been deleted + self.assertTrue('extra_one' in node.extras) + + # Now delete the keys that actually should exist + node.delete_extra_many(extras.keys()) + self.assertEqual(node.extras, {}) + + def test_clear_extras(self): + """Test the `BackendNode.clear_extras` method.""" + node = self.create_node() + extras = {'extra_one': 1, 'extra_two': 2} + node.set_extra_many(extras) + + self.assertEqual(node.extras, extras) + node.clear_extras() + self.assertEqual(node.extras, {}) + + # Now for a stored node + node = self.create_node().store() + node.set_extra_many(extras) + + self.assertEqual(node.extras, extras) + node.clear_extras() + self.assertEqual(node.extras, {}) + + def test_extra_items(self): + """Test the `BackendNode.extras_items` generator.""" + node = self.create_node() + extras = {'extra_one': 1, 'extra_two': 2} + + node.set_extra_many(extras) + self.assertEqual(extras, dict(node.extras_items())) + + # Repeat for a stored node + node = self.create_node().store() + extras = {'extra_one': 1, 'extra_two': 2} + + node.set_extra_many(extras) + self.assertEqual(extras, dict(node.extras_items())) + + def test_extra_keys(self): + """Test the `BackendNode.extras_keys` generator.""" + node = self.create_node() + extras = {'extra_one': 1, 'extra_two': 2} + + node.set_extra_many(extras) + self.assertEqual(set(extras), set(node.extras_keys())) + + # Repeat for a stored node + node = self.create_node().store() + extras = {'extra_one': 1, 'extra_two': 2} + + node.set_extra_many(extras) + self.assertEqual(set(extras), set(node.extras_keys())) diff --git a/aiida/backends/tests/orm/node/test_node.py b/aiida/backends/tests/orm/node/test_node.py index 7dacc478e5..69101d1ddf 100644 --- a/aiida/backends/tests/orm/node/test_node.py +++ b/aiida/backends/tests/orm/node/test_node.py @@ -7,6 +7,7 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### +# pylint: disable=too-many-public-methods """Tests for the Node ORM class.""" from __future__ import division from __future__ import print_function @@ -16,17 +17,15 @@ from aiida.backends.testbase import AiidaTestCase from aiida.common import exceptions, LinkType -from aiida.orm import Data, Node, User, CalculationNode, WorkflowNode +from aiida.orm import Data, Node, User, CalculationNode, WorkflowNode, load_node from aiida.orm.utils.links import LinkTriple -class TestNodeLinks(AiidaTestCase): - """Test for linking from and to Node.""" +class TestNode(AiidaTestCase): + """Tests for generic node functionality.""" def setUp(self): - super(TestNodeLinks, self).setUp() - self.node_source = CalculationNode() - self.node_target = Data() + super(TestNode, self).setUp() self.user = User.objects.get_default() def test_repository_garbage_collection(self): @@ -48,6 +47,332 @@ def test_computer_user_immutability(self): with
self.assertRaises(exceptions.ModificationNotAllowed):
             node.user = self.user
 
+
+class TestNodeAttributesExtras(AiidaTestCase):
+    """Test for node attributes and extras."""
+
+    def setUp(self):
+        super(TestNodeAttributesExtras, self).setUp()
+        self.node = Data()
+
+    def test_attributes(self):
+        """Test the `Node.attributes` property."""
+        original_attribute = {'nested': {'a': 1}}
+
+        self.node.set_attribute('key', original_attribute)
+        node_attributes = self.node.attributes
+        self.assertEqual(node_attributes['key'], original_attribute)
+        node_attributes['key']['nested']['a'] = 2
+
+        self.assertEqual(original_attribute['nested']['a'], 2)
+
+        # Now store the node and verify that `attributes` then returns a deep copy
+        self.node.store()
+        node_attributes = self.node.attributes
+
+        # We change the returned node attributes but the original attribute should remain unchanged
+        node_attributes['key']['nested']['a'] = 3
+        self.assertEqual(original_attribute['nested']['a'], 2)
+
+    def test_get_attribute(self):
+        """Test the `Node.get_attribute` method."""
+        original_attribute = {'nested': {'a': 1}}
+
+        self.node.set_attribute('key', original_attribute)
+        node_attribute = self.node.get_attribute('key')
+        self.assertEqual(node_attribute, original_attribute)
+        node_attribute['nested']['a'] = 2
+
+        self.assertEqual(original_attribute['nested']['a'], 2)
+
+        default = 'default'
+        self.assertEqual(self.node.get_attribute('not_existing', default=default), default)
+        with self.assertRaises(AttributeError):
+            self.node.get_attribute('not_existing')
+
+        # Now store the node and verify that `get_attribute` then returns a deep copy
+        self.node.store()
+        node_attribute = self.node.get_attribute('key')
+
+        # We change the returned node attributes but the original attribute should remain unchanged
+        node_attribute['nested']['a'] = 3
+        self.assertEqual(original_attribute['nested']['a'], 2)
+
+        default = 'default'
+        self.assertEqual(self.node.get_attribute('not_existing', default=default), default)
+        with self.assertRaises(AttributeError):
+            self.node.get_attribute('not_existing')
+
+    def test_get_attribute_many(self):
+        """Test the `Node.get_attribute_many` method."""
+        original_attribute = {'nested': {'a': 1}}
+
+        self.node.set_attribute('key', original_attribute)
+        node_attribute = self.node.get_attribute_many(['key'])[0]
+        self.assertEqual(node_attribute, original_attribute)
+        node_attribute['nested']['a'] = 2
+
+        self.assertEqual(original_attribute['nested']['a'], 2)
+
+        # Now store the node and verify that `get_attribute` then returns a deep copy
+        self.node.store()
+        node_attribute = self.node.get_attribute_many(['key'])[0]
+
+        # We change the returned node attributes but the original attribute should remain unchanged
+        node_attribute['nested']['a'] = 3
+        self.assertEqual(original_attribute['nested']['a'], 2)
+
+    def test_set_attribute(self):
+        """Test the `Node.set_attribute` method."""
+        with self.assertRaises(exceptions.ValidationError):
+            self.node.set_attribute('illegal.key', 'value')
+
+        self.node.set_attribute('valid_key', 'value')
+        self.node.store()
+
+        with self.assertRaises(exceptions.ModificationNotAllowed):
+            self.node.set_attribute('valid_key', 'value')
+
+    def test_set_attribute_many(self):
+        """Test the `Node.set_attribute_many` method."""
+        with self.assertRaises(exceptions.ValidationError):
+            self.node.set_attribute_many({'illegal.key': 'value', 'valid_key': 'value'})
+
+        self.node.set_attribute_many({'valid_key': 'value'})
+        self.node.store()
+
+        with self.assertRaises(exceptions.ModificationNotAllowed):
+            self.node.set_attribute_many({'valid_key': 'value'})
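+
+    # A minimal usage sketch (hypothetical snippet, not part of the test suite) of the
+    # mutability contract exercised by the two tests above: attribute keys are validated
+    # on write, and once the node is stored any further attribute write is rejected.
+    #
+    #     node = Data()
+    #     node.set_attribute('valid_key', 'value')   # fine while the node is unstored
+    #     node.store()
+    #     node.set_attribute('valid_key', 'other')   # raises ModificationNotAllowed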
+
+    def test_reset_attributes(self):
+        """Test the `Node.reset_attributes` method."""
+        attributes_before = {'attribute_one': 'value', 'attribute_two': 'value'}
+        attributes_after = {'attribute_three': 'value', 'attribute_four': 'value'}
+        attributes_illegal = {'attribute.illegal': 'value', 'attribute_four': 'value'}
+
+        self.node.set_attribute_many(attributes_before)
+        self.assertEqual(self.node.attributes, attributes_before)
+        self.node.reset_attributes(attributes_after)
+        self.assertEqual(self.node.attributes, attributes_after)
+
+        with self.assertRaises(exceptions.ValidationError):
+            self.node.reset_attributes(attributes_illegal)
+
+        self.node.store()
+
+        with self.assertRaises(exceptions.ModificationNotAllowed):
+            self.node.reset_attributes(attributes_after)
+
+    def test_delete_attribute(self):
+        """Test the `Node.delete_attribute` method."""
+        self.node.set_attribute('valid_key', 'value')
+        self.assertEqual(self.node.get_attribute('valid_key'), 'value')
+        self.node.delete_attribute('valid_key')
+
+        with self.assertRaises(AttributeError):
+            self.node.delete_attribute('valid_key')
+
+        # Repeat with stored node
+        self.node.set_attribute('valid_key', 'value')
+        self.node.store()
+
+        with self.assertRaises(exceptions.ModificationNotAllowed):
+            self.node.delete_attribute('valid_key')
+
+    def test_delete_attribute_many(self):
+        """Test the `Node.delete_attribute_many` method."""
+
+    def test_clear_attributes(self):
+        """Test the `Node.clear_attributes` method."""
+        attributes = {'attribute_one': 'value', 'attribute_two': 'value'}
+        self.node.set_attribute_many(attributes)
+        self.assertEqual(self.node.attributes, attributes)
+
+        self.node.clear_attributes()
+        self.assertEqual(self.node.attributes, {})
+
+        # Repeat for stored node
+        self.node.store()
+
+        with self.assertRaises(exceptions.ModificationNotAllowed):
+            self.node.clear_attributes()
+
+    def test_attributes_items(self):
+        """Test the `Node.attributes_items` generator."""
+        attributes = {'attribute_one': 'value', 'attribute_two': 'value'}
+        self.node.set_attribute_many(attributes)
+        self.assertEqual(dict(self.node.attributes_items()), attributes)
+
+    def test_attributes_keys(self):
+        """Test the `Node.attributes_keys` generator."""
+        attributes = {'attribute_one': 'value', 'attribute_two': 'value'}
+        self.node.set_attribute_many(attributes)
+        self.assertEqual(set(self.node.attributes_keys()), set(attributes))
+
+    def test_extras(self):
+        """Test the `Node.extras` property."""
+        original_extra = {'nested': {'a': 1}}
+
+        self.node.set_extra('key', original_extra)
+        node_extras = self.node.extras
+        self.assertEqual(node_extras['key'], original_extra)
+        node_extras['key']['nested']['a'] = 2
+
+        self.assertEqual(original_extra['nested']['a'], 2)
+
+        # Now store the node and verify that `extras` then returns a deep copy
+        self.node.store()
+        node_extras = self.node.extras
+
+        # We change the returned node extras but the original extra should remain unchanged
+        node_extras['key']['nested']['a'] = 3
+        self.assertEqual(original_extra['nested']['a'], 2)
+
+    def test_get_extra(self):
+        """Test the `Node.get_extra` method."""
+        original_extra = {'nested': {'a': 1}}
+
+        self.node.set_extra('key', original_extra)
+        node_extra = self.node.get_extra('key')
+        self.assertEqual(node_extra, original_extra)
+        node_extra['nested']['a'] = 2
+
+        self.assertEqual(original_extra['nested']['a'], 2)
+
+        default = 'default'
+        self.assertEqual(self.node.get_extra('not_existing', default=default), default)
+        with self.assertRaises(AttributeError):
+            self.node.get_extra('not_existing')
+
+        # Now store the node and verify that `get_extra` then returns a deep copy
+        self.node.store()
+        node_extra = self.node.get_extra('key')
+
+        # We change the returned node extras but the original extra should remain unchanged
+        node_extra['nested']['a'] = 3
+        self.assertEqual(original_extra['nested']['a'], 2)
+
+        default = 'default'
+        self.assertEqual(self.node.get_extra('not_existing', default=default), default)
+        with self.assertRaises(AttributeError):
+            self.node.get_extra('not_existing')
+
+    def test_get_extra_many(self):
+        """Test the `Node.get_extra_many` method."""
+        original_extra = {'nested': {'a': 1}}
+
+        self.node.set_extra('key', original_extra)
+        node_extra = self.node.get_extra_many(['key'])[0]
+        self.assertEqual(node_extra, original_extra)
+        node_extra['nested']['a'] = 2
+
+        self.assertEqual(original_extra['nested']['a'], 2)
+
+        # Now store the node and verify that `get_extra` then returns a deep copy
+        self.node.store()
+        node_extra = self.node.get_extra_many(['key'])[0]
+
+        # We change the returned node extras but the original extra should remain unchanged
+        node_extra['nested']['a'] = 3
+        self.assertEqual(original_extra['nested']['a'], 2)
+
+    def test_set_extra(self):
+        """Test the `Node.set_extra` method."""
+        with self.assertRaises(exceptions.ValidationError):
+            self.node.set_extra('illegal.key', 'value')
+
+        self.node.set_extra('valid_key', 'value')
+        self.node.store()
+
+        self.node.set_extra('valid_key', 'changed')
+        self.assertEqual(load_node(self.node.pk).get_extra('valid_key'), 'changed')
+
+    def test_set_extra_many(self):
+        """Test the `Node.set_extra_many` method."""
+        with self.assertRaises(exceptions.ValidationError):
+            self.node.set_extra_many({'illegal.key': 'value', 'valid_key': 'value'})
+
+        self.node.set_extra_many({'valid_key': 'value'})
+        self.node.store()
+
+        self.node.set_extra_many({'valid_key': 'changed'})
+        self.assertEqual(load_node(self.node.pk).get_extra('valid_key'), 'changed')
+
+    def test_reset_extras(self):
+        """Test the `Node.reset_extras` method."""
+        extras_before = {'extra_one': 'value', 'extra_two': 'value'}
+        extras_after = {'extra_three': 'value', 'extra_four': 'value'}
+        extras_illegal = {'extra.illegal': 'value', 'extra_four': 'value'}
+
+        self.node.set_extra_many(extras_before)
+        self.assertEqual(self.node.extras, extras_before)
+        self.node.reset_extras(extras_after)
+        self.assertEqual(self.node.extras, extras_after)
+
+        with self.assertRaises(exceptions.ValidationError):
+            self.node.reset_extras(extras_illegal)
+
+        self.node.store()
+
+        self.node.reset_extras(extras_after)
+        self.assertEqual(load_node(self.node.pk).extras, extras_after)
+
+    def test_delete_extra(self):
+        """Test the `Node.delete_extra` method."""
+        self.node.set_extra('valid_key', 'value')
+        self.assertEqual(self.node.get_extra('valid_key'), 'value')
+        self.node.delete_extra('valid_key')
+
+        with self.assertRaises(AttributeError):
+            self.node.delete_extra('valid_key')
+
+        # Repeat with stored node
+        self.node.set_extra('valid_key', 'value')
+        self.node.store()
+
+        self.node.delete_extra('valid_key')
+        with self.assertRaises(AttributeError):
+            load_node(self.node.pk).get_extra('valid_key')
+
+    def test_delete_extra_many(self):
+        """Test the `Node.delete_extra_many` method."""
+
+    def test_clear_extras(self):
+        """Test the `Node.clear_extras` method."""
+        extras = {'extra_one': 'value', 'extra_two': 'value'}
+
self.node.set_extra_many(extras) + self.assertEqual(self.node.extras, extras) + + self.node.clear_extras() + self.assertEqual(self.node.extras, {}) + + # Repeat for stored node + self.node.store() + + self.node.clear_extras() + self.assertEqual(load_node(self.node.pk).extras, {}) + + def test_extras_items(self): + """Test the `Node.extras_items` generator.""" + extras = {'extra_one': 'value', 'extra_two': 'value'} + self.node.set_extra_many(extras) + self.assertEqual(dict(self.node.extras_items()), extras) + + def test_extras_keys(self): + """Test the `Node.extras_keys` generator.""" + extras = {'extra_one': 'value', 'extra_two': 'value'} + self.node.set_extra_many(extras) + self.assertEqual(set(self.node.extras_keys()), set(extras)) + + +class TestNodeLinks(AiidaTestCase): + """Test for linking from and to Node.""" + + def setUp(self): + super(TestNodeLinks, self).setUp() + self.node_source = CalculationNode() + self.node_target = Data() + def test_get_stored_link_triples(self): """Validate the `get_stored_link_triples` method.""" data = Data().store() diff --git a/aiida/backends/tests/test_dataclasses.py b/aiida/backends/tests/test_dataclasses.py index 6765635dbe..f00e536ea0 100644 --- a/aiida/backends/tests/test_dataclasses.py +++ b/aiida/backends/tests/test_dataclasses.py @@ -1748,9 +1748,9 @@ def test_xyz_parser(self): xyz_string1 = """ 3 -Li 0.00000000 0.00000000 0.00000000 6.94100000 3 -Si 4.39194796 0.00000000 10.10068356 28.08550000 14 -Si 4.39194796 0.00000000 3.79747116 28.08550000 14 +Li 0.00000000 0.00000000 0.00000000 6.94100000 3 +Si 4.39194796 0.00000000 10.10068356 28.08550000 14 +Si 4.39194796 0.00000000 3.79747116 28.08550000 14 """ xyz_string2 = """ 2 @@ -1784,16 +1784,16 @@ def test_xyz_parser(self): xyz_string4 = """ 1 -Li 0.00000000 0.00000000 0.00000000 6.94100000 3 -Si 4.39194796 0.00000000 10.10068356 28.08550000 14 -Si 4.39194796 0.00000000 3.79747116 28.08550000 14 +Li 0.00000000 0.00000000 0.00000000 6.94100000 3 +Si 4.39194796 0.00000000 10.10068356 28.08550000 14 +Si 4.39194796 0.00000000 3.79747116 28.08550000 14 """ xyz_string5 = """ 10 -Li 0.00000000 0.00000000 0.00000000 6.94100000 3 -Si 4.39194796 0.00000000 10.10068356 28.08550000 14 -Si 4.39194796 0.00000000 3.79747116 28.08550000 14 +Li 0.00000000 0.00000000 0.00000000 6.94100000 3 +Si 4.39194796 0.00000000 10.10068356 28.08550000 14 +Si 4.39194796 0.00000000 3.79747116 28.08550000 14 """ xyz_string6 = """ 2 @@ -2024,7 +2024,7 @@ def test_conversion_of_types_1(self): @unittest.skipIf(not has_ase(), "Unable to import ase") def test_conversion_of_types_2(self): """ - Tests roundtrip ASE -> StructureData -> ASE, with tags, and + Tests roundtrip ASE -> StructureData -> ASE, with tags, and changing the atomic masses """ import ase @@ -3168,7 +3168,7 @@ def test_mesh(self): input_mesh = [4, 4, 4] k.set_kpoints_mesh(input_mesh) mesh, offset = k.get_kpoints_mesh() - self.assertEqual(mesh, list(input_mesh)) + self.assertEqual(mesh, input_mesh) self.assertEqual(offset, [0., 0., 0.]) # must be a tuple of three 0 by default # a too long list should fail @@ -3179,13 +3179,13 @@ def test_mesh(self): input_offset = [0.5, 0.5, 0.5] k.set_kpoints_mesh(input_mesh, input_offset) mesh, offset = k.get_kpoints_mesh() - self.assertEqual(mesh, list(input_mesh)) - self.assertEqual(offset, list(input_offset)) + self.assertEqual(mesh, input_mesh) + self.assertEqual(offset, input_offset) # verify the same but after storing k.store() - self.assertEqual(mesh, list(input_mesh)) - self.assertEqual(offset, list(input_offset)) + 
self.assertEqual(mesh, input_mesh) + self.assertEqual(offset, input_offset) # cannot modify it after storage with self.assertRaises(ModificationNotAllowed): diff --git a/aiida/backends/tests/test_export_and_import.py b/aiida/backends/tests/test_export_and_import.py index 6e7e0625d3..8af24a78f9 100644 --- a/aiida/backends/tests/test_export_and_import.py +++ b/aiida/backends/tests/test_export_and_import.py @@ -2896,7 +2896,7 @@ def setUpClass(cls, *args, **kwargs): data = orm.Data() data.label = 'my_test_data_node' data.store() - data.set_extras({'b': 2, 'c': 3}) + data.set_extra_many({'b': 2, 'c': 3}) cls.tmp_folder = tempfile.mkdtemp() cls.export_file = os.path.join(cls.tmp_folder, 'export.aiida') export([data], outfile=cls.export_file, silent=True) diff --git a/aiida/backends/tests/test_nodes.py b/aiida/backends/tests/test_nodes.py index a071d7aa22..cc4fd38009 100644 --- a/aiida/backends/tests/test_nodes.py +++ b/aiida/backends/tests/test_nodes.py @@ -21,11 +21,10 @@ import six from six.moves import range -from sqlalchemy.exc import StatementError from aiida import orm from aiida.backends.testbase import AiidaTestCase -from aiida.common.exceptions import InvalidOperation, ModificationNotAllowed, StoringNotAllowed +from aiida.common.exceptions import InvalidOperation, ModificationNotAllowed, StoringNotAllowed, ValidationError from aiida.common.links import LinkType from aiida.common.utils import Capturing from aiida.manage.database.delete.nodes import delete_nodes @@ -356,15 +355,6 @@ def test_attribute_mutability(self): with self.assertRaises(ModificationNotAllowed): a.set_attribute('integer', self.intval) - # Passing stored_check=False should disable the mutability check - a.delete_attribute('bool', stored_check=False) - a.set_attribute('integer', self.intval, stored_check=False) - - self.assertEquals(a.get_attribute('integer'), self.intval) - - with self.assertRaises(AttributeError): - a.get_attribute('bool') - def test_attr_before_storing(self): a = orm.Data() a.set_attribute('k1', self.boolval) @@ -485,40 +475,18 @@ def test_get_attrs_after_storing(self): self.assertEquals(a.attributes, target_attrs) def test_store_object(self): - """Trying to store objects should fail""" + """Trying to set objects as attributes should fail, because they are not json-serializable.""" a = orm.Data() - a.set_attribute('object', object(), clean=False) - # django raises TypeError - # sqlalchemy raises StatementError - with self.assertRaises((TypeError, StatementError)): + a.set_attribute('object', object()) + with self.assertRaises(ValidationError): a.store() b = orm.Data() - b.set_attribute('object_list', [object(), object()], clean=False) - with self.assertRaises((TypeError, StatementError)): - # objects are not json-serializable + b.set_attribute('object_list', [object(), object()]) + with self.assertRaises(ValidationError): b.store() - def test_append_to_empty_attr(self): - """Appending to an empty attribute""" - a = orm.Data() - a.append_to_attr('test', 0) - a.append_to_attr('test', 1) - - self.assertEquals(a.get_attribute('test'), [0, 1]) - - def test_append_no_side_effects(self): - """Check that append_to_attr has no side effects""" - a = orm.Data() - mylist = [1, 2, 3] - - a.set_attribute('list', mylist) - a.append_to_attr('list', 4) - - self.assertEquals(a.get_attribute('list'), [1, 2, 3, 4]) - self.assertEquals(mylist, [1, 2, 3]) - def test_attributes_on_clone(self): import copy @@ -1018,16 +986,16 @@ def test_basetype_as_attr(self): n = orm.Data() n.set_attribute('a', orm.Str("sometext2")) 
n.set_attribute('b', l2) - self.assertEqual(n.get_attribute('a'), "sometext2") - self.assertIsInstance(n.get_attribute('a'), six.string_types) - self.assertEqual(n.get_attribute('b'), ['f', True, {'gg': None}]) - self.assertIsInstance(n.get_attribute('b'), (list, tuple)) + self.assertEqual(n.get_attribute('a').value, 'sometext2') + self.assertIsInstance(n.get_attribute('a'), orm.Str) + self.assertEqual(n.get_attribute('b').get_list() , ['f', True, {'gg': None}]) + self.assertIsInstance(n.get_attribute('b'), orm.List) # Check also deep in a dictionary/list n = orm.Data() n.set_attribute('a', {'b': [orm.Str("sometext3")]}) - self.assertEqual(n.get_attribute('a')['b'][0], "sometext3") - self.assertIsInstance(n.get_attribute('a')['b'][0], six.string_types) + self.assertEqual(n.get_attribute('a')['b'][0].value, "sometext3") + self.assertIsInstance(n.get_attribute('a')['b'][0], orm.Str) n.store() self.assertEqual(n.get_attribute('a')['b'][0], "sometext3") self.assertIsInstance(n.get_attribute('a')['b'][0], six.string_types) @@ -1300,42 +1268,6 @@ def test_load_node(self): with self.assertRaises(NotExistent): orm.load_node(spec, sub_classes=(orm.ArrayData,)) - @unittest.skip('open issue JobCalculations cannot be stored') - def test_load_unknown_calculation_type(self): - """ - Test that the loader will choose a common calculation ancestor for an unknown data type. - For the case where, e.g., the user doesn't have the necessary plugin. - """ - from aiida.plugins import CalculationFactory - - TemplateReplacerCalc = CalculationFactory('templatereplacer') - testcalc = TemplateReplacerCalc(computer=self.computer) - testcalc.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1}) - testcalc.store() - - # compare if plugin exist - obj = orm.load_node(uuid=testcalc.uuid) - self.assertEqual(type(testcalc), type(obj)) - - # Create a custom calculation type that inherits from CalcJobNode but change the plugin type string - class TestCalculation(orm.CalcJobNode): - pass - - TestCalculation._plugin_type_string = 'nodes.process.calculation.calcjob.notexisting.TemplatereplacerCalculation.' - TestCalculation._query_type_string = 'nodes.process.calculation.calcjob.notexisting.TemplatereplacerCalculation' - - jobcalc = orm.CalcJobNode(computer=self.computer) - jobcalc.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1}) - jobcalc.store() - - testcalc = TestCalculation(computer=self.computer) - testcalc.set_option('resources', {'num_machines': 1, 'num_mpiprocs_per_machine': 1}) - testcalc.store() - - # Changed node should return CalcJobNode type as its plugin does not exist - obj = orm.load_node(uuid=testcalc.uuid) - self.assertEqual(type(jobcalc), type(obj)) - def test_load_unknown_data_type(self): """ Test that the loader will choose a common data ancestor for an unknown data type. @@ -1369,10 +1301,7 @@ class TestKpointsData(KpointsData): class TestSubNodesAndLinks(AiidaTestCase): def test_cachelink(self): - """ - Test the proper functionality of the links cache, with different - scenarios. 
- """ + """Test the proper functionality of the links cache, with different scenarios.""" n1 = orm.Data() n2 = orm.Data() n3 = orm.Data().store() @@ -1503,43 +1432,6 @@ def test_links_label_constraints(self): calc2a.add_incoming(d4, LinkType.INPUT_CALC, link_label='label3') calc2b.add_incoming(d4, LinkType.INPUT_CALC, link_label='label3') - @unittest.skip('activate this test once #2238 is addressed') - def test_links_label_autogenerator(self): - """Test the auto generation of link labels when labels are no longer required to be explicitly specified. - """ - n1 = orm.WorkflowNode().store() - n2 = orm.WorkflowNode().store() - n3 = orm.WorkflowNode().store() - n4 = orm.WorkflowNode().store() - n5 = orm.WorkflowNode().store() - n6 = orm.WorkflowNode().store() - n7 = orm.WorkflowNode().store() - n8 = orm.WorkflowNode().store() - n9 = orm.WorkflowNode().store() - data = orm.Data().store() - - data.add_incoming(n1, link_type=LinkType.RETURN) - # Label should be automatically generated - data.add_incoming(n2, link_type=LinkType.RETURN) - data.add_incoming(n3, link_type=LinkType.RETURN) - data.add_incoming(n4, link_type=LinkType.RETURN) - data.add_incoming(n5, link_type=LinkType.RETURN) - data.add_incoming(n6, link_type=LinkType.RETURN) - data.add_incoming(n7, link_type=LinkType.RETURN) - data.add_incoming(n8, link_type=LinkType.RETURN) - data.add_incoming(n9, link_type=LinkType.RETURN) - - all_labels = [_.link_label for _ in data.get_incoming()] - self.assertEquals(len(set(all_labels)), len(all_labels), "There are duplicate links, that are not expected") - - @unittest.skip('activate this test once #2238 is addressed') - def test_link_label_autogenerator(self): - """ - When the uniqueness constraints on links are reimplemented on the database level, auto generation of - labels that relies directly on those database level constraints should be reinstated and tested for here. - """ - raise NotImplementedError - def test_link_with_unstored(self): """ It is possible to store links between nodes even if they are unstored these links are cached. 
diff --git a/aiida/backends/tests/test_query.py b/aiida/backends/tests/test_query.py index 766fe199c2..ef0d0bd9da 100644 --- a/aiida/backends/tests/test_query.py +++ b/aiida/backends/tests/test_query.py @@ -7,23 +7,20 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### +# pylint: disable=invalid-name,missing-docstring,too-many-lines """Tests for the QueryBuilder.""" - from __future__ import division from __future__ import absolute_import from __future__ import print_function -import unittest import warnings from six.moves import range, zip from aiida import orm -from aiida.manage import configuration from aiida.backends.testbase import AiidaTestCase from aiida.common.links import LinkType - -# pylint: disable=invalid-name,missing-docstring,too-many-lines +from aiida.manage import configuration class TestQueryBuilder(AiidaTestCase): @@ -707,37 +704,6 @@ def test_attribute_type(self): self.assertEqual(set(res), set((n_arr.uuid,))) -class QueryBuilderDateTimeAttribute(AiidaTestCase): - - @unittest.skipIf(configuration.PROFILE.database_backend == u'sqlalchemy', - "SQLA doesn't have full datetime support in attributes") - @unittest.skipIf(configuration.PROFILE.database_backend == u'django', - "Django JSONB doesn't have full datetime support in attributes") - def test_date(self): - from aiida.common import timezone - from datetime import timedelta - n = orm.Data() - now = timezone.now() - n.set_attribute('now', now) - n.store() - - qb = orm.QueryBuilder().append( - orm.Node, - filters={ - 'attributes.now': { - "and": [ - { - ">": now - timedelta(seconds=1) - }, - { - "<": now + timedelta(seconds=1) - }, - ] - } - }) - self.assertEqual(qb.count(), 1) - - class QueryBuilderLimitOffsetsTest(AiidaTestCase): def test_ordering_limits_offsets_of_results_general(self): diff --git a/aiida/orm/implementation/django/convert.py b/aiida/orm/implementation/django/convert.py index e7e483b14b..c89ea4e430 100644 --- a/aiida/orm/implementation/django/convert.py +++ b/aiida/orm/implementation/django/convert.py @@ -166,7 +166,9 @@ def _(dbmodel, backend): label=dbmodel.label, description=dbmodel.description, dbcomputer_id=dbmodel.dbcomputer_id, - user_id=dbmodel.user_id) + user_id=dbmodel.user_id, + attributes=dbmodel.attributes, + extras=dbmodel.extras) from . import nodes return nodes.DjangoNode.from_dbmodel(djnode_instance, backend) diff --git a/aiida/orm/implementation/django/nodes.py b/aiida/orm/implementation/django/nodes.py index 5077d1379a..3aebe3c0ff 100644 --- a/aiida/orm/implementation/django/nodes.py +++ b/aiida/orm/implementation/django/nodes.py @@ -20,11 +20,11 @@ from aiida.backends.djsite.db import models from aiida.common import exceptions from aiida.common.lang import type_check +from aiida.orm.utils.node import clean_value from .. import BackendNode, BackendNodeCollection from . import entities from . import utils as dj_utils -from .. import utils as gen_utils from .computers import DjangoComputer from .users import DjangoUser @@ -147,26 +147,59 @@ def user(self, user): type_check(user, DjangoUser) self._dbmodel.user = user.dbmodel + @property + def attributes(self): + """Return the complete attributes dictionary. + + .. warning:: While the node is unstored, this will return references of the attributes on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. 
a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned + attributes will be a deep copy and mutations of the database attributes will have to go through the + appropriate set methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you + only need the keys or some values, use the iterators `attributes_keys` and `attributes_items`, or the + getters `get_attribute` and `get_attribute_many` instead. + + :return: the attributes as a dictionary + """ + return self.dbmodel.attributes + def get_attribute(self, key): - """Return an attribute. + """Return the value of an attribute. + + .. warning:: While the node is unstored, this will return a reference of the attribute on the database model, + meaning that changes on the returned value (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned + attribute will be a deep copy and mutations of the database attributes will have to go through the + appropriate set methods. :param key: name of the attribute :return: the value of the attribute - :raises AttributeError: if the attribute does not exist + :raises AttributeError: if the attribute does not exist and no default is specified """ try: - return gen_utils.get_attr(self.dbmodel.get_attributes(), key) - except (KeyError, IndexError): - raise AttributeError("Attribute '{}' does not exist".format(key)) - - def get_attributes(self, keys): - """Return a set of attributes. - - :param keys: names of the attributes - :return: the values of the attributes + return self._dbmodel.attributes[key] + except KeyError as exception: + raise AttributeError('attribute `{}` does not exist'.format(exception)) + + def get_attribute_many(self, keys): + """Return the values of multiple attributes. + + .. warning:: While the node is unstored, this will return references of the attributes on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned + attributes will be a deep copy and mutations of the database attributes will have to go through the + appropriate set methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you + only need the keys or some values, use the iterators `attributes_keys` and `attributes_items`, or the + getters `get_attribute` and `get_attribute_many` instead. + + :param keys: a list of attribute names + :return: a list of attribute values :raises AttributeError: if at least one attribute does not exist """ - raise NotImplementedError + try: + return [self.get_attribute(key) for key in keys] + except KeyError as exception: + raise AttributeError('attribute `{}` does not exist'.format(exception)) def set_attribute(self, key, value): """Set an attribute to the given value. @@ -174,26 +207,40 @@ def set_attribute(self, key, value): :param key: name of the attribute :param value: value of the attribute """ - self.dbmodel.set_attribute(key, value) + if self.is_stored: + value = clean_value(value) + + self._dbmodel.attributes[key] = value + self._flush_if_stored() - def set_attributes(self, attributes): - """Set attributes. + def set_attribute_many(self, attributes): + """Set multiple attributes. .. note:: This will override any existing attributes that are present in the new dictionary. 
- :param attributes: the new attributes to set + :param attributes: a dictionary with the attributes to set """ + if self.is_stored: + attributes = {key: clean_value(value) for key, value in attributes.items()} + for key, value in attributes.items(): - self.dbmodel.set_attribute(key, value) + # We need to use `self.dbmodel` without the underscore, because otherwise the second iteration will refetch + # what is in the database and we lose the initial changes. + self.dbmodel.attributes[key] = value + self._flush_if_stored() def reset_attributes(self, attributes): """Reset the attributes. - .. note:: This will completely reset any existing attributes and replace them with the new dictionary. + .. note:: This will completely clear any existing attributes and replace them with the new dictionary. - :param attributes: the new attributes to set + :param attributes: a dictionary with the attributes to set """ - self.dbmodel.reset_attributes(attributes) + if self.is_stored: + attributes = clean_value(attributes) + + self.dbmodel.attributes = attributes + self._flush_if_stored() def delete_attribute(self, key): """Delete an attribute. @@ -201,25 +248,36 @@ def delete_attribute(self, key): :param key: name of the attribute :raises AttributeError: if the attribute does not exist """ - self.dbmodel.del_attribute(key) + try: + self._dbmodel.attributes.pop(key) + except KeyError as exception: + raise AttributeError('attribute `{}` does not exist'.format(exception)) + else: + self._flush_if_stored() - def delete_attributes(self, keys): + def delete_attribute_many(self, keys): """Delete multiple attributes. - .. note:: The implementation should guarantee that all the keys that are to be deleted actually exist or the - entire operation should be canceled without any change and an ``AttributeError`` should be raised. - :param keys: names of the attributes to delete - :raises AttributeError: if at least on of the attribute does not exist + :raises AttributeError: if at least one of the attribute does not exist """ - raise NotImplementedError + non_existing_keys = [key for key in keys if key not in self._dbmodel.attributes] + + if non_existing_keys: + raise AttributeError('attributes `{}` do not exist'.format(', '.join(non_existing_keys))) + + for key in keys: + self.dbmodel.attributes.pop(key) + + self._flush_if_stored() def clear_attributes(self): """Delete all attributes.""" - raise NotImplementedError + self._dbmodel.attributes = {} + self._flush_if_stored() def attributes_items(self): - """Return an iterator over the attribute items. + """Return an iterator over the attributes. :return: an iterator with attribute key value pairs """ @@ -231,29 +289,62 @@ def attributes_keys(self): :return: an iterator with attribute keys """ - for key in self._dbmodel.attributes.keys(): + for key in self._dbmodel.attributes: yield key + @property + def extras(self): + """Return the complete extras dictionary. + + .. warning:: While the node is unstored, this will return references of the extras on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned extras + will be a deep copy and mutations of the database extras will have to go through the appropriate set + methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. 
If you only need the keys + or some values, use the iterators `extras_keys` and `extras_items`, or the getters `get_extra` and + `get_extra_many` instead. + + :return: the extras as a dictionary + """ + return self.dbmodel.extras + def get_extra(self, key): - """Return an extra. + """Return the value of an extra. + + .. warning:: While the node is unstored, this will return a reference of the extra on the database model, + meaning that changes on the returned value (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned extra + will be a deep copy and mutations of the database extras will have to go through the appropriate set + methods. :param key: name of the extra :return: the value of the extra - :raises AttributeError: if the extra does not exist + :raises AttributeError: if the extra does not exist and no default is specified """ try: - return gen_utils.get_attr(self.dbmodel.extras, key) - except (KeyError, AttributeError): - raise AttributeError('Extra `{}` does not exist'.format(key)) - - def get_extras(self, keys): - """Return a set of extras. - - :param keys: names of the extras - :return: the values of the extras + return self._dbmodel.extras[key] + except KeyError as exception: + raise AttributeError('extra `{}` does not exist'.format(exception)) + + def get_extra_many(self, keys): + """Return the values of multiple extras. + + .. warning:: While the node is unstored, this will return references of the extras on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned extras + will be a deep copy and mutations of the database extras will have to go through the appropriate set + methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you only need the keys + or some values, use the iterators `extras_keys` and `extras_items`, or the getters `get_extra` and + `get_extra_many` instead. + + :param keys: a list of extra names + :return: a list of extra values :raises AttributeError: if at least one extra does not exist """ - raise NotImplementedError + try: + return [self.get_extra(key) for key in keys] + except KeyError as exception: + raise AttributeError('extra `{}` does not exist'.format(exception)) def set_extra(self, key, value): """Set an extra to the given value. @@ -261,25 +352,39 @@ def set_extra(self, key, value): :param key: name of the extra :param value: value of the extra """ - self.dbmodel.set_extra(key, value) + if self.is_stored: + value = clean_value(value) + + self._dbmodel.extras[key] = value + self._flush_if_stored() - def set_extras(self, extras): - """Set extras. + def set_extra_many(self, extras): + """Set multiple extras. .. note:: This will override any existing extras that are present in the new dictionary. - :param extras: the new extras to set + :param extras: a dictionary with the extras to set """ - self.dbmodel.set_extras(extras) + if self.is_stored: + extras = {key: clean_value(value) for key, value in extras.items()} + + for key, value in extras.items(): + self.dbmodel.extras[key] = value + + self._flush_if_stored() def reset_extras(self, extras): """Reset the extras. - .. note:: This will completely reset any existing extras and replace them with the new dictionary. + .. 
note:: This will completely clear any existing extras and replace them with the new dictionary. - :param extras: the new extras to set + :param extras: a dictionary with the extras to set """ - self.dbmodel.reset_extras(extras) + if self.is_stored: + extras = clean_value(extras) + + self.dbmodel.extras = extras + self._flush_if_stored() def delete_extra(self, key): """Delete an extra. @@ -287,39 +392,54 @@ def delete_extra(self, key): :param key: name of the extra :raises AttributeError: if the extra does not exist """ - self.dbmodel.del_extra(key) + try: + self._dbmodel.extras.pop(key) + except KeyError as exception: + raise AttributeError('extra `{}` does not exist'.format(exception)) + else: + self._flush_if_stored() - def delete_extras(self, keys): + def delete_extra_many(self, keys): """Delete multiple extras. - .. note:: The implementation should guarantee that all the keys that are to be deleted actually exist or the - entire operation should be canceled without any change and an ``AttributeError`` should be raised. - :param keys: names of the extras to delete - :raises AttributeError: if at least on of the extra does not exist + :raises AttributeError: if at least one of the extra does not exist """ - raise NotImplementedError + non_existing_keys = [key for key in keys if key not in self._dbmodel.extras] + + if non_existing_keys: + raise AttributeError('extras `{}` do not exist'.format(', '.join(non_existing_keys))) + + for key in keys: + self.dbmodel.extras.pop(key) + + self._flush_if_stored() def clear_extras(self): """Delete all extras.""" - raise NotImplementedError + self._dbmodel.extras = {} + self._flush_if_stored() def extras_items(self): - """Return an iterator over the extra items. + """Return an iterator over the extras. :return: an iterator with extra key value pairs """ - for key, value in self.dbmodel.extras.items(): + for key, value in self._dbmodel.extras.items(): yield key, value def extras_keys(self): - """Return an iterator over the extras keys. + """Return an iterator over the extra keys. - :return: an iterator with extras keys + :return: an iterator with extra keys """ - for key in self.dbmodel.extras.keys(): + for key in self._dbmodel.extras: yield key + def _flush_if_stored(self): + if self._dbmodel.is_saved(): + self._dbmodel.save() + def add_incoming(self, source, link_type, link_label): """Add a link of the given type from a given node to ourself. @@ -358,15 +478,23 @@ def _add_link(self, source, link_type, link_label): transaction.savepoint_rollback(savepoint_id) raise exceptions.UniquenessError('failed to create the link: {}'.format(exception)) - def store(self, attributes=None, links=None, with_transaction=True): + def clean_values(self): + self._dbmodel.attributes = clean_value(self._dbmodel.attributes) + self._dbmodel.extras = clean_value(self._dbmodel.extras) + + def store(self, links=None, with_transaction=True, clean=True): """Store the node in the database. - :param attributes: optional attributes to set before storing, will override any existing attributes :param links: optional links to add before storing + :param with_transaction: if False, do not use a transaction because the caller will already have opened one. 
+ :param clean: boolean, if True, will clean the attributes and extras before attempting to store """ from aiida.common.lang import EmptyContextManager from aiida.backends.djsite.db.models import suppress_auto_now + if clean: + self.clean_values() + with transaction.atomic() if with_transaction else EmptyContextManager(): with suppress_auto_now([(models.DbNode, ['mtime'])]) if self.mtime else EmptyContextManager(): # We need to save the node model instance itself first such that it has a pk @@ -374,10 +502,6 @@ def store(self, attributes=None, links=None, with_transaction=True): # attributes and links self.dbmodel.save() - if attributes: - for key, value in attributes.items(): - self.dbmodel.set_attribute(key, value) - if links: for link_triple in links: self._add_link(*link_triple) @@ -390,6 +514,16 @@ class DjangoNodeCollection(BackendNodeCollection): ENTITY_CLASS = DjangoNode + def get(self, pk): + """Return a Node entry from the collection with the given id + + :param pk: id of the node + """ + try: + return self.ENTITY_CLASS.from_dbmodel(models.DbNode.objects.get(pk=pk), self.backend) + except ObjectDoesNotExist: + raise exceptions.NotExistent("Node with pk '{}' not found".format(pk)) + def delete(self, pk): """Remove a Node entry from the collection with the given id diff --git a/aiida/orm/implementation/django/querybuilder.py b/aiida/orm/implementation/django/querybuilder.py index 4be93a3966..52e2b11e56 100644 --- a/aiida/orm/implementation/django/querybuilder.py +++ b/aiida/orm/implementation/django/querybuilder.py @@ -385,16 +385,16 @@ def get_aiida_res(self, key, res): :returns: an aiida-compatible instance """ if isinstance(res, Choice): - returnval = res.value + result = res.value elif isinstance(res, uuid.UUID): - returnval = six.text_type(res) + result = six.text_type(res) else: try: - returnval = self._backend.get_backend_entity(res) + result = self._backend.get_backend_entity(res) except TypeError: - returnval = res + result = res - return returnval + return result def yield_per(self, query, batch_size): """ diff --git a/aiida/orm/implementation/django/utils.py b/aiida/orm/implementation/django/utils.py index ad9aef158e..c78f195976 100644 --- a/aiida/orm/implementation/django/utils.py +++ b/aiida/orm/implementation/django/utils.py @@ -7,12 +7,13 @@ # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### - from __future__ import division from __future__ import print_function from __future__ import absolute_import -import django.db + +from django.db import transaction, IntegrityError from django.db.models.fields import FieldDoesNotExist + from aiida.common import exceptions @@ -54,14 +55,12 @@ def is_saved(self): def save(self): """ Save the model (possibly updating values if changed) """ - from django.db import transaction - # transactions are needed here for Postgresql: # https://docs.djangoproject.com/en/1.7/topics/db/transactions/#handling-exceptions-within-postgresql-transactions with transaction.atomic(): try: self._model.save() - except django.db.IntegrityError as e: + except IntegrityError as e: # Convert to one of our exceptions raise exceptions.IntegrityError(str(e)) @@ -78,19 +77,19 @@ def _flush(self, fields=None): if self.is_saved(): try: # Manually append the `mtime` to fields to update, because when using the `update_fields` keyword of the - # `save` method, the `auto_now` property of `mtime` column is not 
triggered - if self._is_model_field('mtime'): + # `save` method, the `auto_now` property of `mtime` column is not triggered. If `update_fields` is None + # everything is updated, so we do not have to add anything + if fields is not None and self._is_model_field('mtime'): fields.add('mtime') self._model.save(update_fields=fields) - except django.db.IntegrityError as e: + except IntegrityError as e: # Convert to one of our exceptions raise exceptions.IntegrityError(str(e)) def _ensure_model_uptodate(self, fields=None): if self.is_saved(): - # For now we have no choice but to reload the entire model. - # Django 1.8 has support for refreshing an individual attribute, see: - # https://docs.djangoproject.com/en/1.8/ref/models/instances/#refreshing-objects-from-database - new_model = self._model.__class__.objects.get(pk=self._model.pk) - # Have to save this way so we don't hit the __setattr__ above - object.__setattr__(self, '_model', new_model) + self._model.refresh_from_db(fields=fields) + + @staticmethod + def _in_transaction(): + return not transaction.get_autocommit() diff --git a/aiida/orm/implementation/nodes.py b/aiida/orm/implementation/nodes.py index 7a15d777a2..641363e1c7 100644 --- a/aiida/orm/implementation/nodes.py +++ b/aiida/orm/implementation/nodes.py @@ -149,33 +149,50 @@ def mtime(self): """ return self._dbmodel.mtime - @property + @abc.abstractproperty def attributes(self): - """Return the attributes dictionary. + """Return the complete attributes dictionary. - .. note:: This will fetch all the attributes from the database which can be a heavy operation. If you only need - the keys or some values, use the iterators `attributes_keys` and `attributes_items`, or the getters - `get_attribute` and `get_attributes` instead. + .. warning:: While the node is unstored, this will return references of the attributes on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned + attributes will be a deep copy and mutations of the database attributes will have to go through the + appropriate set methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you + only need the keys or some values, use the iterators `attributes_keys` and `attributes_items`, or the + getters `get_attribute` and `get_attribute_many` instead. :return: the attributes as a dictionary """ - return dict(self.attributes_items()) @abc.abstractmethod def get_attribute(self, key): - """Return an attribute. + """Return the value of an attribute. + + .. warning:: While the node is unstored, this will return a reference of the attribute on the database model, + meaning that changes on the returned value (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned + attribute will be a deep copy and mutations of the database attributes will have to go through the + appropriate set methods. :param key: name of the attribute :return: the value of the attribute - :raises AttributeError: if the attribute does not exist + :raises AttributeError: if the attribute does not exist and no default is specified """ @abc.abstractmethod - def get_attributes(self, keys): - """Return a set of attributes. 
- - :param keys: names of the attributes - :return: the values of the attributes + def get_attribute_many(self, keys): + """Return the values of multiple attributes. + + .. warning:: While the node is unstored, this will return references of the attributes on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned + attributes will be a deep copy and mutations of the database attributes will have to go through the + appropriate set methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you + only need the keys or some values, use the iterators `attributes_keys` and `attributes_items`, or the + getters `get_attribute` and `get_attribute_many` instead. + + :param keys: a list of attribute names + :return: a list of attribute values :raises AttributeError: if at least one attribute does not exist """ @@ -188,21 +205,21 @@ def set_attribute(self, key, value): """ @abc.abstractmethod - def set_attributes(self, attributes): - """Set attributes. + def set_attribute_many(self, attributes): + """Set multiple attributes. .. note:: This will override any existing attributes that are present in the new dictionary. - :param attributes: the new attributes to set + :param attributes: a dictionary with the attributes to set """ @abc.abstractmethod def reset_attributes(self, attributes): """Reset the attributes. - .. note:: This will completely reset any existing attributes and replace them with the new dictionary. + .. note:: This will completely clear any existing attributes and replace them with the new dictionary. - :param attributes: the new attributes to set + :param attributes: a dictionary with the attributes to set """ @abc.abstractmethod @@ -214,14 +231,11 @@ def delete_attribute(self, key): """ @abc.abstractmethod - def delete_attributes(self, keys): + def delete_attribute_many(self, keys): """Delete multiple attributes. - .. note:: The implementation should guarantee that all the keys that are to be deleted actually exist or the - entire operation should be canceled without any change and an ``AttributeError`` should be raised. - :param keys: names of the attributes to delete - :raises AttributeError: if at least on of the attribute does not exist + :raises AttributeError: if at least one of the attribute does not exist """ @abc.abstractmethod @@ -230,7 +244,7 @@ def clear_attributes(self): @abc.abstractmethod def attributes_items(self): - """Return an iterator over the attribute items. + """Return an iterator over the attributes. :return: an iterator with attribute key value pairs """ @@ -242,33 +256,50 @@ def attributes_keys(self): :return: an iterator with attribute keys """ - @property + @abc.abstractproperty def extras(self): - """Return the extras dictionary. + """Return the complete extras dictionary. - .. note:: This will fetch all the extras from the database which can be a heavy operation. If you only need - the keys or some values, use the iterators `extras_keys` and `extras_items`, or the getters `get_extra` - and `get_extras` instead. + .. warning:: While the node is unstored, this will return references of the extras on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. 
As soon as the node is stored, the returned extras + will be a deep copy and mutations of the database extras will have to go through the appropriate set + methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you only need the keys + or some values, use the iterators `extras_keys` and `extras_items`, or the getters `get_extra` and + `get_extra_many` instead. :return: the extras as a dictionary """ - return dict(self.extras_items()) @abc.abstractmethod def get_extra(self, key): - """Return an extra. + """Return the value of an extra. + + .. warning:: While the node is unstored, this will return a reference of the extra on the database model, + meaning that changes on the returned value (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned extra + will be a deep copy and mutations of the database extras will have to go through the appropriate set + methods. :param key: name of the extra :return: the value of the extra - :raises AttributeError: if the extra does not exist + :raises AttributeError: if the extra does not exist and no default is specified """ @abc.abstractmethod - def get_extras(self, keys): - """Return a set of extras. - - :param keys: names of the extras - :return: the values of the extras + def get_extra_many(self, keys): + """Return the values of multiple extras. + + .. warning:: While the node is unstored, this will return references of the extras on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned extras + will be a deep copy and mutations of the database extras will have to go through the appropriate set + methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you only need the keys + or some values, use the iterators `extras_keys` and `extras_items`, or the getters `get_extra` and + `get_extra_many` instead. + + :param keys: a list of extra names + :return: a list of extra values :raises AttributeError: if at least one extra does not exist """ @@ -281,21 +312,21 @@ def set_extra(self, key, value): """ @abc.abstractmethod - def set_extras(self, extras): - """Set extras. + def set_extra_many(self, extras): + """Set multiple extras. .. note:: This will override any existing extras that are present in the new dictionary. - :param extras: the new extras to set + :param extras: a dictionary with the extras to set """ @abc.abstractmethod def reset_extras(self, extras): """Reset the extras. - .. note:: This will completely reset any existing extras and replace them with the new dictionary. + .. note:: This will completely clear any existing extras and replace them with the new dictionary. - :param extras: the new extras to set + :param extras: a dictionary with the extras to set """ @abc.abstractmethod @@ -307,14 +338,11 @@ def delete_extra(self, key): """ @abc.abstractmethod - def delete_extras(self, keys): + def delete_extra_many(self, keys): """Delete multiple extras. - .. note:: The implementation should guarantee that all the keys that are to be deleted actually exist or the - entire operation should be canceled without any change and an ``AttributeError`` should be raised. 
- :param keys: names of the extras to delete - :raises AttributeError: if at least on of the extra does not exist + :raises AttributeError: if at least one of the extra does not exist """ @abc.abstractmethod @@ -323,16 +351,16 @@ def clear_extras(self): @abc.abstractmethod def extras_items(self): - """Return an iterator over the extra items. + """Return an iterator over the extras. :return: an iterator with extra key value pairs """ @abc.abstractmethod def extras_keys(self): - """Return an iterator over the attribute keys. + """Return an iterator over the extra keys. - :return: an iterator with attribute keys + :return: an iterator with extra keys """ @abc.abstractmethod @@ -348,12 +376,12 @@ def add_incoming(self, source, link_type, link_label): """ @abc.abstractmethod - def store(self, attributes=None, links=None, with_transaction=True): + def store(self, links=None, with_transaction=True, clean=True): """Store the node in the database. - :param attributes: optional attributes to set before storing, will override any existing attributes :param links: optional links to add before storing - :parameter with_transaction: if False, do not use a transaction because the caller will already have opened one. + :param with_transaction: if False, do not use a transaction because the caller will already have opened one. + :param clean: boolean, if True, will clean the attributes and extras before attempting to store """ @@ -365,6 +393,13 @@ class BackendNodeCollection(backends.BackendCollection[BackendNode]): ENTITY_CLASS = BackendNode + @abc.abstractmethod + def get(self, pk): + """Return a Node entry from the collection with the given id + + :param pk: id of the node + """ + @abc.abstractmethod def delete(self, pk): """Remove a Node entry from the collection with the given id diff --git a/aiida/orm/implementation/sqlalchemy/nodes.py b/aiida/orm/implementation/sqlalchemy/nodes.py index bc1e7114a7..4546733fda 100644 --- a/aiida/orm/implementation/sqlalchemy/nodes.py +++ b/aiida/orm/implementation/sqlalchemy/nodes.py @@ -21,11 +21,11 @@ from aiida.backends.sqlalchemy.models import node as models from aiida.common import exceptions from aiida.common.lang import type_check +from aiida.orm.utils.node import clean_value from .. import BackendNode, BackendNodeCollection from . import entities from . import utils as sqla_utils -from .. import utils as gen_utils from .computers import SqlaComputer from .users import SqlaUser @@ -149,26 +149,59 @@ def user(self, user): type_check(user, SqlaUser) self._dbmodel.user = user.dbmodel + @property + def attributes(self): + """Return the complete attributes dictionary. + + .. warning:: While the node is unstored, this will return references of the attributes on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned + attributes will be a deep copy and mutations of the database attributes will have to go through the + appropriate set methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you + only need the keys or some values, use the iterators `attributes_keys` and `attributes_items`, or the + getters `get_attribute` and `get_attribute_many` instead. + + :return: the attributes as a dictionary + """ + return self._dbmodel.attributes + def get_attribute(self, key): - """Return an attribute. + """Return the value of an attribute. + + .. 
warning:: While the node is unstored, this will return a reference of the attribute on the database model, + meaning that changes on the returned value (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned + attribute will be a deep copy and mutations of the database attributes will have to go through the + appropriate set methods. :param key: name of the attribute :return: the value of the attribute - :raises AttributeError: if the attribute does not exist + :raises AttributeError: if the attribute does not exist and no default is specified """ try: - return gen_utils.get_attr(self._dbmodel.attributes, key) - except (KeyError, IndexError): - raise AttributeError('Attribute `{}` does not exist'.format(key)) - - def get_attributes(self, keys): - """Return a set of attributes. - - :param keys: names of the attributes - :return: the values of the attributes + return self._dbmodel.attributes[key] + except KeyError as exception: + raise AttributeError('attribute `{}` does not exist'.format(exception)) + + def get_attribute_many(self, keys): + """Return the values of multiple attributes. + + .. warning:: While the node is unstored, this will return references of the attributes on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned + attributes will be a deep copy and mutations of the database attributes will have to go through the + appropriate set methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you + only need the keys or some values, use the iterators `attributes_keys` and `attributes_items`, or the + getters `get_attribute` and `get_attribute_many` instead. + + :param keys: a list of attribute names + :return: a list of attribute values :raises AttributeError: if at least one attribute does not exist """ - raise NotImplementedError + try: + return [self.get_attribute(key) for key in keys] + except KeyError as exception: + raise AttributeError('attribute `{}` does not exist'.format(exception)) def set_attribute(self, key, value): """Set an attribute to the given value. @@ -176,40 +209,42 @@ def set_attribute(self, key, value): :param key: name of the attribute :param value: value of the attribute """ - try: - self.dbmodel.set_attribute(key, value) - except Exception: # pylint: disable=bare-except - session = get_scoped_session() - session.rollback() - raise + if self.is_stored: + value = clean_value(value) + + self._dbmodel.attributes[key] = value + self._flag_field('attributes') + self._flush_if_stored() - def set_attributes(self, attributes): - """Set attributes. + def set_attribute_many(self, attributes): + """Set multiple attributes. .. note:: This will override any existing attributes that are present in the new dictionary. 
- :param attributes: the new attributes to set + :param attributes: a dictionary with the attributes to set """ - try: - self.dbmodel.set_attributes(attributes) - except Exception: # pylint: disable=bare-except - session = get_scoped_session() - session.rollback() - raise + if self.is_stored: + attributes = {key: clean_value(value) for key, value in attributes.items()} + + for key, value in attributes.items(): + self.dbmodel.attributes[key] = value + + self._flag_field('attributes') + self._flush_if_stored() def reset_attributes(self, attributes): """Reset the attributes. - .. note:: This will completely reset any existing attributes and replace them with the new dictionary. + .. note:: This will completely clear any existing attributes and replace them with the new dictionary. - :param attributes: the new attributes to set + :param attributes: a dictionary with the attributes to set """ - try: - self.dbmodel.reset_attributes(attributes) - except Exception: # pylint: disable=bare-except - session = get_scoped_session() - session.rollback() - raise + if self.is_stored: + attributes = clean_value(attributes) + + self.dbmodel.attributes = attributes + self._flag_field('attributes') + self._flush_if_stored() def delete_attribute(self, key): """Delete an attribute. @@ -218,29 +253,36 @@ def delete_attribute(self, key): :raises AttributeError: if the attribute does not exist """ try: - self._dbmodel.del_attribute(key) - except Exception: # pylint: disable=bare-except - session = get_scoped_session() - session.rollback() - raise - - def delete_attributes(self, keys): + self._dbmodel.attributes.pop(key) + except KeyError as exception: + raise AttributeError('attribute `{}` does not exist'.format(exception)) + else: + self._flag_field('attributes') + self._flush_if_stored() + + def delete_attribute_many(self, keys): """Delete multiple attributes. - .. note:: The implementation should guarantee that all the keys that are to be deleted actually exist or the - entire operation should be canceled without any change and an ``AttributeError`` should be raised. - :param keys: names of the attributes to delete - :raises AttributeError: if at least on of the attribute does not exist + :raises AttributeError: if at least one of the attribute does not exist """ - raise NotImplementedError + non_existing_keys = [key for key in keys if key not in self._dbmodel.attributes] + + if non_existing_keys: + raise AttributeError('attributes `{}` do not exist'.format(', '.join(non_existing_keys))) + + for key in keys: + self.dbmodel.attributes.pop(key) + + self._flag_field('attributes') + self._flush_if_stored() def clear_attributes(self): """Delete all attributes.""" - raise NotImplementedError + self._dbmodel.attributes = {} def attributes_items(self): - """Return an iterator over the attribute items. + """Return an iterator over the attributes. :return: an iterator with attribute key value pairs """ @@ -255,26 +297,59 @@ def attributes_keys(self): for key in self._dbmodel.attributes.keys(): yield key + @property + def extras(self): + """Return the complete extras dictionary. + + .. warning:: While the node is unstored, this will return references of the extras on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned extras + will be a deep copy and mutations of the database extras will have to go through the appropriate set + methods. 
Therefore, once stored, retrieving a deep copy can be a heavy operation. If you only need the keys + or some values, use the iterators `extras_keys` and `extras_items`, or the getters `get_extra` and + `get_extra_many` instead. + + :return: the extras as a dictionary + """ + return self._dbmodel.extras + def get_extra(self, key): - """Return an extra. + """Return the value of an extra. + + .. warning:: While the node is unstored, this will return a reference of the extra on the database model, + meaning that changes on the returned value (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned extra + will be a deep copy and mutations of the database extras will have to go through the appropriate set + methods. :param key: name of the extra :return: the value of the extra - :raises AttributeError: if the extra does not exist + :raises AttributeError: if the extra does not exist and no default is specified """ try: - return gen_utils.get_attr(self._dbmodel.extras, key) - except (KeyError, IndexError): - raise AttributeError('Extra `{}` does not exist'.format(key)) - - def get_extras(self, keys): - """Return a set of extras. - - :param keys: names of the extras - :return: the values of the extras + return self._dbmodel.extras[key] + except KeyError as exception: + raise AttributeError('extra `{}` does not exist'.format(exception)) + + def get_extra_many(self, keys): + """Return the values of multiple extras. + + .. warning:: While the node is unstored, this will return references of the extras on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned extras + will be a deep copy and mutations of the database extras will have to go through the appropriate set + methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you only need the keys + or some values, use the iterators `extras_keys` and `extras_items`, or the getters `get_extra` and + `get_extra_many` instead. + + :param keys: a list of extra names + :return: a list of extra values :raises AttributeError: if at least one extra does not exist """ - raise NotImplementedError + try: + return [self.get_extra(key) for key in keys] + except KeyError as exception: + raise AttributeError('extra `{}` does not exist'.format(exception)) def set_extra(self, key, value): """Set an extra to the given value. @@ -282,40 +357,39 @@ def set_extra(self, key, value): :param key: name of the extra :param value: value of the extra """ - try: - self._dbmodel.set_extra(key, value) - except Exception: # pylint: disable=bare-except - session = get_scoped_session() - session.rollback() - raise + if self.is_stored: + value = clean_value(value) + + self._dbmodel.extras[key] = value + self._flag_field('extras') + self._flush_if_stored() - def set_extras(self, extras): - """Set extras. + def set_extra_many(self, extras): + """Set multiple extras. .. note:: This will override any existing extras that are present in the new dictionary. 
- :param extras: the new extras to set + :param extras: a dictionary with the extras to set """ - try: - self.dbmodel.set_extras(extras) - except Exception: # pylint: disable=bare-except - session = get_scoped_session() - session.rollback() - raise + if self.is_stored: + extras = {key: clean_value(value) for key, value in extras.items()} + + for key, value in extras.items(): + self.dbmodel.extras[key] = value + + self._flag_field('extras') + self._flush_if_stored() def reset_extras(self, extras): """Reset the extras. - .. note:: This will completely reset any existing extras and replace them with the new dictionary. + .. note:: This will completely clear any existing extras and replace them with the new dictionary. - :param extras: the new extras to set + :param extras: a dictionary with the extras to set """ - try: - self._dbmodel.reset_extras(extras) - except Exception: # pylint: disable=bare-except - session = get_scoped_session() - session.rollback() - raise + self.dbmodel.extras = extras + self._flag_field('extras') + self._flush_if_stored() def delete_extra(self, key): """Delete an extra. @@ -324,29 +398,36 @@ def delete_extra(self, key): :raises AttributeError: if the extra does not exist """ try: - self._dbmodel.del_extra(key) - except Exception: # pylint: disable=bare-except - session = get_scoped_session() - session.rollback() - raise - - def delete_extras(self, keys): + self._dbmodel.extras.pop(key) + except KeyError as exception: + raise AttributeError('extra `{}` does not exist'.format(exception)) + else: + self._flag_field('extras') + self._flush_if_stored() + + def delete_extra_many(self, keys): """Delete multiple extras. - .. note:: The implementation should guarantee that all the keys that are to be deleted actually exist or the - entire operation should be canceled without any change and an ``AttributeError`` should be raised. - :param keys: names of the extras to delete - :raises AttributeError: if at least on of the extra does not exist + :raises AttributeError: if at least one of the extra does not exist """ - raise NotImplementedError + non_existing_keys = [key for key in keys if key not in self._dbmodel.extras] + + if non_existing_keys: + raise AttributeError('extras `{}` do not exist'.format(', '.join(non_existing_keys))) + + for key in keys: + self.dbmodel.extras.pop(key) + + self._flag_field('extras') + self._flush_if_stored() def clear_extras(self): """Delete all extras.""" - raise NotImplementedError + self._dbmodel.extras = {} def extras_items(self): - """Return an iterator over the extra items. + """Return an iterator over the extras. :return: an iterator with extra key value pairs """ @@ -354,13 +435,21 @@ def extras_items(self): yield key, value def extras_keys(self): - """Return an iterator over the extras keys. + """Return an iterator over the extra keys. - :return: an iterator with extras keys + :return: an iterator with extra keys """ for key in self._dbmodel.extras.keys(): yield key + def _flag_field(self, field): + from aiida.backends.sqlalchemy.utils import flag_modified + flag_modified(self._dbmodel, field) + + def _flush_if_stored(self): + if self._dbmodel.is_saved(): + self._dbmodel.save() + def add_incoming(self, source, link_type, link_label): """Add a link of the given type from a given node to ourself. 
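The `_flag_field` and `_flush_if_stored` helpers introduced above exist because SQLAlchemy does not track in-place mutations of a plain dict held in a JSONB column. A minimal sketch of the underlying mechanism, assuming a mapped `DbNode` model, an open `session` and a primary key `node_pk` (all placeholders, not part of this patch):

    from sqlalchemy.orm.attributes import flag_modified

    node = session.query(DbNode).get(node_pk)  # hypothetical stored model instance
    node.extras['tag'] = 'production'          # in-place mutation: invisible to the ORM
    flag_modified(node, 'extras')              # explicitly mark the JSONB column as dirty
    session.commit()                           # the emitted UPDATE now includes `extras`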
@@ -401,18 +490,23 @@ def _add_link(self, source, link_type, link_label): except SQLAlchemyError as exception: raise exceptions.UniquenessError('failed to create the link: {}'.format(exception)) - def store(self, attributes=None, links=None, with_transaction=True): + def clean_values(self): + self._dbmodel.attributes = clean_value(self._dbmodel.attributes) + self._dbmodel.extras = clean_value(self._dbmodel.extras) + + def store(self, links=None, with_transaction=True, clean=True): """Store the node in the database. - :param attributes: optional attributes to set before storing, will override any existing attributes :param links: optional links to add before storing + :param with_transaction: if False, do not use a transaction because the caller will already have opened one. + :param clean: boolean, if True, will clean the attributes and extras before attempting to store """ session = get_scoped_session() - session.add(self._dbmodel) + if clean: + self.clean_values() - if attributes: - self._dbmodel.attributes = attributes + session.add(self._dbmodel) if links: for link_triple in links: @@ -433,6 +527,18 @@ class SqlaNodeCollection(BackendNodeCollection): ENTITY_CLASS = SqlaNode + def get(self, pk): + """Return a Node entry from the collection with the given id + + :param pk: id of the node + """ + session = get_scoped_session() + + try: + return self.ENTITY_CLASS.from_dbmodel(session.query(models.DbNode).filter_by(id=pk).one(), self.backend) + except NoResultFound: + raise exceptions.NotExistent("Node with pk '{}' not found".format(pk)) + def delete(self, pk): """Remove a Node entry from the collection with the given id diff --git a/aiida/orm/implementation/sqlalchemy/utils.py b/aiida/orm/implementation/sqlalchemy/utils.py index fd14576f65..31f63ff607 100644 --- a/aiida/orm/implementation/sqlalchemy/utils.py +++ b/aiida/orm/implementation/sqlalchemy/utils.py @@ -19,8 +19,8 @@ from sqlalchemy.types import Integer, Boolean import sqlalchemy.exc -from aiida.common import exceptions from aiida.backends.sqlalchemy import get_scoped_session +from aiida.common import exceptions __all__ = ['django_filter'] @@ -111,28 +111,6 @@ def disable_expire_on_commit(session): finally: session.expire_on_commit = current_value - -def iter_dict(attrs): - if isinstance(attrs, dict): - for key in sorted(attrs.keys()): - it = iter_dict(attrs[key]) - for k, v in it: - new_key = key - if k: - new_key += "." + str(k) - yield new_key, v - elif isinstance(attrs, list): - for i, val in enumerate(attrs): - it = iter_dict(val) - for k, v in it: - new_key = str(i) - if k: - new_key += "." + str(k) - yield new_key, v - else: - yield "", attrs - - def _create_op_func(op): def f(attr, val): return getattr(attr, op)(val) diff --git a/aiida/orm/nodes/data/array/kpoints.py b/aiida/orm/nodes/data/array/kpoints.py index c7cee1556e..c28daac184 100644 --- a/aiida/orm/nodes/data/array/kpoints.py +++ b/aiida/orm/nodes/data/array/kpoints.py @@ -245,7 +245,7 @@ def set_kpoints_mesh(self, mesh, offset=None): from aiida.common.exceptions import ModificationNotAllowed # validate try: - the_mesh = tuple(int(i) for i in mesh) + the_mesh = [int(i) for i in mesh] if len(the_mesh) != 3: raise ValueError except (IndexError, ValueError, TypeError): @@ -253,7 +253,7 @@ def set_kpoints_mesh(self, mesh, offset=None): if offset is None: offset = [0., 0., 0.] 
try: - the_offset = tuple(float(i) for i in offset) + the_offset = [float(i) for i in offset] if len(the_offset) != 3: raise ValueError except (IndexError, ValueError, TypeError): diff --git a/aiida/orm/nodes/data/data.py b/aiida/orm/nodes/data/data.py index 19bd85056c..cef2532e32 100644 --- a/aiida/orm/nodes/data/data.py +++ b/aiida/orm/nodes/data/data.py @@ -79,7 +79,7 @@ def clone(self): backend_clone = self.backend_entity.clone() clone = self.__class__.from_backend_entity(backend_clone) - clone.set_attributes(copy.deepcopy(self.attributes)) + clone.reset_attributes(copy.deepcopy(self.attributes)) clone.put_object_from_tree(self._repository._get_base_folder().abspath) # pylint: disable=protected-access return clone diff --git a/aiida/orm/nodes/data/structure.py b/aiida/orm/nodes/data/structure.py index 97625b82b4..6fbb0cc3b6 100644 --- a/aiida/orm/nodes/data/structure.py +++ b/aiida/orm/nodes/data/structure.py @@ -1265,9 +1265,9 @@ def get_pymatgen(self, **kwargs): Default is False (no spin added). .. note:: The spins are set according to the following rule: - + * if the kind name ends with 1 -> spin=+1 - + * if the kind name ends with 2 -> spin=-1 .. note:: Requires the pymatgen module (version >= 3.0.13, usage @@ -1282,9 +1282,9 @@ def get_pymatgen_structure(self, **kwargs): Default is False (no spin added). .. note:: The spins are set according to the following rule: - + * if the kind name ends with 1 -> spin=+1 - + * if the kind name ends with 2 -> spin=-1 .. note:: Requires the pymatgen module (version >= 3.0.13, usage @@ -1330,7 +1330,7 @@ def append_kind(self, kind): raise ValueError("A kind with the same name ({}) already exists.".format(kind.name)) # If here, no exceptions have been raised, so I add the site. - self.append_to_attr('kinds', new_kind.get_raw()) + self.attributes.setdefault('kinds', []).append(new_kind.get_raw()) # Note, this is a dict (with integer keys) so it allows for empty # spots! if not hasattr(self, '_internal_kind_tags'): @@ -1357,7 +1357,7 @@ def append_site(self, site): "{}".format(site.kind_name, [k.name for k in self.kinds])) # If here, no exceptions have been raised, so I add the site. - self.append_to_attr('sites', new_site.get_raw()) + self.attributes.setdefault('sites', []).append(new_site.get_raw()) def append_atom(self, **kwargs): """ @@ -1826,9 +1826,9 @@ def _get_object_pymatgen_structure(self, **kwargs): Default is False (no spin added). .. 
note:: The spins are set according to the following rule: - + * if the kind name ends with 1 -> spin=+1 - + * if the kind name ends with 2 -> spin=-1 :return: a pymatgen Structure object corresponding to this diff --git a/aiida/orm/nodes/node.py b/aiida/orm/nodes/node.py index 929af69d09..dd066c8734 100644 --- a/aiida/orm/nodes/node.py +++ b/aiida/orm/nodes/node.py @@ -17,7 +17,6 @@ import importlib import six -from aiida.backends.utils import validate_attribute_key from aiida.common import exceptions from aiida.common.escaping import sql_string_match from aiida.common.hashing import make_hash, _HASH_EXTRA_KEY @@ -26,7 +25,7 @@ from aiida.manage.manager import get_manager from aiida.orm.utils.links import LinkManager, LinkTriple from aiida.orm.utils.repository import Repository -from aiida.orm.utils.node import AbstractNodeMeta, clean_value +from aiida.orm.utils.node import AbstractNodeMeta, validate_attribute_extra_key from ..comments import Comment from ..computers import Computer @@ -101,7 +100,6 @@ def delete(self, node_id): # These are to be initialized in the `initialization` method _incoming_cache = None - _attrs_cache = None _repository = None @classmethod @@ -149,7 +147,6 @@ def initialize(self): This needs to be called explicitly in each specific subclass implementation of the init. """ super(Node, self).initialize() - self._attrs_cache = {} # A cache of incoming links represented as a list of LinkTriples instances self._incoming_cache = list() @@ -315,296 +312,304 @@ def mtime(self): @property def attributes(self): - """Return the attributes dictionary. + """Return the complete attributes dictionary. - .. note:: This will fetch all the attributes from the database which can be a heavy operation. If you only need - the keys or some values, use the iterators `attributes_keys` and `attributes_items`, or the getters - `get_attribute` and `get_attributes` instead. + .. warning:: While the node is unstored, this will return references of the attributes on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned + attributes will be a deep copy and mutations of the database attributes will have to go through the + appropriate set methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you + only need the keys or some values, use the iterators `attributes_keys` and `attributes_items`, or the + getters `get_attribute` and `get_attribute_many` instead. :return: the attributes as a dictionary """ + attributes = self.backend_entity.attributes + if self.is_stored: - return self.backend_entity.attributes + attributes = copy.deepcopy(attributes) - return self._attrs_cache + return attributes def get_attribute(self, key, default=_NO_DEFAULT): - """Return an attribute. + """Return the value of an attribute. + + .. warning:: While the node is unstored, this will return a reference of the attribute on the database model, + meaning that changes on the returned value (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned + attribute will be a deep copy and mutations of the database attributes will have to go through the + appropriate set methods. 
:param key: name of the attribute - :param default: return this value instead of raising if the extra does not exist + :param default: return this value instead of raising if the attribute does not exist :return: the value of the attribute - :raises AttributeError: if the attribute does not exist + :raises AttributeError: if the attribute does not exist and no default is specified """ try: - if self.is_stored: - attribute = self.backend_entity.get_attribute(key=key) - else: - attribute = self._attrs_cache[key] - except (AttributeError, KeyError): + attribute = self.backend_entity.get_attribute(key) + except AttributeError: if default is _NO_DEFAULT: - raise AttributeError('attribute {} does not exist'.format(key)) + raise attribute = default + if self.is_stored: + attribute = copy.deepcopy(attribute) + return attribute - def get_attributes(self, keys): - """Return a set of attributes. + def get_attribute_many(self, keys): + """Return the values of multiple attributes. - :param keys: names of the attributes - :return: the values of the attributes + .. warning:: While the node is unstored, this will return references of the attributes on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned + attributes will be a deep copy and mutations of the database attributes will have to go through the + appropriate set methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you + only need the keys or some values, use the iterators `attributes_keys` and `attributes_items`, or the + getters `get_attribute` and `get_attribute_many` instead. + + :param keys: a list of attribute names + :return: a list of attribute values :raises AttributeError: if at least one attribute does not exist """ - return self.backend_entity.get_attributes(keys) + attributes = self.backend_entity.get_attribute_many(keys) - def set_attribute(self, key, value, clean=True, stored_check=True): - """Set an attribute to the given value. + if self.is_stored: + attributes = copy.deepcopy(attributes) + + return attributes - Setting attributes on a stored node is forbidden unless `stored_check` is set to False. + def set_attribute(self, key, value): + """Set an attribute to the given value. :param key: name of the attribute :param value: value of the attribute - :param clean: boolean, when True will clean the value before passing it to the backend - :param stored_check: boolean, if True skips the check whether the node is stored - :raise aiida.common.ModificationNotAllowed: if the node is stored and `stored_check=False` + :raise aiida.common.ValidationError: if the key is invalid, i.e. contains periods + :raise aiida.common.ModificationNotAllowed: if the node is stored """ - if stored_check and self.is_stored: - raise exceptions.ModificationNotAllowed('cannot set an attribute on a stored node') - - validate_attribute_key(key) - - if clean: - value = clean_value(value) - if self.is_stored: - self.backend_entity.set_attribute(key, value) - else: - self._attrs_cache[key] = value + raise exceptions.ModificationNotAllowed('the attributes of a stored node are immutable') + + validate_attribute_extra_key(key) + self.backend_entity.set_attribute(key, value) - def set_attributes(self, attributes): - """Set attributes. + def set_attribute_many(self, attributes): + """Set multiple attributes. .. 
note:: This will override any existing attributes that are present in the new dictionary. - :param attributes: the new attributes to set + :param attributes: a dictionary with the attributes to set + :raise aiida.common.ValidationError: if any of the keys are invalid, i.e. contain periods + :raise aiida.common.ModificationNotAllowed: if the node is stored """ if self.is_stored: - raise exceptions.ModificationNotAllowed('cannot set attributes of a stored node') + raise exceptions.ModificationNotAllowed('the attributes of a stored node are immutable') - for key, value in attributes.items(): - self._attrs_cache[key] = clean_value(value) + for key in attributes: + validate_attribute_extra_key(key) + + self.backend_entity.set_attribute_many(attributes) def reset_attributes(self, attributes): """Reset the attributes. - .. note:: This will completely reset any existing attributes and replace them with the new dictionary. + .. note:: This will completely clear any existing attributes and replace them with the new dictionary. - :param attributes: the new attributes to set + :param attributes: a dictionary with the attributes to set + :raise aiida.common.ValidationError: if any of the keys are invalid, i.e. contain periods + :raise aiida.common.ModificationNotAllowed: if the node is stored """ if self.is_stored: - raise exceptions.ModificationNotAllowed('cannot reset the attributes of a stored node') + raise exceptions.ModificationNotAllowed('the attributes of a stored node are immutable') - self.clear_attributes() - self.set_attributes(attributes) + for key in attributes: + validate_attribute_extra_key(key) - def delete_attribute(self, key, stored_check=True): - """Delete an attribute. + self.backend_entity.reset_attributes(attributes) - Deleting attributes on a stored node is forbidden unless `stored_check` is set to False. + def delete_attribute(self, key): + """Delete an attribute. :param key: name of the attribute - :param stored_check: boolean, if True skips the check whether the node is stored :raises AttributeError: if the attribute does not exist - :raise aiida.common.ModificationNotAllowed: if the node is stored and `stored_check=False` + :raise aiida.common.ModificationNotAllowed: if the node is stored """ - if stored_check and self.is_stored: - raise exceptions.ModificationNotAllowed('cannot delete an attribute on a stored node') - if self.is_stored: - self.backend_entity.delete_attribute(key) - else: - try: - del self._attrs_cache[key] - except KeyError: - raise AttributeError('attribute {} does not exist'.format(key)) + raise exceptions.ModificationNotAllowed('the attributes of a stored node are immutable') + + self.backend_entity.delete_attribute(key) - def delete_attributes(self, keys): + def delete_attribute_many(self, keys): """Delete multiple attributes. 
:param keys: names of the attributes to delete - :raises AttributeError: if at least on of the attribute does not exist + :raises AttributeError: if at least one of the attribute does not exist + :raise aiida.common.ModificationNotAllowed: if the node is stored """ if self.is_stored: - raise exceptions.ModificationNotAllowed('cannot delete attributes of a stored node') + raise exceptions.ModificationNotAllowed('the attributes of a stored node are immutable') - attributes_backup = copy.deepcopy(self._attrs_cache) - - for key in keys: - try: - self._attrs_cache.pop(key) - except KeyError: - self._attrs_cache = attributes_backup - raise AttributeError('attribute {} does not exist'.format(key)) + self.backend_entity.delete_attribute_many(keys) def clear_attributes(self): """Delete all attributes.""" if self.is_stored: - raise exceptions.ModificationNotAllowed('cannot clear the attributes of a stored node') + raise exceptions.ModificationNotAllowed('the attributes of a stored node are immutable') - self._attrs_cache = {} + self.backend_entity.clear_attributes() def attributes_items(self): - """Return an iterator over the attribute items. + """Return an iterator over the attributes. :return: an iterator with attribute key value pairs """ - if self.is_stored: - for key, value in self.backend_entity.attributes_items(): - yield key, value - else: - for key, value in self._attrs_cache.items(): - yield key, value + return self.backend_entity.attributes_items() def attributes_keys(self): """Return an iterator over the attribute keys. :return: an iterator with attribute keys """ - if self.is_stored: - for key in self.backend_entity.attributes_keys(): - yield key - else: - for key in self._attrs_cache.keys(): - yield key + return self.backend_entity.attributes_keys() @property def extras(self): - """Return the extras dictionary. + """Return the complete extras dictionary. - .. note:: This will fetch all the extras from the database which can be a heavy operation. If you only need - the keys or some values, use the iterators `extras_keys` and `extras_items`, or the getters `get_extra` - and `get_extras` instead. + .. warning:: While the node is unstored, this will return references of the extras on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned extras + will be a deep copy and mutations of the database extras will have to go through the appropriate set + methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you only need the keys + or some values, use the iterators `extras_keys` and `extras_items`, or the getters `get_extra` and + `get_extra_many` instead. :return: the extras as a dictionary """ - return self.backend_entity.extras + extras = self.backend_entity.extras + + if self.is_stored: + extras = copy.deepcopy(extras) + + return extras def get_extra(self, key, default=_NO_DEFAULT): - """Return an extra. + """Return the value of an extra. + + .. warning:: While the node is unstored, this will return a reference of the extra on the database model, + meaning that changes on the returned value (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned extra + will be a deep copy and mutations of the database extras will have to go through the appropriate set + methods. 
:param key: name of the extra :param default: return this value instead of raising if the extra does not exist :return: the value of the extra - :raises AttributeError: if the extra does not exist + :raises AttributeError: if the extra does not exist and no default is specified """ try: - return self.backend_entity.get_extra(key) + extra = self.backend_entity.get_extra(key) except AttributeError: - if default is not _NO_DEFAULT: - return default - raise + if default is _NO_DEFAULT: + raise + extra = default + + if self.is_stored: + extra = copy.deepcopy(extra) + + return extra - def get_extras(self, keys): - """Return a set of extras. + def get_extra_many(self, keys): + """Return the values of multiple extras. - :param keys: names of the extras - :return: the values of the extras + .. warning:: While the node is unstored, this will return references of the extras on the database model, + meaning that changes on the returned values (if they are mutable themselves, e.g. a list or dictionary) will + automatically be reflected on the database model as well. As soon as the node is stored, the returned extras + will be a deep copy and mutations of the database extras will have to go through the appropriate set + methods. Therefore, once stored, retrieving a deep copy can be a heavy operation. If you only need the keys + or some values, use the iterators `extras_keys` and `extras_items`, or the getters `get_extra` and + `get_extra_many` instead. + + :param keys: a list of extra names + :return: a list of extra values :raises AttributeError: if at least one extra does not exist """ - return self.backend_entity.get_extras(keys) + extras = self.backend_entity.get_extra_many(keys) + + if self.is_stored: + extras = copy.deepcopy(extras) + + return extras def set_extra(self, key, value): """Set an extra to the given value. - Setting extras on unstored nodes is forbidden. - :param key: name of the extra :param value: value of the extra + :raise aiida.common.ValidationError: if the key is invalid, i.e. contains periods """ - self.backend_entity.set_extra(key, clean_value(value)) + validate_attribute_extra_key(key) + self.backend_entity.set_extra(key, value) - def set_extras(self, extras): - """Set extras. + def set_extra_many(self, extras): + """Set multiple extras. .. note:: This will override any existing extras that are present in the new dictionary. - :param extras: the new extras to set + :param extras: a dictionary with the extras to set + :raise aiida.common.ValidationError: if any of the keys are invalid, i.e. contain periods """ - self.backend_entity.set_extras(clean_value(extras)) + for key in extras: + validate_attribute_extra_key(key) + + self.backend_entity.set_extra_many(extras) def reset_extras(self, extras): """Reset the extras. - .. note:: This will completely reset any existing extras and replace them with the new dictionary. + .. note:: This will completely clear any existing extras and replace them with the new dictionary. - :param extras: the new extras to set + :param extras: a dictionary with the extras to set + :raise aiida.common.ValidationError: if any of the keys are invalid, i.e. contain periods """ - self.backend_entity.reset_extras(clean_value(extras)) + for key in extras: + validate_attribute_extra_key(key) + + self.backend_entity.reset_extras(extras) def delete_extra(self, key): """Delete an extra. - Deleting extras on unstored nodes is forbidden.
- :param key: name of the extra :raises AttributeError: if the extra does not exist """ self.backend_entity.delete_extra(key) - def delete_extras(self, keys): + def delete_extra_many(self, keys): """Delete multiple extras. :param keys: names of the extras to delete - :raises AttributeError: if at least on of the extra does not exist + :raises AttributeError: if at least one of the extra does not exist """ - self.backend_entity.delete_extras(keys) + self.backend_entity.delete_extra_many(keys) def clear_extras(self): """Delete all extras.""" self.backend_entity.clear_extras() def extras_items(self): - """Return an iterator over the extra items. + """Return an iterator over the extras. :return: an iterator with extra key value pairs """ - for key, value in self.backend_entity.extras(): - yield key, value + return self.backend_entity.extras_items() def extras_keys(self): - """Return an iterator over the attribute keys. - - :return: an iterator with attribute keys - """ - for key in self.backend_entity.extras(): - yield key + """Return an iterator over the extra keys. - def append_to_attr(self, key, value, clean=True): + :return: an iterator with extra keys """ - Append value to an attribute of the Node (in the DbAttribute table). - - :param key: key name of "list-type" attribute - If attribute doesn't exist, it is created. - :param value: the value to append to the list - :param clean: whether to clean the value - WARNING: when set to False, storing will throw errors - for any data types not recognized by the db backend - :raise aiida.common.ValidationError: if the key is not valid, e.g. it contains the separator symbol - """ - if self.is_stored: - raise exceptions.ModificationNotAllowed('can only call `append_to_attr` on unstored nodes') - - validate_attribute_key(key) - - self._attrs_cache.setdefault(key, list()) - - try: - if clean: - self._attrs_cache[key].append(clean_value(value)) - else: - self._attrs_cache[key].append(value) - except AttributeError: - raise AttributeError('can only call `append_to_attr` for attributes that are lists') + return self.backend_entity.extras_keys() def list_objects(self, key=None): """Return a list of the objects contained in this repository, optionally in the given sub directory. @@ -941,7 +946,7 @@ def store_all(self, with_transaction=True, use_cache=None): for link_triple in self._incoming_cache: if not link_triple.node.is_stored: - link_triple.node.store(with_transaction=False, use_cache=use_cache) + link_triple.node.store(with_transaction=with_transaction, use_cache=use_cache) return self.store(with_transaction, use_cache=use_cache) @@ -973,13 +978,17 @@ def store(self, with_transaction=True, use_cache=None): if use_cache is None: use_cache = get_use_cache(type(self)) + # Clean the values on the backend node *before* computing the hash in `_get_same_node`. This will allow + # us to set `clean=False` if we are storing normally, since the values will already have been cleaned + self._backend_entity.clean_values() + # Retrieve the cached node. 
same_node = self._get_same_node() if use_cache else None if same_node is not None: self._store_from_cache(same_node, with_transaction=with_transaction) else: - self._store(with_transaction=with_transaction) + self._store(with_transaction=with_transaction, clean=True) # Set up autogrouping used by verdi run from aiida.orm.autogroup import current_autogroup, Autogroup, VERDIAUTOGROUP_TYPE @@ -997,10 +1006,11 @@ def store(self, with_transaction=True, use_cache=None): return self - def _store(self, with_transaction=True): + def _store(self, with_transaction=True, clean=True): """Store the node in the database while saving its attributes and repository directory. - :parameter with_transaction: if False, do not use a transaction because the caller will already have opened one. + :param with_transaction: if False, do not use a transaction because the caller will already have opened one. + :param clean: boolean, if True, will clean the attributes and extras before attempting to store """ # First store the repository folder such that if this fails, there won't be an incomplete node in the database. # On the flipside, in the case that storing the node does fail, the repository will now have an orphaned node @@ -1008,15 +1018,13 @@ self._repository.store() try: - attributes = self._attrs_cache links = self._incoming_cache - self._backend_entity.store(attributes, links, with_transaction=with_transaction) + self._backend_entity.store(links, with_transaction=with_transaction, clean=clean) except Exception: # I put back the files in the sandbox folder since the transaction did not succeed self._repository.restore() raise - self._attrs_cache = {} self._incoming_cache = list() self._backend_entity.set_extra(_HASH_EXTRA_KEY, self.get_hash()) @@ -1050,7 +1058,7 @@ def _store_from_cache(self, cache_node, with_transaction): self.put_object_from_tree(cache_node._repository._get_base_folder().abspath) # pylint: disable=protected-access - self._store(with_transaction=with_transaction) + self._store(with_transaction=with_transaction, clean=False) self._add_outputs_from_cache(cache_node) self.set_extra('_aiida_cached_from', cache_node.uuid) diff --git a/aiida/orm/querybuilder.py b/aiida/orm/querybuilder.py index 9ab239fe37..f9c24e8d84 100644 --- a/aiida/orm/querybuilder.py +++ b/aiida/orm/querybuilder.py @@ -1169,9 +1169,7 @@ def _build_projections(self, tag, items_to_project=None): if items_to_project is None: items_to_project = self._projections.get(tag, []) - # Return here if there is nothing to project, - # reduces number of key in return dictionary - + # Return here if there is nothing to project, reduces number of keys in return dictionary if self._debug: print(tag, items_to_project) if not items_to_project: @@ -1186,14 +1184,7 @@ def _build_projections(self, tag, items_to_project=None): property_names = list() if projectable_entity_name == '**': # Need to expand - property_names.extend(self._impl.modify_expansions( - alias, self._impl.get_column_names(alias))) - - # ~ for s in ('attributes', 'extras'): - # ~ try: - # ~ entity_names.remove(s) - # ~ except ValueError: - # ~ pass + property_names.extend(self._impl.modify_expansions(alias, self._impl.get_column_names(alias))) else: property_names.extend(self._impl.modify_expansions(alias, [projectable_entity_name])) @@ -2185,7 +2176,6 @@ def iterall(self, batch_size=100): item[i] = self.get_aiida_entity_res(item_entry) yield item - return def iterdict(self, batch_size=100): """ diff --git a/aiida/orm/utils/mixins.py
b/aiida/orm/utils/mixins.py index 5b3f5cf9f7..e3cee17c68 100644 --- a/aiida/orm/utils/mixins.py +++ b/aiida/orm/utils/mixins.py @@ -15,7 +15,7 @@ import inspect import io -from aiida.common.exceptions import ModificationNotAllowed +from aiida.common import exceptions from aiida.common.lang import override from aiida.common.lang import classproperty @@ -66,16 +66,14 @@ def store_source_info(self, func): @property def function_name(self): - """ - Return the function name of the wrapped function + """Return the function name of the wrapped function. :returns: the function name or None """ return self.get_attribute(self.FUNCTION_NAME_KEY, None) def _set_function_name(self, function_name): - """ - Set the function name of the wrapped function + """Set the function name of the wrapped function. :param function_name: the function name """ @@ -83,16 +81,14 @@ def _set_function_name(self, function_name): @property def function_namespace(self): - """ - Return the function namespace of the wrapped function + """Return the function namespace of the wrapped function. :returns: the function namespace or None """ return self.get_attribute(self.FUNCTION_NAMESPACE_KEY, None) def _set_function_namespace(self, function_namespace): - """ - Set the function namespace of the wrapped function + """Set the function namespace of the wrapped function. :param function_namespace: the function namespace """ @@ -100,24 +96,21 @@ def _set_function_namespace(self, function_namespace): @property def function_starting_line_number(self): - """ - Return the starting line number of the wrapped function in its source file + """Return the starting line number of the wrapped function in its source file. :returns: the starting line number or None """ return self.get_attribute(self.FUNCTION_STARTING_LINE_KEY, None) def _set_function_starting_line_number(self, function_starting_line_number): - """ - Set the starting line number of the wrapped function in its source file + """Set the starting line number of the wrapped function in its source file. :param function_starting_line_number: the starting line number """ self.set_attribute(self.FUNCTION_STARTING_LINE_KEY, function_starting_line_number) def get_function_source_code(self): - """ - Return the absolute path to the source file in the repository + """Return the absolute path to the source file in the repository. :returns: the absolute path of the source file in the repository, or None if it does not exist """ @@ -135,8 +128,7 @@ def _updatable_attributes(cls): # pylint: disable=no-self-argument return (cls.SEALED_KEY,) def validate_incoming(self, source, link_type, link_label): - """ - Validate adding a link of the given type from a given node to ourself. + """Validate adding a link of the given type from a given node to ourself. Adding an incoming link to a sealed node is forbidden. @@ -146,13 +138,12 @@ def validate_incoming(self, source, link_type, link_label): :raise aiida.common.ModificationNotAllowed: if the target node (self) is sealed """ if self.is_sealed: - raise ModificationNotAllowed('Cannot add a link to a sealed node') + raise exceptions.ModificationNotAllowed('Cannot add a link to a sealed node') super(Sealable, self).validate_incoming(source, link_type=link_type, link_label=link_label) def validate_outgoing(self, target, link_type, link_label): - """ - Validate adding a link of the given type from ourself to a given node. + """Validate adding a link of the given type from ourself to a given node. Adding an outgoing link from a sealed node is forbidden. 
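To make the sealing contract enforced by these validators and by the overridden `set_attribute` below concrete, a short hedged sketch (the `node` is assumed to be a stored process node, the value is arbitrary):

    from aiida.common import exceptions

    node.seal()
    assert node.is_sealed

    try:
        node.set_attribute('energy', -1.5)
    except exceptions.ModificationNotAllowed:
        pass  # attributes of a sealed node are immutable

    # only keys listed in `_updatable_attributes` (e.g. the sealed flag itself)
    # may still be modified once the node is stored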
@@ -162,56 +153,50 @@ def validate_outgoing(self, target, link_type, link_label): :raise aiida.common.ModificationNotAllowed: if the source node (self) is sealed """ if self.is_sealed: - raise ModificationNotAllowed('Cannot add a link from a sealed node') + raise exceptions.ModificationNotAllowed('Cannot add a link from a sealed node') super(Sealable, self).validate_outgoing(target, link_type=link_type, link_label=link_label) @property def is_sealed(self): - """ - Returns whether the node is sealed, i.e. whether the sealed attribute has been set to True - """ + """Returns whether the node is sealed, i.e. whether the sealed attribute has been set to True.""" return self.get_attribute(self.SEALED_KEY, False) def seal(self): - """ - Seal the node by setting the sealed attribute to True - """ + """Seal the node by setting the sealed attribute to True.""" if not self.is_sealed: self.set_attribute(self.SEALED_KEY, True) @override - def set_attribute(self, key, value, **kwargs): - """ - Set a new attribute + def set_attribute(self, key, value): + """Set an attribute to the given value. - :param key: attribute name - :param value: attribute value - :raise aiida.common.ModificationNotAllowed: if the node is already sealed or if the node is already stored - and the attribute is not updatable + :param key: name of the attribute + :param value: value of the attribute + :raise aiida.common.exceptions.ModificationNotAllowed: if the node is already sealed or if the node + is already stored and the attribute is not updatable. """ if self.is_sealed: - raise ModificationNotAllowed('Cannot change the attributes of a sealed node') + raise exceptions.ModificationNotAllowed('attributes of a sealed node are immutable') if self.is_stored and key not in self._updatable_attributes: - raise ModificationNotAllowed('Cannot change the immutable attributes of a stored node') + raise exceptions.ModificationNotAllowed('`{}` is not an updatable attribute'.format(key)) - super(Sealable, self).set_attribute(key, value, stored_check=False, **kwargs) + self.backend_entity.set_attribute(key, value) @override def delete_attribute(self, key): - """ - Delete an attribute + """Delete an attribute. - :param key: attribute name - :raise AttributeError: if key does not exist - :raise aiida.common.ModificationNotAllowed: if the node is already sealed or if the node is already stored - and the attribute is not updatable + :param key: name of the attribute + :raises AttributeError: if the attribute does not exist + :raise aiida.common.exceptions.ModificationNotAllowed: if the node is already sealed or if the node + is already stored and the attribute is not updatable. 
""" if self.is_sealed: - raise ModificationNotAllowed('Cannot change the attributes of a sealed node') + raise exceptions.ModificationNotAllowed('attributes of a sealed node are immutable') if self.is_stored and key not in self._updatable_attributes: - raise ModificationNotAllowed('Cannot change the immutable attributes of a stored node') + raise exceptions.ModificationNotAllowed('`{}` is not an updatable attribute'.format(key)) - super(Sealable, self).delete_attribute(key, stored_check=False) + self.backend_entity.delete_attribute(key) diff --git a/aiida/orm/utils/node.py b/aiida/orm/utils/node.py index 79f8e402c4..6b19323312 100644 --- a/aiida/orm/utils/node.py +++ b/aiida/orm/utils/node.py @@ -23,8 +23,12 @@ from aiida.common import exceptions from aiida.common.utils import strip_prefix +# This separator character is reserved to indicate nested fields in node attribute and extras dictionaries and +# therefore is not allowed in individual attribute or extra keys. +FIELD_SEPARATOR = '.' + __all__ = ('load_node_class', 'get_type_string_from_class', 'get_query_type_from_type_string', 'AbstractNodeMeta', - 'clean_value') + 'validate_attribute_extra_key', 'clean_value') def load_node_class(type_string): @@ -151,6 +155,19 @@ def get_query_type_from_type_string(type_string): return type_string +def validate_attribute_extra_key(key): + """Validate the key for a node attribute or extra. + + :raise aiida.common.ValidationError: if the key is not a string or contains reserved separator character + """ + if not key or not isinstance(key, six.string_types): + raise exceptions.ValidationError('key for attributes or extras should be a string') + + if FIELD_SEPARATOR in key: + raise exceptions.ValidationError( + 'key for attributes or extras cannot contain the character `{}`'.format(FIELD_SEPARATOR)) + + def clean_value(value): """ Get value from input and (recursively) replace, if needed, all occurrences diff --git a/docs/source/developer_guide/core/internals.rst b/docs/source/developer_guide/core/internals.rst index 414a7ddb66..8e929327c9 100644 --- a/docs/source/developer_guide/core/internals.rst +++ b/docs/source/developer_guide/core/internals.rst @@ -17,7 +17,7 @@ of stored nodes is a core concept of AiiDA, this behavior is nonetheless enforce Node methods ****************** -- :py:meth:`~aiida.orm.utils.node.clean_value` takes a value and returns an object which can be serialized for storage in the database. Such an object must be able to be subsequently deserialized without changing value. If a simple datatype is passed (integer, float, etc.), a check is performed to see if it has a value of ``nan`` or ``inf``, as these cannot be stored. Otherwise, if a list, tuple, dictionary, etc., is passed, this check is performed for each value it contains. This is done recursively, automatically handling the case of nested objects. It is important to note that iterable type objects are converted to lists during this process, and mappings, such as dictionaries, are converted to normal dictionaries. This cleaning process is used by default when setting node attributes via :py:meth:`~aiida.orm.nodes.Node.set_attribute` and :py:meth:`~aiida.orm.nodes.Node.append_to_attr`, although it can be disabled by setting ``clean=False``. Values are also cleaned when setting extras on a stored node using :py:meth:`~aiida.orm.nodes.Node.set_extras` or :py:meth:`~aiida.orm.nodes.Node.reset_extras`, but this cannot be disabled. 
+- :py:meth:`~aiida.orm.utils.node.clean_value` takes a value and returns an object which can be serialized for storage in the database. Such an object must be able to be subsequently deserialized without changing value. If a simple datatype is passed (integer, float, etc.), a check is performed to see if it has a value of ``nan`` or ``inf``, as these cannot be stored. Otherwise, if a list, tuple, dictionary, etc., is passed, this check is performed for each value it contains. This is done recursively, automatically handling the case of nested objects. It is important to note that iterable type objects are converted to lists during this process, and mappings, such as dictionaries, are converted to normal dictionaries. This cleaning process is used by default when setting node attributes via :py:meth:`~aiida.orm.nodes.Node.set_attribute`, although it can be disabled by setting ``clean=False``. Values are also cleaned when setting extras on a stored node using :py:meth:`~aiida.orm.nodes.Node.set_extra_many` or :py:meth:`~aiida.orm.nodes.Node.reset_extras`, but this cannot be disabled. Node methods & properties @@ -158,7 +158,7 @@ Each :py:meth:`~aiida.orm.nodes.Node` object can have attributes which are prope - :py:meth:`~aiida.orm.nodes.Node.set_attribute` adds a new attribute to the node. The key of the attribute is the property name (e.g. ``energy``, ``lattice_vectors`` etc) and the value of the attribute is the value of that property. -- :py:meth:`~aiida.orm.nodes.Node.delete_attribute` & :py:meth:`~aiida.orm.nodes.Node.delete_attributes` delete a specific or all attributes. +- :py:meth:`~aiida.orm.nodes.Node.delete_attribute` & :py:meth:`~aiida.orm.nodes.Node.delete_attribute_many` delete one or multiple specific attributes. - :py:meth:`~aiida.orm.nodes.Node.get_attribute` returns a specific attribute. @@ -167,9 +167,9 @@ Extras related methods ====================== ``Extras`` are additional information that are added to the calculations. In contrast to ``files`` and ``attributes``, ``extras`` are information added by the user (user specific). -- :py:meth:`~aiida.orm.nodes.Node.set_extra` adds an ``extra`` to the database. To add a more ``extras`` at once, :py:meth:`~aiida.orm.nodes.Node.set_extras` can be used. +- :py:meth:`~aiida.orm.nodes.Node.set_extra` adds an ``extra`` to the database. To add more ``extras`` at once, :py:meth:`~aiida.orm.nodes.Node.set_extra_many` can be used. -- :py:meth:`~aiida.orm.nodes.Node.get_extra` and :py:meth:`~aiida.orm.nodes.Node.get_extras` return a specific ``extra`` or all the available ``extras`` respectively. +- :py:meth:`~aiida.orm.nodes.Node.get_extra` and :py:meth:`~aiida.orm.nodes.Node.get_extra_many` return one or multiple specific ``extras``, respectively. - :py:meth:`~aiida.orm.nodes.Node.delete_extra` deletes an ``extra``. @@ -260,7 +260,7 @@ Data Navigating inputs and outputs ***************************** -- :py:meth:`~aiida.orm.nodes.data.Data.creator` returns +- :py:meth:`~aiida.orm.nodes.data.Data.creator` returns either the CalculationNode that created it or ``None`` if this Data node was not created by a calculation.
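In terms of usage, the renamed plural methods documented above behave as in the following sketch (a hypothetical stored `node` with arbitrary values):

    node.set_extra_many({'family': 'SSSP', 'version': '1.0'})
    assert node.get_extra_many(['family', 'version']) == ['SSSP', '1.0']
    node.delete_extra_many(['version'])
    node.get_extra('version', None)  # default value prevents the AttributeError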
@@ -269,7 +269,7 @@ ProcessNode +++++++++++ Navigating inputs and outputs ***************************** -- :py:meth:`~aiida.orm.nodes.process.ProcessNode.caller` returns +- :py:meth:`~aiida.orm.nodes.process.ProcessNode.caller` returns either the caller WorkflowNode or ``None`` if this ProcessNode was not called by a process @@ -278,8 +278,8 @@ CalculationNode Navigating inputs and outputs ***************************** -- :py:meth:`~aiida.orm.nodes.process.calculation.CalculationNode.inputs` returns - a :py:meth:`~aiida.orm.utils.managers.NodeLinksManager` object that can be used +- :py:meth:`~aiida.orm.nodes.process.calculation.CalculationNode.inputs` returns + a :py:meth:`~aiida.orm.utils.managers.NodeLinksManager` object that can be used to access the node's incoming INPUT_CALC links. The ``NodeLinksManager`` can be used to quickly go from a node to a neighboring node. @@ -336,11 +336,11 @@ Navigating inputs and outputs u'remote_folder'] The ``.inputs`` manager for ``WorkflowNode`` and the ``.outputs`` manager - both for ``CalculationNode`` and ``WorkflowNode`` work in the same way + both for ``CalculationNode`` and ``WorkflowNode`` work in the same way (see below). -- :py:meth:`~aiida.orm.nodes.process.calculation.CalculationNode.outputs` - returns a :py:meth:`~aiida.orm.utils.managers.NodeLinksManager` object +- :py:meth:`~aiida.orm.nodes.process.calculation.CalculationNode.outputs` + returns a :py:meth:`~aiida.orm.utils.managers.NodeLinksManager` object that can be used to access the node's outgoing CREATE links. .. _calculation updatable attributes: @@ -358,12 +358,12 @@ WorkflowNode Navigating inputs and outputs ***************************** -- :py:meth:`~aiida.orm.nodes.process.workflow.WorkflowNode.inputs` returns a - :py:meth:`~aiida.orm.utils.managers.NodeLinksManager` object that can be used to +- :py:meth:`~aiida.orm.nodes.process.workflow.WorkflowNode.inputs` returns a + :py:meth:`~aiida.orm.utils.managers.NodeLinksManager` object that can be used to access the node's incoming INPUT_WORK links. -- :py:meth:`~aiida.orm.nodes.process.workflow.WorkflowNode.outputs` returns a - :py:meth:`~aiida.orm.utils.managers.NodeLinksManager` object that can be used +- :py:meth:`~aiida.orm.nodes.process.workflow.WorkflowNode.outputs` returns a + :py:meth:`~aiida.orm.utils.managers.NodeLinksManager` object that can be used to access the node's outgoing RETURN links. @@ -402,9 +402,9 @@ In case a method is renamed or removed, this is the procedure to follow: # If we call this DeprecationWarning, pycharm will properly strike out the function from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning # pylint: disable=redefined-builtin warnings.warn("", DeprecationWarning) - + # - + (of course replace the parts between ``< >`` symbols with the correct strings). 
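Applied to the renames in this changeset, the procedure described above would produce a shim along the following lines. Note that this is purely illustrative: this patch removes the old plural names outright rather than deprecating them.

    import warnings

    from aiida.common.warnings import AiidaDeprecationWarning as DeprecationWarning  # pylint: disable=redefined-builtin

    def get_extras(self, keys):
        """Deprecated, use `get_extra_many` instead."""
        warnings.warn('`get_extras` is deprecated, use `get_extra_many`', DeprecationWarning)
        return self.get_extra_many(keys)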
@@ -414,7 +414,7 @@ In case a method is renamed or removed, this is the procedure to follow:

 - Our ``AiidaDeprecationWarning`` does not inherit from ``DeprecationWarning``, so it will not be "hidden" by python
 - User can disable our warnings (and only those) by using AiiDA properties with::
-
+
      verdi config warnings.showdeprecations False

 Changing the config.json structure

From ee3d8e559b30b124dfec575de82ef51ab5d56135 Mon Sep 17 00:00:00 2001
From: Sebastiaan Huber
Date: Sun, 23 Jun 2019 12:13:51 +0200
Subject: [PATCH 08/11] Add concept of "mutable" model fields in the `ModelWrapper`

The goal of the `ModelWrapper` is to ensure that getting and setting fields of a
database model instance automatically take care of refreshing the state from the
database and flushing any changes. This comes at the cost of additional database
operations, which should be kept to a minimum, especially when they serve no
purpose, for example the refreshing and flushing of immutable model fields when
the model is already stored.

The get and set attribute methods are therefore improved to skip the refresh and
flush if the model is already saved and the field is immutable. Currently the
immutable fields are hard coded to the set of `pk`, `id` and `uuid`, since these
are always immutable for all the database models implemented in AiiDA.
---
 .pre-commit-config.yaml                      |   2 -
 aiida/orm/implementation/django/utils.py     |  92 ++++++--
 aiida/orm/implementation/sqlalchemy/utils.py | 229 ++++++-------------
 docs/source/nitpick-exceptions               |   1 +
 4 files changed, 149 insertions(+), 175 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2c69af7ddb..ab4f588643 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -115,8 +115,6 @@
         aiida/manage/backup/backup_base.py|
         aiida/orm/autogroup.py|
         aiida/orm/querybuilder.py|
-        aiida/orm/implementation/django/utils.py|
-        aiida/orm/implementation/sqlalchemy/utils.py|
         aiida/orm/importexport.py|
         aiida/orm/nodes/data/array/bands.py|
         aiida/orm/nodes/data/array/projection.py|
diff --git a/aiida/orm/implementation/django/utils.py b/aiida/orm/implementation/django/utils.py
index c78f195976..963cc2aa68 100644
--- a/aiida/orm/implementation/django/utils.py
+++ b/aiida/orm/implementation/django/utils.py
@@ -7,25 +7,32 @@
 # For further information on the license, see the LICENSE.txt file         #
 # For further information please visit http://www.aiida.net                #
 ###########################################################################
+"""Utilities for the implementation of the Django backend."""
 from __future__ import division
 from __future__ import print_function
 from __future__ import absolute_import

+# pylint: disable=import-error,no-name-in-module
 from django.db import transaction, IntegrityError
 from django.db.models.fields import FieldDoesNotExist

 from aiida.common import exceptions

+IMMUTABLE_MODEL_FIELDS = {'id', 'pk', 'uuid'}
+

 class ModelWrapper(object):
-    """
-    This wraps a Django model delegating all get/set attributes to the
-    underlying model class, BUT it will make sure that if the model is
-    stored then:
-    * before every read it has the latest value from the database, and,
-    * after ever write the updated value is flushed to the database.
+    """Wrap a database model instance to correctly update and flush the data model when getting or setting a field.
+
+    If the model is not stored, the behavior of the get and set attributes is unaltered. However, if the model is
+    stored, which is to say, it has a primary key, the `getattr` and `setattr` are modified as follows:
+
+    * `getattr`: if the item corresponds to a mutable model field, the model instance is refreshed first
+    * `setattr`: if the item corresponds to a mutable model field, changes are flushed after performing the change
     """

+    # pylint: disable=too-many-instance-attributes,useless-object-inheritance
+
     def __init__(self, model, auto_flush=()):
         """Construct the ModelWrapper.

@@ -39,41 +46,81 @@ def __init__(self, model, auto_flush=()):
         object.__setattr__(self, '_auto_flush', auto_flush)

     def __getattr__(self, item):
-        if self.is_saved() and self._is_model_field(item):
+        """Get an attribute of the model instance.
+
+        If the model is saved in the database and the item corresponds to a mutable model field, the field's value
+        is first refreshed from the database.
+
+        :param item: the name of the model field
+        :return: the value of the model's attribute
+        """
+        if self.is_saved() and self._is_mutable_model_field(item):
             self._ensure_model_uptodate(fields=(item,))

         return getattr(self._model, item)

     def __setattr__(self, key, value):
+        """Set the attribute on the model instance.
+
+        If the field being set is a mutable model field and the model is saved, the changes are flushed.
+
+        :param key: the name of the model field
+        :param value: the value to set
+        """
         setattr(self._model, key, value)
-        if self.is_saved() and self._is_model_field(key):
+        if self.is_saved() and self._is_mutable_model_field(key):
             fields = set((key,) + self._auto_flush)
             self._flush(fields=fields)

     def is_saved(self):
+        """Return whether the wrapped model instance is saved in the database.
+
+        :return: boolean, True if the model is saved in the database, False otherwise
+        """
         return self._model.pk is not None

     def save(self):
-        """ Save the model (possibly updating values if changed) """
+        """Store the model instance.
+
+        :raises `aiida.common.IntegrityError`: if a database integrity error is raised during the save.
+        """
         # transactions are needed here for Postgresql:
         # https://docs.djangoproject.com/en/1.7/topics/db/transactions/#handling-exceptions-within-postgresql-transactions
         with transaction.atomic():
             try:
                 self._model.save()
-            except IntegrityError as e:
-                # Convert to one of our exceptions
-                raise exceptions.IntegrityError(str(e))
+            except IntegrityError as exception:
+                raise exceptions.IntegrityError(str(exception))
+
+    def _is_mutable_model_field(self, field):
+        """Return whether the field is a mutable field of the model.
+
+        :return: boolean, True if the field is a model field and is not in the `IMMUTABLE_MODEL_FIELDS` set.
+        """
+        if field in IMMUTABLE_MODEL_FIELDS:
+            return False
+
+        return self._is_model_field(field)

     def _is_model_field(self, name):
+        """Return whether the field is a field of the model.
+
+        :return: boolean, True if the field is a model field, False otherwise.
+        """
         try:
-            # Check if it's a field
-            self._model.__class__._meta.get_field(name)
-            return True
+            self._model.__class__._meta.get_field(name)  # pylint: disable=protected-access
         except FieldDoesNotExist:
             return False
+        else:
+            return True

     def _flush(self, fields=None):
-        """ If the user is stored then save the current value """
+        """Flush the fields of the model to the database.
+
+        .. note:: If the wrapped model is not actually saved in the database yet, this method is a no-op.
+
+        :param fields: the model fields whose current value to flush to the database
+        """
         if self.is_saved():
             try:
                 # Manually append the `mtime` to fields to update, because when using the `update_fields` keyword of the
@@ -82,14 +129,21 @@ def _flush(self, fields=None):
                 if fields is not None and self._is_model_field('mtime'):
                     fields.add('mtime')
                 self._model.save(update_fields=fields)
-            except IntegrityError as e:
-                # Convert to one of our exceptions
-                raise exceptions.IntegrityError(str(e))
+            except IntegrityError as exception:
+                raise exceptions.IntegrityError(str(exception))

     def _ensure_model_uptodate(self, fields=None):
+        """Refresh all fields of the wrapped model instance by fetching the current state of the database instance.
+
+        :param fields: optionally refresh only these fields, if `None` all fields are refreshed.
+        """
         if self.is_saved():
             self._model.refresh_from_db(fields=fields)

     @staticmethod
     def _in_transaction():
+        """Return whether the current scope is within an open database transaction.
+
+        :return: boolean, True if currently in open transaction, False otherwise.
+        """
         return not transaction.get_autocommit()
diff --git a/aiida/orm/implementation/sqlalchemy/utils.py b/aiida/orm/implementation/sqlalchemy/utils.py
index 31f63ff607..bd9bbf4826 100644
--- a/aiida/orm/implementation/sqlalchemy/utils.py
+++ b/aiida/orm/implementation/sqlalchemy/utils.py
@@ -7,33 +7,36 @@
 # For further information on the license, see the LICENSE.txt file         #
 # For further information please visit http://www.aiida.net                #
 ###########################################################################
+"""Utilities for the implementation of the SqlAlchemy backend."""
 from __future__ import division
 from __future__ import print_function
 from __future__ import absolute_import
+
 import contextlib

-import six
-
+# pylint: disable=import-error,no-name-in-module
 from sqlalchemy import inspect
+from sqlalchemy.exc import IntegrityError
 from sqlalchemy.orm.attributes import flag_modified
-from sqlalchemy.orm.mapper import Mapper
-from sqlalchemy.types import Integer, Boolean
-import sqlalchemy.exc

 from aiida.backends.sqlalchemy import get_scoped_session
 from aiida.common import exceptions

-__all__ = ['django_filter']
+IMMUTABLE_MODEL_FIELDS = {'id', 'pk', 'uuid'}


 class ModelWrapper(object):
-    """
-    This wraps a SQLA model delegating all get/set attributes to the
-    underlying model class, BUT it will make sure that if the model is
-    stored then:
-    * before every read it has the latest value from the database, and,
-    * after ever write the updated value is flushed to the database.
+    """Wrap a database model instance to correctly update and flush the data model when getting or setting a field.
+
+    If the model is not stored, the behavior of the get and set attributes is unaltered. However, if the model is
+    stored, which is to say, it has a primary key, the `getattr` and `setattr` are modified as follows:
+
+    * `getattr`: if the item corresponds to a mutable model field, the model instance is refreshed first
+    * `setattr`: if the item corresponds to a mutable model field, changes are flushed after performing the change
     """

+    # pylint: disable=too-many-instance-attributes,useless-object-inheritance
+
     def __init__(self, model, auto_flush=()):
         """Construct the ModelWrapper.

@@ -47,40 +50,83 @@ def __init__(self, model, auto_flush=()):
         object.__setattr__(self, '_auto_flush', auto_flush)

     def __getattr__(self, item):
+        """Get an attribute of the model instance.
+
+        If the model is saved in the database, the item corresponds to a mutable model field and the current scope is
+        not in an open database transaction, then the field's value is first refreshed from the database.
+
+        :param item: the name of the model field
+        :return: the value of the model's attribute
+        """
         # Python 3's implementation of copy.copy does not call __init__ on the new object
         # but manually restores attributes instead. Make sure we never get into a recursive
         # loop by protecting the only special variable here: _model
         if item == '_model':
             raise AttributeError()

-        if self.is_saved() and not self._in_transaction() and self._is_model_field(item):
+        if self.is_saved() and self._is_mutable_model_field(item) and not self._in_transaction():
             self._ensure_model_uptodate(fields=(item,))

         return getattr(self._model, item)

     def __setattr__(self, key, value):
+        """Set the attribute on the model instance.
+
+        If the field being set is a mutable model field and the model is saved, the changes are flushed.
+
+        :param key: the name of the model field
+        :param value: the value to set
+        """
         setattr(self._model, key, value)
-        if self.is_saved() and self._is_model_field(key):
+        if self.is_saved() and self._is_mutable_model_field(key):
             fields = set((key,) + self._auto_flush)
             self._flush(fields=fields)

     def is_saved(self):
+        """Return whether the wrapped model instance is saved in the database.
+
+        :return: boolean, True if the model is saved in the database, False otherwise
+        """
         return self._model.id is not None

     def save(self):
-        """Store the model (possibly updating values if changed)."""
+        """Store the model instance.
+
+        .. note:: If one is currently in a transaction, this method is a no-op.
+
+        :raises `aiida.common.IntegrityError`: if a database integrity error is raised during the save.
+        """
         try:
             commit = not self._in_transaction()
             self._model.save(commit=commit)
-        except sqlalchemy.exc.IntegrityError as e:
+        except IntegrityError as exception:
             self._model.session.rollback()
-            raise exceptions.IntegrityError(str(e))
+            raise exceptions.IntegrityError(str(exception))

-    def _is_model_field(self, name):
-        return inspect(self._model.__class__).has_property(name)
+    def _is_mutable_model_field(self, field):
+        """Return whether the field is a mutable field of the model.
+
+        :return: boolean, True if the field is a model field and is not in the `IMMUTABLE_MODEL_FIELDS` set.
+        """
+        if field in IMMUTABLE_MODEL_FIELDS:
+            return False
+
+        return self._is_model_field(field)
+
+    def _is_model_field(self, field):
+        """Return whether the field is a field of the model.
+
+        :return: boolean, True if the field is a model field, False otherwise.
+        """
+        return inspect(self._model.__class__).has_property(field)

     def _flush(self, fields=()):
-        """If the model is stored then save the current value."""
+        """Flush the fields of the model to the database.
+
+        .. note:: If the wrapped model is not actually saved in the database yet, this method is a no-op.
+
+        :param fields: the model fields whose current value to flush to the database
+        """
         if self.is_saved():
             for field in fields:
                 flag_modified(self._model, field)
@@ -88,18 +134,24 @@ def _flush(self, fields=()):
             self.save()

     def _ensure_model_uptodate(self, fields=None):
-        if self.is_saved():
-            self._model.session.expire(self._model, attribute_names=fields)
+        """Refresh all fields of the wrapped model instance by fetching the current state of the database instance.
+
+        :param fields: optionally refresh only these fields, if `None` all fields are refreshed.
+ """ + self._model.session.expire(self._model, attribute_names=fields) @staticmethod def _in_transaction(): + """Return whether the current scope is within an open database transaction. + + :return: boolean, True if currently in open transaction, False otherwise. + """ return get_scoped_session().transaction.nested @contextlib.contextmanager def disable_expire_on_commit(session): - """ - Context manager that disables expire_on_commit and restores the original value on exit + """Context manager that disables expire_on_commit and restores the original value on exit :param session: The SQLA session :type session: :class:`sqlalchemy.orm.session.Session` @@ -110,134 +162,3 @@ def disable_expire_on_commit(session): yield session finally: session.expire_on_commit = current_value - -def _create_op_func(op): - def f(attr, val): - return getattr(attr, op)(val) - - return f - - -_from_op = { - 'in': _create_op_func('in_'), - 'gte': _create_op_func('__ge__'), - 'gt': _create_op_func('__gt__'), - 'lte': _create_op_func('__le__'), - 'lt': _create_op_func('__lt__'), - 'eq': _create_op_func('__eq__'), - 'startswith': lambda attr, val: attr.like('{}%'.format(val)), - 'contains': lambda attr, val: attr.like('%{}%'.format(val)), - 'endswith': lambda attr, val: attr.like('%{}'.format(val)), - 'istartswith': lambda attr, val: attr.ilike('{}%'.format(val)), - 'icontains': lambda attr, val: attr.ilike('%{}%'.format(val)), - 'iendswith': lambda attr, val: attr.ilike('%{}'.format(val)) -} - - -def django_filter(cls_query, **kwargs): - # Pass the query object you want to use. - # This also assume a AND between each arguments - - cls = inspect(cls_query)._entity_zero().class_ - q = cls_query - - # We regroup all the filter on a relationship at the same place, so that - # when a join is done, we can filter it, and then reset to the original - # query. 
- current_join = None - - tmp_attr = dict(key=None, val=None) - tmp_extra = dict(key=None, val=None) - - for key in sorted(kwargs.keys()): - val = kwargs[key] - - join, field, op = [None] * 3 - - splits = key.split("__") - if len(splits) > 3: - raise ValueError("Too many parameters to handle.") - # something like "computer__id__in" - elif len(splits) == 3: - join, field, op = splits - # we have either "computer__id", which means join + field quality or - # "id__gte" which means field + op - elif len(splits) == 2: - if splits[1] in _from_op.keys(): - field, op = splits - else: - join, field = splits - else: - field = splits[0] - - if "dbattributes" == join: - if "val" in field: - field = "val" - if field in ["key", "val"]: - tmp_attr[field] = val - continue - elif "dbextras" == join: - if "val" in field: - field = "val" - if field in ["key", "val"]: - tmp_extra[field] = val - continue - - current_cls = cls - if join: - if current_join != join: - q = q.join(join, aliased=True) - current_join = join - - current_cls = [r for r in inspect(cls).relationships.items() if r[0] == join][0][1].argument - if isinstance(current_cls, Mapper): - current_cls = current_cls.class_ - else: - current_cls = current_cls() - - else: - if current_join is not None: - # Filter on the queried class again - q = q.reset_joinpoint() - current_join = None - - if field == "pk": - field = "id" - - filtered_field = getattr(current_cls, field) - if not op: - op = "eq" - f = _from_op[op] - - q = q.filter(f(filtered_field, val)) - - # We reset one last time - q.reset_joinpoint() - - key = tmp_attr["key"] - if key: - val = tmp_attr["val"] - if val: - q = q.filter(apply_json_cast(cls.attributes[key], val) == val) - else: - q = q.filter(tmp_attr["key"] in cls.attributes) - key = tmp_extra["key"] - if key: - val = tmp_extra["val"] - if val: - q = q.filter(apply_json_cast(cls.extras[key], val) == val) - else: - q = q.filter(tmp_extra["key"] in cls.extras) - - return q - - -def apply_json_cast(attr, val): - if isinstance(val, six.string_types): - attr = attr.astext - if isinstance(val, six.integer_types): - attr = attr.astext.cast(Integer) - if isinstance(val, bool): - attr = attr.astext.cast(Boolean) - - return attr diff --git a/docs/source/nitpick-exceptions b/docs/source/nitpick-exceptions index c5eb7b9d77..9843b248e9 100644 --- a/docs/source/nitpick-exceptions +++ b/docs/source/nitpick-exceptions @@ -211,6 +211,7 @@ py:class sqlalchemy.ext.declarative.api.Model py:class sqlalchemy.sql.functions.FunctionElement py:class sqlalchemy.orm.query.Query py:class sqlalchemy.orm.util.AliasedClass +py:class sqlalchemy.orm.session.Session py:exc sqlalchemy.orm.exc.MultipleResultsFound py:class sphinx.ext.autodoc.ClassDocumenter From d3b37efcd9dcc21a627fd9e6ce4908c396df3b96 Mon Sep 17 00:00:00 2001 From: Sebastiaan Huber Date: Sat, 22 Jun 2019 08:57:45 +0200 Subject: [PATCH 09/11] Remove superfluous `from_dbmodel` override from backend implementations The Django and SqlAlchemy implementations of the `BackendUser` and `BackendComment` classes reimplemented the `from_dbmodel` method. However, the implementation was exactly that of the base class `BackendEntity` so it is better to rely on that. All that had to be done was to set the `ENTITY_CLASS` class attribute to the correct class. 
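The pattern described in this commit message can be summarized with a minimal sketch; the base class shown here is a simplification and the actual collection base class in `aiida.orm.implementation` is richer::

    class BackendCollectionSketch(object):
        """Sketch of a collection whose `from_dbmodel` needs only `ENTITY_CLASS`."""

        ENTITY_CLASS = None  # each backend collection sets this to its entity class

        def __init__(self, backend):
            self.backend = backend

        def from_dbmodel(self, dbmodel):
            """Wrap a raw database model in the configured entity class."""
            # Identical to what the removed per-backend overrides did, e.g.
            # `DjangoUser.from_dbmodel(dbmodel, self.backend)`, so a single
            # generic implementation in the base class suffices.
            return self.ENTITY_CLASS.from_dbmodel(dbmodel, self.backend)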
--- aiida/orm/implementation/django/comments.py | 9 +- aiida/orm/implementation/django/users.py | 93 +++++++++---------- .../orm/implementation/sqlalchemy/comments.py | 3 +- aiida/orm/implementation/sqlalchemy/users.py | 85 +++++++++-------- 4 files changed, 92 insertions(+), 98 deletions(-) diff --git a/aiida/orm/implementation/django/comments.py b/aiida/orm/implementation/django/comments.py index 9bc5d5df91..410f59365d 100644 --- a/aiida/orm/implementation/django/comments.py +++ b/aiida/orm/implementation/django/comments.py @@ -12,11 +12,10 @@ from __future__ import print_function from __future__ import absolute_import -# pylint: disable=import-error,no-name-in-module,fixme from datetime import datetime + from aiida.backends.djsite.db import models -from aiida.common import exceptions -from aiida.common import lang +from aiida.common import exceptions, lang from ..comments import BackendComment, BackendCommentCollection from .utils import ModelWrapper @@ -100,14 +99,12 @@ def content(self): def set_content(self, value): self._dbmodel.content = value - # self._dbmodel.save() class DjangoCommentCollection(BackendCommentCollection): """Django implementation for the CommentCollection.""" - def from_dbmodel(self, dbmodel): - return DjangoComment.from_dbmodel(dbmodel, self.backend) + ENTITY_CLASS = DjangoComment def create(self, node, user, content=None, **kwargs): """ diff --git a/aiida/orm/implementation/django/users.py b/aiida/orm/implementation/django/users.py index 58f7f94c31..90bfcd1b92 100644 --- a/aiida/orm/implementation/django/users.py +++ b/aiida/orm/implementation/django/users.py @@ -23,53 +23,6 @@ __all__ = ('DjangoUser', 'DjangoUserCollection') -class DjangoUserCollection(BackendUserCollection): - """The Django collection of users""" - - def create(self, email, first_name='', last_name='', institution=''): - """ - Create a user with the provided email address - - :return: A new user object - :rtype: :class:`aiida.orm.implementation.django.users.DjangoUser` - """ - return DjangoUser(self.backend, email, first_name, last_name, institution) - - def find(self, email=None, id=None): # pylint: disable=redefined-builtin, invalid-name - """ - Find users in this collection - - :param email: optional email address filter - :param id: optional id filter - :return: a list of the found users - :rtype: list - """ - # Constructing the default query - import operator - from django.db.models import Q # pylint: disable=import-error, no-name-in-module - query_list = [] - - # If an id is specified then we add it to the query - if id is not None: - query_list.append(Q(pk=id)) - - # If an email is specified then we add it to the query - if email is not None: - query_list.append(Q(email=email)) - - if not query_list: - dbusers = DbUser.objects.all() - else: - dbusers = DbUser.objects.filter(functools.reduce(operator.and_, query_list)) - found_users = [] - for dbuser in dbusers: - found_users.append(self.from_dbmodel(dbuser)) - return found_users - - def from_dbmodel(self, dbmodel): - return DjangoUser.from_dbmodel(dbmodel, self.backend) - - class DjangoUser(entities.DjangoModelEntity[models.DbUser], BackendUser): """The Django user class""" @@ -112,3 +65,49 @@ def institution(self): @institution.setter def institution(self, institution): self._dbmodel.institution = institution + + +class DjangoUserCollection(BackendUserCollection): + """The Django collection of users""" + + ENTITY_CLASS = DjangoUser + + def create(self, email, first_name='', last_name='', institution=''): + """ + Create a user with the 
provided email address + + :return: A new user object + :rtype: :class:`aiida.orm.implementation.django.users.DjangoUser` + """ + return DjangoUser(self.backend, email, first_name, last_name, institution) + + def find(self, email=None, id=None): # pylint: disable=redefined-builtin, invalid-name + """ + Find users in this collection + + :param email: optional email address filter + :param id: optional id filter + :return: a list of the found users + :rtype: list + """ + # Constructing the default query + import operator + from django.db.models import Q # pylint: disable=import-error, no-name-in-module + query_list = [] + + # If an id is specified then we add it to the query + if id is not None: + query_list.append(Q(pk=id)) + + # If an email is specified then we add it to the query + if email is not None: + query_list.append(Q(email=email)) + + if not query_list: + dbusers = DbUser.objects.all() + else: + dbusers = DbUser.objects.filter(functools.reduce(operator.and_, query_list)) + found_users = [] + for dbuser in dbusers: + found_users.append(self.from_dbmodel(dbuser)) + return found_users diff --git a/aiida/orm/implementation/sqlalchemy/comments.py b/aiida/orm/implementation/sqlalchemy/comments.py index 2c70250295..e94a89842d 100644 --- a/aiida/orm/implementation/sqlalchemy/comments.py +++ b/aiida/orm/implementation/sqlalchemy/comments.py @@ -102,8 +102,7 @@ def set_content(self, value): class SqlaCommentCollection(BackendCommentCollection): """SqlAlchemy implementation for the CommentCollection.""" - def from_dbmodel(self, dbmodel): - return SqlaComment.from_dbmodel(dbmodel, self.backend) + ENTITY_CLASS = SqlaComment def create(self, node, user, content=None, **kwargs): """ diff --git a/aiida/orm/implementation/sqlalchemy/users.py b/aiida/orm/implementation/sqlalchemy/users.py index 269231490c..afbf88af39 100644 --- a/aiida/orm/implementation/sqlalchemy/users.py +++ b/aiida/orm/implementation/sqlalchemy/users.py @@ -19,49 +19,6 @@ __all__ = ('SqlaUserCollection', 'SqlaUser') -class SqlaUserCollection(BackendUserCollection): - """Collection of SQLA Users""" - - def create(self, email, first_name='', last_name='', institution=''): - """ - Create a user with the provided email address - - :return: A new user object - :rtype: :class:`aiida.orm.User` - """ - return SqlaUser(self.backend, email, first_name, last_name, institution) - - def find(self, email=None, id=None): # pylint: disable=redefined-builtin, invalid-name - """ - Find a user in matching the given criteria - - :param email: the email address - :param id: the id - :return: the matching user - :rtype: :class:`aiida.orm.implementation.sqlalchemy.users.SqlaUser` - """ - - # Constructing the default query - dbuser_query = DbUser.query - - # If an id is specified then we add it to the query - if id is not None: - dbuser_query = dbuser_query.filter_by(id=id) - - # If an email is specified then we add it to the query - if email is not None: - dbuser_query = dbuser_query.filter_by(email=email) - - dbusers = dbuser_query.all() - found_users = [] - for dbuser in dbusers: - found_users.append(self.from_dbmodel(dbuser)) - return found_users - - def from_dbmodel(self, dbmodel): - return SqlaUser.from_dbmodel(dbmodel, self.backend) - - class SqlaUser(entities.SqlaModelEntity[DbUser], BackendUser): """SQLA user""" @@ -104,3 +61,45 @@ def institution(self): @institution.setter def institution(self, institution): self._dbmodel.institution = institution + + +class SqlaUserCollection(BackendUserCollection): + """Collection of SQLA Users""" + + 
ENTITY_CLASS = SqlaUser
+
+    def create(self, email, first_name='', last_name='', institution=''):
+        """
+        Create a user with the provided email address
+
+        :return: A new user object
+        :rtype: :class:`aiida.orm.User`
+        """
+        return SqlaUser(self.backend, email, first_name, last_name, institution)
+
+    def find(self, email=None, id=None):  # pylint: disable=redefined-builtin, invalid-name
+        """
+        Find a user matching the given criteria
+
+        :param email: the email address
+        :param id: the id
+        :return: the matching user
+        :rtype: :class:`aiida.orm.implementation.sqlalchemy.users.SqlaUser`
+        """
+
+        # Constructing the default query
+        dbuser_query = DbUser.query
+
+        # If an id is specified then we add it to the query
+        if id is not None:
+            dbuser_query = dbuser_query.filter_by(id=id)
+
+        # If an email is specified then we add it to the query
+        if email is not None:
+            dbuser_query = dbuser_query.filter_by(email=email)
+
+        dbusers = dbuser_query.all()
+        found_users = []
+        for dbuser in dbusers:
+            found_users.append(self.from_dbmodel(dbuser))
+        return found_users

From e6830b9ed44449c149974b4f68855234f184c934 Mon Sep 17 00:00:00 2001
From: Sebastiaan Huber
Date: Sat, 22 Jun 2019 10:12:48 +0200
Subject: [PATCH 10/11] Add data migration for legacy `JobCalcState` attributes

The old `JobCalculation` maintained its state in the `DbCalcState` table, and
its value was also stored as an attribute with the key `state`, as a proxy.
With the change to processes, all `JobCalculation` nodes have been converted to
`CalcJobNodes`, but they did not receive any of the typical process attributes,
such as `process_state` and `exit_status`. Moreover, they kept their `state`
attribute, which now contains deprecated values. The `state` attribute is still
set for `CalcJobs`, but it serves a different purpose: while the process is
active, it acts as a sub state of the process state, for extra granularity.

Here we add a data migration for those old calculation jobs, where some process
attributes are inferred from the old `state` attribute, which is then
discarded. This makes sure that even for these old, migrated calculations,
commands like `verdi process list` will show some information that allows a
user to discern what the entry was.
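The mapping implemented in SQL by the migrations below can be expressed in plain Python; the function name here is hypothetical and the authoritative mapping lives in `aiida/backends/general/migrations/calc_state.py`::

    KILLED = {'NEW', 'TOSUBMIT', 'SUBMITTING', 'WITHSCHEDULER', 'COMPUTED', 'RETRIEVING', 'PARSING'}
    EXCEPTED = {'SUBMISSIONFAILED', 'RETRIEVALFAILED', 'PARSINGFAILED'}


    def infer_process_attributes(old_state):
        """Map a legacy `JobCalcState` value onto the new process attributes."""
        # All matched nodes get this constant label, since the original one
        # cannot be inferred consistently in SQL
        attributes = {'process_label': 'Legacy JobCalculation'}

        if old_state in KILLED:
            attributes['process_state'] = 'killed'
            attributes['process_status'] = 'Legacy `JobCalculation` with state `{}`'.format(old_state)
        elif old_state in EXCEPTED:
            attributes['process_state'] = 'excepted'
            attributes['process_status'] = 'Legacy `JobCalculation` with state `{}`'.format(old_state)
        elif old_state in ('FAILED', 'FINISHED'):
            attributes['process_state'] = 'finished'
            attributes['exit_status'] = 2 if old_state == 'FAILED' else 0

        return attributes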
---
 ..._data_migration_legacy_job_calculations.py | 109 ++++++++++++++++
 .../backends/djsite/db/migrations/__init__.py |   2 +-
 ..._data_migration_legacy_job_calculations.py |  56 +++++++++
 .../backends/general/migrations/calc_state.py |  24 ++++
 ...c78e6209_drop_computer_transport_params.py |   4 +-
 ..._data_migration_legacy_job_calculations.py | 104 +++++++++++++++++
 .../sqlalchemy/tests/test_migrations.py       |  68 ++++++++++-
 aiida/backends/tests/__init__.py              |   3 +-
 8 files changed, 365 insertions(+), 5 deletions(-)
 create mode 100644 aiida/backends/djsite/db/migrations/0038_data_migration_legacy_job_calculations.py
 create mode 100644 aiida/backends/djsite/db/subtests/migrations/test_migrations_0038_data_migration_legacy_job_calculations.py
 create mode 100644 aiida/backends/general/migrations/calc_state.py
 create mode 100644 aiida/backends/sqlalchemy/migrations/versions/26d561acd560_data_migration_legacy_job_calculations.py

diff --git a/aiida/backends/djsite/db/migrations/0038_data_migration_legacy_job_calculations.py b/aiida/backends/djsite/db/migrations/0038_data_migration_legacy_job_calculations.py
new file mode 100644
index 0000000000..33140d9704
--- /dev/null
+++ b/aiida/backends/djsite/db/migrations/0038_data_migration_legacy_job_calculations.py
@@ -0,0 +1,109 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+# pylint: disable=invalid-name,too-few-public-methods
+"""Data migration for legacy `JobCalculations`.
+
+These old nodes have already been migrated to the correct `CalcJobNode` type in a previous migration, but they can
+still contain a `state` attribute with a deprecated `JobCalcState` value and they are missing a value for the
+`process_state`, `process_status`, `process_label` and `exit_status`. The `process_label` is impossible to infer
+consistently in SQL so it will be omitted. The others will be mapped from the `state` attribute as follows:
+
+.. code-block:: text
+
+    Old state            | Process state  | Exit status | Process status
+    ---------------------|----------------|-------------|----------------------------------------------------------
+    `NEW`                | `killed`       | `None`      | Legacy `JobCalculation` with state `NEW`
+    `TOSUBMIT`           | `killed`       | `None`      | Legacy `JobCalculation` with state `TOSUBMIT`
+    `SUBMITTING`         | `killed`       | `None`      | Legacy `JobCalculation` with state `SUBMITTING`
+    `WITHSCHEDULER`      | `killed`       | `None`      | Legacy `JobCalculation` with state `WITHSCHEDULER`
+    `COMPUTED`           | `killed`       | `None`      | Legacy `JobCalculation` with state `COMPUTED`
+    `RETRIEVING`         | `killed`       | `None`      | Legacy `JobCalculation` with state `RETRIEVING`
+    `PARSING`            | `killed`       | `None`      | Legacy `JobCalculation` with state `PARSING`
+    `SUBMISSIONFAILED`   | `excepted`     | `None`      | Legacy `JobCalculation` with state `SUBMISSIONFAILED`
+    `RETRIEVALFAILED`    | `excepted`     | `None`      | Legacy `JobCalculation` with state `RETRIEVALFAILED`
+    `PARSINGFAILED`      | `excepted`     | `None`      | Legacy `JobCalculation` with state `PARSINGFAILED`
+    `FAILED`             | `finished`     | 2           | -
+    `FINISHED`           | `finished`     | 0           | -
+    `IMPORTED`           | -              | -           | -
+
+Note the `IMPORTED` state was never actually stored in the `state` attribute, so we do not have to consider it.
+The old `state` attribute has to be removed after the data is migrated, because its value is no longer valid or useful.
+
+Note: in addition to the three attributes mentioned in the table, all matched nodes will get `Legacy JobCalculation` as
+their `process_label` which is one of the default columns of `verdi process list`.
+"""
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+from __future__ import absolute_import
+
+# Remove when https://github.com/PyCQA/pylint/issues/1931 is fixed
+# pylint: disable=no-name-in-module,import-error
+from django.db import migrations
+from aiida.backends.djsite.db.migrations import upgrade_schema_version
+
+REVISION = '1.0.38'
+DOWN_REVISION = '1.0.37'
+
+
+class Migration(migrations.Migration):
+    """Data migration for legacy `JobCalculations`."""
+
+    dependencies = [
+        ('db', '0037_attributes_extras_settings_json'),
+    ]
+
+    # Note that the condition on matching target nodes is done only on the `node_type` and the `state` attribute value.
+    # New `CalcJobs` will have the same node type and, while active, can have a `state` attribute with a value
+    # of the enum `CalcJobState`, some of which match the deprecated `JobCalcState` values; however, the new ones are
+    # stored in lower case, so we do not run the risk of matching them by accident.
+    operations = [
+        migrations.RunSQL(
+            sql=r"""
+                UPDATE db_dbnode
+                SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `NEW`", "process_label": "Legacy JobCalculation"}'
+                WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "NEW"}';
+                UPDATE db_dbnode
+                SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `TOSUBMIT`", "process_label": "Legacy JobCalculation"}'
+                WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "TOSUBMIT"}';
+                UPDATE db_dbnode
+                SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `SUBMITTING`", "process_label": "Legacy JobCalculation"}'
+                WHERE node_type = 'process.calculation.calcjob.CalcJobNode.'
AND attributes @> '{"state": "SUBMITTING"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `WITHSCHEDULER`", "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "WITHSCHEDULER"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `COMPUTED`", "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "COMPUTED"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `RETRIEVING`", "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "RETRIEVING"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `PARSING`", "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "PARSING"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "excepted", "process_status": "Legacy `JobCalculation` with state `SUBMISSIONFAILED`", "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "SUBMISSIONFAILED"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "excepted", "process_status": "Legacy `JobCalculation` with state `RETRIEVALFAILED`", "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "RETRIEVALFAILED"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "excepted", "process_status": "Legacy `JobCalculation` with state `PARSINGFAILED`", "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "PARSINGFAILED"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "finished", "exit_status": 2, "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "FAILED"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "finished", "exit_status": 0, "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' 
AND attributes @> '{"state": "FINISHED"}';
+            """,
+            reverse_sql=""),
+        upgrade_schema_version(REVISION, DOWN_REVISION)
+    ]
diff --git a/aiida/backends/djsite/db/migrations/__init__.py b/aiida/backends/djsite/db/migrations/__init__.py
index c8451f0a25..97ab0b5e74 100644
--- a/aiida/backends/djsite/db/migrations/__init__.py
+++ b/aiida/backends/djsite/db/migrations/__init__.py
@@ -23,7 +23,7 @@ class DeserializationException(AiidaException):
     pass

-LATEST_MIGRATION = '0037_attributes_extras_settings_json'
+LATEST_MIGRATION = '0038_data_migration_legacy_job_calculations'


 def _update_schema_version(version, apps, schema_editor):
diff --git a/aiida/backends/djsite/db/subtests/migrations/test_migrations_0038_data_migration_legacy_job_calculations.py b/aiida/backends/djsite/db/subtests/migrations/test_migrations_0038_data_migration_legacy_job_calculations.py
new file mode 100644
index 0000000000..d91d40a5d4
--- /dev/null
+++ b/aiida/backends/djsite/db/subtests/migrations/test_migrations_0038_data_migration_legacy_job_calculations.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+###########################################################################
+# Copyright (c), The AiiDA team. All rights reserved.                     #
+# This file is part of the AiiDA code.                                    #
+#                                                                         #
+# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core #
+# For further information on the license, see the LICENSE.txt file        #
+# For further information please visit http://www.aiida.net               #
+###########################################################################
+# pylint: disable=import-error,no-name-in-module,invalid-name
+"""
+Tests for the data migration of legacy `JobCalculation` nodes
+Migration 0038_data_migration_legacy_job_calculations
+"""
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import six
+
+from aiida.backends.djsite.db.subtests.migrations.test_migrations_common import TestMigrations
+from aiida.backends.general.migrations.calc_state import STATE_MAPPING
+
+
+class TestLegacyJobCalcStateDataMigration(TestMigrations):
+    """Test the migration that performs a data migration of legacy `JobCalcState`."""
+
+    migrate_from = '0037_attributes_extras_settings_json'
+    migrate_to = '0038_data_migration_legacy_job_calculations'
+
+    def setUpBeforeMigration(self):
+        self.nodes = {}
+
+        for state in STATE_MAPPING:
+            node = self.DbNode(
+                node_type='process.calculation.calcjob.CalcJobNode.',
+                user_id=self.default_user.id,
+                attributes={'state': state})
+            node.save()
+
+            self.nodes[state] = node.id
+
+    def test_data_migrated(self):
+        """Verify that the `process_state`, `process_status` and `exit_status` are set correctly."""
+        for state, pk in self.nodes.items():
+            node = self.load_node(pk)
+            self.assertEqual(node.attributes.get('process_state', None), STATE_MAPPING[state].process_state)
+            self.assertEqual(node.attributes.get('process_status', None), STATE_MAPPING[state].process_status)
+            self.assertEqual(node.attributes.get('exit_status', None), STATE_MAPPING[state].exit_status)
+            self.assertEqual(node.attributes.get('process_label'),
+                             'Legacy JobCalculation')  # All nodes should have this label
+            self.assertIsNone(node.attributes.get('state', None))  # The old state should have been removed
+
+            exit_status = node.attributes.get('exit_status', None)
+            if exit_status is not None:
+                self.assertIsInstance(exit_status, six.integer_types)
diff --git a/aiida/backends/general/migrations/calc_state.py b/aiida/backends/general/migrations/calc_state.py
new file mode 100644
index 0000000000..8df01c78e2
--- /dev/null
+++ b/aiida/backends/general/migrations/calc_state.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+"""Data structures for mapping legacy `JobCalculation` data to new process attributes."""
+from __future__ import absolute_import
+from collections import namedtuple
+
+StateMapping = namedtuple('StateMapping', ['state', 'process_state', 'exit_status', 'process_status'])
+
+# Mapping of old `state` attribute values of legacy `JobCalculation` to new process related attributes.
+# This is used in migration `0038_data_migration_legacy_job_calculations.py`
+STATUS_TEMPLATE = 'Legacy `JobCalculation` with state `{}`'
+STATE_MAPPING = {
+    'NEW': StateMapping('NEW', 'killed', None, STATUS_TEMPLATE.format('NEW')),
+    'TOSUBMIT': StateMapping('TOSUBMIT', 'killed', None, STATUS_TEMPLATE.format('TOSUBMIT')),
+    'SUBMITTING': StateMapping('SUBMITTING', 'killed', None, STATUS_TEMPLATE.format('SUBMITTING')),
+    'WITHSCHEDULER': StateMapping('WITHSCHEDULER', 'killed', None, STATUS_TEMPLATE.format('WITHSCHEDULER')),
+    'COMPUTED': StateMapping('COMPUTED', 'killed', None, STATUS_TEMPLATE.format('COMPUTED')),
+    'RETRIEVING': StateMapping('RETRIEVING', 'killed', None, STATUS_TEMPLATE.format('RETRIEVING')),
+    'PARSING': StateMapping('PARSING', 'killed', None, STATUS_TEMPLATE.format('PARSING')),
+    'SUBMISSIONFAILED': StateMapping('SUBMISSIONFAILED', 'excepted', None, STATUS_TEMPLATE.format('SUBMISSIONFAILED')),
+    'RETRIEVALFAILED': StateMapping('RETRIEVALFAILED', 'excepted', None, STATUS_TEMPLATE.format('RETRIEVALFAILED')),
+    'PARSINGFAILED': StateMapping('PARSINGFAILED', 'excepted', None, STATUS_TEMPLATE.format('PARSINGFAILED')),
+    'FAILED': StateMapping('FAILED', 'finished', 2, None),
+    'FINISHED': StateMapping('FINISHED', 'finished', 0, None),
+}
diff --git a/aiida/backends/sqlalchemy/migrations/versions/07fac78e6209_drop_computer_transport_params.py b/aiida/backends/sqlalchemy/migrations/versions/07fac78e6209_drop_computer_transport_params.py
index 111eed9f4c..503bec604c 100644
--- a/aiida/backends/sqlalchemy/migrations/versions/07fac78e6209_drop_computer_transport_params.py
+++ b/aiida/backends/sqlalchemy/migrations/versions/07fac78e6209_drop_computer_transport_params.py
@@ -9,8 +9,8 @@
 ###########################################################################
 """Drop the `transport_params` from the `Computer` database model.

-Revision ID: 61fc0913fae9
-Revises: ce56d84bcc35
+Revision ID: 07fac78e6209
+Revises: de2eaf6978b4
 Create Date: 2019-02-16 15:32:42.745450

 """
diff --git a/aiida/backends/sqlalchemy/migrations/versions/26d561acd560_data_migration_legacy_job_calculations.py b/aiida/backends/sqlalchemy/migrations/versions/26d561acd560_data_migration_legacy_job_calculations.py
new file mode 100644
index 0000000000..c401dfc957
--- /dev/null
+++ b/aiida/backends/sqlalchemy/migrations/versions/26d561acd560_data_migration_legacy_job_calculations.py
@@ -0,0 +1,104 @@
+# -*- coding: utf-8 -*-
+"""Data migration for legacy `JobCalculations`.
+
+These old nodes have already been migrated to the correct `CalcJobNode` type in a previous migration, but they can
+still contain a `state` attribute with a deprecated `JobCalcState` value and they are missing a value for the
+`process_state`, `process_status`, `process_label` and `exit_status`. The `process_label` is impossible to infer
+consistently in SQL so it will be omitted. The others will be mapped from the `state` attribute as follows:
+
+.. code-block:: text
+
+    Old state            | Process state  | Exit status | Process status
+    ---------------------|----------------|-------------|----------------------------------------------------------
+    `NEW`                | `killed`       | `None`      | Legacy `JobCalculation` with state `NEW`
+    `TOSUBMIT`           | `killed`       | `None`      | Legacy `JobCalculation` with state `TOSUBMIT`
+    `SUBMITTING`         | `killed`       | `None`      | Legacy `JobCalculation` with state `SUBMITTING`
+    `WITHSCHEDULER`      | `killed`       | `None`      | Legacy `JobCalculation` with state `WITHSCHEDULER`
+    `COMPUTED`           | `killed`       | `None`      | Legacy `JobCalculation` with state `COMPUTED`
+    `RETRIEVING`         | `killed`       | `None`      | Legacy `JobCalculation` with state `RETRIEVING`
+    `PARSING`            | `killed`       | `None`      | Legacy `JobCalculation` with state `PARSING`
+    `SUBMISSIONFAILED`   | `excepted`     | `None`      | Legacy `JobCalculation` with state `SUBMISSIONFAILED`
+    `RETRIEVALFAILED`    | `excepted`     | `None`      | Legacy `JobCalculation` with state `RETRIEVALFAILED`
+    `PARSINGFAILED`      | `excepted`     | `None`      | Legacy `JobCalculation` with state `PARSINGFAILED`
+    `FAILED`             | `finished`     | 2           | -
+    `FINISHED`           | `finished`     | 0           | -
+    `IMPORTED`           | -              | -           | -
+
+
+Note the `IMPORTED` state was never actually stored in the `state` attribute, so we do not have to consider it.
+The old `state` attribute has to be removed after the data is migrated, because its value is no longer valid or useful.
+
+Note: in addition to the three attributes mentioned in the table, all matched nodes will get `Legacy JobCalculation` as
+their `process_label` which is one of the default columns of `verdi process list`.
+
+Revision ID: 26d561acd560
+Revises: 07fac78e6209
+Create Date: 2019-06-22 09:55:25.284168
+
+"""
+# pylint: disable=invalid-name,no-member,import-error,no-name-in-module
+from __future__ import division
+from __future__ import print_function
+from __future__ import absolute_import
+
+from alembic import op
+from sqlalchemy.sql import text
+
+# revision identifiers, used by Alembic.
+revision = '26d561acd560'
+down_revision = '07fac78e6209'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    """Migrations for the upgrade."""
+    conn = op.get_bind()  # pylint: disable=no-member
+
+    # Note that the condition on matching target nodes is done only on the `node_type` and the `state` attribute value.
+    # New `CalcJobs` will have the same node type and, while active, can have a `state` attribute with a value
+    # of the enum `CalcJobState`, some of which match the deprecated `JobCalcState` values; however, the new ones are
+    # stored in lower case, so we do not run the risk of matching them by accident.
+    statement = text("""
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `NEW`", "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "NEW"}';
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `TOSUBMIT`", "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "TOSUBMIT"}';
+        UPDATE db_dbnode
+        SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `SUBMITTING`", "process_label": "Legacy JobCalculation"}'
+        WHERE node_type = 'process.calculation.calcjob.CalcJobNode.'
AND attributes @> '{"state": "SUBMITTING"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `WITHSCHEDULER`", "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "WITHSCHEDULER"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `COMPUTED`", "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "COMPUTED"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `RETRIEVING`", "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "RETRIEVING"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "killed", "process_status": "Legacy `JobCalculation` with state `PARSING`", "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "PARSING"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "excepted", "process_status": "Legacy `JobCalculation` with state `SUBMISSIONFAILED`", "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "SUBMISSIONFAILED"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "excepted", "process_status": "Legacy `JobCalculation` with state `RETRIEVALFAILED`", "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "RETRIEVALFAILED"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "excepted", "process_status": "Legacy `JobCalculation` with state `PARSINGFAILED`", "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "PARSINGFAILED"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "finished", "exit_status": 2, "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' AND attributes @> '{"state": "FAILED"}'; + UPDATE db_dbnode + SET attributes = attributes - 'state' || '{"process_state": "finished", "exit_status": 0, "process_label": "Legacy JobCalculation"}' + WHERE node_type = 'process.calculation.calcjob.CalcJobNode.' 
AND attributes @> '{"state": "FINISHED"}'; + """) + conn.execute(statement) + + +def downgrade(): + """Migrations for the downgrade.""" diff --git a/aiida/backends/sqlalchemy/tests/test_migrations.py b/aiida/backends/sqlalchemy/tests/test_migrations.py index a5aeace92c..b681c0d017 100644 --- a/aiida/backends/sqlalchemy/tests/test_migrations.py +++ b/aiida/backends/sqlalchemy/tests/test_migrations.py @@ -14,10 +14,11 @@ from __future__ import print_function from contextlib import contextmanager - import os + from alembic import command from alembic.config import Config +import six from six.moves import zip from aiida.backends import sqlalchemy as sa @@ -1276,3 +1277,68 @@ def test_type_string(self): self.assertEqual(node.type, 'data.dict.Dict.') finally: session.close() + + +class TestLegacyJobCalcStateDataMigration(TestMigrationsSQLA): + """Test the migration that performs a data migration of legacy `JobCalcState`.""" + + migrate_from = '07fac78e6209' + migrate_to = '26d561acd560' + + def setUpBeforeMigration(self): + from sqlalchemy.orm import Session # pylint: disable=import-error,no-name-in-module + from aiida.backends.general.migrations.calc_state import STATE_MAPPING + + self.state_mapping = STATE_MAPPING + self.nodes = {} + + DbNode = self.get_auto_base().classes.db_dbnode # pylint: disable=invalid-name + DbUser = self.get_auto_base().classes.db_dbuser # pylint: disable=invalid-name + + with sa.ENGINE.begin() as connection: + try: + session = Session(connection.engine) + + user = DbUser(email='{}@aiida.net'.format(self.id())) + session.add(user) + session.commit() + + for state in self.state_mapping: + node = DbNode( + node_type='process.calculation.calcjob.CalcJobNode.', + user_id=user.id, + attributes={'state': state} + ) + session.add(node) + session.commit() + + self.nodes[state] = node.id + except Exception: + session.rollback() + raise + finally: + session.close() + + def test_data_migrated(self): + """Verify that the `process_state`, `process_status` and `exit_status` are set correctly.""" + from sqlalchemy.orm import Session # pylint: disable=import-error,no-name-in-module + + DbNode = self.get_auto_base().classes.db_dbnode # pylint: disable=invalid-name + + with sa.ENGINE.begin() as connection: + try: + session = Session(connection.engine) + for state, pk in self.nodes.items(): + node = session.query(DbNode).filter(DbNode.id == pk).one() + attrs = node.attributes + self.assertEqual(attrs.get('process_state', None), self.state_mapping[state].process_state) + self.assertEqual(attrs.get('process_status', None), self.state_mapping[state].process_status) + self.assertEqual(attrs.get('exit_status', None), self.state_mapping[state].exit_status) + self.assertEqual(attrs.get('process_label'), 'Legacy JobCalculation') + self.assertIsNone(attrs.get('state', None)) # The old state should have been removed + + exit_status = attrs.get('exit_status', None) + if exit_status is not None: + self.assertIsInstance(exit_status, six.integer_types) + finally: + session.close() diff --git a/aiida/backends/tests/__init__.py b/aiida/backends/tests/__init__.py index e633a2eaf0..727f931949 100644 --- a/aiida/backends/tests/__init__.py +++ b/aiida/backends/tests/__init__.py @@ -22,7 +22,8 @@ 'generic': ['aiida.backends.djsite.db.subtests.test_generic'], 'migrations': [ 'aiida.backends.djsite.db.subtests.migrations.test_migrations_many', - 'aiida.backends.djsite.db.subtests.migrations.test_migrations_0037_attributes_extras_settings_json' + 
'aiida.backends.djsite.db.subtests.migrations.test_migrations_0037_attributes_extras_settings_json',
+        'aiida.backends.djsite.db.subtests.migrations.test_migrations_0038_data_migration_legacy_job_calculations',
     ],
 },
 BACKEND_SQLA: {

From 83e02fdf310bd9027e817cc43361f418855b6ec6 Mon Sep 17 00:00:00 2001
From: Sebastiaan Huber
Date: Wed, 26 Jun 2019 18:23:53 +0200
Subject: [PATCH 11/11] Update the export archive version to `v0.6`

The change in the export archive version is necessary to take into account the
changes of two database migrations:

 * Moving of node attributes and extras to JSONB for the Django backend
 * Data migration of the `state` attribute of legacy `JobCalculation` nodes

The migration of node attributes and extras to JSONB, which, in contrast with
the old database schema, does not support datetime objects, means that
deserialization of the exported JSON data is no longer necessary. The only
serialized types were datetime objects, and these are now no longer supported
in the database, as was already the case for SqlAlchemy. This allows us to
remove the attribute and extra conversion dictionaries in their entirety.

A second migration deals with the conversion of legacy job calculation states,
inferring the `process_state`, `exit_status` and `process_status` from the old
`state` attribute.

Co-authored-by: Giovanni Pizzi
---
 aiida/backends/tests/__init__.py              |   1 +
 .../tests/cmdline/commands/test_calcjob.py    |   3 -
 .../tests/cmdline/commands/test_export.py     |   8 +-
 .../tests/cmdline/commands/test_import.py     |  18 +--
 .../fixtures/calcjob/arithmetic.add.aiida     | Bin 9062 -> 8846 bytes
 .../fixtures/calcjob/arithmetic.add_old.aiida | Bin 10832 -> 8677 bytes
 .../fixtures/export/compare/django.aiida      | Bin 2338 -> 1871 bytes
 .../fixtures/export/compare/sqlalchemy.aiida  | Bin 2322 -> 1859 bytes
 .../export/migrate/export_v0.6_simple.aiida   | Bin 0 -> 52510 bytes
 .../importexport/migration/test_migration.py  |   3 -
 .../importexport/migration/test_v05_to_v06.py | 118 ++++++++++++++
 aiida/cmdline/utils/migration/__init__.py     |   9 +-
 aiida/cmdline/utils/migration/v05_to_v06.py   | 146 ++++++++++++++++++
 aiida/orm/importexport.py                     | 119 +++-----------
 14 files changed, 311 insertions(+), 114 deletions(-)
 create mode 100644 aiida/backends/tests/fixtures/export/migrate/export_v0.6_simple.aiida
 create mode 100644 aiida/backends/tests/tools/importexport/migration/test_v05_to_v06.py
 create mode 100644 aiida/cmdline/utils/migration/v05_to_v06.py

diff --git a/aiida/backends/tests/__init__.py b/aiida/backends/tests/__init__.py
index 727f931949..22d7fd2299 100644
--- a/aiida/backends/tests/__init__.py
+++ b/aiida/backends/tests/__init__.py
@@ -110,6 +110,7 @@
     'export_migration.v02_to_v03': ['aiida.backends.tests.tools.importexport.migration.test_v02_to_v03'],
     'export_migration.v03_to_v04': ['aiida.backends.tests.tools.importexport.migration.test_v03_to_v04'],
     'export_migration.v04_to_v05': ['aiida.backends.tests.tools.importexport.migration.test_v04_to_v05'],
+    'export_migration.v05_to_v06': ['aiida.backends.tests.tools.importexport.migration.test_v05_to_v06'],
     'generic': ['aiida.backends.tests.test_generic'],
     'manage.configuration.config.': ['aiida.backends.tests.manage.configuration.test_config'],
     'manage.configuration.migrations.': ['aiida.backends.tests.manage.configuration.migrations.test_migrations'],
diff --git a/aiida/backends/tests/cmdline/commands/test_calcjob.py b/aiida/backends/tests/cmdline/commands/test_calcjob.py
index c3b940e185..7a505f707c 100644
--- a/aiida/backends/tests/cmdline/commands/test_calcjob.py
+++ 
b/aiida/backends/tests/cmdline/commands/test_calcjob.py @@ -13,8 +13,6 @@ from __future__ import print_function from __future__ import absolute_import -import unittest - from click.testing import CliRunner from aiida import orm @@ -30,7 +28,6 @@ def get_result_lines(result): return [e for e in result.output.split('\n') if e] -@unittest.skip("Reenable when issue #3056 has been solved (migrate exported files to 0.6)") class TestVerdiCalculation(AiidaTestCase): """Tests for `verdi calcjob`.""" diff --git a/aiida/backends/tests/cmdline/commands/test_export.py b/aiida/backends/tests/cmdline/commands/test_export.py index 33424f4193..8ad12340c0 100644 --- a/aiida/backends/tests/cmdline/commands/test_export.py +++ b/aiida/backends/tests/cmdline/commands/test_export.py @@ -149,7 +149,8 @@ def test_migrate_versions_old(self): 'export_v0.1_simple.aiida', 'export_v0.2_simple.aiida', 'export_v0.3_simple.aiida', - 'export_v0.4_simple.aiida' + 'export_v0.4_simple.aiida', + 'export_v0.5_simple.aiida' ] for archive in archives: @@ -169,7 +170,7 @@ def test_migrate_versions_old(self): def test_migrate_versions_recent(self): """Migrating an archive with the current version should exit with non-zero status.""" archives = [ - 'export_v0.5_simple.aiida', + 'export_v0.6_simple.aiida', ] for archive in archives: @@ -260,7 +261,8 @@ def test_inspect(self): ('export_v0.2_simple.aiida', '0.2'), ('export_v0.3_simple.aiida', '0.3'), ('export_v0.4_simple.aiida', '0.4'), - ('export_v0.5_simple.aiida', '0.5') + ('export_v0.5_simple.aiida', '0.5'), + ('export_v0.6_simple.aiida', '0.6') ] for archive, version_number in archives: diff --git a/aiida/backends/tests/cmdline/commands/test_import.py b/aiida/backends/tests/cmdline/commands/test_import.py index 2551f15fce..d8412814ee 100644 --- a/aiida/backends/tests/cmdline/commands/test_import.py +++ b/aiida/backends/tests/cmdline/commands/test_import.py @@ -12,8 +12,6 @@ from __future__ import print_function from __future__ import absolute_import -import unittest - from click.testing import CliRunner from click.exceptions import BadParameter @@ -23,7 +21,6 @@ from aiida.orm import Group -@unittest.skip("Reenable when issue #3056 has been solved (migrate exported files to 0.6)") class TestVerdiImport(AiidaTestCase): """Tests for `verdi import`.""" @@ -60,12 +57,12 @@ def test_import_archive(self): """ Test import for archive files from disk - NOTE: When the export format version is upped, the test export_v0.5.aiida archive will have to be + NOTE: When the export format version is upped, the test export_v0.6.aiida archive will have to be replaced with the version of the new format """ archives = [ get_archive_file('arithmetic.add.aiida', filepath='calcjob'), - get_archive_file('export_v0.5_simple.aiida', filepath=self.archive_path) + get_archive_file('export_v0.6_simple.aiida', filepath=self.archive_path) ] options = [] + archives @@ -81,7 +78,7 @@ def test_import_to_group(self): """ archives = [ get_archive_file('arithmetic.add.aiida', filepath='calcjob'), - get_archive_file('export_v0.5_simple.aiida', filepath=self.archive_path) + get_archive_file('export_v0.6_simple.aiida', filepath=self.archive_path) ] group_label = "import_madness" @@ -127,7 +124,7 @@ def test_import_make_new_group(self): """Make sure imported entities are saved in new Group""" # Initialization group_label = "new_group_for_verdi_import" - archives = [get_archive_file('export_v0.5_simple.aiida', filepath=self.archive_path)] + archives = [get_archive_file('export_v0.6_simple.aiida', 
filepath=self.archive_path)]
 
         # Check Group does not already exist
         group_search = Group.objects.find(filters={'label': group_label})
@@ -147,7 +144,7 @@ def test_import_make_new_group(self):
 
     def test_comment_mode(self):
         """Test comment mode flag works as intended"""
-        archives = [get_archive_file('export_v0.5_simple.aiida', filepath=self.archive_path)]
+        archives = [get_archive_file('export_v0.6_simple.aiida', filepath=self.archive_path)]
 
         options = ['--comment-mode', 'newest'] + archives
         result = self.cli_runner.invoke(cmd_import.cmd_import, options)
@@ -166,7 +163,8 @@ def test_import_old_local_archives(self):
         Expected behavior: Automatically migrate to newest version and import correctly.
         """
         archives = [('export_v0.1_simple.aiida', '0.1'), ('export_v0.2_simple.aiida', '0.2'),
-                    ('export_v0.3_simple.aiida', '0.3'), ('export_v0.4_simple.aiida', '0.4')]
+                    ('export_v0.3_simple.aiida', '0.3'), ('export_v0.4_simple.aiida', '0.4'),
+                    ('export_v0.5_simple.aiida', '0.5')]
 
         for archive, version in archives:
             options = [get_archive_file(archive, filepath=self.archive_path)]
@@ -238,7 +236,7 @@ def test_non_interactive_and_migration(self):
         `migration` = False, `non_interactive` = False (default), Expected: No query, no migrate
         `migration` = False, `non_interactive` = True, Expected: No query, no migrate
         """
-        archive = get_archive_file('export_v0.4_simple.aiida', filepath=self.archive_path)
+        archive = get_archive_file('export_v0.5_simple.aiida', filepath=self.archive_path)
         confirm_message = "Do you want to try and migrate {} to the newest export file version?".format(archive)
         success_message = "Success: imported archive {}".format(archive)
diff --git a/aiida/backends/tests/fixtures/calcjob/arithmetic.add.aiida b/aiida/backends/tests/fixtures/calcjob/arithmetic.add.aiida
index 1e2712ecb47750b70e9d643ee20054433c9e50ce..af6c97daf71ce64652a797bda473cdfe0289105d 100644
GIT binary patch
[base85-encoded binary hunk omitted: not human-readable]

diff --git a/aiida/backends/tests/fixtures/calcjob/arithmetic.add_old.aiida b/aiida/backends/tests/fixtures/calcjob/arithmetic.add_old.aiida
index 6421a787e257547884bc7b756f274d17d54495f3..c0b269ad6bcd824582d5675ca599175fcf62d8d2 100644
GIT binary patch
[base85-encoded binary hunks omitted, including those for the export comparison fixtures django.aiida and sqlalchemy.aiida]

diff --git a/aiida/backends/tests/fixtures/export/migrate/export_v0.6_simple.aiida b/aiida/backends/tests/fixtures/export/migrate/export_v0.6_simple.aiida
new file mode 100644
index 0000000000000000000000000000000000000000..520289b31994ac816abb43216e52591c5c29b4e1
GIT binary patch
literal 52510
[base85-encoded binary literal omitted; the text diffs for test_migration.py, the new test_v05_to_v06.py and aiida/cmdline/utils/migration/__init__.py were lost in extraction together with the binary data]

diff --git a/aiida/cmdline/utils/migration/v05_to_v06.py b/aiida/cmdline/utils/migration/v05_to_v06.py
new file mode 100644
--- /dev/null
+++ b/aiida/cmdline/utils/migration/v05_to_v06.py
[the beginning of the new module's docstring was lost in extraction; the surviving tail follows]
+    `aiida.backends.djsite.db.migrations.00XX_<migration-name>.py`
+
+Where XX are the numbers in the migrations' documentation: REV. 1.0.XX
+And migration-name is the name of the particular migration.
+The individual SQLAlchemy database migrations may be found at:
+
+    `aiida.backends.sqlalchemy.migrations.versions.<id>_<migration-name>.py`
+
+Where id is a SQLA id and migration-name is the name of the particular migration.
+"""
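Before the function definitions, a short illustration of the data this module operates on. Judging from the recursion in `migrate_deserialized_datetime` below, the conversion metadata in a v0.5 archive mirrors the structure of the attributes themselves, with the string 'date' marking serialized datetimes and `None` everywhere else; the concrete attribute names and values in this sketch are made up for illustration:

    # Hypothetical excerpt of the 'node_attributes' and
    # 'node_attributes_conversion' entries for one node in a v0.5 archive
    # (names and values are illustrative, not taken from a real archive).
    attributes = {
        'withmpi': True,
        'last_jobinfo': {'submission_time': '2019-05-03T08:13:54.000000'},
    }
    conversion = {
        'withmpi': None,  # plain JSON type: left untouched
        'last_jobinfo': {'submission_time': 'date'},  # datetime: gets '+00:00' appended
    }
    # migrate_deserialized_datetime(attributes, conversion) returns the same
    # structure, with only the 'submission_time' string changed to
    # '2019-05-03T08:13:54.000000+00:00'.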
+""" +# pylint: disable=invalid-name +from __future__ import division +from __future__ import print_function +from __future__ import absolute_import + +from six.moves import zip +from aiida.cmdline.utils.migration.utils import verify_metadata_version, update_metadata + + +def migrate_deserialized_datetime(data, conversion): + """Deserialize datetime strings from export archives, meaning to reattach the UTC timezone information.""" + if isinstance(data, dict): + ret_data = {} + for key, value in data.items(): + if conversion is not None: + ret_data[key] = migrate_deserialized_datetime(value, conversion[key]) + else: + ret_data[key] = migrate_deserialized_datetime(value, None) + elif isinstance(data, (list, tuple)): + ret_data = [] + if conversion is not None: + for value, sub_conversion in zip(data, conversion): + ret_data.append(migrate_deserialized_datetime(value, sub_conversion)) + else: + for value in data: + ret_data.append(migrate_deserialized_datetime(value, None)) + else: + if conversion is None: + ret_data = data + else: + if conversion == 'date': + # Node attributes that were datetime objects were converted to a string since datetimes cannot be stored + # in JSON. The function used to serialize was: + # `data.astimezone(pytz.utc).strftime('%Y-%m-%dT%H:%M:%S.%f') + # Note that this first converted the datetime to UTC but then dropped the information from the string. + # Since we know that all strings will be UTC, here we are simply reattaching that information. + ret_data = data + '+00:00' + else: + raise ValueError("Unknown convert_type '{}'".format(conversion)) + + return ret_data + + +def migration_serialize_datetime_objects(_, data): + """Apply migration 0037 - REV. 1.0.37 + + Migrates the node `attributes` and `extras` from the EAV schema to JSONB columns. Since JSON does not support + datetime objects, and the EAV did, existing datetime objects have to be serialized to strings. Just like the + database migration they were serialized to the standard ISO format, except that they were first converted to UTC + timezone and then the stored without a timezone reference. Since existing datetimes in the attributes and extras in + the database were timezone aware and have been migrated to an ISO format string *including* the timezone information + we should now add the same timezone information to datetime attributes and extras in existing export archives. All + that one needs to do for this is to append the `+00:00` suffix, which signifies the UTC timezone. + + Since the datetime objects were the only types being serialized in the attributes and extras, after the reinstating + of the timeonze information, there is no longer a need for the de/serialization dictionaries for each node, stored + in `node_attributes_conversion` and `node_extras_conversion`, respectively. They are no longer added to new archives + and so they can and should be removed from existing archives, reducing the size enormously. + """ + data['node_attributes'] = migrate_deserialized_datetime(data['node_attributes'], data['node_attributes_conversion']) + data['node_extras'] = migrate_deserialized_datetime(data['node_extras'], data['node_extras_conversion']) + + data.pop('node_attributes_conversion', None) + data.pop('node_extras_conversion', None) + + +def migration_migrate_legacy_job_calculation_data(_, data): + """Apply migration 0038 - REV. 1.0.38 + + Migrates legacy `JobCalculation` data to the new process system. 
+def migration_migrate_legacy_job_calculation_data(_, data):
+    """Apply migration 0038 - REV. 1.0.38
+
+    Migrates legacy `JobCalculation` data to the new process system. Essentially old `JobCalculation` nodes, which
+    have already been migrated to `CalcJobNodes`, are missing the important attributes `process_state`, `exit_status`
+    and `process_status`. These are inferred from the old `state` attribute, which is then discarded as its values
+    have been deprecated.
+    """
+    from aiida.backends.general.migrations.calc_state import STATE_MAPPING
+
+    calc_job_node_type = 'process.calculation.calcjob.CalcJobNode.'
+    node_data = data['export_data'].get('Node', {})
+    calc_jobs = {pk for pk, values in node_data.items() if values['node_type'] == calc_job_node_type}
+
+    for pk in data['node_attributes']:
+
+        # Get a reference to the attributes, so later we update the attribute dictionary in place
+        values = data['node_attributes'][pk]
+
+        state = values.get('state', None)
+
+        # Only continue if the pk corresponds to a `CalcJobNode` *and* the `state` is one in the `STATE_MAPPING`
+        if pk not in calc_jobs or state not in STATE_MAPPING:
+            continue
+
+        # Pop the `state` attribute: in any case it has to be discarded, since its value is no longer valid
+        state = values.pop('state', None)
+
+        try:
+            mapped = STATE_MAPPING[state]
+        except KeyError:
+            pass
+        else:
+            # Add the mapped process attributes to the export dictionary, if they are not `None`, even if a value
+            # already exists
+            if mapped.exit_status is not None:
+                values['exit_status'] = mapped.exit_status
+            if mapped.process_state is not None:
+                values['process_state'] = mapped.process_state
+            if mapped.process_status is not None:
+                values['process_status'] = mapped.process_status
+
+            values['process_label'] = 'Legacy JobCalculation'
+
+
+def migrate_v5_to_v6(metadata, data, *args):  # pylint: disable=unused-argument
+    """Migration of export files from v0.5 to v0.6"""
+    old_version = '0.5'
+    new_version = '0.6'
+
+    verify_metadata_version(metadata, old_version)
+    update_metadata(metadata, new_version)
+
+    # Apply migrations
+    migration_serialize_datetime_objects(metadata, data)
+    migration_migrate_legacy_job_calculation_data(metadata, data)
diff --git a/aiida/orm/importexport.py b/aiida/orm/importexport.py
index e9ffa39ddf..3942abda91 100644
--- a/aiida/orm/importexport.py
+++ b/aiida/orm/importexport.py
@@ -28,7 +28,7 @@
 DUPL_SUFFIX = ' (Imported #{})'
 
 # Current export version
-EXPORT_VERSION = '0.5'
+EXPORT_VERSION = '0.6'
 
 # Giving names to the various entities.
diff --git a/aiida/orm/importexport.py b/aiida/orm/importexport.py
index e9ffa39ddf..3942abda91 100644
--- a/aiida/orm/importexport.py
+++ b/aiida/orm/importexport.py
@@ -28,7 +28,7 @@
 DUPL_SUFFIX = ' (Imported #{})'
 
 # Current export version
-EXPORT_VERSION = '0.5'
+EXPORT_VERSION = '0.6'
 
 # Giving names to the various entities. Attributes and links are not AiiDA
 # entities but we will refer to them as entities in the file (to simplify
@@ -321,7 +321,6 @@ def deserialize_attributes(attributes_data, conversion_data):
     if isinstance(attributes_data, dict):
         ret_data = {}
         for k, v in attributes_data.items():
-            # print("k: ", k, " v: ", v)
             if conversion_data is not None:
                 ret_data[k] = deserialize_attributes(v, conversion_data[k])
             else:
@@ -935,18 +934,11 @@ def import_data_dj(in_path, group=None, ignore_unknown_nodes=False,
                         # Get attributes from import file
                         try:
-                            attributes = data['node_attributes'][
-                                str(import_entry_id)]
-                            attributes_conversion = data[
-                                'node_attributes_conversion'][
-                                str(import_entry_id)]
+                            o.attributes = data['node_attributes'][str(import_entry_id)]
                         except KeyError:
                             raise ValueError("Unable to find attribute info "
                                              "for DbNode with UUID = {}".format(unique_id))
 
-                        # Here I have to deserialize the attributes
-                        o.attributes = deserialize_attributes(attributes, attributes_conversion)
-
                         # For DbNodes, we also have to store its extras
                         if extras_mode_new == 'import':
                             if not silent:
@@ -955,24 +947,19 @@ def import_data_dj(in_path, group=None, ignore_unknown_nodes=False,
                             import_entry_id = import_entry_ids[o.uuid]
                             # Get extras from import file
                             try:
-                                extras = data['node_extras'][
-                                    str(import_entry_id)]
-                                extras_conversion = data[
-                                    'node_extras_conversion'][
-                                    str(import_entry_id)]
+                                extras = data['node_extras'][str(import_entry_id)]
                             except KeyError:
                                 raise ValueError("Unable to find extras info "
                                                  "for DbNode with UUID = {}".format(unique_id))
-                            deserialized_extras = deserialize_attributes(extras, extras_conversion)
                             # TODO: remove when aiida extras will be moved somewhere else
                             # from here
-                            deserialized_extras = {key: value for key, value in deserialized_extras.items() if not
+                            extras = {key: value for key, value in extras.items() if not
                                       key.startswith('_aiida_')}
                             if o.node_type.endswith('code.Code.'):
-                                deserialized_extras = {key: value for key, value in deserialized_extras.items() if not
+                                extras = {key: value for key, value in extras.items() if not
                                           key == 'hidden'}
                             # till here
-                            o.extras = deserialized_extras
+                            o.extras = extras
                         elif extras_mode_new == 'none':
                             if not silent:
                                 print("SKIPPING NEW NODE EXTRAS...")
@@ -994,34 +981,23 @@ def import_data_dj(in_path, group=None, ignore_unknown_nodes=False,
                     existing_entry_id = foreign_ids_reverse_mappings[model_name][unique_id]
                     # Get extras from import file
                     try:
-                        extras = data['node_extras'][
-                            str(import_entry_id)]
-                        extras_conversion = data[
-                            'node_extras_conversion'][
-                            str(import_entry_id)]
+                        extras = data['node_extras'][str(import_entry_id)]
                     except KeyError:
                         raise ValueError("Unable to find extras info "
                                          "for DbNode with UUID = {}".format(unique_id))
 
                     # Here I have to deserialize the extras
                     old_extras = db_node.extras
-                    # old_extras = models.DbExtra.get_all_values_for_nodepk(existing_entry_id)
-                    deserialized_extras = deserialize_attributes(extras, extras_conversion)
                     # TODO: remove when aiida extras will be moved somewhere else
                     # from here
-                    deserialized_extras = {key: value for key, value in deserialized_extras.items() if not
+                    extras = {key: value for key, value in extras.items() if not
                               key.startswith('_aiida_')}
                     if models.DbNode.objects.filter(uuid=unique_id)[0].node_type.endswith('code.Code.'):
-                        deserialized_extras = {key: value for key, value in deserialized_extras.items() if not
+                        extras = {key: value for key, value in extras.items() if not
                                   key == 'hidden'}
                     # till here
-                    db_node.extras = merge_extras(old_extras, deserialized_extras,
-                                                  extras_mode_existing)
+                    db_node.extras = merge_extras(old_extras, extras, extras_mode_existing)
                     db_node.save()
-                    # merged_extras = merge_extras(old_extras, deserialized_extras, extras_mode_existing)
-                    # models.DbExtra.reset_values_for_node(
-                    #     dbnode=existing_entry_id,
-                    #     attributes=merged_extras,
-                    #     with_transaction=False)
 
                     # If there is an mtime in the field, disable the automatic update
                     # to keep the mtime that we have set here
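The extras filtering applied by the hunks above is easiest to see in isolation. Below is a standalone sketch with invented extras; the keys and the node type are examples, not data from any real node:

    # A node's extras as read directly from `data['node_extras']`
    extras = {'_aiida_hash': 'abc123', 'hidden': True, 'tags': ['fast']}
    node_type = 'data.code.Code.'  # example value

    # Internal extras managed by AiiDA itself are never imported
    extras = {key: value for key, value in extras.items() if not key.startswith('_aiida_')}

    # For `Code` nodes the `hidden` extra is likewise managed by AiiDA and dropped
    if node_type.endswith('code.Code.'):
        extras = {key: value for key, value in extras.items() if not key == 'hidden'}

    assert extras == {'tags': ['fast']}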
@@ -1449,11 +1425,8 @@ def import_data_sqla(in_path, group=None, ignore_unknown_nodes=False,
             unique_identifier = metadata['unique_identifiers'].get(entity_name, None)
 
             # so, new_entries. Also, since v0.3 it makes more sense to use the entity_name
-            #~ new_entries[entity_sig] = {}
             new_entries[entity_name] = {}
-            # existing_entries[entity_sig] = {}
             existing_entries[entity_name] = {}
-            # ~ foreign_ids_reverse_mappings[entity_sig] = {}
             foreign_ids_reverse_mappings[entity_name] = {}
 
             # Not necessarily all models are exported
@@ -1655,57 +1628,34 @@ def import_data_sqla(in_path, group=None, ignore_unknown_nodes=False,
                         import_entry_id = import_entry_ids[str(o.uuid)]
                         # Get attributes from import file
                         try:
-                            attributes = data['node_attributes'][
-                                str(import_entry_id)]
-
-                            attributes_conversion = data[
-                                'node_attributes_conversion'][
-                                str(import_entry_id)]
+                            o.attributes = data['node_attributes'][str(import_entry_id)]
                         except KeyError:
                             raise ValueError(
                                 "Unable to find attribute info "
                                 "for DbNode with UUID = {}".format(
                                     o.uuid))
 
-                        # Here I have to deserialize the attributes
-                        deserialized_attributes = deserialize_attributes(
-                            attributes, attributes_conversion)
-
-                        if deserialized_attributes:
-                            o.attributes = dict()
-                            for k, v in deserialized_attributes.items():
-                                o.attributes[k] = v
-
                         # For DbNodes, we also have to store extras
                         # Get extras from import file
                         if extras_mode_new == 'import':
                             if not silent:
                                 print("STORING NEW NODE EXTRAS...")
                             try:
-                                extras = data['node_extras'][
-                                    str(import_entry_id)]
-
-                                extras_conversion = data[
-                                    'node_extras_conversion'][
-                                    str(import_entry_id)]
+                                extras = data['node_extras'][str(import_entry_id)]
                             except KeyError:
                                 raise ValueError(
                                     "Unable to find extras info "
                                     "for DbNode with UUID = {}".format(
                                         o.uuid))
 
-                            # Here I have to deserialize the extras
-                            deserialized_extras = deserialize_attributes(extras, extras_conversion)
                             # TODO: remove when aiida extras will be moved somewhere else
                             # from here
-                            deserialized_extras = {key:value for key, value in deserialized_extras.items() if not
+                            extras = {key:value for key, value in extras.items() if not
                                       key.startswith('_aiida_')}
                             if o.node_type.endswith('code.Code.'):
-                                deserialized_extras = {key:value for key, value in deserialized_extras.items() if not
+                                extras = {key:value for key, value in extras.items() if not
                                           key == 'hidden'}
                             # till here
-                            o.extras = dict()
-                            for k, v in deserialized_extras.items():
-                                o.extras[k] = v
+                            o.extras = extras
                         elif extras_mode_new == 'none':
                             if not silent:
                                 print("SKIPPING NEW NODE EXTRAS...")
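One subtlety in the SQLAlchemy import path shown in the next hunk: after merging the extras, the code calls `flag_modified(db_node, "extras")`, because in-place changes to a JSON(B) value are not detected by the session's change tracking. The following self-contained sketch demonstrates the pattern with a stand-in model on SQLite (plain `JSON` instead of `JSONB`; it assumes SQLAlchemy 1.3 or later and is not code from the patch):

    from sqlalchemy import Column, Integer, JSON, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker
    from sqlalchemy.orm.attributes import flag_modified

    Base = declarative_base()


    class Thing(Base):  # hypothetical stand-in for DbNode
        __tablename__ = 'thing'
        id = Column(Integer, primary_key=True)
        extras = Column(JSON, default=dict)


    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    thing = Thing(extras={})
    session.add(thing)
    session.commit()

    # An in-place mutation of the JSON value is invisible to the unit of work...
    thing.extras['touched'] = True

    # ...so the attribute must be explicitly marked dirty before flushing,
    # exactly like the import code does for `db_node.extras`
    flag_modified(thing, 'extras')
    session.commit()

    assert session.query(Thing).one().extras == {'touched': True}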
@@ -1722,27 +1672,21 @@ def import_data_sqla(in_path, group=None, ignore_unknown_nodes=False,
                     import_entry_id = uuid_import_pk_match[str(db_node.uuid)]
                     # Get extras from import file
                     try:
-                        extras = data['node_extras'][
-                            str(import_entry_id)]
-                        extras_conversion = data[
-                            'node_extras_conversion'][
-                            str(import_entry_id)]
+                        extras = data['node_extras'][str(import_entry_id)]
                     except KeyError:
                         raise ValueError("Unable to find extras info "
                                          "for DbNode with UUID = {}".format(db_node.uuid))
 
-                    # Here I have to deserialize the extras
                     old_extras = db_node.extras
-                    deserialized_extras = deserialize_attributes(extras, extras_conversion)
                     # TODO: remove when aiida extras will be moved somewhere else
                     # from here
-                    deserialized_extras = {key:value for key, value in deserialized_extras.items() if not
+                    extras = {key:value for key, value in extras.items() if not
                               key.startswith('_aiida_')}
                     if db_node.node_type.endswith('code.Code.'):
-                        deserialized_extras = {key:value for key, value in deserialized_extras.items() if not
+                        extras = {key:value for key, value in extras.items() if not
                                   key == 'hidden'}
                     # till here
-                    db_node.extras = merge_extras(old_extras, deserialized_extras, extras_mode_existing)
+                    db_node.extras = merge_extras(old_extras, extras, extras_mode_existing)
                     flag_modified(db_node, "extras")
                     objects_to_update.append(db_node)
 
@@ -2521,36 +2465,25 @@ def export_tree(what, folder, allowed_licenses=None, forbidden_licenses=None,
     if not silent:
         print("STORING NODE ATTRIBUTES...")
     node_attributes = {}
-    node_attributes_conversion = {}
-    # A second QueryBuilder query to get the attributes. See if this can be
-    # optimized
+    # A second QueryBuilder query to get the attributes. See if this can be optimized
     if len(all_nodes_pk) > 0:
         all_nodes_query = QueryBuilder()
-        all_nodes_query.append(Node, filters={"id": {"in": all_nodes_pk}},
-                               project=["*"])
+        all_nodes_query.append(Node, filters={"id": {"in": all_nodes_pk}}, project=["*"])
         for res in all_nodes_query.iterall():
-            n = res[0]
-            (node_attributes[str(n.pk)],
-             node_attributes_conversion[str(n.pk)]) = serialize_dict(
-                n.attributes, track_conversion=True)
+            node_attributes[str(res[0].pk)] = res[0].attributes
 
     ## EXTRAS
     if not silent:
         print("STORING NODE EXTRAS...")
     node_extras = {}
-    node_extras_conversion = {}
-    # A second QueryBuilder query to get the extras. See if this can be
-    # optimized
+    # A second QueryBuilder query to get the extras. See if this can be optimized
     if len(all_nodes_pk) > 0:
         all_nodes_query = QueryBuilder()
-        all_nodes_query.append(Node, filters={"id": {"in": all_nodes_pk}},
-                               project=["*"])
+        all_nodes_query.append(Node, filters={"id": {"in": all_nodes_pk}}, project=["*"])
         for res in all_nodes_query.iterall():
-            n = res[0]
-            (node_extras[str(n.pk)],
-             node_extras_conversion[str(n.pk)]) = serialize_dict(n.extras, track_conversion=True)
+            node_extras[str(res[0].pk)] = res[0].extras
 
     if not silent:
         print("STORING NODE LINKS...")
@@ -2745,9 +2678,7 @@ def export_tree(what, folder, allowed_licenses=None, forbidden_licenses=None,
     data = {
         'node_attributes': node_attributes,
-        'node_attributes_conversion': node_attributes_conversion,
         'node_extras': node_extras,
-        'node_extras_conversion': node_extras_conversion,
         'export_data': export_data,
         'links_uuid': links_uuid,
         'groups_uuid': groups_uuid,
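Putting the export side together, the `data` dictionary written to a v0.6 archive now carries attributes and extras as plain JSON keyed by node pk, and the two conversion dictionaries are gone. A sketch of the resulting layout, with all values invented for illustration:

    data = {
        'node_attributes': {
            # plain JSON per node, keyed by pk; datetimes are ISO strings with timezone
            '1': {'is_valid': True, 'last_run': '2019-05-03T08:13:54.000000+00:00'},
        },
        'node_extras': {
            '1': {'tags': ['test']},
        },
        # 'node_attributes_conversion' and 'node_extras_conversion' no longer exist
        'export_data': {},
        'links_uuid': [],
        'groups_uuid': {},
    }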