Merge release_v0.12.0 into workflows #1381

Merged 34 commits from release_v0.12.0 into workflows on Apr 6, 2018.

Commits
- `9877217` Adding guide to the cookbook (giovannipizzi, Mar 28, 2018)
- `0bb39e4` Merge pull request #1349 from giovannipizzi/fix_add_cookbook_check_sc… (giovannipizzi, Mar 28, 2018)
- `a54ba63` Fix bug in inline_script generator of TcodExporter (sphuber, Mar 29, 2018)
- `425e07e` Merge pull request #1351 from sphuber/fix_bug_tcod_exporter_inline_sc… (Mar 29, 2018)
- `7b12597` Serialize the context of a WorkChain before persisting (sphuber, Mar 29, 2018)
- `6ea0505` Update the version of plumpy (sphuber, Mar 29, 2018)
- `4998141` Merge pull request #1354 from sphuber/fix_1353_serialize_workchain_co… (muhrin, Mar 29, 2018)
- `accea5d` Ensure kind name uniqueness for pymatgen structures with partial occu… (sphuber, Mar 29, 2018)
- `7b3113c` Merge pull request #1357 from sphuber/fix_1356_pymatgen_structure_par… (Mar 29, 2018)
- `44ad42d` start changelog for 0.12.0 (ltalirz, Apr 1, 2018)
- `1deda11` Cherry-picked backport of PyCifRW update to v4.2.1 (sphuber, Apr 3, 2018)
- `164ef7f` Serialize the context of a WorkChain before persisting (sphuber, Mar 29, 2018)
- `6b41461` Update the version of plumpy (sphuber, Mar 29, 2018)
- `3c1c2a0` Sort output of verdi code list by code pk to have consistent results (sphuber, Feb 23, 2018)
- `edbfcd2` Save and load the parsed inputs from the persisted state (sphuber, Apr 3, 2018)
- `f6662a7` Run the pre-release helper scripts (sphuber, Apr 3, 2018)
- `47607c8` Update the CHANGELOG.md (sphuber, Apr 3, 2018)
- `152e13e` Run pre-commit hooks (sphuber, Apr 3, 2018)
- `fe86f8c` Update version number to v0.11.4 (sphuber, Apr 3, 2018)
- `879e55c` Merge pull request #1373 from aiidateam/release_v0.11.4 (sphuber, Apr 3, 2018)
- `e2b775b` run sphinx-apidoc on rtd (ltalirz, Mar 29, 2018)
- `a89ed4a` Removing requirement of sqlalchemy-diff from docs (giovannipizzi, Mar 29, 2018)
- `3d14baf` Merge pull request #1375 from ltalirz/rtd_fix (sphuber, Apr 3, 2018)
- `a47c7c4` Merge branch 'release_v0.12.0' into fix_1348_requirements_for_docs (sphuber, Apr 3, 2018)
- `4a80568` Merge pull request #1350 from giovannipizzi/fix_1348_requirements_for… (sphuber, Apr 3, 2018)
- `b9a8d99` Merge master after release of v0.11.4 into release_v0.12.0 (sphuber, Apr 4, 2018)
- `bdf974b` Merge pull request #1376 from aiidateam/merge_master_into_release_v0.… (sphuber, Apr 4, 2018)
- `0080e39` Changed SQLA Node to get live data from db (muhrin, Apr 3, 2018)
- `202ed5e` Fixed Django computer problem (muhrin, Apr 4, 2018)
- `be8a256` Cleanup to get rid of more direct node.dbnode() calls (muhrin, Apr 4, 2018)
- `20c1ec8` Added test that checks for this bug (muhrin, Apr 5, 2018)
- `0034008` Merge pull request #1379 from muhrin/fix_1372_sqla_attributes_not_cur… (sphuber, Apr 5, 2018)
- `1888fcc` Merge remote-tracking branch 'origin/release_v0.12.0' into workflows (sphuber, Apr 5, 2018)
- `78603f2` Remove direct dbnode property access in SqlAlchemy Node implementation (sphuber, Apr 5, 2018)

Files changed
47 changes: 27 additions & 20 deletions .travis-data/test_daemon.py
@@ -30,6 +30,7 @@
number_calculations = 15 # Number of calculations to submit
number_workchains = 8 # Number of workchains to submit


def print_daemon_log():
daemon_client = DaemonClient()
daemon_log = daemon_client.daemon_log_file
@@ -42,6 +43,7 @@ def print_daemon_log():
except subprocess.CalledProcessError as e:
print "Note: the command failed, message: {}".format(e.message)


def jobs_have_finished(pks):
finished_list = [load_node(pk).is_terminated for pk in pks]
node_list = [load_node(pk) for pk in pks]
@@ -53,16 +55,18 @@ def jobs_have_finished(pks):
print "{}/{} finished".format(num_finished, len(finished_list))
return not (False in finished_list)


def print_logshow(pk):
print "Output of 'verdi calculation logshow {}':".format(pk)
try:
print subprocess.check_output(
["verdi", "calculation", "logshow", "{}".format(pk)],
stderr=subprocess.STDOUT,
)
)
except subprocess.CalledProcessError as e2:
print "Note: the command failed, message: {}".format(e2.message)


def validate_calculations(expected_results):
valid = True
actual_dict = {}
@@ -93,6 +97,7 @@ def validate_calculations(expected_results):

return valid


def validate_workchains(expected_results):
valid = True
for pk, expected_value in expected_results.iteritems():
@@ -110,6 +115,7 @@ def validate_workchains(expected_results):

return valid


def validate_cached(cached_calcs):
"""
Check that the calculations created with caching are indeed cached.
@@ -120,21 +126,22 @@ def validate_cached(cached_calcs):
for calc in cached_calcs
)


def create_calculation(code, counter, inputval, use_cache=False):
parameters = ParameterData(dict={'value': inputval})
template = ParameterData(dict={
## The following line adds a significant sleep time.
## I set it to 1 second to speed up tests
## I keep it to a non-zero value because I want
## To test the case when AiiDA finds some calcs
## in a queued state
#'cmdline_params': ["{}".format(counter % 3)], # Sleep time
'cmdline_params': ["1"],
'input_file_template': "{value}", # File just contains the value to double
'input_file_name': 'value_to_double.txt',
'output_file_name': 'output.txt',
'retrieve_temporary_files': ['triple_value.tmp']
})
## The following line adds a significant sleep time.
## I set it to 1 second to speed up tests
## I keep it to a non-zero value because I want
## To test the case when AiiDA finds some calcs
## in a queued state
# 'cmdline_params': ["{}".format(counter % 3)], # Sleep time
'cmdline_params': ["1"],
'input_file_template': "{value}", # File just contains the value to double
'input_file_name': 'value_to_double.txt',
'output_file_name': 'output.txt',
'retrieve_temporary_files': ['triple_value.tmp']
})
calc = code.new_calc()
calc.set_max_wallclock_seconds(5 * 60) # 5 min
calc.set_resources({"num_machines": 1})
@@ -150,9 +157,10 @@ def create_calculation(code, counter, inputval, use_cache=False):
'triple_value.tmp': str(inputval * 3)
}
}
print "[{}] created calculation {}, pk={}".format(counter, calc.uuid, calc.dbnode.pk)
print "[{}] created calculation {}, pk={}".format(counter, calc.uuid, calc.pk)
return calc, expected_result


def submit_calculation(code, counter, inputval):
calc, expected_result = create_calculation(
code=code, counter=counter, inputval=inputval
@@ -219,7 +227,6 @@ def create_cache_calc(code, counter, inputval):
def main():
expected_results_calculations = {}
expected_results_workchains = {}

code = Code.get_from_string(codename)

# Submitting the Calculations the old way, creating and storing a JobCalc first and submitting it
@@ -291,14 +298,14 @@ def main():
start_time = time.time()
exited_with_timeout = True
while time.time() - start_time < timeout_secs:
time.sleep(15) # Wait a few seconds
time.sleep(15) # Wait a few seconds

# Print some debug info, both for debugging reasons and to avoid
# that the test machine is shut down because there is no output

print "#"*78
print "#" * 78
print "####### TIME ELAPSED: {} s".format(time.time() - start_time)
print "#"*78
print "#" * 78
print "Output of 'verdi calculation list -a':"
try:
print subprocess.check_output(
@@ -348,8 +355,8 @@ def main():
cached_calcs.append(calc)
expected_results_calculations[calc.pk] = expected_result
if (validate_calculations(expected_results_calculations)
and validate_workchains(expected_results_workchains)
and validate_cached(cached_calcs)):
and validate_workchains(expected_results_workchains)
and validate_cached(cached_calcs)):
print_daemon_log()
print ""
print "OK, all calculations have the expected parsed result"
38 changes: 38 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,41 @@
## v0.12.0

### Improvements
- Hashing, caching and fast-forwarding (see the sketch after this list) [[#652]](https://github.com/aiidateam/aiida_core/pull/652)
- Calculation no longer stores full source file [[#1082]](https://github.com/aiidateam/aiida_core/pull/1082)
- Delete nodes via `verdi node delete` [[#1083]](https://github.com/aiidateam/aiida_core/pull/1083)
- Import structures using ASE [[#1085]](https://github.com/aiidateam/aiida_core/pull/1085)
- `StructureData` - `pymatgen` - `StructureData` roundtrip works for arbitrary kind names [[#1285]](https://github.com/aiidateam/aiida_core/pull/1285) [[#1306]](https://github.com/aiidateam/aiida_core/pull/1306) [[#1357]](https://github.com/aiidateam/aiida_core/pull/1357)

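A minimal sketch of opting a calculation in to the new caching, modelled on the `create_calculation` helper in the updated `.travis-data/test_daemon.py` above; the `store_all(use_cache=True)` call and the code label are assumptions for illustration, not confirmed API of this release:

```python
# Minimal caching sketch modelled on create_calculation() above; it
# assumes the v0.12.0 API in which store_all() accepts a use_cache flag,
# and the code label is hypothetical.
from aiida.orm import Code
from aiida.orm.data.parameter import ParameterData

code = Code.get_from_string('doubler@torquessh')  # hypothetical label

calc = code.new_calc()
calc.set_max_wallclock_seconds(5 * 60)  # 5 min
calc.set_resources({"num_machines": 1})
calc.use_parameters(ParameterData(dict={'value': 4}))

# If an equivalent calculation has already run, its outputs are attached
# from the cache ("fast-forwarding") instead of submitting a new job.
calc.store_all(use_cache=True)
```
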
### Critical bug fixes
- Add `parser_name` to `JobProcess` options [[#1118]](https://github.com/aiidateam/aiida_core/pull/1118)

### Minor bug fixes
- Cell vectors not printed correctly [[#1087]](https://github.com/aiidateam/aiida_core/pull/1087)
- Fix read-the-docs issues [[#1120]](https://github.com/aiidateam/aiida_core/pull/1120) [[#1143]](https://github.com/aiidateam/aiida_core/pull/1143)
- Fix structure/band visualization in REST API [[#1167]](https://github.com/aiidateam/aiida_core/pull/1167) [[#1182]](https://github.com/aiidateam/aiida_core/pull/1182)
- Fix `verdi work list` test [[#1286]](https://github.com/aiidateam/aiida_core/pull/1286)
- Fix `_inline_to_standalone_script` in `TCODExporter` [[#1351]](https://github.com/aiidateam/aiida_core/pull/1351)

### Miscellaneous
- Bump `qe-tools` version [[#1090]](https://github.com/aiidateam/aiida_core/pull/1090)
- Document link types [[#1174]](https://github.com/aiidateam/aiida_core/pull/1174)
- Switch to trusty + postgres 9.5 on Travis [[#1180]](https://github.com/aiidateam/aiida_core/pull/1180)
- Use raw SQL in sqlalchemy migration of `Code` [[#1291]](https://github.com/aiidateam/aiida_core/pull/1291)
- Document querying of list attributes (see the sketch after this list) [[#1326]](https://github.com/aiidateam/aiida_core/pull/1326)
- Cookbook: how to check the number of queued/running jobs in the scheduler [[#1349]](https://github.com/aiidateam/aiida_core/pull/1349)

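A short sketch of the list-attribute querying referenced above; the filter operators and the attribute key are assumptions for illustration:

```python
# Sketch of querying list-valued attributes (the feature documented in
# #1326); the operators 'of_length' and 'contains' are assumed from the
# QueryBuilder docs, and 'cmdline_params' is just an example key.
from aiida.orm.node import Node
from aiida.orm.querybuilder import QueryBuilder

qb = QueryBuilder()
qb.append(Node, filters={
    'attributes.cmdline_params': {'of_length': 1},
})
print "nodes with a one-element cmdline_params list: {}".format(qb.count())
```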

## v0.11.4

### Improvements
- PyCifRW upgraded to 4.2.1 [[#1073]](https://github.com/aiidateam/aiida_core/pull/1073)

### Critical bug fixes
- Persist and load parsed workchain inputs and do not recreate them, to avoid creating duplicates for default inputs [[#1362]](https://github.com/aiidateam/aiida_core/pull/1362)
- Serialize `WorkChain` context before persisting (illustrated after this list) [[#1354]](https://github.com/aiidateam/aiida_core/pull/1354)

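To illustrate the context-serialization fix, a hedged sketch of the workchain pattern it makes safe to checkpoint; the workchain class is invented for the example:

```python
# Hypothetical workchain illustrating the pattern fixed by #1354: nodes
# kept in self.ctx must be serialized when the checkpoint is persisted
# between outline steps.
from aiida.orm.data.parameter import ParameterData
from aiida.work.workchain import WorkChain


class ContextExample(WorkChain):

    @classmethod
    def define(cls, spec):
        super(ContextExample, cls).define(spec)
        spec.outline(cls.setup, cls.check)

    def setup(self):
        # A stored node in the context; the checkpoint written after this
        # step has to serialize it, which the fix ensures.
        self.ctx.parameters = ParameterData(dict={'value': 1}).store()

    def check(self):
        # The node is transparently restored from the persisted context.
        assert self.ctx.parameters.get_dict()['value'] == 1
```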

## v0.11.3

### Improvements
2 changes: 1 addition & 1 deletion aiida/__init__.py
@@ -13,7 +13,7 @@

__copyright__ = u"Copyright (c), This file is part of the AiiDA platform. For further information please visit http://www.aiida.net/. All rights reserved."
__license__ = "MIT license, see LICENSE.txt file."
__version__ = "0.11.3"
__version__ = "0.11.4"
__authors__ = "The AiiDA team."
__paper__ = """G. Pizzi, A. Cepellotti, R. Sabatini, N. Marzari, and B. Kozinsky, "AiiDA: automated interactive infrastructure and database for computational science", Comp. Mat. Sci 111, 218-230 (2016); http://dx.doi.org/10.1016/j.commatsci.2015.09.013 - http://www.aiida.net."""
__paper_short__ = """G. Pizzi et al., Comp. Mat. Sci 111, 218 (2016)."""
2 changes: 1 addition & 1 deletion aiida/backends/djsite/db/subtests/djangomigrations.py
@@ -38,7 +38,7 @@ def test_unexpected_calc_states(self):
job = JobCalculation(**calc_params)
job.store()
# Now save the errant state
DbCalcState(dbnode=job.dbnode, state=state).save()
DbCalcState(dbnode=job._dbnode, state=state).save()

time_before_fix = timezone.now()

20 changes: 9 additions & 11 deletions aiida/backends/djsite/db/subtests/generic.py
@@ -16,7 +16,6 @@
from aiida.orm.node import Node



class TestComputer(AiidaTestCase):
"""
Test the Computer class.
@@ -43,8 +42,6 @@ def test_deletion(self):

_ = JobCalculation(**calc_params).store()

#print "Node stored with pk:", _.dbnode.pk

# This should fail, because there is at least a calculation
# using this computer (the one created just above)
with self.assertRaises(InvalidOperation):
@@ -156,31 +153,32 @@ class TestDbExtrasDjango(AiidaTestCase):
"""
Test DbAttributes.
"""

def test_replacement_1(self):
from aiida.backends.djsite.db.models import DbExtra

n1 = Node().store()
n2 = Node().store()

DbExtra.set_value_for_node(n1.dbnode, "pippo", [1, 2, 'a'])
DbExtra.set_value_for_node(n1.dbnode, "pippobis", [5, 6, 'c'])
DbExtra.set_value_for_node(n2.dbnode, "pippo2", [3, 4, 'b'])
DbExtra.set_value_for_node(n1._dbnode, "pippo", [1, 2, 'a'])
DbExtra.set_value_for_node(n1._dbnode, "pippobis", [5, 6, 'c'])
DbExtra.set_value_for_node(n2._dbnode, "pippo2", [3, 4, 'b'])

self.assertEquals(n1.get_extras(), {'pippo': [1, 2, 'a'],
'pippobis': [5, 6, 'c'],
'_aiida_hash': n1.get_hash()
})
'pippobis': [5, 6, 'c'],
'_aiida_hash': n1.get_hash()
})
self.assertEquals(n2.get_extras(), {'pippo2': [3, 4, 'b'],
'_aiida_hash': n2.get_hash()
})

new_attrs = {"newval1": "v", "newval2": [1, {"c": "d", "e": 2}]}

DbExtra.reset_values_for_node(n1.dbnode, attributes=new_attrs)
DbExtra.reset_values_for_node(n1._dbnode, attributes=new_attrs)
self.assertEquals(n1.get_extras(), new_attrs)
self.assertEquals(n2.get_extras(), {'pippo2': [3, 4, 'b'], '_aiida_hash': n2.get_hash()})

DbExtra.del_value_for_node(n1.dbnode, key='newval2')
DbExtra.del_value_for_node(n1._dbnode, key='newval2')
del new_attrs['newval2']
self.assertEquals(n1.get_extras(), new_attrs)
# Also check that other nodes were not damaged
3 changes: 1 addition & 2 deletions aiida/backends/sqlalchemy/__init__.py
@@ -10,7 +10,7 @@

# The next two serve as 'global' variables, set in the load_dbenv
# call. They are properly reset upon forking.
engine = None
engine = None
scopedsessionclass = None


@@ -28,4 +28,3 @@ def get_scoped_session():
s = scopedsessionclass()

return s

7 changes: 4 additions & 3 deletions aiida/backends/sqlalchemy/models/base.py
@@ -16,12 +16,11 @@
import aiida.backends.sqlalchemy
from aiida.common.exceptions import InvalidOperation


# Taken from
# https://github.com/mitsuhiko/flask-sqlalchemy/blob/master/flask_sqlalchemy/__init__.py#L491




class _QueryProperty(object):

def __init__(self, query_class=orm.Query):
@@ -63,8 +62,8 @@ def __iter__(self):

from aiida.backends.sqlalchemy import get_scoped_session

class Model(object):

class Model(object):
query = _QueryProperty()

session = _SessionProperty()
@@ -92,4 +91,6 @@ def delete(self, commit=True):
sess.delete(self)
if commit:
sess.commit()


Base = declarative_base(cls=Model, name='Model')
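
The `Model` base class above follows the flask-sqlalchemy pattern linked in the comment: `_QueryProperty` exposes a `query` attribute on every mapped class, bound to the scoped session. A minimal usage sketch, assuming the `DbNode` model from the same backend:

```python
# Usage sketch for the query property installed by the Model base class;
# DbNode is the node model defined elsewhere in
# aiida.backends.sqlalchemy.models.
from aiida.backends.sqlalchemy.models.node import DbNode

# Equivalent to get_scoped_session().query(DbNode).filter_by(id=1).first()
node = DbNode.query.filter_by(id=1).first()
```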