From 4fa971fd9428ff68ce8530761c23da8dc3252b53 Mon Sep 17 00:00:00 2001
From: Peter Meisrimel
Date: Mon, 11 Nov 2024 16:03:06 +0100
Subject: [PATCH] replaced tests conditional to Assimulo installation by markers

---
 pytest.ini                 |   2 +
 tests/test_fmi.py          | 857 ++++++++++++++++++-------------
 tests/test_fmi_coupled.py  |  98 ++---
 tests/test_fmi_estimate.py | 158 +++----
 tests/test_io.py           | 847 ++++++++++++++++++------------------
 5 files changed, 980 insertions(+), 982 deletions(-)

diff --git a/pytest.ini b/pytest.ini
index 3252469a..b58567ef 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -3,3 +3,5 @@ testpaths = tests
 filterwarnings =
     ignore:.*does not support directional derivatives.*:UserWarning
+markers =
+    assimulo
\ No newline at end of file
diff --git a/tests/test_fmi.py b/tests/test_fmi.py
index 07e6fd8a..6760808a 100644
--- a/tests/test_fmi.py
+++ b/tests/test_fmi.py
@@ -44,11 +44,10 @@ def solve(self):
         pass
 
-assimulo_installed = True
 try:
     import assimulo
 except ImportError:
-    assimulo_installed = False
+    pass
 
 file_path = os.path.dirname(os.path.abspath(__file__))
 
@@ -72,107 +71,107 @@ def _helper_unzipped_fmu_exception_invalid_dir(fmu_loader):
     with pytest.raises(FMUException, match = err_msg):
         fmu = fmu_loader(temp_dir, allow_unzipped_fmu = True)
 
-if assimulo_installed:
-    class Test_FMUModelME1_Simulation:
-        def test_simulate_with_debug_option_no_state(self):
-            """ Verify that an instance of CVodeDebugInformation is created """
-            model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NoState.Example1.fmu"), _connect_dll=False)
+@pytest.mark.assimulo
+class Test_FMUModelME1_Simulation:
+    def test_simulate_with_debug_option_no_state(self):
+        """ Verify that an instance of CVodeDebugInformation is created """
+        model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NoState.Example1.fmu"), _connect_dll=False)
 
-            opts=model.simulate_options()
-            opts["logging"] = True
-            opts["result_handling"] = "csv" # set to anything except 'binary'
+        opts=model.simulate_options()
+        opts["logging"] = True
+        opts["result_handling"] = "csv" # set to anything except 'binary'
 
-            #Verify that a simulation is successful
-            res=model.simulate(options=opts)
+        #Verify that a simulation is successful
+        res=model.simulate(options=opts)
 
-            from pyfmi.debug import CVodeDebugInformation
-            debug = CVodeDebugInformation("NoState_Example1_debug.txt")
+        from pyfmi.debug import CVodeDebugInformation
+        debug = CVodeDebugInformation("NoState_Example1_debug.txt")
 
-        def test_no_result(self):
-            model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
+    def test_no_result(self):
+        model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
 
-            opts = model.simulate_options()
-            opts["result_handling"] = None
-            res = model.simulate(options=opts)
+        opts = model.simulate_options()
+        opts["result_handling"] = None
+        res = model.simulate(options=opts)
 
-            with pytest.raises(Exception):
-                res._get_result_data()
+        with pytest.raises(Exception):
+            res._get_result_data()
 
-            model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
+        model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False)
 
-            opts = model.simulate_options()
-            opts["return_result"] = False
-            res = model.simulate(options=opts)
+        opts = 
model.simulate_options() + opts["return_result"] = False + res = model.simulate(options=opts) + + with pytest.raises(Exception): + res._get_result_data() - with pytest.raises(Exception): - res._get_result_data() + def test_custom_result_handler(self): + model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - def test_custom_result_handler(self): - model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + class A: + pass + class B(ResultHandler): + def get_result(self): + return None - class A: - pass - class B(ResultHandler): - def get_result(self): - return None + opts = model.simulate_options() + opts["result_handling"] = "hejhej" + with pytest.raises(Exception): + model.simulate(options=opts) + opts["result_handling"] = "custom" + with pytest.raises(Exception): + model.simulate(options=opts) + opts["result_handler"] = A() + with pytest.raises(Exception): + model.simulate(options=opts) + opts["result_handler"] = B() + res = model.simulate(options=opts) - opts = model.simulate_options() - opts["result_handling"] = "hejhej" - with pytest.raises(Exception): - model.simulate(options=opts) - opts["result_handling"] = "custom" - with pytest.raises(Exception): - model.simulate(options=opts) - opts["result_handler"] = A() - with pytest.raises(Exception): - model.simulate(options=opts) - opts["result_handler"] = B() - res = model.simulate(options=opts) + def setup_atol_auto_update_test_base(self): + model = Dummy_FMUModelME1([], FMU_PATHS.ME1.nominal_test4, _connect_dll=False) + model.override_nominal_continuous_states = False + opts = model.simulate_options() + opts["return_result"] = False + opts["solver"] = "CVode" + return model, opts - def setup_atol_auto_update_test_base(self): - model = Dummy_FMUModelME1([], FMU_PATHS.ME1.nominal_test4, _connect_dll=False) - model.override_nominal_continuous_states = False - opts = model.simulate_options() - opts["return_result"] = False - opts["solver"] = "CVode" - return model, opts - - def test_atol_auto_update1(self): - """ - Tests that atol automatically gets updated when "atol = factor * pre_init_nominals". - """ - model, opts = self.setup_atol_auto_update_test_base() - - opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) - - def test_atol_auto_update2(self): - """ - Tests that atol doesn't get auto-updated when heuristic fails. - """ - model, opts = self.setup_atol_auto_update_test_base() - - opts["CVode_options"]["atol"] = (0.01 * model.nominal_continuous_states) + [0.01, 0.01] - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) - - def test_atol_auto_update3(self): - """ - Tests that atol doesn't get auto-updated when nominals are never retrieved. 
- """ - model, opts = self.setup_atol_auto_update_test_base() - - opts["CVode_options"]["atol"] = [0.02, 0.01] - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - - # NOTE: - # There are more tests for ME2 for auto update of atol, but it should be enough to test - # one FMI version for that, because they mainly test algorithm drivers functionality. + def test_atol_auto_update1(self): + """ + Tests that atol automatically gets updated when "atol = factor * pre_init_nominals". + """ + model, opts = self.setup_atol_auto_update_test_base() + + opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) + + def test_atol_auto_update2(self): + """ + Tests that atol doesn't get auto-updated when heuristic fails. + """ + model, opts = self.setup_atol_auto_update_test_base() + + opts["CVode_options"]["atol"] = (0.01 * model.nominal_continuous_states) + [0.01, 0.01] + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) + + def test_atol_auto_update3(self): + """ + Tests that atol doesn't get auto-updated when nominals are never retrieved. + """ + model, opts = self.setup_atol_auto_update_test_base() + + opts["CVode_options"]["atol"] = [0.02, 0.01] + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) + + # NOTE: + # There are more tests for ME2 for auto update of atol, but it should be enough to test + # one FMI version for that, because they mainly test algorithm drivers functionality. 
class Test_FMUModelME1: @@ -703,419 +702,419 @@ def test_error_check_invalid_value(self): assert expected_substr in str(e), f"Error was {str(e)}, expected substring {expected_substr}" assert error_raised -if assimulo_installed: - class Test_FMUModelME2_Simulation: - def test_basicsens1(self): - #Noncompliant FMI test as 'd' is parameter is not supposed to be able to be set during simulation - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens1.fmu"), _connect_dll=False) +@pytest.mark.assimulo +class Test_FMUModelME2_Simulation: + def test_basicsens1(self): + #Noncompliant FMI test as 'd' is parameter is not supposed to be able to be set during simulation + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens1.fmu"), _connect_dll=False) - def f(*args, **kwargs): - d = model.values[model.variables["d"].value_reference] - x = model.continuous_states[0] - model.values[model.variables["der(x)"].value_reference] = d*x - return np.array([d*x]) + def f(*args, **kwargs): + d = model.values[model.variables["d"].value_reference] + x = model.continuous_states[0] + model.values[model.variables["der(x)"].value_reference] = d*x + return np.array([d*x]) - model.get_derivatives = f + model.get_derivatives = f - opts = model.simulate_options() - opts["sensitivities"] = ["d"] + opts = model.simulate_options() + opts["sensitivities"] = ["d"] - res = model.simulate(options=opts) - assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) + res = model.simulate(options=opts) + assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) - assert res.solver.statistics["nsensfcnfcns"] > 0 + assert res.solver.statistics["nsensfcnfcns"] > 0 - def test_basicsens1dir(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens1.fmu"), _connect_dll=False) + def test_basicsens1dir(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens1.fmu"), _connect_dll=False) - caps = model.get_capability_flags() - caps["providesDirectionalDerivatives"] = True - model.get_capability_flags = lambda : caps + caps = model.get_capability_flags() + caps["providesDirectionalDerivatives"] = True + model.get_capability_flags = lambda : caps - def f(*args, **kwargs): - d = model.values[model.variables["d"].value_reference] - x = model.continuous_states[0] - model.values[model.variables["der(x)"].value_reference] = d*x - return np.array([d*x]) + def f(*args, **kwargs): + d = model.values[model.variables["d"].value_reference] + x = model.continuous_states[0] + model.values[model.variables["der(x)"].value_reference] = d*x + return np.array([d*x]) + + def d(*args, **kwargs): + if args[0][0] == 40: + return np.array([-1.0]) + else: + return model.continuous_states + + model.get_directional_derivative = d + model.get_derivatives = f + model._provides_directional_derivatives = lambda : True - def d(*args, **kwargs): - if args[0][0] == 40: - return np.array([-1.0]) - else: - return model.continuous_states + opts = model.simulate_options() + opts["sensitivities"] = ["d"] - model.get_directional_derivative = d - model.get_derivatives = f - model._provides_directional_derivatives = lambda : True + res = model.simulate(options=opts) + assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) - opts = model.simulate_options() - opts["sensitivities"] = ["d"] + assert res.solver.statistics["nsensfcnfcns"] > 0 + assert res.solver.statistics["nfcnjacs"] == 
0 - res = model.simulate(options=opts) - assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) + def test_basicsens2(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens2.fmu"), _connect_dll=False) - assert res.solver.statistics["nsensfcnfcns"] > 0 - assert res.solver.statistics["nfcnjacs"] == 0 + caps = model.get_capability_flags() + caps["providesDirectionalDerivatives"] = True + model.get_capability_flags = lambda : caps - def test_basicsens2(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens2.fmu"), _connect_dll=False) + def f(*args, **kwargs): + d = model.values[model.variables["d"].value_reference] + x = model.continuous_states[0] + model.values[model.variables["der(x)"].value_reference] = d*x + return np.array([d*x]) + + def d(*args, **kwargs): + if args[0][0] == 40: + return np.array([-1.0]) + else: + return model.continuous_states + + model.get_directional_derivative = d + model.get_derivatives = f + model._provides_directional_derivatives = lambda : True - caps = model.get_capability_flags() - caps["providesDirectionalDerivatives"] = True - model.get_capability_flags = lambda : caps + opts = model.simulate_options() + opts["sensitivities"] = ["d"] - def f(*args, **kwargs): - d = model.values[model.variables["d"].value_reference] - x = model.continuous_states[0] - model.values[model.variables["der(x)"].value_reference] = d*x - return np.array([d*x]) + res = model.simulate(options=opts) + assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) - def d(*args, **kwargs): - if args[0][0] == 40: - return np.array([-1.0]) - else: - return model.continuous_states + assert res.solver.statistics["nsensfcnfcns"] == 0 - model.get_directional_derivative = d - model.get_derivatives = f - model._provides_directional_derivatives = lambda : True + def test_relative_tolerance(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["sensitivities"] = ["d"] + opts = model.simulate_options() + opts["CVode_options"]["rtol"] = 1e-8 - res = model.simulate(options=opts) - assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) + res = model.simulate(options=opts) - assert res.solver.statistics["nsensfcnfcns"] == 0 + assert res.options["CVode_options"]["atol"] == 1e-10 - def test_relative_tolerance(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + def test_simulate_with_debug_option_no_state(self): + """ Verify that an instance of CVodeDebugInformation is created """ + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["CVode_options"]["rtol"] = 1e-8 + opts=model.simulate_options() + opts["logging"] = True + opts["result_handling"] = "csv" # set to anything except 'binary' - res = model.simulate(options=opts) + #Verify that a simulation is successful + res=model.simulate(options=opts) + + from pyfmi.debug import CVodeDebugInformation + debug = CVodeDebugInformation("NoState_Example1_debug.txt") + + def test_maxord_is_set(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + opts = model.simulate_options() + opts["solver"] = "CVode" + 
opts["CVode_options"]["maxord"] = 1 - assert res.options["CVode_options"]["atol"] == 1e-10 + res = model.simulate(final_time=1.5,options=opts) - def test_simulate_with_debug_option_no_state(self): - """ Verify that an instance of CVodeDebugInformation is created """ - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + assert res.solver.maxord == 1 - opts=model.simulate_options() - opts["logging"] = True - opts["result_handling"] = "csv" # set to anything except 'binary' + def test_with_jacobian_option(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + opts = model.simulate_options() + opts["solver"] = "CVode" + opts["result_handling"] = None - #Verify that a simulation is successful - res=model.simulate(options=opts) + def run_case(expected, default="Default"): + model.reset() + res = model.simulate(final_time=1.5,options=opts, algorithm=NoSolveAlg) + assert res.options["with_jacobian"] == default, res.options["with_jacobian"] + assert res.solver.problem._with_jacobian == expected, res.solver.problem._with_jacobian - from pyfmi.debug import CVodeDebugInformation - debug = CVodeDebugInformation("NoState_Example1_debug.txt") + run_case(False) - def test_maxord_is_set(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["solver"] = "CVode" - opts["CVode_options"]["maxord"] = 1 + model.get_ode_sizes = lambda: (PYFMI_JACOBIAN_LIMIT+1, 0) + run_case(True) - res = model.simulate(final_time=1.5,options=opts) + opts["solver"] = "Radau5ODE" + run_case(False) - assert res.solver.maxord == 1 + opts["solver"] = "CVode" + opts["with_jacobian"] = False + run_case(False, False) - def test_with_jacobian_option(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + model.get_ode_sizes = lambda: (PYFMI_JACOBIAN_LIMIT-1, 0) + opts["with_jacobian"] = True + run_case(True, True) + + def test_sparse_option(self): + + def run_case(expected_jacobian, expected_sparse, fnbr=0, nnz={}, set_sparse=False): + class Sparse_FMUModelME2(Dummy_FMUModelME2): + def get_derivatives_dependencies(self): + return (nnz, {}) + + model = Sparse_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) opts = model.simulate_options() opts["solver"] = "CVode" opts["result_handling"] = None + if set_sparse: + opts["CVode_options"]["linear_solver"] = "SPARSE" - def run_case(expected, default="Default"): - model.reset() - res = model.simulate(final_time=1.5,options=opts, algorithm=NoSolveAlg) - assert res.options["with_jacobian"] == default, res.options["with_jacobian"] - assert res.solver.problem._with_jacobian == expected, res.solver.problem._with_jacobian + model.get_ode_sizes = lambda: (fnbr, 0) - run_case(False) + res = model.simulate(final_time=1.5,options=opts, algorithm=NoSolveAlg) + assert res.solver.problem._with_jacobian == expected_jacobian, res.solver.problem._with_jacobian + assert res.solver.linear_solver == expected_sparse, res.solver.linear_solver - model.get_ode_sizes = lambda: (PYFMI_JACOBIAN_LIMIT+1, 0) - run_case(True) + run_case(False, "DENSE") + run_case(True, "DENSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT**2}) + run_case(True, 
"SPARSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT}) + run_case(True, "SPARSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT}, True) - opts["solver"] = "Radau5ODE" - run_case(False) + def test_ncp_option(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + opts = model.simulate_options() + assert opts["ncp"] == 500, opts["ncp"] - opts["solver"] = "CVode" - opts["with_jacobian"] = False - run_case(False, False) + def test_solver_options(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + opts = model.simulate_options() - model.get_ode_sizes = lambda: (PYFMI_JACOBIAN_LIMIT-1, 0) - opts["with_jacobian"] = True - run_case(True, True) + try: + opts["CVode_options"] = "ShouldFail" + raise Exception("Setting an incorrect option should lead to exception being thrown, it wasn't") + except UnrecognizedOptionError: + pass - def test_sparse_option(self): + opts["CVode_options"] = {"maxh":1.0} + assert opts["CVode_options"]["atol"] == "Default", "Default should have been changed: " + opts["CVode_options"]["atol"] + assert opts["CVode_options"]["maxh"] == 1.0, "Value should have been changed to 1.0: " + opts["CVode_options"]["maxh"] - def run_case(expected_jacobian, expected_sparse, fnbr=0, nnz={}, set_sparse=False): - class Sparse_FMUModelME2(Dummy_FMUModelME2): - def get_derivatives_dependencies(self): - return (nnz, {}) + def test_solver_options_using_defaults(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + opts = model.simulate_options() - model = Sparse_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["solver"] = "CVode" - opts["result_handling"] = None - if set_sparse: - opts["CVode_options"]["linear_solver"] = "SPARSE" + opts["CVode_options"] = {"maxh":1.0} + assert opts["CVode_options"]["atol"] == "Default", "Default should have been changed: " + opts["CVode_options"]["atol"] + assert opts["CVode_options"]["maxh"] == 1.0, "Value should have been changed to 1.0: " + opts["CVode_options"]["maxh"] - model.get_ode_sizes = lambda: (fnbr, 0) + opts["CVode_options"] = {"atol":1e-6} #Defaults should be used together with only the option atol set + assert opts["CVode_options"]["atol"] == 1e-6, "Default should have been changed: " + opts["CVode_options"]["atol"] + assert opts["CVode_options"]["maxh"] == "Default", "Value should have been default is: " + opts["CVode_options"]["maxh"] - res = model.simulate(final_time=1.5,options=opts, algorithm=NoSolveAlg) - assert res.solver.problem._with_jacobian == expected_jacobian, res.solver.problem._with_jacobian - assert res.solver.linear_solver == expected_sparse, res.solver.linear_solver + def test_deepcopy_option(self): + opts = AssimuloFMIAlgOptions() + opts["CVode_options"]["maxh"] = 2.0 - run_case(False, "DENSE") - run_case(True, "DENSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT**2}) - run_case(True, "SPARSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT}) - run_case(True, "SPARSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT}, True) + import copy - def test_ncp_option(self): - model = 
Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() - assert opts["ncp"] == 500, opts["ncp"] + opts_copy = copy.deepcopy(opts) - def test_solver_options(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() + assert opts["CVode_options"]["maxh"] == opts_copy["CVode_options"]["maxh"], "Deepcopy not working..." - try: - opts["CVode_options"] = "ShouldFail" - raise Exception("Setting an incorrect option should lead to exception being thrown, it wasn't") - except UnrecognizedOptionError: - pass + def test_maxh_option(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + opts = model.simulate_options() + opts["result_handling"] = None - opts["CVode_options"] = {"maxh":1.0} - assert opts["CVode_options"]["atol"] == "Default", "Default should have been changed: " + opts["CVode_options"]["atol"] - assert opts["CVode_options"]["maxh"] == 1.0, "Value should have been changed to 1.0: " + opts["CVode_options"]["maxh"] + def run_case(tstart, tstop, solver, ncp="Default"): + model.reset() - def test_solver_options_using_defaults(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() + opts["solver"] = solver - opts["CVode_options"] = {"maxh":1.0} - assert opts["CVode_options"]["atol"] == "Default", "Default should have been changed: " + opts["CVode_options"]["atol"] - assert opts["CVode_options"]["maxh"] == 1.0, "Value should have been changed to 1.0: " + opts["CVode_options"]["maxh"] + if ncp != "Default": + opts["ncp"] = ncp - opts["CVode_options"] = {"atol":1e-6} #Defaults should be used together with only the option atol set - assert opts["CVode_options"]["atol"] == 1e-6, "Default should have been changed: " + opts["CVode_options"]["atol"] - assert opts["CVode_options"]["maxh"] == "Default", "Value should have been default is: " + opts["CVode_options"]["maxh"] + if opts["ncp"] == 0: + expected = 0.0 + else: + expected = (float(tstop)-float(tstart))/float(opts["ncp"]) - def test_deepcopy_option(self): - opts = AssimuloFMIAlgOptions() - opts["CVode_options"]["maxh"] = 2.0 + res = model.simulate(start_time=tstart, final_time=tstop,options=opts, algorithm=NoSolveAlg) + assert res.solver.maxh == expected, res.solver.maxh + assert res.options[solver+"_options"]["maxh"] == "Default", res.options[solver+"_options"]["maxh"] - import copy + run_case(0,1,"CVode") + run_case(0,1,"CVode", 0) + run_case(0,1,"Radau5ODE") + run_case(0,1,"Dopri5") + run_case(0,1,"RodasODE") + run_case(0,1,"LSODAR") + run_case(0,1,"LSODAR") + + def test_rtol_auto_update(self): + """ Test that default rtol picks up the unbounded attribute. 
""" + model = Dummy_FMUModelME2([], FMU_PATHS.ME2.coupled_clutches_modified, _connect_dll=False) + + res = model.simulate() + + # verify appropriate rtol(s) + for i, state in enumerate(model.get_states_list().keys()): + if res.solver.supports.get('rtol_as_vector', False): + # automatic construction of rtol vector + if model.get_variable_unbounded(state): + assert res.solver.rtol[i] == 0 + else: + assert res.solver.rtol[i] > 0 + else: # no support: scalar rtol + assert isinstance(res.solver.rtol, float) - opts_copy = copy.deepcopy(opts) + def test_rtol_vector_manual_valid(self): + """ Tests manual valid rtol vector works; if supported. """ - assert opts["CVode_options"]["maxh"] == opts_copy["CVode_options"]["maxh"], "Deepcopy not working..." + model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) + + opts = model.simulate_options() + opts["CVode_options"]["rtol"] = [1e-5, 0.] + + try: + res = model.simulate(options=opts) + # solver support + assert res.solver.rtol[0] == 1e-5 + assert res.solver.rtol[1] == 0. + except InvalidOptionException as e: # if no solver support + assert str(e).startswith("Failed to set the solver option 'rtol'") + + def test_rtol_vector_manual_size_mismatch(self): + """ Tests invalid rtol vector: size mismatch. """ + model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) + + opts = model.simulate_options() + opts["CVode_options"]["rtol"] = [1e-5, 0, 1e-5] + + err_msg = "If the relative tolerance is provided as a vector, it need to be equal to the number of states." + with pytest.raises(InvalidOptionException, match = err_msg): + model.simulate(options=opts) - def test_maxh_option(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = None + def test_rtol_vector_manual_invalid(self): + """ Tests invalid rtol vector: different nonzero values. """ + + model = FMUModelME2(FMU_PATHS.ME2.coupled_clutches, _connect_dll=False) - def run_case(tstart, tstop, solver, ncp="Default"): - model.reset() + opts = model.simulate_options() + opts["CVode_options"]["rtol"] = [1e-5, 0, 1e-5, 1e-5, 0, 1e-5,1e-6, 0] + + err_msg = "If the relative tolerance is provided as a vector, the values need to be equal except for zeros." + with pytest.raises(InvalidOptionException, match = err_msg): + model.simulate(options=opts) - opts["solver"] = solver + def test_rtol_vector_manual_scalar_conversion(self): + """ Test automatic scalar conversion of trivial rtol vector. """ + model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) + + opts = model.simulate_options() + opts["CVode_options"]["rtol"] = [1e-5, 1e-5] + + #Verify no exception is raised as the rtol vector should be treated as a scalar + res = model.simulate(options=opts) + assert res.solver.rtol == 1e-5 + + def test_rtol_vector_unsupported(self): + """ Test that rtol as a vector triggers exceptions for unsupported solvers. 
""" + model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) + opts = model.simulate_options() + opts["result_handling"] = None - if ncp != "Default": - opts["ncp"] = ncp + def run_case(solver): + model.reset() - if opts["ncp"] == 0: - expected = 0.0 - else: - expected = (float(tstop)-float(tstart))/float(opts["ncp"]) - - res = model.simulate(start_time=tstart, final_time=tstop,options=opts, algorithm=NoSolveAlg) - assert res.solver.maxh == expected, res.solver.maxh - assert res.options[solver+"_options"]["maxh"] == "Default", res.options[solver+"_options"]["maxh"] - - run_case(0,1,"CVode") - run_case(0,1,"CVode", 0) - run_case(0,1,"Radau5ODE") - run_case(0,1,"Dopri5") - run_case(0,1,"RodasODE") - run_case(0,1,"LSODAR") - run_case(0,1,"LSODAR") - - def test_rtol_auto_update(self): - """ Test that default rtol picks up the unbounded attribute. """ - model = Dummy_FMUModelME2([], FMU_PATHS.ME2.coupled_clutches_modified, _connect_dll=False) - - res = model.simulate() - - # verify appropriate rtol(s) - for i, state in enumerate(model.get_states_list().keys()): - if res.solver.supports.get('rtol_as_vector', False): - # automatic construction of rtol vector - if model.get_variable_unbounded(state): - assert res.solver.rtol[i] == 0 - else: - assert res.solver.rtol[i] > 0 - else: # no support: scalar rtol - assert isinstance(res.solver.rtol, float) - - def test_rtol_vector_manual_valid(self): - """ Tests manual valid rtol vector works; if supported. """ - - model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) - - opts = model.simulate_options() - opts["CVode_options"]["rtol"] = [1e-5, 0.] + opts["solver"] = solver + opts[solver+"_options"]["rtol"] = [1e-5, 0.0] try: res = model.simulate(options=opts) - # solver support + # solver support; check tolerances assert res.solver.rtol[0] == 1e-5 - assert res.solver.rtol[1] == 0. - except InvalidOptionException as e: # if no solver support + assert res.solver.rtol[1] == 0.0 + except InvalidOptionException as e: assert str(e).startswith("Failed to set the solver option 'rtol'") - - def test_rtol_vector_manual_size_mismatch(self): - """ Tests invalid rtol vector: size mismatch. """ - model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) - - opts = model.simulate_options() - opts["CVode_options"]["rtol"] = [1e-5, 0, 1e-5] - - err_msg = "If the relative tolerance is provided as a vector, it need to be equal to the number of states." - with pytest.raises(InvalidOptionException, match = err_msg): - model.simulate(options=opts) + return # OK - def test_rtol_vector_manual_invalid(self): - """ Tests invalid rtol vector: different nonzero values. """ - - model = FMUModelME2(FMU_PATHS.ME2.coupled_clutches, _connect_dll=False) + run_case("CVode") + run_case("Radau5ODE") + run_case("Dopri5") + run_case("RodasODE") + run_case("LSODAR") + + def setup_atol_auto_update_test_base(self): + model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) + model.override_nominal_continuous_states = False + opts = model.simulate_options() + opts["return_result"] = False + opts["solver"] = "CVode" + return model, opts - opts = model.simulate_options() - opts["CVode_options"]["rtol"] = [1e-5, 0, 1e-5, 1e-5, 0, 1e-5,1e-6, 0] - - err_msg = "If the relative tolerance is provided as a vector, the values need to be equal except for zeros." 
- with pytest.raises(InvalidOptionException, match = err_msg): - model.simulate(options=opts) + def test_atol_auto_update1(self): + """ + Tests that atol automatically gets updated when "atol = factor * pre_init_nominals". + """ + model, opts = self.setup_atol_auto_update_test_base() - def test_rtol_vector_manual_scalar_conversion(self): - """ Test automatic scalar conversion of trivial rtol vector. """ - model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) - - opts = model.simulate_options() - opts["CVode_options"]["rtol"] = [1e-5, 1e-5] - - #Verify no exception is raised as the rtol vector should be treated as a scalar - res = model.simulate(options=opts) - assert res.solver.rtol == 1e-5 + opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) + + def test_atol_auto_update2(self): + """ + Tests that atol doesn't get auto-updated when heuristic fails. + """ + model, opts = self.setup_atol_auto_update_test_base() + + opts["CVode_options"]["atol"] = (0.01 * model.nominal_continuous_states) + [0.01, 0.01] + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) + + def test_atol_auto_update3(self): + """ + Tests that atol doesn't get auto-updated when nominals are never retrieved. + """ + model, opts = self.setup_atol_auto_update_test_base() + + opts["CVode_options"]["atol"] = [0.02, 0.01] + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) + + def test_atol_auto_update4(self): + """ + Tests that atol is not auto-updated when it's set the "correct" way (post initialization). + """ + model, opts = self.setup_atol_auto_update_test_base() - def test_rtol_vector_unsupported(self): - """ Test that rtol as a vector triggers exceptions for unsupported solvers. """ - model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = None + model.setup_experiment() + model.initialize() + opts["initialize"] = False + opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) - def run_case(solver): - model.reset() - - opts["solver"] = solver - opts[solver+"_options"]["rtol"] = [1e-5, 0.0] - - try: - res = model.simulate(options=opts) - # solver support; check tolerances - assert res.solver.rtol[0] == 1e-5 - assert res.solver.rtol[1] == 0.0 - except InvalidOptionException as e: - assert str(e).startswith("Failed to set the solver option 'rtol'") - return # OK - - run_case("CVode") - run_case("Radau5ODE") - run_case("Dopri5") - run_case("RodasODE") - run_case("LSODAR") + def test_atol_auto_update5(self): + """ + Tests that atol is automatically set and depends on rtol. 
+ """ + model, opts = self.setup_atol_auto_update_test_base() - def setup_atol_auto_update_test_base(self): - model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) - model.override_nominal_continuous_states = False - opts = model.simulate_options() - opts["return_result"] = False - opts["solver"] = "CVode" - return model, opts - - def test_atol_auto_update1(self): - """ - Tests that atol automatically gets updated when "atol = factor * pre_init_nominals". - """ - model, opts = self.setup_atol_auto_update_test_base() - - opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) - - def test_atol_auto_update2(self): - """ - Tests that atol doesn't get auto-updated when heuristic fails. - """ - model, opts = self.setup_atol_auto_update_test_base() - - opts["CVode_options"]["atol"] = (0.01 * model.nominal_continuous_states) + [0.01, 0.01] - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) - - def test_atol_auto_update3(self): - """ - Tests that atol doesn't get auto-updated when nominals are never retrieved. - """ - model, opts = self.setup_atol_auto_update_test_base() - - opts["CVode_options"]["atol"] = [0.02, 0.01] - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - - def test_atol_auto_update4(self): - """ - Tests that atol is not auto-updated when it's set the "correct" way (post initialization). - """ - model, opts = self.setup_atol_auto_update_test_base() - - model.setup_experiment() - model.initialize() - opts["initialize"] = False - opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) - - def test_atol_auto_update5(self): - """ - Tests that atol is automatically set and depends on rtol. - """ - model, opts = self.setup_atol_auto_update_test_base() - - opts["CVode_options"]["rtol"] = 1e-6 - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [3e-8, 3e-8]) + opts["CVode_options"]["rtol"] = 1e-6 + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [3e-8, 3e-8]) - def test_atol_auto_update6(self): - """ - Tests that rtol doesn't affect explicitly set atol. - """ - model, opts = self.setup_atol_auto_update_test_base() + def test_atol_auto_update6(self): + """ + Tests that rtol doesn't affect explicitly set atol. 
+ """ + model, opts = self.setup_atol_auto_update_test_base() - opts["CVode_options"]["rtol"] = 1e-9 - opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) + opts["CVode_options"]["rtol"] = 1e-9 + opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) class Test_FMUModelME2: @@ -1153,17 +1152,17 @@ def test_unzipped_fmu_exceptions(self): """ Verify exception is raised if 'fmu' is a file and allow_unzipped_fmu is set to True, with FMUModelME2. """ err_msg = "Argument named 'fmu' must be a directory if argument 'allow_unzipped_fmu' is set to True." with pytest.raises(FMUException, match = err_msg): - model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu"), _connect_dll=False, allow_unzipped_fmu=True) + FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu"), _connect_dll=False, allow_unzipped_fmu=True) def test_invalid_binary(self): err_msg = "The FMU could not be loaded." with pytest.raises(InvalidBinaryException, match = err_msg): - model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu"), _connect_dll=True) + FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu"), _connect_dll=True) def test_invalid_version(self): err_msg = "The FMU version is not supported by this class" with pytest.raises(InvalidVersionException, match = err_msg): - model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "RLC_Circuit.fmu"), _connect_dll=True) + FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "RLC_Circuit.fmu"), _connect_dll=True) def test_estimate_directional_derivatives_linearstate(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStateSpace.fmu"), _connect_dll=False) diff --git a/tests/test_fmi_coupled.py b/tests/test_fmi_coupled.py index e4aa5f44..d1fe625e 100644 --- a/tests/test_fmi_coupled.py +++ b/tests/test_fmi_coupled.py @@ -23,72 +23,70 @@ import pyfmi.fmi as fmi from pyfmi.test_util import Dummy_FMUModelME2 -assimulo_installed = True try: import assimulo except ImportError: - assimulo_installed = False + pass file_path = os.path.dirname(os.path.abspath(__file__)) me2_xml_path = os.path.join(file_path, "files", "FMUs", "XML", "ME2.0") -if assimulo_installed: - class Test_CoupledFMUModelME2_Simulation: - def test_linear_example(self): - model_sub_1 = Dummy_FMUModelME2([], os.path.join(me2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False) - model_sub_2 = Dummy_FMUModelME2([], os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) +@pytest.mark.assimulo +class Test_CoupledFMUModelME2_Simulation: + def test_linear_example(self): + model_sub_1 = Dummy_FMUModelME2([], os.path.join(me2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False) + model_sub_2 = Dummy_FMUModelME2([], os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) - def sub1(*args, **kwargs): - u1 = 
model_sub_1.get_real([model_sub_1.get_variable_valueref("u1")], evaluate = False) - a1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("a1")], evaluate = False) - b1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("b1")], evaluate = False) - c1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("c1")], evaluate = False) - d1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("d1")], evaluate = False) - x1 = model_sub_1.continuous_states[0] - model_sub_1.set_real([model_sub_1.get_variable_valueref("y1")], c1*x1+d1*u1) - model_sub_1.set_real([model_sub_1.get_variable_valueref("x1")], [x1]) - return a1*x1+b1*u1 - - def sub2(*args, **kwargs): - u2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("u2")], evaluate = False) - a2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("a2")], evaluate = False) - b2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("b2")], evaluate = False) - c2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("c2")], evaluate = False) - d2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("d2")], evaluate = False) - x2 = model_sub_2.continuous_states[0] - model_sub_2.set_real([model_sub_2.get_variable_valueref("y2")], c2*x2+d2*u2) - model_sub_2.set_real([model_sub_2.get_variable_valueref("x2")], [x2]) - return a2*x2+b2*u2 - - model_sub_1.get_derivatives = sub1 - model_sub_2.get_derivatives = sub2 - - models = [("First", model_sub_1), ("Second", model_sub_2)] - connections = [(model_sub_1,"y1",model_sub_2,"u2"), - (model_sub_2,"y2",model_sub_1,"u1")] - - coupled = CoupledFMUModelME2(models, connections=connections) + def sub1(*args, **kwargs): + u1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("u1")], evaluate = False) + a1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("a1")], evaluate = False) + b1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("b1")], evaluate = False) + c1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("c1")], evaluate = False) + d1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("d1")], evaluate = False) + x1 = model_sub_1.continuous_states[0] + model_sub_1.set_real([model_sub_1.get_variable_valueref("y1")], c1*x1+d1*u1) + model_sub_1.set_real([model_sub_1.get_variable_valueref("x1")], [x1]) + return a1*x1+b1*u1 + + def sub2(*args, **kwargs): + u2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("u2")], evaluate = False) + a2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("a2")], evaluate = False) + b2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("b2")], evaluate = False) + c2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("c2")], evaluate = False) + d2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("d2")], evaluate = False) + x2 = model_sub_2.continuous_states[0] + model_sub_2.set_real([model_sub_2.get_variable_valueref("y2")], c2*x2+d2*u2) + model_sub_2.set_real([model_sub_2.get_variable_valueref("x2")], [x2]) + return a2*x2+b2*u2 + + model_sub_1.get_derivatives = sub1 + model_sub_2.get_derivatives = sub2 + + models = [("First", model_sub_1), ("Second", model_sub_2)] + connections = [(model_sub_1,"y1",model_sub_2,"u2"), + (model_sub_2,"y2",model_sub_1,"u1")] + + coupled = CoupledFMUModelME2(models, connections=connections) - opts = {"CVode_options": {"rtol":1e-6, "atol":1e-6}, "ncp":0} + opts = {"CVode_options": {"rtol":1e-6, "atol":1e-6}, "ncp":0} - res = coupled.simulate(options=opts) + res = coupled.simulate(options=opts) - assert res.final("First.x1") == 
pytest.approx(0.08597302307099872) - assert res.final("Second.x2") == pytest.approx(0.0083923348082567) - assert res.initial("First.x1") == pytest.approx(1.0) - assert res.initial("Second.x2") == pytest.approx(1.0) - - assert res.final("First.u1") == pytest.approx(-0.25909975860402856) - assert res.final("Second.u2") == pytest.approx(-0.0011806893910324295) - assert res.initial("First.u1") == pytest.approx(-17.736842105263158) - assert res.initial("Second.u2") == pytest.approx(-14.73684210526316) + assert res.final("First.x1") == pytest.approx(0.08597302307099872) + assert res.final("Second.x2") == pytest.approx(0.0083923348082567) + assert res.initial("First.x1") == pytest.approx(1.0) + assert res.initial("Second.x2") == pytest.approx(1.0) + + assert res.final("First.u1") == pytest.approx(-0.25909975860402856) + assert res.final("Second.u2") == pytest.approx(-0.0011806893910324295) + assert res.initial("First.u1") == pytest.approx(-17.736842105263158) + assert res.initial("Second.u2") == pytest.approx(-14.73684210526316) class Test_CoupledFMUModelME2: def test_reversed_connections(self): model_sub_1 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False) model_sub_2 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) - model_full = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.FullSystem.fmu"), _connect_dll=False) models = [("First", model_sub_1), ("Second", model_sub_2)] connections = [(model_sub_2,"y1",model_sub_1,"u2"), @@ -106,7 +104,6 @@ def test_reversed_connections(self): def test_inputs_list(self): model_sub_1 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False) model_sub_2 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) - model_full = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.FullSystem.fmu"), _connect_dll=False) models = [("First", model_sub_1), ("Second", model_sub_2)] connections = [(model_sub_1,"y1",model_sub_2,"u2"), @@ -155,7 +152,6 @@ def test_loading(self): CoupledFMUModelME2(models, connections) models = [("First", model_cc_1), ("Second", model_cc_2)] - coupled = CoupledFMUModelME2(models, connections) connections = [("k")] with pytest.raises(fmi.FMUException): diff --git a/tests/test_fmi_estimate.py b/tests/test_fmi_estimate.py index 38d95ac9..5e5dcbfe 100644 --- a/tests/test_fmi_estimate.py +++ b/tests/test_fmi_estimate.py @@ -16,93 +16,93 @@ # along with this program. If not, see . 
import os +import pytest import numpy as np from pyfmi.test_util import Dummy_FMUModelME2 from scipy.io.matlab import loadmat -assimulo_installed = True try: import assimulo except ImportError: - assimulo_installed = False + pass file_path = os.path.dirname(os.path.abspath(__file__)) -if assimulo_installed: - class Test_FMUModelME2_Estimate: - def test_quadtank_estimate(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "QuadTankPack_Sim_QuadTank.fmu"), _connect_dll=False) - - g = model.get_real([model.get_variable_valueref("qt.g")], evaluate = False) - g1_nmp = model.get_real([model.get_variable_valueref("qt.g1_nmp")], evaluate = False) - g2_nmp = model.get_real([model.get_variable_valueref("qt.g2_nmp")], evaluate = False) - k1_nmp = model.get_real([model.get_variable_valueref("qt.k1_nmp")], evaluate = False) - k2_nmp = model.get_real([model.get_variable_valueref("qt.k2_nmp")], evaluate = False) - A1 = model.get_real([model.get_variable_valueref("qt.A1")], evaluate = False) - A2 = model.get_real([model.get_variable_valueref("qt.A2")], evaluate = False) - A3 = model.get_real([model.get_variable_valueref("qt.A3")], evaluate = False) - A4 = model.get_real([model.get_variable_valueref("qt.A4")], evaluate = False) - a3 = model.get_real([model.get_variable_valueref("qt.a3")], evaluate = False) - a4 = model.get_real([model.get_variable_valueref("qt.a4")], evaluate = False) - u1_vref = model.get_variable_valueref("u1") - u2_vref = model.get_variable_valueref("u2") - a1_vref = model.get_variable_valueref("qt.a1") - a2_vref = model.get_variable_valueref("qt.a2") - - def f(*args, **kwargs): - x1 = model.continuous_states[0] - x2 = model.continuous_states[1] - x3 = model.continuous_states[2] - x4 = model.continuous_states[3] - - u1 = model.get_real([u1_vref], evaluate = False) - u2 = model.get_real([u2_vref], evaluate = False) - a1 = model.get_real([a1_vref], evaluate = False) - a2 = model.get_real([a2_vref], evaluate = False) - - der_x1 = -a1/A1*np.sqrt(2.*g*x1) + a3/A1*np.sqrt(2*g*x3) + g1_nmp*k1_nmp/A1*u1 - der_x2 = -a2/A2*np.sqrt(2.*g*x2) + a4/A2*np.sqrt(2*g*x4) + g2_nmp*k2_nmp/A2*u2 - der_x3 = -a3/A3*np.sqrt(2.*g*x3) + (1.-g2_nmp)*k2_nmp/A3*u2 - der_x4 = -a4/A4*np.sqrt(2.*g*x4) + (1.-g1_nmp)*k1_nmp/A4*u1 - return np.concatenate([der_x1, der_x2, der_x3, der_x4]) - - model.get_derivatives = f - - # Load measurement data from file - data = loadmat(os.path.join(file_path, "files", "Results", "qt_par_est_data.mat"), appendmat=False) - - # Extract data series - t_meas = data['t'][6000::100,0]-60 - y1_meas = data['y1_f'][6000::100,0]/100 - y2_meas = data['y2_f'][6000::100,0]/100 - y3_meas = data['y3_d'][6000::100,0]/100 - y4_meas = data['y4_d'][6000::100,0]/100 - u1 = data['u1_d'][6000::100,0] - u2 = data['u2_d'][6000::100,0] - - # Build input trajectory matrix for use in simulation - u = np.transpose(np.vstack((t_meas,u1,u2))) - - # Estimation of 2 parameters - data = np.vstack((t_meas, y1_meas, y2_meas)).transpose() - - res = model.estimate(parameters=["qt.a1", "qt.a2"], - measurements = (['qt.x1', 'qt.x2'], data), input=(['u1','u2'],u)) - - - model.reset() - - # Set optimal values for a1 and a2 into the model - model.set(['qt.a1'], res["qt.a1"]) - model.set(['qt.a2'], res["qt.a2"]) - - # Simulate model response with optimal parameters a1 and a2 - res = model.simulate(input=(['u1','u2'], u), start_time=0., final_time=60) - - assert np.abs(res.final('qt.x1') - 0.07060188) < 1e-3, "Was: " + str(res.final('qt.x1')) + ", expected: 0.07060188" - assert 
np.abs(res.final('qt.x2') - 0.06654621) < 1e-3 - assert np.abs(res.final('qt.x3') - 0.02736549) < 1e-3 - assert np.abs(res.final('qt.x4') - 0.02789857) < 1e-3 - assert np.abs(res.final('u1') - 6.0) < 1e-3 - assert np.abs(res.final('u2') - 5.0) < 1e-3 +@pytest.mark.assimulo +class Test_FMUModelME2_Estimate: + def test_quadtank_estimate(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "QuadTankPack_Sim_QuadTank.fmu"), _connect_dll=False) + + g = model.get_real([model.get_variable_valueref("qt.g")], evaluate = False) + g1_nmp = model.get_real([model.get_variable_valueref("qt.g1_nmp")], evaluate = False) + g2_nmp = model.get_real([model.get_variable_valueref("qt.g2_nmp")], evaluate = False) + k1_nmp = model.get_real([model.get_variable_valueref("qt.k1_nmp")], evaluate = False) + k2_nmp = model.get_real([model.get_variable_valueref("qt.k2_nmp")], evaluate = False) + A1 = model.get_real([model.get_variable_valueref("qt.A1")], evaluate = False) + A2 = model.get_real([model.get_variable_valueref("qt.A2")], evaluate = False) + A3 = model.get_real([model.get_variable_valueref("qt.A3")], evaluate = False) + A4 = model.get_real([model.get_variable_valueref("qt.A4")], evaluate = False) + a3 = model.get_real([model.get_variable_valueref("qt.a3")], evaluate = False) + a4 = model.get_real([model.get_variable_valueref("qt.a4")], evaluate = False) + u1_vref = model.get_variable_valueref("u1") + u2_vref = model.get_variable_valueref("u2") + a1_vref = model.get_variable_valueref("qt.a1") + a2_vref = model.get_variable_valueref("qt.a2") + + def f(*args, **kwargs): + x1 = model.continuous_states[0] + x2 = model.continuous_states[1] + x3 = model.continuous_states[2] + x4 = model.continuous_states[3] + + u1 = model.get_real([u1_vref], evaluate = False) + u2 = model.get_real([u2_vref], evaluate = False) + a1 = model.get_real([a1_vref], evaluate = False) + a2 = model.get_real([a2_vref], evaluate = False) + + der_x1 = -a1/A1*np.sqrt(2.*g*x1) + a3/A1*np.sqrt(2*g*x3) + g1_nmp*k1_nmp/A1*u1 + der_x2 = -a2/A2*np.sqrt(2.*g*x2) + a4/A2*np.sqrt(2*g*x4) + g2_nmp*k2_nmp/A2*u2 + der_x3 = -a3/A3*np.sqrt(2.*g*x3) + (1.-g2_nmp)*k2_nmp/A3*u2 + der_x4 = -a4/A4*np.sqrt(2.*g*x4) + (1.-g1_nmp)*k1_nmp/A4*u1 + return np.concatenate([der_x1, der_x2, der_x3, der_x4]) + + model.get_derivatives = f + + # Load measurement data from file + data = loadmat(os.path.join(file_path, "files", "Results", "qt_par_est_data.mat"), appendmat=False) + + # Extract data series + t_meas = data['t'][6000::100,0]-60 + y1_meas = data['y1_f'][6000::100,0]/100 + y2_meas = data['y2_f'][6000::100,0]/100 + y3_meas = data['y3_d'][6000::100,0]/100 + y4_meas = data['y4_d'][6000::100,0]/100 + u1 = data['u1_d'][6000::100,0] + u2 = data['u2_d'][6000::100,0] + + # Build input trajectory matrix for use in simulation + u = np.transpose(np.vstack((t_meas,u1,u2))) + + # Estimation of 2 parameters + data = np.vstack((t_meas, y1_meas, y2_meas)).transpose() + + res = model.estimate(parameters=["qt.a1", "qt.a2"], + measurements = (['qt.x1', 'qt.x2'], data), input=(['u1','u2'],u)) + + + model.reset() + + # Set optimal values for a1 and a2 into the model + model.set(['qt.a1'], res["qt.a1"]) + model.set(['qt.a2'], res["qt.a2"]) + + # Simulate model response with optimal parameters a1 and a2 + res = model.simulate(input=(['u1','u2'], u), start_time=0., final_time=60) + + assert np.abs(res.final('qt.x1') - 0.07060188) < 1e-3, "Was: " + str(res.final('qt.x1')) + ", expected: 0.07060188" + assert np.abs(res.final('qt.x2') - 0.06654621) < 
1e-3 + assert np.abs(res.final('qt.x3') - 0.02736549) < 1e-3 + assert np.abs(res.final('qt.x4') - 0.02789857) < 1e-3 + assert np.abs(res.final('u1') - 6.0) < 1e-3 + assert np.abs(res.final('u2') - 5.0) < 1e-3 diff --git a/tests/test_io.py b/tests/test_io.py index e4f95e08..3ba9d4e7 100644 --- a/tests/test_io.py +++ b/tests/test_io.py @@ -34,11 +34,10 @@ file_path = os.path.dirname(os.path.abspath(__file__)) -assimulo_installed = True try: import assimulo except ImportError: - assimulo_installed = False + pass def _run_negated_alias(model, result_type, result_file_name=""): opts = model.simulate_options() @@ -57,102 +56,102 @@ def _run_negated_alias(model, result_type, result_file_name=""): for i in range(len(x)): assert x[i] == -y[i] -if assimulo_installed: - class TestResultFileText_Simulation: +@pytest.mark.assimulo +class TestResultFileText_Simulation: - def _correct_syntax_after_simulation_failure(self, result_file_name): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + def _correct_syntax_after_simulation_failure(self, result_file_name): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - def f(*args, **kwargs): - if simple_alias.time > 0.5: - raise Exception - return -simple_alias.continuous_states + def f(*args, **kwargs): + if simple_alias.time > 0.5: + raise Exception + return -simple_alias.continuous_states - simple_alias.get_derivatives = f + simple_alias.get_derivatives = f - opts = simple_alias.simulate_options() - opts["result_handling"] = "file" - opts["solver"] = "ExplicitEuler" - opts["result_file_name"] = result_file_name + opts = simple_alias.simulate_options() + opts["result_handling"] = "file" + opts["solver"] = "ExplicitEuler" + opts["result_file_name"] = result_file_name - successful_simulation = False - try: - res = simple_alias.simulate(options=opts) - successful_simulation = True #The above simulation should fail... - except Exception: - pass + successful_simulation = False + try: + res = simple_alias.simulate(options=opts) + successful_simulation = True #The above simulation should fail... 
+ except Exception: + pass - if successful_simulation: - raise Exception + if successful_simulation: + raise Exception - result = ResultDymolaTextual(result_file_name) + result = ResultDymolaTextual(result_file_name) - x = result.get_variable_data("x").x - y = result.get_variable_data("y").x + x = result.get_variable_data("x").x + y = result.get_variable_data("y").x - assert len(x) > 2 + assert len(x) > 2 - for i in range(len(x)): - assert x[i] == -y[i] + for i in range(len(x)): + assert x[i] == -y[i] - def test_correct_file_after_simulation_failure(self): - self._correct_syntax_after_simulation_failure("NegatedAlias_result.txt") + def test_correct_file_after_simulation_failure(self): + self._correct_syntax_after_simulation_failure("NegatedAlias_result.txt") - def test_correct_stream_after_simulation_failure(self): - stream = StringIO("") - self._correct_syntax_after_simulation_failure(stream) + def test_correct_stream_after_simulation_failure(self): + stream = StringIO("") + self._correct_syntax_after_simulation_failure(stream) - def test_read_all_variables_using_model_variables(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + def test_read_all_variables_using_model_variables(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - opts = simple_alias.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerFile(simple_alias) + opts = simple_alias.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerFile(simple_alias) - res = simple_alias.simulate(options=opts) + res = simple_alias.simulate(options=opts) - for var in simple_alias.get_model_variables(): - res[var] + for var in simple_alias.get_model_variables(): + res[var] - def test_read_alias_derivative(self): - simple_alias = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) + def test_read_alias_derivative(self): + simple_alias = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) - opts = simple_alias.simulate_options() - opts["result_handling"] = "file" + opts = simple_alias.simulate_options() + opts["result_handling"] = "file" - res = simple_alias.simulate(options=opts) + res = simple_alias.simulate(options=opts) - derx = res["der(x)"] - dery = res["der(y)"] + derx = res["der(x)"] + dery = res["der(y)"] - assert len(derx) > 0 - for i in range(len(derx)): - assert derx[i] == dery[i] + assert len(derx) > 0 + for i in range(len(derx)): + assert derx[i] == dery[i] - def test_no_variables(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) + def test_no_variables(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = "file" - opts["result_file_name"] = "NoMatchingTest.txt" - opts["filter"] = "NoMatchingVariables" + opts = model.simulate_options() + opts["result_handling"] = "file" + opts["result_file_name"] = "NoMatchingTest.txt" + opts["filter"] = "NoMatchingVariables" - res = model.simulate(options=opts) + res = model.simulate(options=opts) - assert 1.0 == pytest.approx(res["time"][-1]) + assert 1.0 == 
pytest.approx(res["time"][-1]) - def test_enumeration_file(self): + def test_enumeration_file(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) - data_type = model.get_variable_data_type("mode") + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) + data_type = model.get_variable_data_type("mode") - assert data_type == fmi.FMI2_ENUMERATION + assert data_type == fmi.FMI2_ENUMERATION - opts = model.simulate_options() - opts["result_handling"] = "file" + opts = model.simulate_options() + opts["result_handling"] = "file" - res = model.simulate(options=opts) - res["mode"] #Check that the enumeration variable is in the dict, otherwise exception + res = model.simulate(options=opts) + res["mode"] #Check that the enumeration variable is in the dict, otherwise exception class TestResultFileText: def _get_description(self, result_file_name): @@ -325,226 +324,226 @@ def readline(self): with pytest.raises(JIOError, match = msg): res = ResultDymolaTextual(stream) -if assimulo_installed: - class TestResultMemory_Simulation: - def test_memory_options_me1(self): - simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - _run_negated_alias(simple_alias, "memory") +@pytest.mark.assimulo +class TestResultMemory_Simulation: + def test_memory_options_me1(self): + simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + _run_negated_alias(simple_alias, "memory") - def test_memory_options_me2(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - _run_negated_alias(simple_alias, "memory") + def test_memory_options_me2(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + _run_negated_alias(simple_alias, "memory") - def test_only_parameters(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) + def test_only_parameters(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = "memory" - opts["filter"] = "p2" + opts = model.simulate_options() + opts["result_handling"] = "memory" + opts["filter"] = "p2" - res = model.simulate(options=opts) + res = model.simulate(options=opts) - assert 3.0 == pytest.approx(res["p2"][0]) - assert not isinstance(res.initial("p2"), np.ndarray) - assert not isinstance(res.final("p2"), np.ndarray) + assert 3.0 == pytest.approx(res["p2"][0]) + assert not isinstance(res.initial("p2"), np.ndarray) + assert not isinstance(res.final("p2"), np.ndarray) - def test_no_variables(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) + def test_no_variables(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = "memory" - opts["filter"] = "NoMatchingVariables" + opts = model.simulate_options() + opts["result_handling"] = "memory" + 
opts["filter"] = "NoMatchingVariables" - res = model.simulate(options=opts) + res = model.simulate(options=opts) - assert 1.0 == pytest.approx(res["time"][-1]) + assert 1.0 == pytest.approx(res["time"][-1]) - def test_enumeration_memory(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) - data_type = model.get_variable_data_type("mode") + def test_enumeration_memory(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) + data_type = model.get_variable_data_type("mode") - assert data_type == fmi.FMI2_ENUMERATION + assert data_type == fmi.FMI2_ENUMERATION - opts = model.simulate_options() - opts["result_handling"] = "memory" + opts = model.simulate_options() + opts["result_handling"] = "memory" - res = model.simulate(options=opts) - res["mode"] #Check that the enumeration variable is in the dict, otherwise exception + res = model.simulate(options=opts) + res["mode"] #Check that the enumeration variable is in the dict, otherwise exception class TestResultMemory: pass -if assimulo_installed: - class TestResultFileBinary_Simulation: - def _correct_file_after_simulation_failure(self, result_file_name): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) +@pytest.mark.assimulo +class TestResultFileBinary_Simulation: + def _correct_file_after_simulation_failure(self, result_file_name): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - def f(*args, **kwargs): - if simple_alias.time > 0.5: - raise Exception - return -simple_alias.continuous_states + def f(*args, **kwargs): + if simple_alias.time > 0.5: + raise Exception + return -simple_alias.continuous_states - simple_alias.get_derivatives = f + simple_alias.get_derivatives = f - opts = simple_alias.simulate_options() - opts["result_handling"] = "binary" - opts["result_file_name"] = result_file_name - opts["solver"] = "ExplicitEuler" + opts = simple_alias.simulate_options() + opts["result_handling"] = "binary" + opts["result_file_name"] = result_file_name + opts["solver"] = "ExplicitEuler" - successful_simulation = False - try: - res = simple_alias.simulate(options=opts) - successful_simulation = True #The above simulation should fail... - except Exception: - pass + successful_simulation = False + try: + res = simple_alias.simulate(options=opts) + successful_simulation = True #The above simulation should fail... 
+ except Exception: + pass - if successful_simulation: - raise Exception + if successful_simulation: + raise Exception - result = ResultDymolaBinary(result_file_name) + result = ResultDymolaBinary(result_file_name) - x = result.get_variable_data("x").x - y = result.get_variable_data("y").x + x = result.get_variable_data("x").x + y = result.get_variable_data("y").x - assert len(x) > 2 + assert len(x) > 2 - for i in range(len(x)): - assert x[i] == -y[i] + for i in range(len(x)): + assert x[i] == -y[i] - def test_work_flow_me2_file(self): - self._correct_file_after_simulation_failure("NegatedAlias_result.mat") + def test_work_flow_me2_file(self): + self._correct_file_after_simulation_failure("NegatedAlias_result.mat") - def test_work_flow_me2_stream(self): - stream = BytesIO() - self._correct_file_after_simulation_failure(stream) + def test_work_flow_me2_stream(self): + stream = BytesIO() + self._correct_file_after_simulation_failure(stream) - def _only_parameters(self, result_file_name): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) + def _only_parameters(self, result_file_name): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerBinaryFile(model) - opts["filter"] = "p2" - opts["result_file_name"] = result_file_name + opts = model.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerBinaryFile(model) + opts["filter"] = "p2" + opts["result_file_name"] = result_file_name - res = model.simulate(options=opts) + res = model.simulate(options=opts) - assert 3.0 == pytest.approx(res["p2"][0]) + assert 3.0 == pytest.approx(res["p2"][0]) - def test_only_parameters_file(self): - self._only_parameters("ParameterAlias_result.mat") + def test_only_parameters_file(self): + self._only_parameters("ParameterAlias_result.mat") - def test_only_parameters_stream(self): - stream = BytesIO() - self._only_parameters(stream) + def test_only_parameters_stream(self): + stream = BytesIO() + self._only_parameters(stream) - def _no_variables(self, result_file_name): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) + def _no_variables(self, result_file_name): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerBinaryFile(model) - opts["filter"] = "NoMatchingVariables" - opts["result_file_name"] = result_file_name + opts = model.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerBinaryFile(model) + opts["filter"] = "NoMatchingVariables" + opts["result_file_name"] = result_file_name - res = model.simulate(options=opts) + res = model.simulate(options=opts) - assert 1.0 == pytest.approx(res["time"][-1]) + assert 1.0 == pytest.approx(res["time"][-1]) - def test_no_variables_file(self): - self._no_variables("ParameterAlias_result.mat") + def test_no_variables_file(self): + self._no_variables("ParameterAlias_result.mat") - def test_no_variables_stream(self): - stream = BytesIO() - self._no_variables(stream) + def test_no_variables_stream(self): + stream = BytesIO() + self._no_variables(stream) - def 
test_read_alias_derivative(self): - simple_alias = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) + def test_read_alias_derivative(self): + simple_alias = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) - opts = simple_alias.simulate_options() - opts["result_handling"] = "binary" + opts = simple_alias.simulate_options() + opts["result_handling"] = "binary" - res = simple_alias.simulate(options=opts) + res = simple_alias.simulate(options=opts) - derx = res["der(x)"] - dery = res["der(y)"] + derx = res["der(x)"] + dery = res["der(y)"] - assert len(derx) > 0 - for i in range(len(derx)): - assert derx[i] == dery[i] + assert len(derx) > 0 + for i in range(len(derx)): + assert derx[i] == dery[i] - def test_enumeration_binary(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) - data_type = model.get_variable_data_type("mode") + def test_enumeration_binary(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) + data_type = model.get_variable_data_type("mode") - assert data_type == fmi.FMI2_ENUMERATION + assert data_type == fmi.FMI2_ENUMERATION - opts = model.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerBinaryFile(model) + opts = model.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerBinaryFile(model) - res = model.simulate(options=opts) - res["mode"] #Check that the enumeration variable is in the dict, otherwise exception + res = model.simulate(options=opts) + res["mode"] #Check that the enumeration variable is in the dict, otherwise exception - def test_integer_start_time(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) + def test_integer_start_time(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = "binary" + opts = model.simulate_options() + opts["result_handling"] = "binary" - #Assert that there is no exception when reloading the file - res = model.simulate(start_time=0, options=opts) + #Assert that there is no exception when reloading the file + res = model.simulate(start_time=0, options=opts) - def test_read_all_variables_using_model_variables(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + def test_read_all_variables_using_model_variables(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - opts = simple_alias.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerBinaryFile(simple_alias) + opts = simple_alias.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerBinaryFile(simple_alias) - res = simple_alias.simulate(options=opts) + res = simple_alias.simulate(options=opts) - for var in simple_alias.get_model_variables(): - res[var] + for var in simple_alias.get_model_variables(): + res[var] - def test_variable_alias_custom_handler(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], 
os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + def test_variable_alias_custom_handler(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - opts = simple_alias.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerBinaryFile(simple_alias) + opts = simple_alias.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerBinaryFile(simple_alias) - res = simple_alias.simulate(options=opts) + res = simple_alias.simulate(options=opts) - # test that res['y'] returns a vector of the same length as the time - # vector - assert len(res['y']) ==len(res['time']), "Wrong size of result vector." + # test that res['y'] returns a vector of the same length as the time + # vector + assert len(res['y']) ==len(res['time']), "Wrong size of result vector." - x = res["x"] - y = res["y"] + x = res["x"] + y = res["y"] - for i in range(len(x)): - assert x[i] == -y[i] + for i in range(len(x)): + assert x[i] == -y[i] - def test_binary_options_me1(self): - simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - _run_negated_alias(simple_alias, "binary") + def test_binary_options_me1(self): + simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + _run_negated_alias(simple_alias, "binary") - def test_binary_options_me2(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - _run_negated_alias(simple_alias, "binary") + def test_binary_options_me2(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + _run_negated_alias(simple_alias, "binary") - def test_binary_options_me1_stream(self): - simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - stream = BytesIO() - _run_negated_alias(simple_alias, "binary", stream) + def test_binary_options_me1_stream(self): + simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + stream = BytesIO() + _run_negated_alias(simple_alias, "binary", stream) - def test_binary_options_me2_stream(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - stream = BytesIO() - _run_negated_alias(simple_alias, "binary", stream) + def test_binary_options_me2_stream(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + stream = BytesIO() + _run_negated_alias(simple_alias, "binary", stream) class TestResultFileBinary: def _get_description_unicode(self, result_file_name): @@ -1390,83 +1389,84 @@ def test_get_last_result_file3(self): test_model._result_file = 123 # arbitrary number, just verify get_last_result_file works assert test_model.get_last_result_file() is None, "Expected None but got {}".format(test_model.get_last_result_file()) -if assimulo_installed: - class TestResultCSVTextual_Simulation: - def test_only_parameters(self): - model = 
Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerCSV(model) - opts["filter"] = "p2" +@pytest.mark.assimulo +class TestResultCSVTextual_Simulation: + def test_only_parameters(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - res = model.simulate(options=opts) + opts = model.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerCSV(model) + opts["filter"] = "p2" - assert 3.0 == pytest.approx(res["p2"][0]) + res = model.simulate(options=opts) - def test_no_variables(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) + assert 3.0 == pytest.approx(res["p2"][0]) - opts = model.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerCSV(model) - opts["filter"] = "NoMatchingVariables" - opts["result_file_name"] = "NoMatchingTest.csv" + def test_no_variables(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - res = model.simulate(options=opts) + opts = model.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerCSV(model) + opts["filter"] = "NoMatchingVariables" + opts["result_file_name"] = "NoMatchingTest.csv" - assert 1.0 == pytest.approx(res["time"][-1]) + res = model.simulate(options=opts) - def test_variable_alias_custom_handler(self): - simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + assert 1.0 == pytest.approx(res["time"][-1]) - opts = simple_alias.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerCSV(simple_alias) + def test_variable_alias_custom_handler(self): + simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - res = simple_alias.simulate(options=opts) + opts = simple_alias.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerCSV(simple_alias) - # test that res['y'] returns a vector of the same length as the time - # vector - assert len(res['y']) ==len(res['time']), "Wrong size of result vector." + res = simple_alias.simulate(options=opts) - x = res["x"] - y = res["y"] + # test that res['y'] returns a vector of the same length as the time + # vector + assert len(res['y']) ==len(res['time']), "Wrong size of result vector." 
- for i in range(len(x)): - assert x[i] == -y[i] + x = res["x"] + y = res["y"] - def test_csv_options_me1(self): - simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - _run_negated_alias(simple_alias, "csv") + for i in range(len(x)): + assert x[i] == -y[i] - def test_csv_options_me2(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - _run_negated_alias(simple_alias, "csv") + def test_csv_options_me1(self): + simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + _run_negated_alias(simple_alias, "csv") - def test_csv_options_me1_stream(self): - simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - stream = StringIO() - _run_negated_alias(simple_alias, "csv", stream) + def test_csv_options_me2(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + _run_negated_alias(simple_alias, "csv") - def test_csv_options_me2(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - stream = StringIO() - _run_negated_alias(simple_alias, "csv", stream) + def test_csv_options_me1_stream(self): + simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + stream = StringIO() + _run_negated_alias(simple_alias, "csv", stream) - def test_enumeration_csv(self): + def test_csv_options_me2_stream(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + stream = StringIO() + _run_negated_alias(simple_alias, "csv", stream) - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) - data_type = model.get_variable_data_type("mode") + def test_enumeration_csv(self): - assert data_type == fmi.FMI2_ENUMERATION + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) + data_type = model.get_variable_data_type("mode") - opts = model.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerCSV(model) + assert data_type == fmi.FMI2_ENUMERATION - res = model.simulate(options=opts) - res["mode"] #Check that the enumeration variable is in the dict, otherwise exception + opts = model.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerCSV(model) + + res = model.simulate(options=opts) + res["mode"] #Check that the enumeration variable is in the dict, otherwise exception class TestResultCSVTextual: @@ -1781,187 +1781,188 @@ def test_get_variables_data_values4(self): np.testing.assert_array_almost_equal(test_data[0], reference_data['time'][index]) np.testing.assert_array_almost_equal(test_data[1], reference_data['@Diagnostics.nbr_steps'][index]) -if assimulo_installed: - class TestFileSizeLimit: - def _setup(self, result_type, result_file_name="", fmi_type="me"): - if fmi_type == "me": - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "CoupledClutches.fmu"), 
_connect_dll=False) - else: - model = Dummy_FMUModelCS2([], os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "CoupledClutches.fmu"), _connect_dll=False) - - opts = model.simulate_options() - opts["result_handling"] = result_type - opts["result_file_name"] = result_file_name +@pytest.mark.assimulo +class TestFileSizeLimit: - return model, opts + def _setup(self, result_type, result_file_name="", fmi_type="me"): + if fmi_type == "me": + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "CoupledClutches.fmu"), _connect_dll=False) + else: + model = Dummy_FMUModelCS2([], os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "CoupledClutches.fmu"), _connect_dll=False) + + opts = model.simulate_options() + opts["result_handling"] = result_type + opts["result_file_name"] = result_file_name - def _test_result(self, result_type, result_file_name="", max_size=1e6): - model, opts = self._setup(result_type, result_file_name) + return model, opts - opts["result_max_size"] = max_size + def _test_result(self, result_type, result_file_name="", max_size=1e6): + model, opts = self._setup(result_type, result_file_name) - #No exception should be raised. - res = model.simulate(options=opts) + opts["result_max_size"] = max_size - def _test_result_exception(self, result_type, result_file_name="", fmi_type="me"): - model, opts = self._setup(result_type, result_file_name, fmi_type) + #No exception should be raised. + res = model.simulate(options=opts) - opts["result_max_size"] = 10 + def _test_result_exception(self, result_type, result_file_name="", fmi_type="me"): + model, opts = self._setup(result_type, result_file_name, fmi_type) - with pytest.raises(ResultSizeError): - model.simulate(options=opts) + opts["result_max_size"] = 10 - def _test_result_size_verification(self, result_type, result_file_name="", dynamic_diagnostics=False): - """ - Verifies that the ResultSizeError exception is triggered (due to too large result) and also verifies - that the resulting file is within bounds of the set maximum size. - """ - model, opts = self._setup(result_type, result_file_name) - model.setup_experiment() - model.initialize() + with pytest.raises(ResultSizeError): + model.simulate(options=opts) - max_size = 1e6 - opts["result_max_size"] = max_size - opts["dynamic_diagnostics"] = dynamic_diagnostics - opts["logging"] = dynamic_diagnostics - opts["ncp"] = 0 #Set to zero to circumvent the early size check - ncp = 10000 + def _test_result_size_verification(self, result_type, result_file_name="", dynamic_diagnostics=False): + """ + Verifies that the ResultSizeError exception is triggered (due to too large result) and also verifies + that the resulting file is within bounds of the set maximum size. 
+ """ + model, opts = self._setup(result_type, result_file_name) + model.setup_experiment() + model.initialize() - result_handler = get_result_handler(model, opts) + max_size = 1e6 + opts["result_max_size"] = max_size + opts["dynamic_diagnostics"] = dynamic_diagnostics + opts["logging"] = dynamic_diagnostics + opts["ncp"] = 0 #Set to zero to circumvent the early size check + ncp = 10000 - result_handler.set_options(opts) - result_handler.initialize_complete() + result_handler = get_result_handler(model, opts) - if opts["dynamic_diagnostics"]: - opts['CVode_options']['rtol'] = 1e-6 - opts['CVode_options']['atol'] = model.nominal_continuous_states * opts['CVode_options']['rtol'] - diag_params, diag_vars = setup_diagnostics_variables(model, 0, opts, opts['CVode_options']) - result_handler.simulation_start(diag_params, diag_vars) - else: - result_handler.simulation_start() + result_handler.set_options(opts) + result_handler.initialize_complete() - with pytest.raises(ResultSizeError): - for _ in range(ncp): - result_handler.integration_point() + if opts["dynamic_diagnostics"]: + opts['CVode_options']['rtol'] = 1e-6 + opts['CVode_options']['atol'] = model.nominal_continuous_states * opts['CVode_options']['rtol'] + diag_params, diag_vars = setup_diagnostics_variables(model, 0, opts, opts['CVode_options']) + result_handler.simulation_start(diag_params, diag_vars) + else: + result_handler.simulation_start() - if opts["dynamic_diagnostics"]: - result_handler.diagnostics_point(np.array([val[0] for val in diag_vars.values()], dtype=float)) + with pytest.raises(ResultSizeError): + for _ in range(ncp): + result_handler.integration_point() - result_file = model.get_last_result_file() - file_size = os.path.getsize(result_file) + if opts["dynamic_diagnostics"]: + result_handler.diagnostics_point(np.array([val[0] for val in diag_vars.values()], dtype=float)) - assert file_size > max_size*0.9 and file_size < max_size*1.1, \ - "The file size is not within 10% of the given max size" - - def _test_result_size_early_abort(self, result_type, result_file_name=""): - """ - Verifies that the ResultSizeError is triggered and also verifies that the cause of the error being - triggered was due to that the ESTIMATE for the result size was too big. - """ - model, opts = self._setup(result_type, result_file_name) - - max_size = 1e6 - opts["result_max_size"] = max_size - opts["ncp"] = 10000000 - - with pytest.raises(ResultSizeError): - model.simulate(options=opts) - - result_file = model.get_last_result_file() - if result_file: - file_size = os.path.getsize(result_file) - - assert file_size < max_size*0.1, \ - "The file size is not small, no early abort" - - # TODO: Pytest parametrization + result_file = model.get_last_result_file() + file_size = os.path.getsize(result_file) + + assert file_size > max_size*0.9 and file_size < max_size*1.1, \ + "The file size is not within 10% of the given max size" + + def _test_result_size_early_abort(self, result_type, result_file_name=""): """ - Binary + Verifies that the ResultSizeError is triggered and also verifies that the cause of the error being + triggered was due to that the ESTIMATE for the result size was too big. """ - def test_binary_file_size_verification_diagnostics(self): - """ - Make sure that the diagnostics variables are also taken into account. 
- """ - self._test_result_size_verification("binary", dynamic_diagnostics=True) - - def test_binary_file_size_verification(self): - self._test_result_size_verification("binary") - - def test_binary_file_size_early_abort(self): - self._test_result_size_early_abort("binary") + model, opts = self._setup(result_type, result_file_name) - def test_small_size_binary_file(self): - self._test_result_exception("binary") - - def test_small_size_binary_file_cs(self): - self._test_result_exception("binary", fmi_type="cs") - - def test_small_size_binary_file_stream(self): - self._test_result_exception("binary", BytesIO()) + max_size = 1e6 + opts["result_max_size"] = max_size + opts["ncp"] = 10000000 - def test_large_size_binary_file(self): - self._test_result("binary") + with pytest.raises(ResultSizeError): + model.simulate(options=opts) - def test_large_size_binary_file_stream(self): - self._test_result("binary", BytesIO()) + result_file = model.get_last_result_file() + if result_file: + file_size = os.path.getsize(result_file) + assert file_size < max_size*0.1, \ + "The file size is not small, no early abort" + + # TODO: Pytest parametrization + """ + Binary + """ + def test_binary_file_size_verification_diagnostics(self): """ - Text + Make sure that the diagnostics variables are also taken into account. """ - def test_text_file_size_verification(self): - self._test_result_size_verification("file") + self._test_result_size_verification("binary", dynamic_diagnostics=True) - def test_text_file_size_early_abort(self): - self._test_result_size_early_abort("file") + def test_binary_file_size_verification(self): + self._test_result_size_verification("binary") + + def test_binary_file_size_early_abort(self): + self._test_result_size_early_abort("binary") - def test_small_size_text_file(self): - self._test_result_exception("file") - - def test_small_size_text_file_stream(self): - self._test_result_exception("file", StringIO()) + def test_small_size_binary_file(self): + self._test_result_exception("binary") + + def test_small_size_binary_file_cs(self): + self._test_result_exception("binary", fmi_type="cs") + + def test_small_size_binary_file_stream(self): + self._test_result_exception("binary", BytesIO()) - def test_large_size_text_file(self): - self._test_result("file") + def test_large_size_binary_file(self): + self._test_result("binary") - def test_large_size_text_file_stream(self): - self._test_result("file", StringIO()) + def test_large_size_binary_file_stream(self): + self._test_result("binary", BytesIO()) - """ - CSV - """ - def test_csv_file_size_verification(self): - self._test_result_size_verification("csv") - - def test_csv_file_size_early_abort(self): - self._test_result_size_early_abort("csv") + """ + Text + """ + def test_text_file_size_verification(self): + self._test_result_size_verification("file") + + def test_text_file_size_early_abort(self): + self._test_result_size_early_abort("file") - def test_small_size_csv_file(self): - self._test_result_exception("csv") - - def test_small_size_csv_file_stream(self): - self._test_result_exception("csv", StringIO()) + def test_small_size_text_file(self): + self._test_result_exception("file") + + def test_small_size_text_file_stream(self): + self._test_result_exception("file", StringIO()) - def test_large_size_csv_file(self): - self._test_result("csv", max_size=10000000) + def test_large_size_text_file(self): + self._test_result("file") - def test_large_size_csv_file_stream(self): - self._test_result("csv", StringIO(), max_size=10000000) + def 
test_large_size_text_file_stream(self): + self._test_result("file", StringIO()) - """ - Memory - """ - def test_small_size_memory(self): - self._test_result_exception("memory") - - def test_memory_size_early_abort(self): - self._test_result_size_early_abort("memory") - - def test_small_size_memory_stream(self): - self._test_result_exception("memory", StringIO()) + """ + CSV + """ + def test_csv_file_size_verification(self): + self._test_result_size_verification("csv") + + def test_csv_file_size_early_abort(self): + self._test_result_size_early_abort("csv") + + def test_small_size_csv_file(self): + self._test_result_exception("csv") + + def test_small_size_csv_file_stream(self): + self._test_result_exception("csv", StringIO()) + + def test_large_size_csv_file(self): + self._test_result("csv", max_size=10000000) + + def test_large_size_csv_file_stream(self): + self._test_result("csv", StringIO(), max_size=10000000) + + """ + Memory + """ + def test_small_size_memory(self): + self._test_result_exception("memory") + + def test_memory_size_early_abort(self): + self._test_result_size_early_abort("memory") + + def test_small_size_memory_stream(self): + self._test_result_exception("memory", StringIO()) - def test_large_size_memory(self): - self._test_result("memory") + def test_large_size_memory(self): + self._test_result("memory") - def test_large_size_memory_stream(self): - self._test_result("memory", StringIO()) + def test_large_size_memory_stream(self): + self._test_result("memory", StringIO())
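Note on how the new marker is meant to take effect: the patch registers the assimulo marker in pytest.ini and tags the Assimulo-dependent test classes with @pytest.mark.assimulo, while the try/except around "import assimulo" now only swallows the ImportError. The skipping itself has to happen outside the files shown here, typically in a collection hook. Below is a minimal sketch of such a hook, assuming a hypothetical tests/conftest.py that is not part of this patch.

# tests/conftest.py -- hypothetical sketch, not included in this patch
import importlib.util

import pytest

def pytest_collection_modifyitems(config, items):
    # Skip every test marked 'assimulo' when the Assimulo package cannot be imported.
    if importlib.util.find_spec("assimulo") is not None:
        return
    skip_assimulo = pytest.mark.skip(reason="assimulo is not installed")
    for item in items:
        if item.get_closest_marker("assimulo") is not None:
            item.add_marker(skip_assimulo)

With the marker registered, the Assimulo-dependent tests can also be deselected explicitly on the command line, for example: pytest -m "not assimulo".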