Add test for ExperimentData pickle serialization
wshanks committed Nov 18, 2023
1 parent 4897eb5 commit 91f031c
Showing 3 changed files with 49 additions and 17 deletions.
3 changes: 2 additions & 1 deletion test/base.py
@@ -143,6 +143,7 @@ def assertEqualExtended(
         *,
         msg: Optional[str] = None,
         strict_type: bool = False,
+        **kwargs
     ):
         """Extended equality assertion which covers Qiskit Experiments classes.
@@ -163,7 +164,7 @@ def assertEqualExtended(
         default_msg = f"{first} != {second}"

         self.assertTrue(
-            is_equivalent(first, second, strict_type=strict_type),
+            is_equivalent(first, second, strict_type=strict_type, **kwargs),
             msg=msg or default_msg,
         )

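The forwarded keyword arguments let individual tests opt into comparison tweaks without widening the assertion helper's signature each time. A minimal usage sketch (assumed to run inside a QiskitExperimentsTestCase method, with expdata1 and expdata2 already built, mirroring the new test further down):

# Hedged sketch: ignore_result_id is forwarded through is_equivalent to the
# result checkers defined in test/extended_equality.py.
self.assertEqualExtended(expdata1, expdata2, ignore_result_id=True)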
51 changes: 35 additions & 16 deletions test/extended_equality.py
@@ -47,6 +47,7 @@ def is_equivalent(
     *,
     strict_type: bool = True,
     numerical_precision: float = 1e-8,
+    **kwargs
 ) -> bool:
     """Check if two input data are equivalent.
@@ -75,6 +76,7 @@
         data2,
         strict_type=strict_type,
         numerical_precision=numerical_precision,
+        **kwargs
     )
     if not isinstance(evaluated, (bool, np.bool_)):
         # When either one of input is numpy array type, it may broadcast equality check
@@ -240,24 +242,28 @@ def _check_curvefit_results(
 def _check_service_analysis_results(
     data1: AnalysisResult,
     data2: AnalysisResult,
+    ignore_result_id: bool = False,
     **kwargs,
 ):
     """Check equality of AnalysisResult class which is payload for experiment service."""
+    attrs = [
+        "name",
+        "value",
+        "extra",
+        "device_components",
+        "experiment_id",
+        "chisq",
+        "quality",
+        "verified",
+        "tags",
+        "auto_save",
+        "source",
+    ]
+    if not ignore_result_id:
+        attrs.append("result_id")
+
     return _check_all_attributes(
-        attrs=[
-            "name",
-            "value",
-            "extra",
-            "device_components",
-            "result_id",
-            "experiment_id",
-            "chisq",
-            "quality",
-            "verified",
-            "tags",
-            "auto_save",
-            "source",
-        ],
+        attrs,
         data1=data1,
         data2=data2,
         **kwargs,
@@ -292,12 +298,25 @@ def _check_dataframes(
 def _check_result_table(
     data1: AnalysisResultTable,
     data2: AnalysisResultTable,
+    ignore_result_id: bool = False,
     **kwargs,
 ):
     """Check equality of data frame which may involve Qiskit Experiments class value."""
+    table1 = data1.copy().to_dict(orient="index")
+    table2 = data2.copy().to_dict(orient="index")
+    for table in (table1, table2):
+        for result in table.values():
+            result.pop("created_time")
+            if ignore_result_id:
+                result.pop("result_id")
+    if ignore_result_id:
+        # Keys of the dict are based on the result ids so they must be ignored
+        # as well
+        table1 = list(table1.values())
+        table2 = list(table2.values())
     return is_equivalent(
-        data1.copy().to_dict(orient="index"),
-        data2.copy().to_dict(orient="index"),
+        table1,
+        table2,
         **kwargs,
     )
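The comment about the dict keys is the subtle part: to_dict(orient="index") keys every row by its result_id, so even with the result_id column popped from each row, two otherwise-identical tables with regenerated IDs still compare unequal as dicts. A small self-contained illustration with made-up values:

# Hypothetical rows keyed by result_id, mimicking to_dict(orient="index") output.
table1 = {"aaa111": {"name": "T1", "value": 1.2e-4}}
table2 = {"bbb222": {"name": "T1", "value": 1.2e-4}}
assert table1 != table2                                  # keys (result ids) differ
assert list(table1.values()) == list(table2.values())    # row contents still match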

12 changes: 12 additions & 0 deletions test/framework/test_framework.py
@@ -12,6 +12,7 @@

 """Tests for base experiment framework."""

+import pickle
 from test.fake_experiment import FakeExperiment, FakeAnalysis
 from test.base import QiskitExperimentsTestCase
 from itertools import product
@@ -116,6 +117,17 @@ def circuits(self):
             num_jobs += 1
         self.assertEqual(len(job_ids), num_jobs)

+    def test_run_analysis_experiment_data_pickle_roundtrip(self):
+        """Test running analysis on ExperimentData after pickle roundtrip"""
+        analysis = FakeAnalysis()
+        expdata1 = analysis.run(ExperimentData(), seed=54321)
+        self.assertExperimentDone(expdata1)
+
+        expdata2 = pickle.loads(pickle.dumps(expdata1))
+        expdata2 = analysis.run(expdata2, replace_results=True, seed=54321)
+        self.assertExperimentDone(expdata2)
+        self.assertEqualExtended(expdata1, expdata2, ignore_result_id=True)
+
     def test_analysis_replace_results_true(self):
         """Test running analysis with replace_results=True"""
         analysis = FakeAnalysis()
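The new test covers the full path: run analysis, pickle and unpickle the ExperimentData, re-run analysis on the restored copy with replace_results=True, and compare with ignore_result_id=True because re-running the analysis produces results with fresh IDs. Outside the test harness, the round trip itself reduces to roughly this sketch (assumes a working qiskit-experiments installation; FakeAnalysis lives in the package's test suite, not the public API):

import pickle

from qiskit_experiments.framework import ExperimentData

original = ExperimentData()
restored = pickle.loads(pickle.dumps(original))  # the round trip the new test exercises
# Analysis can then be re-run on the restored copy, e.g.
# FakeAnalysis().run(restored, replace_results=True, seed=54321)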
