Rename is_differentiable -> requires_grad
HGSilveri committed Dec 18, 2024
1 parent c22d33c commit 8d5a5d2
Showing 10 changed files with 32 additions and 38 deletions.

pulser-core/pulser/math/abstract_array.py (3 additions & 3 deletions)

@@ -72,8 +72,8 @@ def is_tensor(self) -> bool:
         return self.has_torch() and isinstance(self._array, torch.Tensor)
 
     @property
-    def is_differentiable(self) -> bool:
-        """Whether the stored array is a differentiable tensor."""
+    def requires_grad(self) -> bool:
+        """Whether the stored array is a tensor that needs a gradient."""
         return self.is_tensor and cast(torch.Tensor, self._array).requires_grad
 
     def astype(self, dtype: DTypeLike) -> AbstractArray:
@@ -276,7 +276,7 @@ def __setitem__(self, indices: Any, values: AbstractArrayLike) -> None:
                 self._process_indices(indices)
             ] = values  # type: ignore[assignment]
         except RuntimeError as e:
-            if self.is_differentiable:
+            if self.requires_grad:
                 raise RuntimeError(
                     "Failed to modify a tensor that requires grad in place."
                 ) from e
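
For reference, a minimal sketch of how the renamed property behaves after this change — assuming, as the tests below suggest, that `pulser.math.AbstractArray` can wrap a torch tensor directly (hypothetical usage, not taken from this commit):

    import torch
    import pulser.math as pm

    # Wrap a gradient-tracking tensor in an AbstractArray.
    arr = pm.AbstractArray(torch.tensor([1.0, 2.0], requires_grad=True))
    assert arr.is_tensor       # backed by a torch.Tensor
    assert arr.requires_grad   # formerly `arr.is_differentiable`

    # In-place writes on such an array hit the __setitem__ branch above and are
    # re-raised as "Failed to modify a tensor that requires grad in place."
    try:
        arr[0] = 0.0
    except RuntimeError:
        pass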

tests/test_channels.py (2 additions & 2 deletions)

@@ -290,7 +290,7 @@ def test_modulation(channel, tr, eom, side_buffer_len, requires_grad):
         tr,
         tr,
     )
-    assert out_.is_differentiable == requires_grad
+    assert out_.requires_grad == requires_grad
 
     wf2 = BlackmanWaveform(800, wf_vals[1])
     out_ = channel.modulate(wf2.samples, eom=eom)
@@ -299,7 +299,7 @@ def test_modulation(channel, tr, eom, side_buffer_len, requires_grad):
         side_buffer_len,
         side_buffer_len,
     )
-    assert out_.is_differentiable == requires_grad
+    assert out_.requires_grad == requires_grad
 
 
 @pytest.mark.parametrize(

tests/test_eom.py (2 additions & 2 deletions)

@@ -190,7 +190,7 @@ def calc_offset(amp):
         ]
     )
     assert calculated_det_off == min(det_off_options, key=abs)
-    assert calculated_det_off.is_differentiable == requires_grad
+    assert calculated_det_off.requires_grad == requires_grad
 
     # Case where the EOM pulses are off-resonant
     detuning_on = detuning_on + 1.0
@@ -209,4 +209,4 @@ def calc_offset(amp):
     assert off_options[0] == eom_.calculate_detuning_off(
         amp, detuning_on, optimal_detuning_off=0.0
     )
-    assert off_options.is_differentiable == requires_grad
+    assert off_options.requires_grad == requires_grad

tests/test_math.py (3 additions & 3 deletions)

@@ -39,7 +39,7 @@ def test_pad(cast_to, requires_grad):
         arr = torch.tensor(arr, requires_grad=requires_grad)
 
     def check_match(arr1: pm.AbstractArray, arr2):
-        assert arr1.is_differentiable == requires_grad
+        assert arr1.requires_grad == requires_grad
         np.testing.assert_array_equal(
             arr1.as_array(detach=requires_grad), arr2
         )
@@ -259,7 +259,7 @@ def test_items(self, use_tensor, requires_grad, indices):
             assert item == val[i]
            assert isinstance(item, pm.AbstractArray)
            assert item.is_tensor == use_tensor
-            assert item.is_differentiable == requires_grad
+            assert item.requires_grad == requires_grad
 
         # setitem
         if not requires_grad:
@@ -291,7 +291,7 @@ def test_items(self, use_tensor, requires_grad, indices):
         assert np.all(arr_np == new_val)
         assert arr_np.is_tensor
         # The resulting tensor requires grad if the assigned one did
-        assert arr_np.is_differentiable == requires_grad
+        assert arr_np.requires_grad == requires_grad
 
     @pytest.mark.parametrize("scalar", [False, True])
     @pytest.mark.parametrize(
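
The comment in the last hunk ("The resulting tensor requires grad if the assigned one did") is plain PyTorch autograd behaviour; a pure-torch sketch, independent of pulser:

    import torch

    base = torch.zeros(3)                          # does not track gradients
    value = torch.tensor(1.0, requires_grad=True)  # tracks gradients

    base[0] = value            # in-place assignment of a grad-tracking value
    assert base.requires_grad  # the result now participates in the autograd graph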

tests/test_parametrized.py (2 additions & 4 deletions)

@@ -104,9 +104,7 @@ def test_var_diff(a, b, requires_grad):
     b._assign(torch.tensor([-1.0, 1.0], requires_grad=requires_grad))
 
     for var in [a, b]:
-        assert (
-            a.value is not None and a.value.is_differentiable == requires_grad
-        )
+        assert a.value is not None and a.value.requires_grad == requires_grad
 
 
 def test_varitem(a, b, d):
@@ -166,7 +164,7 @@ def test_paramobj(bwf, t, a, b):
 def test_opsupport(a, b, with_diff_tensor):
     def check_var_grad(var):
         if with_diff_tensor:
-            assert var.build().is_differentiable
+            assert var.build().requires_grad
 
     a._assign(-2.0)
     if with_diff_tensor:

tests/test_pulse.py (3 additions & 3 deletions)

@@ -234,9 +234,9 @@ def test_eq():
 
 
 def _assert_pulse_requires_grad(pulse: Pulse, invert: bool = False) -> None:
-    assert pulse.amplitude.samples.is_differentiable == (not invert)
-    assert pulse.detuning.samples.is_differentiable == (not invert)
-    assert pulse.phase.is_differentiable == (not invert)
+    assert pulse.amplitude.samples.requires_grad == (not invert)
+    assert pulse.detuning.samples.requires_grad == (not invert)
+    assert pulse.phase.requires_grad == (not invert)
 
 
 @pytest.mark.parametrize("requires_grad", [True, False])

tests/test_register.py (2 additions & 2 deletions)

@@ -508,9 +508,9 @@ def _assert_reg_requires_grad(
 ) -> None:
     for coords in reg.qubits.values():
         if invert:
-            assert not coords.is_differentiable
+            assert not coords.requires_grad
         else:
-            assert coords.is_tensor and coords.is_differentiable
+            assert coords.is_tensor and coords.requires_grad
 
 
 @pytest.mark.parametrize(

tests/test_sequence.py (6 additions & 6 deletions)

@@ -2870,12 +2870,12 @@ def test_sequence_diff(device, parametrized, with_modulation, with_eom):
 
     seq_samples = sample(seq, modulation=with_modulation)
     ryd_ch_samples = seq_samples.channel_samples["ryd_global"]
-    assert ryd_ch_samples.amp.is_differentiable
-    assert ryd_ch_samples.det.is_differentiable
-    assert ryd_ch_samples.phase.is_differentiable
+    assert ryd_ch_samples.amp.requires_grad
+    assert ryd_ch_samples.det.requires_grad
+    assert ryd_ch_samples.phase.requires_grad
     if "dmm_0" in seq_samples.channel_samples:
         dmm_ch_samples = seq_samples.channel_samples["dmm_0"]
         # Only detuning is modulated
-        assert not dmm_ch_samples.amp.is_differentiable
-        assert dmm_ch_samples.det.is_differentiable
-        assert not dmm_ch_samples.phase.is_differentiable
+        assert not dmm_ch_samples.amp.requires_grad
+        assert dmm_ch_samples.det.requires_grad
+        assert not dmm_ch_samples.phase.requires_grad

tests/test_sequence_sampler.py (5 additions & 5 deletions)

@@ -503,11 +503,11 @@ def test_phase_modulation(off_center, with_diff):
     seq_samples = sample(seq).channel_samples["rydberg_global"]
 
     if with_diff:
-        assert full_phase.samples.is_differentiable
-        assert not seq_samples.amp.is_differentiable
-        assert seq_samples.det.is_differentiable
-        assert seq_samples.phase.is_differentiable
-        assert seq_samples.phase_modulation.is_differentiable
+        assert full_phase.samples.requires_grad
+        assert not seq_samples.amp.requires_grad
+        assert seq_samples.det.requires_grad
+        assert seq_samples.phase.requires_grad
+        assert seq_samples.phase_modulation.requires_grad
 
     np.testing.assert_allclose(
         seq_samples.phase_modulation.as_array(detach=with_diff)

tests/test_waveforms.py (4 additions & 8 deletions)

@@ -490,24 +490,20 @@ def test_waveform_diff(
 
     samples_tensor = wf.samples.as_tensor()
     assert samples_tensor.requires_grad == requires_grad
-    assert (
-        wf.modulated_samples(rydberg_global).is_differentiable == requires_grad
-    )
+    assert wf.modulated_samples(rydberg_global).requires_grad == requires_grad
     wfx2_tensor = (-wf * 2).samples.as_tensor()
     assert torch.equal(wfx2_tensor, samples_tensor * -2.0)
     assert wfx2_tensor.requires_grad == requires_grad
 
     wfdiv2 = wf / torch.tensor(2.0, requires_grad=True)
     assert torch.equal(wfdiv2.samples.as_tensor(), samples_tensor / 2.0)
     # Should always be true because it was divided by diff tensor
-    assert wfdiv2.samples.is_differentiable
+    assert wfdiv2.samples.requires_grad
 
-    assert wf[-1].is_differentiable == requires_grad
+    assert wf[-1].requires_grad == requires_grad
 
     try:
-        assert (
-            wf.change_duration(1000).samples.is_differentiable == requires_grad
-        )
+        assert wf.change_duration(1000).samples.requires_grad == requires_grad
     except NotImplementedError:
         pass
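
The "Should always be true" comment above is likewise standard autograd behaviour: dividing by a gradient-tracking tensor makes the quotient require a gradient regardless of the numerator. A pure-torch sketch:

    import torch

    numerator = torch.tensor([1.0, 2.0])                 # no gradient tracking
    denominator = torch.tensor(2.0, requires_grad=True)  # tracks gradients

    result = numerator / denominator
    assert result.requires_grad  # tracking propagates through the division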
