diff --git a/examples/PortZygote/chainrules_patch.jl b/examples/PortZygote/chainrules_patch.jl
index 71807e1f9..76b706fc3 100644
--- a/examples/PortZygote/chainrules_patch.jl
+++ b/examples/PortZygote/chainrules_patch.jl
@@ -1,16 +1,16 @@
 import ChainRulesCore: rrule, @non_differentiable, NoTangent
 using Yao, Yao.AD
 
-function rrule(::typeof(apply!), reg::ArrayReg, block::AbstractBlock)
-    out = apply!(reg, block)
+function rrule(::typeof(apply), reg::ArrayReg, block::AbstractBlock)
+    out = apply(reg, block)
     out, function (outδ)
-        (in, inδ), paramsδ = apply_back((out, outδ), block)
+        (in, inδ), paramsδ = apply_back((copy(out), outδ), block)
         return (NoTangent(), inδ, paramsδ)
     end
 end
 
-function rrule(::typeof(dispatch!), block::AbstractBlock, params)
-    out = dispatch!(block, params)
+function rrule(::typeof(dispatch), block::AbstractBlock, params)
+    out = dispatch(block, params)
     out, function (outδ)
         (NoTangent(), NoTangent(), outδ)
     end
diff --git a/examples/PortZygote/gate_learning.jl b/examples/PortZygote/gate_learning.jl
index 30af24763..2deb7fbf1 100644
--- a/examples/PortZygote/gate_learning.jl
+++ b/examples/PortZygote/gate_learning.jl
@@ -20,8 +20,8 @@ Learn a general U4 gate. The optimizer is LBFGS.
 function learn_u4(u::AbstractMatrix; niter=100)
     ansatz = general_U4() * put(2, 1=>phase(0.0))  # initial values are 0, here, we attach a global phase.
     params = parameters(ansatz)
-    g!(G, x) = (dispatch!(ansatz, x); G .= Zygote.gradient(ansatz->loss(u, ansatz), ansatz)[1])
-    optimize(x->(dispatch!(ansatz, x); loss(u, ansatz)), g!, parameters(ansatz),
+    g!(G, x) = (ansatz=dispatch(ansatz, x); G .= Zygote.gradient(ansatz->loss(u, ansatz), ansatz)[1])
+    optimize(x->(ansatz=dispatch(ansatz, x); loss(u, ansatz)), g!, parameters(ansatz),
         LBFGS(), Optim.Options(iterations=niter))
     println("final loss = $(loss(u,ansatz))")
     return ansatz
diff --git a/examples/PortZygote/shared_parameters.jl b/examples/PortZygote/shared_parameters.jl
index b4705003d..02602d824 100644
--- a/examples/PortZygote/shared_parameters.jl
+++ b/examples/PortZygote/shared_parameters.jl
@@ -8,8 +8,8 @@ h = YaoExtensions.heisenberg(5)
 
 function loss(h, c, θ) where N
     # the assign is nessesary!
-    c = dispatch!(c, fill(θ, nparameters(c)))
-    reg = apply!(zero_state(nqubits(c)), c)
+    c = dispatch(c, fill(θ, nparameters(c)))
+    reg = apply(zero_state(nqubits(c)), c)
     real(expect(h, reg))
 end
 
@@ -28,9 +28,9 @@ true_grad = sum(gparams)
 # the batched version
 function loss2(h, c, θ) where N
     # the assign is nessesary!
-    c = dispatch!(c, fill(θ, nparameters(c)))
+    c = dispatch(c, fill(θ, nparameters(c)))
     reg = zero_state(nqubits(c),nbatch=2)
-    reg = apply!(reg, c)
+    reg = apply(reg, c)
     sum(real(expect(h, reg)))
 end
 
diff --git a/examples/PortZygote/simple_example.jl b/examples/PortZygote/simple_example.jl
index 50bff360f..f3993c95f 100644
--- a/examples/PortZygote/simple_example.jl
+++ b/examples/PortZygote/simple_example.jl
@@ -8,7 +8,7 @@ dispatch!(c, :random)
 
 function loss(reg::AbstractRegister, circuit::AbstractBlock{N}) where N
     #copy(reg) |> circuit
-    reg = apply!(copy(reg), circuit)
+    reg = apply(copy(reg), circuit)
     st = state(reg)
     sum(real(st.*st))
 end
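
Usage note (not part of the patch): the non-mutating `dispatch` and `apply` return new objects instead of updating their arguments in place, which is why every call site above rebinds (`c = dispatch(c, ...)`, `ansatz = dispatch(ansatz, x)`); the `copy(out)` handed to `apply_back` likewise keeps the in-place backward pass from clobbering the forward output. Below is a minimal sketch of how the patched rules are consumed, mirroring shared_parameters.jl; `variational_circuit` is only an illustrative ansatz from YaoExtensions, any parameterized block works the same way.

# Sketch: differentiate a shared-parameter loss through the patched rrules.
using Yao, YaoExtensions, Zygote
include("chainrules_patch.jl")            # loads the rrules shown above

h = YaoExtensions.heisenberg(5)           # Hamiltonian, as in shared_parameters.jl
c = YaoExtensions.variational_circuit(5)  # illustrative parameterized ansatz

function loss(h, c, θ)
    c = dispatch(c, fill(θ, nparameters(c)))  # rebind: dispatch returns a new block
    reg = apply(zero_state(nqubits(c)), c)    # rebind: apply returns a new register
    real(expect(h, reg))
end

gθ = Zygote.gradient(θ -> loss(h, c, θ), 0.5)[1]  # gradient w.r.t. the single shared θ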