Skip to content

Commit

Permalink
Revert "fix: honor atomic validations in destroy actions using filter"
Browse files Browse the repository at this point in the history
This reverts commit a6a9961.
  • Loading branch information
zachdaniel committed Jul 17, 2024
1 parent 1db2b0f commit 05cdaa0
Show file tree
Hide file tree
Showing 5 changed files with 37 additions and 73 deletions.
6 changes: 6 additions & 0 deletions lib/ash/actions/destroy/bulk.ex
Original file line number Diff line number Diff line change
Expand Up @@ -539,6 +539,7 @@ defmodule Ash.Actions.Destroy.Bulk do
authorize_bulk_query(query, atomic_changeset, opts),
{:ok, atomic_changeset, query} <-
authorize_atomic_changeset(query, atomic_changeset, opts),
{query, atomic_changeset} <- add_changeset_filters(query, atomic_changeset),
{:ok, data_layer_query} <- Ash.Query.data_layer_query(query) do
case Ash.DataLayer.destroy_query(
data_layer_query,
Expand Down Expand Up @@ -767,6 +768,10 @@ defmodule Ash.Actions.Destroy.Bulk do
end
end

# Folds the changeset's stashed filter into the query (via
# `Ash.Query.do_filter/2`) and clears it from the changeset, returning
# `{query, changeset}` so the destroy query carries the combined filter.
defp add_changeset_filters(query, changeset) do
  filtered_query = Ash.Query.do_filter(query, changeset.filter)
  cleared_changeset = %{changeset | filter: nil}

  {filtered_query, cleared_changeset}
end

defp do_run(domain, stream, action, input, opts, not_atomic_reason) do
resource = opts[:resource]
opts = Ash.Actions.Helpers.set_opts(opts, domain)
Expand Down Expand Up @@ -1093,6 +1098,7 @@ defmodule Ash.Actions.Destroy.Bulk do

resource
|> Ash.Changeset.new()
|> Ash.Changeset.filter(opts[:filter])
|> Map.put(:domain, domain)
|> Ash.Actions.Helpers.add_context(opts)
|> Ash.Changeset.set_context(opts[:context] || %{})
Expand Down
7 changes: 7 additions & 0 deletions lib/ash/actions/update/bulk.ex
Original file line number Diff line number Diff line change
Expand Up @@ -495,6 +495,8 @@ defmodule Ash.Actions.Update.Bulk do
authorize_bulk_query(query, atomic_changeset, opts),
{:ok, atomic_changeset, query} <-
authorize_atomic_changeset(query, atomic_changeset, opts),
{query, atomic_changeset} <-
add_changeset_filters(query, atomic_changeset),
%Ash.Changeset{valid?: true} = atomic_changeset <-
Ash.Changeset.handle_allow_nil_atomics(atomic_changeset, opts[:actor]),
atomic_changeset <- sort_atomic_changes(atomic_changeset),
Expand Down Expand Up @@ -1056,6 +1058,7 @@ defmodule Ash.Actions.Update.Bulk do
)
|> Ash.Query.set_context(%{private: %{internal?: true}})
|> Ash.Query.filter(^pkeys)
|> Ash.Query.filter(^atomic_changeset.filter)
|> Ash.Query.select([])
|> then(fn query ->
run(domain, query, action.name, input,
Expand Down Expand Up @@ -1373,6 +1376,10 @@ defmodule Ash.Actions.Update.Bulk do
|> Enum.with_index()
end

# Applies the changeset's filter to the query and nils it out on the
# changeset, so the bulk update query is scoped by that filter instead of
# the changeset carrying it forward. Returns `{query, changeset}`.
defp add_changeset_filters(query, changeset) do
  %{filter: changeset_filter} = changeset

  {Ash.Query.do_filter(query, changeset_filter), %{changeset | filter: nil}}
end

defp pre_template(opts, changeset, actor) do
if Ash.Expr.template_references_context?(opts) do
opts
Expand Down
62 changes: 24 additions & 38 deletions lib/ash/changeset/changeset.ex
Original file line number Diff line number Diff line change
Expand Up @@ -2429,47 +2429,33 @@ defmodule Ash.Changeset do
)}

:error ->
if changeset.action.type == :update || Map.get(changeset.action, :soft?) do
[first_pkey_field | _] = Ash.Resource.Info.primary_key(changeset.resource)
[first_pkey_field | _] = Ash.Resource.Info.primary_key(changeset.resource)

full_atomic_update =
expr(
if ^condition_expr do
^error_expr
else
^atomic_ref(changeset, first_pkey_field)
end
)
full_atomic_update =
expr(
if ^condition_expr do
^error_expr
else
^atomic_ref(changeset, first_pkey_field)
end
)

case Ash.Filter.hydrate_refs(full_atomic_update, %{
resource: changeset.resource,
public: false
}) do
{:ok, full_atomic_update} ->
{:cont,
atomic_update(
changeset,
first_pkey_field,
full_atomic_update
)}
case Ash.Filter.hydrate_refs(full_atomic_update, %{
resource: changeset.resource,
public: false
}) do
{:ok, full_atomic_update} ->
{:cont,
atomic_update(
changeset,
first_pkey_field,
full_atomic_update
)}

{:error, error} ->
{:halt,
{:not_atomic,
"Failed to validate expression #{inspect(full_atomic_update)}: #{inspect(error)}"}}
end
else
{:cont,
filter(
changeset,
expr(
if ^condition_expr do
^error_expr
else
true
end
)
)}
{:error, error} ->
{:halt,
{:not_atomic,
"Failed to validate expression #{inspect(full_atomic_update)}: #{inspect(error)}"}}
end
end
else
Expand Down
8 changes: 0 additions & 8 deletions lib/ash/data_layer/ets/ets.ex
Original file line number Diff line number Diff line change
Expand Up @@ -1235,14 +1235,6 @@ defmodule Ash.DataLayer.Ets do
log_destroy_query(resource, query)

query
|> Map.update!(:filter, fn filter ->
if is_nil(changeset.filter) do
filter
else
filter = filter || %Ash.Filter{resource: changeset.resource}
Ash.Filter.add_to_filter!(filter, changeset.filter)
end
end)
|> run_query(resource)
|> case do
{:ok, results} ->
Expand Down
27 changes: 0 additions & 27 deletions test/actions/bulk/bulk_destroy_test.exs
Original file line number Diff line number Diff line change
Expand Up @@ -129,10 +129,6 @@ defmodule Ash.Test.Actions.BulkDestroyTest do
end
end

destroy :destroy_with_validation do
validate attribute_does_not_equal(:title, "can't delete")
end

destroy :destroy_with_argument do
require_atomic? false

Expand Down Expand Up @@ -429,29 +425,6 @@ defmodule Ash.Test.Actions.BulkDestroyTest do
assert [] = Ash.read!(Post)
end

test "runs validations" do
assert_raise Ash.Error.Invalid, ~r/must not equal "can't delete"/, fn ->
assert %Ash.BulkResult{
records: [
%{title: "title1", title2: nil},
%{title: "title2", title2: nil}
]
} =
Ash.bulk_create!([%{title: "can't delete"}, %{title: "title2"}], Post, :create,
return_stream?: true,
return_records?: true
)
|> Stream.map(fn {:ok, result} ->
result
end)
|> Ash.bulk_destroy!(:destroy_with_validation, %{},
resource: Post,
return_records?: true,
return_errors?: true
)
end
end

test "runs after batch hooks" do
assert %Ash.BulkResult{
records: [
Expand Down

0 comments on commit 05cdaa0

Please sign in to comment.