Skip to content

Commit

Permalink
Fix event trigger cleanup on deletion via replace_metadata (fix hasura#5461) (#6137)
Browse files Browse the repository at this point in the history
  • Loading branch information
lexi-lambda committed Nov 5, 2020
1 parent c5f1392 commit f09d0cf
Show file tree
Hide file tree
Showing 9 changed files with 107 additions and 44 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,7 @@ This release contains the [PDV refactor (#4111)](https://github.com/hasura/graph
- server: fix issue with tracking custom functions that return `SETOF` materialized view (close #5294) (#5945)
- server: introduce optional custom table name in table configuration to track the table according to the custom name. The `set_table_custom_fields` API has been deprecated, A new API `set_table_customization` has been added to set the configuration. (#3811)
- server: allow remote relationships with union, interface and enum type fields as well (fixes #5875) (#6080)
- server: fix event trigger cleanup on deletion via replace_metadata (fix #5461) (#6137)
- console: allow user to cascade Postgres dependencies when dropping Postgres objects (close #5109) (#5248)
- console: mark inconsistent remote schemas in the UI (close #5093) (#5181)
- console: remove ONLY as default for ALTER TABLE in column alter operations (close #5512) (#5706)
Expand Down
52 changes: 40 additions & 12 deletions server/src-lib/Hasura/RQL/DDL/EventTrigger.hs
Original file line number Diff line number Diff line change
Expand Up @@ -9,19 +9,20 @@ module Hasura.RQL.DDL.EventTrigger

-- TODO(from master): review
, delEventTriggerFromCatalog
, subTableP2
, subTableP2Setup
, mkEventTriggerInfo
, mkAllTriggersQ
, delTriggerQ
, getEventTriggerDef
, getWebhookInfoFromConf
, getHeaderInfosFromConf
, updateEventTriggerInCatalog
, replaceEventTriggersInCatalog
) where

import Hasura.Prelude

import qualified Data.Environment as Env
import qualified Data.HashMap.Strict.Extended as Map
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Database.PG.Query as Q
Expand Down Expand Up @@ -213,13 +214,13 @@ subTableP1 (CreateEventTriggerQuery name qt insert update delete enableManual re
SubCStar -> return ()
SubCArray pgcols -> forM_ pgcols (assertPGCol (_tciFieldInfoMap ti) "")

subTableP2Setup
mkEventTriggerInfo
:: QErrM m
=> Env.Environment
-> QualifiedTable
-> EventTriggerConf
-> m (EventTriggerInfo, [SchemaDependency])
subTableP2Setup env qt (EventTriggerConf name def webhook webhookFromEnv rconf mheaders) = do
mkEventTriggerInfo env qt (EventTriggerConf name def webhook webhookFromEnv rconf mheaders) = do
webhookConf <- case (webhook, webhookFromEnv) of
(Just w, Nothing) -> return $ WCValue w
(Nothing, Just wEnv) -> return $ WCEnv wEnv
Expand Down Expand Up @@ -251,19 +252,14 @@ getTrigDefDeps qt (TriggerOpsDef mIns mUpd mDel _) =
SubCStar -> []
SubCArray pgcols -> pgcols

-- | Persist an event trigger's configuration to the catalog: an existing
-- trigger (@replace == True@) is updated in place, a new one is inserted.
subTableP2
  :: (MonadTx m)
  => QualifiedTable -> Bool -> EventTriggerConf -> m ()
subTableP2 qt replace etc
  | replace   = liftTx $ updateEventTriggerInCatalog etc
  | otherwise = liftTx $ addEventTriggerToCatalog qt etc

-- | Handler for the @create_event_trigger@ API call: validates the request
-- (via 'subTableP1'), writes the trigger configuration to the catalog, and
-- rebuilds the schema cache entry for the affected trigger.
runCreateEventTriggerQuery
  :: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m)
  => CreateEventTriggerQuery -> m EncJSON
runCreateEventTriggerQuery q = do
  (qt, replace, etc) <- subTableP1 q
  -- Persist exactly once: update in place when replacing an existing
  -- trigger, insert otherwise. (The stale duplicate call to the removed
  -- subTableP2 wrapper is dropped — it performed this same catalog write.)
  liftTx if replace
    then updateEventTriggerInCatalog etc
    else addEventTriggerToCatalog qt etc
  buildSchemaCacheFor $ MOTableObj qt (MTOTrigger $ etcName etc)
  return successMsg

Expand Down Expand Up @@ -365,3 +361,35 @@ updateEventTriggerInCatalog trigConf =
configuration = $1
WHERE name = $2
|] (Q.AltJ $ toJSON trigConf, etcName trigConf) True

-- | Replaces /all/ event triggers in the catalog with the given set. Rather
-- than dropping and recreating everything, the new set is diffed against the
-- triggers currently in the catalog so that SQL trigger functions are dropped
-- (and pending events archived) only for triggers that are truly gone.
--
-- See Note [Diff-and-patch event triggers on replace] for more details.
replaceEventTriggersInCatalog
  :: MonadTx m
  => HashMap TriggerName (QualifiedTable, EventTriggerConf)
  -> m ()
replaceEventTriggersInCatalog triggerConfs = do
  currentTriggers <- Map.fromListOn id <$> fetchExistingTriggers
  liftTx $ for_ (align currentTriggers triggerConfs) applyChange
  where
    -- Dispatch on the diff: only-old => delete, only-new => add,
    -- present in both => update the stored configuration.
    applyChange = \case
      This triggerName          -> delEventTriggerFromCatalog triggerName
      That (tableName, conf)    -> addEventTriggerToCatalog tableName conf
      These _ (_, conf)         -> updateEventTriggerInCatalog conf

    -- Names of all event triggers currently recorded in the catalog.
    fetchExistingTriggers = liftTx $ map runIdentity <$>
      Q.listQE defaultTxErrorHandler
        [Q.sql|SELECT name FROM hdb_catalog.event_triggers|] () True

{- Note [Diff-and-patch event triggers on replace]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When executing a replace_metadata API call, we usually just drop everything in
the catalog and recreate it from scratch, then rebuild the schema cache. This
works fine for most things, but it’s a bad idea for event triggers, because
delEventTriggerFromCatalog does extra work: it deletes the SQL trigger functions
and archives all associated events.
Therefore, we have to be more careful about which event triggers we drop. We
diff the new metadata against the old metadata, and we only drop triggers that
are actually absent in the new metadata. The replaceEventTriggersInCatalog
function implements this diff-and-patch operation. -}
15 changes: 10 additions & 5 deletions server/src-lib/Hasura/RQL/DDL/Metadata.hs
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ module Hasura.RQL.DDL.Metadata
import Hasura.Prelude

import qualified Data.Aeson.Ordered as AO
import qualified Data.HashMap.Strict as HM
import qualified Data.HashMap.Strict.InsOrd as HMIns
import qualified Data.HashSet as HS
import qualified Data.HashSet.InsOrd as HSIns
Expand All @@ -36,7 +37,8 @@ import qualified Hasura.RQL.DDL.Schema as Schema
import Hasura.Backends.Postgres.SQL.Types
import Hasura.EncJSON
import Hasura.RQL.DDL.ComputedField (dropComputedFieldFromCatalog)
import Hasura.RQL.DDL.EventTrigger (delEventTriggerFromCatalog, subTableP2)
import Hasura.RQL.DDL.EventTrigger (delEventTriggerFromCatalog,
replaceEventTriggersInCatalog)
import Hasura.RQL.DDL.Metadata.Types
import Hasura.RQL.DDL.Permission.Internal (dropPermFromCatalog)
import Hasura.RQL.DDL.RemoteSchema (addRemoteSchemaToCatalog,
Expand All @@ -49,10 +51,11 @@ import Hasura.RQL.Types
-- | Purge all user-defined metadata; metadata with is_system_defined = false
clearUserMetadata :: MonadTx m => m ()
clearUserMetadata = liftTx $ Q.catchE defaultTxErrorHandler $ do
-- Note: we don’t drop event triggers here because we update them a different
-- way; see Note [Diff-and-patch event triggers on replace] in Hasura.RQL.DDL.EventTrigger.
Q.unitQ "DELETE FROM hdb_catalog.hdb_function WHERE is_system_defined <> 'true'" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_permission WHERE is_system_defined <> 'true'" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_relationship WHERE is_system_defined <> 'true'" () False
Q.unitQ "DELETE FROM hdb_catalog.event_triggers" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_computed_field" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_remote_relationship" () False
Q.unitQ "DELETE FROM hdb_catalog.hdb_table WHERE is_system_defined <> 'true'" () False
Expand All @@ -69,6 +72,7 @@ runClearMetadata
=> ClearMetadata -> m EncJSON
runClearMetadata _ = do
clearUserMetadata
replaceEventTriggersInCatalog mempty
buildSchemaCacheStrict
return successMsg

Expand Down Expand Up @@ -110,9 +114,10 @@ saveMetadata (Metadata tables functions
withPathK "update_permissions" $ processPerms _tmTable _tmUpdatePermissions
withPathK "delete_permissions" $ processPerms _tmTable _tmDeletePermissions

-- Event triggers
withPathK "event_triggers" $
indexedForM_ _tmEventTriggers $ \etc -> subTableP2 _tmTable False etc
-- Event triggers
let allEventTriggers = HMIns.elems tables & map \table ->
(_tmTable table,) <$> HMIns.toHashMap (_tmEventTriggers table)
replaceEventTriggersInCatalog $ HM.unions allEventTriggers

-- sql functions
withPathK "functions" $ indexedForM_ functions $
Expand Down
2 changes: 1 addition & 1 deletion server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs
Original file line number Diff line number Diff line change
Expand Up @@ -332,7 +332,7 @@ buildSchemaCacheRule env = proc (catalogMetadata, invalidationKeys) -> do
(| withRecordInconsistency (
(| modifyErrA (do
etc <- bindErrorA -< decodeValue configuration
(info, dependencies) <- bindErrorA -< subTableP2Setup env qt etc
(info, dependencies) <- bindErrorA -< mkEventTriggerInfo env qt etc
let tableColumns = M.mapMaybe (^? _FIColumn) (_tciFieldInfoMap tableInfo)
recreateViewIfNeeded -< (qt, tableColumns, trn, etcDefinition etc)
recordDependencies -< (metadataObject, schemaObjectId, dependencies)
Expand Down
2 changes: 1 addition & 1 deletion server/src-rsr/catalog_version.txt
Original file line number Diff line number Diff line change
@@ -1 +1 @@
41
42
4 changes: 1 addition & 3 deletions server/src-rsr/initialise.sql
Original file line number Diff line number Diff line change
Expand Up @@ -297,9 +297,7 @@ CREATE TABLE hdb_catalog.event_triggers
schema_name TEXT NOT NULL,
table_name TEXT NOT NULL,
configuration JSON,
comment TEXT,
FOREIGN KEY (schema_name, table_name)
REFERENCES hdb_catalog.hdb_table(table_schema, table_name) ON UPDATE CASCADE
comment TEXT
);

CREATE TABLE hdb_catalog.event_log
Expand Down
2 changes: 2 additions & 0 deletions server/src-rsr/migrations/41_to_42.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
-- Drop the foreign key from event_triggers to hdb_table so event triggers
-- can be deleted/recreated independently of table tracking order.
-- NOTE(review): presumably needed so replace_metadata can diff-and-patch
-- triggers without the table rows being deleted first — confirm against
-- Hasura.RQL.DDL.Metadata.clearUserMetadata.
ALTER TABLE hdb_catalog.event_triggers
DROP CONSTRAINT event_triggers_schema_name_table_name_fkey;
5 changes: 5 additions & 0 deletions server/src-rsr/migrations/42_to_41.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
-- Down-migration: restore the foreign key from event_triggers to hdb_table
-- exactly as it was created by the catalog-41 initialise.sql.
ALTER TABLE hdb_catalog.event_triggers
ADD CONSTRAINT event_triggers_schema_name_table_name_fkey
FOREIGN KEY (schema_name, table_name)
REFERENCES hdb_catalog.hdb_table(table_schema, table_name)
ON UPDATE CASCADE;
Original file line number Diff line number Diff line change
@@ -1,26 +1,50 @@
description: create an event trigger and then reset metadata
url: /v1/query
status: 200
query:
type: bulk
args:
- type: track_table
- description: create an event trigger and then reset metadata
url: /v1/query
status: 200
query:
type: bulk
args:
schema: hge_tests
name: test_t1
- type: create_event_trigger
args: &def_args
name: t1_1
table:
- type: track_table
args:
schema: hge_tests
name: test_t1
insert:
columns: "*"
update:
columns: "*"
delete:
columns: "*"
webhook: http://127.0.0.1:5592
- type: create_event_trigger
args: &def_args
name: t1_1
table:
schema: hge_tests
name: test_t1
insert:
columns: "*"
update:
columns: "*"
delete:
columns: "*"
webhook: http://127.0.0.1:5592
- type: insert
args:
table:
schema: hge_tests
name: test_t1
objects:
- c1: 1
c2: world
returning: []
- type: clear_metadata
args: {}

- type: clear_metadata
args: {}
- description: ensure the event was archived
url: /v1/query
status: 200
response:
- trigger_name: t1_1
archived: true
query:
type: select
args:
table:
schema: hdb_catalog
name: event_log
columns: [trigger_name, archived]
order_by: ['-created_at']
limit: 1

0 comments on commit f09d0cf

Please sign in to comment.