diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 2d18b076..8dbfa3f2 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -26,13 +26,18 @@ env: # group: e2e-${{ github.workflow }} # cancel-in-progress: false +concurrency: + group: e2e-${{ github.ref }} + cancel-in-progress: true + jobs: e2e_satellite_tests: - name: E2E Satellite tests runs-on: ubuntu-latest strategy: matrix: dialect: [SQLite, Postgres] + dal: [true, false] + name: E2E Satellite tests (Dialect ${{ matrix.dialect }} - uses DAL? ${{ matrix.dal }}) defaults: run: working-directory: e2e @@ -40,6 +45,7 @@ jobs: # BUILDKITE_ANALYTICS_TOKEN: ${{ secrets.BUILDKITE_TEST_ANALYTICS_E2E }} ELECTRIC_REPO: e2e/electric_repo DIALECT: ${{ matrix.dialect }} + DAL: ${{ matrix.dal }} steps: - uses: actions/checkout@v4 with: @@ -48,10 +54,10 @@ jobs: # Make sure that only one E2E workflow is running at the same time # Ports used in the tests are fixed and cannot be reused - - uses: ahmadnassri/action-workflow-queue@v1 - with: - # milliseconds - timeout: 900000 # 15 minutes + # - uses: ahmadnassri/action-workflow-queue@v1 + # with: + # # milliseconds + # timeout: 1500000 # 25 minutes - name: Inject slug/short variables uses: rlespinasse/github-slug-action@v4 diff --git a/README.md b/README.md index 481cc45d..0f9bb245 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,7 @@ Client based on the Typescript client from the `clients/typescript` subfolder fr * [NPM package](https://www.npmjs.com/package/electric-sql). * Version `v0.12.1-dev` -* Commit: `5e2e276c81a6fd7a7db3f9a3853b0132e219cba6` +* Commit: `837ce928e3a887ee8a48966efd029224a585e6f1` ### What is ElectricSQL? @@ -131,13 +131,13 @@ In this example, projects are synced with all its related content (project issue final shape = await electric.syncTable( db.projects, include: (p) => [ - SyncInputRelation.from( + ShapeInputRelation.from( p.$relations.issues, include: (i) => [ - SyncInputRelation.from( + ShapeInputRelation.from( i.$relations.comments, include: (c) => [ - SyncInputRelation.from(c.$relations.author), + ShapeInputRelation.from(c.$relations.author), ], ), ], diff --git a/e2e/common.mk b/e2e/common.mk index 2311e666..fc34b1b2 100644 --- a/e2e/common.mk +++ b/e2e/common.mk @@ -109,6 +109,7 @@ start_satellite_client_%: --rm \ -e TERM=dumb \ -e DIALECT=${DIALECT} \ + -e DAL=${DAL} \ satellite_client_$* diff --git a/e2e/satellite_client/db/01-create_e2e_tables.sql b/e2e/satellite_client/db/01-create_e2e_tables.sql index 28c29c9c..ba69f244 100644 --- a/e2e/satellite_client/db/01-create_e2e_tables.sql +++ b/e2e/satellite_client/db/01-create_e2e_tables.sql @@ -1,3 +1,5 @@ +-- migrate:up + /* * This migration file defines all the tables used by the e2e tests. * Use it to migrate a Postgres database and then generate the Electric client from it. 
@@ -92,4 +94,6 @@ CREATE TABLE "blobs" ( blob BYTEA ); -ALTER TABLE "blobs" ENABLE ELECTRIC; \ No newline at end of file +ALTER TABLE "blobs" ENABLE ELECTRIC; + +-- migrate:down diff --git a/e2e/satellite_client/lib/cli/prompt.dart b/e2e/satellite_client/lib/cli/prompt.dart index 05e53b77..04d509e8 100644 --- a/e2e/satellite_client/lib/cli/prompt.dart +++ b/e2e/satellite_client/lib/cli/prompt.dart @@ -285,11 +285,11 @@ Future start() async { command, getItemColumns, ); - } else if (name == "insert_item") { + } else if (name == "insert_items") { await processCommand2Params, void>( state, command, - (electric, keys) => insertItem(electric, keys.cast()), + (electric, keys) => insertItems(electric, keys.cast()), ); } else if (name == "insert_extended_into") { await processCommand3Params start() async { command, getOtherItems, ); - } else if (name == "insert_other_item") { + } else if (name == "insert_other_items") { await processCommand2Params, void>( state, command, @@ -369,11 +369,11 @@ Future start() async { return await reconnect(electric, exp); }, ); - } else if (name == "custom_03_25_sync_items") { + } else if (name == "custom_03_26_sync_items") { await processCommand1Param( state, command, - custom0325SyncItems, + custom0326SyncItems, ); } else { throw Exception("Unknown command: $name"); diff --git a/e2e/satellite_client/lib/client_commands.dart b/e2e/satellite_client/lib/client_commands.dart index 1ae2add5..20d85429 100644 --- a/e2e/satellite_client/lib/client_commands.dart +++ b/e2e/satellite_client/lib/client_commands.dart @@ -13,6 +13,8 @@ import 'package:satellite_dart_client/drift/database.dart'; import 'package:electricsql/drivers/drift.dart'; import 'package:drift/native.dart'; import 'package:satellite_dart_client/generated/electric/drift_schema.dart'; +import 'package:satellite_dart_client/generated/electric/schema.dart' + as raw_schema; import 'package:satellite_dart_client/util/json.dart'; import 'package:satellite_dart_client/util/pretty_output.dart'; @@ -20,6 +22,13 @@ late String dbName; final QueryBuilder builder = dialect() == Dialect.postgres ? kPostgresQueryBuilder : kSqliteQueryBuilder; +final withDal = dal(); +// final schema = withDal ? raw_schema.kDbSchema : kDriftSchemaWithoutDal; +final DBSchema rawSchema = raw_schema.kDbSchema; + +final Converter converter = + dialect() == Dialect.postgres ? kPostgresConverter : kSqliteConverter; + int? tokenExpirationMillis; Dialect dialect() { @@ -36,6 +45,22 @@ Dialect dialect() { } } +bool dal() { + final dalEnv = Platform.environment['DAL']?.toLowerCase(); + switch (dalEnv) { + case 'false': + print('Running without DAL'); + return false; + case 'true': + case '': + case null: + print('Running with DAL'); + return true; + default: + throw Exception('Illegal value for DAL option: $dalEnv'); + } +} + typedef MyDriftElectricClient = ElectricClient; Future makeDb(String name) async { @@ -120,13 +145,13 @@ Future electrifyDb( } // reconnects with Electric, e.g. after expiration of the JWT -Future reconnect(ElectricClient electric, Duration? exp) async { +Future reconnect(BaseElectricClient electric, Duration? 
exp) async { final token = await mockSecureAuthToken(exp: exp); await electric.connect(token); } Future checkTokenExpiration( - ElectricClient electric, int minimalTime) async { + BaseElectricClient electric, int minimalTime) async { final start = DateTime.now().millisecondsSinceEpoch; late void Function() unsubscribe; unsubscribe = electric.notifier.subscribeToConnectivityStateChanges((x) { @@ -158,22 +183,52 @@ void setSubscribers(DriftElectricClient db) { }); } +Future syncTableWithShape( + MyDriftElectricClient electric, + String tableName, { + ShapeWhereBuilder? shapeFilterDal, + ShapeIncludeBuilder? includeDal, + String? shapeFilterRaw, + List? includeRaw, +}) async { + if (withDal) { + final table = electric.db.allTables + .where((t) => t.actualTableName == tableName) + .first as T; + final subs = await electric.syncTable( + table, + where: shapeFilterDal ?? (_) => CustomExpression(shapeFilterRaw!), + include: includeDal, + ); + return await subs.synced; + } else { + final subs = await electric.syncManager.subscribe( + ShapeInputRaw( + tableName: tableName, + where: shapeFilterRaw == null ? null : ShapeWhere.raw(shapeFilterRaw), + include: includeRaw, + ), + ); + return await subs.synced; + } +} + Future syncItemsTable( MyDriftElectricClient electric, String shapeFilter) async { - final subs = await electric.syncTable( - electric.db.items, - where: (_) => CustomExpression(shapeFilter), + return await syncTableWithShape( + electric, + 'items', + shapeFilterRaw: shapeFilter, ); - return await subs.synced; } Future syncOtherItemsTable( MyDriftElectricClient electric, String shapeFilter) async { - final subs = await electric.syncTable( - electric.db.otherItems, - where: (_) => CustomExpression(shapeFilter), + return await syncTableWithShape( + electric, + 'other_items', + shapeFilterRaw: shapeFilter, ); - return await subs.synced; } Future syncTable(String table) async { @@ -192,12 +247,12 @@ Future lowLevelSubscribe( return await synced; } -Future getTables(DriftElectricClient electric) async { +Future getTables(BaseElectricClient electric) async { final rows = await electric.adapter.query(builder.getLocalTableNames()); return Rows(rows); } -Future getColumns(DriftElectricClient electric, String table) async { +Future getColumns(BaseElectricClient electric, String table) async { final namespace = builder.defaultNamespace; final qualifiedTablename = QualifiedTablename(namespace, table); final rows = await electric.adapter.query( @@ -206,13 +261,9 @@ Future getColumns(DriftElectricClient electric, String table) async { return Rows(rows); } -Future getRows(DriftElectricClient electric, String table) async { - final rows = await electric.db - .customSelect( - 'SELECT * FROM "$table";', - ) - .get(); - return _toRows(rows); +Future getRows(BaseElectricClient electric, String table) async { + final rows = await rawQuery(electric, 'SELECT * FROM "$table";'); + return Rows(rows); } Future getTimestamps(MyDriftElectricClient electric) async { @@ -220,7 +271,7 @@ Future getTimestamps(MyDriftElectricClient electric) async { //await electric.db.timestamps.findMany(); } -Future writeTimestamp( +Future writeTimestampDal( MyDriftElectricClient electric, Map timestampMap) async { final companion = TimestampsCompanion.insert( id: timestampMap['id'] as String, @@ -230,7 +281,22 @@ Future writeTimestamp( await electric.db.timestamps.insert().insert(companion); } -Future writeDatetime( +Future writeTimestampRaw( + BaseElectricClient electric, Map timestampMap) async { + final createdAt = 
TypeConverters.timestamp + .encode(DateTime.parse(timestampMap['created_at'] as String)); + final updatedAt = TypeConverters.timestampTZ + .encode(DateTime.parse(timestampMap['updated_at'] as String)); + + await electric.adapter.run(Statement( + 'INSERT INTO timestamps (id, created_at, updated_at) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}, ${builder.makePositionalParam(3)});', + [timestampMap['id'] as String, createdAt, updatedAt], + )); +} + +final writeTimestamp = withDal ? writeTimestampDal : writeTimestampRaw; + +Future writeDatetimeDal( MyDriftElectricClient electric, Map datetimeMap) async { final companion = DatetimesCompanion.insert( id: datetimeMap['id'] as String, @@ -240,7 +306,22 @@ Future writeDatetime( await electric.db.datetimes.insert().insert(companion); } -Future getTimestamp( +Future writeDatetimeRaw( + BaseElectricClient electric, Map datetimeMap) async { + final d = + TypeConverters.date.encode(DateTime.parse(datetimeMap['d'] as String)); + final t = + TypeConverters.time.encode(DateTime.parse(datetimeMap['t'] as String)); + + await electric.adapter.run(Statement( + '''INSERT INTO datetimes (id, d, t) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}, ${builder.makePositionalParam(3)});''', + [datetimeMap['id'] as String, d, t], + )); +} + +final writeDatetime = withDal ? writeDatetimeDal : writeDatetimeRaw; + +Future getTimestampDal( MyDriftElectricClient electric, String id) async { final timestamp = await (electric.db.timestamps.select() ..where((tbl) => tbl.id.equals(id))) @@ -248,26 +329,84 @@ Future getTimestamp( return timestamp; } -Future getDatetime(MyDriftElectricClient electric, String id) async { +Future getTimestampRaw( + BaseElectricClient electric, String id) async { + final rows = await rawQuery( + electric, + '''SELECT * FROM timestamps WHERE id = ${builder.makePositionalParam(1)};''', + [id], + ); + final result = rows.isNotEmpty ? rows[0] : null; + + return result == null + ? null + : Timestamp( + id: result['id'] as String, + createdAt: + decodeRawColumn(result, 'timestamps', 'created_at'), + updatedAt: + decodeRawColumn(result, 'timestamps', 'updated_at')); +} + +final getTimestamp = withDal ? getTimestampDal : getTimestampRaw; + +T decodeRawColumn(Map row, String table, String column) { + final tableSchema = rawSchema.tableSchemas[table]!; + final pgtype = tableSchema.fields[column]!; + final rawColVal = row[column]; + final decoded = converter.decode(rawColVal, pgtype) as T; + + if (pgtype == PgType.int8 && decoded is int) { + return BigInt.from(decoded as int) as T; + } else { + return decoded; + } +} + +Map decodeRow(Map row, String table) { + return row.map((k, v) { + final newV = decodeRawColumn(row, table, k); + return MapEntry(k, newV); + }); +} + +Future getDatetimeDal( + MyDriftElectricClient electric, String id) async { final datetime = await (electric.db.datetimes.select() ..where((tbl) => tbl.id.equals(id))) .getSingleOrNull(); - final rowJ = JsonEncoder.withIndent(' ') - .convert(toColumns(datetime)?.map((key, value) { - final Object? 
effectiveValue; - if (value is DateTime) { - effectiveValue = value.toIso8601String(); - } else { - effectiveValue = value; - } - return MapEntry(key, effectiveValue); - })); + final rowJ = + JsonEncoder.withIndent(' ').convert(toEncodableMap(toColumns(datetime))); print('Found date time?:\n$rowJ'); return datetime; } +Future getDatetimeRaw(BaseElectricClient electric, String id) async { + final rows = await rawQuery( + electric, + '''SELECT * FROM datetimes WHERE id = ${builder.makePositionalParam(1)};''', + [id], + ); + final result = rows.isNotEmpty ? rows[0] : null; + + final datetime = result == null + ? null + : Datetime( + id: result['id'] as String, + d: decodeRawColumn(result, 'datetimes', 'd'), + t: decodeRawColumn(result, 'datetimes', 't'), + ); + final rowJ = + JsonEncoder.withIndent(' ').convert(toEncodableMap(toColumns(datetime))); + print('Found date time?:\n$rowJ'); + + return datetime; +} + +final getDatetime = withDal ? getDatetimeDal : getDatetimeRaw; + Future assertTimestamp(MyDriftElectricClient electric, String id, String expectedCreatedAt, String expectedUpdatedAt) async { final timestamp = await getTimestamp(electric, id); @@ -285,6 +424,14 @@ Future assertDatetime(MyDriftElectricClient electric, String id, bool checkTimestamp( Timestamp? timestamp, String expectedCreatedAt, String expectedUpdatedAt) { + print("Timestamp: ${timestamp?.toJson()}"); + print("Created at: ${timestamp?.createdAt.millisecondsSinceEpoch}"); + print( + "Expected created at: ${DateTime.parse(expectedCreatedAt).millisecondsSinceEpoch}"); + print("Updated at: ${timestamp?.updatedAt.millisecondsSinceEpoch}"); + print( + "Expected updated at: ${DateTime.parse(expectedUpdatedAt).millisecondsSinceEpoch}"); + if (timestamp == null) return false; return timestamp.createdAt.millisecondsSinceEpoch == @@ -302,7 +449,7 @@ bool checkDatetime( DateTime.parse(expectedTime).millisecondsSinceEpoch; } -Future writeBool( +Future writeBoolDal( MyDriftElectricClient electric, String id, bool b) async { final row = await electric.db.bools.insertReturning( BoolsCompanion.insert( @@ -313,12 +460,37 @@ Future writeBool( return SingleRow.fromItem(row); } -Future getBool(MyDriftElectricClient electric, String id) async { +Future writeBoolRaw( + BaseElectricClient electric, String id, bool b) async { + final boolVal = + converter.encode(b, rawSchema.tableSchemas['bools']!.fields['b']!); + final rows = await electric.adapter.query(Statement( + '''INSERT INTO bools (id, b) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}) RETURNING *;''', + [id, boolVal], + )); + return SingleRow.fromColumns(decodeRow(rows.first, 'bools')); +} + +final writeBool = withDal ? writeBoolDal : writeBoolRaw; + +Future getBoolDal(MyDriftElectricClient electric, String id) async { final row = await (electric.db.bools.select()..where((t) => t.id.equals(id))) .getSingle(); return row.b; } +Future getBoolRaw(MyDriftElectricClient electric, String id) async { + final rows = await rawQuery( + electric, + 'SELECT b FROM bools WHERE id = ${builder.makePositionalParam(1)};', + [id]); + return rows.length == 1 + ? decodeRawColumn(rows.first, 'bools', 'b') + : null; +} + +final getBool = withDal ? 
getBoolDal : getBoolRaw; + Future getDatetimes(MyDriftElectricClient electric) async { // final rows = await electric.db.datetimes.select().get(); throw UnimplementedError(); @@ -352,12 +524,22 @@ Future existsItemWithContent( return item != null; } -Future getUUID(MyDriftElectricClient electric, String id) async { +Future getUUIDDal(MyDriftElectricClient electric, String id) async { final row = await (electric.db.uuids.select()..where((t) => t.id.equals(id))) .getSingle(); return SingleRow.fromItem(row); } +Future getUUIDRaw(MyDriftElectricClient electric, String id) async { + final rows = await rawQuery( + electric, + 'SELECT * FROM uuids WHERE id = ${builder.makePositionalParam(1)};', + [id]); + return SingleRow.fromColumns(rows.first); +} + +final getUUID = withDal ? getUUIDDal : getUUIDRaw; + Future getUUIDs(MyDriftElectricClient electric) async { final rows = await electric.db .customSelect( @@ -367,7 +549,8 @@ Future getUUIDs(MyDriftElectricClient electric) async { return _toRows(rows); } -Future writeUUID(MyDriftElectricClient electric, String id) async { +Future writeUUIDDal( + MyDriftElectricClient electric, String id) async { final item = await electric.db.uuids.insertReturning( UuidsCompanion.insert( id: id, @@ -376,14 +559,37 @@ Future writeUUID(MyDriftElectricClient electric, String id) async { return SingleRow.fromItem(item); } -Future getInt(MyDriftElectricClient electric, String id) async { +Future writeUUIDRaw( + MyDriftElectricClient electric, String id) async { + final uuidVal = + converter.encode(id, rawSchema.tableSchemas['uuids']!.fields['id']!); + final rows = await electric.adapter.query(Statement( + '''INSERT INTO uuids (id) VALUES (${builder.makePositionalParam(1)}) RETURNING *;''', + [uuidVal], + )); + return SingleRow.fromColumns(rows.first); +} + +final writeUUID = withDal ? writeUUIDDal : writeUUIDRaw; + +Future getIntDal(MyDriftElectricClient electric, String id) async { final item = await (electric.db.ints.select()..where((t) => t.id.equals(id))) .getSingle(); return SingleRow.fromItem(item); } -Future writeInt(MyDriftElectricClient electric, String id, int? i2, - int? i4, BigInt? i8) async { +Future getIntRaw(MyDriftElectricClient electric, String id) async { + final rows = await rawQuery( + electric, + 'SELECT id, i2, i4, i8 FROM ints WHERE id = ${builder.makePositionalParam(1)};', + [id]); + return SingleRow.fromColumns(decodeRow(rows.first, 'ints')); +} + +final getInt = withDal ? getIntDal : getIntRaw; + +Future writeIntDal(MyDriftElectricClient electric, String id, + int? i2, int? i4, BigInt? i8) async { try { final item = await electric.db.ints.insertReturning( IntsCompanion.insert( @@ -404,14 +610,49 @@ Future writeInt(MyDriftElectricClient electric, String id, int? i2, } } -Future getFloat(MyDriftElectricClient electric, String id) async { +Future writeIntRaw(MyDriftElectricClient electric, String id, + int? i2, int? i4, BigInt? 
i8) async { + try { + final rows = await electric.adapter.query(Statement( + '''INSERT INTO ints (id, i2, i4, i8) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}, ${builder.makePositionalParam(3)}, ${builder.makePositionalParam(4)}) RETURNING id, i2, i4, i8;''', + [ + id, + converter.encode(i2, PgType.int2), + converter.encode(i4, PgType.int4), + converter.encode(i8, PgType.int8), + ], + )); + return SingleRow.fromColumns(decodeRow(rows.first, 'ints')); + } catch (e) { + final eStr = e.toString(); + if (eStr + .contains("Invalid argument (this): Should be in signed 64bit range")) { + throw Exception("BigInt value exceeds the range of 64 bits"); + } + rethrow; + } +} + +final writeInt = withDal ? writeIntDal : writeIntRaw; + +Future getFloatDal(MyDriftElectricClient electric, String id) async { final item = await (electric.db.floats.select() ..where((t) => t.id.equals(id))) .getSingle(); return SingleRow.fromItem(item); } -Future writeFloat( +Future getFloatRaw(MyDriftElectricClient electric, String id) async { + final rows = await rawQuery( + electric, + 'SELECT * FROM floats WHERE id = ${builder.makePositionalParam(1)};', + [id]); + return SingleRow.fromColumns(decodeRow(rows.first, 'floats')); +} + +final getFloat = withDal ? getFloatDal : getFloatRaw; + +Future writeFloatDal( MyDriftElectricClient electric, String id, double f4, double f8) async { final item = await electric.db.floats.insertReturning( FloatsCompanion.insert( @@ -423,12 +664,37 @@ Future writeFloat( return SingleRow.fromItem(item); } +Future writeFloatRaw( + MyDriftElectricClient electric, String id, double f4, double f8) async { + final rows = await electric.adapter.query(Statement( + '''INSERT INTO floats (id, f4, f8) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}, ${builder.makePositionalParam(3)}) RETURNING *;''', + [ + id, + converter.encode(f4, PgType.float4), + converter.encode(f8, PgType.float8), + ], + )); + return SingleRow.fromColumns(decodeRow(rows.first, 'floats')); +} + +final writeFloat = withDal ? 
writeFloatDal : writeFloatRaw; + Future getJsonRaw(MyDriftElectricClient electric, String id) async { - final res = await electric.db.customSelect( + final res = await rawQuery( + electric, 'SELECT js FROM jsons WHERE id = ${builder.makePositionalParam(1)};', - variables: [Variable(id)], - ).get(); - return res[0].read('js'); + [id], + ); + return res[0]['js'] as String?; +} + +Future>> rawQuery( + BaseElectricClient electric, + String query, [ + List args = const [], +]) async { + final rows = await electric.adapter.query(Statement(query, args)); + return rows; } Future getJsonbRaw(MyDriftElectricClient electric, String id) async { @@ -450,7 +716,7 @@ Future getJsonbRaw(MyDriftElectricClient electric, String id) async { return valueToPrettyStr(effectiveJ); } -Future getJson(MyDriftElectricClient electric, String id) async { +Future getJsonDal(MyDriftElectricClient electric, String id) async { final item = await (electric.db.jsons.select()..where((t) => t.id.equals(id))) .getSingle(); final cols = toColumns(item)!; @@ -458,7 +724,19 @@ Future getJson(MyDriftElectricClient electric, String id) async { return SingleRow.fromColumns(cols); } -Future getJsonb(MyDriftElectricClient electric, String id) async { +Future getJsonRawInternal( + MyDriftElectricClient electric, String id) async { + final rows = await rawQuery( + electric, + 'SELECT id FROM jsons WHERE id = ${builder.makePositionalParam(1)};', + [id], + ); + return SingleRow.fromColumns(decodeRow(rows.first, 'jsons')); +} + +final getJson = withDal ? getJsonDal : getJsonRawInternal; + +Future getJsonbDal(MyDriftElectricClient electric, String id) async { final item = await (electric.db.jsons.select()..where((t) => t.id.equals(id))) .getSingle(); final cols = toColumns(item)!; @@ -466,7 +744,19 @@ Future getJsonb(MyDriftElectricClient electric, String id) async { return SingleRow.fromColumns(cols); } -Future writeJson( +Future getJsonbRawInternal( + MyDriftElectricClient electric, String id) async { + final rows = await rawQuery( + electric, + 'SELECT id, jsb FROM jsons WHERE id = ${builder.makePositionalParam(1)};', + [id], + ); + return SingleRow.fromColumns(decodeRow(rows.first, 'jsons')); +} + +final getJsonb = withDal ? getJsonbDal : getJsonbRawInternal; + +Future writeJsonDal( MyDriftElectricClient electric, String id, Object? jsb) async { final item = await electric.db.jsons.insertReturning( JsonsCompanion.insert( @@ -477,13 +767,42 @@ Future writeJson( return SingleRow.fromItem(item); } -Future getEnum(MyDriftElectricClient electric, String id) async { +Future writeJsonRaw( + MyDriftElectricClient electric, String id, Object? jsb) async { + final rows = await electric.adapter.query(Statement( + '''INSERT INTO jsons (id, jsb) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}) RETURNING *;''', + [ + id, + converter.encode(jsb, PgType.jsonb), + ], + )); + return SingleRow.fromColumns(decodeRow(rows.first, 'jsons')); +} + +final writeJson = withDal ? 
writeJsonDal : writeJsonRaw; + +Future getEnumDal(MyDriftElectricClient electric, String id) async { final item = await (electric.db.enums.select()..where((t) => t.id.equals(id))) .getSingle(); return _enumClassToRawRow(item); } -Future writeEnum( +Future getEnumRaw(MyDriftElectricClient electric, String id) async { + final rows = await rawQuery( + electric, + 'SELECT * FROM enums WHERE id = ${builder.makePositionalParam(1)};', + [id], + ); + final row = rows.first; + if (dialect() == Dialect.postgres) { + row['c'] = (row['c'] as pg.UndecodedBytes?)?.asString; + } + return SingleRow.fromColumns(decodeRow(row, 'enums')); +} + +final getEnum = withDal ? getEnumDal : getEnumRaw; + +Future writeEnumDal( MyDriftElectricClient electric, String id, String? enumStr) async { final enumValue = enumStr == null ? null : ElectricEnumCodecs.color.decode(enumStr); @@ -498,6 +817,22 @@ Future writeEnum( return _enumClassToRawRow(item); } +Future writeEnumRaw( + MyDriftElectricClient electric, String id, String? enumStr) async { + final rows = await electric.adapter.query(Statement( + '''INSERT INTO enums (id, c) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}) RETURNING *;''', + [id, enumStr], + )); + final row = rows.first; + + if (dialect() == Dialect.postgres) { + row['c'] = (row['c'] as pg.UndecodedBytes?)?.asString; + } + return SingleRow.fromColumns(decodeRow(row, 'enums')); +} + +final writeEnum = withDal ? writeEnumDal : writeEnumRaw; + // Converts the dart enum into string for the Lux expected output SingleRow _enumClassToRawRow(Enum item) { final driftCols = toColumns(item)!; @@ -509,13 +844,24 @@ SingleRow _enumClassToRawRow(Enum item) { return SingleRow(driftCols); } -Future getBlob(MyDriftElectricClient electric, String id) async { +Future getBlobDal(MyDriftElectricClient electric, String id) async { final item = await (electric.db.blobs.select()..where((t) => t.id.equals(id))) .getSingle(); return SingleRow.fromItem(item); } -Future writeBlob( +Future getBlobRaw(MyDriftElectricClient electric, String id) async { + final rows = await rawQuery( + electric, + 'SELECT * FROM blobs WHERE id = ${builder.makePositionalParam(1)};', + [id], + ); + return SingleRow.fromColumns(decodeRow(rows.first, 'blobs')); +} + +final getBlob = withDal ? getBlobDal : getBlobRaw; + +Future writeBlobDal( MyDriftElectricClient electric, String id, List? blob) async { final item = await electric.db.blobs.insertReturning( BlobsCompanion.insert( @@ -526,6 +872,18 @@ Future writeBlob( return SingleRow.fromItem(item); } +Future writeBlobRaw( + MyDriftElectricClient electric, String id, List? blob) async { + final rows = await electric.adapter.query(Statement( + '''INSERT INTO blobs (id, blob) VALUES (${builder.makePositionalParam(1)}, ${builder.makePositionalParam(2)}) RETURNING *;''', + [id, converter.encode(blob, PgType.bytea)], + )); + final row = rows.first; + return SingleRow.fromColumns(decodeRow(row, 'blobs')); +} + +final writeBlob = withDal ? 
writeBlobDal : writeBlobRaw; + Future getItemColumns( MyDriftElectricClient electric, String table, String column) async { final rows = await electric.db @@ -536,7 +894,7 @@ Future getItemColumns( return _toRows(rows); } -Future insertItem( +Future insertItems( MyDriftElectricClient electric, List keys) async { await electric.db.transaction(() async { for (final key in keys) { @@ -618,7 +976,7 @@ Future insertOtherItem( }); } -void setItemReplicatonTransform(MyDriftElectricClient electric) { +void setItemReplicatonTransformDal(MyDriftElectricClient electric) { electric.setTableReplicationTransform( electric.db.items, transformOutbound: (item) { @@ -638,6 +996,32 @@ void setItemReplicatonTransform(MyDriftElectricClient electric) { ); } +void setItemReplicatonTransformRaw(MyDriftElectricClient electric) { + final namespace = builder.defaultNamespace; + // ignore: invalid_use_of_internal_member + electric.rawClient.setReplicationTransform( + QualifiedTablename(namespace, 'items'), + ReplicatedRowTransformer(transformInbound: (itemRow) { + final newContent = (itemRow['content']! as String) + .split('') + .map((char) => String.fromCharCode(char.codeUnitAt(0) - 1)) + .join(''); + itemRow['content'] = newContent; + return itemRow; + }, transformOutbound: (itemRow) { + final newContent = (itemRow['content']! as String) + .split('') + .map((char) => String.fromCharCode(char.codeUnitAt(0) + 1)) + .join(''); + itemRow['content'] = newContent; + return itemRow; + }), + ); +} + +final setItemReplicatonTransform = + withDal ? setItemReplicatonTransformDal : setItemReplicatonTransformRaw; + Future stop(MyDriftElectricClient db) async { await globalRegistry.stopAll(); } @@ -658,14 +1042,21 @@ void disconnect(MyDriftElectricClient db) { db.disconnect(); } -Future custom0325SyncItems(MyDriftElectricClient electric) async { - final subs = await electric.syncTable( - electric.db.items, - where: (items) => items.content.like('items-_-'), - include: (items) => [SyncInputRelation.from(items.$relations.otherItems)], +Future custom0326SyncItems(MyDriftElectricClient electric) async { + await syncTableWithShape( + electric, + 'items', + shapeFilterDal: (items) => items.content.like('items-_-'), + shapeFilterRaw: "this.content like 'items-_-'", + includeDal: (items) => + [ShapeInputRelation.from(items.$relations.otherItems)], + includeRaw: [ + IncludeRelRaw( + foreignKey: ['item_id'], + select: ShapeInputRaw(tableName: 'other_items'), + ), + ], ); - - await subs.synced; } ///////////////////////////////// diff --git a/e2e/satellite_client/lib/generated/electric/drift_schema.dart b/e2e/satellite_client/lib/generated/electric/drift_schema.dart index 9c5b27d1..f5d79f0f 100644 --- a/e2e/satellite_client/lib/generated/electric/drift_schema.dart +++ b/e2e/satellite_client/lib/generated/electric/drift_schema.dart @@ -1,82 +1,52 @@ // GENERATED CODE - DO NOT MODIFY BY HAND // ignore_for_file: always_use_package_imports, depend_on_referenced_packages -// ignore_for_file: prefer_double_quotes +// ignore_for_file: prefer_double_quotes, require_trailing_commas import 'package:drift/drift.dart'; import 'package:electricsql/drivers/drift.dart'; import 'package:electricsql/electricsql.dart'; +import './migrations.dart'; +import './pg_migrations.dart'; + +const kElectricMigrations = ElectricMigrations( + sqliteMigrations: kSqliteMigrations, + pgMigrations: kPostgresMigrations, +); const kElectrifiedTables = [ + Blobs, + Bools, + Datetimes, + Enums, + Floats, + Ints, Items, + Jsons, OtherItems, Timestamps, - Datetimes, - Bools, 
Uuids, - Ints, - Floats, - Jsons, - Enums, - Blobs, ]; -class Items extends Table with ElectricTableMixin { - TextColumn get id => text().named('id')(); - - TextColumn get content => text().named('content')(); - - TextColumn get contentTextNull => - text().named('content_text_null').nullable()(); - - TextColumn get contentTextNullDefault => - text().named('content_text_null_default').nullable()(); - - IntColumn get intvalueNull => - customType(ElectricTypes.int4).named('intvalue_null').nullable()(); - - IntColumn get intvalueNullDefault => customType(ElectricTypes.int4) - .named('intvalue_null_default') - .nullable()(); - - @override - String? get tableName => 'items'; - - @override - Set>? get primaryKey => {id}; - - @override - $ItemsTableRelations get $relations => const $ItemsTableRelations(); -} - -class OtherItems extends Table with ElectricTableMixin { +class Blobs extends Table { TextColumn get id => text().named('id')(); - TextColumn get content => text().named('content')(); - - TextColumn get itemId => text().named('item_id').nullable()(); + BlobColumn get blob$ => blob().named('blob').nullable()(); @override - String? get tableName => 'other_items'; + String? get tableName => 'blobs'; @override Set>? get primaryKey => {id}; - - @override - $OtherItemsTableRelations get $relations => const $OtherItemsTableRelations(); } -class Timestamps extends Table { +class Bools extends Table { TextColumn get id => text().named('id')(); - Column get createdAt => - customType(ElectricTypes.timestamp).named('created_at')(); - - Column get updatedAt => - customType(ElectricTypes.timestampTZ).named('updated_at')(); + BoolColumn get b => boolean().named('b').nullable()(); @override - String? get tableName => 'timestamps'; + String? get tableName => 'bools'; @override Set>? get primaryKey => {id}; @@ -96,23 +66,30 @@ class Datetimes extends Table { Set>? get primaryKey => {id}; } -class Bools extends Table { +class Enums extends Table { TextColumn get id => text().named('id')(); - BoolColumn get b => boolean().named('b').nullable()(); + Column get c => + customType(ElectricEnumTypes.color).named('c').nullable()(); @override - String? get tableName => 'bools'; + String? get tableName => 'enums'; @override Set>? get primaryKey => {id}; } -class Uuids extends Table { - TextColumn get id => customType(ElectricTypes.uuid).named('id')(); +class Floats extends Table { + TextColumn get id => text().named('id')(); + + RealColumn get f4 => + customType(ElectricTypes.float4).named('f4').nullable()(); + + RealColumn get f8 => + customType(ElectricTypes.float8).named('f8').nullable()(); @override - String? get tableName => 'uuids'; + String? get tableName => 'floats'; @override Set>? get primaryKey => {id}; @@ -134,28 +111,37 @@ class Ints extends Table { Set>? 
get primaryKey => {id}; } -class Floats extends Table { +class Items extends Table with ElectricTableMixin { TextColumn get id => text().named('id')(); - RealColumn get f4 => - customType(ElectricTypes.float4).named('f4').nullable()(); + TextColumn get content => text().named('content')(); - RealColumn get f8 => - customType(ElectricTypes.float8).named('f8').nullable()(); + TextColumn get contentTextNull => + text().named('content_text_null').nullable()(); + + TextColumn get contentTextNullDefault => + text().named('content_text_null_default').nullable()(); + + IntColumn get intvalueNull => + customType(ElectricTypes.int4).named('intvalue_null').nullable()(); + + IntColumn get intvalueNullDefault => customType(ElectricTypes.int4) + .named('intvalue_null_default') + .nullable()(); @override - String? get tableName => 'floats'; + String? get tableName => 'items'; @override Set>? get primaryKey => {id}; + + @override + $ItemsTableRelations get $relations => const $ItemsTableRelations(); } class Jsons extends Table { TextColumn get id => text().named('id')(); - Column get js => - customType(ElectricTypes.json).named('js').nullable()(); - Column get jsb => customType(ElectricTypes.jsonb).named('jsb').nullable()(); @@ -166,26 +152,44 @@ class Jsons extends Table { Set>? get primaryKey => {id}; } -class Enums extends Table { +class OtherItems extends Table with ElectricTableMixin { TextColumn get id => text().named('id')(); - Column get c => - customType(ElectricEnumTypes.color).named('c').nullable()(); + TextColumn get content => text().named('content')(); + + TextColumn get itemId => text().named('item_id').nullable()(); @override - String? get tableName => 'enums'; + String? get tableName => 'other_items'; @override Set>? get primaryKey => {id}; + + @override + $OtherItemsTableRelations get $relations => const $OtherItemsTableRelations(); } -class Blobs extends Table { +class Timestamps extends Table { TextColumn get id => text().named('id')(); - BlobColumn get blob$ => blob().named('blob').nullable()(); + Column get createdAt => + customType(ElectricTypes.timestamp).named('created_at')(); + + Column get updatedAt => + customType(ElectricTypes.timestampTZ).named('updated_at')(); @override - String? get tableName => 'blobs'; + String? get tableName => 'timestamps'; + + @override + Set>? get primaryKey => {id}; +} + +class Uuids extends Table { + TextColumn get id => customType(ElectricTypes.uuid).named('id')(); + + @override + String? get tableName => 'uuids'; @override Set>? 
get primaryKey => {id}; diff --git a/e2e/satellite_client/lib/generated/electric/migrations.dart b/e2e/satellite_client/lib/generated/electric/migrations.dart new file mode 100644 index 00000000..aa35c816 --- /dev/null +++ b/e2e/satellite_client/lib/generated/electric/migrations.dart @@ -0,0 +1,128 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +// ignore_for_file: always_use_package_imports, depend_on_referenced_packages +// ignore_for_file: prefer_double_quotes, require_trailing_commas + +import 'package:electricsql/electricsql.dart'; + +const kSqliteMigrations = [ + Migration( + statements: [ + 'CREATE TABLE "items" (\n "id" TEXT NOT NULL,\n "content" TEXT NOT NULL,\n "content_text_null" TEXT,\n "content_text_null_default" TEXT,\n "intvalue_null" INTEGER,\n "intvalue_null_default" INTEGER,\n CONSTRAINT "items_pkey" PRIMARY KEY ("id")\n) WITHOUT ROWID;\n', + 'CREATE TABLE "other_items" (\n "id" TEXT NOT NULL,\n "content" TEXT NOT NULL,\n "item_id" TEXT,\n CONSTRAINT "other_items_item_id_fkey" FOREIGN KEY ("item_id") REFERENCES "items" ("id"),\n CONSTRAINT "other_items_pkey" PRIMARY KEY ("id")\n) WITHOUT ROWID;\n', + 'CREATE TABLE "timestamps" (\n "id" TEXT NOT NULL,\n "created_at" TEXT NOT NULL,\n "updated_at" TEXT NOT NULL,\n CONSTRAINT "timestamps_pkey" PRIMARY KEY ("id")\n) WITHOUT ROWID;\n', + 'CREATE TABLE "datetimes" (\n "id" TEXT NOT NULL,\n "d" TEXT NOT NULL,\n "t" TEXT NOT NULL,\n CONSTRAINT "datetimes_pkey" PRIMARY KEY ("id")\n) WITHOUT ROWID;\n', + 'CREATE TABLE "bools" (\n "id" TEXT NOT NULL,\n "b" INTEGER,\n CONSTRAINT "bools_pkey" PRIMARY KEY ("id")\n) WITHOUT ROWID;\n', + 'CREATE TABLE "uuids" (\n "id" TEXT NOT NULL,\n CONSTRAINT "uuids_pkey" PRIMARY KEY ("id")\n) WITHOUT ROWID;\n', + 'CREATE TABLE "ints" (\n "id" TEXT NOT NULL,\n "i2" INTEGER,\n "i4" INTEGER,\n "i8" INTEGER,\n CONSTRAINT "ints_pkey" PRIMARY KEY ("id")\n) WITHOUT ROWID;\n', + 'CREATE TABLE "floats" (\n "id" TEXT NOT NULL,\n "f4" REAL,\n "f8" REAL,\n CONSTRAINT "floats_pkey" PRIMARY KEY ("id")\n) WITHOUT ROWID;\n', + 'CREATE TABLE "jsons" (\n "id" TEXT NOT NULL,\n "jsb" TEXT_JSON,\n CONSTRAINT "jsons_pkey" PRIMARY KEY ("id")\n) WITHOUT ROWID;\n', + 'CREATE TABLE "enums" (\n "id" TEXT NOT NULL,\n "c" TEXT,\n CONSTRAINT "enums_pkey" PRIMARY KEY ("id")\n) WITHOUT ROWID;\n', + 'CREATE TABLE "blobs" (\n "id" TEXT NOT NULL,\n "blob" BLOB,\n CONSTRAINT "blobs_pkey" PRIMARY KEY ("id")\n) WITHOUT ROWID;\n', + 'INSERT OR IGNORE INTO _electric_trigger_settings (namespace, tablename, flag) VALUES (\'main\', \'items\', 1);', + 'DROP TRIGGER IF EXISTS update_ensure_main_items_primarykey;', + 'CREATE TRIGGER update_ensure_main_items_primarykey\n BEFORE UPDATE ON "main"."items"\nBEGIN\n SELECT\n CASE\n WHEN old."id" != new."id" THEN\n RAISE (ABORT, \'cannot change the value of column id as it belongs to the primary key\')\n END;\nEND;', + 'DROP TRIGGER IF EXISTS insert_main_items_into_oplog;', + 'CREATE TRIGGER insert_main_items_into_oplog\n AFTER INSERT ON "main"."items"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'items\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'items\', \'INSERT\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'content\', new."content", \'content_text_null\', new."content_text_null", \'content_text_null_default\', new."content_text_null_default", \'id\', new."id", \'intvalue_null\', new."intvalue_null", \'intvalue_null_default\', 
new."intvalue_null_default"), NULL, NULL);\nEND;', + 'DROP TRIGGER IF EXISTS update_main_items_into_oplog;', + 'CREATE TRIGGER update_main_items_into_oplog\n AFTER UPDATE ON "main"."items"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'items\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'items\', \'UPDATE\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'content\', new."content", \'content_text_null\', new."content_text_null", \'content_text_null_default\', new."content_text_null_default", \'id\', new."id", \'intvalue_null\', new."intvalue_null", \'intvalue_null_default\', new."intvalue_null_default"), json_object(\'content\', old."content", \'content_text_null\', old."content_text_null", \'content_text_null_default\', old."content_text_null_default", \'id\', old."id", \'intvalue_null\', old."intvalue_null", \'intvalue_null_default\', old."intvalue_null_default"), NULL);\nEND;', + 'DROP TRIGGER IF EXISTS delete_main_items_into_oplog;', + 'CREATE TRIGGER delete_main_items_into_oplog\n AFTER DELETE ON "main"."items"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'items\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'items\', \'DELETE\', json_patch(\'{}\', json_object(\'id\', old."id")), NULL, json_object(\'content\', old."content", \'content_text_null\', old."content_text_null", \'content_text_null_default\', old."content_text_null_default", \'id\', old."id", \'intvalue_null\', old."intvalue_null", \'intvalue_null_default\', old."intvalue_null_default"), NULL);\nEND;', + 'INSERT OR IGNORE INTO _electric_trigger_settings (namespace, tablename, flag) VALUES (\'main\', \'other_items\', 1);', + 'DROP TRIGGER IF EXISTS update_ensure_main_other_items_primarykey;', + 'CREATE TRIGGER update_ensure_main_other_items_primarykey\n BEFORE UPDATE ON "main"."other_items"\nBEGIN\n SELECT\n CASE\n WHEN old."id" != new."id" THEN\n RAISE (ABORT, \'cannot change the value of column id as it belongs to the primary key\')\n END;\nEND;', + 'DROP TRIGGER IF EXISTS insert_main_other_items_into_oplog;', + 'CREATE TRIGGER insert_main_other_items_into_oplog\n AFTER INSERT ON "main"."other_items"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'other_items\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'other_items\', \'INSERT\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'content\', new."content", \'id\', new."id", \'item_id\', new."item_id"), NULL, NULL);\nEND;', + 'DROP TRIGGER IF EXISTS update_main_other_items_into_oplog;', + 'CREATE TRIGGER update_main_other_items_into_oplog\n AFTER UPDATE ON "main"."other_items"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'other_items\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'other_items\', \'UPDATE\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'content\', new."content", \'id\', new."id", \'item_id\', new."item_id"), json_object(\'content\', old."content", \'id\', old."id", \'item_id\', old."item_id"), NULL);\nEND;', + 'DROP TRIGGER IF EXISTS 
delete_main_other_items_into_oplog;', + 'CREATE TRIGGER delete_main_other_items_into_oplog\n AFTER DELETE ON "main"."other_items"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'other_items\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'other_items\', \'DELETE\', json_patch(\'{}\', json_object(\'id\', old."id")), NULL, json_object(\'content\', old."content", \'id\', old."id", \'item_id\', old."item_id"), NULL);\nEND;', + 'DROP TRIGGER IF EXISTS compensation_insert_main_other_items_item_id_into_oplog;', + ' CREATE TRIGGER compensation_insert_main_other_items_item_id_into_oplog\n AFTER INSERT ON "main"."other_items"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'other_items\') AND\n 1 = (SELECT value from _electric_meta WHERE key = \'compensations\')\n BEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n SELECT \'main\', \'items\', \'COMPENSATION\', json_patch(\'{}\', json_object(\'id\', "id")), json_object(\'id\', "id"), NULL, NULL\n FROM "main"."items" WHERE "id" = new."item_id";\n END;\n ', + 'DROP TRIGGER IF EXISTS compensation_update_main_other_items_item_id_into_oplog;', + ' CREATE TRIGGER compensation_update_main_other_items_item_id_into_oplog\n AFTER UPDATE ON "main"."other_items"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'other_items\') AND\n 1 = (SELECT value from _electric_meta WHERE key = \'compensations\')\n BEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n SELECT \'main\', \'items\', \'COMPENSATION\', json_patch(\'{}\', json_object(\'id\', "id")), json_object(\'id\', "id"), NULL, NULL\n FROM "main"."items" WHERE "id" = new."item_id";\n END;\n ', + 'INSERT OR IGNORE INTO _electric_trigger_settings (namespace, tablename, flag) VALUES (\'main\', \'timestamps\', 1);', + 'DROP TRIGGER IF EXISTS update_ensure_main_timestamps_primarykey;', + 'CREATE TRIGGER update_ensure_main_timestamps_primarykey\n BEFORE UPDATE ON "main"."timestamps"\nBEGIN\n SELECT\n CASE\n WHEN old."id" != new."id" THEN\n RAISE (ABORT, \'cannot change the value of column id as it belongs to the primary key\')\n END;\nEND;', + 'DROP TRIGGER IF EXISTS insert_main_timestamps_into_oplog;', + 'CREATE TRIGGER insert_main_timestamps_into_oplog\n AFTER INSERT ON "main"."timestamps"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'timestamps\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'timestamps\', \'INSERT\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'created_at\', new."created_at", \'id\', new."id", \'updated_at\', new."updated_at"), NULL, NULL);\nEND;', + 'DROP TRIGGER IF EXISTS update_main_timestamps_into_oplog;', + 'CREATE TRIGGER update_main_timestamps_into_oplog\n AFTER UPDATE ON "main"."timestamps"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'timestamps\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'timestamps\', \'UPDATE\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'created_at\', new."created_at", \'id\', new."id", \'updated_at\', 
new."updated_at"), json_object(\'created_at\', old."created_at", \'id\', old."id", \'updated_at\', old."updated_at"), NULL);\nEND;', + 'DROP TRIGGER IF EXISTS delete_main_timestamps_into_oplog;', + 'CREATE TRIGGER delete_main_timestamps_into_oplog\n AFTER DELETE ON "main"."timestamps"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'timestamps\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'timestamps\', \'DELETE\', json_patch(\'{}\', json_object(\'id\', old."id")), NULL, json_object(\'created_at\', old."created_at", \'id\', old."id", \'updated_at\', old."updated_at"), NULL);\nEND;', + 'INSERT OR IGNORE INTO _electric_trigger_settings (namespace, tablename, flag) VALUES (\'main\', \'datetimes\', 1);', + 'DROP TRIGGER IF EXISTS update_ensure_main_datetimes_primarykey;', + 'CREATE TRIGGER update_ensure_main_datetimes_primarykey\n BEFORE UPDATE ON "main"."datetimes"\nBEGIN\n SELECT\n CASE\n WHEN old."id" != new."id" THEN\n RAISE (ABORT, \'cannot change the value of column id as it belongs to the primary key\')\n END;\nEND;', + 'DROP TRIGGER IF EXISTS insert_main_datetimes_into_oplog;', + 'CREATE TRIGGER insert_main_datetimes_into_oplog\n AFTER INSERT ON "main"."datetimes"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'datetimes\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'datetimes\', \'INSERT\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'d\', new."d", \'id\', new."id", \'t\', new."t"), NULL, NULL);\nEND;', + 'DROP TRIGGER IF EXISTS update_main_datetimes_into_oplog;', + 'CREATE TRIGGER update_main_datetimes_into_oplog\n AFTER UPDATE ON "main"."datetimes"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'datetimes\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'datetimes\', \'UPDATE\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'d\', new."d", \'id\', new."id", \'t\', new."t"), json_object(\'d\', old."d", \'id\', old."id", \'t\', old."t"), NULL);\nEND;', + 'DROP TRIGGER IF EXISTS delete_main_datetimes_into_oplog;', + 'CREATE TRIGGER delete_main_datetimes_into_oplog\n AFTER DELETE ON "main"."datetimes"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'datetimes\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'datetimes\', \'DELETE\', json_patch(\'{}\', json_object(\'id\', old."id")), NULL, json_object(\'d\', old."d", \'id\', old."id", \'t\', old."t"), NULL);\nEND;', + 'INSERT OR IGNORE INTO _electric_trigger_settings (namespace, tablename, flag) VALUES (\'main\', \'bools\', 1);', + 'DROP TRIGGER IF EXISTS update_ensure_main_bools_primarykey;', + 'CREATE TRIGGER update_ensure_main_bools_primarykey\n BEFORE UPDATE ON "main"."bools"\nBEGIN\n SELECT\n CASE\n WHEN old."id" != new."id" THEN\n RAISE (ABORT, \'cannot change the value of column id as it belongs to the primary key\')\n END;\nEND;', + 'DROP TRIGGER IF EXISTS insert_main_bools_into_oplog;', + 'CREATE TRIGGER insert_main_bools_into_oplog\n AFTER INSERT ON "main"."bools"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND 
tablename = \'bools\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'bools\', \'INSERT\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'b\', new."b", \'id\', new."id"), NULL, NULL);\nEND;', + 'DROP TRIGGER IF EXISTS update_main_bools_into_oplog;', + 'CREATE TRIGGER update_main_bools_into_oplog\n AFTER UPDATE ON "main"."bools"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'bools\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'bools\', \'UPDATE\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'b\', new."b", \'id\', new."id"), json_object(\'b\', old."b", \'id\', old."id"), NULL);\nEND;', + 'DROP TRIGGER IF EXISTS delete_main_bools_into_oplog;', + 'CREATE TRIGGER delete_main_bools_into_oplog\n AFTER DELETE ON "main"."bools"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'bools\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'bools\', \'DELETE\', json_patch(\'{}\', json_object(\'id\', old."id")), NULL, json_object(\'b\', old."b", \'id\', old."id"), NULL);\nEND;', + 'INSERT OR IGNORE INTO _electric_trigger_settings (namespace, tablename, flag) VALUES (\'main\', \'uuids\', 1);', + 'DROP TRIGGER IF EXISTS update_ensure_main_uuids_primarykey;', + 'CREATE TRIGGER update_ensure_main_uuids_primarykey\n BEFORE UPDATE ON "main"."uuids"\nBEGIN\n SELECT\n CASE\n WHEN old."id" != new."id" THEN\n RAISE (ABORT, \'cannot change the value of column id as it belongs to the primary key\')\n END;\nEND;', + 'DROP TRIGGER IF EXISTS insert_main_uuids_into_oplog;', + 'CREATE TRIGGER insert_main_uuids_into_oplog\n AFTER INSERT ON "main"."uuids"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'uuids\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'uuids\', \'INSERT\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'id\', new."id"), NULL, NULL);\nEND;', + 'DROP TRIGGER IF EXISTS update_main_uuids_into_oplog;', + 'CREATE TRIGGER update_main_uuids_into_oplog\n AFTER UPDATE ON "main"."uuids"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'uuids\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'uuids\', \'UPDATE\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'id\', new."id"), json_object(\'id\', old."id"), NULL);\nEND;', + 'DROP TRIGGER IF EXISTS delete_main_uuids_into_oplog;', + 'CREATE TRIGGER delete_main_uuids_into_oplog\n AFTER DELETE ON "main"."uuids"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'uuids\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'uuids\', \'DELETE\', json_patch(\'{}\', json_object(\'id\', old."id")), NULL, json_object(\'id\', old."id"), NULL);\nEND;', + 'INSERT OR IGNORE INTO _electric_trigger_settings (namespace, tablename, flag) VALUES (\'main\', \'ints\', 1);', + 'DROP TRIGGER IF EXISTS update_ensure_main_ints_primarykey;', + 'CREATE TRIGGER 
update_ensure_main_ints_primarykey\n BEFORE UPDATE ON "main"."ints"\nBEGIN\n SELECT\n CASE\n WHEN old."id" != new."id" THEN\n RAISE (ABORT, \'cannot change the value of column id as it belongs to the primary key\')\n END;\nEND;', + 'DROP TRIGGER IF EXISTS insert_main_ints_into_oplog;', + 'CREATE TRIGGER insert_main_ints_into_oplog\n AFTER INSERT ON "main"."ints"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'ints\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'ints\', \'INSERT\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'i2\', new."i2", \'i4\', new."i4", \'i8\', cast(new."i8" as TEXT), \'id\', new."id"), NULL, NULL);\nEND;', + 'DROP TRIGGER IF EXISTS update_main_ints_into_oplog;', + 'CREATE TRIGGER update_main_ints_into_oplog\n AFTER UPDATE ON "main"."ints"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'ints\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'ints\', \'UPDATE\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'i2\', new."i2", \'i4\', new."i4", \'i8\', cast(new."i8" as TEXT), \'id\', new."id"), json_object(\'i2\', old."i2", \'i4\', old."i4", \'i8\', cast(old."i8" as TEXT), \'id\', old."id"), NULL);\nEND;', + 'DROP TRIGGER IF EXISTS delete_main_ints_into_oplog;', + 'CREATE TRIGGER delete_main_ints_into_oplog\n AFTER DELETE ON "main"."ints"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'ints\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'ints\', \'DELETE\', json_patch(\'{}\', json_object(\'id\', old."id")), NULL, json_object(\'i2\', old."i2", \'i4\', old."i4", \'i8\', cast(old."i8" as TEXT), \'id\', old."id"), NULL);\nEND;', + 'INSERT OR IGNORE INTO _electric_trigger_settings (namespace, tablename, flag) VALUES (\'main\', \'floats\', 1);', + 'DROP TRIGGER IF EXISTS update_ensure_main_floats_primarykey;', + 'CREATE TRIGGER update_ensure_main_floats_primarykey\n BEFORE UPDATE ON "main"."floats"\nBEGIN\n SELECT\n CASE\n WHEN old."id" != new."id" THEN\n RAISE (ABORT, \'cannot change the value of column id as it belongs to the primary key\')\n END;\nEND;', + 'DROP TRIGGER IF EXISTS insert_main_floats_into_oplog;', + 'CREATE TRIGGER insert_main_floats_into_oplog\n AFTER INSERT ON "main"."floats"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'floats\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'floats\', \'INSERT\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'f4\', cast(new."f4" as TEXT), \'f8\', cast(new."f8" as TEXT), \'id\', new."id"), NULL, NULL);\nEND;', + 'DROP TRIGGER IF EXISTS update_main_floats_into_oplog;', + 'CREATE TRIGGER update_main_floats_into_oplog\n AFTER UPDATE ON "main"."floats"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'floats\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'floats\', \'UPDATE\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'f4\', cast(new."f4" as TEXT), \'f8\', cast(new."f8" as TEXT), 
\'id\', new."id"), json_object(\'f4\', cast(old."f4" as TEXT), \'f8\', cast(old."f8" as TEXT), \'id\', old."id"), NULL);\nEND;', + 'DROP TRIGGER IF EXISTS delete_main_floats_into_oplog;', + 'CREATE TRIGGER delete_main_floats_into_oplog\n AFTER DELETE ON "main"."floats"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'floats\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'floats\', \'DELETE\', json_patch(\'{}\', json_object(\'id\', old."id")), NULL, json_object(\'f4\', cast(old."f4" as TEXT), \'f8\', cast(old."f8" as TEXT), \'id\', old."id"), NULL);\nEND;', + 'INSERT OR IGNORE INTO _electric_trigger_settings (namespace, tablename, flag) VALUES (\'main\', \'jsons\', 1);', + 'DROP TRIGGER IF EXISTS update_ensure_main_jsons_primarykey;', + 'CREATE TRIGGER update_ensure_main_jsons_primarykey\n BEFORE UPDATE ON "main"."jsons"\nBEGIN\n SELECT\n CASE\n WHEN old."id" != new."id" THEN\n RAISE (ABORT, \'cannot change the value of column id as it belongs to the primary key\')\n END;\nEND;', + 'DROP TRIGGER IF EXISTS insert_main_jsons_into_oplog;', + 'CREATE TRIGGER insert_main_jsons_into_oplog\n AFTER INSERT ON "main"."jsons"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'jsons\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'jsons\', \'INSERT\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'id\', new."id", \'jsb\', new."jsb"), NULL, NULL);\nEND;', + 'DROP TRIGGER IF EXISTS update_main_jsons_into_oplog;', + 'CREATE TRIGGER update_main_jsons_into_oplog\n AFTER UPDATE ON "main"."jsons"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'jsons\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'jsons\', \'UPDATE\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'id\', new."id", \'jsb\', new."jsb"), json_object(\'id\', old."id", \'jsb\', old."jsb"), NULL);\nEND;', + 'DROP TRIGGER IF EXISTS delete_main_jsons_into_oplog;', + 'CREATE TRIGGER delete_main_jsons_into_oplog\n AFTER DELETE ON "main"."jsons"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'jsons\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'jsons\', \'DELETE\', json_patch(\'{}\', json_object(\'id\', old."id")), NULL, json_object(\'id\', old."id", \'jsb\', old."jsb"), NULL);\nEND;', + 'INSERT OR IGNORE INTO _electric_trigger_settings (namespace, tablename, flag) VALUES (\'main\', \'enums\', 1);', + 'DROP TRIGGER IF EXISTS update_ensure_main_enums_primarykey;', + 'CREATE TRIGGER update_ensure_main_enums_primarykey\n BEFORE UPDATE ON "main"."enums"\nBEGIN\n SELECT\n CASE\n WHEN old."id" != new."id" THEN\n RAISE (ABORT, \'cannot change the value of column id as it belongs to the primary key\')\n END;\nEND;', + 'DROP TRIGGER IF EXISTS insert_main_enums_into_oplog;', + 'CREATE TRIGGER insert_main_enums_into_oplog\n AFTER INSERT ON "main"."enums"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'enums\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'enums\', 
\'INSERT\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'c\', new."c", \'id\', new."id"), NULL, NULL);\nEND;', + 'DROP TRIGGER IF EXISTS update_main_enums_into_oplog;', + 'CREATE TRIGGER update_main_enums_into_oplog\n AFTER UPDATE ON "main"."enums"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'enums\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'enums\', \'UPDATE\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'c\', new."c", \'id\', new."id"), json_object(\'c\', old."c", \'id\', old."id"), NULL);\nEND;', + 'DROP TRIGGER IF EXISTS delete_main_enums_into_oplog;', + 'CREATE TRIGGER delete_main_enums_into_oplog\n AFTER DELETE ON "main"."enums"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'enums\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'enums\', \'DELETE\', json_patch(\'{}\', json_object(\'id\', old."id")), NULL, json_object(\'c\', old."c", \'id\', old."id"), NULL);\nEND;', + 'INSERT OR IGNORE INTO _electric_trigger_settings (namespace, tablename, flag) VALUES (\'main\', \'blobs\', 1);', + 'DROP TRIGGER IF EXISTS update_ensure_main_blobs_primarykey;', + 'CREATE TRIGGER update_ensure_main_blobs_primarykey\n BEFORE UPDATE ON "main"."blobs"\nBEGIN\n SELECT\n CASE\n WHEN old."id" != new."id" THEN\n RAISE (ABORT, \'cannot change the value of column id as it belongs to the primary key\')\n END;\nEND;', + 'DROP TRIGGER IF EXISTS insert_main_blobs_into_oplog;', + 'CREATE TRIGGER insert_main_blobs_into_oplog\n AFTER INSERT ON "main"."blobs"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'blobs\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'blobs\', \'INSERT\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'blob\', CASE WHEN new."blob" IS NOT NULL THEN hex(new."blob") ELSE NULL END, \'id\', new."id"), NULL, NULL);\nEND;', + 'DROP TRIGGER IF EXISTS update_main_blobs_into_oplog;', + 'CREATE TRIGGER update_main_blobs_into_oplog\n AFTER UPDATE ON "main"."blobs"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'blobs\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'blobs\', \'UPDATE\', json_patch(\'{}\', json_object(\'id\', new."id")), json_object(\'blob\', CASE WHEN new."blob" IS NOT NULL THEN hex(new."blob") ELSE NULL END, \'id\', new."id"), json_object(\'blob\', CASE WHEN old."blob" IS NOT NULL THEN hex(old."blob") ELSE NULL END, \'id\', old."id"), NULL);\nEND;', + 'DROP TRIGGER IF EXISTS delete_main_blobs_into_oplog;', + 'CREATE TRIGGER delete_main_blobs_into_oplog\n AFTER DELETE ON "main"."blobs"\n WHEN 1 = (SELECT flag from _electric_trigger_settings WHERE namespace = \'main\' AND tablename = \'blobs\')\nBEGIN\n INSERT INTO _electric_oplog (namespace, tablename, optype, primaryKey, newRow, oldRow, timestamp)\n VALUES (\'main\', \'blobs\', \'DELETE\', json_patch(\'{}\', json_object(\'id\', old."id")), NULL, json_object(\'blob\', CASE WHEN old."blob" IS NOT NULL THEN hex(old."blob") ELSE NULL END, \'id\', old."id"), NULL);\nEND;', + ], + version: '20240626155106_189', + ) +]; diff --git 
a/e2e/satellite_client/lib/generated/electric/pg_migrations.dart b/e2e/satellite_client/lib/generated/electric/pg_migrations.dart new file mode 100644 index 00000000..fefcd8dd --- /dev/null +++ b/e2e/satellite_client/lib/generated/electric/pg_migrations.dart @@ -0,0 +1,175 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +// ignore_for_file: always_use_package_imports, depend_on_referenced_packages +// ignore_for_file: prefer_double_quotes, require_trailing_commas + +import 'package:electricsql/electricsql.dart'; + +const kPostgresMigrations = [ + Migration( + statements: [ + 'CREATE TABLE items (\n id text NOT NULL,\n content text NOT NULL,\n content_text_null text,\n content_text_null_default text,\n intvalue_null integer,\n intvalue_null_default integer,\n CONSTRAINT items_pkey PRIMARY KEY (id)\n)', + 'CREATE TABLE other_items (\n id text NOT NULL,\n content text NOT NULL,\n item_id text,\n CONSTRAINT other_items_pkey PRIMARY KEY (id),\n CONSTRAINT other_items_item_id_fkey FOREIGN KEY (item_id) REFERENCES items(id)\n)', + 'CREATE TABLE timestamps (\n id text NOT NULL,\n created_at timestamp without time zone NOT NULL,\n updated_at timestamp with time zone NOT NULL,\n CONSTRAINT timestamps_pkey PRIMARY KEY (id)\n)', + 'CREATE TABLE datetimes (\n id text NOT NULL,\n d date NOT NULL,\n t time without time zone NOT NULL,\n CONSTRAINT datetimes_pkey PRIMARY KEY (id)\n)', + 'CREATE TABLE bools (\n id text NOT NULL,\n b boolean,\n CONSTRAINT bools_pkey PRIMARY KEY (id)\n)', + 'CREATE TABLE uuids (\n id uuid NOT NULL,\n CONSTRAINT uuids_pkey PRIMARY KEY (id)\n)', + 'CREATE TABLE ints (\n id text NOT NULL,\n i2 smallint,\n i4 integer,\n i8 bigint,\n CONSTRAINT ints_pkey PRIMARY KEY (id)\n)', + 'CREATE TABLE floats (\n id text NOT NULL,\n f4 real,\n f8 double precision,\n CONSTRAINT floats_pkey PRIMARY KEY (id)\n)', + 'CREATE TABLE jsons (\n id text NOT NULL,\n jsb jsonb,\n CONSTRAINT jsons_pkey PRIMARY KEY (id)\n)', + 'CREATE TYPE "Color" AS ENUM (\n \'RED\',\n \'GREEN\',\n \'BLUE\'\n)', + 'CREATE TABLE enums (\n id text NOT NULL,\n c "Color",\n CONSTRAINT enums_pkey PRIMARY KEY (id)\n)', + 'CREATE TABLE blobs (\n id text NOT NULL,\n blob bytea,\n CONSTRAINT blobs_pkey PRIMARY KEY (id)\n)', + 'INSERT INTO "public"."_electric_trigger_settings" ("namespace", "tablename", "flag")\nVALUES (\'public\', \'items\', 1)\nON CONFLICT DO NOTHING;\n', + 'DROP TRIGGER IF EXISTS update_ensure_public_items_primarykey ON "public"."items";', + ' CREATE OR REPLACE FUNCTION update_ensure_public_items_primarykey_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n IF OLD."id" IS DISTINCT FROM NEW."id" THEN\n RAISE EXCEPTION \'Cannot change the value of column id as it belongs to the primary key\';\n END IF;\n RETURN NEW;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_ensure_public_items_primarykey\n BEFORE UPDATE ON "public"."items"\n FOR EACH ROW\n EXECUTE FUNCTION update_ensure_public_items_primarykey_function();\n ', + 'DROP TRIGGER IF EXISTS insert_public_items_into_oplog ON "public"."items";', + ' CREATE OR REPLACE FUNCTION insert_public_items_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'items\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES 
(\n \'public\',\n \'items\',\n \'INSERT\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'content\', new."content", \'content_text_null\', new."content_text_null", \'content_text_null_default\', new."content_text_null_default", \'id\', new."id", \'intvalue_null\', new."intvalue_null", \'intvalue_null_default\', new."intvalue_null_default"),\n NULL,\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER insert_public_items_into_oplog\n AFTER INSERT ON "public"."items"\n FOR EACH ROW\n EXECUTE FUNCTION insert_public_items_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS update_public_items_into_oplog ON "public"."items";', + ' CREATE OR REPLACE FUNCTION update_public_items_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'items\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'items\',\n \'UPDATE\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'content\', new."content", \'content_text_null\', new."content_text_null", \'content_text_null_default\', new."content_text_null_default", \'id\', new."id", \'intvalue_null\', new."intvalue_null", \'intvalue_null_default\', new."intvalue_null_default"),\n jsonb_build_object(\'content\', old."content", \'content_text_null\', old."content_text_null", \'content_text_null_default\', old."content_text_null_default", \'id\', old."id", \'intvalue_null\', old."intvalue_null", \'intvalue_null_default\', old."intvalue_null_default"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_public_items_into_oplog\n AFTER UPDATE ON "public"."items"\n FOR EACH ROW\n EXECUTE FUNCTION update_public_items_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS delete_public_items_into_oplog ON "public"."items";', + ' CREATE OR REPLACE FUNCTION delete_public_items_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'items\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'items\',\n \'DELETE\',\n json_strip_nulls(json_build_object(\'id\', old."id")),\n NULL,\n jsonb_build_object(\'content\', old."content", \'content_text_null\', old."content_text_null", \'content_text_null_default\', old."content_text_null_default", \'id\', old."id", \'intvalue_null\', old."intvalue_null", \'intvalue_null_default\', old."intvalue_null_default"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER delete_public_items_into_oplog\n AFTER DELETE ON "public"."items"\n FOR EACH ROW\n EXECUTE FUNCTION delete_public_items_into_oplog_function();\n ', + 'INSERT INTO "public"."_electric_trigger_settings" ("namespace", "tablename", "flag")\nVALUES (\'public\', \'other_items\', 1)\nON CONFLICT DO NOTHING;\n', + 'DROP TRIGGER IF EXISTS 
update_ensure_public_other_items_primarykey ON "public"."other_items";', + ' CREATE OR REPLACE FUNCTION update_ensure_public_other_items_primarykey_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n IF OLD."id" IS DISTINCT FROM NEW."id" THEN\n RAISE EXCEPTION \'Cannot change the value of column id as it belongs to the primary key\';\n END IF;\n RETURN NEW;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_ensure_public_other_items_primarykey\n BEFORE UPDATE ON "public"."other_items"\n FOR EACH ROW\n EXECUTE FUNCTION update_ensure_public_other_items_primarykey_function();\n ', + 'DROP TRIGGER IF EXISTS insert_public_other_items_into_oplog ON "public"."other_items";', + ' CREATE OR REPLACE FUNCTION insert_public_other_items_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'other_items\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'other_items\',\n \'INSERT\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'content\', new."content", \'id\', new."id", \'item_id\', new."item_id"),\n NULL,\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER insert_public_other_items_into_oplog\n AFTER INSERT ON "public"."other_items"\n FOR EACH ROW\n EXECUTE FUNCTION insert_public_other_items_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS update_public_other_items_into_oplog ON "public"."other_items";', + ' CREATE OR REPLACE FUNCTION update_public_other_items_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'other_items\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'other_items\',\n \'UPDATE\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'content\', new."content", \'id\', new."id", \'item_id\', new."item_id"),\n jsonb_build_object(\'content\', old."content", \'id\', old."id", \'item_id\', old."item_id"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_public_other_items_into_oplog\n AFTER UPDATE ON "public"."other_items"\n FOR EACH ROW\n EXECUTE FUNCTION update_public_other_items_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS delete_public_other_items_into_oplog ON "public"."other_items";', + ' CREATE OR REPLACE FUNCTION delete_public_other_items_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'other_items\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'other_items\',\n \'DELETE\',\n 
json_strip_nulls(json_build_object(\'id\', old."id")),\n NULL,\n jsonb_build_object(\'content\', old."content", \'id\', old."id", \'item_id\', old."item_id"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER delete_public_other_items_into_oplog\n AFTER DELETE ON "public"."other_items"\n FOR EACH ROW\n EXECUTE FUNCTION delete_public_other_items_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS compensation_insert_public_other_items_item_id_into_oplog ON "public"."other_items";', + ' CREATE OR REPLACE FUNCTION compensation_insert_public_other_items_item_id_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n meta_value INTEGER;\n BEGIN\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'other_items\';\n \n SELECT value INTO meta_value FROM "public"._electric_meta WHERE key = \'compensations\';\n \n IF flag_value = 1 AND meta_value = 1 THEN\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n SELECT\n \'public\',\n \'items\',\n \'COMPENSATION\',\n json_strip_nulls(json_strip_nulls(json_build_object(\'id\', "id"))),\n jsonb_build_object(\'id\', "id"),\n NULL,\n NULL\n FROM "public"."items"\n WHERE "id" = NEW."item_id";\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER compensation_insert_public_other_items_item_id_into_oplog\n AFTER INSERT ON "public"."other_items"\n FOR EACH ROW\n EXECUTE FUNCTION compensation_insert_public_other_items_item_id_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS compensation_update_public_other_items_item_id_into_oplog ON "public"."other_items";', + ' CREATE OR REPLACE FUNCTION compensation_update_public_other_items_item_id_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n meta_value INTEGER;\n BEGIN\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'other_items\';\n \n SELECT value INTO meta_value FROM "public"._electric_meta WHERE key = \'compensations\';\n \n IF flag_value = 1 AND meta_value = 1 THEN\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n SELECT\n \'public\',\n \'items\',\n \'COMPENSATION\',\n json_strip_nulls(json_strip_nulls(json_build_object(\'id\', "id"))),\n jsonb_build_object(\'id\', "id"),\n NULL,\n NULL\n FROM "public"."items"\n WHERE "id" = NEW."item_id";\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER compensation_update_public_other_items_item_id_into_oplog\n AFTER UPDATE ON "public"."other_items"\n FOR EACH ROW\n EXECUTE FUNCTION compensation_update_public_other_items_item_id_into_oplog_function();\n ', + 'INSERT INTO "public"."_electric_trigger_settings" ("namespace", "tablename", "flag")\nVALUES (\'public\', \'timestamps\', 1)\nON CONFLICT DO NOTHING;\n', + 'DROP TRIGGER IF EXISTS update_ensure_public_timestamps_primarykey ON "public"."timestamps";', + ' CREATE OR REPLACE FUNCTION update_ensure_public_timestamps_primarykey_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n IF OLD."id" IS DISTINCT FROM NEW."id" THEN\n RAISE EXCEPTION \'Cannot change the value of column id as it belongs to the primary key\';\n END IF;\n RETURN NEW;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_ensure_public_timestamps_primarykey\n BEFORE UPDATE ON 
"public"."timestamps"\n FOR EACH ROW\n EXECUTE FUNCTION update_ensure_public_timestamps_primarykey_function();\n ', + 'DROP TRIGGER IF EXISTS insert_public_timestamps_into_oplog ON "public"."timestamps";', + ' CREATE OR REPLACE FUNCTION insert_public_timestamps_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'timestamps\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'timestamps\',\n \'INSERT\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'created_at\', new."created_at", \'id\', new."id", \'updated_at\', new."updated_at"),\n NULL,\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER insert_public_timestamps_into_oplog\n AFTER INSERT ON "public"."timestamps"\n FOR EACH ROW\n EXECUTE FUNCTION insert_public_timestamps_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS update_public_timestamps_into_oplog ON "public"."timestamps";', + ' CREATE OR REPLACE FUNCTION update_public_timestamps_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'timestamps\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'timestamps\',\n \'UPDATE\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'created_at\', new."created_at", \'id\', new."id", \'updated_at\', new."updated_at"),\n jsonb_build_object(\'created_at\', old."created_at", \'id\', old."id", \'updated_at\', old."updated_at"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_public_timestamps_into_oplog\n AFTER UPDATE ON "public"."timestamps"\n FOR EACH ROW\n EXECUTE FUNCTION update_public_timestamps_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS delete_public_timestamps_into_oplog ON "public"."timestamps";', + ' CREATE OR REPLACE FUNCTION delete_public_timestamps_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'timestamps\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'timestamps\',\n \'DELETE\',\n json_strip_nulls(json_build_object(\'id\', old."id")),\n NULL,\n jsonb_build_object(\'created_at\', old."created_at", \'id\', old."id", \'updated_at\', old."updated_at"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER delete_public_timestamps_into_oplog\n AFTER DELETE ON "public"."timestamps"\n FOR EACH ROW\n EXECUTE FUNCTION delete_public_timestamps_into_oplog_function();\n ', + 'INSERT INTO 
"public"."_electric_trigger_settings" ("namespace", "tablename", "flag")\nVALUES (\'public\', \'datetimes\', 1)\nON CONFLICT DO NOTHING;\n', + 'DROP TRIGGER IF EXISTS update_ensure_public_datetimes_primarykey ON "public"."datetimes";', + ' CREATE OR REPLACE FUNCTION update_ensure_public_datetimes_primarykey_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n IF OLD."id" IS DISTINCT FROM NEW."id" THEN\n RAISE EXCEPTION \'Cannot change the value of column id as it belongs to the primary key\';\n END IF;\n RETURN NEW;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_ensure_public_datetimes_primarykey\n BEFORE UPDATE ON "public"."datetimes"\n FOR EACH ROW\n EXECUTE FUNCTION update_ensure_public_datetimes_primarykey_function();\n ', + 'DROP TRIGGER IF EXISTS insert_public_datetimes_into_oplog ON "public"."datetimes";', + ' CREATE OR REPLACE FUNCTION insert_public_datetimes_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'datetimes\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'datetimes\',\n \'INSERT\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'d\', new."d", \'id\', new."id", \'t\', new."t"),\n NULL,\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER insert_public_datetimes_into_oplog\n AFTER INSERT ON "public"."datetimes"\n FOR EACH ROW\n EXECUTE FUNCTION insert_public_datetimes_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS update_public_datetimes_into_oplog ON "public"."datetimes";', + ' CREATE OR REPLACE FUNCTION update_public_datetimes_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'datetimes\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'datetimes\',\n \'UPDATE\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'d\', new."d", \'id\', new."id", \'t\', new."t"),\n jsonb_build_object(\'d\', old."d", \'id\', old."id", \'t\', old."t"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_public_datetimes_into_oplog\n AFTER UPDATE ON "public"."datetimes"\n FOR EACH ROW\n EXECUTE FUNCTION update_public_datetimes_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS delete_public_datetimes_into_oplog ON "public"."datetimes";', + ' CREATE OR REPLACE FUNCTION delete_public_datetimes_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'datetimes\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n 
\'datetimes\',\n \'DELETE\',\n json_strip_nulls(json_build_object(\'id\', old."id")),\n NULL,\n jsonb_build_object(\'d\', old."d", \'id\', old."id", \'t\', old."t"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER delete_public_datetimes_into_oplog\n AFTER DELETE ON "public"."datetimes"\n FOR EACH ROW\n EXECUTE FUNCTION delete_public_datetimes_into_oplog_function();\n ', + 'INSERT INTO "public"."_electric_trigger_settings" ("namespace", "tablename", "flag")\nVALUES (\'public\', \'bools\', 1)\nON CONFLICT DO NOTHING;\n', + 'DROP TRIGGER IF EXISTS update_ensure_public_bools_primarykey ON "public"."bools";', + ' CREATE OR REPLACE FUNCTION update_ensure_public_bools_primarykey_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n IF OLD."id" IS DISTINCT FROM NEW."id" THEN\n RAISE EXCEPTION \'Cannot change the value of column id as it belongs to the primary key\';\n END IF;\n RETURN NEW;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_ensure_public_bools_primarykey\n BEFORE UPDATE ON "public"."bools"\n FOR EACH ROW\n EXECUTE FUNCTION update_ensure_public_bools_primarykey_function();\n ', + 'DROP TRIGGER IF EXISTS insert_public_bools_into_oplog ON "public"."bools";', + ' CREATE OR REPLACE FUNCTION insert_public_bools_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'bools\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'bools\',\n \'INSERT\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'b\', new."b", \'id\', new."id"),\n NULL,\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER insert_public_bools_into_oplog\n AFTER INSERT ON "public"."bools"\n FOR EACH ROW\n EXECUTE FUNCTION insert_public_bools_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS update_public_bools_into_oplog ON "public"."bools";', + ' CREATE OR REPLACE FUNCTION update_public_bools_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'bools\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'bools\',\n \'UPDATE\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'b\', new."b", \'id\', new."id"),\n jsonb_build_object(\'b\', old."b", \'id\', old."id"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_public_bools_into_oplog\n AFTER UPDATE ON "public"."bools"\n FOR EACH ROW\n EXECUTE FUNCTION update_public_bools_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS delete_public_bools_into_oplog ON "public"."bools";', + ' CREATE OR REPLACE FUNCTION delete_public_bools_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM 
"public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'bools\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'bools\',\n \'DELETE\',\n json_strip_nulls(json_build_object(\'id\', old."id")),\n NULL,\n jsonb_build_object(\'b\', old."b", \'id\', old."id"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER delete_public_bools_into_oplog\n AFTER DELETE ON "public"."bools"\n FOR EACH ROW\n EXECUTE FUNCTION delete_public_bools_into_oplog_function();\n ', + 'INSERT INTO "public"."_electric_trigger_settings" ("namespace", "tablename", "flag")\nVALUES (\'public\', \'uuids\', 1)\nON CONFLICT DO NOTHING;\n', + 'DROP TRIGGER IF EXISTS update_ensure_public_uuids_primarykey ON "public"."uuids";', + ' CREATE OR REPLACE FUNCTION update_ensure_public_uuids_primarykey_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n IF OLD."id" IS DISTINCT FROM NEW."id" THEN\n RAISE EXCEPTION \'Cannot change the value of column id as it belongs to the primary key\';\n END IF;\n RETURN NEW;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_ensure_public_uuids_primarykey\n BEFORE UPDATE ON "public"."uuids"\n FOR EACH ROW\n EXECUTE FUNCTION update_ensure_public_uuids_primarykey_function();\n ', + 'DROP TRIGGER IF EXISTS insert_public_uuids_into_oplog ON "public"."uuids";', + ' CREATE OR REPLACE FUNCTION insert_public_uuids_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'uuids\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'uuids\',\n \'INSERT\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'id\', new."id"),\n NULL,\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER insert_public_uuids_into_oplog\n AFTER INSERT ON "public"."uuids"\n FOR EACH ROW\n EXECUTE FUNCTION insert_public_uuids_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS update_public_uuids_into_oplog ON "public"."uuids";', + ' CREATE OR REPLACE FUNCTION update_public_uuids_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'uuids\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'uuids\',\n \'UPDATE\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'id\', new."id"),\n jsonb_build_object(\'id\', old."id"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_public_uuids_into_oplog\n AFTER UPDATE ON "public"."uuids"\n FOR EACH ROW\n EXECUTE FUNCTION update_public_uuids_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS delete_public_uuids_into_oplog ON "public"."uuids";', + ' CREATE OR REPLACE FUNCTION 
delete_public_uuids_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'uuids\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'uuids\',\n \'DELETE\',\n json_strip_nulls(json_build_object(\'id\', old."id")),\n NULL,\n jsonb_build_object(\'id\', old."id"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER delete_public_uuids_into_oplog\n AFTER DELETE ON "public"."uuids"\n FOR EACH ROW\n EXECUTE FUNCTION delete_public_uuids_into_oplog_function();\n ', + 'INSERT INTO "public"."_electric_trigger_settings" ("namespace", "tablename", "flag")\nVALUES (\'public\', \'ints\', 1)\nON CONFLICT DO NOTHING;\n', + 'DROP TRIGGER IF EXISTS update_ensure_public_ints_primarykey ON "public"."ints";', + ' CREATE OR REPLACE FUNCTION update_ensure_public_ints_primarykey_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n IF OLD."id" IS DISTINCT FROM NEW."id" THEN\n RAISE EXCEPTION \'Cannot change the value of column id as it belongs to the primary key\';\n END IF;\n RETURN NEW;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_ensure_public_ints_primarykey\n BEFORE UPDATE ON "public"."ints"\n FOR EACH ROW\n EXECUTE FUNCTION update_ensure_public_ints_primarykey_function();\n ', + 'DROP TRIGGER IF EXISTS insert_public_ints_into_oplog ON "public"."ints";', + ' CREATE OR REPLACE FUNCTION insert_public_ints_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'ints\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'ints\',\n \'INSERT\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'i2\', new."i2", \'i4\', new."i4", \'i8\', cast(new."i8" as TEXT), \'id\', new."id"),\n NULL,\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER insert_public_ints_into_oplog\n AFTER INSERT ON "public"."ints"\n FOR EACH ROW\n EXECUTE FUNCTION insert_public_ints_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS update_public_ints_into_oplog ON "public"."ints";', + ' CREATE OR REPLACE FUNCTION update_public_ints_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'ints\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'ints\',\n \'UPDATE\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'i2\', new."i2", \'i4\', new."i4", \'i8\', cast(new."i8" as TEXT), \'id\', new."id"),\n jsonb_build_object(\'i2\', old."i2", \'i4\', old."i4", \'i8\', cast(old."i8" as TEXT), 
\'id\', old."id"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_public_ints_into_oplog\n AFTER UPDATE ON "public"."ints"\n FOR EACH ROW\n EXECUTE FUNCTION update_public_ints_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS delete_public_ints_into_oplog ON "public"."ints";', + ' CREATE OR REPLACE FUNCTION delete_public_ints_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'ints\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'ints\',\n \'DELETE\',\n json_strip_nulls(json_build_object(\'id\', old."id")),\n NULL,\n jsonb_build_object(\'i2\', old."i2", \'i4\', old."i4", \'i8\', cast(old."i8" as TEXT), \'id\', old."id"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER delete_public_ints_into_oplog\n AFTER DELETE ON "public"."ints"\n FOR EACH ROW\n EXECUTE FUNCTION delete_public_ints_into_oplog_function();\n ', + 'INSERT INTO "public"."_electric_trigger_settings" ("namespace", "tablename", "flag")\nVALUES (\'public\', \'floats\', 1)\nON CONFLICT DO NOTHING;\n', + 'DROP TRIGGER IF EXISTS update_ensure_public_floats_primarykey ON "public"."floats";', + ' CREATE OR REPLACE FUNCTION update_ensure_public_floats_primarykey_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n IF OLD."id" IS DISTINCT FROM NEW."id" THEN\n RAISE EXCEPTION \'Cannot change the value of column id as it belongs to the primary key\';\n END IF;\n RETURN NEW;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_ensure_public_floats_primarykey\n BEFORE UPDATE ON "public"."floats"\n FOR EACH ROW\n EXECUTE FUNCTION update_ensure_public_floats_primarykey_function();\n ', + 'DROP TRIGGER IF EXISTS insert_public_floats_into_oplog ON "public"."floats";', + ' CREATE OR REPLACE FUNCTION insert_public_floats_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'floats\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'floats\',\n \'INSERT\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'f4\', cast(new."f4" as TEXT), \'f8\', cast(new."f8" as TEXT), \'id\', new."id"),\n NULL,\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER insert_public_floats_into_oplog\n AFTER INSERT ON "public"."floats"\n FOR EACH ROW\n EXECUTE FUNCTION insert_public_floats_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS update_public_floats_into_oplog ON "public"."floats";', + ' CREATE OR REPLACE FUNCTION update_public_floats_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'floats\';\n \n IF 
flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'floats\',\n \'UPDATE\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'f4\', cast(new."f4" as TEXT), \'f8\', cast(new."f8" as TEXT), \'id\', new."id"),\n jsonb_build_object(\'f4\', cast(old."f4" as TEXT), \'f8\', cast(old."f8" as TEXT), \'id\', old."id"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_public_floats_into_oplog\n AFTER UPDATE ON "public"."floats"\n FOR EACH ROW\n EXECUTE FUNCTION update_public_floats_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS delete_public_floats_into_oplog ON "public"."floats";', + ' CREATE OR REPLACE FUNCTION delete_public_floats_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'floats\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'floats\',\n \'DELETE\',\n json_strip_nulls(json_build_object(\'id\', old."id")),\n NULL,\n jsonb_build_object(\'f4\', cast(old."f4" as TEXT), \'f8\', cast(old."f8" as TEXT), \'id\', old."id"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER delete_public_floats_into_oplog\n AFTER DELETE ON "public"."floats"\n FOR EACH ROW\n EXECUTE FUNCTION delete_public_floats_into_oplog_function();\n ', + 'INSERT INTO "public"."_electric_trigger_settings" ("namespace", "tablename", "flag")\nVALUES (\'public\', \'jsons\', 1)\nON CONFLICT DO NOTHING;\n', + 'DROP TRIGGER IF EXISTS update_ensure_public_jsons_primarykey ON "public"."jsons";', + ' CREATE OR REPLACE FUNCTION update_ensure_public_jsons_primarykey_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n IF OLD."id" IS DISTINCT FROM NEW."id" THEN\n RAISE EXCEPTION \'Cannot change the value of column id as it belongs to the primary key\';\n END IF;\n RETURN NEW;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_ensure_public_jsons_primarykey\n BEFORE UPDATE ON "public"."jsons"\n FOR EACH ROW\n EXECUTE FUNCTION update_ensure_public_jsons_primarykey_function();\n ', + 'DROP TRIGGER IF EXISTS insert_public_jsons_into_oplog ON "public"."jsons";', + ' CREATE OR REPLACE FUNCTION insert_public_jsons_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'jsons\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'jsons\',\n \'INSERT\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'id\', new."id", \'jsb\', new."jsb"),\n NULL,\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER insert_public_jsons_into_oplog\n AFTER INSERT ON "public"."jsons"\n FOR EACH ROW\n EXECUTE FUNCTION insert_public_jsons_into_oplog_function();\n ', + 
'DROP TRIGGER IF EXISTS update_public_jsons_into_oplog ON "public"."jsons";', + ' CREATE OR REPLACE FUNCTION update_public_jsons_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'jsons\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'jsons\',\n \'UPDATE\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'id\', new."id", \'jsb\', new."jsb"),\n jsonb_build_object(\'id\', old."id", \'jsb\', old."jsb"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_public_jsons_into_oplog\n AFTER UPDATE ON "public"."jsons"\n FOR EACH ROW\n EXECUTE FUNCTION update_public_jsons_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS delete_public_jsons_into_oplog ON "public"."jsons";', + ' CREATE OR REPLACE FUNCTION delete_public_jsons_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'jsons\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'jsons\',\n \'DELETE\',\n json_strip_nulls(json_build_object(\'id\', old."id")),\n NULL,\n jsonb_build_object(\'id\', old."id", \'jsb\', old."jsb"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER delete_public_jsons_into_oplog\n AFTER DELETE ON "public"."jsons"\n FOR EACH ROW\n EXECUTE FUNCTION delete_public_jsons_into_oplog_function();\n ', + 'INSERT INTO "public"."_electric_trigger_settings" ("namespace", "tablename", "flag")\nVALUES (\'public\', \'enums\', 1)\nON CONFLICT DO NOTHING;\n', + 'DROP TRIGGER IF EXISTS update_ensure_public_enums_primarykey ON "public"."enums";', + ' CREATE OR REPLACE FUNCTION update_ensure_public_enums_primarykey_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n IF OLD."id" IS DISTINCT FROM NEW."id" THEN\n RAISE EXCEPTION \'Cannot change the value of column id as it belongs to the primary key\';\n END IF;\n RETURN NEW;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_ensure_public_enums_primarykey\n BEFORE UPDATE ON "public"."enums"\n FOR EACH ROW\n EXECUTE FUNCTION update_ensure_public_enums_primarykey_function();\n ', + 'DROP TRIGGER IF EXISTS insert_public_enums_into_oplog ON "public"."enums";', + ' CREATE OR REPLACE FUNCTION insert_public_enums_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'enums\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'enums\',\n \'INSERT\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'c\', new."c", \'id\', 
new."id"),\n NULL,\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER insert_public_enums_into_oplog\n AFTER INSERT ON "public"."enums"\n FOR EACH ROW\n EXECUTE FUNCTION insert_public_enums_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS update_public_enums_into_oplog ON "public"."enums";', + ' CREATE OR REPLACE FUNCTION update_public_enums_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'enums\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'enums\',\n \'UPDATE\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'c\', new."c", \'id\', new."id"),\n jsonb_build_object(\'c\', old."c", \'id\', old."id"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_public_enums_into_oplog\n AFTER UPDATE ON "public"."enums"\n FOR EACH ROW\n EXECUTE FUNCTION update_public_enums_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS delete_public_enums_into_oplog ON "public"."enums";', + ' CREATE OR REPLACE FUNCTION delete_public_enums_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'enums\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'enums\',\n \'DELETE\',\n json_strip_nulls(json_build_object(\'id\', old."id")),\n NULL,\n jsonb_build_object(\'c\', old."c", \'id\', old."id"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER delete_public_enums_into_oplog\n AFTER DELETE ON "public"."enums"\n FOR EACH ROW\n EXECUTE FUNCTION delete_public_enums_into_oplog_function();\n ', + 'INSERT INTO "public"."_electric_trigger_settings" ("namespace", "tablename", "flag")\nVALUES (\'public\', \'blobs\', 1)\nON CONFLICT DO NOTHING;\n', + 'DROP TRIGGER IF EXISTS update_ensure_public_blobs_primarykey ON "public"."blobs";', + ' CREATE OR REPLACE FUNCTION update_ensure_public_blobs_primarykey_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n IF OLD."id" IS DISTINCT FROM NEW."id" THEN\n RAISE EXCEPTION \'Cannot change the value of column id as it belongs to the primary key\';\n END IF;\n RETURN NEW;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_ensure_public_blobs_primarykey\n BEFORE UPDATE ON "public"."blobs"\n FOR EACH ROW\n EXECUTE FUNCTION update_ensure_public_blobs_primarykey_function();\n ', + 'DROP TRIGGER IF EXISTS insert_public_blobs_into_oplog ON "public"."blobs";', + ' CREATE OR REPLACE FUNCTION insert_public_blobs_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'blobs\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO 
"public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'blobs\',\n \'INSERT\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'blob\', CASE WHEN new."blob" IS NOT NULL THEN encode(new."blob"::bytea, \'hex\') ELSE NULL END, \'id\', new."id"),\n NULL,\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER insert_public_blobs_into_oplog\n AFTER INSERT ON "public"."blobs"\n FOR EACH ROW\n EXECUTE FUNCTION insert_public_blobs_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS update_public_blobs_into_oplog ON "public"."blobs";', + ' CREATE OR REPLACE FUNCTION update_public_blobs_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'blobs\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'blobs\',\n \'UPDATE\',\n json_strip_nulls(json_build_object(\'id\', new."id")),\n jsonb_build_object(\'blob\', CASE WHEN new."blob" IS NOT NULL THEN encode(new."blob"::bytea, \'hex\') ELSE NULL END, \'id\', new."id"),\n jsonb_build_object(\'blob\', CASE WHEN old."blob" IS NOT NULL THEN encode(old."blob"::bytea, \'hex\') ELSE NULL END, \'id\', old."id"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER update_public_blobs_into_oplog\n AFTER UPDATE ON "public"."blobs"\n FOR EACH ROW\n EXECUTE FUNCTION update_public_blobs_into_oplog_function();\n ', + 'DROP TRIGGER IF EXISTS delete_public_blobs_into_oplog ON "public"."blobs";', + ' CREATE OR REPLACE FUNCTION delete_public_blobs_into_oplog_function()\n RETURNS TRIGGER AS \$\$\n BEGIN\n DECLARE\n flag_value INTEGER;\n BEGIN\n -- Get the flag value from _electric_trigger_settings\n SELECT flag INTO flag_value FROM "public"._electric_trigger_settings WHERE namespace = \'public\' AND tablename = \'blobs\';\n \n IF flag_value = 1 THEN\n -- Insert into _electric_oplog\n INSERT INTO "public"._electric_oplog (namespace, tablename, optype, "primaryKey", "newRow", "oldRow", timestamp)\n VALUES (\n \'public\',\n \'blobs\',\n \'DELETE\',\n json_strip_nulls(json_build_object(\'id\', old."id")),\n NULL,\n jsonb_build_object(\'blob\', CASE WHEN old."blob" IS NOT NULL THEN encode(old."blob"::bytea, \'hex\') ELSE NULL END, \'id\', old."id"),\n NULL\n );\n END IF;\n \n RETURN NEW;\n END;\n END;\n \$\$ LANGUAGE plpgsql;\n ', + ' CREATE TRIGGER delete_public_blobs_into_oplog\n AFTER DELETE ON "public"."blobs"\n FOR EACH ROW\n EXECUTE FUNCTION delete_public_blobs_into_oplog_function();\n ', + ], + version: '20240626155106_189', + ) +]; diff --git a/e2e/satellite_client/lib/generated/electric/schema.dart b/e2e/satellite_client/lib/generated/electric/schema.dart new file mode 100644 index 00000000..66858042 --- /dev/null +++ b/e2e/satellite_client/lib/generated/electric/schema.dart @@ -0,0 +1,118 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +// ignore_for_file: always_use_package_imports, depend_on_referenced_packages +// ignore_for_file: prefer_double_quotes, require_trailing_commas + +import 'package:electricsql/electricsql.dart'; + +import './migrations.dart'; +import './pg_migrations.dart'; + +const 
kElectricMigrations = ElectricMigrations( + sqliteMigrations: kSqliteMigrations, + pgMigrations: kPostgresMigrations, +); +const DBSchema kDbSchema = DBSchemaRaw( + tableSchemas: { + 'items': TableSchema( + fields: { + 'id': PgType.text, + 'content': PgType.text, + 'content_text_null': PgType.text, + 'content_text_null_default': PgType.text, + 'intvalue_null': PgType.int4, + 'intvalue_null_default': PgType.int4, + }, + relations: [ + Relation( + fromField: '', + toField: '', + relationName: 'other_items_item_idToitems', + relatedTable: 'other_items', + ) + ], + ), + 'other_items': TableSchema( + fields: { + 'id': PgType.text, + 'content': PgType.text, + 'item_id': PgType.text, + }, + relations: [ + Relation( + fromField: 'item_id', + toField: 'id', + relationName: 'other_items_item_idToitems', + relatedTable: 'items', + ) + ], + ), + 'timestamps': TableSchema( + fields: { + 'id': PgType.text, + 'created_at': PgType.timestamp, + 'updated_at': PgType.timestampTz, + }, + relations: [], + ), + 'datetimes': TableSchema( + fields: { + 'id': PgType.text, + 'd': PgType.date, + 't': PgType.time, + }, + relations: [], + ), + 'bools': TableSchema( + fields: { + 'id': PgType.text, + 'b': PgType.bool, + }, + relations: [], + ), + 'uuids': TableSchema( + fields: {'id': PgType.uuid}, + relations: [], + ), + 'ints': TableSchema( + fields: { + 'id': PgType.text, + 'i2': PgType.int2, + 'i4': PgType.int4, + 'i8': PgType.int8, + }, + relations: [], + ), + 'floats': TableSchema( + fields: { + 'id': PgType.text, + 'f4': PgType.float4, + 'f8': PgType.float8, + }, + relations: [], + ), + 'jsons': TableSchema( + fields: { + 'id': PgType.text, + 'jsb': PgType.jsonb, + }, + relations: [], + ), + 'enums': TableSchema( + fields: { + 'id': PgType.text, + 'c': PgType.text, + }, + relations: [], + ), + 'blobs': TableSchema( + fields: { + 'id': PgType.text, + 'blob': PgType.bytea, + }, + relations: [], + ), + }, + migrations: kSqliteMigrations, + pgMigrations: kPostgresMigrations, +); diff --git a/e2e/satellite_client/lib/util/json.dart b/e2e/satellite_client/lib/util/json.dart index b815e500..e7f6cfa1 100644 --- a/e2e/satellite_client/lib/util/json.dart +++ b/e2e/satellite_client/lib/util/json.dart @@ -40,3 +40,15 @@ Migration _migrationFromJson(Map m) { version: version, ); } + +Map? toEncodableMap(Map? o) { + return o?.map((key, value) { + final Object? effectiveValue; + if (value is DateTime) { + effectiveValue = value.toIso8601String(); + } else { + effectiveValue = value; + } + return MapEntry(key, effectiveValue); + }); +}
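
The `toEncodableMap` helper added above exists because `jsonEncode` cannot serialize `DateTime` values, and rows read back through the client can contain them (the timestamps and datetimes tables), so the helper rewrites such values to ISO-8601 strings before encoding. A minimal usage sketch; the row literal here is hypothetical, not taken from the test suite:

import 'dart:convert';

import 'package:satellite_dart_client/util/json.dart';

void main() {
  // Without the helper, jsonEncode would throw on the DateTime value.
  final row = <String, Object?>{
    'id': 'ts1',
    'created_at': DateTime.utc(2024, 6, 26, 15, 51, 6),
  };
  // Top-level DateTime values become ISO-8601 strings; everything else
  // is passed through unchanged.
  print(jsonEncode(toEncodableMap(row)));
}
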
- -import 'dart:io'; - -import 'package:electricsql_cli/electricsql_cli.dart'; -import 'package:path/path.dart'; - -final Directory projectDir = - Directory(join(File(Platform.script.toFilePath()).parent.path, "..")) - .absolute; - -Future main() async { - final prismaSchemaFile = File(join(projectDir.path, "prisma/schema.prisma")); - - final prismaSchemaContent = prismaSchemaFile.readAsStringSync(); - - final schemaInfo = extractInfoFromPrismaSchema( - prismaSchemaContent, - genOpts: E2EDriftGenOpts(), - ); - - final driftSchemaFile = - File(join(projectDir.path, "lib/generated/electric/drift_schema.dart")); - await buildDriftSchemaDartFile(schemaInfo, driftSchemaFile); - print("Code generated!"); -} - -class E2EDriftGenOpts extends ElectricDriftGenOpts { - /// We want BigInts in E2E, because int8 with regular int is equivalent to - /// the int4 test suite - @override - bool? get int8AsBigInt => true; -} diff --git a/e2e/satellite_client/tool/generate_electric_code.sh b/e2e/satellite_client/tool/generate_electric_code.sh new file mode 100755 index 00000000..59370709 --- /dev/null +++ b/e2e/satellite_client/tool/generate_electric_code.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +# We spawn a temporary ElectricSQL instance to introspect and generate the E2E client code + +set -e + +dart run electricsql_cli stop --remove +dart run electricsql_cli start --with-postgres --detach + +# {{ELECTRIC_PROXY}} will be replaced automatically with the correct value in the `with-config` command +MIGRATE_COMMAND="dbmate -u {{ELECTRIC_PROXY}} -d db --no-dump-schema up" + +# Run the migrations on Postgres +dart run electricsql_cli with-config "$MIGRATE_COMMAND" + +# Generate Raw client code +dart run electricsql_cli generate --with-dal=false + +# Generate Drift client code +# We want BigInts in E2E, because int8 with regular int is equivalent to +# the int4 test suite +dart run electricsql_cli generate --with-dal=true --int8-as-bigint + +dart run electricsql_cli stop --remove diff --git a/e2e/tests/03.26_node_satellite_can_resume_replication_after_server_restart.lux b/e2e/tests/03.26_node_satellite_can_resume_replication_after_server_restart.lux index 6a2481c2..27208277 100644 --- a/e2e/tests/03.26_node_satellite_can_resume_replication_after_server_restart.lux +++ b/e2e/tests/03.26_node_satellite_can_resume_replication_after_server_restart.lux @@ -73,7 +73,7 @@ [shell satellite_2] # Subscribe to "items" and include "other_items" - !custom_03_25_sync_items db + !custom_03_26_sync_items db ?send: #SatSubsReq\{id: ([a-f0-9-]{36}) [global client_2_subs_id=$1] diff --git a/e2e/tests/_satellite_macros.luxinc b/e2e/tests/_satellite_macros.luxinc index 9d0deef3..5f490809 100644 --- a/e2e/tests/_satellite_macros.luxinc +++ b/e2e/tests/_satellite_macros.luxinc @@ -184,7 +184,7 @@ [endmacro] [macro node_await_insert keys] - !insert_item db ${keys} + !insert_items db ${keys} ??$node [endmacro] @@ -203,7 +203,7 @@ [endmacro] [macro node_await_insert_other keys] - !insert_other_item db ${keys} + !insert_other_items db ${keys} ??$node [endmacro] diff --git a/packages/electricsql/lib/drivers/drift.dart b/packages/electricsql/lib/drivers/drift.dart index 1cf7ba7d..05a185f0 100644 --- a/packages/electricsql/lib/drivers/drift.dart +++ b/packages/electricsql/lib/drivers/drift.dart @@ -9,4 +9,13 @@ export '../src/drivers/drift/relation.dart' show TableRelation, TableRelations; export '../src/drivers/drift/schema.dart' show DBSchemaDrift, ElectricTableMixin; export '../src/drivers/drift/sync_input.dart' - show SyncIncludeBuilder, 
SyncInputRelation, SyncWhereBuilder; + show + ShapeIncludeBuilder, + ShapeInputRelation, + ShapeWhereBuilder, + // ignore: deprecated_member_use_from_same_package + SyncIncludeBuilder, + // ignore: deprecated_member_use_from_same_package + SyncInputRelation, + // ignore: deprecated_member_use_from_same_package + SyncWhereBuilder; diff --git a/packages/electricsql/lib/src/client/conversions/converter.dart b/packages/electricsql/lib/src/client/conversions/converter.dart new file mode 100644 index 00000000..a884b494 --- /dev/null +++ b/packages/electricsql/lib/src/client/conversions/converter.dart @@ -0,0 +1,6 @@ +import 'package:electricsql/src/client/model/index.dart'; + +abstract class Converter { + Object? encode(Object? v, PgType pgType); + Object? decode(Object? v, PgType pgType); +} diff --git a/packages/electricsql/lib/src/client/conversions/index.dart b/packages/electricsql/lib/src/client/conversions/index.dart new file mode 100644 index 00000000..a2c6f879 --- /dev/null +++ b/packages/electricsql/lib/src/client/conversions/index.dart @@ -0,0 +1,4 @@ +export './converter.dart' show Converter; +export './postgres/mapping.dart' show PostgresConverter, kPostgresConverter; +export './sqlite.dart' show SQLiteConverter, kSqliteConverter; +export './types.dart' show PgType; diff --git a/packages/electricsql/lib/src/client/conversions/postgres/fallback_mapping.dart b/packages/electricsql/lib/src/client/conversions/postgres/fallback_mapping.dart index c87ce957..23e21cc1 100644 --- a/packages/electricsql/lib/src/client/conversions/postgres/fallback_mapping.dart +++ b/packages/electricsql/lib/src/client/conversions/postgres/fallback_mapping.dart @@ -1,7 +1,24 @@ -import 'dart:convert'; +import 'dart:convert' hide Converter; +import 'package:electricsql/src/client/conversions/converter.dart'; import 'package:electricsql/src/client/conversions/types.dart'; +const kPostgresConverter = PostgresConverter(); + +class PostgresConverter implements Converter { + const PostgresConverter(); + + @override + Object? decode(Object? v, PgType pgType) { + throw UnimplementedError(); + } + + @override + Object? encode(Object? v, PgType pgType) { + throw UnimplementedError(); + } +} + /// Map the [dartValue] into a value understood by drift's postgres driver. /// /// This function is only called when the runtime dialect indicates that we're @@ -18,13 +35,16 @@ String mapToSqlLiteral( PgType type, Object dartValue, String typeName, - Codec codec, ) { throw UnsupportedError('Needs to be run in a dart:io environment.'); } /// Map the [sqlValue] into the Dart value used by the Drift schema. -Object mapToUser(PgType? type, Object sqlValue, Codec codec) { +Object mapToUser( + PgType? type, + Object sqlValue, + Codec? 
enumCodec, +) { throw UnsupportedError('Needs to be run in a dart:io environment.'); } diff --git a/packages/electricsql/lib/src/client/conversions/postgres/postgres_mapping.dart b/packages/electricsql/lib/src/client/conversions/postgres/postgres_mapping.dart index 6dbba38d..e63508c4 100644 --- a/packages/electricsql/lib/src/client/conversions/postgres/postgres_mapping.dart +++ b/packages/electricsql/lib/src/client/conversions/postgres/postgres_mapping.dart @@ -1,5 +1,6 @@ -import 'dart:convert'; +import 'dart:convert' hide Converter; +import 'package:electricsql/src/client/conversions/converter.dart'; import 'package:electricsql/src/client/conversions/types.dart'; import 'package:electricsql/src/util/converters/codecs/float4.dart'; import 'package:electricsql/src/util/converters/codecs/int2.dart'; @@ -7,11 +8,30 @@ import 'package:electricsql/src/util/converters/codecs/int4.dart'; import 'package:electricsql/src/util/converters/codecs/json.dart'; import 'package:electricsql/src/util/converters/codecs/uuid.dart'; import 'package:electricsql/src/util/converters/helpers.dart'; +import 'package:electricsql/src/util/converters/type_converters.dart'; import 'package:postgres/postgres.dart' as pg; // ignore: implementation_imports import 'package:postgres/src/types/text_codec.dart'; +const kPostgresConverter = PostgresConverter(); + +class PostgresConverter implements Converter { + const PostgresConverter(); + + @override + Object? decode(Object? v, PgType pgType) { + if (v == null) return null; + return mapToUser(pgType, v, null); + } + + @override + Object? encode(Object? v, PgType pgType) { + if (v == null) return null; + return mapToSql(pgType, v); + } +} + Object mapToSql(PgType? type, Object inputDartValue) { final pg.Type pgType = _mapElectricPgType(type); final dartValue = _updateDartInput(pgType, inputDartValue, isLiteral: false); @@ -65,7 +85,6 @@ String mapToSqlLiteral( PgType pgType, Object inputDartValue, String typeName, - Codec codec, ) { final pgLibType = _mapElectricPgType(pgType); final dartValue = @@ -86,9 +105,10 @@ String mapToSqlLiteral( } else if (pgType == PgType.timestamp) { pgEncoded = _encodeDateTimeWithoutTZ(dartValue as DateTime); } else if (pgType == PgType.json || pgType == PgType.jsonb) { - final jsonEncoded = dartValue == null - ? 'null' - : codec.encode(dartValue as Object) as String; + final codec = + pgType == PgType.json ? TypeConverters.json : TypeConverters.jsonb; + final String jsonEncoded = + dartValue == null ? 'null' : codec.encode(dartValue as Object); pgEncoded = _pgEncoder.convert(jsonEncoded); } else if (pgType == PgType.float4 && dartValue is num) { final dd = fround(dartValue); @@ -100,12 +120,16 @@ String mapToSqlLiteral( return pgEncoded; } -Object mapToUser(PgType? type, Object sqlValue, Codec codec) { +Object mapToUser( + PgType? type, + Object sqlValue, + Codec? 
enumCodec, +) { if (type == PgType.time) { return (sqlValue as pg.Time).utcDateTime; } else if (type == null) { final enumStr = _readEnum(sqlValue); - return codec.decode(enumStr); + return enumCodec!.decode(enumStr); } else { return sqlValue; } @@ -152,6 +176,9 @@ dynamic _updateDartInput( Int2Codec.validateInt(dartValue as int); } else if (pgType == pg.Type.integer) { Int4Codec.validateInt(dartValue as int); + } else if (pgType == pg.Type.bigInteger && dartValue is BigInt) { + final int8 = dartValue.rangeCheckedToInt(); + return int8; } if (dartValue is DateTime) { @@ -181,7 +208,9 @@ dynamic _updateDartInput( } Object toImplicitlyCastedValue(Object value) { - if (value is double) { + if (value is pg.TypedValue) { + return value; + } else if (value is double) { if (value.isNaN || value.isInfinite) { return pg.TypedValue(pg.Type.double, value); } @@ -193,20 +222,3 @@ Object toImplicitlyCastedValue(Object value) { return pg.TypedValue(pg.Type.unspecified, value); } - -extension on BigInt { - static final _bigIntMinValue64 = BigInt.from(-9223372036854775808); - static final _bigIntMaxValue64 = BigInt.from(9223372036854775807); - - int rangeCheckedToInt() { - if (this < _bigIntMinValue64 || this > _bigIntMaxValue64) { - throw ArgumentError.value( - this, - 'this', - 'BigInt value exceeds the range of 64 bits', - ); - } - - return toInt(); - } -} diff --git a/packages/electricsql/lib/src/client/conversions/sqlite.dart b/packages/electricsql/lib/src/client/conversions/sqlite.dart new file mode 100644 index 00000000..0ad55ce1 --- /dev/null +++ b/packages/electricsql/lib/src/client/conversions/sqlite.dart @@ -0,0 +1,67 @@ +import 'package:electricsql/src/client/model/index.dart'; +import 'package:electricsql/src/util/converters/helpers.dart'; +import 'package:electricsql/src/util/converters/type_converters.dart'; + +const kSqliteConverter = SQLiteConverter(); + +class SQLiteConverter implements Converter { + const SQLiteConverter(); + + @override + Object? encode(Object? v, PgType pgType) { + if (v == null) return null; + + final Object sqlVal = switch (pgType) { + PgType.bool => (v as bool) ? 1 : 0, + PgType.timestamp => TypeConverters.timestamp.encode(v as DateTime), + PgType.timestampTz => TypeConverters.timestampTZ.encode(v as DateTime), + PgType.date => TypeConverters.date.encode(v as DateTime), + PgType.time => TypeConverters.time.encode(v as DateTime), + PgType.timeTz => TypeConverters.timeTZ.encode(v as DateTime), + PgType.uuid => TypeConverters.uuid.encode(v as String), + PgType.int2 => TypeConverters.int2.encode(v as int), + PgType.int || + PgType.int4 || + PgType.integer => + TypeConverters.int4.encode(v as int), + PgType.int8 => switch (v) { + int() => TypeConverters.int8.encode(v), + BigInt() => v.rangeCheckedToInt(), + _ => throw ArgumentError('Invalid type for int8: $v'), + }, + PgType.real || PgType.float4 => TypeConverters.float4.encode(v as double), + PgType.float8 => TypeConverters.float8.encode(v as double), + PgType.json => TypeConverters.json.encode(v), + PgType.jsonb => TypeConverters.jsonb.encode(v), + _ => v, + }; + return sqlVal; + } + + @override + Object? decode(Object? 
v, PgType pgType) { + if (v == null) return null; + + final Object dartVal = switch (pgType) { + PgType.bool => (v as int) == 1, + PgType.timestamp => TypeConverters.timestamp.decode(v as String), + PgType.timestampTz => TypeConverters.timestampTZ.decode(v as String), + PgType.date => TypeConverters.date.decode(v as String), + PgType.time => TypeConverters.time.decode(v as String), + PgType.timeTz => TypeConverters.timeTZ.decode(v as String), + PgType.uuid => TypeConverters.uuid.decode(v as String), + PgType.int2 => TypeConverters.int2.decode(v as int), + PgType.int || + PgType.int4 || + PgType.integer => + TypeConverters.int4.decode(v as int), + PgType.int8 => TypeConverters.int8.decode(v as int), + PgType.real || PgType.float4 => TypeConverters.float4.decode(v), + PgType.float8 => TypeConverters.float8.decode(v), + PgType.json => TypeConverters.json.decode(v as String), + PgType.jsonb => TypeConverters.jsonb.decode(v as String), + _ => v, + }; + return dartVal; + } +} diff --git a/packages/electricsql/lib/src/client/input/sync_input.dart b/packages/electricsql/lib/src/client/input/sync_input.dart index 89431be1..47a4281b 100644 --- a/packages/electricsql/lib/src/client/input/sync_input.dart +++ b/packages/electricsql/lib/src/client/input/sync_input.dart @@ -1,14 +1,20 @@ import 'package:electricsql/src/client/model/schema.dart'; -class SyncInputRaw { +@Deprecated('Use ShapeInputRaw') +typedef SyncInputRaw = ShapeInputRaw; + +@Deprecated('Use ShapeWhere') +typedef SyncWhere = ShapeWhere; + +class ShapeInputRaw { final String tableName; final List? include; - final SyncWhere? where; + final ShapeWhere? where; /// Unique key for a shape subscription, allowing shape modification and unsubscribe final String? key; - SyncInputRaw({ + ShapeInputRaw({ required this.tableName, this.include, this.where, @@ -18,7 +24,7 @@ class SyncInputRaw { class IncludeRelRaw { final List foreignKey; - final SyncInputRaw select; + final ShapeInputRaw select; IncludeRelRaw({ required this.foreignKey, @@ -26,10 +32,10 @@ class IncludeRelRaw { }); } -class SyncWhere { +class ShapeWhere { final String where; - SyncWhere(Map map) : where = makeSqlWhereClause(map); + ShapeWhere(Map map) : where = makeSqlWhereClause(map); - SyncWhere.raw(this.where); + ShapeWhere.raw(this.where); } diff --git a/packages/electricsql/lib/src/client/model/client.dart b/packages/electricsql/lib/src/client/model/client.dart index bcdfd890..d5335833 100644 --- a/packages/electricsql/lib/src/client/model/client.dart +++ b/packages/electricsql/lib/src/client/model/client.dart @@ -1,10 +1,12 @@ import 'package:electricsql/electricsql.dart'; -import 'package:electricsql/src/client/model/schema.dart'; -import 'package:electricsql/src/client/model/transform.dart'; +import 'package:electricsql/src/client/model/sync.dart'; +import 'package:electricsql/src/client/model/transform.dart' as transform_lib; +import 'package:electricsql/src/client/model/transform.dart' + hide setReplicationTransform; import 'package:electricsql/src/migrators/query_builder/query_builder.dart'; import 'package:electricsql/src/notifiers/notifiers.dart'; import 'package:electricsql/src/satellite/satellite.dart'; -import 'package:electricsql/src/satellite/shapes/types.dart'; +import 'package:electricsql/util.dart'; import 'package:meta/meta.dart'; abstract interface class BaseElectricClient { @@ -12,7 +14,6 @@ abstract interface class BaseElectricClient { String get dbName; DatabaseAdapter get adapter; Notifier get notifier; - DBSchema get dbDescription; Registry get registry; 
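The `Converter` pair introduced above exposes the dialect-specific value mapping that was previously buried inside the DAL: `encode` produces what the driver stores, `decode` reverses it. A minimal round-trip sketch under the SQLite encodings shown in `sqlite.dart`, assuming `Converter`, `kSqliteConverter` and `PgType` are re-exported from the package entrypoint as `conversions/index.dart` suggests:

```dart
import 'package:electricsql/electricsql.dart';

void main() {
  const Converter converter = kSqliteConverter;

  // SQLite has no boolean type, so bools are stored as 0/1 integers.
  assert(converter.encode(true, PgType.bool) == 1);
  assert(converter.decode(1, PgType.bool) == true);

  // Timestamps round-trip through their SQLite text encoding.
  final ts = DateTime.utc(2024, 6, 26, 15, 51, 6);
  final stored = converter.encode(ts, PgType.timestampTz);
  final restored = converter.decode(stored, PgType.timestampTz)! as DateTime;
  assert(restored.isAtSameMomentAs(ts));

  // Nulls pass through unchanged in both directions.
  assert(converter.encode(null, PgType.int8) == null);
}
```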
bool get isConnected; @@ -24,21 +25,23 @@ abstract interface class BaseElectricClient { Future close(); // ElectricClient methods + DBSchema get dbDescription; Satellite get satellite; + SyncManager get syncManager; + @internal + IReplicationTransformManager get replicationTransformManager; Future connect([String? token]); void disconnect(); } abstract interface class ElectricClientRaw implements BaseElectricClient { - SyncManager get syncManager; - - Future sync(SyncInputRaw sync); - - @protected - Future syncShapeInternal(Shape shape, [String? key]); + void setReplicationTransform( + QualifiedTablename qualifiedTableName, + ReplicatedRowTransformer i, + ); } -class ElectricClientImpl extends ElectricNamespace +class ElectricClientRawImpl extends ElectricNamespace implements ElectricClientRaw { @override final Satellite satellite; @@ -46,14 +49,15 @@ class ElectricClientImpl extends ElectricNamespace @protected late final IShapeManager shapeManager; - @protected + @override + @internal late final IReplicationTransformManager replicationTransformManager; @override final DBSchema dbDescription; @override - late final SyncManager syncManager = _SyncManagerImpl(satellite: satellite); + late final SyncManager syncManager = _SyncManagerImpl(baseClient: this); final Dialect dialect; @@ -78,7 +82,7 @@ class ElectricClientImpl extends ElectricNamespace satellite.clientDisconnect(); } - factory ElectricClientImpl.create({ + factory ElectricClientRawImpl.create({ required String dbName, required DatabaseAdapter adapter, required DBSchema dbDescription, @@ -87,7 +91,7 @@ class ElectricClientImpl extends ElectricNamespace required Registry registry, required Dialect dialect, }) { - return ElectricClientImpl.internal( + return ElectricClientRawImpl.internal( dbName: dbName, adapter: adapter, notifier: notifier, @@ -99,7 +103,7 @@ class ElectricClientImpl extends ElectricNamespace } @protected - ElectricClientImpl.internal({ + ElectricClientRawImpl.internal({ required super.dbName, required super.adapter, required super.notifier, @@ -112,30 +116,43 @@ class ElectricClientImpl extends ElectricNamespace } @override - Future sync(SyncInputRaw syncInput) async { - final shape = computeShape(syncInput); - return syncShapeInternal(shape, syncInput.key); - } - - @override - @internal - Future syncShapeInternal(Shape shape, [String? key]) { - return satellite.subscribe([shape], key); + void setReplicationTransform( + QualifiedTablename qualifiedTableName, + ReplicatedRowTransformer i, + ) { + transform_lib.setReplicationTransform( + dbDescription: dbDescription, + replicationTransformManager: replicationTransformManager, + qualifiedTableName: qualifiedTableName, + validateFun: null, + transformInbound: i.transformInbound, + transformOutbound: i.transformOutbound, + toRecord: (r) => r, + fromRecord: (r) => r, + ); } } class _SyncManagerImpl implements SyncManager { - final Satellite satellite; + final BaseElectricClient baseClient; + + _SyncManagerImpl({required this.baseClient}); - _SyncManagerImpl({required this.satellite}); + @override + Future subscribe( + ShapeInputRaw i, [ + String? 
key, + ]) { + return syncShape(baseClient.satellite, baseClient.dbDescription, i, key); + } @override Future unsubscribe(List keys) { - return satellite.unsubscribe(keys); + return baseClient.satellite.unsubscribe(keys); } @override SyncStatus syncStatus(String key) { - return satellite.syncStatus(key); + return baseClient.satellite.syncStatus(key); } } diff --git a/packages/electricsql/lib/src/client/model/index.dart b/packages/electricsql/lib/src/client/model/index.dart index dd41f306..84a7af91 100644 --- a/packages/electricsql/lib/src/client/model/index.dart +++ b/packages/electricsql/lib/src/client/model/index.dart @@ -1,5 +1,13 @@ export 'package:electricsql/src/client/model/schema.dart' - show ElectricMigrations; + show + DBSchema, + DBSchemaRaw, + ElectricMigrations, + Fields, + Relation, + TableSchema; + +export '../conversions/index.dart'; export 'client.dart' show BaseElectricClient, ElectricClientRaw; export 'shapes.dart' diff --git a/packages/electricsql/lib/src/client/model/schema.dart b/packages/electricsql/lib/src/client/model/schema.dart index 29a31048..ea17081b 100644 --- a/packages/electricsql/lib/src/client/model/schema.dart +++ b/packages/electricsql/lib/src/client/model/schema.dart @@ -1,7 +1,6 @@ import 'package:electricsql/electricsql.dart'; -import 'package:electricsql/src/client/conversions/types.dart'; import 'package:electricsql/src/satellite/shapes/types.dart'; -import 'package:meta/meta.dart'; +import 'package:equatable/equatable.dart'; typedef FieldName = String; typedef RelationName = String; @@ -18,18 +17,21 @@ class ElectricMigrations { }); } -class TableSchema { +class TableSchema with EquatableMixin { final Fields fields; final List relations; - TableSchema({ + const TableSchema({ required this.fields, required this.relations, }); + + @override + List get props => [fields, relations]; } abstract class DBSchema { - final Map _tableSchemas; + final Map tableSchemas; final List _migrations; final List _pgMigrations; @@ -39,20 +41,19 @@ abstract class DBSchema { /// @param tables Description of the database tables /// @param migrations Bundled SQLite migrations /// @param pgMigrations Bundled Postgres migrations - DBSchema({ - required Map tableSchemas, + const DBSchema({ + required this.tableSchemas, required List migrations, required List pgMigrations, - }) : _tableSchemas = tableSchemas, - _migrations = migrations, + }) : _migrations = migrations, _pgMigrations = pgMigrations; bool hasTable(String table) { - return _tableSchemas.containsKey(table); + return tableSchemas.containsKey(table); } TableSchema getTableSchema(String table) { - return _tableSchemas[table]!; + return tableSchemas[table]!; } Fields getFields(String table) { @@ -63,6 +64,14 @@ abstract class DBSchema { return getTableSchema(table).relations; } + List getOutgoingRelations(TableName table) { + return getRelations(table).where((r) => r.isOutgoingRelation()).toList(); + } + + List getIncomingRelations(TableName table) { + return getRelations(table).where((r) => r.isIncomingRelation()).toList(); + } + // RelationName getRelationName(TableName table, FieldName field) { // return getRelations(table) // .firstWhere((r) => r.relationField == field) @@ -100,44 +109,17 @@ abstract class DBSchema { } } -@visibleForTesting class DBSchemaRaw extends DBSchema { Map get fields => - _tableSchemas.map((k, v) => MapEntry(k, v.fields)); + tableSchemas.map((k, v) => MapEntry(k, v.fields)); - DBSchemaRaw({ + const DBSchemaRaw({ required super.tableSchemas, required super.migrations, required 
super.pgMigrations,
  });
}

-@protected
-Shape computeShape(SyncInputRaw i) {
-  final include = i.include ?? [];
-  final SyncWhere where = i.where ?? SyncWhere.raw('');
-
-  Rel includeRelToRel(IncludeRelRaw ir) {
-    return Rel(
-      foreignKey: ir.foreignKey,
-      select: computeShape(
-        ir.select,
-      ),
-    );
-  }
-
-  // Recursively go over the included fields
-  final List includedTables =
-      include.map((e) => includeRelToRel(e)).toList();
-
-  final whereClause = where.where;
-  return Shape(
-    tablename: i.tableName,
-    include: includedTables.isEmpty ? null : includedTables,
-    where: whereClause == '' ? null : whereClause,
-  );
-}
-
 // TODO(dart): Equivalent implementation from official electric to add support for other map based
 // operators like "in", "lt", "gt"...
 // Also update the test "nested shape is constructed" if this is done
@@ -202,7 +184,7 @@ List> _extractWhereConditionsFor(
   return conditions;
 }
 
-class Relation {
+class Relation with EquatableMixin {
   // final String relationField;
   final String fromField;
   final String toField;
@@ -228,4 +210,13 @@ class Relation {
   Relation getOppositeRelation(DBSchema dbDescription) {
     return dbDescription.getRelation(relatedTable, relationName);
   }
+
+  @override
+  List get props => [
+        // relationField,
+        fromField,
+        toField,
+        relationName,
+        relatedTable,
+      ];
 }
diff --git a/packages/electricsql/lib/src/client/model/shapes.dart b/packages/electricsql/lib/src/client/model/shapes.dart
index 13d74dca..169a2908 100644
--- a/packages/electricsql/lib/src/client/model/shapes.dart
+++ b/packages/electricsql/lib/src/client/model/shapes.dart
@@ -1,3 +1,4 @@
+import 'package:electricsql/src/client/input/sync_input.dart';
 import 'package:electricsql/src/satellite/satellite.dart';
 import 'package:electricsql/src/satellite/shapes/types.dart';
 import 'package:equatable/equatable.dart';
@@ -77,6 +78,26 @@ abstract interface class IShapeManager {
 }
 
 abstract interface class SyncManager {
+  /// Subscribes to the given shape, returning a [ShapeSubscription] object which
+  /// can be used to wait for the shape to sync initial data.
+  ///
+  /// https://electric-sql.com/docs/usage/data-access/shapes
+  ///
+  /// NOTE: If you establish a shape subscription that has already synced its initial data,
+  /// awaiting `shape.synced` will always resolve immediately as shape subscriptions are persisted.
+  /// For example, imagine that you re-sync the same shape during subsequent application loads.
+  /// Awaiting `shape.synced` a second time will only ensure that the initial
+  /// shape load is complete. It does not ensure that the replication stream
+  /// has caught up to the central DB's more recent state.
+  ///
+  /// @param i - The shape to subscribe to
+  /// @param key - An optional unique key that identifies the subscription
+  /// @returns A shape subscription
+  Future subscribe(
+    ShapeInputRaw i, [
+    String? key,
+  ]);
+
   Future unsubscribe(List keys);
 
   SyncStatus syncStatus(String key);
 }
diff --git a/packages/electricsql/lib/src/client/model/sync.dart b/packages/electricsql/lib/src/client/model/sync.dart
new file mode 100644
index 00000000..b9863f21
--- /dev/null
+++ b/packages/electricsql/lib/src/client/model/sync.dart
@@ -0,0 +1,53 @@
+import 'package:electricsql/electricsql.dart';
+import 'package:electricsql/satellite.dart';
+
+Future syncShape(
+  IShapeManager shapeManager,
+  DBSchema dbDescription,
+  ShapeInputRaw i, [
+  String?
key, +]) async { + // Check which table the user wants to sync + final tableName = i.tableName; + + if (tableName.isEmpty) { + throw Exception( + 'Cannot sync the requested shape. Table name must be a non-empty string', + ); + } + + final shape = computeShape(dbDescription, i); + return shapeManager.subscribe([shape], key); +} + +Shape computeShape(DBSchema dbSchema, ShapeInputRaw i) { + if (!dbSchema.hasTable(i.tableName)) { + throw Exception( + "Cannot sync the requested shape. Table '${i.tableName}' does not exist in the database schema.", + ); + } + + final include = i.include ?? []; + final ShapeWhere where = i.where ?? ShapeWhere.raw(''); + + // Recursively go over the included fields + final List includedTables = + include.map((e) => _createShapeRelation(dbSchema, e)).toList(); + + final whereClause = where.where; + return Shape( + tablename: i.tableName, + include: includedTables.isEmpty ? null : includedTables, + where: whereClause == '' ? null : whereClause, + ); +} + +Rel _createShapeRelation(DBSchema dbSchema, IncludeRelRaw ir) { + return Rel( + foreignKey: ir.foreignKey, + select: computeShape( + dbSchema, + ir.select, + ), + ); +} diff --git a/packages/electricsql/lib/src/client/model/transform.dart b/packages/electricsql/lib/src/client/model/transform.dart index 5b0a888d..7136c3d6 100644 --- a/packages/electricsql/lib/src/client/model/transform.dart +++ b/packages/electricsql/lib/src/client/model/transform.dart @@ -1,4 +1,5 @@ import 'package:electricsql/satellite.dart'; +import 'package:electricsql/src/client/model/schema.dart'; import 'package:electricsql/src/client/validation/validation.dart'; import 'package:electricsql/util.dart'; @@ -9,6 +10,16 @@ abstract class IReplicationTransformManager { ); void clearTableTransform(QualifiedTablename tableName); + + DbRecord transformTableRecord( + DbRecord record, + D Function(D row) transformRow, + Fields fields, + List immutableFields, { + required void Function(D)? validateFun, + required Map Function(D) toRecord, + required D Function(Map) fromRecord, + }); } class ReplicationTransformManager implements IReplicationTransformManager { @@ -28,6 +39,27 @@ class ReplicationTransformManager implements IReplicationTransformManager { void clearTableTransform(QualifiedTablename tableName) { satellite.clearReplicationTransform(tableName); } + + @override + DbRecord transformTableRecord( + DbRecord record, + D Function(D row) transformRow, + Fields fields, + List immutableFields, { + required void Function(D)? validateFun, + required Map Function(D) toRecord, + required D Function(Map) fromRecord, + }) { + return transformTableRecordGeneric( + record, + (r) => toRecord(transformRow(fromRecord(r))), + fields, + immutableFields, + validateFun: + validateFun == null ? null : (d) => validateFun(fromRecord(d)), + toRecord: (d) => d, + ); + } } /// Transform a raw record with the given typed row transformation {@link transformRow} @@ -39,18 +71,21 @@ class ReplicationTransformManager implements IReplicationTransformManager { /// @param schema schema to parse/validate raw record to record of type {@link T} /// @param immutableFields - fields that cannot be modified by {@link transformRow} /// @return the transformed raw record -D transformTableRecord( +D transformTableRecordGeneric( D record, D Function(D) transformRow, + Fields fields, List immutableFields, { - required void Function(D) validateFun, + required void Function(D)? 
validateFun, required Map Function(D) toRecord, }) { // apply specified transformation final transformedParsedRow = transformRow(record); - // validate transformed row - validateFun(transformedParsedRow); + // validate transformed row and convert back to raw record + // schema is only provided when using the DAL + // if validateFun is not provided, we skip validation + validateFun?.call(transformedParsedRow); final originalCols = toRecord(record); final transformedCols = toRecord(transformedParsedRow); @@ -73,3 +108,73 @@ D transformTableRecord( return transformedParsedRow; } + +void setReplicationTransform({ + required DBSchema dbDescription, + required IReplicationTransformManager replicationTransformManager, + required QualifiedTablename qualifiedTableName, + required T Function(T row) transformInbound, + required T Function(T row) transformOutbound, + required void Function(T)? validateFun, + required Map Function(T) toRecord, + required T Function(Map) fromRecord, +}) { + final tableName = qualifiedTableName.tablename; + + if (!dbDescription.hasTable(tableName)) { + throw Exception( + "Cannot set replication transform for table '$tableName'. Table does not exist in the database schema.", + ); + } + + final fields = dbDescription.getFields(tableName); + + // forbid transforming relation keys to avoid breaking + // referential integrity + + // the column could be the FK column when it is an outgoing FK + // or it could be a PK column when it is an incoming FK + final fkCols = dbDescription + .getOutgoingRelations(tableName) + .map((r) => r.fromField) + .toList(); + + // Incoming relations don't have the `fromField` and `toField` filled in + // so we need to fetch the `toField` from the opposite relation + // which is effectively a column in this table to which the FK points + final pkCols = dbDescription + .getIncomingRelations(tableName) + .map((r) => r.getOppositeRelation(dbDescription).toField); + + // Merge all columns that are part of a FK relation. + // Remove duplicate columns in case a column has both an outgoing FK and an incoming FK. 
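+  // For example: if `other_items.item_id` points at `items.id` (as in the
+  // e2e schema in this diff), a transform on `other_items` must leave
+  // `item_id` untouched (the outgoing side of the FK) and a transform on
+  // `items` must leave `id` untouched (the incoming side), so both columns
+  // end up in `immutableFields` below.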
+ final immutableFields = {...fkCols, ...pkCols}.toList(); + + replicationTransformManager.setTableTransform( + qualifiedTableName, + ReplicatedRowTransformer( + transformInbound: (DbRecord record) { + return replicationTransformManager.transformTableRecord( + record, + transformInbound, + fields, + immutableFields, + validateFun: validateFun, + toRecord: toRecord, + fromRecord: fromRecord, + ); + }, + transformOutbound: (DbRecord record) { + return replicationTransformManager.transformTableRecord( + record, + transformOutbound, + fields, + immutableFields, + validateFun: validateFun, + toRecord: toRecord, + fromRecord: fromRecord, + ); + }, + ), + ); +} diff --git a/packages/electricsql/lib/src/client/util/relations.dart b/packages/electricsql/lib/src/client/util/relations.dart new file mode 100644 index 00000000..9428bb29 --- /dev/null +++ b/packages/electricsql/lib/src/client/util/relations.dart @@ -0,0 +1,183 @@ +import 'package:collection/collection.dart'; +import 'package:electricsql/src/client/conversions/types.dart'; +import 'package:electricsql/src/client/model/schema.dart'; +import 'package:electricsql/src/proto/satellite.pb.dart'; +import 'package:electricsql/src/satellite/shapes/types.dart'; + +Relation makeRelation( + SatOpMigrate_Table table, + SatOpMigrate_ForeignKey fk, + Map> groupedFks, + KeyedTables allTables, +) { + final childTable = table.name; + final childCols = fk.fkCols; + final parentCols = fk.pkCols; + final parentTable = fk.pkTable; + + if (childCols.length > 1 || parentCols.length > 1) { + throw Exception('Composite foreign keys are not supported'); + } + + final childCol = childCols[0]; + final parentCol = parentCols[0]; + + // If there is only a single foreign key to a certain parent table + // and there is no column that is named after the parent table + // and there is no FK from the parent table to the child table + // then we can name the relation field the same as the parent table name + // otherwise the relation field name is the relation name prefixed with the name of the related table + final noColNamedAfterParent = table.columns.every( + (col) => col.name != parentTable, + ); + final singleFk = groupedFks[parentTable]!.length == 1; + final fkFromParentToChild = allTables[parentTable]! + .fks + .firstWhereOrNull((fk) => fk.pkTable == childTable); + + final relationName = '${childTable}_${childCol}To$parentTable'; + // ignore: unused_local_variable + final relationFieldName = + singleFk && noColNamedAfterParent && fkFromParentToChild != null + ? parentTable + : '${parentTable}_$relationName'; + + return Relation( + // relationFieldName, + fromField: childCol, + toField: parentCol, + relatedTable: parentTable, + relationName: relationName, + ); +} + +typedef GroupedRelations = Map>; +typedef KeyedTables = Map; + +/// Creates a `Relation` object for each FK in the table, +/// as well as the opposite `Relation` object in order to +/// be able to traverse the relation in the opposite direction. +/// As a result, this function returns a map of relations grouped by table name. +GroupedRelations createRelationsFromTable( + SatOpMigrate_Table table, + KeyedTables allTables, +) { + final childTable = table.name; + final fks = table.fks; + final groupedFks = groupBy(fks, (fk) => fk.pkTable); + + final GroupedRelations groupedRelations = {}; + void extendGroupedRelations(TableName tableName, Relation relation) { + final relations = groupedRelations[tableName] ?? 
[];
+    relations.add(relation);
+    groupedRelations[tableName] = relations;
+  }
+
+  // For each FK make a `Relation`
+  final forwardRelations = fks.map((fk) {
+    final rel = makeRelation(table, fk, groupedFks, allTables);
+    // Store the relation in the `groupedRelations` map
+    extendGroupedRelations(childTable, rel);
+    return rel;
+  });
+
+  // For each FK, also create the opposite `Relation`
+  // in order to be able to follow the relation in both directions
+  for (final relation in forwardRelations) {
+    final parentTableName = relation.relatedTable;
+    final parentTable = allTables[parentTableName]!;
+    final parentFks = parentTable.fks;
+    // If the parent table also has a FK to the child table
+    // then there is ambiguity because we can follow this FK
+    // or we could follow the FK that points to this table in the opposite direction
+    final fkToChildTable = parentFks.firstWhereOrNull(
+      (fk) =>
+          fk.pkTable == childTable &&
+          fk.fkCols[0] !=
+              relation
+                  .toField, // checks if this is another FK to the same table, assuming no composite FKs
+    );
+    // Also check if there are other FKs from the child table to this table
+    final childFks = allTables[childTable]!.fks;
+    final otherFksToParentTable = childFks.firstWhereOrNull(
+      (fk) =>
+          fk.pkTable == parentTableName &&
+          fk.fkCols[0] !=
+              relation
+                  .fromField, // checks if this is another FK from the child table to this table, assuming no composite FKs
+    );
+    final noColNamedAfterParent =
+        parentTable.columns.every((col) => col.name != childTable);
+
+    // Make the relation field name
+    // which is the name of the related table (if it is unique)
+    // otherwise it is the relation name prefixed with the name of the related table
+    // ignore: unused_local_variable
+    final relationFieldName = fkToChildTable != null &&
+            otherFksToParentTable != null &&
+            noColNamedAfterParent
+        ? childTable
+        : '${childTable}_${relation.relationName}';
+
+    final backwardRelation = Relation(
+      // relationFieldName,
+      fromField: '',
+      toField: '',
+      relatedTable: childTable,
+      relationName: relation.relationName,
+    );
+
+    // Store the backward relation in the `groupedRelations` map
+    extendGroupedRelations(parentTableName, backwardRelation);
+  }
+  return groupedRelations;
+}
+
+void mergeGroupedRelations(
+  GroupedRelations groupedRelations,
+  GroupedRelations relations,
+) {
+  for (final entry in relations.entries) {
+    final relations = entry.value;
+    final tableName = entry.key;
+    final existingRelations = groupedRelations[tableName] ?? [];
+    groupedRelations[tableName] = [...existingRelations, ...relations];
+  }
+}
+
+GroupedRelations createRelationsFromAllTables(List tables) {
+  final KeyedTables keyedTables =
+      Map.fromEntries(tables.map((table) => MapEntry(table.name, table)));
+  final GroupedRelations groupedRelations = {};
+  for (final table in tables) {
+    final relations = createRelationsFromTable(table, keyedTables);
+    mergeGroupedRelations(groupedRelations, relations);
+  }
+  return groupedRelations;
+}
+
+// TODO: remove the DbSchema type from the DAL and use this one instead
+DBSchema createDbDescription(List tables) {
+  final relations = createRelationsFromAllTables(tables);
+  final tableSchemas = {};
+  for (final table in tables) {
+    final tableName = table.name;
+    final rels = relations[tableName] ?? [];
+    final Fields fields = {};
+    for (final col in table.columns) {
+      final pgType = maybePgTypeFromColumnType(col.pgType.name);
+      // Fall back to text if the pgType is null, i.e. the column is an enum
+      fields[col.name] = pgType ??
PgType.text; + } + + tableSchemas[tableName] = TableSchema( + fields: fields, + relations: rels, + ); + } + return DBSchemaRaw( + tableSchemas: tableSchemas, + migrations: [], + pgMigrations: [], + ); +} diff --git a/packages/electricsql/lib/src/drivers/drift/custom_types.dart b/packages/electricsql/lib/src/drivers/drift/custom_types.dart index b299fe46..2bc8d350 100644 --- a/packages/electricsql/lib/src/drivers/drift/custom_types.dart +++ b/packages/electricsql/lib/src/drivers/drift/custom_types.dart @@ -3,6 +3,7 @@ import 'dart:convert'; import 'package:drift/drift.dart'; import 'package:electricsql/src/client/conversions/postgres/mapping.dart' as pg_mapping; +import 'package:electricsql/src/client/conversions/sqlite.dart'; import 'package:electricsql/src/client/conversions/types.dart'; import 'package:electricsql/src/util/converters/type_converters.dart'; @@ -22,35 +23,14 @@ class ElectricTypes { static const JsonBType jsonb = JsonBType(); } -class CustomElectricTypeGeneric - implements DialectAwareSqlType { - final Codec codec; +abstract class CustomElectricTypeGeneric implements DialectAwareSqlType { final String typeName; const CustomElectricTypeGeneric({ - required this.codec, required this.typeName, }); - @override - String mapToSqlLiteral(GenerationContext context, DartT dartValue) { - final encoded = codec.encode(dartValue); - if (encoded is String) { - return "'$encoded'"; - } - return '$encoded'; - } - - @override - Object mapToSqlParameter(GenerationContext context, DartT dartValue) { - return codec.encode(dartValue); - } - - @override - DartT read(SqlTypes types, Object fromSql) { - return codec.decode(fromSql as SQLType); - } - @override String sqlTypeName(GenerationContext context) => typeName; } @@ -60,7 +40,6 @@ abstract class CustomElectricType final PgType pgType; const CustomElectricType({ - required super.codec, required super.typeName, required this.pgType, }); @@ -68,38 +47,52 @@ abstract class CustomElectricType @override Object mapToSqlParameter(GenerationContext context, DartT dartValue) { if (context.dialect == SqlDialect.postgres) { - return pg_mapping.mapToSql(pgType, dartValue); + return pg_mapping.kPostgresConverter.encode(dartValue, pgType)!; } else { - return super.mapToSqlParameter(context, dartValue); + return kSqliteConverter.encode(dartValue, pgType)!; } } @override String mapToSqlLiteral(GenerationContext context, DartT dartValue) { if (context.dialect == SqlDialect.postgres) { - return pg_mapping.mapToSqlLiteral(pgType, dartValue, typeName, codec); + return pg_mapping.mapToSqlLiteral(pgType, dartValue, typeName); } else { - return super.mapToSqlLiteral(context, dartValue); + final encoded = kSqliteConverter.encode(dartValue, pgType)!; + if (encoded is String) { + return "'$encoded'"; + } + return '$encoded'; } } @override DartT read(SqlTypes types, Object fromSql) { if (types.dialect == SqlDialect.postgres) { - return pg_mapping.mapToUser(pgType, fromSql, codec) as DartT; + final decoded = pg_mapping.kPostgresConverter.decode(fromSql, pgType)!; + return decoded as DartT; } else { - return super.read(types, fromSql); + final decoded = kSqliteConverter.decode(fromSql, pgType)!; + return decoded as DartT; } } } class CustomElectricTypeEnum extends CustomElectricTypeGeneric { + final Codec codec; + const CustomElectricTypeEnum({ - required super.codec, + required this.codec, required super.typeName, }); + @override + String mapToSqlLiteral(GenerationContext context, DartT dartValue) { + final String encoded = codec.encode(dartValue); + return 
"'$encoded'"; + } + @override Object mapToSqlParameter(GenerationContext context, DartT dartValue) { if (context.dialect == SqlDialect.postgres) { @@ -108,7 +101,7 @@ class CustomElectricTypeEnum final String enumStr = codec.encode(dartValue); return pg_mapping.mapToSql(pgType, enumStr); } else { - return super.mapToSqlParameter(context, dartValue); + return codec.encode(dartValue); } } @@ -117,7 +110,7 @@ class CustomElectricTypeEnum if (types.dialect == SqlDialect.postgres) { return pg_mapping.mapToUser(null, fromSql, codec) as DartT; } else { - return super.read(types, fromSql); + return codec.decode(fromSql as String); } } } @@ -125,7 +118,6 @@ class CustomElectricTypeEnum class TimestampType extends CustomElectricType { const TimestampType() : super( - codec: TypeConverters.timestamp, typeName: 'timestamp', pgType: PgType.timestamp, ); @@ -134,7 +126,6 @@ class TimestampType extends CustomElectricType { class TimestampTZType extends CustomElectricType { const TimestampTZType() : super( - codec: TypeConverters.timestampTZ, typeName: 'timestamptz', pgType: PgType.timestampTz, ); @@ -143,7 +134,6 @@ class TimestampTZType extends CustomElectricType { class DateType extends CustomElectricType { const DateType() : super( - codec: TypeConverters.date, typeName: 'date', pgType: PgType.date, ); @@ -152,7 +142,6 @@ class DateType extends CustomElectricType { class TimeType extends CustomElectricType { const TimeType() : super( - codec: TypeConverters.time, typeName: 'time', pgType: PgType.time, ); @@ -161,7 +150,6 @@ class TimeType extends CustomElectricType { class TimeTZType extends CustomElectricType { const TimeTZType() : super( - codec: TypeConverters.timeTZ, typeName: 'timetz', pgType: PgType.timeTz, ); @@ -170,7 +158,6 @@ class TimeTZType extends CustomElectricType { class UUIDType extends CustomElectricType { const UUIDType() : super( - codec: TypeConverters.uuid, typeName: 'uuid', pgType: PgType.uuid, ); @@ -179,7 +166,6 @@ class UUIDType extends CustomElectricType { class Int2Type extends CustomElectricType { const Int2Type() : super( - codec: TypeConverters.int2, typeName: 'int2', pgType: PgType.int2, ); @@ -188,7 +174,6 @@ class Int2Type extends CustomElectricType { class Int4Type extends CustomElectricType { const Int4Type() : super( - codec: TypeConverters.int4, typeName: 'int4', pgType: PgType.int4, ); @@ -197,7 +182,6 @@ class Int4Type extends CustomElectricType { class Int8Type extends CustomElectricType { const Int8Type() : super( - codec: TypeConverters.int8, typeName: 'int8', pgType: PgType.int8, ); @@ -206,7 +190,6 @@ class Int8Type extends CustomElectricType { class Float4Type extends CustomElectricType { const Float4Type() : super( - codec: TypeConverters.float4, typeName: 'float4', pgType: PgType.float4, ); @@ -214,7 +197,7 @@ class Float4Type extends CustomElectricType { @override String mapToSqlLiteral(GenerationContext context, double dartValue) { if (context.dialect == SqlDialect.sqlite) { - return _doubleToSqliteLiteral(codec, dartValue); + return _doubleToSqliteLiteral(TypeConverters.float4, dartValue); } else { return super.mapToSqlLiteral(context, dartValue); } @@ -224,7 +207,6 @@ class Float4Type extends CustomElectricType { class Float8Type extends CustomElectricType { const Float8Type() : super( - codec: TypeConverters.float8, typeName: 'float8', pgType: PgType.float8, ); @@ -232,7 +214,7 @@ class Float8Type extends CustomElectricType { @override String mapToSqlLiteral(GenerationContext context, double dartValue) { if (context.dialect == SqlDialect.sqlite) { - 
return _doubleToSqliteLiteral(codec, dartValue);
+      return _doubleToSqliteLiteral(TypeConverters.float8, dartValue);
     } else {
       return super.mapToSqlLiteral(context, dartValue);
     }
@@ -242,7 +224,6 @@ class Float8Type extends CustomElectricType {
 class JsonType extends CustomElectricType {
   const JsonType()
       : super(
-          codec: TypeConverters.json,
           typeName: 'json',
           pgType: PgType.json,
         );
@@ -251,7 +232,6 @@ class JsonType extends CustomElectricType {
 class JsonBType extends CustomElectricType {
   const JsonBType()
       : super(
-          codec: TypeConverters.jsonb,
           typeName: 'jsonb',
           pgType: PgType.jsonb,
         );
diff --git a/packages/electricsql/lib/src/drivers/drift/drift.dart b/packages/electricsql/lib/src/drivers/drift/drift.dart
index 300db940..02e4fcfc 100644
--- a/packages/electricsql/lib/src/drivers/drift/drift.dart
+++ b/packages/electricsql/lib/src/drivers/drift/drift.dart
@@ -3,12 +3,10 @@ import 'package:electricsql/drivers/drift.dart';
 import 'package:electricsql/electricsql.dart';
 import 'package:electricsql/migrators.dart';
 import 'package:electricsql/src/client/model/client.dart';
-import 'package:electricsql/src/client/model/schema.dart';
 import 'package:electricsql/src/client/model/transform.dart';
 import 'package:electricsql/src/config/config.dart';
 import 'package:electricsql/src/drivers/drift/sync_input.dart';
 import 'package:electricsql/src/electric/electric.dart' as electrify_lib;
-import 'package:electricsql/src/electric/electric.dart';
 import 'package:electricsql/src/notifiers/notifiers.dart';
 import 'package:electricsql/src/satellite/satellite.dart';
 import 'package:electricsql/src/sockets/sockets.dart';
@@ -65,7 +63,8 @@ Future> electrify({
     ),
   );
 
-  final driftClient = DriftElectricClient(namespace as ElectricClientImpl, db);
+  final driftClient =
+      DriftElectricClient(namespace as ElectricClientRawImpl, db);
   driftClient.init();
 
   return driftClient;
@@ -83,8 +82,10 @@ Dialect driftDialectToElectric(DatabaseConnectionUser db) {
 
 abstract interface class ElectricClient
     implements BaseElectricClient {
+  @internal
+  ElectricClientRaw get rawClient;
+
   DB get db;
-  SyncManager get syncManager;
 
   /// Subscribes to the given shape, returning a [ShapeSubscription] object which
   /// can be used to wait for the shape to sync initial data.
@@ -99,18 +100,19 @@ abstract interface class ElectricClient
   /// has caught up to the central DB's more recent state.
   ///
   /// @param i - The shape to subscribe to
+  /// @param key - An optional unique key that identifies the subscription
   /// @returns A shape subscription
   Future syncTable(
     T table, {
-    SyncIncludeBuilder? include,
-    SyncWhereBuilder? where,
+    ShapeIncludeBuilder? include,
+    ShapeWhereBuilder? where,
     String? key,
   });
 
   /// Same as [syncTable] but you would be providing table names, and foreign key
   /// relationships manually. This is more low-level and should be avoided if
   /// possible.
-  Future syncTableRaw(SyncInputRaw syncInput);
+  Future syncTableRaw(ShapeInputRaw shapeInput);
 
   /// Puts transforms in place such that any data being replicated
   /// to or from this table is first handled appropriately while
@@ -143,7 +145,11 @@ class DriftElectricClient
   @override
   SyncManager get syncManager => _baseClient.syncManager;
 
-  final ElectricClientImpl _baseClient;
+  final ElectricClientRaw _baseClient;
+
+  @override
+  @internal
+  ElectricClientRaw get rawClient => _baseClient;
 
   void Function()?
_disposeHook; @@ -244,11 +250,20 @@ class DriftElectricClient @override Satellite get satellite => _baseClient.satellite; + @override + IReplicationTransformManager get replicationTransformManager => + _baseClient.replicationTransformManager; + @override void setIsConnected(ConnectivityState connectivityState) { return _baseClient.setIsConnected(connectivityState); } + /// Connects to the Electric sync service. + /// This method is idempotent, it is safe to call it multiple times. + /// @param token - The JWT token to use to connect to the Electric sync service. + /// This token is required on first connection but can be left out when reconnecting + /// in which case the last seen token is reused. @override Future connect([String? token]) { return _baseClient.connect(token); @@ -262,8 +277,8 @@ class DriftElectricClient @override Future syncTable( T table, { - SyncIncludeBuilder? include, - SyncWhereBuilder? where, + ShapeIncludeBuilder? include, + ShapeWhereBuilder? where, String? key, }) { final shape = computeShapeForDrift( @@ -275,14 +290,12 @@ class DriftElectricClient ); // print("SHAPE ${shape.toMap()}"); - - return _baseClient.syncShapeInternal(shape, key); + return _baseClient.satellite.subscribe([shape], key); } @override - Future syncTableRaw(SyncInputRaw syncInput) async { - final shape = computeShape(syncInput); - return _baseClient.syncShapeInternal(shape, syncInput.key); + Future syncTableRaw(ShapeInputRaw shapeInput) { + return syncManager.subscribe(shapeInput); } @override @@ -292,30 +305,6 @@ class DriftElectricClient required D Function(D row) transformOutbound, Insertable Function(D)? toInsertable, }) { - // forbid transforming relation keys to avoid breaking - // referential integrity - - final tableRelations = dbDescription.getRelations(table.actualTableName); - - final outgoingRelations = - tableRelations.where((r) => r.isOutgoingRelation()); - final incomingRelations = - tableRelations.where((r) => r.isIncomingRelation()); - - // the column could be the FK column when it is an outgoing FK - // or it could be a PK column when it is an incoming FK - final fkCols = outgoingRelations.map((r) => r.fromField); - - // Incoming relations don't have the `fromField` and `toField` filled in - // so we need to fetch the `toField` from the opposite relation - // which is effectively a column in this table to which the FK points - final pkCols = incomingRelations - .map((r) => r.getOppositeRelation(dbDescription).toField); - - // Merge all columns that are part of a FK relation. - // Remove duplicate columns in case a column has both an outgoing FK and an incoming FK. 
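With the FK bookkeeping moved into the shared `setReplicationTransform` (see `transform.dart` above), the drift-level `setTableReplicationTransform` reduces to wiring up the typed row mapping. A hedged usage sketch, assuming a drift database class `AppDb` with an `items` table whose generated row class is `Item` with a non-nullable `content` column; `obfuscate`/`deobfuscate` are hypothetical helpers, not part of this diff:

```dart
// Hypothetical symmetric helpers: shift each rune up/down by one.
String obfuscate(String s) => String.fromCharCodes(s.runes.map((c) => c + 1));
String deobfuscate(String s) => String.fromCharCodes(s.runes.map((c) => c - 1));

void installContentTransform(ElectricClient<AppDb> electric) {
  electric.setTableReplicationTransform(
    electric.db.items,
    // Scramble `content` before it replicates out...
    transformOutbound: (Item row) =>
        row.copyWith(content: obfuscate(row.content)),
    // ...and restore it on the way back in. Primary key and FK columns are
    // rejected automatically via the immutable-fields check installed above.
    transformInbound: (Item row) =>
        row.copyWith(content: deobfuscate(row.content)),
  );
}
```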
- final immutableFields = {...fkCols, ...pkCols}.toList(); - final QualifiedTablename qualifiedTableName = _getQualifiedTableName(table); Insertable _getInsertable(D d) { @@ -331,33 +320,15 @@ class DriftElectricClient } } - // ignore: invalid_use_of_protected_member - _baseClient.replicationTransformManager.setTableTransform( - qualifiedTableName, - ReplicatedRowTransformer( - transformInbound: (DbRecord record) { - final dataClass = table.map(record) as D; - final out = transformTableRecord( - dataClass, - transformInbound, - immutableFields, - validateFun: (d) => validateDriftRecord(table, _getInsertable(d)), - toRecord: (d) => driftInsertableToValues(_getInsertable(d)), - ); - return driftInsertableToValues(_getInsertable(out)); - }, - transformOutbound: (DbRecord record) { - final dataClass = table.map(record) as D; - final out = transformTableRecord( - dataClass, - transformOutbound, - immutableFields, - validateFun: (d) => validateDriftRecord(table, _getInsertable(d)), - toRecord: (d) => driftInsertableToValues(_getInsertable(d)), - ); - return driftInsertableToValues(_getInsertable(out)); - }, - ), + setReplicationTransform( + dbDescription: dbDescription, + replicationTransformManager: replicationTransformManager, + qualifiedTableName: qualifiedTableName, + transformInbound: transformInbound, + transformOutbound: transformOutbound, + validateFun: (d) => validateDriftRecord(table, _getInsertable(d)), + toRecord: (d) => driftInsertableToValues(_getInsertable(d)), + fromRecord: (r) => table.map(r) as D, ); } diff --git a/packages/electricsql/lib/src/drivers/drift/schema.dart b/packages/electricsql/lib/src/drivers/drift/schema.dart index b78839ea..5460b3b9 100644 --- a/packages/electricsql/lib/src/drivers/drift/schema.dart +++ b/packages/electricsql/lib/src/drivers/drift/schema.dart @@ -1,7 +1,6 @@ import 'package:drift/drift.dart'; import 'package:electricsql/drivers/drift.dart'; import 'package:electricsql/electricsql.dart'; -import 'package:electricsql/src/client/conversions/types.dart'; import 'package:electricsql/src/client/model/schema.dart'; import 'package:electricsql/src/drivers/drift/relation.dart'; diff --git a/packages/electricsql/lib/src/drivers/drift/sync_input.dart b/packages/electricsql/lib/src/drivers/drift/sync_input.dart index a4db9dcf..3d5f7d50 100644 --- a/packages/electricsql/lib/src/drivers/drift/sync_input.dart +++ b/packages/electricsql/lib/src/drivers/drift/sync_input.dart @@ -4,41 +4,44 @@ import 'package:electricsql/src/client/model/schema.dart'; import 'package:electricsql/src/drivers/drift/drift.dart'; import 'package:electricsql/src/satellite/shapes/types.dart'; -typedef SyncIncludeBuilder = List Function( - T table, -); -typedef SyncWhereBuilder = Expression Function(T table); +@Deprecated('Use ShapeIncludeBuilder') +typedef SyncIncludeBuilder = ShapeIncludeBuilder; -class SyncInput { - final Expression? where; - final List? include; +@Deprecated('Use ShapeWhereBuilder') +typedef SyncWhereBuilder = ShapeWhereBuilder; - SyncInput({this.where, this.include}); -} +@Deprecated('Use ShapeInputRelation') +typedef SyncInputRelation = ShapeInputRelation; -class SyncInputRelation { +typedef ShapeIncludeBuilder = List + Function( + T table, +); +typedef ShapeWhereBuilder = Expression Function(T table); + +class ShapeInputRelation { final TableRelation relation; - final SyncIncludeBuilder? include; - final SyncWhereBuilder? where; + final ShapeIncludeBuilder? include; + final ShapeWhereBuilder? 
where; - static SyncInputRelation from( + static ShapeInputRelation from( TableRelation relation, { - SyncIncludeBuilder? include, - SyncWhereBuilder? where, + ShapeIncludeBuilder? include, + ShapeWhereBuilder? where, }) { - return SyncInputRelation._( + return ShapeInputRelation._( relation, include: include, where: where, ); } - SyncInputRelation._(this.relation, {this.include, this.where}); + ShapeInputRelation._(this.relation, {this.include, this.where}); - SyncIncludeBuilder? get _genericInclude => + ShapeIncludeBuilder
<Table>? get _genericInclude =>
       include == null ? null : (Table t) => include!.call(t as T);
 
-  SyncWhereBuilder<Table>? get _genericWhere =>
+  ShapeWhereBuilder<Table>
? get _genericWhere => where == null ? null : (Table t) => where!.call(t as T); } @@ -46,14 +49,20 @@ Shape computeShapeForDrift( GeneratedDatabase db, DBSchema dbDescription, T table, { - SyncIncludeBuilder? include, - SyncWhereBuilder? where, + ShapeIncludeBuilder? include, + ShapeWhereBuilder? where, }) { - final relationsToInclude = include?.call(table); - final tableInfo = findDriftTableInfo(db, table); final tableName = tableInfo.actualTableName; + if (!dbDescription.hasTable(tableName)) { + throw Exception( + "Cannot sync the requested shape. Table '$tableName' does not exist in the database schema.", + ); + } + + final relationsToInclude = include?.call(table); + final List? rels = relationsToInclude?.map((syncRel) { final relationDrift = syncRel.relation; diff --git a/packages/electricsql/lib/src/drivers/sqlite3/sqlite3.dart b/packages/electricsql/lib/src/drivers/sqlite3/sqlite3.dart index 1a014f6b..9dd36f12 100644 --- a/packages/electricsql/lib/src/drivers/sqlite3/sqlite3.dart +++ b/packages/electricsql/lib/src/drivers/sqlite3/sqlite3.dart @@ -1,9 +1,7 @@ import 'package:electricsql/drivers/sqlite3.dart'; import 'package:electricsql/electricsql.dart'; -import 'package:electricsql/src/client/model/schema.dart'; import 'package:electricsql/src/config/config.dart'; import 'package:electricsql/src/electric/electric.dart' as electrify_lib; -import 'package:electricsql/src/electric/electric.dart'; import 'package:electricsql/src/migrators/query_builder/query_builder.dart'; import 'package:electricsql/src/sockets/sockets.dart'; import 'package:sqlite3/sqlite3.dart'; diff --git a/packages/electricsql/lib/src/electric/electric.dart b/packages/electricsql/lib/src/electric/electric.dart index 64cf099a..f8720831 100644 --- a/packages/electricsql/lib/src/electric/electric.dart +++ b/packages/electricsql/lib/src/electric/electric.dart @@ -4,7 +4,6 @@ import 'package:electricsql/notifiers.dart'; import 'package:electricsql/satellite.dart'; import 'package:electricsql/sockets.dart'; import 'package:electricsql/src/client/model/client.dart'; -import 'package:electricsql/src/client/model/schema.dart'; import 'package:electricsql/src/config/config.dart'; import 'package:electricsql/src/devtools/devtools.dart' as devtools; import 'package:electricsql/util.dart'; @@ -93,7 +92,7 @@ Future electrifyBase({ ); final dialect = configWithDefaults.replication.dialect; - final electric = ElectricClientImpl.create( + final electric = ElectricClientRawImpl.create( dbName: dbName, adapter: adapter, notifier: notifier, diff --git a/packages/electricsql/lib/src/electric/index.dart b/packages/electricsql/lib/src/electric/index.dart index baa7719c..9717c038 100644 --- a/packages/electricsql/lib/src/electric/index.dart +++ b/packages/electricsql/lib/src/electric/index.dart @@ -1,3 +1,4 @@ export 'adapter.dart'; -export 'electric.dart' show ElectrifyBaseOptions, ElectrifyOptions; +export 'electric.dart' + show ElectrifyBaseOptions, ElectrifyOptions, electrifyBase; export 'namespace.dart' show ElectricNamespace; diff --git a/packages/electricsql/lib/src/satellite/index.dart b/packages/electricsql/lib/src/satellite/index.dart index 35127eee..350f50cc 100644 --- a/packages/electricsql/lib/src/satellite/index.dart +++ b/packages/electricsql/lib/src/satellite/index.dart @@ -1,3 +1,4 @@ +export '../proto/satellite.pb.dart'; export 'mock.dart' show MockRegistry; export 'process.dart' show SatelliteProcess, ShapeSubscription; export 'registry.dart' show GlobalRegistry, globalRegistry; diff --git 
diff --git a/packages/electricsql/lib/src/util/converters/helpers.dart b/packages/electricsql/lib/src/util/converters/helpers.dart
index 4faba032..f67b7e17 100644
--- a/packages/electricsql/lib/src/util/converters/helpers.dart
+++ b/packages/electricsql/lib/src/util/converters/helpers.dart
@@ -131,3 +131,20 @@ ExtractedDateTime extractDateAndTime(DateTime v) {
   final time = match.group(2)!;
   return (date: date, time: time);
 }
+
+extension BigIntExt on BigInt {
+  static final _bigIntMinValue64 = BigInt.from(-9223372036854775808);
+  static final _bigIntMaxValue64 = BigInt.from(9223372036854775807);
+
+  int rangeCheckedToInt() {
+    if (this < _bigIntMinValue64 || this > _bigIntMaxValue64) {
+      throw ArgumentError.value(
+        this,
+        'this',
+        'BigInt value exceeds the range of 64 bits',
+      );
+    }
+
+    return toInt();
+  }
+}
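Quick illustration of the new extension (values chosen for illustration; the import path is the package-internal one used above):

    import 'package:electricsql/src/util/converters/helpers.dart';

    void main() {
      print(BigInt.from(42).rangeCheckedToInt()); // 42

      final tooBig = BigInt.parse('9223372036854775808'); // 2^63, one past max int64
      try {
        tooBig.rangeCheckedToInt();
      } on ArgumentError catch (e) {
        print(e.message); // BigInt value exceeds the range of 64 bits
      }
    }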
diff --git a/packages/electricsql/lib/util.dart b/packages/electricsql/lib/util.dart
index 78005b82..1b9318e0 100644
--- a/packages/electricsql/lib/util.dart
+++ b/packages/electricsql/lib/util.dart
@@ -1 +1,2 @@
+export 'src/client/util/relations.dart';
 export 'src/util/index.dart';
diff --git a/packages/electricsql/pubspec.yaml b/packages/electricsql/pubspec.yaml
index b3f56627..f4fcd90b 100644
--- a/packages/electricsql/pubspec.yaml
+++ b/packages/electricsql/pubspec.yaml
@@ -38,6 +38,7 @@ dev_dependencies:
   file: ^7.0.0 # Lowest version supported
   http: ^1.0.0
   lint: ^2.3.0
+  mocktail: ^1.0.4
   path: ^1.9.0
   pub_semver: ^2.1.4 # Lowest version supported
   stream_channel: ^2.1.2
diff --git a/packages/electricsql/test/client/model/shapes_test.dart b/packages/electricsql/test/client/model/shapes_test.dart
index d87a13cf..4f8674d6 100644
--- a/packages/electricsql/test/client/model/shapes_test.dart
+++ b/packages/electricsql/test/client/model/shapes_test.dart
@@ -2,14 +2,13 @@
 import 'package:drift/drift.dart' hide Migrator;
 import 'package:electricsql/drivers/drift.dart';
-import 'package:electricsql/electricsql.dart';
+import 'package:electricsql/electricsql.dart' hide Relation;
 import 'package:electricsql/migrators.dart';
 import 'package:electricsql/satellite.dart';
 import 'package:electricsql/src/client/model/client.dart';
-import 'package:electricsql/src/client/model/schema.dart' hide Relation;
+import 'package:electricsql/src/client/model/sync.dart';
 import 'package:electricsql/src/drivers/drift/sync_input.dart';
 import 'package:electricsql/src/notifiers/mock.dart';
-import 'package:electricsql/src/proto/satellite.pb.dart';
 import 'package:electricsql/src/satellite/config.dart';
 import 'package:electricsql/src/satellite/mock.dart';
 import 'package:electricsql/src/util/random.dart';
@@ -207,9 +206,9 @@ void main() {
     client.setRelations(relations);
 
-    final input = SyncInputRaw(
+    final input = ShapeInputRaw(
       tableName: 'Post',
-      where: SyncWhere({
+      where: ShapeWhere({
         'OR': [
           {'id': 5},
           {'id': 42},
@@ -228,12 +227,12 @@
       include: [
         IncludeRelRaw(
           foreignKey: ['authorId'],
-          select: SyncInputRaw(
+          select: ShapeInputRaw(
             tableName: 'User',
             include: [
               IncludeRelRaw(
                 foreignKey: ['userId'],
-                select: SyncInputRaw(
+                select: ShapeInputRaw(
                   tableName: 'Profile',
                 ),
               ),
@@ -243,7 +242,7 @@
       ],
     );
 
-    final shape = computeShape(input);
+    final shape = computeShape(electric.dbDescription, input);
 
     expect(
       shape,
@@ -284,13 +283,13 @@
           (p.id.isIn([3, 2]) | p.title.like('%hello')) &
               (p.id.equals(1) | p.id.equals(2)).not(),
       include: (p) => [
-        SyncInputRelation.from(
+        ShapeInputRelation.from(
           p.$relations.author,
           // This is not allowed on the server (no filtering of many-to-one
          // relations), but we're just testing that `where` clauses on
          // nested objects are parsed correctly
          where: (u) => u.id.isSmallerThanValue(5),
          include: (u) => [
-           SyncInputRelation.from(u.$relations.profile),
+           ShapeInputRelation.from(u.$relations.profile),
          ],
        ),
      ],
@@ -358,7 +357,7 @@ Future makeContext() async {
     pgMigrations: pgMigrations,
   );
 
-  final baseElectricClient = ElectricClientImpl.create(
+  final baseElectricClient = ElectricClientRawImpl.create(
     dbName: dbName,
     adapter: adapter,
     notifier: notifier,
diff --git a/packages/electricsql/test/client/model/table_test.dart b/packages/electricsql/test/client/model/table_test.dart
index 3af4c95f..b4090530 100644
--- a/packages/electricsql/test/client/model/table_test.dart
+++ b/packages/electricsql/test/client/model/table_test.dart
@@ -38,7 +38,7 @@ void main() async {
   final client = context.client;
 
   final electric = DriftElectricClient(
-    ElectricClientImpl.create(
+    ElectricClientRawImpl.create(
       dbName: 'testDB',
       dbDescription: DBSchemaDrift(db: db, migrations: [], pgMigrations: []),
       adapter: adapter,
@@ -104,7 +104,7 @@ void main() async {
   final client = context.client;
 
   final electric = DriftElectricClient(
-    ElectricClientImpl.create(
+    ElectricClientRawImpl.create(
       dbName: 'testDB',
       dbDescription: DBSchemaDrift(db: db, migrations: [], pgMigrations: []),
       adapter: adapter,
diff --git a/packages/electricsql/test/client/model/transforms_test.dart b/packages/electricsql/test/client/model/transforms_test.dart
index 4199cbf8..4347dc80 100644
--- a/packages/electricsql/test/client/model/transforms_test.dart
+++ b/packages/electricsql/test/client/model/transforms_test.dart
@@ -1,8 +1,11 @@
 import 'package:drift/drift.dart';
+import 'package:electricsql/src/client/model/schema.dart';
 import 'package:electricsql/src/client/model/transform.dart';
 import 'package:electricsql/src/client/validation/validation.dart';
 import 'package:electricsql/src/drivers/drift/drift.dart';
+import 'package:electricsql/src/drivers/drift/schema.dart';
 import 'package:electricsql/util.dart';
+import 'package:mocktail/mocktail.dart';
 import 'package:test/test.dart';
 
 import '../drift/database.dart';
@@ -16,10 +19,13 @@ const post1 = PostData(
 );
 
 late TestsDatabase db;
+late DBSchema schema;
 
 Future main() async {
   setUp(() async {
     db = TestsDatabase.memory();
+
+    schema = DBSchemaDrift(db: db, migrations: [], pgMigrations: []);
   });
 
   tearDown(() async {
@@ -31,8 +37,10 @@ Future main() async {
   required DbRecord Function(DbRecord)? update,
   required List immutableFields,
 }) {
+  final fields = schema.getFields('Post');
+
   final origCols = driftInsertableToValues(r);
-  transformTableRecord>(
+  transformTableRecordGeneric>(
     r,
     (row) {
       final Map> updated;
@@ -48,6 +56,7 @@
       return RawValuesInsertable(updated);
     },
+    fields,
     immutableFields,
     validateFun: (d) => validateDriftRecord(db.post, d),
     toRecord: (d) => driftInsertableToValues(d),
@@ -160,4 +169,32 @@ Future main() async {
       throwsA(isA()),
     );
   });
+
+  test('setReplicationTransform throws an error if table does not exist', () {
+    expect(
+      () => setReplicationTransform(
+        dbDescription: schema,
+        replicationTransformManager: ReplicatedRowTransformManagerMock(),
+        qualifiedTableName:
+            const QualifiedTablename('main', 'non_existent_table'),
+        transformInbound: (r) => r,
+        transformOutbound: (r) => r,
+        validateFun: null,
+        toRecord: (r) => r,
+        fromRecord: (r) => r,
+      ),
+      throwsA(
+        isA().having(
+          (e) => e.toString(),
+          'message',
+          contains(
+            "Cannot set replication transform for table 'non_existent_table'. Table does not exist in the database schema.",
+          ),
+        ),
+      ),
+    );
+  });
 }
+
+class ReplicatedRowTransformManagerMock extends Mock
+    implements IReplicationTransformManager {}
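By contrast, a transform registered for a table that does exist is accepted. A minimal sketch under the same test setup (the 'Post' table and `schema` come from the fixtures above; the mock manager stands in for the real satellite wiring):

    final manager = ReplicatedRowTransformManagerMock();

    // Accepted: 'Post' is part of the schema. Outgoing rows get their
    // 'title' uppercased; incoming rows are restored to lowercase.
    setReplicationTransform(
      dbDescription: schema,
      replicationTransformManager: manager,
      qualifiedTableName: const QualifiedTablename('main', 'Post'),
      transformInbound: (r) =>
          {...r, 'title': (r['title']! as String).toLowerCase()},
      transformOutbound: (r) =>
          {...r, 'title': (r['title']! as String).toUpperCase()},
      validateFun: null,
      toRecord: (r) => r,
      fromRecord: (r) => r,
    );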
diff --git a/packages/electricsql/test/client/util/relations_test.dart b/packages/electricsql/test/client/util/relations_test.dart
new file mode 100644
index 00000000..71c4016a
--- /dev/null
+++ b/packages/electricsql/test/client/util/relations_test.dart
@@ -0,0 +1,627 @@
+import 'package:electricsql/src/client/conversions/types.dart';
+import 'package:electricsql/src/client/model/schema.dart';
+import 'package:electricsql/src/client/util/relations.dart';
+import 'package:electricsql/src/proto/satellite.pb.dart';
+import 'package:test/test.dart';
+
+late SatOpMigrate_Table otherTable;
+late SatOpMigrate_Table fooTable;
+late SatOpMigrate_Table itemsTable;
+late List tables;
+
+void main() {
+  setUp(() {
+    otherTable = SatOpMigrate_Table(
+      name: 'other',
+      columns: [
+        SatOpMigrate_Column(
+          name: 'other_id',
+          sqliteType: 'TEXT',
+          pgType: SatOpMigrate_PgColumnType(
+            name: 'text',
+            array: [],
+            size: [],
+          ),
+        ),
+      ],
+      fks: [],
+      pks: ['other_id'],
+    );
+
+    fooTable = SatOpMigrate_Table(
+      name: 'foo',
+      columns: [
+        SatOpMigrate_Column(
+          name: 'foo_id',
+          sqliteType: 'TEXT',
+          pgType: SatOpMigrate_PgColumnType(
+            name: 'text',
+            array: [],
+            size: [],
+          ),
+        ),
+        SatOpMigrate_Column(
+          name: 'otherr',
+          sqliteType: 'TEXT',
+          pgType: SatOpMigrate_PgColumnType(
+            name: 'text',
+            array: [],
+            size: [],
+          ),
+        ),
+      ],
+      fks: [
+        SatOpMigrate_ForeignKey(
+          fkCols: ['otherr'],
+          pkTable: 'other',
+          pkCols: ['other_id'],
+        ),
+      ],
+      pks: ['foo_id'],
+    );
+
+    itemsTable = SatOpMigrate_Table(
+      name: 'items',
+      columns: [
+        SatOpMigrate_Column(
+          name: 'items_id',
+          sqliteType: 'TEXT',
+          pgType: SatOpMigrate_PgColumnType(
+            name: 'text',
+            array: [],
+            size: [],
+          ),
+        ),
+        SatOpMigrate_Column(
+          name: 'other_id1',
+          sqliteType: 'TEXT',
+          pgType: SatOpMigrate_PgColumnType(
+            name: 'text',
+            array: [],
+            size: [],
+          ),
+        ),
+        SatOpMigrate_Column(
+          name: 'other_id2',
+          sqliteType: 'TEXT',
+          pgType: SatOpMigrate_PgColumnType(
+            name: 'text',
+            array: [],
+            size: [],
+          ),
+        ),
+      ],
+      fks: [
+        SatOpMigrate_ForeignKey(
+          fkCols: ['other_id1'],
+          pkTable: 'other',
+          pkCols: ['other_id'],
+        ),
+        SatOpMigrate_ForeignKey(
+          fkCols: ['other_id2'],
+          pkTable: 'other',
+          pkCols: ['other_id'],
+        ),
+      ],
+      pks: ['items_id'],
+    );
+
+    tables = [otherTable, fooTable, itemsTable];
+  });
+
+  test('createRelationsFromTable creates no relations on table without FKs',
+      () {
+    final KeyedTables keyedTables = keyBy(tables);
+    final relations = createRelationsFromTable(otherTable, keyedTables);
+    expect(
+      relations.length,
+      0,
+      reason: 'Expected no relations on table without FKs',
+    );
+  });
+
+  /*
+   * When a child table has a FK to a parent table,
+   * we create a relation from the child table to the parent table
+   * and we also create the reverse relation from the parent table to the child table.
+   * The reverse relation is needed to be able to
+   * follow the relation in both directions.
+   *
+   * If there is only a single relation from the child table to the parent table,
+   * then that relation is named after the parent table (except if there is already a column with that name).
+   * Similarly, if there is only a single relation from the parent table to the child table,
+   * then that relation is named after the child table (except if there is already a column with that name).
+   */
+  test('createRelationsFromTable creates two relations on table with one FK',
+      () {
+    final keyedTables = keyBy(tables);
+    final relations = createRelationsFromTable(fooTable, keyedTables);
+
+    // Expect two relations:
+    // one for the forward direction
+    // and one for the backward direction
+    expect(
+      relations.length,
+      2,
+      reason: 'Expected two relations on table with one FK',
+    );
+
+    // Check forward relation
+    final relation = relations['foo']!;
+    expect(
+      relation.length,
+      1,
+      reason: 'Expected one relation on table with one outgoing FK',
+    );
+
+    final [rel] = relation;
+    expect(
+      rel,
+      const Relation(
+        // 'other',
+        fromField: 'otherr',
+        toField: 'other_id',
+        relatedTable: 'other',
+        relationName: 'foo_otherrToother',
+      ),
+      reason: 'Expected relation to be created correctly',
+    );
+
+    // Check backward relation
+    final backwardRelation = relations['other']!;
+    expect(
+      backwardRelation.length,
+      1,
+      reason: 'Expected one relation for table with an incoming FK',
+    );
+
+    final [backRel] = backwardRelation;
+    expect(
+      backRel,
+      const Relation(
+        // 'foo',
+        fromField: '',
+        toField: '',
+        relatedTable: 'foo',
+        relationName: 'foo_otherrToother',
+      ),
+      reason: 'Expected relation to be created correctly',
+    );
+  });
+
+  /*
+   * This test checks that if there is a single relation from the child table to the parent table,
+   * but the child table has a column named after the parent table, then a unique relation field name is used.
+   */
+  test(
+      'createRelationsFromTable makes long relation field name if child column is named after parent table',
+      () {
+    // Name the child column after the parent table
+    fooTable.columns[1].name = 'other';
+    fooTable.fks[0].fkCols[0] = 'other';
+
+    final keyedTables = keyBy(tables);
+    final relations = createRelationsFromTable(fooTable, keyedTables);
+
+    // Expect two relations:
+    // one for the forward direction
+    // and one for the backward direction
+    expect(
+      relations.length,
+      2,
+      reason: 'Expected two relations on table with one FK',
+    );
+
+    // Check forward relation
+    final relation = relations['foo']!;
+    expect(
+      relation.length,
+      1,
+      reason: 'Expected one relation on table with one outgoing FK',
+    );
+
+    final [rel] = relation;
+    expect(
+      rel,
+      const Relation(
+        // 'other_foo_otherToother',
+        fromField: 'other',
+        toField: 'other_id',
+        relatedTable: 'other',
+        relationName: 'foo_otherToother',
+        // 'one'
+      ),
+      reason: 'Expected relation to be created correctly',
+    );
+
+    // Check backward relation
+    final backwardRelation = relations['other']!;
+    expect(
+      backwardRelation.length,
+      1,
+      reason: 'Expected one relation for table with an incoming FK',
+    );
+
+    final [backRel] = backwardRelation;
+    expect(
+      backRel,
+      const Relation(
+        // 'foo',
+        fromField: '',
+        toField: '',
+        relatedTable: 'foo',
+        relationName: 'foo_otherToother',
+        // 'many',
+      ),
+      reason: 'Expected relation to be created correctly',
+    );
+  });
+
+  /*
+   * This test checks that if there is a single relation from the child table to the parent table,
+   * and no relation from the parent table to the child table,
+   * but the parent table has a column named after the child table,
+   * then a unique relation field name is used for the reverse relation.
+   */
+  test(
+      'createRelationsFromTable makes long relation field name if parent column is named after child table',
+      () {
+    // Name the parent column after the child table
+    otherTable.columns[0].name = 'foo';
+    otherTable.pks[0] = 'foo';
+    fooTable.fks[0].pkCols[0] = 'foo';
+
+    final keyedTables = keyBy(tables);
+    final relations = createRelationsFromTable(fooTable, keyedTables);
+
+    // Expect two relations:
+    // one for the forward direction
+    // and one for the backward direction
+    expect(
+      relations.length,
+      2,
+      reason: 'Expected two relations on table with one FK',
+    );
+
+    // Check forward relation
+    final relation = relations['foo']!;
+    expect(
+      relation.length,
+      1,
+      reason: 'Expected one relation on table with one outgoing FK',
+    );
+
+    final [rel] = relation;
+    expect(
+      rel,
+      const Relation(
+        // 'other',
+        fromField: 'otherr',
+        toField: 'foo',
+        relatedTable: 'other',
+        relationName: 'foo_otherrToother',
+        // 'one',
+      ),
+      reason: 'Expected relation to be created correctly',
+    );
+
+    // Check backward relation
+    final backwardRelation = relations['other']!;
+    expect(
+      backwardRelation.length,
+      1,
+      reason: 'Expected one relation for table with an incoming FK',
+    );
+
+    final [backRel] = backwardRelation;
+    expect(
+      backRel,
+      const Relation(
+        // 'foo_foo_otherrToother',
+        fromField: '',
+        toField: '',
+        relatedTable: 'foo',
+        relationName: 'foo_otherrToother',
+        // 'many'
+      ),
+      reason: 'Expected relation to be created correctly',
+    );
+  });
+
+  /*
+   * If there are multiple relations from the child table to the parent table,
+   * then we need to create unique relation field names for each relation.
+   */
+  test(
+      'createRelationsFromTable makes long relation field name if several FKs are pointing to same parent table',
+      () {
+    final keyedTables = keyBy(tables);
+    final relations = createRelationsFromTable(itemsTable, keyedTables);
+
+    // Check forward relations
+    final relation = relations['items']!;
+    expect(
+      relation.length,
+      2,
+      reason: 'Expected two relations on table with two outgoing FKs',
+    );
+
+    final [rel1, rel2] = relation;
+    expect(
+      rel1,
+      const Relation(
+        // 'other_items_other_id1Toother',
+        fromField: 'other_id1',
+        toField: 'other_id',
+        relatedTable: 'other',
+        relationName: 'items_other_id1Toother',
+        // 'one'
+      ),
+      reason: 'Expected relation to be created correctly',
+    );
+    expect(
+      rel2,
+      const Relation(
+        // 'other_items_other_id2Toother',
+        fromField: 'other_id2',
+        toField: 'other_id',
+        relatedTable: 'other',
+        relationName: 'items_other_id2Toother',
+        // 'one'
+      ),
+      reason: 'Expected relation to be created correctly',
+    );
+
+    // Check backward relations
+    final backwardRelation = relations['other']!;
+    expect(
+      backwardRelation.length,
+      2,
+      reason: 'Expected two relations for table with an incoming FK',
+    );
+
+    final [backRel1, backRel2] = backwardRelation;
+    expect(
+      backRel1,
+      const Relation(
+        // 'items_items_other_id1Toother',
+        fromField: '',
+        toField: '',
+        relatedTable: 'items',
+        relationName: 'items_other_id1Toother',
+        // 'many'
+      ),
+      reason: 'Expected relation to be created correctly',
+    );
+    expect(
+      backRel2,
+      const Relation(
+        // 'items_items_other_id2Toother',
+        fromField: '',
+        toField: '',
+        relatedTable: 'items',
+        relationName: 'items_other_id2Toother',
+        // 'many'
+      ),
+      reason: 'Expected relation to be created correctly',
+    );
+  });
+
+  /*
+   * If we are creating a relation for a FK pointing from the child table to the parent table,
+   * and the parent table also has a FK from parent to child table,
+   * then there are 2 possible ways to go from the parent table to the child table:
+   *  1. Follow the FK from parent to child table
+   *  2. Follow the FK from child to parent table in reverse direction
+   * To avoid this ambiguity, we introduce unique relation field names.
+   * This test checks that this case is detected and a unique name is constructed.
+   */
+  test(
+      'createRelationsFromTable makes long relation field name if parent table has a FK to the child table',
+      () {
+    // Extend the parent table `other` with a FK to the child table `foo`
+    final fIdColPointingToFoo = SatOpMigrate_Column(
+      name: 'f_id',
+      sqliteType: 'TEXT',
+      pgType: SatOpMigrate_PgColumnType(
+        name: 'text',
+        array: [],
+        size: [],
+      ),
+    );
+
+    final fk = SatOpMigrate_ForeignKey(
+      fkCols: ['f_id'],
+      pkTable: 'foo',
+      pkCols: ['foo_id'],
+    );
+
+    otherTable.columns.add(fIdColPointingToFoo);
+    otherTable.fks.add(fk);
+
+    // Generate relations from the FKs of the `foo` table
+    final keyedTables = keyBy(tables);
+    final relations = createRelationsFromTable(fooTable, keyedTables);
+
+    // Check forward relation
+    final relation = relations['foo']!;
+    expect(
+      relation.length,
+      1,
+      reason: 'Expected one relation on table with one outgoing FK',
+    );
+
+    final [rel] = relation;
+    expect(
+      rel,
+      const Relation(
+        // 'other_foo_otherrToother',
+        fromField: 'otherr',
+        toField: 'other_id',
+        relatedTable: 'other',
+        relationName: 'foo_otherrToother',
+        // 'one'
+      ),
+      reason: 'Expected relation to be created correctly',
+    );
+
+    // Check backward relation
+    final backwardRelation = relations['other']!;
+    expect(
+      backwardRelation.length,
+      1,
+      reason: 'Expected one relation for table with an incoming FK',
+    );
+
+    final [backRel] = backwardRelation;
+    expect(
+      backRel,
+      const Relation(
+        // 'foo_foo_otherrToother',
+        fromField: '',
+        toField: '',
+        relatedTable: 'foo',
+        relationName: 'foo_otherrToother',
+        // 'many'
+      ),
+      reason: 'Expected relation to be created correctly',
+    );
+  });
+
+  test('createRelationsFromAllTables aggregates all relations', () {
+    final relations = createRelationsFromAllTables(tables);
+
+    expect(relations, {
+      'foo': [
+        const Relation(
+          // 'other',
+          fromField: 'otherr',
+          toField: 'other_id',
+          relatedTable: 'other',
+          relationName: 'foo_otherrToother',
+          // 'one'
+        ),
+      ],
+      'other': [
+        const Relation(
+          // 'foo',
+          fromField: '',
+          toField: '',
+          relatedTable: 'foo',
+          relationName: 'foo_otherrToother',
+          // 'many'
+        ),
+        const Relation(
+          // 'items_items_other_id1Toother',
+          fromField: '',
+          toField: '',
+          relatedTable: 'items',
+          relationName: 'items_other_id1Toother',
+          // 'many'
+        ),
+        const Relation(
+          // 'items_items_other_id2Toother',
+          fromField: '',
+          toField: '',
+          relatedTable: 'items',
+          relationName: 'items_other_id2Toother',
+          // 'many'
+        ),
+      ],
+      'items': [
+        const Relation(
+          // 'other_items_other_id1Toother',
+          fromField: 'other_id1',
+          toField: 'other_id',
+          relatedTable: 'other',
+          relationName: 'items_other_id1Toother',
+          // 'one'
+        ),
+        const Relation(
+          // 'other_items_other_id2Toother',
+          fromField: 'other_id2',
+          toField: 'other_id',
+          relatedTable: 'other',
+          relationName: 'items_other_id2Toother',
+          // 'one'
+        ),
+      ],
+    });
+  });
+
+  test('createDbDescription creates a DbSchema from tables', () {
+    final dbDescription = createDbDescription(tables);
+    expect(dbDescription.tableSchemas, {
+      'foo': const TableSchema(
+        fields: {
+          'foo_id': PgType.text,
+          'otherr': PgType.text,
+        },
+        relations: [
+          Relation(
+            // 'other',
+            fromField: 'otherr',
+            toField: 'other_id',
+            relatedTable: 'other',
+            relationName: 'foo_otherrToother',
+            // 'one'
+          ),
+        ],
+      ),
+      'other': const TableSchema(
+        fields: {'other_id': PgType.text},
+        relations: [
+          Relation(
+            // 'foo',
+            fromField: '',
+            toField: '',
+            relatedTable: 'foo',
+            relationName: 'foo_otherrToother',
+            // 'many'
+          ),
+          Relation(
+            // 'items_items_other_id1Toother',
+            fromField: '',
+            toField: '',
+            relatedTable: 'items',
+            relationName: 'items_other_id1Toother',
+            // 'many'
+          ),
+          Relation(
+            // 'items_items_other_id2Toother',
+            fromField: '',
+            toField: '',
+            relatedTable: 'items',
+            relationName: 'items_other_id2Toother',
+            // 'many'
+          ),
+        ],
+      ),
+      'items': const TableSchema(
+        fields: {
+          'items_id': PgType.text,
+          'other_id1': PgType.text,
+          'other_id2': PgType.text,
+        },
+        relations: [
+          Relation(
+            // 'other_items_other_id1Toother',
+            fromField: 'other_id1',
+            toField: 'other_id',
+            relatedTable: 'other',
+            relationName: 'items_other_id1Toother',
+            // 'one'
+          ),
+          Relation(
+            // 'other_items_other_id2Toother',
+            fromField: 'other_id2',
+            toField: 'other_id',
+            relatedTable: 'other',
+            relationName: 'items_other_id2Toother',
+            // 'one'
+          ),
+        ],
+      ),
+    });
+  });
+}
+
+KeyedTables keyBy(List tables) {
+  return Map.fromEntries(tables.map((table) => MapEntry(table.name, table)));
+}
diff --git a/packages/electricsql/test/satellite/client_test.dart b/packages/electricsql/test/satellite/client_test.dart
index 76c01966..bc914822 100644
--- a/packages/electricsql/test/satellite/client_test.dart
+++ b/packages/electricsql/test/satellite/client_test.dart
@@ -271,7 +271,7 @@ void main() {
   test('receive transaction over multiple messages', () async {
     await connectAndAuth();
 
-    final dbDescription = DBSchemaRaw(
+    const dbDescription = DBSchemaRaw(
       tableSchemas: {
         'table': TableSchema(
           fields: {
@@ -711,7 +711,7 @@
       ],
     );
 
-    final TableSchema tbl = TableSchema(
+    const TableSchema tbl = TableSchema(
       fields: {
         'id': PgType.uuid,
         'content': PgType.varchar,
@@ -723,7 +723,7 @@
       relations: [],
     );
 
-    final dbDescription = DBSchemaRaw(
+    const dbDescription = DBSchemaRaw(
       tableSchemas: {
         'table': tbl,
         'Items': tbl,
@@ -1094,7 +1094,7 @@
     const tablename = 'THE_TABLE_ID';
 
-    final TableSchema tbl = TableSchema(
+    const TableSchema tbl = TableSchema(
       fields: {
         'name1': PgType.text,
         'name2': PgType.text,
@@ -1102,7 +1102,7 @@
       relations: [],
     );
 
-    final dbDescription = DBSchemaRaw(
+    const dbDescription = DBSchemaRaw(
       tableSchemas: {
         'table': tbl,
         tablename: tbl,
@@ -1204,7 +1204,7 @@
   test('client correctly handles additional data messages', () async {
     await connectAndAuth();
 
-    final dbDescription = DBSchemaRaw(
+    const dbDescription = DBSchemaRaw(
       tableSchemas: {
         'table': TableSchema(
           fields: {
diff --git a/packages/electricsql/test/satellite/common.dart b/packages/electricsql/test/satellite/common.dart
index 9c6c5088..006914ac 100644
--- a/packages/electricsql/test/satellite/common.dart
+++ b/packages/electricsql/test/satellite/common.dart
@@ -1,12 +1,10 @@
 import 'dart:io';
 
 import 'package:drift/drift.dart' show DatabaseConnectionUser;
-import 'package:electricsql/electricsql.dart';
+import 'package:electricsql/electricsql.dart' hide Relation;
 import 'package:electricsql/migrators.dart';
 import 'package:electricsql/satellite.dart';
-import 'package:electricsql/src/client/conversions/types.dart';
 import 'package:electricsql/src/client/model/client.dart';
-import 'package:electricsql/src/client/model/schema.dart' hide Relation;
 import 'package:electricsql/src/drivers/drift/drift_adapter.dart';
 import 'package:electricsql/src/drivers/sqlite3/sqlite3_adapter.dart';
 import 'package:electricsql/src/migrators/bundle.dart';
@@ -14,7 +12,6 @@
 import 'package:electricsql/src/migrators/schema.dart';
 import 'package:electricsql/src/migrators/triggers.dart';
 import 'package:electricsql/src/notifiers/index.dart';
 import 'package:electricsql/src/notifiers/mock.dart';
-import 'package:electricsql/src/proto/satellite.pb.dart';
 import 'package:electricsql/src/satellite/config.dart';
 import 'package:electricsql/src/satellite/mock.dart';
 import 'package:electricsql/src/util/random.dart';
@@ -33,7 +30,7 @@ SatelliteOpts opts(String namespace) => satelliteDefaults(namespace).copyWith(
       pollingInterval: const Duration(milliseconds: 200),
     );
 
-DBSchema kTestDbDescription = DBSchemaRaw(
+const DBSchema kTestDbDescription = DBSchemaRaw(
   tableSchemas: {
     'child': TableSchema(
       fields: {
@@ -417,14 +414,14 @@ Future mockElectricClient(
   registry.satellites[dbName] = satellite;
 
   // Mock Electric client that does not contain the DAL
-  final electric = ElectricClientImpl.internal(
+  final electric = ElectricClientRawImpl.internal(
     dbName: dbName,
     adapter: adapter,
     notifier: notifier,
     registry: registry,
     satellite: satellite,
     dbDescription:
-        DBSchemaRaw(tableSchemas: {}, migrations: [], pgMigrations: []),
+        const DBSchemaRaw(tableSchemas: {}, migrations: [], pgMigrations: []),
     dialect: Dialect.sqlite,
   );
diff --git a/packages/electricsql/test/satellite/registry_test.dart b/packages/electricsql/test/satellite/registry_test.dart
index ec57da90..8a49cfbe 100644
--- a/packages/electricsql/test/satellite/registry_test.dart
+++ b/packages/electricsql/test/satellite/registry_test.dart
@@ -16,7 +16,7 @@
 const dbName = 'test.db';
 
 final DatabaseAdapter adapter = MockDatabaseAdapter();
-final DBSchema dbDescription =
+const DBSchema dbDescription =
     DBSchemaRaw(tableSchemas: {}, migrations: [], pgMigrations: []);
 final Migrator migrator = MockMigrator(queryBuilder: kSqliteQueryBuilder);
 final SocketFactory socketFactory = WebSocketIOFactory();
diff --git a/packages/electricsql/test/satellite/serialization.dart b/packages/electricsql/test/satellite/serialization.dart
index 62e1cb3e..c22b5514 100644
--- a/packages/electricsql/test/satellite/serialization.dart
+++ b/packages/electricsql/test/satellite/serialization.dart
@@ -49,7 +49,7 @@ void serializationTests({
       ],
     );
 
-    final dbDescription = DBSchemaRaw(
+    const dbDescription = DBSchemaRaw(
       tableSchemas: {
         'table': TableSchema(
           fields: {
@@ -218,7 +218,7 @@
       ],
     );
 
-    final dbDescription = DBSchemaRaw(
+    const dbDescription = DBSchemaRaw(
       tableSchemas: {
         'table': TableSchema(
           fields: {
@@ -278,7 +278,7 @@
     expect(boolColumn.type, 'INTEGER');
 
     // Db schema holds the correct Postgres types
-    final boolsDbDescription = DBSchemaRaw(
+    const boolsDbDescription = DBSchemaRaw(
       tableSchemas: {
         'bools': TableSchema(
           fields: {
@@ -334,7 +334,7 @@
     expect(sqliteInferredRelations.length, 0);
 
     // Empty Db schema
-    final testDbDescription = DBSchemaRaw(
+    const testDbDescription = DBSchemaRaw(
       tableSchemas: {},
       migrations: [],
       pgMigrations: [],
diff --git a/packages/electricsql_cli/.pubignore b/packages/electricsql_cli/.pubignore
index b850fbbb..425d72b9 100644
--- a/packages/electricsql_cli/.pubignore
+++ b/packages/electricsql_cli/.pubignore
@@ -1 +1,2 @@
-test/fixtures/expected_drift_gen_code.dart
\ No newline at end of file
+test/fixtures/expected_drift_gen_code.dart
+test/fixtures/expected_raw_schema_gen_code.dart
\ No newline at end of file
diff --git a/packages/electricsql_cli/analysis_options.yaml b/packages/electricsql_cli/analysis_options.yaml
index fd87adba..e8137b0b 100644
--- a/packages/electricsql_cli/analysis_options.yaml
+++ b/packages/electricsql_cli/analysis_options.yaml
@@ -3,4 +3,5 @@ include: ../../analysis_options.yaml
 analyzer:
   exclude:
     - test/fixtures/expected_drift_gen_code.dart
+    - test/fixtures/expected_raw_schema_gen_code.dart
diff --git a/packages/electricsql_cli/lib/src/commands/generate/builder.dart b/packages/electricsql_cli/lib/src/commands/generate/builder.dart
index d757302d..60365f33 100644
--- a/packages/electricsql_cli/lib/src/commands/generate/builder.dart
+++ b/packages/electricsql_cli/lib/src/commands/generate/builder.dart
@@ -3,8 +3,13 @@
 import 'dart:io';
 
 import 'package:code_builder/code_builder.dart';
 import 'package:dart_style/dart_style.dart';
+import 'package:electricsql/electricsql.dart';
 import 'package:electricsql/migrators.dart';
+import 'package:electricsql/satellite.dart';
+import 'package:electricsql/util.dart';
 import 'package:electricsql_cli/src/commands/generate/builder/enums.dart';
+import 'package:electricsql_cli/src/commands/generate/builder/migrations.dart';
+import 'package:electricsql_cli/src/commands/generate/builder/raw_schema.dart';
 import 'package:electricsql_cli/src/commands/generate/builder/relations.dart';
 import 'package:electricsql_cli/src/commands/generate/builder/util.dart';
 import 'package:electricsql_cli/src/commands/generate/drift_gen_opts.dart';
@@ -12,13 +17,35 @@
 import 'package:electricsql_cli/src/commands/generate/drift_schema.dart';
 import 'package:electricsql_cli/src/drift_gen_util.dart';
 import 'package:path/path.dart' as path;
 
-Future buildMigrations(
+Future buildRawSchema(DBSchema dbDescription, File schemaFile) async {
+  final outParent = schemaFile.parent;
+  if (!outParent.existsSync()) {
+    await outParent.create(recursive: true);
+  }
+
+  final contents = generateRawSchemaDartCode(dbDescription);
+
+  await schemaFile.writeAsString(contents);
+}
+
+String generateRawSchemaDartCode(DBSchema dbDescription) {
+  return _buildLibCode(
+    (b) => b
+      ..body.addAll([
+        getElectricMigrationsField(),
+        getRawElectricDBSchemaCodeField(dbDescription),
+      ]),
+  );
+}
+
+Future buildMigrations(
   Directory migrationsFolder,
   File migrationsFile,
   QueryBuilder builder, {
   required String constantName,
 }) async {
-  final migrations = await loadMigrations(migrationsFolder, builder);
+  final migrationsAndSchema = await loadMigrations(migrationsFolder, builder);
+  final migrations = migrationsAndSchema.migrations;
 
   final outParent = migrationsFile.parent;
   if (!outParent.existsSync()) {
@@ -30,9 +57,15 @@
 
   // Update the configuration file
   await migrationsFile.writeAsString(contents);
+
+  return migrationsAndSchema.dbDescription;
 }
 
-Future> loadMigrations(
+/// Loads all migrations that are present in the provided migrations folder,
+/// and builds a database description from them.
+/// @param migrationsFolder Folder where migrations are stored.
+/// @returns An object containing a list of migrations as well as a database schema describing the tables.
+Future<({List migrations, DBSchema dbDescription})> loadMigrations(
   Directory migrationsFolder,
   QueryBuilder builder,
 ) async {
@@ -44,9 +77,32 @@
   final migrationsMetadatas = await Future.wait(
     migrationFiles.map(_readMetadataFile),
   );
-  return migrationsMetadatas
-      .map((data) => makeMigration(data, builder))
-      .toList();
+
+  // Aggregate table information from all migrations
+  // and create the database description
+  final tables = aggregateTableInfo(migrationsMetadatas);
+  final dbDescription = createDbDescription(tables);
+  return (
+    migrations: migrationsMetadatas
+        .map((data) => makeMigration(data, builder))
+        .toList(),
+    dbDescription: dbDescription,
+  );
+}
+
+List aggregateTableInfo(List migrations) {
+  final tables = {};
+  for (final migration in migrations) {
+    for (final satOpMigrate in migration.ops) {
+      if (satOpMigrate.hasTable()) {
+        final tbl = satOpMigrate.table;
+        // Table information from later migrations
+        // overwrites information from earlier migrations
+        tables[tbl.name] = tbl;
+      }
+    }
+  }
+  return tables.values.toList();
+}
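Callers consume the new record return type with named-field destructuring. A short sketch (it mirrors the updated builder_test further down; the folder path is illustrative):

    final (:migrations, :dbDescription) =
        await loadMigrations(Directory('migrations'), kSqliteQueryBuilder);

    print(migrations.map((m) => m.version)); // e.g. (20230613112725_814, ...)
    print(dbDescription.tableSchemas.keys);  // tables aggregated across migrations

Because `aggregateTableInfo` keys tables by name, a table redefined in a later migration simply replaces its earlier entry.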
 /// Reads the specified metadata file.
@@ -123,6 +179,7 @@ String _buildLibCode(void Function(LibraryBuilder b) updateLib) {
       'always_use_package_imports',
       'depend_on_referenced_packages',
       'prefer_double_quotes',
+      'require_trailing_commas',
     ]);
     updateLib(b);
   },
@@ -164,7 +221,7 @@ String generateDriftSchemaDartCode(DriftSchemaInfo driftSchemaInfo) {
     (b) => b
       ..body.addAll(
         [
-          _getElectricMigrationsField(),
+          getElectricMigrationsField(),
           _getElectrifiedTablesField(tableClasses),
           ...tableClasses,
           if (electricEnums.isNotEmpty) ...[
@@ -262,31 +319,6 @@ Field _getElectrifiedTablesField(List tableClasses) {
   );
 }
 
-Field _getElectricMigrationsField() {
-  /*
-  const kElectricMigrations = ElectricMigrations(
-    sqliteMigrations: kSqliteMigrations,
-    pgMigrations: kPostgresMigrations,
-  );
-  */
-
-  final electricMigrationsRef = refer('ElectricMigrations', kElectricSqlImport);
-  final sqliteMigrationsRef =
-      refer('kSqliteMigrations', './$kSqliteMigrationsFileName');
-  final pgMigrationsRef =
-      refer('kPostgresMigrations', './$kPostgresMigrationsFileName');
-
-  return Field(
-    (b) => b
-      ..name = 'kElectricMigrations'
-      ..modifier = FieldModifier.constant
-      ..assignment = electricMigrationsRef.newInstance([], {
-        'sqliteMigrations': sqliteMigrationsRef,
-        'pgMigrations': pgMigrationsRef,
-      }).code,
-  );
-}
-
 Method? _getPrimaryKeyGetter(DriftTableInfo tableInfo) {
   final primaryKeyCols = tableInfo.columns.where((c) => c.isPrimaryKey);
   if (primaryKeyCols.isNotEmpty) {
diff --git a/packages/electricsql_cli/lib/src/commands/generate/builder/migrations.dart b/packages/electricsql_cli/lib/src/commands/generate/builder/migrations.dart
new file mode 100644
index 00000000..c7b00385
--- /dev/null
+++ b/packages/electricsql_cli/lib/src/commands/generate/builder/migrations.dart
@@ -0,0 +1,33 @@
+import 'package:code_builder/code_builder.dart';
+import 'package:electricsql_cli/src/commands/generate/builder/util.dart';
+
+const kElectricMigrationsFieldName = 'kElectricMigrations';
+
+Field getElectricMigrationsField() {
+  /*
+  const kElectricMigrations = ElectricMigrations(
+    sqliteMigrations: kSqliteMigrations,
+    pgMigrations: kPostgresMigrations,
+  );
+  */
+
+  final electricMigrationsRef = refer('ElectricMigrations', kElectricSqlImport);
+
+  return Field(
+    (b) => b
+      ..name = 'kElectricMigrations'
+      ..modifier = FieldModifier.constant
+      ..assignment = electricMigrationsRef.newInstance([], {
+        'sqliteMigrations': getSqliteMigrationsRef(),
+        'pgMigrations': getPgMigrationsRef(),
+      }).code,
+  );
+}
+
+Reference getSqliteMigrationsRef() {
+  return refer('kSqliteMigrations', './$kSqliteMigrationsFileName');
+}
+
+Reference getPgMigrationsRef() {
+  return refer('kPostgresMigrations', './$kPostgresMigrationsFileName');
+}
diff --git a/packages/electricsql_cli/lib/src/commands/generate/builder/raw_schema.dart b/packages/electricsql_cli/lib/src/commands/generate/builder/raw_schema.dart
new file mode 100644
index 00000000..ceb6dcca
--- /dev/null
+++ b/packages/electricsql_cli/lib/src/commands/generate/builder/raw_schema.dart
@@ -0,0 +1,63 @@
+import 'package:code_builder/code_builder.dart';
+import 'package:electricsql/electricsql.dart';
+import 'package:electricsql_cli/src/commands/generate/builder/migrations.dart';
+import 'package:electricsql_cli/src/commands/generate/builder/util.dart';
+
+Field getRawElectricDBSchemaCodeField(DBSchema dbDescription) {
+  /*
+  final dbDescription = DBSchemaRaw(
+    tableSchemas: {
+      'table': TableSchema(
+        fields: {
+          'name1': PgType.text,
+          'name2': PgType.text,
+        },
+        relations: [],
+      ),
+    },
+    migrations: [],
+    pgMigrations: [],
+  );
+  */
+
+  final dbSchemaRawRef = refer('DBSchemaRaw', kElectricSqlImport);
+  final tableSchemaRef = refer('TableSchema', kElectricSqlImport);
+  final pgTypeRef = refer('PgType', kElectricSqlImport);
+
+  // Global const immutable field for the schema
+  final dbSchemaField = Field(
+    (b) => b
+      ..name = 'kDbSchema'
+      ..type = refer('DBSchema', kElectricSqlImport)
+      ..modifier = FieldModifier.constant
+      ..assignment = dbSchemaRawRef.newInstance([], {
+        'tableSchemas': literalMap(
+          dbDescription.tableSchemas.map((tableName, tableSchema) {
+            return MapEntry(
+              tableName,
+              tableSchemaRef.newInstance([], {
+                'fields': literalMap({
+                  for (final entry in tableSchema.fields.entries)
+                    entry.key: pgTypeRef.property(entry.value.name),
+                }),
+                'relations': literalList(
+                  tableSchema.relations.map((relation) {
+                    return refer('Relation', kElectricSqlImport)
+                        .newInstance([], {
+                      'fromField': literalString(relation.fromField),
+                      'toField': literalString(relation.toField),
+                      'relationName': literalString(relation.relationName),
+                      'relatedTable': literalString(relation.relatedTable),
+                    });
+                  }),
+                ),
+              }).code,
+            );
+          }),
+        ),
+        'migrations': getSqliteMigrationsRef(),
+        'pgMigrations': getPgMigrationsRef(),
+      }).code,
+  );
+  return dbSchemaField;
+}
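A small sketch of driving these builders directly (it mirrors generate_raw_schema_test.dart below; the output path is illustrative):

    const dbDescription = DBSchemaRaw(
      tableSchemas: {
        'table2': TableSchema(fields: {'col3': PgType.bool}, relations: []),
      },
      migrations: [],
      pgMigrations: [],
    );

    // Writes a standalone schema.dart (creating parent folders if needed)
    // that declares kElectricMigrations and kDbSchema.
    await buildRawSchema(
      dbDescription,
      File('lib/generated/electric/schema.dart'),
    );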
diff --git a/packages/electricsql_cli/lib/src/commands/generate/command.dart b/packages/electricsql_cli/lib/src/commands/generate/command.dart
index d152697a..c7dcefdd 100644
--- a/packages/electricsql_cli/lib/src/commands/generate/command.dart
+++ b/packages/electricsql_cli/lib/src/commands/generate/command.dart
@@ -3,6 +3,7 @@
 import 'dart:math';
 
 import 'package:archive/archive_io.dart';
 import 'package:args/command_runner.dart';
+import 'package:electricsql/electricsql.dart';
 import 'package:electricsql/migrators.dart';
 import 'package:electricsql_cli/src/commands/command_util.dart';
 import 'package:electricsql_cli/src/commands/configure/command_with_config.dart';
@@ -22,6 +23,7 @@
 import 'package:path/path.dart' as path;
 
 const String defaultDriftSchemaFileName = 'drift_schema.dart';
 const bool _defaultDebug = false;
+const bool _defaultWithDal = true;
 
 /// {@template sample_command}
 ///
@@ -63,8 +65,16 @@ More information at: https://drift.simonbinder.eu/docs/getting-started/advanced_
Optional flag to enable debug mode

When enabled, the temporary migration files used to generate the client will be retained for inspection.''',
-        defaultsTo: false,
+        defaultsTo: _defaultDebug,
         negatable: false,
+      )
+      ..addOption(
+        'with-dal',
+        help: '''
+Optional flag to disable generation of the Electric client.
+
+Defaults to true. When set to false, only the migrations and a minimal database description are generated, but no DAL.''',
+        defaultsTo: 'true',
       );
   }
 
@@ -86,6 +96,8 @@
     final bool int8AsBigInt = opts['int8-as-bigint']! as bool;
     final bool debug = opts['debug']! as bool;
+    final String withDalRaw = opts['with-dal']! as String;
+    final bool withDal = withDalRaw != 'false';
     final String? withMigrations = opts['with-migrations'] as String?;
 
     final _cliDriftGenOpts = _CLIDriftGenOpts(
@@ -98,6 +110,7 @@
       proxy: config.read('PROXY'),
       debug: debug,
       withMigrations: withMigrations,
+      withDal: withDal,
       logger: _logger,
       driftSchemaGenOpts: _cliDriftGenOpts,
     );
@@ -118,6 +131,7 @@ Future runElectricCodeGeneration({
   ElectricDriftGenOpts? driftSchemaGenOpts,
   bool? debug,
   String? withMigrations,
+  bool? withDal,
   Logger? logger,
 }) async {
   final finalLogger = logger ?? Logger();
@@ -148,6 +162,7 @@
     driftSchemaGenOpts: driftSchemaGenOpts,
     withMigrations: withMigrations,
     debug: debug ?? _defaultDebug,
+    withDal: withDal ?? _defaultWithDal,
     logger: finalLogger,
   );
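If you drive the generator from a build script rather than the CLI, the new parameter is reachable there too. A hedged sketch (only the parameters visible in this diff are shown; whatever project or config arguments `runElectricCodeGeneration` also requires are elided):

    await runElectricCodeGeneration(
      // ...project/config arguments elided...
      withDal: false, // bundle migrations + raw schema.dart, skip the Drift DAL
      debug: false,
    );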
@@ -206,6 +221,7 @@ class _GeneratorOpts {
     required this.config,
     required this.debug,
     required this.withMigrations,
+    required this.withDal,
     required this.driftSchemaGenOpts,
     required this.logger,
   });
@@ -214,6 +230,7 @@ class _GeneratorOpts {
   final ElectricDriftGenOpts? driftSchemaGenOpts;
   final String? withMigrations;
   final bool debug;
+  final bool withDal;
   final Logger logger;
 }
 
@@ -287,7 +304,6 @@ Future _runGenerator(_GeneratorOpts opts) async {
 
 Future _runGeneratorInner(_GeneratorOpts opts) async {
   final logger = opts.logger;
-  final config = opts.config;
 
   final currentDir = Directory.current;
@@ -298,61 +314,27 @@
   bool generationFailed = false;
   try {
-    final buildSqliteMigrations = await bundleMigrationsFor(
-      Dialect.sqlite,
-      opts,
-      tmpDir: tmpDir,
-    );
+    // Build and bundle the SQLite and PG migrations
+    final dbDescription = await buildAndBundleMigrations(opts, tmpDir);
 
-    final buildPgMigrations = await bundleMigrationsFor(
-      Dialect.postgres,
-      opts,
-      tmpDir: tmpDir,
-    );
-
-    final prismaCLIDir =
-        await Directory(path.join(tmpDir.path, 'prisma-cli')).create();
-    final prismaCLI = PrismaCLI(logger: logger, folder: prismaCLIDir);
-
-    await wrapWithProgress(
-      logger,
-      () => prismaCLI.install(),
-      progressMsg: 'Installing Prisma CLI via Docker',
-      completeMsg: 'Prisma CLI installed',
-    );
-
-    final prismaSchema =
-        await createIntrospectionSchema(tmpDir, config: config);
-
-    // Introspect the created DB to update the Prisma schema
-    await wrapWithProgress(
-      logger,
-      () => introspectDB(prismaCLI, prismaSchema),
-      progressMsg: 'Introspecting database',
-      completeMsg: 'Database introspected',
-    );
-
-    // print(prismaSchemaContent);
-
-    final outFolder = config.read('CLIENT_PATH');
-
-    // Generate the Electric client from the given introspected schema
-    await wrapWithProgress(
-      logger,
-      () => _generateClient(prismaSchema, outFolder, opts),
-      progressMsg: 'Generating Drift DB schema',
-      completeMsg: 'Drift DB schema generated',
-    );
+    if (opts.withDal) {
+      // Generate Electric client
+      await introspectDbAndGenerateClient(opts, tmpDir, dbDescription);
+    }
 
-    await wrapWithProgress(
-      logger,
-      () async {
-        await buildSqliteMigrations();
-        await buildPgMigrations();
-      },
-      progressMsg: 'Generating bundled migrations',
-      completeMsg: 'Bundled migrations generated',
-    );
+    if (!opts.withDal) {
+      // User doesn't want an Electric client
+      // Write the minimal database description to a file
+      opts.logger.info('Generating database schema...');
+      final outFolder = Directory(opts.config.read('CLIENT_PATH'));
+
+      await wrapWithProgress(
+        logger,
+        () => bundleDbDescription(dbDescription, outFolder),
+        progressMsg: 'Generating database schema...',
+        completeMsg: 'Successfully generated database schema',
+      );
+    }
   } catch (e) {
     generationFailed = true;
     logger.err('generate command failed: $e');
@@ -364,7 +346,15 @@
   }
 }
 
-Future Function()> bundleMigrationsFor(
+Future bundleDbDescription(
+  DBSchema dbDescription,
+  Directory outFolder,
+) async {
+  final dbDescriptionFile = File(path.join(outFolder.path, 'schema.dart'));
+  await buildRawSchema(dbDescription, dbDescriptionFile);
+}
+
+Future Function()> bundleMigrationsFor(
   Dialect dialect,
   _GeneratorOpts opts, {
   required Directory tmpDir,
@@ -388,7 +378,7 @@
       dialect == Dialect.sqlite ? kSqliteQueryBuilder : kPostgresQueryBuilder;
 
   return () async {
-    await buildMigrations(
+    return await buildMigrations(
       migrationsDir,
       migrationsFile,
       builder,
@@ -399,6 +389,80 @@
   };
 }
 
+Future buildAndBundleMigrations(
+  _GeneratorOpts opts,
+  Directory tmpDir,
+) async {
+  final logger = opts.logger;
+
+  final buildSqliteMigrations = await bundleMigrationsFor(
+    Dialect.sqlite,
+    opts,
+    tmpDir: tmpDir,
+  );
+
+  final buildPgMigrations = await bundleMigrationsFor(
+    Dialect.postgres,
+    opts,
+    tmpDir: tmpDir,
+  );
+
+  late DBSchema dbDescription;
+  await wrapWithProgress(
+    logger,
+    () async {
+      dbDescription = await buildSqliteMigrations();
+      await buildPgMigrations();
+    },
+    progressMsg: 'Generating bundled migrations',
+    completeMsg: 'Bundled migrations generated',
+  );
+
+  return dbDescription;
+}
+
+Future introspectDbAndGenerateClient(
+  _GeneratorOpts opts,
+  Directory tmpDir,
+  DBSchema dbDescription,
+) async {
+  final logger = opts.logger;
+  final config = opts.config;
+
+  final prismaCLIDir =
+      await Directory(path.join(tmpDir.path, 'prisma-cli')).create();
+  final prismaCLI = PrismaCLI(logger: logger, folder: prismaCLIDir);
+
+  await wrapWithProgress(
+    logger,
+    () => prismaCLI.install(),
+    progressMsg: 'Installing Prisma CLI via Docker',
+    completeMsg: 'Prisma CLI installed',
+  );
+
+  final prismaSchema = await createIntrospectionSchema(tmpDir, config: config);
+
+  // Introspect the created DB to update the Prisma schema
+  await wrapWithProgress(
+    logger,
+    () => introspectDB(prismaCLI, prismaSchema),
+    progressMsg: 'Introspecting database',
+    completeMsg: 'Database introspected',
+  );
+
+  // print(prismaSchemaContent);
+
+  final outFolder = config.read('CLIENT_PATH');
+
+  // Generate the Electric client from the given introspected schema
+  await wrapWithProgress(
+    logger,
+    () => _generateClient(prismaSchema, outFolder, opts),
+    progressMsg: 'Generating Drift DB schema',
+    completeMsg: 'Drift DB schema generated',
+  );
+}
+
 Future _generateClient(
   File prismaSchema,
   String outFolder,
diff --git a/packages/electricsql_cli/test/fixtures/expected_drift_gen_code.dart b/packages/electricsql_cli/test/fixtures/expected_drift_gen_code.dart
index cf3987de..0028af1d 100644
--- a/packages/electricsql_cli/test/fixtures/expected_drift_gen_code.dart
+++ b/packages/electricsql_cli/test/fixtures/expected_drift_gen_code.dart
@@ -1,7 +1,7 @@
 // GENERATED CODE - DO NOT MODIFY BY HAND
 
 // ignore_for_file: always_use_package_imports, depend_on_referenced_packages
-// ignore_for_file: prefer_double_quotes
+// ignore_for_file: prefer_double_quotes, require_trailing_commas
 
 import 'package:drift/drift.dart';
 import 'package:electricsql/drivers/drift.dart';
diff --git a/packages/electricsql_cli/test/fixtures/expected_raw_schema_gen_code.dart b/packages/electricsql_cli/test/fixtures/expected_raw_schema_gen_code.dart
new file mode 100644
index 00000000..d2404f0a
--- /dev/null
+++ b/packages/electricsql_cli/test/fixtures/expected_raw_schema_gen_code.dart
@@ -0,0 +1,38 @@
+// GENERATED CODE - DO NOT MODIFY BY HAND
+
+// ignore_for_file: always_use_package_imports, depend_on_referenced_packages
+// ignore_for_file: prefer_double_quotes, require_trailing_commas
+
+import 'package:electricsql/electricsql.dart';
+
+import './migrations.dart';
+import './pg_migrations.dart';
+
+const kElectricMigrations = ElectricMigrations(
+  sqliteMigrations: kSqliteMigrations,
+  pgMigrations: kPostgresMigrations,
+);
+const DBSchema kDbSchema = DBSchemaRaw(
+  tableSchemas: {
+    'table1': TableSchema(
+      fields: {
+        'col1': PgType.text,
+        'col2': PgType.int2,
+      },
+      relations: [
+        Relation(
+          fromField: 'fromField1',
+          toField: 'toField1',
+          relationName: 'relationName1',
+          relatedTable: 'relatedTable1',
+        )
+      ],
+    ),
+    'table2': TableSchema(
+      fields: {'col3': PgType.bool},
+      relations: [],
+    ),
+  },
+  migrations: kSqliteMigrations,
+  pgMigrations: kPostgresMigrations,
+);
diff --git a/packages/electricsql_cli/test/src/cli/migrations/builder_test.dart b/packages/electricsql_cli/test/src/cli/migrations/builder_test.dart
index 17c65d4a..74302eb1 100644
--- a/packages/electricsql_cli/test/src/cli/migrations/builder_test.dart
+++ b/packages/electricsql_cli/test/src/cli/migrations/builder_test.dart
@@ -1,5 +1,6 @@
 import 'dart:io';
 
+import 'package:electricsql/electricsql.dart';
 import 'package:electricsql/migrators.dart';
 import 'package:electricsql_cli/src/commands/generate/builder.dart';
 import 'package:path/path.dart';
@@ -15,9 +16,48 @@ void main() {
   test('read migration meta data', () async {
     for (final builder in [kSqliteQueryBuilder, kPostgresQueryBuilder]) {
-      final migrations = await loadMigrations(migrationsFolder, builder);
+      final (:migrations, :dbDescription) =
+          await loadMigrations(migrationsFolder, builder);
       final versions = migrations.map((m) => m.version);
       expect(versions, ['20230613112725_814', '20230613112735_992']);
+
+      expect(dbDescription.tableSchemas, {
+        'stars': const TableSchema(
+          fields: {
+            'id': PgType.text,
+            'avatar_url': PgType.text,
+            'name': PgType.text,
+            'starred_at': PgType.text,
+            'username': PgType.text,
+          },
+          relations: [
+            Relation(
+              // 'beers',
+              fromField: '',
+              toField: '',
+              relatedTable: 'beers',
+              relationName: 'beers_star_idTostars',
+              // 'many'
+            ),
+          ],
+        ),
+        'beers': const TableSchema(
+          fields: {
+            'id': PgType.text,
+            'star_id': PgType.text,
+          },
+          relations: [
+            Relation(
+              // 'stars',
+              fromField: 'star_id',
+              toField: 'id',
+              relatedTable: 'stars',
+              relationName: 'beers_star_idTostars',
+              // 'one'
+            ),
+          ],
+        ),
+      });
     }
   });
 }
diff --git a/packages/electricsql_cli/test/src/generate_raw_schema_test.dart b/packages/electricsql_cli/test/src/generate_raw_schema_test.dart
new file mode 100644
index 00000000..1f770e81
--- /dev/null
+++ b/packages/electricsql_cli/test/src/generate_raw_schema_test.dart
@@ -0,0 +1,43 @@
+import 'dart:io';
+
+import 'package:electricsql/electricsql.dart';
+import 'package:electricsql_cli/src/commands/generate/builder.dart';
+import 'package:path/path.dart';
+import 'package:test/test.dart';
+
+void main() {
+  test('generate raw schema code', () async {
+    const dbDescription = DBSchemaRaw(
+      tableSchemas: {
+        'table1': TableSchema(
+          fields: {
+            'col1': PgType.text,
+            'col2': PgType.int2,
+          },
+          relations: [
+            Relation(
+              fromField: 'fromField1',
+              toField: 'toField1',
+              relationName: 'relationName1',
+              relatedTable: 'relatedTable1',
+            ),
+          ],
+        ),
+        'table2': TableSchema(fields: {'col3': PgType.bool}, relations: []),
+      },
+      migrations: [],
+      pgMigrations: [],
+    );
+
+    final contents = generateRawSchemaDartCode(dbDescription);
+
+    final expectedFile = join(
+      Directory.current.path,
+      'test/fixtures/expected_raw_schema_gen_code.dart',
+    );
+
+    // File("out_raw.dart").writeAsStringSync(contents);
+
+    expect(contents, await File(expectedFile).readAsString());
+  });
+}
diff --git a/patch/e2e.patch b/patch/e2e.patch
index c786eb97..29f1b055 100644
--- a/patch/e2e.patch
+++ b/patch/e2e.patch
@@ -196,7 +196,7 @@ diff -x lux -x satellite_client -x lux_logs -ur electric/tests/03.10_node_satell
 diff -x lux -x satellite_client -x lux_logs -ur electric/tests/03.11_node_satellite_compensations_work.lux dart/tests/03.11_node_satellite_compensations_work.lux
 --- electric/tests/03.11_node_satellite_compensations_work.lux	2024-01-01 00:00:00.000000000 +0000
 +++ dart/tests/03.11_node_satellite_compensations_work.lux	2024-01-01 00:00:00.000000000 +0000
-@@ -16,21 +16,15 @@
+@@ -16,15 +16,15 @@
      ??[rpc] recv: #SatInStartReplicationResp
      [invoke node_sync_other_items ""]
      ??[proto] recv: #SatSubsDataEnd
 +    !raw_statement db "UPDATE _electric_meta SET value = 1 WHERE key = 'compensations' RETURNING *"
      ?$node
--    """!await db.db.items.create({
--      data: {
--        id: "00000000-0000-0000-0000-000000000001",
--        content: "hello world"
--      }
--    })
--    """
+-    !await client.insert_item(db, "00000000-0000-0000-0000-000000000001", "hello world")
 +    [invoke node_await_insert_extended "{id: '00000000-0000-0000-0000-000000000001', content: 'hello world'}"]
      ??[proto] send: #SatOpLog
      ??[proto] recv: #SatOpLog
@@ -221,18 +215,11 @@
  [shell pg_1]
      # Concurrently, update and then delete the referenced row on the server
-@@ -40,20 +34,13 @@
+@@ -34,13 +34,13 @@
  [shell satellite_1]
      # On a disconnected client, insert a dependent row
      ?$node
--    """!await db.db.other_items.create({
--      data: {
--        id: "other_test_id_1",
--        content: "",
--        item_id: "00000000-0000-0000-0000-000000000001"
--      }
--    })
--    """
+-    !await client.insert_other_item(db, "other_test_id_1", "", "00000000-0000-0000-0000-000000000001")
 +    !raw_statement db "INSERT INTO other_items(id, content, item_id) VALUES('other_test_id_1', '', '00000000-0000-0000-0000-000000000001')"
      ?$node
@@ -339,7 +326,7 @@ diff -x lux -x satellite_client -x lux_logs -ur electric/tests/03.16_node_satell
      # Can't write invalid uuids to the DB
      # the uuid below has one digit too many in the last part
--    !await client.write_invalid_uuid(db, '09e3e433-e9f1-46b4-a18f-1e4e0b6c62789')
-    """??
-    Uncaught:
-    [
@@ -586,17 +573,11 @@ diff -x lux -x satellite_client -x lux_logs -ur electric/tests/03.24_node_satell
 diff -x lux -x satellite_client -x lux_logs -ur electric/tests/03.25_node_pk_position_does_not_matter_for_compensations.lux dart/tests/03.25_node_pk_position_does_not_matter_for_compensations.lux
 --- electric/tests/03.25_node_pk_position_does_not_matter_for_compensations.lux	2024-01-01 00:00:00.000000000 +0000
 +++ dart/tests/03.25_node_pk_position_does_not_matter_for_compensations.lux	2024-01-01 00:00:00.000000000 +0000
-@@ -35,18 +35,12 @@
+@@ -35,12 +35,12 @@
      ??[proto] recv: #SatSubsDataEnd
      ?$node
--    """!await db.db.items.create({
--      data: {
--        id: "00000000-0000-0000-0000-000000000001",
--        content: "hello world"
--      }
--    })
--    """
+-    !await client.insert_item(db, "00000000-0000-0000-0000-000000000001", "hello world")
 +    [invoke node_await_insert_extended "{id: '00000000-0000-0000-0000-000000000001', content: 'hello world'}"]
      ??[proto] send: #SatOpLog
      ??[proto] recv: #SatOpLog
@@ -607,18 +588,11 @@
  [shell pg_1]
      # Concurrently, update and then delete the referenced row on the server
-@@ -56,20 +50,12 @@
+@@ -50,13 +50,12 @@
  [shell satellite_1]
      # On a disconnected client, insert a dependent row
      ?$node
--    """!await db.db.other_items.create({
--      data: {
--        id: "other_test_id_1",
--        content: "",
--        item_id: "00000000-0000-0000-0000-000000000001"
--      }
--    })
--    """
+-    !await client.insert_other_item(db, "other_test_id_1", "", "00000000-0000-0000-0000-000000000001")
-     ?$node
 +    [invoke node_await_insert_extended_into "other_items" "{id: 'other_test_id_1', content: '', item_id: '00000000-0000-0000-0000-000000000001'}"]
@@ -637,11 +611,11 @@ diff -x lux -x satellite_client -x lux_logs -ur electric/tests/03.26_node_satell
  [shell satellite_2]
      # Subscribe to "items" and include "other_items"
--    !await client.syncTableWithShape(db, "items", { \
-        where: "this.content like 'items-_-'", \
-        include: { other_items: true } \
--    })
-+    !custom_03_25_sync_items db
++    !custom_03_26_sync_items db
      ?send: #SatSubsReq\{id: ([a-f0-9-]{36})
      [global client_2_subs_id=$1]
@@ -884,8 +858,8 @@ diff -x lux -x satellite_client -x lux_logs -ur electric/tests/_satellite_macros
  [endmacro]
 
  [macro node_await_insert keys]
--    !await client.insert_item(db, ${keys})
-+    !insert_item db ${keys}
+-    !await client.insert_items(db, ${keys})
++    !insert_items db ${keys}
      ??$node
  [endmacro]
 
@@ -907,8 +881,8 @@
  [endmacro]
 
  [macro node_await_insert_other keys]
--    !await client.insert_other_item(db, ${keys})
-+    !insert_other_item db ${keys}
+-    !await client.insert_other_items(db, ${keys})
++    !insert_other_items db ${keys}
      ??$node
  [endmacro]
diff --git a/todos_flutter/lib/generated/electric/drift_schema.dart b/todos_flutter/lib/generated/electric/drift_schema.dart
index 9336da22..122a9e28 100644
--- a/todos_flutter/lib/generated/electric/drift_schema.dart
+++ b/todos_flutter/lib/generated/electric/drift_schema.dart
@@ -1,7 +1,7 @@
 // GENERATED CODE - DO NOT MODIFY BY HAND
 
 // ignore_for_file: always_use_package_imports, depend_on_referenced_packages
-// ignore_for_file: prefer_double_quotes
+// ignore_for_file: prefer_double_quotes, require_trailing_commas
 
 import 'package:drift/drift.dart';
 import 'package:electricsql/drivers/drift.dart';
diff --git a/todos_flutter/lib/generated/electric/migrations.dart b/todos_flutter/lib/generated/electric/migrations.dart
index 5be9254d..2d967c51 100644
--- a/todos_flutter/lib/generated/electric/migrations.dart
+++ b/todos_flutter/lib/generated/electric/migrations.dart
@@ -1,7 +1,7 @@
 // GENERATED CODE - DO NOT MODIFY BY HAND
 
 // ignore_for_file: always_use_package_imports, depend_on_referenced_packages
-// ignore_for_file: prefer_double_quotes
+// ignore_for_file: prefer_double_quotes, require_trailing_commas
 
 import 'package:electricsql/electricsql.dart';
diff --git a/todos_flutter/lib/generated/electric/pg_migrations.dart b/todos_flutter/lib/generated/electric/pg_migrations.dart
index abd6b2d7..e6654839 100644
--- a/todos_flutter/lib/generated/electric/pg_migrations.dart
+++ b/todos_flutter/lib/generated/electric/pg_migrations.dart
@@ -1,7 +1,7 @@
 // GENERATED CODE - DO NOT MODIFY BY HAND
 
 // ignore_for_file: always_use_package_imports, depend_on_referenced_packages
-// ignore_for_file: prefer_double_quotes
+// ignore_for_file: prefer_double_quotes, require_trailing_commas
 
 import 'package:electricsql/electricsql.dart';
diff --git a/todos_flutter/lib/main.dart b/todos_flutter/lib/main.dart
index f273ff64..3ec838e4 100644
--- a/todos_flutter/lib/main.dart
+++ b/todos_flutter/lib/main.dart
@@ -121,7 +121,7 @@ class MyHomePage extends HookConsumerWidget {
     electricClient.syncTable(
       db.todolist,
       include: (tl) => [
-        SyncInputRelation.from(tl.$relations.todo),
+        ShapeInputRelation.from(tl.$relations.todo),
      ],
    );
  }
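Taken together: running the generator with `--with-dal false` emits only the bundled migrations plus a `schema.dart` shaped like the fixture above, and the app then talks to Electric through the raw client instead of the Drift DAL. A sketch of inspecting that generated description (the import path follows the fixture's layout under CLIENT_PATH and is illustrative):

    import 'generated/electric/schema.dart';

    void main() {
      // kDbSchema is a const DBSchemaRaw: table names, their PG column types,
      // and the forward/backward relations derived from the migrations.
      final table1 = kDbSchema.tableSchemas['table1']!;
      print(table1.fields); // {col1: PgType.text, col2: PgType.int2}
      print(table1.relations.single.relationName); // relationName1
    }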