refactor(core): Add support for implicit schema in postgres migrations #5233

Merged · 2 commits merged on Jan 24, 2023
19 changes: 13 additions & 6 deletions packages/cli/src/Db.ts
@@ -127,6 +127,16 @@ export async function init(
connection = new Connection(connectionOptions);
await connection.initialize();

if (dbType === 'postgresdb') {
const schema = config.getEnv('database.postgresdb.schema');
const searchPath = ['public'];
if (schema !== 'public') {
await connection.query(`CREATE SCHEMA IF NOT EXISTS ${schema}`);
searchPath.unshift(schema);
}
await connection.query(`SET search_path TO ${searchPath.join(',')};`);
}

if (!testConnectionOptions && dbType === 'sqlite') {
// This specific migration changes database metadata.
// A field is now nullable. We need to reconnect so that
@@ -143,25 +153,23 @@ export async function init(

// If you remove this call, remember to turn back on the
// setting to run migrations automatically above.
await connection.runMigrations();
await connection.runMigrations({ transaction: 'each' });

// eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
if (migrations.length === 0) {
await connection.destroy();
connection = new Connection(connectionOptions);
await connection.initialize();
}
} else {
await connection.runMigrations({ transaction: 'each' });
}

// @ts-ignore
collections.Credentials = linkRepository(entities.CredentialsEntity);
// @ts-ignore
collections.Execution = linkRepository(entities.ExecutionEntity);
collections.Workflow = linkRepository(entities.WorkflowEntity);
// @ts-ignore
collections.Webhook = linkRepository(entities.WebhookEntity);
collections.Tag = linkRepository(entities.TagEntity);

collections.Role = linkRepository(entities.Role);
collections.User = linkRepository(entities.User);
collections.SharedCredentials = linkRepository(entities.SharedCredentials);
@@ -170,7 +178,6 @@ export async function init(
collections.InstalledPackages = linkRepository(entities.InstalledPackages);
collections.InstalledNodes = linkRepository(entities.InstalledNodes);
collections.WorkflowStatistics = linkRepository(entities.WorkflowStatistics);

collections.EventDestinations = linkRepository(entities.EventDestinations);

isInitialized = true;
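
A note on the new Postgres block above: once `SET search_path TO <schema>,public` has run on the connection, every unqualified table name in later queries and migrations resolves against the configured schema first — that is the "implicit schema" in the PR title. A small sketch of how this could be sanity-checked (the helper below is illustrative and not part of this PR; it only assumes the TypeORM `connection.query()` already used in the diff and Postgres's standard `SHOW search_path`):

```ts
// Illustrative only — not part of this PR. Verifies that the configured schema is on the
// connection's search_path after Db.init() has executed its SET search_path statement.
import type { Connection } from 'typeorm';

export async function assertSchemaOnSearchPath(connection: Connection, schema: string) {
	// SHOW search_path returns one row shaped like { search_path: 'myschema, public' }
	const rows: Array<{ search_path: string }> = await connection.query('SHOW search_path');
	const searchPath = rows[0]?.search_path ?? '';
	const entries = searchPath.split(',').map((s) => s.trim().replace(/"/g, ''));
	if (!entries.includes(schema)) {
		throw new Error(`Schema "${schema}" is not on search_path: "${searchPath}"`);
	}
}
```
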
5 changes: 1 addition & 4 deletions packages/cli/src/commands/start.ts
@@ -239,7 +239,7 @@ export class Start extends Command {

try {
// Start directly with the init of the database to improve startup time
const startDbInitPromise = Db.init().catch(async (error: Error) =>
await Db.init().catch(async (error: Error) =>
Contributor:
The idea here was to allow db init to happen "async" while other things were happening; not a huge gain I believe, but are there any reasons to remove this behavior?

Member Author:

It wasn't working as expected when I was testing it. It was somehow only waiting until the DB connection was ready, but not until all the migrations were done executing.
But I can revert it and test it again.

Contributor:
I don't think it really provides any major gains, but it's just something to watch out for if we see people saying n8n is taking a bit longer to start. I don't believe we'll be seeing those issues frequently since it's a very simple thing.

Member Author:
Yeah, and since nodes are lazy-loaded now, there shouldn't be much of a difference in startup times.

exitWithCrash('There was an error initializing DB', error),
);

@@ -281,9 +281,6 @@ export class Start extends Command {

await loadNodesAndCredentials.generateTypesForFrontend();

// Wait till the database is ready
await startDbInitPromise;

const installedPackages = await getAllInstalledPackages();
const missingPackages = new Set<{
packageName: string;
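
To illustrate the startup pattern being discussed in the thread above, here is a minimal, self-contained sketch (not n8n code; all names and timings are illustrative) of "kick off DB init early, await it later" versus "await it up front", which is what this diff switches to:

```ts
// Sketch of the two startup shapes discussed above — illustrative only.
const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

async function initDatabase(): Promise<void> {
	await sleep(50); // connect
	await sleep(200); // run migrations — callers must not proceed before this finishes
}

async function otherStartupWork(): Promise<void> {
	await sleep(100); // e.g. loading nodes and credentials
}

async function start(): Promise<void> {
	// Before: overlap DB init with the rest of startup, then wait for it at the end.
	const dbInitPromise = initDatabase().catch((error) => {
		console.error('There was an error initializing DB', error);
		process.exit(1);
	});
	await otherStartupWork();
	await dbInitPromise;

	// After (this PR): `await initDatabase()` directly, trading a little startup overlap
	// for the guarantee that the connection is ready and all migrations have completed
	// before anything else runs.
}

void start();
```
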
3 changes: 1 addition & 2 deletions packages/cli/src/databases/config.ts
@@ -36,7 +36,7 @@ const getDBConnectionOptions = (dbType: DatabaseType) => {
return {
entityPrefix,
entities: Object.values(entities),
migrationsRun: true,
migrationsRun: false,
migrationsTableName: `${entityPrefix}migrations`,
cli: { entitiesDir, migrationsDir },
...connectionDetails,
@@ -54,7 +54,6 @@ export const getOptionOverrides = async (dbType: 'postgresdb' | 'mysqldb') => ({
export const getSqliteConnectionOptions = (): SqliteConnectionOptions => ({
type: 'sqlite',
...getDBConnectionOptions('sqlite'),
migrationsRun: false, // sqlite migrations are manually run in `Db.ts`
migrations: sqliteMigrations,
});

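
With `migrationsRun` now `false` for every driver, TypeORM no longer auto-runs migrations on connect; `Db.ts` triggers them explicitly via `connection.runMigrations({ transaction: 'each' })`. For comparison with the sqlite options shown above, the Postgres variant in this file presumably takes the same shape — a sketch, since `getPostgresConnectionOptions` is not part of this diff and its exact fields are assumptions:

```ts
// Sketch only — not the code from this PR. A Postgres counterpart to getSqliteConnectionOptions,
// assuming getDBConnectionOptions and a postgresMigrations array exist in the same module.
import type { PostgresConnectionOptions } from 'typeorm/driver/postgres/PostgresConnectionOptions';

export const getPostgresConnectionOptions = (): PostgresConnectionOptions => ({
	type: 'postgres',
	...getDBConnectionOptions('postgresdb'),
	migrations: postgresMigrations, // assumed: the array of Postgres migration classes
});
```
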
@@ -1,62 +1,41 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

import config from '@/config';
import { getTablePrefix } from '@db/utils/migrationHelpers';

export class InitialMigration1587669153312 implements MigrationInterface {
name = 'InitialMigration1587669153312';

async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixIndex = tablePrefix;
const schema = config.getEnv('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}

await queryRunner.query(`SET search_path TO ${schema};`);
const tablePrefix = getTablePrefix();

await queryRunner.query(
`CREATE TABLE IF NOT EXISTS ${tablePrefix}credentials_entity ("id" SERIAL NOT NULL, "name" character varying(128) NOT NULL, "data" text NOT NULL, "type" character varying(32) NOT NULL, "nodesAccess" json NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, CONSTRAINT PK_${tablePrefixIndex}814c3d3c36e8a27fa8edb761b0e PRIMARY KEY ("id"))`,
`CREATE TABLE IF NOT EXISTS ${tablePrefix}credentials_entity ("id" SERIAL NOT NULL, "name" character varying(128) NOT NULL, "data" text NOT NULL, "type" character varying(32) NOT NULL, "nodesAccess" json NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, CONSTRAINT PK_${tablePrefix}814c3d3c36e8a27fa8edb761b0e PRIMARY KEY ("id"))`,
undefined,
);
await queryRunner.query(
`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixIndex}07fde106c0b471d8cc80a64fc8 ON ${tablePrefix}credentials_entity (type) `,
`CREATE INDEX IF NOT EXISTS IDX_${tablePrefix}07fde106c0b471d8cc80a64fc8 ON ${tablePrefix}credentials_entity (type) `,
undefined,
);
await queryRunner.query(
`CREATE TABLE IF NOT EXISTS ${tablePrefix}execution_entity ("id" SERIAL NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" character varying NOT NULL, "retryOf" character varying, "retrySuccessId" character varying, "startedAt" TIMESTAMP NOT NULL, "stoppedAt" TIMESTAMP NOT NULL, "workflowData" json NOT NULL, "workflowId" character varying, CONSTRAINT PK_${tablePrefixIndex}e3e63bbf986767844bbe1166d4e PRIMARY KEY ("id"))`,
`CREATE TABLE IF NOT EXISTS ${tablePrefix}execution_entity ("id" SERIAL NOT NULL, "data" text NOT NULL, "finished" boolean NOT NULL, "mode" character varying NOT NULL, "retryOf" character varying, "retrySuccessId" character varying, "startedAt" TIMESTAMP NOT NULL, "stoppedAt" TIMESTAMP NOT NULL, "workflowData" json NOT NULL, "workflowId" character varying, CONSTRAINT PK_${tablePrefix}e3e63bbf986767844bbe1166d4e PRIMARY KEY ("id"))`,
undefined,
);
await queryRunner.query(
`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixIndex}c4d999a5e90784e8caccf5589d ON ${tablePrefix}execution_entity ("workflowId") `,
`CREATE INDEX IF NOT EXISTS IDX_${tablePrefix}c4d999a5e90784e8caccf5589d ON ${tablePrefix}execution_entity ("workflowId") `,
undefined,
);
await queryRunner.query(
`CREATE TABLE IF NOT EXISTS ${tablePrefix}workflow_entity ("id" SERIAL NOT NULL, "name" character varying(128) NOT NULL, "active" boolean NOT NULL, "nodes" json NOT NULL, "connections" json NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, "settings" json, "staticData" json, CONSTRAINT PK_${tablePrefixIndex}eded7d72664448da7745d551207 PRIMARY KEY ("id"))`,
`CREATE TABLE IF NOT EXISTS ${tablePrefix}workflow_entity ("id" SERIAL NOT NULL, "name" character varying(128) NOT NULL, "active" boolean NOT NULL, "nodes" json NOT NULL, "connections" json NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, "settings" json, "staticData" json, CONSTRAINT PK_${tablePrefix}eded7d72664448da7745d551207 PRIMARY KEY ("id"))`,
undefined,
);
}

async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixIndex = tablePrefix;
const schema = config.getEnv('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}

await queryRunner.query(`SET search_path TO ${schema};`);
let tablePrefix = getTablePrefix();

await queryRunner.query(`DROP TABLE ${tablePrefix}workflow_entity`, undefined);
await queryRunner.query(
`DROP INDEX IDX_${tablePrefixIndex}c4d999a5e90784e8caccf5589d`,
undefined,
);
await queryRunner.query(`DROP INDEX IDX_${tablePrefix}c4d999a5e90784e8caccf5589d`, undefined);
await queryRunner.query(`DROP TABLE ${tablePrefix}execution_entity`, undefined);
await queryRunner.query(
`DROP INDEX IDX_${tablePrefixIndex}07fde106c0b471d8cc80a64fc8`,
undefined,
);
await queryRunner.query(`DROP INDEX IDX_${tablePrefix}07fde106c0b471d8cc80a64fc8`, undefined);
await queryRunner.query(`DROP TABLE ${tablePrefix}credentials_entity`, undefined);
}
}
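
All of the rewritten migrations now call `getTablePrefix()` instead of hand-building a schema-qualified prefix. That helper is not included in this diff; since the same value is now used both for table names and for constraint/index names (which cannot contain a `schema.` qualifier), it presumably just returns the configured prefix and relies on the `search_path` set in `Db.ts` — a sketch under that assumption:

```ts
// Sketch of @db/utils/migrationHelpers#getTablePrefix as implied by this diff — the real
// helper is not shown in the PR. Schema resolution is left to the search_path configured
// in Db.ts, so no "schema." prefix is added here.
import config from '@/config';

export function getTablePrefix(): string {
	return config.getEnv('database.tablePrefix');
}
```
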
@@ -1,33 +1,19 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

import config from '@/config';
import { getTablePrefix } from '@db/utils/migrationHelpers';

export class WebhookModel1589476000887 implements MigrationInterface {
name = 'WebhookModel1589476000887';

async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixIndex = tablePrefix;
const schema = config.getEnv('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}

await queryRunner.query(`SET search_path TO ${schema};`);

const tablePrefix = getTablePrefix();
await queryRunner.query(
`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" character varying NOT NULL, "method" character varying NOT NULL, "node" character varying NOT NULL, CONSTRAINT "PK_${tablePrefixIndex}b21ace2e13596ccd87dc9bf4ea6" PRIMARY KEY ("webhookPath", "method"))`,
`CREATE TABLE IF NOT EXISTS ${tablePrefix}webhook_entity ("workflowId" integer NOT NULL, "webhookPath" character varying NOT NULL, "method" character varying NOT NULL, "node" character varying NOT NULL, CONSTRAINT "PK_${tablePrefix}b21ace2e13596ccd87dc9bf4ea6" PRIMARY KEY ("webhookPath", "method"))`,
undefined,
);
}

async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.getEnv('database.tablePrefix');
const schema = config.getEnv('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}
await queryRunner.query(`SET search_path TO ${schema};`);
const tablePrefix = getTablePrefix();
await queryRunner.query(`DROP TABLE ${tablePrefix}webhook_entity`, undefined);
}
}
@@ -1,34 +1,18 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

import config from '@/config';
import { getTablePrefix } from '@/databases/utils/migrationHelpers';

export class CreateIndexStoppedAt1594828256133 implements MigrationInterface {
name = 'CreateIndexStoppedAt1594828256133';

async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix;
const schema = config.getEnv('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}

await queryRunner.query(`SET search_path TO ${schema};`);

const tablePrefix = getTablePrefix();
await queryRunner.query(
`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixPure}33228da131bb1112247cf52a42 ON ${tablePrefix}execution_entity ("stoppedAt") `,
`CREATE INDEX IF NOT EXISTS IDX_${tablePrefix}33228da131bb1112247cf52a42 ON ${tablePrefix}execution_entity ("stoppedAt") `,
);
}

async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.getEnv('database.tablePrefix');

const tablePrefixPure = tablePrefix;
const schema = config.getEnv('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}
await queryRunner.query(`SET search_path TO ${schema};`);
await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}33228da131bb1112247cf52a42`);
const tablePrefix = getTablePrefix();
await queryRunner.query(`DROP INDEX IDX_${tablePrefix}33228da131bb1112247cf52a42`);
}
}
@@ -1,19 +1,11 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

import config from '@/config';
import { getTablePrefix } from '@db/utils/migrationHelpers';

export class MakeStoppedAtNullable1607431743768 implements MigrationInterface {
name = 'MakeStoppedAtNullable1607431743768';

async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.getEnv('database.tablePrefix');
const schema = config.getEnv('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}

await queryRunner.query(`SET search_path TO ${schema};`);

const tablePrefix = getTablePrefix();
await queryRunner.query(
'ALTER TABLE ' + tablePrefix + 'execution_entity ALTER COLUMN "stoppedAt" DROP NOT NULL',
undefined,
@@ -1,38 +1,23 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import config from '@/config';
import { getTablePrefix } from '@db/utils/migrationHelpers';

export class AddWebhookId1611144599516 implements MigrationInterface {
name = 'AddWebhookId1611144599516';

async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix;
const schema = config.getEnv('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}

await queryRunner.query(`SET search_path TO ${schema};`);

const tablePrefix = getTablePrefix();
await queryRunner.query(
`ALTER TABLE ${tablePrefix}webhook_entity ADD "webhookId" character varying`,
);
await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity ADD "pathLength" integer`);
await queryRunner.query(
`CREATE INDEX IF NOT EXISTS IDX_${tablePrefixPure}16f4436789e804e3e1c9eeb240 ON ${tablePrefix}webhook_entity ("webhookId", "method", "pathLength") `,
`CREATE INDEX IF NOT EXISTS IDX_${tablePrefix}16f4436789e804e3e1c9eeb240 ON ${tablePrefix}webhook_entity ("webhookId", "method", "pathLength") `,
);
}

async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix;
const schema = config.getEnv('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}
await queryRunner.query(`SET search_path TO ${schema};`);

await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}16f4436789e804e3e1c9eeb240`);
const tablePrefix = getTablePrefix();
await queryRunner.query(`DROP INDEX IDX_${tablePrefix}16f4436789e804e3e1c9eeb240`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN "pathLength"`);
await queryRunner.query(`ALTER TABLE ${tablePrefix}webhook_entity DROP COLUMN "webhookId"`);
}
@@ -1,42 +1,35 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
import config from '@/config';
import { getTablePrefix } from '@db/utils/migrationHelpers';

export class CreateTagEntity1617270242566 implements MigrationInterface {
name = 'CreateTagEntity1617270242566';

async up(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix;
const schema = config.getEnv('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}

await queryRunner.query(`SET search_path TO ${schema};`);
const tablePrefix = getTablePrefix();

// create tags table + relationship with workflow entity

await queryRunner.query(
`CREATE TABLE ${tablePrefix}tag_entity ("id" SERIAL NOT NULL, "name" character varying(24) NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, CONSTRAINT "PK_${tablePrefixPure}7a50a9b74ae6855c0dcaee25052" PRIMARY KEY ("id"))`,
`CREATE TABLE ${tablePrefix}tag_entity ("id" SERIAL NOT NULL, "name" character varying(24) NOT NULL, "createdAt" TIMESTAMP NOT NULL, "updatedAt" TIMESTAMP NOT NULL, CONSTRAINT "PK_${tablePrefix}7a50a9b74ae6855c0dcaee25052" PRIMARY KEY ("id"))`,
);
await queryRunner.query(
`CREATE UNIQUE INDEX IDX_${tablePrefixPure}812eb05f7451ca757fb98444ce ON ${tablePrefix}tag_entity ("name") `,
`CREATE UNIQUE INDEX IDX_${tablePrefix}812eb05f7451ca757fb98444ce ON ${tablePrefix}tag_entity ("name") `,
);

await queryRunner.query(
`CREATE TABLE ${tablePrefix}workflows_tags ("workflowId" integer NOT NULL, "tagId" integer NOT NULL, CONSTRAINT "PK_${tablePrefixPure}a60448a90e51a114e95e2a125b3" PRIMARY KEY ("workflowId", "tagId"))`,
`CREATE TABLE ${tablePrefix}workflows_tags ("workflowId" integer NOT NULL, "tagId" integer NOT NULL, CONSTRAINT "PK_${tablePrefix}a60448a90e51a114e95e2a125b3" PRIMARY KEY ("workflowId", "tagId"))`,
);
await queryRunner.query(
`CREATE INDEX IDX_${tablePrefixPure}31140eb41f019805b40d008744 ON ${tablePrefix}workflows_tags ("workflowId") `,
`CREATE INDEX IDX_${tablePrefix}31140eb41f019805b40d008744 ON ${tablePrefix}workflows_tags ("workflowId") `,
);
await queryRunner.query(
`CREATE INDEX IDX_${tablePrefixPure}5e29bfe9e22c5d6567f509d4a4 ON ${tablePrefix}workflows_tags ("tagId") `,
`CREATE INDEX IDX_${tablePrefix}5e29bfe9e22c5d6567f509d4a4 ON ${tablePrefix}workflows_tags ("tagId") `,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT "FK_${tablePrefixPure}31140eb41f019805b40d0087449" FOREIGN KEY ("workflowId") REFERENCES ${tablePrefix}workflow_entity ("id") ON DELETE CASCADE ON UPDATE NO ACTION`,
`ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT "FK_${tablePrefix}31140eb41f019805b40d0087449" FOREIGN KEY ("workflowId") REFERENCES ${tablePrefix}workflow_entity ("id") ON DELETE CASCADE ON UPDATE NO ACTION`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT "FK_${tablePrefixPure}5e29bfe9e22c5d6567f509d4a46" FOREIGN KEY ("tagId") REFERENCES ${tablePrefix}tag_entity ("id") ON DELETE CASCADE ON UPDATE NO ACTION`,
`ALTER TABLE ${tablePrefix}workflows_tags ADD CONSTRAINT "FK_${tablePrefix}5e29bfe9e22c5d6567f509d4a46" FOREIGN KEY ("tagId") REFERENCES ${tablePrefix}tag_entity ("id") ON DELETE CASCADE ON UPDATE NO ACTION`,
);

// set default dates for `createdAt` and `updatedAt`
@@ -80,14 +73,7 @@ export class CreateTagEntity1617270242566 implements MigrationInterface {
}

async down(queryRunner: QueryRunner): Promise<void> {
let tablePrefix = config.getEnv('database.tablePrefix');
const tablePrefixPure = tablePrefix;
const schema = config.getEnv('database.postgresdb.schema');
if (schema) {
tablePrefix = schema + '.' + tablePrefix;
}

await queryRunner.query(`SET search_path TO ${schema};`);
const tablePrefix = getTablePrefix();

// `createdAt` and `updatedAt`

@@ -131,15 +117,15 @@ export class CreateTagEntity1617270242566 implements MigrationInterface {
// tags

await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags DROP CONSTRAINT "FK_${tablePrefixPure}5e29bfe9e22c5d6567f509d4a46"`,
`ALTER TABLE ${tablePrefix}workflows_tags DROP CONSTRAINT "FK_${tablePrefix}5e29bfe9e22c5d6567f509d4a46"`,
);
await queryRunner.query(
`ALTER TABLE ${tablePrefix}workflows_tags DROP CONSTRAINT "FK_${tablePrefixPure}31140eb41f019805b40d0087449"`,
`ALTER TABLE ${tablePrefix}workflows_tags DROP CONSTRAINT "FK_${tablePrefix}31140eb41f019805b40d0087449"`,
);
await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}5e29bfe9e22c5d6567f509d4a4`);
await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}31140eb41f019805b40d008744`);
await queryRunner.query(`DROP INDEX IDX_${tablePrefix}5e29bfe9e22c5d6567f509d4a4`);
await queryRunner.query(`DROP INDEX IDX_${tablePrefix}31140eb41f019805b40d008744`);
await queryRunner.query(`DROP TABLE ${tablePrefix}workflows_tags`);
await queryRunner.query(`DROP INDEX IDX_${tablePrefixPure}812eb05f7451ca757fb98444ce`);
await queryRunner.query(`DROP INDEX IDX_${tablePrefix}812eb05f7451ca757fb98444ce`);
await queryRunner.query(`DROP TABLE ${tablePrefix}tag_entity`);
}
}