Merge remote-tracking branch 'origin/master' into release/1.0.1
This commit is contained in:
@@ -1,6 +1,8 @@
|
||||
import type { MigrationContext, ReversibleMigration } from '@db/types';
|
||||
|
||||
export class AddUserSettings1652367743993 implements ReversibleMigration {
|
||||
transaction = false as const;
|
||||
|
||||
async up({ queryRunner, tablePrefix }: MigrationContext) {
|
||||
await queryRunner.query(
|
||||
`CREATE TABLE "temporary_user" ("id" varchar PRIMARY KEY NOT NULL, "email" varchar(255), "firstName" varchar(32), "lastName" varchar(32), "password" varchar, "resetPasswordToken" varchar, "resetPasswordTokenExpiration" integer DEFAULT NULL, "personalizationAnswers" text, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "globalRoleId" integer NOT NULL, "settings" text, CONSTRAINT "FK_${tablePrefix}f0609be844f9200ff4365b1bb3d" FOREIGN KEY ("globalRoleId") REFERENCES "${tablePrefix}role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION)`,
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import type { MigrationContext, ReversibleMigration } from '@db/types';
|
||||
|
||||
export class AddAPIKeyColumn1652905585850 implements ReversibleMigration {
|
||||
transaction = false as const;
|
||||
|
||||
async up({ queryRunner, tablePrefix }: MigrationContext) {
|
||||
await queryRunner.query(
|
||||
`CREATE TABLE "temporary_user" ("id" varchar PRIMARY KEY NOT NULL, "email" varchar(255), "firstName" varchar(32), "lastName" varchar(32), "password" varchar, "resetPasswordToken" varchar, "resetPasswordTokenExpiration" integer DEFAULT NULL, "personalizationAnswers" text, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "globalRoleId" integer NOT NULL, "settings" text, "apiKey" varchar, CONSTRAINT "FK_${tablePrefix}f0609be844f9200ff4365b1bb3d" FOREIGN KEY ("globalRoleId") REFERENCES "${tablePrefix}role" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION)`,
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import type { MigrationContext, ReversibleMigration } from '@db/types';
|
||||
|
||||
export class DeleteExecutionsWithWorkflows1673268682475 implements ReversibleMigration {
|
||||
transaction = false as const;
|
||||
|
||||
async up({ queryRunner, tablePrefix }: MigrationContext) {
|
||||
const workflowIds = (await queryRunner.query(`
|
||||
SELECT id FROM "${tablePrefix}workflow_entity"
|
||||
|
||||
@@ -1,18 +1,31 @@
|
||||
import { statSync } from 'fs';
|
||||
import path from 'path';
|
||||
import { UserSettings } from 'n8n-core';
|
||||
import type { MigrationContext, IrreversibleMigration } from '@db/types';
|
||||
import config from '@/config';
|
||||
import { copyTable } from '@/databases/utils/migrationHelpers';
|
||||
|
||||
export class MigrateIntegerKeysToString1690000000002 implements IrreversibleMigration {
|
||||
async up({ queryRunner, tablePrefix }: MigrationContext) {
|
||||
transaction = false as const;
|
||||
|
||||
async up(context: MigrationContext) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-use-before-define
|
||||
await pruneExecutionsData(context);
|
||||
|
||||
const { queryRunner, tablePrefix } = context;
|
||||
|
||||
await queryRunner.query(`
|
||||
CREATE TABLE "${tablePrefix}TMP_workflow_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(128) NOT NULL, "active" boolean NOT NULL, "nodes" text, "connections" text NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "settings" text, "staticData" text, "pinData" text, "versionId" varchar(36), "triggerCount" integer NOT NULL DEFAULT 0);`);
|
||||
CREATE TABLE "${tablePrefix}TMP_workflow_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(128) NOT NULL, "active" boolean NOT NULL, "nodes" text, "connections" text NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "settings" text, "staticData" text, "pinData" text, "versionId" varchar(36), "triggerCount" integer NOT NULL DEFAULT 0);`);
|
||||
await queryRunner.query(
|
||||
`INSERT INTO "${tablePrefix}TMP_workflow_entity" (id, name, active, nodes, connections, createdAt, updatedAt, settings, staticData, pinData, triggerCount, versionId) SELECT id, name, active, nodes, connections, createdAt, updatedAt, settings, staticData, pinData, triggerCount, versionId FROM "${tablePrefix}workflow_entity";`,
|
||||
);
|
||||
await queryRunner.query(`DROP TABLE "${tablePrefix}workflow_entity";`);
|
||||
await queryRunner.query(`ALTER TABLE "${tablePrefix}TMP_workflow_entity" RENAME TO "${tablePrefix}workflow_entity";
|
||||
`);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "${tablePrefix}TMP_workflow_entity" RENAME TO "${tablePrefix}workflow_entity"`,
|
||||
);
|
||||
|
||||
await queryRunner.query(`
|
||||
CREATE TABLE "${tablePrefix}TMP_tag_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(24) NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')));`);
|
||||
CREATE TABLE "${tablePrefix}TMP_tag_entity" ("id" varchar(36) PRIMARY KEY NOT NULL, "name" varchar(24) NOT NULL, "createdAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), "updatedAt" datetime(3) NOT NULL DEFAULT (STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')));`);
|
||||
await queryRunner.query(
|
||||
`INSERT INTO "${tablePrefix}TMP_tag_entity" SELECT * FROM "${tablePrefix}tag_entity";`,
|
||||
);
|
||||
@@ -22,7 +35,7 @@ CREATE TABLE "${tablePrefix}TMP_tag_entity" ("id" varchar(36) PRIMARY KEY NOT NU
|
||||
);
|
||||
|
||||
await queryRunner.query(`
|
||||
CREATE TABLE "${tablePrefix}TMP_workflows_tags" ("workflowId" varchar(36) NOT NULL, "tagId" integer NOT NULL, CONSTRAINT "FK_${tablePrefix}workflows_tags_workflow_entity" FOREIGN KEY ("workflowId") REFERENCES "${tablePrefix}workflow_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, CONSTRAINT "FK_${tablePrefix}workflows_tags_tag_entity" FOREIGN KEY ("tagId") REFERENCES "${tablePrefix}tag_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, PRIMARY KEY ("workflowId", "tagId"));`);
|
||||
CREATE TABLE "${tablePrefix}TMP_workflows_tags" ("workflowId" varchar(36) NOT NULL, "tagId" integer NOT NULL, CONSTRAINT "FK_${tablePrefix}workflows_tags_workflow_entity" FOREIGN KEY ("workflowId") REFERENCES "${tablePrefix}workflow_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, CONSTRAINT "FK_${tablePrefix}workflows_tags_tag_entity" FOREIGN KEY ("tagId") REFERENCES "${tablePrefix}tag_entity" ("id") ON DELETE CASCADE ON UPDATE NO ACTION, PRIMARY KEY ("workflowId", "tagId"));`);
|
||||
await queryRunner.query(
|
||||
`INSERT INTO "${tablePrefix}TMP_workflows_tags" SELECT * FROM "${tablePrefix}workflows_tags";`,
|
||||
);
|
||||
@@ -105,9 +118,7 @@ CREATE TABLE "${tablePrefix}TMP_workflows_tags" ("workflowId" varchar(36) NOT NU
|
||||
"data" text NOT NULL, "status" varchar,
|
||||
FOREIGN KEY("workflowId") REFERENCES "${tablePrefix}workflow_entity" ("id") ON DELETE CASCADE
|
||||
);`);
|
||||
await queryRunner.query(
|
||||
`INSERT INTO "${tablePrefix}TMP_execution_entity" SELECT * FROM "${tablePrefix}execution_entity";`,
|
||||
);
|
||||
await copyTable({ tablePrefix, queryRunner }, 'execution_entity', 'TMP_execution_entity');
|
||||
await queryRunner.query(`DROP TABLE "${tablePrefix}execution_entity";`);
|
||||
await queryRunner.query(
|
||||
`ALTER TABLE "${tablePrefix}TMP_execution_entity" RENAME TO "${tablePrefix}execution_entity";`,
|
||||
@@ -175,3 +186,44 @@ CREATE TABLE "${tablePrefix}TMP_workflows_tags" ("workflowId" varchar(36) NOT NU
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const DESIRED_DATABASE_FILE_SIZE = 1 * 1024 * 1024 * 1024; // 1 GB
|
||||
const migrationsPruningEnabled = process.env.MIGRATIONS_PRUNING_ENABLED === 'true';
|
||||
|
||||
function getSqliteDbFileSize(): number {
|
||||
const filename = path.resolve(
|
||||
UserSettings.getUserN8nFolderPath(),
|
||||
config.getEnv('database.sqlite.database'),
|
||||
);
|
||||
const { size } = statSync(filename);
|
||||
return size;
|
||||
}
|
||||
|
||||
const pruneExecutionsData = async ({ queryRunner, tablePrefix }: MigrationContext) => {
|
||||
if (migrationsPruningEnabled) {
|
||||
const dbFileSize = getSqliteDbFileSize();
|
||||
if (dbFileSize < DESIRED_DATABASE_FILE_SIZE) {
|
||||
console.log(`DB Size not large enough to prune: ${dbFileSize}`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.time('pruningData');
|
||||
const counting = (await queryRunner.query(
|
||||
`select count(id) as rows from "${tablePrefix}execution_entity";`,
|
||||
)) as Array<{ rows: number }>;
|
||||
|
||||
const averageExecutionSize = dbFileSize / counting[0].rows;
|
||||
const numberOfExecutionsToKeep = Math.floor(DESIRED_DATABASE_FILE_SIZE / averageExecutionSize);
|
||||
|
||||
const query = `SELECT id FROM "${tablePrefix}execution_entity" ORDER BY id DESC limit ${numberOfExecutionsToKeep}, 1`;
|
||||
const idToKeep = await queryRunner
|
||||
.query(query)
|
||||
.then((rows: Array<{ id: number }>) => rows[0].id);
|
||||
|
||||
const removalQuery = `DELETE FROM "${tablePrefix}execution_entity" WHERE id < ${idToKeep} and status IN ('success')`;
|
||||
await queryRunner.query(removalQuery);
|
||||
console.timeEnd('pruningData');
|
||||
} else {
|
||||
console.log('Pruning was requested, but was not enabled');
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import type { MigrationContext, ReversibleMigration } from '@/databases/types';
|
||||
import { copyTable } from '@/databases/utils/migrationHelpers';
|
||||
|
||||
export class SeparateExecutionData1690000000010 implements ReversibleMigration {
|
||||
async up({ queryRunner, tablePrefix }: MigrationContext): Promise<void> {
|
||||
@@ -11,13 +12,12 @@ export class SeparateExecutionData1690000000010 implements ReversibleMigration {
|
||||
)`,
|
||||
);
|
||||
|
||||
await queryRunner.query(
|
||||
`INSERT INTO "${tablePrefix}execution_data" (
|
||||
"executionId",
|
||||
"workflowData",
|
||||
"data")
|
||||
SELECT "id", "workflowData", "data" FROM "${tablePrefix}execution_entity"
|
||||
`,
|
||||
await copyTable(
|
||||
{ tablePrefix, queryRunner },
|
||||
'execution_entity',
|
||||
'execution_data',
|
||||
['id', 'workflowData', 'data'],
|
||||
['executionId', 'workflowData', 'data'],
|
||||
);
|
||||
|
||||
await queryRunner.query(
|
||||
|
||||
@@ -12,15 +12,19 @@ export interface MigrationContext {
|
||||
migrationName: string;
|
||||
}
|
||||
|
||||
type MigrationFn = (ctx: MigrationContext) => Promise<void>;
|
||||
export type MigrationFn = (ctx: MigrationContext) => Promise<void>;
|
||||
|
||||
export interface ReversibleMigration {
|
||||
export interface BaseMigration {
|
||||
up: MigrationFn;
|
||||
down?: MigrationFn | never;
|
||||
transaction?: false;
|
||||
}
|
||||
|
||||
export interface ReversibleMigration extends BaseMigration {
|
||||
down: MigrationFn;
|
||||
}
|
||||
|
||||
export interface IrreversibleMigration {
|
||||
up: MigrationFn;
|
||||
export interface IrreversibleMigration extends BaseMigration {
|
||||
down?: never;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
/* eslint-disable no-await-in-loop */
|
||||
import { readFileSync, rmSync } from 'fs';
|
||||
import { UserSettings } from 'n8n-core';
|
||||
import type { QueryRunner } from 'typeorm/query-runner/QueryRunner';
|
||||
import config from '@/config';
|
||||
import { getLogger } from '@/Logger';
|
||||
import { inTest } from '@/constants';
|
||||
import type { Migration, MigrationContext } from '@db/types';
|
||||
import type { BaseMigration, Migration, MigrationContext, MigrationFn } from '@db/types';
|
||||
|
||||
const logger = getLogger();
|
||||
|
||||
@@ -39,30 +38,47 @@ export function loadSurveyFromDisk(): string | null {
|
||||
}
|
||||
}
|
||||
|
||||
let logFinishTimeout: NodeJS.Timeout;
|
||||
let runningMigrations = false;
|
||||
|
||||
export function logMigrationStart(migrationName: string, disableLogging = inTest): void {
|
||||
if (disableLogging) return;
|
||||
function logMigrationStart(migrationName: string): void {
|
||||
if (inTest) return;
|
||||
|
||||
if (!logFinishTimeout) {
|
||||
if (!runningMigrations) {
|
||||
logger.warn('Migrations in progress, please do NOT stop the process.');
|
||||
runningMigrations = true;
|
||||
}
|
||||
|
||||
logger.debug(`Starting migration ${migrationName}`);
|
||||
|
||||
clearTimeout(logFinishTimeout);
|
||||
}
|
||||
|
||||
export function logMigrationEnd(migrationName: string, disableLogging = inTest): void {
|
||||
if (disableLogging) return;
|
||||
function logMigrationEnd(migrationName: string): void {
|
||||
if (inTest) return;
|
||||
|
||||
logger.debug(`Finished migration ${migrationName}`);
|
||||
|
||||
logFinishTimeout = setTimeout(() => {
|
||||
logger.warn('Migrations finished.');
|
||||
}, 100);
|
||||
}
|
||||
|
||||
/**
 * Runs a migration step (`fn`, bound to `migration`) inside a manually managed
 * transaction with SQLite foreign-key enforcement switched off, restoring
 * enforcement afterwards no matter how the step ends.
 *
 * Throws if used with any database type other than sqlite — the PRAGMA-based
 * workaround only exists there.
 */
const runDisablingForeignKeys = async (
	migration: BaseMigration,
	context: MigrationContext,
	fn: MigrationFn,
) => {
	const { dbType, queryRunner } = context;
	if (dbType !== 'sqlite') throw new Error('Disabling transactions only available in sqlite');
	// Disable FK checks BEFORE opening the transaction so table rebuilds
	// (drop/rename) inside the migration don't trip constraint errors.
	await queryRunner.query('PRAGMA foreign_keys=OFF');
	await queryRunner.startTransaction();
	try {
		await fn.call(migration, context);
		await queryRunner.commitTransaction();
	} catch (e) {
		// Best-effort rollback: a failure while rolling back is swallowed so the
		// ORIGINAL migration error is the one surfaced to the caller.
		try {
			await queryRunner.rollbackTransaction();
		} catch {}
		throw e;
	} finally {
		// Always re-enable FK enforcement, on success and failure alike.
		await queryRunner.query('PRAGMA foreign_keys=ON');
	}
};
|
||||
|
||||
export const wrapMigration = (migration: Migration) => {
|
||||
const dbType = config.getEnv('database.type');
|
||||
const dbName = config.getEnv(`database.${dbType === 'mariadb' ? 'mysqldb' : dbType}.database`);
|
||||
@@ -78,17 +94,58 @@ export const wrapMigration = (migration: Migration) => {
|
||||
|
||||
const { up, down } = migration.prototype;
|
||||
Object.assign(migration.prototype, {
|
||||
async up(queryRunner: QueryRunner) {
|
||||
async up(this: BaseMigration, queryRunner: QueryRunner) {
|
||||
logMigrationStart(migrationName);
|
||||
await up.call(this, { queryRunner, ...context });
|
||||
if (this.transaction === false) {
|
||||
await runDisablingForeignKeys(this, { queryRunner, ...context }, up);
|
||||
} else {
|
||||
await up.call(this, { queryRunner, ...context });
|
||||
}
|
||||
logMigrationEnd(migrationName);
|
||||
},
|
||||
async down(queryRunner: QueryRunner) {
|
||||
await down?.call(this, { queryRunner, ...context });
|
||||
async down(this: BaseMigration, queryRunner: QueryRunner) {
	// `down` is optional (irreversible migrations); do nothing when absent.
	if (down) {
		if (this.transaction === false) {
			// BUG FIX: the original passed `up` here, so reverting a
			// non-transactional migration silently re-ran its `up` step
			// instead of undoing it. Pass `down` to actually revert.
			await runDisablingForeignKeys(this, { queryRunner, ...context }, down);
		} else {
			await down.call(this, { queryRunner, ...context });
		}
	}
},
|
||||
});
|
||||
};
|
||||
|
||||
export const copyTable = async (
|
||||
{ tablePrefix, queryRunner }: Pick<MigrationContext, 'queryRunner' | 'tablePrefix'>,
|
||||
fromTable: string,
|
||||
toTable: string,
|
||||
fromFields: string[] = [],
|
||||
toFields: string[] = [],
|
||||
batchSize = 10,
|
||||
) => {
|
||||
const driver = queryRunner.connection.driver;
|
||||
fromTable = driver.escape(`${tablePrefix}${fromTable}`);
|
||||
toTable = driver.escape(`${tablePrefix}${toTable}`);
|
||||
const fromFieldsStr = fromFields.length
|
||||
? fromFields.map((f) => driver.escape(f)).join(', ')
|
||||
: '*';
|
||||
const toFieldsStr = toFields.length
|
||||
? `(${toFields.map((f) => driver.escape(f)).join(', ')})`
|
||||
: '';
|
||||
|
||||
const total = await queryRunner
|
||||
.query(`SELECT COUNT(*) as count from ${fromTable}`)
|
||||
.then((rows: Array<{ count: number }>) => rows[0].count);
|
||||
|
||||
let migrated = 0;
|
||||
while (migrated < total) {
|
||||
await queryRunner.query(
|
||||
`INSERT INTO ${toTable} ${toFieldsStr} SELECT ${fromFieldsStr} FROM ${fromTable} LIMIT ${migrated}, ${batchSize}`,
|
||||
);
|
||||
migrated += batchSize;
|
||||
}
|
||||
};
|
||||
|
||||
function batchQuery(query: string, limit: number, offset = 0): string {
|
||||
return `
|
||||
${query}
|
||||
|
||||
Reference in New Issue
Block a user