refactor(core): Delete more redundant code across migrations (Part 1) (no-changelog) (#6691)

This commit is contained in:
कारतोफ्फेलस्क्रिप्ट™
2023-07-31 17:35:53 +02:00
committed by GitHub
parent 7b27fa5898
commit b7ca27afcf
42 changed files with 830 additions and 2195 deletions

View File

@@ -1,16 +1,20 @@
import { Container } from 'typedi';
import { readFileSync, rmSync } from 'fs';
import { UserSettings } from 'n8n-core';
import type { ObjectLiteral } from 'typeorm';
import type { QueryRunner } from 'typeorm/query-runner/QueryRunner';
import config from '@/config';
import { getLogger } from '@/Logger';
import { inTest } from '@/constants';
import type { BaseMigration, Migration, MigrationContext, MigrationFn } from '@db/types';
import { getLogger } from '@/Logger';
import { NodeTypes } from '@/NodeTypes';
import { jsonParse } from 'n8n-workflow';
const logger = getLogger();
const PERSONALIZATION_SURVEY_FILENAME = 'personalizationSurvey.json';
export function loadSurveyFromDisk(): string | null {
function loadSurveyFromDisk(): string | null {
const userSettingsPath = UserSettings.getUserN8nFolderPath();
try {
const filename = `${userSettingsPath}/${PERSONALIZATION_SURVEY_FILENAME}`;
@@ -21,8 +25,7 @@ export function loadSurveyFromDisk(): string | null {
if (!kvPairs.length) {
throw new Error('personalizationSurvey is empty');
} else {
// eslint-disable-next-line @typescript-eslint/naming-convention
const emptyKeys = kvPairs.reduce((acc, [_key, value]) => {
const emptyKeys = kvPairs.reduce((acc, [, value]) => {
if (!value || (Array.isArray(value) && !value.length)) {
return acc + 1;
}
@@ -79,113 +82,120 @@ const runDisablingForeignKeys = async (
}
};
export const wrapMigration = (migration: Migration) => {
const dbType = config.getEnv('database.type');
const dbName = config.getEnv(`database.${dbType === 'mariadb' ? 'mysqldb' : dbType}.database`);
const tablePrefix = config.getEnv('database.tablePrefix');
const migrationName = migration.name;
const context: Omit<MigrationContext, 'queryRunner'> = {
tablePrefix,
dbType,
dbName,
migrationName,
logger,
};
/**
 * Normalizes a value that may arrive either as a JSON string or as an
 * already-parsed object: strings are parsed via `jsonParse`, anything
 * else is returned untouched.
 */
function parseJson<T>(data: string | T): T {
	if (typeof data === 'string') {
		return jsonParse<T>(data);
	}
	return data;
}
// Active database driver from config (values seen in this file: 'mariadb',
// 'mysqldb'; other drivers come from the config schema).
const dbType = config.getEnv('database.type');
// MariaDB and MySQL share the same MySQL-flavored behavior in migrations.
const isMysql = ['mariadb', 'mysqldb'].includes(dbType);
// MariaDB reuses the 'mysqldb' config section, so remap before the lookup.
const dbName = config.getEnv(`database.${dbType === 'mariadb' ? 'mysqldb' : dbType}.database`);
const tablePrefix = config.getEnv('database.tablePrefix');
// Builds the MigrationContext handed to every wrapped migration's up()/down():
// database metadata, logging, and driver-aware SQL helpers bound to a single
// QueryRunner for the duration of that migration run.
const createContext = (queryRunner: QueryRunner, migration: Migration): MigrationContext => ({
logger,
tablePrefix,
dbType,
isMysql,
dbName,
migrationName: migration.name,
queryRunner,
nodeTypes: Container.get(NodeTypes),
loadSurveyFromDisk,
parseJson,
// Identifier quoting via the active driver; table and index names receive
// the configured table prefix (indexes additionally get an 'IDX_' prefix).
escape: {
columnName: (name) => queryRunner.connection.driver.escape(name),
tableName: (name) => queryRunner.connection.driver.escape(`${tablePrefix}${name}`),
indexName: (name) => queryRunner.connection.driver.escape(`IDX_${tablePrefix}${name}`),
},
// Runs `sql`. When `unsafeParameters` are given they are escaped into the
// query text by the driver (with `safeParameters` passed through as native
// query parameters); otherwise the SQL is executed verbatim.
runQuery: async <T>(
sql: string,
unsafeParameters?: ObjectLiteral,
safeParameters?: ObjectLiteral,
) => {
if (unsafeParameters) {
const [query, parameters] = queryRunner.connection.driver.escapeQueryWithParameters(
sql,
unsafeParameters,
safeParameters ?? {},
);
return queryRunner.query(query, parameters) as Promise<T>;
} else {
return queryRunner.query(sql) as Promise<T>;
}
},
// Pages through `query` with appended LIMIT/OFFSET, invoking `operation` on
// each page until a page comes back shorter than `limit`.
runInBatches: async <T>(
query: string,
operation: (results: T[]) => Promise<void>,
limit = 100,
) => {
let offset = 0;
let batchedQuery: string;
let batchedQueryResults: T[];
// A trailing semicolon would break the appended LIMIT/OFFSET clause.
if (query.trim().endsWith(';')) query = query.trim().slice(0, -1);
do {
batchedQuery = `${query} LIMIT ${limit} OFFSET ${offset}`;
batchedQueryResults = (await queryRunner.query(batchedQuery)) as T[];
// pass a copy to prevent errors from mutation
await operation([...batchedQueryResults]);
offset += limit;
} while (batchedQueryResults.length === limit);
},
// Bulk-copies rows between two prefixed tables in batches, optionally
// mapping a subset of source columns onto target columns.
// NOTE(review): `LIMIT ${migrated}, ${batchSize}` is MySQL-flavored offset
// syntax — confirm this helper only runs where the driver accepts it.
copyTable: async (
fromTable: string,
toTable: string,
fromFields?: string[],
toFields?: string[],
batchSize?: number,
) => {
const { driver } = queryRunner.connection;
fromTable = driver.escape(`${tablePrefix}${fromTable}`);
toTable = driver.escape(`${tablePrefix}${toTable}`);
const fromFieldsStr = fromFields?.length
? fromFields.map((f) => driver.escape(f)).join(', ')
: '*';
const toFieldsStr = toFields?.length
? `(${toFields.map((f) => driver.escape(f)).join(', ')})`
: '';
const total = await queryRunner
.query(`SELECT COUNT(*) AS count FROM ${fromTable}`)
.then((rows: Array<{ count: number }>) => rows[0].count);
batchSize = batchSize ?? 10;
let migrated = 0;
while (migrated < total) {
await queryRunner.query(
`INSERT INTO ${toTable} ${toFieldsStr} SELECT ${fromFieldsStr} FROM ${fromTable} LIMIT ${migrated}, ${batchSize}`,
);
migrated += batchSize;
}
},
});
/**
 * Wraps a migration class's `up`/`down` so that each invocation gets a fresh
 * MigrationContext, start/end logging (for `up`), and — when the migration
 * opts out of transactions (`this.transaction === false`) — execution with
 * foreign-key checks disabled via `runDisablingForeignKeys`.
 *
 * The wrapped methods replace the originals on `migration.prototype`.
 */
export const wrapMigration = (migration: Migration) => {
	const { up, down } = migration.prototype;
	Object.assign(migration.prototype, {
		async up(this: BaseMigration, queryRunner: QueryRunner) {
			logMigrationStart(migration.name);
			const context = createContext(queryRunner, migration);
			if (this.transaction === false) {
				await runDisablingForeignKeys(this, context, up);
			} else {
				await up.call(this, context);
			}
			logMigrationEnd(migration.name);
		},
		async down(this: BaseMigration, queryRunner: QueryRunner) {
			// `down` is optional; irreversible migrations simply skip.
			if (down) {
				const context = createContext(queryRunner, migration);
				if (this.transaction === false) {
					// BUG FIX: this branch previously invoked `up`, re-applying the
					// migration instead of reverting it; `down` is the correct target.
					await runDisablingForeignKeys(this, context, down);
				} else {
					await down.call(this, context);
				}
			}
		},
	});
};
/**
 * Copies rows from one (prefixed) table to another in batches, optionally
 * mapping a subset of source columns onto target columns.
 */
export const copyTable = async (
	{ tablePrefix, queryRunner }: Pick<MigrationContext, 'queryRunner' | 'tablePrefix'>,
	fromTable: string,
	toTable: string,
	fromFields: string[] = [],
	toFields: string[] = [],
	batchSize = 10,
) => {
	const { driver } = queryRunner.connection;
	// Escaped, prefix-qualified table identifiers.
	const source = driver.escape(`${tablePrefix}${fromTable}`);
	const target = driver.escape(`${tablePrefix}${toTable}`);
	// Column lists: '*' / empty string when no explicit mapping was requested.
	const columnsFrom =
		fromFields.length > 0 ? fromFields.map((f) => driver.escape(f)).join(', ') : '*';
	const columnsTo =
		toFields.length > 0 ? `(${toFields.map((f) => driver.escape(f)).join(', ')})` : '';
	const countRows = (await queryRunner.query(
		`SELECT COUNT(*) as count from ${source}`,
	)) as Array<{ count: number }>;
	const total = countRows[0].count;
	// Copy batchSize rows per INSERT..SELECT until the whole table is covered.
	for (let copied = 0; copied < total; copied += batchSize) {
		await queryRunner.query(
			`INSERT INTO ${target} ${columnsTo} SELECT ${columnsFrom} FROM ${source} LIMIT ${copied}, ${batchSize}`,
		);
	}
};
/**
 * Appends LIMIT/OFFSET clauses to `query` so a large result set can be
 * consumed page by page. The surrounding newlines are part of the returned
 * string but are harmless to the SQL.
 */
function batchQuery(query: string, limit: number, offset = 0): string {
	const paginated = `
${query}
LIMIT ${limit}
OFFSET ${offset}
`;
	return paginated;
}
export async function runInBatches(
queryRunner: QueryRunner,
query: string,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
operation: (results: any[]) => Promise<void>,
limit = 100,
): Promise<void> {
let offset = 0;
let batchedQuery: string;
let batchedQueryResults: unknown[];
if (query.trim().endsWith(';')) query = query.trim().slice(0, -1);
do {
batchedQuery = batchQuery(query, limit, offset);
batchedQueryResults = (await queryRunner.query(batchedQuery)) as unknown[];
// pass a copy to prevent errors from mutation
await operation([...batchedQueryResults]);
offset += limit;
} while (batchedQueryResults.length === limit);
}
/**
 * Escapes a parameterized query via the active driver, forwarding only the
 * `pinData` and `id` entries of `params` for escaping (no native parameters).
 * Returns the rewritten query plus its ordered parameter list.
 */
export const escapeQuery = (
	queryRunner: QueryRunner,
	query: string,
	params: { [property: string]: unknown },
): [string, unknown[]] => {
	const { driver } = queryRunner.connection;
	const unsafeParams = {
		pinData: params.pinData,
		id: params.id,
	};
	return driver.escapeQueryWithParameters(query, unsafeParams, {});
};