feat(core, editor): Support pairedItem for pinned data (#3843)

* 📘 Adjust interface

* Adjust pindata in state store

* Add utils

* Replace utils calls

* Adjust pindata intake and display

* 🔥 Remove excess BE fixes

* 📝 Update comment

* 🧪 Adjust tests

* 🔥 Remove unneeded helper

* 🚚 Improve naming

* 🧹 Clean up `ormconfig.ts`

* 📘 Add types and type guards

* Improve serializer for sqlite

* Create migration utils

* Set up sqlite serializer

* 🗃️ Write sqlite migration

* 🗃️ Write MySQL migration

* 🗃️ Write Postgres migration

* Add imports and exports to barrels

* 🚚 Rename `runChunked` to `runInBatches`

* Improve migration loggers

* ♻️ Address feedback

* 🚚 Improve naming
This commit is contained in:
Iván Ovejero
2022-08-22 17:46:22 +02:00
committed by GitHub
parent 6bd7a09a45
commit b1e715299d
24 changed files with 399 additions and 143 deletions

View File

@@ -1,7 +1,8 @@
/* eslint-disable no-await-in-loop */
import { readFileSync, rmSync } from 'fs';
import { UserSettings } from 'n8n-core';
import { QueryRunner } from 'typeorm/query-runner/QueryRunner';
import type { QueryRunner } from 'typeorm/query-runner/QueryRunner';
import config from '../../../config';
import { getLogger } from '../../Logger';
const PERSONALIZATION_SURVEY_FILENAME = 'personalizationSurvey.json';
@@ -35,28 +36,36 @@ export function loadSurveyFromDisk(): string | null {
}
let logFinishTimeout: NodeJS.Timeout;
const disableLogging = process.argv[1].split('/').includes('jest');
export function logMigrationStart(migrationName: string): void {
export function logMigrationStart(
migrationName: string,
disableLogging = process.env.NODE_ENV === 'test',
): void {
if (disableLogging) return;
const logger = getLogger();
if (!logFinishTimeout) {
logger.warn('Migrations in progress, please do NOT stop the process.');
getLogger().warn('Migrations in progress, please do NOT stop the process.');
}
logger.debug(`Starting migration ${migrationName}`);
getLogger().debug(`Starting migration ${migrationName}`);
clearTimeout(logFinishTimeout);
}
export function logMigrationEnd(migrationName: string): void {
export function logMigrationEnd(
migrationName: string,
disableLogging = process.env.NODE_ENV === 'test',
): void {
if (disableLogging) return;
const logger = getLogger();
logger.debug(`Finished migration ${migrationName}`);
getLogger().debug(`Finished migration ${migrationName}`);
logFinishTimeout = setTimeout(() => {
logger.warn('Migrations finished.');
getLogger().warn('Migrations finished.');
}, 100);
}
export function chunkQuery(query: string, limit: number, offset = 0): string {
export function batchQuery(query: string, limit: number, offset = 0): string {
return `
${query}
LIMIT ${limit}
@@ -64,7 +73,7 @@ export function chunkQuery(query: string, limit: number, offset = 0): string {
`;
}
export async function runChunked(
export async function runInBatches(
queryRunner: QueryRunner,
query: string,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -72,14 +81,42 @@ export async function runChunked(
limit = 100,
): Promise<void> {
let offset = 0;
let chunkedQuery: string;
let chunkedQueryResults: unknown[];
let batchedQuery: string;
let batchedQueryResults: unknown[];
// eslint-disable-next-line no-param-reassign
if (query.trim().endsWith(';')) query = query.trim().slice(0, -1);
do {
chunkedQuery = chunkQuery(query, limit, offset);
chunkedQueryResults = (await queryRunner.query(chunkedQuery)) as unknown[];
batchedQuery = batchQuery(query, limit, offset);
batchedQueryResults = (await queryRunner.query(batchedQuery)) as unknown[];
// pass a copy to prevent errors from mutation
await operation([...chunkedQueryResults]);
await operation([...batchedQueryResults]);
offset += limit;
} while (chunkedQueryResults.length === limit);
} while (batchedQueryResults.length === limit);
}
/**
 * Resolve the table prefix for the active database.
 *
 * For Postgres the configured schema is prepended (`schema.prefix`), since
 * tables there are scoped to a schema; other databases use the bare prefix.
 */
export const getTablePrefix = () => {
	const prefix = config.getEnv('database.tablePrefix');

	if (config.getEnv('database.type') !== 'postgresdb') return prefix;

	return `${config.getEnv('database.postgresdb.schema')}.${prefix}`;
};
/**
 * Escape the `pinData` and `id` parameters of a query by delegating to the
 * connection driver's `escapeQueryWithParameters`.
 *
 * @returns a tuple of the escaped query string and its parameter values,
 * as produced by the driver.
 */
export const escapeQuery = (
	queryRunner: QueryRunner,
	query: string,
	params: { [property: string]: unknown },
): [string, unknown[]] => {
	const { pinData, id } = params;
	const { driver } = queryRunner.connection;

	return driver.escapeQueryWithParameters(query, { pinData, id }, {});
};

View File

@@ -0,0 +1,22 @@
import type { IDataObject, INodeExecutionData } from 'n8n-workflow';
/**
 * Shapes of workflow pinned data as stored before and after migration.
 */
export namespace PinData {
	// Pre-migration format: plain data objects keyed by node name.
	export type Old = Record<string, IDataObject[]>;

	// Post-migration format: full execution items keyed by node name.
	export type New = Record<string, INodeExecutionData[]>;

	// Workflow row as fetched during migration; `pinData` may be a serialized
	// string or an already-parsed object depending on the database driver.
	export type FetchedWorkflow = { id: number; pinData: string | object };
}
/**
 * Type guard: `true` only for plain object values — non-null, of type
 * `object`, and not an array.
 */
export function isObjectLiteral(maybeObject: unknown): maybeObject is { [key: string]: string } {
	if (maybeObject === null) return false;
	if (Array.isArray(maybeObject)) return false;

	return typeof maybeObject === 'object';
}
/**
 * Type guard: `true` for plain (non-array, non-null) objects that carry an
 * own enumerable `json` key, i.e. items already shaped like execution data.
 */
export function isJsonKeyObject(item: unknown): item is {
	json: unknown;
	[otherKeys: string]: unknown;
} {
	// Inlined plain-object check: reject non-objects, null, and arrays.
	if (typeof item !== 'object' || item === null || Array.isArray(item)) return false;

	return Object.keys(item).includes('json');
}

View File

@@ -1,4 +1,5 @@
import { ValueTransformer } from 'typeorm';
import config from '../../../config';
export const idStringifier = {
from: (value: number): string | number => (typeof value === 'number' ? value.toString() : value),
@@ -20,11 +21,14 @@ export const objectRetriever: ValueTransformer = {
};
/**
* Transformer to store object as string and retrieve string as object.
* Transformer for sqlite JSON columns to mimic JSON-as-object behavior
* from Postgres and MySQL.
*/
export const serializer: ValueTransformer = {
to: (value: object | string): string =>
typeof value === 'object' ? JSON.stringify(value) : value,
/**
 * Transformer for sqlite JSON columns to mimic the JSON-as-object behavior
 * of Postgres and MySQL: stringify on write (sqlite only), parse on read.
 */
const jsonColumn: ValueTransformer = {
	to: (value: object): string | object => {
		// sqlite has no native JSON column type, so persist objects as strings.
		if (config.getEnv('database.type') === 'sqlite') return JSON.stringify(value);
		return value;
	},
	from: (value: string | object): object => {
		// Non-string values are already parsed objects; pass them through.
		if (typeof value !== 'string') return value;
		return JSON.parse(value) as object;
	},
};

export const sqlite = { jsonColumn };