feat(Google BigQuery Node): Node improvements (#4877)
* ⚡ setup * ⚡ finished v2 setup * ⚡ fix return all, fix simplify with nested schema * ⚡ fix for external tables, updated scopes * ⚡ query operation * ⚡ linter fixes * ⚡ fixed not processed errors when inserting, move main loop to execute function to allow bulk request * ⚡ customizable batch size when inserting, improved errors * ⚡ options for mapping input * ⚡ fix for inserting RECORD type * ⚡ updated simplify logic * ⚡ fix for return with selected fields * ⚡ option to return table schema * ⚡ linter fixes * ⚡ fix imports * ⚡ query resource and fixes, rlc for projects * ⚡ removed simplify, added raw output option * ⚡ rlc for tables and datasets, no urls option * ⚡ updated hints and description of query parameter, fix getMany VIEW, multioptions for fields * ⚡ added case when rows are empty * ⚡ linter fixes * ⚡ UI update, one resource * ⚡ fix for output with field named json * ⚡ using jobs instead of queries * ⚡ added error message * ⚡ search for RLCs, fixes * ⚡ json processing * ⚡ removed getAll operation * ⚡ executeQuery update * ⚡ unit test * ⚡ tests setup, fixes * ⚡ tests * Remove script for checking unused loadOptions --------- Co-authored-by: agobrech <ael.gobrecht@gmail.com>
This commit is contained in:
@@ -0,0 +1,64 @@
|
||||
import type { INodeProperties } from 'n8n-workflow';
|
||||
import { datasetRLC, projectRLC, tableRLC } from '../commonDescriptions/RLC.description';
|
||||
import * as insert from './insert.operation';
|
||||
import * as executeQuery from './executeQuery.operation';
|
||||
|
||||
export { executeQuery, insert };
|
||||
|
||||
export const description: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'Operation',
|
||||
name: 'operation',
|
||||
type: 'options',
|
||||
noDataExpression: true,
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: ['database'],
|
||||
},
|
||||
},
|
||||
options: [
|
||||
{
|
||||
name: 'Execute Query',
|
||||
value: 'executeQuery',
|
||||
description: 'Execute a SQL query',
|
||||
action: 'Execute a SQL query',
|
||||
},
|
||||
{
|
||||
name: 'Insert',
|
||||
value: 'insert',
|
||||
description: 'Insert rows in a table',
|
||||
action: 'Insert rows in a table',
|
||||
},
|
||||
],
|
||||
default: 'executeQuery',
|
||||
},
|
||||
{
|
||||
...projectRLC,
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: ['database'],
|
||||
operation: ['executeQuery', 'insert'],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
...datasetRLC,
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: ['database'],
|
||||
operation: ['insert'],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
...tableRLC,
|
||||
displayOptions: {
|
||||
show: {
|
||||
resource: ['database'],
|
||||
operation: ['insert'],
|
||||
},
|
||||
},
|
||||
},
|
||||
...executeQuery.description,
|
||||
...insert.description,
|
||||
];
|
||||
@@ -0,0 +1,299 @@
|
||||
import type { IExecuteFunctions } from 'n8n-core';
|
||||
|
||||
import type { IDataObject, INodeExecutionData, INodeProperties } from 'n8n-workflow';
|
||||
|
||||
import { NodeOperationError, sleep } from 'n8n-workflow';
|
||||
import { updateDisplayOptions } from '../../../../../../utils/utilities';
|
||||
import type { JobInsertResponse } from '../../helpers/interfaces';
|
||||
|
||||
import { prepareOutput } from '../../helpers/utils';
|
||||
import { googleApiRequest } from '../../transport';
|
||||
|
||||
const properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'SQL Query',
|
||||
name: 'sqlQuery',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
hide: {
|
||||
'/options.useLegacySql': [true],
|
||||
},
|
||||
},
|
||||
default: '',
|
||||
placeholder: 'SELECT * FROM dataset.table LIMIT 100',
|
||||
description:
|
||||
'SQL query to execute, you can find more information <a href="https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax" target="_blank">here</a>. Standard SQL syntax used by default, but you can also use Legacy SQL syntax by using optinon \'Use Legacy SQL\'.',
|
||||
},
|
||||
{
|
||||
displayName: 'SQL Query',
|
||||
name: 'sqlQuery',
|
||||
type: 'string',
|
||||
displayOptions: {
|
||||
show: {
|
||||
'/options.useLegacySql': [true],
|
||||
},
|
||||
},
|
||||
default: '',
|
||||
placeholder: 'SELECT * FROM [project:dataset.table] LIMIT 100;',
|
||||
hint: 'Legacy SQL syntax',
|
||||
description:
|
||||
'SQL query to execute, you can find more information about Legacy SQL syntax <a href="https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax" target="_blank">here</a>',
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Options',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Default Dataset Name or ID',
|
||||
name: 'defaultDataset',
|
||||
type: 'options',
|
||||
typeOptions: {
|
||||
loadOptionsMethod: 'getDatasets',
|
||||
loadOptionsDependsOn: ['projectId.value'],
|
||||
},
|
||||
default: '',
|
||||
description:
|
||||
'If not set, all table names in the query string must be qualified in the format \'datasetId.tableId\'. Choose from the list, or specify an ID using an <a href="https://docs.n8n.io/code-examples/expressions/">expression</a>.',
|
||||
},
|
||||
{
|
||||
displayName: 'Dry Run',
|
||||
name: 'dryRun',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description:
|
||||
"Whether set to true BigQuery doesn't run the job. Instead, if the query is valid, BigQuery returns statistics about the job such as how many bytes would be processed. If the query is invalid, an error returns.",
|
||||
},
|
||||
{
|
||||
displayName: 'Include Schema in Output',
|
||||
name: 'includeSchema',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description:
|
||||
"Whether to include the schema in the output. If set to true, the output will contain key '_schema' with the schema of the table.",
|
||||
displayOptions: {
|
||||
hide: {
|
||||
rawOutput: [true],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Location',
|
||||
name: 'location',
|
||||
type: 'string',
|
||||
default: '',
|
||||
placeholder: 'e.g. europe-west3',
|
||||
description:
|
||||
'Location or the region where data would be stored and processed. Pricing for storage and analysis is also defined by location of data and reservations, more information <a href="https://cloud.google.com/bigquery/docs/locations" target="_blank">here</a>.',
|
||||
},
|
||||
{
|
||||
displayName: 'Maximum Bytes Billed',
|
||||
name: 'maximumBytesBilled',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description:
|
||||
'Limits the bytes billed for this query. Queries with bytes billed above this limit will fail (without incurring a charge). String in <a href="https://developers.google.com/discovery/v1/type-format?utm_source=cloud.google.com&utm_medium=referral" target="_blank">Int64Value</a> format',
|
||||
},
|
||||
{
|
||||
displayName: 'Max Results',
|
||||
name: 'maxResults',
|
||||
type: 'number',
|
||||
default: 1000,
|
||||
description: 'The maximum number of rows of data to return',
|
||||
},
|
||||
{
|
||||
displayName: 'Timeout',
|
||||
name: 'timeoutMs',
|
||||
type: 'number',
|
||||
default: 10000,
|
||||
description: 'How long to wait for the query to complete, in milliseconds',
|
||||
},
|
||||
{
|
||||
displayName: 'Raw Output',
|
||||
name: 'rawOutput',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
displayOptions: {
|
||||
hide: {
|
||||
dryRun: [true],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Use Legacy SQL',
|
||||
name: 'useLegacySql',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description:
|
||||
"Whether to use BigQuery's legacy SQL dialect for this query. If set to false, the query will use BigQuery's standard SQL.",
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const displayOptions = {
|
||||
show: {
|
||||
resource: ['database'],
|
||||
operation: ['executeQuery'],
|
||||
},
|
||||
};
|
||||
|
||||
export const description = updateDisplayOptions(displayOptions, properties);
|
||||
|
||||
export async function execute(this: IExecuteFunctions): Promise<INodeExecutionData[]> {
|
||||
// https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query
|
||||
|
||||
const items = this.getInputData();
|
||||
const length = items.length;
|
||||
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
|
||||
let jobs = [];
|
||||
|
||||
for (let i = 0; i < length; i++) {
|
||||
try {
|
||||
const sqlQuery = this.getNodeParameter('sqlQuery', i) as string;
|
||||
const options = this.getNodeParameter('options', i);
|
||||
const projectId = this.getNodeParameter('projectId', i, undefined, {
|
||||
extractValue: true,
|
||||
});
|
||||
|
||||
let rawOutput = false;
|
||||
let includeSchema = false;
|
||||
|
||||
if (options.rawOutput !== undefined) {
|
||||
rawOutput = options.rawOutput as boolean;
|
||||
delete options.rawOutput;
|
||||
}
|
||||
|
||||
if (options.includeSchema !== undefined) {
|
||||
includeSchema = options.includeSchema as boolean;
|
||||
delete options.includeSchema;
|
||||
}
|
||||
|
||||
const body: IDataObject = { ...options };
|
||||
|
||||
body.query = sqlQuery;
|
||||
|
||||
if (body.defaultDataset) {
|
||||
body.defaultDataset = {
|
||||
datasetId: options.defaultDataset,
|
||||
projectId,
|
||||
};
|
||||
}
|
||||
|
||||
if (body.useLegacySql === undefined) {
|
||||
body.useLegacySql = false;
|
||||
}
|
||||
|
||||
const response: JobInsertResponse = await googleApiRequest.call(
|
||||
this,
|
||||
'POST',
|
||||
`/v2/projects/${projectId}/jobs`,
|
||||
{
|
||||
configuration: {
|
||||
query: body,
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
if (!response?.jobReference?.jobId) {
|
||||
throw new NodeOperationError(this.getNode(), `No job ID returned, item ${i}`, {
|
||||
description: `sql: ${sqlQuery}`,
|
||||
itemIndex: i,
|
||||
});
|
||||
}
|
||||
|
||||
const jobId = response?.jobReference?.jobId;
|
||||
const raw = rawOutput || (options.dryRun as boolean) || false;
|
||||
|
||||
if (response.status?.state === 'DONE') {
|
||||
const qs = options.location ? { location: options.location } : {};
|
||||
|
||||
const queryResponse: IDataObject = await googleApiRequest.call(
|
||||
this,
|
||||
'GET',
|
||||
`/v2/projects/${projectId}/queries/${jobId}`,
|
||||
undefined,
|
||||
qs,
|
||||
);
|
||||
|
||||
returnData.push(...prepareOutput(queryResponse, i, raw, includeSchema));
|
||||
} else {
|
||||
jobs.push({ jobId, projectId, i, raw, includeSchema, location: options.location });
|
||||
}
|
||||
} catch (error) {
|
||||
if (this.continueOnFail()) {
|
||||
const executionErrorData = this.helpers.constructExecutionMetaData(
|
||||
this.helpers.returnJsonArray({ error: error.message }),
|
||||
{ itemData: { item: i } },
|
||||
);
|
||||
returnData.push(...executionErrorData);
|
||||
continue;
|
||||
}
|
||||
throw new NodeOperationError(this.getNode(), error.message as string, {
|
||||
itemIndex: i,
|
||||
description: error?.description,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let waitTime = 1000;
|
||||
while (jobs.length > 0) {
|
||||
const completedJobs: string[] = [];
|
||||
|
||||
for (const job of jobs) {
|
||||
try {
|
||||
const qs = job.location ? { location: job.location } : {};
|
||||
|
||||
const response: IDataObject = await googleApiRequest.call(
|
||||
this,
|
||||
'GET',
|
||||
`/v2/projects/${job.projectId}/queries/${job.jobId}`,
|
||||
undefined,
|
||||
qs,
|
||||
);
|
||||
|
||||
if (response.jobComplete) {
|
||||
completedJobs.push(job.jobId);
|
||||
|
||||
returnData.push(...prepareOutput(response, job.i, job.raw, job.includeSchema));
|
||||
}
|
||||
if ((response?.errors as IDataObject[])?.length) {
|
||||
const errorMessages = (response.errors as IDataObject[]).map((error) => error.message);
|
||||
throw new Error(
|
||||
`Error(s) ocurring while executing query from item ${job.i.toString()}: ${errorMessages.join(
|
||||
', ',
|
||||
)}`,
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
if (this.continueOnFail()) {
|
||||
const executionErrorData = this.helpers.constructExecutionMetaData(
|
||||
this.helpers.returnJsonArray({ error: error.message }),
|
||||
{ itemData: { item: job.i } },
|
||||
);
|
||||
returnData.push(...executionErrorData);
|
||||
continue;
|
||||
}
|
||||
throw new NodeOperationError(this.getNode(), error.message as string, {
|
||||
itemIndex: job.i,
|
||||
description: error?.description,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
jobs = jobs.filter((job) => !completedJobs.includes(job.jobId));
|
||||
|
||||
if (jobs.length > 0) {
|
||||
await sleep(waitTime);
|
||||
if (waitTime < 30000) {
|
||||
waitTime = waitTime * 2;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return returnData;
|
||||
}
|
||||
@@ -0,0 +1,285 @@
|
||||
import type { IExecuteFunctions } from 'n8n-core';
|
||||
import type { IDataObject, INodeExecutionData, INodeProperties } from 'n8n-workflow';
|
||||
import { NodeOperationError } from 'n8n-workflow';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
import { updateDisplayOptions } from '../../../../../../utils/utilities';
|
||||
import type { TableSchema } from '../../helpers/interfaces';
|
||||
import { checkSchema, wrapData } from '../../helpers/utils';
|
||||
import { googleApiRequest } from '../../transport';
|
||||
|
||||
const properties: INodeProperties[] = [
|
||||
{
|
||||
displayName: 'Data Mode',
|
||||
name: 'dataMode',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
name: 'Auto-Map Input Data',
|
||||
value: 'autoMap',
|
||||
description: 'Use when node input properties match destination field names',
|
||||
},
|
||||
{
|
||||
name: 'Map Each Field Below',
|
||||
value: 'define',
|
||||
description: 'Set the value for each destination field',
|
||||
},
|
||||
],
|
||||
default: 'autoMap',
|
||||
description: 'Whether to insert the input data this node receives in the new row',
|
||||
},
|
||||
{
|
||||
displayName:
|
||||
"In this mode, make sure the incoming data fields are named the same as the columns in BigQuery. (Use a 'set' node before this node to change them if required.)",
|
||||
name: 'info',
|
||||
type: 'notice',
|
||||
default: '',
|
||||
displayOptions: {
|
||||
show: {
|
||||
dataMode: ['autoMap'],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Fields to Send',
|
||||
name: 'fieldsUi',
|
||||
placeholder: 'Add Field',
|
||||
type: 'fixedCollection',
|
||||
typeOptions: {
|
||||
multipleValueButtonText: 'Add Field',
|
||||
multipleValues: true,
|
||||
},
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Field',
|
||||
name: 'values',
|
||||
values: [
|
||||
{
|
||||
displayName: 'Field Name or ID',
|
||||
name: 'fieldId',
|
||||
type: 'options',
|
||||
description:
|
||||
'Choose from the list, or specify an ID using an <a href="https://docs.n8n.io/code-examples/expressions/">expression</a>',
|
||||
typeOptions: {
|
||||
loadOptionsDependsOn: ['projectId.value', 'datasetId.value', 'tableId.value'],
|
||||
loadOptionsMethod: 'getSchema',
|
||||
},
|
||||
default: '',
|
||||
},
|
||||
{
|
||||
displayName: 'Field Value',
|
||||
name: 'fieldValue',
|
||||
type: 'string',
|
||||
default: '',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
displayOptions: {
|
||||
show: {
|
||||
dataMode: ['define'],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Options',
|
||||
name: 'options',
|
||||
type: 'collection',
|
||||
placeholder: 'Add Options',
|
||||
default: {},
|
||||
options: [
|
||||
{
|
||||
displayName: 'Batch Size',
|
||||
name: 'batchSize',
|
||||
type: 'number',
|
||||
default: 100,
|
||||
typeOptions: {
|
||||
minValue: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
displayName: 'Ignore Unknown Values',
|
||||
name: 'ignoreUnknownValues',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Whether to gnore row values that do not match the schema',
|
||||
},
|
||||
{
|
||||
displayName: 'Skip Invalid Rows',
|
||||
name: 'skipInvalidRows',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
description: 'Whether to skip rows with values that do not match the schema',
|
||||
},
|
||||
{
|
||||
displayName: 'Template Suffix',
|
||||
name: 'templateSuffix',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description:
|
||||
'Create a new table based on the destination table and insert rows into the new table. The new table will be named <code>{destinationTable}{templateSuffix}</code>',
|
||||
},
|
||||
{
|
||||
displayName: 'Trace ID',
|
||||
name: 'traceId',
|
||||
type: 'string',
|
||||
default: '',
|
||||
description:
|
||||
'Unique ID for the request, for debugging only. It is case-sensitive, limited to up to 36 ASCII characters. A UUID is recommended.',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
|
||||
const displayOptions = {
|
||||
show: {
|
||||
resource: ['database'],
|
||||
operation: ['insert'],
|
||||
},
|
||||
};
|
||||
|
||||
export const description = updateDisplayOptions(displayOptions, properties);
|
||||
|
||||
export async function execute(this: IExecuteFunctions): Promise<INodeExecutionData[]> {
|
||||
// https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll
|
||||
const projectId = this.getNodeParameter('projectId', 0, undefined, {
|
||||
extractValue: true,
|
||||
});
|
||||
const datasetId = this.getNodeParameter('datasetId', 0, undefined, {
|
||||
extractValue: true,
|
||||
});
|
||||
const tableId = this.getNodeParameter('tableId', 0, undefined, {
|
||||
extractValue: true,
|
||||
});
|
||||
|
||||
const options = this.getNodeParameter('options', 0);
|
||||
const dataMode = this.getNodeParameter('dataMode', 0) as string;
|
||||
|
||||
let batchSize = 100;
|
||||
if (options.batchSize) {
|
||||
batchSize = options.batchSize as number;
|
||||
delete options.batchSize;
|
||||
}
|
||||
|
||||
const items = this.getInputData();
|
||||
const length = items.length;
|
||||
|
||||
const returnData: INodeExecutionData[] = [];
|
||||
const rows: IDataObject[] = [];
|
||||
const body: IDataObject = {};
|
||||
|
||||
Object.assign(body, options);
|
||||
if (body.traceId === undefined) {
|
||||
body.traceId = uuid();
|
||||
}
|
||||
|
||||
const schema = (
|
||||
await googleApiRequest.call(
|
||||
this,
|
||||
'GET',
|
||||
`/v2/projects/${projectId}/datasets/${datasetId}/tables/${tableId}`,
|
||||
{},
|
||||
)
|
||||
).schema as TableSchema;
|
||||
|
||||
if (schema === undefined) {
|
||||
throw new NodeOperationError(this.getNode(), 'The destination table has no defined schema');
|
||||
}
|
||||
|
||||
for (let i = 0; i < length; i++) {
|
||||
try {
|
||||
const record: IDataObject = {};
|
||||
|
||||
if (dataMode === 'autoMap') {
|
||||
schema.fields.forEach(({ name }) => {
|
||||
record[name] = items[i].json[name];
|
||||
});
|
||||
}
|
||||
|
||||
if (dataMode === 'define') {
|
||||
const fields = this.getNodeParameter('fieldsUi.values', i, []) as IDataObject[];
|
||||
|
||||
fields.forEach(({ fieldId, fieldValue }) => {
|
||||
record[`${fieldId}`] = fieldValue;
|
||||
});
|
||||
}
|
||||
|
||||
rows.push({ json: checkSchema.call(this, schema, record, i) });
|
||||
} catch (error) {
|
||||
if (this.continueOnFail()) {
|
||||
const executionErrorData = this.helpers.constructExecutionMetaData(
|
||||
this.helpers.returnJsonArray({ error: error.message }),
|
||||
{ itemData: { item: i } },
|
||||
);
|
||||
returnData.push(...executionErrorData);
|
||||
continue;
|
||||
}
|
||||
throw new NodeOperationError(this.getNode(), error.message as string, {
|
||||
itemIndex: i,
|
||||
description: error?.description,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
for (let i = 0; i < rows.length; i += batchSize) {
|
||||
const batch = rows.slice(i, i + batchSize);
|
||||
body.rows = batch;
|
||||
|
||||
const responseData = await googleApiRequest.call(
|
||||
this,
|
||||
'POST',
|
||||
`/v2/projects/${projectId}/datasets/${datasetId}/tables/${tableId}/insertAll`,
|
||||
body,
|
||||
);
|
||||
|
||||
if (responseData?.insertErrors && !options.skipInvalidRows) {
|
||||
const errors: string[] = [];
|
||||
const failedRows: number[] = [];
|
||||
const stopedRows: number[] = [];
|
||||
|
||||
(responseData.insertErrors as IDataObject[]).forEach((entry) => {
|
||||
const invalidRows = (entry.errors as IDataObject[]).filter(
|
||||
(error) => error.reason !== 'stopped',
|
||||
);
|
||||
if (invalidRows.length) {
|
||||
const entryIndex = (entry.index as number) + i;
|
||||
errors.push(
|
||||
`Row ${entryIndex} failed with error: ${invalidRows
|
||||
.map((error) => error.message)
|
||||
.join(', ')}`,
|
||||
);
|
||||
failedRows.push(entryIndex);
|
||||
} else {
|
||||
const entryIndex = (entry.index as number) + i;
|
||||
stopedRows.push(entryIndex);
|
||||
}
|
||||
});
|
||||
|
||||
if (this.continueOnFail()) {
|
||||
const executionErrorData = this.helpers.constructExecutionMetaData(
|
||||
this.helpers.returnJsonArray({ error: errors.join('\n, ') }),
|
||||
{ itemData: { item: i } },
|
||||
);
|
||||
returnData.push(...executionErrorData);
|
||||
continue;
|
||||
}
|
||||
|
||||
const failedMessage = `Problem inserting item(s) [${failedRows.join(', ')}]`;
|
||||
const stoppedMessage = stopedRows.length
|
||||
? `, nothing was inserted item(s) [${stopedRows.join(', ')}]`
|
||||
: '';
|
||||
throw new NodeOperationError(this.getNode(), `${failedMessage}${stoppedMessage}`, {
|
||||
description: errors.join('\n, '),
|
||||
});
|
||||
}
|
||||
|
||||
const executionData = this.helpers.constructExecutionMetaData(
|
||||
wrapData(responseData as IDataObject[]),
|
||||
{ itemData: { item: 0 } },
|
||||
);
|
||||
|
||||
returnData.push(...executionData);
|
||||
}
|
||||
|
||||
return returnData;
|
||||
}
|
||||
Reference in New Issue
Block a user