feat: Environments release using source control (#6653)

* initial telemetry setup and adjusted pull return

* quicksave before merge

* feat: add conflicting workflow list to pull modal

* feat: update source control pull modal

* fix: fix linting issue

* feat: add Enter keydown event for submitting source control push modal (no-changelog)

feat: add Enter keydown event for submitting source control push modal

* quicksave

* user workflow table for export

* improve telemetry data

* pull api telemetry

* fix lint

* Copy tweaks.

* remove authorName and authorEmail and pick from user

* rename owners.json to workflow_owners.json

* ignore credential conflicts on pull

* feat: several push/pull flow changes and design update

* pull and push return same data format

* fix: add One last step toast for successful pull

* feat: add up to date pull toast

* fix: add proper Learn more link for push and pull modals

* do not await tracking being sent

* fix import

* fix await

* add more sourcecontrolfile status

* Minor copy tweak for "More info".

* Minor copy tweak for "More info".

* ignore variable_stub conflicts on pull

* ignore whitespace differences

* do not show remote workflows that are not yet created

* fix telemetry

* fix toast when pulling deleted wf

* lint fix

* refactor and make some imports dynamic

* fix variable edit validation

* fix telemetry response

* improve telemetry

* fix unintentional delete commit

* fix status unknown issue

* fix up to date toast

* do not export active state and reapply versionid

* use update instead of upsert

* fix: show all workflows when clicking push to git

* feat: update Up to date pull translation

* fix: update read only env checks

* do not update versionid of only active flag changes

* feat: prevent access to new workflow and templates import when read only env

* feat: send only active state and version if workflow state is not dirty

* fix: Detect when only active state has changed and prevent generating a new version ID

* feat: improve readonly env messages

* make getPreferences public

* fix telemetry issue

* fix: add partial workflow update based on dirty state when changing active state

* update unit tests

* fix: remove unsaved changes check in readOnlyEnv

* fix: disable push to git button when read only env

* fix: update readonly toast duration

* fix: fix pinning and title input in protected mode

* initial commit (NOT working)

* working push

* cleanup and implement pull

* fix getstatus

* update import to new method

* var and tag diffs are no conflicts

* only show pull conflict for workflows

* refactor and ignore faulty credentials

* add sanitycheck for missing git folder

* prefer fetch over pull and limit depth to 1

* back to pull...

* fix setting branch on initial connect

* fix test

* remove clean workfolder

* refactor: Remove some unnecessary code

* Fixed links to docs.

* fix getstatus query params

* lint fix

* dialog to show local and remote name on conflict

* only show remote name on conflict

* fix credential expression export

* fix: Broken test

* dont show toast on pull with empty var/tags and refactor

* apply frontend changes from old branch

* fix tag with same name import

* fix buttons shown for non instance owners

* prepare local storage key for removal

* refactor: Change wording on pushing and pulling

* refactor: Change menu item

* test: Fix broken test

* Update packages/cli/src/environments/sourceControl/types/sourceControlPushWorkFolder.ts

Co-authored-by: Iván Ovejero <ivov.src@gmail.com>

---------

Co-authored-by: Alex Grozav <alex@grozav.com>
Co-authored-by: Giulio Andreini <g.andreini@gmail.com>
Co-authored-by: Omar Ajoue <krynble@gmail.com>
Co-authored-by: Iván Ovejero <ivov.src@gmail.com>
This commit is contained in:
Michael Auerswald
2023-07-26 09:25:01 +02:00
committed by GitHub
parent bcfc5e717b
commit fc7aa8bd66
51 changed files with 2210 additions and 1064 deletions

View File

@@ -3,7 +3,6 @@ import path from 'path';
import {
SOURCE_CONTROL_CREDENTIAL_EXPORT_FOLDER,
SOURCE_CONTROL_GIT_FOLDER,
SOURCE_CONTROL_OWNERS_EXPORT_FILE,
SOURCE_CONTROL_TAGS_EXPORT_FILE,
SOURCE_CONTROL_VARIABLES_EXPORT_FILE,
SOURCE_CONTROL_WORKFLOW_EXPORT_FOLDER,
@@ -16,15 +15,16 @@ import { Credentials, UserSettings } from 'n8n-core';
import type { IWorkflowToImport } from '@/Interfaces';
import type { ExportableCredential } from './types/exportableCredential';
import { Variables } from '@db/entities/Variables';
import type { ImportResult } from './types/importResult';
import { UM_FIX_INSTRUCTION } from '@/commands/BaseCommand';
import { SharedCredentials } from '@db/entities/SharedCredentials';
import type { WorkflowTagMapping } from '@db/entities/WorkflowTagMapping';
import type { TagEntity } from '@db/entities/TagEntity';
import { ActiveWorkflowRunner } from '../../ActiveWorkflowRunner';
import type { SourceControllPullOptions } from './types/sourceControlPullWorkFolder';
import { ActiveWorkflowRunner } from '@/ActiveWorkflowRunner';
import { In } from 'typeorm';
import { isUniqueConstraintError } from '../../ResponseHelper';
import { isUniqueConstraintError } from '@/ResponseHelper';
import type { SourceControlWorkflowVersionId } from './types/sourceControlWorkflowVersionId';
import { getCredentialExportPath, getWorkflowExportPath } from './sourceControlHelper.ee';
import type { SourceControlledFile } from './types/sourceControlledFile';
@Service()
export class SourceControlImportService {
@@ -143,65 +143,113 @@ export class SourceControlImportService {
return importCredentialsResult.filter((e) => e !== undefined);
}
private async importVariablesFromFile(valueOverrides?: {
[key: string]: string;
}): Promise<{ imported: string[] }> {
public async getRemoteVersionIdsFromFiles(): Promise<SourceControlWorkflowVersionId[]> {
const remoteWorkflowFiles = await glob('*.json', {
cwd: this.workflowExportFolder,
absolute: true,
});
const remoteWorkflowFilesParsed = await Promise.all(
remoteWorkflowFiles.map(async (file) => {
LoggerProxy.debug(`Parsing workflow file ${file}`);
const remote = jsonParse<IWorkflowToImport>(await fsReadFile(file, { encoding: 'utf8' }));
if (!remote?.id) {
return undefined;
}
return {
id: remote.id,
versionId: remote.versionId,
name: remote.name,
remoteId: remote.id,
filename: getWorkflowExportPath(remote.id, this.workflowExportFolder),
} as SourceControlWorkflowVersionId;
}),
);
return remoteWorkflowFilesParsed.filter(
(e) => e !== undefined,
) as SourceControlWorkflowVersionId[];
}
public async getLocalVersionIdsFromDb(): Promise<SourceControlWorkflowVersionId[]> {
const localWorkflows = await Db.collections.Workflow.find({
select: ['id', 'name', 'versionId', 'updatedAt'],
});
return localWorkflows.map((local) => ({
id: local.id,
versionId: local.versionId,
name: local.name,
localId: local.id,
filename: getWorkflowExportPath(local.id, this.workflowExportFolder),
updatedAt: local.updatedAt.toISOString(),
})) as SourceControlWorkflowVersionId[];
}
public async getRemoteCredentialsFromFiles(): Promise<
Array<ExportableCredential & { filename: string }>
> {
const remoteCredentialFiles = await glob('*.json', {
cwd: this.credentialExportFolder,
absolute: true,
});
const remoteCredentialFilesParsed = await Promise.all(
remoteCredentialFiles.map(async (file) => {
LoggerProxy.debug(`Parsing credential file ${file}`);
const remote = jsonParse<ExportableCredential>(
await fsReadFile(file, { encoding: 'utf8' }),
);
if (!remote?.id) {
return undefined;
}
return {
...remote,
filename: getCredentialExportPath(remote.id, this.credentialExportFolder),
};
}),
);
return remoteCredentialFilesParsed.filter((e) => e !== undefined) as Array<
ExportableCredential & { filename: string }
>;
}
public async getLocalCredentialsFromDb(): Promise<
Array<ExportableCredential & { filename: string }>
> {
const localCredentials = await Db.collections.Credentials.find({
select: ['id', 'name', 'type', 'nodesAccess'],
});
return localCredentials.map((local) => ({
id: local.id,
name: local.name,
type: local.type,
nodesAccess: local.nodesAccess,
filename: getCredentialExportPath(local.id, this.credentialExportFolder),
})) as Array<ExportableCredential & { filename: string }>;
}
public async getRemoteVariablesFromFile(): Promise<Variables[]> {
const variablesFile = await glob(SOURCE_CONTROL_VARIABLES_EXPORT_FILE, {
cwd: this.gitFolder,
absolute: true,
});
const result: { imported: string[] } = { imported: [] };
if (variablesFile.length > 0) {
LoggerProxy.debug(`Importing variables from file ${variablesFile[0]}`);
const importedVariables = jsonParse<Array<Partial<Variables>>>(
await fsReadFile(variablesFile[0], { encoding: 'utf8' }),
{ fallbackValue: [] },
);
const overriddenKeys = Object.keys(valueOverrides ?? {});
for (const variable of importedVariables) {
if (!variable.key) {
continue;
}
// by default no value is stored remotely, so an empty string is retuned
// it must be changed to undefined so as to not overwrite existing values!
if (variable.value === '') {
variable.value = undefined;
}
if (overriddenKeys.includes(variable.key) && valueOverrides) {
variable.value = valueOverrides[variable.key];
overriddenKeys.splice(overriddenKeys.indexOf(variable.key), 1);
}
try {
await Db.collections.Variables.upsert({ ...variable }, ['id']);
} catch (errorUpsert) {
if (isUniqueConstraintError(errorUpsert as Error)) {
LoggerProxy.debug(`Variable ${variable.key} already exists, updating instead`);
try {
await Db.collections.Variables.update({ key: variable.key }, { ...variable });
} catch (errorUpdate) {
LoggerProxy.debug(`Failed to update variable ${variable.key}, skipping`);
LoggerProxy.debug((errorUpdate as Error).message);
}
}
} finally {
result.imported.push(variable.key);
}
}
// add remaining overrides as new variables
if (overriddenKeys.length > 0 && valueOverrides) {
for (const key of overriddenKeys) {
result.imported.push(key);
const newVariable = new Variables({ key, value: valueOverrides[key] });
await Db.collections.Variables.save(newVariable);
}
}
return jsonParse<Variables[]>(await fsReadFile(variablesFile[0], { encoding: 'utf8' }), {
fallbackValue: [],
});
}
return result;
return [];
}
private async importTagsFromFile() {
public async getLocalVariablesFromDb(): Promise<Variables[]> {
const localVariables = await Db.collections.Variables.find({
select: ['id', 'key', 'type', 'value'],
});
return localVariables;
}
public async getRemoteTagsAndMappingsFromFile(): Promise<{
tags: TagEntity[];
mappings: WorkflowTagMapping[];
}> {
const tagsFile = await glob(SOURCE_CONTROL_TAGS_EXPORT_FILE, {
cwd: this.gitFolder,
absolute: true,
@@ -212,110 +260,51 @@ export class SourceControlImportService {
await fsReadFile(tagsFile[0], { encoding: 'utf8' }),
{ fallbackValue: { tags: [], mappings: [] } },
);
const existingWorkflowIds = new Set(
(
await Db.collections.Workflow.find({
select: ['id'],
})
).map((e) => e.id),
);
await Promise.all(
mappedTags.tags.map(async (tag) => {
await Db.collections.Tag.upsert(
{
...tag,
},
{
skipUpdateIfNoValuesChanged: true,
conflictPaths: { id: true },
},
);
}),
);
await Promise.all(
mappedTags.mappings.map(async (mapping) => {
if (!existingWorkflowIds.has(String(mapping.workflowId))) return;
await Db.collections.WorkflowTagMapping.upsert(
{ tagId: String(mapping.tagId), workflowId: String(mapping.workflowId) },
{
skipUpdateIfNoValuesChanged: true,
conflictPaths: { tagId: true, workflowId: true },
},
);
}),
);
return mappedTags;
}
return { tags: [], mappings: [] };
}
private async importWorkflowsFromFiles(
userId: string,
): Promise<Array<{ id: string; name: string }>> {
const workflowFiles = await glob('*.json', {
cwd: this.workflowExportFolder,
absolute: true,
public async getLocalTagsAndMappingsFromDb(): Promise<{
tags: TagEntity[];
mappings: WorkflowTagMapping[];
}> {
const localTags = await Db.collections.Tag.find({
select: ['id', 'name'],
});
const existingWorkflows = await Db.collections.Workflow.find({
select: ['id', 'name', 'active', 'versionId'],
const localMappings = await Db.collections.WorkflowTagMapping.find({
select: ['workflowId', 'tagId'],
});
return { tags: localTags, mappings: localMappings };
}
public async importWorkflowFromWorkFolder(candidates: SourceControlledFile[], userId: string) {
const ownerWorkflowRole = await this.getOwnerWorkflowRole();
const workflowRunner = Container.get(ActiveWorkflowRunner);
// read owner file if it exists and map workflow ids to owner emails
// then find existing users with those emails or fallback to passed in userId
const ownerRecords: Record<string, string> = {};
const ownersFile = await glob(SOURCE_CONTROL_OWNERS_EXPORT_FILE, {
cwd: this.gitFolder,
absolute: true,
const candidateIds = candidates.map((c) => c.id);
const existingWorkflows = await Db.collections.Workflow.find({
where: {
id: In(candidateIds),
},
select: ['id', 'name', 'versionId', 'active'],
});
if (ownersFile.length > 0) {
LoggerProxy.debug(`Reading workflow owners from file ${ownersFile[0]}`);
const ownerEmails = jsonParse<Record<string, string>>(
await fsReadFile(ownersFile[0], { encoding: 'utf8' }),
{ fallbackValue: {} },
);
if (ownerEmails) {
const uniqueOwnerEmails = new Set(Object.values(ownerEmails));
const existingUsers = await Db.collections.User.find({
where: { email: In([...uniqueOwnerEmails]) },
});
Object.keys(ownerEmails).forEach((workflowId) => {
ownerRecords[workflowId] =
existingUsers.find((e) => e.email === ownerEmails[workflowId])?.id ?? userId;
});
}
}
let importWorkflowsResult = new Array<{ id: string; name: string } | undefined>();
const allSharedWorkflows = await Db.collections.SharedWorkflow.find({
where: {
workflowId: In(candidateIds),
},
select: ['workflowId', 'roleId', 'userId'],
});
importWorkflowsResult = await Promise.all(
workflowFiles.map(async (file) => {
LoggerProxy.debug(`Parsing workflow file ${file}`);
const importedWorkflow = jsonParse<IWorkflowToImport>(
await fsReadFile(file, { encoding: 'utf8' }),
const cachedOwnerIds = new Map<string, string>();
const importWorkflowsResult = await Promise.all(
candidates.map(async (candidate) => {
LoggerProxy.debug(`Parsing workflow file ${candidate.file}`);
const importedWorkflow = jsonParse<IWorkflowToImport & { owner: string }>(
await fsReadFile(candidate.file, { encoding: 'utf8' }),
);
if (!importedWorkflow?.id) {
return;
}
const existingWorkflow = existingWorkflows.find((e) => e.id === importedWorkflow.id);
if (existingWorkflow?.versionId === importedWorkflow.versionId) {
LoggerProxy.debug(
`Skipping import of workflow ${importedWorkflow.id ?? 'n/a'} - versionId is up to date`,
);
return {
id: importedWorkflow.id ?? 'n/a',
name: 'skipped',
};
}
LoggerProxy.debug(`Importing workflow ${importedWorkflow.id ?? 'n/a'}`);
importedWorkflow.active = existingWorkflow?.active ?? false;
LoggerProxy.debug(`Updating workflow id ${importedWorkflow.id ?? 'new'}`);
const upsertResult = await Db.collections.Workflow.upsert({ ...importedWorkflow }, ['id']);
@@ -324,12 +313,31 @@ export class SourceControlImportService {
}
// Update workflow owner to the user who exported the workflow, if that user exists
// in the instance, and the workflow doesn't already have an owner
const workflowOwnerId = ownerRecords[importedWorkflow.id] ?? userId;
let workflowOwnerId = userId;
if (cachedOwnerIds.has(importedWorkflow.owner)) {
workflowOwnerId = cachedOwnerIds.get(importedWorkflow.owner) ?? userId;
} else {
const foundUser = await Db.collections.User.findOne({
where: {
email: importedWorkflow.owner,
},
select: ['id'],
});
if (foundUser) {
cachedOwnerIds.set(importedWorkflow.owner, foundUser.id);
workflowOwnerId = foundUser.id;
}
}
const existingSharedWorkflowOwnerByRoleId = allSharedWorkflows.find(
(e) => e.workflowId === importedWorkflow.id && e.roleId === ownerWorkflowRole.id,
(e) =>
e.workflowId === importedWorkflow.id &&
e.roleId.toString() === ownerWorkflowRole.id.toString(),
);
const existingSharedWorkflowOwnerByUserId = allSharedWorkflows.find(
(e) => e.workflowId === importedWorkflow.id && e.userId === workflowOwnerId,
(e) =>
e.workflowId === importedWorkflow.id &&
e.roleId.toString() === workflowOwnerId.toString(),
);
if (!existingSharedWorkflowOwnerByUserId && !existingSharedWorkflowOwnerByRoleId) {
// no owner exists yet, so create one
@@ -361,39 +369,218 @@ export class SourceControlImportService {
// try activating the imported workflow
LoggerProxy.debug(`Reactivating workflow id ${existingWorkflow.id}`);
await workflowRunner.add(existingWorkflow.id, 'activate');
// update the versionId of the workflow to match the imported workflow
} catch (error) {
LoggerProxy.error(`Failed to activate workflow ${existingWorkflow.id}`, error as Error);
} finally {
await Db.collections.Workflow.update(
{ id: existingWorkflow.id },
{ versionId: importedWorkflow.versionId },
);
}
}
return {
id: importedWorkflow.id ?? 'unknown',
name: file,
name: candidate.file,
};
}),
);
return importWorkflowsResult.filter((e) => e !== undefined) as Array<{
id: string;
name: string;
}>;
}
async importFromWorkFolder(options: SourceControllPullOptions): Promise<ImportResult> {
try {
const importedVariables = await this.importVariablesFromFile(options.variables);
const importedCredentials = await this.importCredentialsFromFiles(options.userId);
const importWorkflows = await this.importWorkflowsFromFiles(options.userId);
const importTags = await this.importTagsFromFile();
public async importCredentialsFromWorkFolder(candidates: SourceControlledFile[], userId: string) {
const candidateIds = candidates.map((c) => c.id);
const existingCredentials = await Db.collections.Credentials.find({
where: {
id: In(candidateIds),
},
select: ['id', 'name', 'type', 'data'],
});
const ownerCredentialRole = await this.getOwnerCredentialRole();
const ownerGlobalRole = await this.getOwnerGlobalRole();
const existingSharedCredentials = await Db.collections.SharedCredentials.find({
select: ['userId', 'credentialsId', 'roleId'],
where: {
credentialsId: In(candidateIds),
roleId: In([ownerCredentialRole.id, ownerGlobalRole.id]),
},
});
const encryptionKey = await UserSettings.getEncryptionKey();
let importCredentialsResult: Array<{ id: string; name: string; type: string }> = [];
importCredentialsResult = await Promise.all(
candidates.map(async (candidate) => {
LoggerProxy.debug(`Importing credentials file ${candidate.file}`);
const credential = jsonParse<ExportableCredential>(
await fsReadFile(candidate.file, { encoding: 'utf8' }),
);
const existingCredential = existingCredentials.find(
(e) => e.id === credential.id && e.type === credential.type,
);
const sharedOwner = existingSharedCredentials.find(
(e) => e.credentialsId === credential.id,
);
return {
variables: importedVariables,
credentials: importedCredentials,
workflows: importWorkflows,
tags: importTags,
};
const { name, type, data, id, nodesAccess } = credential;
const newCredentialObject = new Credentials({ id, name }, type, []);
if (existingCredential?.data) {
newCredentialObject.data = existingCredential.data;
} else {
newCredentialObject.setData(data, encryptionKey);
}
newCredentialObject.nodesAccess = nodesAccess || existingCredential?.nodesAccess || [];
LoggerProxy.debug(`Updating credential id ${newCredentialObject.id as string}`);
await Db.collections.Credentials.upsert(newCredentialObject, ['id']);
if (!sharedOwner) {
const newSharedCredential = new SharedCredentials();
newSharedCredential.credentialsId = newCredentialObject.id as string;
newSharedCredential.userId = userId;
newSharedCredential.roleId = ownerCredentialRole.id;
await Db.collections.SharedCredentials.upsert({ ...newSharedCredential }, [
'credentialsId',
'userId',
]);
}
return {
id: newCredentialObject.id as string,
name: newCredentialObject.name,
type: newCredentialObject.type,
};
}),
);
return importCredentialsResult.filter((e) => e !== undefined);
}
public async importTagsFromWorkFolder(candidate: SourceControlledFile) {
let mappedTags;
try {
LoggerProxy.debug(`Importing tags from file ${candidate.file}`);
mappedTags = jsonParse<{ tags: TagEntity[]; mappings: WorkflowTagMapping[] }>(
await fsReadFile(candidate.file, { encoding: 'utf8' }),
{ fallbackValue: { tags: [], mappings: [] } },
);
} catch (error) {
throw Error(`Failed to import workflows from work folder: ${(error as Error).message}`);
LoggerProxy.error(`Failed to import tags from file ${candidate.file}`, error as Error);
return;
}
if (mappedTags.mappings.length === 0 && mappedTags.tags.length === 0) {
return;
}
const existingWorkflowIds = new Set(
(
await Db.collections.Workflow.find({
select: ['id'],
})
).map((e) => e.id),
);
await Promise.all(
mappedTags.tags.map(async (tag) => {
const findByName = await Db.collections.Tag.findOne({
where: { name: tag.name },
select: ['id'],
});
if (findByName && findByName.id !== tag.id) {
throw new Error(
`A tag with the name <strong>${tag.name}</strong> already exists locally.<br />Please either rename the local tag, or the remote one with the id <strong>${tag.id}</strong> in the tags.json file.`,
);
}
await Db.collections.Tag.upsert(
{
...tag,
},
{
skipUpdateIfNoValuesChanged: true,
conflictPaths: { id: true },
},
);
}),
);
await Promise.all(
mappedTags.mappings.map(async (mapping) => {
if (!existingWorkflowIds.has(String(mapping.workflowId))) return;
await Db.collections.WorkflowTagMapping.upsert(
{ tagId: String(mapping.tagId), workflowId: String(mapping.workflowId) },
{
skipUpdateIfNoValuesChanged: true,
conflictPaths: { tagId: true, workflowId: true },
},
);
}),
);
return mappedTags;
}
public async importVariablesFromWorkFolder(
candidate: SourceControlledFile,
valueOverrides?: {
[key: string]: string;
},
) {
const result: { imported: string[] } = { imported: [] };
let importedVariables;
try {
LoggerProxy.debug(`Importing variables from file ${candidate.file}`);
importedVariables = jsonParse<Array<Partial<Variables>>>(
await fsReadFile(candidate.file, { encoding: 'utf8' }),
{ fallbackValue: [] },
);
} catch (error) {
LoggerProxy.error(`Failed to import tags from file ${candidate.file}`, error as Error);
return;
}
const overriddenKeys = Object.keys(valueOverrides ?? {});
for (const variable of importedVariables) {
if (!variable.key) {
continue;
}
// by default no value is stored remotely, so an empty string is retuned
// it must be changed to undefined so as to not overwrite existing values!
if (variable.value === '') {
variable.value = undefined;
}
if (overriddenKeys.includes(variable.key) && valueOverrides) {
variable.value = valueOverrides[variable.key];
overriddenKeys.splice(overriddenKeys.indexOf(variable.key), 1);
}
try {
await Db.collections.Variables.upsert({ ...variable }, ['id']);
} catch (errorUpsert) {
if (isUniqueConstraintError(errorUpsert as Error)) {
LoggerProxy.debug(`Variable ${variable.key} already exists, updating instead`);
try {
await Db.collections.Variables.update({ key: variable.key }, { ...variable });
} catch (errorUpdate) {
LoggerProxy.debug(`Failed to update variable ${variable.key}, skipping`);
LoggerProxy.debug((errorUpdate as Error).message);
}
}
} finally {
result.imported.push(variable.key);
}
}
// add remaining overrides as new variables
if (overriddenKeys.length > 0 && valueOverrides) {
for (const key of overriddenKeys) {
result.imported.push(key);
const newVariable = new Variables({ key, value: valueOverrides[key] });
await Db.collections.Variables.save(newVariable);
}
}
return result;
}
}