Fix typos #12293

Open
wants to merge 3 commits into base: master
50 changes: 25 additions & 25 deletions CHANGELOG.md

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion cypress/e2e/12-canvas.cy.ts
Original file line number Diff line number Diff line change
@@ -480,7 +480,7 @@ describe('Canvas Node Manipulation and Navigation', () => {
});

// FIXME: Canvas V2: Unknown nodes should still render connection endpoints
it('should render connections correctly if unkown nodes are present', () => {
it('should render connections correctly if unknown nodes are present', () => {
const unknownNodeName = 'Unknown node';
cy.createFixtureWorkflow('workflow-with-unknown-nodes.json', 'Unknown nodes');

18 changes: 9 additions & 9 deletions cypress/e2e/32-node-io-filter.cy.ts
Original file line number Diff line number Diff line change
@@ -52,31 +52,31 @@ describe('Node IO Filter', () => {
const getInputPagination = () =>
ndv.getters.inputPanel().findChildByTestId('ndv-data-pagination');
const getInputCounter = () => ndv.getters.inputPanel().findChildByTestId('ndv-items-count');
const getOuputPagination = () =>
const getOutputPagination = () =>
ndv.getters.outputPanel().findChildByTestId('ndv-data-pagination');
const getOutputCounter = () => ndv.getters.outputPanel().findChildByTestId('ndv-items-count');

getInputPagination().find('li').should('have.length', 3);
getInputCounter().contains('21 items').should('exist');
getOuputPagination().find('li').should('have.length', 3);
getOutputPagination().find('li').should('have.length', 3);
getOutputCounter().contains('21 items').should('exist');
focusedInput.type('ar');

getInputPagination().find('li').should('have.length', 2);
getInputCounter().should('contain', '14 of 21 items');
getOuputPagination().find('li').should('have.length', 3);
getOutputPagination().find('li').should('have.length', 3);
getOutputCounter().should('contain', '21 items');
focusedInput.type('i');

getInputPagination().should('not.exist');
getInputCounter().should('contain', '8 of 21 items');
getOuputPagination().find('li').should('have.length', 3);
getOutputPagination().find('li').should('have.length', 3);
getOutputCounter().should('contain', '21 items');

focusedInput.clear();
getInputPagination().find('li').should('have.length', 3);
getInputCounter().contains('21 items').should('exist');
getOuputPagination().find('li').should('have.length', 3);
getOutputPagination().find('li').should('have.length', 3);
getOutputCounter().contains('21 items').should('exist');

ndv.getters.outputDataContainer().trigger('mouseover');
@@ -87,25 +87,25 @@ describe('Node IO Filter', () => {

getInputPagination().find('li').should('have.length', 3);
getInputCounter().contains('21 items').should('exist');
getOuputPagination().find('li').should('have.length', 3);
getOutputPagination().find('li').should('have.length', 3);
getOutputCounter().contains('21 items').should('exist');
focusedInput.type('ar');

getInputPagination().find('li').should('have.length', 3);
getInputCounter().contains('21 items').should('exist');
getOuputPagination().find('li').should('have.length', 2);
getOutputPagination().find('li').should('have.length', 2);
getOutputCounter().should('contain', '14 of 21 items');
focusedInput.type('i');

getInputPagination().find('li').should('have.length', 3);
getInputCounter().contains('21 items').should('exist');
getOuputPagination().should('not.exist');
getOutputPagination().should('not.exist');
getOutputCounter().should('contain', '8 of 21 items');

focusedInput.clear();
getInputPagination().find('li').should('have.length', 3);
getInputCounter().contains('21 items').should('exist');
getOuputPagination().find('li').should('have.length', 3);
getOutputPagination().find('li').should('have.length', 3);
getOutputCounter().contains('21 items').should('exist');
});
});
2 changes: 1 addition & 1 deletion cypress/e2e/4-node-creator.cy.ts
Original file line number Diff line number Diff line change
@@ -108,7 +108,7 @@ describe('Node Creator', () => {
nodeCreatorFeature.getters.searchBar().find('input').type('{rightarrow}');
nodeCreatorFeature.getters.activeSubcategory().should('have.text', 'FTP');
nodeCreatorFeature.getters.searchBar().find('input').clear().type('file');
// The 1st trigger is selected, up 1x to the collapsable header, up 2x to the last action (rename)
// The 1st trigger is selected, up 1x to the collapsible header, up 2x to the last action (rename)
nodeCreatorFeature.getters.searchBar().find('input').type('{uparrow}{uparrow}{rightarrow}');
NDVModal.getters.parameterInput('operation').find('input').should('have.value', 'Rename');
});
2 changes: 1 addition & 1 deletion cypress/e2e/43-oauth-flow.cy.ts
Original file line number Diff line number Diff line change
@@ -31,7 +31,7 @@ describe('Credentials', () => {
'https://accounts.google.com/o/oauth2/v2/auth?access_type=offline&prompt=consent&client_id=test-key&redirect_uri=http%3A%2F%2Flocalhost%3A5678%2Frest%2Foauth2-credential%2Fcallback&response_type=code',
),
'OAuth Authorization',
'scrollbars=no,resizable=yes,status=no,titlebar=noe,location=no,toolbar=no,menubar=no,width=500,height=700',
'scrollbars=no,resizable=yes,status=no,titlebar=no,location=no,toolbar=no,menubar=no,width=500,height=700',
);

// Emulate successful save using BroadcastChannel
8 changes: 4 additions & 4 deletions cypress/e2e/5-ndv.cy.ts
Original file line number Diff line number Diff line change
@@ -542,7 +542,7 @@ describe('NDV', () => {
});

// Since language model has no credentials set, it should show an error
// Sinse code tool require alphanumeric tool name it would also show an error(2 errors, 1 for each tool node)
// Since the code tool requires an alphanumeric tool name, it would also show an error (2 errors, 1 for each tool node)
cy.get('[class*=hasIssues]').should('have.length', 3);
});
});
@@ -633,7 +633,7 @@ describe('NDV', () => {
ndv.actions.typeIntoParameterInput('jsCode', 'testets');
ndv.getters.backToCanvas().click();
workflowPage.actions.executeWorkflow();
// Manual tigger node should show success indicator
// Manual trigger node should show success indicator
workflowPage.actions.openNode('When clicking ‘Test workflow’');
ndv.getters.nodeRunSuccessIndicator().should('exist');
ndv.getters.nodeRunTooltipIndicator().should('exist');
@@ -818,7 +818,7 @@ describe('NDV', () => {
.should('have.value', 'US');
});

it('should not show items count when seaching in schema view', () => {
it('should not show items count when searching in schema view', () => {
cy.createFixtureWorkflow('Test_ndv_search.json');
workflowPage.actions.zoomToFit();
workflowPage.actions.openNode('Edit Fields');
@@ -829,7 +829,7 @@
ndv.getters.outputPanel().find('[data-test-id=ndv-items-count]').should('not.exist');
});

it('should show additional tooltip when seaching in schema view if no matches', () => {
it('should show additional tooltip when searching in schema view if no matches', () => {
cy.createFixtureWorkflow('Test_ndv_search.json');
workflowPage.actions.zoomToFit();
workflowPage.actions.openNode('Edit Fields');
4 changes: 2 additions & 2 deletions cypress/fixtures/Test_Template_2.json
Original file line number Diff line number Diff line change
@@ -6,7 +6,7 @@
"recentViews": 9887,
"totalViews": 650,
"createdAt": "2021-11-29T13:59:16.771Z",
"description": "This workflow will take all emails you put into a certain folder, upload any attachements to Nextcloud, and mark the emails as read (configurable).\n\nAttachements will be saved with automatically generated filenames:\n`2021-01-01_From-Sender-Name_Filename-of-attachement.pdf`\n\nInstructions:\n1. **Allow lodash to be used in n8n** (or rewrite the code...)\n `NODE_FUNCTION_ALLOW_EXTERNAL=lodash` (environment variable)\n2. Import workflow\n3. Set credentials for Email & Nextcloud nodes\n4. Configure to use correct folder / custom filters\n5. Activate\n\nCustom filter examples:\n- Only unread emails:\n `Custom Email Config` = `[\"UNSEEN\"]`\n- Filter emails by 'to' address:\n `Custom Email Config` = `[[\"TO\", \"[email protected]\"]]`",
"description": "This workflow will take all emails you put into a certain folder, upload any attachments to Nextcloud, and mark the emails as read (configurable).\n\nAttachements will be saved with automatically generated filenames:\n`2021-01-01_From-Sender-Name_Filename-of-attachment.pdf`\n\nInstructions:\n1. **Allow lodash to be used in n8n** (or rewrite the code...)\n `NODE_FUNCTION_ALLOW_EXTERNAL=lodash` (environment variable)\n2. Import workflow\n3. Set credentials for Email & Nextcloud nodes\n4. Configure to use correct folder / custom filters\n5. Activate\n\nCustom filter examples:\n- Only unread emails:\n `Custom Email Config` = `[\"UNSEEN\"]`\n- Filter emails by 'to' address:\n `Custom Email Config` = `[[\"TO\", \"[email protected]\"]]`",
"workflow": {
"nodes": [
{
@@ -47,7 +47,7 @@
420
],
"parameters": {
"functionCode": "const _ = require('lodash')\n\nconst sanitize = str => _.chain(str)\n .replace(/[^A-Za-z0-9&.-]/g, '-') // sanitise via whitelist of characters\n .replace(/-(?=-)/g, '') // remove repeated dashes - https://regexr.com/6ag8h\n .trim('-') // trim any leading/trailing dashes\n .truncate({\n length: 60,\n omission: '-' // when the string ends with '-', you'll know it was truncated\n })\n .value()\n\nconst result = _.flatMap(items.map(item => {\n //console.log({item})\n\n // Maps each attachment to a separate item\n return _.values(item.binary).map(file => {\n console.log(\"Saving attachement:\", file.fileName, 'from:', ...item.json.from.value)\n \n // sanitize filename but exclude extension\n const filename_parts = file.fileName.split('.')\n const ext = _.slice(filename_parts, filename_parts.length-1)\n const filename_main = _.join(_.dropRight(filename_parts), '.')\n file.fileName = sanitize(filename_main) + '.' + ext\n \n return {\n json: {\n from: sanitize(item.json.from.value[0].name),\n date: sanitize(new Date(item.json.date).toISOString().split(\"T\")[0]) // get date part \"2020-01-01\"\n }, \n binary: { file }\n }\n })\n}))\n\n//console.log(result)\nreturn result"
"functionCode": "const _ = require('lodash')\n\nconst sanitize = str => _.chain(str)\n .replace(/[^A-Za-z0-9&.-]/g, '-') // sanitise via whitelist of characters\n .replace(/-(?=-)/g, '') // remove repeated dashes - https://regexr.com/6ag8h\n .trim('-') // trim any leading/trailing dashes\n .truncate({\n length: 60,\n omission: '-' // when the string ends with '-', you'll know it was truncated\n })\n .value()\n\nconst result = _.flatMap(items.map(item => {\n //console.log({item})\n\n // Maps each attachment to a separate item\n return _.values(item.binary).map(file => {\n console.log(\"Saving attachment:\", file.fileName, 'from:', ...item.json.from.value)\n \n // sanitize filename but exclude extension\n const filename_parts = file.fileName.split('.')\n const ext = _.slice(filename_parts, filename_parts.length-1)\n const filename_main = _.join(_.dropRight(filename_parts), '.')\n file.fileName = sanitize(filename_main) + '.' + ext\n \n return {\n json: {\n from: sanitize(item.json.from.value[0].name),\n date: sanitize(new Date(item.json.date).toISOString().split(\"T\")[0]) // get date part \"2020-01-01\"\n }, \n binary: { file }\n }\n })\n}))\n\n//console.log(result)\nreturn result"
},
"typeVersion": 1
}
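The fixture's function code above sanitizes attachment filenames with a lodash chain before uploading them to Nextcloud. As a rough standalone sketch of that same chain (assuming lodash is allowed via `NODE_FUNCTION_ALLOW_EXTERNAL=lodash`, as the template description notes):

```ts
import _ from 'lodash';

// Whitelist characters, collapse repeated dashes, trim leading/trailing dashes,
// and truncate to 60 characters, mirroring the fixture's sanitize helper.
const sanitize = (str: string): string =>
  _.chain(str)
    .replace(/[^A-Za-z0-9&.-]/g, '-')
    .replace(/-(?=-)/g, '')
    .trim('-')
    .truncate({ length: 60, omission: '-' })
    .value();

console.log(sanitize('Q1 Report (draft)')); // "Q1-Report-draft"
```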
Original file line number Diff line number Diff line change
@@ -121,7 +121,7 @@ export class OutputParserStructured implements INodeType {
async supplyData(this: ISupplyDataFunctions, itemIndex: number): Promise<SupplyData> {
const schemaType = this.getNodeParameter('schemaType', itemIndex, '') as 'fromJson' | 'manual';
// We initialize these even though one of them will always be empty
// it makes it easer to navigate the ternary operator
// it makes it easier to navigate the ternary operator
const jsonExample = this.getNodeParameter('jsonSchemaExample', itemIndex, '') as string;
let inputSchema: string;

Original file line number Diff line number Diff line change
@@ -93,7 +93,7 @@ export class ToolCode implements INodeType {
type: 'string',
default: '',
placeholder:
'Call this tool to get a random color. The input should be a string with comma separted names of colors to exclude.',
'Call this tool to get a random color. The input should be a string with comma separated names of colors to exclude.',
typeOptions: {
rows: 3,
},
Original file line number Diff line number Diff line change
@@ -99,7 +99,7 @@ export class ToolWorkflow implements INodeType {
type: 'string',
default: '',
placeholder:
'Call this tool to get a random color. The input should be a string with comma separted names of colors to exclude.',
'Call this tool to get a random color. The input should be a string with comma separated names of colors to exclude.',
typeOptions: {
rows: 3,
},
Original file line number Diff line number Diff line change
@@ -341,7 +341,7 @@ describe('OpenAi, Audio resource', () => {
});

it('transcribe => should call apiRequest with correct parameters', async () => {
(transport.apiRequest as jest.Mock).mockResolvedValueOnce({ text: 'transcribtion' });
(transport.apiRequest as jest.Mock).mockResolvedValueOnce({ text: 'transcription' });

const returnData = await audio.transcribe.execute.call(
createExecuteFunctionsMock({
@@ -356,7 +356,7 @@

expect(returnData.length).toEqual(1);
expect(returnData[0].pairedItem).toBeDefined();
expect(returnData[0].json).toEqual({ text: 'transcribtion' });
expect(returnData[0].json).toEqual({ text: 'transcription' });

expect(transport.apiRequest).toHaveBeenCalledWith(
'POST',
Original file line number Diff line number Diff line change
@@ -108,7 +108,7 @@ export function validateRunForEachItemOutput(
validateItem(returnData, itemIndex);

// If at least one top-level key is a supported item key (`json`, `binary`, etc.),
// and another top-level key is unrecognized, then the user mis-added a property
// and another top-level key is unrecognized, then the user added a property incorrectly
// directly on the item, when they intended to add it on the `json` property
validateTopLevelKeys(returnData, itemIndex);

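The corrected comment above explains why `validateTopLevelKeys` flags stray keys. A minimal sketch of the intended item shape (field names invented for illustration, not taken from this PR):

```ts
// Custom fields belong under `json`; an unrecognized top-level key sitting next to a
// supported one (`json`, `binary`, ...) suggests the property was added in the wrong place.
const misplaced = { json: { id: 1 }, customerName: 'Ada' }; // would be flagged
const intended = { json: { id: 1, customerName: 'Ada' } }; // custom data nested under `json`
```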
4 changes: 2 additions & 2 deletions packages/@n8n_io/eslint-config/local-rules.js
Original file line number Diff line number Diff line change
@@ -118,7 +118,7 @@ module.exports = {
recommended: 'error',
},
messages: {
noUneededBackticks: 'Use single or double quotes, not backticks',
noUnneededBackticks: 'Use single or double quotes, not backticks',
},
fixable: 'code',
},
@@ -132,7 +132,7 @@ module.exports = {
const escaped = q.value.raw.replace(/(?<!\\)'/g, "\\'");

context.report({
messageId: 'noUneededBackticks',
messageId: 'noUnneededBackticks',
node,
fix: (fixer) => fixer.replaceText(q, `'${escaped}'`),
});
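For context on the renamed `noUnneededBackticks` message ID: the rule rewrites template literals that interpolate nothing into single-quoted strings, escaping any embedded single quotes. A rough before/after sketch of the assumed behavior:

```ts
// Flagged: backticks used although nothing is interpolated.
const greeting = `it's a plain string`;

// After the autofix: single quotes, with the embedded quote escaped.
const greetingFixed = 'it\'s a plain string';
```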
10 changes: 5 additions & 5 deletions packages/cli/BREAKING-CHANGES.md
Original file line number Diff line number Diff line change
@@ -135,7 +135,7 @@ If you are using the flag `N8N_CACHE_ENABLED`, remove it from your settings.

In regards to credentials, if you use expression in credentials, you might want to revisit them. Previously, n8n would stick to the first item only, but now it will try to match the proper paired item.

If you are using the Monday.com node and refering to `column_values` property, check in table below if you are using any of the affected properties of its entries.
If you are using the Monday.com node and referring to the `column_values` property, check in the table below if you are using any of the affected properties of its entries.

| Resource | Operation | Previous | New |
| ---------- | ------------------- | --------------- | ------------------- |
@@ -483,7 +483,7 @@ In the Postgres, CrateDB, QuestDB and TimescaleDB nodes the `Execute Query` oper

### When is action necessary?

If you use any of the above mentioned nodes with the `Execute Query` operation and the result is relevant to you, you are encouraged to revisit your logic. The node output may now contain more information than before. This change was made so that the behavior is more consistent across n8n where input with multiple rows should yield results acccording all input data instead of only one. Please note: n8n was already running multiple queries based on input. Only the output was changed.
If you use any of the above mentioned nodes with the `Execute Query` operation and the result is relevant to you, you are encouraged to revisit your logic. The node output may now contain more information than before. This change was made so that the behavior is more consistent across n8n where input with multiple rows should yield results according to all input data instead of only one. Please note: n8n was already running multiple queries based on input. Only the output was changed.

## 0.117.0

@@ -636,7 +636,7 @@ In workflows using the Typeform Trigger node, nodes that reference such key name

### What changed?

In the Harvest Node, we moved the account field from the credentials to the node parameters. This will allow you to work witn multiples accounts without having to create multiples credentials.
In the Harvest Node, we moved the account field from the credentials to the node parameters. This will allow you to work with multiple accounts without having to create multiple credentials.

### When is action necessary?

@@ -756,7 +756,7 @@ If the operations `close_match` or `delete_match` are used, recreate them using

### What changed?

We have simplified how attachments are handled by the Twitter node. Rather than clicking on `Add Attachments` and having to specify the `Catergory`, you can now add attachments by just clicking on `Add Field` and selecting `Attachments`. There's no longer an option to specify the type of attachment you are adding.
We have simplified how attachments are handled by the Twitter node. Rather than clicking on `Add Attachments` and having to specify the `Category`, you can now add attachments by just clicking on `Add Field` and selecting `Attachments`. There's no longer an option to specify the type of attachment you are adding.

### When is action necessary?

@@ -950,7 +950,7 @@ After upgrading open all workflows which contain the concerning Nodes and set

### What changed?

Because of a typo very often `reponse` instead of `response` got used in code. So also on the Webhook-Node. Its parameter `reponseMode` had to be renamed to correct spelling `responseMode`.
Because of a typo, `reponse` instead of `response` very often got used in code, also in the Webhook node. Its parameter `reponseMode` had to be renamed to the correct spelling `responseMode`.
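As a hedged illustration of that rename (parameter value and workflow shape assumed here, not taken from this changelog), older workflow data could be migrated roughly like this:

```ts
// Rename the misspelled Webhook parameter key while keeping its value.
const oldParameters: Record<string, unknown> = { reponseMode: 'onReceived', path: 'my-webhook' };

const { reponseMode, ...rest } = oldParameters;
const migratedParameters =
  reponseMode === undefined ? oldParameters : { ...rest, responseMode: reponseMode };
```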

### When is action necessary?

Original file line number Diff line number Diff line change
@@ -182,7 +182,7 @@ describe('WorkflowExecuteAdditionalData', () => {
});

describe('getRunData', () => {
it('should throw error to add trigger ndoe', async () => {
it('should throw error to add trigger node', async () => {
const workflow = mock<IWorkflowBase>({
id: '1',
name: 'test',
2 changes: 1 addition & 1 deletion packages/cli/src/databases/entities/execution-entity.ts
Original file line number Diff line number Diff line change
@@ -32,7 +32,7 @@ export class ExecutionEntity {
id: string;

/**
* Whether the execution finished sucessfully.
* Whether the execution finished successfully.
*
* @deprecated Use `status` instead
*/
Original file line number Diff line number Diff line change
@@ -154,7 +154,7 @@ export class MigrateIntegerKeysToString1690000000000 implements IrreversibleMigr
await queryRunner.query(
`ALTER TABLE ${tablePrefix}execution_entity RENAME COLUMN "workflowId" to "tmp_workflowId";`,
);
// -- Intentionally NOT setting colum to NOT NULL
// -- Intentionally NOT setting column to NOT NULL
await queryRunner.query(
`ALTER TABLE ${tablePrefix}execution_entity ADD COLUMN "workflowId" varchar(36);`,
);
Original file line number Diff line number Diff line change
@@ -102,7 +102,7 @@ const pullResult: SourceControlledFile[] = [
{
file: 'credential_stubs/abcdeWGIeey9K4aa.json',
id: 'abcdeWGIeey9K4aa',
name: 'modfied credential',
name: 'modified credential',
type: 'credential',
status: 'modified',
location: 'local',
Original file line number Diff line number Diff line change
@@ -34,7 +34,7 @@ import type { ExportableCredential } from './types/exportable-credential';
import type { ImportResult } from './types/import-result';
import type { SourceControlGetStatus } from './types/source-control-get-status';
import type { SourceControlPreferences } from './types/source-control-preferences';
import type { SourceControllPullOptions } from './types/source-control-pull-work-folder';
import type { SourceControlPullOptions } from './types/source-control-pull-work-folder';
import type { SourceControlPushWorkFolder } from './types/source-control-push-work-folder';
import type { SourceControlWorkflowVersionId } from './types/source-control-workflow-version-id';
import type { SourceControlledFile } from './types/source-controlled-file';
@@ -321,7 +321,7 @@ export class SourceControlService {
}

async pullWorkfolder(
options: SourceControllPullOptions,
options: SourceControlPullOptions,
): Promise<{ statusCode: number; statusResult: SourceControlledFile[] }> {
await this.sanityCheck();
