From 9694660a63eec943aba7cfdc65e7c2690188b9ea Mon Sep 17 00:00:00 2001
From: Maxim Karpov
Date: Tue, 22 Oct 2024 12:10:03 +0300
Subject: [PATCH] fix: make collect async

Review fixes folded in:
- `forEach` callbacks converted to `for..of` kept their `return;`
  statements, which would exit `collect()` entirely on the first
  non-matching token instead of continuing the loop; replaced with
  `continue;`.
- `DefaultFsContext.writeAsync` did not await `writeFile`, leaving a
  floating promise; now awaited.
(Generic type arguments such as `Promise<boolean>` and
`Map<string, string>` were restored after being stripped in transit.)

---
 src/transform/fsContext.ts                | 17 +++++-
 src/transform/plugins/images/collect.ts   | 16 +++---
 src/transform/plugins/includes/collect.ts | 74 +++++++++++------------
 src/transform/plugins/includes/types.ts   |  8 ++-
 src/transform/typings.ts                  |  3 +
 src/transform/utilsFS.ts                  | 11 ++++
 test/include-included.test.ts             |  7 +--
 7 files changed, 85 insertions(+), 51 deletions(-)

diff --git a/src/transform/fsContext.ts b/src/transform/fsContext.ts
index f37a3cae..42cf7c24 100644
--- a/src/transform/fsContext.ts
+++ b/src/transform/fsContext.ts
@@ -1,7 +1,8 @@
 import {readFileSync, writeFileSync} from 'fs';
+import {readFile, writeFile} from 'fs/promises';
 
 import {FsContext} from './typings';
-import {isFileExists} from './utilsFS';
+import {isFileExists, isFileExistsAsync} from './utilsFS';
 
 export class DefaultFsContext implements FsContext {
   exist(path: string): boolean {
@@ -17,6 +18,20 @@ export class DefaultFsContext implements FsContext {
       encoding: 'utf8',
     });
   }
+
+  async existAsync(path: string): Promise<boolean> {
+    return await isFileExistsAsync(path);
+  }
+
+  async readAsync(path: string): Promise<string> {
+    return readFile(path, 'utf8');
+  }
+
+  async writeAsync(path: string, content: string): Promise<void> {
+    await writeFile(path, content, {
+      encoding: 'utf8',
+    });
+  }
 }
 
 export const defaultFsContext = new DefaultFsContext();
diff --git a/src/transform/plugins/images/collect.ts b/src/transform/plugins/images/collect.ts
index 9f8f3cba..c5c0579b 100644
--- a/src/transform/plugins/images/collect.ts
+++ b/src/transform/plugins/images/collect.ts
@@ -12,21 +12,21 @@ type Options = MarkdownItPluginOpts & {
   singlePage: boolean;
 };
 
-const collect = (input: string, options: Options) => {
+const collect = async (input: string, options: Options) => {
   const md = new MarkdownIt().use(imsize);
 
   const {root, path, destPath = '', copyFile, singlePage, deps} = options;
   const tokens = md.parse(input, {});
   let result = input;
 
-  tokens.forEach((token) => {
+  for (const token of tokens) {
     if (token.type !== 'inline') {
-      return;
+      continue;
     }
 
     const children = token.children || [];
 
-    children.forEach((childToken) => {
+    for (const childToken of children) {
       if (childToken.type !== 'image') {
-        return;
+        continue;
       }
@@ -47,9 +47,9 @@ const collect = async (input: string, options: Options) => {
       result = result.replace(src, newSrc);
     }
 
-      copyFile(targetPath, targetDestPath);
-    });
-  });
+      await copyFile(targetPath, targetDestPath);
+    }
+  }
 
   if (singlePage) {
     return result;
diff --git a/src/transform/plugins/includes/collect.ts b/src/transform/plugins/includes/collect.ts
index f8f8527a..a7c5acdb 100644
--- a/src/transform/plugins/includes/collect.ts
+++ b/src/transform/plugins/includes/collect.ts
@@ -1,21 +1,24 @@
 import {relative} from 'path';
 
 import {bold} from 'chalk';
-import {readFileSync} from 'fs';
 
 import {getRelativePath, resolveRelativePath} from '../../utilsFS';
 import {defaultFsContext} from '../../fsContext';
 import {IncludeCollectOpts} from './types';
 
-const includesPaths: string[] = [];
-
-function processRecursive(
+async function processRecursive(
   includePath: string,
   targetDestPath: string,
   options: IncludeCollectOpts,
-  appendix: Map<string, string>,
 ) {
-  const {path, log, copyFile, includedParentPath: includedParentPathNullable, included} = options;
+  const {
+    path,
+    log,
+    copyFile,
+    includedParentPath: includedParentPathNullable,
+    included,
+    fs,
+  } = options;
   const includedParentPath = includedParentPathNullable || path;
 
   const includeOptions = {
@@ -25,30 +28,26 @@
   };
 
   try {
-    const contentProcessed = copyFile(includePath, targetDestPath, includeOptions);
+    const contentProcessed = await copyFile(includePath, targetDestPath, includeOptions);
 
     // To reduce file reading we can include the file content into the generated content
     if (included) {
-      const content = contentProcessed ?? readFileSync(targetDestPath, 'utf8');
+      const content = contentProcessed ?? (await fs?.readAsync(targetDestPath));
 
       if (content) {
        const includedRelativePath = getRelativePath(includedParentPath, includePath);
 
         // The appendix is the map that protects from multiple include files
-        if (!appendix.has(includedRelativePath)) {
+        if (!options.appendix?.has(includedRelativePath)) {
           // Recursive function to include the depth structure
-          const includeContent = collectRecursive(
-            content,
-            {
-              ...options,
-              path: includePath,
-              includedParentPath,
-            },
-            appendix,
-          );
+          const includeContent = await collectRecursive(content, {
+            ...options,
+            path: includePath,
+            includedParentPath,
+          });
 
           // Add to appendix set structure
-          appendix.set(
+          options.appendix?.set(
             includedRelativePath,
             `{% included (${includedRelativePath}) %}\n${includeContent}\n{% endincluded %}`,
           );
@@ -60,11 +59,7 @@
   }
 }
 
-function collectRecursive(
-  result: string,
-  options: IncludeCollectOpts,
-  appendix: Map<string, string>,
-) {
+async function collectRecursive(result: string, options: IncludeCollectOpts) {
   const {root, path, destPath = '', log, singlePage, fs = defaultFsContext, deps} = options;
 
   const INCLUDE_REGEXP = /{%\s*include\s*(notitle)?\s*\[(.+?)]\((.+?)\)\s*%}/g;
@@ -80,19 +75,21 @@
 
     deps?.markDep?.(path, includePath, 'include');
 
-    if (hashIndex > -1 && !fs.exist(includePath)) {
+    if (hashIndex > -1 && !(await fs.existAsync(includePath))) {
       includePath = includePath.slice(0, includePath.lastIndexOf('#'));
       relativePath = relativePath.slice(0, hashIndex);
     }
 
     const targetDestPath = resolveRelativePath(destPath, relativePath);
 
-    if (includesPaths.includes(includePath)) {
-      log.error(`Circular includes: ${bold(includesPaths.concat(path).join(' ▶ '))}`);
+    if (options.includesPaths?.includes(includePath)) {
+      log.error(
+        `Circular includes: ${bold(options.includesPaths?.concat(path).join(' ▶ '))}`,
+      );
       break;
     }
 
-    if (singlePage && !includesPaths.length) {
+    if (singlePage && !options.includesPaths?.length) {
       const newRelativePath = relative(root, includePath);
       const newInclude = matchedInclude.replace(relativePath, newRelativePath);
 
@@ -102,25 +99,28 @@
       INCLUDE_REGEXP.lastIndex = INCLUDE_REGEXP.lastIndex - delta;
     }
 
-    includesPaths.push(includePath);
+    options.includesPaths?.push(includePath);
 
-    processRecursive(includePath, targetDestPath, options, appendix);
+    await processRecursive(includePath, targetDestPath, options);
 
-    includesPaths.pop();
+    options.includesPaths?.pop();
   }
 
   return result;
 }
 
-function collect(input: string, options: IncludeCollectOpts) {
-  const appendix: Map<string, string> = new Map();
+async function collect(input: string, options: IncludeCollectOpts) {
+  const shouldWriteAppendix = !options.appendix;
+
+  options.includesPaths = options.includesPaths ?? [];
+  options.appendix = options.appendix ?? new Map();
 
-  input = collectRecursive(input, options, appendix);
+  input = await collectRecursive(input, options);
 
-  if (!options.path.includes('_includes')) {
+  if (shouldWriteAppendix) {
     // Appendix should be appended to the end of the file (it supports depth structure, so the included files will have included as well)
-    if (appendix.size > 0) {
-      input += '\n' + [...appendix.values()].join('\n');
+    if (options.appendix.size > 0) {
+      input += '\n' + [...options.appendix.values()].join('\n');
     }
   }
 
diff --git a/src/transform/plugins/includes/types.ts b/src/transform/plugins/includes/types.ts
index 66eff785..76ce047e 100644
--- a/src/transform/plugins/includes/types.ts
+++ b/src/transform/plugins/includes/types.ts
@@ -9,9 +9,15 @@ export interface MarkdownItIncluded extends MarkdownIt {
 
 export type IncludeCollectOpts = MarkdownItPluginOpts & {
   destPath: string;
-  copyFile(path: string, dest: string, opts: IncludeCollectOpts): string | null | undefined;
+  copyFile(
+    path: string,
+    dest: string,
+    opts: IncludeCollectOpts,
+  ): Promise<string | null | undefined>;
   singlePage: Boolean;
   included: Boolean;
   includedParentPath?: string;
   additionalIncludedList?: string[];
+  includesPaths?: string[];
+  appendix?: Map<string, string>;
 };
diff --git a/src/transform/typings.ts b/src/transform/typings.ts
index 5ebff539..9ff77ccd 100644
--- a/src/transform/typings.ts
+++ b/src/transform/typings.ts
@@ -33,6 +33,9 @@ export interface FsContext {
   read(path: string | null): string;
   exist(path: string | null): boolean;
   write(path: string | null, content: string): void;
+  readAsync(path: string | null): Promise<string>;
+  existAsync(path: string | null): Promise<boolean>;
+  writeAsync(path: string | null, content: string): Promise<void>;
 }
 
 export interface DependencyContext {
diff --git a/src/transform/utilsFS.ts b/src/transform/utilsFS.ts
index b8f4cf77..d349b2ce 100644
--- a/src/transform/utilsFS.ts
+++ b/src/transform/utilsFS.ts
@@ -3,6 +3,7 @@ import type {Dictionary} from 'lodash';
 import escapeRegExp from 'lodash/escapeRegExp';
 import {join, parse, relative, resolve, sep} from 'path';
 import {statSync} from 'fs';
+import {stat} from 'fs/promises';
 
 import liquidSnippet from './liquid';
 import {FsContext, StateCore} from './typings';
@@ -37,6 +38,16 @@ export function isFileExists(file: string) {
   }
 }
 
+export async function isFileExistsAsync(file: string) {
+  try {
+    const stats = await stat(file);
+
+    return stats.isFile();
+  } catch (e) {
+    return false;
+  }
+}
+
 export function getFileTokens(
   fs: FsContext,
   path: string,
diff --git a/test/include-included.test.ts b/test/include-included.test.ts
index ee7ccbe6..4f26c9c4 100644
--- a/test/include-included.test.ts
+++ b/test/include-included.test.ts
@@ -1,5 +1,4 @@
 import {resolve} from 'path';
-import {readFileSync} from 'fs';
 import {readFile} from 'node:fs/promises';
 
 import transform from '../src/transform';
@@ -26,7 +25,7 @@ const collectIncluded = (text: string, path: string) => {
     included: true,
     path: path,
     root: resolve(path, '../'),
-    copyFile: (includePath) => readFileSync(includePath, 'utf-8'),
+    copyFile: (includePath) => readFile(includePath, 'utf-8'),
     singlePage: false,
     destPath: '',
     isLintRun: false,
@@ -46,7 +45,7 @@ describe('Included to md', () => {
     const expectPath = resolve(__dirname, './mocks/include-included-3.expect.md');
     const expectContent = await readFile(expectPath, 'utf8');
 
-    const result = collectIncluded(input, inputPath);
+    const result = await collectIncluded(input, inputPath);
 
     expect(result).toBe(expectContent);
   });
@@ -70,7 +69,7 @@ describe('Included to md', () => {
    const expectPath = resolve(__dirname, './mocks/include-included-3-deep.expect.md');
     const expectContent = await readFile(expectPath, 'utf8');
 
-    const result = collectIncluded(input, inputPath);
+    const result = await collectIncluded(input, inputPath);
 
     expect(result).toBe(expectContent);
   });