From 34b79f59d90aa2cb81379591450a729521a0c6d9 Mon Sep 17 00:00:00 2001 From: GyeongHo Kim Date: Tue, 25 Mar 2025 02:00:33 +0900 Subject: [PATCH 1/5] feat: added rc file support --- .gitignore | 2 +- .lfifyrc-sample.json | 16 ++++ index.cjs | 171 ++++++++++++++++++++++++++++--------- package-lock.json | 17 +---- package.json | 2 +- 5 files changed, 129 insertions(+), 79 deletions(-) create mode 100644 .lfifyrc-sample.json diff --git a/.gitignore b/.gitignore index 83dab92..e4d619b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,3 @@ node_modules -DS_Store +.DS_Store .npmrc \ No newline at end of file diff --git a/.lfifyrc-sample.json b/.lfifyrc-sample.json new file mode 100644 index 0000000..911748e --- /dev/null +++ b/.lfifyrc-sample.json @@ -0,0 +1,16 @@ +{ + "entry": "./", + "include": [ + "**/*.{js,ts,jsx,tsx}", + "**/*.{json,md}", + "**/*.{css,scss}", + "**/*.{html,vue}" + ], + "exclude": [ + "node_modules/**", + ".git/**", + "dist/**", + "build/**", + "coverage/**" + ] +} \ No newline at end of file diff --git a/index.cjs b/index.cjs index b968ed2..6576289 100644 --- a/index.cjs +++ b/index.cjs @@ -2,24 +2,50 @@ const fs = require("fs").promises; const path = require("path"); -const ignore = require("ignore"); +const micromatch = require("micromatch"); /** - * Constants definition - * @type {Object} + * @typedef {Object} Config + * @property {string} entry - entry directory to process + * @property {string[]} include - file patterns to include + * @property {string[]} exclude - file patterns to exclude */ -const CONSTANTS = { - ALWAYS_EXCLUDED_DIRS: ['.git', 'node_modules'], - ALLOWED_EXTENSIONS: new Set([ - '.js', '.ts', '.json', '.txt', '.md', '.html', '.css', - '.jsx', '.tsx', '.xml', '.yml', '.yaml', '.scss', - '.env', '.vue', '.cjs', '.mjs' - ]) + +/** + * @typedef {Object} Logger + * @property {function(string, string): void} warn - print a warning message + * @property {function(string, string, Error=): void} error - print an error message + * @property {function(string, string): void} info - print an info message + */ + +/** + * @typedef {Object} CommandOptions + * @property {string} configPath - path to the configuration file + */ + +/** + * Default configuration + * @type {Config} + */ +const DEFAULT_CONFIG = { + entry: './', + include: [], + exclude: [] +}; + +/** + * Configuration validation schema + * @type {Object.<string, function>} + */ +const CONFIG_SCHEMA = { + entry: (value) => typeof value === 'string', + include: (value) => Array.isArray(value) && value.length > 0 && value.every(item => typeof item === 'string'), + exclude: (value) => Array.isArray(value) && value.length > 0 && value.every(item => typeof item === 'string'), }; /** * Logging utility - * @type {Object} + * @type {Logger} */ const logger = { warn: (msg, path) => console.warn(`${msg} ${path}`), @@ -28,67 +54,95 @@ }; /** - * read .gitignore files and set ignore patterns - * @param {string} dirPath - directory path to search - * @returns {ignore.Ignore} - ignore instance + * Read and validate configuration file + * @param {string} configPath - path to configuration file + * @returns {Promise} - validated configuration + * @throws {Error} - if configuration is invalid or file is not found */ -async function getIgnoreInstance(dirPath) { - const ig = ignore(); - const gitignorePath = path.join(dirPath, ".gitignore"); +async function readConfig(configPath) { try { - const gitignoreContent = await fs.readFile(gitignorePath, "utf8"); - ig.add(gitignoreContent); + const configContent = await fs.readFile(configPath, 'utf8'); + const config = JSON.parse(configContent); + + // Validate required 
fields + for (const [key, validator] of Object.entries(CONFIG_SCHEMA)) { + if (!config[key]) { + throw new Error(`Invalid or missing "${key}" in configuration file`); + } + if (!validator(config[key])) { + throw new Error(`Invalid "${key}" in configuration file`); + } + } + + return { + ...DEFAULT_CONFIG, + ...config, + entry: path.resolve(process.cwd(), config.entry) + }; } catch (err) { - if (err.code === "ENOENT") { - logger.warn(".gitignore file not found. processing all files.", gitignorePath); + if (err.code === 'ENOENT') { + logger.error(`Configuration file not found: ${configPath}`, configPath); } else { - logger.error("error reading .gitignore file", gitignorePath, err); + logger.error(`Error reading configuration file: ${err.message}`, configPath); } + process.exit(1); } - return ig; } /** - * Check if file extension is processable - * @param {string} filePath - * @returns {boolean} + * Parse command line arguments + * @returns {CommandOptions} - parsed arguments */ -function isProcessableFile(filePath) { - const ext = path.extname(filePath).toLowerCase(); - return CONSTANTS.ALLOWED_EXTENSIONS.has(ext); +function parseArgs() { + const args = process.argv.slice(2); + const options = { + configPath: '.lfifyrc.json' + }; + + for (let i = 0; i < args.length; i++) { + if (args[i] === '--config' && args[i + 1]) { + options.configPath = args[i + 1]; + i++; + } + } + + return options; +} + +/** + * Check if file should be processed based on include/exclude patterns + * @param {string} filePath - relative file path + * @param {Config} config - configuration object + * @returns {boolean} - true if file should be processed + */ +function shouldProcessFile(filePath, config) { + const isIncluded = micromatch.isMatch(filePath, config.include); + const isExcluded = micromatch.isMatch(filePath, config.exclude); + + return isIncluded && !isExcluded; } /** * traverse a specific directory recursively and convert all files' CRLF to LF * @param {string} dirPath - directory path to search - * @param {ignore.Ignore} ig - ignore instance + * @param {Config} config - configuration object + * @returns {Promise} + * @throws {Error} - if there's an error reading directory or processing files */ -async function convertCRLFtoLF(dirPath, ig) { +async function convertCRLFtoLF(dirPath, config) { try { const entries = await fs.readdir(dirPath, { withFileTypes: true }); - if (!entries) { - logger.error(`error reading directory: ${dirPath}`, dirPath, new Error('no entries')); - return; - } await Promise.all(entries.map(async entry => { const fullPath = path.join(dirPath, entry.name); const relativePath = path.relative(process.cwd(), fullPath).replace(/\\/g, "/"); - if (CONSTANTS.ALWAYS_EXCLUDED_DIRS.includes(entry.name)) { - logger.info(`excluded (directory): ${relativePath}`, fullPath); - return; - } - - if (ig.ignores(relativePath)) { - logger.info(`excluded (pattern matching): ${relativePath}`, fullPath); - return; - } - if (entry.isDirectory()) { - await convertCRLFtoLF(fullPath, ig); - } else if (entry.isFile()) { + await convertCRLFtoLF(fullPath, config); + } else if (entry.isFile() && shouldProcessFile(relativePath, config)) { await processFile(fullPath); + } else { + logger.info(`skipped: ${relativePath}`, fullPath); } })); } catch (err) { @@ -100,14 +154,11 @@ async function convertCRLFtoLF(dirPath, ig) { /** * convert CRLF to LF * @param {string} filePath - full path of the file to process + * @returns {Promise} + * @throws {Error} - if there's an error reading or writing file */ async function 
processFile(filePath) { try { - if (!isProcessableFile(filePath)) { - logger.info(`skipped (extension): ${filePath}`, filePath); - return; - } - const content = await fs.readFile(filePath, "utf8"); const updatedContent = content.replace(/\r\n/g, "\n"); @@ -124,21 +175,19 @@ async function processFile(filePath) { } (async () => { - // can receive directory path as a command line argument. default is current directory. - const targetDir = process.argv[2] || process.cwd(); - - logger.info(`converting CRLF to LF in: ${targetDir}`, targetDir); + const options = parseArgs(); + const config = await readConfig(options.configPath); - // set ignore patterns - const ig = await getIgnoreInstance(targetDir); + logger.info(`converting CRLF to LF in: ${config.entry}`, config.entry); - await convertCRLFtoLF(targetDir, ig); + await convertCRLFtoLF(config.entry, config); - logger.info("conversion completed.", targetDir); + logger.info("conversion completed.", config.entry); })(); module.exports = { - getIgnoreInstance, convertCRLFtoLF, processFile, + readConfig, + parseArgs, }; diff --git a/package-lock.json b/package-lock.json index 665b0c1..1705db8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,7 @@ "version": "1.0.0", "license": "MIT", "dependencies": { - "ignore": "^6.0.2" + "micromatch": "^4.0.8" }, "bin": { "lfify": "index.cjs" @@ -1537,7 +1537,6 @@ "version": "3.0.3", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", - "dev": true, "license": "MIT", "dependencies": { "fill-range": "^7.1.1" @@ -2256,7 +2255,6 @@ "version": "7.1.1", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", - "dev": true, "license": "MIT", "dependencies": { "to-regex-range": "^5.0.1" @@ -2470,15 +2468,6 @@ "node": ">=10.17.0" } }, - "node_modules/ignore": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-6.0.2.tgz", - "integrity": "sha512-InwqeHHN2XpumIkMvpl/DCJVrAHgCsG5+cn1XlnLWGwtZBm8QJfSusItfrwx81CTp5agNZqpKU2J/ccC5nGT4A==", - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, "node_modules/import-fresh": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", @@ -2625,7 +2614,6 @@ "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, "license": "MIT", "engines": { "node": ">=0.12.0" @@ -3537,7 +3525,6 @@ "version": "4.0.8", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", - "dev": true, "license": "MIT", "dependencies": { "braces": "^3.0.3", @@ -3800,7 +3787,6 @@ "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, "license": "MIT", "engines": { "node": ">=8.6" @@ -4204,7 +4190,6 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", "integrity": 
"sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, "license": "MIT", "dependencies": { "is-number": "^7.0.0" diff --git a/package.json b/package.json index 35ee4d0..f57498d 100644 --- a/package.json +++ b/package.json @@ -31,7 +31,7 @@ }, "homepage": "https://github.com/GyeongHoKim/lfify#readme", "dependencies": { - "ignore": "^6.0.2" + "micromatch": "^4.0.8" }, "devDependencies": { "@eslint/js": "^9.15.0", From b71f3f5a51844a2fc9bd31e9e828b3cb5870b123 Mon Sep 17 00:00:00 2001 From: GyeongHo Kim Date: Tue, 1 Apr 2025 00:01:41 +0900 Subject: [PATCH 2/5] test: added unit tests - unit test for every functions - manual mock for unit test(fs, micromatch, path) - launch.json to debug --- .vscode/launch.json | 31 ++++++ __mocks__/fs.js | 54 +++++++++++ __mocks__/micromatch.js | 23 +++++ __mocks__/path.js | 17 ++++ index.test.js | 205 +++++++++++++++++----------------------- 5 files changed, 210 insertions(+), 120 deletions(-) create mode 100644 .vscode/launch.json create mode 100644 __mocks__/fs.js create mode 100644 __mocks__/micromatch.js create mode 100644 __mocks__/path.js diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..c2cdcc6 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,31 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "type": "node", + "request": "launch", + "name": "Jest Tests Debug", + "program": "${workspaceFolder}/node_modules/jest/bin/jest", + "args": [ + "--runInBand", + "--watchAll=false", + "--testTimeout=100000", + "--detectOpenHandles" + ], + "console": "integratedTerminal", + "internalConsoleOptions": "neverOpen" + }, + { + "type": "node", + "request": "launch", + "name": "Launch Program", + "skipFiles": [ + "/**" + ], + "program": "${workspaceFolder}/index.cjs" + } + ] +} \ No newline at end of file diff --git a/__mocks__/fs.js b/__mocks__/fs.js new file mode 100644 index 0000000..af68ec4 --- /dev/null +++ b/__mocks__/fs.js @@ -0,0 +1,54 @@ +const fs = jest.createMockFromModule('fs'); + +const mockFiles = new Map(); + +function __setMockFiles(newMockFiles) { + mockFiles.clear(); + for (const [path, content] of Object.entries(newMockFiles)) { + mockFiles.set(path, content); + } +} + +function __setConfig(stringifiedConfig, path = '.lfifyrc.json') { + mockFiles.set(path, stringifiedConfig); +} + +const promises = { + /* eslint-disable-next-line no-unused-vars */ + readFile: jest.fn().mockImplementation((path, ...rest) => { + if (mockFiles.has(path)) { + return Promise.resolve(mockFiles.get(path)); + } + return Promise.reject(new Error(`ENOENT: no such file or directory, open '${path}'`)); + }), + + writeFile: jest.fn().mockImplementation((path, content) => { + mockFiles.set(path, content); + return Promise.resolve(); + }), + + /* eslint-disable-next-line no-unused-vars */ + readdir: jest.fn().mockImplementation((path, ...rest) => { + const entries = []; + for (const filePath of mockFiles.keys()) { + if (filePath.startsWith(path)) { + const relativePath = filePath.slice(path.length + 1); + const name = relativePath.split('/')[0]; + if (name && !entries.some(e => e.name === name)) { + entries.push({ + name, + isFile: () => !name.includes('/'), + isDirectory: () => name.includes('/') + }); + } + } + } + return Promise.resolve(entries); + }) +}; + +fs.promises = promises; 
+fs.__setMockFiles = __setMockFiles; +fs.__setConfig = __setConfig; + +module.exports = fs; diff --git a/__mocks__/micromatch.js b/__mocks__/micromatch.js new file mode 100644 index 0000000..682677a --- /dev/null +++ b/__mocks__/micromatch.js @@ -0,0 +1,23 @@ +const micromatch = jest.createMockFromModule('micromatch'); + +micromatch.isMatch = jest.fn().mockImplementation((filePath, patterns) => { + if (!Array.isArray(patterns)) { + patterns = [patterns]; + } + + // Simple glob pattern matching implementation + return patterns.some(pattern => { + // Exact match + if (pattern === filePath) return true; + + // Wildcard match + const regexPattern = pattern + .replace(/\./g, '\\.') + .replace(/\*/g, '.*') + .replace(/\?/g, '.'); + + return new RegExp(`^${regexPattern}$`).test(filePath); + }); +}); + +module.exports = micromatch; \ No newline at end of file diff --git a/__mocks__/path.js b/__mocks__/path.js new file mode 100644 index 0000000..29e2454 --- /dev/null +++ b/__mocks__/path.js @@ -0,0 +1,17 @@ +const path = jest.createMockFromModule('path'); + +const actualPath = jest.requireActual('path'); + +path.join = jest.fn().mockImplementation((...paths) => { + return actualPath.join(...paths); +}); + +path.resolve = jest.fn().mockImplementation((...paths) => { + return actualPath.resolve(...paths); +}); + +path.relative = jest.fn().mockImplementation((from, to) => { + return actualPath.relative(from, to); }); + +module.exports = path; \ No newline at end of file diff --git a/index.test.js b/index.test.js index 921da7d..4956a11 100644 --- a/index.test.js +++ b/index.test.js @@ -1,142 +1,107 @@ -const fs = require('fs').promises; -const path = require('path'); +const { readConfig, parseArgs, processFile } = require('./index.cjs'); -jest.mock('fs', () => ({ - promises: { - readFile: jest.fn(), - writeFile: jest.fn(), - readdir: jest.fn(), - }, -})); - -const { getIgnoreInstance, convertCRLFtoLF, processFile } = require('./index.cjs'); +jest.mock('fs'); +jest.mock('path'); +jest.mock('micromatch'); describe('CRLF to LF Converter', () => { + const MOCK_FILE_INFO = { + './src/file1.txt': 'hello\r\nworld\r\n', + './src/file2.js': 'console.log("test");\r\n', + './src/subdir/file3.txt': 'test\r\n', + './test/file1.txt': 'hello\r\nworld\r\n', + './test/file2.js': 'console.log("test");\r\n', + './test/subdir/file3.txt': 'test\r\n', + './node_modules/file.js': 'console.log("test");\r\n', + './node_modules/subdir/file4.txt': 'test\r\n', + 'index.js': 'console.log("test");\r\n' + }; + beforeEach(() => { - // Clear all mocks before each test jest.clearAllMocks(); + require('fs').__setMockFiles(MOCK_FILE_INFO); }); - describe('getIgnoreInstance', () => { - it('should create ignore instance with gitignore content', async () => { - fs.readFile.mockResolvedValue('node_modules\n.git\n*.log'); - - const ig = await getIgnoreInstance('/test/path'); - - expect(fs.readFile).toHaveBeenCalledWith( - path.join('/test/path', '.gitignore'), - 'utf8' - ); - expect(ig.ignores('node_modules')).toBe(true); - expect(ig.ignores('test.log')).toBe(true); - expect(ig.ignores('src/index.js')).toBe(false); - }); + describe('readConfig', () => { + it('should return config when valid config file is provided', async () => { + // arrange + const validConfig = { + entry: './', + include: ['*.js'], + exclude: ['node_modules/**'] + }; + require('fs').__setConfig(JSON.stringify(validConfig)); - it('should handle missing gitignore file', async () => { - const error = new Error('ENOENT'); - error.code = 'ENOENT'; - fs.readFile.mockRejectedValue(error); - - const consoleSpy = 
jest.spyOn(console, 'warn'); - - const ig = await getIgnoreInstance('/test/path'); - - expect(consoleSpy).toHaveBeenCalledWith( - '.gitignore file not found. processing all files. /test/path/.gitignore' - ); - expect(ig.ignores('node_modules')).toBe(false); - }); - }); + // act + const config = await readConfig('.lfifyrc.json'); - describe('processFile', () => { - it('should convert CRLF to LF in text files', async () => { - const testContent = 'line1\r\nline2\r\nline3'; - const expectedContent = 'line1\nline2\nline3'; - - fs.readFile.mockResolvedValue(testContent); - - await processFile('test.js'); - - expect(fs.writeFile).toHaveBeenCalledWith( - 'test.js', - expectedContent, - 'utf8' - ); + // assert + expect(config).toEqual(expect.objectContaining({ + entry: expect.any(String), + include: expect.any(Array), + exclude: expect.any(Array) + })); }); - it('should skip files with unsupported extensions', async () => { - const consoleSpy = jest.spyOn(console, 'log'); - - await processFile('test.exe'); - - expect(fs.readFile).not.toHaveBeenCalled(); - expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining('skipped (extension)') - ); + it('should throw error when config file is not found', async () => { + // act & assert + await expect(readConfig('.lfifyrc.json')).rejects.toThrow(); }); - it('should handle files that dont need conversion', async () => { - const content = 'line1\nline2\nline3'; - fs.readFile.mockResolvedValue(content); - - const consoleSpy = jest.spyOn(console, 'log'); - - await processFile('test.js'); - - expect(fs.writeFile).not.toHaveBeenCalled(); - expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining('no need to convert') - ); + it('should throw error when config file is invalid json', async () => { + // arrange + require('fs').__setConfig('invalid json'); + + // act & assert + await expect(readConfig('.lfifyrc.json')).rejects.toThrow(); }); }); - describe('convertCRLFtoLF', () => { - const createDirent = (name, isDir) => ({ - name, - isDirectory: () => isDir, - isFile: () => !isDir, + describe('parseArgs', () => { + it('should return config path when --config option is provided', () => { + // arrange + process.argv = ['node', 'lfify', '--config', './path/for/test/.lfifyrc.json']; + + // act + const options = parseArgs(); + + // assert + expect(options.configPath).toBe('./path/for/test/.lfifyrc.json'); }); - it('should process all files in directory recursively', async () => { - const mockEntries = [ - createDirent('test.js', false), - createDirent('test.md', false), - createDirent('subfolder', true), - ]; - - fs.readdir.mockResolvedValueOnce(mockEntries); - fs.readdir.mockResolvedValueOnce([ - createDirent('test.js', false) - ]); - - fs.readFile.mockResolvedValue('test\r\ndata'); - - const ig = { ignores: jest.fn().mockReturnValue(false) }; - - await convertCRLFtoLF('/test/path', ig); - - expect(fs.readdir).toHaveBeenCalledTimes(2); - expect(fs.readFile).toHaveBeenCalledTimes(3); + it('should return default config path when --config option is not provided', () => { + // arrange + process.argv = ['node', 'lfify']; + + // act + const options = parseArgs(); + + // assert + expect(options.configPath).toBe('.lfifyrc.json'); }); + }); - it('should skip ignored files and directories', async () => { - const mockEntries = [ - createDirent('.git', true), - createDirent('node_modules', true), - createDirent('test.log', false), - ]; - - fs.readdir.mockResolvedValue(mockEntries); - - const ig = { ignores: jest.fn().mockReturnValue(true) }; - const consoleSpy = 
jest.spyOn(console, 'log'); - - await convertCRLFtoLF('/test/path', ig); - - expect(fs.readFile).not.toHaveBeenCalled(); - expect(consoleSpy).toHaveBeenCalledWith( - expect.stringContaining('excluded') - ); + describe('shouldProcessFile', () => { + it('should return true when file matches include pattern and does not match exclude pattern', () => { + /** + * This function uses micromatch to check config.include and config.exclude + * so this test case is already tested in micromatch's test file + * so I'm not going to test this function + */ }); }); -}); + + describe('processFile', () => { + it('should convert CRLF to LF when file is processed', async () => { + // arrange + const shouldbe = 'hello\nworld\n'; + + // act + await processFile('./src/file1.txt'); + const content = await require('fs').promises.readFile('./src/file1.txt', 'utf8'); + + // assert + expect(content).toBe(shouldbe); + }) + }); +}); \ No newline at end of file From 5e92616e2d791565fd28ca4df7f85833ab7d3fd0 Mon Sep 17 00:00:00 2001 From: GyeongHo Kim Date: Tue, 1 Apr 2025 00:03:48 +0900 Subject: [PATCH 3/5] feat: enable undefined fields - do not throw error when there is no key(CONFIG_SCHEMA), just validate if exists. - check whether index.cjs called from main or required sth else file(for test script) --- index.cjs | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/index.cjs b/index.cjs index 6576289..659bd4b 100644 --- a/index.cjs +++ b/index.cjs @@ -63,13 +63,10 @@ async function readConfig(configPath) { try { const configContent = await fs.readFile(configPath, 'utf8'); const config = JSON.parse(configContent); - + // Validate required fields for (const [key, validator] of Object.entries(CONFIG_SCHEMA)) { - if (!config[key]) { - throw new Error(`Invalid or missing "${key}" in configuration file`); - } - if (!validator(config[key])) { + if (config[key] && !validator(config[key])) { throw new Error(`Invalid "${key}" in configuration file`); } } @@ -77,7 +74,7 @@ async function readConfig(configPath) { return { ...DEFAULT_CONFIG, ...config, - entry: path.resolve(process.cwd(), config.entry) + entry: path.resolve(process.cwd(), config.entry || DEFAULT_CONFIG.entry) }; } catch (err) { if (err.code === 'ENOENT') { @@ -85,7 +82,11 @@ async function readConfig(configPath) { } else { logger.error(`Error reading configuration file: ${err.message}`, configPath); } - process.exit(1); + + if (require.main === module) { + process.exit(1); + } + throw err; } } @@ -133,6 +134,9 @@ async function convertCRLFtoLF(dirPath, config) { try { const entries = await fs.readdir(dirPath, { withFileTypes: true }); + /** + * @todo Node.js is single-threaded, if I want to convert files in parallel, I need to use worker_threads + */ await Promise.all(entries.map(async entry => { const fullPath = path.join(dirPath, entry.name); const relativePath = path.relative(process.cwd(), fullPath).replace(/\\/g, "/"); @@ -163,6 +167,10 @@ async function processFile(filePath) { const updatedContent = content.replace(/\r\n/g, "\n"); if (content !== updatedContent) { + /** + * @todo V8 javascript engine with 32-bit system cannot handle more than 2GB file, + * so I should use createReadStream and createWriteStream to handle large files + */ await fs.writeFile(filePath, updatedContent, "utf8"); logger.info(`converted: ${filePath}`); } else { @@ -174,7 +182,7 @@ async function processFile(filePath) { } } -(async () => { +async function main() { const options = parseArgs(); const config = await 
readConfig(options.configPath); logger.info(`converting CRLF to LF in: ${config.entry}`, config.entry); await convertCRLFtoLF(config.entry, config); logger.info("conversion completed.", config.entry); -})(); +} + +if (require.main === module) { + main(); +} module.exports = { convertCRLFtoLF, From b1e8dfb063d3cb391f0e26dda205eff8771ab4aa Mon Sep 17 00:00:00 2001 From: GyeongHo Kim Date: Tue, 1 Apr 2025 00:31:56 +0900 Subject: [PATCH 4/5] chore: deploy to npmjs.org - modify cd.yml to deploy to npmjs.org - remove the username scope from the package name --- .github/workflows/cd.yml | 51 +++++++++++++++++++++++----------------- package.json | 6 ++--- 2 files changed, 33 insertions(+), 24 deletions(-) diff --git a/.github/workflows/cd.yml b/.github/workflows/cd.yml index 666ca4f..63c1cbb 100644 --- a/.github/workflows/cd.yml +++ b/.github/workflows/cd.yml @@ -1,27 +1,36 @@ -name: Github packages NPM publish on release - +name: Release on: - release: - types: [created] + push: + branches: + - main + +permissions: + contents: read # for checkout jobs: - publish-github-registry: + release: + name: Release runs-on: ubuntu-latest permissions: - contents: read - packages: write + contents: write # to be able to publish a GitHub release + issues: write # to be able to comment on released issues + pull-requests: write # to be able to comment on released pull requests + id-token: write # to enable use of OIDC for npm provenance steps: - - uses: actions/checkout@v3 - - name: Setup Node.js - uses: actions/setup-node@v3 - with: - node-version: '18' - registry-url: https://npm.pkg.github.com - - name: Install dependencies - run: npm ci - - name: test - run: npm test - - name: Publish - run: npm publish - env: - NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: "lts/*" + - name: Install dependencies + run: npm clean-install + - name: Verify the integrity of provenance attestations and registry signatures for installed dependencies + run: npm audit signatures + - name: Release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + run: npx semantic-release \ No newline at end of file diff --git a/package.json b/package.json index f57498d..75f20ec 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { - "name": "@gyeonghokim/lfify", - "version": "1.0.0", + "name": "lfify", + "version": "0.0.0-development", "private": false, "description": "make your crlf to lf", "main": "index.cjs", @@ -17,7 +17,7 @@ "url": "https://github.com/GyeongHoKim/lfify" }, "publishConfig": { - "registry": "https://npm.pkg.github.com" + "registry": "https://registry.npmjs.org" }, "keywords": [ "eol", From 41be4a5259e006f9ac941f10113f9b0a4a8ff221 Mon Sep 17 00:00:00 2001 From: GyeongHo Kim Date: Tue, 1 Apr 2025 00:44:17 +0900 Subject: [PATCH 5/5] docs: update README --- README.md | 91 +++++++++++++++++-------------------------------------- 1 file changed, 28 insertions(+), 63 deletions(-) diff --git a/README.md b/README.md index 0374b43..38bc19a 100644 --- a/README.md +++ b/README.md @@ -1,81 +1,46 @@ # LFify +> ⚠️ **Warning**: All files must be encoded in UTF-8. The library is being developed to automatically convert UTF-8 with BOM to UTF-8 without BOM. Using different encodings may cause unexpected issues. + A lightweight Node.js library to convert CRLF to LF line endings. It is useful when your development environment is Windows. -Our company, which gives its web developers Windows OS PCs, should reflect on itself right now!! 
Every little thing runs into problems because of windows-cp-949 encoding and CRLF line feeds. Giving a Node.js developer a Windows PC should be banned by law. - -# Features - - - Exclude files from your `.gitignore` - - Exclude `.git` and `node_modules` directories on default - - Recursive conversion - - Supports multiple file extensions(generally used on Web Development) - - # Installation - -> If you do not have a Github personal access token, please create **CLASSIC** token that has `packages: read` permission. - -first, edit your `.npmrc` file. - -```bash -@gyeonghokim:registry=https://npm.pkg.github.com -//npm.pkg.github.com/:_authToken= -``` - -then, install it. - -```bash -npm install --save-dev @gyeonghokim/lfify -``` - -# Usage - -## Basic Usage +## Getting started -if you do not want to install it, you can use it with `npx`, however, you still need to specify registry and auth token in your `.npmrc` file. +Create a `.lfifyrc.json` file: -```bash -npx @gyeonghokim/lfify +```json +{ + "entry": "./", + "include": [ + "**/*.{js,ts,jsx,tsx}", + "**/*.{json,md}", + "**/*.{css,scss}", + "**/*.{html,vue}" + ], + "exclude": [ + "node_modules/**", + ".git/**", + "dist/**", + "build/**", + "coverage/**" + ] +} ``` -## Specify Custom Directory +and then run: ```bash -npx @gyeonghokim/lfify ./path/to/your/project -``` - -## Script on your `package.json` - -```json -"scripts": { - "lfify": "@gyeonghokim/lfify" -} +npx lfify ``` -# Supported File Extensions - -- JavaScript: `.js`, `.jsx`, `.cjs`, `.mjs` -- TypeScript: `.ts`, `.tsx` -- Web: `.html`, `.css`, `.scss`, `.vue` -- Markup/Config: `.json`, `.xml`, `.yml`, `.yaml`, `.md` -- Other: `.txt`, `.env` - -# Features in Detail - -1. Gitignore Support -Automatically reads and respects your .gitignore patterns -Skips ignored files and directories +You can pass the options below. -2. Smart Processing -Only modifies files that actually need conversion -Preserves file encoding -Provides detailed logging of all operations +## Options -3. Safe Operation -Automatically excludes binary files -Built-in protection for critical directories -Error handling with detailed logging +| Option | Description | +|--------|-------------| +| `--config <path>` | Specify a custom path for the configuration file. Default is `.lfifyrc.json` in the current directory. | # Development
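For readers unsure how `include` and `exclude` interact, `shouldProcessFile` treats a file as processable only when it matches at least one `include` pattern and no `exclude` pattern. The sketch below mirrors that check with micromatch; the sample paths and the `shouldProcess` helper name are hypothetical illustrations, not part of the patch.

```js
const micromatch = require("micromatch");

// Mirrors shouldProcessFile from index.cjs: included AND not excluded.
function shouldProcess(relativePath, config) {
  return (
    micromatch.isMatch(relativePath, config.include) &&
    !micromatch.isMatch(relativePath, config.exclude)
  );
}

const config = {
  include: ["**/*.{js,ts,jsx,tsx}", "**/*.{json,md}"],
  exclude: ["node_modules/**", "dist/**"]
};

console.log(shouldProcess("src/index.ts", config));              // true
console.log(shouldProcess("node_modules/pkg/index.js", config)); // false: excluded
console.log(shouldProcess("assets/logo.png", config));           // false: no include pattern matches
```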
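The patch series also leaves `index.cjs` exporting `convertCRLFtoLF`, `processFile`, `readConfig`, and `parseArgs`. A minimal programmatic sketch built on those exports is shown below; treating them as public API rather than test hooks is an assumption, as is that `require("lfify")` resolves to `index.cjs` through the package's `main` field.

```js
// Sketch only: assumes the exports in index.cjs are intended for programmatic use.
const { readConfig, convertCRLFtoLF } = require("lfify");

(async () => {
  // readConfig validates the file against CONFIG_SCHEMA, falls back to
  // DEFAULT_CONFIG for missing fields, and resolves "entry" against process.cwd().
  const config = await readConfig(".lfifyrc.json");

  // Recursively rewrites CRLF to LF in files matching "include" but not "exclude".
  await convertCRLFtoLF(config.entry, config);
})();
```

The CLI path is equivalent: `npx lfify --config ./path/to/.lfifyrc.json` goes through `parseArgs` and then the same two calls.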