From 9eb41f63c17ce8e2f2884c08151bb0f6ad495296 Mon Sep 17 00:00:00 2001 From: Joaquin Santana Date: Fri, 26 Sep 2025 18:13:47 +0200 Subject: [PATCH] chore: build dist --- dist/index.js | 7943 ++++++++++++++++++++++++++++++++++++------------- 1 file changed, 5866 insertions(+), 2077 deletions(-) diff --git a/dist/index.js b/dist/index.js index 30cf727f..ef3e6e41 100644 --- a/dist/index.js +++ b/dist/index.js @@ -7789,231 +7789,230 @@ exports.stringify = __nccwpck_require__(66303) /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getDiffString = exports.getChanges = exports.parseChanges = exports.getAllDiffs = exports.getGitFileData = exports.findRepoRoot = exports.resolvePath = void 0; -const child_process_1 = __nccwpck_require__(32081); -const types_1 = __nccwpck_require__(80721); -const logger_1 = __nccwpck_require__(44869); -const fs_1 = __nccwpck_require__(57147); -const path = __nccwpck_require__(71017); -class InstallationError extends Error { - constructor(message) { - super(message); - this.name = 'InstallationError'; - } -} -/** - * Get the absolute path of a relative path - * @param {string} dir the wildcard directory containing git change, not necessarily the root git directory - * @returns {string} the absolute path relative to the path that the user executed the bash command in - */ -function resolvePath(dir) { - const absoluteDir = path.resolve(process.cwd(), dir); - return absoluteDir; -} -exports.resolvePath = resolvePath; -/** - * Get the git root directory. - * Errors if the directory provided is not a git directory. - * @param {string} dir an absolute directory - * @returns {string} the absolute path of the git directory root - */ -function findRepoRoot(dir) { - try { - return (0, child_process_1.execSync)('git rev-parse --show-toplevel', { cwd: dir }) - .toString() - .trimRight(); // remove the trailing \n - } - catch (err) { - logger_1.logger.error(`The directory provided is not a git directory: ${dir}`); - throw err; - } -} -exports.findRepoRoot = findRepoRoot; -/** - * Returns the git diff old/new mode, status, and path. Given a git diff. - * Errors if there is a parsing error - * @param {string} gitDiffPattern A single file diff. Renames and copies are broken up into separate diffs. 
See https://git-scm.com/docs/git-diff#Documentation/git-diff.txt-git-diff-filesltpatterngt82308203 for more details - * @returns indexable git diff fields: old/new mode, status, and path - */ -function parseGitDiff(gitDiffPattern) { - try { - const fields = gitDiffPattern.split(' '); - const newMode = fields[1]; - const oldMode = fields[0].substring(1); - const statusAndPath = fields[4].split('\t'); - const status = statusAndPath[0]; - const relativePath = statusAndPath[1]; - return { oldMode, newMode, status, relativePath }; - } - catch (err) { - logger_1.logger.warn(`\`git diff --raw\` may have changed formats: \n ${gitDiffPattern}`); - throw err; - } -} -/** - * Get the GitHub mode, file content, and relative path asynchronously - * Rejects if there is a git diff error, or if the file contents could not be loaded. - * @param {string} gitRootDir the root of the local GitHub repository - * @param {string} gitDiffPattern A single file diff. Renames and copies are broken up into separate diffs. See https://git-scm.com/docs/git-diff#Documentation/git-diff.txt-git-diff-filesltpatterngt82308203 for more details - * @returns {Promise} the current mode, the relative path of the file in the Git Repository, and the file status. - */ -function getGitFileData(gitRootDir, gitDiffPattern) { - return new Promise((resolve, reject) => { - try { - const { oldMode, newMode, status, relativePath } = parseGitDiff(gitDiffPattern); - // if file is deleted, do not attempt to read it - if (status === 'D') { - resolve({ path: relativePath, fileData: new types_1.FileData(null, oldMode) }); - } - else { - // else read the file - (0, fs_1.readFile)(gitRootDir + '/' + relativePath, { - encoding: 'utf-8', - }, (err, content) => { - if (err) { - logger_1.logger.error(`Error loading file ${relativePath} in git directory ${gitRootDir}`); - reject(err); - } - resolve({ - path: relativePath, - fileData: new types_1.FileData(content, newMode), - }); - }); - } - } - catch (err) { - reject(err); - } - }); -} -exports.getGitFileData = getGitFileData; -/** - * Get all the diffs using `git diff` of a git directory. - * Errors if the git directory provided is not a git directory. - * @param {string} gitRootDir a git directory - * @returns {string[]} a list of git diffs - */ -function getAllDiffs(gitRootDir) { - (0, child_process_1.execSync)('git add -A', { cwd: gitRootDir }); - const diffs = (0, child_process_1.execSync)('git diff --raw --staged --no-renames', { - cwd: gitRootDir, - }) - .toString() // strictly return buffer for mocking purposes. sinon ts doesn't infer {encoding: 'utf-8'} - .trimRight() // remove the trailing new line - .split('\n') - .filter(line => !!line.trim()); - (0, child_process_1.execSync)('git reset .', { cwd: gitRootDir }); - return diffs; -} -exports.getAllDiffs = getAllDiffs; -/** - * Get the git changes of the current project asynchronously. 
- * Rejects if any of the files fails to load (if not deleted), - * or if there is a git diff parse error - * @param {string[]} diffs the git diff raw output (which only shows relative paths) - * @param {string} gitDir the root of the local GitHub repository - * @returns {Promise} the changeset - */ -async function parseChanges(diffs, gitDir) { - try { - // get updated file contents - const changes = new Map(); - const changePromises = []; - for (let i = 0; i < diffs.length; i++) { - // TODO - handle memory constraint - changePromises.push(getGitFileData(gitDir, diffs[i])); - } - const gitFileDatas = await Promise.all(changePromises); - for (let i = 0; i < gitFileDatas.length; i++) { - changes.set(gitFileDatas[i].path, gitFileDatas[i].fileData); - } - return changes; - } - catch (err) { - logger_1.logger.error('Error parsing git changes'); - throw err; - } -} -exports.parseChanges = parseChanges; -/** - * Throws an error if git is not installed - * @returns {void} void if git is installed - */ -function validateGitInstalled() { - try { - (0, child_process_1.execSync)('git --version'); - } - catch (err) { - logger_1.logger.error('git not installed'); - throw new InstallationError('git command is not recognized. Make sure git is installed.'); - } -} -/** - * Load the change set asynchronously. - * @param dir the directory containing git changes - * @returns {Promise} the change set - */ -function getChanges(dir) { - try { - validateGitInstalled(); - const absoluteDir = resolvePath(dir); - const gitRootDir = findRepoRoot(absoluteDir); - const diffs = getAllDiffs(gitRootDir); - return parseChanges(diffs, gitRootDir); - } - catch (err) { - if (!(err instanceof InstallationError)) { - logger_1.logger.error('Error loadng git changes.'); - } - throw err; - } -} -exports.getChanges = getChanges; -/** - * Get the git changes of the current project asynchronously. - * Rejects if any of the files fails to load (if not deleted), - * or if there is a git diff parse error - * @param {string[]} diffs the git diff raw output (which only shows relative paths) - * @param {string} gitDir the root of the local GitHub repository - * @returns {string} the diff - */ -function getDiffString(dir) { - try { - validateGitInstalled(); - const absoluteDir = resolvePath(dir); - const gitRootDir = findRepoRoot(absoluteDir); - (0, child_process_1.execSync)('git add -A', { cwd: gitRootDir }); - const diff = (0, child_process_1.execSync)('git diff --staged --no-renames', { - cwd: gitRootDir, - }) - .toString() // strictly return buffer for mocking purposes. sinon ts doesn't infer {encoding: 'utf-8'} - .trimRight(); // remove the trailing new line - (0, child_process_1.execSync)('git reset .', { cwd: gitRootDir }); - return diff; - } - catch (err) { - if (!(err instanceof InstallationError)) { - logger_1.logger.error('Error loadng git changes.'); - } - throw err; - } -} -exports.getDiffString = getDiffString; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
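For orientation while reading the rewritten module below: parseGitDiff consumes one record of `git diff --raw` output per changed file. A minimal sketch of the record shape that the split(' ') and split('\t') calls rely on; the modes, SHAs, and path here are made-up example values, not taken from this patch:

// Illustrative `git diff --raw` record (example values only):
const sample = ':100644 100755 bcd1234 0123456 M\tsrc/index.js';
const fields = sample.split(' ');
// fields[0] === ':100644'          old mode; the leading ':' is stripped with substring(1)
// fields[1] === '100755'           new mode
// fields[4] === 'M\tsrc/index.js'  status and relative path, separated by a tab
// A 'D' status tells getGitFileData below not to read the deleted file's contents.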
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.resolvePath = resolvePath;
+exports.findRepoRoot = findRepoRoot;
+exports.getGitFileData = getGitFileData;
+exports.getAllDiffs = getAllDiffs;
+exports.parseChanges = parseChanges;
+exports.getChanges = getChanges;
+exports.getDiffString = getDiffString;
+const child_process_1 = __nccwpck_require__(32081);
+const types_1 = __nccwpck_require__(80721);
+const logger_1 = __nccwpck_require__(44869);
+const fs_1 = __nccwpck_require__(57147);
+const path = __nccwpck_require__(71017);
+class InstallationError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = 'InstallationError';
+    }
+}
+/**
+ * Get the absolute path of a relative path
+ * @param {string} dir the wildcard directory containing git changes, not necessarily the root git directory
+ * @returns {string} the absolute path relative to the path that the user executed the bash command in
+ */
+function resolvePath(dir) {
+    const absoluteDir = path.resolve(process.cwd(), dir);
+    return absoluteDir;
+}
+/**
+ * Get the git root directory.
+ * Errors if the directory provided is not a git directory.
+ * @param {string} dir an absolute directory
+ * @returns {string} the absolute path of the git directory root
+ */
+function findRepoRoot(dir) {
+    try {
+        return (0, child_process_1.execSync)('git rev-parse --show-toplevel', { cwd: dir })
+            .toString()
+            .trimRight(); // remove the trailing \n
+    }
+    catch (err) {
+        logger_1.logger.error(`The directory provided is not a git directory: ${dir}`);
+        throw err;
+    }
+}
+/**
+ * Returns the git diff old/new mode, status, and path, given a git diff.
+ * Errors if there is a parsing error
+ * @param {string} gitDiffPattern A single file diff. Renames and copies are broken up into separate diffs. See https://git-scm.com/docs/git-diff#Documentation/git-diff.txt-git-diff-filesltpatterngt82308203 for more details
+ * @returns indexable git diff fields: old/new mode, status, and path
+ */
+function parseGitDiff(gitDiffPattern) {
+    try {
+        const fields = gitDiffPattern.split(' ');
+        const newMode = fields[1];
+        const oldMode = fields[0].substring(1);
+        const statusAndPath = fields[4].split('\t');
+        const status = statusAndPath[0];
+        const relativePath = statusAndPath[1];
+        return { oldMode, newMode, status, relativePath };
+    }
+    catch (err) {
+        logger_1.logger.warn(`\`git diff --raw\` may have changed formats: \n ${gitDiffPattern}`);
+        throw err;
+    }
+}
+/**
+ * Get the GitHub mode, file content, and relative path asynchronously
+ * Rejects if there is a git diff error, or if the file contents could not be loaded.
+ * @param {string} gitRootDir the root of the local GitHub repository
+ * @param {string} gitDiffPattern A single file diff. Renames and copies are broken up into separate diffs. See https://git-scm.com/docs/git-diff#Documentation/git-diff.txt-git-diff-filesltpatterngt82308203 for more details
+ * @returns {Promise} the current mode, the relative path of the file in the Git Repository, and the file status.
+ */
+function getGitFileData(gitRootDir, gitDiffPattern) {
+    return new Promise((resolve, reject) => {
+        try {
+            const { oldMode, newMode, status, relativePath } = parseGitDiff(gitDiffPattern);
+            // if file is deleted, do not attempt to read it
+            if (status === 'D') {
+                resolve({ path: relativePath, fileData: new types_1.FileData(null, oldMode) });
+            }
+            else {
+                // else read the file
+                (0, fs_1.readFile)(gitRootDir + '/' + relativePath, {
+                    encoding: 'utf-8',
+                }, (err, content) => {
+                    if (err) {
+                        logger_1.logger.error(`Error loading file ${relativePath} in git directory ${gitRootDir}`);
+                        reject(err);
+                    }
+                    resolve({
+                        path: relativePath,
+                        fileData: new types_1.FileData(content, newMode),
+                    });
+                });
+            }
+        }
+        catch (err) {
+            reject(err);
+        }
+    });
+}
+/**
+ * Get all the diffs using `git diff` of a git directory.
+ * Errors if the git directory provided is not a git directory.
+ * @param {string} gitRootDir a git directory
+ * @returns {string[]} a list of git diffs
+ */
+function getAllDiffs(gitRootDir) {
+    (0, child_process_1.execSync)('git add -A', { cwd: gitRootDir });
+    const diffs = (0, child_process_1.execSync)('git diff --raw --staged --no-renames', {
+        cwd: gitRootDir,
+    })
+        .toString() // strictly return buffer for mocking purposes. sinon ts doesn't infer {encoding: 'utf-8'}
+        .trimRight() // remove the trailing new line
+        .split('\n')
+        .filter(line => !!line.trim());
+    (0, child_process_1.execSync)('git reset .', { cwd: gitRootDir });
+    return diffs;
+}
+/**
+ * Get the git changes of the current project asynchronously.
+ * Rejects if any of the files fails to load (if not deleted),
+ * or if there is a git diff parse error
+ * @param {string[]} diffs the git diff raw output (which only shows relative paths)
+ * @param {string} gitDir the root of the local GitHub repository
+ * @returns {Promise} the changeset
+ */
+async function parseChanges(diffs, gitDir) {
+    try {
+        // get updated file contents
+        const changes = new Map();
+        const changePromises = [];
+        for (let i = 0; i < diffs.length; i++) {
+            // TODO - handle memory constraint
+            changePromises.push(getGitFileData(gitDir, diffs[i]));
+        }
+        const gitFileDatas = await Promise.all(changePromises);
+        for (let i = 0; i < gitFileDatas.length; i++) {
+            changes.set(gitFileDatas[i].path, gitFileDatas[i].fileData);
+        }
+        return changes;
+    }
+    catch (err) {
+        logger_1.logger.error('Error parsing git changes');
+        throw err;
+    }
+}
+/**
+ * Throws an error if git is not installed
+ * @returns {void} void if git is installed
+ */
+function validateGitInstalled() {
+    try {
+        (0, child_process_1.execSync)('git --version');
+    }
+    catch (err) {
+        logger_1.logger.error('git not installed');
+        throw new InstallationError('git command is not recognized. Make sure git is installed.');
+    }
+}
+/**
+ * Load the change set asynchronously.
+ * @param dir the directory containing git changes
+ * @returns {Promise} the change set
+ */
+function getChanges(dir) {
+    try {
+        validateGitInstalled();
+        const absoluteDir = resolvePath(dir);
+        const gitRootDir = findRepoRoot(absoluteDir);
+        const diffs = getAllDiffs(gitRootDir);
+        return parseChanges(diffs, gitRootDir);
+    }
+    catch (err) {
+        if (!(err instanceof InstallationError)) {
+            logger_1.logger.error('Error loading git changes.');
+        }
+        throw err;
+    }
+}
+/**
+ * Get the git diff of the current project as a string.
+ * Throws if git is not installed or if the directory is not within a git repository.
+ * @param {string} dir the directory containing git changes
+ * @returns {string} the diff
+ */
+function getDiffString(dir) {
+    try {
+        validateGitInstalled();
+        const absoluteDir = resolvePath(dir);
+        const gitRootDir = findRepoRoot(absoluteDir);
+        (0, child_process_1.execSync)('git add -A', { cwd: gitRootDir });
+        const diff = (0, child_process_1.execSync)('git diff --staged --no-renames', {
+            cwd: gitRootDir,
+        })
+            .toString() // strictly return buffer for mocking purposes. sinon ts doesn't infer {encoding: 'utf-8'}
+            .trimRight(); // remove the trailing new line
+        (0, child_process_1.execSync)('git reset .', { cwd: gitRootDir });
+        return diff;
+    }
+    catch (err) {
+        if (!(err instanceof InstallationError)) {
+            logger_1.logger.error('Error loading git changes.');
+        }
+        throw err;
+    }
+}
//# sourceMappingURL=handle-git-dir-change.js.map
/***/ }),
@@ -8022,69 +8021,68 @@ exports.getDiffString = getDiffString;
/***/ ((__unused_webpack_module, exports) => {
"use strict";
-
-// Copyright 2020 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.addReviewCommentsDefaults = exports.addPullRequestDefaults = void 0;
-const DEFAULT_BRANCH_NAME = 'code-suggestions';
-const DEFAULT_PRIMARY_BRANCH = 'main';
-const DEFAULT_PAGE_SIZE = 100;
-/**
- * Add defaults to GitHub Pull Request options.
- * Preserves the empty string.
- * For ESCMAScript, null/undefined values are preserved for required fields.
- * Recommended with an object validation function to check empty strings and incorrect types.
- * @param {PullRequestUserOptions} options the user-provided github pull request options
- * @returns {CreatePullRequest} git hub context with defaults applied
- */
-function addPullRequestDefaults(options) {
-    const pullRequestSettings = {
-        upstreamOwner: options.upstreamOwner,
-        upstreamRepo: options.upstreamRepo,
-        description: options.description,
-        title: options.title,
-        message: options.message,
-        force: options.force || false,
-        branch: typeof options.branch === 'string' ? options.branch : DEFAULT_BRANCH_NAME,
-        primary: typeof options.primary === 'string'
-            ? options.primary
-            : DEFAULT_PRIMARY_BRANCH,
-        maintainersCanModify: options.maintainersCanModify === false ? false : true,
-        filesPerCommit: options.filesPerCommit,
-    };
-    return pullRequestSettings;
-}
-exports.addPullRequestDefaults = addPullRequestDefaults;
-/**
- * Format user input for pull request review comments
- * @param options The user's options input for review comments
- * @returns the formatted version of user input for pull request review comments
- */
-function addReviewCommentsDefaults(options) {
-    const createReviewComment = {
-        repo: options.repo,
-        owner: options.owner,
-        pullNumber: options.pullNumber,
-        // if zero set as 0
-        pageSize: options.pageSize === null || options.pageSize === undefined
-            ? DEFAULT_PAGE_SIZE
-            : options.pageSize,
-    };
-    return createReviewComment;
-}
-exports.addReviewCommentsDefaults = addReviewCommentsDefaults;
+
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.addPullRequestDefaults = addPullRequestDefaults;
+exports.addReviewCommentsDefaults = addReviewCommentsDefaults;
+const DEFAULT_BRANCH_NAME = 'code-suggestions';
+const DEFAULT_PRIMARY_BRANCH = 'main';
+const DEFAULT_PAGE_SIZE = 100;
+/**
+ * Add defaults to GitHub Pull Request options.
+ * Preserves the empty string.
+ * For ECMAScript, null/undefined values are preserved for required fields.
+ * Recommended with an object validation function to check empty strings and incorrect types.
+ * @param {PullRequestUserOptions} options the user-provided github pull request options
+ * @returns {CreatePullRequest} GitHub context with defaults applied
+ */
+function addPullRequestDefaults(options) {
+    const pullRequestSettings = {
+        upstreamOwner: options.upstreamOwner,
+        upstreamRepo: options.upstreamRepo,
+        description: options.description,
+        title: options.title,
+        message: options.message,
+        force: options.force || false,
+        branch: typeof options.branch === 'string' ? options.branch : DEFAULT_BRANCH_NAME,
+        primary: typeof options.primary === 'string'
+            ? options.primary
+            : DEFAULT_PRIMARY_BRANCH,
+        maintainersCanModify: options.maintainersCanModify === false ? false : true,
+        filesPerCommit: options.filesPerCommit,
+    };
+    return pullRequestSettings;
+}
+/**
+ * Format user input for pull request review comments
+ * @param options The user's options input for review comments
+ * @returns the formatted version of user input for pull request review comments
+ */
+function addReviewCommentsDefaults(options) {
+    const createReviewComment = {
+        repo: options.repo,
+        owner: options.owner,
+        pullNumber: options.pullNumber,
+        // if zero set as 0
+        pageSize: options.pageSize === null || options.pageSize === undefined
+            ? 
DEFAULT_PAGE_SIZE + : options.pageSize, + }; + return createReviewComment; +} //# sourceMappingURL=default-options-handler.js.map /***/ }), @@ -8093,29 +8091,29 @@ exports.addReviewCommentsDefaults = addReviewCommentsDefaults; /***/ ((__unused_webpack_module, exports) => { "use strict"; - -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.CommitError = void 0; -class CommitError extends Error { - constructor(message, cause) { - super(message); - this.cause = cause; - } -} -exports.CommitError = CommitError; + +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CommitError = void 0; +class CommitError extends Error { + constructor(message, cause) { + super(message); + this.cause = cause; + } +} +exports.CommitError = CommitError; //# sourceMappingURL=errors.js.map /***/ }), @@ -8124,130 +8122,129 @@ exports.CommitError = CommitError; /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -/* - * // Copyright 2020 Google LLC - * // - * // Licensed under the Apache License, Version 2.0 (the "License"); - * // you may not use this file except in compliance with the License. - * // You may obtain a copy of the License at - * // - * // https://www.apache.org/licenses/LICENSE-2.0 - * // - * // Unless required by applicable law or agreed to in writing, software - * // distributed under the License is distributed on an "AS IS" BASIS, - * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * // See the License for the specific language governing permissions and - * // limitations under the License. 
- * // - * //Modifications made by Joaquin Santana on 18/11/24, 22:09 - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.branch = exports.createBranch = exports.existsBranchWithName = exports.getBranchHead = exports.createRef = void 0; -const logger_1 = __nccwpck_require__(44869); -const REF_PREFIX = 'refs/heads/'; -const DEFAULT_PRIMARY_BRANCH = 'main'; -/** - * Create a new branch reference with the ref prefix - * @param {string} branchName name of the branch - */ -function createRef(branchName) { - return REF_PREFIX + branchName; -} -exports.createRef = createRef; -/** - * get branch commit HEAD SHA of a repository - * Throws an error if the branch cannot be found - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} origin The domain information of the remote origin repository - * @param {string} branch the name of the branch - * @returns {Promise} branch commit HEAD SHA - */ -async function getBranchHead(octokit, origin, branch) { - const branchData = (await octokit.repos.getBranch({ - owner: origin.owner, - repo: origin.repo, - branch, - })).data; - // @ts-ignore gitea adaption - logger_1.logger.info(`Successfully found branch HEAD sha "${branchData.commit.id}".`); - // @ts-ignore gitea adaption - return branchData.commit.id; -} -exports.getBranchHead = getBranchHead; -/** - * Determine if there is a branch with the provided name in the remote GitHub repository - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} remote The domain information of the remote repository - * @param {string} name The branch name to create on the repository - * @returns {Promise} if there is a branch already existing in the remote GitHub repository - */ -async function existsBranchWithName(octokit, remote, name) { - try { - const data = (await octokit.request('GET /repos/{owner}/{repo}/branches/{branch}', { - owner: remote.owner, - repo: remote.repo, - branch: name, - })).data; - // @ts-ignore - return !!data.commit.id; - } - catch (err) { - if (err.status === 404) - return false; - else - throw err; - } -} -exports.existsBranchWithName = existsBranchWithName; -/** - * Create a branch on the remote repository if there is not an existing branch - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} remote The domain information of the remote origin repository - * @param {string} name The branch name to create on the origin repository - * @param {string} baseSha the sha that the base of the reference points to - * @param {boolean} duplicate whether there is an existing branch or not - * @returns {Promise} - */ -async function createBranch(octokit, remote, name, baseSha, duplicate) { - if (!duplicate) { - const refData = (await octokit.request('POST /repos/{owner}/{repo}/branches', { - owner: remote.owner, - repo: remote.repo, - new_branch_name: name, - old_ref_name: baseSha, - })).data; - logger_1.logger.info(`Successfully created branch at ${refData.commit.url}`); - } - else { - logger_1.logger.info('Skipping branch creation step...'); - } -} -exports.createBranch = createBranch; -/** - * Create a GitHub branch given a remote origin. 
- * Throws an exception if octokit fails, or if the base branch is invalid - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} origin The domain information of the remote origin repository - * @param {RepoDomain} upstream The domain information of the remote upstream repository - * @param {string} name The branch name to create on the origin repository - * @param {string} baseBranch the name of the branch to base the new branch off of. Default is main - * @returns {Promise} the base SHA for subsequent commits to be based off for the origin branch - */ -async function branch(octokit, origin, upstream, name, baseBranch = DEFAULT_PRIMARY_BRANCH) { - // create branch from primary branch HEAD SHA - try { - const baseSha = await getBranchHead(octokit, upstream, baseBranch); - const duplicate = await existsBranchWithName(octokit, origin, name); - await createBranch(octokit, origin, name, baseSha, duplicate); - return baseSha; - } - catch (err) { - logger_1.logger.error('Error when creating branch'); - throw err; - } -} -exports.branch = branch; + +/* + * // Copyright 2020 Google LLC + * // + * // Licensed under the Apache License, Version 2.0 (the "License"); + * // you may not use this file except in compliance with the License. + * // You may obtain a copy of the License at + * // + * // https://www.apache.org/licenses/LICENSE-2.0 + * // + * // Unless required by applicable law or agreed to in writing, software + * // distributed under the License is distributed on an "AS IS" BASIS, + * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * // See the License for the specific language governing permissions and + * // limitations under the License. + * // + * //Modifications made by Joaquin Santana on 18/11/24, 22:09 + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.createRef = createRef; +exports.getBranchHead = getBranchHead; +exports.existsBranchWithName = existsBranchWithName; +exports.createBranch = createBranch; +exports.branch = branch; +const logger_1 = __nccwpck_require__(44869); +const REF_PREFIX = 'refs/heads/'; +const DEFAULT_PRIMARY_BRANCH = 'main'; +/** + * Create a new branch reference with the ref prefix + * @param {string} branchName name of the branch + */ +function createRef(branchName) { + return REF_PREFIX + branchName; +} +/** + * get branch commit HEAD SHA of a repository + * Throws an error if the branch cannot be found + * @param {Octokit} octokit The authenticated octokit instance + * @param {RepoDomain} origin The domain information of the remote origin repository + * @param {string} branch the name of the branch + * @returns {Promise} branch commit HEAD SHA + */ +async function getBranchHead(octokit, origin, branch) { + const branchData = (await octokit.repos.getBranch({ + owner: origin.owner, + repo: origin.repo, + branch, + })).data; + // @ts-ignore gitea adaption + logger_1.logger.info(`Successfully found branch HEAD sha "${branchData.commit.id}".`); + // @ts-ignore gitea adaption + return branchData.commit.id; +} +/** + * Determine if there is a branch with the provided name in the remote GitHub repository + * @param {Octokit} octokit The authenticated octokit instance + * @param {RepoDomain} remote The domain information of the remote repository + * @param {string} name The branch name to create on the repository + * @returns {Promise} if there is a branch already existing in the remote GitHub repository + */ +async function existsBranchWithName(octokit, remote, name) { + 
try { + const data = (await octokit.request('GET /repos/{owner}/{repo}/branches/{branch}', { + owner: remote.owner, + repo: remote.repo, + branch: name, + })).data; + // @ts-ignore + return !!data.commit.id; + } + catch (err) { + if (err.status === 404) + return false; + else + throw err; + } +} +/** + * Create a branch on the remote repository if there is not an existing branch + * @param {Octokit} octokit The authenticated octokit instance + * @param {RepoDomain} remote The domain information of the remote origin repository + * @param {string} name The branch name to create on the origin repository + * @param {string} baseSha the sha that the base of the reference points to + * @param {boolean} duplicate whether there is an existing branch or not + * @returns {Promise} + */ +async function createBranch(octokit, remote, name, baseSha, duplicate) { + if (!duplicate) { + const refData = (await octokit.request('POST /repos/{owner}/{repo}/branches', { + owner: remote.owner, + repo: remote.repo, + new_branch_name: name, + old_ref_name: baseSha, + })).data; + logger_1.logger.info(`Successfully created branch at ${refData.commit.url}`); + } + else { + logger_1.logger.info('Skipping branch creation step...'); + } +} +/** + * Create a GitHub branch given a remote origin. + * Throws an exception if octokit fails, or if the base branch is invalid + * @param {Octokit} octokit The authenticated octokit instance + * @param {RepoDomain} origin The domain information of the remote origin repository + * @param {RepoDomain} upstream The domain information of the remote upstream repository + * @param {string} name The branch name to create on the origin repository + * @param {string} baseBranch the name of the branch to base the new branch off of. Default is main + * @returns {Promise} the base SHA for subsequent commits to be based off for the origin branch + */ +async function branch(octokit, origin, upstream, name, baseBranch = DEFAULT_PRIMARY_BRANCH) { + // create branch from primary branch HEAD SHA + try { + const baseSha = await getBranchHead(octokit, upstream, baseBranch); + const duplicate = await existsBranchWithName(octokit, origin, name); + await createBranch(octokit, origin, name, baseSha, duplicate); + return baseSha; + } + catch (err) { + logger_1.logger.error('Error when creating branch'); + throw err; + } +} //# sourceMappingURL=branch.js.map /***/ }), @@ -8256,171 +8253,227 @@ exports.branch = branch; /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -/* - * // Copyright 2020 Google LLC - * // - * // Licensed under the Apache License, Version 2.0 (the "License"); - * // you may not use this file except in compliance with the License. - * // You may obtain a copy of the License at - * // - * // https://www.apache.org/licenses/LICENSE-2.0 - * // - * // Unless required by applicable law or agreed to in writing, software - * // distributed under the License is distributed on an "AS IS" BASIS, - * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * // See the License for the specific language governing permissions and - * // limitations under the License. 
- * // - * //Modifications made by Joaquin Santana on 18/11/24, 22:09 - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.commitAndPush = exports.updateRef = exports.createTree = exports.generateTreeObjects = void 0; -const logger_1 = __nccwpck_require__(44869); -const create_commit_1 = __nccwpck_require__(18746); -const errors_1 = __nccwpck_require__(98535); -const git = __nccwpck_require__(85114); -const DEFAULT_FILES_PER_COMMIT = 100; -/** - * Generate and return a GitHub tree object structure - * containing the target change data - * See https://developer.github.com/v3/git/trees/#tree-object - * @param {Changes} changes the set of repository changes - * @returns {TreeObject[]} The new GitHub changes - */ -function generateTreeObjects(changes) { - const tree = []; - changes.forEach((fileData, path) => { - if (fileData.content === null) { - // if no file content then file is deleted - tree.push({ - path, - mode: fileData.mode, - type: 'blob', - sha: null, - }); - } - else { - // update file with its content - tree.push({ - path, - mode: fileData.mode, - type: 'blob', - content: fileData.content, - }); - } - }); - return tree; -} -exports.generateTreeObjects = generateTreeObjects; -function* inGroupsOf(all, groupSize) { - for (let i = 0; i < all.length; i += groupSize) { - yield all.slice(i, i + groupSize); - } -} -/** - * Upload and create a remote GitHub tree - * and resolves with the new tree SHA. - * Rejects if GitHub V3 API fails with the GitHub error response - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} origin the the remote repository to push changes to - * @param {string} refHead the base of the new commit(s) - * @param {TreeObject[]} tree the set of GitHub changes to upload - * @param gitConfig - * @returns {Promise} the GitHub tree SHA - * @throws {CommitError} - */ -async function createTree(octokit, origin, refHead, tree, gitConfig) { - const oldTreeSha = (await octokit.git.getCommit({ - owner: origin.owner, - repo: origin.repo, - commit_sha: refHead, - })) // @ts-ignore - .data.commit.tree.sha; - logger_1.logger.info('Got the latest commit tree'); - try { - const oldTree = await git.readTree({ ...gitConfig, oid: oldTreeSha }); - const transformTree = await Promise.all(tree.map(async (value) => { - if (value.content) { - value.sha = await git.writeBlob({ - ...gitConfig, - blob: Buffer.from(value.content), - }); - } - const treeEntry = { - mode: value.mode, - path: value.path, - oid: value.sha || '', - type: value.type, - }; - return treeEntry; - })); - //add all the old tree entries to the new tree if the path is not already in the new tree - oldTree.tree.forEach(value => { - if (!transformTree.find(treeEntry => treeEntry.path === value.path)) { - transformTree.push(value); - } - }); - const treeSha = await git.writeTree({ ...gitConfig, tree: transformTree }); - logger_1.logger.info(`Successfully created a tree with the desired changes with SHA ${treeSha}`); - return treeSha; - } - catch (e) { - throw new errors_1.CommitError(`Error adding to tree: ${refHead}`, e); - } -} -exports.createTree = createTree; -/** - * Update a reference to a SHA - * Rejects if GitHub V3 API fails with the GitHub error response - * @param {BranchDomain} origin the the remote branch to push changes to - * @param {string} newSha the ref to update the commit HEAD to - * @param {boolean} force to force the commit changes given refHead - * @param gitConfig - * @returns {Promise} - */ -async function updateRef(origin, newSha, force, 
gitConfig) { - logger_1.logger.info(`Updating reference heads/${origin.branch} to ${newSha}`); - try { - await git.writeRef({ - ...gitConfig, - ref: `refs/heads/${origin.branch}`, - value: newSha, - force, - }); - logger_1.logger.info(`Successfully updated reference ${origin.branch} to ${newSha}`); - } - catch (e) { - throw new errors_1.CommitError(`Error updating ref heads/${origin.branch} to ${newSha}`, e); - } -} -exports.updateRef = updateRef; -/** - * Given a set of changes, apply the commit(s) on top of the given branch's head and upload it to GitHub - * Rejects if GitHub V3 API fails with the GitHub error response - * @param {Octokit} octokit The authenticated octokit instance - * @param {string} refHead the base of the new commit(s) - * @param {Changes} changes the set of repository changes - * @param originBranch - * @param {string} commitMessage the message of the new commit - * @param {boolean} force to force the commit changes given refHead - * @param options - * @returns {Promise} - * @throws {CommitError} - */ -async function commitAndPush(octokit, refHead, changes, originBranch, commitMessage, force, options) { - var _a; - const filesPerCommit = (_a = options === null || options === void 0 ? void 0 : options.filesPerCommit) !== null && _a !== void 0 ? _a : DEFAULT_FILES_PER_COMMIT; - const tree = generateTreeObjects(changes); - for (const treeGroup of inGroupsOf(tree, filesPerCommit)) { - const treeSha = await createTree(octokit, originBranch, refHead, treeGroup, options === null || options === void 0 ? void 0 : options.gitConfig); - refHead = await (0, create_commit_1.createCommit)(refHead, treeSha, commitMessage, options); - } - await updateRef(originBranch, refHead, force, options === null || options === void 0 ? void 0 : options.gitConfig); - await git.push({ ...options === null || options === void 0 ? void 0 : options.gitConfig, force: force }); - logger_1.logger.info('Pushed to remote repository successfully'); -} -exports.commitAndPush = commitAndPush; + +/* + * // Copyright 2020 Google LLC + * // + * // Licensed under the Apache License, Version 2.0 (the "License"); + * // you may not use this file except in compliance with the License. + * // You may obtain a copy of the License at + * // + * // https://www.apache.org/licenses/LICENSE-2.0 + * // + * // Unless required by applicable law or agreed to in writing, software + * // distributed under the License is distributed on an "AS IS" BASIS, + * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * // See the License for the specific language governing permissions and + * // limitations under the License. 
+ * //
+ * //Modifications made by Joaquin Santana on 26/09/25, 17:42
+ */
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.generateTreeObjects = generateTreeObjects;
+exports.createTree = createTree;
+exports.updateRef = updateRef;
+exports.commitAndPush = commitAndPush;
+const logger_1 = __nccwpck_require__(44869);
+const create_commit_1 = __nccwpck_require__(18746);
+const errors_1 = __nccwpck_require__(98535);
+const git = __nccwpck_require__(85114);
+const DEFAULT_FILES_PER_COMMIT = 100;
+function mode2type$1(mode) {
+    // prettier-ignore
+    switch (mode) {
+        case '040000': return 'tree';
+        case '100644': return 'blob';
+        case '100755': return 'blob';
+        case '120000': return 'blob';
+        case '160000': return 'commit';
+    }
+    throw new Error(`Unexpected GitTree entry mode: ${mode}`);
+}
+/**
+ * Generate and return a GitHub tree object structure
+ * containing the target change data
+ * See https://developer.github.com/v3/git/trees/#tree-object
+ * @param {Changes} changes the set of repository changes
+ * @returns {TreeObject[]} The new GitHub changes
+ */
+function generateTreeObjects(changes) {
+    const tree = [];
+    changes.forEach((fileData, path) => {
+        if (fileData.content === null) {
+            // if no file content then file is deleted
+            tree.push({
+                path,
+                mode: fileData.mode,
+                type: mode2type$1(fileData.mode),
+                sha: null,
+            });
+        }
+        else {
+            // update file with its content
+            tree.push({
+                path,
+                mode: fileData.mode,
+                type: mode2type$1(fileData.mode),
+                content: fileData.content,
+            });
+        }
+    });
+    return tree;
+}
+function* inGroupsOf(all, groupSize) {
+    for (let i = 0; i < all.length; i += groupSize) {
+        yield all.slice(i, i + groupSize);
+    }
+}
+/**
+ * Upload and create a remote GitHub tree
+ * and resolves with the new tree SHA.
+ * Rejects if GitHub V3 API fails with the GitHub error response
+ * @param {Octokit} octokit The authenticated octokit instance
+ * @param {RepoDomain} origin the remote repository to push changes to
+ * @param {string} refHead the base of the new commit(s)
+ * @param {TreeObject[]} tree the set of GitHub changes to upload
+ * @param gitConfig
+ * @returns {Promise} the GitHub tree SHA
+ * @throws {CommitError}
+ */
+async function createTree(octokit, origin, refHead, tree, gitConfig) {
+    try {
+        const commit = await octokit.git.getCommit({
+            owner: origin.owner,
+            repo: origin.repo,
+            commit_sha: refHead,
+        });
+        // @ts-ignore
+        const oldTreeSha = commit.data.commit.tree.sha;
+        logger_1.logger.info('Got the latest commit tree');
+        let currentTree = (await git.readTree({ ...gitConfig, oid: oldTreeSha }))
+            .tree;
+        for (const fileData of tree) {
+            const pathParts = fileData.path.split('/');
+            currentTree = await updateTreeRecursively(gitConfig, currentTree, pathParts, fileData);
+        }
+        const treeSha = await git.writeTree({ ...gitConfig, tree: currentTree });
+        logger_1.logger.info(`Successfully created a tree with the desired changes with SHA ${treeSha}`);
+        return treeSha;
+    }
+    catch (e) {
+        throw new errors_1.CommitError(`Error adding to tree: ${refHead}`, e);
+    }
+}
+/**
+ * Recursively rewrite the tree entries along a single file path: update or
+ * delete the leaf blob, then write each modified subtree back bottom-up so
+ * every level receives a new OID.
+ */
+async function updateTreeRecursively(gitConfig, existingTree, pathParts, fileData) {
+    const newTree = [...existingTree];
+    const part = pathParts[0];
+    const remainingParts = pathParts.slice(1);
+    const existingIndex = newTree.findIndex(entry => entry.path === part);
+    if (remainingParts.length === 0) {
+        // We are at the file/blob leaf
+        const blobOid = fileData.content === null
+            ? null // Deletion
+            : await git.writeBlob({
+                ...gitConfig,
+                blob: Buffer.from(fileData.content),
+            });
+        if (blobOid === null) {
+            // Remove if existing
+            if (existingIndex !== -1) {
+                newTree.splice(existingIndex, 1);
+            }
+        }
+        else {
+            const newEntry = {
+                mode: fileData.mode,
+                path: part,
+                oid: blobOid,
+                type: 'blob',
+            };
+            if (existingIndex !== -1) {
+                newTree[existingIndex] = newEntry;
+            }
+            else {
+                newTree.push(newEntry);
+            }
+        }
+    }
+    else {
+        // We are in a directory
+        let subTree = [];
+        if (existingIndex !== -1 && newTree[existingIndex].type === 'tree') {
+            const subTreeOid = newTree[existingIndex].oid;
+            subTree = (await git.readTree({ ...gitConfig, oid: subTreeOid })).tree;
+        }
+        const updatedSubTree = await updateTreeRecursively(gitConfig, subTree, remainingParts, fileData);
+        const newSubTreeOid = await git.writeTree({
+            ...gitConfig,
+            tree: updatedSubTree,
+        });
+        const newEntry = {
+            mode: '040000', // directory mode
+            path: part,
+            oid: newSubTreeOid,
+            type: 'tree',
+        };
+        if (existingIndex !== -1) {
+            newTree[existingIndex] = newEntry;
+        }
+        else {
+            newTree.push(newEntry);
+        }
+    }
+    return newTree;
+}
+/**
+ * Update a reference to a SHA
+ * Rejects if GitHub V3 API fails with the GitHub error response
+ * @param {BranchDomain} origin the remote branch to push changes to
+ * @param {string} newSha the ref to update the commit HEAD to
+ * @param {boolean} force to force the commit changes given refHead
+ * @param gitConfig
+ * @returns {Promise}
+ */
+async function updateRef(origin, newSha, force, gitConfig) {
+    logger_1.logger.info(`Updating reference heads/${origin.branch} to ${newSha}`);
+    try {
+        await git.writeRef({
+            ...gitConfig,
+            ref: `refs/heads/${origin.branch}`,
+            value: newSha,
+            force,
+        });
+        logger_1.logger.info(`Successfully updated reference ${origin.branch} to ${newSha}`);
+    }
+    catch (e) {
+        throw new errors_1.CommitError(`Error updating ref heads/${origin.branch} to ${newSha}`, e);
+    }
+}
+/**
+ * Given a set of changes, apply the commit(s) on top of the given branch's head and upload it to GitHub
+ * Rejects if GitHub V3 API fails with the GitHub error response
+ * @param {Octokit} octokit The authenticated octokit instance
+ * @param {string} refHead the base of the new commit(s)
+ * @param {Changes} changes the set of repository changes
+ * @param originBranch
+ * @param {string} commitMessage the message of the new commit
+ * @param {boolean} force to force the commit changes given refHead
+ * @param options
+ * @returns {Promise}
+ * @throws {CommitError}
+ */
+async function commitAndPush(octokit, refHead, changes, originBranch, commitMessage, force, options) {
+    var _a;
+    const filesPerCommit = (_a = options === null || options === void 0 ? void 0 : options.filesPerCommit) !== null && _a !== void 0 ? _a : DEFAULT_FILES_PER_COMMIT;
+    const tree = generateTreeObjects(changes);
+    for (const treeGroup of inGroupsOf(tree, filesPerCommit)) {
+        const treeSha = await createTree(octokit, originBranch, refHead, treeGroup, options === null || options === void 0 ? void 0 : options.gitConfig);
+        refHead = await (0, create_commit_1.createCommit)(refHead, treeSha, commitMessage, options);
+    }
+    await updateRef(originBranch, refHead, force, options === null || options === void 0 ? void 0 : options.gitConfig);
+    await git.push({ ...options === null || options === void 0 ? 
void 0 : options.gitConfig, force: force }); + logger_1.logger.info('Pushed to remote repository successfully'); +} //# sourceMappingURL=commit-and-push.js.map /***/ }), @@ -8429,74 +8482,73 @@ exports.commitAndPush = commitAndPush; /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -/* - * // Copyright 2020 Google LLC - * // - * // Licensed under the Apache License, Version 2.0 (the "License"); - * // you may not use this file except in compliance with the License. - * // You may obtain a copy of the License at - * // - * // https://www.apache.org/licenses/LICENSE-2.0 - * // - * // Unless required by applicable law or agreed to in writing, software - * // distributed under the License is distributed on an "AS IS" BASIS, - * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * // See the License for the specific language governing permissions and - * // limitations under the License. - * // - * //Modifications made by Joaquin Santana on 18/11/24, 22:09 - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.createCommit = void 0; -const logger_1 = __nccwpck_require__(44869); -const errors_1 = __nccwpck_require__(98535); -const git = __nccwpck_require__(85114); -/** - * Create a commit with a repo snapshot SHA on top of the reference HEAD - * and resolves with the SHA of the commit. - * Rejects if GitHub V3 API fails with the GitHub error response - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} origin the the remote repository to push changes to - * @param {string} refHead the base of the new commit(s) - * @param {string} treeSha the tree SHA that this commit will point to - * @param {string} message the message of the new commit - * @param options - * @returns {Promise} the new commit SHA - * @see https://docs.github.com/en/rest/git/commits?apiVersion=2022-11-28#create-a-commit - */ -async function createCommit(refHead, treeSha, message, options = {}) { - try { - const signature = options.signer - ? await options.signer.generateSignature({ - message, - tree: treeSha, - parents: [refHead], - author: options.author, - committer: options.committer, - }) - : undefined; - await git.fetch({ - ...options.gitConfig, - }); - logger_1.logger.info('fetched the latest changes from the remote repository'); - const sha = await git.commit({ - ...options.gitConfig, - message, - tree: treeSha, - parent: [refHead], - signingKey: signature, - author: options.author, - committer: options.committer, - }); - logger_1.logger.info(`Successfully created commit. See commit at ${sha}`); - return sha; - } - catch (e) { - throw new errors_1.CommitError(`Error creating commit for: ${treeSha}`, e); - } -} -exports.createCommit = createCommit; + +/* + * // Copyright 2020 Google LLC + * // + * // Licensed under the Apache License, Version 2.0 (the "License"); + * // you may not use this file except in compliance with the License. + * // You may obtain a copy of the License at + * // + * // https://www.apache.org/licenses/LICENSE-2.0 + * // + * // Unless required by applicable law or agreed to in writing, software + * // distributed under the License is distributed on an "AS IS" BASIS, + * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * // See the License for the specific language governing permissions and + * // limitations under the License. 
+ * //
+ * //Modifications made by Joaquin Santana on 18/11/24, 22:09
+ */
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createCommit = createCommit;
+const logger_1 = __nccwpck_require__(44869);
+const errors_1 = __nccwpck_require__(98535);
+const git = __nccwpck_require__(85114);
+/**
+ * Create a commit with a repo snapshot SHA on top of the reference HEAD
+ * and resolves with the SHA of the commit.
+ * Rejects if GitHub V3 API fails with the GitHub error response
+ * @param {string} refHead the base of the new commit(s)
+ * @param {string} treeSha the tree SHA that this commit will point to
+ * @param {string} message the message of the new commit
+ * @param options
+ * @returns {Promise} the new commit SHA
+ * @see https://docs.github.com/en/rest/git/commits?apiVersion=2022-11-28#create-a-commit
+ */
+async function createCommit(refHead, treeSha, message, options = {}) {
+    try {
+        const signature = options.signer
+            ? await options.signer.generateSignature({
+                message,
+                tree: treeSha,
+                parents: [refHead],
+                author: options.author,
+                committer: options.committer,
+            })
+            : undefined;
+        await git.fetch({
+            ...options.gitConfig,
+        });
+        logger_1.logger.info('fetched the latest changes from the remote repository');
+        const sha = await git.commit({
+            ...options.gitConfig,
+            message,
+            tree: treeSha,
+            parent: [refHead],
+            signingKey: signature,
+            author: options.author,
+            committer: options.committer,
+        });
+        logger_1.logger.info(`Successfully created commit. See commit at ${sha}`);
+        return sha;
+    }
+    catch (e) {
+        throw new errors_1.CommitError(`Error creating commit for: ${treeSha}`, e);
+    }
+}
//# sourceMappingURL=create-commit.js.map
/***/ }),
@@ -8505,53 +8557,52 @@ exports.createCommit = createCommit;
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
-
-// Copyright 2020 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.fork = void 0;
-const logger_1 = __nccwpck_require__(44869);
-/**
- * Fork the GitHub owner's repository.
- * Returns the fork owner and fork repo when the fork creation request to GitHub succeeds.
- * Otherwise throws error.
- *
- * If fork already exists no new fork is created, no error occurs, and the existing Fork data is returned
- * with the `updated_at` + any historical repo changes. 
- * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} upstream upstream repository information - * @returns {Promise} the forked repository name, as well as the owner of that fork - */ -async function fork(octokit, upstream) { - try { - const forkedRepo = (await octokit.repos.createFork({ - owner: upstream.owner, - repo: upstream.repo, - })).data; - const origin = { - repo: forkedRepo.name, - owner: forkedRepo.owner.login, - }; - logger_1.logger.info(`Create fork request was successful for ${origin.owner}/${origin.repo}`); - return origin; - } - catch (err) { - logger_1.logger.error('Error when forking'); - throw err; - } -} -exports.fork = fork; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.fork = fork; +const logger_1 = __nccwpck_require__(44869); +/** + * Fork the GitHub owner's repository. + * Returns the fork owner and fork repo when the fork creation request to GitHub succeeds. + * Otherwise throws error. + * + * If fork already exists no new fork is created, no error occurs, and the existing Fork data is returned + * with the `updated_at` + any historical repo changes. + * @param {Octokit} octokit The authenticated octokit instance + * @param {RepoDomain} upstream upstream repository information + * @returns {Promise} the forked repository name, as well as the owner of that fork + */ +async function fork(octokit, upstream) { + try { + const forkedRepo = (await octokit.repos.createFork({ + owner: upstream.owner, + repo: upstream.repo, + })).data; + const origin = { + repo: forkedRepo.name, + owner: forkedRepo.owner.login, + }; + logger_1.logger.info(`Create fork request was successful for ${origin.owner}/${origin.repo}`); + return origin; + } + catch (err) { + logger_1.logger.error('Error when forking'); + throw err; + } +} //# sourceMappingURL=fork.js.map /***/ }), @@ -8560,47 +8611,46 @@ exports.fork = fork; /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
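A minimal usage sketch for the labels module that follows, assuming an authenticated octokit client; the owners, repo name, PR number, and label values here are hypothetical, not taken from this patch:

const resultingLabels = await addLabels(
    octokit,                                            // authenticated client (assumed)
    { owner: 'upstream-org', repo: 'example-repo' },    // upstream RepoDomain
    { owner: 'fork-owner', repo: 'example-repo', branch: 'code-suggestions' }, // origin BranchDomain
    42,             // issue or pull request number (hypothetical)
    ['automerge']   // labels to add; [] or undefined no-ops and resolves with []
);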
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.addLabels = void 0;
-const logger_1 = __nccwpck_require__(44869);
-/**
- * Create a GitHub PR on the upstream organization's repo
- * Throws an error if the GitHub API fails
- * @param {Octokit} octokit The authenticated octokit instance
- * @param {RepoDomain} upstream The upstream repository
- * @param {BranchDomain} origin The remote origin information that contains the origin branch
- * @param {number} issue_number The issue number to add labels to. Can also be a PR number
- * @param {string[]} labels The list of labels to apply to the issue/pull request. Default is []. the funciton will no-op.
- * @returns {Promise} The list of resulting labels after the addition of the given labels
- */
-async function addLabels(octokit, upstream, origin, issue_number, labels) {
-    if (!labels || labels.length === 0) {
-        return [];
-    }
-    const labelsResponseData = (await octokit.issues.addLabels({
-        owner: upstream.owner,
-        repo: origin.repo,
-        issue_number: issue_number,
-        labels: labels,
-    })).data;
-    logger_1.logger.info(`Successfully added labels ${labels} to issue: ${issue_number}`);
-    return labelsResponseData.map(l => l.name);
-}
-exports.addLabels = addLabels;
+
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.addLabels = addLabels;
+const logger_1 = __nccwpck_require__(44869);
+/**
+ * Add labels to an issue or pull request on the upstream repository
+ * Throws an error if the GitHub API fails
+ * @param {Octokit} octokit The authenticated octokit instance
+ * @param {RepoDomain} upstream The upstream repository
+ * @param {BranchDomain} origin The remote origin information that contains the origin branch
+ * @param {number} issue_number The issue number to add labels to. Can also be a PR number
+ * @param {string[]} labels The list of labels to apply to the issue/pull request. Default is []; if empty, the function will no-op.
+ * @returns {Promise} The list of resulting labels after the addition of the given labels
+ */
+async function addLabels(octokit, upstream, origin, issue_number, labels) {
+    if (!labels || labels.length === 0) {
+        return [];
+    }
+    const labelsResponseData = (await octokit.issues.addLabels({
+        owner: upstream.owner,
+        repo: origin.repo,
+        issue_number: issue_number,
+        labels: labels,
+    })).data;
+    logger_1.logger.info(`Successfully added labels ${labels} to issue: ${issue_number}`);
+    return labelsResponseData.map(l => l.name);
+}
 //# sourceMappingURL=labels.js.map

/***/ }),

@@ -8609,66 +8659,65 @@ exports.addLabels = addLabels;
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";
-
-/*
- * // Copyright 2020 Google LLC
- * //
- * // Licensed under the Apache License, Version 2.0 (the "License");
- * // you may not use this file except in compliance with the License.
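+// Usage sketch for addLabels() above (illustrative; the issue number and the
+// label names are placeholders):
+//
+//   const labels = await addLabels(octokit, upstream, origin, 42, ['automerge']);
+//   // => all label names on the issue/PR after the addition
+//   // Passing undefined or [] skips the API call and resolves with [].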
- * // You may obtain a copy of the License at - * // - * // https://www.apache.org/licenses/LICENSE-2.0 - * // - * // Unless required by applicable law or agreed to in writing, software - * // distributed under the License is distributed on an "AS IS" BASIS, - * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * // See the License for the specific language governing permissions and - * // limitations under the License. - * // - * //Modifications made by Joaquin Santana on 18/11/24, 22:09 - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.openPullRequest = void 0; -const logger_1 = __nccwpck_require__(44869); -const DEFAULT_PRIMARY = 'main'; -/** - * Create a GitHub PR on the upstream organization's repo - * Throws an error if the GitHub API fails - * @param {Octokit} octokit The authenticated octokit instance - * @param {RepoDomain} upstream The upstream repository - * @param {BranchDomain} origin The remote origin information that contains the origin branch - * @param {Description} description The pull request title and detailed description - * @param {boolean} maintainersCanModify Whether or not maintainers can modify the pull request. Default is true - * @param {string} upstreamPrimary The upstream repository's primary branch. Default is main. - * @param draft Open a DRAFT pull request. Defaults to false. - * @returns {Promise} - */ -async function openPullRequest(octokit, upstream, origin, description, maintainersCanModify = true, upstreamPrimary = DEFAULT_PRIMARY, draft = false) { - const head = `${origin.owner}:${origin.branch}`; - const existingPullRequest = (await octokit.pulls.list({ - owner: upstream.owner, - repo: origin.repo, - head, - state: 'open', - })).data.find(pr => `${pr.head.repo.owner.login}:${pr.head.label}` === head); - if (existingPullRequest) { - logger_1.logger.info(`Found existing pull request for reference ${origin.owner}:${origin.branch}. Skipping creating a new pull request.`); - return existingPullRequest.number; - } - const pullResponseData = (await octokit.pulls.create({ - owner: upstream.owner, - repo: origin.repo, - title: description.title, - head: `${origin.owner}:${origin.branch}`, - base: upstreamPrimary, - body: description.body, - maintainer_can_modify: maintainersCanModify, - draft: draft, - })).data; - logger_1.logger.info(`Successfully opened pull request available at url: ${pullResponseData.url}.`); - return pullResponseData.number; -} -exports.openPullRequest = openPullRequest; + +/* + * // Copyright 2020 Google LLC + * // + * // Licensed under the Apache License, Version 2.0 (the "License"); + * // you may not use this file except in compliance with the License. + * // You may obtain a copy of the License at + * // + * // https://www.apache.org/licenses/LICENSE-2.0 + * // + * // Unless required by applicable law or agreed to in writing, software + * // distributed under the License is distributed on an "AS IS" BASIS, + * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * // See the License for the specific language governing permissions and + * // limitations under the License. 
+ * // + * //Modifications made by Joaquin Santana on 18/11/24, 22:09 + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.openPullRequest = openPullRequest; +const logger_1 = __nccwpck_require__(44869); +const DEFAULT_PRIMARY = 'main'; +/** + * Create a GitHub PR on the upstream organization's repo + * Throws an error if the GitHub API fails + * @param {Octokit} octokit The authenticated octokit instance + * @param {RepoDomain} upstream The upstream repository + * @param {BranchDomain} origin The remote origin information that contains the origin branch + * @param {Description} description The pull request title and detailed description + * @param {boolean} maintainersCanModify Whether or not maintainers can modify the pull request. Default is true + * @param {string} upstreamPrimary The upstream repository's primary branch. Default is main. + * @param draft Open a DRAFT pull request. Defaults to false. + * @returns {Promise} + */ +async function openPullRequest(octokit, upstream, origin, description, maintainersCanModify = true, upstreamPrimary = DEFAULT_PRIMARY, draft = false) { + const head = `${origin.owner}:${origin.branch}`; + const existingPullRequest = (await octokit.pulls.list({ + owner: upstream.owner, + repo: origin.repo, + head, + state: 'open', + })).data.find(pr => `${pr.head.repo.owner.login}:${pr.head.label}` === head); + if (existingPullRequest) { + logger_1.logger.info(`Found existing pull request for reference ${origin.owner}:${origin.branch}. Skipping creating a new pull request.`); + return existingPullRequest.number; + } + const pullResponseData = (await octokit.pulls.create({ + owner: upstream.owner, + repo: origin.repo, + title: description.title, + head: `${origin.owner}:${origin.branch}`, + base: upstreamPrimary, + body: description.body, + maintainer_can_modify: maintainersCanModify, + draft: draft, + })).data; + logger_1.logger.info(`Successfully opened pull request available at url: ${pullResponseData.url}.`); + return pullResponseData.number; +} //# sourceMappingURL=open-pull-request.js.map /***/ }), @@ -8677,231 +8726,230 @@ exports.openPullRequest = openPullRequest; /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getPullRequestHunks = exports.getCurrentPullRequestPatches = exports.createPullRequestReview = exports.makeInlineSuggestions = exports.buildReviewComments = exports.buildSummaryComment = void 0; -const logger_1 = __nccwpck_require__(44869); -const diff_utils_1 = __nccwpck_require__(85556); -const hunk_utils_1 = __nccwpck_require__(61689); -function hunkErrorMessage(hunk) { - return ` * lines ${hunk.oldStart}-${hunk.oldEnd}`; -} -function fileErrorMessage(filename, hunks) { - return `* ${filename}\n` + hunks.map(hunkErrorMessage).join('\n'); -} -/** - * Build an error message based on invalid hunks. 
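+// Usage sketch for openPullRequest() above (illustrative; every value is a
+// placeholder). If an open pull request for `origin.owner:origin.branch`
+// already exists, its number is returned instead of creating a duplicate:
+//
+//   const prNumber = await openPullRequest(
+//     octokit,
+//     {owner: 'upstream-owner', repo: 'some-repo'},                    // upstream
+//     {owner: 'fork-owner', repo: 'some-repo', branch: 'my-feature'},  // origin
+//     {title: 'fix: a bug', body: 'Detailed description...'}
+//   );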
- * Returns an empty string if the provided hunks are empty. - * @param invalidHunks a map of filename to hunks that are not suggestable - */ -function buildSummaryComment(invalidHunks) { - if (invalidHunks.size === 0) { - return ''; - } - return ('Some suggestions could not be made:\n' + - Array.from(invalidHunks, ([filename, hunks]) => fileErrorMessage(filename, hunks)).join('\n')); -} -exports.buildSummaryComment = buildSummaryComment; -const COMFORT_PREVIEW_HEADER = 'application/vnd.github.comfort-fade-preview+json'; -/** - * Convert the patch suggestions into GitHub parameter objects. - * Use this to generate review comments - * For information see: - * https://developer.github.com/v3/pulls/comments/#create-a-review-comment-for-a-pull-request - * @param suggestions - */ -function buildReviewComments(suggestions) { - const fileComments = []; - suggestions.forEach((hunks, fileName) => { - hunks.forEach(hunk => { - const newContent = hunk.newContent.join('\n'); - if (hunk.oldStart === hunk.oldEnd) { - const singleComment = { - path: fileName, - body: `\`\`\`suggestion\n${newContent}\n\`\`\``, - line: hunk.oldEnd, - side: 'RIGHT', - }; - fileComments.push(singleComment); - } - else { - const comment = { - path: fileName, - body: `\`\`\`suggestion\n${newContent}\n\`\`\``, - start_line: hunk.oldStart, - line: hunk.oldEnd, - side: 'RIGHT', - start_side: 'RIGHT', - }; - fileComments.push(comment); - } - }); - }); - return fileComments; -} -exports.buildReviewComments = buildReviewComments; -/** - * Make a request to GitHub to make review comments - * @param octokit an authenticated octokit instance - * @param suggestions code suggestions patches - * @param remote the repository domain - * @param pullNumber the pull request number to make a review on - */ -async function makeInlineSuggestions(octokit, suggestions, outOfScopeSuggestions, remote, pullNumber) { - const comments = buildReviewComments(suggestions); - if (!comments.length) { - logger_1.logger.info('No valid suggestions to make'); - } - if (!comments.length && !outOfScopeSuggestions.size) { - logger_1.logger.info('No suggestions were generated. 
Exiting...'); - return null; - } - const summaryComment = buildSummaryComment(outOfScopeSuggestions); - if (summaryComment) { - logger_1.logger.warn('Some suggestions could not be made'); - } - // apply the suggestions to the latest sha - // the latest Pull Request hunk range includes - // all previous commit valid hunk ranges - const headSha = (await octokit.pulls.get({ - owner: remote.owner, - repo: remote.repo, - pull_number: pullNumber, - })).data.head.sha; - const reviewNumber = (await octokit.pulls.createReview({ - owner: remote.owner, - repo: remote.repo, - pull_number: pullNumber, - commit_id: headSha, - event: 'COMMENT', - body: summaryComment, - headers: { accept: COMFORT_PREVIEW_HEADER }, - // Octokit type definitions doesn't support mulitiline comments, but the GitHub API does - comments: comments, - })).data.id; - logger_1.logger.info(`Successfully created a review on pull request: ${pullNumber}.`); - return reviewNumber; -} -exports.makeInlineSuggestions = makeInlineSuggestions; -/** - * Comment on a Pull Request - * @param {Octokit} octokit authenticated octokit isntance - * @param {RepoDomain} remote the Pull Request repository - * @param {number} pullNumber the Pull Request number - * @param {number} pageSize the number of files to comment on // TODO pagination - * @param {Map} diffContents the old and new contents of the files to suggest - * @returns the created review's id, or null if no review was made - */ -async function createPullRequestReview(octokit, remote, pullNumber, pageSize, diffContents) { - try { - // get the hunks from the pull request - const pullRequestHunks = await exports.getPullRequestHunks(octokit, remote, pullNumber, pageSize); - // get the hunks from the suggested change - const allSuggestedHunks = typeof diffContents === 'string' - ? (0, diff_utils_1.parseAllHunks)(diffContents) - : (0, hunk_utils_1.getRawSuggestionHunks)(diffContents); - // split hunks by commentable and uncommentable - const { validHunks, invalidHunks } = (0, hunk_utils_1.partitionSuggestedHunksByScope)(pullRequestHunks, allSuggestedHunks); - // create pull request review - const reviewNumber = await exports.makeInlineSuggestions(octokit, validHunks, invalidHunks, remote, pullNumber); - return reviewNumber; - } - catch (err) { - logger_1.logger.error('Failed to suggest'); - throw err; - } -} -exports.createPullRequestReview = createPullRequestReview; -/** - * For a pull request, get each remote file's patch text asynchronously - * Also get the list of files whose patch data could not be returned - * @param {Octokit} octokit the authenticated octokit instance - * @param {RepoDomain} remote the remote repository domain information - * @param {number} pullNumber the pull request number - * @param {number} pageSize the number of results to return per page - * @returns {Promise>} the stringified patch data for each file and the list of files whose patch data could not be resolved - */ -async function getCurrentPullRequestPatches(octokit, remote, pullNumber, pageSize) { - // TODO: support pagination - const filesMissingPatch = []; - const files = (await octokit.pulls.listFiles({ - owner: remote.owner, - repo: remote.repo, - pull_number: pullNumber, - per_page: pageSize, - })).data; - const patches = new Map(); - if (files.length === 0) { - logger_1.logger.error(`0 file results have returned from list files query for Pull Request #${pullNumber}. 
Cannot make suggestions on an empty Pull Request`); - throw Error('Empty Pull Request'); - } - files.forEach(file => { - if (file.patch === undefined) { - // files whose patch is too large do not return the patch text by default - // TODO handle file patches that are too large - logger_1.logger.warn(`File ${file.filename} may have a patch that is too large to display patch object.`); - filesMissingPatch.push(file.filename); - } - else { - patches.set(file.filename, file.patch); - } - }); - if (patches.size === 0) { - logger_1.logger.warn('0 patches have been returned. This could be because the patch results were too large to return.'); - } - return { patches, filesMissingPatch }; -} -exports.getCurrentPullRequestPatches = getCurrentPullRequestPatches; -/** - * For a pull request, get each remote file's current patch range to identify the scope of each patch as a Map. - * @param {Octokit} octokit the authenticated octokit instance - * @param {RepoDomain} remote the remote repository domain information - * @param {number} pullNumber the pull request number - * @param {number} pageSize the number of files to return per pull request list files query - * @returns {Promise>} the scope of each file in the pull request - */ -async function getPullRequestHunks(octokit, remote, pullNumber, pageSize) { - const files = (await octokit.pulls.listFiles({ - owner: remote.owner, - repo: remote.repo, - pull_number: pullNumber, - per_page: pageSize, - })).data; - const pullRequestHunks = new Map(); - if (files.length === 0) { - logger_1.logger.error(`0 file results have returned from list files query for Pull Request #${pullNumber}. Cannot make suggestions on an empty Pull Request`); - throw Error('Empty Pull Request'); - } - files.forEach(file => { - if (file.patch === undefined) { - // files whose patch is too large do not return the patch text by default - // TODO handle file patches that are too large - logger_1.logger.warn(`File ${file.filename} may have a patch that is too large to display patch object.`); - } - else { - const hunks = (0, diff_utils_1.parsePatch)(file.patch); - pullRequestHunks.set(file.filename, hunks); - } - }); - if (pullRequestHunks.size === 0) { - logger_1.logger.warn('0 patches have been returned. This could be because the patch results were too large to return.'); - } - return pullRequestHunks; -} -exports.getPullRequestHunks = getPullRequestHunks; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.buildSummaryComment = buildSummaryComment; +exports.buildReviewComments = buildReviewComments; +exports.makeInlineSuggestions = makeInlineSuggestions; +exports.createPullRequestReview = createPullRequestReview; +exports.getCurrentPullRequestPatches = getCurrentPullRequestPatches; +exports.getPullRequestHunks = getPullRequestHunks; +const logger_1 = __nccwpck_require__(44869); +const diff_utils_1 = __nccwpck_require__(85556); +const hunk_utils_1 = __nccwpck_require__(61689); +function hunkErrorMessage(hunk) { + return ` * lines ${hunk.oldStart}-${hunk.oldEnd}`; +} +function fileErrorMessage(filename, hunks) { + return `* ${filename}\n` + hunks.map(hunkErrorMessage).join('\n'); +} +/** + * Build an error message based on invalid hunks. + * Returns an empty string if the provided hunks are empty. + * @param invalidHunks a map of filename to hunks that are not suggestable + */ +function buildSummaryComment(invalidHunks) { + if (invalidHunks.size === 0) { + return ''; + } + return ('Some suggestions could not be made:\n' + + Array.from(invalidHunks, ([filename, hunks]) => fileErrorMessage(filename, hunks)).join('\n')); +} +const COMFORT_PREVIEW_HEADER = 'application/vnd.github.comfort-fade-preview+json'; +/** + * Convert the patch suggestions into GitHub parameter objects. + * Use this to generate review comments + * For information see: + * https://developer.github.com/v3/pulls/comments/#create-a-review-comment-for-a-pull-request + * @param suggestions + */ +function buildReviewComments(suggestions) { + const fileComments = []; + suggestions.forEach((hunks, fileName) => { + hunks.forEach(hunk => { + const newContent = hunk.newContent.join('\n'); + if (hunk.oldStart === hunk.oldEnd) { + const singleComment = { + path: fileName, + body: `\`\`\`suggestion\n${newContent}\n\`\`\``, + line: hunk.oldEnd, + side: 'RIGHT', + }; + fileComments.push(singleComment); + } + else { + const comment = { + path: fileName, + body: `\`\`\`suggestion\n${newContent}\n\`\`\``, + start_line: hunk.oldStart, + line: hunk.oldEnd, + side: 'RIGHT', + start_side: 'RIGHT', + }; + fileComments.push(comment); + } + }); + }); + return fileComments; +} +/** + * Make a request to GitHub to make review comments + * @param octokit an authenticated octokit instance + * @param suggestions code suggestions patches + * @param remote the repository domain + * @param pullNumber the pull request number to make a review on + */ +async function makeInlineSuggestions(octokit, suggestions, outOfScopeSuggestions, remote, pullNumber) { + const comments = buildReviewComments(suggestions); + if (!comments.length) { + logger_1.logger.info('No valid suggestions to make'); + } + if (!comments.length && !outOfScopeSuggestions.size) { + logger_1.logger.info('No suggestions were generated. 
Exiting...');
+        return null;
+    }
+    const summaryComment = buildSummaryComment(outOfScopeSuggestions);
+    if (summaryComment) {
+        logger_1.logger.warn('Some suggestions could not be made');
+    }
+    // apply the suggestions to the latest sha
+    // the latest Pull Request hunk range includes
+    // all previous commit valid hunk ranges
+    const headSha = (await octokit.pulls.get({
+        owner: remote.owner,
+        repo: remote.repo,
+        pull_number: pullNumber,
+    })).data.head.sha;
+    const reviewNumber = (await octokit.pulls.createReview({
+        owner: remote.owner,
+        repo: remote.repo,
+        pull_number: pullNumber,
+        commit_id: headSha,
+        event: 'COMMENT',
+        body: summaryComment,
+        headers: { accept: COMFORT_PREVIEW_HEADER },
+        // Octokit type definitions don't support multiline comments, but the GitHub API does
+        comments: comments,
+    })).data.id;
+    logger_1.logger.info(`Successfully created a review on pull request: ${pullNumber}.`);
+    return reviewNumber;
+}
+/**
+ * Comment on a Pull Request
+ * @param {Octokit} octokit authenticated octokit instance
+ * @param {RepoDomain} remote the Pull Request repository
+ * @param {number} pullNumber the Pull Request number
+ * @param {number} pageSize the number of files to comment on // TODO pagination
+ * @param {Map} diffContents the old and new contents of the files to suggest
+ * @returns the created review's id, or null if no review was made
+ */
+async function createPullRequestReview(octokit, remote, pullNumber, pageSize, diffContents) {
+    try {
+        // get the hunks from the pull request
+        const pullRequestHunks = await exports.getPullRequestHunks(octokit, remote, pullNumber, pageSize);
+        // get the hunks from the suggested change
+        const allSuggestedHunks = typeof diffContents === 'string'
+            ? (0, diff_utils_1.parseAllHunks)(diffContents)
+            : (0, hunk_utils_1.getRawSuggestionHunks)(diffContents);
+        // split hunks by commentable and uncommentable
+        const { validHunks, invalidHunks } = (0, hunk_utils_1.partitionSuggestedHunksByScope)(pullRequestHunks, allSuggestedHunks);
+        // create pull request review
+        const reviewNumber = await exports.makeInlineSuggestions(octokit, validHunks, invalidHunks, remote, pullNumber);
+        return reviewNumber;
+    }
+    catch (err) {
+        logger_1.logger.error('Failed to suggest');
+        throw err;
+    }
+}
+/**
+ * For a pull request, get each remote file's patch text asynchronously
+ * Also get the list of files whose patch data could not be returned
+ * @param {Octokit} octokit the authenticated octokit instance
+ * @param {RepoDomain} remote the remote repository domain information
+ * @param {number} pullNumber the pull request number
+ * @param {number} pageSize the number of results to return per page
+ * @returns {Promise} the stringified patch data for each file and the list of files whose patch data could not be resolved
+ */
+async function getCurrentPullRequestPatches(octokit, remote, pullNumber, pageSize) {
+    // TODO: support pagination
+    const filesMissingPatch = [];
+    const files = (await octokit.pulls.listFiles({
+        owner: remote.owner,
+        repo: remote.repo,
+        pull_number: pullNumber,
+        per_page: pageSize,
+    })).data;
+    const patches = new Map();
+    if (files.length === 0) {
+        logger_1.logger.error(`0 file results have returned from list files query for Pull Request #${pullNumber}. 
Cannot make suggestions on an empty Pull Request`);
+        throw Error('Empty Pull Request');
+    }
+    files.forEach(file => {
+        if (file.patch === undefined) {
+            // files whose patch is too large do not return the patch text by default
+            // TODO handle file patches that are too large
+            logger_1.logger.warn(`File ${file.filename} may have a patch that is too large to display patch object.`);
+            filesMissingPatch.push(file.filename);
+        }
+        else {
+            patches.set(file.filename, file.patch);
+        }
+    });
+    if (patches.size === 0) {
+        logger_1.logger.warn('0 patches have been returned. This could be because the patch results were too large to return.');
+    }
+    return { patches, filesMissingPatch };
+}
+/**
+ * For a pull request, get each remote file's current patch range to identify the scope of each patch as a Map.
+ * @param {Octokit} octokit the authenticated octokit instance
+ * @param {RepoDomain} remote the remote repository domain information
+ * @param {number} pullNumber the pull request number
+ * @param {number} pageSize the number of files to return per pull request list files query
+ * @returns {Promise} the scope of each file in the pull request
+ */
+async function getPullRequestHunks(octokit, remote, pullNumber, pageSize) {
+    const files = (await octokit.pulls.listFiles({
+        owner: remote.owner,
+        repo: remote.repo,
+        pull_number: pullNumber,
+        per_page: pageSize,
+    })).data;
+    const pullRequestHunks = new Map();
+    if (files.length === 0) {
+        logger_1.logger.error(`0 file results have returned from list files query for Pull Request #${pullNumber}. Cannot make suggestions on an empty Pull Request`);
+        throw Error('Empty Pull Request');
+    }
+    files.forEach(file => {
+        if (file.patch === undefined) {
+            // files whose patch is too large do not return the patch text by default
+            // TODO handle file patches that are too large
+            logger_1.logger.warn(`File ${file.filename} may have a patch that is too large to display patch object.`);
+        }
+        else {
+            const hunks = (0, diff_utils_1.parsePatch)(file.patch);
+            pullRequestHunks.set(file.filename, hunks);
+        }
+    });
+    if (pullRequestHunks.size === 0) {
+        logger_1.logger.warn('0 patches have been returned. This could be because the patch results were too large to return.');
+    }
+    return pullRequestHunks;
+}
 //# sourceMappingURL=review-pull-request.js.map

/***/ }),

@@ -8910,218 +8958,218 @@ exports.getPullRequestHunks = getPullRequestHunks;
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";
-
-/*
- * // Copyright 2020 Google LLC
- * //
- * // Licensed under the Apache License, Version 2.0 (the "License");
- * // you may not use this file except in compliance with the License.
- * // You may obtain a copy of the License at
- * //
- * // https://www.apache.org/licenses/LICENSE-2.0
- * //
- * // Unless required by applicable law or agreed to in writing, software
- * // distributed under the License is distributed on an "AS IS" BASIS,
- * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * // See the License for the specific language governing permissions and
- * // limitations under the License.
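+// Worked example for buildReviewComments() in the module above (illustrative;
+// the file name and hunk values are placeholders). A hunk whose oldStart
+// equals oldEnd becomes a single-line comment; otherwise it becomes a
+// multiline range comment:
+//
+//   const suggestions = new Map([
+//     ['src/index.js', [{oldStart: 10, oldEnd: 12, newStart: 10, newEnd: 10,
+//                        newContent: ['const fixed = true;']}]],
+//   ]);
+//   buildReviewComments(suggestions);
+//   // => [{path: 'src/index.js', body: '```suggestion\nconst fixed = true;\n```',
+//   //      start_line: 10, line: 12, side: 'RIGHT', start_side: 'RIGHT'}]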
- * // - * //Modifications made by Joaquin Santana on 19/11/24, 14:40 - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.parseTextFiles = exports.createPullRequest = exports.reviewPullRequest = exports.CommitError = exports.getDiffString = exports.getChanges = void 0; -const types_1 = __nccwpck_require__(80721); -const logger_1 = __nccwpck_require__(44869); -const default_options_handler_1 = __nccwpck_require__(13265); -const retry = __nccwpck_require__(33415); -const web_1 = __nccwpck_require__(26672); -const git = __nccwpck_require__(85114); -const fs = __nccwpck_require__(60843); -const review_pull_request_1 = __nccwpck_require__(62476); -const branch_1 = __nccwpck_require__(42602); -const fork_1 = __nccwpck_require__(34248); -const commit_and_push_1 = __nccwpck_require__(12796); -const open_pull_request_1 = __nccwpck_require__(24703); -const labels_1 = __nccwpck_require__(26871); -const os = __nccwpck_require__(70612); -const path = __nccwpck_require__(49411); -var handle_git_dir_change_1 = __nccwpck_require__(74482); -Object.defineProperty(exports, "getChanges", ({ enumerable: true, get: function () { return handle_git_dir_change_1.getChanges; } })); -Object.defineProperty(exports, "getDiffString", ({ enumerable: true, get: function () { return handle_git_dir_change_1.getDiffString; } })); -var errors_1 = __nccwpck_require__(98535); -Object.defineProperty(exports, "CommitError", ({ enumerable: true, get: function () { return errors_1.CommitError; } })); -/** - * Given a set of suggestions, make all the multiline inline review comments on a given pull request given - * that they are in scope of the pull request. Outof scope suggestions are not made. - * - * In-scope suggestions are specifically: the suggestion for a file must correspond to a file in the remote pull request - * and the diff hunk computed for a file's contents must produce a range that is a subset of the pull request's files hunks. - * - * If a file is too large to load in the review, it is skipped in the suggestion phase. - * - * If changes are empty then the workflow will not run. - * Rethrows an HttpError if Octokit GitHub API returns an error. HttpError Octokit access_token and client_secret headers redact all sensitive information. - * @param octokit The authenticated octokit instance, instantiated with an access token having permissiong to create a fork on the target repository. - * @param diffContents A set of changes. The changes may be empty. - * @param options The configuration for interacting with GitHub provided by the user. - * @returns the created review's id number, or null if there are no changes to be made. - */ -async function reviewPullRequest(octokit, diffContents, options) { - (0, logger_1.setupLogger)(options.logger); - // if null undefined, or the empty map then no changes have been provided. - // Do not execute GitHub workflow - if (diffContents === null || - diffContents === undefined || - (typeof diffContents !== 'string' && diffContents.size === 0)) { - logger_1.logger.info('Empty changes provided. No suggestions to be made. 
Cancelling workflow.'); - return null; - } - const gitHubConfigs = (0, default_options_handler_1.addReviewCommentsDefaults)(options); - const remote = { - owner: gitHubConfigs.owner, - repo: gitHubConfigs.repo, - }; - const reviewNumber = await (0, review_pull_request_1.createPullRequestReview)(octokit, remote, gitHubConfigs.pullNumber, gitHubConfigs.pageSize, diffContents); - return reviewNumber; -} -exports.reviewPullRequest = reviewPullRequest; -/** - * Make a new GitHub Pull Request with a set of changes applied on top of primary branch HEAD. - * The changes are committed into a new branch based on the upstream repository options using the authenticated Octokit account. - * Then a Pull Request is made from that branch. - * - * Also throws error if git data from the fork is not ready in 5 minutes. - * - * From the docs - * https://developer.github.com/v3/repos/forks/#create-a-fork - * """ - * Forking a Repository happens asynchronously. - * You may have to wait a short period of time before you can access the git objects. - * If this takes longer than 5 minutes, be sure to contact GitHub Support or GitHub Premium Support. - * """ - * - * If changes are empty then the workflow will not run. - * Rethrows an HttpError if Octokit GitHub API returns an error. HttpError Octokit access_token and client_secret headers redact all sensitive information. - * @param {Octokit} octokit The authenticated octokit instance, instantiated with an access token having permissiong to create a fork on the target repository - * @param {Changes | null | undefined} changes A set of changes. The changes may be empty - * @param {CreatePullRequestUserOptions} options The configuration for interacting with GitHub provided by the user. - * @returns {Promise} the pull request number. Returns 0 if unsuccessful. - * @throws {CommitError} on failure during commit process - */ -async function createPullRequest(octokit, changes, options) { - (0, logger_1.setupLogger)(options.logger); - // if null undefined, or the empty map then no changes have been provided. - // Do not execute GitHub workflow - if (changes === null || changes === undefined || changes.size === 0) { - logger_1.logger.info('Empty change set provided. No changes need to be made. Cancelling workflow.'); - return 0; - } - const gitHubConfigs = (0, default_options_handler_1.addPullRequestDefaults)(options); - logger_1.logger.info('Starting GitHub PR workflow...'); - const upstream = { - owner: gitHubConfigs.upstreamOwner, - repo: gitHubConfigs.upstreamRepo, - }; - const origin = options.fork === false ? upstream : await (0, fork_1.fork)(octokit, upstream); - if (options.fork) { - // try to sync the fork - await retry(async () => await octokit.repos.mergeUpstream({ - owner: origin.owner, - repo: origin.repo, - branch: gitHubConfigs.primary, - }), { - retries: options.retry, - factor: 2.8411, - minTimeout: 3000, - randomize: false, - onRetry: (e, attempt) => { - e.message = `Error creating syncing upstream: ${e.message}`; - logger_1.logger.error(e); - logger_1.logger.info(`Retry attempt #${attempt}...`); - }, - }); - } - const originBranch = { - ...origin, - branch: gitHubConfigs.branch, - }; - // The `retry` flag defaults to `5` to maintain compatibility - options.retry = options.retry === undefined ? 
5 : options.retry; - const refHeadSha = await retry(async () => await (0, branch_1.branch)(octokit, origin, upstream, originBranch.branch, gitHubConfigs.primary), { - retries: options.retry, - factor: 2.8411, - minTimeout: 3000, - randomize: false, - onRetry: (e, attempt) => { - e.message = `Error creating Pull Request: ${e.message}`; - logger_1.logger.error(e); - logger_1.logger.info(`Retry attempt #${attempt}...`); - }, - }); - const tempDirectory = await fs.mkdtemp(path.join(os.tmpdir(), 'git-')); - logger_1.logger.info(`Cloning repository to ${tempDirectory}`); - const gitConfig = { - fs, - http: web_1.default, - dir: tempDirectory, - onAuth: () => ({ - username: options.username, - password: options.password, - }), - }; - const octokitBaseUrl = octokit.request.endpoint.DEFAULTS.baseUrl; - const baseUrl = octokitBaseUrl.substring(0, octokitBaseUrl.indexOf('/api')); - const url = `${baseUrl}/${origin.owner}/${origin.repo}.git`; - await git.clone({ - ...gitConfig, - url: url, - ref: originBranch.branch, - }); - logger_1.logger.info(`repository cloned to branch ${originBranch.branch}`); - options = options !== null && options !== void 0 ? options : {}; - options.gitConfig = gitConfig; - await (0, commit_and_push_1.commitAndPush)(octokit, refHeadSha, changes, originBranch, gitHubConfigs.message, gitHubConfigs.force, options); - const description = { - body: gitHubConfigs.description, - title: gitHubConfigs.title, - }; - const prNumber = await (0, open_pull_request_1.openPullRequest)(octokit, upstream, originBranch, description, gitHubConfigs.maintainersCanModify, gitHubConfigs.primary, options.draft); - logger_1.logger.info(`Successfully opened pull request: ${prNumber}.`); - // addLabels will no-op if options.labels is undefined or empty. - await (0, labels_1.addLabels)(octokit, upstream, originBranch, prNumber, options.labels); - fs.rm(tempDirectory, { recursive: true, force: true }).catch(e => { }); - return prNumber; -} -exports.createPullRequest = createPullRequest; -/** - * Convert a Map or {[path: string]: string}, where the key is the relative file path in the repository, - * and the value is the text content. The files will be converted to a Map also containing the file mode information '100644' - * @param {Object | Map} textFiles a map/object where the key is the relative file path and the value is the text file content - * @returns {Changes} Map of the file path to the string file content and the file mode '100644' - */ -function parseTextFiles(textFiles) { - const changes = new Map(); - if (textFiles instanceof Map) { - textFiles.forEach((content, path) => { - if (typeof path !== 'string' || - (content !== null && typeof content !== 'string')) { - throw TypeError('The file changeset provided must have a string key and a string/null value'); - } - changes.set(path, new types_1.FileData(content)); - }); - } - else { - for (const [path, content] of Object.entries(textFiles)) { - if (typeof path !== 'string' || - (content !== null && typeof content !== 'string')) { - throw TypeError('The file changeset provided must have a string key and a string/null value'); - } - changes.set(path, new types_1.FileData(content)); - } - } - return changes; -} -exports.parseTextFiles = parseTextFiles; + +/* + * // Copyright 2020 Google LLC + * // + * // Licensed under the Apache License, Version 2.0 (the "License"); + * // you may not use this file except in compliance with the License. 
+ * // You may obtain a copy of the License at
+ * //
+ * // https://www.apache.org/licenses/LICENSE-2.0
+ * //
+ * // Unless required by applicable law or agreed to in writing, software
+ * // distributed under the License is distributed on an "AS IS" BASIS,
+ * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * // See the License for the specific language governing permissions and
+ * // limitations under the License.
+ * //
+ * //Modifications made by Joaquin Santana on 19/11/24, 14:40
+ */
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.CommitError = exports.getDiffString = exports.getChanges = void 0;
+exports.reviewPullRequest = reviewPullRequest;
+exports.createPullRequest = createPullRequest;
+exports.parseTextFiles = parseTextFiles;
+const types_1 = __nccwpck_require__(80721);
+const logger_1 = __nccwpck_require__(44869);
+const default_options_handler_1 = __nccwpck_require__(13265);
+const retry = __nccwpck_require__(33415);
+const web_1 = __nccwpck_require__(26672);
+const git = __nccwpck_require__(85114);
+const fs = __nccwpck_require__(60843);
+const review_pull_request_1 = __nccwpck_require__(62476);
+const branch_1 = __nccwpck_require__(42602);
+const fork_1 = __nccwpck_require__(34248);
+const commit_and_push_1 = __nccwpck_require__(12796);
+const open_pull_request_1 = __nccwpck_require__(24703);
+const labels_1 = __nccwpck_require__(26871);
+const os = __nccwpck_require__(70612);
+const path = __nccwpck_require__(49411);
+var handle_git_dir_change_1 = __nccwpck_require__(74482);
+Object.defineProperty(exports, "getChanges", ({ enumerable: true, get: function () { return handle_git_dir_change_1.getChanges; } }));
+Object.defineProperty(exports, "getDiffString", ({ enumerable: true, get: function () { return handle_git_dir_change_1.getDiffString; } }));
+var errors_1 = __nccwpck_require__(98535);
+Object.defineProperty(exports, "CommitError", ({ enumerable: true, get: function () { return errors_1.CommitError; } }));
+/**
+ * Given a set of suggestions, make all the multiline inline review comments on a given pull request that
+ * are in scope of the pull request. Out-of-scope suggestions are not made.
+ *
+ * In-scope suggestions are specifically: the suggestion for a file must correspond to a file in the remote pull request
+ * and the diff hunk computed for a file's contents must produce a range that is a subset of the pull request's files hunks.
+ *
+ * If a file is too large to load in the review, it is skipped in the suggestion phase.
+ *
+ * If changes are empty then the workflow will not run.
+ * Rethrows an HttpError if the Octokit GitHub API returns an error. The HttpError redacts all sensitive information, including the Octokit access_token and client_secret headers.
+ * @param octokit The authenticated octokit instance, instantiated with an access token having permission to create a fork on the target repository.
+ * @param diffContents A set of changes. The changes may be empty.
+ * @param options The configuration for interacting with GitHub provided by the user.
+ * @returns the created review's id number, or null if there are no changes to be made.
+ */
+async function reviewPullRequest(octokit, diffContents, options) {
+    (0, logger_1.setupLogger)(options.logger);
+    // if null, undefined, or an empty map, then no changes have been provided.
+    // Do not execute GitHub workflow
+    if (diffContents === null ||
+        diffContents === undefined ||
+        (typeof diffContents !== 'string' && diffContents.size === 0)) {
+        logger_1.logger.info('Empty changes provided. No suggestions to be made. Cancelling workflow.');
+        return null;
+    }
+    const gitHubConfigs = (0, default_options_handler_1.addReviewCommentsDefaults)(options);
+    const remote = {
+        owner: gitHubConfigs.owner,
+        repo: gitHubConfigs.repo,
+    };
+    const reviewNumber = await (0, review_pull_request_1.createPullRequestReview)(octokit, remote, gitHubConfigs.pullNumber, gitHubConfigs.pageSize, diffContents);
+    return reviewNumber;
+}
+/**
+ * Make a new GitHub Pull Request with a set of changes applied on top of primary branch HEAD.
+ * The changes are committed into a new branch based on the upstream repository options using the authenticated Octokit account.
+ * Then a Pull Request is made from that branch.
+ *
+ * Also throws an error if git data from the fork is not ready within 5 minutes.
+ *
+ * From the docs
+ * https://developer.github.com/v3/repos/forks/#create-a-fork
+ * """
+ * Forking a Repository happens asynchronously.
+ * You may have to wait a short period of time before you can access the git objects.
+ * If this takes longer than 5 minutes, be sure to contact GitHub Support or GitHub Premium Support.
+ * """
+ *
+ * If changes are empty then the workflow will not run.
+ * Rethrows an HttpError if the Octokit GitHub API returns an error. The HttpError redacts all sensitive information, including the Octokit access_token and client_secret headers.
+ * @param {Octokit} octokit The authenticated octokit instance, instantiated with an access token having permission to create a fork on the target repository
+ * @param {Changes | null | undefined} changes A set of changes. The changes may be empty
+ * @param {CreatePullRequestUserOptions} options The configuration for interacting with GitHub provided by the user.
+ * @returns {Promise} the pull request number. Returns 0 if unsuccessful.
+ * @throws {CommitError} on failure during commit process
+ */
+async function createPullRequest(octokit, changes, options) {
+    (0, logger_1.setupLogger)(options.logger);
+    // if null, undefined, or an empty map, then no changes have been provided.
+    // Do not execute GitHub workflow
+    if (changes === null || changes === undefined || changes.size === 0) {
+        logger_1.logger.info('Empty change set provided. No changes need to be made. Cancelling workflow.');
+        return 0;
+    }
+    const gitHubConfigs = (0, default_options_handler_1.addPullRequestDefaults)(options);
+    logger_1.logger.info('Starting GitHub PR workflow...');
+    const upstream = {
+        owner: gitHubConfigs.upstreamOwner,
+        repo: gitHubConfigs.upstreamRepo,
+    };
+    const origin = options.fork === false ? upstream : await (0, fork_1.fork)(octokit, upstream);
+    if (options.fork) {
+        // try to sync the fork
+        await retry(async () => await octokit.repos.mergeUpstream({
+            owner: origin.owner,
+            repo: origin.repo,
+            branch: gitHubConfigs.primary,
+        }), {
+            retries: options.retry,
+            factor: 2.8411, // https://www.wolframalpha.com/input/?i=Sum%5B3000*x%5Ek%2C+%7Bk%2C+0%2C+4%7D%5D+%3D+5+*+60+*+1000
+            minTimeout: 3000,
+            randomize: false,
+            onRetry: (e, attempt) => {
+                e.message = `Error syncing upstream: ${e.message}`;
+                logger_1.logger.error(e);
+                logger_1.logger.info(`Retry attempt #${attempt}...`);
+            },
+        });
+    }
+    const originBranch = {
+        ...origin,
+        branch: gitHubConfigs.branch,
+    };
+    // The `retry` flag defaults to `5` to maintain compatibility
+    options.retry = options.retry === undefined ? 5 : options.retry;
+    const refHeadSha = await retry(async () => await (0, branch_1.branch)(octokit, origin, upstream, originBranch.branch, gitHubConfigs.primary), {
+        retries: options.retry,
+        factor: 2.8411, // https://www.wolframalpha.com/input/?i=Sum%5B3000*x%5Ek%2C+%7Bk%2C+0%2C+4%7D%5D+%3D+5+*+60+*+1000
+        minTimeout: 3000,
+        randomize: false,
+        onRetry: (e, attempt) => {
+            e.message = `Error creating Pull Request: ${e.message}`;
+            logger_1.logger.error(e);
+            logger_1.logger.info(`Retry attempt #${attempt}...`);
+        },
+    });
+    const tempDirectory = await fs.mkdtemp(path.join(os.tmpdir(), 'git-'));
+    logger_1.logger.info(`Cloning repository to ${tempDirectory}`);
+    const gitConfig = {
+        fs,
+        http: web_1.default,
+        dir: tempDirectory,
+        onAuth: () => ({
+            username: options.username,
+            password: options.password,
+        }),
+    };
+    const octokitBaseUrl = octokit.request.endpoint.DEFAULTS.baseUrl;
+    const baseUrl = octokitBaseUrl.substring(0, octokitBaseUrl.indexOf('/api'));
+    const url = `${baseUrl}/${origin.owner}/${origin.repo}.git`;
+    await git.clone({
+        ...gitConfig,
+        url: url,
+        ref: originBranch.branch,
+    });
+    logger_1.logger.info(`repository cloned to branch ${originBranch.branch}`);
+    options = options !== null && options !== void 0 ? options : {};
+    options.gitConfig = gitConfig;
+    await (0, commit_and_push_1.commitAndPush)(octokit, refHeadSha, changes, originBranch, gitHubConfigs.message, gitHubConfigs.force, options);
+    const description = {
+        body: gitHubConfigs.description,
+        title: gitHubConfigs.title,
+    };
+    const prNumber = await (0, open_pull_request_1.openPullRequest)(octokit, upstream, originBranch, description, gitHubConfigs.maintainersCanModify, gitHubConfigs.primary, options.draft);
+    logger_1.logger.info(`Successfully opened pull request: ${prNumber}.`);
+    // addLabels will no-op if options.labels is undefined or empty.
+    await (0, labels_1.addLabels)(octokit, upstream, originBranch, prNumber, options.labels);
+    fs.rm(tempDirectory, { recursive: true, force: true }).catch(e => { });
+    return prNumber;
+}
+/**
+ * Convert a Map or {[path: string]: string} in which the key is the relative file path in the repository
+ * and the value is the text content.
The files will be converted to a Map also containing the file mode information '100644' + * @param {Object | Map} textFiles a map/object where the key is the relative file path and the value is the text file content + * @returns {Changes} Map of the file path to the string file content and the file mode '100644' + */ +function parseTextFiles(textFiles) { + const changes = new Map(); + if (textFiles instanceof Map) { + textFiles.forEach((content, path) => { + if (typeof path !== 'string' || + (content !== null && typeof content !== 'string')) { + throw TypeError('The file changeset provided must have a string key and a string/null value'); + } + changes.set(path, new types_1.FileData(content)); + }); + } + else { + for (const [path, content] of Object.entries(textFiles)) { + if (typeof path !== 'string' || + (content !== null && typeof content !== 'string')) { + throw TypeError('The file changeset provided must have a string key and a string/null value'); + } + changes.set(path, new types_1.FileData(content)); + } + } + return changes; +} //# sourceMappingURL=index.js.map /***/ }), @@ -9130,42 +9178,42 @@ exports.parseTextFiles = parseTextFiles; /***/ ((__unused_webpack_module, exports) => { "use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.setupLogger = exports.logger = void 0; -class NullLogger { - constructor() { - this.error = () => { }; - this.warn = () => { }; - this.info = () => { }; - this.debug = () => { }; - this.trace = () => { }; - } -} -let logger = new NullLogger(); -exports.logger = logger; -function setupLogger(userLogger) { - if (userLogger) { - exports.logger = logger = userLogger; - } - else { - exports.logger = logger = new NullLogger(); - } -} -exports.setupLogger = setupLogger; + +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
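+// End-to-end sketch for the index module above (illustrative; the option names
+// mirror the fields read from addPullRequestDefaults() in the code above, and
+// every value here is a placeholder):
+//
+//   const changes = parseTextFiles({'README.md': '# Hello\n'});
+//   const prNumber = await createPullRequest(octokit, changes, {
+//     upstreamOwner: 'upstream-owner',
+//     upstreamRepo: 'some-repo',
+//     branch: 'generated-change',
+//     title: 'docs: update README',
+//     description: 'An automatically generated change.',
+//     message: 'docs: update README',
+//     username: 'bot-user',          // consumed by the git onAuth callback
+//     password: '<token>',
+//   });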
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.logger = void 0; +exports.setupLogger = setupLogger; +class NullLogger { + constructor() { + this.error = () => { }; + this.warn = () => { }; + this.info = () => { }; + this.debug = () => { }; + this.trace = () => { }; + } +} +let logger = new NullLogger(); +exports.logger = logger; +function setupLogger(userLogger) { + if (userLogger) { + exports.logger = logger = userLogger; + } + else { + exports.logger = logger = new NullLogger(); + } +} //# sourceMappingURL=logger.js.map /***/ }), @@ -9174,46 +9222,46 @@ exports.setupLogger = setupLogger; /***/ ((__unused_webpack_module, exports) => { "use strict"; - -/* - * // Copyright 2020 Google LLC - * // - * // Licensed under the Apache License, Version 2.0 (the "License"); - * // you may not use this file except in compliance with the License. - * // You may obtain a copy of the License at - * // - * // https://www.apache.org/licenses/LICENSE-2.0 - * // - * // Unless required by applicable law or agreed to in writing, software - * // distributed under the License is distributed on an "AS IS" BASIS, - * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * // See the License for the specific language governing permissions and - * // limitations under the License. - * // - * //Modifications made by Joaquin Santana on 18/11/24, 22:09 - */ -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.PatchSyntaxError = exports.FileData = void 0; -/** - * The content and the mode of a file. - * Default file mode is a text file which has code '100644'. - * If `content` is not null, then `content` must be the entire file content. - * See https://developer.github.com/v3/git/trees/#tree-object for details on mode. - */ -class FileData { - constructor(content, mode = '100644') { - this.mode = mode; - this.content = content; - } -} -exports.FileData = FileData; -class PatchSyntaxError extends Error { - constructor(message) { - super(message); - this.name = 'PatchSyntaxError'; - } -} -exports.PatchSyntaxError = PatchSyntaxError; + +/* + * // Copyright 2020 Google LLC + * // + * // Licensed under the Apache License, Version 2.0 (the "License"); + * // you may not use this file except in compliance with the License. + * // You may obtain a copy of the License at + * // + * // https://www.apache.org/licenses/LICENSE-2.0 + * // + * // Unless required by applicable law or agreed to in writing, software + * // distributed under the License is distributed on an "AS IS" BASIS, + * // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * // See the License for the specific language governing permissions and + * // limitations under the License. + * // + * //Modifications made by Joaquin Santana on 18/11/24, 22:09 + */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.PatchSyntaxError = exports.FileData = void 0; +/** + * The content and the mode of a file. + * Default file mode is a text file which has code '100644'. + * If `content` is not null, then `content` must be the entire file content. + * See https://developer.github.com/v3/git/trees/#tree-object for details on mode. 
+ */
+class FileData {
+    constructor(content, mode = '100644') {
+        this.mode = mode;
+        this.content = content;
+    }
+}
+exports.FileData = FileData;
+class PatchSyntaxError extends Error {
+    constructor(message) {
+        super(message);
+        this.name = 'PatchSyntaxError';
+    }
+}
+exports.PatchSyntaxError = PatchSyntaxError;
 //# sourceMappingURL=types.js.map

/***/ }),

@@ -9222,118 +9270,117 @@ exports.PatchSyntaxError = PatchSyntaxError;
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";
-
-// Copyright 2020 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getSuggestedHunks = exports.parseAllHunks = exports.parsePatch = void 0;
-const parseDiff = __nccwpck_require__(94833);
-const diff_1 = __nccwpck_require__(71672);
-// This header is ignored for calculating patch ranges, but is neccessary
-// for parsing a diff
+
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.parsePatch = parsePatch;
+exports.parseAllHunks = parseAllHunks;
+exports.getSuggestedHunks = getSuggestedHunks;
+const parseDiff = __nccwpck_require__(94833);
+const diff_1 = __nccwpck_require__(71672);
+// This header is ignored for calculating patch ranges, but is necessary
+// for parsing a diff
 const _DIFF_HEADER = `diff --git a/file.ext b/file.ext
index cac8fbc..87f387c 100644
--- a/file.ext
+++ b/file.ext
-`;
-/**
- * Given a patch expressed in GNU diff format, return the range of lines
- * from the original content that are changed.
- * @param diff Diff expressed in GNU diff format.
- * @returns Hunk[]
- */
-function parsePatch(patch) {
-    return parseAllHunks(_DIFF_HEADER + patch).get('file.ext') || [];
-}
-exports.parsePatch = parsePatch;
-/**
- * Given a diff expressed in GNU diff format, return the range of lines
- * from the original content that are changed.
- * @param diff Diff expressed in GNU diff format.
- * @returns Map
- */
-function parseAllHunks(diff) {
-    const hunksByFile = new Map();
-    parseDiff(diff).forEach(file => {
-        const filename = file.to ? 
file.to : file.from; - const chunks = file.chunks.map(chunk => { - let oldStart = chunk.oldStart; - let newStart = chunk.newStart; - let normalLines = 0; - let changeSeen = false; - const newLines = []; - let previousLine = null; - let nextLine = null; - chunk.changes.forEach(change => { - // strip off leading '+', '-', or ' ' and trailing carriage return - const content = change.content.substring(1).replace(/[\n\r]+$/g, ''); - if (change.type === 'normal') { - normalLines++; - if (changeSeen) { - if (nextLine === null) { - nextLine = content; - } - } - else { - previousLine = content; - } - } - else { - if (change.type === 'add') { - // strip off leading '+' and trailing carriage return - newLines.push(content); - } - if (!changeSeen) { - oldStart += normalLines; - newStart += normalLines; - changeSeen = true; - } - } - }); - const newEnd = newStart + chunk.newLines - normalLines - 1; - const oldEnd = oldStart + chunk.oldLines - normalLines - 1; - let hunk = { - oldStart: oldStart, - oldEnd: oldEnd, - newStart: newStart, - newEnd: newEnd, - newContent: newLines, - }; - if (previousLine) { - hunk = { ...hunk, previousLine: previousLine }; - } - if (nextLine) { - hunk = { ...hunk, nextLine: nextLine }; - } - return hunk; - }); - hunksByFile.set(filename, chunks); - }); - return hunksByFile; -} -exports.parseAllHunks = parseAllHunks; -/** - * Given two texts, return the range of lines that are changed. - * @param oldContent The original content. - * @param newContent The new content. - * @returns Hunk[] - */ -function getSuggestedHunks(oldContent, newContent) { - const diff = (0, diff_1.createPatch)('unused', oldContent, newContent); - return parseAllHunks(diff).get('unused') || []; -} -exports.getSuggestedHunks = getSuggestedHunks; +`; +/** + * Given a patch expressed in GNU diff format, return the range of lines + * from the original content that are changed. + * @param diff Diff expressed in GNU diff format. + * @returns Hunk[] + */ +function parsePatch(patch) { + return parseAllHunks(_DIFF_HEADER + patch).get('file.ext') || []; +} +/** + * Given a diff expressed in GNU diff format, return the range of lines + * from the original content that are changed. + * @param diff Diff expressed in GNU diff format. + * @returns Map + */ +function parseAllHunks(diff) { + const hunksByFile = new Map(); + parseDiff(diff).forEach(file => { + const filename = file.to ? 
file.to : file.from; + const chunks = file.chunks.map(chunk => { + let oldStart = chunk.oldStart; + let newStart = chunk.newStart; + let normalLines = 0; + let changeSeen = false; + const newLines = []; + let previousLine = null; + let nextLine = null; + chunk.changes.forEach(change => { + // strip off leading '+', '-', or ' ' and trailing carriage return + const content = change.content.substring(1).replace(/[\n\r]+$/g, ''); + if (change.type === 'normal') { + normalLines++; + if (changeSeen) { + if (nextLine === null) { + nextLine = content; + } + } + else { + previousLine = content; + } + } + else { + if (change.type === 'add') { + // strip off leading '+' and trailing carriage return + newLines.push(content); + } + if (!changeSeen) { + oldStart += normalLines; + newStart += normalLines; + changeSeen = true; + } + } + }); + const newEnd = newStart + chunk.newLines - normalLines - 1; + const oldEnd = oldStart + chunk.oldLines - normalLines - 1; + let hunk = { + oldStart: oldStart, + oldEnd: oldEnd, + newStart: newStart, + newEnd: newEnd, + newContent: newLines, + }; + if (previousLine) { + hunk = { ...hunk, previousLine: previousLine }; + } + if (nextLine) { + hunk = { ...hunk, nextLine: nextLine }; + } + return hunk; + }); + hunksByFile.set(filename, chunks); + }); + return hunksByFile; +} +/** + * Given two texts, return the range of lines that are changed. + * @param oldContent The original content. + * @param newContent The new content. + * @returns Hunk[] + */ +function getSuggestedHunks(oldContent, newContent) { + const diff = (0, diff_1.createPatch)('unused', oldContent, newContent); + return parseAllHunks(diff).get('unused') || []; +} //# sourceMappingURL=diff-utils.js.map /***/ }), @@ -9342,155 +9389,154 @@ exports.getSuggestedHunks = getSuggestedHunks; /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; - -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.partitionSuggestedHunksByScope = exports.getRawSuggestionHunks = exports.adjustHunkDown = exports.adjustHunkUp = void 0; -const diff_utils_1 = __nccwpck_require__(85556); -const logger_1 = __nccwpck_require__(44869); -/** - * Shift a Hunk up one line so it starts one line earlier. - * @param {Hunk} hunk - * @returns {Hunk | null} the adjusted Hunk or null if there is no preceeding line. - */ -function adjustHunkUp(hunk) { - if (!hunk.previousLine) { - return null; - } - return { - oldStart: hunk.oldStart - 1, - oldEnd: hunk.oldEnd, - newStart: hunk.newStart - 1, - newEnd: hunk.newEnd, - newContent: [hunk.previousLine, ...hunk.newContent], - }; -} -exports.adjustHunkUp = adjustHunkUp; -/** - * Shift a Hunk up one line so it ends one line later. - * @param {Hunk} hunk - * @returns {Hunk | null} the adjusted Hunk or null if there is no following line. 
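// Illustrative sketch of getSuggestedHunks above: it diffs two whole strings
// via createPatch and reuses parseAllHunks, so the range arithmetic is the
// same (example values assumed):
//
//   getSuggestedHunks('a\nb\nc\n', 'a\nB\nc\n');
//   // => [{ oldStart: 2, oldEnd: 2, newStart: 2, newEnd: 2,
//   //       newContent: ['B'], previousLine: 'a', nextLine: 'c' }]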
- */ -function adjustHunkDown(hunk) { - if (!hunk.nextLine) { - return null; - } - return { - oldStart: hunk.oldStart, - oldEnd: hunk.oldEnd + 1, - newStart: hunk.newStart, - newEnd: hunk.newEnd + 1, - newContent: hunk.newContent.concat(hunk.nextLine), - }; -} -exports.adjustHunkDown = adjustHunkDown; -/** - * Given a map where the key is the file name and the value is the - * old content and new content of the file - * compute the hunk for each file whose old and new contents differ. - * Do not compute the hunk if the old content is the same as the new content. - * The hunk list is sorted and each interval is disjoint. - * @param {Map} diffContents a map of the original file contents and the new file contents - * @returns the hunks for each file whose old and new contents differ - */ -function getRawSuggestionHunks(diffContents) { - const fileHunks = new Map(); - diffContents.forEach((fileDiffContent, fileName) => { - // if identical don't calculate the hunk and continue in the loop - if (fileDiffContent.oldContent === fileDiffContent.newContent) { - return; - } - const hunks = (0, diff_utils_1.getSuggestedHunks)(fileDiffContent.oldContent, fileDiffContent.newContent); - fileHunks.set(fileName, hunks); - }); - logger_1.logger.info('Parsed ranges of old and new patch'); - return fileHunks; -} -exports.getRawSuggestionHunks = getRawSuggestionHunks; -function hunkOverlaps(validHunk, suggestedHunk) { - return (suggestedHunk.oldStart >= validHunk.newStart && - suggestedHunk.oldEnd <= validHunk.newEnd); -} -function partitionFileHunks(pullRequestHunks, suggestedHunks) { - // check ranges: the entirety of the old range of the suggested - // hunk must fit inside the new range of the valid Hunks - let i = 0; - let candidateHunk = pullRequestHunks[i]; - const validFileHunks = []; - const invalidFileHunks = []; - suggestedHunks.forEach(suggestedHunk => { - while (candidateHunk && suggestedHunk.oldStart > candidateHunk.newEnd) { - i++; - candidateHunk = pullRequestHunks[i]; - } - if (!candidateHunk) { - invalidFileHunks.push(suggestedHunk); - return; - } - // if deletion only or addition only - if (suggestedHunk.newEnd < suggestedHunk.newStart || - suggestedHunk.oldEnd < suggestedHunk.oldStart) { - // try using previous line - let adjustedHunk = adjustHunkUp(suggestedHunk); - if (adjustedHunk && hunkOverlaps(candidateHunk, adjustedHunk)) { - validFileHunks.push(adjustedHunk); - return; - } - // try using next line - adjustedHunk = adjustHunkDown(suggestedHunk); - if (adjustedHunk && hunkOverlaps(candidateHunk, adjustedHunk)) { - validFileHunks.push(adjustedHunk); - return; - } - } - else if (hunkOverlaps(candidateHunk, suggestedHunk)) { - validFileHunks.push(suggestedHunk); - return; - } - invalidFileHunks.push(suggestedHunk); - }); - return { validFileHunks, invalidFileHunks }; -} -/** - * Split suggested hunks into commentable and non-commentable hunks. Compares the new line ranges - * from pullRequestHunks against the old line ranges from allSuggestedHunks. - * @param pullRequestHunks {Map} The parsed hunks from that represents the valid lines to comment. - * @param allSuggestedHunks {Map} The hunks that represent suggested changes. 
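// Illustrative sketch of the adjusters above: a pure insertion hunk
// (oldEnd < oldStart) cannot be commented on directly, so it is widened
// onto a neighbouring real line (example values assumed):
//
//   adjustHunkUp({ oldStart: 5, oldEnd: 4, newStart: 5, newEnd: 5,
//                  newContent: ['added'], previousLine: 'ctx' });
//   // => { oldStart: 4, oldEnd: 4, newStart: 4, newEnd: 5,
//   //      newContent: ['ctx', 'added'] }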
- * @returns {PartitionedHunks} split hunks
- */
-function partitionSuggestedHunksByScope(pullRequestHunks, allSuggestedHunks) {
-    const validHunks = new Map();
-    const invalidHunks = new Map();
-    allSuggestedHunks.forEach((suggestedHunks, filename) => {
-        const pullRequestFileHunks = pullRequestHunks.get(filename);
-        if (!pullRequestFileHunks) {
-            // file is not the original PR
-            invalidHunks.set(filename, suggestedHunks);
-            return;
-        }
-        const { validFileHunks, invalidFileHunks } = partitionFileHunks(pullRequestFileHunks, suggestedHunks);
-        if (validFileHunks.length > 0) {
-            validHunks.set(filename, validFileHunks);
-        }
-        if (invalidFileHunks.length > 0) {
-            invalidHunks.set(filename, invalidFileHunks);
-        }
-    });
-    return { validHunks, invalidHunks };
-}
-exports.partitionSuggestedHunksByScope = partitionSuggestedHunksByScope;
+
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.adjustHunkUp = adjustHunkUp;
+exports.adjustHunkDown = adjustHunkDown;
+exports.getRawSuggestionHunks = getRawSuggestionHunks;
+exports.partitionSuggestedHunksByScope = partitionSuggestedHunksByScope;
+const diff_utils_1 = __nccwpck_require__(85556);
+const logger_1 = __nccwpck_require__(44869);
+/**
+ * Shift a Hunk up one line so it starts one line earlier.
+ * @param {Hunk} hunk
+ * @returns {Hunk | null} the adjusted Hunk or null if there is no preceding line.
+ */
+function adjustHunkUp(hunk) {
+    if (!hunk.previousLine) {
+        return null;
+    }
+    return {
+        oldStart: hunk.oldStart - 1,
+        oldEnd: hunk.oldEnd,
+        newStart: hunk.newStart - 1,
+        newEnd: hunk.newEnd,
+        newContent: [hunk.previousLine, ...hunk.newContent],
+    };
+}
+/**
+ * Shift a Hunk down one line so it ends one line later.
+ * @param {Hunk} hunk
+ * @returns {Hunk | null} the adjusted Hunk or null if there is no following line.
+ */
+function adjustHunkDown(hunk) {
+    if (!hunk.nextLine) {
+        return null;
+    }
+    return {
+        oldStart: hunk.oldStart,
+        oldEnd: hunk.oldEnd + 1,
+        newStart: hunk.newStart,
+        newEnd: hunk.newEnd + 1,
+        newContent: hunk.newContent.concat(hunk.nextLine),
+    };
+}
+/**
+ * Given a map where the key is the file name and the value is the
+ * old content and new content of the file,
+ * compute the hunk for each file whose old and new contents differ.
+ * Do not compute the hunk if the old content is the same as the new content.
+ * The hunk list is sorted and each interval is disjoint.
+ * @param {Map} diffContents a map of the original file contents and the new file contents
+ * @returns the hunks for each file whose old and new contents differ
+ */
+function getRawSuggestionHunks(diffContents) {
+    const fileHunks = new Map();
+    diffContents.forEach((fileDiffContent, fileName) => {
+        // if identical don't calculate the hunk and continue in the loop
+        if (fileDiffContent.oldContent === fileDiffContent.newContent) {
+            return;
+        }
+        const hunks = (0, diff_utils_1.getSuggestedHunks)(fileDiffContent.oldContent, fileDiffContent.newContent);
+        fileHunks.set(fileName, hunks);
+    });
+    logger_1.logger.info('Parsed ranges of old and new patch');
+    return fileHunks;
+}
+function hunkOverlaps(validHunk, suggestedHunk) {
+    return (suggestedHunk.oldStart >= validHunk.newStart &&
+        suggestedHunk.oldEnd <= validHunk.newEnd);
+}
+function partitionFileHunks(pullRequestHunks, suggestedHunks) {
+    // check ranges: the entirety of the old range of the suggested
+    // hunk must fit inside the new range of the valid Hunks
+    let i = 0;
+    let candidateHunk = pullRequestHunks[i];
+    const validFileHunks = [];
+    const invalidFileHunks = [];
+    suggestedHunks.forEach(suggestedHunk => {
+        while (candidateHunk && suggestedHunk.oldStart > candidateHunk.newEnd) {
+            i++;
+            candidateHunk = pullRequestHunks[i];
+        }
+        if (!candidateHunk) {
+            invalidFileHunks.push(suggestedHunk);
+            return;
+        }
+        // if deletion only or addition only
+        if (suggestedHunk.newEnd < suggestedHunk.newStart ||
+            suggestedHunk.oldEnd < suggestedHunk.oldStart) {
+            // try using previous line
+            let adjustedHunk = adjustHunkUp(suggestedHunk);
+            if (adjustedHunk && hunkOverlaps(candidateHunk, adjustedHunk)) {
+                validFileHunks.push(adjustedHunk);
+                return;
+            }
+            // try using next line
+            adjustedHunk = adjustHunkDown(suggestedHunk);
+            if (adjustedHunk && hunkOverlaps(candidateHunk, adjustedHunk)) {
+                validFileHunks.push(adjustedHunk);
+                return;
+            }
+        }
+        else if (hunkOverlaps(candidateHunk, suggestedHunk)) {
+            validFileHunks.push(suggestedHunk);
+            return;
+        }
+        invalidFileHunks.push(suggestedHunk);
+    });
+    return { validFileHunks, invalidFileHunks };
+}
+/**
+ * Split suggested hunks into commentable and non-commentable hunks. Compares the new line ranges
+ * from pullRequestHunks against the old line ranges from allSuggestedHunks.
+ * @param pullRequestHunks {Map} The parsed hunks that represent the valid lines to comment.
+ * @param allSuggestedHunks {Map} The hunks that represent suggested changes.
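// Illustrative sketch of this partitioning (hunk values assumed, other Hunk
// fields trimmed for brevity): a suggested hunk is commentable only if its
// old range sits inside the new range of some hunk of the PR's own diff:
//
//   const prHunks = new Map([['a.txt', [{ newStart: 10, newEnd: 20 }]]]);
//   const suggested = new Map([['a.txt', [{ oldStart: 12, oldEnd: 13 }]],
//                              ['b.txt', [{ oldStart: 1, oldEnd: 1 }]]]);
//   partitionSuggestedHunksByScope(prHunks, suggested);
//   // => the a.txt hunk lands in validHunks (12..13 fits in 10..20);
//   //    every b.txt hunk lands in invalidHunks (file not in the PR diff).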
+ * @returns {PartitionedHunks} split hunks + */ +function partitionSuggestedHunksByScope(pullRequestHunks, allSuggestedHunks) { + const validHunks = new Map(); + const invalidHunks = new Map(); + allSuggestedHunks.forEach((suggestedHunks, filename) => { + const pullRequestFileHunks = pullRequestHunks.get(filename); + if (!pullRequestFileHunks) { + // file is not the original PR + invalidHunks.set(filename, suggestedHunks); + return; + } + const { validFileHunks, invalidFileHunks } = partitionFileHunks(pullRequestFileHunks, suggestedHunks); + if (validFileHunks.length > 0) { + validHunks.set(filename, validFileHunks); + } + if (invalidFileHunks.length > 0) { + invalidHunks.set(filename, invalidFileHunks); + } + }); + return { validHunks, invalidHunks }; +} //# sourceMappingURL=hunk-utils.js.map /***/ }), @@ -10728,7 +10774,7 @@ class GitHub { message, author: { name: 'Gitea Actions [Bot]', - email: 'git@3caravelle.com', + email: 'git@gitea.com', }, logger: this.logger, draft: releasePullRequest.draft, @@ -11783,7 +11829,7 @@ Object.defineProperty(exports, "GitHub", ({ enumerable: true, get: function () { exports.configSchema = __nccwpck_require__(1383); exports.manifestSchema = __nccwpck_require__(94592); // x-release-please-start-version -exports.VERSION = '16.15.5'; +exports.VERSION = '16.15.6'; // x-release-please-end //# sourceMappingURL=index.js.map @@ -32824,6 +32870,160 @@ module.exports = { } }; +/***/ }), + +/***/ 19227: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var bind = __nccwpck_require__(88334); + +var $apply = __nccwpck_require__(54177); +var $call = __nccwpck_require__(2808); +var $reflectApply = __nccwpck_require__(48309); + +/** @type {import('./actualApply')} */ +module.exports = $reflectApply || bind.call($call, $apply); + + +/***/ }), + +/***/ 82093: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var bind = __nccwpck_require__(88334); +var $apply = __nccwpck_require__(54177); +var actualApply = __nccwpck_require__(19227); + +/** @type {import('./applyBind')} */ +module.exports = function applyBind() { + return actualApply(bind, $apply, arguments); +}; + + +/***/ }), + +/***/ 54177: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./functionApply')} */ +module.exports = Function.prototype.apply; + + +/***/ }), + +/***/ 2808: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./functionCall')} */ +module.exports = Function.prototype.call; + + +/***/ }), + +/***/ 86815: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var bind = __nccwpck_require__(88334); +var $TypeError = __nccwpck_require__(6361); + +var $call = __nccwpck_require__(2808); +var $actualApply = __nccwpck_require__(19227); + +/** @type {(args: [Function, thisArg?: unknown, ...args: unknown[]]) => Function} TODO FIXME, find a way to use import('.') */ +module.exports = function callBindBasic(args) { + if (args.length < 1 || typeof args[0] !== 'function') { + throw new $TypeError('a function is required'); + } + return $actualApply(bind, $call, args); +}; + + +/***/ }), + +/***/ 48309: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./reflectApply')} */ +module.exports = typeof Reflect !== 'undefined' && Reflect && Reflect.apply; + + +/***/ }), + +/***/ 62977: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var setFunctionLength = __nccwpck_require__(64056); + +var $defineProperty = 
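// Illustrative sketch of what this call-bind module produces (the receiver
// becomes the first argument; example values assumed):
//
//   var slice = callBind(Array.prototype.slice);
//   slice([1, 2, 3], 1); // => [2, 3]
//   slice.length;        // => 3 (one receiver slot + slice's two parameters)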
__nccwpck_require__(6123); + +var callBindBasic = __nccwpck_require__(86815); +var applyBind = __nccwpck_require__(82093); + +module.exports = function callBind(originalFunction) { + var func = callBindBasic(arguments); + var adjustedLength = originalFunction.length - (arguments.length - 1); + return setFunctionLength( + func, + 1 + (adjustedLength > 0 ? adjustedLength : 0), + true + ); +}; + +if ($defineProperty) { + $defineProperty(module.exports, 'apply', { value: applyBind }); +} else { + module.exports.apply = applyBind; +} + + +/***/ }), + +/***/ 71785: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var GetIntrinsic = __nccwpck_require__(74538); + +var callBindBasic = __nccwpck_require__(86815); + +/** @type {(thisArg: string, searchString: string, position?: number) => number} */ +var $indexOf = callBindBasic([GetIntrinsic('%String.prototype.indexOf%')]); + +/** @type {import('.')} */ +module.exports = function callBoundIntrinsic(name, allowMissing) { + /* eslint no-extra-parens: 0 */ + + var intrinsic = /** @type {(this: unknown, ...args: unknown[]) => unknown} */ (GetIntrinsic(name, !!allowMissing)); + if (typeof intrinsic === 'function' && $indexOf(name, '.prototype.') > -1) { + return callBindBasic(/** @type {const} */ ([intrinsic])); + } + return intrinsic; +}; + + /***/ }), /***/ 78818: @@ -38649,6 +38849,70 @@ formatters.O = function (v) { }; +/***/ }), + +/***/ 54564: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var $defineProperty = __nccwpck_require__(6123); + +var $SyntaxError = __nccwpck_require__(75474); +var $TypeError = __nccwpck_require__(6361); + +var gopd = __nccwpck_require__(18501); + +/** @type {import('.')} */ +module.exports = function defineDataProperty( + obj, + property, + value +) { + if (!obj || (typeof obj !== 'object' && typeof obj !== 'function')) { + throw new $TypeError('`obj` must be an object or a function`'); + } + if (typeof property !== 'string' && typeof property !== 'symbol') { + throw new $TypeError('`property` must be a string or a symbol`'); + } + if (arguments.length > 3 && typeof arguments[3] !== 'boolean' && arguments[3] !== null) { + throw new $TypeError('`nonEnumerable`, if provided, must be a boolean or null'); + } + if (arguments.length > 4 && typeof arguments[4] !== 'boolean' && arguments[4] !== null) { + throw new $TypeError('`nonWritable`, if provided, must be a boolean or null'); + } + if (arguments.length > 5 && typeof arguments[5] !== 'boolean' && arguments[5] !== null) { + throw new $TypeError('`nonConfigurable`, if provided, must be a boolean or null'); + } + if (arguments.length > 6 && typeof arguments[6] !== 'boolean') { + throw new $TypeError('`loose`, if provided, must be a boolean'); + } + + var nonEnumerable = arguments.length > 3 ? arguments[3] : null; + var nonWritable = arguments.length > 4 ? arguments[4] : null; + var nonConfigurable = arguments.length > 5 ? arguments[5] : null; + var loose = arguments.length > 6 ? arguments[6] : false; + + /* @type {false | TypedPropertyDescriptor} */ + var desc = !!gopd && gopd(obj, property); + + if ($defineProperty) { + $defineProperty(obj, property, { + configurable: nonConfigurable === null && desc ? desc.configurable : !nonConfigurable, + enumerable: nonEnumerable === null && desc ? desc.enumerable : !nonEnumerable, + value: value, + writable: nonWritable === null && desc ? 
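// Illustrative sketch of defineDataProperty below (names and values
// assumed): a null flag means "keep what the existing descriptor had",
// a boolean forces the flag:
//
//   var o = {};
//   defineDataProperty(o, 'answer', 42, true); // nonEnumerable = true
//   o.answer;       // => 42
//   Object.keys(o); // => [] (the property is non-enumerable)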
desc.writable : !nonWritable + }); + } else if (loose || (!nonEnumerable && !nonWritable && !nonConfigurable)) { + // must fall back to [[Set]], and was not explicitly asked to make non-enumerable, non-writable, or non-configurable + obj[property] = value; // eslint-disable-line no-param-reassign + } else { + throw new $SyntaxError('This environment does not support defining a property as non-configurable, non-writable, or non-enumerable.'); + } +}; + + /***/ }), /***/ 58932: @@ -44279,6 +44543,44 @@ module.exports = { }; +/***/ }), + +/***/ 62693: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var callBind = __nccwpck_require__(86815); +var gOPD = __nccwpck_require__(18501); + +var hasProtoAccessor; +try { + // eslint-disable-next-line no-extra-parens, no-proto + hasProtoAccessor = /** @type {{ __proto__?: typeof Array.prototype }} */ ([]).__proto__ === Array.prototype; +} catch (e) { + if (!e || typeof e !== 'object' || !('code' in e) || e.code !== 'ERR_PROTO_ACCESS') { + throw e; + } +} + +// eslint-disable-next-line no-extra-parens +var desc = !!hasProtoAccessor && gOPD && gOPD(Object.prototype, /** @type {keyof typeof Object.prototype} */ ('__proto__')); + +var $Object = Object; +var $getPrototypeOf = $Object.getPrototypeOf; + +/** @type {import('./get')} */ +module.exports = desc && typeof desc.get === 'function' + ? callBind([desc.get]) + : typeof $getPrototypeOf === 'function' + ? /** @type {import('./get')} */ function getDunder(value) { + // eslint-disable-next-line eqeqeq + return $getPrototypeOf(value == null ? value : $Object(value)); + } + : false; + + /***/ }), /***/ 28685: @@ -48349,6 +48651,124 @@ Object.defineProperty(exports, "decodeHTML5Strict", ({ enumerable: true, get: fu Object.defineProperty(exports, "decodeXMLStrict", ({ enumerable: true, get: function () { return decode_js_2.decodeXML; } })); //# sourceMappingURL=index.js.map +/***/ }), + +/***/ 6123: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('.')} */ +var $defineProperty = Object.defineProperty || false; +if ($defineProperty) { + try { + $defineProperty({}, 'a', { value: 1 }); + } catch (e) { + // IE 8 has a broken defineProperty + $defineProperty = false; + } +} + +module.exports = $defineProperty; + + +/***/ }), + +/***/ 91933: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./eval')} */ +module.exports = EvalError; + + +/***/ }), + +/***/ 28015: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('.')} */ +module.exports = Error; + + +/***/ }), + +/***/ 54415: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./range')} */ +module.exports = RangeError; + + +/***/ }), + +/***/ 46279: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./ref')} */ +module.exports = ReferenceError; + + +/***/ }), + +/***/ 75474: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./syntax')} */ +module.exports = SyntaxError; + + +/***/ }), + +/***/ 6361: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./type')} */ +module.exports = TypeError; + + +/***/ }), + +/***/ 5065: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./uri')} */ +module.exports = URIError; + + +/***/ }), + +/***/ 78308: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('.')} */ +module.exports = Object; + + /***/ }), /***/ 57099: @@ -48527,6 +48947,670 @@ module.exports = function (str) { }; +/***/ }), + +/***/ 43710: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use 
strict"; + + +var isCallable = __nccwpck_require__(94615); + +var toStr = Object.prototype.toString; +var hasOwnProperty = Object.prototype.hasOwnProperty; + +/** @type {(arr: A, iterator: (this: This | void, value: A[number], index: number, arr: A) => void, receiver: This | undefined) => void} */ +var forEachArray = function forEachArray(array, iterator, receiver) { + for (var i = 0, len = array.length; i < len; i++) { + if (hasOwnProperty.call(array, i)) { + if (receiver == null) { + iterator(array[i], i, array); + } else { + iterator.call(receiver, array[i], i, array); + } + } + } +}; + +/** @type {(string: S, iterator: (this: This | void, value: S[number], index: number, string: S) => void, receiver: This | undefined) => void} */ +var forEachString = function forEachString(string, iterator, receiver) { + for (var i = 0, len = string.length; i < len; i++) { + // no such thing as a sparse string. + if (receiver == null) { + iterator(string.charAt(i), i, string); + } else { + iterator.call(receiver, string.charAt(i), i, string); + } + } +}; + +/** @type {(obj: O, iterator: (this: This | void, value: O[keyof O], index: keyof O, obj: O) => void, receiver: This | undefined) => void} */ +var forEachObject = function forEachObject(object, iterator, receiver) { + for (var k in object) { + if (hasOwnProperty.call(object, k)) { + if (receiver == null) { + iterator(object[k], k, object); + } else { + iterator.call(receiver, object[k], k, object); + } + } + } +}; + +/** @type {(x: unknown) => x is readonly unknown[]} */ +function isArray(x) { + return toStr.call(x) === '[object Array]'; +} + +/** @type {import('.')._internal} */ +module.exports = function forEach(list, iterator, thisArg) { + if (!isCallable(iterator)) { + throw new TypeError('iterator must be a function'); + } + + var receiver; + if (arguments.length >= 3) { + receiver = thisArg; + } + + if (isArray(list)) { + forEachArray(list, iterator, receiver); + } else if (typeof list === 'string') { + forEachString(list, iterator, receiver); + } else { + forEachObject(list, iterator, receiver); + } +}; + + +/***/ }), + +/***/ 19320: +/***/ ((module) => { + +"use strict"; + + +/* eslint no-invalid-this: 1 */ + +var ERROR_MESSAGE = 'Function.prototype.bind called on incompatible '; +var toStr = Object.prototype.toString; +var max = Math.max; +var funcType = '[object Function]'; + +var concatty = function concatty(a, b) { + var arr = []; + + for (var i = 0; i < a.length; i += 1) { + arr[i] = a[i]; + } + for (var j = 0; j < b.length; j += 1) { + arr[j + a.length] = b[j]; + } + + return arr; +}; + +var slicy = function slicy(arrLike, offset) { + var arr = []; + for (var i = offset || 0, j = 0; i < arrLike.length; i += 1, j += 1) { + arr[j] = arrLike[i]; + } + return arr; +}; + +var joiny = function (arr, joiner) { + var str = ''; + for (var i = 0; i < arr.length; i += 1) { + str += arr[i]; + if (i + 1 < arr.length) { + str += joiner; + } + } + return str; +}; + +module.exports = function bind(that) { + var target = this; + if (typeof target !== 'function' || toStr.apply(target) !== funcType) { + throw new TypeError(ERROR_MESSAGE + target); + } + var args = slicy(arguments, 1); + + var bound; + var binder = function () { + if (this instanceof bound) { + var result = target.apply( + this, + concatty(args, arguments) + ); + if (Object(result) === result) { + return result; + } + return this; + } + return target.apply( + that, + concatty(args, arguments) + ); + + }; + + var boundLength = max(0, target.length - args.length); + var boundArgs = []; + 
for (var i = 0; i < boundLength; i++) { + boundArgs[i] = '$' + i; + } + + bound = Function('binder', 'return function (' + joiny(boundArgs, ',') + '){ return binder.apply(this,arguments); }')(binder); + + if (target.prototype) { + var Empty = function Empty() {}; + Empty.prototype = target.prototype; + bound.prototype = new Empty(); + Empty.prototype = null; + } + + return bound; +}; + + +/***/ }), + +/***/ 88334: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var implementation = __nccwpck_require__(19320); + +module.exports = Function.prototype.bind || implementation; + + +/***/ }), + +/***/ 74538: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var undefined; + +var $Object = __nccwpck_require__(78308); + +var $Error = __nccwpck_require__(28015); +var $EvalError = __nccwpck_require__(91933); +var $RangeError = __nccwpck_require__(54415); +var $ReferenceError = __nccwpck_require__(46279); +var $SyntaxError = __nccwpck_require__(75474); +var $TypeError = __nccwpck_require__(6361); +var $URIError = __nccwpck_require__(5065); + +var abs = __nccwpck_require__(19775); +var floor = __nccwpck_require__(60924); +var max = __nccwpck_require__(52419); +var min = __nccwpck_require__(73373); +var pow = __nccwpck_require__(78029); +var round = __nccwpck_require__(59396); +var sign = __nccwpck_require__(39091); + +var $Function = Function; + +// eslint-disable-next-line consistent-return +var getEvalledConstructor = function (expressionSyntax) { + try { + return $Function('"use strict"; return (' + expressionSyntax + ').constructor;')(); + } catch (e) {} +}; + +var $gOPD = __nccwpck_require__(18501); +var $defineProperty = __nccwpck_require__(6123); + +var throwTypeError = function () { + throw new $TypeError(); +}; +var ThrowTypeError = $gOPD + ? (function () { + try { + // eslint-disable-next-line no-unused-expressions, no-caller, no-restricted-properties + arguments.callee; // IE 8 does not throw here + return throwTypeError; + } catch (calleeThrows) { + try { + // IE 8 throws on Object.getOwnPropertyDescriptor(arguments, '') + return $gOPD(arguments, 'callee').get; + } catch (gOPDthrows) { + return throwTypeError; + } + } + }()) + : throwTypeError; + +var hasSymbols = __nccwpck_require__(40587)(); + +var getProto = __nccwpck_require__(13592); +var $ObjectGPO = __nccwpck_require__(5045); +var $ReflectGPO = __nccwpck_require__(78859); + +var $apply = __nccwpck_require__(54177); +var $call = __nccwpck_require__(2808); + +var needsEval = {}; + +var TypedArray = typeof Uint8Array === 'undefined' || !getProto ? undefined : getProto(Uint8Array); + +var INTRINSICS = { + __proto__: null, + '%AggregateError%': typeof AggregateError === 'undefined' ? undefined : AggregateError, + '%Array%': Array, + '%ArrayBuffer%': typeof ArrayBuffer === 'undefined' ? undefined : ArrayBuffer, + '%ArrayIteratorPrototype%': hasSymbols && getProto ? getProto([][Symbol.iterator]()) : undefined, + '%AsyncFromSyncIteratorPrototype%': undefined, + '%AsyncFunction%': needsEval, + '%AsyncGenerator%': needsEval, + '%AsyncGeneratorFunction%': needsEval, + '%AsyncIteratorPrototype%': needsEval, + '%Atomics%': typeof Atomics === 'undefined' ? undefined : Atomics, + '%BigInt%': typeof BigInt === 'undefined' ? undefined : BigInt, + '%BigInt64Array%': typeof BigInt64Array === 'undefined' ? undefined : BigInt64Array, + '%BigUint64Array%': typeof BigUint64Array === 'undefined' ? 
undefined : BigUint64Array, + '%Boolean%': Boolean, + '%DataView%': typeof DataView === 'undefined' ? undefined : DataView, + '%Date%': Date, + '%decodeURI%': decodeURI, + '%decodeURIComponent%': decodeURIComponent, + '%encodeURI%': encodeURI, + '%encodeURIComponent%': encodeURIComponent, + '%Error%': $Error, + '%eval%': eval, // eslint-disable-line no-eval + '%EvalError%': $EvalError, + '%Float16Array%': typeof Float16Array === 'undefined' ? undefined : Float16Array, + '%Float32Array%': typeof Float32Array === 'undefined' ? undefined : Float32Array, + '%Float64Array%': typeof Float64Array === 'undefined' ? undefined : Float64Array, + '%FinalizationRegistry%': typeof FinalizationRegistry === 'undefined' ? undefined : FinalizationRegistry, + '%Function%': $Function, + '%GeneratorFunction%': needsEval, + '%Int8Array%': typeof Int8Array === 'undefined' ? undefined : Int8Array, + '%Int16Array%': typeof Int16Array === 'undefined' ? undefined : Int16Array, + '%Int32Array%': typeof Int32Array === 'undefined' ? undefined : Int32Array, + '%isFinite%': isFinite, + '%isNaN%': isNaN, + '%IteratorPrototype%': hasSymbols && getProto ? getProto(getProto([][Symbol.iterator]())) : undefined, + '%JSON%': typeof JSON === 'object' ? JSON : undefined, + '%Map%': typeof Map === 'undefined' ? undefined : Map, + '%MapIteratorPrototype%': typeof Map === 'undefined' || !hasSymbols || !getProto ? undefined : getProto(new Map()[Symbol.iterator]()), + '%Math%': Math, + '%Number%': Number, + '%Object%': $Object, + '%Object.getOwnPropertyDescriptor%': $gOPD, + '%parseFloat%': parseFloat, + '%parseInt%': parseInt, + '%Promise%': typeof Promise === 'undefined' ? undefined : Promise, + '%Proxy%': typeof Proxy === 'undefined' ? undefined : Proxy, + '%RangeError%': $RangeError, + '%ReferenceError%': $ReferenceError, + '%Reflect%': typeof Reflect === 'undefined' ? undefined : Reflect, + '%RegExp%': RegExp, + '%Set%': typeof Set === 'undefined' ? undefined : Set, + '%SetIteratorPrototype%': typeof Set === 'undefined' || !hasSymbols || !getProto ? undefined : getProto(new Set()[Symbol.iterator]()), + '%SharedArrayBuffer%': typeof SharedArrayBuffer === 'undefined' ? undefined : SharedArrayBuffer, + '%String%': String, + '%StringIteratorPrototype%': hasSymbols && getProto ? getProto(''[Symbol.iterator]()) : undefined, + '%Symbol%': hasSymbols ? Symbol : undefined, + '%SyntaxError%': $SyntaxError, + '%ThrowTypeError%': ThrowTypeError, + '%TypedArray%': TypedArray, + '%TypeError%': $TypeError, + '%Uint8Array%': typeof Uint8Array === 'undefined' ? undefined : Uint8Array, + '%Uint8ClampedArray%': typeof Uint8ClampedArray === 'undefined' ? undefined : Uint8ClampedArray, + '%Uint16Array%': typeof Uint16Array === 'undefined' ? undefined : Uint16Array, + '%Uint32Array%': typeof Uint32Array === 'undefined' ? undefined : Uint32Array, + '%URIError%': $URIError, + '%WeakMap%': typeof WeakMap === 'undefined' ? undefined : WeakMap, + '%WeakRef%': typeof WeakRef === 'undefined' ? undefined : WeakRef, + '%WeakSet%': typeof WeakSet === 'undefined' ? 
undefined : WeakSet, + + '%Function.prototype.call%': $call, + '%Function.prototype.apply%': $apply, + '%Object.defineProperty%': $defineProperty, + '%Object.getPrototypeOf%': $ObjectGPO, + '%Math.abs%': abs, + '%Math.floor%': floor, + '%Math.max%': max, + '%Math.min%': min, + '%Math.pow%': pow, + '%Math.round%': round, + '%Math.sign%': sign, + '%Reflect.getPrototypeOf%': $ReflectGPO +}; + +if (getProto) { + try { + null.error; // eslint-disable-line no-unused-expressions + } catch (e) { + // https://github.com/tc39/proposal-shadowrealm/pull/384#issuecomment-1364264229 + var errorProto = getProto(getProto(e)); + INTRINSICS['%Error.prototype%'] = errorProto; + } +} + +var doEval = function doEval(name) { + var value; + if (name === '%AsyncFunction%') { + value = getEvalledConstructor('async function () {}'); + } else if (name === '%GeneratorFunction%') { + value = getEvalledConstructor('function* () {}'); + } else if (name === '%AsyncGeneratorFunction%') { + value = getEvalledConstructor('async function* () {}'); + } else if (name === '%AsyncGenerator%') { + var fn = doEval('%AsyncGeneratorFunction%'); + if (fn) { + value = fn.prototype; + } + } else if (name === '%AsyncIteratorPrototype%') { + var gen = doEval('%AsyncGenerator%'); + if (gen && getProto) { + value = getProto(gen.prototype); + } + } + + INTRINSICS[name] = value; + + return value; +}; + +var LEGACY_ALIASES = { + __proto__: null, + '%ArrayBufferPrototype%': ['ArrayBuffer', 'prototype'], + '%ArrayPrototype%': ['Array', 'prototype'], + '%ArrayProto_entries%': ['Array', 'prototype', 'entries'], + '%ArrayProto_forEach%': ['Array', 'prototype', 'forEach'], + '%ArrayProto_keys%': ['Array', 'prototype', 'keys'], + '%ArrayProto_values%': ['Array', 'prototype', 'values'], + '%AsyncFunctionPrototype%': ['AsyncFunction', 'prototype'], + '%AsyncGenerator%': ['AsyncGeneratorFunction', 'prototype'], + '%AsyncGeneratorPrototype%': ['AsyncGeneratorFunction', 'prototype', 'prototype'], + '%BooleanPrototype%': ['Boolean', 'prototype'], + '%DataViewPrototype%': ['DataView', 'prototype'], + '%DatePrototype%': ['Date', 'prototype'], + '%ErrorPrototype%': ['Error', 'prototype'], + '%EvalErrorPrototype%': ['EvalError', 'prototype'], + '%Float32ArrayPrototype%': ['Float32Array', 'prototype'], + '%Float64ArrayPrototype%': ['Float64Array', 'prototype'], + '%FunctionPrototype%': ['Function', 'prototype'], + '%Generator%': ['GeneratorFunction', 'prototype'], + '%GeneratorPrototype%': ['GeneratorFunction', 'prototype', 'prototype'], + '%Int8ArrayPrototype%': ['Int8Array', 'prototype'], + '%Int16ArrayPrototype%': ['Int16Array', 'prototype'], + '%Int32ArrayPrototype%': ['Int32Array', 'prototype'], + '%JSONParse%': ['JSON', 'parse'], + '%JSONStringify%': ['JSON', 'stringify'], + '%MapPrototype%': ['Map', 'prototype'], + '%NumberPrototype%': ['Number', 'prototype'], + '%ObjectPrototype%': ['Object', 'prototype'], + '%ObjProto_toString%': ['Object', 'prototype', 'toString'], + '%ObjProto_valueOf%': ['Object', 'prototype', 'valueOf'], + '%PromisePrototype%': ['Promise', 'prototype'], + '%PromiseProto_then%': ['Promise', 'prototype', 'then'], + '%Promise_all%': ['Promise', 'all'], + '%Promise_reject%': ['Promise', 'reject'], + '%Promise_resolve%': ['Promise', 'resolve'], + '%RangeErrorPrototype%': ['RangeError', 'prototype'], + '%ReferenceErrorPrototype%': ['ReferenceError', 'prototype'], + '%RegExpPrototype%': ['RegExp', 'prototype'], + '%SetPrototype%': ['Set', 'prototype'], + '%SharedArrayBufferPrototype%': ['SharedArrayBuffer', 'prototype'], + 
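// Illustrative note: each legacy alias below is rewritten onto its
// canonical base intrinsic plus a property path before lookup, e.g.
// (sketch, call sites assumed):
//
//   GetIntrinsic('%StringPrototype%');        // resolved as %String% + ['prototype']
//   GetIntrinsic('%String.prototype.slice%'); // same dotted-path machinery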
'%StringPrototype%': ['String', 'prototype'], + '%SymbolPrototype%': ['Symbol', 'prototype'], + '%SyntaxErrorPrototype%': ['SyntaxError', 'prototype'], + '%TypedArrayPrototype%': ['TypedArray', 'prototype'], + '%TypeErrorPrototype%': ['TypeError', 'prototype'], + '%Uint8ArrayPrototype%': ['Uint8Array', 'prototype'], + '%Uint8ClampedArrayPrototype%': ['Uint8ClampedArray', 'prototype'], + '%Uint16ArrayPrototype%': ['Uint16Array', 'prototype'], + '%Uint32ArrayPrototype%': ['Uint32Array', 'prototype'], + '%URIErrorPrototype%': ['URIError', 'prototype'], + '%WeakMapPrototype%': ['WeakMap', 'prototype'], + '%WeakSetPrototype%': ['WeakSet', 'prototype'] +}; + +var bind = __nccwpck_require__(88334); +var hasOwn = __nccwpck_require__(62157); +var $concat = bind.call($call, Array.prototype.concat); +var $spliceApply = bind.call($apply, Array.prototype.splice); +var $replace = bind.call($call, String.prototype.replace); +var $strSlice = bind.call($call, String.prototype.slice); +var $exec = bind.call($call, RegExp.prototype.exec); + +/* adapted from https://github.com/lodash/lodash/blob/4.17.15/dist/lodash.js#L6735-L6744 */ +var rePropName = /[^%.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|%$))/g; +var reEscapeChar = /\\(\\)?/g; /** Used to match backslashes in property paths. */ +var stringToPath = function stringToPath(string) { + var first = $strSlice(string, 0, 1); + var last = $strSlice(string, -1); + if (first === '%' && last !== '%') { + throw new $SyntaxError('invalid intrinsic syntax, expected closing `%`'); + } else if (last === '%' && first !== '%') { + throw new $SyntaxError('invalid intrinsic syntax, expected opening `%`'); + } + var result = []; + $replace(string, rePropName, function (match, number, quote, subString) { + result[result.length] = quote ? $replace(subString, reEscapeChar, '$1') : number || match; + }); + return result; +}; +/* end adaptation */ + +var getBaseIntrinsic = function getBaseIntrinsic(name, allowMissing) { + var intrinsicName = name; + var alias; + if (hasOwn(LEGACY_ALIASES, intrinsicName)) { + alias = LEGACY_ALIASES[intrinsicName]; + intrinsicName = '%' + alias[0] + '%'; + } + + if (hasOwn(INTRINSICS, intrinsicName)) { + var value = INTRINSICS[intrinsicName]; + if (value === needsEval) { + value = doEval(intrinsicName); + } + if (typeof value === 'undefined' && !allowMissing) { + throw new $TypeError('intrinsic ' + name + ' exists, but is not available. Please file an issue!'); + } + + return { + alias: alias, + name: intrinsicName, + value: value + }; + } + + throw new $SyntaxError('intrinsic ' + name + ' does not exist!'); +}; + +module.exports = function GetIntrinsic(name, allowMissing) { + if (typeof name !== 'string' || name.length === 0) { + throw new $TypeError('intrinsic name must be a non-empty string'); + } + if (arguments.length > 1 && typeof allowMissing !== 'boolean') { + throw new $TypeError('"allowMissing" argument must be a boolean'); + } + + if ($exec(/^%?[^%]*%?$/, name) === null) { + throw new $SyntaxError('`%` may not be present anywhere but at the beginning and end of the intrinsic name'); + } + var parts = stringToPath(name); + var intrinsicBaseName = parts.length > 0 ? 
parts[0] : ''; + + var intrinsic = getBaseIntrinsic('%' + intrinsicBaseName + '%', allowMissing); + var intrinsicRealName = intrinsic.name; + var value = intrinsic.value; + var skipFurtherCaching = false; + + var alias = intrinsic.alias; + if (alias) { + intrinsicBaseName = alias[0]; + $spliceApply(parts, $concat([0, 1], alias)); + } + + for (var i = 1, isOwn = true; i < parts.length; i += 1) { + var part = parts[i]; + var first = $strSlice(part, 0, 1); + var last = $strSlice(part, -1); + if ( + ( + (first === '"' || first === "'" || first === '`') + || (last === '"' || last === "'" || last === '`') + ) + && first !== last + ) { + throw new $SyntaxError('property names with quotes must have matching quotes'); + } + if (part === 'constructor' || !isOwn) { + skipFurtherCaching = true; + } + + intrinsicBaseName += '.' + part; + intrinsicRealName = '%' + intrinsicBaseName + '%'; + + if (hasOwn(INTRINSICS, intrinsicRealName)) { + value = INTRINSICS[intrinsicRealName]; + } else if (value != null) { + if (!(part in value)) { + if (!allowMissing) { + throw new $TypeError('base intrinsic for ' + name + ' exists, but the property is not available.'); + } + return void undefined; + } + if ($gOPD && (i + 1) >= parts.length) { + var desc = $gOPD(value, part); + isOwn = !!desc; + + // By convention, when a data property is converted to an accessor + // property to emulate a data property that does not suffer from + // the override mistake, that accessor's getter is marked with + // an `originalValue` property. Here, when we detect this, we + // uphold the illusion by pretending to see that original data + // property, i.e., returning the value rather than the getter + // itself. + if (isOwn && 'get' in desc && !('originalValue' in desc.get)) { + value = desc.get; + } else { + value = value[part]; + } + } else { + isOwn = hasOwn(value, part); + value = value[part]; + } + + if (isOwn && !skipFurtherCaching) { + INTRINSICS[intrinsicRealName] = value; + } + } + } + return value; +}; + + +/***/ }), + +/***/ 5045: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var $Object = __nccwpck_require__(78308); + +/** @type {import('./Object.getPrototypeOf')} */ +module.exports = $Object.getPrototypeOf || null; + + +/***/ }), + +/***/ 78859: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./Reflect.getPrototypeOf')} */ +module.exports = (typeof Reflect !== 'undefined' && Reflect.getPrototypeOf) || null; + + +/***/ }), + +/***/ 13592: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var reflectGetProto = __nccwpck_require__(78859); +var originalGetProto = __nccwpck_require__(5045); + +var getDunderProto = __nccwpck_require__(62693); + +/** @type {import('.')} */ +module.exports = reflectGetProto + ? function getProto(O) { + // @ts-expect-error TS can't narrow inside a closure, for some reason + return reflectGetProto(O); + } + : originalGetProto + ? function getProto(O) { + if (!O || (typeof O !== 'object' && typeof O !== 'function')) { + throw new TypeError('getProto: not an object'); + } + // @ts-expect-error TS can't narrow inside a closure, for some reason + return originalGetProto(O); + } + : getDunderProto + ? 
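// Illustrative note: this getProto helper prefers Reflect.getPrototypeOf,
// then Object.getPrototypeOf, then the __proto__ accessor captured above.
// Sketch (assumed values):
//
//   getProto([]);                  // => Array.prototype on any of the branches
//   getProto(Object.create(null)); // => null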
function getProto(O) { + // @ts-expect-error TS can't narrow inside a closure, for some reason + return getDunderProto(O); + } + : null; + + +/***/ }), + +/***/ 57087: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./gOPD')} */ +module.exports = Object.getOwnPropertyDescriptor; + + +/***/ }), + +/***/ 18501: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +/** @type {import('.')} */ +var $gOPD = __nccwpck_require__(57087); + +if ($gOPD) { + try { + $gOPD([], 'length'); + } catch (e) { + // IE 8 has a broken gOPD + $gOPD = null; + } +} + +module.exports = $gOPD; + + /***/ }), /***/ 96956: @@ -53738,6 +54822,143 @@ module.exports = (flag, argv = process.argv) => { }; +/***/ }), + +/***/ 90176: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var $defineProperty = __nccwpck_require__(6123); + +var hasPropertyDescriptors = function hasPropertyDescriptors() { + return !!$defineProperty; +}; + +hasPropertyDescriptors.hasArrayLengthDefineBug = function hasArrayLengthDefineBug() { + // node v0.6 has a bug where array lengths can be Set but not Defined + if (!$defineProperty) { + return null; + } + try { + return $defineProperty([], 'length', { value: 1 }).length !== 1; + } catch (e) { + // In Firefox 4-22, defining length on an array throws an exception. + return true; + } +}; + +module.exports = hasPropertyDescriptors; + + +/***/ }), + +/***/ 40587: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var origSymbol = typeof Symbol !== 'undefined' && Symbol; +var hasSymbolSham = __nccwpck_require__(57747); + +/** @type {import('.')} */ +module.exports = function hasNativeSymbols() { + if (typeof origSymbol !== 'function') { return false; } + if (typeof Symbol !== 'function') { return false; } + if (typeof origSymbol('foo') !== 'symbol') { return false; } + if (typeof Symbol('bar') !== 'symbol') { return false; } + + return hasSymbolSham(); +}; + + +/***/ }), + +/***/ 57747: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./shams')} */ +/* eslint complexity: [2, 18], max-statements: [2, 33] */ +module.exports = function hasSymbols() { + if (typeof Symbol !== 'function' || typeof Object.getOwnPropertySymbols !== 'function') { return false; } + if (typeof Symbol.iterator === 'symbol') { return true; } + + /** @type {{ [k in symbol]?: unknown }} */ + var obj = {}; + var sym = Symbol('test'); + var symObj = Object(sym); + if (typeof sym === 'string') { return false; } + + if (Object.prototype.toString.call(sym) !== '[object Symbol]') { return false; } + if (Object.prototype.toString.call(symObj) !== '[object Symbol]') { return false; } + + // temp disabled per https://github.com/ljharb/object.assign/issues/17 + // if (sym instanceof Symbol) { return false; } + // temp disabled per https://github.com/WebReflection/get-own-property-symbols/issues/4 + // if (!(symObj instanceof Symbol)) { return false; } + + // if (typeof Symbol.prototype.toString !== 'function') { return false; } + // if (String(sym) !== Symbol.prototype.toString.call(sym)) { return false; } + + var symVal = 42; + obj[sym] = symVal; + for (var _ in obj) { return false; } // eslint-disable-line no-restricted-syntax, no-unreachable-loop + if (typeof Object.keys === 'function' && Object.keys(obj).length !== 0) { return false; } + + if (typeof Object.getOwnPropertyNames === 'function' && Object.getOwnPropertyNames(obj).length !== 0) { return false; } + + var syms = 
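// The remaining checks in this sham detector make sure a symbol-keyed
// property is invisible to for..in / Object.keys but fully visible to
// getOwnPropertySymbols with the expected descriptor. A string-based
// Symbol polyfill fails these, e.g. (sketch, polyfill behaviour assumed):
//
//   // obj['fake-symbol-string'] = 42;  -> shows up in Object.keys(obj),
//   // so hasSymbols() would return false for such an environment.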
Object.getOwnPropertySymbols(obj); + if (syms.length !== 1 || syms[0] !== sym) { return false; } + + if (!Object.prototype.propertyIsEnumerable.call(obj, sym)) { return false; } + + if (typeof Object.getOwnPropertyDescriptor === 'function') { + // eslint-disable-next-line no-extra-parens + var descriptor = /** @type {PropertyDescriptor} */ (Object.getOwnPropertyDescriptor(obj, sym)); + if (descriptor.value !== symVal || descriptor.enumerable !== true) { return false; } + } + + return true; +}; + + +/***/ }), + +/***/ 99038: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var hasSymbols = __nccwpck_require__(57747); + +/** @type {import('.')} */ +module.exports = function hasToStringTagShams() { + return hasSymbols() && !!Symbol.toStringTag; +}; + + +/***/ }), + +/***/ 62157: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var call = Function.prototype.call; +var $hasOwn = Object.prototype.hasOwnProperty; +var bind = __nccwpck_require__(88334); + +/** @type {import('.')} */ +module.exports = bind.call(call, $hasOwn); + + /***/ }), /***/ 93527: @@ -55209,6 +56430,136 @@ if (typeof Object.create === 'function') { } +/***/ }), + +/***/ 94615: +/***/ ((module) => { + +"use strict"; + + +var fnToStr = Function.prototype.toString; +var reflectApply = typeof Reflect === 'object' && Reflect !== null && Reflect.apply; +var badArrayLike; +var isCallableMarker; +if (typeof reflectApply === 'function' && typeof Object.defineProperty === 'function') { + try { + badArrayLike = Object.defineProperty({}, 'length', { + get: function () { + throw isCallableMarker; + } + }); + isCallableMarker = {}; + // eslint-disable-next-line no-throw-literal + reflectApply(function () { throw 42; }, null, badArrayLike); + } catch (_) { + if (_ !== isCallableMarker) { + reflectApply = null; + } + } +} else { + reflectApply = null; +} + +var constructorRegex = /^\s*class\b/; +var isES6ClassFn = function isES6ClassFunction(value) { + try { + var fnStr = fnToStr.call(value); + return constructorRegex.test(fnStr); + } catch (e) { + return false; // not a function + } +}; + +var tryFunctionObject = function tryFunctionToStr(value) { + try { + if (isES6ClassFn(value)) { return false; } + fnToStr.call(value); + return true; + } catch (e) { + return false; + } +}; +var toStr = Object.prototype.toString; +var objectClass = '[object Object]'; +var fnClass = '[object Function]'; +var genClass = '[object GeneratorFunction]'; +var ddaClass = '[object HTMLAllCollection]'; // IE 11 +var ddaClass2 = '[object HTML document.all class]'; +var ddaClass3 = '[object HTMLCollection]'; // IE 9-10 +var hasToStringTag = typeof Symbol === 'function' && !!Symbol.toStringTag; // better: use `has-tostringtag` + +var isIE68 = !(0 in [,]); // eslint-disable-line no-sparse-arrays, comma-spacing + +var isDDA = function isDocumentDotAll() { return false; }; +if (typeof document === 'object') { + // Firefox 3 canonicalizes DDA to undefined when it's not accessed directly + var all = document.all; + if (toStr.call(all) === toStr.call(document.all)) { + isDDA = function isDocumentDotAll(value) { + /* globals document: false */ + // in IE 6-8, typeof document.all is "object" and it's truthy + if ((isIE68 || !value) && (typeof value === 'undefined' || typeof value === 'object')) { + try { + var str = toStr.call(value); + return ( + str === ddaClass + || str === ddaClass2 + || str === ddaClass3 // opera 12.16 + || str === objectClass // IE 6-8 + ) && value('') == null; // 
eslint-disable-line eqeqeq + } catch (e) { /**/ } + } + return false; + }; + } +} + +module.exports = reflectApply + ? function isCallable(value) { + if (isDDA(value)) { return true; } + if (!value) { return false; } + if (typeof value !== 'function' && typeof value !== 'object') { return false; } + try { + reflectApply(value, null, badArrayLike); + } catch (e) { + if (e !== isCallableMarker) { return false; } + } + return !isES6ClassFn(value) && tryFunctionObject(value); + } + : function isCallable(value) { + if (isDDA(value)) { return true; } + if (!value) { return false; } + if (typeof value !== 'function' && typeof value !== 'object') { return false; } + if (hasToStringTag) { return tryFunctionObject(value); } + if (isES6ClassFn(value)) { return false; } + var strClass = toStr.call(value); + if (strClass !== fnClass && strClass !== genClass && !(/^\[object HTML/).test(strClass)) { return false; } + return tryFunctionObject(value); + }; + + +/***/ }), + +/***/ 8284: +/***/ ((module) => { + +"use strict"; + + +// Last updated for git 2.29.0. +// eslint-disable-next-line no-control-regex +const bad = /(^|[/.])([/.]|$)|^@$|@{|[\x00-\x20\x7f~^:?*[\\]|\.lock(\/|$)/ + +module.exports = function validRef (name, onelevel) { + if (typeof name !== 'string') { + throw new TypeError('Reference name must be a string') + } + + return !bad.test(name) && (!!onelevel || name.includes('/')) +} + + /***/ }), /***/ 51389: @@ -55269,6 +56620,22 @@ function isPlainObject(o) { exports.isPlainObject = isPlainObject; +/***/ }), + +/***/ 64486: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var whichTypedArray = __nccwpck_require__(51257); + +/** @type {import('.')} */ +module.exports = function isTypedArray(value) { + return !!whichTypedArray(value); +}; + + /***/ }), /***/ 21917: @@ -61496,6 +62863,111 @@ const forEachStep = (self, fn, node, thisp) => { module.exports = LRUCache +/***/ }), + +/***/ 19775: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./abs')} */ +module.exports = Math.abs; + + +/***/ }), + +/***/ 60924: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./floor')} */ +module.exports = Math.floor; + + +/***/ }), + +/***/ 57661: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./isNaN')} */ +module.exports = Number.isNaN || function isNaN(a) { + return a !== a; +}; + + +/***/ }), + +/***/ 52419: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./max')} */ +module.exports = Math.max; + + +/***/ }), + +/***/ 73373: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./min')} */ +module.exports = Math.min; + + +/***/ }), + +/***/ 78029: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./pow')} */ +module.exports = Math.pow; + + +/***/ }), + +/***/ 59396: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('./round')} */ +module.exports = Math.round; + + +/***/ }), + +/***/ 39091: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var $isNaN = __nccwpck_require__(57661); + +/** @type {import('./sign')} */ +module.exports = function sign(number) { + if ($isNaN(number) || number === 0) { + return number; + } + return number < 0 ? 
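// Illustrative usage sketch for the git ref validator bundled above as
// module 8284 (example ref names assumed):
//
//   validRef('refs/heads/release-1.2'); // => true
//   validRef('main');                   // => false (single level, onelevel not set)
//   validRef('main', true);             // => true
//   validRef('bad..name', true);        // => false (forbidden '..' sequence)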
-1 : +1; +}; + + /***/ }), /***/ 23432: @@ -72497,6 +73969,543 @@ module.exports = ZStream; function _typeof(obj){"@babel/helpers - typeof";return _typeof="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(obj){return typeof obj}:function(obj){return obj&&"function"==typeof Symbol&&obj.constructor===Symbol&&obj!==Symbol.prototype?"symbol":typeof obj},_typeof(obj)}function _createForOfIteratorHelper(o,allowArrayLike){var it=typeof Symbol!=="undefined"&&o[Symbol.iterator]||o["@@iterator"];if(!it){if(Array.isArray(o)||(it=_unsupportedIterableToArray(o))||allowArrayLike&&o&&typeof o.length==="number"){if(it)o=it;var i=0;var F=function F(){};return{s:F,n:function n(){if(i>=o.length)return{done:true};return{done:false,value:o[i++]}},e:function e(_e2){throw _e2},f:F}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}var normalCompletion=true,didErr=false,err;return{s:function s(){it=it.call(o)},n:function n(){var step=it.next();normalCompletion=step.done;return step},e:function e(_e3){didErr=true;err=_e3},f:function f(){try{if(!normalCompletion&&it["return"]!=null)it["return"]()}finally{if(didErr)throw err}}}}function _defineProperty(obj,key,value){key=_toPropertyKey(key);if(key in obj){Object.defineProperty(obj,key,{value:value,enumerable:true,configurable:true,writable:true})}else{obj[key]=value}return obj}function _toPropertyKey(arg){var key=_toPrimitive(arg,"string");return _typeof(key)==="symbol"?key:String(key)}function _toPrimitive(input,hint){if(_typeof(input)!=="object"||input===null)return input;var prim=input[Symbol.toPrimitive];if(prim!==undefined){var res=prim.call(input,hint||"default");if(_typeof(res)!=="object")return res;throw new TypeError("@@toPrimitive must return a primitive value.")}return(hint==="string"?String:Number)(input)}function _slicedToArray(arr,i){return _arrayWithHoles(arr)||_iterableToArrayLimit(arr,i)||_unsupportedIterableToArray(arr,i)||_nonIterableRest()}function _nonIterableRest(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function _unsupportedIterableToArray(o,minLen){if(!o)return;if(typeof o==="string")return _arrayLikeToArray(o,minLen);var n=Object.prototype.toString.call(o).slice(8,-1);if(n==="Object"&&o.constructor)n=o.constructor.name;if(n==="Map"||n==="Set")return Array.from(o);if(n==="Arguments"||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return _arrayLikeToArray(o,minLen)}function _arrayLikeToArray(arr,len){if(len==null||len>arr.length)len=arr.length;for(var i=0,arr2=new Array(len);i { + +"use strict"; +// 'path' module extracted from Node.js v8.11.1 (only the posix part) +// transplited with Babel + +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +function assertPath(path) { + if (typeof path !== 'string') { + throw new TypeError('Path must be a string. Received ' + JSON.stringify(path)); + } +} + +// Resolves . and .. elements in a path with directory names +function normalizeStringPosix(path, allowAboveRoot) { + var res = ''; + var lastSegmentLength = 0; + var lastSlash = -1; + var dots = 0; + var code; + for (var i = 0; i <= path.length; ++i) { + if (i < path.length) + code = path.charCodeAt(i); + else if (code === 47 /*/*/) + break; + else + code = 47 /*/*/; + if (code === 47 /*/*/) { + if (lastSlash === i - 1 || dots === 1) { + // NOOP + } else if (lastSlash !== i - 1 && dots === 2) { + if (res.length < 2 || lastSegmentLength !== 2 || res.charCodeAt(res.length - 1) !== 46 /*.*/ || res.charCodeAt(res.length - 2) !== 46 /*.*/) { + if (res.length > 2) { + var lastSlashIndex = res.lastIndexOf('/'); + if (lastSlashIndex !== res.length - 1) { + if (lastSlashIndex === -1) { + res = ''; + lastSegmentLength = 0; + } else { + res = res.slice(0, lastSlashIndex); + lastSegmentLength = res.length - 1 - res.lastIndexOf('/'); + } + lastSlash = i; + dots = 0; + continue; + } + } else if (res.length === 2 || res.length === 1) { + res = ''; + lastSegmentLength = 0; + lastSlash = i; + dots = 0; + continue; + } + } + if (allowAboveRoot) { + if (res.length > 0) + res += '/..'; + else + res = '..'; + lastSegmentLength = 2; + } + } else { + if (res.length > 0) + res += '/' + path.slice(lastSlash + 1, i); + else + res = path.slice(lastSlash + 1, i); + lastSegmentLength = i - lastSlash - 1; + } + lastSlash = i; + dots = 0; + } else if (code === 46 /*.*/ && dots !== -1) { + ++dots; + } else { + dots = -1; + } + } + return res; +} + +function _format(sep, pathObject) { + var dir = pathObject.dir || pathObject.root; + var base = pathObject.base || (pathObject.name || '') + (pathObject.ext || ''); + if (!dir) { + return base; + } + if (dir === pathObject.root) { + return dir + base; + } + return dir + sep + base; +} + +var posix = { + // path.resolve([from ...], to) + resolve: function resolve() { + var resolvedPath = ''; + var resolvedAbsolute = false; + var cwd; + + for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) { + var path; + if (i >= 0) + path = arguments[i]; + else { + if (cwd === undefined) + cwd = process.cwd(); + path = cwd; + } + + assertPath(path); + + // Skip empty entries + if (path.length === 0) { + continue; + } + + resolvedPath = path + '/' + resolvedPath; + resolvedAbsolute = path.charCodeAt(0) === 47 /*/*/; + } + + // At this point the path should be resolved to a full absolute path, but + // handle relative paths to be safe (might happen when process.cwd() fails) + + // Normalize the path + resolvedPath = normalizeStringPosix(resolvedPath, !resolvedAbsolute); + + if (resolvedAbsolute) { + if (resolvedPath.length > 0) + return '/' + resolvedPath; + else + return '/'; + } else if (resolvedPath.length > 0) { + return resolvedPath; + } else { + return '.'; + } + }, + + normalize: function normalize(path) { + assertPath(path); + 
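// Illustrative sketch of the posix helpers in this module (inputs assumed):
//
//   posix.normalize('a/b/../c/');   // => 'a/c/' (trailing slash preserved)
//   posix.resolve('/base', '../x'); // => '/x'
//   posix.resolve('rel');           // => process.cwd() + '/rel', normalized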
+ if (path.length === 0) return '.'; + + var isAbsolute = path.charCodeAt(0) === 47 /*/*/; + var trailingSeparator = path.charCodeAt(path.length - 1) === 47 /*/*/; + + // Normalize the path + path = normalizeStringPosix(path, !isAbsolute); + + if (path.length === 0 && !isAbsolute) path = '.'; + if (path.length > 0 && trailingSeparator) path += '/'; + + if (isAbsolute) return '/' + path; + return path; + }, + + isAbsolute: function isAbsolute(path) { + assertPath(path); + return path.length > 0 && path.charCodeAt(0) === 47 /*/*/; + }, + + join: function join() { + if (arguments.length === 0) + return '.'; + var joined; + for (var i = 0; i < arguments.length; ++i) { + var arg = arguments[i]; + assertPath(arg); + if (arg.length > 0) { + if (joined === undefined) + joined = arg; + else + joined += '/' + arg; + } + } + if (joined === undefined) + return '.'; + return posix.normalize(joined); + }, + + relative: function relative(from, to) { + assertPath(from); + assertPath(to); + + if (from === to) return ''; + + from = posix.resolve(from); + to = posix.resolve(to); + + if (from === to) return ''; + + // Trim any leading backslashes + var fromStart = 1; + for (; fromStart < from.length; ++fromStart) { + if (from.charCodeAt(fromStart) !== 47 /*/*/) + break; + } + var fromEnd = from.length; + var fromLen = fromEnd - fromStart; + + // Trim any leading backslashes + var toStart = 1; + for (; toStart < to.length; ++toStart) { + if (to.charCodeAt(toStart) !== 47 /*/*/) + break; + } + var toEnd = to.length; + var toLen = toEnd - toStart; + + // Compare paths to find the longest common path from root + var length = fromLen < toLen ? fromLen : toLen; + var lastCommonSep = -1; + var i = 0; + for (; i <= length; ++i) { + if (i === length) { + if (toLen > length) { + if (to.charCodeAt(toStart + i) === 47 /*/*/) { + // We get here if `from` is the exact base path for `to`. + // For example: from='/foo/bar'; to='/foo/bar/baz' + return to.slice(toStart + i + 1); + } else if (i === 0) { + // We get here if `from` is the root + // For example: from='/'; to='/foo' + return to.slice(toStart + i); + } + } else if (fromLen > length) { + if (from.charCodeAt(fromStart + i) === 47 /*/*/) { + // We get here if `to` is the exact base path for `from`. + // For example: from='/foo/bar/baz'; to='/foo/bar' + lastCommonSep = i; + } else if (i === 0) { + // We get here if `to` is the root. 
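+          // (every remaining segment of `from` must be walked back up)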
+ // For example: from='/foo'; to='/' + lastCommonSep = 0; + } + } + break; + } + var fromCode = from.charCodeAt(fromStart + i); + var toCode = to.charCodeAt(toStart + i); + if (fromCode !== toCode) + break; + else if (fromCode === 47 /*/*/) + lastCommonSep = i; + } + + var out = ''; + // Generate the relative path based on the path difference between `to` + // and `from` + for (i = fromStart + lastCommonSep + 1; i <= fromEnd; ++i) { + if (i === fromEnd || from.charCodeAt(i) === 47 /*/*/) { + if (out.length === 0) + out += '..'; + else + out += '/..'; + } + } + + // Lastly, append the rest of the destination (`to`) path that comes after + // the common path parts + if (out.length > 0) + return out + to.slice(toStart + lastCommonSep); + else { + toStart += lastCommonSep; + if (to.charCodeAt(toStart) === 47 /*/*/) + ++toStart; + return to.slice(toStart); + } + }, + + _makeLong: function _makeLong(path) { + return path; + }, + + dirname: function dirname(path) { + assertPath(path); + if (path.length === 0) return '.'; + var code = path.charCodeAt(0); + var hasRoot = code === 47 /*/*/; + var end = -1; + var matchedSlash = true; + for (var i = path.length - 1; i >= 1; --i) { + code = path.charCodeAt(i); + if (code === 47 /*/*/) { + if (!matchedSlash) { + end = i; + break; + } + } else { + // We saw the first non-path separator + matchedSlash = false; + } + } + + if (end === -1) return hasRoot ? '/' : '.'; + if (hasRoot && end === 1) return '//'; + return path.slice(0, end); + }, + + basename: function basename(path, ext) { + if (ext !== undefined && typeof ext !== 'string') throw new TypeError('"ext" argument must be a string'); + assertPath(path); + + var start = 0; + var end = -1; + var matchedSlash = true; + var i; + + if (ext !== undefined && ext.length > 0 && ext.length <= path.length) { + if (ext.length === path.length && ext === path) return ''; + var extIdx = ext.length - 1; + var firstNonSlashEnd = -1; + for (i = path.length - 1; i >= 0; --i) { + var code = path.charCodeAt(i); + if (code === 47 /*/*/) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else { + if (firstNonSlashEnd === -1) { + // We saw the first non-path separator, remember this index in case + // we need it if the extension ends up not matching + matchedSlash = false; + firstNonSlashEnd = i + 1; + } + if (extIdx >= 0) { + // Try to match the explicit extension + if (code === ext.charCodeAt(extIdx)) { + if (--extIdx === -1) { + // We matched the extension, so mark this as the end of our path + // component + end = i; + } + } else { + // Extension does not match, so our result is the entire path + // component + extIdx = -1; + end = firstNonSlashEnd; + } + } + } + } + + if (start === end) end = firstNonSlashEnd;else if (end === -1) end = path.length; + return path.slice(start, end); + } else { + for (i = path.length - 1; i >= 0; --i) { + if (path.charCodeAt(i) === 47 /*/*/) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // path component + matchedSlash = false; + end = i + 1; + } + } + + if (end === -1) return ''; + return path.slice(start, end); + } + }, + + extname: function extname(path) { + assertPath(path); + var startDot = -1; + var startPart = 0; + var end = 
-1; + var matchedSlash = true; + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + var preDotState = 0; + for (var i = path.length - 1; i >= 0; --i) { + var code = path.charCodeAt(i); + if (code === 47 /*/*/) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (code === 46 /*.*/) { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) + startDot = i; + else if (preDotState !== 1) + preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState = -1; + } + } + + if (startDot === -1 || end === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' + preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) { + return ''; + } + return path.slice(startDot, end); + }, + + format: function format(pathObject) { + if (pathObject === null || typeof pathObject !== 'object') { + throw new TypeError('The "pathObject" argument must be of type Object. Received type ' + typeof pathObject); + } + return _format('/', pathObject); + }, + + parse: function parse(path) { + assertPath(path); + + var ret = { root: '', dir: '', base: '', ext: '', name: '' }; + if (path.length === 0) return ret; + var code = path.charCodeAt(0); + var isAbsolute = code === 47 /*/*/; + var start; + if (isAbsolute) { + ret.root = '/'; + start = 1; + } else { + start = 0; + } + var startDot = -1; + var startPart = 0; + var end = -1; + var matchedSlash = true; + var i = path.length - 1; + + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + var preDotState = 0; + + // Get non-dir info + for (; i >= start; --i) { + code = path.charCodeAt(i); + if (code === 47 /*/*/) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (code === 46 /*.*/) { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) startDot = i;else if (preDotState !== 1) preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState = -1; + } + } + + if (startDot === -1 || end === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' 
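+      // (e.g. parse('..') yields base = name = '..' and an empty ext)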
+ preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) { + if (end !== -1) { + if (startPart === 0 && isAbsolute) ret.base = ret.name = path.slice(1, end);else ret.base = ret.name = path.slice(startPart, end); + } + } else { + if (startPart === 0 && isAbsolute) { + ret.name = path.slice(1, startDot); + ret.base = path.slice(1, end); + } else { + ret.name = path.slice(startPart, startDot); + ret.base = path.slice(startPart, end); + } + ret.ext = path.slice(startDot, end); + } + + if (startPart > 0) ret.dir = path.slice(0, startPart - 1);else if (isAbsolute) ret.dir = '/'; + + return ret; + }, + + sep: '/', + delimiter: ':', + win32: null, + posix: null +}; + +posix.posix = posix; + +module.exports = posix; + + /***/ }), /***/ 64810: @@ -72573,6 +74582,31 @@ module.exports = (input, options) => { }; +/***/ }), + +/***/ 93183: +/***/ ((module) => { + +"use strict"; + + +/** @type {import('.')} */ +module.exports = [ + 'Float16Array', + 'Float32Array', + 'Float64Array', + 'Int8Array', + 'Int16Array', + 'Int32Array', + 'Uint8Array', + 'Uint8ClampedArray', + 'Uint16Array', + 'Uint32Array', + 'BigInt64Array', + 'BigUint64Array' +]; + + /***/ }), /***/ 71604: @@ -75487,92 +77521,146 @@ const validRange = (range, options) => { module.exports = validRange +/***/ }), + +/***/ 64056: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var GetIntrinsic = __nccwpck_require__(74538); +var define = __nccwpck_require__(54564); +var hasDescriptors = __nccwpck_require__(90176)(); +var gOPD = __nccwpck_require__(18501); + +var $TypeError = __nccwpck_require__(6361); +var $floor = GetIntrinsic('%Math.floor%'); + +/** @type {import('.')} */ +module.exports = function setFunctionLength(fn, length) { + if (typeof fn !== 'function') { + throw new $TypeError('`fn` is not a function'); + } + if (typeof length !== 'number' || length < 0 || length > 0xFFFFFFFF || $floor(length) !== length) { + throw new $TypeError('`length` must be a positive 32-bit integer'); + } + + var loose = arguments.length > 2 && !!arguments[2]; + + var functionLengthIsConfigurable = true; + var functionLengthIsWritable = true; + if ('length' in fn && gOPD) { + var desc = gOPD(fn, 'length'); + if (desc && !desc.configurable) { + functionLengthIsConfigurable = false; + } + if (desc && !desc.writable) { + functionLengthIsWritable = false; + } + } + + if (functionLengthIsConfigurable || functionLengthIsWritable || !loose) { + if (hasDescriptors) { + define(/** @type {Parameters[0]} */ (fn), 'length', length, true, true); + } else { + define(/** @type {Parameters[0]} */ (fn), 'length', length); + } + } + return fn; +}; + + /***/ }), /***/ 13251: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -var Buffer = (__nccwpck_require__(21867).Buffer) +"use strict"; + + +var Buffer = (__nccwpck_require__(21867).Buffer); +var toBuffer = __nccwpck_require__(61259); // prototype class for hash functions -function Hash (blockSize, finalSize) { - this._block = Buffer.alloc(blockSize) - this._finalSize = finalSize - this._blockSize = blockSize - this._len = 0 +function Hash(blockSize, finalSize) { + this._block = Buffer.alloc(blockSize); + this._finalSize = finalSize; + this._blockSize = blockSize; + this._len = 0; } Hash.prototype.update = function (data, enc) { - if (typeof data === 'string') { - enc = enc || 'utf8' - data = Buffer.from(data, enc) - } + /* eslint no-param-reassign: 0 */ + data = toBuffer(data, enc || 'utf8'); - var block = this._block - var blockSize = this._blockSize 
- var length = data.length - var accum = this._len + var block = this._block; + var blockSize = this._blockSize; + var length = data.length; + var accum = this._len; - for (var offset = 0; offset < length;) { - var assigned = accum % blockSize - var remainder = Math.min(length - offset, blockSize - assigned) + for (var offset = 0; offset < length;) { + var assigned = accum % blockSize; + var remainder = Math.min(length - offset, blockSize - assigned); - for (var i = 0; i < remainder; i++) { - block[assigned + i] = data[offset + i] - } + for (var i = 0; i < remainder; i++) { + block[assigned + i] = data[offset + i]; + } - accum += remainder - offset += remainder + accum += remainder; + offset += remainder; - if ((accum % blockSize) === 0) { - this._update(block) - } - } + if ((accum % blockSize) === 0) { + this._update(block); + } + } - this._len += length - return this -} + this._len += length; + return this; +}; Hash.prototype.digest = function (enc) { - var rem = this._len % this._blockSize + var rem = this._len % this._blockSize; - this._block[rem] = 0x80 + this._block[rem] = 0x80; - // zero (rem + 1) trailing bits, where (rem + 1) is the smallest - // non-negative solution to the equation (length + 1 + (rem + 1)) === finalSize mod blockSize - this._block.fill(0, rem + 1) + /* + * zero (rem + 1) trailing bits, where (rem + 1) is the smallest + * non-negative solution to the equation (length + 1 + (rem + 1)) === finalSize mod blockSize + */ + this._block.fill(0, rem + 1); - if (rem >= this._finalSize) { - this._update(this._block) - this._block.fill(0) - } + if (rem >= this._finalSize) { + this._update(this._block); + this._block.fill(0); + } - var bits = this._len * 8 + var bits = this._len * 8; - // uint32 - if (bits <= 0xffffffff) { - this._block.writeUInt32BE(bits, this._blockSize - 4) + // uint32 + if (bits <= 0xffffffff) { + this._block.writeUInt32BE(bits, this._blockSize - 4); - // uint64 - } else { - var lowBits = (bits & 0xffffffff) >>> 0 - var highBits = (bits - lowBits) / 0x100000000 + // uint64 + } else { + var lowBits = (bits & 0xffffffff) >>> 0; + var highBits = (bits - lowBits) / 0x100000000; - this._block.writeUInt32BE(highBits, this._blockSize - 8) - this._block.writeUInt32BE(lowBits, this._blockSize - 4) - } + this._block.writeUInt32BE(highBits, this._blockSize - 8); + this._block.writeUInt32BE(lowBits, this._blockSize - 4); + } - this._update(this._block) - var hash = this._hash() + this._update(this._block); + var hash = this._hash(); - return enc ? hash.toString(enc) : hash -} + return enc ? hash.toString(enc) : hash; +}; Hash.prototype._update = function () { - throw new Error('_update must be implemented by subclass') -} + throw new Error('_update must be implemented by subclass'); +}; -module.exports = Hash +module.exports = Hash; /***/ }), @@ -75580,6 +77668,9 @@ module.exports = Hash /***/ 72398: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +"use strict"; + + /* * A JavaScript implementation of the Secure Hash Algorithm, SHA-1, as defined * in FIPS PUB 180-1 @@ -75589,96 +77680,104 @@ module.exports = Hash * See http://pajhome.org.uk/crypt/md5 for details. 
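 *
 * Example (FIPS 180-1 "abc" test vector; added here for illustration):
 *   new Sha1().update('abc').digest('hex')
 *   // -> 'a9993e364706816aba3e25717850c26c9cd0d89d'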
*/ -var inherits = __nccwpck_require__(44124) -var Hash = __nccwpck_require__(13251) -var Buffer = (__nccwpck_require__(21867).Buffer) +var inherits = __nccwpck_require__(44124); +var Hash = __nccwpck_require__(13251); +var Buffer = (__nccwpck_require__(21867).Buffer); var K = [ - 0x5a827999, 0x6ed9eba1, 0x8f1bbcdc | 0, 0xca62c1d6 | 0 -] + 0x5a827999, 0x6ed9eba1, 0x8f1bbcdc | 0, 0xca62c1d6 | 0 +]; -var W = new Array(80) +var W = new Array(80); -function Sha1 () { - this.init() - this._w = W +function Sha1() { + this.init(); + this._w = W; - Hash.call(this, 64, 56) + Hash.call(this, 64, 56); } -inherits(Sha1, Hash) +inherits(Sha1, Hash); Sha1.prototype.init = function () { - this._a = 0x67452301 - this._b = 0xefcdab89 - this._c = 0x98badcfe - this._d = 0x10325476 - this._e = 0xc3d2e1f0 + this._a = 0x67452301; + this._b = 0xefcdab89; + this._c = 0x98badcfe; + this._d = 0x10325476; + this._e = 0xc3d2e1f0; - return this -} + return this; +}; -function rotl1 (num) { - return (num << 1) | (num >>> 31) +function rotl1(num) { + return (num << 1) | (num >>> 31); } -function rotl5 (num) { - return (num << 5) | (num >>> 27) +function rotl5(num) { + return (num << 5) | (num >>> 27); } -function rotl30 (num) { - return (num << 30) | (num >>> 2) +function rotl30(num) { + return (num << 30) | (num >>> 2); } -function ft (s, b, c, d) { - if (s === 0) return (b & c) | ((~b) & d) - if (s === 2) return (b & c) | (b & d) | (c & d) - return b ^ c ^ d +function ft(s, b, c, d) { + if (s === 0) { + return (b & c) | (~b & d); + } + if (s === 2) { + return (b & c) | (b & d) | (c & d); + } + return b ^ c ^ d; } Sha1.prototype._update = function (M) { - var W = this._w + var w = this._w; - var a = this._a | 0 - var b = this._b | 0 - var c = this._c | 0 - var d = this._d | 0 - var e = this._e | 0 + var a = this._a | 0; + var b = this._b | 0; + var c = this._c | 0; + var d = this._d | 0; + var e = this._e | 0; - for (var i = 0; i < 16; ++i) W[i] = M.readInt32BE(i * 4) - for (; i < 80; ++i) W[i] = rotl1(W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16]) + for (var i = 0; i < 16; ++i) { + w[i] = M.readInt32BE(i * 4); + } + for (; i < 80; ++i) { + w[i] = rotl1(w[i - 3] ^ w[i - 8] ^ w[i - 14] ^ w[i - 16]); + } - for (var j = 0; j < 80; ++j) { - var s = ~~(j / 20) - var t = (rotl5(a) + ft(s, b, c, d) + e + W[j] + K[s]) | 0 + for (var j = 0; j < 80; ++j) { + var s = ~~(j / 20); + var t = (rotl5(a) + ft(s, b, c, d) + e + w[j] + K[s]) | 0; - e = d - d = c - c = rotl30(b) - b = a - a = t - } + e = d; + d = c; + c = rotl30(b); + b = a; + a = t; + } - this._a = (a + this._a) | 0 - this._b = (b + this._b) | 0 - this._c = (c + this._c) | 0 - this._d = (d + this._d) | 0 - this._e = (e + this._e) | 0 -} + this._a = (a + this._a) | 0; + this._b = (b + this._b) | 0; + this._c = (c + this._c) | 0; + this._d = (d + this._d) | 0; + this._e = (e + this._e) | 0; +}; Sha1.prototype._hash = function () { - var H = Buffer.allocUnsafe(20) + var H = Buffer.allocUnsafe(20); - H.writeInt32BE(this._a | 0, 0) - H.writeInt32BE(this._b | 0, 4) - H.writeInt32BE(this._c | 0, 8) - H.writeInt32BE(this._d | 0, 12) - H.writeInt32BE(this._e | 0, 16) + H.writeInt32BE(this._a | 0, 0); + H.writeInt32BE(this._b | 0, 4); + H.writeInt32BE(this._c | 0, 8); + H.writeInt32BE(this._d | 0, 12); + H.writeInt32BE(this._e | 0, 16); - return H -} + return H; +}; -module.exports = Sha1 +module.exports = Sha1; /***/ }), @@ -79013,6 +81112,135 @@ module.exports = { }; +/***/ }), + +/***/ 61259: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + 
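+// Illustrative behavior (assumed from the checks below, matching Node
+// Buffer semantics; these examples are not from the original source):
+//   toBuffer('abc', 'utf8')          -> <Buffer 61 62 63>
+//   toBuffer(new Uint8Array([1, 2])) -> <Buffer 01 02>
+//   toBuffer([1, 256])               -> throws RangeError (items must be 0-255)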
+ +var Buffer = (__nccwpck_require__(21867).Buffer); +var isArray = __nccwpck_require__(22715); +var typedArrayBuffer = __nccwpck_require__(24867); + +var isView = ArrayBuffer.isView || function isView(obj) { + try { + typedArrayBuffer(obj); + return true; + } catch (e) { + return false; + } +}; + +var useUint8Array = typeof Uint8Array !== 'undefined'; +var useArrayBuffer = typeof ArrayBuffer !== 'undefined' + && typeof Uint8Array !== 'undefined'; +var useFromArrayBuffer = useArrayBuffer && (Buffer.prototype instanceof Uint8Array || Buffer.TYPED_ARRAY_SUPPORT); + +module.exports = function toBuffer(data, encoding) { + if (Buffer.isBuffer(data)) { + if (data.constructor && !('isBuffer' in data)) { + // probably a SlowBuffer + return Buffer.from(data); + } + return data; + } + + if (typeof data === 'string') { + return Buffer.from(data, encoding); + } + + /* + * Wrap any TypedArray instances and DataViews + * Makes sense only on engines with full TypedArray support -- let Buffer detect that + */ + if (useArrayBuffer && isView(data)) { + // Bug in Node.js <6.3.1, which treats this as out-of-bounds + if (data.byteLength === 0) { + return Buffer.alloc(0); + } + + // When Buffer is based on Uint8Array, we can just construct it from ArrayBuffer + if (useFromArrayBuffer) { + var res = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + /* + * Recheck result size, as offset/length doesn't work on Node.js <5.10 + * We just go to Uint8Array case if this fails + */ + if (res.byteLength === data.byteLength) { + return res; + } + } + + // Convert to Uint8Array bytes and then to Buffer + var uint8 = data instanceof Uint8Array ? data : new Uint8Array(data.buffer, data.byteOffset, data.byteLength); + var result = Buffer.from(uint8); + + /* + * Let's recheck that conversion succeeded + * We have .length but not .byteLength when useFromArrayBuffer is false + */ + if (result.length === data.byteLength) { + return result; + } + } + + /* + * Uint8Array in engines where Buffer.from might not work with ArrayBuffer, just copy over + * Doesn't make sense with other TypedArray instances + */ + if (useUint8Array && data instanceof Uint8Array) { + return Buffer.from(data); + } + + var isArr = isArray(data); + if (isArr) { + for (var i = 0; i < data.length; i += 1) { + var x = data[i]; + if ( + typeof x !== 'number' + || x < 0 + || x > 255 + || ~~x !== x // NaN and integer check + ) { + throw new RangeError('Array items must be numbers in the range 0-255.'); + } + } + } + + /* + * Old Buffer polyfill on an engine that doesn't have TypedArray support + * Also, this is from a different Buffer polyfill implementation then we have, as instanceof check failed + * Convert to our current Buffer implementation + */ + if ( + isArr || ( + Buffer.isBuffer(data) + && data.constructor + && typeof data.constructor.isBuffer === 'function' + && data.constructor.isBuffer(data) + ) + ) { + return Buffer.from(data); + } + + throw new TypeError('The "data" argument must be a string, an Array, a Buffer, a Uint8Array, or a DataView.'); +}; + + +/***/ }), + +/***/ 22715: +/***/ ((module) => { + +var toString = {}.toString; + +module.exports = Array.isArray || function (arr) { + return toString.call(arr) == '[object Array]'; +}; + + /***/ }), /***/ 84256: @@ -79494,6 +81722,33 @@ if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { exports.debug = debug; // for test +/***/ }), + +/***/ 24867: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var $TypeError = 
__nccwpck_require__(6361); + +var callBound = __nccwpck_require__(71785); + +/** @type {undefined | ((thisArg: import('.').TypedArray) => Buffer)} */ +var $typedArrayBuffer = callBound('TypedArray.prototype.buffer', true); + +var isTypedArray = __nccwpck_require__(64486); + +/** @type {import('.')} */ +// node <= 0.10, < 0.11.4 has a nonconfigurable own property instead of a prototype getter +module.exports = $typedArrayBuffer || function typedArrayBuffer(x) { + if (!isTypedArray(x)) { + throw new $TypeError('Not a Typed Array'); + } + return x.buffer; +}; + + /***/ }), /***/ 41773: @@ -104521,6 +106776,131 @@ module.exports.implForWrapper = function (wrapper) { +/***/ }), + +/***/ 51257: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var forEach = __nccwpck_require__(43710); +var availableTypedArrays = __nccwpck_require__(71527); +var callBind = __nccwpck_require__(62977); +var callBound = __nccwpck_require__(71785); +var gOPD = __nccwpck_require__(18501); +var getProto = __nccwpck_require__(13592); + +var $toString = callBound('Object.prototype.toString'); +var hasToStringTag = __nccwpck_require__(99038)(); + +var g = typeof globalThis === 'undefined' ? global : globalThis; +var typedArrays = availableTypedArrays(); + +var $slice = callBound('String.prototype.slice'); + +/** @type {(array: readonly T[], value: unknown) => number} */ +var $indexOf = callBound('Array.prototype.indexOf', true) || function indexOf(array, value) { + for (var i = 0; i < array.length; i += 1) { + if (array[i] === value) { + return i; + } + } + return -1; +}; + +/** @typedef {import('./types').Getter} Getter */ +/** @type {import('./types').Cache} */ +var cache = { __proto__: null }; +if (hasToStringTag && gOPD && getProto) { + forEach(typedArrays, function (typedArray) { + var arr = new g[typedArray](); + if (Symbol.toStringTag in arr && getProto) { + var proto = getProto(arr); + // @ts-expect-error TS won't narrow inside a closure + var descriptor = gOPD(proto, Symbol.toStringTag); + if (!descriptor && proto) { + var superProto = getProto(proto); + // @ts-expect-error TS won't narrow inside a closure + descriptor = gOPD(superProto, Symbol.toStringTag); + } + // @ts-expect-error TODO: fix + cache['$' + typedArray] = callBind(descriptor.get); + } + }); +} else { + forEach(typedArrays, function (typedArray) { + var arr = new g[typedArray](); + var fn = arr.slice || arr.set; + if (fn) { + cache[ + /** @type {`$${import('.').TypedArrayName}`} */ ('$' + typedArray) + ] = /** @type {import('./types').BoundSlice | import('./types').BoundSet} */ ( + // @ts-expect-error TODO FIXME + callBind(fn) + ); + } + }); +} + +/** @type {(value: object) => false | import('.').TypedArrayName} */ +var tryTypedArrays = function tryAllTypedArrays(value) { + /** @type {ReturnType} */ var found = false; + forEach( + /** @type {Record<`\$${import('.').TypedArrayName}`, Getter>} */ (cache), + /** @type {(getter: Getter, name: `\$${import('.').TypedArrayName}`) => void} */ + function (getter, typedArray) { + if (!found) { + try { + // @ts-expect-error a throw is fine here + if ('$' + getter(value) === typedArray) { + found = /** @type {import('.').TypedArrayName} */ ($slice(typedArray, 1)); + } + } catch (e) { /**/ } + } + } + ); + return found; +}; + +/** @type {(value: object) => false | import('.').TypedArrayName} */ +var trySlices = function tryAllSlices(value) { + /** @type {ReturnType} */ var found = false; + forEach( + /** @type {Record<`\$${import('.').TypedArrayName}`, Getter>} 
*/(cache), + /** @type {(getter: Getter, name: `\$${import('.').TypedArrayName}`) => void} */ function (getter, name) { + if (!found) { + try { + // @ts-expect-error a throw is fine here + getter(value); + found = /** @type {import('.').TypedArrayName} */ ($slice(name, 1)); + } catch (e) { /**/ } + } + } + ); + return found; +}; + +/** @type {import('.')} */ +module.exports = function whichTypedArray(value) { + if (!value || typeof value !== 'object') { return false; } + if (!hasToStringTag) { + /** @type {string} */ + var tag = $slice($toString(value), 8, -1); + if ($indexOf(typedArrays, tag) > -1) { + return tag; + } + if (tag !== 'Object') { + return false; + } + // node < 0.6 hits here on real Typed Arrays + return trySlices(value); + } + if (!gOPD) { return null; } // unknown engine + return tryTypedArrays(value); +}; + + /***/ }), /***/ 62940: @@ -110347,6 +112727,31 @@ module.exports = require("worker_threads"); "use strict"; module.exports = require("zlib"); +/***/ }), + +/***/ 71527: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + + +var possibleNames = __nccwpck_require__(93183); + +var g = typeof globalThis === 'undefined' ? global : globalThis; + +/** @type {import('.')} */ +module.exports = function availableTypedArrays() { + var /** @type {ReturnType} */ out = []; + for (var i = 0; i < possibleNames.length; i++) { + if (typeof g[possibleNames[i]] === 'function') { + // @ts-expect-error + out[out.length] = possibleNames[i]; + } + } + return out; +}; + + /***/ }), /***/ 8109: @@ -118963,11 +121368,13 @@ function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'defau var AsyncLock = _interopDefault(__nccwpck_require__(41542)); var Hash = _interopDefault(__nccwpck_require__(72398)); +var pathBrowserify = __nccwpck_require__(83501); var crc32 = _interopDefault(__nccwpck_require__(83201)); var pako = _interopDefault(__nccwpck_require__(31726)); var pify = _interopDefault(__nccwpck_require__(64810)); var ignore = _interopDefault(__nccwpck_require__(91230)); var cleanGitRef = _interopDefault(__nccwpck_require__(43268)); +var validRef = _interopDefault(__nccwpck_require__(8284)); var diff3Merge = _interopDefault(__nccwpck_require__(25211)); /** @@ -119267,6 +121674,11 @@ var diff3Merge = _interopDefault(__nccwpck_require__(25211)); */ /** + * @typedef {'push' | 'pop' | 'apply' | 'drop' | 'list' | 'clear'} StashOp the type of stash ops + */ + +/** + * @typedef {'equal' | 'modify' | 'add' | 'remove' | 'unknown'} StashChangeType - when compare WORDIR to HEAD, 'remove' could mean 'untracked' * @typedef {Object} ClientRef * @property {string} ref The name of the ref * @property {string} oid The SHA-1 object id the ref points to @@ -119595,7 +122007,7 @@ async function testSubtleSHA1() { // some browsers that have crypto.subtle.digest don't actually implement SHA-1. try { const hash = await subtleSHA1(new Uint8Array([])); - if (hash === 'da39a3ee5e6b4b0d3255bfef95601890afd80709') return true + return hash === 'da39a3ee5e6b4b0d3255bfef95601890afd80709' } catch (_) { // no bother } @@ -119913,8 +122325,6 @@ function compareStats(entry, stats, filemode = true, trustino = true) { return staleness } -// import LockManager from 'travix-lock-manager' - // import Lock from '../utils.js' // const lm = new LockManager() @@ -119922,6 +122332,10 @@ let lock = null; const IndexCache = Symbol('IndexCache'); +/** + * Creates a cache object to store GitIndex and file stats. + * @returns {object} A cache object with `map` and `stats` properties. 
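+ * @example
+ * // illustrative shape; both maps are keyed by the index filepath
+ * // { map: Map(filepath -> GitIndex), stats: Map(filepath -> Stats) }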
+ */ function createCache() { return { map: new Map(), @@ -119929,9 +122343,19 @@ function createCache() { } } +/** + * Updates the cached index file by reading the file system and parsing the Git index. + * @param {FSClient} fs - A file system implementation. + * @param {string} filepath - The path to the Git index file. + * @param {object} cache - The cache object to update. + * @returns {Promise} + */ async function updateCachedIndexFile(fs, filepath, cache) { - const stat = await fs.lstat(filepath); - const rawIndexFile = await fs.read(filepath); + const [stat, rawIndexFile] = await Promise.all([ + fs.lstat(filepath), + fs.read(filepath), + ]); + const index = await GitIndex.from(rawIndexFile); // cache the GitIndex object so we don't need to re-read it every time. cache.map.set(filepath, index); @@ -119939,28 +122363,40 @@ async function updateCachedIndexFile(fs, filepath, cache) { cache.stats.set(filepath, stat); } -// Determine whether our copy of the index file is stale +/** + * Determines whether the cached index file is stale by comparing file stats. + * @param {FSClient} fs - A file system implementation. + * @param {string} filepath - The path to the Git index file. + * @param {object} cache - The cache object containing file stats. + * @returns {Promise} `true` if the index file is stale, otherwise `false`. + */ async function isIndexStale(fs, filepath, cache) { const savedStats = cache.stats.get(filepath); if (savedStats === undefined) return true - const currStats = await fs.lstat(filepath); if (savedStats === null) return false + + const currStats = await fs.lstat(filepath); if (currStats === null) return false return compareStats(savedStats, currStats) } class GitIndexManager { /** + * Manages access to the Git index file, ensuring thread-safe operations and caching. * - * @param {object} opts - * @param {import('../models/FileSystem.js').FileSystem} opts.fs - * @param {string} opts.gitdir - * @param {object} opts.cache - * @param {bool} opts.allowUnmerged - * @param {function(GitIndex): any} closure + * @param {object} opts - Options for acquiring the Git index. + * @param {FSClient} opts.fs - A file system implementation. + * @param {string} opts.gitdir - The path to the `.git` directory. + * @param {object} opts.cache - A shared cache object for storing index data. + * @param {boolean} [opts.allowUnmerged=true] - Whether to allow unmerged paths in the index. + * @param {function(GitIndex): any} closure - A function to execute with the Git index. + * @returns {Promise} The result of the closure function. + * @throws {UnmergedPathsError} If unmerged paths exist and `allowUnmerged` is `false`. */ static async acquire({ fs, gitdir, cache, allowUnmerged = true }, closure) { - if (!cache[IndexCache]) cache[IndexCache] = createCache(); + if (!cache[IndexCache]) { + cache[IndexCache] = createCache(); + } const filepath = `${gitdir}/index`; if (lock === null) lock = new AsyncLock({ maxPending: Infinity }); @@ -119971,10 +122407,11 @@ class GitIndexManager { // to make sure other processes aren't writing to it // simultaneously, which could result in a corrupted index. 
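       // Typical call shape (illustrative):
       //   await GitIndexManager.acquire({ fs, gitdir, cache }, async index => {
       //     index.insert({ filepath, stats, oid })
       //   })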
// const fileLock = await Lock(filepath) - if (await isIndexStale(fs, filepath, cache[IndexCache])) { - await updateCachedIndexFile(fs, filepath, cache[IndexCache]); + const theIndexCache = cache[IndexCache]; + if (await isIndexStale(fs, filepath, theIndexCache)) { + await updateCachedIndexFile(fs, filepath, theIndexCache); } - const index = cache[IndexCache].map.get(filepath); + const index = theIndexCache.map.get(filepath); unmergedPaths = index.unmergedPaths; if (unmergedPaths.length && !allowUnmerged) @@ -119987,7 +122424,7 @@ class GitIndexManager { const buffer = await index.toObject(); await fs.write(filepath, buffer); // Update cached stat value - cache[IndexCache].stats.set(filepath, await fs.lstat(filepath)); + theIndexCache.stats.set(filepath, await fs.lstat(filepath)); index._dirty = false; } }); @@ -120438,42 +122875,12 @@ function compareRefNames(a, b) { return tmp } -const memo = new Map(); -function normalizePath(path) { - let normalizedPath = memo.get(path); - if (!normalizedPath) { - normalizedPath = normalizePathInternal(path); - memo.set(path, normalizedPath); - } - return normalizedPath -} - -function normalizePathInternal(path) { - path = path - .split('/./') - .join('/') // Replace '/./' with '/' - .replace(/\/{2,}/g, '/'); // Replace consecutive '/' - - if (path === '/.') return '/' // if path === '/.' return '/' - if (path === './') return '.' // if path === './' return '.' - - if (path.startsWith('./')) path = path.slice(2); // Remove leading './' - if (path.endsWith('/.')) path = path.slice(0, -2); // Remove trailing '/.' - if (path.length > 1 && path.endsWith('/')) path = path.slice(0, -1); // Remove trailing '/' - - if (path === '') return '.' // if path === '' return '.' - - return path -} - -// For some reason path.posix.join is undefined in webpack - -function join(...parts) { - return normalizePath(parts.map(normalizePath).join('/')) -} - // This is straight from parse_unit_factor in config.c of canonical git const num = val => { + if (typeof val === 'number') { + return val + } + val = val.toLowerCase(); let n = parseInt(val); if (val.endsWith('k')) n *= 1024; @@ -120484,6 +122891,10 @@ const num = val => { // This is straight from git_parse_maybe_bool_text in config.c of canonical git const bool = val => { + if (typeof val === 'boolean') { + return val + } + val = val.trim().toLowerCase(); if (val === 'true' || val === 'yes' || val === 'on') return true if (val === 'false' || val === 'no' || val === 'off') return false @@ -120587,7 +122998,7 @@ const getPath = (section, subsection, name) => { .join('.') }; -const normalizePath$1 = path => { +const normalizePath = path => { const pathSegments = path.split('.'); const section = pathSegments.shift(); const name = pathSegments.pop(); @@ -120599,6 +123010,7 @@ const normalizePath$1 = path => { name, path: getPath(section, subsection, name), sectionPath: getPath(section, subsection, null), + isSection: !!section, } }; @@ -120643,7 +123055,7 @@ class GitConfig { } async get(path, getall = false) { - const normalizedPath = normalizePath$1(path).path; + const normalizedPath = normalizePath(path).path; const allValues = this.parsedConfig .filter(config => config.path === normalizedPath) .map(({ section, name, value }) => { @@ -120659,7 +123071,7 @@ class GitConfig { async getSubsections(section) { return this.parsedConfig - .filter(config => config.section === section && config.isSection) + .filter(config => config.isSection && config.section === section) .map(config => config.subsection) } @@ -120681,7 +123093,9 
@@ class GitConfig { name, path: normalizedPath, sectionPath, - } = normalizePath$1(path); + isSection, + } = normalizePath(path); + const configIndex = findLastIndex( this.parsedConfig, config => config.path === normalizedPath @@ -120723,6 +123137,7 @@ class GitConfig { } else { // Add a new section const newSection = { + isSection, section, subsection, modified: true, @@ -120757,7 +123172,18 @@ class GitConfig { } } +/** + * Manages access to the Git configuration file, providing methods to read and save configurations. + */ class GitConfigManager { + /** + * Reads the Git configuration file from the specified `.git` directory. + * + * @param {object} opts - Options for reading the Git configuration. + * @param {FSClient} opts.fs - A file system implementation. + * @param {string} opts.gitdir - The path to the `.git` directory. + * @returns {Promise} A `GitConfig` object representing the parsed configuration. + */ static async get({ fs, gitdir }) { // We can improve efficiency later if needed. // TODO: read from full list of git config files @@ -120765,6 +123191,15 @@ class GitConfigManager { return GitConfig.from(text) } + /** + * Saves the provided Git configuration to the specified `.git` directory. + * + * @param {object} opts - Options for saving the Git configuration. + * @param {FSClient} opts.fs - A file system implementation. + * @param {string} opts.gitdir - The path to the `.git` directory. + * @param {GitConfig} opts.config - The `GitConfig` object to save. + * @returns {Promise} Resolves when the configuration has been successfully saved. + */ static async save({ fs, gitdir, config }) { // We can improve efficiency later if needed. // TODO: handle saving to the correct global/user/repo location @@ -120774,8 +123209,6 @@ class GitConfigManager { } } -// This is a convenience wrapper for reading and writing files in the 'refs' directory. - // @see https://git-scm.com/docs/git-rev-parse.html#_specifying_revisions const refpaths = ref => [ `${ref}`, @@ -120796,7 +123229,25 @@ async function acquireLock(ref, callback) { return lock$1.acquire(ref, callback) } +/** + * A class for managing Git references, including reading, writing, deleting, and resolving refs. + */ class GitRefManager { + /** + * Updates remote refs based on the provided refspecs and options. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. + * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.remote - The name of the remote. + * @param {Map} args.refs - A map of refs to their object IDs. + * @param {Map} args.symrefs - A map of symbolic refs. + * @param {boolean} args.tags - Whether to fetch tags. + * @param {string[]} [args.refspecs = undefined] - The refspecs to use. + * @param {boolean} [args.prune = false] - Whether to prune stale refs. + * @param {boolean} [args.pruneTags = false] - Whether to prune tags. + * @returns {Promise} - An object containing pruned refs. + */ static async updateRemoteRefs({ fs, gitdir, @@ -120903,12 +123354,22 @@ class GitRefManager { // and .git/refs/remotes/origin/refs/merge-requests for (const [key, value] of actualRefsToWrite) { await acquireLock(key, async () => - fs.write(join(gitdir, key), `${value.trim()}\n`, 'utf8') + fs.write(pathBrowserify.join(gitdir, key), `${value.trim()}\n`, 'utf8') ); } return { pruned } } + /** + * Writes a ref to the file system. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. 
+ * @param {string} [args.gitdir] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.ref - The ref to write. + * @param {string} args.value - The object ID to write. + * @returns {Promise} + */ // TODO: make this less crude? static async writeRef({ fs, gitdir, ref, value }) { // Validate input @@ -120916,23 +123377,51 @@ class GitRefManager { throw new InvalidOidError(value) } await acquireLock(ref, async () => - fs.write(join(gitdir, ref), `${value.trim()}\n`, 'utf8') + fs.write(pathBrowserify.join(gitdir, ref), `${value.trim()}\n`, 'utf8') ); } + /** + * Writes a symbolic ref to the file system. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. + * @param {string} [args.gitdir] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.ref - The ref to write. + * @param {string} args.value - The target ref. + * @returns {Promise} + */ static async writeSymbolicRef({ fs, gitdir, ref, value }) { await acquireLock(ref, async () => - fs.write(join(gitdir, ref), 'ref: ' + `${value.trim()}\n`, 'utf8') + fs.write(pathBrowserify.join(gitdir, ref), 'ref: ' + `${value.trim()}\n`, 'utf8') ); } + /** + * Deletes a single ref. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. + * @param {string} [args.gitdir] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.ref - The ref to delete. + * @returns {Promise} + */ static async deleteRef({ fs, gitdir, ref }) { return GitRefManager.deleteRefs({ fs, gitdir, refs: [ref] }) } + /** + * Deletes multiple refs. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. + * @param {string} [args.gitdir] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string[]} args.refs - The refs to delete. + * @returns {Promise} + */ static async deleteRefs({ fs, gitdir, refs }) { // Delete regular ref - await Promise.all(refs.map(ref => fs.rm(join(gitdir, ref)))); + await Promise.all(refs.map(ref => fs.rm(pathBrowserify.join(gitdir, ref)))); // Delete any packed ref let text = await acquireLock('packed-refs', async () => fs.read(`${gitdir}/packed-refs`, { encoding: 'utf8' }) @@ -120953,12 +123442,14 @@ class GitRefManager { } /** - * @param {object} args - * @param {import('../models/FileSystem.js').FileSystem} args.fs - * @param {string} args.gitdir - * @param {string} args.ref - * @param {number} [args.depth] - * @returns {Promise} + * Resolves a ref to its object ID. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. + * @param {string} [args.gitdir] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.ref - The ref to resolve. + * @param {number} [args.depth = undefined] - The maximum depth to resolve symbolic refs. + * @returns {Promise} - The resolved object ID. */ static async resolve({ fs, gitdir, ref, depth = undefined }) { if (depth !== undefined) { @@ -120997,6 +123488,15 @@ class GitRefManager { throw new NotFoundError(ref) } + /** + * Checks if a ref exists. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. + * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.ref - The ref to check. + * @returns {Promise} - True if the ref exists, false otherwise. 
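+   * @example
+   * // illustrative: check for a local branch ref
+   * const hasMain = await GitRefManager.exists({ fs, gitdir, ref: 'refs/heads/main' })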
+ */ static async exists({ fs, gitdir, ref }) { try { await GitRefManager.expand({ fs, gitdir, ref }); @@ -121006,6 +123506,15 @@ class GitRefManager { } } + /** + * Expands a ref to its full name. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. + * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.ref - The ref to expand. + * @returns {Promise} - The full ref name. + */ static async expand({ fs, gitdir, ref }) { // Is it a complete and valid SHA? if (ref.length === 40 && /[0-9a-f]{40}/.test(ref)) { @@ -121026,6 +123535,14 @@ class GitRefManager { throw new NotFoundError(ref) } + /** + * Expands a ref against a provided map. + * + * @param {Object} args + * @param {string} args.ref - The ref to expand. + * @param {Map} args.map - The map of refs. + * @returns {Promise} - The expanded ref. + */ static async expandAgainstMap({ ref, map }) { // Look in all the proper paths, in this order const allpaths = refpaths(ref); @@ -121036,6 +123553,16 @@ class GitRefManager { throw new NotFoundError(ref) } + /** + * Resolves a ref against a provided map. + * + * @param {Object} args + * @param {string} args.ref - The ref to resolve. + * @param {string} [args.fullref = args.ref] - The full ref name. + * @param {number} [args.depth = undefined] - The maximum depth to resolve symbolic refs. + * @param {Map} args.map - The map of refs. + * @returns {Object} - An object containing the full ref and its object ID. + */ static resolveAgainstMap({ ref, fullref = ref, depth = undefined, map }) { if (depth !== undefined) { depth--; @@ -121069,6 +123596,14 @@ class GitRefManager { throw new NotFoundError(ref) } + /** + * Reads the packed refs file and returns a map of refs. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. + * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @returns {Promise>} - A map of packed refs. + */ static async packedRefs({ fs, gitdir }) { const text = await acquireLock('packed-refs', async () => fs.read(`${gitdir}/packed-refs`, { encoding: 'utf8' }) @@ -121077,7 +123612,15 @@ class GitRefManager { return packed.refs } - // List all the refs that match the `filepath` prefix + /** + * Lists all refs matching a given filepath prefix. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. + * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.filepath - The filepath prefix to match. + * @returns {Promise} - A sorted list of refs. + */ static async listRefs({ fs, gitdir, filepath }) { const packedMap = GitRefManager.packedRefs({ fs, gitdir }); let files = null; @@ -121104,6 +123647,15 @@ class GitRefManager { return files } + /** + * Lists all branches, optionally filtered by remote. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. + * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} [args.remote] - The remote to filter branches by. + * @returns {Promise} - A list of branch names. + */ static async listBranches({ fs, gitdir, remote }) { if (remote) { return GitRefManager.listRefs({ @@ -121116,6 +123668,14 @@ class GitRefManager { } } + /** + * Lists all tags. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. 
+ * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @returns {Promise} - A list of tag names. + */ static async listTags({ fs, gitdir }) { const tags = await GitRefManager.listRefs({ fs, @@ -121267,14 +123827,41 @@ class GitTree { } } +/** + * Represents a Git object and provides methods to wrap and unwrap Git objects + * according to the Git object format. + */ class GitObject { + /** + * Wraps a raw object with a Git header. + * + * @param {Object} params - The parameters for wrapping. + * @param {string} params.type - The type of the Git object (e.g., 'blob', 'tree', 'commit'). + * @param {Uint8Array} params.object - The raw object data to wrap. + * @returns {Uint8Array} The wrapped Git object as a single buffer. + */ static wrap({ type, object }) { - return Buffer.concat([ - Buffer.from(`${type} ${object.byteLength.toString()}\x00`), - Buffer.from(object), - ]) + const header = `${type} ${object.length}\x00`; + const headerLen = header.length; + const totalLength = headerLen + object.length; + + // Allocate a single buffer for the header and object, rather than create multiple buffers + const wrappedObject = new Uint8Array(totalLength); + for (let i = 0; i < headerLen; i++) { + wrappedObject[i] = header.charCodeAt(i); + } + wrappedObject.set(object, headerLen); + + return wrappedObject } + /** + * Unwraps a Git object buffer into its type and raw object data. + * + * @param {Buffer|Uint8Array} buffer - The buffer containing the wrapped Git object. + * @returns {{ type: string, object: Buffer }} An object containing the type and the raw object data. + * @throws {InternalError} If the length specified in the header does not match the actual object length. + */ static unwrap(buffer) { const s = buffer.indexOf(32); // first space const i = buffer.indexOf(0); // first null value @@ -122061,7 +124648,7 @@ async function readObjectPacked({ }) { // Check to see if it's in a packfile. 
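  // (A pack consists of paired files, e.g. objects/pack/pack-<sha>.pack
  // holding the data and pack-<sha>.idx holding the lookup index.)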
// Iterate through all the .idx files - let list = await fs.readdir(join(gitdir, 'objects/pack')); + let list = await fs.readdir(pathBrowserify.join(gitdir, 'objects/pack')); list = list.filter(x => x.endsWith('.idx')); for (const filename of list) { const indexFile = `${gitdir}/objects/pack/${filename}`; @@ -123032,9 +125619,9 @@ class GitWalkerRepo { const tree = GitTree.from(object); // cache all entries for (const entry of tree) { - map.set(join(filepath, entry.path), entry); + map.set(pathBrowserify.join(filepath, entry.path), entry); } - return tree.entries().map(entry => join(filepath, entry.path)) + return tree.entries().map(entry => pathBrowserify.join(filepath, entry.path)) } async type(entry) { @@ -123109,6 +125696,8 @@ class GitWalkerFs { this.cache = cache; this.dir = dir; this.gitdir = gitdir; + + this.config = null; const walker = this; this.ConstructEntry = class WorkdirEntry { constructor(fullpath) { @@ -123145,9 +125734,9 @@ class GitWalkerFs { async readdir(entry) { const filepath = entry._fullpath; const { fs, dir } = this; - const names = await fs.readdir(join(dir, filepath)); + const names = await fs.readdir(pathBrowserify.join(dir, filepath)); if (names === null) return null - return names.map(name => join(filepath, name)) + return names.map(name => pathBrowserify.join(filepath, name)) } async type(entry) { @@ -123195,7 +125784,7 @@ class GitWalkerFs { if ((await entry.type()) === 'tree') { entry._content = undefined; } else { - const config = await GitConfigManager.get({ fs, gitdir }); + const config = await this._getGitConfig(fs, gitdir); const autocrlf = await config.get('core.autocrlf'); const content = await fs.read(`${dir}/${entry._fullpath}`, { autocrlf }); // workaround for a BrowserFS edge case @@ -123211,6 +125800,7 @@ class GitWalkerFs { async oid(entry) { if (entry._oid === false) { + const self = this; const { fs, gitdir, cache } = this; let oid; // See if we can use the SHA1 hash in the index. @@ -123219,7 +125809,7 @@ class GitWalkerFs { ) { const stage = index.entriesMap.get(entry._fullpath); const stats = await entry.stat(); - const config = await GitConfigManager.get({ fs, gitdir }); + const config = await self._getGitConfig(fs, gitdir); const filemode = await config.get('core.filemode'); const trustino = typeof process !== 'undefined' @@ -123231,7 +125821,7 @@ class GitWalkerFs { oid = undefined; } else { oid = await shasum( - GitObject.wrap({ type: 'blob', object: await entry.content() }) + GitObject.wrap({ type: 'blob', object: content }) ); // Update the stats in the index so we will get a "cache hit" next time // 1) if we can (because the oid and mode are the same) @@ -123258,6 +125848,14 @@ class GitWalkerFs { } return entry._oid } + + async _getGitConfig(fs, gitdir) { + if (this.config) { + return this.config + } + this.config = await GitConfigManager.get({ fs, gitdir }); + return this.config + } } // @ts-check @@ -123414,15 +126012,20 @@ async function _walk({ const range = arrayRange(0, walkers.length); const unionWalkerFromReaddir = async entries => { range.map(i => { - entries[i] = entries[i] && new walkers[i].ConstructEntry(entries[i]); + const entry = entries[i]; + entries[i] = entry && new walkers[i].ConstructEntry(entry); }); const subdirs = await Promise.all( - range.map(i => (entries[i] ? walkers[i].readdir(entries[i]) : [])) + range.map(i => { + const entry = entries[i]; + return entry ? walkers[i].readdir(entry) : [] + }) ); // Now process child directories - const iterators = subdirs - .map(array => (array === null ? 
[] : array)) - .map(array => array[Symbol.iterator]()); + const iterators = subdirs.map(array => { + return (array === null ? [] : array)[Symbol.iterator]() + }); + return { entries, children: unionOfIterators(iterators), @@ -123457,7 +126060,7 @@ async function rmRecursive(fs, filepath) { } else if (entries.length) { await Promise.all( entries.map(entry => { - const subpath = join(filepath, entry); + const subpath = pathBrowserify.join(filepath, entry); return fs.lstat(subpath).then(stat => { if (!stat) return return stat.isDirectory() ? rmRecursive(fs, subpath) : fs.rm(subpath) @@ -123533,9 +126136,15 @@ function bindFs(target, fs) { } /** - * This is just a collection of helper functions really. At least that's how it started. + * A wrapper class for file system operations, providing a consistent API for both promise-based + * and callback-based file systems. It includes utility methods for common file system tasks. */ class FileSystem { + /** + * Creates an instance of FileSystem. + * + * @param {Object} fs - A file system implementation to wrap. + */ constructor(fs) { if (typeof fs._original_unwrapped_fs !== 'undefined') return fs @@ -123551,13 +126160,21 @@ class FileSystem { /** * Return true if a file exists, false if it doesn't exist. * Rethrows errors that aren't related to file existence. + * + * @param {string} filepath - The path to the file. + * @param {Object} [options] - Additional options. + * @returns {Promise} - `true` if the file exists, `false` otherwise. */ async exists(filepath, options = {}) { try { await this._stat(filepath); return true } catch (err) { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + if ( + err.code === 'ENOENT' || + err.code === 'ENOTDIR' || + (err.code || '').includes('ENS') + ) { return false } else { console.log('Unhandled error in "FileSystem.exists()" function', err); @@ -123569,10 +126186,9 @@ class FileSystem { /** * Return the contents of a file if it exists, otherwise returns null. * - * @param {string} filepath - * @param {object} [options] - * - * @returns {Promise} + * @param {string} filepath - The path to the file. + * @param {Object} [options] - Options for reading the file. + * @returns {Promise} - The file contents, or `null` if the file doesn't exist. */ async read(filepath, options = {}) { try { @@ -123599,9 +126215,10 @@ class FileSystem { /** * Write a file (creating missing directories if need be) without throwing errors. * - * @param {string} filepath - * @param {Buffer|Uint8Array|string} contents - * @param {object|string} [options] + * @param {string} filepath - The path to the file. + * @param {Buffer|Uint8Array|string} contents - The data to write. + * @param {Object|string} [options] - Options for writing the file. + * @returns {Promise} */ async write(filepath, contents, options = {}) { try { @@ -123616,6 +126233,10 @@ class FileSystem { /** * Make a directory (or series of nested directories) without throwing an error if it already exists. + * + * @param {string} filepath - The path to the directory. + * @param {boolean} [_selfCall=false] - Internal flag to prevent infinite recursion. + * @returns {Promise} */ async mkdir(filepath, _selfCall = false) { try { @@ -123642,6 +126263,9 @@ class FileSystem { /** * Delete a file without throwing an error if it is already deleted. + * + * @param {string} filepath - The path to the file. + * @returns {Promise} */ async rm(filepath) { try { @@ -123653,6 +126277,10 @@ class FileSystem { /** * Delete a directory without throwing an error if it is already deleted. 
+ * + * @param {string} filepath - The path to the directory. + * @param {Object} [opts] - Options for deleting the directory. + * @returns {Promise} */ async rmdir(filepath, opts) { try { @@ -123668,6 +126296,9 @@ class FileSystem { /** * Read a directory without throwing an error is the directory doesn't exist + * + * @param {string} filepath - The path to the directory. + * @returns {Promise} - An array of file names, or `null` if the path is not a directory. */ async readdir(filepath) { try { @@ -123683,10 +126314,13 @@ class FileSystem { } /** - * Return a flast list of all the files nested inside a directory + * Return a flat list of all the files nested inside a directory * * Based on an elegant concurrent recursive solution from SO * https://stackoverflow.com/a/45130990/2168416 + * + * @param {string} dir - The directory to read. + * @returns {Promise} - A flat list of all files in the directory. */ async readdirDeep(dir) { const subdirs = await this._readdir(dir); @@ -123704,13 +126338,16 @@ class FileSystem { /** * Return the Stats of a file/symlink if it exists, otherwise returns null. * Rethrows errors that aren't related to file existence. + * + * @param {string} filename - The path to the file or symlink. + * @returns {Promise} - The stats object, or `null` if the file doesn't exist. */ async lstat(filename) { try { const stats = await this._lstat(filename); return stats } catch (err) { - if (err.code === 'ENOENT') { + if (err.code === 'ENOENT' || (err.code || '').includes('ENS')) { return null } throw err @@ -123720,6 +126357,10 @@ class FileSystem { /** * Reads the contents of a symlink if it exists, otherwise returns null. * Rethrows errors that aren't related to file existence. + * + * @param {string} filename - The path to the symlink. + * @param {Object} [opts={ encoding: 'buffer' }] - Options for reading the symlink. + * @returns {Promise} - The symlink target, or `null` if it doesn't exist. */ async readlink(filename, opts = { encoding: 'buffer' }) { // Note: FileSystem.readlink returns a buffer by default @@ -123728,7 +126369,7 @@ class FileSystem { const link = await this._readlink(filename, opts); return Buffer.isBuffer(link) ? link : Buffer.from(link) } catch (err) { - if (err.code === 'ENOENT') { + if (err.code === 'ENOENT' || (err.code || '').includes('ENS')) { return null } throw err @@ -123737,6 +126378,10 @@ class FileSystem { /** * Write the contents of buffer to a symlink. + * + * @param {string} filename - The path to the symlink. + * @param {Buffer} buffer - The symlink target. + * @returns {Promise} */ async writelink(filename, buffer) { return this._symlink(buffer.toString('utf8'), filename) @@ -123802,7 +126447,7 @@ async function modified(entry, base) { async function abortMerge({ fs: _fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), commit = 'HEAD', cache = {}, }) { @@ -123881,21 +126526,31 @@ async function abortMerge({ // I'm putting this in a Manager because I reckon it could benefit // from a LOT of caching. class GitIgnoreManager { - static async isIgnored({ fs, dir, gitdir = join(dir, '.git'), filepath }) { + /** + * Determines whether a given file is ignored based on `.gitignore` rules and exclusion files. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. + * @param {string} args.dir - The working directory. 
+ * @param {string} [args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} args.filepath - The path of the file to check. + * @returns {Promise} - `true` if the file is ignored, `false` otherwise. + */ + static async isIgnored({ fs, dir, gitdir = pathBrowserify.join(dir, '.git'), filepath }) { // ALWAYS ignore ".git" folders. if (basename(filepath) === '.git') return true // '.' is not a valid gitignore entry, so '.' is never ignored if (filepath === '.') return false // Check and load exclusion rules from project exclude file (.git/info/exclude) let excludes = ''; - const excludesFile = join(gitdir, 'info', 'exclude'); + const excludesFile = pathBrowserify.join(gitdir, 'info', 'exclude'); if (await fs.exists(excludesFile)) { excludes = await fs.read(excludesFile, 'utf8'); } // Find all the .gitignore files that could affect this file const pairs = [ { - gitignore: join(dir, '.gitignore'), + gitignore: pathBrowserify.join(dir, '.gitignore'), filepath, }, ]; @@ -123904,7 +126559,7 @@ class GitIgnoreManager { const folder = pieces.slice(0, i).join('/'); const file = pieces.slice(i).join('/'); pairs.push({ - gitignore: join(dir, folder, '.gitignore'), + gitignore: pathBrowserify.join(dir, folder, '.gitignore'), filepath: file, }); } @@ -124033,7 +126688,7 @@ function posixifyPathBuffer(buffer) { async function add({ fs: _fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), filepath, cache = {}, force = false, @@ -124047,6 +126702,8 @@ async function add({ const fs = new FileSystem(_fs); await GitIndexManager.acquire({ fs, gitdir, cache }, async index => { + const config = await GitConfigManager.get({ fs, gitdir }); + const autocrlf = await config.get('core.autocrlf'); return addToIndex({ dir, gitdir, @@ -124055,6 +126712,7 @@ async function add({ index, force, parallel, + autocrlf, }) }); } catch (err) { @@ -124071,6 +126729,7 @@ async function addToIndex({ index, force, parallel, + autocrlf, }) { // TODO: Should ignore UNLESS it's already in the index. filepath = Array.isArray(filepath) ? filepath : [filepath]; @@ -124084,21 +126743,22 @@ async function addToIndex({ }); if (ignored) return } - const stats = await fs.lstat(join(dir, currentFilepath)); + const stats = await fs.lstat(pathBrowserify.join(dir, currentFilepath)); if (!stats) throw new NotFoundError(currentFilepath) if (stats.isDirectory()) { - const children = await fs.readdir(join(dir, currentFilepath)); + const children = await fs.readdir(pathBrowserify.join(dir, currentFilepath)); if (parallel) { const promises = children.map(child => addToIndex({ dir, gitdir, fs, - filepath: [join(currentFilepath, child)], + filepath: [pathBrowserify.join(currentFilepath, child)], index, force, parallel, + autocrlf, }) ); await Promise.all(promises); @@ -124108,19 +126768,18 @@ async function addToIndex({ dir, gitdir, fs, - filepath: [join(currentFilepath, child)], + filepath: [pathBrowserify.join(currentFilepath, child)], index, force, parallel, + autocrlf, }); } } } else { - const config = await GitConfigManager.get({ fs, gitdir }); - const autocrlf = await config.get('core.autocrlf'); const object = stats.isSymbolicLink() - ? await fs.readlink(join(dir, currentFilepath)).then(posixifyPathBuffer) - : await fs.read(join(dir, currentFilepath), { autocrlf }); + ? 
await fs.readlink(pathBrowserify.join(dir, currentFilepath)).then(posixifyPathBuffer) + : await fs.read(pathBrowserify.join(dir, currentFilepath), { autocrlf }); if (object === null) throw new NotFoundError(currentFilepath) const oid = await _writeObject({ fs, gitdir, type: 'blob', object }); index.insert({ filepath: currentFilepath, stats, oid }); @@ -124783,7 +127442,7 @@ async function addNote({ fs: _fs, onSign, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ref = 'refs/notes/commits', oid, note, @@ -124847,7 +127506,7 @@ async function addNote({ * */ async function _addRemote({ fs, gitdir, remote, url, force }) { - if (remote !== cleanGitRef.clean(remote)) { + if (!validRef(remote, true)) { throw new InvalidRefNameError(remote, cleanGitRef.clean(remote)) } const config = await GitConfigManager.get({ fs, gitdir }); @@ -124898,7 +127557,7 @@ async function _addRemote({ fs, gitdir, remote, url, force }) { async function addRemote({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), remote, url, force = false, @@ -125049,7 +127708,7 @@ async function annotatedTag({ fs: _fs, onSign, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ref, tagger: _tagger, message = ref, @@ -125119,7 +127778,7 @@ async function _branch({ checkout = false, force = false, }) { - if (ref !== cleanGitRef.clean(ref)) { + if (!validRef(ref, true)) { throw new InvalidRefNameError(ref, cleanGitRef.clean(ref)) } @@ -125180,7 +127839,7 @@ async function _branch({ async function branch({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ref, object, checkout = false, @@ -125233,6 +127892,8 @@ const worthWalking = (filepath, root) => { * @param {boolean} [args.dryRun] * @param {boolean} [args.force] * @param {boolean} [args.track] + * @param {boolean} [args.nonBlocking] + * @param {number} [args.batchSize] * * @returns {Promise} Resolves successfully when filesystem operations are complete * @@ -125252,6 +127913,8 @@ async function _checkout({ dryRun, force, track = true, + nonBlocking = false, + batchSize = 100, }) { // oldOid is defined only if onPostCheckout hook is attached let oldOid; @@ -125384,10 +128047,10 @@ async function _checkout({ if (method === 'rmdir' || method === 'rmdir-index') { const filepath = `${dir}/${fullpath}`; try { - if (method === 'rmdir-index') { - index.delete({ filepath: fullpath }); + if (method === 'rmdir') { + await fs.rmdir(filepath); } - await fs.rmdir(filepath); + index.delete({ filepath: fullpath }); if (onProgress) { await onProgress({ phase: 'Updating workdir', @@ -125424,72 +128087,122 @@ async function _checkout({ }) ); - await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) { - await Promise.all( - ops - .filter( - ([method]) => - method === 'create' || - method === 'create-index' || - method === 'update' || - method === 'mkdir-index' - ) - .map(async function([method, fullpath, oid, mode, chmod]) { - const filepath = `${dir}/${fullpath}`; - try { - if (method !== 'create-index' && method !== 'mkdir-index') { - const { object } = await _readObject({ fs, cache, gitdir, oid }); - if (chmod) { - // Note: the mode option of fs.write only works when creating files, - // not updating them. Since the `fs` plugin doesn't expose `chmod` this - // is our only option. 
- await fs.rm(filepath); - } - if (mode === 0o100644) { - // regular file - await fs.write(filepath, object); - } else if (mode === 0o100755) { - // executable file - await fs.write(filepath, object, { mode: 0o777 }); - } else if (mode === 0o120000) { - // symlink - await fs.writelink(filepath, object); - } else { - throw new InternalError( - `Invalid mode 0o${mode.toString(8)} detected in blob ${oid}` - ) - } - } + if (nonBlocking) { + // Filter eligible operations first + const eligibleOps = ops.filter( + ([method]) => + method === 'create' || + method === 'create-index' || + method === 'update' || + method === 'mkdir-index' + ); - const stats = await fs.lstat(filepath); - // We can't trust the executable bit returned by lstat on Windows, - // so we need to preserve this value from the TREE. - // TODO: Figure out how git handles this internally. - if (mode === 0o100755) { - stats.mode = 0o755; - } - // Submodules are present in the git index but use a unique mode different from trees - if (method === 'mkdir-index') { - stats.mode = 0o160000; - } - index.insert({ - filepath: fullpath, - stats, - oid, - }); - if (onProgress) { - await onProgress({ - phase: 'Updating workdir', - loaded: ++count, - total, - }); - } - } catch (e) { - console.log(e); - } - }) + const updateWorkingDirResults = await batchAllSettled( + 'Update Working Dir', + eligibleOps.map(([method, fullpath, oid, mode, chmod]) => () => + updateWorkingDir({ fs, cache, gitdir, dir }, [ + method, + fullpath, + oid, + mode, + chmod, + ]) + ), + onProgress, + batchSize ); - }); + + await GitIndexManager.acquire( + { fs, gitdir, cache, allowUnmerged: true }, + async function(index) { + await batchAllSettled( + 'Update Index', + updateWorkingDirResults.map(([fullpath, oid, stats]) => () => + updateIndex({ index, fullpath, oid, stats }) + ), + onProgress, + batchSize + ); + } + ); + } else { + await GitIndexManager.acquire( + { fs, gitdir, cache, allowUnmerged: true }, + async function(index) { + await Promise.all( + ops + .filter( + ([method]) => + method === 'create' || + method === 'create-index' || + method === 'update' || + method === 'mkdir-index' + ) + .map(async function([method, fullpath, oid, mode, chmod]) { + const filepath = `${dir}/${fullpath}`; + try { + if (method !== 'create-index' && method !== 'mkdir-index') { + const { object } = await _readObject({ + fs, + cache, + gitdir, + oid, + }); + if (chmod) { + // Note: the mode option of fs.write only works when creating files, + // not updating them. Since the `fs` plugin doesn't expose `chmod` this + // is our only option. + await fs.rm(filepath); + } + if (mode === 0o100644) { + // regular file + await fs.write(filepath, object); + } else if (mode === 0o100755) { + // executable file + await fs.write(filepath, object, { mode: 0o777 }); + } else if (mode === 0o120000) { + // symlink + await fs.writelink(filepath, object); + } else { + throw new InternalError( + `Invalid mode 0o${mode.toString( + 8 + )} detected in blob ${oid}` + ) + } + } + + const stats = await fs.lstat(filepath); + // We can't trust the executable bit returned by lstat on Windows, + // so we need to preserve this value from the TREE. + // TODO: Figure out how git handles this internally. 
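+                // (Editor's note, illustrative: git tree entries only ever use
+                // a handful of modes - 0o100644 regular file, 0o100755
+                // executable, 0o120000 symlink, 0o160000 gitlink/submodule -
+                // so the index entry below is normalized to one of those
+                // canonical values rather than the raw lstat mode.)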
+                if (mode === 0o100755) {
+                  stats.mode = 0o755;
+                }
+                // Submodules are present in the git index but use a unique mode different from trees
+                if (method === 'mkdir-index') {
+                  stats.mode = 0o160000;
+                }
+                index.insert({
+                  filepath: fullpath,
+                  stats,
+                  oid,
+                });
+                if (onProgress) {
+                  await onProgress({
+                    phase: 'Updating workdir',
+                    loaded: ++count,
+                    total,
+                  });
+                }
+              } catch (e) {
+                console.log(e);
+              }
+            })
+          );
+        }
+      );
+  }

   if (onPostCheckout) {
     await onPostCheckout({
@@ -125668,7 +128381,7 @@ async function analyze({
       case '101': {
         switch (await stage.type()) {
           case 'tree': {
-            return ['rmdir', fullpath]
+            return ['rmdir-index', fullpath]
          }
          case 'blob': {
            // Git checks that the workdir.oid === stage.oid before deleting file
@@ -125807,6 +128520,78 @@ async function analyze({
  })
}

+async function updateIndex({ index, fullpath, stats, oid }) {
+  try {
+    index.insert({
+      filepath: fullpath,
+      stats,
+      oid,
+    });
+  } catch (e) {
+    console.warn(`Error inserting ${fullpath} into index:`, e);
+  }
+}
+async function updateWorkingDir(
+  { fs, cache, gitdir, dir },
+  [method, fullpath, oid, mode, chmod]
+) {
+  const filepath = `${dir}/${fullpath}`;
+  if (method !== 'create-index' && method !== 'mkdir-index') {
+    const { object } = await _readObject({ fs, cache, gitdir, oid });
+    if (chmod) {
+      await fs.rm(filepath);
+    }
+    if (mode === 0o100644) {
+      // regular file
+      await fs.write(filepath, object);
+    } else if (mode === 0o100755) {
+      // executable file
+      await fs.write(filepath, object, { mode: 0o777 });
+    } else if (mode === 0o120000) {
+      // symlink
+      await fs.writelink(filepath, object);
+    } else {
+      throw new InternalError(
+        `Invalid mode 0o${mode.toString(8)} detected in blob ${oid}`
+      )
+    }
+  }
+  const stats = await fs.lstat(filepath);
+  if (mode === 0o100755) {
+    stats.mode = 0o755;
+  }
+  if (method === 'mkdir-index') {
+    stats.mode = 0o160000;
+  }
+  return [fullpath, oid, stats]
+}
+
+async function batchAllSettled(operationName, tasks, onProgress, batchSize) {
+  const results = [];
+  try {
+    for (let i = 0; i < tasks.length; i += batchSize) {
+      const batch = tasks.slice(i, i + batchSize).map(task => task());
+      const batchResults = await Promise.allSettled(batch);
+      batchResults.forEach(result => {
+        if (result.status === 'fulfilled') results.push(result.value);
+      });
+      if (onProgress) {
+        await onProgress({
+          phase: 'Updating workdir',
+          loaded: i + batch.length,
+          total: tasks.length,
+        });
+      }
+    }
+
+    return results
+  } catch (error) {
+    console.error(`Error during ${operationName}: ${error}`);
+  }

+  return results
+}
+
 // @ts-check

 /**
  * @param {boolean} [args.force = false] - If true, conflicts will be ignored and files will be overwritten regardless of local changes.
  * @param {boolean} [args.track = true] - If false, will not set the remote branch tracking information. Defaults to true.
  * @param {object} [args.cache] - a [cache](cache.md) object
+ * @param {boolean} [args.nonBlocking = false] - If true, will use non-blocking file system operations to allow for better performance in certain environments (for example, in browsers)
+ * @param {number} [args.batchSize = 100] - If args.nonBlocking is true, batchSize is the number of files to process at a time to avoid blocking the executing thread. The default value of 100 is a good starting point.
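+ * @example
+ * // Editor's illustrative sketch (dir and ref are placeholders): a checkout
+ * // that processes workdir updates in batches of 50, yielding between
+ * // batches so a browser UI stays responsive:
+ * await git.checkout({
+ *   fs,
+ *   dir: '/tutorial',
+ *   ref: 'main',
+ *   nonBlocking: true,
+ *   batchSize: 50,
+ * })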
* * @returns {Promise} Resolves successfully when filesystem operations are complete * @@ -125868,7 +128655,7 @@ async function checkout({ onProgress, onPostCheckout, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), remote = 'origin', ref: _ref, filepaths, @@ -125878,6 +128665,8 @@ async function checkout({ force = false, track = true, cache = {}, + nonBlocking = false, + batchSize = 100, }) { try { assertParameter('fs', fs); @@ -125900,6 +128689,8 @@ async function checkout({ dryRun, force, track, + nonBlocking, + batchSize, }) } catch (err) { err.caller = 'git.checkout'; @@ -126252,22 +129043,33 @@ const stringifyBody = async res => { }; class GitRemoteHTTP { + /** + * Returns the capabilities of the GitRemoteHTTP class. + * + * @returns {Promise} - An array of supported capabilities. + */ static async capabilities() { return ['discover', 'connect'] } /** + * Discovers references from a remote Git repository. + * * @param {Object} args - * @param {HttpClient} args.http - * @param {ProgressCallback} [args.onProgress] - * @param {AuthCallback} [args.onAuth] - * @param {AuthFailureCallback} [args.onAuthFailure] - * @param {AuthSuccessCallback} [args.onAuthSuccess] - * @param {string} [args.corsProxy] - * @param {string} args.service - * @param {string} args.url - * @param {Object} args.headers - * @param {1 | 2} args.protocolVersion - Git Protocol Version + * @param {HttpClient} args.http - The HTTP client to use for requests. + * @param {ProgressCallback} [args.onProgress] - Callback for progress updates. + * @param {AuthCallback} [args.onAuth] - Callback for providing authentication credentials. + * @param {AuthFailureCallback} [args.onAuthFailure] - Callback for handling authentication failures. + * @param {AuthSuccessCallback} [args.onAuthSuccess] - Callback for handling successful authentication. + * @param {string} [args.corsProxy] - Optional CORS proxy URL. + * @param {string} args.service - The Git service (e.g., "git-upload-pack"). + * @param {string} args.url - The URL of the remote repository. + * @param {Object} args.headers - HTTP headers to include in the request. + * @param {1 | 2} args.protocolVersion - The Git protocol version to use. + * @returns {Promise} - The parsed response from the remote repository. + * @throws {HttpError} - If the HTTP request fails. + * @throws {SmartHttpError} - If the response cannot be parsed. + * @throws {UserCanceledError} - If the user cancels the operation. */ static async discover({ http, @@ -126363,15 +129165,19 @@ class GitRemoteHTTP { } /** + * Connects to a remote Git repository and sends a request. + * * @param {Object} args - * @param {HttpClient} args.http - * @param {ProgressCallback} [args.onProgress] - * @param {string} [args.corsProxy] - * @param {string} args.service - * @param {string} args.url - * @param {Object} [args.headers] - * @param {any} args.body - * @param {any} args.auth + * @param {HttpClient} args.http - The HTTP client to use for requests. + * @param {ProgressCallback} [args.onProgress] - Callback for progress updates. + * @param {string} [args.corsProxy] - Optional CORS proxy URL. + * @param {string} args.service - The Git service (e.g., "git-upload-pack"). + * @param {string} args.url - The URL of the remote repository. + * @param {Object} [args.headers] - HTTP headers to include in the request. + * @param {any} args.body - The request body to send. + * @param {any} args.auth - Authentication credentials. + * @returns {Promise} - The HTTP response from the remote repository. 
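+   * @example
+   * // Editor's illustrative sketch (the url and the empty body are
+   * // placeholders for a real pkt-line request):
+   * // const res = await GitRemoteHTTP.connect({
+   * //   http,
+   * //   service: 'git-upload-pack',
+   * //   url: 'https://example.com/project.git',
+   * //   body: [],
+   * //   auth: {},
+   * // })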
+ * @throws {HttpError} - If the HTTP request fails. */ static async connect({ http, @@ -126409,6 +129215,47 @@ class GitRemoteHTTP { } } +/** + * A class for managing Git remotes and determining the appropriate remote helper for a given URL. + */ +class GitRemoteManager { + /** + * Determines the appropriate remote helper for the given URL. + * + * @param {Object} args + * @param {string} args.url - The URL of the remote repository. + * @returns {Object} - The remote helper class for the specified transport. + * @throws {UrlParseError} - If the URL cannot be parsed. + * @throws {UnknownTransportError} - If the transport is not supported. + */ + static getRemoteHelperFor({ url }) { + // TODO: clean up the remoteHelper API and move into PluginCore + const remoteHelpers = new Map(); + remoteHelpers.set('http', GitRemoteHTTP); + remoteHelpers.set('https', GitRemoteHTTP); + + const parts = parseRemoteUrl({ url }); + if (!parts) { + throw new UrlParseError(url) + } + if (remoteHelpers.has(parts.transport)) { + return remoteHelpers.get(parts.transport) + } + throw new UnknownTransportError( + url, + parts.transport, + parts.transport === 'ssh' ? translateSSHtoHTTP(url) : undefined + ) + } +} + +/** + * Parses a remote URL and extracts its transport and address. + * + * @param {Object} args + * @param {string} args.url - The URL of the remote repository. + * @returns {Object|undefined} - An object containing the transport and address, or undefined if parsing fails. + */ function parseRemoteUrl({ url }) { // the stupid "shorter scp-like syntax" if (url.startsWith('git@')) { @@ -126446,34 +129293,20 @@ function parseRemoteUrl({ url }) { } } -class GitRemoteManager { - static getRemoteHelperFor({ url }) { - // TODO: clean up the remoteHelper API and move into PluginCore - const remoteHelpers = new Map(); - remoteHelpers.set('http', GitRemoteHTTP); - remoteHelpers.set('https', GitRemoteHTTP); - - const parts = parseRemoteUrl({ url }); - if (!parts) { - throw new UrlParseError(url) - } - if (remoteHelpers.has(parts.transport)) { - return remoteHelpers.get(parts.transport) - } - throw new UnknownTransportError( - url, - parts.transport, - parts.transport === 'ssh' ? translateSSHtoHTTP(url) : undefined - ) - } -} - let lock$2 = null; class GitShallowManager { + /** + * Reads the `shallow` file in the Git repository and returns a set of object IDs (OIDs). + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. + * @param {string} [args.gitdir] - [required] The [git directory](dir-vs-gitdir.md) path + * @returns {Promise>} - A set of shallow object IDs. + */ static async read({ fs, gitdir }) { if (lock$2 === null) lock$2 = new AsyncLock(); - const filepath = join(gitdir, 'shallow'); + const filepath = pathBrowserify.join(gitdir, 'shallow'); const oids = new Set(); await lock$2.acquire(filepath, async function() { const text = await fs.read(filepath, { encoding: 'utf8' }); @@ -126487,9 +129320,19 @@ class GitShallowManager { return oids } + /** + * Writes a set of object IDs (OIDs) to the `shallow` file in the Git repository. + * If the set is empty, the `shallow` file is removed. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. + * @param {string} [args.gitdir] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {Set} args.oids - A set of shallow object IDs to write. 
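+   * @example
+   * // Editor's illustrative sketch (`oid` stands in for a real commit hash):
+   * // record the shallow boundary left behind by a depth-limited fetch
+   * // await GitShallowManager.write({ fs, gitdir, oids: new Set([oid]) })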
+   * @returns {Promise}
+   */
  static async write({ fs, gitdir, oids }) {
    if (lock$2 === null) lock$2 = new AsyncLock();
-    const filepath = join(gitdir, 'shallow');
+    const filepath = pathBrowserify.join(gitdir, 'shallow');
    if (oids.size > 0) {
      const text = [...oids].join('\n') + '\n';
      await lock$2.acquire(filepath, async function() {
@@ -126520,7 +129363,7 @@ async function hasObjectPacked({
}) {
  // Check to see if it's in a packfile.
  // Iterate through all the .idx files
-  let list = await fs.readdir(join(gitdir, 'objects/pack'));
+  let list = await fs.readdir(pathBrowserify.join(gitdir, 'objects/pack'));
  list = list.filter(x => x.endsWith('.idx'));
  for (const filename of list) {
    const indexFile = `${gitdir}/objects/pack/${filename}`;
@@ -126586,8 +129429,8 @@ function filterCapabilities(server, client) {

const pkg = {
  name: 'isomorphic-git',
-  version: '1.27.1',
-  agent: 'git/isomorphic-git@1.27.1',
+  version: '1.33.2',
+  agent: 'git/isomorphic-git@1.33.2',
};

class FIFO {
@@ -127253,7 +130096,7 @@ async function _fetch({
  // c) compare the computed SHA with the last 20 bytes of the stream before saving to disk, and throwing a "packfile got corrupted during download" error if the SHA doesn't match.
  if (packfileSha !== '' && !emptyPackfile(packfile)) {
    res.packfile = `objects/pack/pack-${packfileSha}.pack`;
-    const fullpath = join(gitdir, res.packfile);
+    const fullpath = pathBrowserify.join(gitdir, res.packfile);
    await fs.write(fullpath, packfile);
    const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid });
    const idx = await GitPackIndex.fromPack({
@@ -127283,7 +130126,7 @@ async function _init({
  fs,
  bare = false,
  dir,
-  gitdir = bare ? dir : join(dir, '.git'),
+  gitdir = bare ? dir : pathBrowserify.join(dir, '.git'),
  defaultBranch = 'master',
}) {
  // Don't overwrite an existing config
@@ -127342,6 +130185,8 @@ async function _init({
 * @param {string[]} args.exclude
 * @param {boolean} args.relative
 * @param {Object} args.headers
+ * @param {boolean} [args.nonBlocking]
+ * @param {number} [args.batchSize]
 *
 * @returns {Promise} Resolves successfully when clone completes
 *
@@ -127370,6 +130215,8 @@ async function _clone({
  noCheckout,
  noTags,
  headers,
+  nonBlocking,
+  batchSize = 100,
}) {
  try {
    await _init({ fs, gitdir });
@@ -127414,6 +130261,8 @@ async function _clone({
      ref,
      remote,
      noCheckout,
+      nonBlocking,
+      batchSize,
    });
  } catch (err) {
    // Remove partial local repository, see #1283
@@ -127455,6 +130304,8 @@ async function _clone({
 * @param {boolean} [args.relative = false] - Changes the meaning of `depth` to be measured from the current shallow depth rather than from the branch tip.
 * @param {Object} [args.headers = {}] - Additional headers to include in HTTP requests, similar to git's `extraHeader` config
 * @param {object} [args.cache] - a [cache](cache.md) object
+ * @param {boolean} [args.nonBlocking = false] - If true, the checkout step will use non-blocking batched file system operations (useful when long-running operations would otherwise block the thread in browser environments)
+ * @param {number} [args.batchSize = 100] - If args.nonBlocking is true, batchSize is the number of files to process at a time to avoid blocking the executing thread. The default value of 100 is a good starting point.
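+ * @example
+ * // Editor's illustrative sketch (URL and dir are placeholders; http is an
+ * // HttpClient such as the one shipped in isomorphic-git/http/web): a
+ * // depth-1 clone whose checkout phase runs in non-blocking batches:
+ * await git.clone({
+ *   fs,
+ *   http,
+ *   dir: '/tutorial',
+ *   url: 'https://github.com/isomorphic-git/isomorphic-git',
+ *   depth: 1,
+ *   nonBlocking: true,
+ * })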
* * @returns {Promise} Resolves successfully when clone completes * @@ -127481,7 +130332,7 @@ async function clone({ onAuthFailure, onPostCheckout, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), url, corsProxy = undefined, ref = undefined, @@ -127495,6 +130346,8 @@ async function clone({ noTags = false, headers = {}, cache = {}, + nonBlocking = false, + batchSize = 100, }) { try { assertParameter('fs', fs); @@ -127529,6 +130382,8 @@ async function clone({ noCheckout, noTags, headers, + nonBlocking, + batchSize, }) } catch (err) { err.caller = 'git.clone'; @@ -127584,7 +130439,7 @@ async function commit({ fs: _fs, onSign, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), message, author, committer, @@ -127656,7 +130511,7 @@ async function commit({ async function currentBranch({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), fullname = false, test = false, }) { @@ -127733,7 +130588,7 @@ async function _deleteBranch({ fs, gitdir, ref }) { async function deleteBranch({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ref, }) { try { @@ -127768,7 +130623,7 @@ async function deleteBranch({ * console.log('done') * */ -async function deleteRef({ fs, dir, gitdir = join(dir, '.git'), ref }) { +async function deleteRef({ fs, dir, gitdir = pathBrowserify.join(dir, '.git'), ref }) { try { assertParameter('fs', fs); assertParameter('ref', ref); @@ -127816,7 +130671,7 @@ async function _deleteRemote({ fs, gitdir, remote }) { async function deleteRemote({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), remote, }) { try { @@ -127873,7 +130728,7 @@ async function _deleteTag({ fs, gitdir, ref }) { * console.log('done') * */ -async function deleteTag({ fs, dir, gitdir = join(dir, '.git'), ref }) { +async function deleteTag({ fs, dir, gitdir = pathBrowserify.join(dir, '.git'), ref }) { try { assertParameter('fs', fs); assertParameter('ref', ref); @@ -127905,7 +130760,7 @@ async function expandOidPacked({ }) { // Iterate through all the .pack files const results = []; - let list = await fs.readdir(join(gitdir, 'objects/pack')); + let list = await fs.readdir(pathBrowserify.join(gitdir, 'objects/pack')); list = list.filter(x => x.endsWith('.idx')); for (const filename of list) { const indexFile = `${gitdir}/objects/pack/${filename}`; @@ -127975,7 +130830,7 @@ async function _expandOid({ fs, cache, gitdir, oid: short }) { async function expandOid({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), oid, cache = {}, }) { @@ -128013,7 +130868,7 @@ async function expandOid({ * console.log(fullRef) * */ -async function expandRef({ fs, dir, gitdir = join(dir, '.git'), ref }) { +async function expandRef({ fs, dir, gitdir = pathBrowserify.join(dir, '.git'), ref }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); @@ -128153,7 +131008,7 @@ async function mergeTree({ fs, cache, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), index, ourOid, baseOid, @@ -128195,6 +131050,16 @@ async function mergeTree({ } } case 'false-true': { + // if directory is deleted in theirs but not in ours we return our directory + if (!theirs && (await ours.type()) === 'tree') { + return { + mode: await ours.mode(), + path, + oid: await ours.oid(), + type: await ours.type(), + } + } + return theirs ? 
{ mode: await theirs.mode(), @@ -128205,6 +131070,16 @@ async function mergeTree({ : undefined } case 'true-false': { + // if directory is deleted in ours but not in theirs we return their directory + if (!ours && (await theirs.type()) === 'tree') { + return { + mode: await theirs.mode(), + path, + oid: await theirs.oid(), + type: await theirs.type(), + } + } + return ours ? { mode: await ours.mode(), @@ -128215,13 +131090,26 @@ async function mergeTree({ : undefined } case 'true-true': { - // Modifications + // Handle tree-tree merges (directories) + if ( + ours && + theirs && + (await ours.type()) === 'tree' && + (await theirs.type()) === 'tree' + ) { + return { + mode: await ours.mode(), + path, + oid: await ours.oid(), + type: 'tree', + } + } + + // Modifications - both are blobs if ( ours && - base && theirs && (await ours.type()) === 'blob' && - (await base.type()) === 'blob' && (await theirs.type()) === 'blob' ) { return mergeBlobs({ @@ -128240,13 +131128,18 @@ async function mergeTree({ unmergedFiles.push(filepath); bothModified.push(filepath); if (!abortOnConflict) { - const baseOid = await base.oid(); + let baseOid = ''; + if (base && (await base.type()) === 'blob') { + baseOid = await base.oid(); + } const ourOid = await ours.oid(); const theirOid = await theirs.oid(); index.delete({ filepath }); - index.insert({ filepath, oid: baseOid, stage: 1 }); + if (baseOid) { + index.insert({ filepath, oid: baseOid, stage: 1 }); + } index.insert({ filepath, oid: ourOid, stage: 2 }); index.insert({ filepath, oid: theirOid, stage: 3 }); } @@ -128314,7 +131207,12 @@ async function mergeTree({ } // deleted by both - if (base && !ours && !theirs && (await base.type()) === 'blob') { + if ( + base && + !ours && + !theirs && + ((await base.type()) === 'blob' || (await base.type()) === 'tree') + ) { return undefined } @@ -128338,9 +131236,19 @@ async function mergeTree({ if (!parent) return // automatically delete directories if they have been emptied - if (parent && parent.type === 'tree' && entries.length === 0) return + // except for the root directory + if ( + parent && + parent.type === 'tree' && + entries.length === 0 && + parent.path !== '.' + ) + return - if (entries.length > 0) { + if ( + entries.length > 0 || + (parent.path === '.' && entries.length === 0) + ) { const tree = new GitTree(entries); const object = tree.toObject(); const oid = await _writeObject({ @@ -128418,10 +131326,16 @@ async function mergeBlobs({ const type = 'blob'; // Compute the new mode. // Since there are ONLY two valid blob modes ('100755' and '100644') it boils down to this + let baseMode = '100755'; + let baseOid = ''; + let baseContent = ''; + if (base && (await base.type()) === 'blob') { + baseMode = await base.mode(); + baseOid = await base.oid(); + baseContent = Buffer.from(await base.content()).toString('utf8'); + } const mode = - (await base.mode()) === (await ours.mode()) - ? await theirs.mode() - : await ours.mode(); + baseMode === (await ours.mode()) ? 
await theirs.mode() : await ours.mode(); // The trivial case: nothing to merge except maybe mode if ((await ours.oid()) === (await theirs.oid())) { return { @@ -128430,13 +131344,13 @@ async function mergeBlobs({ } } // if only one side made oid changes, return that side's oid - if ((await ours.oid()) === (await base.oid())) { + if ((await ours.oid()) === baseOid) { return { cleanMerge: true, mergeResult: { mode, path, oid: await theirs.oid(), type }, } } - if ((await theirs.oid()) === (await base.oid())) { + if ((await theirs.oid()) === baseOid) { return { cleanMerge: true, mergeResult: { mode, path, oid: await ours.oid(), type }, @@ -128444,7 +131358,6 @@ async function mergeBlobs({ } // if both sides made changes do a merge const ourContent = Buffer.from(await ours.content()).toString('utf8'); - const baseContent = Buffer.from(await base.content()).toString('utf8'); const theirContent = Buffer.from(await theirs.content()).toString('utf8'); const { mergedText, cleanMerge } = await mergeDriver({ branches: [baseName, ourName, theirName], @@ -128502,6 +131415,7 @@ async function mergeBlobs({ * @param {string} [args.signingKey] * @param {SignCallback} [args.onSign] - a PGP signing implementation * @param {MergeDriverCallback} [args.mergeDriver] + * @param {boolean} args.allowUnrelatedHistories * * @returns {Promise} Resolves to a description of the merge operation * @@ -128524,6 +131438,7 @@ async function _merge({ signingKey, onSign, mergeDriver, + allowUnrelatedHistories = false, }) { if (ours === undefined) { ours = await _currentBranch({ fs, gitdir, fullname: true }); @@ -128556,8 +131471,13 @@ async function _merge({ oids: [ourOid, theirOid], }); if (baseOids.length !== 1) { - // TODO: Recursive Merge strategy - throw new MergeNotSupportedError() + if (baseOids.length === 0 && allowUnrelatedHistories) { + // 4b825… == the empty tree used by git + baseOids.push('4b825dc642cb6eb9a060e54bf8d69288fbee4904'); + } else { + // TODO: Recursive Merge strategy + throw new MergeNotSupportedError() + } } const baseOid = baseOids[0]; // handle fast-forward case @@ -128604,7 +131524,7 @@ async function _merge({ ); // Defer throwing error until the index lock is relinquished and index is - // written to filsesystem + // written to filesystem if (tree instanceof MergeConflictError) throw tree if (!message) { @@ -128810,7 +131730,7 @@ async function fastForward({ onAuthSuccess, onAuthFailure, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ref, url, remote, @@ -128929,7 +131849,7 @@ async function fetch({ onAuthSuccess, onAuthFailure, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ref, remote, remoteRef, @@ -128998,7 +131918,7 @@ async function fetch({ async function findMergeBase({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), oids, cache = {}, }) { @@ -129033,7 +131953,7 @@ async function findMergeBase({ * @returns {Promise} Resolves successfully with a root git directory path */ async function _findRoot({ fs, filepath }) { - if (await fs.exists(join(filepath, '.git'))) { + if (await fs.exists(pathBrowserify.join(filepath, '.git'))) { return filepath } else { const parent = dirname(filepath); @@ -129105,7 +132025,7 @@ async function findRoot({ fs, filepath }) { * console.log(value) * */ -async function getConfig({ fs, dir, gitdir = join(dir, '.git'), path }) { +async function getConfig({ fs, dir, gitdir = pathBrowserify.join(dir, '.git'), path }) { try { assertParameter('fs', fs); 
assertParameter('gitdir', gitdir); @@ -129159,7 +132079,7 @@ async function _getConfigAll({ fs, gitdir, path }) { async function getConfigAll({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), path, }) { try { @@ -129485,17 +132405,18 @@ async function hashBlob({ object }) { // Convert object to buffer if (typeof object === 'string') { object = Buffer.from(object, 'utf8'); - } else { - object = Buffer.from(object); + } else if (!(object instanceof Uint8Array)) { + object = new Uint8Array(object); } const type = 'blob'; const { oid, object: _object } = await hashObject({ - type: 'blob', + type, format: 'content', object, }); - return { oid, type, object: new Uint8Array(_object), format: 'wrapped' } + + return { oid, type, object: _object, format: 'wrapped' } } catch (err) { err.caller = 'git.hashBlob'; throw err @@ -129524,7 +132445,7 @@ async function _indexPack({ filepath, }) { try { - filepath = join(dir, filepath); + filepath = pathBrowserify.join(dir, filepath); const pack = await fs.read(filepath); const getExternalRefDelta = oid => _readObject({ fs, cache, gitdir, oid }); const idx = await GitPackIndex.fromPack({ @@ -129577,7 +132498,7 @@ async function indexPack({ fs, onProgress, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), filepath, cache = {}, }) { @@ -129623,7 +132544,7 @@ async function init({ fs, bare = false, dir, - gitdir = bare ? dir : join(dir, '.git'), + gitdir = bare ? dir : pathBrowserify.join(dir, '.git'), defaultBranch = 'master', }) { try { @@ -129744,7 +132665,7 @@ async function _isDescendent({ async function isDescendent({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), oid, ancestor, depth = -1, @@ -129790,7 +132711,7 @@ async function isDescendent({ async function isIgnored({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), filepath, }) { try { @@ -129843,7 +132764,7 @@ async function isIgnored({ async function listBranches({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), remote, }) { try { @@ -129912,10 +132833,10 @@ async function accumulateFilesFromOid({ gitdir, oid: entry.oid, filenames, - prefix: join(prefix, entry.path), + prefix: pathBrowserify.join(prefix, entry.path), }); } else { - filenames.push(join(prefix, entry.path)); + filenames.push(pathBrowserify.join(prefix, entry.path)); } } } @@ -129949,7 +132870,7 @@ async function accumulateFilesFromOid({ async function listFiles({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ref, cache = {}, }) { @@ -130028,7 +132949,7 @@ async function _listNotes({ fs, cache, gitdir, ref }) { async function listNotes({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ref = 'refs/notes/commits', cache = {}, }) { @@ -130051,6 +132972,40 @@ async function listNotes({ // @ts-check +/** + * List refs + * + * @param {object} args + * @param {FsClient} args.fs - a file system client + * @param {string} [args.dir] - The [working tree](dir-vs-gitdir.md) directory path + * @param {string} [args.gitdir=join(dir,'.git')] - [required] The [git directory](dir-vs-gitdir.md) path + * @param {string} [args.filepath] - [required] The refs path to list + * + * @returns {Promise>} Resolves successfully with an array of ref names below the supplied `filepath` + * + * @example + * let refs = await git.listRefs({ fs, dir: '/tutorial', filepath: 'refs/heads' }) + * console.log(refs) + * + */ +async 
function listRefs({ + fs, + dir, + gitdir = pathBrowserify.join(dir, '.git'), + filepath, +}) { + try { + assertParameter('fs', fs); + assertParameter('gitdir', gitdir); + return GitRefManager.listRefs({ fs: new FileSystem(fs), gitdir, filepath }) + } catch (err) { + err.caller = 'git.listRefs'; + throw err + } +} + +// @ts-check + /** * @param {object} args * @param {import('../models/FileSystem.js').FileSystem} args.fs @@ -130087,7 +133042,7 @@ async function _listRemotes({ fs, gitdir }) { * console.log(remotes) * */ -async function listRemotes({ fs, dir, gitdir = join(dir, '.git') }) { +async function listRemotes({ fs, dir, gitdir = pathBrowserify.join(dir, '.git') }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); @@ -130331,7 +133286,7 @@ async function listServerRefs({ * console.log(tags) * */ -async function listTags({ fs, dir, gitdir = join(dir, '.git') }) { +async function listTags({ fs, dir, gitdir = pathBrowserify.join(dir, '.git') }) { try { assertParameter('fs', fs); assertParameter('gitdir', gitdir); @@ -130389,7 +133344,7 @@ async function _resolveFileId({ const walks = tree.entries().map(function(entry) { let result; if (entry.oid === fileId) { - result = join(parentPath, entry.path); + result = pathBrowserify.join(parentPath, entry.path); filepaths.push(result); } else if (entry.type === 'tree') { result = _readObject({ @@ -130406,7 +133361,7 @@ async function _resolveFileId({ fileId, oid, filepaths, - parentPath: join(parentPath, entry.path), + parentPath: pathBrowserify.join(parentPath, entry.path), }) }); } @@ -130616,7 +133571,7 @@ async function _log({ async function log({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), filepath, ref = 'HEAD', depth, @@ -130746,6 +133701,7 @@ async function log({ * @param {string} [args.signingKey] - passed to [commit](commit.md) when creating a merge commit * @param {object} [args.cache] - a [cache](cache.md) object * @param {MergeDriverCallback} [args.mergeDriver] - a [merge driver](mergeDriver.md) implementation + * @param {boolean} [args.allowUnrelatedHistories = false] - If true, allows merging histories of two branches that started their lives independently. 
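+ * @example
+ * // Editor's illustrative sketch (branch names are placeholders; assumes
+ * // user.name is set in the git config): merge a branch that shares no
+ * // common ancestor with ours; with allowUnrelatedHistories the empty tree
+ * // is used as the merge base instead of throwing MergeNotSupportedError:
+ * let m = await git.merge({
+ *   fs,
+ *   dir: '/tutorial',
+ *   ours: 'main',
+ *   theirs: 'imported-history',
+ *   allowUnrelatedHistories: true,
+ * })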
* * @returns {Promise} Resolves to a description of the merge operation * @see MergeResult @@ -130764,7 +133720,7 @@ async function merge({ fs: _fs, onSign, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ours, theirs, fastForward = true, @@ -130778,6 +133734,7 @@ async function merge({ signingKey, cache = {}, mergeDriver, + allowUnrelatedHistories = false, }) { try { assertParameter('fs', _fs); @@ -130819,6 +133776,7 @@ async function merge({ signingKey, onSign, mergeDriver, + allowUnrelatedHistories, }) } catch (err) { err.caller = 'git.merge'; @@ -130850,7 +133808,7 @@ async function _pack({ fs, cache, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), oids, }) { const hash = new Hash(); @@ -130926,7 +133884,7 @@ async function _packObjects({ fs, cache, gitdir, oids, write }) { const packfileSha = packfile.slice(-20).toString('hex'); const filename = `pack-${packfileSha}.pack`; if (write) { - await fs.write(join(gitdir, `objects/pack/${filename}`), packfile); + await fs.write(pathBrowserify.join(gitdir, `objects/pack/${filename}`), packfile); return { filename } } return { @@ -130971,7 +133929,7 @@ async function _packObjects({ fs, cache, gitdir, oids, write }) { async function packObjects({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), oids, write = false, cache = {}, @@ -131055,7 +134013,7 @@ async function pull({ onAuthSuccess, onAuthFailure, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ref, url, remote, @@ -131135,7 +134093,7 @@ async function listCommitsAndTags({ fs, cache, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), start, finish, }) { @@ -131198,7 +134156,7 @@ async function listObjects({ fs, cache, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), oids, }) { const visited = new Set(); @@ -131645,7 +134603,7 @@ async function push({ onAuthFailure, onPrePush, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ref, remoteRef, remote = 'origin', @@ -131780,7 +134738,7 @@ async function _readBlob({ async function readBlob({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), oid, filepath, cache = {}, @@ -131830,7 +134788,7 @@ async function readBlob({ async function readCommit({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), oid, cache = {}, }) { @@ -131904,7 +134862,7 @@ async function _readNote({ async function readNote({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ref = 'refs/notes/commits', oid, cache = {}, @@ -132121,7 +135079,7 @@ async function readNote({ async function readObject({ fs: _fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), oid, format = 'parsed', filepath = undefined, @@ -132258,7 +135216,7 @@ async function _readTag({ fs, cache, gitdir, oid }) { async function readTag({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), oid, cache = {}, }) { @@ -132308,7 +135266,7 @@ async function readTag({ async function readTree({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), oid, filepath = undefined, cache = {}, @@ -132355,7 +135313,7 @@ async function readTree({ async function remove({ fs: _fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), filepath, cache = {}, }) { @@ -132491,7 +135449,7 @@ async 
function removeNote({ fs: _fs, onSign, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ref = 'refs/notes/commits', oid, author: _author, @@ -132555,11 +135513,11 @@ async function _renameBranch({ ref, checkout = false, }) { - if (ref !== cleanGitRef.clean(ref)) { + if (!validRef(ref, true)) { throw new InvalidRefNameError(ref, cleanGitRef.clean(ref)) } - if (oldref !== cleanGitRef.clean(oldref)) { + if (!validRef(oldref, true)) { throw new InvalidRefNameError(oldref, cleanGitRef.clean(oldref)) } @@ -132623,7 +135581,7 @@ async function _renameBranch({ async function renameBranch({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ref, oldref, checkout = false, @@ -132675,7 +135633,7 @@ async function hashObject$1({ gitdir, type, object }) { async function resetIndex({ fs: _fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), filepath, ref, cache = {}, @@ -132730,7 +135688,7 @@ async function resetIndex({ size: 0, }; // If the file exists in the workdir... - const object = dir && (await fs.read(join(dir, filepath))); + const object = dir && (await fs.read(pathBrowserify.join(dir, filepath))); if (object) { // ... and has the same hash as the desired state... workdirOid = await hashObject$1({ @@ -132740,7 +135698,7 @@ async function resetIndex({ }); if (oid === workdirOid) { // ... use the workdir Stats object - stats = await fs.lstat(join(dir, filepath)); + stats = await fs.lstat(pathBrowserify.join(dir, filepath)); } } await GitIndexManager.acquire({ fs, gitdir, cache }, async function(index) { @@ -132779,7 +135737,7 @@ async function resetIndex({ async function resolveRef({ fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), ref, depth, }) { @@ -132848,7 +135806,7 @@ async function resolveRef({ async function setConfig({ fs: _fs, dir, - gitdir = join(dir, '.git'), + gitdir = pathBrowserify.join(dir, '.git'), path, value, append = false, @@ -132875,6 +135833,859 @@ async function setConfig({ // @ts-check +/** + * @param {object} args + * @param {import('../models/FileSystem.js').FileSystem} args.fs + * @param {string} args.gitdir + * @param {CommitObject} args.commit + * + * @returns {Promise} + * @see CommitObject + * + */ +async function _writeCommit({ fs, gitdir, commit }) { + // Convert object to buffer + const object = GitCommit.from(commit).toObject(); + const oid = await _writeObject({ + fs, + gitdir, + type: 'commit', + object, + format: 'content', + }); + return oid +} + +class GitRefStash { + // constructor removed + + static get timezoneOffsetForRefLogEntry() { + const offsetMinutes = new Date().getTimezoneOffset(); + const offsetHours = Math.abs(Math.floor(offsetMinutes / 60)); + const offsetMinutesFormatted = Math.abs(offsetMinutes % 60) + .toString() + .padStart(2, '0'); + const sign = offsetMinutes > 0 ? 
'-' : '+'; + return `${sign}${offsetHours + .toString() + .padStart(2, '0')}${offsetMinutesFormatted}` + } + + static createStashReflogEntry(author, stashCommit, message) { + const nameNoSpace = author.name.replace(/\s/g, ''); + const z40 = '0000000000000000000000000000000000000000'; // hard code for now, works with `git stash list` + const timestamp = Math.floor(Date.now() / 1000); + const timezoneOffset = GitRefStash.timezoneOffsetForRefLogEntry; + return `${z40} ${stashCommit} ${nameNoSpace} ${author.email} ${timestamp} ${timezoneOffset}\t${message}\n` + } + + static getStashReflogEntry(reflogString, parsed = false) { + const reflogLines = reflogString.split('\n'); + const entries = reflogLines + .filter(l => l) + .reverse() + .map((line, idx) => + parsed ? `stash@{${idx}}: ${line.split('\t')[1]}` : line + ); + return entries + } +} + +const _TreeMap = { + stage: STAGE, + workdir: WORKDIR, +}; + +let lock$3; +async function acquireLock$1(ref, callback) { + if (lock$3 === undefined) lock$3 = new AsyncLock(); + return lock$3.acquire(ref, callback) +} + +// make sure filepath, blob type and blob object (from loose objects) plus oid are in sync and valid +async function checkAndWriteBlob(fs, gitdir, dir, filepath, oid = null) { + const currentFilepath = pathBrowserify.join(dir, filepath); + const stats = await fs.lstat(currentFilepath); + if (!stats) throw new NotFoundError(currentFilepath) + if (stats.isDirectory()) + throw new InternalError( + `${currentFilepath}: file expected, but found directory` + ) + + // Look for it in the loose object directory. + const objContent = oid + ? await readObjectLoose({ fs, gitdir, oid }) + : undefined; + let retOid = objContent ? oid : undefined; + if (!objContent) { + await acquireLock$1({ fs, gitdir, currentFilepath }, async () => { + const object = stats.isSymbolicLink() + ? await fs.readlink(currentFilepath).then(posixifyPathBuffer) + : await fs.read(currentFilepath); + + if (object === null) throw new NotFoundError(currentFilepath) + + retOid = await _writeObject({ fs, gitdir, type: 'blob', object }); + }); + } + + return retOid +} + +async function processTreeEntries({ fs, dir, gitdir, entries }) { + // make sure each tree entry has valid oid + async function processTreeEntry(entry) { + if (entry.type === 'tree') { + if (!entry.oid) { + // Process children entries if the current entry is a tree + const children = await Promise.all(entry.children.map(processTreeEntry)); + // Write the tree with the processed children + entry.oid = await _writeTree({ + fs, + gitdir, + tree: children, + }); + entry.mode = 0o40000; // directory + } + } else if (entry.type === 'blob') { + entry.oid = await checkAndWriteBlob( + fs, + gitdir, + dir, + entry.path, + entry.oid + ); + entry.mode = 0o100644; // file + } + + // remove path from entry.path + entry.path = entry.path.split('/').pop(); + return entry + } + + return Promise.all(entries.map(processTreeEntry)) +} + +async function writeTreeChanges({ + fs, + dir, + gitdir, + treePair, // [TREE({ ref: 'HEAD' }), 'STAGE'] would be the equivalent of `git write-tree` +}) { + const isStage = treePair[1] === 'stage'; + const trees = treePair.map(t => (typeof t === 'string' ? _TreeMap[t]() : t)); + + const changedEntries = []; + // transform WalkerEntry objects into the desired format + const map = async (filepath, [head, stage]) => { + if ( + filepath === '.' 
|| + (await GitIgnoreManager.isIgnored({ fs, dir, gitdir, filepath })) + ) { + return + } + + if (stage) { + if ( + !head || + ((await head.oid()) !== (await stage.oid()) && + (await stage.oid()) !== undefined) + ) { + changedEntries.push([head, stage]); + } + return { + mode: await stage.mode(), + path: filepath, + oid: await stage.oid(), + type: await stage.type(), + } + } + }; + + // combine mapped entries with their parent results + const reduce = async (parent, children) => { + children = children.filter(Boolean); // Remove undefined entries + if (!parent) { + return children.length > 0 ? children : undefined + } else { + parent.children = children; + return parent + } + }; + + // if parent is skipped, skip the children + const iterate = async (walk, children) => { + const filtered = []; + for (const child of children) { + const [head, stage] = child; + if (isStage) { + if (stage) { + // for deleted file in work dir, it also needs to be added on stage + if (await fs.exists(`${dir}/${stage.toString()}`)) { + filtered.push(child); + } else { + changedEntries.push([null, stage]); // record the change (deletion) while stop the iteration + } + } + } else if (head) { + // for deleted file in workdir, "stage" (workdir in our case) will be undefined + if (!stage) { + changedEntries.push([head, null]); // record the change (deletion) while stop the iteration + } else { + filtered.push(child); // workdir, tracked only + } + } + } + return filtered.length ? Promise.all(filtered.map(walk)) : [] + }; + + const entries = await _walk({ + fs, + cache: {}, + dir, + gitdir, + trees, + map, + reduce, + iterate, + }); + + if (changedEntries.length === 0 || entries.length === 0) { + return null // no changes found to stash + } + + const processedEntries = await processTreeEntries({ + fs, + dir, + gitdir, + entries, + }); + + const treeEntries = processedEntries.filter(Boolean).map(entry => ({ + mode: entry.mode, + path: entry.path, + oid: entry.oid, + type: entry.type, + })); + + return _writeTree({ fs, gitdir, tree: treeEntries }) +} + +async function applyTreeChanges({ + fs, + dir, + gitdir, + stashCommit, + parentCommit, + wasStaged, +}) { + const dirRemoved = []; + const stageUpdated = []; + + // analyze the changes + const ops = await _walk({ + fs, + cache: {}, + dir, + gitdir, + trees: [TREE({ ref: parentCommit }), TREE({ ref: stashCommit })], + map: async (filepath, [parent, stash]) => { + if ( + filepath === '.' || + (await GitIgnoreManager.isIgnored({ fs, dir, gitdir, filepath })) + ) { + return + } + const type = stash ? await stash.type() : await parent.type(); + if (type !== 'tree' && type !== 'blob') { + return + } + + // deleted tree or blob + if (!stash && parent) { + const method = type === 'tree' ? 
'rmdir' : 'rm'; + if (type === 'tree') dirRemoved.push(filepath); + if (type === 'blob' && wasStaged) + stageUpdated.push({ filepath, oid: await parent.oid() }); // stats is undefined, will stage the deletion with index.insert + return { method, filepath } + } + + const oid = await stash.oid(); + if (!parent || (await parent.oid()) !== oid) { + // only apply changes if changed from the parent commit or doesn't exist in the parent commit + if (type === 'tree') { + return { method: 'mkdir', filepath } + } else { + if (wasStaged) + stageUpdated.push({ + filepath, + oid, + stats: await fs.lstat(pathBrowserify.join(dir, filepath)), + }); + return { + method: 'write', + filepath, + oid, + } + } + } + }, + }); + + // apply the changes to work dir + await acquireLock$1({ fs, gitdir, dirRemoved, ops }, async () => { + for (const op of ops) { + const currentFilepath = pathBrowserify.join(dir, op.filepath); + switch (op.method) { + case 'rmdir': + await fs.rmdir(currentFilepath); + break + case 'mkdir': + await fs.mkdir(currentFilepath); + break + case 'rm': + await fs.rm(currentFilepath); + break + case 'write': + // only writes if file is not in the removedDirs + if ( + !dirRemoved.some(removedDir => + currentFilepath.startsWith(removedDir) + ) + ) { + const { object } = await _readObject({ + fs, + cache: {}, + gitdir, + oid: op.oid, + }); + // just like checkout, since mode only applicable to create, not update, delete first + if (await fs.exists(currentFilepath)) { + await fs.rm(currentFilepath); + } + await fs.write(currentFilepath, object); // only handles regular files for now + } + break + } + } + }); + + // update the stage + await GitIndexManager.acquire({ fs, gitdir, cache: {} }, async index => { + stageUpdated.forEach(({ filepath, stats, oid }) => { + index.insert({ filepath, stats, oid }); + }); + }); +} + +class GitStashManager { + /** + * Creates an instance of GitStashManager. + * + * @param {Object} args + * @param {FSClient} args.fs - A file system implementation. + * @param {string} args.dir - The working directory. + * @param {string}[args.gitdir=join(dir, '.git')] - [required] The [git directory](dir-vs-gitdir.md) path + */ + constructor({ fs, dir, gitdir = pathBrowserify.join(dir, '.git') }) { + Object.assign(this, { + fs, + dir, + gitdir, + _author: null, + }); + } + + /** + * Gets the reference name for the stash. + * + * @returns {string} - The stash reference name. + */ + static get refStash() { + return 'refs/stash' + } + + /** + * Gets the reference name for the stash reflogs. + * + * @returns {string} - The stash reflogs reference name. + */ + static get refLogsStash() { + return 'logs/refs/stash' + } + + /** + * Gets the file path for the stash reference. + * + * @returns {string} - The file path for the stash reference. + */ + get refStashPath() { + return pathBrowserify.join(this.gitdir, GitStashManager.refStash) + } + + /** + * Gets the file path for the stash reflogs. + * + * @returns {string} - The file path for the stash reflogs. + */ + get refLogsStashPath() { + return pathBrowserify.join(this.gitdir, GitStashManager.refLogsStash) + } + + /** + * Retrieves the author information for the stash. + * + * @returns {Promise} - The author object. + * @throws {MissingNameError} - If the author name is missing. 
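+   * @example
+   * // Editor's illustrative sketch: the author is resolved (and cached) from
+   * // the repository's config, e.g. user.name / user.email:
+   * // const author = await new GitStashManager({ fs, dir: '/tutorial' }).getAuthor()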
+ */ + async getAuthor() { + if (!this._author) { + this._author = await normalizeAuthorObject({ + fs: this.fs, + gitdir: this.gitdir, + author: {}, + }); + if (!this._author) throw new MissingNameError('author') + } + return this._author + } + + /** + * Gets the SHA of a stash entry by its index. + * + * @param {number} refIdx - The index of the stash entry. + * @param {string[]} [stashEntries] - Optional preloaded stash entries. + * @returns {Promise} - The SHA of the stash entry or `null` if not found. + */ + async getStashSHA(refIdx, stashEntries) { + if (!(await this.fs.exists(this.refStashPath))) { + return null + } + + const entries = + stashEntries || (await this.readStashReflogs({ parsed: false })); + return entries[refIdx].split(' ')[1] + } + + /** + * Writes a stash commit to the repository. + * + * @param {Object} args + * @param {string} args.message - The commit message. + * @param {string} args.tree - The tree object ID. + * @param {string[]} args.parent - The parent commit object IDs. + * @returns {Promise} - The object ID of the written commit. + */ + async writeStashCommit({ message, tree, parent }) { + return _writeCommit({ + fs: this.fs, + gitdir: this.gitdir, + commit: { + message, + tree, + parent, + author: await this.getAuthor(), + committer: await this.getAuthor(), + }, + }) + } + + /** + * Reads a stash commit by its index. + * + * @param {number} refIdx - The index of the stash entry. + * @returns {Promise} - The stash commit object. + * @throws {InvalidRefNameError} - If the index is invalid. + */ + async readStashCommit(refIdx) { + const stashEntries = await this.readStashReflogs({ parsed: false }); + if (refIdx !== 0) { + // non-default case, throw exceptions if not valid + if (refIdx < 0 || refIdx > stashEntries.length - 1) { + throw new InvalidRefNameError( + `stash@${refIdx}`, + 'number that is in range of [0, num of stash pushed]' + ) + } + } + + const stashSHA = await this.getStashSHA(refIdx, stashEntries); + if (!stashSHA) { + return {} // no stash found + } + + // get the stash commit object + return _readCommit({ + fs: this.fs, + cache: {}, + gitdir: this.gitdir, + oid: stashSHA, + }) + } + + /** + * Writes a stash reference to the repository. + * + * @param {string} stashCommit - The object ID of the stash commit. + * @returns {Promise} + */ + async writeStashRef(stashCommit) { + return GitRefManager.writeRef({ + fs: this.fs, + gitdir: this.gitdir, + ref: GitStashManager.refStash, + value: stashCommit, + }) + } + + /** + * Writes a reflog entry for a stash commit. + * + * @param {Object} args + * @param {string} args.stashCommit - The object ID of the stash commit. + * @param {string} args.message - The reflog message. + * @returns {Promise} + */ + async writeStashReflogEntry({ stashCommit, message }) { + const author = await this.getAuthor(); + const entry = GitRefStash.createStashReflogEntry( + author, + stashCommit, + message + ); + const filepath = this.refLogsStashPath; + + await acquireLock$1({ filepath, entry }, async () => { + const appendTo = (await this.fs.exists(filepath)) + ? await this.fs.read(filepath, 'utf8') + : ''; + await this.fs.write(filepath, appendTo + entry, 'utf8'); + }); + } + + /** + * Reads the stash reflogs. + * + * @param {Object} args + * @param {boolean} [args.parsed=false] - Whether to parse the reflog entries. + * @returns {Promise} - The reflog entries as strings or parsed objects. 
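+   * @example
+   * // Editor's illustrative sketch (hash and message are placeholders): with
+   * // `parsed: true` the entries mirror `git stash list`, e.g.
+   * //   'stash@{0}: WIP on main: 1234567 last commit message'
+   * // const entries = await stashMgr.readStashReflogs({ parsed: true })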
+   */
+  async readStashReflogs({ parsed = false }) {
+    if (!(await this.fs.exists(this.refLogsStashPath))) {
+      return []
+    }
+
+    const reflogBuffer = await this.fs.read(this.refLogsStashPath);
+    const reflogString = reflogBuffer.toString();
+
+    return GitRefStash.getStashReflogEntry(reflogString, parsed)
+  }
+}
+
+// @ts-check
+
+async function _stashPush({ fs, dir, gitdir, message = '' }) {
+  const stashMgr = new GitStashManager({ fs, dir, gitdir });
+
+  await stashMgr.getAuthor(); // ensure there is an author
+  const branch = await _currentBranch({
+    fs,
+    gitdir,
+    fullname: false,
+  });
+
+  // prepare the stash commit: first parent is the current branch HEAD
+  const headCommit = await GitRefManager.resolve({
+    fs,
+    gitdir,
+    ref: 'HEAD',
+  });
+
+  const headCommitObj = await readCommit({ fs, dir, gitdir, oid: headCommit });
+  const headMsg = headCommitObj.commit.message;
+
+  const stashCommitParents = [headCommit];
+  let stashCommitTree = null;
+  let workDirCompareBase = TREE({ ref: 'HEAD' });
+
+  const indexTree = await writeTreeChanges({
+    fs,
+    dir,
+    gitdir,
+    treePair: [TREE({ ref: 'HEAD' }), 'stage'],
+  });
+  if (indexTree) {
+    // this indexTree will be the tree of the stash commit
+    // create a commit from the index tree, which has one parent: the current branch HEAD
+    const stashCommitOne = await stashMgr.writeStashCommit({
+      message: `stash-Index: WIP on ${branch} - ${new Date().toISOString()}`,
+      tree: indexTree, // stashCommitTree
+      parent: stashCommitParents,
+    });
+    stashCommitParents.push(stashCommitOne);
+    stashCommitTree = indexTree;
+    workDirCompareBase = STAGE();
+  }
+
+  const workingTree = await writeTreeChanges({
+    fs,
+    dir,
+    gitdir,
+    treePair: [workDirCompareBase, 'workdir'],
+  });
+  if (workingTree) {
+    // create a commit from the working directory tree, which has one parent: either the commit we just made, or the headCommit
+    const workingHeadCommit = await stashMgr.writeStashCommit({
+      message: `stash-WorkDir: WIP on ${branch} - ${new Date().toISOString()}`,
+      tree: workingTree,
+      parent: [stashCommitParents[stashCommitParents.length - 1]],
+    });
+
+    stashCommitParents.push(workingHeadCommit);
+    stashCommitTree = workingTree;
+  }
+
+  if (!stashCommitTree || (!indexTree && !workingTree)) {
+    throw new NotFoundError('changes, nothing to stash')
+  }
+
+  // create the stash commit from the resulting tree; it has up to three parents: HEAD plus the commit(s) we just made
+  const stashMsg =
+    (message.trim() || `WIP on ${branch}`) +
+    `: ${headCommit.substring(0, 7)} ${headMsg}`;
+
+  const stashCommit = await stashMgr.writeStashCommit({
+    message: stashMsg,
+    tree: stashCommitTree,
+    parent: stashCommitParents,
+  });
+
+  // next, write this commit into .git/refs/stash:
+  await stashMgr.writeStashRef(stashCommit);
+
+  // write the stash commit to the logs
+  await stashMgr.writeStashReflogEntry({
+    stashCommit,
+    message: stashMsg,
+  });
+
+  // finally, go back to a clean working directory
+  await checkout({
+    fs,
+    dir,
+    gitdir,
+    ref: branch,
+    track: false,
+    force: true, // force checkout to discard changes
+  });
+
+  return stashCommit
+}
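+
+// The commit graph produced by a successful `_stashPush` looks like the
+// sketch below; the middle parents only exist when there were staged or
+// working-directory changes, respectively, mirroring canonical git's layout:
+//
+//   refs/stash -> stash commit
+//     parent[0]: HEAD
+//     parent[1]: 'stash-Index' commit   (tree = staged changes)      [optional]
+//     parent[2]: 'stash-WorkDir' commit (tree = working-dir changes) [optional]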
+
+async function _stashApply({ fs, dir, gitdir, refIdx = 0 }) {
+  const stashMgr = new GitStashManager({ fs, dir, gitdir });
+
+  // get the stash commit object
+  const stashCommit = await stashMgr.readStashCommit(refIdx);
+  const { parent: stashParents = null } = stashCommit.commit
+    ? stashCommit.commit
+    : {};
+  if (!stashParents || !Array.isArray(stashParents)) {
+    return // no stash found
+  }
+
+  // compare each stash commit tree with its parent commit
+  for (let i = 0; i < stashParents.length - 1; i++) {
+    const applyingCommit = await _readCommit({
+      fs,
+      cache: {},
+      gitdir,
+      oid: stashParents[i + 1],
+    });
+    const wasStaged = applyingCommit.commit.message.startsWith('stash-Index');
+
+    await applyTreeChanges({
+      fs,
+      dir,
+      gitdir,
+      stashCommit: stashParents[i + 1],
+      parentCommit: stashParents[i],
+      wasStaged,
+    });
+  }
+}
+
+async function _stashDrop({ fs, dir, gitdir, refIdx = 0 }) {
+  const stashMgr = new GitStashManager({ fs, dir, gitdir });
+  const stashCommit = await stashMgr.readStashCommit(refIdx);
+  if (!stashCommit.commit) {
+    return // no stash found
+  }
+
+  // remove the stash ref first
+  const stashRefPath = stashMgr.refStashPath;
+  await acquireLock$1(stashRefPath, async () => {
+    if (await fs.exists(stashRefPath)) {
+      await fs.rm(stashRefPath);
+    }
+  });
+
+  // read from the stash reflog and list the stash commits
+  const reflogEntries = await stashMgr.readStashReflogs({ parsed: false });
+  if (!reflogEntries.length) {
+    return // no stash reflog entry
+  }
+
+  // remove the specified stash reflog entry from reflogEntries, then update the stash reflog
+  reflogEntries.splice(refIdx, 1);
+
+  const stashReflogPath = stashMgr.refLogsStashPath;
+  await acquireLock$1(
+    { reflogEntries, stashReflogPath, stashMgr },
+    async () => {
+      if (reflogEntries.length) {
+        await fs.write(stashReflogPath, reflogEntries.join('\n'), 'utf8');
+        const lastStashCommit = reflogEntries[
+          reflogEntries.length - 1
+        ].split(' ')[1];
+        await stashMgr.writeStashRef(lastStashCommit);
+      } else {
+        // remove the stash reflog file if no entry is left
+        await fs.rm(stashReflogPath);
+      }
+    }
+  );
+}
+
+async function _stashList({ fs, dir, gitdir }) {
+  const stashMgr = new GitStashManager({ fs, dir, gitdir });
+  return stashMgr.readStashReflogs({ parsed: true })
+}
+
+async function _stashClear({ fs, dir, gitdir }) {
+  const stashMgr = new GitStashManager({ fs, dir, gitdir });
+  const stashRefPaths = [stashMgr.refStashPath, stashMgr.refLogsStashPath];
+
+  await acquireLock$1(stashRefPaths, async () => {
+    await Promise.all(
+      stashRefPaths.map(async path => {
+        if (await fs.exists(path)) {
+          return fs.rm(path)
+        }
+      })
+    );
+  });
+}
+
+async function _stashPop({ fs, dir, gitdir, refIdx = 0 }) {
+  await _stashApply({ fs, dir, gitdir, refIdx });
+  await _stashDrop({ fs, dir, gitdir, refIdx });
+}
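+
+// How the pieces compose: `_stashApply` walks stashParents pairwise and
+// replays each diff onto the work dir; `_stashDrop` removes refs/stash, then
+// rewrites logs/refs/stash without the dropped entry and re-points refs/stash
+// at the newest remaining entry; `_stashPop` is simply apply followed by drop
+// of the same refIdx.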
+
+// @ts-check
+
+/**
+ * stash api, supports {'push' | 'pop' | 'apply' | 'drop' | 'list' | 'clear'} StashOp
+ * _note_,
+ * - all stash operations work on tracked files only, and use loose objects only (no packed objects)
+ * - when op === 'push', both working directory and index (staged) changes will be stashed, tracked files only
+ * - when op === 'push', an optional message can be attached to the stash entry
+ * - when op === 'apply' | 'pop', the stashed changes will overwrite the working directory; there is no abort on conflicts
+ *
+ * @param {object} args
+ * @param {FsClient} args.fs - [required] a file system client
+ * @param {string} [args.dir] - [required] The [working tree](dir-vs-gitdir.md) directory path
+ * @param {string} [args.gitdir=join(dir,'.git')] - [optional] The [git directory](dir-vs-gitdir.md) path
+ * @param {'push' | 'pop' | 'apply' | 'drop' | 'list' | 'clear'} [args.op = 'push'] - [optional] name of stash operation, defaults to 'push'
+ * @param {string} [args.message = ''] - [optional] message to be used for the stash entry, only applicable when op === 'push'
+ * @param {number} [args.refIdx = 0] - [optional] stash ref index of the entry, only applicable when op === 'apply' | 'drop' | 'pop'; refIdx >= 0 and < number of stashes pushed
+ * @returns {Promise} Resolves successfully when stash operations are complete
+ *
+ * @example
+ * // stash changes in the working directory and index
+ * let dir = '/tutorial'
+ * await fs.promises.writeFile(`${dir}/a.txt`, 'original content - a')
+ * await fs.promises.writeFile(`${dir}/b.txt`, 'original content - b')
+ * await git.add({ fs, dir, filepath: ['a.txt', 'b.txt'] })
+ * let sha = await git.commit({
+ *   fs,
+ *   dir,
+ *   author: {
+ *     name: 'Mr. Stash',
+ *     email: 'mstasher@stash.com',
+ *   },
+ *   message: 'add a.txt and b.txt to test stash'
+ * })
+ * console.log(sha)
+ *
+ * await fs.promises.writeFile(`${dir}/a.txt`, 'staged change - a')
+ * await git.add({ fs, dir, filepath: 'a.txt' })
+ * await fs.promises.writeFile(`${dir}/b.txt`, 'working dir change, not staged - b')
+ *
+ * await git.stash({ fs, dir }) // default gitdir and op
+ *
+ * console.log(await git.status({ fs, dir, filepath: 'a.txt' })) // 'unmodified'
+ * console.log(await git.status({ fs, dir, filepath: 'b.txt' })) // 'unmodified'
+ *
+ * const refLog = await git.stash({ fs, dir, op: 'list' })
+ * console.log(refLog) // [{stash{#} message}]
+ *
+ * await git.stash({ fs, dir, op: 'apply' }) // apply the stash
+ *
+ * console.log(await git.status({ fs, dir, filepath: 'a.txt' })) // 'modified'
+ * console.log(await git.status({ fs, dir, filepath: 'b.txt' })) // '*modified'
+ */
+
+async function stash({
+  fs,
+  dir,
+  gitdir = pathBrowserify.join(dir, '.git'),
+  op = 'push',
+  message = '',
+  refIdx = 0,
+}) {
+  assertParameter('fs', fs);
+  assertParameter('dir', dir);
+  assertParameter('gitdir', gitdir);
+  assertParameter('op', op);
+
+  const stashMap = {
+    push: _stashPush,
+    apply: _stashApply,
+    drop: _stashDrop,
+    list: _stashList,
+    clear: _stashClear,
+    pop: _stashPop,
+  };
+
+  const opsNeedRefIdx = ['apply', 'drop', 'pop'];
+
+  try {
+    const _fs = new FileSystem(fs);
+    const folders = ['refs', 'logs', 'logs/refs'];
+    // create the folders in order and actually await each mkdir;
+    // an async forEach callback would fire and forget, so 'logs/refs'
+    // could race ahead of 'logs'
+    for (const folder of folders.map(f => pathBrowserify.join(gitdir, f))) {
+      if (!(await _fs.exists(folder))) {
+        await _fs.mkdir(folder);
+      }
+    }
+
+    const opFunc = stashMap[op];
+    if (opFunc) {
+      if (opsNeedRefIdx.includes(op) && refIdx < 0) {
+        throw new InvalidRefNameError(
+          `stash@${refIdx}`,
+          'number that is in range of [0, num of stash pushed]'
+        )
+      }
+      return await opFunc({ fs: _fs, dir, gitdir, message, refIdx })
+    }
+    throw new Error(`To be implemented: ${op}`)
+  } catch (err) {
+    err.caller = 'git.stash';
+    throw err
+  }
+}
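+
+// A short usage sketch for the ops not covered by the @example above
+// (assumes the same `fs`/`dir` setup; illustrative only):
+//
+//   await git.stash({ fs, dir, op: 'pop' })             // apply stash@{0}, then drop it
+//   await git.stash({ fs, dir, op: 'drop', refIdx: 1 }) // discard stash@{1}
+//   await git.stash({ fs, dir, op: 'clear' })           // remove refs/stash and its reflog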
+
+// @ts-check
+
 /**
  * Tell whether a file has been changed
  *
@@ -132913,7 +136724,7 @@ async function setConfig({
 async function status({
   fs: _fs,
   dir,
-  gitdir = join(dir, '.git'),
+  gitdir = pathBrowserify.join(dir, '.git'),
   filepath,
   cache = {},
 }) {
@@ -132949,7 +136760,7 @@ async function status({
       return null
     }
   );
-  const stats = await fs.lstat(join(dir, filepath));
+  const stats = await fs.lstat(pathBrowserify.join(dir, filepath));
 
   const H = treeOid !== null; // head
   const I = indexEntry !== null; // index
@@ -132959,7 +136770,7 @@ async function status({
     if (I && !compareStats(indexEntry, stats)) {
       return indexEntry.oid
     } else {
-      const object = await fs.read(join(dir, filepath));
+      const object = await fs.read(pathBrowserify.join(dir, filepath));
       const workdirOid = await hashObject$1({
         gitdir,
         type: 'blob',
@@ -133221,7 +137032,7 @@ async function getHeadTree({ fs, cache, gitdir }) {
 async function statusMatrix({
   fs: _fs,
   dir,
-  gitdir = join(dir, '.git'),
+  gitdir = pathBrowserify.join(dir, '.git'),
   ref = 'HEAD',
   filepaths = ['.'],
   filter,
@@ -133331,7 +137142,7 @@ async function statusMatrix({
 async function tag({
   fs: _fs,
   dir,
-  gitdir = join(dir, '.git'),
+  gitdir = pathBrowserify.join(dir, '.git'),
   ref,
   object,
   force = false,
@@ -133410,10 +137221,10 @@ async function tag({
  *   oid
  * })
  */
-async function updateIndex({
+async function updateIndex$1({
   fs: _fs,
   dir,
-  gitdir = join(dir, '.git'),
+  gitdir = pathBrowserify.join(dir, '.git'),
   cache = {},
   filepath,
   oid,
@@ -133433,11 +137244,9 @@ async function updateIndex({
   return await GitIndexManager.acquire(
     { fs, gitdir, cache },
     async function(index) {
-      let fileStats;
-
      if (!force) {
         // Check if the file is still present in the working directory
-        fileStats = await fs.lstat(join(dir, filepath));
+        const fileStats = await fs.lstat(pathBrowserify.join(dir, filepath));
 
         if (fileStats) {
           if (fileStats.isDirectory()) {
@@ -133464,7 +137273,7 @@ async function updateIndex({
       let fileStats;
 
       if (!oid) {
-        fileStats = await fs.lstat(join(dir, filepath));
+        fileStats = await fs.lstat(pathBrowserify.join(dir, filepath));
 
         if (!fileStats) {
           throw new NotFoundError(
@@ -133487,25 +137296,14 @@ async function updateIndex({
         )
       }
 
-      // By default we use 0 for the stats of the index file
-      let stats = {
-        ctime: new Date(0),
-        mtime: new Date(0),
-        dev: 0,
-        ino: 0,
-        mode,
-        uid: 0,
-        gid: 0,
-        size: 0,
-      };
-
+      let stats;
       if (!oid) {
         stats = fileStats;
 
         // Write the file to the object database
         const object = stats.isSymbolicLink()
-          ? await fs.readlink(join(dir, filepath))
-          : await fs.read(join(dir, filepath));
+          ? await fs.readlink(pathBrowserify.join(dir, filepath))
+          : await fs.read(pathBrowserify.join(dir, filepath));
 
         oid = await _writeObject({
           fs,
@@ -133514,6 +137312,18 @@ async function updateIndex({
           format: 'content',
           object,
         });
+      } else {
+        // By default we use 0 for the stats of the index file
+        stats = {
+          ctime: new Date(0),
+          mtime: new Date(0),
+          dev: 0,
+          ino: 0,
+          mode,
+          uid: 0,
+          gid: 0,
+          size: 0,
+        };
+      }
 
       index.insert({
@@ -133803,7 +137613,7 @@ function version() {
 async function walk({
   fs,
   dir,
-  gitdir = join(dir, '.git'),
+  gitdir = pathBrowserify.join(dir, '.git'),
   trees,
   map,
   reduce,
@@ -133855,7 +137665,7 @@ async function walk({
 * console.log('oid', oid) // should be 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'
 *
 */
-async function writeBlob({ fs, dir, gitdir = join(dir, '.git'), blob }) {
+async function writeBlob({ fs, dir, gitdir = pathBrowserify.join(dir, '.git'), blob }) {
   try {
     assertParameter('fs', fs);
     assertParameter('gitdir', gitdir);
@@ -133876,31 +137686,6 @@ async function writeBlob({ fs, dir, gitdir = join(dir, '.git'), blob }) {
 
 // @ts-check
 
-/**
- * @param {object} args
- * @param {import('../models/FileSystem.js').FileSystem} args.fs
- * @param {string} args.gitdir
- * @param {CommitObject} args.commit
- *
- * @returns {Promise}
- * @see CommitObject
- *
- */
-async function _writeCommit({ fs, gitdir, commit }) {
-  // Convert object to buffer
-  const object = GitCommit.from(commit).toObject();
-  const oid = await _writeObject({
-    fs,
-    gitdir,
-    type: 'commit',
-    object,
-    format: 'content',
-  });
-  return oid
-}
-
-// @ts-check
-
 /**
  * Write a commit object directly
  *
@@ -133917,7 +137702,7 @@ async function _writeCommit({ fs, gitdir, commit }) {
 async function writeCommit({
   fs,
   dir,
-  gitdir = join(dir, '.git'),
+  gitdir = pathBrowserify.join(dir, '.git'),
   commit,
 }) {
   try {
@@ -134006,7 +137791,7 @@ async function writeCommit({
 async function writeObject({
   fs: _fs,
   dir,
-  gitdir = join(dir, '.git'),
+  gitdir = pathBrowserify.join(dir, '.git'),
   type,
   object,
   format = 'parsed',
@@ -134088,7 +137873,7 @@ async function writeObject({
 async function writeRef({
   fs: _fs,
   dir,
-  gitdir = join(dir, '.git'),
+  gitdir = pathBrowserify.join(dir, '.git'),
   ref,
   value,
   force = false,
@@ -134102,7 +137887,7 @@ async function writeRef({
 
   const fs = new FileSystem(_fs);
 
-  if (ref !== cleanGitRef.clean(ref)) {
+  if (!validRef(ref, true)) {
     throw new InvalidRefNameError(ref, cleanGitRef.clean(ref))
   }
 
@@ -134198,7 +137983,7 @@ async function _writeTag({ fs, gitdir, tag }) {
 * console.log('tag', oid)
 *
 */
-async function writeTag({ fs, dir, gitdir = join(dir, '.git'), tag }) {
+async function writeTag({ fs, dir, gitdir = pathBrowserify.join(dir, '.git'), tag }) {
   try {
     assertParameter('fs', fs);
     assertParameter('gitdir', gitdir);
@@ -134231,7 +138016,7 @@ async function writeTag({ fs, dir, gitdir = join(dir, '.git'), tag }) {
 * @see TreeEntry
 *
 */
-async function writeTree({ fs, dir, gitdir = join(dir, '.git'), tree }) {
+async function writeTree({ fs, dir, gitdir = pathBrowserify.join(dir, '.git'), tree }) {
   try {
     assertParameter('fs', fs);
     assertParameter('gitdir', gitdir);
@@ -134287,6 +138072,7 @@ var index = {
   listBranches,
   listFiles,
   listNotes,
+  listRefs,
   listRemotes,
   listServerRefs,
   listTags,
@@ -134305,7 +138091,7 @@ var index = {
   removeNote,
   renameBranch,
   resetIndex,
-  updateIndex,
+  updateIndex: updateIndex$1,
   resolveRef,
   status,
   statusMatrix,
@@ -134318,6 +138104,7 @@ var index = {
   writeRef,
   writeTag,
   writeTree,
+  stash,
 };
 
 exports.Errors = Errors;
@@ -134357,6 +138144,7 @@ exports.isIgnored = isIgnored;
 exports.listBranches = listBranches;
 exports.listFiles = listFiles;
 exports.listNotes = listNotes;
+exports.listRefs = listRefs;
 exports.listRemotes = listRemotes;
 exports.listServerRefs = listServerRefs;
 exports.listTags = listTags;
@@ -134377,10 +138165,11 @@ exports.renameBranch = renameBranch;
 exports.resetIndex = resetIndex;
 exports.resolveRef = resolveRef;
 exports.setConfig = setConfig;
+exports.stash = stash;
 exports.status = status;
 exports.statusMatrix = statusMatrix;
 exports.tag = tag;
-exports.updateIndex = updateIndex;
+exports.updateIndex = updateIndex$1;
 exports.version = version;
 exports.walk = walk;
 exports.writeBlob = writeBlob;
@@ -136475,7 +140264,7 @@ exports.JSONPath = JSONPath;
 /***/ ((module) => {
 
 "use strict";
-module.exports = {"i8":"16.15.5"};
+module.exports = {"i8":"16.15.6"};
 
 /***/ }),