diff --git a/Cargo.lock b/Cargo.lock index a9e90df17..ddb7924a8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -98,7 +98,7 @@ checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.28", ] [[package]] @@ -145,9 +145,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.3.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" +checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" [[package]] name = "block-buffer" @@ -262,7 +262,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn", + "syn 2.0.28", ] [[package]] @@ -300,6 +300,12 @@ dependencies = [ "windows-sys 0.45.0", ] +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = "core-foundation" version = "0.9.3" @@ -429,6 +435,19 @@ dependencies = [ "parking_lot_core", ] +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn 1.0.109", +] + [[package]] name = "diff" version = "0.1.13" @@ -597,7 +616,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.28", ] [[package]] @@ -933,9 +952,9 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" -version = "0.4.3" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" +checksum = "3852614a3bd9ca9804678ba6be5e3b8ce76dfc902cae004e3e0c44051b6e88db" [[package]] name = "lock_api" @@ -1006,7 +1025,7 @@ checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.28", ] [[package]] @@ -1174,7 +1193,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.28", ] [[package]] @@ -1253,6 +1272,7 @@ dependencies = [ "pipe-trait", "reqwest", "tokio", + "which", ] [[package]] @@ -1278,6 +1298,7 @@ dependencies = [ "pacquet_cafs", "pacquet_diagnostics", "pacquet_executor", + "pacquet_lockfile", "pacquet_npmrc", "pacquet_package_json", "pacquet_registry", @@ -1315,9 +1336,16 @@ dependencies = [ name = "pacquet_lockfile" version = "0.0.1" dependencies = [ + "derive_more", + "node-semver", "pacquet_diagnostics", + "pacquet_package_json", + "pipe-trait", + "pretty_assertions", "serde", "serde_yaml", + "split-first-char", + "text-block-macros", ] [[package]] @@ -1338,6 +1366,7 @@ version = "0.0.1" dependencies = [ "insta", "pacquet_diagnostics", + "pipe-trait", "pretty_assertions", "serde", "serde_json", @@ -1673,13 +1702,22 @@ version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + [[package]] name = "rustix" -version = "0.38.4" +version = "0.38.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" +checksum = "d7db8590df6dfcd144d22afd1b83b36c21a18d7cbc1dc4bb5295a8712e9eb662" dependencies = [ - "bitflags 2.3.3", + "bitflags 2.4.0", "errno", "libc", "linux-raw-sys", @@ -1745,6 +1783,12 @@ dependencies = [ "libc", ] +[[package]] +name = "semver" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" + [[package]] name = "serde" version = "1.0.188" @@ -1762,7 +1806,7 @@ checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.28", ] [[package]] @@ -1896,6 +1940,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "split-first-char" +version = "0.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ce492fd669c5b32b3e629c0b35278c51d4fae83176e248c9dea71f39c86e58" + [[package]] name = "ssri" version = "9.2.0" @@ -1938,7 +1988,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn", + "syn 2.0.28", ] [[package]] @@ -1969,6 +2019,17 @@ dependencies = [ "is-terminal", ] +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + [[package]] name = "syn" version = "2.0.28" @@ -2014,6 +2075,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "text-block-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f8b59b4da1c1717deaf1de80f0179a9d8b4ac91c986d5fd9f4a8ff177b84049" + [[package]] name = "textwrap" version = "0.15.2" @@ -2042,7 +2109,7 @@ checksum = "49922ecae66cc8a249b77e68d1d0623c1b2c514f0060c27cdc68bd62a1219d35" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.28", ] [[package]] @@ -2108,7 +2175,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.28", ] [[package]] @@ -2161,7 +2228,7 @@ checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.28", ] [[package]] @@ -2341,7 +2408,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn", + "syn 2.0.28", "wasm-bindgen-shared", ] @@ -2375,7 +2442,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.28", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -2396,6 +2463,18 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", +] + [[package]] name = "winapi" version = "0.3.9" diff --git a/Cargo.toml b/Cargo.toml index 6876ef99c..ab54c1b18 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,6 +27,7 @@ pacquet_diagnostics = { path = "crates/diagnostics" } async-recursion = { version = "1.0.5" } clap = { version = "4", features = 
["derive", "string"] } dashmap = { version = "5.5.3" } +derive_more = { version = "0.99.17" } home = { version = "0.5.5" } insta = { version = "1.31.0", features = ["yaml", "glob", "walkdir"] } itertools = { version = "0.11.0" } @@ -42,14 +43,17 @@ serde = { version = "1.0.188", features = ["derive"] } serde_ini = { version = "0.2.0" } serde_json = { version = "1.0.107", features = ["preserve_order"] } serde_yaml = { version = "0.9.1" } +split-first-char = { version = "0.0.0" } ssri = { version = "9.0.0" } strum = { version = "0.25.0", features = ["derive"] } tar = { version = "0.4.40" } +text-block-macros = { version = "0.1.1" } thiserror = { version = "1.0.48" } tracing = { version = "0.1.37" } tracing-subscriber = { version = "0.3.17", features = ["env-filter"] } tokio = { version = "1", features = ["rt", "rt-multi-thread", "macros"] } walkdir = { version = "2.4.0" } +which = { version = "4.4.2" } zune-inflate = { version = "0.2.54" } diff --git a/README.md b/README.md index 68d781a53..1978d0967 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ TRACE=pacquet_tarball just cli add fastify ## Benchmarking -### Clean install between multiple revisions +### Install between multiple revisions First, you to start a local registry server, such as [verdaccio](https://verdaccio.org/): @@ -35,12 +35,22 @@ Then, you can use the script named `benchmark-install-against-revisions` to run ```sh # Comparing the branch you're working on against main -cargo benchmark-install-against-revisions my-branch main +cargo benchmark-install-against-revisions --scenario=frozen-lockfile my-branch main ``` ```sh # Comparing current commit against the previous commit -cargo benchmark-install-against-revisions HEAD HEAD~ +cargo benchmark-install-against-revisions --scenario=frozen-lockfile HEAD HEAD~ +``` + +```sh +# Comparing pacquet of current commit against pnpm +cargo benchmark-install-against-revisions --scenario=frozen-lockfile --with-pnpm HEAD +``` + +```sh +# Comparing pacquet of current commit, pacquet of main, and pnpm against each other +cargo benchmark-install-against-revisions --scenario=frozen-lockfile --with-pnpm HEAD main ``` ```sh diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index c5f27d4a8..292ed1c80 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -17,6 +17,7 @@ path = "src/bin/main.rs" [dependencies] pacquet_cafs = { workspace = true } pacquet_executor = { workspace = true } +pacquet_lockfile = { workspace = true } pacquet_npmrc = { workspace = true } pacquet_package_json = { workspace = true } pacquet_registry = { workspace = true } diff --git a/crates/cli/src/commands/add.rs b/crates/cli/src/commands/add.rs index af34069c4..f72387c4e 100644 --- a/crates/cli/src/commands/add.rs +++ b/crates/cli/src/commands/add.rs @@ -2,7 +2,7 @@ use clap::Parser; use std::collections::VecDeque; use crate::{ - package::{fetch_package_version_directly, find_package_version_from_registry}, + package::{fetch_package_version_directly, install_package_from_registry}, package_manager::{PackageManager, PackageManagerError}, }; use futures_util::future; @@ -68,8 +68,11 @@ impl PackageManager { &self.config.modules_dir, ) .await?; - let package_node_modules_path = - self.config.virtual_store_dir.join(latest_version.to_store_name()).join("node_modules"); + let package_node_modules_path = self + .config + .virtual_store_dir + .join(latest_version.to_virtual_store_name()) + .join("node_modules"); let mut queue: VecDeque>> = VecDeque::new(); let config = &self.config; @@ -78,7 +81,7 @@ impl 
PackageManager { let direct_dependency_handles = latest_version.dependencies(self.config.auto_install_peers).map(|(name, version)| { - find_package_version_from_registry( + install_package_from_registry( &self.tarball_cache, config, http_client, @@ -97,12 +100,12 @@ impl PackageManager { let node_modules_path = self .config .virtual_store_dir - .join(dependency.to_store_name()) + .join(dependency.to_virtual_store_name()) .join("node_modules"); let handles = dependency.dependencies(self.config.auto_install_peers).map( |(name, version)| { - find_package_version_from_registry( + install_package_from_registry( &self.tarball_cache, config, http_client, diff --git a/crates/cli/src/commands/install.rs b/crates/cli/src/commands/install.rs index c2542b6d5..29ae2f268 100644 --- a/crates/cli/src/commands/install.rs +++ b/crates/cli/src/commands/install.rs @@ -1,15 +1,21 @@ -use crate::package::find_package_version_from_registry; +use crate::package::{install_package_from_registry, install_single_package_to_virtual_store}; +use crate::package_import::symlink_pkg; use crate::package_manager::{PackageManager, PackageManagerError}; use async_recursion::async_recursion; use clap::Parser; use futures_util::future; use pacquet_diagnostics::tracing; +use pacquet_lockfile::{ + DependencyPath, Lockfile, PackageSnapshot, PkgName, PkgNameVerPeer, RootProjectSnapshot, +}; use pacquet_package_json::DependencyGroup; use pacquet_registry::PackageVersion; use pipe_trait::Pipe; +use rayon::prelude::*; +use std::collections::HashMap; -#[derive(Parser, Debug)] -pub struct InstallCommandArgs { +#[derive(Debug, Parser)] +pub struct CliDependencyOptions { /// pacquet will not install any package listed in devDependencies and will remove those insofar /// they were already installed, if the NODE_ENV environment variable is set to production. /// Use this flag to instruct pacquet to ignore NODE_ENV and take its production status from this @@ -25,11 +31,11 @@ pub struct InstallCommandArgs { pub no_optional: bool, } -impl InstallCommandArgs { +impl CliDependencyOptions { /// Convert the command arguments to an iterator of [`DependencyGroup`] /// which filters the types of dependencies to install. fn dependency_groups(&self) -> impl Iterator { - let &InstallCommandArgs { prod, dev, no_optional } = self; + let &CliDependencyOptions { prod, dev, no_optional } = self; let has_both = prod == dev; let has_prod = has_both || prod; let has_dev = has_both || dev; @@ -41,31 +47,45 @@ impl InstallCommandArgs { } } +#[derive(Parser, Debug)] +pub struct InstallCommandArgs { + /// --prod, --dev, and --no-optional + #[clap(flatten)] + pub dependency_options: CliDependencyOptions, + + /// Don't generate a lockfile and fail if the lockfile is outdated. + #[clap(long)] + pub frozen_lockfile: bool, +} + impl PackageManager { /// Install dependencies of a dependency. /// - /// This function is used by [`PackageManager::install`]. + /// This function is used by [`PackageManager::install`] without a lockfile. 
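+    ///
+    /// For illustration (assuming the default virtual store directory
+    /// `node_modules/.pacquet`), the `node_modules_path` computed below for a
+    /// dependency `foo@1.0.0` would be:
+    ///
+    /// ```text
+    /// node_modules/.pacquet/foo@1.0.0/node_modules
+    /// ```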
#[async_recursion] - async fn install_dependencies(&self, package: &PackageVersion) { - let node_modules_path = - self.config.virtual_store_dir.join(package.to_store_name()).join("node_modules"); + async fn install_dependencies_from_registry(&self, package: &PackageVersion) { + let node_modules_path = self + .config + .virtual_store_dir + .join(package.to_virtual_store_name()) + .join("node_modules"); tracing::info!(target: "pacquet::install", node_modules = ?node_modules_path, "Start subset"); package .dependencies(self.config.auto_install_peers) - .map(|(name, version)| async { - let dependency = find_package_version_from_registry( + .map(|(name, version_range)| async { + let dependency = install_package_from_registry( &self.tarball_cache, self.config, &self.http_client, name, - version, + version_range, &node_modules_path, ) .await .unwrap(); - self.install_dependencies(&dependency).await; + self.install_dependencies_from_registry(&dependency).await; }) .pipe(future::join_all) .await; @@ -73,27 +93,113 @@ impl PackageManager { tracing::info!(target: "pacquet::install", node_modules = ?node_modules_path, "Complete subset"); } - /// Jobs of the `install` command. - pub async fn install(&self, args: &InstallCommandArgs) -> Result<(), PackageManagerError> { - tracing::info!(target: "pacquet::install", "Start all"); - - self.package_json - .dependencies(args.dependency_groups()) - .map(|(name, version)| async move { - let dependency = find_package_version_from_registry( + /// Generate filesystem layout for the virtual store at `node_modules/.pacquet`. + async fn create_virtual_store( + &self, + packages: &Option>, + ) { + let Some(packages) = packages else { + todo!("check project_snapshot, error if it's not empty, do nothing if empty"); + }; + packages + .iter() + .map(|(dependency_path, package_snapshot)| async move { + install_single_package_to_virtual_store( &self.tarball_cache, - self.config, &self.http_client, - name, - version, - &self.config.modules_dir, + self.config, + dependency_path, + package_snapshot, ) .await .unwrap(); - self.install_dependencies(&dependency).await; }) .pipe(future::join_all) .await; + } + + /// Create symlinks for the direct dependencies. + /// + /// If package `foo@x.y.z` is declared as a dependency in `package.json`, + /// symlink `foo -> .pacquet/foo@x.y.z/node_modules/foo` shall be created + /// in the `node_modules` directory. + fn link_direct_dependencies( + &self, + project_snapshot: &RootProjectSnapshot, + args: &InstallCommandArgs, + ) { + let InstallCommandArgs { dependency_options, .. } = args; + + let RootProjectSnapshot::Single(project_snapshot) = project_snapshot else { + panic!("Monorepo is not yet supported"); + }; + + project_snapshot + .dependencies_by_groups(dependency_options.dependency_groups()) + .collect::>() + .par_iter() + .for_each(|(name, spec)| { + // TODO: the code below is not optimal + let virtual_store_name = + PkgNameVerPeer::new(PkgName::clone(name), spec.version.clone()) + .to_virtual_store_name(); + + let name_str = name.to_string(); + symlink_pkg( + &self + .config + .virtual_store_dir + .join(virtual_store_name) + .join("node_modules") + .join(&name_str), + &self.config.modules_dir.join(&name_str), + ); + }); + } + + /// Jobs of the `install` command. 
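+    ///
+    /// When installing from a frozen lockfile, `create_virtual_store` and
+    /// `link_direct_dependencies` above aim to produce a layout along these lines
+    /// for a direct dependency `foo@1.0.0` (illustrative sketch, not actual output):
+    ///
+    /// ```text
+    /// node_modules/
+    /// ├── foo -> .pacquet/foo@1.0.0/node_modules/foo
+    /// └── .pacquet/
+    ///     └── foo@1.0.0/
+    ///         └── node_modules/
+    ///             └── foo/        (package files imported from the store)
+    /// ```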
+ pub async fn install(&self, args: &InstallCommandArgs) -> Result<(), PackageManagerError> { + let InstallCommandArgs { dependency_options, frozen_lockfile } = args; + tracing::info!(target: "pacquet::install", "Start all"); + + match (self.config.lockfile, frozen_lockfile, &self.lockfile) { + (false, _, _) => { + self.package_json + .dependencies(dependency_options.dependency_groups()) + .map(|(name, version_range)| async move { + let dependency = install_package_from_registry( + &self.tarball_cache, + self.config, + &self.http_client, + name, + version_range, + &self.config.modules_dir, + ) + .await + .unwrap(); + self.install_dependencies_from_registry(&dependency).await; + }) + .pipe(future::join_all) + .await; + } + (true, false, Some(_)) | (true, false, None) | (true, true, None) => { + unimplemented!(); + } + (true, true, Some(lockfile)) => { + let Lockfile { lockfile_version, project_snapshot, packages, .. } = lockfile; + assert_eq!(lockfile_version.major, 6); // compatibility check already happens at serde, but this still helps preventing programmer mistakes. + + // TODO: check if the lockfile is out-of-date + + assert!( + self.config.prefer_frozen_lockfile, + "Non frozen lockfile is not yet supported", + ); + + self.create_virtual_store(packages).await; + self.link_direct_dependencies(project_snapshot, args); + } + } tracing::info!(target: "pacquet::install", "Complete all"); Ok(()) @@ -104,7 +210,7 @@ impl PackageManager { mod tests { use std::env; - use crate::commands::install::InstallCommandArgs; + use crate::commands::install::{CliDependencyOptions, InstallCommandArgs}; use crate::fs::get_all_folders; use crate::package_manager::PackageManager; use pacquet_npmrc::Npmrc; @@ -115,53 +221,53 @@ mod tests { #[test] fn install_args_to_dependency_groups() { use DependencyGroup::{Default, Dev, Optional}; - let create_list = |args: InstallCommandArgs| args.dependency_groups().collect::>(); + let create_list = |opts: CliDependencyOptions| opts.dependency_groups().collect::>(); // no flags -> prod + dev + optional assert_eq!( - create_list(InstallCommandArgs { prod: false, dev: false, no_optional: false }), + create_list(CliDependencyOptions { prod: false, dev: false, no_optional: false }), [Default, Dev, Optional], ); // --prod -> prod + optional assert_eq!( - create_list(InstallCommandArgs { prod: true, dev: false, no_optional: false }), + create_list(CliDependencyOptions { prod: true, dev: false, no_optional: false }), [Default, Optional], ); // --dev -> dev + optional assert_eq!( - create_list(InstallCommandArgs { prod: false, dev: true, no_optional: false }), + create_list(CliDependencyOptions { prod: false, dev: true, no_optional: false }), [Dev, Optional], ); // --no-optional -> prod + dev assert_eq!( - create_list(InstallCommandArgs { prod: false, dev: false, no_optional: true }), + create_list(CliDependencyOptions { prod: false, dev: false, no_optional: true }), [Default, Dev], ); // --prod --no-optional -> prod assert_eq!( - create_list(InstallCommandArgs { prod: true, dev: false, no_optional: true }), + create_list(CliDependencyOptions { prod: true, dev: false, no_optional: true }), [Default], ); // --dev --no-optional -> dev assert_eq!( - create_list(InstallCommandArgs { prod: false, dev: true, no_optional: true }), + create_list(CliDependencyOptions { prod: false, dev: true, no_optional: true }), [Dev], ); // --prod --dev -> prod + dev + optional assert_eq!( - create_list(InstallCommandArgs { prod: true, dev: true, no_optional: false }), + 
create_list(CliDependencyOptions { prod: true, dev: true, no_optional: false }), [Default, Dev, Optional], ); // --prod --dev --no-optional -> prod + dev assert_eq!( - create_list(InstallCommandArgs { prod: true, dev: true, no_optional: true }), + create_list(CliDependencyOptions { prod: true, dev: true, no_optional: true }), [Default, Dev], ); } @@ -184,7 +290,14 @@ mod tests { let package_manager = PackageManager::new(&package_json_path, Npmrc::current().leak()).unwrap(); - let args = InstallCommandArgs { prod: false, dev: false, no_optional: false }; + let args = InstallCommandArgs { + dependency_options: CliDependencyOptions { + prod: false, + dev: false, + no_optional: false, + }, + frozen_lockfile: false, + }; package_manager.install(&args).await.unwrap(); // Make sure the package is installed diff --git a/crates/cli/src/package.rs b/crates/cli/src/package.rs index 50f68240b..bd75fa951 100644 --- a/crates/cli/src/package.rs +++ b/crates/cli/src/package.rs @@ -1,10 +1,14 @@ -use crate::package_import::ImportMethodImpl; -use crate::package_manager::PackageManagerError; +use crate::{ + package_import::{create_virtdir_by_snapshot, ImportMethodImpl}, + package_manager::PackageManagerError, +}; +use pacquet_lockfile::{DependencyPath, LockfileResolution, PackageSnapshot, PkgNameVerPeer}; use pacquet_npmrc::Npmrc; use pacquet_registry::{Package, PackageVersion}; use pacquet_tarball::{download_tarball_to_store, Cache}; +use pipe_trait::Pipe; use reqwest::Client; -use std::path::Path; +use std::{borrow::Cow, path::Path}; /// This function execute the following and returns the package /// - retrieves the package from the registry @@ -14,16 +18,16 @@ use std::path::Path; /// symlink_path will be appended by the name of the package. Therefore, /// it should be resolved into the node_modules folder of a subdependency such as /// `node_modules/.pacquet/fastify@1.0.0/node_modules`. -pub async fn find_package_version_from_registry( +pub async fn install_package_from_registry( tarball_cache: &Cache, config: &'static Npmrc, - http_client: &reqwest::Client, + http_client: &Client, name: &str, - version: &str, + version_range: &str, symlink_path: &Path, ) -> Result { let package = Package::fetch_from_registry(name, http_client, &config.registry).await?; - let package_version = package.pinned_version(version).unwrap(); + let package_version = package.pinned_version(version_range).unwrap(); internal_fetch(tarball_cache, http_client, package_version, config, symlink_path).await?; Ok(package_version.to_owned()) } @@ -31,7 +35,7 @@ pub async fn find_package_version_from_registry( pub async fn fetch_package_version_directly( tarball_cache: &Cache, config: &'static Npmrc, - http_client: &reqwest::Client, + http_client: &Client, name: &str, version: &str, symlink_path: &Path, @@ -49,7 +53,7 @@ async fn internal_fetch( config: &'static Npmrc, symlink_path: &Path, ) -> Result<(), PackageManagerError> { - let store_folder_name = package_version.to_store_name(); + let store_folder_name = package_version.to_virtual_store_name(); // TODO: skip when it already exists in store? let cas_paths = download_tarball_to_store( @@ -77,9 +81,64 @@ async fn internal_fetch( Ok(()) } +pub async fn install_single_package_to_virtual_store( + tarball_cache: &Cache, + http_client: &Client, + config: &'static Npmrc, + dependency_path: &DependencyPath, + package_snapshot: &PackageSnapshot, +) -> Result<(), PackageManagerError> { + let PackageSnapshot { resolution, .. 
} = package_snapshot; + let DependencyPath { custom_registry, package_specifier } = dependency_path; + + let (tarball_url, integrity) = match resolution { + LockfileResolution::Tarball(tarball_resolution) => { + let integrity = tarball_resolution.integrity.as_deref().unwrap_or_else(|| { + // TODO: how to handle the absent of integrity field? + panic!("Current implementation requires integrity, but {dependency_path} doesn't have it"); + }); + (tarball_resolution.tarball.as_str().pipe(Cow::Borrowed), integrity) + } + LockfileResolution::Registry(registry_resolution) => { + let registry = custom_registry.as_ref().unwrap_or(&config.registry); + let registry = registry.strip_suffix('/').unwrap_or(registry); + let PkgNameVerPeer { name, suffix: ver_peer } = package_specifier; + let version = ver_peer.version(); + let bare_name = name.bare.as_str(); + let tarball_url = format!("{registry}/{name}/-/{bare_name}-{version}.tgz"); + let integrity = registry_resolution.integrity.as_str(); + (Cow::Owned(tarball_url), integrity) + } + LockfileResolution::Directory(_) | LockfileResolution::Git(_) => { + panic!("Only TarballResolution and RegistryResolution is supported at the moment, but {dependency_path} requires {resolution:?}"); + } + }; + + // TODO: skip when already exists in store? + let cas_paths = download_tarball_to_store( + tarball_cache, + http_client, + &config.store_dir, + integrity, + None, + &tarball_url, + ) + .await?; + + create_virtdir_by_snapshot( + dependency_path, + &config.virtual_store_dir, + &cas_paths, + config.package_import_method, + package_snapshot, + )?; + + Ok(()) +} + #[cfg(test)] mod tests { - use crate::package::find_package_version_from_registry; + use crate::package::install_package_from_registry; use node_semver::Version; use pacquet_npmrc::Npmrc; use pipe_trait::Pipe; @@ -123,7 +182,7 @@ mod tests { .pipe(Box::leak); let http_client = reqwest::Client::new(); let symlink_path = tempdir().unwrap(); - let package = find_package_version_from_registry( + let package = install_package_from_registry( &Default::default(), config, &http_client, @@ -142,7 +201,7 @@ mod tests { let virtual_store_path = virtual_store_dir .path() - .join(package.to_store_name()) + .join(package.to_virtual_store_name()) .join("node_modules") .join(&package.name); assert!(virtual_store_path.is_dir()); diff --git a/crates/cli/src/package_import.rs b/crates/cli/src/package_import.rs index 59d547230..f3027a149 100644 --- a/crates/cli/src/package_import.rs +++ b/crates/cli/src/package_import.rs @@ -2,11 +2,15 @@ use std::{ collections::HashMap, ffi::OsString, fs, + io::ErrorKind, path::{Path, PathBuf}, }; use crate::package_manager::{AutoImportError, PackageManagerError}; use pacquet_diagnostics::tracing; +use pacquet_lockfile::{ + DependencyPath, PackageSnapshot, PackageSnapshotDependency, PkgNameVerPeer, +}; use pacquet_npmrc::PackageImportMethod; use rayon::prelude::*; @@ -52,6 +56,64 @@ impl ImportMethodImpl for PackageImportMethod { } } +/// This function does 2 things: +/// 1. Install the files from `cas_paths` +/// 2. 
Create the symlink layout +/// +/// **TODO:** may break this function into 2 later +pub fn create_virtdir_by_snapshot( + dependency_path: &DependencyPath, + virtual_store_dir: &Path, + cas_paths: &HashMap, + import_method: PackageImportMethod, + package_snapshot: &PackageSnapshot, +) -> Result<(), PackageManagerError> { + assert_eq!( + import_method, + PackageImportMethod::Auto, + "Only auto import method is supported, but {dependency_path} requires {import_method:?}", + ); + + // node_modules/.pacquet/pkg-name@x.y.z/node_modules + let virtual_node_modules_dir = virtual_store_dir + .join(dependency_path.package_specifier.to_virtual_store_name()) + .join("node_modules"); + fs::create_dir_all(&virtual_node_modules_dir).unwrap_or_else(|error| { + panic!("Failed to create directory at {virtual_node_modules_dir:?}: {error}") + }); // TODO: proper error propagation + + // 1. Install the files from `cas_paths` + let save_path = + virtual_node_modules_dir.join(dependency_path.package_specifier.name.to_string()); + if !save_path.exists() { + cas_paths.par_iter().try_for_each(|(cleaned_entry, store_path)| { + auto_import(store_path, &save_path.join(cleaned_entry)) + })?; + } + + // 2. Create the symlink layout + if let Some(dependencies) = &package_snapshot.dependencies { + dependencies.par_iter().for_each(|(name, spec)| { + let virtual_store_name = match spec { + PackageSnapshotDependency::PkgVerPeer(ver_peer) => { + let package_specifier = PkgNameVerPeer::new(name.clone(), ver_peer.clone()); // TODO: remove copying here + package_specifier.to_virtual_store_name() + } + PackageSnapshotDependency::DependencyPath(dependency_path) => { + dependency_path.package_specifier.to_virtual_store_name() + } + }; + let name_str = name.to_string(); + symlink_pkg( + &virtual_store_dir.join(virtual_store_name).join("node_modules").join(&name_str), + &virtual_node_modules_dir.join(&name_str), + ); + }); + } + + Ok(()) +} + fn auto_import(source_file: &Path, target_link: &Path) -> Result<(), AutoImportError> { if target_link.exists() { return Ok(()); @@ -74,3 +136,19 @@ fn auto_import(source_file: &Path, target_link: &Path) -> Result<(), AutoImportE Ok(()) } + +pub fn symlink_pkg(symlink_target: &Path, symlink_path: &Path) { + // NOTE: symlink target in pacquet is absolute yet in pnpm is relative + // TODO: change symlink target to relative + if let Some(parent) = symlink_path.parent() { + fs::create_dir_all(parent).expect("make sure node_modules exist"); // TODO: proper error propagation + } + if let Err(error) = crate::fs::symlink_dir(symlink_target, symlink_path) { + match error.kind() { + ErrorKind::AlreadyExists => {} + _ => panic!( + "Failed to create symlink at {symlink_path:?} to {symlink_target:?}: {error}" + ), // TODO: proper error propagation + } + } +} diff --git a/crates/cli/src/package_manager.rs b/crates/cli/src/package_manager.rs index 8468d82d5..d80b054c1 100644 --- a/crates/cli/src/package_manager.rs +++ b/crates/cli/src/package_manager.rs @@ -4,6 +4,7 @@ use pacquet_diagnostics::{ miette::{self, Diagnostic}, thiserror::{self, Error}, }; +use pacquet_lockfile::Lockfile; use pacquet_npmrc::Npmrc; use pacquet_package_json::PackageJson; use pacquet_tarball::Cache; @@ -36,6 +37,10 @@ pub enum PackageManagerError { #[diagnostic(transparent)] PackageJson(#[from] pacquet_package_json::PackageJsonError), + #[error(transparent)] + #[diagnostic(transparent)] + LoadLockfileError(#[from] pacquet_lockfile::LoadLockfileError), + #[error(transparent)] #[diagnostic(transparent)] Registry(#[from] 
pacquet_registry::RegistryError), @@ -52,6 +57,7 @@ pub enum PackageManagerError { pub struct PackageManager { pub config: &'static Npmrc, pub package_json: PackageJson, + pub lockfile: Option, pub http_client: reqwest::Client, pub(crate) tarball_cache: Cache, } @@ -64,8 +70,49 @@ impl PackageManager { Ok(PackageManager { config, package_json: PackageJson::create_if_needed(package_json_path.into())?, + lockfile: call_load_lockfile(config.lockfile, Lockfile::load_from_current_dir)?, http_client: reqwest::Client::new(), tarball_cache: Cache::new(), }) } } + +/// Private function to load lockfile from current directory should `config.lockfile` is `true`. +/// +/// This function was extracted to be tested independently. +fn call_load_lockfile( + config_lockfile: bool, + load_lockfile: LoadLockfile, +) -> Result, Error> +where + LoadLockfile: FnOnce() -> Result, Error>, +{ + config_lockfile.then(load_lockfile).transpose().map(Option::flatten) +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + + #[test] + fn test_call_load_lockfile() { + macro_rules! case { + ($config_lockfile:expr, $load_lockfile:expr => $output:expr) => {{ + let config_lockfile = $config_lockfile; + let load_lockfile = $load_lockfile; + let output: Result, &str> = $output; + eprintln!( + "CASE: {config_lockfile:?}, {load_lockfile} => {output:?}", + load_lockfile = stringify!($load_lockfile), + ); + assert_eq!(call_load_lockfile(config_lockfile, load_lockfile), output); + }}; + } + + case!(false, || unreachable!() => Ok(None)); + case!(true, || Err("error") => Err("error")); + case!(true, || Ok(None) => Ok(None)); + case!(true, || Ok(Some("value")) => Ok(Some("value"))); + } +} diff --git a/crates/lockfile/Cargo.toml b/crates/lockfile/Cargo.toml index 22aa0bddf..992e4b874 100644 --- a/crates/lockfile/Cargo.toml +++ b/crates/lockfile/Cargo.toml @@ -11,7 +11,16 @@ license.workspace = true repository.workspace = true [dependencies] -pacquet_diagnostics = { workspace = true } +pacquet_diagnostics = { workspace = true } +pacquet_package_json = { workspace = true } -serde = { workspace = true } -serde_yaml = { workspace = true } +derive_more = { workspace = true } +node-semver = { workspace = true } +pipe-trait = { workspace = true } +serde = { workspace = true } +serde_yaml = { workspace = true } +split-first-char = { workspace = true } + +[dev-dependencies] +pretty_assertions = { workspace = true } +text-block-macros = { workspace = true } diff --git a/crates/lockfile/src/comver.rs b/crates/lockfile/src/comver.rs new file mode 100644 index 000000000..291b314b9 --- /dev/null +++ b/crates/lockfile/src/comver.rs @@ -0,0 +1,71 @@ +use derive_more::{Display, Error}; +use serde::{Deserialize, Serialize}; +use std::{num::ParseIntError, str::FromStr}; + +/// Information of the top-level field `lockfileVersion`. +/// +/// It contains only major and minor. +#[derive(Debug, Display, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)] +#[display(fmt = "{major}.{minor}")] +#[serde(try_from = "&'de str", into = "String")] +pub struct ComVer { + pub major: u16, + pub minor: u16, +} + +impl ComVer { + /// Create a comver struct. + pub fn new(major: u16, minor: u16) -> Self { + Self { major, minor } + } +} + +/// Error when parsing [`ComVer`] from a string. 
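+///
+/// Illustrative inputs for each variant, matching the `FromStr` implementation
+/// below (sketch only, not a doctest):
+///
+/// ```ignore
+/// assert!(matches!("6".parse::<ComVer>(), Err(ParseComVerError::MissingDot)));
+/// assert!(matches!("x.0".parse::<ComVer>(), Err(ParseComVerError::InvalidMajor(_))));
+/// assert!(matches!("6.x".parse::<ComVer>(), Err(ParseComVerError::InvalidMinor(_))));
+/// ```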
+#[derive(Debug, Display, Error)] +pub enum ParseComVerError { + #[display(fmt = "Dot is missing")] + MissingDot, + #[display(fmt = "Major is not a valid number: {_0}")] + InvalidMajor(ParseIntError), + #[display(fmt = "Minor is not a valid number: {_0}")] + InvalidMinor(ParseIntError), +} + +impl FromStr for ComVer { + type Err = ParseComVerError; + fn from_str(s: &str) -> Result { + let (major, minor) = s.split_once('.').ok_or(ParseComVerError::MissingDot)?; + let major = major.parse::().map_err(ParseComVerError::InvalidMajor)?; + let minor = minor.parse::().map_err(ParseComVerError::InvalidMinor)?; + Ok(ComVer::new(major, minor)) + } +} + +impl<'a> TryFrom<&'a str> for ComVer { + type Error = ParseComVerError; + fn try_from(value: &'a str) -> Result { + value.parse() + } +} + +impl From for String { + fn from(value: ComVer) -> Self { + value.to_string() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + + #[test] + fn parse() { + assert_eq!("6.0".parse::().unwrap(), ComVer::new(6, 0)); + } + + #[test] + fn to_string() { + assert_eq!(ComVer::new(6, 0).to_string(), "6.0"); + } +} diff --git a/crates/lockfile/src/dependency_path.rs b/crates/lockfile/src/dependency_path.rs new file mode 100644 index 000000000..2e0f27eea --- /dev/null +++ b/crates/lockfile/src/dependency_path.rs @@ -0,0 +1,180 @@ +use crate::{ParsePkgNameVerPeerError, PkgNameVerPeer}; +use derive_more::{Display, Error}; +use serde::{Deserialize, Serialize}; +use std::str::FromStr; + +/// Dependency path is the key of the `packages` map. +/// +/// Specification: +/// +/// Syntax: `{custom_registry}/{package_specifier}` +/// +/// Syntax Examples: +/// * `/ts-node@10.9.1` +/// * `registry.npmjs.com/ts-node@10.9.1` +/// * `registry.node-modules.io/ts-node@10.9.1` +/// * `/ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)` +/// * `registry.npmjs.com/ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)` +/// * `registry.node-modules.io/ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)` +#[derive(Debug, Display, Clone, PartialEq, Eq, Hash, Deserialize, Serialize)] +#[display(fmt = "{}/{package_specifier}", "custom_registry.as_deref().unwrap_or_default()")] +#[serde(try_from = "&'de str", into = "String")] +pub struct DependencyPath { + pub custom_registry: Option, + pub package_specifier: PkgNameVerPeer, // TODO: add support for `{registry}/{name}/{version}({peers})` syntax +} + +/// Error when parsing [`DependencyPath`] from a string. 
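+///
+/// Illustrative inputs for each variant, matching the `FromStr` implementation
+/// below (sketch only, not a doctest):
+///
+/// ```ignore
+/// // no `/` separator at all
+/// assert!(matches!(
+///     "ts-node@10.9.1".parse::<DependencyPath>(),
+///     Err(ParseDependencyPathError::InvalidSyntax),
+/// ));
+/// // the part after `/` is not a valid package specifier
+/// assert!(matches!(
+///     "/ts-node".parse::<DependencyPath>(),
+///     Err(ParseDependencyPathError::ParsePackageSpecifierFailure(_)),
+/// ));
+/// ```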
+#[derive(Debug, Display, Error)] +pub enum ParseDependencyPathError { + #[display(fmt = "Invalid syntax")] + InvalidSyntax, + #[display(fmt = "Failed to parse specifier: {_0}")] + ParsePackageSpecifierFailure(ParsePkgNameVerPeerError), +} + +impl FromStr for DependencyPath { + type Err = ParseDependencyPathError; + fn from_str(s: &str) -> Result { + let (custom_registry, package_specifier) = + s.split_once('/').ok_or(ParseDependencyPathError::InvalidSyntax)?; + let custom_registry = + if custom_registry.is_empty() { None } else { Some(custom_registry.to_string()) }; + let package_specifier = package_specifier + .parse() + .map_err(ParseDependencyPathError::ParsePackageSpecifierFailure)?; + Ok(DependencyPath { custom_registry, package_specifier }) + } +} + +impl<'a> TryFrom<&'a str> for DependencyPath { + type Error = ParseDependencyPathError; + fn try_from(value: &'a str) -> Result { + value.parse() + } +} + +impl From for String { + fn from(value: DependencyPath) -> Self { + value.to_string() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + + #[test] + fn serialize() { + fn case( + (custom_registry, package_specifier): (Option<&'static str>, &'static str), + output: &'static str, + ) { + eprintln!("CASE: {custom_registry:?}, {package_specifier:?}"); + let custom_registry = custom_registry.map(ToString::to_string); + let package_specifier = package_specifier.parse().unwrap(); + let yaml = + serde_yaml::to_string(&DependencyPath { custom_registry, package_specifier }) + .unwrap(); + assert_eq!(yaml.trim(), output); + } + + case((None, "ts-node@10.9.1"), "/ts-node@10.9.1"); + case( + (Some("registry.node-modules.io"), "ts-node@10.9.1"), + "registry.node-modules.io/ts-node@10.9.1", + ); + case( + (None, "ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)"), + "/ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)", + ); + case( + ( + Some("registry.node-modules.io"), + "ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)", + ), + "registry.node-modules.io/ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)", + ); + case( + (None, "@babel/plugin-proposal-object-rest-spread@7.12.1"), + "/@babel/plugin-proposal-object-rest-spread@7.12.1", + ); + case( + (Some("registry.node-modules.io"), "@babel/plugin-proposal-object-rest-spread@7.12.1"), + "registry.node-modules.io/@babel/plugin-proposal-object-rest-spread@7.12.1", + ); + case( + (None, "@babel/plugin-proposal-object-rest-spread@7.12.1(@babel/core@7.12.9)"), + "/@babel/plugin-proposal-object-rest-spread@7.12.1(@babel/core@7.12.9)", + ); + case( + ( + Some("registry.node-modules.io"), + "@babel/plugin-proposal-object-rest-spread@7.12.1(@babel/core@7.12.9)", + ), + "registry.node-modules.io/@babel/plugin-proposal-object-rest-spread@7.12.1(@babel/core@7.12.9)", + ); + } + + #[test] + fn deserialize() { + fn case( + input: &'static str, + (custom_registry, package_specifier): (Option<&'static str>, &'static str), + ) { + eprintln!("CASE: {input:?}"); + let dependency_path: DependencyPath = serde_yaml::from_str(input).unwrap(); + assert_eq!( + dependency_path, + DependencyPath { + custom_registry: custom_registry.map(|x: &str| x.to_string()), + package_specifier: package_specifier.parse().unwrap(), + } + ); + } + + case("/ts-node@10.9.1", (None, "ts-node@10.9.1")); + case( + "registry.node-modules.io/ts-node@10.9.1", + (Some("registry.node-modules.io"), "ts-node@10.9.1"), + ); + case( + "/ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)", + (None, 
"ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)"), + ); + case( + "registry.node-modules.io/ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)", + ( + Some("registry.node-modules.io"), + "ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)", + ), + ); + case( + "/@babel/plugin-proposal-object-rest-spread@7.12.1", + (None, "@babel/plugin-proposal-object-rest-spread@7.12.1"), + ); + case( + "registry.node-modules.io/@babel/plugin-proposal-object-rest-spread@7.12.1", + (Some("registry.node-modules.io"), "@babel/plugin-proposal-object-rest-spread@7.12.1"), + ); + case( + "/@babel/plugin-proposal-object-rest-spread@7.12.1(@babel/core@7.12.9)", + (None, "@babel/plugin-proposal-object-rest-spread@7.12.1(@babel/core@7.12.9)"), + ); + case( + "registry.node-modules.io/@babel/plugin-proposal-object-rest-spread@7.12.1(@babel/core@7.12.9)", + ( + Some("registry.node-modules.io"), + "@babel/plugin-proposal-object-rest-spread@7.12.1(@babel/core@7.12.9)", + ), + ); + } + + #[test] + fn parse_error() { + let error = "ts-node@10.9.1".parse::().unwrap_err(); + assert_eq!(error.to_string(), "Invalid syntax"); + assert!(matches!(error, ParseDependencyPathError::InvalidSyntax)); + } +} diff --git a/crates/lockfile/src/lib.rs b/crates/lockfile/src/lib.rs index cb43fcf90..e962975f8 100644 --- a/crates/lockfile/src/lib.rs +++ b/crates/lockfile/src/lib.rs @@ -1,42 +1,41 @@ -mod package; - -pub use package::{LockfilePackage, LockfilePackageResolution}; - -use std::{ - collections::HashMap, - env, fs, - io::{Read, Write}, - path::PathBuf, +mod comver; +mod dependency_path; +mod load_lockfile; +mod lockfile_version; +mod multi_project_snapshot; +mod package_snapshot; +mod package_snapshot_dependency; +mod pkg_name; +mod pkg_name_suffix; +mod pkg_name_ver; +mod pkg_name_ver_peer; +mod pkg_ver_peer; +mod project_snapshot; +mod resolution; +mod resolved_dependency; +mod root_project_snapshot; + +pub use comver::{ComVer, ParseComVerError}; +pub use dependency_path::DependencyPath; +pub use load_lockfile::LoadLockfileError; +pub use lockfile_version::LockfileVersion; +pub use multi_project_snapshot::MultiProjectSnapshot; +pub use package_snapshot::{LockfilePeerDependencyMetaValue, PackageSnapshot}; +pub use package_snapshot_dependency::PackageSnapshotDependency; +pub use pkg_name::{ParsePkgNameError, PkgName}; +pub use pkg_name_suffix::{ParsePkgNameSuffixError, PkgNameSuffix}; +pub use pkg_name_ver::{ParsePkgNameVerError, PkgNameVer}; +pub use pkg_name_ver_peer::{ParsePkgNameVerPeerError, PkgNameVerPeer}; +pub use pkg_ver_peer::{ParsePkgVerPeerError, PkgVerPeer}; +pub use project_snapshot::ProjectSnapshot; +pub use resolution::{ + DirectoryResolution, GitResolution, LockfileResolution, RegistryResolution, TarballResolution, }; +pub use resolved_dependency::{ResolvedDependencyMap, ResolvedDependencySpec}; +pub use root_project_snapshot::RootProjectSnapshot; -use pacquet_diagnostics::{ - miette::{self, Diagnostic}, - thiserror::{self, Error}, -}; use serde::{Deserialize, Serialize}; - -#[derive(Error, Debug, Diagnostic)] -#[non_exhaustive] -pub enum LockfileError { - #[error(transparent)] - #[diagnostic(code(pacquet_lockfile::io_error))] - Io(#[from] std::io::Error), - - #[error(transparent)] - #[diagnostic(code(pacquet_lockfile::serialization_error))] - Serialization(#[from] serde_yaml::Error), -} - -#[derive(Debug, PartialEq, Serialize, Deserialize)] -pub struct LockfileDependency { - specifier: String, - version: String, -} - -#[derive(Debug, PartialEq, Serialize, Deserialize)] -pub struct 
LockfilePeerDependencyMeta { - optional: bool, -} +use std::collections::HashMap; #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] @@ -45,71 +44,25 @@ pub struct LockfileSettings { exclude_links_from_lockfile: bool, } +/// * Specification: +/// * Reference: #[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct Lockfile { - pub lock_file_version: String, + pub lockfile_version: LockfileVersion<6>, + #[serde(skip_serializing_if = "Option::is_none")] pub settings: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub never_built_dependencies: Option>, + #[serde(skip_serializing_if = "Option::is_none")] pub overrides: Option>, - pub dependencies: Option>, - pub packages: Option>, -} - -impl Default for Lockfile { - fn default() -> Self { - Self::new() - } + #[serde(flatten)] + pub project_snapshot: RootProjectSnapshot, + #[serde(skip_serializing_if = "Option::is_none")] + pub packages: Option>, } impl Lockfile { - pub fn path() -> Result { - Ok(env::current_dir()?.join("pacquet-lock.yaml")) - } - - pub fn new() -> Self { - Lockfile { - lock_file_version: "6.0".to_string(), - settings: Some(LockfileSettings { - auto_install_peers: true, - exclude_links_from_lockfile: false, - }), - never_built_dependencies: None, - overrides: None, - dependencies: None, - packages: None, - } - } - - pub fn create() -> Result { - let file = Lockfile::new(); - file.save()?; - Ok(file) - } - - pub fn open() -> Result { - let yaml_path = Lockfile::path()?; - let mut file = fs::File::open(yaml_path)?; - let mut buffer = String::new(); - file.read_to_string(&mut buffer)?; - let lockfile: Lockfile = serde_yaml::from_str(&buffer)?; - Ok(lockfile) - } - - pub fn create_or_open() -> Result { - let yaml_path = Lockfile::path()?; - if yaml_path.exists() { - Ok(Lockfile::open()?) - } else { - Ok(Lockfile::create()?) - } - } - - pub fn save(&self) -> Result<(), LockfileError> { - let yaml_path = Lockfile::path()?; - let mut file = fs::File::create(yaml_path)?; - let yaml = serde_yaml::to_string(&self)?; - file.write_all(yaml.as_bytes())?; - Ok(()) - } + /// Base file name of the lockfile. + const FILE_NAME: &str = "pnpm-lock.yaml"; } diff --git a/crates/lockfile/src/load_lockfile.rs b/crates/lockfile/src/load_lockfile.rs new file mode 100644 index 000000000..83b7f9b87 --- /dev/null +++ b/crates/lockfile/src/load_lockfile.rs @@ -0,0 +1,39 @@ +use crate::Lockfile; +use derive_more::{Display, Error}; +use pacquet_diagnostics::miette::{self, Diagnostic}; +use pipe_trait::Pipe; +use std::{ + env, fs, + io::{self, ErrorKind}, +}; + +/// Error when reading lockfile the filesystem. +#[derive(Debug, Display, Error, Diagnostic)] +#[non_exhaustive] +pub enum LoadLockfileError { + #[display(fmt = "Failed to get current_dir: {_0}")] + #[diagnostic(code(pacquet_lockfile::current_dir))] + CurrentDir(io::Error), + + #[display(fmt = "Failed to read lockfile content: {_0}")] + #[diagnostic(code(pacquet_lockfile::read_file))] + ReadFile(io::Error), + + #[display(fmt = "Failed to parse lockfile content as YAML: {_0}")] + #[diagnostic(code(pacquet_lockfile::parse_yaml))] + ParseYaml(serde_yaml::Error), +} + +impl Lockfile { + /// Load lockfile from the current directory. 
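+    ///
+    /// The file is looked up as `pnpm-lock.yaml`; a missing file yields `Ok(None)`
+    /// rather than an error. A usage sketch (illustrative, not a doctest):
+    ///
+    /// ```ignore
+    /// use pacquet_lockfile::Lockfile;
+    ///
+    /// match Lockfile::load_from_current_dir() {
+    ///     Ok(Some(lockfile)) => println!("lockfileVersion: {}", lockfile.lockfile_version),
+    ///     Ok(None) => println!("no pnpm-lock.yaml in the current directory"),
+    ///     Err(error) => eprintln!("failed to load the lockfile: {error}"),
+    /// }
+    /// ```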
+ pub fn load_from_current_dir() -> Result, LoadLockfileError> { + let file_path = + env::current_dir().map_err(LoadLockfileError::CurrentDir)?.join(Lockfile::FILE_NAME); + let content = match fs::read_to_string(file_path) { + Ok(content) => content, + Err(error) if error.kind() == ErrorKind::NotFound => return Ok(None), + Err(error) => return error.pipe(LoadLockfileError::ReadFile).pipe(Err), + }; + content.pipe_as_ref(serde_yaml::from_str).map_err(LoadLockfileError::ParseYaml) + } +} diff --git a/crates/lockfile/src/lockfile_version.rs b/crates/lockfile/src/lockfile_version.rs new file mode 100644 index 000000000..4fb6ccc6b --- /dev/null +++ b/crates/lockfile/src/lockfile_version.rs @@ -0,0 +1,70 @@ +use crate::ComVer; +use derive_more::{AsRef, Deref, Display, Error, Into}; +use serde::{Deserialize, Serialize}; + +/// Wrapper that checks compatibility of `lockfileVersion` against `MAJOR`. +#[derive( + Debug, Display, Clone, Copy, PartialEq, Eq, AsRef, Deref, Into, Deserialize, Serialize, +)] +#[serde(try_from = "ComVer", into = "ComVer")] +pub struct LockfileVersion(ComVer); + +impl LockfileVersion { + /// Check if `comver` is compatible with `MAJOR`. + pub const fn is_compatible(comver: ComVer) -> bool { + comver.major == MAJOR + } +} + +/// Error when [`ComVer`] fails compatibility check. +#[derive(Debug, Display, Error)] +pub enum LockfileVersionError { + #[display(fmt = "The lockfileVersion of {_0} is incompatible with {MAJOR}.x")] + IncompatibleMajor(#[error(not(source))] ComVer), +} + +impl TryFrom for LockfileVersion { + type Error = LockfileVersionError; + fn try_from(comver: ComVer) -> Result { + Self::is_compatible(comver) + .then_some(Self(comver)) + .ok_or(Self::Error::IncompatibleMajor(comver)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pipe_trait::Pipe; + use pretty_assertions::assert_eq; + + #[test] + fn compatible() { + macro_rules! case { + ($major:expr, $input:expr => $output:expr) => {{ + const MAJOR: u16 = $major; + let input = $input; + eprintln!("CASE: LockfileVersion::<{MAJOR}>::try_from({input:?})"); + let received: LockfileVersion = serde_yaml::from_str(input).unwrap(); + let expected = LockfileVersion::($output); + assert_eq!(&received, &expected); + }}; + } + + case!(6, "6.0" => ComVer { major: 6, minor: 0 }); + case!(6, "6.1" => ComVer { major: 6, minor: 1 }); + case!(5, "5.0" => ComVer { major: 5, minor: 0 }); + } + + #[test] + fn incompatible() { + let error = + "5.0".parse::().unwrap().pipe(LockfileVersion::<6>::try_from).unwrap_err(); + dbg!(&error); + assert_eq!(error.to_string(), "The lockfileVersion of 5.0 is incompatible with 6.x"); + assert!(matches!( + error, + LockfileVersionError::IncompatibleMajor(ComVer { major: 5, minor: 0 }), + )); + } +} diff --git a/crates/lockfile/src/multi_project_snapshot.rs b/crates/lockfile/src/multi_project_snapshot.rs new file mode 100644 index 000000000..d4f1dd3e2 --- /dev/null +++ b/crates/lockfile/src/multi_project_snapshot.rs @@ -0,0 +1,10 @@ +use crate::ProjectSnapshot; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +/// Snapshot of a multi-project monorepo. 
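+///
+/// In a pnpm workspace lockfile this corresponds to the top-level `importers`
+/// map, keyed by project path, roughly of this shape (illustrative, entries
+/// elided):
+///
+/// ```yaml
+/// importers:
+///   .:
+///     dependencies: {}
+///   packages/foo:
+///     dependencies: {}
+/// ```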
+#[derive(Debug, Default, PartialEq, Deserialize, Serialize)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub struct MultiProjectSnapshot { + pub importers: HashMap, +} diff --git a/crates/lockfile/src/package.rs b/crates/lockfile/src/package.rs deleted file mode 100644 index 93c54bec9..000000000 --- a/crates/lockfile/src/package.rs +++ /dev/null @@ -1,41 +0,0 @@ -use std::collections::HashMap; - -use serde::{Deserialize, Serialize}; - -use crate::LockfilePeerDependencyMeta; - -#[derive(Debug, PartialEq, Serialize, Deserialize)] -pub struct LockfilePackageResolution { - integrity: String, -} - -// Reference: https://github.com/pnpm/pnpm/blob/main/lockfile/lockfile-file/src/sortLockfileKeys.ts#L5 -#[derive(Debug, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct LockfilePackage { - resolution: LockfilePackageResolution, - id: Option, - - name: Option, - version: Option, - - engines: Option>, - cpu: Option>, - os: Option>, - // TODO: Add `libc` - deprecated: Option, - has_bin: Option, - // TODO: Add `prepare` - requires_build: Option, - - // TODO: Add `bundleDependencies` - peer_dependencies: Option>, - peer_dependencies_meta: Option>, - - dependencies: Option>, - optional_dependencies: Option>, - - transitive_peer_dependencies: Option>, - dev: bool, - optional: Option, -} diff --git a/crates/lockfile/src/package_snapshot.rs b/crates/lockfile/src/package_snapshot.rs new file mode 100644 index 000000000..535e11e13 --- /dev/null +++ b/crates/lockfile/src/package_snapshot.rs @@ -0,0 +1,55 @@ +use crate::{LockfileResolution, PackageSnapshotDependency, PkgName}; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +#[derive(Debug, PartialEq, Serialize, Deserialize)] +pub struct LockfilePeerDependencyMetaValue { + optional: bool, +} + +// Reference: https://github.com/pnpm/pnpm/blob/main/lockfile/lockfile-file/src/sortLockfileKeys.ts#L5 +#[derive(Debug, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PackageSnapshot { + pub resolution: LockfileResolution, + #[serde(skip_serializing_if = "Option::is_none")] + pub id: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub name: Option, // TODO: name and version are required on non-default registry, create a struct for it + #[serde(skip_serializing_if = "Option::is_none")] + pub version: Option, // TODO: name and version are required on non-default registry, create a struct for it + + #[serde(skip_serializing_if = "Option::is_none")] + pub engines: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub cpu: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub os: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub libc: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub deprecated: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub has_bin: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub prepare: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub requires_build: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub bundled_dependencies: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub peer_dependencies: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub peer_dependencies_meta: Option>, + + #[serde(skip_serializing_if = "Option::is_none")] + pub dependencies: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub optional_dependencies: Option>, + + pub 
transitive_peer_dependencies: Option>, + pub dev: Option, + pub optional: Option, +} diff --git a/crates/lockfile/src/package_snapshot_dependency.rs b/crates/lockfile/src/package_snapshot_dependency.rs new file mode 100644 index 000000000..bfffb9a12 --- /dev/null +++ b/crates/lockfile/src/package_snapshot_dependency.rs @@ -0,0 +1,85 @@ +use crate::{DependencyPath, PkgVerPeer}; +use derive_more::{Display, From, TryInto}; +use serde::{Deserialize, Serialize}; + +/// Value of [`PackageSnapshot::dependencies`](crate::PackageSnapshot::dependencies). +#[derive(Debug, Display, Clone, PartialEq, Eq, From, TryInto, Deserialize, Serialize)] +#[serde(untagged)] +pub enum PackageSnapshotDependency { + PkgVerPeer(PkgVerPeer), + DependencyPath(DependencyPath), +} + +#[cfg(test)] +mod tests { + use super::*; + use pipe_trait::Pipe; + use pretty_assertions::assert_eq; + + #[test] + fn deserialize_to_correct_variants() { + macro_rules! case { + ($input:expr => $output:ident) => {{ + let input = $input; + eprintln!("CASE: {input:?}"); + let snapshot_dependency: PackageSnapshotDependency = + serde_yaml::from_str(input).unwrap(); + dbg!(&snapshot_dependency); + assert!(matches!(&snapshot_dependency, PackageSnapshotDependency::$output(_))); + }}; + } + + case!("1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)" => PkgVerPeer); + case!("1.21.3(react@17.0.2)" => PkgVerPeer); + case!("1.21.3-rc.0(react@17.0.2)" => PkgVerPeer); + case!("1.21.3" => PkgVerPeer); + case!("1.21.3-rc.0" => PkgVerPeer); + case!("/react-json-view@1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)" => DependencyPath); + case!("/react-json-view@1.21.3(react@17.0.2)" => DependencyPath); + case!("/react-json-view@1.21.3-rc.0(react@17.0.2)" => DependencyPath); + case!("/react-json-view@1.21.3" => DependencyPath); + case!("/react-json-view@1.21.3-rc.0" => DependencyPath); + case!("registry.npmjs.com/react-json-view@1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)" => DependencyPath); + case!("registry.npmjs.com/react-json-view@1.21.3(react@17.0.2)" => DependencyPath); + case!("registry.npmjs.com/react-json-view@1.21.3-rc.0(react@17.0.2)" => DependencyPath); + case!("registry.npmjs.com/react-json-view@1.21.3" => DependencyPath); + case!("registry.npmjs.com/react-json-view@1.21.3-rc.0" => DependencyPath); + case!("/@docusaurus/react-loadable@5.5.2(react@17.0.2)" => DependencyPath); + case!("/@docusaurus/react-loadable@5.5.2" => DependencyPath); + case!("registry.npmjs.com/@docusaurus/react-loadable@5.5.2(react@17.0.2)" => DependencyPath); + case!("registry.npmjs.com/@docusaurus/react-loadable@5.5.2" => DependencyPath); + } + + #[test] + fn string_matches_yaml() { + fn case(input: &'static str) { + eprintln!("CASE: {input:?}"); + let snapshot_dependency: PackageSnapshotDependency = + serde_yaml::from_str(input).unwrap(); + dbg!(&snapshot_dependency); + let received = snapshot_dependency.to_string().pipe(serde_yaml::Value::String); + let expected: serde_yaml::Value = serde_yaml::from_str(input).unwrap(); + assert_eq!(&received, &expected); + } + + case("1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)"); + case("1.21.3(react@17.0.2)"); + case("1.21.3-rc.0(react@17.0.2)"); + case("1.21.3"); + case("1.21.3-rc.0"); + case("/react-json-view@1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)"); + case("/react-json-view@1.21.3(react@17.0.2)"); + case("/react-json-view@1.21.3-rc.0(react@17.0.2)"); + case("/react-json-view@1.21.3"); + case("/react-json-view@1.21.3-rc.0"); + 
case("registry.npmjs.com/react-json-view@1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)"); + case("registry.npmjs.com/react-json-view@1.21.3(react@17.0.2)"); + case("registry.npmjs.com/react-json-view@1.21.3-rc.0(react@17.0.2)"); + case("registry.npmjs.com/react-json-view@1.21.3"); + case("registry.npmjs.com/react-json-view@1.21.3-rc.0"); + case("/@docusaurus/react-loadable@5.5.2(react@17.0.2)"); + case("/@docusaurus/react-loadable@5.5.2"); + case("registry.npmjs.com/@docusaurus/react-loadable@5.5.2(react@17.0.2)"); + case("registry.npmjs.com/@docusaurus/react-loadable@5.5.2"); + } +} diff --git a/crates/lockfile/src/pkg_name.rs b/crates/lockfile/src/pkg_name.rs new file mode 100644 index 000000000..d8c695427 --- /dev/null +++ b/crates/lockfile/src/pkg_name.rs @@ -0,0 +1,159 @@ +use derive_more::{Display, Error}; +use pipe_trait::Pipe; +use serde::{Deserialize, Serialize}; +use split_first_char::SplitFirstChar; +use std::{fmt, str::FromStr}; + +/// Represent the name of an npm package. +/// +/// Syntax: +/// * Without scope: `{bare}` +/// * With scope: `@{scope}/bare` +#[derive(Debug, Clone, PartialEq, Eq, Hash, Deserialize, Serialize)] +#[serde(try_from = "&'de str", into = "String")] +pub struct PkgName { + /// The scope (if any) without the `@` prefix. + pub scope: Option, + /// Either the whole package name (if without scope) or the bare name after the separator (if with scope). + pub bare: String, +} + +/// Error when parsing [`PkgName`] from a string input. +#[derive(Debug, Display, Error)] +pub enum ParsePkgNameError { + #[display(fmt = "Missing bare name")] + MissingName, + #[display(fmt = "Name is empty")] + EmptyName, +} + +impl PkgName { + /// Parse [`PkgName`] from a string input. + pub fn parse(input: Input) -> Result + where + Input: Into + AsRef, + { + match input.as_ref().split_first_char() { + Some(('@', rest)) => { + let (scope, bare) = rest.split_once('/').ok_or(ParsePkgNameError::MissingName)?; + let scope = scope.to_string().pipe(Some); + let bare = bare.to_string(); + Ok(PkgName { scope, bare }) + } + Some(_) => { + let scope = None; + let bare = input.into(); + Ok(PkgName { scope, bare }) + } + None => Err(ParsePkgNameError::EmptyName), + } + } +} + +impl TryFrom for PkgName { + type Error = ParsePkgNameError; + fn try_from(input: String) -> Result { + PkgName::parse(input) + } +} + +impl<'a> TryFrom<&'a str> for PkgName { + type Error = ParsePkgNameError; + fn try_from(input: &'a str) -> Result { + PkgName::parse(input) + } +} + +impl FromStr for PkgName { + type Err = ParsePkgNameError; + fn from_str(input: &str) -> Result { + PkgName::parse(input) + } +} + +impl fmt::Display for PkgName { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let PkgName { scope, bare } = self; + if let Some(scope) = scope { + write!(f, "@{scope}/")?; + } + write!(f, "{bare}") + } +} + +impl From for String { + fn from(value: PkgName) -> Self { + value.to_string() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + + #[test] + fn parse_ok() { + fn case(input: &'static str, output: PkgName) { + eprintln!("CASE: {input:?}"); + let actual: PkgName = input.parse().unwrap(); + assert_eq!(&actual, &output); + } + + case("@foo/bar", PkgName { scope: Some("foo".to_string()), bare: "bar".to_string() }); + case("foo-bar", PkgName { scope: None, bare: "foo-bar".to_string() }); + } + + #[test] + fn deserialize_ok() { + fn case(input: &'static str, output: PkgName) { + eprintln!("CASE: {input:?}"); + let actual: PkgName = 
serde_yaml::from_str(input).unwrap(); + assert_eq!(&actual, &output); + } + + case("'@foo/bar'", PkgName { scope: Some("foo".to_string()), bare: "bar".to_string() }); + case("foo-bar", PkgName { scope: None, bare: "foo-bar".to_string() }); + } + + #[test] + fn parse_err() { + macro_rules! case { + ($input:expr => $message:expr, $pattern:pat) => {{ + let input = $input; + eprintln!("CASE: {input:?}"); + let error = input.parse::().unwrap_err(); + dbg!(&error); + assert_eq!(error.to_string(), $message); + assert!(matches!(&error, $pattern)); + }}; + } + + case!("@foo" => "Missing bare name", ParsePkgNameError::MissingName); + case!("" => "Name is empty", ParsePkgNameError::EmptyName); + } + + #[test] + fn to_string() { + fn case(input: PkgName, output: &'static str) { + eprintln!("CASE: {input:?}"); + assert_eq!(input.to_string(), output); + } + + case(PkgName { scope: Some("foo".to_string()), bare: "bar".to_string() }, "@foo/bar"); + case(PkgName { scope: None, bare: "foo-bar".to_string() }, "foo-bar"); + } + + #[test] + fn serialize() { + fn case(input: PkgName, output: &'static str) { + eprintln!("CASE: {input:?}"); + let received = serde_yaml::to_value(&input).unwrap(); + let expected = output.to_string().pipe(serde_yaml::Value::String); + assert_eq!(&received, &expected); + } + + case(PkgName { scope: Some("foo".to_string()), bare: "bar".to_string() }, "@foo/bar"); + case(PkgName { scope: None, bare: "foo-bar".to_string() }, "foo-bar"); + } +} diff --git a/crates/lockfile/src/pkg_name_suffix.rs b/crates/lockfile/src/pkg_name_suffix.rs new file mode 100644 index 000000000..6f1e5e6a6 --- /dev/null +++ b/crates/lockfile/src/pkg_name_suffix.rs @@ -0,0 +1,88 @@ +use crate::{ParsePkgNameError, PkgName}; +use derive_more::{Display, Error}; +use serde::{Deserialize, Serialize}; +use split_first_char::SplitFirstChar; +use std::{fmt::Display, str::FromStr}; + +/// Syntax: `{name}@{suffix}` +/// +/// Examples: +/// * `ts-node@10.9.1`, `@types/node@18.7.19`, `typescript@5.1.6` +/// * `react-json-view@1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)` +#[derive(Debug, Display, Clone, PartialEq, Eq, Hash, Deserialize, Serialize)] +#[display(fmt = "{name}@{suffix}")] +#[display(bound = "Suffix: Display")] +#[serde(try_from = "&'de str", into = "String")] +#[serde(bound( + deserialize = "Suffix: FromStr, Suffix::Err: Display", + serialize = "Suffix: Display + Clone", +))] +pub struct PkgNameSuffix { + pub name: PkgName, + pub suffix: Suffix, +} + +impl PkgNameSuffix { + /// Construct a [`PkgNameSuffix`]. + pub fn new(name: PkgName, suffix: Suffix) -> Self { + PkgNameSuffix { name, suffix } + } +} + +/// Error when parsing [`PkgNameSuffix`] from a string. +#[derive(Debug, Display, Error)] +#[display(bound = "ParseSuffixError: Display")] +pub enum ParsePkgNameSuffixError { + #[display(fmt = "Input is empty")] + EmptyInput, + #[display(fmt = "Suffix is missing")] + MissingSuffix, + #[display(fmt = "Name is empty")] + EmptyName, + #[display(fmt = "Failed to parse suffix: {_0}")] + ParseSuffixFailure(#[error(source)] ParseSuffixError), + #[display(fmt = "Failed to parse name: {_0}")] + ParseNameFailure(#[error(source)] ParsePkgNameError), +} + +impl FromStr for PkgNameSuffix { + type Err = ParsePkgNameSuffixError; + fn from_str(value: &str) -> Result { + // The parsing code of PkgName is insufficient for this, so the code have to be duplicated for now. 
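+ // Parsing strategy (matching the `match` below): a leading '@' marks a scope, so that '@' is skipped
+ // and the name/suffix boundary is the next '@'; without a scope, the first '@' is the boundary.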
+ // TODO: use parser combinator pattern to enable code reuse + let (name, suffix) = match value.split_first_char() { + None => return Err(ParsePkgNameSuffixError::EmptyInput), + Some(('@', rest)) => { + let (name_without_at, suffix) = + rest.split_once('@').ok_or(ParsePkgNameSuffixError::MissingSuffix)?; + let name = &value[..name_without_at.len() + 1]; + debug_assert_eq!(name, format!("@{name_without_at}")); + (name, suffix) + } + Some((_, _)) => value.split_once('@').ok_or(ParsePkgNameSuffixError::MissingSuffix)?, + }; + if matches!(name, "" | "@" | "@/") { + return Err(ParsePkgNameSuffixError::EmptyName); + } + if suffix.is_empty() { + return Err(ParsePkgNameSuffixError::MissingSuffix); + } + let suffix = + suffix.parse::().map_err(ParsePkgNameSuffixError::ParseSuffixFailure)?; + let name = name.parse().map_err(ParsePkgNameSuffixError::ParseNameFailure)?; + Ok(PkgNameSuffix { name, suffix }) + } +} + +impl<'a, Suffix: FromStr> TryFrom<&'a str> for PkgNameSuffix { + type Error = ParsePkgNameSuffixError; + fn try_from(value: &'a str) -> Result { + value.parse() + } +} + +impl From> for String { + fn from(value: PkgNameSuffix) -> Self { + value.to_string() + } +} diff --git a/crates/lockfile/src/pkg_name_ver.rs b/crates/lockfile/src/pkg_name_ver.rs new file mode 100644 index 000000000..f22e2e7a4 --- /dev/null +++ b/crates/lockfile/src/pkg_name_ver.rs @@ -0,0 +1,89 @@ +use crate::{ParsePkgNameSuffixError, PkgNameSuffix}; +use node_semver::{SemverError, Version}; + +/// Syntax: `{name}@{version}` +/// +/// Examples: `ts-node@10.9.1`, `@types/node@18.7.19`, `typescript@5.1.6` +pub type PkgNameVer = PkgNameSuffix; + +/// Error when parsing [`PkgNameVer`] from a string. +pub type ParsePkgNameVerError = ParsePkgNameSuffixError; + +#[cfg(test)] +mod tests { + use super::*; + use pipe_trait::Pipe; + use pretty_assertions::assert_eq; + use serde_yaml::Value as YamlValue; + + fn name_ver(name: &str, ver: impl Into) -> PkgNameVer { + PkgNameVer::new(name.parse().unwrap(), ver.into()) + } + + #[test] + fn parse_ok() { + fn case(input: &'static str, expected: PkgNameVer) { + eprintln!("CASE: {input:?}"); + let received: PkgNameVer = input.parse().unwrap(); + assert_eq!(&received, &expected); + } + + case("ts-node@10.9.1", name_ver("ts-node", (10, 9, 1))); + case("@types/node@18.7.19", name_ver("@types/node", (18, 7, 19))); + case("typescript@5.1.6", name_ver("typescript", (5, 1, 6))); + case("foo@0.1.2-alpha.0", name_ver("foo", Version::parse("0.1.2-alpha.0").unwrap())); + case("@foo/bar@0.1.2-rc.0", name_ver("@foo/bar", Version::parse("0.1.2-rc.0").unwrap())); + } + + #[test] + fn deserialize_ok() { + fn case(input: &'static str, expected: PkgNameVer) { + eprintln!("CASE: {input:?}"); + let received: PkgNameVer = serde_yaml::from_str(input).unwrap(); + assert_eq!(&received, &expected); + } + + case("ts-node@10.9.1", name_ver("ts-node", (10, 9, 1))); + case("'@types/node@18.7.19'", name_ver("@types/node", (18, 7, 19))); + case("typescript@5.1.6", name_ver("typescript", (5, 1, 6))); + case("foo@0.1.2-alpha.0", name_ver("foo", Version::parse("0.1.2-alpha.0").unwrap())); + case("'@foo/bar@0.1.2-rc.0'", name_ver("@foo/bar", Version::parse("0.1.2-rc.0").unwrap())); + } + + #[test] + fn parse_err() { + macro_rules! 
case { + ($title:literal: $input:expr => $message:expr, $pattern:pat) => {{ + let title = $title; + let input = $input; + eprintln!("CASE: {title} (input = {input:?})"); + let error = input.parse::<PkgNameVer>().unwrap_err(); + dbg!(&error); + assert_eq!(error.to_string(), $message); + assert!(matches!(&error, $pattern)); + }}; + } + + case!("Empty input": "" => "Input is empty", ParsePkgNameVerError::EmptyInput); + case!("Non-scope name without version": "ts-node" => "Suffix is missing", ParsePkgNameVerError::MissingSuffix); + case!("Scoped name without version": "@types/node" => "Suffix is missing", ParsePkgNameVerError::MissingSuffix); + case!("Non-scope name with empty version": "ts-node@" => "Suffix is missing", ParsePkgNameVerError::MissingSuffix); + case!("Scoped name with empty version": "@types/node@" => "Suffix is missing", ParsePkgNameVerError::MissingSuffix); + case!("Missing name": "10.9.1" => "Suffix is missing", ParsePkgNameVerError::MissingSuffix); // can't fix without parser combinator + case!("Empty non-scope name": "@19.9.1" => "Suffix is missing", ParsePkgNameVerError::MissingSuffix); // can't fix without parser combinator + case!("Empty scoped name": "@@18.7.19" => "Name is empty", ParsePkgNameVerError::EmptyName); + } + + #[test] + fn to_string() { + let string = name_ver("ts-node", (10, 9, 1)).to_string(); + assert_eq!(string, "ts-node@10.9.1"); + } + + #[test] + fn serialize() { + let received = name_ver("ts-node", (10, 9, 1)).pipe_ref(serde_yaml::to_value).unwrap(); + let expected = "ts-node@10.9.1".to_string().pipe(YamlValue::String); + assert_eq!(received, expected); + } +} diff --git a/crates/lockfile/src/pkg_name_ver_peer.rs b/crates/lockfile/src/pkg_name_ver_peer.rs new file mode 100644 index 000000000..d5e2f5438 --- /dev/null +++ b/crates/lockfile/src/pkg_name_ver_peer.rs @@ -0,0 +1,85 @@ +use crate::{ParsePkgNameSuffixError, ParsePkgVerPeerError, PkgNameSuffix, PkgVerPeer}; + +/// Syntax: `{name}@{version}({peers})` +/// +/// Example: `react-json-view@1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)` +/// +/// **NOTE:** The suffix isn't guaranteed to be correct. It is only assumed to be. +pub type PkgNameVerPeer = PkgNameSuffix<PkgVerPeer>; + +/// Error when parsing [`PkgNameVerPeer`] from a string. +pub type ParsePkgNameVerPeerError = ParsePkgNameSuffixError<ParsePkgVerPeerError>; + +impl PkgNameVerPeer { + /// Construct the name of the corresponding subdirectory in the virtual store directory.
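+ ///
+ /// Mirroring the test cases below: `ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)`
+ /// maps to `ts-node@10.9.1_@types+node@18.7.19_typescript@5.1.6`, and
+ /// `@babel/plugin-proposal-object-rest-spread@7.12.1` maps to
+ /// `@babel+plugin-proposal-object-rest-spread@7.12.1`.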
+ pub fn to_virtual_store_name(&self) -> String { + // the code below is far from optimal, + // optimization requires parser combinator + self.to_string().replace('/', "+").replace(")(", "_").replace('(', "_").replace(')', "") + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + + fn name_peer_ver(name: &str, peer_ver: &str) -> PkgNameVerPeer { + let peer_ver = peer_ver.to_string().parse().unwrap(); + PkgNameVerPeer::new(name.parse().unwrap(), peer_ver) + } + + #[test] + fn parse() { + fn case(input: &'static str, expected: PkgNameVerPeer) { + eprintln!("CASE: {input:?}"); + let received: PkgNameVerPeer = input.parse().unwrap(); + assert_eq!(&received, &expected); + } + + case( + "react-json-view@1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)", + name_peer_ver( + "react-json-view", + "1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)", + ), + ); + case("react-json-view@1.21.3", name_peer_ver("react-json-view", "1.21.3")); + case( + "@algolia/autocomplete-core@1.9.3(@algolia/client-search@4.18.0)(algoliasearch@4.18.0)(search-insights@2.6.0)", + name_peer_ver( + "@algolia/autocomplete-core", + "1.9.3(@algolia/client-search@4.18.0)(algoliasearch@4.18.0)(search-insights@2.6.0)", + ), + ); + case( + "@algolia/autocomplete-core@1.9.3", + name_peer_ver("@algolia/autocomplete-core", "1.9.3"), + ); + } + + #[test] + fn to_virtual_store_name() { + fn case(input: &'static str, expected: &'static str) { + eprintln!("CASE: {input:?}"); + let name_ver_peer: PkgNameVerPeer = input.parse().unwrap(); + dbg!(&name_ver_peer); + let received = name_ver_peer.to_virtual_store_name(); + assert_eq!(received, expected); + } + + case("ts-node@10.9.1", "ts-node@10.9.1"); + case( + "ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)", + "ts-node@10.9.1_@types+node@18.7.19_typescript@5.1.6", + ); + case( + "@babel/plugin-proposal-object-rest-spread@7.12.1", + "@babel+plugin-proposal-object-rest-spread@7.12.1", + ); + case( + "@babel/plugin-proposal-object-rest-spread@7.12.1(@babel/core@7.12.9)", + "@babel+plugin-proposal-object-rest-spread@7.12.1_@babel+core@7.12.9", + ); + } +} diff --git a/crates/lockfile/src/pkg_ver_peer.rs b/crates/lockfile/src/pkg_ver_peer.rs new file mode 100644 index 000000000..a5c3acd32 --- /dev/null +++ b/crates/lockfile/src/pkg_ver_peer.rs @@ -0,0 +1,206 @@ +use derive_more::{Display, Error}; +use node_semver::{SemverError, Version}; +use serde::{Deserialize, Serialize}; +use std::str::FromStr; + +/// Suffix type of [`PkgNameVerPeer`](crate::PkgNameVerPeer) and +/// type of [`ResolvedDependencySpec::version`](crate::ResolvedDependencySpec::version). +/// +/// Example: `1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)` +/// +/// **NOTE:** The peer part isn't guaranteed to be correct. It is only assumed to be. +#[derive(Debug, Display, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[display(fmt = "{version}{peer}")] +#[serde(try_from = "&'de str", into = "String")] +pub struct PkgVerPeer { + version: Version, + peer: String, +} + +impl PkgVerPeer { + /// Get the version part. + pub fn version(&self) -> &'_ Version { + &self.version + } + + /// Get the peer part. + pub fn peer(&self) -> &'_ str { + self.peer.as_str() + } + + /// Destructure the struct into a tuple of version and peer. + pub fn into_tuple(self) -> (Version, String) { + let PkgVerPeer { version, peer } = self; + (version, peer) + } +} + +/// Error when parsing [`PkgVerPeer`] from a string. 
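+///
+/// Parsing fails when the leading version segment is not valid semver or when the
+/// parentheses around the peer segment are unbalanced.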
+#[derive(Debug, Display, Error)] +pub enum ParsePkgVerPeerError { + #[display(fmt = "Failed to parse the version part: {_0}")] + ParseVersionFailure(#[error(source)] SemverError), + #[display(fmt = "Mismatch parenthesis")] + MismatchParenthesis, +} + +impl FromStr for PkgVerPeer { + type Err = ParsePkgVerPeerError; + fn from_str(value: &str) -> Result { + if !value.ends_with(')') { + if value.find(|char| char == '(' || char == ')').is_some() { + return Err(ParsePkgVerPeerError::MismatchParenthesis); + } + + let version = value.parse().map_err(ParsePkgVerPeerError::ParseVersionFailure)?; + return Ok(PkgVerPeer { version, peer: String::new() }); + } + + let opening_parenthesis = + value.find('(').ok_or(ParsePkgVerPeerError::MismatchParenthesis)?; + let version = value[..opening_parenthesis] + .parse() + .map_err(ParsePkgVerPeerError::ParseVersionFailure)?; + let peer = value[opening_parenthesis..].to_string(); + Ok(PkgVerPeer { version, peer }) + } +} + +impl<'a> TryFrom<&'a str> for PkgVerPeer { + type Error = ParsePkgVerPeerError; + fn try_from(value: &'a str) -> Result { + value.parse() + } +} + +impl From for String { + fn from(value: PkgVerPeer) -> Self { + value.to_string() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + + fn assert_ver_peer(received: PkgVerPeer, expected_version: Ver, expected_peer: Peer) + where + Ver: Into, + Peer: Into, + { + dbg!(&received); + let expected_version = expected_version.into(); + let expected_peer = expected_peer.into(); + assert_eq!( + (received.version(), received.peer()), + (&expected_version, expected_peer.as_str()), + ); + assert_eq!(received.into_tuple(), (expected_version, expected_peer)); + } + + fn decode_encode_case(input: &str, decode: Decode, encode: Encode) + where + Decode: Fn(&str) -> PkgVerPeer, + Encode: Fn(&PkgVerPeer) -> String, + { + eprintln!("CASE: {input:?}"); + let peer_ver = decode(input); + dbg!(&peer_ver); + let output = encode(&peer_ver); + assert_eq!(input, output); + } + + #[test] + fn parse_ok() { + fn case(input: &'static str, (expected_version, expected_peer): (Ver, Peer)) + where + Ver: Into, + Peer: Into, + { + eprintln!("CASE: {input:?}"); + assert_ver_peer(input.parse().unwrap(), expected_version, expected_peer); + } + + case( + "1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)", + ((1, 21, 3), "(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)"), + ); + case("1.21.3(react@17.0.2)", ((1, 21, 3), "(react@17.0.2)")); + case( + "1.21.3-rc.0(react@17.0.2)", + ("1.21.3-rc.0".parse::().unwrap(), "(react@17.0.2)"), + ); + case("1.21.3", ((1, 21, 3), "")); + case("1.21.3-rc.0", ("1.21.3-rc.0".parse::().unwrap(), "")); + } + + #[test] + fn parse_err() { + macro_rules! 
case { + ($input:expr => $message:expr, $variant:pat) => {{ + let input = $input; + eprintln!("CASE: {input:?}"); + let error = input.parse::().unwrap_err(); + dbg!(&error); + assert_eq!(error.to_string(), $message); + assert!(matches!(error, $variant)); + }}; + } + case!("1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2" => "Mismatch parenthesis", ParsePkgVerPeerError::MismatchParenthesis); + case!("1.21.3(" => "Mismatch parenthesis", ParsePkgVerPeerError::MismatchParenthesis); + case!("1.21.3)" => "Mismatch parenthesis", ParsePkgVerPeerError::MismatchParenthesis); + case!("a.b.c" => "Failed to parse the version part: Failed to parse version.", ParsePkgVerPeerError::ParseVersionFailure(_)); + } + + #[test] + fn deserialize_ok() { + fn case(input: &'static str, (expected_version, expected_peer): (Ver, Peer)) + where + Ver: Into, + Peer: Into, + { + eprintln!("CASE: {input:?}"); + assert_ver_peer(serde_yaml::from_str(input).unwrap(), expected_version, expected_peer); + } + + case( + "1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)", + ((1, 21, 3), "(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)"), + ); + case("1.21.3(react@17.0.2)", ((1, 21, 3), "(react@17.0.2)")); + case( + "1.21.3-rc.0(react@17.0.2)", + ("1.21.3-rc.0".parse::().unwrap(), "(react@17.0.2)"), + ); + case("1.21.3", ((1, 21, 3), "")); + case("1.21.3-rc.0", ("1.21.3-rc.0".parse::().unwrap(), "")); + } + + #[test] + fn parse_to_string() { + let case = + |input| decode_encode_case(input, |input| input.parse().unwrap(), ToString::to_string); + case("1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)"); + case("1.21.3(react@17.0.2)"); + case("1.21.3-rc.0(react@17.0.2)"); + case("1.21.3"); + case("1.21.3-rc.0"); + } + + #[test] + fn deserialize_serialize() { + let case = |input| { + decode_encode_case( + input, + |input| serde_yaml::from_str(input).unwrap(), + |ver_peer| serde_yaml::to_string(&ver_peer).unwrap().trim().to_string(), + ) + }; + case("1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)"); + case("1.21.3(react@17.0.2)"); + case("1.21.3-rc.0(react@17.0.2)"); + case("1.21.3"); + case("1.21.3-rc.0"); + } +} diff --git a/crates/lockfile/src/project_snapshot.rs b/crates/lockfile/src/project_snapshot.rs new file mode 100644 index 000000000..ab17e0b1a --- /dev/null +++ b/crates/lockfile/src/project_snapshot.rs @@ -0,0 +1,127 @@ +use crate::{PkgName, ResolvedDependencyMap, ResolvedDependencySpec}; +use pacquet_package_json::DependencyGroup; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +/// Snapshot of a single project. +#[derive(Debug, Default, PartialEq, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ProjectSnapshot { + #[serde(skip_serializing_if = "Option::is_none")] + pub specifiers: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub dependencies: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub optional_dependencies: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub dev_dependencies: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub dependencies_meta: Option, // TODO: DependenciesMeta + #[serde(skip_serializing_if = "Option::is_none")] + pub publish_directory: Option, +} + +impl ProjectSnapshot { + /// Lookup dependency map according to group. 
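+ ///
+ /// Returns `None` for [`DependencyGroup::Peer`] because a project snapshot has no
+ /// dedicated map for peer dependencies.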
+ pub fn get_map_by_group(&self, group: DependencyGroup) -> Option<&'_ ResolvedDependencyMap> { + match group { + DependencyGroup::Default => self.dependencies.as_ref(), + DependencyGroup::Optional => self.optional_dependencies.as_ref(), + DependencyGroup::Dev => self.dev_dependencies.as_ref(), + DependencyGroup::Peer => None, + } + } + + /// Iterate over combination of dependency maps according to groups. + pub fn dependencies_by_groups( + &self, + groups: impl IntoIterator, + ) -> impl Iterator { + groups.into_iter().flat_map(|group| self.get_map_by_group(group)).flatten() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + use text_block_macros::text_block; + + const YAML: &str = text_block! { + "dependencies:" + " react:" + " specifier: ^17.0.2" + " version: 17.0.2" + " react-dom:" + " specifier: ^17.0.2" + " version: 17.0.2(react@17.0.2)" + "optionalDependencies:" + " '@types/node':" + " specifier: ^18.7.19" + " version: 18.7.19" + "devDependencies:" + " ts-node:" + " specifier: 10.9.1" + " version: 10.9.1(@types/node@18.7.19)(typescript@5.1.6)" + " typescript:" + " specifier: ^5.1.6" + " version: 5.1.6" + }; + + fn fixture_project_snapshot() -> ProjectSnapshot { + serde_yaml::from_str(YAML).unwrap() + } + + #[test] + fn dependencies_by_groups() { + use DependencyGroup::{Default, Dev, Optional, Peer}; + + macro_rules! case { + ($input:expr => $output:expr) => {{ + let groups = $input; + eprintln!("CASE: {groups:?}"); + let mut received: Vec<_> = fixture_project_snapshot() + .dependencies_by_groups(groups) + .map(|(name, ResolvedDependencySpec { specifier, version })| { + (name.to_string(), specifier.to_string(), version.to_string()) + }) + .collect(); + received.sort(); // TODO: remove this line after switching to IndexMap + let expected = $output.map(|(name, specifier, version): (&str, &str, &str)| { + (name.to_string(), specifier.to_string(), version.to_string()) + }); + assert_eq!(received, expected); + }}; + } + + case!([] => []); + case!([Default] => [ + ("react", "^17.0.2", "17.0.2"), + ("react-dom", "^17.0.2", "17.0.2(react@17.0.2)"), + ]); + case!([Peer] => []); + case!([Optional] => [ + ("@types/node", "^18.7.19", "18.7.19"), + ]); + case!([Dev] => [ + ("ts-node", "10.9.1", "10.9.1(@types/node@18.7.19)(typescript@5.1.6)"), + ("typescript", "^5.1.6", "5.1.6"), + ]); + case!([Default, Peer] => [ + ("react", "^17.0.2", "17.0.2"), + ("react-dom", "^17.0.2", "17.0.2(react@17.0.2)"), + ]); + case!([Default, Peer, Optional] => [ + ("@types/node", "^18.7.19", "18.7.19"), + ("react", "^17.0.2", "17.0.2"), + ("react-dom", "^17.0.2", "17.0.2(react@17.0.2)"), + ]); + case!([Default, Peer, Optional, Dev] => [ + ("@types/node", "^18.7.19", "18.7.19"), + ("react", "^17.0.2", "17.0.2"), + ("react-dom", "^17.0.2", "17.0.2(react@17.0.2)"), + ("ts-node", "10.9.1", "10.9.1(@types/node@18.7.19)(typescript@5.1.6)"), + ("typescript", "^5.1.6", "5.1.6"), + ]); + } +} diff --git a/crates/lockfile/src/resolution.rs b/crates/lockfile/src/resolution.rs new file mode 100644 index 000000000..6b3c2e63b --- /dev/null +++ b/crates/lockfile/src/resolution.rs @@ -0,0 +1,250 @@ +use derive_more::{From, TryInto}; +use pipe_trait::Pipe; +use serde::{Deserialize, Serialize}; + +/// For tarball hosted remotely or locally. 
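+///
+/// Serialized form (as exercised in the tests below): a `tarball` field holding the URL or
+/// `file:` path, plus an optional `integrity` checksum.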
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub struct TarballResolution { + pub tarball: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub integrity: Option, +} + +/// For standard package specification, with package name and version range. +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub struct RegistryResolution { + pub integrity: String, +} + +/// For local directory on a filesystem. +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub struct DirectoryResolution { + pub directory: String, +} + +/// For git repository. +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub struct GitResolution { + pub repo: String, + pub commit: String, +} + +/// Represent the resolution object. +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, From, TryInto)] +#[serde(from = "ResolutionSerde", into = "ResolutionSerde")] +pub enum LockfileResolution { + Tarball(TarballResolution), + Registry(RegistryResolution), + Directory(DirectoryResolution), + Git(GitResolution), +} + +impl LockfileResolution { + /// Get the integrity field if available. + pub fn integrity(&self) -> Option<&'_ str> { + match self { + LockfileResolution::Tarball(resolution) => resolution.integrity.as_deref(), + LockfileResolution::Registry(resolution) => resolution.integrity.as_str().pipe(Some), + LockfileResolution::Directory(_) | LockfileResolution::Git(_) => None, + } + } +} + +/// Intermediate helper type for serde. +#[derive(Deserialize, Serialize, From, TryInto)] +#[serde(tag = "type", rename_all = "camelCase")] +enum TaggedResolution { + Directory(DirectoryResolution), + Git(GitResolution), +} + +/// Intermediate helper type for serde. +#[derive(Deserialize, Serialize, From, TryInto)] +#[serde(untagged)] +enum ResolutionSerde { + Tarball(TarballResolution), + Registry(RegistryResolution), + Tagged(TaggedResolution), +} + +impl From for LockfileResolution { + fn from(value: ResolutionSerde) -> Self { + match value { + ResolutionSerde::Tarball(resolution) => resolution.into(), + ResolutionSerde::Registry(resolution) => resolution.into(), + ResolutionSerde::Tagged(TaggedResolution::Directory(resolution)) => resolution.into(), + ResolutionSerde::Tagged(TaggedResolution::Git(resolution)) => resolution.into(), + } + } +} + +impl From for ResolutionSerde { + fn from(value: LockfileResolution) -> Self { + match value { + LockfileResolution::Tarball(resolution) => resolution.into(), + LockfileResolution::Registry(resolution) => resolution.into(), + LockfileResolution::Directory(resolution) => { + resolution.pipe(TaggedResolution::from).into() + } + LockfileResolution::Git(resolution) => resolution.pipe(TaggedResolution::from).into(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + use text_block_macros::text_block; + + #[test] + fn deserialize_tarball_resolution() { + eprintln!("CASE: without integrity"); + let yaml = text_block! 
{ + "tarball: file:ts-pipe-compose-0.2.1.tgz" + }; + let received: LockfileResolution = serde_yaml::from_str(yaml).unwrap(); + dbg!(&received); + let expected = LockfileResolution::Tarball(TarballResolution { + tarball: "file:ts-pipe-compose-0.2.1.tgz".to_string(), + integrity: None, + }); + assert_eq!(received, expected); + + eprintln!("CASE: with integrity"); + let yaml = text_block! { + "tarball: file:ts-pipe-compose-0.2.1.tgz" + "integrity: sha512-gf6ZldcfCDyNXPRiW3lQjEP1Z9rrUM/4Cn7BZbv3SdTA82zxWRP8OmLwvGR974uuENhGCFgFdN11z3n1Ofpprg==" + }; + let received: LockfileResolution = serde_yaml::from_str(yaml).unwrap(); + dbg!(&received); + let expected = LockfileResolution::Tarball(TarballResolution { + tarball: "file:ts-pipe-compose-0.2.1.tgz".to_string(), + integrity: "sha512-gf6ZldcfCDyNXPRiW3lQjEP1Z9rrUM/4Cn7BZbv3SdTA82zxWRP8OmLwvGR974uuENhGCFgFdN11z3n1Ofpprg==".to_string().into() + }); + assert_eq!(received, expected); + } + + #[test] + fn serialize_tarball_resolution() { + eprintln!("CASE: without integrity"); + let resolution = LockfileResolution::Tarball(TarballResolution { + tarball: "file:ts-pipe-compose-0.2.1.tgz".to_string(), + integrity: None, + }); + let received = serde_yaml::to_string(&resolution).unwrap(); + let received = received.trim(); + eprintln!("RECEIVED:\n{received}"); + let expected = text_block! { + "tarball: file:ts-pipe-compose-0.2.1.tgz" + }; + assert_eq!(received, expected); + + eprintln!("CASE: with integrity"); + let resolution = LockfileResolution::Tarball(TarballResolution { + tarball: "file:ts-pipe-compose-0.2.1.tgz".to_string(), + integrity: "sha512-gf6ZldcfCDyNXPRiW3lQjEP1Z9rrUM/4Cn7BZbv3SdTA82zxWRP8OmLwvGR974uuENhGCFgFdN11z3n1Ofpprg==".to_string().into() + }); + let received = serde_yaml::to_string(&resolution).unwrap(); + let received = received.trim(); + eprintln!("RECEIVED:\n{received}"); + let expected = text_block! { + "tarball: file:ts-pipe-compose-0.2.1.tgz" + "integrity: sha512-gf6ZldcfCDyNXPRiW3lQjEP1Z9rrUM/4Cn7BZbv3SdTA82zxWRP8OmLwvGR974uuENhGCFgFdN11z3n1Ofpprg==" + }; + assert_eq!(received, expected); + } + + #[test] + fn deserialize_registry_resolution() { + let yaml = text_block! { + "integrity: sha512-gf6ZldcfCDyNXPRiW3lQjEP1Z9rrUM/4Cn7BZbv3SdTA82zxWRP8OmLwvGR974uuENhGCFgFdN11z3n1Ofpprg==" + }; + let received: LockfileResolution = serde_yaml::from_str(yaml).unwrap(); + dbg!(&received); + let expected = LockfileResolution::Registry(RegistryResolution { + integrity: "sha512-gf6ZldcfCDyNXPRiW3lQjEP1Z9rrUM/4Cn7BZbv3SdTA82zxWRP8OmLwvGR974uuENhGCFgFdN11z3n1Ofpprg==".to_string() + }); + assert_eq!(received, expected); + } + + #[test] + fn serialize_registry_resolution() { + let resolution = LockfileResolution::Registry(RegistryResolution { + integrity: "sha512-gf6ZldcfCDyNXPRiW3lQjEP1Z9rrUM/4Cn7BZbv3SdTA82zxWRP8OmLwvGR974uuENhGCFgFdN11z3n1Ofpprg==".to_string() + }); + let received = serde_yaml::to_string(&resolution).unwrap(); + let received = received.trim(); + eprintln!("RECEIVED:\n{received}"); + let expected = text_block! { + "integrity: sha512-gf6ZldcfCDyNXPRiW3lQjEP1Z9rrUM/4Cn7BZbv3SdTA82zxWRP8OmLwvGR974uuENhGCFgFdN11z3n1Ofpprg==" + }; + assert_eq!(received, expected); + } + + #[test] + fn deserialize_directory_resolution() { + let yaml = text_block! 
{ + "type: directory" + "directory: ts-pipe-compose-0.2.1/package" + }; + let received: LockfileResolution = serde_yaml::from_str(yaml).unwrap(); + dbg!(&received); + let expected = LockfileResolution::Directory(DirectoryResolution { + directory: "ts-pipe-compose-0.2.1/package".to_string(), + }); + assert_eq!(received, expected); + } + + #[test] + fn serialize_directory_resolution() { + let resolution = LockfileResolution::Directory(DirectoryResolution { + directory: "ts-pipe-compose-0.2.1/package".to_string(), + }); + let received = serde_yaml::to_string(&resolution).unwrap(); + let received = received.trim(); + eprintln!("RECEIVED:\n{received}"); + let expected = text_block! { + "type: directory" + "directory: ts-pipe-compose-0.2.1/package" + }; + assert_eq!(received, expected); + } + + #[test] + fn deserialize_git_resolution() { + let yaml = text_block! { + "type: git" + "repo: https://github.com/ksxnodemodules/ts-pipe-compose.git" + "commit: e63c09e460269b0c535e4c34debf69bb91d57b22" + }; + let received: LockfileResolution = serde_yaml::from_str(yaml).unwrap(); + dbg!(&received); + let expected = LockfileResolution::Git(GitResolution { + repo: "https://github.com/ksxnodemodules/ts-pipe-compose.git".to_string(), + commit: "e63c09e460269b0c535e4c34debf69bb91d57b22".to_string(), + }); + assert_eq!(received, expected); + } + + #[test] + fn serialize_git_resolution() { + let resolution = LockfileResolution::Git(GitResolution { + repo: "https://github.com/ksxnodemodules/ts-pipe-compose.git".to_string(), + commit: "e63c09e460269b0c535e4c34debf69bb91d57b22".to_string(), + }); + let received = serde_yaml::to_string(&resolution).unwrap(); + let received = received.trim(); + eprintln!("RECEIVED:\n{received}"); + let expected = text_block! { + "type: git" + "repo: https://github.com/ksxnodemodules/ts-pipe-compose.git" + "commit: e63c09e460269b0c535e4c34debf69bb91d57b22" + }; + assert_eq!(received, expected); + } +} diff --git a/crates/lockfile/src/resolved_dependency.rs b/crates/lockfile/src/resolved_dependency.rs new file mode 100644 index 000000000..2788768d6 --- /dev/null +++ b/crates/lockfile/src/resolved_dependency.rs @@ -0,0 +1,16 @@ +use crate::{PkgName, PkgVerPeer}; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +/// Map of resolved dependencies stored in a [`ProjectSnapshot`](crate::ProjectSnapshot). +/// +/// The keys are package names. +pub type ResolvedDependencyMap = HashMap; + +/// Value type of [`ResolvedDependencyMap`]. +#[derive(Debug, PartialEq, Deserialize, Serialize)] +#[serde(deny_unknown_fields, rename_all = "camelCase")] +pub struct ResolvedDependencySpec { + pub specifier: String, + pub version: PkgVerPeer, +} diff --git a/crates/lockfile/src/root_project_snapshot.rs b/crates/lockfile/src/root_project_snapshot.rs new file mode 100644 index 000000000..02237c722 --- /dev/null +++ b/crates/lockfile/src/root_project_snapshot.rs @@ -0,0 +1,48 @@ +use crate::{MultiProjectSnapshot, ProjectSnapshot}; +use derive_more::{From, TryInto}; +use serde::{Deserialize, Serialize}; + +/// Snapshot of the root project. +#[derive(Debug, PartialEq, Deserialize, Serialize, From, TryInto)] +#[serde(untagged)] +pub enum RootProjectSnapshot { + Multi(MultiProjectSnapshot), + Single(ProjectSnapshot), +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + + macro_rules! 
test_deserialization { + ($name:ident: $input:expr => $output:expr) => { + #[test] + fn $name() { + let yaml = $input; + let received: RootProjectSnapshot = serde_yaml::from_str(yaml).unwrap(); + let expected: RootProjectSnapshot = $output; + assert_eq!(received, expected); + } + }; + } + + test_deserialization!(empty_object_is_considered_single: "{}" => RootProjectSnapshot::Single(Default::default())); + test_deserialization!(empty_importers_is_considered_multi: "importers: {}" => RootProjectSnapshot::Multi(Default::default())); + + macro_rules! test_serialization { + ($name:ident: $input:expr => $output:expr) => { + #[test] + fn $name() { + let snapshot: RootProjectSnapshot = $input; + let received = serde_yaml::to_string(&snapshot).unwrap(); + let received = received.trim(); + let expected = $output; + assert_eq!(received, expected); + } + }; + } + + test_serialization!(default_single_becomes_empty_object: RootProjectSnapshot::Single(Default::default()) => "{}"); + test_serialization!(default_multi_gives_empty_importers: RootProjectSnapshot::Multi(Default::default()) => "importers: {}"); +} diff --git a/crates/npmrc/src/custom_deserializer.rs b/crates/npmrc/src/custom_deserializer.rs index bf595cc01..df11dd165 100644 --- a/crates/npmrc/src/custom_deserializer.rs +++ b/crates/npmrc/src/custom_deserializer.rs @@ -45,10 +45,12 @@ pub fn default_store_dir() -> PathBuf { } pub fn default_modules_dir() -> PathBuf { + // TODO: find directory with package.json env::current_dir().expect("current directory is unavailable").join("node_modules") } pub fn default_virtual_store_dir() -> PathBuf { + // TODO: find directory with package.json env::current_dir().expect("current directory is unavailable").join("node_modules/.pacquet") } diff --git a/crates/npmrc/src/lib.rs b/crates/npmrc/src/lib.rs index e45136a1e..c4b797f99 100644 --- a/crates/npmrc/src/lib.rs +++ b/crates/npmrc/src/lib.rs @@ -27,7 +27,7 @@ pub enum NodeLinker { Pnp, } -#[derive(Debug, Deserialize, Default, PartialEq)] +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Deserialize)] #[serde(rename_all = "kebab-case")] pub enum PackageImportMethod { /// try to clone packages from the store. 
If cloning is not supported then hardlink packages diff --git a/crates/package_json/Cargo.toml b/crates/package_json/Cargo.toml index 44fd825a5..70186f9a4 100644 --- a/crates/package_json/Cargo.toml +++ b/crates/package_json/Cargo.toml @@ -18,6 +18,7 @@ serde_json = { workspace = true } strum = { workspace = true } [dev-dependencies] +pipe-trait = { workspace = true } pretty_assertions = { workspace = true } tempfile = { workspace = true } insta = { workspace = true } diff --git a/crates/package_json/src/lib.rs b/crates/package_json/src/lib.rs index e849cd0b5..d72765e22 100644 --- a/crates/package_json/src/lib.rs +++ b/crates/package_json/src/lib.rs @@ -8,6 +8,7 @@ use pacquet_diagnostics::{ miette::{self, Diagnostic}, thiserror::{self, Error}, }; +use serde::{Deserialize, Serialize}; use serde_json::{json, Map, Value}; use strum::IntoStaticStr; @@ -52,8 +53,13 @@ pub enum DependencyGroup { Optional, #[strum(serialize = "peerDependencies")] Peer, - #[strum(serialize = "bundledDependencies")] - Bundled, +} + +#[derive(Debug, PartialEq, Eq, Deserialize, Serialize)] +#[serde(untagged)] +pub enum BundleDependencies { + Boolean(bool), + List(Vec<String>), } pub struct PackageJson { @@ -143,6 +149,15 @@ impl PackageJson { .flat_map(|(name, version)| version.as_str().map(|value| (name.as_str(), value))) } + pub fn bundle_dependencies(&self) -> Result<Option<BundleDependencies>, serde_json::Error> { + self.value + .get("bundleDependencies") + .or_else(|| self.value.get("bundledDependencies")) + .map(serde_json::Value::clone) + .map(serde_json::from_value) + .transpose() + } + pub fn add_dependency( &mut self, name: &str, @@ -193,6 +208,7 @@ mod tests { use std::{collections::HashMap, fs::read_to_string}; use insta::assert_snapshot; + use pipe_trait::Pipe; use pretty_assertions::assert_eq; use tempfile::{tempdir, NamedTempFile}; @@ -281,4 +297,35 @@ mod tests { assert!(dependencies([DependencyGroup::Peer]).contains_key("fast-querystring")); assert!(dependencies([DependencyGroup::Default]).contains_key("fastify")); } + + #[test] + fn bundle_dependencies() { + fn bundle_list<List>(list: List) -> BundleDependencies + where + List: IntoIterator, + List::Item: Into<String>, + { + list.into_iter().map(Into::into).collect::<Vec<String>>().pipe(BundleDependencies::List) + } + + macro_rules!
case { + ($input:expr => $output:expr) => {{ + let data = $input; + eprintln!("CASE: {data}"); + let tmp = NamedTempFile::new().unwrap(); + write!(tmp.as_file(), "{}", data).unwrap(); + let package_json = PackageJson::create_if_needed(tmp.path().to_path_buf()).unwrap(); + let bundle = package_json.bundle_dependencies().unwrap(); + assert_eq!(bundle, $output); + }}; + } + + case!(r#"{ "bundleDependencies": ["foo", "bar"] }"# => Some(bundle_list(["foo", "bar"]))); + case!(r#"{ "bundledDependencies": ["foo", "bar"] }"# => Some(bundle_list(["foo", "bar"]))); + case!(r#"{ "bundleDependencies": false }"# => false.pipe(BundleDependencies::Boolean).pipe(Some)); + case!(r#"{ "bundledDependencies": false }"# => false.pipe(BundleDependencies::Boolean).pipe(Some)); + case!(r#"{ "bundleDependencies": true }"# => true.pipe(BundleDependencies::Boolean).pipe(Some)); + case!(r#"{ "bundledDependencies": true }"# => true.pipe(BundleDependencies::Boolean).pipe(Some)); + case!(r#"{}"# => None); + } } diff --git a/crates/registry/src/package.rs b/crates/registry/src/package.rs index 7fdf39792..79f0f03ad 100644 --- a/crates/registry/src/package.rs +++ b/crates/registry/src/package.rs @@ -45,8 +45,8 @@ impl Package { .pipe(Ok) } - pub fn pinned_version(&self, version_field: &str) -> Option<&PackageVersion> { - let range: node_semver::Range = version_field.parse().unwrap(); + pub fn pinned_version(&self, version_range: &str) -> Option<&PackageVersion> { + let range: node_semver::Range = version_range.parse().unwrap(); // TODO: this step should have happened in PackageJson let mut satisfied_versions = self .versions .values() diff --git a/crates/registry/src/package_version.rs b/crates/registry/src/package_version.rs index 03301219f..1580da92e 100644 --- a/crates/registry/src/package_version.rs +++ b/crates/registry/src/package_version.rs @@ -1,4 +1,4 @@ -use std::collections::HashMap; +use std::{collections::HashMap, fmt::Display}; use pipe_trait::Pipe; use serde::{Deserialize, Serialize}; @@ -25,7 +25,7 @@ impl PartialEq for PackageVersion { impl PackageVersion { pub async fn fetch_from_registry( name: &str, - version: &str, + version: impl Display, // TODO: change to node_semver::Version to increase resistance against programmer errors http_client: &reqwest::Client, registry: &str, ) -> Result { @@ -44,7 +44,7 @@ impl PackageVersion { .pipe(Ok) } - pub fn to_store_name(&self) -> String { + pub fn to_virtual_store_name(&self) -> String { format!("{0}@{1}", self.name.replace('/', "+"), self.version) } diff --git a/crates/tarball/src/lib.rs b/crates/tarball/src/lib.rs index 3908006ea..f33783f95 100644 --- a/crates/tarball/src/lib.rs +++ b/crates/tarball/src/lib.rs @@ -121,6 +121,9 @@ pub async fn download_tarball_to_store( package_unpacked_size: Option, package_url: &str, ) -> Result>, TarballError> { + // QUESTION: I see no copying from existing store_dir, is there such mechanism? 
+ // TODO: If it's not implemented yet, implement it + if let Some(cache_lock) = cache.get(package_url) { let notify = match &*cache_lock.write().await { CacheValue::Available(cas_paths) => { diff --git a/tasks/benchmark-install-against-revisions/Cargo.toml b/tasks/benchmark-install-against-revisions/Cargo.toml index 061e4d271..61e08d2ab 100644 --- a/tasks/benchmark-install-against-revisions/Cargo.toml +++ b/tasks/benchmark-install-against-revisions/Cargo.toml @@ -21,3 +21,4 @@ os_display = { workspace = true } pipe-trait = { workspace = true } reqwest = { workspace = true } tokio = { workspace = true } +which = { workspace = true } diff --git a/tasks/benchmark-install-against-revisions/src/cli_args.rs b/tasks/benchmark-install-against-revisions/src/cli_args.rs index db5d2f2d1..31487db54 100644 --- a/tasks/benchmark-install-against-revisions/src/cli_args.rs +++ b/tasks/benchmark-install-against-revisions/src/cli_args.rs @@ -1,8 +1,13 @@ -use clap::{Args, Parser}; +use crate::fixtures::LOCKFILE; +use clap::{Args, Parser, ValueEnum}; use std::{path::PathBuf, process::Command}; #[derive(Debug, Parser)] pub struct CliArgs { + /// Task to benchmark. + #[clap(long, short)] + pub scenario: BenchmarkScenario, + /// URL to the local virtual registry. #[clap(long, short, default_value = "http://localhost:4873")] pub registry: String, @@ -23,11 +28,49 @@ pub struct CliArgs { #[clap(long, short, default_value = "bench-work-env")] pub work_env: PathBuf, + /// Benchmark against pnpm. + #[clap(long)] + pub with_pnpm: bool, + /// Branch name, tag name, or commit id of the pacquet repo. #[clap(required = true)] pub revisions: Vec, } +#[derive(Debug, Clone, Copy, ValueEnum)] +pub enum BenchmarkScenario { + /// Benchmark clean install without lockfile and without local cache. + CleanInstall, + /// Benchmark install with a frozen lockfile and without local cache. + FrozenLockfile, +} + +impl BenchmarkScenario { + /// Infer CLI arguments for the install command. + pub fn install_args(self) -> impl IntoIterator { + match self { + BenchmarkScenario::CleanInstall => Vec::new(), + BenchmarkScenario::FrozenLockfile => vec!["--frozen-lockfile"], + } + } + + /// Return `lockfile=true` or `lockfile=false` for use in generating `.npmrc`. + pub fn npmrc_lockfile_setting(self) -> &'static str { + match self { + BenchmarkScenario::CleanInstall => "lockfile=false", + BenchmarkScenario::FrozenLockfile => "lockfile=true", + } + } + + /// Whether to use a lockfile. + pub fn lockfile(self) -> Option<&'static str> { + match self { + BenchmarkScenario::CleanInstall => None, + BenchmarkScenario::FrozenLockfile => Some(LOCKFILE), + } + } +} + #[derive(Debug, Args)] pub struct HyperfineOptions { /// Number of warmup runs to perform before the actual measured benchmark. @@ -46,6 +89,10 @@ pub struct HyperfineOptions { #[clap(long)] pub runs: Option, + /// Print stdout and stderr of the benchmarked program instead of suppressing it + #[clap(long)] + show_output: bool, + /// Ignore non-zero exit codes of the benchmarked program. 
#[clap(long)] pub ignore_failure: bool, @@ -53,7 +100,8 @@ pub struct HyperfineOptions { impl HyperfineOptions { pub fn append_to(&self, hyperfine_command: &mut Command) { - let &HyperfineOptions { warmup, min_runs, max_runs, runs, ignore_failure } = self; + let &HyperfineOptions { show_output, warmup, min_runs, max_runs, runs, ignore_failure } = + self; hyperfine_command.arg("--warmup").arg(warmup.to_string()); if let Some(min_runs) = min_runs { hyperfine_command.arg("--min-runs").arg(min_runs.to_string()); @@ -64,6 +112,9 @@ impl HyperfineOptions { if let Some(runs) = runs { hyperfine_command.arg("--runs").arg(runs.to_string()); } + if show_output { + hyperfine_command.arg("--show-output"); + } if ignore_failure { hyperfine_command.arg("--ignore-failures"); } diff --git a/tasks/benchmark-install-against-revisions/src/fixtures/install.bash b/tasks/benchmark-install-against-revisions/src/fixtures/install.bash deleted file mode 100644 index 13a27dca8..000000000 --- a/tasks/benchmark-install-against-revisions/src/fixtures/install.bash +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -set -o errexit -o nounset -o pipefail -cd "$(dirname "$0")" -exec ./pacquet/target/release/pacquet install diff --git a/tasks/benchmark-install-against-revisions/src/fixtures/mod.rs b/tasks/benchmark-install-against-revisions/src/fixtures/mod.rs index a650e858d..9b8b287d2 100644 --- a/tasks/benchmark-install-against-revisions/src/fixtures/mod.rs +++ b/tasks/benchmark-install-against-revisions/src/fixtures/mod.rs @@ -1,2 +1,2 @@ pub const PACKAGE_JSON: &str = include_str!("package.json"); -pub const INSTALL_SCRIPT: &str = include_str!("install.bash"); +pub const LOCKFILE: &str = include_str!("pnpm-lock.yaml"); diff --git a/tasks/benchmark-install-against-revisions/src/fixtures/pnpm-lock.yaml b/tasks/benchmark-install-against-revisions/src/fixtures/pnpm-lock.yaml new file mode 100644 index 000000000..f4fa1fec7 --- /dev/null +++ b/tasks/benchmark-install-against-revisions/src/fixtures/pnpm-lock.yaml @@ -0,0 +1,352 @@ +lockfileVersion: '6.0' + +settings: + autoInstallPeers: false + excludeLinksFromLockfile: false + +dependencies: + '@tsfun/all': + specifier: 0.0.37 + version: 0.0.37 + '@types/node': + specifier: ^16.18.39 + version: 16.18.48 + cross-env: + specifier: ^7.0.3 + version: 7.0.3 + husky: + specifier: ^8.0.3 + version: 8.0.3 + keyv: + specifier: 4.5.3 + version: 4.5.3 + rimraf: + specifier: ^3.0.2 + version: 3.0.2 + shx: + specifier: ^0.3.4 + version: 0.3.4 + +packages: + + /@tsfun/all@0.0.37: + resolution: {integrity: sha512-qkhh/D1k7m6ocZZ2n9AK5zn1Jfine2TPopwpS9s05THQzT55hrtqjuRNZxQnOvxpmdYUIN+iy4qkvENnl1Rkfg==} + engines: {node: '>= 8.9.0'} + dependencies: + '@tsfun/array': 0.0.8 + '@tsfun/function': 0.0.11 + '@tsfun/misc': 0.0.11 + '@tsfun/object': 0.0.21 + '@tsfun/option': 0.0.19 + '@tsfun/pipe': 0.0.15 + '@tsfun/result': 0.0.20 + '@tsfun/tap': 0.0.14 + '@types/node': 14.18.58 + tslib: 2.6.2 + dev: false + + /@tsfun/array@0.0.8: + resolution: {integrity: sha512-7G8HXcIl5wPTuEdPG2Nz+jrIbtL1rvN6f1zgheG+FLJhWvaxJOgAX5oAC+e1nSK29yXIpW8WuMJDSpyUp2W3ag==} + dependencies: + '@types/node': 14.18.58 + tslib: 2.6.2 + dev: false + + /@tsfun/function@0.0.11: + resolution: {integrity: sha512-pFj1jdnZipRsln0StaBuHIMKchfS+UoKa2lu9uLbyXCNetD640WJbTFV06XEjKKge1c7qkHsq3WOdSXtKAM6Wg==} + dependencies: + '@types/node': 14.18.58 + ts-pipe-compose: 0.2.1 + tslib: 2.6.2 + dev: false + + /@tsfun/misc@0.0.11: + resolution: {integrity: sha512-yJR+AM+SZXCaLeLynb3EnMquBEQXhjTX36DR5Ih3e/U8COjyxayBJ80+WxCkx0xodCZH/XCTkbhw3w4aFIi8bw==} 
+ dependencies: + '@types/node': 14.18.58 + tslib: 2.6.2 + dev: false + + /@tsfun/object@0.0.21: + resolution: {integrity: sha512-bq4HlP+5JFNGcKysmgPI1nR8vqPLlMRhWdZ/rph8sI8jHghQGSXV8BaqzTQ8krUgM/xkUfDK0x+fXMkw0020pQ==} + dependencies: + '@types/node': 14.18.58 + tslib: 2.6.2 + utility-types: 3.10.0 + dev: false + + /@tsfun/option@0.0.19: + resolution: {integrity: sha512-qOiWLoJb2P25/PC/0GbxeTCGFeOc8FJfI3+dLccBne/pjUuEri+JypZJKMkDgnrWhIWxNxZqsGlac+sy8SMr6A==} + engines: {node: '>= 8.9.0'} + dependencies: + '@tsfun/prv-option-result-common': 0.0.15 + '@tsfun/prv-types': 0.0.10 + '@tsfun/tap': 0.0.14 + '@types/node': 14.18.58 + tslib: 2.6.2 + dev: false + + /@tsfun/pipe@0.0.15: + resolution: {integrity: sha512-G7s7eRksa/lBkls+tIZhejyMUL7QxIQbwNnXIVAlq4QSZ2errhYnCUErcdhOVCYd3gqvVZH6uozNCMeYiQ7TMQ==} + engines: {node: '>= 8.9.0'} + dependencies: + '@tsfun/tap': 0.0.14 + '@types/node': 14.18.58 + tslib: 2.6.2 + dev: false + + /@tsfun/prv-option-result-common@0.0.15: + resolution: {integrity: sha512-oqmmMHkoD1IXADxtOupJ9JFBcocuToxbSuqMIw3VszZIyvysJ3t9y+LlP+qxo92pOj/GJz4oohhzKBHkaaLqWA==} + engines: {node: '>= 8.9.0'} + dependencies: + '@types/node': 14.18.58 + tslib: 2.6.2 + dev: false + + /@tsfun/prv-types@0.0.10: + resolution: {integrity: sha512-8QJUR81Zyf9gLhl7llp10bTZ+4WgXBM6LaiKZTmgjmviggFIQobm1hDzirZ9gUMxG1lGtTr4glszSkvuCcSjaA==} + dependencies: + '@types/node': 14.18.58 + tslib: 2.6.2 + dev: false + + /@tsfun/result@0.0.20: + resolution: {integrity: sha512-7IXm8ab6hRl7dNliHvFCoHdDXcDITSOPFKZl+7VxEKTXXmSrUe5QupvzzBro1G5Q104do/xiHZT1RBhwLzg1iw==} + engines: {node: '>= 8.9.0'} + dependencies: + '@tsfun/prv-option-result-common': 0.0.15 + '@tsfun/prv-types': 0.0.10 + '@tsfun/tap': 0.0.14 + '@types/node': 14.18.58 + tslib: 2.6.2 + dev: false + + /@tsfun/tap@0.0.14: + resolution: {integrity: sha512-hXrhnrFidsbi4zeo6HaiHEMFLcjNTSc4osYps2Py7cgbgwW6nNxcZwRbDlQaMIdvpuQuHFd1+bxhDYQ0C9r6xA==} + dependencies: + '@types/node': 14.18.58 + tslib: 2.6.2 + dev: false + + /@types/node@14.18.58: + resolution: {integrity: sha512-Y8ETZc8afYf6lQ/mVp096phIVsgD/GmDxtm3YaPcc+71jmi/J6zdwbwaUU4JvS56mq6aSfbpkcKhQ5WugrWFPw==} + dev: false + + /@types/node@16.18.48: + resolution: {integrity: sha512-mlaecDKQ7rIZrYD7iiKNdzFb6e/qD5I9U1rAhq+Fd+DWvYVs+G2kv74UFHmSOlg5+i/vF3XxuR522V4u8BqO+Q==} + dev: false + + /balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + dev: false + + /brace-expansion@1.1.11: + resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + dev: false + + /concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + dev: false + + /cross-env@7.0.3: + resolution: {integrity: sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==} + engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'} + hasBin: true + dependencies: + cross-spawn: 7.0.3 + dev: false + + /cross-spawn@7.0.3: + resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} + engines: {node: '>= 8'} + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + dev: false + + /fs.realpath@1.0.0: + resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} + dev: false + + 
/function-bind@1.1.1: + resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} + dev: false + + /glob@7.2.3: + resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + dev: false + + /has@1.0.3: + resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==} + engines: {node: '>= 0.4.0'} + dependencies: + function-bind: 1.1.1 + dev: false + + /husky@8.0.3: + resolution: {integrity: sha512-+dQSyqPh4x1hlO1swXBiNb2HzTDN1I2IGLQx1GrBuiqFJfoMrnZWwVmatvSiO+Iz8fBUnf+lekwNo4c2LlXItg==} + engines: {node: '>=14'} + hasBin: true + dev: false + + /inflight@1.0.6: + resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + dev: false + + /inherits@2.0.4: + resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + dev: false + + /interpret@1.4.0: + resolution: {integrity: sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==} + engines: {node: '>= 0.10'} + dev: false + + /is-core-module@2.13.0: + resolution: {integrity: sha512-Z7dk6Qo8pOCp3l4tsX2C5ZVas4V+UxwQodwZhLopL91TX8UyyHEXafPcyoeeWuLrwzHcr3igO78wNLwHJHsMCQ==} + dependencies: + has: 1.0.3 + dev: false + + /isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + dev: false + + /json-buffer@3.0.1: + resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} + dev: false + + /keyv@4.5.3: + resolution: {integrity: sha512-QCiSav9WaX1PgETJ+SpNnx2PRRapJ/oRSXM4VO5OGYGSjrxbKPVFVhB3l2OCbLCk329N8qyAtsJjSjvVBWzEug==} + dependencies: + json-buffer: 3.0.1 + dev: false + + /minimatch@3.1.2: + resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + dependencies: + brace-expansion: 1.1.11 + dev: false + + /minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + dev: false + + /once@1.4.0: + resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} + dependencies: + wrappy: 1.0.2 + dev: false + + /path-is-absolute@1.0.1: + resolution: {integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==} + engines: {node: '>=0.10.0'} + dev: false + + /path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + dev: false + + /path-parse@1.0.7: + resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + dev: false + + /rechoir@0.6.2: + resolution: {integrity: sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==} + engines: {node: '>= 0.10'} + dependencies: + resolve: 1.22.4 + dev: false + + /resolve@1.22.4: + resolution: {integrity: sha512-PXNdCiPqDqeUou+w1C2eTQbNfxKSuMxqTCuvlmmMsk1NWHL5fRrhY6Pl0qEYYc6+QqGClco1Qj8XnjPego4wfg==} + hasBin: 
true + dependencies: + is-core-module: 2.13.0 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + dev: false + + /rimraf@3.0.2: + resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} + hasBin: true + dependencies: + glob: 7.2.3 + dev: false + + /shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + dependencies: + shebang-regex: 3.0.0 + dev: false + + /shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + dev: false + + /shelljs@0.8.5: + resolution: {integrity: sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==} + engines: {node: '>=4'} + hasBin: true + dependencies: + glob: 7.2.3 + interpret: 1.4.0 + rechoir: 0.6.2 + dev: false + + /shx@0.3.4: + resolution: {integrity: sha512-N6A9MLVqjxZYcVn8hLmtneQWIJtp8IKzMP4eMnx+nqkvXoqinUPCbUFLp2UcWTEIUONhlk0ewxr/jaVGlc+J+g==} + engines: {node: '>=6'} + hasBin: true + dependencies: + minimist: 1.2.8 + shelljs: 0.8.5 + dev: false + + /supports-preserve-symlinks-flag@1.0.0: + resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} + engines: {node: '>= 0.4'} + dev: false + + /ts-pipe-compose@0.2.1: + resolution: {integrity: sha512-gf6ZldcfCDyNXPRiW3lQjEP1Z9rrUM/4Cn7BZbv3SdTA82zxWRP8OmLwvGR974uuENhGCFgFdN11z3n1Ofpprg==} + engines: {node: '>= 8.9.0'} + dev: false + + /tslib@2.6.2: + resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} + dev: false + + /utility-types@3.10.0: + resolution: {integrity: sha512-O11mqxmi7wMKCo6HKFt5AhO4BwY3VV68YU07tgxfz8zJTIxr4BpsezN49Ffwy9j3ZpwwJp4fkRwjRzq3uWE6Rg==} + engines: {node: '>= 4'} + dev: false + + /which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + dependencies: + isexe: 2.0.0 + dev: false + + /wrappy@1.0.2: + resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} + dev: false diff --git a/tasks/benchmark-install-against-revisions/src/main.rs b/tasks/benchmark-install-against-revisions/src/main.rs index 25303a610..b364b407f 100644 --- a/tasks/benchmark-install-against-revisions/src/main.rs +++ b/tasks/benchmark-install-against-revisions/src/main.rs @@ -6,11 +6,13 @@ mod work_env; #[tokio::main] async fn main() { let cli_args::CliArgs { + scenario, registry, repository, package_json, hyperfine_options, work_env, + with_pnpm, revisions, } = clap::Parser::parse(); let repository = std::fs::canonicalize(repository).expect("get absolute path to repository"); @@ -20,11 +22,19 @@ async fn main() { let work_env = std::fs::canonicalize(work_env).expect("get absolute path to work env"); verify::ensure_virtual_registry(®istry).await; verify::ensure_git_repo(&repository); + verify::validate_revision_list(&revisions); + verify::ensure_program("bash"); + verify::ensure_program("cargo"); + verify::ensure_program("git"); + verify::ensure_program("hyperfine"); + verify::ensure_program("pnpm"); work_env::WorkEnv { root: work_env, + with_pnpm, revisions, registry, repository, + scenario, hyperfine_options, package_json, } diff --git 
a/tasks/benchmark-install-against-revisions/src/verify.rs b/tasks/benchmark-install-against-revisions/src/verify.rs index b2c54de8a..3367eed52 100644 --- a/tasks/benchmark-install-against-revisions/src/verify.rs +++ b/tasks/benchmark-install-against-revisions/src/verify.rs @@ -1,4 +1,5 @@ use std::path::Path; +use which::which; pub async fn ensure_virtual_registry(registry: &str) { if let Err(error) = reqwest::Client::new().head(registry).send().await { @@ -14,3 +15,31 @@ pub fn ensure_git_repo(path: &Path) { assert!(path.join("Cargo.toml").is_file()); assert!(path.join("Cargo.lock").is_file()); } + +pub fn ensure_program(program: &str) { + match which(program) { + Ok(_) => (), + Err(which::Error::CannotFindBinaryPath) => panic!("Cannot find {program} in $PATH"), + Err(error) => panic!("{error}"), + } +} + +pub fn validate_revision_list(list: List) +where + List: IntoIterator, + List::Item: AsRef, +{ + for revision in list { + let revision = revision.as_ref(); + let throw = |reason: &str| { + eprintln!("Revision {revision:?} is invalid"); + panic!("{reason}"); + }; + if revision.starts_with('.') { + throw("Revision cannot start with a dot"); + } + if revision == "PNPM" { + throw("PNPM is a reserved name"); + } + } +} diff --git a/tasks/benchmark-install-against-revisions/src/work_env.rs b/tasks/benchmark-install-against-revisions/src/work_env.rs index be4bdaf2a..c251ce92b 100644 --- a/tasks/benchmark-install-against-revisions/src/work_env.rs +++ b/tasks/benchmark-install-against-revisions/src/work_env.rs @@ -1,6 +1,6 @@ use crate::{ - cli_args::HyperfineOptions, - fixtures::{INSTALL_SCRIPT, PACKAGE_JSON}, + cli_args::{BenchmarkScenario, HyperfineOptions}, + fixtures::PACKAGE_JSON, }; use itertools::Itertools; use os_display::Quotable; @@ -16,14 +16,19 @@ use std::{ #[derive(Debug)] pub struct WorkEnv { pub root: PathBuf, + pub with_pnpm: bool, pub revisions: Vec, pub registry: String, pub repository: PathBuf, + pub scenario: BenchmarkScenario, pub hyperfine_options: HyperfineOptions, pub package_json: Option, } impl WorkEnv { + const INIT_PROXY_CACHE: &str = ".init-proxy-cache"; + const PNPM: &str = "PNPM"; + fn root(&self) -> &'_ Path { &self.root } @@ -72,23 +77,26 @@ impl WorkEnv { } fn init(&self) { - const INIT_PROXY_CACHE: &str = ".init-proxy-cache"; - eprintln!("Initializing..."); - for revision in self.revisions().chain(iter::once(INIT_PROXY_CACHE)) { + let entries = self + .revisions() + .map(|revision| (revision, false)) + .chain(iter::once((WorkEnv::INIT_PROXY_CACHE, true))) + .chain(self.with_pnpm.then_some((WorkEnv::PNPM, true))); + for (revision, for_pnpm) in entries { eprintln!("Revision: {revision:?}"); let dir = self.revision_root(revision); fs::create_dir_all(&dir).expect("create directory for the revision"); create_package_json(&dir, self.package_json.as_deref()); - create_script(&self.revision_install_script(revision), INSTALL_SCRIPT); - create_npmrc(&dir, self.registry()); + create_install_script(&dir, self.scenario, for_pnpm); + create_npmrc(&dir, self.registry(), self.scenario); + may_create_lockfile(&dir, self.scenario); } eprintln!("Populating proxy registry cache..."); - Command::new("pnpm") - .current_dir(self.revision_root(INIT_PROXY_CACHE)) - .arg("install") - .pipe(executor("pnpm install")); + self.revision_install_script(WorkEnv::INIT_PROXY_CACHE) + .pipe(Command::new) + .pipe_mut(executor("install.bash")) } fn build(&self) { @@ -152,7 +160,7 @@ impl WorkEnv { self.hyperfine_options.append_to(&mut command); - for revision in self.revisions() { + for revision 
in self.revisions().chain(self.with_pnpm.then_some(WorkEnv::PNPM)) { command.arg("--command-name").arg(revision).arg(self.revision_install_script(revision)); } @@ -183,20 +191,43 @@ fn create_package_json(dir: &Path, src: Option<&Path>) { } } -fn create_npmrc(dir: &Path, registry: &str) { +fn create_npmrc(dir: &Path, registry: &str, scenario: BenchmarkScenario) { let path = dir.join(".npmrc"); let store_dir = dir.join("store-dir"); let store_dir = store_dir.to_str().expect("path to store-dir is valid UTF-8"); + eprintln!("Creating config file {path:?}..."); let mut file = File::create(path).expect("create .npmrc"); writeln!(file, "registry={registry}").unwrap(); writeln!(file, "store-dir={store_dir}").unwrap(); writeln!(file, "auto-install-peers=false").unwrap(); - writeln!(file, "lockfile=false").unwrap(); + writeln!(file, "ignore-scripts=true").unwrap(); + writeln!(file, "{}", scenario.npmrc_lockfile_setting()).unwrap(); +} + +fn may_create_lockfile(dir: &Path, scenario: BenchmarkScenario) { + if let Some(lockfile) = scenario.lockfile() { + let path = dir.join("pnpm-lock.yaml"); + fs::write(path, lockfile).expect("write pnpm-lock.yaml for the revision"); + } } -fn create_script(path: &Path, content: &str) { +fn create_install_script(dir: &Path, scenario: BenchmarkScenario, for_pnpm: bool) { + let path = dir.join("install.bash"); + eprintln!("Creating script {path:?}..."); - fs::write(path, content).expect("write content to script"); + let mut file = File::create(&path).expect("create install.bash"); + + writeln!(file, "#!/bin/bash").unwrap(); + writeln!(file, "set -o errexit -o nounset -o pipefail").unwrap(); + writeln!(file, r#"cd "$(dirname "$0")""#).unwrap(); + + let command = if for_pnpm { "pnpm" } else { "./pacquet/target/release/pacquet" }; + write!(file, "exec {command} install").unwrap(); + for arg in scenario.install_args() { + write!(file, " {arg}").unwrap(); + } + writeln!(file).unwrap(); + #[cfg(unix)] { use std::os::unix::fs::PermissionsExt;