diff --git a/Cargo.lock b/Cargo.lock index 2e7cb78718..e7874e1a05 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3755,7 +3755,7 @@ dependencies = [ "walkdir", "walrus", "wasm-bindgen-externref-xform", - "wasm-encoder 0.228.0", + "wasm-encoder 0.229.0", "wasm-opt", "wasm-split-cli", "wasmparser 0.226.0", @@ -3775,6 +3775,7 @@ version = "0.7.0-alpha.0" dependencies = [ "anyhow", "browserslist-rs 0.16.0", + "built", "codemap", "const-serialize", "grass", @@ -12906,9 +12907,9 @@ dependencies = [ [[package]] name = "sqlx" -version = "0.8.5" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c3a85280daca669cfd3bcb68a337882a8bc57ec882f72c5d13a430613a738e" +checksum = "14e22987355fbf8cfb813a0cf8cd97b1b4ec834b94dbd759a9e8679d41fabe83" dependencies = [ "sqlx-core", "sqlx-macros", @@ -12919,9 +12920,9 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.8.5" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f743f2a3cea30a58cd479013f75550e879009e3a02f616f18ca699335aa248c3" +checksum = "55c4720d7d4cd3d5b00f61d03751c685ad09c33ae8290c8a2c11335e0604300b" dependencies = [ "base64 0.22.1", "bigdecimal", @@ -12966,9 +12967,9 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.8.5" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4200e0fde19834956d4252347c12a083bdcb237d7a1a1446bffd8768417dce" +checksum = "175147fcb75f353ac7675509bc58abb2cb291caf0fd24a3623b8f7e3eb0a754b" dependencies = [ "proc-macro2", "quote", @@ -12979,9 +12980,9 @@ dependencies = [ [[package]] name = "sqlx-macros-core" -version = "0.8.5" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "882ceaa29cade31beca7129b6beeb05737f44f82dbe2a9806ecea5a7093d00b7" +checksum = "1cde983058e53bfa75998e1982086c5efe3c370f3250bf0357e344fa3352e32b" dependencies = [ "dotenvy", "either", @@ -13005,9 +13006,9 @@ dependencies 
= [ [[package]] name = "sqlx-mysql" -version = "0.8.5" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0afdd3aa7a629683c2d750c2df343025545087081ab5942593a5288855b1b7a7" +checksum = "847d2e5393a4f39e47e4f36cab419709bc2b83cbe4223c60e86e1471655be333" dependencies = [ "atoi", "base64 0.22.1", @@ -13052,9 +13053,9 @@ dependencies = [ [[package]] name = "sqlx-postgres" -version = "0.8.5" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0bedbe1bbb5e2615ef347a5e9d8cd7680fb63e77d9dafc0f29be15e53f1ebe6" +checksum = "cc35947a541b9e0a2e3d85da444f1c4137c13040267141b208395a0d0ca4659f" dependencies = [ "atoi", "base64 0.22.1", @@ -13099,9 +13100,9 @@ dependencies = [ [[package]] name = "sqlx-sqlite" -version = "0.8.5" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c26083e9a520e8eb87a06b12347679b142dc2ea29e6e409f805644a7a979a5bc" +checksum = "6c48291dac4e5ed32da0927a0b981788be65674aeb62666d19873ab4289febde" dependencies = [ "atoi", "chrono", @@ -15838,12 +15839,12 @@ dependencies = [ [[package]] name = "wasm-encoder" -version = "0.228.0" +version = "0.229.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05d30290541f2d4242a162bbda76b8f2d8b1ac59eab3568ed6f2327d52c9b2c4" +checksum = "38ba1d491ecacb085a2552025c10a675a6fddcbd03b1fc9b36c536010ce265d2" dependencies = [ "leb128fmt", - "wasmparser 0.228.0", + "wasmparser 0.229.0", ] [[package]] @@ -16000,9 +16001,9 @@ dependencies = [ [[package]] name = "wasmparser" -version = "0.228.0" +version = "0.229.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4abf1132c1fdf747d56bbc1bb52152400c70f336870f968b85e89ea422198ae3" +checksum = "0cc3b1f053f5d41aa55640a1fa9b6d1b8a9e4418d118ce308d20e24ff3575a8c" dependencies = [ "bitflags 2.9.0", "indexmap 2.9.0", diff --git a/packages/asset-resolver/src/lib.rs b/packages/asset-resolver/src/lib.rs 
index f61c6edc5b..ca1759dec9 100644 --- a/packages/asset-resolver/src/lib.rs +++ b/packages/asset-resolver/src/lib.rs @@ -72,7 +72,7 @@ pub fn serve_asset_from_raw_path(path: &str) -> Result>, AssetS /// - [ ] Linux (rpm) /// - [ ] Linux (deb) /// - [ ] Android -#[allow(unused)] +#[allow(unreachable_code)] fn get_asset_root() -> PathBuf { let cur_exe = std::env::current_exe().unwrap(); diff --git a/packages/cli-opt/Cargo.toml b/packages/cli-opt/Cargo.toml index 06c3210194..dea1f9c871 100644 --- a/packages/cli-opt/Cargo.toml +++ b/packages/cli-opt/Cargo.toml @@ -67,3 +67,6 @@ swc_parallel = { version = "=1.0.1", default-features = false } swc_timer = { version = "=1.0.0", default-features = false } swc_visit = { version = "=2.0.0", default-features = false } browserslist-rs = { version = "=0.16.0" } + +[build-dependencies] +built = { version = "0.7.5", features = ["git2"] } diff --git a/packages/cli-opt/build.rs b/packages/cli-opt/build.rs new file mode 100644 index 0000000000..d8f91cb913 --- /dev/null +++ b/packages/cli-opt/build.rs @@ -0,0 +1,3 @@ +fn main() { + built::write_built_file().expect("Failed to acquire build-time information"); +} diff --git a/packages/cli-opt/src/build_info.rs b/packages/cli-opt/src/build_info.rs new file mode 100644 index 0000000000..17332cac8e --- /dev/null +++ b/packages/cli-opt/src/build_info.rs @@ -0,0 +1,10 @@ +// The file has been placed there by the build script. 
+include!(concat!(env!("OUT_DIR"), "/built.rs")); + +pub(crate) fn version() -> String { + format!( + "{} ({})", + PKG_VERSION, + GIT_COMMIT_HASH_SHORT.unwrap_or("was built without git repository") + ) +} diff --git a/packages/cli-opt/src/css.rs b/packages/cli-opt/src/css.rs index 0af56e784e..b7c2503893 100644 --- a/packages/cli-opt/src/css.rs +++ b/packages/cli-opt/src/css.rs @@ -1,4 +1,4 @@ -use std::path::Path; +use std::{hash::Hasher, path::Path}; use anyhow::{anyhow, Context}; use codemap::SpanLoc; @@ -146,12 +146,11 @@ pub(crate) fn minify_css(css: &str) -> anyhow::Result { Ok(res.code) } -/// Process an scss/sass file into css. -pub(crate) fn process_scss( +/// Compile scss with grass +pub(crate) fn compile_scss( scss_options: &CssAssetOptions, source: &Path, - output_path: &Path, -) -> anyhow::Result<()> { +) -> anyhow::Result { let style = match scss_options.minified() { true => OutputStyle::Compressed, false => OutputStyle::Expanded, @@ -162,7 +161,18 @@ pub(crate) fn process_scss( .quiet(false) .logger(&ScssLogger {}); - let css = grass::from_path(source, &options)?; + let css = grass::from_path(source, &options) + .with_context(|| format!("Failed to compile scss file: {}", source.display()))?; + Ok(css) +} + +/// Process an scss/sass file into css. +pub(crate) fn process_scss( + scss_options: &CssAssetOptions, + source: &Path, + output_path: &Path, +) -> anyhow::Result<()> { + let css = compile_scss(scss_options, source)?; let minified = minify_css(&css)?; std::fs::write(output_path, minified).with_context(|| { @@ -199,3 +209,19 @@ impl grass::Logger for ScssLogger { ); } } + +/// Hash the inputs to the scss file +pub(crate) fn hash_scss( + scss_options: &CssAssetOptions, + source: &Path, + hasher: &mut impl Hasher, +) -> anyhow::Result<()> { + // Grass doesn't expose the ast for us to traverse the imports in the file. 
Instead of parsing scss ourselves + // we just hash the expanded version of the file for now + let css = compile_scss(scss_options, source)?; + + // Hash the compiled css + hasher.write(css.as_bytes()); + + Ok(()) +} diff --git a/packages/cli-opt/src/file.rs b/packages/cli-opt/src/file.rs index b1fce9f26f..687a074039 100644 --- a/packages/cli-opt/src/file.rs +++ b/packages/cli-opt/src/file.rs @@ -1,4 +1,5 @@ use anyhow::Context; +use manganis::{CssModuleAssetOptions, FolderAssetOptions}; use manganis_core::{AssetOptions, CssAssetOptions, ImageAssetOptions, JsAssetOptions}; use std::path::Path; @@ -33,7 +34,7 @@ pub(crate) fn process_file_to_with_options( } if let Some(parent) = output_path.parent() { if !parent.exists() { - std::fs::create_dir_all(parent)?; + std::fs::create_dir_all(parent).context("Failed to create directory")?; } } @@ -47,63 +48,94 @@ pub(crate) fn process_file_to_with_options( .unwrap_or_default() .to_string_lossy() )); + let resolved_options = resolve_asset_options(source, options); - match options { - AssetOptions::Unknown => match source.extension().map(|e| e.to_string_lossy()).as_deref() { - Some("css") => { - process_css(&CssAssetOptions::new(), source, &temp_path)?; - } - Some("scss" | "sass") => { - process_scss(&CssAssetOptions::new(), source, &temp_path)?; - } - Some("js") => { - process_js(&JsAssetOptions::new(), source, &temp_path, !in_folder)?; - } - Some("json") => { - process_json(source, &temp_path)?; - } - Some("jpg" | "jpeg" | "png" | "webp" | "avif") => { - process_image(&ImageAssetOptions::new(), source, &temp_path)?; - } - Some(_) | None => { - if source.is_dir() { - process_folder(source, &temp_path)?; - } else { - let source_file = std::fs::File::open(source)?; - let mut reader = std::io::BufReader::new(source_file); - let output_file = std::fs::File::create(&temp_path)?; - let mut writer = std::io::BufWriter::new(output_file); - std::io::copy(&mut reader, &mut writer).with_context(|| { - format!( - "Failed to write file to 
output location: {}", - temp_path.display() - ) - })?; - } - } - }, - AssetOptions::Css(options) => { + match &resolved_options { + ResolvedAssetType::Css(options) => { process_css(options, source, &temp_path)?; } - AssetOptions::CssModule(options) => { + ResolvedAssetType::CssModule(options) => { process_css_module(options, source, output_path, &temp_path)?; } - AssetOptions::Js(options) => { + ResolvedAssetType::Scss(options) => { + process_scss(options, source, &temp_path)?; + } + ResolvedAssetType::Js(options) => { process_js(options, source, &temp_path, !in_folder)?; } - AssetOptions::Image(options) => { + ResolvedAssetType::Image(options) => { process_image(options, source, &temp_path)?; } - AssetOptions::Folder(_) => { + ResolvedAssetType::Json => { + process_json(source, &temp_path)?; + } + ResolvedAssetType::Folder(_) => { process_folder(source, &temp_path)?; } - _ => { - tracing::warn!("Unknown asset options: {:?}", options); + ResolvedAssetType::File => { + let source_file = std::fs::File::open(source)?; + let mut reader = std::io::BufReader::new(source_file); + let output_file = std::fs::File::create(&temp_path)?; + let mut writer = std::io::BufWriter::new(output_file); + std::io::copy(&mut reader, &mut writer).with_context(|| { + format!( + "Failed to write file to output location: {}", + temp_path.display() + ) + })?; } } // If everything was successful, rename the temp file to the final output path - std::fs::rename(temp_path, output_path)?; + std::fs::rename(temp_path, output_path).context("Failed to rename output file")?; Ok(()) } + +pub(crate) enum ResolvedAssetType { + /// An image asset + Image(ImageAssetOptions), + /// A css asset + Css(CssAssetOptions), + /// A css module asset + CssModule(CssModuleAssetOptions), + /// A SCSS asset + Scss(CssAssetOptions), + /// A javascript asset + Js(JsAssetOptions), + /// A json asset + Json, + /// A folder asset + Folder(FolderAssetOptions), + /// A generic file + File, +} + +pub(crate) fn 
resolve_asset_options(source: &Path, options: &AssetOptions) -> ResolvedAssetType { + match options { + AssetOptions::Image(image) => ResolvedAssetType::Image(*image), + AssetOptions::Css(css) => ResolvedAssetType::Css(*css), + AssetOptions::CssModule(css) => ResolvedAssetType::CssModule(*css), + AssetOptions::Js(js) => ResolvedAssetType::Js(*js), + AssetOptions::Folder(folder) => ResolvedAssetType::Folder(*folder), + AssetOptions::Unknown => resolve_unknown_asset_options(source), + _ => { + tracing::warn!("Unknown asset options... you may need to update the Dioxus CLI. Defaulting to a generic file: {:?}", options); + resolve_unknown_asset_options(source) + } + } +} + +fn resolve_unknown_asset_options(source: &Path) -> ResolvedAssetType { + match source.extension().map(|e| e.to_string_lossy()).as_deref() { + Some("scss" | "sass") => ResolvedAssetType::Scss(CssAssetOptions::new()), + Some("css") => ResolvedAssetType::Css(CssAssetOptions::new()), + Some("js") => ResolvedAssetType::Js(JsAssetOptions::new()), + Some("json") => ResolvedAssetType::Json, + Some("jpg" | "jpeg" | "png" | "webp" | "avif") => { + ResolvedAssetType::Image(ImageAssetOptions::new()) + } + _ if source.is_dir() => ResolvedAssetType::Folder(FolderAssetOptions::new()), + _ => ResolvedAssetType::File, + } +} diff --git a/packages/cli-opt/src/hash.rs b/packages/cli-opt/src/hash.rs new file mode 100644 index 0000000000..7b888153cd --- /dev/null +++ b/packages/cli-opt/src/hash.rs @@ -0,0 +1,162 @@ +//! Utilities for creating hashed paths to assets in Manganis. This module defines [`AssetHash`] which is used to create a hashed path to an asset in both the CLI and the macro. + +use std::{ + hash::{Hash, Hasher}, + io::Read, + path::{Path, PathBuf}, +}; + +use crate::{ + css::hash_scss, + file::{resolve_asset_options, ResolvedAssetType}, + js::hash_js, +}; +use manganis::{AssetOptions, BundledAsset}; + +/// The opaque hash type manganis uses to identify assets. 
Each time an asset or asset options change, this hash will +/// change. This hash is included in the URL of the bundled asset for cache busting. +struct AssetHash { + /// We use a wrapper type here to hide the exact size of the hash so we can switch to a sha hash in a minor version bump + hash: [u8; 8], +} + +impl AssetHash { + /// Create a new asset hash + const fn new(hash: u64) -> Self { + Self { + hash: hash.to_le_bytes(), + } + } + + /// Get the hash bytes + pub const fn bytes(&self) -> &[u8] { + &self.hash + } + + /// Create a new asset hash for a file. The input file to this function should be fully resolved + pub fn hash_file_contents( + options: &AssetOptions, + file_path: impl AsRef, + ) -> anyhow::Result { + hash_file(options, file_path.as_ref()) + } +} + +/// Process a specific file asset with the given options reading from the source and writing to the output path +fn hash_file(options: &AssetOptions, source: &Path) -> anyhow::Result { + // Create a hasher + let mut hash = std::collections::hash_map::DefaultHasher::new(); + options.hash(&mut hash); + // Hash the version of CLI opt + hash.write(crate::build_info::version().as_bytes()); + hash_file_with_options(options, source, &mut hash, false)?; + + let hash = hash.finish(); + Ok(AssetHash::new(hash)) +} + +/// Process a specific file asset with additional options +pub(crate) fn hash_file_with_options( + options: &AssetOptions, + source: &Path, + hasher: &mut impl Hasher, + in_folder: bool, +) -> anyhow::Result<()> { + let resolved_options = resolve_asset_options(source, options); + + match &resolved_options { + // Scss and JS can import files during the bundling process. 
We need to hash + // both the files themselves and any imports they have + ResolvedAssetType::Scss(options) => { + hash_scss(options, source, hasher)?; + } + ResolvedAssetType::Js(options) => { + hash_js(options, source, hasher, !in_folder)?; + } + + // Otherwise, we can just hash the file contents + ResolvedAssetType::CssModule(_) + | ResolvedAssetType::Css(_) + | ResolvedAssetType::Image(_) + | ResolvedAssetType::Json + | ResolvedAssetType::File => { + hash_file_contents(source, hasher)?; + } + // Or the folder contents recursively + ResolvedAssetType::Folder(_) => { + let files = std::fs::read_dir(source)?; + for file in files.flatten() { + let path = file.path(); + hash_file_with_options(options, &path, hasher, true)?; + } + } + } + + Ok(()) +} + +pub(crate) fn hash_file_contents(source: &Path, hasher: &mut impl Hasher) -> anyhow::Result<()> { + // Otherwise, open the file to get its contents + let mut file = std::fs::File::open(source)?; + + // We add a hash to the end of the file so it is invalidated when the bundled version of the file changes + // The hash includes the file contents, the options, and the version of manganis. 
From the macro, we just + // know the file contents, so we only include that hash + let mut buffer = [0; 8192]; + loop { + let read = file.read(&mut buffer)?; + if read == 0 { + break; + } + hasher.write(&buffer[..read]); + } + Ok(()) +} + +/// Add a hash to the asset, or log an error if it fails +pub fn add_hash_to_asset(asset: &mut BundledAsset) { + let source = asset.absolute_source_path(); + match AssetHash::hash_file_contents(asset.options(), source) { + Ok(hash) => { + let options = *asset.options(); + + // Set the bundled path to the source path with the hash appended before the extension + let source_path = PathBuf::from(source); + let Some(file_name) = source_path.file_name() else { + tracing::error!("Failed to get file name from path: {source}"); + return; + }; + // The output extension path is the extension set by the options + // or the extension of the source file if we don't recognize the file + let mut ext = asset.options().extension().map(Into::into).or_else(|| { + source_path + .extension() + .map(|ext| ext.to_string_lossy().to_string()) + }); + + // Rewrite scss as css + if let Some("scss" | "sass") = ext.as_deref() { + ext = Some("css".to_string()); + } + + let hash = hash.bytes(); + let hash = hash + .iter() + .map(|byte| format!("{byte:x}")) + .collect::(); + let file_stem = source_path.file_stem().unwrap_or(file_name); + let mut bundled_path = PathBuf::from(format!("{}-{hash}", file_stem.to_string_lossy())); + + if let Some(ext) = ext { + bundled_path.set_extension(ext); + } + + let bundled_path = bundled_path.to_string_lossy().to_string(); + + *asset = BundledAsset::new(source, &bundled_path, options, asset.link_section()); + } + Err(err) => { + tracing::error!("Failed to hash asset: {err}"); + } + } +} diff --git a/packages/cli-opt/src/image/mod.rs b/packages/cli-opt/src/image/mod.rs index 4ab0087ae5..515079f643 100644 --- a/packages/cli-opt/src/image/mod.rs +++ b/packages/cli-opt/src/image/mod.rs @@ -14,7 +14,8 @@ pub(crate) fn 
process_image( output_path: &Path, ) -> anyhow::Result<()> { let mut image = image::ImageReader::new(std::io::Cursor::new(&*std::fs::read(source)?)) - .with_guessed_format()? + .with_guessed_format() + .context("Failed to guess image format")? .decode(); if let Ok(image) = &mut image { @@ -25,10 +26,10 @@ pub(crate) fn process_image( match (image, image_options.format()) { (image, ImageFormat::Png) => { - compress_png(image?, output_path); + compress_png(image.context("Failed to decode image")?, output_path); } (image, ImageFormat::Jpg) => { - compress_jpg(image?, output_path)?; + compress_jpg(image.context("Failed to decode image")?, output_path)?; } (Ok(image), ImageFormat::Avif) => { if let Err(error) = image.save(output_path) { @@ -41,20 +42,30 @@ pub(crate) fn process_image( } } (Ok(image), _) => { - image.save(output_path)?; + image.save(output_path).with_context(|| { + format!( + "Failed to save image (from {}) with path {}", + source.display(), + output_path.display() + ) + })?; } // If we can't decode the image or it is of an unknown type, we just copy the file _ => { - let source_file = std::fs::File::open(source)?; + let source_file = std::fs::File::open(source).context("Failed to open source file")?; let mut reader = std::io::BufReader::new(source_file); - let output_file = std::fs::File::create(output_path)?; - let mut writer = std::io::BufWriter::new(output_file); - std::io::copy(&mut reader, &mut writer).with_context(|| { - format!( - "Failed to write image to output location: {}", - output_path.display() - ) + let output_file = std::fs::File::create(output_path).with_context(|| { + format!("Failed to create output file: {}", output_path.display()) })?; + let mut writer = std::io::BufWriter::new(output_file); + std::io::copy(&mut reader, &mut writer) + .with_context(|| { + format!( + "Failed to write image to output location: {}", + output_path.display() + ) + }) + .context("Failed to copy image data")?; } } diff --git a/packages/cli-opt/src/js.rs 
b/packages/cli-opt/src/js.rs index 1e79067a45..2aacfb5268 100644 --- a/packages/cli-opt/src/js.rs +++ b/packages/cli-opt/src/js.rs @@ -1,3 +1,4 @@ +use std::hash::Hasher; use std::path::Path; use std::path::PathBuf; @@ -24,6 +25,8 @@ use swc_ecma_codegen::text_writer::JsWriter; use swc_ecma_loader::{resolvers::node::NodeModulesResolver, TargetEnv}; use swc_ecma_parser::{parse_file_as_module, Syntax}; +use crate::hash::hash_file_contents; + struct TracingEmitter; impl Emitter for TracingEmitter { @@ -43,30 +46,32 @@ impl Emitter for TracingEmitter { } } +/// Run a closure with the swc globals and handler set up +fn inside_handler(f: impl FnOnce(&Globals, Lrc) -> O) -> O { + let globals = Globals::new(); + let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); + let handler = Handler::with_emitter_and_flags(Box::new(TracingEmitter), Default::default()); + GLOBALS.set(&globals, || HANDLER.set(&handler, || f(&globals, cm))) +} + fn bundle_js_to_writer( file: PathBuf, bundle: bool, minify: bool, write_to: &mut impl std::io::Write, ) -> anyhow::Result<()> { - let globals = Globals::new(); - let handler = Handler::with_emitter_and_flags(Box::new(TracingEmitter), Default::default()); - GLOBALS.set(&globals, || { - HANDLER.set(&handler, || { - bundle_js_to_writer_inside_handler(&globals, file, bundle, minify, write_to) - }) + inside_handler(|globals, cm| { + bundle_js_to_writer_inside_handler(globals, cm, file, bundle, minify, write_to) }) } -fn bundle_js_to_writer_inside_handler( +fn resolve_js_inside_handler( globals: &Globals, file: PathBuf, bundle: bool, - minify: bool, - write_to: &mut impl std::io::Write, -) -> anyhow::Result<()> { - let cm = Lrc::new(SourceMap::new(FilePathMapping::empty())); - let mut module = if bundle { + cm: &Lrc, +) -> anyhow::Result { + if bundle { let node_resolver = NodeModulesResolver::new(TargetEnv::Browser, Default::default(), true); let mut bundler = Bundler::new( globals, @@ -89,7 +94,7 @@ fn bundle_js_to_writer_inside_handler( let 
bundle = bundles .pop() .ok_or_else(|| anyhow::anyhow!("swc did not output any bundles"))?; - bundle.module + Ok(bundle.module) } else { let fm = cm.load_file(Path::new(&file)).expect("Failed to load file"); @@ -108,8 +113,19 @@ fn bundle_js_to_writer_inside_handler( error.cancel(); anyhow::anyhow!("{}", error.message()) }) - })? - }; + }) + } +} + +fn bundle_js_to_writer_inside_handler( + globals: &Globals, + cm: Lrc, + file: PathBuf, + bundle: bool, + minify: bool, + write_to: &mut impl std::io::Write, +) -> anyhow::Result<()> { + let mut module = resolve_js_inside_handler(globals, file, bundle, &cm)?; if minify { module = swc_ecma_minifier::optimize( @@ -246,3 +262,34 @@ pub(crate) fn process_js( Ok(()) } + +fn hash_js_module(file: PathBuf, hasher: &mut impl Hasher, bundle: bool) -> anyhow::Result<()> { + inside_handler(|globals, cm| { + _ = resolve_js_inside_handler(globals, file, bundle, &cm)?; + + for file in cm.files().iter() { + let hash = file.src_hash; + hasher.write(&hash.to_le_bytes()); + } + + Ok(()) + }) +} + +pub(crate) fn hash_js( + js_options: &JsAssetOptions, + source: &Path, + hasher: &mut impl Hasher, + bundle: bool, +) -> anyhow::Result<()> { + if js_options.minified() { + if let Err(err) = hash_js_module(source.to_path_buf(), hasher, bundle) { + tracing::error!("Failed to minify js. 
Falling back to non-minified: {err}"); + hash_file_contents(source, hasher)?; + } + } else { + hash_file_contents(source, hasher)?; + } + + Ok(()) +} diff --git a/packages/cli-opt/src/lib.rs b/packages/cli-opt/src/lib.rs index 97c934a747..82d7be6af4 100644 --- a/packages/cli-opt/src/lib.rs +++ b/packages/cli-opt/src/lib.rs @@ -1,19 +1,23 @@ use anyhow::Context; -use manganis_core::linker::LinkSection; +use manganis::AssetOptions; use manganis_core::BundledAsset; -use object::{read::archive::ArchiveFile, File as ObjectFile, Object, ObjectSection}; +use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; use serde::{Deserialize, Serialize}; -use std::path::Path; -use std::{collections::HashMap, path::PathBuf}; +use std::collections::{HashMap, HashSet}; +use std::path::{Path, PathBuf}; +use std::sync::{Arc, RwLock}; +mod build_info; mod css; mod file; mod folder; +mod hash; mod image; mod js; mod json; pub use file::process_file_to; +pub use hash::add_hash_to_asset; /// A manifest of all assets collected from dependencies /// @@ -21,7 +25,7 @@ pub use file::process_file_to; #[derive(Debug, PartialEq, Default, Clone, Serialize, Deserialize)] pub struct AssetManifest { /// Map of bundled asset name to the asset itself - pub assets: HashMap, + assets: HashMap>, } impl AssetManifest { @@ -31,100 +35,82 @@ impl AssetManifest { asset_path: &Path, options: manganis::AssetOptions, ) -> anyhow::Result { - let hash = manganis_core::hash::AssetHash::hash_file_contents(asset_path) - .context("Failed to hash file")?; - let output_path_str = asset_path.to_str().ok_or(anyhow::anyhow!( "Failed to convert wasm bindgen output path to string" ))?; - let bundled_asset = - manganis::macro_helpers::create_bundled_asset(output_path_str, hash.bytes(), options); + let mut bundled_asset = + manganis::macro_helpers::create_bundled_asset(output_path_str, options, ""); + add_hash_to_asset(&mut bundled_asset); - self.assets.insert(asset_path.into(), bundled_asset); + self.assets + 
.entry(asset_path.to_path_buf()) + .or_default() + .insert(bundled_asset); Ok(bundled_asset) } - #[allow(dead_code)] - pub fn load_from_file(path: &Path) -> anyhow::Result { - let src = std::fs::read_to_string(path)?; - - serde_json::from_str(&src) - .with_context(|| format!("Failed to parse asset manifest from {path:?}\n{src}")) + /// Insert an existing bundled asset to the manifest + pub fn insert_asset(&mut self, asset: BundledAsset) { + let asset_path = asset.absolute_source_path(); + self.assets + .entry(asset_path.into()) + .or_default() + .insert(asset); } - /// Fill this manifest with a file object/rlib files, typically extracted from the linker intercepted - pub fn add_from_object_path(&mut self, path: &Path) -> anyhow::Result<()> { - let data = std::fs::read(path)?; - - match path.extension().and_then(|ext| ext.to_str()) { - // Parse an rlib as a collection of objects - Some("rlib") => { - if let Ok(archive) = object::read::archive::ArchiveFile::parse(&*data) { - self.add_from_archive_file(&archive, &data)?; - } - } - _ => { - if let Ok(object) = object::File::parse(&*data) { - self.add_from_object_file(&object)?; - } - } - } - - Ok(()) + /// Get any assets that are tied to a specific source file + pub fn get_assets_for_source(&self, path: &Path) -> Option<&HashSet> { + self.assets.get(path) } - /// Fill this manifest from an rlib / ar file that contains many object files and their entries - fn add_from_archive_file(&mut self, archive: &ArchiveFile, data: &[u8]) -> object::Result<()> { - // Look through each archive member for object files. - // Read the archive member's binary data (we know it's an object file) - // And parse it with the normal `object::File::parse` to find the manganis string. - for member in archive.members() { - let member = member?; - let name = String::from_utf8_lossy(member.name()).to_string(); - - // Check if the archive member is an object file and parse it. 
- if name.ends_with(".o") { - let data = member.data(data)?; - let object = object::File::parse(data)?; - _ = self.add_from_object_file(&object); - } - } + /// Check if the manifest contains a specific asset + pub fn contains(&self, asset: &BundledAsset) -> bool { + self.assets + .get(&PathBuf::from(asset.absolute_source_path())) + .is_some_and(|assets| assets.contains(asset)) + } - Ok(()) + /// Iterate over all the assets in the manifest + pub fn assets(&self) -> impl Iterator { + self.assets.values().flat_map(|assets| assets.iter()) } - /// Fill this manifest with whatever tables might come from the object file - fn add_from_object_file(&mut self, obj: &ObjectFile) -> anyhow::Result<()> { - for section in obj.sections() { - let Ok(section_name) = section.name() else { - continue; - }; + pub fn load_from_file(path: &Path) -> anyhow::Result { + let src = std::fs::read_to_string(path)?; - // Check if the link section matches the asset section for one of the platforms we support. This may not be the current platform if the user is cross compiling - let matches = LinkSection::ALL - .iter() - .any(|x| x.link_section == section_name); + serde_json::from_str(&src) + .with_context(|| format!("Failed to parse asset manifest from {path:?}\n{src}")) + } +} - if !matches { - continue; +/// Optimize a list of assets in parallel +pub fn optimize_all_assets( + assets_to_transfer: Vec<(PathBuf, PathBuf, AssetOptions)>, + on_optimization_start: impl FnMut(&Path, &Path, &AssetOptions) + Sync + Send, + on_optimization_end: impl FnMut(&Path, &Path, &AssetOptions) + Sync + Send, +) -> anyhow::Result<()> { + let on_optimization_start = Arc::new(RwLock::new(on_optimization_start)); + let on_optimization_end = Arc::new(RwLock::new(on_optimization_end)); + assets_to_transfer + .par_iter() + .try_for_each(|(from, to, options)| { + { + let mut on_optimization_start = on_optimization_start.write().unwrap(); + on_optimization_start(from, to, options); } - let bytes = section - 
.uncompressed_data() - .context("Could not read uncompressed data from object file")?; + let res = process_file_to(options, from, to); + if let Err(err) = res.as_ref() { + tracing::error!("Failed to copy asset {from:?}: {err}"); + } - let mut buffer = const_serialize::ConstReadBuffer::new(&bytes); - while let Some((remaining_buffer, asset)) = - const_serialize::deserialize_const!(BundledAsset, buffer) { - self.assets - .insert(asset.absolute_source_path().into(), asset); - buffer = remaining_buffer; + let mut on_optimization_end = on_optimization_end.write().unwrap(); + on_optimization_end(from, to, options); } - } - Ok(()) - } + res.map(|_| ()) + }) } diff --git a/packages/cli/Cargo.toml b/packages/cli/Cargo.toml index f17a46bddf..ea5a2ec968 100644 --- a/packages/cli/Cargo.toml +++ b/packages/cli/Cargo.toml @@ -50,7 +50,6 @@ hyper-rustls = { workspace = true } rustls = { workspace = true } rayon = { workspace = true } futures-channel = { workspace = true } -target-lexicon = { version = "0.13.2", features = ["serde", "serde_support"] } krates = { workspace = true } regex = "1.11.1" console = "0.15.11" @@ -61,7 +60,12 @@ axum-server = { workspace = true, features = ["tls-rustls"] } axum-extra = { workspace = true, features = ["typed-header"] } tower-http = { workspace = true, features = ["full"] } proc-macro2 = { workspace = true, features = ["span-locations"] } -syn = { workspace = true, features = ["full", "extra-traits", "visit", "visit-mut"] } +syn = { workspace = true, features = [ + "full", + "extra-traits", + "visit", + "visit-mut", +] } headers = "0.4.0" walkdir = "2" @@ -109,6 +113,8 @@ log = { version = "0.4", features = ["max_level_off", "release_max_level_off"] } tempfile = "3.13" manganis = { workspace = true } manganis-core = { workspace = true } +target-lexicon = { version = "0.13.2", features = ["serde", "serde_support"] } +wasm-encoder = "0.229.0" # Extracting data from an executable object = { workspace = true, features = ["all"] } @@ -131,7 +137,6 
@@ local-ip-address = "0.6.3" dircpy = "0.3.19" plist = "1.7.0" memoize = "0.5.1" -wasm-encoder = "0.228.0" backtrace = "0.3.74" ar = "0.9.0" wasm-bindgen-externref-xform = "0.2.100" diff --git a/packages/cli/src/build/builder.rs b/packages/cli/src/build/builder.rs index 99eab23c40..8dbb126ae3 100644 --- a/packages/cli/src/build/builder.rs +++ b/packages/cli/src/build/builder.rs @@ -564,16 +564,15 @@ impl AppBuilder { let original_artifacts = self.artifacts.as_ref().unwrap(); let asset_dir = self.build.asset_dir(); - for (k, bundled) in res.assets.assets.iter() { - let k = dunce::canonicalize(k)?; - if original_artifacts.assets.assets.contains_key(k.as_path()) { + for bundled in res.assets.assets() { + if original_artifacts.assets.contains(bundled) { continue; } + let from = dunce::canonicalize(PathBuf::from(bundled.absolute_source_path()))?; - let from = k.clone(); let to = asset_dir.join(bundled.bundled_path()); - tracing::debug!("Copying asset from patch: {}", k.display()); + tracing::debug!("Copying asset from patch: {}", from.display()); if let Err(e) = dioxus_cli_opt::process_file_to(bundled.options(), &from, &to) { tracing::error!("Failed to copy asset: {e}"); continue; @@ -581,13 +580,8 @@ impl AppBuilder { // If the emulator is android, we need to copy the asset to the device with `adb push asset /data/local/tmp/dx/assets/filename.ext` if self.build.platform == Platform::Android { - let changed_file = dunce::canonicalize(k).inspect_err(|e| { - tracing::debug!("Failed to canonicalize hotreloaded asset: {e}") - })?; let bundled_name = PathBuf::from(bundled.bundled_path()); - _ = self - .copy_file_to_android_tmp(&changed_file, &bundled_name) - .await; + _ = self.copy_file_to_android_tmp(&from, &bundled_name).await; } } @@ -646,10 +640,13 @@ impl AppBuilder { /// dir that the system simulator might be providing. We know this is the case for ios simulators /// and haven't yet checked for android. 
/// - /// This will return the bundled name of the asset such that we can send it to the clients letting + /// This will return the bundled name of the assets such that we can send it to the clients letting /// them know what to reload. It's not super important that this is robust since most clients will /// kick all stylsheets without necessarily checking the name. - pub(crate) async fn hotreload_bundled_asset(&self, changed_file: &PathBuf) -> Option { + pub(crate) async fn hotreload_bundled_asset( + &self, + changed_file: &PathBuf, + ) -> Option> { let artifacts = self.artifacts.as_ref()?; // Use the build dir if there's no runtime asset dir as the override. For the case of ios apps, @@ -665,32 +662,36 @@ impl AppBuilder { .ok()?; // The asset might've been renamed thanks to the manifest, let's attempt to reload that too - let resource = artifacts.assets.assets.get(&changed_file)?; - let output_path = asset_dir.join(resource.bundled_path()); - - tracing::debug!("Hotreloading asset {changed_file:?} in target {asset_dir:?}"); - - // Remove the old asset if it exists - _ = std::fs::remove_file(&output_path); - - // And then process the asset with the options into the **old** asset location. If we recompiled, - // the asset would be in a new location because the contents and hash have changed. Since we are - // hotreloading, we need to use the old asset location it was originally written to. 
- let options = *resource.options(); - let res = process_file_to(&options, &changed_file, &output_path); - let bundled_name = PathBuf::from(resource.bundled_path()); - if let Err(e) = res { - tracing::debug!("Failed to hotreload asset {e}"); - } + let resources = artifacts.assets.get_assets_for_source(&changed_file)?; + let mut bundled_names = Vec::new(); + for resource in resources { + let output_path = asset_dir.join(resource.bundled_path()); + + tracing::debug!("Hotreloading asset {changed_file:?} in target {asset_dir:?}"); + + // Remove the old asset if it exists + _ = std::fs::remove_file(&output_path); + + // And then process the asset with the options into the **old** asset location. If we recompiled, + // the asset would be in a new location because the contents and hash have changed. Since we are + // hotreloading, we need to use the old asset location it was originally written to. + let options = *resource.options(); + let res = process_file_to(&options, &changed_file, &output_path); + let bundled_name = PathBuf::from(resource.bundled_path()); + if let Err(e) = res { + tracing::debug!("Failed to hotreload asset {e}"); + } - // If the emulator is android, we need to copy the asset to the device with `adb push asset /data/local/tmp/dx/assets/filename.ext` - if self.build.platform == Platform::Android { - _ = self - .copy_file_to_android_tmp(&changed_file, &bundled_name) - .await; + // If the emulator is android, we need to copy the asset to the device with `adb push asset /data/local/tmp/dx/assets/filename.ext` + if self.build.platform == Platform::Android { + _ = self + .copy_file_to_android_tmp(&changed_file, &bundled_name) + .await; + } + bundled_names.push(bundled_name); } - Some(bundled_name) + Some(bundled_names) } /// Copy this file to the tmp folder on the android device, returning the path to the copied file diff --git a/packages/cli/src/build/context.rs b/packages/cli/src/build/context.rs index 33eec6f9f6..34512818e2 100644 --- 
a/packages/cli/src/build/context.rs +++ b/packages/cli/src/build/context.rs @@ -169,9 +169,4 @@ impl BuildContext { stage: BuildStage::CompressingAssets, }); } - pub(crate) fn status_extracting_assets(&self) { - _ = self.tx.unbounded_send(BuilderUpdate::Progress { - stage: BuildStage::ExtractingAssets, - }); - } } diff --git a/packages/cli/src/build/request.rs b/packages/cli/src/build/request.rs index d00012820e..de7fd37bc5 100644 --- a/packages/cli/src/build/request.rs +++ b/packages/cli/src/build/request.rs @@ -385,6 +385,7 @@ pub(crate) struct BuildRequest { pub(crate) session_cache_dir: Arc, pub(crate) link_args_file: Arc, pub(crate) link_err_file: Arc, + pub(crate) link_asset_manifest_file: Arc, pub(crate) rustc_wrapper_args_file: Arc, } @@ -657,6 +658,10 @@ impl BuildRequest { NamedTempFile::with_suffix(".txt") .context("Failed to create temporary file for linker args")?, ); + let link_asset_manifest_file = Arc::new( + NamedTempFile::with_suffix(".json") + .context("Failed to create temporary file for asset manifest")?, + ); let rustc_wrapper_args_file = Arc::new( NamedTempFile::with_suffix(".json") .context("Failed to create temporary file for rustc wrapper args")?, @@ -699,6 +704,7 @@ impl BuildRequest { custom_linker, link_args_file, link_err_file, + link_asset_manifest_file, session_cache_dir, rustc_wrapper_args_file, extra_rustc_args, @@ -889,7 +895,7 @@ impl BuildRequest { self.run_fat_link(ctx, &exe).await?; } - let assets = self.collect_assets(&exe, ctx)?; + let assets = AssetManifest::load_from_file(self.link_asset_manifest_file.path())?; let time_end = SystemTime::now(); let mode = ctx.mode.clone(); let platform = self.platform; @@ -908,23 +914,6 @@ impl BuildRequest { }) } - /// Traverse the target directory and collect all assets from the incremental cache - /// - /// This uses "known paths" that have stayed relatively stable during cargo's lifetime. - /// One day this system might break and we might need to go back to using the linker approach. 
- fn collect_assets(&self, exe: &Path, ctx: &BuildContext) -> Result { - // walk every file in the incremental cache dir, reading and inserting items into the manifest. - let mut manifest = AssetManifest::default(); - - // And then add from the exe directly, just in case it's LTO compiled and has no incremental cache - if !self.skip_assets { - ctx.status_extracting_assets(); - _ = manifest.add_from_object_path(exe); - } - - Ok(manifest) - } - /// Take the output of rustc and make it into the main exe of the bundle /// /// For wasm, we'll want to run `wasm-bindgen` to make it a wasm binary along with some other optimizations @@ -1014,8 +1003,7 @@ impl BuildRequest { // Create a set of all the paths that new files will be bundled to let mut keep_bundled_output_paths: HashSet<_> = assets - .assets - .values() + .assets() .map(|a| asset_dir.join(a.bundled_path())) .collect(); @@ -1054,8 +1042,8 @@ impl BuildRequest { let mut assets_to_transfer = vec![]; // Queue the bundled assets - for (asset, bundled) in &assets.assets { - let from = asset.clone(); + for bundled in assets.assets() { + let from = PathBuf::from(bundled.absolute_source_path()); let to = asset_dir.join(bundled.bundled_path()); // prefer to log using a shorter path relative to the workspace dir by trimming the workspace dir @@ -1269,11 +1257,6 @@ impl BuildRequest { _ = std::fs::remove_file(PathBuf::from(args[idx + 1])); } - // Now extract the assets from the fat binary - artifacts - .assets - .add_from_object_path(&self.patch_exe(artifacts.time_start))?; - // Clean up the temps manually // todo: we might want to keep them around for debugging purposes for file in object_files { @@ -1713,7 +1696,9 @@ impl BuildRequest { /// linker format. 
fn select_linker(&self) -> Result { let cc = match self.triple.operating_system { - OperatingSystem::Unknown if self.platform == Platform::Web => self.workspace.wasm_ld(), + OperatingSystem::Unknown if self.platform == Platform::Web => { + self.workspace.sysroot.wasm_ld() + } // The android clang linker is *special* and has some android-specific flags that we need // @@ -1726,11 +1711,11 @@ impl BuildRequest { // We could also use `lld` here, but it might not be installed by default. // // Note that this is *clang*, not `lld`. - OperatingSystem::Darwin(_) | OperatingSystem::IOS(_) => self.workspace.cc(), + OperatingSystem::Darwin(_) | OperatingSystem::IOS(_) => self.workspace.sysroot.cc(), // On windows, instead of trying to find the system linker, we just go with the lld.link // that rustup provides. It's faster and more stable then reyling on link.exe in path. - OperatingSystem::Windows => self.workspace.lld_link(), + OperatingSystem::Windows => self.workspace.sysroot.lld_link(), // The rest of the platforms use `cc` as the linker which should be available in your path, // provided you have build-tools setup. On mac/linux this is the default, but on Windows @@ -1744,7 +1729,7 @@ impl BuildRequest { // Note that "cc" is *not* a linker. It's a compiler! The arguments we pass need to be in // the form of `-Wl,` for them to make it to the linker. This matches how rust does it // which is confusing. - _ => self.workspace.cc(), + _ => self.workspace.sysroot.cc(), }; Ok(cc) @@ -1878,6 +1863,7 @@ impl BuildRequest { // Merge in extra args. Order shouldn't really matter. cargo_args.extend(self.extra_cargo_args.clone()); + cargo_args.push("--".to_string()); cargo_args.extend(self.extra_rustc_args.clone()); @@ -1887,15 +1873,11 @@ impl BuildRequest { cargo_args.push("-Clink-args=--emit-relocs".to_string()); } - // dx *always* links android and thin builds - if self.custom_linker.is_some() - || matches!(ctx.mode, BuildMode::Thin { .. 
} | BuildMode::Fat) - { - cargo_args.push(format!( - "-Clinker={}", - Workspace::path_to_dx().expect("can't find dx").display() - )); - } + // dx always injects itself as a linker intercept + cargo_args.push(format!( + "-Clinker={}", + Workspace::path_to_dx().expect("can't find dx").display() + )); // Our fancy hot-patching engine needs a lot of customization to work properly. // @@ -1988,18 +1970,28 @@ impl BuildRequest { env_vars.extend(self.android_env_vars()?); }; - // If we're either zero-linking or using a custom linker, make `dx` itself do the linking. - if self.custom_linker.is_some() - || matches!(ctx.mode, BuildMode::Thin { .. } | BuildMode::Fat) - { - LinkAction { - triple: self.triple.clone(), - linker: self.custom_linker.clone(), - link_err_file: dunce::canonicalize(self.link_err_file.path())?, - link_args_file: dunce::canonicalize(self.link_args_file.path())?, - } - .write_env_vars(&mut env_vars)?; + // Write the environment variables for the dx linker intercept used for both asset collection and hot reload builds. + LinkAction { + triple: self.triple.clone(), + linker: match self.custom_linker.clone() { + Some(linker) => crate::Linker::Override(linker), + None => { + if matches!(ctx.mode, BuildMode::Thin { .. 
}) { + crate::Linker::None + } else { + crate::Linker::Auto + } + } + }, + link_err_file: Some(dunce::canonicalize(self.link_err_file.path())?), + link_args_file: Some(dunce::canonicalize(self.link_args_file.path())?), + link_asset_manifest_file: (!self.skip_assets) + .then(|| dunce::canonicalize(self.link_asset_manifest_file.path())) + .transpose()?, + link_log_file: None, + link_asset_out_dir: None, } + .write_env_vars(&mut env_vars)?; // Disable reference types on wasm when using hotpatching // https://blog.rust-lang.org/2024/09/24/webassembly-targets-change-in-default-target-features/#disabling-on-by-default-webassembly-proposals @@ -3440,7 +3432,7 @@ impl BuildRequest { async fn verify_web_tooling(&self) -> Result<()> { // Install target using rustup. #[cfg(not(feature = "no-downloads"))] - if !self.workspace.has_wasm32_unknown_unknown() { + if !self.workspace.sysroot.has_wasm32_unknown_unknown() { tracing::info!( "Web platform requires wasm32-unknown-unknown to be installed. Installing..." ); @@ -3452,7 +3444,7 @@ impl BuildRequest { } // Ensure target is installed. - if !self.workspace.has_wasm32_unknown_unknown() { + if !self.workspace.sysroot.has_wasm32_unknown_unknown() { return Err(Error::Other(anyhow::anyhow!( "Missing target wasm32-unknown-unknown." ))); @@ -3659,7 +3651,7 @@ impl BuildRequest { } // Inject any resources from manganis into the head - for asset in assets.assets.values() { + for asset in assets.assets() { let asset_path = asset.bundled_path(); match asset.options() { AssetOptions::Css(css_options) => { @@ -3689,7 +3681,11 @@ impl BuildRequest { // Manually inject the wasm file for preloading. 
WASM currently doesn't support preloading in the manganis asset system let wasm_source_path = self.wasm_bindgen_wasm_output_file(); - if let Some(wasm_path) = assets.assets.get(&wasm_source_path) { + if let Some(wasm_assets) = assets.get_assets_for_source(&wasm_source_path) { + let wasm_path = wasm_assets + .iter() + .next() + .expect("There should be exactly one optimized wasm asset"); let wasm_path = wasm_path.bundled_path(); head_resources.push_str(&format!( "" diff --git a/packages/cli/src/cli/build_assets.rs b/packages/cli/src/cli/build_assets.rs deleted file mode 100644 index b81c905ae8..0000000000 --- a/packages/cli/src/cli/build_assets.rs +++ /dev/null @@ -1,40 +0,0 @@ -use std::{fs::create_dir_all, path::PathBuf}; - -use crate::{Result, StructuredOutput}; -use clap::Parser; -use dioxus_cli_opt::{process_file_to, AssetManifest}; -use tracing::debug; - -#[derive(Clone, Debug, Parser)] -pub struct BuildAssets { - /// The source executable to build assets for. - pub(crate) executable: PathBuf, - - /// The source directory for the assets. - pub(crate) source: PathBuf, - - /// The destination directory for the assets. 
- pub(crate) destination: PathBuf, -} - -impl BuildAssets { - pub async fn run(self) -> Result { - let mut manifest = AssetManifest::default(); - manifest.add_from_object_path(&self.executable)?; - - create_dir_all(&self.destination)?; - for (path, asset) in manifest.assets.iter() { - let source_path = self.source.join(path); - let destination_path = self.destination.join(asset.bundled_path()); - debug!( - "Processing asset {} --> {} {:#?}", - source_path.display(), - destination_path.display(), - asset - ); - process_file_to(asset.options(), &source_path, &destination_path)?; - } - - Ok(StructuredOutput::Success) - } -} diff --git a/packages/cli/src/cli/link.rs b/packages/cli/src/cli/link.rs index a32cf6e42d..0b836ba7b3 100644 --- a/packages/cli/src/cli/link.rs +++ b/packages/cli/src/cli/link.rs @@ -1,8 +1,20 @@ -use crate::Result; +use crate::{Result, Sysroot}; use anyhow::Context; +use const_serialize::ConstVec; +use dioxus_cli_opt::{process_file_to, AssetManifest}; +use manganis::BundledAsset; +use object::{Object, ObjectSection, ReadCache, ReadRef}; +use rayon::iter::{IntoParallelRefMutIterator, ParallelIterator}; use serde::{Deserialize, Serialize}; -use std::path::PathBuf; +use std::error::Error; +use std::fmt::Debug; +use std::fs::{self, create_dir_all}; +use std::io::{Seek, Write}; +use std::path::{Path, PathBuf}; +use std::process::Command; +use std::str::FromStr; use target_lexicon::Triple; +use target_lexicon::{Architecture, OperatingSystem}; /// `dx` can act as a linker in a few scenarios. Note that we don't *actually* implement the linker logic, /// instead just proxying to a specified linker (or not linking at all!). @@ -26,13 +38,22 @@ use target_lexicon::Triple; /// /// We use "BaseLink" when a linker is specified, and "NoLink" when it is not. Both generate a resulting /// object file. 
- #[derive(Debug)] pub struct LinkAction { - pub linker: Option, + pub linker: Linker, pub triple: Triple, - pub link_args_file: PathBuf, - pub link_err_file: PathBuf, + pub link_args_file: Option, + pub link_err_file: Option, + pub link_log_file: Option, + pub link_asset_manifest_file: Option, + pub link_asset_out_dir: Option, +} + +#[derive(Debug)] +pub enum Linker { + Override(PathBuf), + Auto, + None, } /// The linker flavor to use. This influences the argument style that gets passed to the linker. @@ -53,52 +74,106 @@ impl LinkAction { const DX_ARGS_FILE: &str = "DX_LINK_ARGS_FILE"; const DX_ERR_FILE: &str = "DX_LINK_ERR_FILE"; const DX_LINK_TRIPLE: &str = "DX_LINK_TRIPLE"; + const DX_LINK_ASSET_MANIFEST: &str = "DX_LINK_ASSET_MANIFEST"; + + // Publicly documented CLI APIs for linking const DX_LINK_CUSTOM_LINKER: &str = "DX_LINK_CUSTOM_LINKER"; + pub(crate) const ENV_VAR_NAME_ASSETS_TARGET: &'static str = "DX_LINK_ASSETS_TARGET"; // The target directory for the assets + pub(crate) const LOG_FILE_VAR_NAME: &'static str = "DX_LINK_LOG_FILE"; // The log file to use /// Should we write the input arguments to a file (aka act as a linker subprocess)? 
/// /// Just check if the magic env var is set pub(crate) fn from_env() -> Option { - if std::env::var(Self::DX_LINK_ARG).is_err() { + if std::env::var(Self::DX_LINK_ARG).is_err() + && std::env::var(Self::ENV_VAR_NAME_ASSETS_TARGET).is_err() + { return None; } + let linker = std::env::var(Self::DX_LINK_CUSTOM_LINKER); + let linker = match &linker { + Ok(linker) if linker.eq_ignore_ascii_case("auto") => Linker::Auto, + Ok(linker) => { + let linker = PathBuf::from(linker); + Linker::Override(linker) + } + Err(_) => Linker::None, + }; + Some(Self { - linker: std::env::var(Self::DX_LINK_CUSTOM_LINKER) + linker, + link_args_file: std::env::var(Self::DX_ARGS_FILE).ok().map(PathBuf::from), + link_err_file: std::env::var(Self::DX_ERR_FILE).ok().map(PathBuf::from), + triple: std::env::var(Self::DX_LINK_TRIPLE) + .ok() + .and_then(|s| s.parse().ok()) + .unwrap_or_else(guess_target_triple), + link_asset_manifest_file: std::env::var(Self::DX_LINK_ASSET_MANIFEST) + .ok() + .map(PathBuf::from), + link_asset_out_dir: std::env::var(Self::ENV_VAR_NAME_ASSETS_TARGET) + .ok() + .map(PathBuf::from), + link_log_file: std::env::var(Self::LOG_FILE_VAR_NAME) .ok() .map(PathBuf::from), - link_args_file: std::env::var(Self::DX_ARGS_FILE) - .expect("Linker args file not set") - .into(), - link_err_file: std::env::var(Self::DX_ERR_FILE) - .expect("Linker error file not set") - .into(), - triple: std::env::var(Self::DX_LINK_TRIPLE) - .expect("Linker triple not set") - .parse() - .expect("Failed to parse linker triple"), }) } pub(crate) fn write_env_vars(&self, env_vars: &mut Vec<(&str, String)>) -> Result<()> { env_vars.push((Self::DX_LINK_ARG, "1".to_string())); - env_vars.push(( - Self::DX_ARGS_FILE, - dunce::canonicalize(&self.link_args_file)? - .to_string_lossy() - .to_string(), - )); - env_vars.push(( - Self::DX_ERR_FILE, - dunce::canonicalize(&self.link_err_file)? 
- .to_string_lossy() - .to_string(), - )); + if let Some(link_args_file) = &self.link_args_file { + env_vars.push(( + Self::DX_ARGS_FILE, + dunce::canonicalize(link_args_file)? + .to_string_lossy() + .to_string(), + )); + } + if let Some(link_err_file) = &self.link_err_file { + env_vars.push(( + Self::DX_ERR_FILE, + dunce::canonicalize(link_err_file)? + .to_string_lossy() + .to_string(), + )); + } env_vars.push((Self::DX_LINK_TRIPLE, self.triple.to_string())); - if let Some(linker) = &self.linker { + match &self.linker { + Linker::Override(linker) => { + env_vars.push(( + Self::DX_LINK_CUSTOM_LINKER, + dunce::canonicalize(linker)?.to_string_lossy().to_string(), + )); + } + Linker::Auto => { + env_vars.push((Self::DX_LINK_CUSTOM_LINKER, "auto".to_string())); + } + Linker::None => {} + } + if let Some(link_asset_manifest_file) = &self.link_asset_manifest_file { + env_vars.push(( + Self::DX_LINK_ASSET_MANIFEST, + dunce::canonicalize(link_asset_manifest_file)? + .to_string_lossy() + .to_string(), + )); + } + if let Some(link_asset_out_dir) = &self.link_asset_out_dir { + env_vars.push(( + Self::ENV_VAR_NAME_ASSETS_TARGET, + dunce::canonicalize(link_asset_out_dir)? + .to_string_lossy() + .to_string(), + )); + } + if let Some(link_log_file) = &self.link_log_file { env_vars.push(( - Self::DX_LINK_CUSTOM_LINKER, - dunce::canonicalize(linker)?.to_string_lossy().to_string(), + Self::LOG_FILE_VAR_NAME, + dunce::canonicalize(link_log_file)? + .to_string_lossy() + .to_string(), )); } @@ -110,10 +185,13 @@ impl LinkAction { let res = self.run_link_inner().await; if let Err(err) = res { - // If we failed to run the linker, we need to write the error to the file - // so that the main process can read it. 
- _ = std::fs::create_dir_all(link_err_file.parent().unwrap()); - _ = std::fs::write(link_err_file, format!("Linker error: {err}")); + tracing::error!("Failed to run linker: {err}"); + if let Some(link_err_file) = &link_err_file { + // If we failed to run the linker, we need to write the error to the file + // so that the main process can read it. + _ = std::fs::create_dir_all(link_err_file.parent().unwrap()); + _ = std::fs::write(link_err_file, format!("Linker error: {err}")); + } } } @@ -122,50 +200,71 @@ impl LinkAction { /// The file will be given by the dx-magic-link-arg env var itself, so we use /// it both for determining if we should act as a linker and the for the file name itself. async fn run_link_inner(self) -> Result<()> { + self.init_linker_logger()?; + let mut args: Vec<_> = std::env::args().collect(); if args.is_empty() { + tracing::error!("No arguments passed to linker"); return Ok(()); } handle_linker_command_file(&mut args); + self.link_asset_manifest(&mut args)?; - // Write the linker args to a file for the main process to read - // todo: we might need to encode these as escaped shell words in case newlines are passed - std::fs::write(self.link_args_file, args.join("\n"))?; + if let Some(link_args_file) = &self.link_args_file { + // Write the linker args to a file for the main process to read + // todo: we might need to encode these as escaped shell words in case newlines are passed + std::fs::write(link_args_file, args.join("\n"))?; + } + + let linker = match &self.linker { + Linker::Override(linker) => Some(Command::new(linker)), + Linker::Auto => { + let sysroot = Sysroot::new().await?; + // Try to find the linker from the toolchain + let linker = find_linker(&sysroot); + Some(linker) + } + Linker::None => None, + }; // If there's a linker specified, we use that. Otherwise, we write a dummy object file to satisfy // any post-processing steps that rustc does. 
- match self.linker { - Some(linker) => { - let res = std::process::Command::new(linker) + match linker { + Some(mut linker) => { + let res = linker .args(args.iter().skip(1)) .output() - .expect("Failed to run linker"); + .context("Failed to await linker")?; if !res.stderr.is_empty() || !res.stdout.is_empty() { - _ = std::fs::create_dir_all(self.link_err_file.parent().unwrap()); - _ = std::fs::write( - self.link_err_file, - format!( - "Linker error: {}\n{}", - String::from_utf8_lossy(&res.stdout), - String::from_utf8_lossy(&res.stderr) - ), - ); + let stdout = String::from_utf8_lossy(&res.stdout); + let stderr = String::from_utf8_lossy(&res.stderr); + // Trim lines containing libcompiler_builtins. There is a noisy error rust-lld warns about + // but doesn't cause any issues. + // Eg. + // rust-lld: warning: /Users/_/.rustup/toolchains/stable-aarch64-apple-darwin/lib/rustlib/wasm32-unknown-unknown/lib/libcompiler_builtins-_.rlib: archive member '_.o' is neither Wasm object file nor LLVM bitcode + fn trim_libcompiler_builtins(messages: &str) -> String { + messages + .lines() + .filter(|line| !line.contains("libcompiler_builtins")) + .collect::>() + .join("\n") + } + let trimmed_stdout = trim_libcompiler_builtins(&stdout); + let trimmed_stderr = trim_libcompiler_builtins(&stderr); + tracing::info!("linker stdout: {}", trimmed_stdout); + tracing::error!("linker stderr: {}", trimmed_stderr); + let message = format!("Linker error: {}\n{}", trimmed_stdout, trimmed_stderr); + if let Some(link_err_file) = &self.link_err_file { + _ = std::fs::create_dir_all(link_err_file.parent().unwrap()); + _ = std::fs::write(link_err_file, message); + } } } None => { // Extract the out path - we're going to write a dummy object file to satisfy the linker - let out_file: PathBuf = match self.triple.operating_system { - target_lexicon::OperatingSystem::Windows => { - let out_arg = args.iter().find(|arg| arg.starts_with("/OUT")).unwrap(); - 
out_arg.trim_start_matches("/OUT:").to_string().into() - } - _ => { - let out = args.iter().position(|arg| arg == "-o").unwrap(); - args[out + 1].clone().into() - } - }; + let out_file: PathBuf = self.out_path(&args); // This creates an object file that satisfies rust's use of llvm-objcopy // @@ -219,6 +318,129 @@ impl LinkAction { Ok(()) } + + fn out_path(&self, args: &[String]) -> PathBuf { + match self.triple.operating_system { + target_lexicon::OperatingSystem::Windows => { + let out_arg = args.iter().find(|arg| arg.starts_with("/OUT")).unwrap(); + out_arg.trim_start_matches("/OUT:").to_string().into() + } + _ => { + let out = args.iter().position(|arg| arg == "-o").unwrap(); + args[out + 1].clone().into() + } + } + } + + fn out_dir(&self, args: &[String]) -> PathBuf { + let out_path = self.out_path(args); + if out_path.is_dir() { + out_path + } else { + out_path.parent().unwrap().to_path_buf() + } + } + + fn link_asset_manifest(&self, args: &mut Vec) -> Result<()> { + let mut references = AssetReferences::from_link_args(args); + + // Hash each file in parallel + references.assets.par_iter_mut().for_each(|asset| { + dioxus_cli_opt::add_hash_to_asset(&mut asset.bundled_asset); + }); + + // Look for --flavor wasm in the args + let targeting_wasm = self.triple.architecture == target_lexicon::Architecture::Wasm32 + || self.triple.architecture == target_lexicon::Architecture::Wasm64; + + // If we are targeting wasm, create an object file to satisfy the imports + if targeting_wasm { + let mut data_sections = Vec::new(); + for asset in references.assets.iter() { + let name = asset.bundled_asset.link_section(); + let data = const_serialize::serialize_const(&asset.bundled_asset, ConstVec::new()); + data_sections.push((name, data.as_ref().to_vec())); + } + + // Create the object file + let object_file = create_data_object_file( + data_sections + .iter() + .map(|(name, data)| (*name, data.as_ref())), + ); + let asset_file = self + .out_dir(args) + 
.join("manganis_assets_out") + .with_extension("o"); + std::fs::write(&asset_file, object_file).context("Failed to write object file")?; + args.push(asset_file.to_string_lossy().to_string()); + } + // Otherwise overwrite the object files + else { + for asset in &references.assets { + // Write the asset to the object file + if let Err(err) = asset.write() { + tracing::error!("Failed to write asset to object file: {err}"); + } + } + } + + // Extract the manifest from the hashed assets + let mut manifest = AssetManifest::default(); + for asset in references.assets.iter() { + // Add the asset to the manifest + manifest.insert_asset(asset.bundled_asset); + } + + tracing::info!("Found assets: {:#?}", manifest.assets().collect::>()); + + tracing::info!( + "writing asset manifest to {:?}", + self.link_asset_manifest_file + ); + if let Some(link_asset_manifest_file) = &self.link_asset_manifest_file { + // Write the asset manifest to the file + let contents = + serde_json::to_string(&manifest).context("Failed to write asset manifest")?; + std::fs::write(link_asset_manifest_file, contents) + .context("Failed to write asset manifest file")?; + } + + if let Some(link_asset_out_dir) = &self.link_asset_out_dir { + if let Err(err) = create_dir_all(link_asset_out_dir) { + tracing::error!("Failed to create destination directory: {err}"); + } + for asset in manifest.assets() { + let path = PathBuf::from(asset.absolute_source_path()); + let destination_path = link_asset_out_dir.join(asset.bundled_path()); + tracing::debug!( + "Processing asset {} --> {} {:#?}", + path.display(), + destination_path.display(), + asset + ); + process_file_to(asset.options(), &path, &destination_path)?; + } + } + + Ok(()) + } + + fn init_linker_logger(&self) -> Result<()> { + if let Some(log_path) = &self.link_log_file { + let log_file = std::fs::File::options() + .append(true) + .create(true) + .open(log_path)?; + tracing_subscriber::fmt() + .with_writer(log_file) + .with_max_level(tracing::Level::DEBUG) 
+ .compact() + .with_ansi(false) + .init(); + } + Ok(()) + } } pub fn handle_linker_command_file(args: &mut Vec) { @@ -250,3 +472,242 @@ pub fn handle_linker_command_file(args: &mut Vec) { .collect(); } } + +struct AssetReference { + file: PathBuf, + offset: usize, + bundled_asset: BundledAsset, +} + +impl AssetReference { + fn write(&self) -> std::io::Result<()> { + let new_data = ConstVec::new(); + let new_data = const_serialize::serialize_const(&self.bundled_asset, new_data); + + let mut binary_data = fs::File::options() + .write(true) + .read(true) + .open(&self.file)?; + binary_data.seek(std::io::SeekFrom::Start(self.offset as u64))?; + // Write the modified binary data back to the file + binary_data.write_all(new_data.as_ref())?; + binary_data.sync_all()?; + + Ok(()) + } +} + +#[derive(Default)] +struct AssetReferences { + assets: Vec, +} + +impl AssetReferences { + fn from_link_args(args: &[String]) -> Self { + let mut args = args.to_vec(); + let mut references = AssetReferences::default(); + handle_linker_command_file(&mut args); + for file in args { + let path = PathBuf::from(file); + if path + .extension() + .is_some_and(|ext| ext == "o" || ext == "rlib") + { + if let Ok(path) = path.canonicalize() { + if let Err(err) = references.add_from_object_path(&path) { + tracing::error!("Failed to read object file {}: {err}", path.display()); + } + } + } + } + references + } + + fn add_from_object_path(&mut self, path: &PathBuf) -> Result<(), Box> { + let mut binary_data = fs::File::options().read(true).open(path)?; + let read_cache = ReadCache::new(&mut binary_data); + // Try to read the object file as an archive + if let Ok(archive) = object::read::archive::ArchiveFile::parse(&read_cache) { + for member in archive.members() { + let member = member?; + let (offset, _) = member.file_range(); + let member_data = member.data(&read_cache)?; + self.add_from_object_data( + path, + &ReadCache::new(&mut std::io::Cursor::new(member_data)), + offset as usize, + )?; + } + } 
+ // Otherwise, read it as a regular object file + else { + self.add_from_object_data(path, &read_cache, 0)?; + } + Ok(()) + } + + fn add_from_object_data<'a>( + &mut self, + path: &Path, + read_cache: impl ReadRef<'a>, + file_offset: usize, + ) -> Result<(), Box> { + let mut range = None; + { + let file = object::File::parse(read_cache)?; + for section in file.sections() { + if let Ok(name) = section.name() { + if manganis_core::linker::LinkSection::ALL + .iter() + .any(|link_section| link_section.link_section == name) + { + let Some(file_range) = section.file_range() else { + continue; + }; + if file.format() == object::BinaryFormat::Wasm { + // In wasm this is actually the start and end + let (start, end) = file_range; + range = Some(start as usize..end as usize); + } else { + let (offset, len) = file_range; + range = Some(offset as usize..(offset + len) as usize); + } + break; + } + } + } + } + if let Some(range) = range { + let data_in_range = read_cache + .read_bytes_at(range.start as u64, range.len() as u64) + .map_err(|_| { + tracing::error!("Failed to read object file"); + std::io::Error::new(std::io::ErrorKind::Other, "Failed to read object file") + })?; + let mut offset = 0; + let mut buffer = const_serialize::ConstReadBuffer::new(data_in_range); + + while let Some((remaining_buffer, bundled_asset)) = + const_serialize::deserialize_const!(BundledAsset, buffer) + { + let len = (data_in_range.len() - remaining_buffer.remaining().len()) - offset; + self.assets.push(AssetReference { + file: path.to_path_buf(), + offset: range.start + file_offset + offset, + bundled_asset, + }); + offset += len; + buffer = remaining_buffer; + } + } + Ok(()) + } +} + +fn create_data_object_file<'a>( + data_sections: impl IntoIterator, +) -> Vec { + use wasm_encoder::{ + ConstExpr, DataSection, DataSymbolDefinition, LinkingSection, Module, SymbolTable, + }; + + let mut linking = LinkingSection::new(); + let mut sym_tab = SymbolTable::new(); + let mut data_section = 
DataSection::new(); + + let memory_index = 0; + let mut offset = 0; + let mut all_bytes = Vec::new(); + + for (symbol_name, data) in data_sections { + let flags = SymbolTable::WASM_SYM_EXPORTED; + let size = data.len() as u32; + + all_bytes.extend_from_slice(data); + sym_tab.data( + flags, + symbol_name, + Some(DataSymbolDefinition { + index: memory_index, + offset: offset as u32, + size, + }), + ); + linking.symbol_table(&sym_tab); + offset += data.len(); + } + data_section.active(memory_index, &ConstExpr::i32_const(0), all_bytes); + + // Add the linking section to a new Wasm module and get the encoded bytes. + let mut module = Module::new(); + module.section(&data_section).section(&linking); + module.finish() +} + +/// Try to guess the target triple we are linking for +fn guess_target_triple() -> Triple { + let args: Vec = std::env::args().collect(); + // Look for --flavor wasm in the args + let targeting_wasm = + args.contains(&"-flavor".to_string()) && args.contains(&"wasm".to_string()); + let toolchain = if targeting_wasm { + "stable-wasm32-unknown-unknown".to_string() + } else { + std::env::var("RUSTUP_TOOLCHAIN").unwrap() + }; + + // Get rid of the stable/nightly prefix + let toolchain = toolchain.split_once("-").unwrap().1; + + Triple::from_str(toolchain).unwrap_or_else(|_| { + tracing::error!("Failed to parse target triple from toolchain: {toolchain}"); + Triple::host() + }) +} + +// Try to guess the current linker from the toolchain +fn find_linker(sysroot: &Sysroot) -> Command { + let toolchain = guess_target_triple(); + match (toolchain.operating_system, toolchain.architecture) { + // usually just ld64 - uses your `cc` + // Eg. aarch64-apple-darwin + (OperatingSystem::Darwin(_), _) => { + let mut command = Command::new(sysroot.cc()); + command.env_remove("IPHONEOS_DEPLOYMENT_TARGET"); + command.env_remove("TVOS_DEPLOYMENT_TARGET"); + command.env_remove("XROS_DEPLOYMENT_TARGET"); + command + } + // Eg. 
nightly-x86_64-unknown-linux-gnu + (OperatingSystem::Linux, arch) => { + let mut command = Command::new(sysroot.cc()); + command.env("LC_ALL", "C"); + if arch == target_lexicon::Architecture::X86_64 { + command.arg("-m64"); + } + command + } + // Eg. stable-x86_64-pc-windows-msvc + (OperatingSystem::Windows, _) => { + let mut command = Command::new("link.exe"); + command.arg("/NOLOGO"); + command + } + // Eg. nightly-wasm32-unknown-unknown + (_, Architecture::Wasm32 | Architecture::Wasm64) => { + let mut command = Command::new(sysroot.rust_lld()); + command.env("LC_ALL", "C"); + command + } + _ => { + panic!( + "Unknown target {}. Please set the environment variable DIOXUS_LINKER to the path of your linker. +If you don't know where your linker is, create a blank rust file and run `rustc temp.rs --print link-args`. +On unix-like platforms, you can run this command to find your link args: +`echo \"fn main(){{}}\" > ./temp.rs && rustc +nightly temp.rs --print link-args -Z unstable-options && rm ./temp.rs` +Once you find the linker args for your platform feel free to open an issue with link args so we can +add support for the platform out of the box: https://github.com/DioxusLabs/dioxus/issues/new", toolchain + ) + } + } +} diff --git a/packages/cli/src/cli/mod.rs b/packages/cli/src/cli/mod.rs index d6dac5c798..cdaa4d49a3 100644 --- a/packages/cli/src/cli/mod.rs +++ b/packages/cli/src/cli/mod.rs @@ -1,6 +1,5 @@ pub(crate) mod autoformat; pub(crate) mod build; -pub(crate) mod build_assets; pub(crate) mod bundle; pub(crate) mod check; pub(crate) mod clean; @@ -35,6 +34,66 @@ use std::{ }; /// Dioxus: build web, desktop, and mobile apps with a single codebase. +/// +/// ## Creating a New Project +/// +/// You can use `dx new` to create a new dioxus project. 
The CLI will ask you a few questions about your project and then create a new project for you: +/// ```sh +/// dx new my-app +/// ``` +/// +/// ## Serving Your App +/// +/// You can use `dx serve` to serve your dioxus app. This will start a local server and watch for changes to your app: +/// ```sh +/// dx serve +/// ``` +/// +/// ## Bundling Your App +/// +/// Once you are ready to ship your app, you can use `dx bundle` to build your app. This will create a production-ready build of your app: +/// ```sh +/// dx bundle +/// ``` +/// +/// ## Asset Optimizer +/// +/// When you serve dioxus with dx, it will automatically handle the build process for you. If you need to integrate with a larger build system, +/// you can use the `dx` asset optimizer separately to link to your assets. If you set the `DX_LINK_ASSETS_TARGET` environment variable, dx will +/// proxy your linker and copy the optimized assets it finds in your program into the specified directory. +/// +/// ### Usage with trunk +/// +/// If you are using trunk, you need to create a temporary asset directory to store the output of the dx asset optimizer that will be copied by trunk into your dist directory: +/// ```html +/// +/// +/// +/// +/// +/// +///
+/// +/// +/// ``` +/// Then when you build, you need to set the `DX_LINK_ASSETS_TARGET` environment variable to the path of the temporary asset directory and `dx` as your linker: +/// ```sh +/// DX_LINK_ASSETS_TARGET="dist_assets" RUSTFLAGS="-Clinker=dx" trunk serve +/// ``` +/// +/// ### Usage with cargo +/// +/// If you are using cargo, you need to set the `DX_LINK_ASSETS_TARGET` environment variable to the path where your optimized assets will be stored and `dx` as your linker: +/// ```sh +/// DX_LINK_ASSETS_TARGET="dist_assets" RUSTFLAGS="-Clinker=dx" cargo run +/// ``` +/// +/// ### Custom linker path +/// +/// DX will try to find the default linker for your system, but if you need to use a custom linker on top of the dx proxy, you can set the `DX_LINK_CUSTOM_LINKER` environment variable to the path of your custom linker. For example, if you are using `lld` as your linker, you can set the `DX_LINK_CUSTOM_LINKER` environment variable to the path of `lld`: +/// ```sh +/// DX_LINK_CUSTOM_LINKER="/path/to/lld" DX_LINK_ASSETS_TARGET="dist_assets" RUSTFLAGS="-Clinker=dx" cargo run +/// ``` #[derive(Parser)] #[clap(name = "dioxus", version = VERSION.as_str())] #[clap(styles = CARGO_STYLING)] @@ -68,10 +127,6 @@ pub(crate) enum Commands { #[clap(name = "run")] Run(run::RunArgs), - /// Build the assets for a specific target. - #[clap(name = "assets")] - BuildAssets(build_assets::BuildAssets), - /// Init a new project for Dioxus in the current directory (by default). /// Will attempt to keep your project in a good state.
#[clap(name = "init")] @@ -117,7 +172,6 @@ impl Display for Commands { Commands::Check(_) => write!(f, "check"), Commands::Bundle(_) => write!(f, "bundle"), Commands::Run(_) => write!(f, "run"), - Commands::BuildAssets(_) => write!(f, "assets"), Commands::SelfUpdate(_) => write!(f, "self-update"), } } diff --git a/packages/cli/src/main.rs b/packages/cli/src/main.rs index 105a66fe15..9cfd15db34 100644 --- a/packages/cli/src/main.rs +++ b/packages/cli/src/main.rs @@ -40,7 +40,8 @@ pub(crate) use workspace::*; async fn main() { // The CLI uses dx as a rustcwrapper in some instances (like binary patching) if rustcwrapper::is_wrapping_rustc() { - return rustcwrapper::run_rustc().await; + rustcwrapper::run_rustc().await; + return; } // If we're being ran as a linker (likely from ourselves), we want to act as a linker instead. @@ -61,7 +62,6 @@ async fn main() { Commands::Serve(opts) => opts.serve().await, Commands::Bundle(opts) => opts.bundle().await, Commands::Run(opts) => opts.run().await, - Commands::BuildAssets(opts) => opts.run().await, Commands::SelfUpdate(opts) => opts.self_update().await, }; @@ -81,5 +81,5 @@ async fn main() { std::process::exit(1); } - }; + } } diff --git a/packages/cli/src/serve/output.rs b/packages/cli/src/serve/output.rs index 0cfb411f7d..2209052c41 100644 --- a/packages/cli/src/serve/output.rs +++ b/packages/cli/src/serve/output.rs @@ -151,7 +151,7 @@ impl Output { use std::io::IsTerminal; if !stdout().is_terminal() { - return io::Result::Err(io::Error::new(io::ErrorKind::Other, "Not a terminal")); + return io::Result::Err(io::Error::other("Not a terminal")); } enable_raw_mode()?; @@ -538,7 +538,6 @@ impl Output { BuildStage::Restarting => lines.push("Restarting".yellow()), BuildStage::Linking => lines.push("Linking".yellow()), BuildStage::Hotpatching => lines.push("Hot-patching...".yellow()), - BuildStage::ExtractingAssets => lines.push("Extracting assets".yellow()), _ => {} }; diff --git a/packages/cli/src/serve/runner.rs 
b/packages/cli/src/serve/runner.rs index 610ae64aee..9113f72129 100644 --- a/packages/cli/src/serve/runner.rs +++ b/packages/cli/src/serve/runner.rs @@ -309,8 +309,10 @@ impl AppServer { // If it's an asset, we want to hotreload it // todo(jon): don't hardcode this here - if let Some(bundled_name) = self.client.hotreload_bundled_asset(path).await { - assets.push(PathBuf::from("/assets/").join(bundled_name)); + if let Some(bundled_names) = self.client.hotreload_bundled_asset(path).await { + for bundled_name in bundled_names { + assets.push(PathBuf::from("/assets/").join(bundled_name)); + } } // If it's a rust file, we want to hotreload it using the filemap diff --git a/packages/cli/src/workspace.rs b/packages/cli/src/workspace.rs index 346ef43af5..ad7c7ddd6c 100644 --- a/packages/cli/src/workspace.rs +++ b/packages/cli/src/workspace.rs @@ -15,7 +15,7 @@ pub struct Workspace { pub(crate) krates: Krates, pub(crate) settings: CliSettings, pub(crate) wasm_opt: Option, - pub(crate) sysroot: PathBuf, + pub(crate) sysroot: Sysroot, pub(crate) rustc_version: String, pub(crate) ignore: Gitignore, pub(crate) cargo_toml: cargo_toml::Manifest, @@ -47,12 +47,7 @@ impl Workspace { .context("Failed to run cargo metadata")?; let settings = CliSettings::global_or_default(); - let sysroot = Command::new("rustc") - .args(["--print", "sysroot"]) - .output() - .await - .map(|out| String::from_utf8(out.stdout))? 
- .context("Failed to extract rustc sysroot output")?; + let sysroot = Sysroot::new().await?; let rustc_version = Command::new("rustc") .args(["--version"]) @@ -75,7 +70,7 @@ impl Workspace { krates, settings, wasm_opt, - sysroot: sysroot.trim().into(), + sysroot, rustc_version: rustc_version.trim().into(), ignore, cargo_toml, @@ -88,7 +83,7 @@ impl Workspace { • rustc version: {rustc_version} • workspace root: {workspace_root} • dioxus versions: [{dioxus_versions:?}]"#, - sysroot = workspace.sysroot.display(), + sysroot = workspace.sysroot.sysroot.display(), rustc_version = workspace.rustc_version, workspace_root = workspace.workspace_root().display(), dioxus_versions = workspace @@ -179,51 +174,6 @@ impl Workspace { versions.sort(); versions } - - #[allow(unused)] - pub fn rust_lld(&self) -> PathBuf { - self.sysroot - .join("lib") - .join("rustlib") - .join(Triple::host().to_string()) - .join("bin") - .join("rust-lld") - } - - /// Return the path to the `cc` compiler - /// - /// This is used for the patching system to run the linker. - /// We could also just use lld given to us by rust itself. - pub fn cc(&self) -> PathBuf { - PathBuf::from("cc") - } - - /// The windows linker - pub fn lld_link(&self) -> PathBuf { - self.gcc_ld_dir().join("lld-link") - } - - pub fn wasm_ld(&self) -> PathBuf { - self.gcc_ld_dir().join("wasm-ld") - } - - // wasm-ld: ./rustup/toolchains/nightly-x86_64-unknown-linux-gnu/bin/wasm-ld - // rust-lld: ./rustup/toolchains/nightly-x86_64-unknown-linux-gnu/bin/rust-lld - fn gcc_ld_dir(&self) -> PathBuf { - self.sysroot - .join("lib") - .join("rustlib") - .join(Triple::host().to_string()) - .join("bin") - .join("gcc-ld") - } - - pub fn has_wasm32_unknown_unknown(&self) -> bool { - self.sysroot - .join("lib/rustlib/wasm32-unknown-unknown") - .exists() - } - /// Find the "main" package in the workspace. There might not be one! 
pub fn find_main_package(&self, package: Option) -> Result { if let Some(package) = package { @@ -453,3 +403,67 @@ impl std::fmt::Debug for Workspace { .finish() } } + +#[derive(Debug)] +pub(crate) struct Sysroot { + sysroot: PathBuf, +} + +impl Sysroot { + pub(crate) async fn new() -> Result { + let sysroot = Command::new("rustc") + .args(["--print", "sysroot"]) + .output() + .await + .map(|out| String::from_utf8(out.stdout))? + .context("Failed to extract rustc sysroot output")?; + + Ok(Self { + sysroot: sysroot.trim().into(), + }) + } + + #[allow(unused)] + pub fn rust_lld(&self) -> PathBuf { + self.sysroot + .join("lib") + .join("rustlib") + .join(Triple::host().to_string()) + .join("bin") + .join("rust-lld") + } + + /// Return the path to the `cc` compiler + /// + /// This is used for the patching system to run the linker. + /// We could also just use lld given to us by rust itself. + pub fn cc(&self) -> PathBuf { + PathBuf::from("cc") + } + + /// The windows linker + pub fn lld_link(&self) -> PathBuf { + self.gcc_ld_dir().join("lld-link") + } + + pub fn wasm_ld(&self) -> PathBuf { + self.gcc_ld_dir().join("wasm-ld") + } + + // wasm-ld: ./rustup/toolchains/nightly-x86_64-unknown-linux-gnu/bin/wasm-ld + // rust-lld: ./rustup/toolchains/nightly-x86_64-unknown-linux-gnu/bin/rust-lld + fn gcc_ld_dir(&self) -> PathBuf { + self.sysroot + .join("lib") + .join("rustlib") + .join(Triple::host().to_string()) + .join("bin") + .join("gcc-ld") + } + + pub fn has_wasm32_unknown_unknown(&self) -> bool { + self.sysroot + .join("lib/rustlib/wasm32-unknown-unknown") + .exists() + } +} diff --git a/packages/const-serialize/src/const_buffers.rs b/packages/const-serialize/src/const_buffers.rs index c230a6d8aa..4e93ddbdbc 100644 --- a/packages/const-serialize/src/const_buffers.rs +++ b/packages/const-serialize/src/const_buffers.rs @@ -30,4 +30,9 @@ impl<'a> ConstReadBuffer<'a> { pub const fn as_ref(&self) -> &[u8] { self.memory } + + /// Get a slice of the buffer from the current 
location to the end of the buffer + pub const fn remaining(&self) -> &[u8] { + self.memory.split_at(self.location).1 + } } diff --git a/packages/const-serialize/src/const_vec.rs b/packages/const-serialize/src/const_vec.rs index 5535b31a1d..888526530f 100644 --- a/packages/const-serialize/src/const_vec.rs +++ b/packages/const-serialize/src/const_vec.rs @@ -420,3 +420,17 @@ fn test_const_vec_remove() { assert_eq!(value, Some(5678)); assert_eq!(vec.as_ref(), &[]); } + +#[test] +fn test_const_vec_extend() { + const VEC: ConstVec = { + let mut vec = ConstVec::new(); + vec = vec.push(1234); + vec = vec.push(5678); + vec = vec.extend(&[91011, 1213]); + vec + }; + let vec = VEC; + println!("{:?}", vec); + assert_eq!(vec.as_ref(), &[1234, 5678, 91011, 1213]); +} diff --git a/packages/const-serialize/src/lib.rs b/packages/const-serialize/src/lib.rs index 0a32d503b2..7ee11099db 100644 --- a/packages/const-serialize/src/lib.rs +++ b/packages/const-serialize/src/lib.rs @@ -221,7 +221,7 @@ impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8: const MAX_STR_SIZE: usize = 256; /// A string that is stored in a constant sized buffer that can be serialized and deserialized at compile time -#[derive(PartialEq, PartialOrd, Clone, Copy, Hash)] +#[derive(Eq, PartialEq, PartialOrd, Clone, Copy, Hash)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct ConstStr { #[cfg_attr(feature = "serde", serde(with = "serde_bytes"))] diff --git a/packages/const-serialize/tests/enum.rs b/packages/const-serialize/tests/enum.rs index 130a5dbaa0..a0df9f160c 100644 --- a/packages/const-serialize/tests/enum.rs +++ b/packages/const-serialize/tests/enum.rs @@ -95,6 +95,67 @@ fn test_serialize_enum() { assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data); } +#[test] +fn test_serialize_list_of_lopsided_enums() { + #[derive(Clone, Copy, Debug, PartialEq, SerializeConst)] + #[repr(C, u8)] + enum Enum { + A, + B { one: u8, two: u16 } = 15, + } + 
+ println!("{:#?}", Enum::MEMORY_LAYOUT); + + let data = [Enum::A, Enum::A]; + let mut buf = ConstVec::new(); + buf = serialize_const(&data, buf); + println!("{:?}", buf.as_ref()); + let buf = buf.read(); + assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data); + + let data = [ + Enum::B { + one: 0x11, + two: 0x2233, + }, + Enum::B { + one: 0x12, + two: 0x2244, + }, + ]; + let mut buf = ConstVec::new(); + buf = serialize_const(&data, buf); + println!("{:?}", buf.as_ref()); + let buf = buf.read(); + assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data); + + let data = [ + Enum::A, + Enum::B { + one: 0x11, + two: 0x2233, + }, + ]; + let mut buf = ConstVec::new(); + buf = serialize_const(&data, buf); + println!("{:?}", buf.as_ref()); + let buf = buf.read(); + assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data); + + let data = [ + Enum::B { + one: 0x11, + two: 0x2233, + }, + Enum::A, + ]; + let mut buf = ConstVec::new(); + buf = serialize_const(&data, buf); + println!("{:?}", buf.as_ref()); + let buf = buf.read(); + assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data); +} + #[test] fn test_serialize_u8_enum() { #[derive(Clone, Copy, Debug, PartialEq, SerializeConst)] diff --git a/packages/dx-wire-format/src/lib.rs b/packages/dx-wire-format/src/lib.rs index 43da1a1a6f..915b7a0beb 100644 --- a/packages/dx-wire-format/src/lib.rs +++ b/packages/dx-wire-format/src/lib.rs @@ -75,7 +75,6 @@ pub enum BuildStage { OptimizingWasm, Linking, Hotpatching, - ExtractingAssets, CopyingAssets { current: usize, total: usize, diff --git a/packages/manganis/manganis-core/src/asset.rs b/packages/manganis/manganis-core/src/asset.rs index 28434b5382..1d645963b3 100644 --- a/packages/manganis/manganis-core/src/asset.rs +++ b/packages/manganis/manganis-core/src/asset.rs @@ -1,23 +1,13 @@ use crate::AssetOptions; -use const_serialize::{ConstStr, SerializeConst}; -use std::path::PathBuf; +use const_serialize::{deserialize_const, ConstStr, ConstVec, 
SerializeConst}; +use std::{fmt::Debug, hash::Hash, path::PathBuf}; /// An asset that should be copied by the bundler with some options. This type will be /// serialized into the binary and added to the link section [`LinkSection::CURRENT`](crate::linker::LinkSection::CURRENT). /// CLIs that support manganis, should pull out the assets from the link section, optimize, /// and write them to the filesystem at [`BundledAsset::bundled_path`] for the application /// to use. -#[derive( - Debug, - PartialEq, - PartialOrd, - Clone, - Copy, - Hash, - SerializeConst, - serde::Serialize, - serde::Deserialize, -)] +#[derive(Debug, Eq, Clone, Copy, SerializeConst, serde::Serialize, serde::Deserialize)] pub struct BundledAsset { /// The absolute path of the asset absolute_source_path: ConstStr, @@ -25,6 +15,41 @@ pub struct BundledAsset { bundled_path: ConstStr, /// The options for the asset options: AssetOptions, + /// The link section the wasm asset is bundled at + link_section: ConstStr, +} + +impl PartialEq for BundledAsset { + fn eq(&self, other: &Self) -> bool { + self.absolute_source_path == other.absolute_source_path + && self.bundled_path == other.bundled_path + && self.options == other.options + } +} + +impl PartialOrd for BundledAsset { + fn partial_cmp(&self, other: &Self) -> Option { + match self + .absolute_source_path + .partial_cmp(&other.absolute_source_path) + { + Some(core::cmp::Ordering::Equal) => {} + ord => return ord, + } + match self.bundled_path.partial_cmp(&other.bundled_path) { + Some(core::cmp::Ordering::Equal) => {} + ord => return ord, + } + self.options.partial_cmp(&other.options) + } +} + +impl Hash for BundledAsset { + fn hash(&self, state: &mut H) { + self.absolute_source_path.hash(state); + self.bundled_path.hash(state); + self.options.hash(state); + } } impl BundledAsset { @@ -32,14 +57,16 @@ impl BundledAsset { /// This should only be called from the macro /// Create a new asset pub const fn new( - absolute_source_path: &'static str, - 
bundled_path: &'static str, + absolute_source_path: &str, + bundled_path: &str, options: AssetOptions, + link_section: &str, ) -> Self { Self { absolute_source_path: ConstStr::new(absolute_source_path), bundled_path: ConstStr::new(bundled_path), options, + link_section: ConstStr::new(link_section), } } @@ -55,8 +82,9 @@ impl BundledAsset { absolute_source_path: &'static str, bundled_path: &'static str, options: AssetOptions, + link_section: &'static str, ) -> Self { - Self::new(absolute_source_path, bundled_path, options) + Self::new(absolute_source_path, bundled_path, options, link_section) } #[doc(hidden)] @@ -66,11 +94,13 @@ impl BundledAsset { absolute_source_path: ConstStr, bundled_path: ConstStr, options: AssetOptions, + link_section: ConstStr, ) -> Self { Self { absolute_source_path, bundled_path, options, + link_section, } } @@ -83,10 +113,16 @@ impl BundledAsset { pub fn absolute_source_path(&self) -> &str { self.absolute_source_path.as_str() } + /// Get the options for the asset pub const fn options(&self) -> &AssetOptions { &self.options } + + /// Get the link section the asset is bundled at + pub fn link_section(&self) -> &str { + self.link_section.as_str() + } } /// A bundled asset with some options. The asset can be used in rsx! to reference the asset. @@ -101,28 +137,45 @@ impl BundledAsset { /// img { src: ASSET } /// }; /// ``` -#[derive(Debug, PartialEq, Clone, Copy)] +#[derive(PartialEq, Clone, Copy)] pub struct Asset { - /// The bundled asset - bundled: BundledAsset, - /// The link section for the asset - keep_link_section: fn() -> u8, + /// A pointer to the bundled asset. 
This will be resolved after the linker has run and + /// put into the lazy asset + bundled: *const [u8], +} + +impl Debug for Asset { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.resolve().fmt(f) + } } +unsafe impl Send for Asset {} +unsafe impl Sync for Asset {} + impl Asset { #[doc(hidden)] /// This should only be called from the macro /// Create a new asset from the bundled form of the asset and the link section - pub const fn new(bundled: BundledAsset, keep_link_section: fn() -> u8) -> Self { - Self { - bundled, - keep_link_section, - } + pub const fn new(bundled: *const [u8]) -> Self { + Self { bundled } } /// Get the bundled asset - pub const fn bundled(&self) -> &BundledAsset { - &self.bundled + pub fn bundled(&self) -> BundledAsset { + let len = self.bundled.len(); + let ptr = self.bundled as *const u8; + if ptr.is_null() { + panic!("Tried to use an asset that was not bundled. Make sure you are compiling dx as the linker"); + } + let mut bytes = ConstVec::new(); + for byte in 0..len { + // SAFETY: We checked that the pointer was not null above. The pointer is valid for reads and + // since we are reading a u8 there are no alignment requirements + bytes = bytes.push(unsafe { std::ptr::read_volatile(ptr.add(byte)) }); + } + let read = bytes.read(); + deserialize_const!(BundledAsset, read).expect("Failed to deserialize asset. Make sure you built with the matching version of the Dioxus CLI").1 } /// Return a canonicalized path to the asset @@ -130,13 +183,10 @@ impl Asset { /// Attempts to resolve it against an `assets` folder in the current directory. 
/// If that doesn't exist, it will resolve against the cargo manifest dir pub fn resolve(&self) -> PathBuf { - // Force a volatile read of the asset link section to ensure the symbol makes it into the binary - (self.keep_link_section)(); - #[cfg(feature = "dioxus")] // If the asset is relative, we resolve the asset at the current directory if !dioxus_core_types::is_bundled_app() { - return PathBuf::from(self.bundled.absolute_source_path.as_str()); + return PathBuf::from(self.bundled().absolute_source_path.as_str()); } #[cfg(feature = "dioxus")] @@ -156,7 +206,7 @@ impl Asset { // Otherwise presumably we're bundled and we can use the bundled path bundle_root.join(PathBuf::from( - self.bundled.bundled_path.as_str().trim_start_matches('/'), + self.bundled().bundled_path.as_str().trim_start_matches('/'), )) } } diff --git a/packages/manganis/manganis-core/src/css.rs b/packages/manganis/manganis-core/src/css.rs index 992da7a5dd..bc2dca03e3 100644 --- a/packages/manganis/manganis-core/src/css.rs +++ b/packages/manganis/manganis-core/src/css.rs @@ -5,6 +5,7 @@ use std::collections::HashSet; /// Options for a css asset #[derive( Debug, + Eq, PartialEq, PartialOrd, Clone, @@ -79,6 +80,7 @@ impl CssAssetOptions { /// Options for a css module asset #[derive( Debug, + Eq, PartialEq, PartialOrd, Clone, diff --git a/packages/manganis/manganis-core/src/folder.rs b/packages/manganis/manganis-core/src/folder.rs index 06127cf093..e1f62b8468 100644 --- a/packages/manganis/manganis-core/src/folder.rs +++ b/packages/manganis/manganis-core/src/folder.rs @@ -5,6 +5,7 @@ use crate::AssetOptions; /// The builder for [`FolderAsset`] #[derive( Debug, + Eq, PartialEq, PartialOrd, Clone, diff --git a/packages/manganis/manganis-core/src/hash.rs b/packages/manganis/manganis-core/src/hash.rs deleted file mode 100644 index 3609d3d220..0000000000 --- a/packages/manganis/manganis-core/src/hash.rs +++ /dev/null @@ -1,94 +0,0 @@ -//! Utilities for creating hashed paths to assets in Manganis. 
This module defines [`AssetHash`] which is used to create a hashed path to an asset in both the CLI and the macro. - -use std::{ - error::Error, - hash::{Hash, Hasher}, - io::Read, - path::{Path, PathBuf}, -}; - -/// An error that can occur when hashing an asset -#[derive(Debug)] -#[non_exhaustive] -pub enum AssetHashError { - /// An io error occurred - IoError { err: std::io::Error, path: PathBuf }, -} - -impl std::fmt::Display for AssetHashError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - AssetHashError::IoError { path, err } => { - write!(f, "Failed to read file: {}; {}", path.display(), err) - } - } - } -} - -impl Error for AssetHashError {} - -/// The opaque hash type manganis uses to identify assets. Each time an asset or asset options change, this hash will -/// change. This hash is included in the URL of the bundled asset for cache busting. -pub struct AssetHash { - /// We use a wrapper type here to hide the exact size of the hash so we can switch to a sha hash in a minor version bump - hash: [u8; 8], -} - -impl AssetHash { - /// Create a new asset hash - const fn new(hash: u64) -> Self { - Self { - hash: hash.to_le_bytes(), - } - } - - /// Get the hash bytes - pub const fn bytes(&self) -> &[u8] { - &self.hash - } - - /// Create a new asset hash for a file. 
The input file to this function should be fully resolved - pub fn hash_file_contents(file_path: &Path) -> Result { - // Create a hasher - let mut hash = std::collections::hash_map::DefaultHasher::new(); - - // If this is a folder, hash the folder contents - if file_path.is_dir() { - let files = std::fs::read_dir(file_path).map_err(|err| AssetHashError::IoError { - err, - path: file_path.to_path_buf(), - })?; - for file in files.flatten() { - let path = file.path(); - Self::hash_file_contents(&path)?.bytes().hash(&mut hash); - } - let hash = hash.finish(); - return Ok(AssetHash::new(hash)); - } - - // Otherwise, open the file to get its contents - let mut file = std::fs::File::open(file_path).map_err(|err| AssetHashError::IoError { - err, - path: file_path.to_path_buf(), - })?; - - // We add a hash to the end of the file so it is invalidated when the bundled version of the file changes - // The hash includes the file contents, the options, and the version of manganis. From the macro, we just - // know the file contents, so we only include that hash - let mut buffer = [0; 8192]; - loop { - let read = file - .read(&mut buffer) - .map_err(|err| AssetHashError::IoError { - err, - path: file_path.to_path_buf(), - })?; - if read == 0 { - break; - } - hash.write(&buffer[..read]); - } - - Ok(AssetHash::new(hash.finish())) - } -} diff --git a/packages/manganis/manganis-core/src/images.rs b/packages/manganis/manganis-core/src/images.rs index a51b858661..36d9d30d0b 100644 --- a/packages/manganis/manganis-core/src/images.rs +++ b/packages/manganis/manganis-core/src/images.rs @@ -5,6 +5,7 @@ use crate::AssetOptions; /// The type of an image. 
You can read more about the tradeoffs between image formats [here](https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Image_types) #[derive( Debug, + Eq, PartialEq, PartialOrd, Clone, @@ -31,6 +32,7 @@ pub enum ImageFormat { /// The size of an image asset #[derive( Debug, + Eq, PartialEq, PartialOrd, Clone, @@ -56,6 +58,7 @@ pub enum ImageSize { /// Options for an image asset #[derive( Debug, + Eq, PartialEq, PartialOrd, Clone, diff --git a/packages/manganis/manganis-core/src/js.rs b/packages/manganis/manganis-core/src/js.rs index 8c284ba448..3bb2d1d37d 100644 --- a/packages/manganis/manganis-core/src/js.rs +++ b/packages/manganis/manganis-core/src/js.rs @@ -5,6 +5,7 @@ use crate::AssetOptions; /// Options for a javascript asset #[derive( Debug, + Eq, PartialEq, PartialOrd, Clone, diff --git a/packages/manganis/manganis-core/src/lib.rs b/packages/manganis/manganis-core/src/lib.rs index 52e6ac80b2..b3dddf836b 100644 --- a/packages/manganis/manganis-core/src/lib.rs +++ b/packages/manganis/manganis-core/src/lib.rs @@ -17,5 +17,3 @@ mod asset; pub use asset::*; pub mod linker; - -pub mod hash; diff --git a/packages/manganis/manganis-core/src/options.rs b/packages/manganis/manganis-core/src/options.rs index 8e600448d4..e0c024cbc4 100644 --- a/packages/manganis/manganis-core/src/options.rs +++ b/packages/manganis/manganis-core/src/options.rs @@ -7,6 +7,7 @@ use crate::{ /// Settings for a generic asset #[derive( Debug, + Eq, PartialEq, PartialOrd, Clone, diff --git a/packages/manganis/manganis-macro/src/asset.rs b/packages/manganis/manganis-macro/src/asset.rs index 4fa894b825..634b5d528b 100644 --- a/packages/manganis/manganis-macro/src/asset.rs +++ b/packages/manganis/manganis-macro/src/asset.rs @@ -1,9 +1,11 @@ use crate::{resolve_path, AssetParseError}; use macro_string::MacroString; -use manganis_core::hash::AssetHash; use proc_macro2::TokenStream as TokenStream2; use quote::{quote, ToTokens, TokenStreamExt}; -use std::path::PathBuf; +use std::{ + 
hash::{DefaultHasher, Hash, Hasher}, + path::PathBuf, +}; use syn::{ parse::{Parse, ParseStream}, spanned::Spanned as _, @@ -72,24 +74,19 @@ impl ToTokens for AssetParser { return; } }; - let asset_str = asset.to_string_lossy(); - let mut asset_str = proc_macro2::Literal::string(&asset_str); + let asset_string = asset.to_string_lossy(); + let mut asset_str = proc_macro2::Literal::string(&asset_string); asset_str.set_span(self.path_expr.span()); - let hash = match AssetHash::hash_file_contents(asset) { - Ok(hash) => hash, - Err(err) => { - let err = err.to_string(); - tokens.append_all(quote! { compile_error!(#err) }); - return; - } - }; - - let hash = hash.bytes(); + let mut hash = DefaultHasher::new(); + format!("{:?}", self.options.span()).hash(&mut hash); + format!("{:?}", self.options.to_string()).hash(&mut hash); + asset_string.hash(&mut hash); + let asset_hash = format!("__MANGANIS_ASSET_{:016x}", hash.finish()); // Generate the link section for the asset // The link section includes the source path and the output path of the asset - let link_section = crate::generate_link_section(quote!(__ASSET)); + let link_section = crate::generate_link_section(quote!(__ASSET), &asset_hash); // generate the asset::new method to deprecate the `./assets/blah.css` syntax let constructor = if asset.is_relative() { @@ -106,21 +103,28 @@ impl ToTokens for AssetParser { tokens.extend(quote! { { - // We keep a hash of the contents of the asset for cache busting - const __ASSET_HASH: &[u8] = &[#(#hash),*]; // The source is used by the CLI to copy the asset const __ASSET_SOURCE_PATH: &'static str = #asset_str; // The options give the CLI info about how to process the asset // Note: into_asset_options is not a trait, so we cannot accept the options directly // in the constructor. 
Stable rust doesn't have support for constant functions in traits const __ASSET_OPTIONS: manganis::AssetOptions = #options.into_asset_options(); + // The input token hash is used to uniquely identify the link section for this asset + const __ASSET_HASH: &'static str = #asset_hash; // Create the asset that the crate will use. This is used both in the return value and // added to the linker for the bundler to copy later - const __ASSET: manganis::BundledAsset = manganis::macro_helpers::#constructor(__ASSET_SOURCE_PATH, __ASSET_HASH, __ASSET_OPTIONS); + const __ASSET: manganis::BundledAsset = manganis::macro_helpers::#constructor(__ASSET_SOURCE_PATH, __ASSET_OPTIONS, __ASSET_HASH); #link_section - manganis::Asset::new(__ASSET, __keep_link_section) + #[cfg(target_arch = "wasm32")] + static __REFERENCE_TO_LINK_SECTION: &'static [u8] = unsafe { &__WASM_LINK_SECTION }; + #[cfg(not(target_arch = "wasm32"))] + static __REFERENCE_TO_LINK_SECTION: &'static [u8] = &__LINK_SECTION; + + manganis::Asset::new( + __REFERENCE_TO_LINK_SECTION as *const [u8] + ) } }) } diff --git a/packages/manganis/manganis-macro/src/linker.rs b/packages/manganis/manganis-macro/src/linker.rs index 42fad42011..27cbcf0e8b 100644 --- a/packages/manganis/manganis-macro/src/linker.rs +++ b/packages/manganis/manganis-macro/src/linker.rs @@ -6,7 +6,7 @@ use quote::ToTokens; /// We force rust to store a serialized representation of the asset description /// inside a particular region of the binary, with the label "manganis". /// After linking, the "manganis" sections of the different object files will be merged. 
-pub fn generate_link_section(asset: impl ToTokens) -> TokenStream2 { +pub fn generate_link_section(asset: impl ToTokens, asset_hash: &str) -> TokenStream2 { let position = proc_macro2::Span::call_site(); let section_name = syn::LitStr::new( manganis_core::linker::LinkSection::CURRENT.link_section, @@ -23,11 +23,13 @@ pub fn generate_link_section(asset: impl ToTokens) -> TokenStream2 { // Now that we have the size of the asset, copy the bytes into a static array #[link_section = #section_name] - #[used] + #[cfg_attr(target_arch = "wasm32", used)] static __LINK_SECTION: [u8; __LEN] = manganis::macro_helpers::copy_bytes(__BYTES); - fn __keep_link_section() -> u8 { - unsafe { std::ptr::read_volatile(__LINK_SECTION.as_ptr()) } + #[cfg(target_arch = "wasm32")] + extern "C" { + #[link_name = #asset_hash] + static __WASM_LINK_SECTION: [u8; __LEN]; } } } diff --git a/packages/manganis/manganis/src/hash.rs b/packages/manganis/manganis/src/hash.rs deleted file mode 100644 index 98dbb9252e..0000000000 --- a/packages/manganis/manganis/src/hash.rs +++ /dev/null @@ -1,39 +0,0 @@ -use const_serialize::{serialize_const, ConstVec, SerializeConst}; - -// From rustchash - https://github.com/rust-lang/rustc-hash/blob/6745258da00b7251bed4a8461871522d0231a9c7/src/lib.rs#L98 -const K: u64 = 0xf1357aea2e62a9c5; - -pub(crate) struct ConstHasher { - hash: u64, -} - -impl ConstHasher { - pub const fn new() -> Self { - Self { hash: 0 } - } - - pub const fn finish(&self) -> u64 { - self.hash - } - - pub const fn write(mut self, bytes: &[u8]) -> Self { - let mut i = 0; - while i < bytes.len() { - self = self.write_byte(bytes[i]); - i += 1; - } - self - } - - pub const fn write_byte(mut self, byte: u8) -> Self { - self.hash = self.hash.wrapping_add(byte as u64).wrapping_mul(K); - self - } - - pub const fn hash_by_bytes(self, item: &T) -> Self { - let mut bytes = ConstVec::new(); - bytes = serialize_const(item, bytes); - let bytes = bytes.as_ref(); - self.write(bytes) - } -} diff --git 
a/packages/manganis/manganis/src/lib.rs b/packages/manganis/manganis/src/lib.rs index 9e26177c5e..4cd01cce59 100644 --- a/packages/manganis/manganis/src/lib.rs +++ b/packages/manganis/manganis/src/lib.rs @@ -1,7 +1,6 @@ #![doc = include_str!("../README.md")] #![deny(missing_docs)] -mod hash; #[doc(hidden)] pub mod macro_helpers; pub use manganis_macro::{asset, css_module}; diff --git a/packages/manganis/manganis/src/macro_helpers.rs b/packages/manganis/manganis/src/macro_helpers.rs index a16d099ada..0745fa9eb3 100644 --- a/packages/manganis/manganis/src/macro_helpers.rs +++ b/packages/manganis/manganis/src/macro_helpers.rs @@ -2,16 +2,21 @@ pub use const_serialize; use const_serialize::{serialize_const, ConstStr, ConstVec}; use manganis_core::{AssetOptions, BundledAsset}; -use crate::hash::ConstHasher; +const PLACEHOLDER_HASH: ConstStr = + ConstStr::new("this is a placeholder path which will be replaced by the linker"); /// Create a bundled asset from the input path, the content hash, and the asset options pub const fn create_bundled_asset( input_path: &str, - content_hash: &[u8], asset_config: AssetOptions, + link_section: &str, ) -> BundledAsset { - let hashed_path = generate_unique_path_with_byte_hash(input_path, content_hash, &asset_config); - BundledAsset::new_from_const(ConstStr::new(input_path), hashed_path, asset_config) + BundledAsset::new_from_const( + ConstStr::new(input_path), + PLACEHOLDER_HASH, + asset_config, + ConstStr::new(link_section), + ) } /// Create a bundled asset from the input path, the content hash, and the asset options with a relative asset deprecation warning @@ -22,198 +27,10 @@ pub const fn create_bundled_asset( )] pub const fn create_bundled_asset_relative( input_path: &str, - content_hash: &[u8], asset_config: AssetOptions, + link_section: &str, ) -> BundledAsset { - create_bundled_asset(input_path, content_hash, asset_config) -} - -/// Format the input path with a hash to create an unique output path for the macro in the form 
`{input_path}-{hash}.{extension}` -pub const fn generate_unique_path( - input_path: &str, - content_hash: u64, - asset_config: &AssetOptions, -) -> ConstStr { - let byte_hash = content_hash.to_le_bytes(); - generate_unique_path_with_byte_hash(input_path, &byte_hash, asset_config) -} - -/// Format the input path with a hash to create an unique output path for the macro in the form `{input_path}-{hash}.{extension}` -const fn generate_unique_path_with_byte_hash( - input_path: &str, - content_hash: &[u8], - asset_config: &AssetOptions, -) -> ConstStr { - // Format the unique path with the format `{input_path}-{hash}.{extension}` - // Start with the input path - let mut input_path = ConstStr::new(input_path); - // Then strip the prefix from the input path. The path comes from the build platform, but - // in wasm, we don't know what the path separator is from the build platform. We need to - // split by both unix and windows paths and take the smallest one - let mut extension = None; - match (input_path.rsplit_once('/'), input_path.rsplit_once('\\')) { - (Some((_, unix_new_input_path)), Some((_, windows_new_input_path))) => { - input_path = if unix_new_input_path.len() < windows_new_input_path.len() { - unix_new_input_path - } else { - windows_new_input_path - }; - } - (Some((_, unix_new_input_path)), _) => { - input_path = unix_new_input_path; - } - (_, Some((_, windows_new_input_path))) => { - input_path = windows_new_input_path; - } - _ => {} - } - if let Some((new_input_path, new_extension)) = input_path.rsplit_once('.') { - extension = Some(new_extension); - input_path = new_input_path; - } - // Then add a dash - let mut macro_output_path = input_path.push_str("-"); - - // Hash the contents along with the asset config to create a unique hash for the asset - // When this hash changes, the client needs to re-fetch the asset - let mut hasher = ConstHasher::new(); - hasher = hasher.write(content_hash); - hasher = hasher.hash_by_bytes(asset_config); - let hash = 
hasher.finish(); - - // Then add the hash in hex form - let hash_bytes = hash.to_le_bytes(); - let mut i = 0; - while i < hash_bytes.len() { - let byte = hash_bytes[i]; - let first = byte >> 4; - let second = byte & 0x0f; - const fn byte_to_char(byte: u8) -> char { - match char::from_digit(byte as u32, 16) { - Some(c) => c, - None => panic!("byte must be a valid digit"), - } - } - macro_output_path = macro_output_path.push(byte_to_char(first)); - macro_output_path = macro_output_path.push(byte_to_char(second)); - i += 1; - } - - // Finally add the extension - match asset_config.extension() { - Some(extension) => { - macro_output_path = macro_output_path.push('.'); - macro_output_path = macro_output_path.push_str(extension) - } - None => { - if let Some(extension) = extension { - macro_output_path = macro_output_path.push('.'); - - let ext_bytes = extension.as_str().as_bytes(); - - // Rewrite scss as css - if bytes_equal(ext_bytes, b"scss") || bytes_equal(ext_bytes, b"sass") { - macro_output_path = macro_output_path.push_str("css") - } else { - macro_output_path = macro_output_path.push_str(extension.as_str()) - } - } - } - } - - macro_output_path -} - -/// Construct the hash used by manganis and cli-opt to uniquely identify a asset based on its contents -pub const fn hash_asset(asset_config: &AssetOptions, content_hash: u64) -> ConstStr { - let mut string = ConstStr::new(""); - - // Hash the contents along with the asset config to create a unique hash for the asset - // When this hash changes, the client needs to re-fetch the asset - let mut hasher = ConstHasher::new(); - hasher = hasher.write(&content_hash.to_le_bytes()); - hasher = hasher.hash_by_bytes(asset_config); - let hash = hasher.finish(); - - // Then add the hash in hex form - let hash_bytes = hash.to_le_bytes(); - let mut i = 0; - while i < hash_bytes.len() { - let byte = hash_bytes[i]; - let first = byte >> 4; - let second = byte & 0x0f; - const fn byte_to_char(byte: u8) -> char { - match 
char::from_digit(byte as u32, 16) { - Some(c) => c, - None => panic!("byte must be a valid digit"), - } - } - string = string.push(byte_to_char(first)); - string = string.push(byte_to_char(second)); - i += 1; - } - - string -} - -const fn bytes_equal(left: &[u8], right: &[u8]) -> bool { - if left.len() != right.len() { - return false; - } - - let mut i = 0; - while i < left.len() { - if left[i] != right[i] { - return false; - } - i += 1; - } - - true -} - -#[test] -fn test_unique_path() { - use manganis_core::{ImageAssetOptions, ImageFormat}; - use std::path::PathBuf; - let mut input_path = PathBuf::from("some"); - input_path.push("prefix"); - input_path.push("test.png"); - let content_hash = 123456789; - let asset_config = AssetOptions::Image(ImageAssetOptions::new().with_format(ImageFormat::Avif)); - let output_path = - generate_unique_path(&input_path.to_string_lossy(), content_hash, &asset_config); - assert_eq!(output_path.as_str(), "test-603a88fe296462a3.avif"); - - // Changing the path without changing the contents shouldn't change the hash - let mut input_path = PathBuf::from("some"); - input_path.push("prefix"); - input_path.push("prefix"); - input_path.push("test.png"); - let content_hash = 123456789; - let asset_config = AssetOptions::Image(ImageAssetOptions::new().with_format(ImageFormat::Avif)); - let output_path = - generate_unique_path(&input_path.to_string_lossy(), content_hash, &asset_config); - assert_eq!(output_path.as_str(), "test-603a88fe296462a3.avif"); - - let mut input_path = PathBuf::from("test"); - input_path.push("ing"); - input_path.push("test"); - let content_hash = 123456789; - let asset_config = AssetOptions::Unknown; - let output_path = - generate_unique_path(&input_path.to_string_lossy(), content_hash, &asset_config); - assert_eq!(output_path.as_str(), "test-8d6e32dc0b45f853"); - - // Just changing the content hash should change the total hash - let mut input_path = PathBuf::from("test"); - input_path.push("ing"); - 
input_path.push("test"); - let content_hash = 123456780; - let asset_config = AssetOptions::Unknown; - let output_path = - generate_unique_path(&input_path.to_string_lossy(), content_hash, &asset_config); - assert_eq!(output_path.as_str(), "test-40783366737abc4d"); + create_bundled_asset(input_path, asset_config, link_section) } /// Serialize an asset to a const buffer @@ -222,6 +39,15 @@ pub const fn serialize_asset(asset: &BundledAsset) -> ConstVec { serialize_const(asset, write) } +/// Deserialize a const buffer into a BundledAsset +pub const fn deserialize_asset(bytes: &[u8]) -> BundledAsset { + let bytes = ConstVec::new().extend(bytes); + match const_serialize::deserialize_const!(BundledAsset, bytes.read()) { + Some((_, asset)) => asset, + None => panic!("Failed to deserialize asset. This may be caused by a mismatch between your dioxus and dioxus-cli versions"), + } +} + /// Copy a slice into a constant sized buffer at compile time pub const fn copy_bytes(bytes: &[u8]) -> [u8; N] { let mut out = [0; N]; diff --git a/packages/playwright-tests/cli-optimization.spec.js b/packages/playwright-tests/cli-optimization.spec.js index a4678bff93..50d7ffcbfa 100644 --- a/packages/playwright-tests/cli-optimization.spec.js +++ b/packages/playwright-tests/cli-optimization.spec.js @@ -8,4 +8,19 @@ test("optimized scripts run", async ({ page }) => { // should add an editor to the page that shows a main function const main = page.locator("#main"); await expect(main).toContainText("hi"); + + // Expect the page to contain an image with the id "some_image" + const image = page.locator("#some_image"); + await expect(image).toBeVisible(); + // Get the image src + const src = await image.getAttribute("src"); + + // Expect the page to contain an image with the id "some_image_with_the_same_url" + const image2 = page.locator("#some_image_with_the_same_url"); + await expect(image2).toBeVisible(); + // Get the image src + const src2 = await image2.getAttribute("src"); + + // Expect the 
urls to be different + expect(src).not.toEqual(src2); }); diff --git a/packages/playwright-tests/cli-optimization/src/main.rs b/packages/playwright-tests/cli-optimization/src/main.rs index a216497178..d0301434ba 100644 --- a/packages/playwright-tests/cli-optimization/src/main.rs +++ b/packages/playwright-tests/cli-optimization/src/main.rs @@ -4,6 +4,8 @@ use dioxus::prelude::*; const MONACO_FOLDER: Asset = asset!("/monaco-editor-0.52.2/package/min/vs"); const SOME_IMAGE: Asset = asset!("/images/toasts.png", ImageAssetOptions::new().with_avif()); +const SOME_IMAGE_WITH_THE_SAME_URL: Asset = + asset!("/images/toasts.png", ImageAssetOptions::new().with_jpg()); fn main() { dioxus::launch(App); @@ -32,7 +34,12 @@ fn App() -> Element { "onload": script } img { + id: "some_image", src: "{SOME_IMAGE}" } + img { + id: "some_image_with_the_same_url", + src: "{SOME_IMAGE_WITH_THE_SAME_URL}" + } } } diff --git a/packages/playwright-tests/nested-suspense-ssg.spec.js b/packages/playwright-tests/nested-suspense-ssg.spec.js index 4ae98d6d7e..62cbb98883 100644 --- a/packages/playwright-tests/nested-suspense-ssg.spec.js +++ b/packages/playwright-tests/nested-suspense-ssg.spec.js @@ -1,50 +1,51 @@ -// @ts-check -const { test, expect } = require("@playwright/test"); +// TODO: ssg is disabled in the CLI server +// // @ts-check +// const { test, expect } = require("@playwright/test"); -test("nested suspense resolves", async ({ page }) => { - // Wait for the dev server to reload - await page.goto("http://localhost:5050"); - // Then wait for the page to start loading - await page.goto("http://localhost:5050", { waitUntil: "commit" }); +// test("nested suspense resolves", async ({ page }) => { +// // Wait for the dev server to reload +// await page.goto("http://localhost:6060"); +// // Then wait for the page to start loading +// await page.goto("http://localhost:6060", { waitUntil: "commit" }); - // Expect the page to contain the suspense result from the server - const mainMessageTitle = 
page.locator("#title-0"); - await expect(mainMessageTitle).toContainText("The robot says hello world"); - const mainMessageBody = page.locator("#body-0"); - await expect(mainMessageBody).toContainText( - "The robot becomes sentient and says hello world" - ); +// // Expect the page to contain the suspense result from the server +// const mainMessageTitle = page.locator("#title-0"); +// await expect(mainMessageTitle).toContainText("The robot says hello world"); +// const mainMessageBody = page.locator("#body-0"); +// await expect(mainMessageBody).toContainText( +// "The robot becomes sentient and says hello world" +// ); - // And expect the title to have resolved on the client - await expect(page).toHaveTitle("The robot says hello world"); +// // And expect the title to have resolved on the client +// await expect(page).toHaveTitle("The robot says hello world"); - // Nested suspense should be resolved - const nestedMessageTitle1 = page.locator("#title-1"); - await expect(nestedMessageTitle1).toContainText("The world says hello back"); - const nestedMessageBody1 = page.locator("#body-1"); - await expect(nestedMessageBody1).toContainText( - "In a stunning turn of events, the world collectively unites and says hello back" - ); +// // Nested suspense should be resolved +// const nestedMessageTitle1 = page.locator("#title-1"); +// await expect(nestedMessageTitle1).toContainText("The world says hello back"); +// const nestedMessageBody1 = page.locator("#body-1"); +// await expect(nestedMessageBody1).toContainText( +// "In a stunning turn of events, the world collectively unites and says hello back" +// ); - const nestedMessageDiv2 = page.locator("#children-2"); - await expect(nestedMessageDiv2).toBeEmpty(); - const nestedMessageTitle2 = page.locator("#title-2"); - await expect(nestedMessageTitle2).toContainText("Goodbye Robot"); - const nestedMessageBody2 = page.locator("#body-2"); - await expect(nestedMessageBody2).toContainText("The robot says goodbye"); +// const 
nestedMessageDiv2 = page.locator("#children-2"); +// await expect(nestedMessageDiv2).toBeEmpty(); +// const nestedMessageTitle2 = page.locator("#title-2"); +// await expect(nestedMessageTitle2).toContainText("Goodbye Robot"); +// const nestedMessageBody2 = page.locator("#body-2"); +// await expect(nestedMessageBody2).toContainText("The robot says goodbye"); - const nestedMessageDiv3 = page.locator("#children-3"); - await expect(nestedMessageDiv3).toBeEmpty(); - const nestedMessageTitle3 = page.locator("#title-3"); - await expect(nestedMessageTitle3).toContainText("Goodbye World"); - const nestedMessageBody3 = page.locator("#body-3"); - await expect(nestedMessageBody3).toContainText("The world says goodbye"); +// const nestedMessageDiv3 = page.locator("#children-3"); +// await expect(nestedMessageDiv3).toBeEmpty(); +// const nestedMessageTitle3 = page.locator("#title-3"); +// await expect(nestedMessageTitle3).toContainText("Goodbye World"); +// const nestedMessageBody3 = page.locator("#body-3"); +// await expect(nestedMessageBody3).toContainText("The world says goodbye"); - // Deeply nested suspense should be resolved - const nestedMessageDiv4 = page.locator("#children-4"); - await expect(nestedMessageDiv4).toBeEmpty(); - const nestedMessageTitle4 = page.locator("#title-4"); - await expect(nestedMessageTitle4).toContainText("Hello World"); - const nestedMessageBody4 = page.locator("#body-4"); - await expect(nestedMessageBody4).toContainText("The world says hello again"); -}); +// // Deeply nested suspense should be resolved +// const nestedMessageDiv4 = page.locator("#children-4"); +// await expect(nestedMessageDiv4).toBeEmpty(); +// const nestedMessageTitle4 = page.locator("#title-4"); +// await expect(nestedMessageTitle4).toContainText("Hello World"); +// const nestedMessageBody4 = page.locator("#body-4"); +// await expect(nestedMessageBody4).toContainText("The world says hello again"); +// }); diff --git a/packages/playwright-tests/playwright.config.js 
b/packages/playwright-tests/playwright.config.js index 6dfeeb7b33..302a05e678 100644 --- a/packages/playwright-tests/playwright.config.js +++ b/packages/playwright-tests/playwright.config.js @@ -87,8 +87,8 @@ module.exports = defineConfig({ { cwd: path.join(process.cwd(), "web"), command: - 'cargo run --package dioxus-cli --release -- serve --force-sequential --platform web --addr "127.0.0.1" --port 9999', - port: 9999, + 'cargo run --package dioxus-cli --release -- serve --force-sequential --platform web --addr "127.0.0.1" --port 9990', + port: 9990, timeout: 50 * 60 * 1000, reuseExistingServer: !process.env.CI, stdout: "pipe", diff --git a/packages/playwright-tests/wasm-split-harness/.cargo/config.toml b/packages/playwright-tests/wasm-split-harness/.cargo/config.toml new file mode 100644 index 0000000000..f7f69a6126 --- /dev/null +++ b/packages/playwright-tests/wasm-split-harness/.cargo/config.toml @@ -0,0 +1,3 @@ +# It's recommended to set the flag on a per-target basis: +[target.wasm32-unknown-unknown] +rustflags = ['--cfg', 'getrandom_backend="wasm_js"'] diff --git a/packages/playwright-tests/web.spec.js b/packages/playwright-tests/web.spec.js index 68617e42ca..f46f91a576 100644 --- a/packages/playwright-tests/web.spec.js +++ b/packages/playwright-tests/web.spec.js @@ -2,7 +2,7 @@ const { test, expect, defineConfig } = require("@playwright/test"); test("button click", async ({ page }) => { - await page.goto("http://localhost:9999"); + await page.goto("http://localhost:9990"); // Expect the page to contain the counter text. const main = page.locator("#main"); @@ -21,7 +21,7 @@ test("button click", async ({ page }) => { }); test("svg", async ({ page }) => { - await page.goto("http://localhost:9999"); + await page.goto("http://localhost:9990"); // Expect the page to contain the svg. 
const svg = page.locator("svg"); @@ -36,7 +36,7 @@ test("svg", async ({ page }) => { }); test("raw attribute", async ({ page }) => { - await page.goto("http://localhost:9999"); + await page.goto("http://localhost:9990"); // Expect the page to contain the div with the raw attribute. const div = page.locator("div.raw-attribute-div"); @@ -44,7 +44,7 @@ test("raw attribute", async ({ page }) => { }); test("hidden attribute", async ({ page }) => { - await page.goto("http://localhost:9999"); + await page.goto("http://localhost:9990"); // Expect the page to contain the div with the hidden attribute. const div = page.locator("div.hidden-attribute-div"); @@ -52,7 +52,7 @@ test("hidden attribute", async ({ page }) => { }); test("dangerous inner html", async ({ page }) => { - await page.goto("http://localhost:9999"); + await page.goto("http://localhost:9990"); // Expect the page to contain the div with the dangerous inner html. const div = page.locator("div.dangerous-inner-html-div"); @@ -60,7 +60,7 @@ test("dangerous inner html", async ({ page }) => { }); test("input value", async ({ page }) => { - await page.goto("http://localhost:9999"); + await page.goto("http://localhost:9990"); // Expect the page to contain the input with the value. const input = page.locator("input"); @@ -68,7 +68,7 @@ test("input value", async ({ page }) => { }); test("style", async ({ page }) => { - await page.goto("http://localhost:9999"); + await page.goto("http://localhost:9990"); // Expect the page to contain the div with the style. const div = page.locator("div.style-div"); @@ -77,7 +77,7 @@ test("style", async ({ page }) => { }); test("eval", async ({ page }) => { - await page.goto("http://localhost:9999"); + await page.goto("http://localhost:9990"); // Expect the page to contain the div with the eval and have no text. 
const div = page.locator("div.eval-result"); @@ -95,7 +95,7 @@ test("eval", async ({ page }) => { }); test("prevent default", async ({ page }) => { - await page.goto("http://localhost:9999"); + await page.goto("http://localhost:9990"); // Expect the page to contain the div with the eval and have no text. const a = page.locator("a.prevent-default"); @@ -109,7 +109,7 @@ test("prevent default", async ({ page }) => { }); test("onmounted", async ({ page }) => { - await page.goto("http://localhost:9999"); + await page.goto("http://localhost:9990"); // Expect the onmounted event to be called exactly once. const mountedDiv = page.locator("div.onmounted-div"); @@ -117,7 +117,7 @@ test("onmounted", async ({ page }) => { }); test("web-sys closure", async ({ page }) => { - await page.goto("http://localhost:9999"); + await page.goto("http://localhost:9990"); // wait until the div is mounted const scrollDiv = page.locator("div#web-sys-closure-div"); await scrollDiv.waitFor({ state: "attached" }); @@ -126,7 +126,7 @@ test("web-sys closure", async ({ page }) => { }); test("document elements", async ({ page }) => { - await page.goto("http://localhost:9999"); + await page.goto("http://localhost:9990"); // wait until the meta element is mounted const meta = page.locator("meta#meta-head[name='testing']"); await meta.waitFor({ state: "attached" }); @@ -157,7 +157,7 @@ test("document elements", async ({ page }) => { }); test("merge styles", async ({ page }) => { - await page.goto("http://localhost:9999"); + await page.goto("http://localhost:9990"); // wait until the div is mounted const div = page.locator("div#merge-styles-div"); await div.waitFor({ state: "attached" }); @@ -167,7 +167,7 @@ test("merge styles", async ({ page }) => { }); test("select multiple", async ({ page }) => { - await page.goto("http://localhost:9999"); + await page.goto("http://localhost:9990"); // wait until the select element is mounted const staticSelect = page.locator("select#static-multiple-select"); await 
staticSelect.waitFor({ state: "attached" });