
Move asset hashing into the CLI (#3988)

* Fix deduping assets

* hash assets in the linker

* fix extracting assets

* Fix the hashed asset name

* deserialize the asset back from the link section

* prevent the read from being inlined

* remove some extra logs

* Fix liveview routing

* Only look for direct dependencies on dioxus features

* fix wasm split test

* On wasm, create a new object file with the asset information

* Fix multiple assets

* add a regression test for 3467

* fix image link

* fix linker out file

* fix linux linker args

* add windows linker command

* fix linux link args

* fix windows target triple

* Expose an option to build assets in the linker intercept

* Fix the standalone optimizer and document it in the dx help command

* include the cli opt version in the hash

* fix clippy

* fix ssg port

* add a safety note to the read_volatile call

* move found assets log

* fix some merge conflicts

* fix build

* make most env var for the linker optional

* guess the target triple when dx acts as the linker intercept

* automatically find the linker for normal builds

* use the right linker in wasm mode

* fix mac linker

* look inside archive files (rlibs)

* remove out.txt

* fix clippy

* fix typo

* fix clippy

* fix build

* fix dx link name in docs

* ignore libcompiler_builtins warnings

* fix playwright

* pass typocheck

* fix printing sysroot

* Modify final binary

* fix wasm assets

* remove old link section logic

* fix tests

* forward manganis export args

* fail to evaluate relocatable address

* retain all exports in the same pattern as other platforms

* Fix adding assets during a wasm hot patch

* less nesting and remove logs

* fix clippy

* log pdb files

* read from pdb

* print data section contents

* print with object

* normal windows builds working

* fix hot patch windows

* clean up logs

* fix clippy

* clean up assets

* use the most recent pdb

* fix doc test

* fix typo

* tiny nits

* make bundled a reference instead of a pointer

* add helpful comment to impl

* match on file format

* Fix asset hashing comment

---------

Co-authored-by: Jonathan Kelley <jkelleyrtp@gmail.com>
Evan Almloff · 3 weeks ago · commit 58669f892d
45 changed files with 1171 additions and 759 deletions
  1. Cargo.lock (+7 -6)
  2. packages/asset-resolver/src/lib.rs (+1 -1)
  3. packages/cli-opt/Cargo.toml (+3 -0)
  4. packages/cli-opt/build.rs (+3 -0)
  5. packages/cli-opt/src/build_info.rs (+10 -0)
  6. packages/cli-opt/src/css.rs (+32 -6)
  7. packages/cli-opt/src/file.rs (+75 -43)
  8. packages/cli-opt/src/hash.rs (+162 -0)
  9. packages/cli-opt/src/image/mod.rs (+23 -12)
  10. packages/cli-opt/src/js.rs (+62 -15)
  11. packages/cli-opt/src/lib.rs (+63 -77)
  12. packages/cli/Cargo.toml (+8 -3)
  13. packages/cli/src/build/assets.rs (+354 -0)
  14. packages/cli/src/build/builder.rs (+37 -36)
  15. packages/cli/src/build/mod.rs (+2 -0)
  16. packages/cli/src/build/request.rs (+21 -14)
  17. packages/cli/src/cli/build_assets.rs (+5 -9)
  18. packages/cli/src/error.rs (+3 -0)
  19. packages/cli/src/serve/output.rs (+1 -1)
  20. packages/cli/src/serve/runner.rs (+4 -2)
  21. packages/const-serialize/src/const_buffers.rs (+5 -0)
  22. packages/const-serialize/src/const_vec.rs (+14 -0)
  23. packages/const-serialize/src/lib.rs (+2 -2)
  24. packages/const-serialize/tests/enum.rs (+61 -0)
  25. packages/core/src/scope_context.rs (+1 -1)
  26. packages/manganis/manganis-core/src/asset.rs (+75 -33)
  27. packages/manganis/manganis-core/src/css.rs (+2 -0)
  28. packages/manganis/manganis-core/src/folder.rs (+1 -0)
  29. packages/manganis/manganis-core/src/hash.rs (+0 -94)
  30. packages/manganis/manganis-core/src/images.rs (+3 -0)
  31. packages/manganis/manganis-core/src/js.rs (+1 -0)
  32. packages/manganis/manganis-core/src/lib.rs (+0 -4)
  33. packages/manganis/manganis-core/src/linker.rs (+0 -71)
  34. packages/manganis/manganis-core/src/options.rs (+1 -0)
  35. packages/manganis/manganis-macro/src/asset.rs (+20 -19)
  36. packages/manganis/manganis-macro/src/linker.rs (+4 -12)
  37. packages/manganis/manganis/src/hash.rs (+0 -39)
  38. packages/manganis/manganis/src/lib.rs (+0 -1)
  39. packages/manganis/manganis/src/macro_helpers.rs (+20 -199)
  40. packages/playwright-tests/cli-optimization.spec.js (+15 -0)
  41. packages/playwright-tests/cli-optimization/src/main.rs (+7 -0)
  42. packages/playwright-tests/nested-suspense-ssg.spec.js (+44 -43)
  43. packages/playwright-tests/playwright.config.js (+2 -2)
  44. packages/playwright-tests/wasm-split-harness/.cargo/config.toml (+3 -0)
  45. packages/playwright-tests/web.spec.js (+14 -14)

+ 7 - 6
Cargo.lock

@@ -3811,7 +3811,7 @@ dependencies = [
  "walkdir",
  "walrus",
  "wasm-bindgen-externref-xform",
- "wasm-encoder 0.228.0",
+ "wasm-encoder 0.229.0",
  "wasm-opt",
  "wasm-split-cli",
  "wasmparser 0.226.0",
@@ -3831,6 +3831,7 @@ version = "0.7.0-alpha.0"
 dependencies = [
  "anyhow",
  "browserslist-rs 0.16.0",
+ "built",
  "codemap",
  "const-serialize",
  "grass",
@@ -15940,12 +15941,12 @@ dependencies = [
 
 [[package]]
 name = "wasm-encoder"
-version = "0.228.0"
+version = "0.229.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "05d30290541f2d4242a162bbda76b8f2d8b1ac59eab3568ed6f2327d52c9b2c4"
+checksum = "38ba1d491ecacb085a2552025c10a675a6fddcbd03b1fc9b36c536010ce265d2"
 dependencies = [
  "leb128fmt",
- "wasmparser 0.228.0",
+ "wasmparser 0.229.0",
 ]
 
 [[package]]
@@ -16101,9 +16102,9 @@ dependencies = [
 
 [[package]]
 name = "wasmparser"
-version = "0.228.0"
+version = "0.229.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4abf1132c1fdf747d56bbc1bb52152400c70f336870f968b85e89ea422198ae3"
+checksum = "0cc3b1f053f5d41aa55640a1fa9b6d1b8a9e4418d118ce308d20e24ff3575a8c"
 dependencies = [
  "bitflags 2.9.1",
  "indexmap 2.9.0",

+ 1 - 1
packages/asset-resolver/src/lib.rs

@@ -72,7 +72,7 @@ pub fn serve_asset_from_raw_path(path: &str) -> Result<Response<Vec<u8>>, AssetS
 /// - [ ] Linux (rpm)
 /// - [ ] Linux (deb)
 /// - [ ] Android
-#[allow(unused)]
+#[allow(unreachable_code)]
 fn get_asset_root() -> PathBuf {
     let cur_exe = std::env::current_exe().unwrap();
 

+ 3 - 0
packages/cli-opt/Cargo.toml

@@ -67,3 +67,6 @@ swc_parallel = { version = "=1.0.1", default-features = false }
 swc_timer = { version = "=1.0.0", default-features = false }
 swc_visit = { version = "=2.0.0", default-features = false }
 browserslist-rs = { version = "=0.16.0" }
+
+[build-dependencies]
+built = { version = "0.7.5", features = ["git2"] }

+ 3 - 0
packages/cli-opt/build.rs

@@ -0,0 +1,3 @@
+fn main() {
+    built::write_built_file().expect("Failed to acquire build-time information");
+}

+ 10 - 0
packages/cli-opt/src/build_info.rs

@@ -0,0 +1,10 @@
+// The file has been placed there by the build script.
+include!(concat!(env!("OUT_DIR"), "/built.rs"));
+
+pub(crate) fn version() -> String {
+    format!(
+        "{} ({})",
+        PKG_VERSION,
+        GIT_COMMIT_HASH_SHORT.unwrap_or("was built without git repository")
+    )
+}

+ 32 - 6
packages/cli-opt/src/css.rs

@@ -1,4 +1,4 @@
-use std::path::Path;
+use std::{hash::Hasher, path::Path};
 
 use anyhow::{anyhow, Context};
 use codemap::SpanLoc;
@@ -146,12 +146,11 @@ pub(crate) fn minify_css(css: &str) -> anyhow::Result<String> {
     Ok(res.code)
 }
 
-/// Process an scss/sass file into css.
-pub(crate) fn process_scss(
+/// Compile scss with grass
+pub(crate) fn compile_scss(
     scss_options: &CssAssetOptions,
     source: &Path,
-    output_path: &Path,
-) -> anyhow::Result<()> {
+) -> anyhow::Result<String> {
     let style = match scss_options.minified() {
         true => OutputStyle::Compressed,
         false => OutputStyle::Expanded,
@@ -162,7 +161,18 @@ pub(crate) fn process_scss(
         .quiet(false)
         .logger(&ScssLogger {});
 
-    let css = grass::from_path(source, &options)?;
+    let css = grass::from_path(source, &options)
+        .with_context(|| format!("Failed to compile scss file: {}", source.display()))?;
+    Ok(css)
+}
+
+/// Process an scss/sass file into css.
+pub(crate) fn process_scss(
+    scss_options: &CssAssetOptions,
+    source: &Path,
+    output_path: &Path,
+) -> anyhow::Result<()> {
+    let css = compile_scss(scss_options, source)?;
     let minified = minify_css(&css)?;
 
     std::fs::write(output_path, minified).with_context(|| {
@@ -199,3 +209,19 @@ impl grass::Logger for ScssLogger {
         );
     }
 }
+
+/// Hash the inputs to the scss file
+pub(crate) fn hash_scss(
+    scss_options: &CssAssetOptions,
+    source: &Path,
+    hasher: &mut impl Hasher,
+) -> anyhow::Result<()> {
+    // Grass doesn't expose the ast for us to traverse the imports in the file. Instead of parsing scss ourselves
+    // we just hash the expanded version of the file for now
+    let css = compile_scss(scss_options, source)?;
+
+    // Hash the compiled css
+    hasher.write(css.as_bytes());
+
+    Ok(())
+}

+ 75 - 43
packages/cli-opt/src/file.rs

@@ -1,4 +1,5 @@
 use anyhow::Context;
+use manganis::{CssModuleAssetOptions, FolderAssetOptions};
 use manganis_core::{AssetOptions, CssAssetOptions, ImageAssetOptions, JsAssetOptions};
 use std::path::Path;
 
@@ -33,7 +34,7 @@ pub(crate) fn process_file_to_with_options(
     }
     if let Some(parent) = output_path.parent() {
         if !parent.exists() {
-            std::fs::create_dir_all(parent)?;
+            std::fs::create_dir_all(parent).context("Failed to create directory")?;
         }
     }
 
@@ -47,63 +48,94 @@ pub(crate) fn process_file_to_with_options(
             .unwrap_or_default()
             .to_string_lossy()
     ));
+    let resolved_options = resolve_asset_options(source, options);
 
-    match options {
-        AssetOptions::Unknown => match source.extension().map(|e| e.to_string_lossy()).as_deref() {
-            Some("css") => {
-                process_css(&CssAssetOptions::new(), source, &temp_path)?;
-            }
-            Some("scss" | "sass") => {
-                process_scss(&CssAssetOptions::new(), source, &temp_path)?;
-            }
-            Some("js") => {
-                process_js(&JsAssetOptions::new(), source, &temp_path, !in_folder)?;
-            }
-            Some("json") => {
-                process_json(source, &temp_path)?;
-            }
-            Some("jpg" | "jpeg" | "png" | "webp" | "avif") => {
-                process_image(&ImageAssetOptions::new(), source, &temp_path)?;
-            }
-            Some(_) | None => {
-                if source.is_dir() {
-                    process_folder(source, &temp_path)?;
-                } else {
-                    let source_file = std::fs::File::open(source)?;
-                    let mut reader = std::io::BufReader::new(source_file);
-                    let output_file = std::fs::File::create(&temp_path)?;
-                    let mut writer = std::io::BufWriter::new(output_file);
-                    std::io::copy(&mut reader, &mut writer).with_context(|| {
-                        format!(
-                            "Failed to write file to output location: {}",
-                            temp_path.display()
-                        )
-                    })?;
-                }
-            }
-        },
-        AssetOptions::Css(options) => {
+    match &resolved_options {
+        ResolvedAssetType::Css(options) => {
             process_css(options, source, &temp_path)?;
         }
-        AssetOptions::CssModule(options) => {
+        ResolvedAssetType::CssModule(options) => {
             process_css_module(options, source, output_path, &temp_path)?;
         }
-        AssetOptions::Js(options) => {
+        ResolvedAssetType::Scss(options) => {
+            process_scss(options, source, &temp_path)?;
+        }
+        ResolvedAssetType::Js(options) => {
             process_js(options, source, &temp_path, !in_folder)?;
         }
-        AssetOptions::Image(options) => {
+        ResolvedAssetType::Image(options) => {
             process_image(options, source, &temp_path)?;
         }
-        AssetOptions::Folder(_) => {
+        ResolvedAssetType::Json => {
+            process_json(source, &temp_path)?;
+        }
+        ResolvedAssetType::Folder(_) => {
             process_folder(source, &temp_path)?;
         }
-        _ => {
-            tracing::warn!("Unknown asset options: {:?}", options);
+        ResolvedAssetType::File => {
+            let source_file = std::fs::File::open(source)?;
+            let mut reader = std::io::BufReader::new(source_file);
+            let output_file = std::fs::File::create(&temp_path)?;
+            let mut writer = std::io::BufWriter::new(output_file);
+            std::io::copy(&mut reader, &mut writer).with_context(|| {
+                format!(
+                    "Failed to write file to output location: {}",
+                    temp_path.display()
+                )
+            })?;
         }
     }
 
     // If everything was successful, rename the temp file to the final output path
-    std::fs::rename(temp_path, output_path)?;
+    std::fs::rename(temp_path, output_path).context("Failed to rename output file")?;
 
     Ok(())
 }
+
+pub(crate) enum ResolvedAssetType {
+    /// An image asset
+    Image(ImageAssetOptions),
+    /// A css asset
+    Css(CssAssetOptions),
+    /// A css module asset
+    CssModule(CssModuleAssetOptions),
+    /// A SCSS asset
+    Scss(CssAssetOptions),
+    /// A javascript asset
+    Js(JsAssetOptions),
+    /// A json asset
+    Json,
+    /// A folder asset
+    Folder(FolderAssetOptions),
+    /// A generic file
+    File,
+}
+
+pub(crate) fn resolve_asset_options(source: &Path, options: &AssetOptions) -> ResolvedAssetType {
+    match options {
+        AssetOptions::Image(image) => ResolvedAssetType::Image(*image),
+        AssetOptions::Css(css) => ResolvedAssetType::Css(*css),
+        AssetOptions::CssModule(css) => ResolvedAssetType::CssModule(*css),
+        AssetOptions::Js(js) => ResolvedAssetType::Js(*js),
+        AssetOptions::Folder(folder) => ResolvedAssetType::Folder(*folder),
+        AssetOptions::Unknown => resolve_unknown_asset_options(source),
+        _ => {
+            tracing::warn!("Unknown asset options... you may need to update the Dioxus CLI. Defaulting to a generic file: {:?}", options);
+            resolve_unknown_asset_options(source)
+        }
+    }
+}
+
+fn resolve_unknown_asset_options(source: &Path) -> ResolvedAssetType {
+    match source.extension().map(|e| e.to_string_lossy()).as_deref() {
+        Some("scss" | "sass") => ResolvedAssetType::Scss(CssAssetOptions::new()),
+        Some("css") => ResolvedAssetType::Css(CssAssetOptions::new()),
+        Some("js") => ResolvedAssetType::Js(JsAssetOptions::new()),
+        Some("json") => ResolvedAssetType::Json,
+        Some("jpg" | "jpeg" | "png" | "webp" | "avif") => {
+            ResolvedAssetType::Image(ImageAssetOptions::new())
+        }
+        _ if source.is_dir() => ResolvedAssetType::Folder(FolderAssetOptions::new()),
+        _ => ResolvedAssetType::File,
+    }
+}

+ 162 - 0
packages/cli-opt/src/hash.rs

@@ -0,0 +1,162 @@
+//! Utilities for creating hashed paths to assets in Manganis. This module defines [`AssetHash`] which is used to create a hashed path to an asset in both the CLI and the macro.
+
+use std::{
+    hash::{Hash, Hasher},
+    io::Read,
+    path::{Path, PathBuf},
+};
+
+use crate::{
+    css::hash_scss,
+    file::{resolve_asset_options, ResolvedAssetType},
+    js::hash_js,
+};
+use manganis::{AssetOptions, BundledAsset};
+
+/// The opaque hash type manganis uses to identify assets. Each time an asset or asset options change, this hash will
+/// change. This hash is included in the URL of the bundled asset for cache busting.
+struct AssetHash {
+    /// We use a wrapper type here to hide the exact size of the hash so we can switch to a sha hash in a minor version bump
+    hash: [u8; 8],
+}
+
+impl AssetHash {
+    /// Create a new asset hash
+    const fn new(hash: u64) -> Self {
+        Self {
+            hash: hash.to_le_bytes(),
+        }
+    }
+
+    /// Get the hash bytes
+    pub const fn bytes(&self) -> &[u8] {
+        &self.hash
+    }
+
+    /// Create a new asset hash for a file. The input file to this function should be fully resolved
+    pub fn hash_file_contents(
+        options: &AssetOptions,
+        file_path: impl AsRef<Path>,
+    ) -> anyhow::Result<AssetHash> {
+        hash_file(options, file_path.as_ref())
+    }
+}
+
+/// Hash a specific file asset with the given options, reading from the resolved source path
+fn hash_file(options: &AssetOptions, source: &Path) -> anyhow::Result<AssetHash> {
+    // Create a hasher
+    let mut hash = std::collections::hash_map::DefaultHasher::new();
+    options.hash(&mut hash);
+    // Hash the version of CLI opt
+    hash.write(crate::build_info::version().as_bytes());
+    hash_file_with_options(options, source, &mut hash, false)?;
+
+    let hash = hash.finish();
+    Ok(AssetHash::new(hash))
+}
+
+/// Hash a specific file asset with additional options
+pub(crate) fn hash_file_with_options(
+    options: &AssetOptions,
+    source: &Path,
+    hasher: &mut impl Hasher,
+    in_folder: bool,
+) -> anyhow::Result<()> {
+    let resolved_options = resolve_asset_options(source, options);
+
+    match &resolved_options {
+        // Scss and JS can import files during the bundling process. We need to hash
+        // both the files themselves and any imports they have
+        ResolvedAssetType::Scss(options) => {
+            hash_scss(options, source, hasher)?;
+        }
+        ResolvedAssetType::Js(options) => {
+            hash_js(options, source, hasher, !in_folder)?;
+        }
+
+        // Otherwise, we can just hash the file contents
+        ResolvedAssetType::CssModule(_)
+        | ResolvedAssetType::Css(_)
+        | ResolvedAssetType::Image(_)
+        | ResolvedAssetType::Json
+        | ResolvedAssetType::File => {
+            hash_file_contents(source, hasher)?;
+        }
+        // Or the folder contents recursively
+        ResolvedAssetType::Folder(_) => {
+            let files = std::fs::read_dir(source)?;
+            for file in files.flatten() {
+                let path = file.path();
+                hash_file_with_options(options, &path, hasher, true)?;
+            }
+        }
+    }
+
+    Ok(())
+}
+
+pub(crate) fn hash_file_contents(source: &Path, hasher: &mut impl Hasher) -> anyhow::Result<()> {
+    // Open the file and stream its contents into the hasher
+    let mut file = std::fs::File::open(source)?;
+
+    // We add a hash to the end of the file so it is invalidated when the bundled version of the file changes
+    // The hash includes the file contents, the options, and the version of manganis. From the macro, we just
+    // know the file contents, so we only include that hash
+    let mut buffer = [0; 8192];
+    loop {
+        let read = file.read(&mut buffer)?;
+        if read == 0 {
+            break;
+        }
+        hasher.write(&buffer[..read]);
+    }
+    Ok(())
+}
+
+/// Add a hash to the asset, or log an error if it fails
+pub fn add_hash_to_asset(asset: &mut BundledAsset) {
+    let source = asset.absolute_source_path();
+    match AssetHash::hash_file_contents(asset.options(), source) {
+        Ok(hash) => {
+            let options = *asset.options();
+
+            // Set the bundled path to the source path with the hash appended before the extension
+            let source_path = PathBuf::from(source);
+            let Some(file_name) = source_path.file_name() else {
+                tracing::error!("Failed to get file name from path: {source}");
+                return;
+            };
+            // The output extension path is the extension set by the options
+            // or the extension of the source file if we don't recognize the file
+            let mut ext = asset.options().extension().map(Into::into).or_else(|| {
+                source_path
+                    .extension()
+                    .map(|ext| ext.to_string_lossy().to_string())
+            });
+
+            // Rewrite scss as css
+            if let Some("scss" | "sass") = ext.as_deref() {
+                ext = Some("css".to_string());
+            }
+
+            let hash = hash.bytes();
+            let hash = hash
+                .iter()
+                .map(|byte| format!("{byte:x}"))
+                .collect::<String>();
+            let file_stem = source_path.file_stem().unwrap_or(file_name);
+            let mut bundled_path = PathBuf::from(format!("{}-{hash}", file_stem.to_string_lossy()));
+
+            if let Some(ext) = ext {
+                bundled_path.set_extension(ext);
+            }
+
+            let bundled_path = bundled_path.to_string_lossy().to_string();
+
+            *asset = BundledAsset::new(source, &bundled_path, options);
+        }
+        Err(err) => {
+            tracing::error!("Failed to hash asset: {err}");
+        }
+    }
+}

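For illustration (not part of the commit): the bundled path produced by `add_hash_to_asset` above is the source file stem, the hash bytes rendered as lowercase hex via `{byte:x}`, and then the output extension, with `scss`/`sass` rewritten to `css`. A minimal, self-contained sketch of that naming scheme, using a made-up hash value:

    // Hypothetical sketch of the bundled-path format; the hash value is invented.
    fn main() {
        let hash: u64 = 0x1122334455667788; // stand-in; real hashes come from DefaultHasher
        let hex: String = hash
            .to_le_bytes()
            .iter()
            .map(|byte| format!("{byte:x}"))
            .collect();
        // e.g. `logo.png` becomes `logo-<hex>.png`
        assert_eq!(format!("logo-{hex}.png"), "logo-8877665544332211.png");
    }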
+ 23 - 12
packages/cli-opt/src/image/mod.rs

@@ -14,7 +14,8 @@ pub(crate) fn process_image(
     output_path: &Path,
 ) -> anyhow::Result<()> {
     let mut image = image::ImageReader::new(std::io::Cursor::new(&*std::fs::read(source)?))
-        .with_guessed_format()?
+        .with_guessed_format()
+        .context("Failed to guess image format")?
         .decode();
 
     if let Ok(image) = &mut image {
@@ -25,10 +26,10 @@ pub(crate) fn process_image(
 
     match (image, image_options.format()) {
         (image, ImageFormat::Png) => {
-            compress_png(image?, output_path);
+            compress_png(image.context("Failed to decode image")?, output_path);
         }
         (image, ImageFormat::Jpg) => {
-            compress_jpg(image?, output_path)?;
+            compress_jpg(image.context("Failed to decode image")?, output_path)?;
         }
         (Ok(image), ImageFormat::Avif) => {
             if let Err(error) = image.save(output_path) {
@@ -41,20 +42,30 @@ pub(crate) fn process_image(
             }
         }
         (Ok(image), _) => {
-            image.save(output_path)?;
+            image.save(output_path).with_context(|| {
+                format!(
+                    "Failed to save image (from {}) with path {}",
+                    source.display(),
+                    output_path.display()
+                )
+            })?;
         }
         // If we can't decode the image or it is of an unknown type, we just copy the file
         _ => {
-            let source_file = std::fs::File::open(source)?;
+            let source_file = std::fs::File::open(source).context("Failed to open source file")?;
             let mut reader = std::io::BufReader::new(source_file);
-            let output_file = std::fs::File::create(output_path)?;
-            let mut writer = std::io::BufWriter::new(output_file);
-            std::io::copy(&mut reader, &mut writer).with_context(|| {
-                format!(
-                    "Failed to write image to output location: {}",
-                    output_path.display()
-                )
+            let output_file = std::fs::File::create(output_path).with_context(|| {
+                format!("Failed to create output file: {}", output_path.display())
             })?;
+            let mut writer = std::io::BufWriter::new(output_file);
+            std::io::copy(&mut reader, &mut writer)
+                .with_context(|| {
+                    format!(
+                        "Failed to write image to output location: {}",
+                        output_path.display()
+                    )
+                })
+                .context("Failed to copy image data")?;
         }
     }
 

+ 62 - 15
packages/cli-opt/src/js.rs

@@ -1,3 +1,4 @@
+use std::hash::Hasher;
 use std::path::Path;
 use std::path::PathBuf;
 
@@ -24,6 +25,8 @@ use swc_ecma_codegen::text_writer::JsWriter;
 use swc_ecma_loader::{resolvers::node::NodeModulesResolver, TargetEnv};
 use swc_ecma_parser::{parse_file_as_module, Syntax};
 
+use crate::hash::hash_file_contents;
+
 struct TracingEmitter;
 
 impl Emitter for TracingEmitter {
@@ -43,30 +46,32 @@ impl Emitter for TracingEmitter {
     }
 }
 
+/// Run a closure with the swc globals and handler set up
+fn inside_handler<O>(f: impl FnOnce(&Globals, Lrc<SourceMap>) -> O) -> O {
+    let globals = Globals::new();
+    let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
+    let handler = Handler::with_emitter_and_flags(Box::new(TracingEmitter), Default::default());
+    GLOBALS.set(&globals, || HANDLER.set(&handler, || f(&globals, cm)))
+}
+
 fn bundle_js_to_writer(
     file: PathBuf,
     bundle: bool,
     minify: bool,
     write_to: &mut impl std::io::Write,
 ) -> anyhow::Result<()> {
-    let globals = Globals::new();
-    let handler = Handler::with_emitter_and_flags(Box::new(TracingEmitter), Default::default());
-    GLOBALS.set(&globals, || {
-        HANDLER.set(&handler, || {
-            bundle_js_to_writer_inside_handler(&globals, file, bundle, minify, write_to)
-        })
+    inside_handler(|globals, cm| {
+        bundle_js_to_writer_inside_handler(globals, cm, file, bundle, minify, write_to)
     })
 }
 
-fn bundle_js_to_writer_inside_handler(
+fn resolve_js_inside_handler(
     globals: &Globals,
     file: PathBuf,
     bundle: bool,
-    minify: bool,
-    write_to: &mut impl std::io::Write,
-) -> anyhow::Result<()> {
-    let cm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
-    let mut module = if bundle {
+    cm: &Lrc<SourceMap>,
+) -> anyhow::Result<Module> {
+    if bundle {
         let node_resolver = NodeModulesResolver::new(TargetEnv::Browser, Default::default(), true);
         let mut bundler = Bundler::new(
             globals,
@@ -89,7 +94,7 @@ fn bundle_js_to_writer_inside_handler(
         let bundle = bundles
             .pop()
             .ok_or_else(|| anyhow::anyhow!("swc did not output any bundles"))?;
-        bundle.module
+        Ok(bundle.module)
     } else {
         let fm = cm.load_file(Path::new(&file)).expect("Failed to load file");
 
@@ -108,8 +113,19 @@ fn bundle_js_to_writer_inside_handler(
                 error.cancel();
                 anyhow::anyhow!("{}", error.message())
             })
-        })?
-    };
+        })
+    }
+}
+
+fn bundle_js_to_writer_inside_handler(
+    globals: &Globals,
+    cm: Lrc<SourceMap>,
+    file: PathBuf,
+    bundle: bool,
+    minify: bool,
+    write_to: &mut impl std::io::Write,
+) -> anyhow::Result<()> {
+    let mut module = resolve_js_inside_handler(globals, file, bundle, &cm)?;
 
     if minify {
         module = swc_ecma_minifier::optimize(
@@ -246,3 +262,34 @@ pub(crate) fn process_js(
 
     Ok(())
 }
+
+fn hash_js_module(file: PathBuf, hasher: &mut impl Hasher, bundle: bool) -> anyhow::Result<()> {
+    inside_handler(|globals, cm| {
+        _ = resolve_js_inside_handler(globals, file, bundle, &cm)?;
+
+        for file in cm.files().iter() {
+            let hash = file.src_hash;
+            hasher.write(&hash.to_le_bytes());
+        }
+
+        Ok(())
+    })
+}
+
+pub(crate) fn hash_js(
+    js_options: &JsAssetOptions,
+    source: &Path,
+    hasher: &mut impl Hasher,
+    bundle: bool,
+) -> anyhow::Result<()> {
+    if js_options.minified() {
+        if let Err(err) = hash_js_module(source.to_path_buf(), hasher, bundle) {
+            tracing::error!("Failed to minify js. Falling back to non-minified: {err}");
+            hash_file_contents(source, hasher)?;
+        }
+    } else {
+        hash_file_contents(source, hasher)?;
+    }
+
+    Ok(())
+}

+ 63 - 77
packages/cli-opt/src/lib.rs

@@ -1,19 +1,23 @@
 use anyhow::Context;
-use manganis_core::linker::LinkSection;
+use manganis::AssetOptions;
 use manganis_core::BundledAsset;
-use object::{read::archive::ArchiveFile, File as ObjectFile, Object, ObjectSection};
+use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
 use serde::{Deserialize, Serialize};
-use std::path::Path;
-use std::{collections::HashMap, path::PathBuf};
+use std::collections::{HashMap, HashSet};
+use std::path::{Path, PathBuf};
+use std::sync::{Arc, RwLock};
 
+mod build_info;
 mod css;
 mod file;
 mod folder;
+mod hash;
 mod image;
 mod js;
 mod json;
 
 pub use file::process_file_to;
+pub use hash::add_hash_to_asset;
 
 /// A manifest of all assets collected from dependencies
 ///
@@ -21,7 +25,7 @@ pub use file::process_file_to;
 #[derive(Debug, PartialEq, Default, Clone, Serialize, Deserialize)]
 pub struct AssetManifest {
     /// Map of bundled asset name to the asset itself
-    pub assets: HashMap<PathBuf, BundledAsset>,
+    assets: HashMap<PathBuf, HashSet<BundledAsset>>,
 }
 
 impl AssetManifest {
@@ -31,100 +35,82 @@ impl AssetManifest {
         asset_path: &Path,
         options: manganis::AssetOptions,
     ) -> anyhow::Result<BundledAsset> {
-        let hash = manganis_core::hash::AssetHash::hash_file_contents(asset_path)
-            .context("Failed to hash file")?;
-
         let output_path_str = asset_path.to_str().ok_or(anyhow::anyhow!(
             "Failed to convert wasm bindgen output path to string"
         ))?;
 
-        let bundled_asset =
-            manganis::macro_helpers::create_bundled_asset(output_path_str, hash.bytes(), options);
+        let mut bundled_asset =
+            manganis::macro_helpers::create_bundled_asset(output_path_str, options);
+        add_hash_to_asset(&mut bundled_asset);
 
-        self.assets.insert(asset_path.into(), bundled_asset);
+        self.assets
+            .entry(asset_path.to_path_buf())
+            .or_default()
+            .insert(bundled_asset);
 
         Ok(bundled_asset)
     }
 
-    #[allow(dead_code)]
-    pub fn load_from_file(path: &Path) -> anyhow::Result<Self> {
-        let src = std::fs::read_to_string(path)?;
-
-        serde_json::from_str(&src)
-            .with_context(|| format!("Failed to parse asset manifest from {path:?}\n{src}"))
+    /// Insert an existing bundled asset to the manifest
+    pub fn insert_asset(&mut self, asset: BundledAsset) {
+        let asset_path = asset.absolute_source_path();
+        self.assets
+            .entry(asset_path.into())
+            .or_default()
+            .insert(asset);
     }
 
-    /// Fill this manifest with a file object/rlib files, typically extracted from the linker intercepted
-    pub fn add_from_object_path(&mut self, path: &Path) -> anyhow::Result<()> {
-        let data = std::fs::read(path)?;
-
-        match path.extension().and_then(|ext| ext.to_str()) {
-            // Parse an rlib as a collection of objects
-            Some("rlib") => {
-                if let Ok(archive) = object::read::archive::ArchiveFile::parse(&*data) {
-                    self.add_from_archive_file(&archive, &data)?;
-                }
-            }
-            _ => {
-                if let Ok(object) = object::File::parse(&*data) {
-                    self.add_from_object_file(&object)?;
-                }
-            }
-        }
-
-        Ok(())
+    /// Get any assets that are tied to a specific source file
+    pub fn get_assets_for_source(&self, path: &Path) -> Option<&HashSet<BundledAsset>> {
+        self.assets.get(path)
     }
 
-    /// Fill this manifest from an rlib / ar file that contains many object files and their entries
-    fn add_from_archive_file(&mut self, archive: &ArchiveFile, data: &[u8]) -> object::Result<()> {
-        // Look through each archive member for object files.
-        // Read the archive member's binary data (we know it's an object file)
-        // And parse it with the normal `object::File::parse` to find the manganis string.
-        for member in archive.members() {
-            let member = member?;
-            let name = String::from_utf8_lossy(member.name()).to_string();
-
-            // Check if the archive member is an object file and parse it.
-            if name.ends_with(".o") {
-                let data = member.data(data)?;
-                let object = object::File::parse(data)?;
-                _ = self.add_from_object_file(&object);
-            }
-        }
+    /// Check if the manifest contains a specific asset
+    pub fn contains(&self, asset: &BundledAsset) -> bool {
+        self.assets
+            .get(&PathBuf::from(asset.absolute_source_path()))
+            .is_some_and(|assets| assets.contains(asset))
+    }
 
-        Ok(())
+    /// Iterate over all the assets in the manifest
+    pub fn assets(&self) -> impl Iterator<Item = &BundledAsset> {
+        self.assets.values().flat_map(|assets| assets.iter())
     }
 
-    /// Fill this manifest with whatever tables might come from the object file
-    fn add_from_object_file(&mut self, obj: &ObjectFile) -> anyhow::Result<()> {
-        for section in obj.sections() {
-            let Ok(section_name) = section.name() else {
-                continue;
-            };
+    pub fn load_from_file(path: &Path) -> anyhow::Result<Self> {
+        let src = std::fs::read_to_string(path)?;
 
-            // Check if the link section matches the asset section for one of the platforms we support. This may not be the current platform if the user is cross compiling
-            let matches = LinkSection::ALL
-                .iter()
-                .any(|x| x.link_section == section_name);
+        serde_json::from_str(&src)
+            .with_context(|| format!("Failed to parse asset manifest from {path:?}\n{src}"))
+    }
+}
 
-            if !matches {
-                continue;
+/// Optimize a list of assets in parallel
+pub fn optimize_all_assets(
+    assets_to_transfer: Vec<(PathBuf, PathBuf, AssetOptions)>,
+    on_optimization_start: impl FnMut(&Path, &Path, &AssetOptions) + Sync + Send,
+    on_optimization_end: impl FnMut(&Path, &Path, &AssetOptions) + Sync + Send,
+) -> anyhow::Result<()> {
+    let on_optimization_start = Arc::new(RwLock::new(on_optimization_start));
+    let on_optimization_end = Arc::new(RwLock::new(on_optimization_end));
+    assets_to_transfer
+        .par_iter()
+        .try_for_each(|(from, to, options)| {
+            {
+                let mut on_optimization_start = on_optimization_start.write().unwrap();
+                on_optimization_start(from, to, options);
             }
 
-            let bytes = section
-                .uncompressed_data()
-                .context("Could not read uncompressed data from object file")?;
+            let res = process_file_to(options, from, to);
+            if let Err(err) = res.as_ref() {
+                tracing::error!("Failed to copy asset {from:?}: {err}");
+            }
 
-            let mut buffer = const_serialize::ConstReadBuffer::new(&bytes);
-            while let Some((remaining_buffer, asset)) =
-                const_serialize::deserialize_const!(BundledAsset, buffer)
             {
-                self.assets
-                    .insert(asset.absolute_source_path().into(), asset);
-                buffer = remaining_buffer;
+                let mut on_optimization_end = on_optimization_end.write().unwrap();
+                on_optimization_end(from, to, options);
             }
-        }
 
-        Ok(())
-    }
+            res.map(|_| ())
+        })
 }

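For context, a hedged usage sketch of the new `optimize_all_assets` entry point shown above (not part of the commit; the asset paths are made up, while the `dioxus_cli_opt` crate path and `AssetOptions::Unknown` variant are taken from elsewhere in this diff):

    use std::path::PathBuf;
    use manganis::AssetOptions;

    fn main() -> anyhow::Result<()> {
        // One (source, destination, options) triple per asset; the two closures report progress.
        let work = vec![(
            PathBuf::from("assets/logo.png"),      // hypothetical source path
            PathBuf::from("dist/assets/logo.png"), // hypothetical output path
            AssetOptions::Unknown,
        )];
        dioxus_cli_opt::optimize_all_assets(
            work,
            |from, _to, _opts| println!("optimizing {}", from.display()),
            |from, _to, _opts| println!("finished {}", from.display()),
        )
    }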
+ 8 - 3
packages/cli/Cargo.toml

@@ -50,7 +50,6 @@ hyper-rustls = { workspace = true }
 rustls = { workspace = true }
 rayon = { workspace = true }
 futures-channel = { workspace = true }
-target-lexicon = { version = "0.13.2", features = ["serde", "serde_support"] }
 krates = { workspace = true }
 regex = "1.11.1"
 console = "0.15.11"
@@ -61,7 +60,12 @@ axum-server = { workspace = true, features = ["tls-rustls"] }
 axum-extra = { workspace = true, features = ["typed-header"] }
 tower-http = { workspace = true, features = ["full"] }
 proc-macro2 = { workspace = true, features = ["span-locations"] }
-syn = { workspace = true, features = ["full", "extra-traits", "visit", "visit-mut"] }
+syn = { workspace = true, features = [
+    "full",
+    "extra-traits",
+    "visit",
+    "visit-mut",
+] }
 
 headers = "0.4.0"
 walkdir = "2"
@@ -108,6 +112,8 @@ log = { version = "0.4", features = ["max_level_off", "release_max_level_off"] }
 tempfile = "3.13"
 manganis = { workspace = true }
 manganis-core = { workspace = true }
+target-lexicon = { version = "0.13.2", features = ["serde", "serde_support"] }
+wasm-encoder = "0.229.0"
 
 # Extracting data from an executable
 object = { workspace = true, features = ["all"] }
@@ -130,7 +136,6 @@ local-ip-address = "0.6.3"
 dircpy = "0.3.19"
 plist = "1.7.0"
 memoize = "0.5.1"
-wasm-encoder = "0.228.0"
 backtrace = "0.3.74"
 ar = "0.9.0"
 wasm-bindgen-externref-xform = "0.2.100"

+ 354 - 0
packages/cli/src/build/assets.rs

@@ -0,0 +1,354 @@
+//! The dioxus asset system.
+//!
+//! This module provides functionality for extracting assets from a binary file and then writing back
+//! their asset hashes directly into the binary file. Previously, we performed asset hashing in the
+//! `asset!()` macro. The new system, implemented here, instead performs the hashing at build time,
+//! which provides more flexibility in the asset processing pipeline.
+//!
+//! We chose to implement this approach since assets might reference each other which means we minimally
+//! need to parse the asset to create a unique hash for each asset before they are used in the application.
+//! The hashes are used both for cache busting the asset in the browser and to cache the asset optimization
+//! process in the build system.
+//!
+//! We use the same lessons learned from the hot-patching engine which parses the binary file and its
+//! symbol table to find symbols that match the `__MANGANIS__` prefix. These symbols are ideally data
+//! symbols and contain the BundledAsset data type which implements ConstSerialize and ConstDeserialize.
+//!
+//! When the binary is built, the `dioxus asset!()` macro will emit its metadata into the __MANGANIS__
+//! symbols, which we process here. After reading the metadata directly from the executable, we then
+//! hash it and write the hash directly into the binary file.
+//!
+//! During development, we can skip this step for most platforms since local paths are sufficient
+//! for asset loading. However, for WASM and for production builds, we need to ensure that assets
+//! can be found relative to the current exe. Unfortunately, on android, the `current_exe` path is wrong,
+//! so the assets are resolved against the "asset root" - which is covered by the asset loader crate.
+//!
+//! Finding the __MANGANIS__ symbols is not quite straightforward when hotpatching, especially on WASM
+//! since we build and link the module as relocatable, which is not a stable WASM proposal. In this
+//! implementation, we handle both the non-PIE *and* PIC cases which are rather bespoke to our whole
+//! build system.
+
+use std::{
+    io::{Cursor, Read, Seek, Write},
+    path::{Path, PathBuf},
+};
+
+use crate::Result;
+use anyhow::Context;
+use const_serialize::{ConstVec, SerializeConst};
+use dioxus_cli_opt::AssetManifest;
+use manganis::BundledAsset;
+use object::{File, Object, ObjectSection, ObjectSymbol, ReadCache, ReadRef, Section, Symbol};
+use pdb::FallibleIterator;
+use rayon::iter::{IntoParallelRefMutIterator, ParallelIterator};
+use wasmparser::BinaryReader;
+
+/// Extract all manganis symbols and their sections from the given object file.
+fn manganis_symbols<'a, 'b, R: ReadRef<'a>>(
+    file: &'b File<'a, R>,
+) -> impl Iterator<Item = (Symbol<'a, 'b, R>, Section<'a, 'b, R>)> + 'b {
+    file.symbols()
+        .filter(|symbol| {
+            if let Ok(name) = symbol.name() {
+                name.contains("__MANGANIS__")
+            } else {
+                false
+            }
+        })
+        .filter_map(move |symbol| {
+            let section_index = symbol.section_index()?;
+            let section = file.section_by_index(section_index).ok()?;
+            Some((symbol, section))
+        })
+}
+
+/// Find the offsets of any manganis symbols in the given file.
+fn find_symbol_offsets<'a, R: ReadRef<'a>>(
+    path: &Path,
+    file_contents: &[u8],
+    file: &File<'a, R>,
+) -> Result<Vec<u64>> {
+    let pdb_file = find_pdb_file(path);
+
+    match file.format() {
+        // We need to handle dynamic offsets in wasm files differently
+        object::BinaryFormat::Wasm => find_wasm_symbol_offsets(file_contents, file),
+        // Windows puts the symbol information in a PDB file alongside the executable.
+        // If this is a windows PE file and we found a PDB file, we will use that to find the symbol offsets.
+        object::BinaryFormat::Pe if pdb_file.is_some() => {
+            find_pdb_symbol_offsets(&pdb_file.unwrap())
+        }
+        // Otherwise, look for manganis symbols in the object file.
+        _ => find_native_symbol_offsets(file),
+    }
+}
+
+/// Find the pdb file matching the executable file.
+fn find_pdb_file(path: &Path) -> Option<PathBuf> {
+    let mut pdb_file = path.with_extension("pdb");
+    // Also try to find it in the same directory as the executable with _'s instead of -'s
+    if let Some(file_name) = pdb_file.file_name() {
+        let new_file_name = file_name.to_string_lossy().replace('-', "_");
+        let alternate_pdb_file = pdb_file.with_file_name(new_file_name);
+        // Keep the most recent pdb file
+        match (pdb_file.metadata(), alternate_pdb_file.metadata()) {
+            (Ok(pdb_metadata), Ok(alternate_metadata)) => {
+                if let (Ok(pdb_modified), Ok(alternate_modified)) =
+                    (pdb_metadata.modified(), alternate_metadata.modified())
+                {
+                    if pdb_modified < alternate_modified {
+                        pdb_file = alternate_pdb_file;
+                    }
+                }
+            }
+            (Err(_), Ok(_)) => {
+                pdb_file = alternate_pdb_file;
+            }
+            _ => {}
+        }
+    }
+    if pdb_file.exists() {
+        Some(pdb_file)
+    } else {
+        None
+    }
+}
+
+/// Find the offsets of any manganis symbols in a pdb file.
+fn find_pdb_symbol_offsets(pdb_file: &Path) -> Result<Vec<u64>> {
+    let pdb_file_handle = std::fs::File::open(pdb_file)?;
+    let mut pdb_file = pdb::PDB::open(pdb_file_handle).context("Failed to open PDB file")?;
+    let Ok(Some(sections)) = pdb_file.sections() else {
+        tracing::error!("Failed to read sections from PDB file");
+        return Ok(Vec::new());
+    };
+    let global_symbols = pdb_file
+        .global_symbols()
+        .context("Failed to read global symbols from PDB file")?;
+    let address_map = pdb_file
+        .address_map()
+        .context("Failed to read address map from PDB file")?;
+    let mut symbols = global_symbols.iter();
+    let mut addresses = Vec::new();
+    while let Ok(Some(symbol)) = symbols.next() {
+        let Ok(pdb::SymbolData::Public(data)) = symbol.parse() else {
+            continue;
+        };
+        let Some(rva) = data.offset.to_section_offset(&address_map) else {
+            continue;
+        };
+
+        let name = data.name.to_string();
+        if name.contains("__MANGANIS__") {
+            let section = sections
+                .get(rva.section as usize - 1)
+                .expect("Section index out of bounds");
+
+            addresses.push((section.pointer_to_raw_data + rva.offset) as u64);
+        }
+    }
+    Ok(addresses)
+}
+
+/// Find the offsets of any manganis symbols in a native object file.
+fn find_native_symbol_offsets<'a, R: ReadRef<'a>>(file: &File<'a, R>) -> Result<Vec<u64>> {
+    let mut offsets = Vec::new();
+    for (symbol, section) in manganis_symbols(file) {
+        let virtual_address = symbol.address();
+
+        let Some((section_range_start, _)) = section.file_range() else {
+            tracing::error!(
+                "Found __MANGANIS__ symbol {:?} in section {}, but the section has no file range",
+                symbol.name(),
+                section.index()
+            );
+            continue;
+        };
+        // Translate the section_relative_address to the file offset
+        let section_relative_address: u64 = (virtual_address as i128 - section.address() as i128)
+            .try_into()
+            .expect("Virtual address should be greater than or equal to section address");
+        let file_offset = section_range_start + section_relative_address;
+        offsets.push(file_offset);
+    }
+
+    Ok(offsets)
+}
+
+/// Find the offsets of any manganis symbols in the wasm file.
+fn find_wasm_symbol_offsets<'a, R: ReadRef<'a>>(
+    file_contents: &[u8],
+    file: &File<'a, R>,
+) -> Result<Vec<u64>> {
+    // Parse the wasm file to find the globals
+    let parser = wasmparser::Parser::new(0);
+
+    // All integer literal global values in the wasm file
+    let mut global_values = Vec::new();
+    for section in parser.parse_all(file_contents) {
+        let Ok(wasmparser::Payload::GlobalSection(global_section)) = section else {
+            continue;
+        };
+
+        global_values = global_section
+            .into_iter()
+            .map(|global| {
+                let global = global.ok()?;
+                match global.init_expr.get_operators_reader().into_iter().next() {
+                    Some(Ok(wasmparser::Operator::I32Const { value })) => Some(value as u64),
+                    Some(Ok(wasmparser::Operator::I64Const { value })) => Some(value as u64),
+                    _ => None,
+                }
+            })
+            .collect::<Vec<_>>();
+    }
+    let mut offsets = Vec::new();
+
+    for (symbol, section) in manganis_symbols(file) {
+        let virtual_address = symbol.address();
+
+        let Some((_, section_range_end)) = section.file_range() else {
+            tracing::error!(
+                "Found __MANGANIS__ symbol {:?} in section {}, but the section has no file range",
+                symbol.name(),
+                section.index()
+            );
+            continue;
+        };
+        let section_size = section.data()?.len() as u64;
+        let section_start = section_range_end - section_size;
+        // Translate the section_relative_address to the file offset
+        // WASM files have a section address of 0 in object, reparse the data section with wasmparser
+        // to get the correct address and section start
+        let reader = wasmparser::DataSectionReader::new(BinaryReader::new(
+            &file_contents[section_start as usize..section_range_end as usize],
+            0,
+        ))
+        .context("Failed to create WASM data section reader")?;
+        let main_memory = reader
+            .into_iter()
+            .next()
+            .context("Failed to find main memory in the WASM data section")?
+            .context("Failed to read main memory from WASM data section")?;
+        let main_memory_offset = match main_memory.kind {
+            wasmparser::DataKind::Active { offset_expr, .. } => {
+                match offset_expr.get_operators_reader().into_iter().next() {
+                    Some(Ok(wasmparser::Operator::I32Const { value })) => -value as i128,
+                    Some(Ok(wasmparser::Operator::I64Const { value })) => -value as i128,
+                    Some(Ok(wasmparser::Operator::GlobalGet { global_index })) => {
+                        let Some(value) =
+                            global_values.get(global_index as usize).copied().flatten()
+                        else {
+                            tracing::error!(
+                                "Found __MANGANIS__ symbol {:?} in WASM file, but the global index {} is not found",
+                                symbol.name(),
+                                global_index
+                            );
+                            continue;
+                        };
+                        value as i128
+                    }
+                    offset_expr => {
+                        tracing::error!(
+                            "Found __MANGANIS__ symbol {:?} in WASM file, but the offset expression is not a constant, it is {:?}",
+                            symbol.name(),
+                            offset_expr
+                        );
+                        continue;
+                    }
+                }
+            }
+            _ => {
+                tracing::error!(
+                    "Found __MANGANIS__ symbol {:?} in WASM file, but the data section is not active",
+                    symbol.name()
+                );
+                continue;
+            }
+        };
+        // main_memory.data is a slice somewhere in file_contents. Find out the offset in the file
+        let data_start_offset = (main_memory.data.as_ptr() as u64)
+            .checked_sub(file_contents.as_ptr() as u64)
+            .expect("Data section start offset should be within the file contents");
+        let section_relative_address: u64 = ((virtual_address as i128) + main_memory_offset)
+            .try_into()
+            .expect("Virtual address should be greater than or equal to section address");
+        let file_offset = data_start_offset + section_relative_address;
+        offsets.push(file_offset);
+    }
+
+    Ok(offsets)
+}
+
+/// Find all assets in the given file, hash them, and write them back to the file.
+/// Then return an `AssetManifest` containing all the assets found in the file.
+pub(crate) fn extract_assets_from_file(path: impl AsRef<Path>) -> Result<AssetManifest> {
+    let path = path.as_ref();
+    let mut file = std::fs::File::options().write(true).read(true).open(path)?;
+    let mut file_contents = Vec::new();
+    file.read_to_end(&mut file_contents)?;
+    let mut reader = Cursor::new(&file_contents);
+    let read_cache = ReadCache::new(&mut reader);
+    let object_file = object::File::parse(&read_cache)?;
+    let offsets = find_symbol_offsets(path, &file_contents, &object_file)?;
+
+    let mut assets = Vec::new();
+
+    // Read each asset from the data section using the offsets
+    for offset in offsets.iter().copied() {
+        file.seek(std::io::SeekFrom::Start(offset))?;
+        let mut data_in_range = vec![0; BundledAsset::MEMORY_LAYOUT.size()];
+        file.read_exact(&mut data_in_range)?;
+
+        let buffer = const_serialize::ConstReadBuffer::new(&data_in_range);
+
+        if let Some((_, bundled_asset)) = const_serialize::deserialize_const!(BundledAsset, buffer)
+        {
+            assets.push(bundled_asset);
+        } else {
+            tracing::warn!("Found an asset at offset {offset} that could not be deserialized. This may be caused by a mismatch between your dioxus and dioxus-cli versions.");
+        }
+    }
+
+    // Add the hash to each asset in parallel
+    assets
+        .par_iter_mut()
+        .for_each(dioxus_cli_opt::add_hash_to_asset);
+
+    // Write back the assets to the binary file
+    for (offset, asset) in offsets.into_iter().zip(&assets) {
+        let new_data = ConstVec::new();
+        let new_data = const_serialize::serialize_const(asset, new_data);
+
+        file.seek(std::io::SeekFrom::Start(offset))?;
+        // Write the modified binary data back to the file
+        file.write_all(new_data.as_ref())?;
+    }
+
+    // If the file is a macos binary, we need to re-sign the modified binary
+    if object_file.format() == object::BinaryFormat::MachO {
+        // Spawn the codesign command to re-sign the binary
+        let output = std::process::Command::new("codesign")
+            .arg("--force")
+            .arg("--sign")
+            .arg("-") // Sign with an empty identity
+            .arg(path)
+            .output()?;
+
+        if !output.status.success() {
+            return Err(anyhow::anyhow!(
+                "Failed to re-sign the binary with codesign after finalizing the assets: {}",
+                String::from_utf8_lossy(&output.stderr)
+            )
+            .into());
+        }
+    }
+
+    // Finally, create the asset manifest
+    let mut manifest = AssetManifest::default();
+    for asset in assets {
+        manifest.insert_asset(asset);
+    }
+
+    Ok(manifest)
+}

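For context, a hedged sketch of the const-serialize round trip that `extract_assets_from_file` relies on: the `asset!()` macro serializes a `BundledAsset` into a `__MANGANIS__` symbol, and the CLI deserializes it, rehashes it, and writes the same bytes back at the symbol's offset. The constructor arguments below are invented; the calls mirror the ones used in this file:

    use const_serialize::{ConstReadBuffer, ConstVec};
    use manganis::{AssetOptions, BundledAsset};

    fn main() {
        // Hypothetical asset; real ones are read out of the __MANGANIS__ symbols.
        let asset = BundledAsset::new("/app/assets/logo.png", "logo.png", AssetOptions::Unknown);
        // Serialize to bytes, as the macro bakes it into the binary...
        let bytes = const_serialize::serialize_const(&asset, ConstVec::new());
        // ...and deserialize it back, as the extraction step does at each symbol offset.
        let buffer = ConstReadBuffer::new(bytes.as_ref());
        let (_rest, decoded) = const_serialize::deserialize_const!(BundledAsset, buffer)
            .expect("asset should round-trip");
        assert_eq!(asset, decoded);
    }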
+ 37 - 36
packages/cli/src/build/builder.rs

@@ -567,16 +567,15 @@ impl AppBuilder {
         let original_artifacts = self.artifacts.as_ref().unwrap();
         let asset_dir = self.build.asset_dir();
 
-        for (k, bundled) in res.assets.assets.iter() {
-            let k = dunce::canonicalize(k)?;
-            if original_artifacts.assets.assets.contains_key(k.as_path()) {
+        for bundled in res.assets.assets() {
+            if original_artifacts.assets.contains(bundled) {
                 continue;
             }
+            let from = dunce::canonicalize(PathBuf::from(bundled.absolute_source_path()))?;
 
-            let from = k.clone();
             let to = asset_dir.join(bundled.bundled_path());
 
-            tracing::debug!("Copying asset from patch: {}", k.display());
+            tracing::debug!("Copying asset from patch: {}", from.display());
             if let Err(e) = dioxus_cli_opt::process_file_to(bundled.options(), &from, &to) {
                 tracing::error!("Failed to copy asset: {e}");
                 continue;
@@ -584,13 +583,8 @@ impl AppBuilder {
 
             // If the emulator is android, we need to copy the asset to the device with `adb push asset /data/local/tmp/dx/assets/filename.ext`
             if self.build.platform == Platform::Android {
-                let changed_file = dunce::canonicalize(k).inspect_err(|e| {
-                    tracing::debug!("Failed to canonicalize hotreloaded asset: {e}")
-                })?;
                 let bundled_name = PathBuf::from(bundled.bundled_path());
-                _ = self
-                    .copy_file_to_android_tmp(&changed_file, &bundled_name)
-                    .await;
+                _ = self.copy_file_to_android_tmp(&from, &bundled_name).await;
             }
         }
 
@@ -649,10 +643,13 @@ impl AppBuilder {
     /// dir that the system simulator might be providing. We know this is the case for ios simulators
     /// and haven't yet checked for android.
     ///
-    /// This will return the bundled name of the asset such that we can send it to the clients letting
+    /// This will return the bundled names of the assets such that we can send them to the clients letting
     /// them know what to reload. It's not super important that this is robust since most clients will
     /// kick all stylesheets without necessarily checking the name.
-    pub(crate) async fn hotreload_bundled_asset(&self, changed_file: &PathBuf) -> Option<PathBuf> {
+    pub(crate) async fn hotreload_bundled_assets(
+        &self,
+        changed_file: &PathBuf,
+    ) -> Option<Vec<PathBuf>> {
         let artifacts = self.artifacts.as_ref()?;
 
         // Use the build dir if there's no runtime asset dir as the override. For the case of ios apps,
@@ -668,32 +665,36 @@ impl AppBuilder {
             .ok()?;
 
         // The asset might've been renamed thanks to the manifest, let's attempt to reload that too
-        let resource = artifacts.assets.assets.get(&changed_file)?;
-        let output_path = asset_dir.join(resource.bundled_path());
-
-        tracing::debug!("Hotreloading asset {changed_file:?} in target {asset_dir:?}");
-
-        // Remove the old asset if it exists
-        _ = std::fs::remove_file(&output_path);
-
-        // And then process the asset with the options into the **old** asset location. If we recompiled,
-        // the asset would be in a new location because the contents and hash have changed. Since we are
-        // hotreloading, we need to use the old asset location it was originally written to.
-        let options = *resource.options();
-        let res = process_file_to(&options, &changed_file, &output_path);
-        let bundled_name = PathBuf::from(resource.bundled_path());
-        if let Err(e) = res {
-            tracing::debug!("Failed to hotreload asset {e}");
-        }
+        let resources = artifacts.assets.get_assets_for_source(&changed_file)?;
+        let mut bundled_names = Vec::new();
+        for resource in resources {
+            let output_path = asset_dir.join(resource.bundled_path());
+
+            tracing::debug!("Hotreloading asset {changed_file:?} in target {asset_dir:?}");
+
+            // Remove the old asset if it exists
+            _ = std::fs::remove_file(&output_path);
+
+            // And then process the asset with the options into the **old** asset location. If we recompiled,
+            // the asset would be in a new location because the contents and hash have changed. Since we are
+            // hotreloading, we need to use the old asset location it was originally written to.
+            let options = *resource.options();
+            let res = process_file_to(&options, &changed_file, &output_path);
+            let bundled_name = PathBuf::from(resource.bundled_path());
+            if let Err(e) = res {
+                tracing::debug!("Failed to hotreload asset {e}");
+            }
 
-        // If the emulator is android, we need to copy the asset to the device with `adb push asset /data/local/tmp/dx/assets/filename.ext`
-        if self.build.platform == Platform::Android {
-            _ = self
-                .copy_file_to_android_tmp(&changed_file, &bundled_name)
-                .await;
+            // If the emulator is android, we need to copy the asset to the device with `adb push asset /data/local/tmp/dx/assets/filename.ext`
+            if self.build.platform == Platform::Android {
+                _ = self
+                    .copy_file_to_android_tmp(&changed_file, &bundled_name)
+                    .await;
+            }
+            bundled_names.push(bundled_name);
         }
 
-        Some(bundled_name)
+        Some(bundled_names)
     }
 
     /// Copy this file to the tmp folder on the android device, returning the path to the copied file

+ 2 - 0
packages/cli/src/build/mod.rs

@@ -8,12 +8,14 @@
 //! hot-patching Rust code through binary analysis and a custom linker. The [`builder`] module contains
 //! the management of the ongoing build and methods to open the build as a running app.
 
+mod assets;
 mod builder;
 mod context;
 mod patch;
 mod request;
 mod tools;
 
+pub(crate) use assets::*;
 pub(crate) use builder::*;
 pub(crate) use context::*;
 pub(crate) use patch::*;

+ 21 - 14
packages/cli/src/build/request.rs

@@ -961,10 +961,8 @@ impl BuildRequest {
         })
     }
 
-    /// Traverse the target directory and collect all assets from the incremental cache
-    ///
-    /// This uses "known paths" that have stayed relatively stable during cargo's lifetime.
-    /// One day this system might break and we might need to go back to using the linker approach.
+    /// Collect the assets from the final executable and modify the binary in place to point to the right
+    /// hashed asset location.
     fn collect_assets(&self, exe: &Path, ctx: &BuildContext) -> Result<AssetManifest> {
         // Start with an empty manifest and fill it from the final executable below.
         let mut manifest = AssetManifest::default();
@@ -972,7 +970,7 @@ impl BuildRequest {
         // Extract the assets from the final executable, rewriting its asset references to the hashed paths
         if !self.skip_assets {
             ctx.status_extracting_assets();
-            _ = manifest.add_from_object_path(exe);
+            manifest = super::assets::extract_assets_from_file(exe)?;
         }
 
         Ok(manifest)
@@ -1109,8 +1107,7 @@ impl BuildRequest {
 
         // Create a set of all the paths that new files will be bundled to
         let mut keep_bundled_output_paths: HashSet<_> = assets
-            .assets
-            .values()
+            .assets()
             .map(|a| asset_dir.join(a.bundled_path()))
             .collect();
 
@@ -1149,8 +1146,8 @@ impl BuildRequest {
         let mut assets_to_transfer = vec![];
 
         // Queue the bundled assets
-        for (asset, bundled) in &assets.assets {
-            let from = asset.clone();
+        for bundled in assets.assets() {
+            let from = PathBuf::from(bundled.absolute_source_path());
             let to = asset_dir.join(bundled.bundled_path());
 
             // prefer to log using a shorter path relative to the workspace dir by trimming the workspace dir
@@ -1381,9 +1378,7 @@ impl BuildRequest {
         }
 
         // Now extract the assets from the fat binary
-        artifacts
-            .assets
-            .add_from_object_path(&self.patch_exe(artifacts.time_start))?;
+        self.collect_assets(&self.patch_exe(artifacts.time_start), ctx)?;
 
         // Clean up the temps manually
         // todo: we might want to keep them around for debugging purposes
@@ -1436,6 +1431,14 @@ impl BuildRequest {
                     "--pie".to_string(),
                     "--experimental-pic".to_string(),
                 ]);
+
+                // retain exports so post-processing has hooks to work with
+                for (idx, arg) in original_args.iter().enumerate() {
+                    if *arg == "--export" {
+                        out_args.push(arg.to_string());
+                        out_args.push(original_args[idx + 1].to_string());
+                    }
+                }
             }
 
             // This uses "cc" and these args need to be ld compatible
@@ -3924,7 +3927,7 @@ impl BuildRequest {
         }
 
         // Inject any resources from manganis into the head
-        for asset in assets.assets.values() {
+        for asset in assets.assets() {
             let asset_path = asset.bundled_path();
             match asset.options() {
                 AssetOptions::Css(css_options) => {
@@ -3954,7 +3957,11 @@ impl BuildRequest {
 
         // Manually inject the wasm file for preloading. WASM currently doesn't support preloading in the manganis asset system
         let wasm_source_path = self.wasm_bindgen_wasm_output_file();
-        if let Some(wasm_path) = assets.assets.get(&wasm_source_path) {
+        if let Some(wasm_assets) = assets.get_assets_for_source(&wasm_source_path) {
+            let wasm_path = wasm_assets
+                .iter()
+                .next()
+                .expect("There should be exactly one optimized wasm asset");
             let wasm_path = wasm_path.bundled_path();
             head_resources.push_str(&format!(
                     "<link rel=\"preload\" as=\"fetch\" type=\"application/wasm\" href=\"/{{base_path}}/assets/{wasm_path}\" crossorigin>"

+ 5 - 9
packages/cli/src/cli/build_assets.rs

@@ -1,8 +1,8 @@
 use std::{fs::create_dir_all, path::PathBuf};
 
-use crate::{Result, StructuredOutput};
+use crate::{extract_assets_from_file, Result, StructuredOutput};
 use clap::Parser;
-use dioxus_cli_opt::{process_file_to, AssetManifest};
+use dioxus_cli_opt::process_file_to;
 use tracing::debug;
 
 #[derive(Clone, Debug, Parser)]
@@ -10,21 +10,17 @@ pub struct BuildAssets {
     /// The source executable to build assets for.
     pub(crate) executable: PathBuf,
 
-    /// The source directory for the assets.
-    pub(crate) source: PathBuf,
-
     /// The destination directory for the assets.
     pub(crate) destination: PathBuf,
 }
 
 impl BuildAssets {
     pub async fn run(self) -> Result<StructuredOutput> {
-        let mut manifest = AssetManifest::default();
-        manifest.add_from_object_path(&self.executable)?;
+        let manifest = extract_assets_from_file(&self.executable)?;
 
         create_dir_all(&self.destination)?;
-        for (path, asset) in manifest.assets.iter() {
-            let source_path = self.source.join(path);
+        for asset in manifest.assets() {
+            let source_path = PathBuf::from(asset.absolute_source_path());
             let destination_path = self.destination.join(asset.bundled_path());
             debug!(
                 "Processing asset {} --> {} {:#?}",

+ 3 - 0
packages/cli/src/error.rs

@@ -36,6 +36,9 @@ pub(crate) enum Error {
     #[error("Failed to perform hotpatch: {0}")]
     PatchingFailed(#[from] crate::build::PatchError),
 
+    #[error("Failed to read object file: {0}")]
+    ObjectReadFailed(#[from] object::Error),
+
     #[error("{0}")]
     CapturedPanic(String),
 

+ 1 - 1
packages/cli/src/serve/output.rs

@@ -152,7 +152,7 @@ impl Output {
         use std::io::IsTerminal;
 
         if !stdout().is_terminal() {
-            return io::Result::Err(io::Error::new(io::ErrorKind::Other, "Not a terminal"));
+            return io::Result::Err(io::Error::other("Not a terminal"));
         }
 
         enable_raw_mode()?;

+ 4 - 2
packages/cli/src/serve/runner.rs

@@ -309,8 +309,10 @@ impl AppServer {
 
             // If it's an asset, we want to hotreload it
             // todo(jon): don't hardcode this here
-            if let Some(bundled_name) = self.client.hotreload_bundled_asset(path).await {
-                assets.push(PathBuf::from("/assets/").join(bundled_name));
+            if let Some(bundled_names) = self.client.hotreload_bundled_assets(path).await {
+                for bundled_name in bundled_names {
+                    assets.push(PathBuf::from("/assets/").join(bundled_name));
+                }
             }
 
             // If it's a rust file, we want to hotreload it using the filemap

+ 5 - 0
packages/const-serialize/src/const_buffers.rs

@@ -30,4 +30,9 @@ impl<'a> ConstReadBuffer<'a> {
     pub const fn as_ref(&self) -> &[u8] {
         self.memory
     }
+
+    /// Get a slice of the buffer from the current location to the end of the buffer
+    pub const fn remaining(&self) -> &[u8] {
+        self.memory.split_at(self.location).1
+    }
 }

+ 14 - 0
packages/const-serialize/src/const_vec.rs

@@ -420,3 +420,17 @@ fn test_const_vec_remove() {
     assert_eq!(value, Some(5678));
     assert_eq!(vec.as_ref(), &[]);
 }
+
+#[test]
+fn test_const_vec_extend() {
+    const VEC: ConstVec<u32> = {
+        let mut vec = ConstVec::new();
+        vec = vec.push(1234);
+        vec = vec.push(5678);
+        vec = vec.extend(&[91011, 1213]);
+        vec
+    };
+    let vec = VEC;
+    println!("{:?}", vec);
+    assert_eq!(vec.as_ref(), &[1234, 5678, 91011, 1213]);
+}

+ 2 - 2
packages/const-serialize/src/lib.rs

@@ -137,7 +137,7 @@ pub enum Layout {
 
 impl Layout {
     /// The size of the type in bytes.
-    const fn size(&self) -> usize {
+    pub const fn size(&self) -> usize {
         match self {
             Layout::Enum(layout) => layout.size,
             Layout::Struct(layout) => layout.size,
@@ -221,7 +221,7 @@ impl_serialize_const_tuple!(T1: 0, T2: 1, T3: 2, T4: 3, T5: 4, T6: 5, T7: 6, T8:
 const MAX_STR_SIZE: usize = 256;
 
 /// A string that is stored in a constant sized buffer that can be serialized and deserialized at compile time
-#[derive(PartialEq, PartialOrd, Clone, Copy, Hash)]
+#[derive(Eq, PartialEq, PartialOrd, Clone, Copy, Hash)]
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
 pub struct ConstStr {
     #[cfg_attr(feature = "serde", serde(with = "serde_bytes"))]

+ 61 - 0
packages/const-serialize/tests/enum.rs

@@ -95,6 +95,67 @@ fn test_serialize_enum() {
     assert_eq!(deserialize_const!(Enum, buf).unwrap().1, data);
 }
 
+#[test]
+fn test_serialize_list_of_lopsided_enums() {
+    #[derive(Clone, Copy, Debug, PartialEq, SerializeConst)]
+    #[repr(C, u8)]
+    enum Enum {
+        A,
+        B { one: u8, two: u16 } = 15,
+    }
+
+    println!("{:#?}", Enum::MEMORY_LAYOUT);
+
+    let data = [Enum::A, Enum::A];
+    let mut buf = ConstVec::new();
+    buf = serialize_const(&data, buf);
+    println!("{:?}", buf.as_ref());
+    let buf = buf.read();
+    assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data);
+
+    let data = [
+        Enum::B {
+            one: 0x11,
+            two: 0x2233,
+        },
+        Enum::B {
+            one: 0x12,
+            two: 0x2244,
+        },
+    ];
+    let mut buf = ConstVec::new();
+    buf = serialize_const(&data, buf);
+    println!("{:?}", buf.as_ref());
+    let buf = buf.read();
+    assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data);
+
+    let data = [
+        Enum::A,
+        Enum::B {
+            one: 0x11,
+            two: 0x2233,
+        },
+    ];
+    let mut buf = ConstVec::new();
+    buf = serialize_const(&data, buf);
+    println!("{:?}", buf.as_ref());
+    let buf = buf.read();
+    assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data);
+
+    let data = [
+        Enum::B {
+            one: 0x11,
+            two: 0x2233,
+        },
+        Enum::A,
+    ];
+    let mut buf = ConstVec::new();
+    buf = serialize_const(&data, buf);
+    println!("{:?}", buf.as_ref());
+    let buf = buf.read();
+    assert_eq!(deserialize_const!([Enum; 2], buf).unwrap().1, data);
+}
+
 #[test]
 fn test_serialize_u8_enum() {
     #[derive(Clone, Copy, Debug, PartialEq, SerializeConst)]

+ 1 - 1
packages/core/src/scope_context.rs

@@ -594,7 +594,7 @@ impl ScopeId {
 
     /// Create a subscription that schedules a future render for the reference component. Unlike [`Self::needs_update`], this function will work outside of the dioxus runtime.
     ///
-    /// ## Notice: you should prefer using [`crate::prelude::schedule_update_any`]
+    /// ## Notice: you should prefer using [`crate::schedule_update_any`]
     pub fn schedule_update(&self) -> Arc<dyn Fn() + Send + Sync + 'static> {
         Runtime::with_scope(*self, |cx| cx.schedule_update()).unwrap()
     }

+ 75 - 33
packages/manganis/manganis-core/src/asset.rs

@@ -1,23 +1,13 @@
 use crate::AssetOptions;
-use const_serialize::{ConstStr, SerializeConst};
-use std::path::PathBuf;
+use const_serialize::{deserialize_const, ConstStr, ConstVec, SerializeConst};
+use std::{fmt::Debug, hash::Hash, path::PathBuf};
 
 /// An asset that should be copied by the bundler with some options. This type will be
-/// serialized into the binary and added to the link section [`LinkSection::CURRENT`](crate::linker::LinkSection::CURRENT).
+/// serialized into the binary.
 /// CLIs that support manganis, should pull out the assets from the link section, optimize,
 /// and write them to the filesystem at [`BundledAsset::bundled_path`] for the application
 /// to use.
-#[derive(
-    Debug,
-    PartialEq,
-    PartialOrd,
-    Clone,
-    Copy,
-    Hash,
-    SerializeConst,
-    serde::Serialize,
-    serde::Deserialize,
-)]
+#[derive(Debug, Eq, Clone, Copy, SerializeConst, serde::Serialize, serde::Deserialize)]
 pub struct BundledAsset {
     /// The absolute path of the asset
     absolute_source_path: ConstStr,
@@ -27,13 +17,46 @@ pub struct BundledAsset {
     options: AssetOptions,
 }
 
+impl PartialEq for BundledAsset {
+    fn eq(&self, other: &Self) -> bool {
+        self.absolute_source_path == other.absolute_source_path
+            && self.bundled_path == other.bundled_path
+            && self.options == other.options
+    }
+}
+
+impl PartialOrd for BundledAsset {
+    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+        match self
+            .absolute_source_path
+            .partial_cmp(&other.absolute_source_path)
+        {
+            Some(core::cmp::Ordering::Equal) => {}
+            ord => return ord,
+        }
+        match self.bundled_path.partial_cmp(&other.bundled_path) {
+            Some(core::cmp::Ordering::Equal) => {}
+            ord => return ord,
+        }
+        self.options.partial_cmp(&other.options)
+    }
+}
+
+impl Hash for BundledAsset {
+    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+        self.absolute_source_path.hash(state);
+        self.bundled_path.hash(state);
+        self.options.hash(state);
+    }
+}
+
 impl BundledAsset {
     #[doc(hidden)]
     /// This should only be called from the macro
     /// Create a new asset
     pub const fn new(
-        absolute_source_path: &'static str,
-        bundled_path: &'static str,
+        absolute_source_path: &str,
+        bundled_path: &str,
         options: AssetOptions,
     ) -> Self {
         Self {
@@ -83,6 +106,7 @@ impl BundledAsset {
     pub fn absolute_source_path(&self) -> &str {
         self.absolute_source_path.as_str()
     }
+
     /// Get the options for the asset
     pub const fn options(&self) -> &AssetOptions {
         &self.options
@@ -101,28 +125,49 @@ impl BundledAsset {
 ///     img { src: ASSET }
 /// };
 /// ```
-#[derive(Debug, PartialEq, Clone, Copy)]
+#[derive(PartialEq, Clone, Copy)]
 pub struct Asset {
-    /// The bundled asset
-    bundled: BundledAsset,
-    /// The link section for the asset
-    keep_link_section: fn() -> u8,
+    /// A pointer to the bundled asset. This will be resolved after the linker has run and
+    /// put into the lazy asset
+    ///
+    /// WARNING: Don't read this directly. Reads can get optimized away at compile time before
+    /// the data for this is filled in by the CLI after the binary is built. Instead, use
+    /// [`std::ptr::read_volatile`] to read the data.
+    bundled: &'static [u8],
+}
+
+impl Debug for Asset {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        self.resolve().fmt(f)
+    }
 }
 
+unsafe impl Send for Asset {}
+unsafe impl Sync for Asset {}
+
 impl Asset {
     #[doc(hidden)]
     /// This should only be called from the macro
     /// Create a new asset from the bundled form of the asset and the link section
-    pub const fn new(bundled: BundledAsset, keep_link_section: fn() -> u8) -> Self {
-        Self {
-            bundled,
-            keep_link_section,
-        }
+    pub const fn new(bundled: &'static [u8]) -> Self {
+        Self { bundled }
     }
 
     /// Get the bundled asset
-    pub const fn bundled(&self) -> &BundledAsset {
-        &self.bundled
+    pub fn bundled(&self) -> BundledAsset {
+        let len = self.bundled.len();
+        let ptr = self.bundled as *const [u8] as *const u8;
+        if ptr.is_null() {
+            panic!("Tried to use an asset that was not bundled. Make sure you are compiling dx as the linker");
+        }
+        let mut bytes = ConstVec::new();
+        for byte in 0..len {
+            // SAFETY: We checked that the pointer was not null above. The pointer is valid for reads and
+            // since we are reading a u8 there are no alignment requirements
+            bytes = bytes.push(unsafe { std::ptr::read_volatile(ptr.add(byte)) });
+        }
+        let read = bytes.read();
+        deserialize_const!(BundledAsset, read).expect("Failed to deserialize asset. Make sure you built with the matching version of the Dioxus CLI").1
     }
 
     /// Return a canonicalized path to the asset
@@ -130,13 +175,10 @@ impl Asset {
     /// Attempts to resolve it against an `assets` folder in the current directory.
     /// If that doesn't exist, it will resolve against the cargo manifest dir
     pub fn resolve(&self) -> PathBuf {
-        // Force a volatile read of the asset link section to ensure the symbol makes it into the binary
-        (self.keep_link_section)();
-
         #[cfg(feature = "dioxus")]
         // If the asset is relative, we resolve the asset at the current directory
         if !dioxus_core_types::is_bundled_app() {
-            return PathBuf::from(self.bundled.absolute_source_path.as_str());
+            return PathBuf::from(self.bundled().absolute_source_path.as_str());
         }
 
         #[cfg(feature = "dioxus")]
@@ -156,7 +198,7 @@ impl Asset {
 
         // Otherwise presumably we're bundled and we can use the bundled path
         bundle_root.join(PathBuf::from(
-            self.bundled.bundled_path.as_str().trim_start_matches('/'),
+            self.bundled().bundled_path.as_str().trim_start_matches('/'),
         ))
     }
 }

+ 2 - 0
packages/manganis/manganis-core/src/css.rs

@@ -5,6 +5,7 @@ use std::collections::HashSet;
 /// Options for a css asset
 #[derive(
     Debug,
+    Eq,
     PartialEq,
     PartialOrd,
     Clone,
@@ -79,6 +80,7 @@ impl CssAssetOptions {
 /// Options for a css module asset
 #[derive(
     Debug,
+    Eq,
     PartialEq,
     PartialOrd,
     Clone,

+ 1 - 0
packages/manganis/manganis-core/src/folder.rs

@@ -5,6 +5,7 @@ use crate::AssetOptions;
 /// The builder for a folder asset.
 #[derive(
     Debug,
+    Eq,
     PartialEq,
     PartialOrd,
     Clone,

+ 0 - 94
packages/manganis/manganis-core/src/hash.rs

@@ -1,94 +0,0 @@
-//! Utilities for creating hashed paths to assets in Manganis. This module defines [`AssetHash`] which is used to create a hashed path to an asset in both the CLI and the macro.
-
-use std::{
-    error::Error,
-    hash::{Hash, Hasher},
-    io::Read,
-    path::{Path, PathBuf},
-};
-
-/// An error that can occur when hashing an asset
-#[derive(Debug)]
-#[non_exhaustive]
-pub enum AssetHashError {
-    /// An io error occurred
-    IoError { err: std::io::Error, path: PathBuf },
-}
-
-impl std::fmt::Display for AssetHashError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            AssetHashError::IoError { path, err } => {
-                write!(f, "Failed to read file: {}; {}", path.display(), err)
-            }
-        }
-    }
-}
-
-impl Error for AssetHashError {}
-
-/// The opaque hash type manganis uses to identify assets. Each time an asset or asset options change, this hash will
-/// change. This hash is included in the URL of the bundled asset for cache busting.
-pub struct AssetHash {
-    /// We use a wrapper type here to hide the exact size of the hash so we can switch to a sha hash in a minor version bump
-    hash: [u8; 8],
-}
-
-impl AssetHash {
-    /// Create a new asset hash
-    const fn new(hash: u64) -> Self {
-        Self {
-            hash: hash.to_le_bytes(),
-        }
-    }
-
-    /// Get the hash bytes
-    pub const fn bytes(&self) -> &[u8] {
-        &self.hash
-    }
-
-    /// Create a new asset hash for a file. The input file to this function should be fully resolved
-    pub fn hash_file_contents(file_path: &Path) -> Result<AssetHash, AssetHashError> {
-        // Create a hasher
-        let mut hash = std::collections::hash_map::DefaultHasher::new();
-
-        // If this is a folder, hash the folder contents
-        if file_path.is_dir() {
-            let files = std::fs::read_dir(file_path).map_err(|err| AssetHashError::IoError {
-                err,
-                path: file_path.to_path_buf(),
-            })?;
-            for file in files.flatten() {
-                let path = file.path();
-                Self::hash_file_contents(&path)?.bytes().hash(&mut hash);
-            }
-            let hash = hash.finish();
-            return Ok(AssetHash::new(hash));
-        }
-
-        // Otherwise, open the file to get its contents
-        let mut file = std::fs::File::open(file_path).map_err(|err| AssetHashError::IoError {
-            err,
-            path: file_path.to_path_buf(),
-        })?;
-
-        // We add a hash to the end of the file so it is invalidated when the bundled version of the file changes
-        // The hash includes the file contents, the options, and the version of manganis. From the macro, we just
-        // know the file contents, so we only include that hash
-        let mut buffer = [0; 8192];
-        loop {
-            let read = file
-                .read(&mut buffer)
-                .map_err(|err| AssetHashError::IoError {
-                    err,
-                    path: file_path.to_path_buf(),
-                })?;
-            if read == 0 {
-                break;
-            }
-            hash.write(&buffer[..read]);
-        }
-
-        Ok(AssetHash::new(hash.finish()))
-    }
-}

+ 3 - 0
packages/manganis/manganis-core/src/images.rs

@@ -5,6 +5,7 @@ use crate::AssetOptions;
 /// The type of an image. You can read more about the tradeoffs between image formats [here](https://developer.mozilla.org/en-US/docs/Web/Media/Formats/Image_types)
 #[derive(
     Debug,
+    Eq,
     PartialEq,
     PartialOrd,
     Clone,
@@ -31,6 +32,7 @@ pub enum ImageFormat {
 /// The size of an image asset
 #[derive(
     Debug,
+    Eq,
     PartialEq,
     PartialOrd,
     Clone,
@@ -56,6 +58,7 @@ pub enum ImageSize {
 /// Options for an image asset
 #[derive(
     Debug,
+    Eq,
     PartialEq,
     PartialOrd,
     Clone,

+ 1 - 0
packages/manganis/manganis-core/src/js.rs

@@ -5,6 +5,7 @@ use crate::AssetOptions;
 /// Options for a javascript asset
 #[derive(
     Debug,
+    Eq,
     PartialEq,
     PartialOrd,
     Clone,

+ 0 - 4
packages/manganis/manganis-core/src/lib.rs

@@ -15,7 +15,3 @@ pub use js::*;
 
 mod asset;
 pub use asset::*;
-
-pub mod linker;
-
-pub mod hash;

+ 0 - 71
packages/manganis/manganis-core/src/linker.rs

@@ -1,71 +0,0 @@
-//! Utilities for working with Manganis assets in the linker. This module defines [`LinkSection`] which has information about what section manganis assets are stored in on each platform.
-
-/// Information about the manganis link section for a given platform
-#[derive(Debug, Clone, Copy)]
-pub struct LinkSection {
-    /// The link section we pass to the static
-    pub link_section: &'static str,
-    /// The name of the section we find in the binary
-    pub name: &'static str,
-}
-
-impl LinkSection {
-    /// The list of link sections for all supported platforms
-    pub const ALL: &'static [&'static LinkSection] =
-        &[Self::WASM, Self::MACOS, Self::WINDOWS, Self::ILLUMOS];
-
-    /// Returns the link section used in linux, android, fuchsia, psp, freebsd, and wasm32
-    pub const WASM: &'static LinkSection = &LinkSection {
-        link_section: "manganis",
-        name: "manganis",
-    };
-
-    /// Returns the link section used in macOS, iOS, tvOS
-    pub const MACOS: &'static LinkSection = &LinkSection {
-        link_section: "__DATA,manganis,regular,no_dead_strip",
-        name: "manganis",
-    };
-
-    /// Returns the link section used in windows
-    pub const WINDOWS: &'static LinkSection = &LinkSection {
-        link_section: "mg",
-        name: "mg",
-    };
-
-    /// Returns the link section used in illumos
-    pub const ILLUMOS: &'static LinkSection = &LinkSection {
-        link_section: "set_manganis",
-        name: "set_manganis",
-    };
-
-    /// The link section used on the current platform
-    pub const CURRENT: &'static LinkSection = {
-        #[cfg(any(
-            target_os = "none",
-            target_os = "linux",
-            target_os = "android",
-            target_os = "fuchsia",
-            target_os = "psp",
-            target_os = "freebsd",
-            target_arch = "wasm32"
-        ))]
-        {
-            Self::WASM
-        }
-
-        #[cfg(any(target_os = "macos", target_os = "ios", target_os = "tvos"))]
-        {
-            Self::MACOS
-        }
-
-        #[cfg(target_os = "windows")]
-        {
-            Self::WINDOWS
-        }
-
-        #[cfg(target_os = "illumos")]
-        {
-            Self::ILLUMOS
-        }
-    };
-}

+ 1 - 0
packages/manganis/manganis-core/src/options.rs

@@ -7,6 +7,7 @@ use crate::{
 /// Settings for a generic asset
 #[derive(
     Debug,
+    Eq,
     PartialEq,
     PartialOrd,
     Clone,

+ 20 - 19
packages/manganis/manganis-macro/src/asset.rs

@@ -1,9 +1,11 @@
 use crate::{resolve_path, AssetParseError};
 use macro_string::MacroString;
-use manganis_core::hash::AssetHash;
 use proc_macro2::TokenStream as TokenStream2;
 use quote::{quote, ToTokens, TokenStreamExt};
-use std::path::PathBuf;
+use std::{
+    hash::{DefaultHasher, Hash, Hasher},
+    path::PathBuf,
+};
 use syn::{
     parse::{Parse, ParseStream},
     spanned::Spanned as _,
@@ -72,24 +74,19 @@ impl ToTokens for AssetParser {
                 return;
             }
         };
-        let asset_str = asset.to_string_lossy();
-        let mut asset_str = proc_macro2::Literal::string(&asset_str);
+        let asset_string = asset.to_string_lossy();
+        let mut asset_str = proc_macro2::Literal::string(&asset_string);
         asset_str.set_span(self.path_expr.span());
 
-        let hash = match AssetHash::hash_file_contents(asset) {
-            Ok(hash) => hash,
-            Err(err) => {
-                let err = err.to_string();
-                tokens.append_all(quote! { compile_error!(#err) });
-                return;
-            }
-        };
-
-        let hash = hash.bytes();
+        let mut hash = DefaultHasher::new();
+        format!("{:?}", self.options.span()).hash(&mut hash);
+        format!("{:?}", self.options.to_string()).hash(&mut hash);
+        asset_string.hash(&mut hash);
+        let asset_hash = format!("{:016x}", hash.finish());
 
         // Generate the link section for the asset
         // The link section includes the source path and the output path of the asset
-        let link_section = crate::generate_link_section(quote!(__ASSET));
+        let link_section = crate::generate_link_section(quote!(__ASSET), &asset_hash);
 
         // generate the asset::new method to deprecate the `./assets/blah.css` syntax
         let constructor = if asset.is_relative() {
@@ -106,21 +103,25 @@ impl ToTokens for AssetParser {
 
         tokens.extend(quote! {
             {
-                // We keep a hash of the contents of the asset for cache busting
-                const __ASSET_HASH: &[u8] = &[#(#hash),*];
                 // The source is used by the CLI to copy the asset
                 const __ASSET_SOURCE_PATH: &'static str = #asset_str;
                 // The options give the CLI info about how to process the asset
                 // Note: into_asset_options is not a trait, so we cannot accept the options directly
                 // in the constructor. Stable rust doesn't have support for constant functions in traits
                 const __ASSET_OPTIONS: manganis::AssetOptions = #options.into_asset_options();
+                // The input token hash is used to uniquely identify the link section for this asset
+                const __ASSET_HASH: &'static str = #asset_hash;
                 // Create the asset that the crate will use. This is used both in the return value and
                 // added to the linker for the bundler to copy later
-                const __ASSET: manganis::BundledAsset = manganis::macro_helpers::#constructor(__ASSET_SOURCE_PATH, __ASSET_HASH, __ASSET_OPTIONS);
+                const __ASSET: manganis::BundledAsset = manganis::macro_helpers::#constructor(__ASSET_SOURCE_PATH, __ASSET_OPTIONS);
 
                 #link_section
 
-                manganis::Asset::new(__ASSET, __keep_link_section)
+                static __REFERENCE_TO_LINK_SECTION: &'static [u8] = &__LINK_SECTION;
+
+                manganis::Asset::new(
+                    __REFERENCE_TO_LINK_SECTION
+                )
             }
         })
     }

+ 4 - 12
packages/manganis/manganis-macro/src/linker.rs

@@ -6,12 +6,9 @@ use quote::ToTokens;
 /// We force rust to store a serialized representation of the asset description
 /// inside a particular region of the binary, with the label "manganis".
 /// After linking, the "manganis" sections of the different object files will be merged.
-pub fn generate_link_section(asset: impl ToTokens) -> TokenStream2 {
+pub fn generate_link_section(asset: impl ToTokens, asset_hash: &str) -> TokenStream2 {
     let position = proc_macro2::Span::call_site();
-    let section_name = syn::LitStr::new(
-        manganis_core::linker::LinkSection::CURRENT.link_section,
-        position,
-    );
+    let export_name = syn::LitStr::new(&format!("__MANGANIS__{}", asset_hash), position);
 
     quote::quote! {
         // First serialize the asset into a constant sized buffer
@@ -22,12 +19,7 @@ pub fn generate_link_section(asset: impl ToTokens) -> TokenStream2 {
         const __LEN: usize = __BYTES.len();
 
         // Now that we have the size of the asset, copy the bytes into a static array
-        #[link_section = #section_name]
-        #[used]
-        static __LINK_SECTION: [u8; __LEN] = manganis::macro_helpers::copy_bytes(__BYTES);
-
-        fn __keep_link_section() -> u8 {
-            unsafe { std::ptr::read_volatile(__LINK_SECTION.as_ptr()) }
-        }
+        #[unsafe(export_name = #export_name)]
+        static __LINK_SECTION: [u8; __LEN] = manganis::macro_helpers::copy_bytes(__BYTES);
     }
 }
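
A minimal sketch of how a post-link tool could locate these exported statics with the `object` crate (illustrative only, not the CLI's actual implementation; the `manganis_symbols` helper name is hypothetical, and the `__MANGANIS__` prefix matches the export name emitted above):

    use object::{Object, ObjectSymbol};

    // Hypothetical helper: list the names of the manganis asset symbols in a built binary.
    fn manganis_symbols(path: &std::path::Path) -> Result<Vec<String>, Box<dyn std::error::Error>> {
        let data = std::fs::read(path)?;
        let file = object::File::parse(&*data)?;
        Ok(file
            .symbols()
            .filter_map(|sym| sym.name().ok())
            .filter(|name| name.contains("__MANGANIS__"))
            .map(str::to_string)
            .collect())
    }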

+ 0 - 39
packages/manganis/manganis/src/hash.rs

@@ -1,39 +0,0 @@
-use const_serialize::{serialize_const, ConstVec, SerializeConst};
-
-// From rustchash -  https://github.com/rust-lang/rustc-hash/blob/6745258da00b7251bed4a8461871522d0231a9c7/src/lib.rs#L98
-const K: u64 = 0xf1357aea2e62a9c5;
-
-pub(crate) struct ConstHasher {
-    hash: u64,
-}
-
-impl ConstHasher {
-    pub const fn new() -> Self {
-        Self { hash: 0 }
-    }
-
-    pub const fn finish(&self) -> u64 {
-        self.hash
-    }
-
-    pub const fn write(mut self, bytes: &[u8]) -> Self {
-        let mut i = 0;
-        while i < bytes.len() {
-            self = self.write_byte(bytes[i]);
-            i += 1;
-        }
-        self
-    }
-
-    pub const fn write_byte(mut self, byte: u8) -> Self {
-        self.hash = self.hash.wrapping_add(byte as u64).wrapping_mul(K);
-        self
-    }
-
-    pub const fn hash_by_bytes<T: SerializeConst>(self, item: &T) -> Self {
-        let mut bytes = ConstVec::new();
-        bytes = serialize_const(item, bytes);
-        let bytes = bytes.as_ref();
-        self.write(bytes)
-    }
-}

+ 0 - 1
packages/manganis/manganis/src/lib.rs

@@ -1,7 +1,6 @@
 #![doc = include_str!("../README.md")]
 #![deny(missing_docs)]
 
-mod hash;
 #[doc(hidden)]
 pub mod macro_helpers;
 pub use manganis_macro::{asset, css_module};

+ 20 - 199
packages/manganis/manganis/src/macro_helpers.rs

@@ -1,17 +1,13 @@
 pub use const_serialize;
-use const_serialize::{serialize_const, ConstStr, ConstVec};
+use const_serialize::{serialize_const, ConstStr, ConstVec, SerializeConst};
 use manganis_core::{AssetOptions, BundledAsset};
 
-use crate::hash::ConstHasher;
+const PLACEHOLDER_HASH: ConstStr =
+    ConstStr::new("this is a placeholder path which will be replaced by the linker");
 
 /// Create a bundled asset from the input path, the content hash, and the asset options
-pub const fn create_bundled_asset(
-    input_path: &str,
-    content_hash: &[u8],
-    asset_config: AssetOptions,
-) -> BundledAsset {
-    let hashed_path = generate_unique_path_with_byte_hash(input_path, content_hash, &asset_config);
-    BundledAsset::new_from_const(ConstStr::new(input_path), hashed_path, asset_config)
+pub const fn create_bundled_asset(input_path: &str, asset_config: AssetOptions) -> BundledAsset {
+    BundledAsset::new_from_const(ConstStr::new(input_path), PLACEHOLDER_HASH, asset_config)
 }
 
 /// Create a bundled asset from the input path, the content hash, and the asset options with a relative asset deprecation warning
@@ -22,204 +18,29 @@ pub const fn create_bundled_asset(
 )]
 pub const fn create_bundled_asset_relative(
     input_path: &str,
-    content_hash: &[u8],
     asset_config: AssetOptions,
 ) -> BundledAsset {
-    create_bundled_asset(input_path, content_hash, asset_config)
-}
-
-/// Format the input path with a hash to create an unique output path for the macro in the form `{input_path}-{hash}.{extension}`
-pub const fn generate_unique_path(
-    input_path: &str,
-    content_hash: u64,
-    asset_config: &AssetOptions,
-) -> ConstStr {
-    let byte_hash = content_hash.to_le_bytes();
-    generate_unique_path_with_byte_hash(input_path, &byte_hash, asset_config)
-}
-
-/// Format the input path with a hash to create an unique output path for the macro in the form `{input_path}-{hash}.{extension}`
-const fn generate_unique_path_with_byte_hash(
-    input_path: &str,
-    content_hash: &[u8],
-    asset_config: &AssetOptions,
-) -> ConstStr {
-    // Format the unique path with the format `{input_path}-{hash}.{extension}`
-    // Start with the input path
-    let mut input_path = ConstStr::new(input_path);
-    // Then strip the prefix from the input path. The path comes from the build platform, but
-    // in wasm, we don't know what the path separator is from the build platform. We need to
-    // split by both unix and windows paths and take the smallest one
-    let mut extension = None;
-    match (input_path.rsplit_once('/'), input_path.rsplit_once('\\')) {
-        (Some((_, unix_new_input_path)), Some((_, windows_new_input_path))) => {
-            input_path = if unix_new_input_path.len() < windows_new_input_path.len() {
-                unix_new_input_path
-            } else {
-                windows_new_input_path
-            };
-        }
-        (Some((_, unix_new_input_path)), _) => {
-            input_path = unix_new_input_path;
-        }
-        (_, Some((_, windows_new_input_path))) => {
-            input_path = windows_new_input_path;
-        }
-        _ => {}
-    }
-    if let Some((new_input_path, new_extension)) = input_path.rsplit_once('.') {
-        extension = Some(new_extension);
-        input_path = new_input_path;
-    }
-    // Then add a dash
-    let mut macro_output_path = input_path.push_str("-");
-
-    // Hash the contents along with the asset config to create a unique hash for the asset
-    // When this hash changes, the client needs to re-fetch the asset
-    let mut hasher = ConstHasher::new();
-    hasher = hasher.write(content_hash);
-    hasher = hasher.hash_by_bytes(asset_config);
-    let hash = hasher.finish();
-
-    // Then add the hash in hex form
-    let hash_bytes = hash.to_le_bytes();
-    let mut i = 0;
-    while i < hash_bytes.len() {
-        let byte = hash_bytes[i];
-        let first = byte >> 4;
-        let second = byte & 0x0f;
-        const fn byte_to_char(byte: u8) -> char {
-            match char::from_digit(byte as u32, 16) {
-                Some(c) => c,
-                None => panic!("byte must be a valid digit"),
-            }
-        }
-        macro_output_path = macro_output_path.push(byte_to_char(first));
-        macro_output_path = macro_output_path.push(byte_to_char(second));
-        i += 1;
-    }
-
-    // Finally add the extension
-    match asset_config.extension() {
-        Some(extension) => {
-            macro_output_path = macro_output_path.push('.');
-            macro_output_path = macro_output_path.push_str(extension)
-        }
-        None => {
-            if let Some(extension) = extension {
-                macro_output_path = macro_output_path.push('.');
-
-                let ext_bytes = extension.as_str().as_bytes();
-
-                // Rewrite scss as css
-                if bytes_equal(ext_bytes, b"scss") || bytes_equal(ext_bytes, b"sass") {
-                    macro_output_path = macro_output_path.push_str("css")
-                } else {
-                    macro_output_path = macro_output_path.push_str(extension.as_str())
-                }
-            }
-        }
-    }
-
-    macro_output_path
+    create_bundled_asset(input_path, asset_config)
 }
 
-/// Construct the hash used by manganis and cli-opt to uniquely identify a asset based on its contents
-pub const fn hash_asset(asset_config: &AssetOptions, content_hash: u64) -> ConstStr {
-    let mut string = ConstStr::new("");
-
-    // Hash the contents along with the asset config to create a unique hash for the asset
-    // When this hash changes, the client needs to re-fetch the asset
-    let mut hasher = ConstHasher::new();
-    hasher = hasher.write(&content_hash.to_le_bytes());
-    hasher = hasher.hash_by_bytes(asset_config);
-    let hash = hasher.finish();
-
-    // Then add the hash in hex form
-    let hash_bytes = hash.to_le_bytes();
-    let mut i = 0;
-    while i < hash_bytes.len() {
-        let byte = hash_bytes[i];
-        let first = byte >> 4;
-        let second = byte & 0x0f;
-        const fn byte_to_char(byte: u8) -> char {
-            match char::from_digit(byte as u32, 16) {
-                Some(c) => c,
-                None => panic!("byte must be a valid digit"),
-            }
-        }
-        string = string.push(byte_to_char(first));
-        string = string.push(byte_to_char(second));
-        i += 1;
+/// Serialize an asset to a const buffer
+pub const fn serialize_asset(asset: &BundledAsset) -> ConstVec<u8> {
+    let data = ConstVec::new();
+    let mut data = serialize_const(asset, data);
+    // Reserve the maximum size of the asset
+    while data.len() < BundledAsset::MEMORY_LAYOUT.size() {
+        data = data.push(0);
     }
-
-    string
+    data
 }
 
-const fn bytes_equal(left: &[u8], right: &[u8]) -> bool {
-    if left.len() != right.len() {
-        return false;
-    }
-
-    let mut i = 0;
-    while i < left.len() {
-        if left[i] != right[i] {
-            return false;
-        }
-        i += 1;
+/// Deserialize a const buffer into a BundledAsset
+pub const fn deserialize_asset(bytes: &[u8]) -> BundledAsset {
+    let bytes = ConstVec::new().extend(bytes);
+    match const_serialize::deserialize_const!(BundledAsset, bytes.read()) {
+        Some((_, asset)) => asset,
+        None => panic!("Failed to deserialize asset. This may be caused by a mismatch between your dioxus and dioxus-cli versions"),
     }
-
-    true
-}
-
-#[test]
-fn test_unique_path() {
-    use manganis_core::{ImageAssetOptions, ImageFormat};
-    use std::path::PathBuf;
-    let mut input_path = PathBuf::from("some");
-    input_path.push("prefix");
-    input_path.push("test.png");
-    let content_hash = 123456789;
-    let asset_config = AssetOptions::Image(ImageAssetOptions::new().with_format(ImageFormat::Avif));
-    let output_path =
-        generate_unique_path(&input_path.to_string_lossy(), content_hash, &asset_config);
-    assert_eq!(output_path.as_str(), "test-603a88fe296462a3.avif");
-
-    // Changing the path without changing the contents shouldn't change the hash
-    let mut input_path = PathBuf::from("some");
-    input_path.push("prefix");
-    input_path.push("prefix");
-    input_path.push("test.png");
-    let content_hash = 123456789;
-    let asset_config = AssetOptions::Image(ImageAssetOptions::new().with_format(ImageFormat::Avif));
-    let output_path =
-        generate_unique_path(&input_path.to_string_lossy(), content_hash, &asset_config);
-    assert_eq!(output_path.as_str(), "test-603a88fe296462a3.avif");
-
-    let mut input_path = PathBuf::from("test");
-    input_path.push("ing");
-    input_path.push("test");
-    let content_hash = 123456789;
-    let asset_config = AssetOptions::Unknown;
-    let output_path =
-        generate_unique_path(&input_path.to_string_lossy(), content_hash, &asset_config);
-    assert_eq!(output_path.as_str(), "test-8d6e32dc0b45f853");
-
-    // Just changing the content hash should change the total hash
-    let mut input_path = PathBuf::from("test");
-    input_path.push("ing");
-    input_path.push("test");
-    let content_hash = 123456780;
-    let asset_config = AssetOptions::Unknown;
-    let output_path =
-        generate_unique_path(&input_path.to_string_lossy(), content_hash, &asset_config);
-    assert_eq!(output_path.as_str(), "test-40783366737abc4d");
-}
-
-/// Serialize an asset to a const buffer
-pub const fn serialize_asset(asset: &BundledAsset) -> ConstVec<u8> {
-    let write = ConstVec::new();
-    serialize_const(asset, write)
 }
 
 /// Copy a slice into a constant sized buffer at compile time
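
A rough usage sketch of the helpers above (the test name, input path, and options are made up for illustration): the placeholder-form asset the macro embeds can be round-tripped through the fixed-size buffer that backs the `__MANGANIS__*` export:

    use manganis::macro_helpers::{create_bundled_asset, deserialize_asset, serialize_asset};
    use manganis::AssetOptions;

    #[test]
    fn placeholder_asset_round_trips() {
        // Build the placeholder-form asset, serialize it into the padded const buffer,
        // and read it back out the same way the runtime does after the CLI fills it in.
        let asset = create_bundled_asset("/absolute/path/to/logo.png", AssetOptions::Unknown);
        let bytes = serialize_asset(&asset);
        let roundtripped = deserialize_asset(bytes.as_ref());
        assert_eq!(asset, roundtripped);
    }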

+ 15 - 0
packages/playwright-tests/cli-optimization.spec.js

@@ -8,4 +8,19 @@ test("optimized scripts run", async ({ page }) => {
   // should add an editor to the page that shows a main function
   const main = page.locator("#main");
   await expect(main).toContainText("hi");
+
+  // Expect the page to contain an image with the id "some_image"
+  const image = page.locator("#some_image");
+  await expect(image).toBeVisible();
+  // Get the image src
+  const src = await image.getAttribute("src");
+
+  // Expect the page to contain an image with the id "some_image_with_the_same_url"
+  const image2 = page.locator("#some_image_with_the_same_url");
+  await expect(image2).toBeVisible();
+  // Get the image src
+  const src2 = await image2.getAttribute("src");
+
+  // Expect the urls to be different
+  expect(src).not.toEqual(src2);
 });

+ 7 - 0
packages/playwright-tests/cli-optimization/src/main.rs

@@ -4,6 +4,8 @@ use dioxus::prelude::*;
 
 const MONACO_FOLDER: Asset = asset!("/monaco-editor-0.52.2/package/min/vs");
 const SOME_IMAGE: Asset = asset!("/images/toasts.png", ImageAssetOptions::new().with_avif());
+const SOME_IMAGE_WITH_THE_SAME_URL: Asset =
+    asset!("/images/toasts.png", ImageAssetOptions::new().with_jpg());
 
 fn main() {
     dioxus::launch(App);
@@ -32,7 +34,12 @@ fn App() -> Element {
             "onload": script
         }
         img {
+            id: "some_image",
             src: "{SOME_IMAGE}"
         }
+        img {
+            id: "some_image_with_the_same_url",
+            src: "{SOME_IMAGE_WITH_THE_SAME_URL}"
+        }
     }
 }

+ 44 - 43
packages/playwright-tests/nested-suspense-ssg.spec.js

@@ -1,50 +1,51 @@
-// @ts-check
-const { test, expect } = require("@playwright/test");
+// TODO: ssg is disabled in the CLI server
+// // @ts-check
+// const { test, expect } = require("@playwright/test");
 
-test("nested suspense resolves", async ({ page }) => {
-  // Wait for the dev server to reload
-  await page.goto("http://localhost:5050");
-  // Then wait for the page to start loading
-  await page.goto("http://localhost:5050", { waitUntil: "commit" });
+// test("nested suspense resolves", async ({ page }) => {
+//   // Wait for the dev server to reload
+//   await page.goto("http://localhost:6060");
+//   // Then wait for the page to start loading
+//   await page.goto("http://localhost:6060", { waitUntil: "commit" });
 
-  // Expect the page to contain the suspense result from the server
-  const mainMessageTitle = page.locator("#title-0");
-  await expect(mainMessageTitle).toContainText("The robot says hello world");
-  const mainMessageBody = page.locator("#body-0");
-  await expect(mainMessageBody).toContainText(
-    "The robot becomes sentient and says hello world"
-  );
+//   // Expect the page to contain the suspense result from the server
+//   const mainMessageTitle = page.locator("#title-0");
+//   await expect(mainMessageTitle).toContainText("The robot says hello world");
+//   const mainMessageBody = page.locator("#body-0");
+//   await expect(mainMessageBody).toContainText(
+//     "The robot becomes sentient and says hello world"
+//   );
 
-  // And expect the title to have resolved on the client
-  await expect(page).toHaveTitle("The robot says hello world");
+//   // And expect the title to have resolved on the client
+//   await expect(page).toHaveTitle("The robot says hello world");
 
-  // Nested suspense should be resolved
-  const nestedMessageTitle1 = page.locator("#title-1");
-  await expect(nestedMessageTitle1).toContainText("The world says hello back");
-  const nestedMessageBody1 = page.locator("#body-1");
-  await expect(nestedMessageBody1).toContainText(
-    "In a stunning turn of events, the world collectively unites and says hello back"
-  );
+//   // Nested suspense should be resolved
+//   const nestedMessageTitle1 = page.locator("#title-1");
+//   await expect(nestedMessageTitle1).toContainText("The world says hello back");
+//   const nestedMessageBody1 = page.locator("#body-1");
+//   await expect(nestedMessageBody1).toContainText(
+//     "In a stunning turn of events, the world collectively unites and says hello back"
+//   );
 
-  const nestedMessageDiv2 = page.locator("#children-2");
-  await expect(nestedMessageDiv2).toBeEmpty();
-  const nestedMessageTitle2 = page.locator("#title-2");
-  await expect(nestedMessageTitle2).toContainText("Goodbye Robot");
-  const nestedMessageBody2 = page.locator("#body-2");
-  await expect(nestedMessageBody2).toContainText("The robot says goodbye");
+//   const nestedMessageDiv2 = page.locator("#children-2");
+//   await expect(nestedMessageDiv2).toBeEmpty();
+//   const nestedMessageTitle2 = page.locator("#title-2");
+//   await expect(nestedMessageTitle2).toContainText("Goodbye Robot");
+//   const nestedMessageBody2 = page.locator("#body-2");
+//   await expect(nestedMessageBody2).toContainText("The robot says goodbye");
 
-  const nestedMessageDiv3 = page.locator("#children-3");
-  await expect(nestedMessageDiv3).toBeEmpty();
-  const nestedMessageTitle3 = page.locator("#title-3");
-  await expect(nestedMessageTitle3).toContainText("Goodbye World");
-  const nestedMessageBody3 = page.locator("#body-3");
-  await expect(nestedMessageBody3).toContainText("The world says goodbye");
+//   const nestedMessageDiv3 = page.locator("#children-3");
+//   await expect(nestedMessageDiv3).toBeEmpty();
+//   const nestedMessageTitle3 = page.locator("#title-3");
+//   await expect(nestedMessageTitle3).toContainText("Goodbye World");
+//   const nestedMessageBody3 = page.locator("#body-3");
+//   await expect(nestedMessageBody3).toContainText("The world says goodbye");
 
-  // Deeply nested suspense should be resolved
-  const nestedMessageDiv4 = page.locator("#children-4");
-  await expect(nestedMessageDiv4).toBeEmpty();
-  const nestedMessageTitle4 = page.locator("#title-4");
-  await expect(nestedMessageTitle4).toContainText("Hello World");
-  const nestedMessageBody4 = page.locator("#body-4");
-  await expect(nestedMessageBody4).toContainText("The world says hello again");
-});
+//   // Deeply nested suspense should be resolved
+//   const nestedMessageDiv4 = page.locator("#children-4");
+//   await expect(nestedMessageDiv4).toBeEmpty();
+//   const nestedMessageTitle4 = page.locator("#title-4");
+//   await expect(nestedMessageTitle4).toContainText("Hello World");
+//   const nestedMessageBody4 = page.locator("#body-4");
+//   await expect(nestedMessageBody4).toContainText("The world says hello again");
+// });

+ 2 - 2
packages/playwright-tests/playwright.config.js

@@ -87,8 +87,8 @@ module.exports = defineConfig({
     {
       cwd: path.join(process.cwd(), "web"),
       command:
-        'cargo run --package dioxus-cli --release -- serve --force-sequential --platform web --addr "127.0.0.1" --port 9999',
-      port: 9999,
+        'cargo run --package dioxus-cli --release -- serve --force-sequential --platform web --addr "127.0.0.1" --port 9990',
+      port: 9990,
       timeout: 50 * 60 * 1000,
       reuseExistingServer: !process.env.CI,
       stdout: "pipe",

+ 3 - 0
packages/playwright-tests/wasm-split-harness/.cargo/config.toml

@@ -0,0 +1,3 @@
+# It's recommended to set the flag on a per-target basis:
+[target.wasm32-unknown-unknown]
+rustflags = ['--cfg', 'getrandom_backend="wasm_js"']

+ 14 - 14
packages/playwright-tests/web.spec.js

@@ -2,7 +2,7 @@
 const { test, expect, defineConfig } = require("@playwright/test");
 
 test("button click", async ({ page }) => {
-  await page.goto("http://localhost:9999");
+  await page.goto("http://localhost:9990");
 
   // Expect the page to contain the counter text.
   const main = page.locator("#main");
@@ -21,7 +21,7 @@ test("button click", async ({ page }) => {
 });
 
 test("svg", async ({ page }) => {
-  await page.goto("http://localhost:9999");
+  await page.goto("http://localhost:9990");
 
   // Expect the page to contain the svg.
   const svg = page.locator("svg");
@@ -36,7 +36,7 @@ test("svg", async ({ page }) => {
 });
 
 test("raw attribute", async ({ page }) => {
-  await page.goto("http://localhost:9999");
+  await page.goto("http://localhost:9990");
 
   // Expect the page to contain the div with the raw attribute.
   const div = page.locator("div.raw-attribute-div");
@@ -44,7 +44,7 @@ test("raw attribute", async ({ page }) => {
 });
 
 test("hidden attribute", async ({ page }) => {
-  await page.goto("http://localhost:9999");
+  await page.goto("http://localhost:9990");
 
   // Expect the page to contain the div with the hidden attribute.
   const div = page.locator("div.hidden-attribute-div");
@@ -52,7 +52,7 @@ test("hidden attribute", async ({ page }) => {
 });
 
 test("dangerous inner html", async ({ page }) => {
-  await page.goto("http://localhost:9999");
+  await page.goto("http://localhost:9990");
 
   // Expect the page to contain the div with the dangerous inner html.
   const div = page.locator("div.dangerous-inner-html-div");
@@ -60,7 +60,7 @@ test("dangerous inner html", async ({ page }) => {
 });
 
 test("input value", async ({ page }) => {
-  await page.goto("http://localhost:9999");
+  await page.goto("http://localhost:9990");
 
   // Expect the page to contain the input with the value.
   const input = page.locator("input");
@@ -68,7 +68,7 @@ test("input value", async ({ page }) => {
 });
 
 test("style", async ({ page }) => {
-  await page.goto("http://localhost:9999");
+  await page.goto("http://localhost:9990");
 
   // Expect the page to contain the div with the style.
   const div = page.locator("div.style-div");
@@ -77,7 +77,7 @@ test("style", async ({ page }) => {
 });
 
 test("eval", async ({ page }) => {
-  await page.goto("http://localhost:9999");
+  await page.goto("http://localhost:9990");
 
   // Expect the page to contain the div with the eval and have no text.
   const div = page.locator("div.eval-result");
@@ -95,7 +95,7 @@ test("eval", async ({ page }) => {
 });
 
 test("prevent default", async ({ page }) => {
-  await page.goto("http://localhost:9999");
+  await page.goto("http://localhost:9990");
 
   // Expect the page to contain the div with the eval and have no text.
   const a = page.locator("a.prevent-default");
@@ -109,7 +109,7 @@ test("prevent default", async ({ page }) => {
 });
 
 test("onmounted", async ({ page }) => {
-  await page.goto("http://localhost:9999");
+  await page.goto("http://localhost:9990");
 
   // Expect the onmounted event to be called exactly once.
   const mountedDiv = page.locator("div.onmounted-div");
@@ -117,7 +117,7 @@ test("onmounted", async ({ page }) => {
 });
 
 test("web-sys closure", async ({ page }) => {
-  await page.goto("http://localhost:9999");
+  await page.goto("http://localhost:9990");
   // wait until the div is mounted
   const scrollDiv = page.locator("div#web-sys-closure-div");
   await scrollDiv.waitFor({ state: "attached" });
@@ -126,7 +126,7 @@ test("web-sys closure", async ({ page }) => {
 });
 
 test("document elements", async ({ page }) => {
-  await page.goto("http://localhost:9999");
+  await page.goto("http://localhost:9990");
   // wait until the meta element is mounted
   const meta = page.locator("meta#meta-head[name='testing']");
   await meta.waitFor({ state: "attached" });
@@ -157,7 +157,7 @@ test("document elements", async ({ page }) => {
 });
 
 test("merge styles", async ({ page }) => {
-  await page.goto("http://localhost:9999");
+  await page.goto("http://localhost:9990");
   // wait until the div is mounted
   const div = page.locator("div#merge-styles-div");
   await div.waitFor({ state: "attached" });
@@ -167,7 +167,7 @@ test("merge styles", async ({ page }) => {
 });
 
 test("select multiple", async ({ page }) => {
-  await page.goto("http://localhost:9999");
+  await page.goto("http://localhost:9990");
   // wait until the select element is mounted
   const staticSelect = page.locator("select#static-multiple-select");
   await staticSelect.waitFor({ state: "attached" });