
feat: router-based wasm bundle splitting (#3683)

* feat: wasm-split base implementation

* kinda sorta works with the cli

* okay great it actually works

* fix: cache busting for chunks

* cleaned up version

* it works with the router

* wip: migrate to a faster linear solution using id remapping

* add a good harness

* it works completely on the docsite

* the super tight build works too

* make parallelizable

* works on the docsite but not the harness

* light cleanups, fix for both harness and docsite

* make codegen smaller by extracting some shared functions to prevent monomorphizations

* chunking works, but tbd on tuning heuristics

* extract used module

* wip: cleaning up module

* wip: much better understanding of how imports work

* fix the call graph

* working again...

* wip: thinking about ifuncs

* ifunc approach works

* yes the ifunc approach works!!

* it all works completely even with chunks

* small cleanups

* comments

* actually locate all of the symbols

* clean up final implementation

* clean up tomls, revert some mono changes

* clean up, add some more comments / docs in places

* hoist more packages

* clean up impl a bit more

* clean cli

* move harness to playwright

* update playwright

* extract codegen

* add lazy to components

* Add docs to split loader

* clippy

* typos

* clippy, don't generate splits if not on wasm

* whoops, const

* Typos and docs

* enable optimizations in ci

* add optimizations to ci

* add wasm-split router feature

* parallel playwright?

* fix no case check

* disable pre-compress... think that might be the issue

* better logging, no default pre-compress, caching of shared symbols

* Fix: compressing assets

* fix non-interactive logging for serve

* cache playwright even on failure, use the cli with optimizations

* fix prebuild

* fix playwright webserver
Jonathan Kelley, 4 months ago
parent
commit
bdeedc13eb
59 changed files with 4250 additions and 300 deletions
  1. .cargo/config.toml (+0 −11)
  2. .github/workflows/main.yml (+1 −3)
  3. .vscode/settings.json (+2 −0)
  4. Cargo.lock (+261 −29)
  5. Cargo.toml (+46 −8)
  6. packages/cli-opt/Cargo.toml (+1 −0)
  7. packages/cli-opt/src/lib.rs (+21 −0)
  8. packages/cli/Cargo.toml (+2 −0)
  9. packages/cli/src/build/bundle.rs (+194 −108)
  10. packages/cli/src/build/progress.rs (+12 −0)
  11. packages/cli/src/build/request.rs (+9 −1)
  12. packages/cli/src/build/web.rs (+17 −13)
  13. packages/cli/src/cli/build.rs (+4 −0)
  14. packages/cli/src/config/web.rs (+8 −4)
  15. packages/cli/src/fastfs.rs (+1 −0)
  16. packages/cli/src/logging.rs (+15 −6)
  17. packages/cli/src/main.rs (+1 −0)
  18. packages/cli/src/serve/handle.rs (+1 −1)
  19. packages/cli/src/serve/mod.rs (+1 −1)
  20. packages/cli/src/serve/output.rs (+2 −0)
  21. packages/cli/src/wasm_bindgen.rs (+26 −11)
  22. packages/cli/src/wasm_opt.rs (+102 −0)
  23. packages/config-macros/Cargo.toml (+17 −0)
  24. packages/config-macros/README.md (+15 −0)
  25. packages/config-macros/src/lib.rs (+39 −0)
  26. packages/core-macro/src/component.rs (+129 −4)
  27. packages/core-macro/src/lib.rs (+2 −1)
  28. packages/core/src/any_props.rs (+19 −11)
  29. packages/core/src/scope_context.rs (+10 −0)
  30. packages/core/src/tasks.rs (+11 −1)
  31. packages/dioxus/Cargo.toml (+3 −0)
  32. packages/dioxus/src/lib.rs (+10 −0)
  33. packages/dx-wire-format/src/lib.rs (+2 −0)
  34. packages/playwright-tests/playwright.config.js (+10 −1)
  35. packages/playwright-tests/wasm-split-harness/Cargo.toml (+21 −0)
  36. packages/playwright-tests/wasm-split-harness/data/.gitignore (+1 −0)
  37. packages/playwright-tests/wasm-split-harness/data/index.html (+13 −0)
  38. packages/playwright-tests/wasm-split-harness/docsite.sh (+14 −0)
  39. packages/playwright-tests/wasm-split-harness/run.sh (+82 −0)
  40. packages/playwright-tests/wasm-split-harness/src/main.rs (+296 −0)
  41. packages/playwright-tests/wasm-split-harness/src/stars.js (+29 −0)
  42. packages/playwright-tests/wasm-split.spec.js (+49 −0)
  43. packages/router-macro/Cargo.toml (+4 −1)
  44. packages/router-macro/src/lib.rs (+1 −74)
  45. packages/router-macro/src/route.rs (+95 −11)
  46. packages/router/Cargo.toml (+1 −0)
  47. packages/router/README.md (+26 −0)
  48. packages/wasm-split/README.md (+10 −0)
  49. packages/wasm-split/wasm-split-cli/Cargo.toml (+16 −0)
  50. packages/wasm-split/wasm-split-cli/data/.gitignore (+4 −0)
  51. packages/wasm-split/wasm-split-cli/src/__wasm_split.js (+63 −0)
  52. packages/wasm-split/wasm-split-cli/src/lib.rs (+1535 −0)
  53. packages/wasm-split/wasm-split-cli/src/main.rs (+178 −0)
  54. packages/wasm-split/wasm-split-macro/Cargo.toml (+15 −0)
  55. packages/wasm-split/wasm-split-macro/src/lib.rs (+226 −0)
  56. packages/wasm-split/wasm-split/Cargo.toml (+8 −0)
  57. packages/wasm-split/wasm-split/src/lib.rs (+214 −0)
  58. packages/wasm-split/wasm-used/Cargo.toml (+9 −0)
  59. packages/wasm-split/wasm-used/src/lib.rs (+346 −0)

+ 0 - 11
.cargo/config.toml

@@ -1,11 +0,0 @@
-[profile]
-
-[profile.dioxus-wasm]
-inherits = "dev"
-opt-level = 2
-
-[profile.dioxus-server]
-inherits = "dev"
-
-[profile.dioxus-android]
-inherits = "dev"

+ 1 - 3
.github/workflows/main.yml

@@ -211,9 +211,7 @@ jobs:
       - uses: Swatinem/rust-cache@v2
         with:
           cache-all-crates: "true"
-      - name: Prebuild CLI
-        run: |
-          cargo build --package dioxus-cli --release
+          cache-on-failure: "true"
       - name: Playwright
         working-directory: ./packages/playwright-tests
         run: |

+ 2 - 0
.vscode/settings.json

@@ -9,6 +9,8 @@
   // "rust-analyzer.check.workspace": true,
   // "rust-analyzer.check.workspace": true,
   // "rust-analyzer.check.workspace": false,
   // "rust-analyzer.check.workspace": false,
   // "rust-analyzer.check.features": "all",
   // "rust-analyzer.check.features": "all",
+  // "rust-analyzer.cargo.buildScripts.rebuildOnSave": false,
+  // "rust-analyzer.check.workspace": false,
   "rust-analyzer.cargo.features": "all",
   "rust-analyzer.cargo.features": "all",
   "rust-analyzer.check.features": "all",
   "rust-analyzer.check.features": "all",
   // "rust-analyzer.check.allTargets": true,
   // "rust-analyzer.check.allTargets": true,

+ 261 - 29
Cargo.lock

@@ -8,7 +8,7 @@ version = "0.24.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
 checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
 dependencies = [
 dependencies = [
- "gimli",
+ "gimli 0.31.1",
 ]

 [[package]]
@@ -583,6 +583,7 @@ dependencies = [
  "brotli 7.0.0",
  "brotli 7.0.0",
  "flate2",
  "flate2",
  "futures-core",
  "futures-core",
+ "futures-io",
  "memchr",
  "memchr",
  "pin-project-lite",
  "pin-project-lite",
  "tokio",
  "tokio",
@@ -648,6 +649,12 @@ dependencies = [
  "pin-project-lite",
  "pin-project-lite",
 ]
 ]
 
 
+[[package]]
+name = "async-once-cell"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4288f83726785267c6f2ef073a3d83dc3f9b81464e9f99898240cced85fce35a"
+
 [[package]]
 name = "async-process"
 version = "2.3.0"
@@ -1504,6 +1511,12 @@ dependencies = [
  "windows-targets 0.52.6",
  "windows-targets 0.52.6",
 ]
 ]
 
 
+[[package]]
+name = "base16"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d27c3610c36aee21ce8ac510e6224498de4228ad772a171ed65643a24693a5a8"
+
 [[package]]
 name = "base16ct"
 version = "0.1.1"
@@ -3407,6 +3420,7 @@ dependencies = [
  "dioxus",
  "dioxus",
  "dioxus-cli-config",
  "dioxus-cli-config",
  "dioxus-config-macro",
  "dioxus-config-macro",
+ "dioxus-config-macros",
  "dioxus-core",
  "dioxus-core",
  "dioxus-core-macro",
  "dioxus-core-macro",
  "dioxus-desktop",
  "dioxus-desktop",
@@ -3432,6 +3446,7 @@ dependencies = [
  "tokio",
  "tokio",
  "tracing",
  "tracing",
  "warnings",
  "warnings",
+ "wasm-split",
 ]

 [[package]]
@@ -3518,6 +3533,7 @@ dependencies = [
  "log",
  "log",
  "manganis",
  "manganis",
  "manganis-core",
  "manganis-core",
+ "memoize",
  "notify",
  "notify",
  "object 0.36.5",
  "object 0.36.5",
  "once_cell",
  "once_cell",
@@ -3554,6 +3570,7 @@ dependencies = [
  "uuid",
  "uuid",
  "walkdir",
  "walkdir",
  "wasm-opt",
  "wasm-opt",
+ "wasm-split-cli",
  "which 7.0.1",
  "which 7.0.1",
 ]
 ]
 
 
@@ -3576,6 +3593,7 @@ dependencies = [
  "image",
  "image",
  "imagequant",
  "imagequant",
  "lightningcss",
  "lightningcss",
+ "manganis",
  "manganis-core",
  "manganis-core",
  "mozjpeg",
  "mozjpeg",
  "object 0.36.5",
  "object 0.36.5",
@@ -3630,6 +3648,10 @@ dependencies = [
  "quote",
  "quote",
 ]
 ]
 
 
+[[package]]
+name = "dioxus-config-macros"
+version = "0.6.3"
+
 [[package]]
 name = "dioxus-core"
 version = "0.6.3"
@@ -3810,6 +3832,7 @@ dependencies = [
  "serde",
  "serde",
  "serde_json",
  "serde_json",
  "tokio",
  "tokio",
+ "wasm-split",
  "web-time",
  "web-time",
  "wgpu",
  "wgpu",
 ]
 ]
@@ -4110,9 +4133,12 @@ dependencies = [
 name = "dioxus-router-macro"
 name = "dioxus-router-macro"
 version = "0.6.3"
 version = "0.6.3"
 dependencies = [
 dependencies = [
+ "base16",
+ "digest",
  "dioxus",
  "dioxus",
  "proc-macro2",
  "proc-macro2",
  "quote",
  "quote",
+ "sha2",
  "slab",
  "slab",
  "syn 2.0.90",
  "syn 2.0.90",
 ]
 ]
@@ -4799,6 +4825,12 @@ dependencies = [
  "zune-inflate",
  "zune-inflate",
 ]
 ]
 
 
+[[package]]
+name = "fallible-iterator"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
+
 [[package]]
 name = "faster-hex"
 version = "0.9.0"
@@ -5430,6 +5462,17 @@ dependencies = [
  "weezl",
  "weezl",
 ]
 ]
 
 
+[[package]]
+name = "gimli"
+version = "0.26.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d"
+dependencies = [
+ "fallible-iterator",
+ "indexmap 1.9.3",
+ "stable_deref_trait",
+]
+
 [[package]]
 name = "gimli"
 version = "0.31.1"
@@ -6150,6 +6193,7 @@ checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
 dependencies = [
  "ahash 0.8.11",
  "allocator-api2",
+ "serde",
 ]

 [[package]]
@@ -6161,6 +6205,7 @@ dependencies = [
  "allocator-api2",
  "allocator-api2",
  "equivalent",
  "equivalent",
  "foldhash",
  "foldhash",
+ "serde",
 ]

 [[package]]
@@ -6699,6 +6744,15 @@ dependencies = [
  "syn 2.0.90",
  "syn 2.0.90",
 ]
 ]
 
 
+[[package]]
+name = "id-arena"
+version = "2.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005"
+dependencies = [
+ "rayon",
+]
+
 [[package]]
 name = "idea"
 version = "0.5.1"
@@ -7103,6 +7157,15 @@ dependencies = [
  "either",
  "either",
 ]
 ]
 
 
+[[package]]
+name = "itertools"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
+dependencies = [
+ "either",
+]
+
 [[package]]
 name = "itoa"
 version = "0.4.8"
@@ -7177,9 +7240,9 @@ checksum = "f5d4a7da358eff58addd2877a45865158f0d78c911d43a5784ceb7bbf52833b0"

 [[package]]
 name = "js-sys"
-version = "0.3.76"
+version = "0.3.77"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7"
+checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f"
 dependencies = [
  "once_cell",
  "wasm-bindgen",
@@ -7404,6 +7467,12 @@ version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
 checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
 
 
+[[package]]
+name = "leb128"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
+
 [[package]]
 name = "lebe"
 version = "0.5.2"
@@ -7728,6 +7797,15 @@ dependencies = [
  "imgref",
  "imgref",
 ]
 ]
 
 
+[[package]]
+name = "lru"
+version = "0.7.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e999beba7b6e8345721bd280141ed958096a2e4abdf74f67ff4ce49b4b54e47a"
+dependencies = [
+ "hashbrown 0.12.3",
+]
+
 [[package]]
 name = "lru"
 version = "0.10.1"
@@ -7900,6 +7978,29 @@ dependencies = [
  "autocfg",
  "autocfg",
 ]
 ]
 
 
+[[package]]
+name = "memoize"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8d1d5792299bab3f8b5d88d1b7a7cb50ad7ef039a8c4d45a6b84880a6526276"
+dependencies = [
+ "lazy_static",
+ "lru 0.7.8",
+ "memoize-inner",
+]
+
+[[package]]
+name = "memoize-inner"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6dd8f89255d8ff313afabed9a3c83ef0993cc056679dfd001f5111a026f876f7"
+dependencies = [
+ "lazy_static",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
 [[package]]
 name = "metal"
 version = "0.27.0"
@@ -8658,7 +8759,7 @@ dependencies = [
  "flate2",
  "flate2",
  "memchr",
  "memchr",
  "ruzstd 0.7.3",
  "ruzstd 0.7.3",
- "wasmparser",
+ "wasmparser 0.218.0",
 ]

 [[package]]
@@ -10958,9 +11059,9 @@ checksum = "f97841a747eef040fcd2e7b3b9a220a7205926e60488e673d9e4926d27772ce5"

 [[package]]
 name = "serde"
-version = "1.0.216"
+version = "1.0.217"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e"
+checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70"
 dependencies = [
  "serde_derive",
 ]
@@ -11001,9 +11102,9 @@ dependencies = [

 [[package]]
 name = "serde_derive"
-version = "1.0.216"
+version = "1.0.217"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e"
+checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -11012,9 +11113,9 @@ dependencies = [

 [[package]]
 name = "serde_json"
-version = "1.0.133"
+version = "1.0.138"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377"
+checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949"
 dependencies = [
  "itoa 1.0.14",
  "memchr",
@@ -13853,6 +13954,35 @@ dependencies = [
  "winapi-util",
  "winapi-util",
 ]
 ]
 
 
+[[package]]
+name = "walrus"
+version = "0.23.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6481311b98508f4bc2d0abbfa5d42172e7a54b4b24d8f15e28b0dc650be0c59f"
+dependencies = [
+ "anyhow",
+ "gimli 0.26.2",
+ "id-arena",
+ "leb128",
+ "log",
+ "rayon",
+ "walrus-macro",
+ "wasm-encoder",
+ "wasmparser 0.214.0",
+]
+
+[[package]]
+name = "walrus-macro"
+version = "0.22.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "439ad39ff894c43c9649fa724cdde9a6fc50b855d517ef071a93e5df82fe51d3"
+dependencies = [
+ "heck 0.5.0",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.90",
+]
+
 [[package]]
 name = "want"
 version = "0.3.1"
@@ -13904,20 +14034,21 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b"

 [[package]]
 name = "wasm-bindgen"
-version = "0.2.99"
+version = "0.2.100"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396"
+checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
 dependencies = [
  "cfg-if",
  "once_cell",
+ "rustversion",
  "wasm-bindgen-macro",
  "wasm-bindgen-macro",
 ]
 ]
 
 
 [[package]]
 [[package]]
 name = "wasm-bindgen-backend"
 name = "wasm-bindgen-backend"
-version = "0.2.99"
+version = "0.2.100"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79"
+checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6"
 dependencies = [
  "bumpalo",
  "log",
@@ -13929,9 +14060,9 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-futures"
-version = "0.4.49"
+version = "0.4.50"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38176d9b44ea84e9184eff0bc34cc167ed044f816accfe5922e54d84cf48eca2"
+checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61"
 dependencies = [
  "cfg-if",
  "js-sys",
@@ -13942,9 +14073,9 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.99"
+version = "0.2.100"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe"
+checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -13952,9 +14083,9 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.99"
+version = "0.2.100"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2"
+checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -13965,19 +14096,21 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.99"
+version = "0.2.100"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6"
+checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d"
+dependencies = [
+ "unicode-ident",
+]

 [[package]]
 name = "wasm-bindgen-test"
-version = "0.3.49"
+version = "0.3.50"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c61d44563646eb934577f2772656c7ad5e9c90fac78aa8013d776fcdaf24625d"
+checksum = "66c8d5e33ca3b6d9fa3b4676d774c5778031d27a578c2b007f905acf816152c3"
 dependencies = [
  "js-sys",
  "minicov",
- "scoped-tls",
  "wasm-bindgen",
  "wasm-bindgen",
  "wasm-bindgen-futures",
  "wasm-bindgen-futures",
  "wasm-bindgen-test-macro",
  "wasm-bindgen-test-macro",
@@ -13985,15 +14118,24 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-test-macro"
-version = "0.3.49"
+version = "0.3.50"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "54171416ce73aa0b9c377b51cc3cb542becee1cd678204812e8392e5b0e4a031"
+checksum = "17d5042cc5fa009658f9a7333ef24291b1291a25b6382dd68862a7f3b969f69b"
 dependencies = [
  "proc-macro2",
  "quote",
  "syn 2.0.90",
 ]

+[[package]]
+name = "wasm-encoder"
+version = "0.214.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff694f02a8d7a50b6922b197ae03883fbf18cdb2ae9fbee7b6148456f5f44041"
+dependencies = [
+ "leb128",
+]
+
 [[package]]
 name = "wasm-opt"
 version = "0.116.1"
@@ -14034,6 +14176,60 @@ dependencies = [
  "cxx-build",
  "cxx-build",
 ]
 ]
 
 
+[[package]]
+name = "wasm-split"
+version = "0.1.0"
+dependencies = [
+ "async-once-cell",
+ "wasm-split-macro",
+]
+
+[[package]]
+name = "wasm-split-cli"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "clap",
+ "id-arena",
+ "itertools 0.14.0",
+ "rayon",
+ "tracing",
+ "tracing-subscriber",
+ "walrus",
+ "wasm-used",
+ "wasmparser 0.225.0",
+]
+
+[[package]]
+name = "wasm-split-harness"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "async-compression",
+ "dioxus",
+ "dioxus-router",
+ "futures",
+ "getrandom 0.2.15",
+ "js-sys",
+ "once_cell",
+ "reqwest 0.12.9",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
+]
+
+[[package]]
+name = "wasm-split-macro"
+version = "0.1.0"
+dependencies = [
+ "base16",
+ "digest",
+ "proc-macro2",
+ "quote",
+ "sha2",
+ "syn 2.0.90",
+]
+
 [[package]]
 name = "wasm-streams"
 version = "0.4.2"
@@ -14047,6 +14243,29 @@ dependencies = [
  "web-sys",
  "web-sys",
 ]
 ]
 
 
+[[package]]
+name = "wasm-used"
+version = "0.6.3"
+dependencies = [
+ "id-arena",
+ "tracing",
+ "walrus",
+]
+
+[[package]]
+name = "wasmparser"
+version = "0.214.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5309c1090e3e84dad0d382f42064e9933fdaedb87e468cc239f0eabea73ddcb6"
+dependencies = [
+ "ahash 0.8.11",
+ "bitflags 2.6.0",
+ "hashbrown 0.14.5",
+ "indexmap 2.7.0",
+ "semver 1.0.23",
+ "serde",
+]
+
 [[package]]
 name = "wasmparser"
 version = "0.218.0"
@@ -14056,11 +14275,24 @@ dependencies = [
  "bitflags 2.6.0",
  "bitflags 2.6.0",
 ]
 ]
 
 
+[[package]]
+name = "wasmparser"
+version = "0.225.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "36e5456165f81e64cb9908a0fe9b9d852c2c74582aa3fe2be3c2da57f937d3ae"
+dependencies = [
+ "bitflags 2.6.0",
+ "hashbrown 0.15.2",
+ "indexmap 2.7.0",
+ "semver 1.0.23",
+ "serde",
+]
+
 [[package]]
 name = "web-sys"
-version = "0.3.76"
+version = "0.3.77"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc"
+checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2"
 dependencies = [
  "js-sys",
  "wasm-bindgen",

+ 46 - 8
Cargo.toml

@@ -65,19 +65,19 @@ members = [
     "packages/const-serialize-macro",
     "packages/const-serialize-macro",
     "packages/dx-wire-format",
     "packages/dx-wire-format",
     "packages/logger",
     "packages/logger",
-
-    # Playwright tests
-    "packages/playwright-tests/liveview",
-    "packages/playwright-tests/web",
-    "packages/playwright-tests/fullstack",
-    "packages/playwright-tests/suspense-carousel",
-    "packages/playwright-tests/nested-suspense",
+    "packages/config-macros",
 
 
     # manganis
     # manganis
     "packages/manganis/manganis",
     "packages/manganis/manganis",
     "packages/manganis/manganis-core",
     "packages/manganis/manganis-core",
     "packages/manganis/manganis-macro",
     "packages/manganis/manganis-macro",
 
 
+    # wasm-split
+    "packages/wasm-split/wasm-split",
+    "packages/wasm-split/wasm-split-macro",
+    "packages/wasm-split/wasm-split-cli",
+    "packages/wasm-split/wasm-used",
+
     # Full project examples
     "example-projects/fullstack-hackernews",
     "example-projects/ecommerce-site",
@@ -101,6 +101,7 @@ members = [
     "packages/playwright-tests/suspense-carousel",
     "packages/playwright-tests/suspense-carousel",
     "packages/playwright-tests/nested-suspense",
     "packages/playwright-tests/nested-suspense",
     "packages/playwright-tests/cli-optimization",
     "packages/playwright-tests/cli-optimization",
+    "packages/playwright-tests/wasm-split-harness",
 ]

 [workspace.package]
@@ -142,14 +143,22 @@ dioxus-fullstack = { path = "packages/fullstack", version = "0.6.2" }
 dioxus_server_macro = { path = "packages/server-macro", version = "0.6.2", default-features = false }
 dioxus-dx-wire-format = { path = "packages/dx-wire-format", version = "0.6.2" }
 dioxus-logger = { path = "packages/logger", version = "0.6.2" }
+dioxus-config-macros = { path = "packages/config-macros", version = "0.6.3" }
 const-serialize = { path = "packages/const-serialize", version = "0.6.2" }
 const-serialize-macro = { path = "packages/const-serialize-macro", version = "0.6.2" }
 generational-box = { path = "packages/generational-box", version = "0.6.2" }
 lazy-js-bundle = { path = "packages/lazy-js-bundle", version = "0.6.2" }
+
+
 manganis = { path = "packages/manganis/manganis", version = "0.6.2" }
 manganis-core = { path = "packages/manganis/manganis-core", version = "0.6.2" }
 manganis-macro = { path = "packages/manganis/manganis-macro", version = "0.6.2" }

+wasm-split = { path = "packages/wasm-split/wasm-split", version = "0.1.0" }
+wasm-split-macro = { path = "packages/wasm-split/wasm-split-macro", version = "0.1.0" }
+wasm-split-cli = { path = "packages/wasm-split/wasm-split-cli", version = "0.1.0" }
+wasm-split-harness = { path = "packages/playwright-tests/wasm-split-harness", version = "0.1.0" }
+
 warnings = { version = "0.2.1" }

 # a fork of pretty please for tests - let's get off of this if we can!
@@ -225,6 +234,17 @@ cargo_metadata = "0.18.1"
 parking_lot = "0.12.1"
 tracing-wasm = "0.2.1"
 console_error_panic_hook = "0.1.7"
+base16 = "0.2.1"
+digest = "0.10.7"
+sha2 = "0.10.8"
+walrus = { version = "0.23.2", features = ["parallel"] }
+id-arena = "2.2.1"
+async-compression = { version = "0.4.8", features = ["futures-io", "gzip", "brotli"] }
+getrandom = { version = "0.2" }
+async-once-cell = { version = "0.5.3" }
+rayon = "1.2.0"
+wasmparser = "0.225.0"
+itertools = "0.14.0"
 
 
 # desktop
 wry = { version = "0.45.0", default-features = false }
@@ -250,7 +270,24 @@ debug = 0
 # when we ship our CI builds, we turn on LTO which improves perf leftover by turning on incremental
 [profile.release]
 incremental = true
-debug = 0
+
+# crank up the opt level for wasm-split-cli in dev mode
+# important here that lto is on and the debug symbols are present (since they're used by wasm-opt)
+[profile.wasm-split-release]
+inherits = "release"
+opt-level = 'z'
+lto = true
+debug=true
+
+# a profile for running the CLI that's also incremental
+[profile.cli-release-dev]
+inherits = "release"
+opt-level = 3
+incremental = true
+
+# crank up walrus since it's quite slow in dev mode
+[profile.dev.package.walrus]
+opt-level = 3
 
 
 [profile.release-max-opt]
 inherits = "release"
@@ -296,6 +333,7 @@ base64 = { workspace = true, optional = true }
 http-range = { version = "0.1.5", optional = true }
 wgpu = { version = "0.19", optional = true }
 ouroboros = { version = "*", optional = true }
+wasm-split = { workspace = true }
 
 
 [dev-dependencies]
 dioxus = { workspace = true, features = ["router"] }

+ 1 - 0
packages/cli-opt/Cargo.toml

@@ -11,6 +11,7 @@ keywords = ["dom", "ui", "gui", "react"]

 [dependencies]
 anyhow = { workspace = true }
+manganis = { workspace = true }
 manganis-core = { workspace = true }
 object = {version="0.36.0", features=["wasm"]}
 serde = { workspace = true, features = ["derive"] }

+ 21 - 0
packages/cli-opt/src/lib.rs

@@ -25,6 +25,27 @@ pub struct AssetManifest {
 }

 impl AssetManifest {
+    /// Manually add an asset to the manifest
+    pub fn register_asset(
+        &mut self,
+        asset_path: &Path,
+        options: manganis::AssetOptions,
+    ) -> anyhow::Result<BundledAsset> {
+        let hash = manganis_core::hash::AssetHash::hash_file_contents(asset_path)
+            .context("Failed to hash file")?;
+
+        let output_path_str = asset_path.to_str().ok_or(anyhow::anyhow!(
+            "Failed to convert wasm bindgen output path to string"
+        ))?;
+
+        let bundled_asset =
+            manganis::macro_helpers::create_bundled_asset(output_path_str, hash.bytes(), options);
+
+        self.assets.insert(asset_path.into(), bundled_asset);
+
+        Ok(bundled_asset)
+    }
+
     #[allow(dead_code)]
     pub fn load_from_file(path: &Path) -> anyhow::Result<Self> {
         let src = std::fs::read_to_string(path)?;
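
For orientation, `register_asset` is the hook the web bundling code further down uses to hand generated files (split chunks, the main wasm module, main.js) to the asset system. A minimal sketch of the call pattern, assuming only the signature added above; the helper function, path, and printout are illustrative and not part of this PR:

```rust
use std::path::Path;

use dioxus_cli_opt::AssetManifest;
use manganis::AssetOptions;

// Illustrative helper: hash a generated wasm file and record it in the manifest.
// The returned BundledAsset carries the content-hashed output name used for cache busting.
fn track_generated_wasm(manifest: &mut AssetManifest, wasm: &Path) -> anyhow::Result<()> {
    let bundled = manifest.register_asset(wasm, AssetOptions::Unknown)?;
    println!("{} will be served as /assets/{}", wasm.display(), bundled.bundled_path());
    Ok(())
}
```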

+ 2 - 0
packages/cli/Cargo.toml

@@ -23,6 +23,7 @@ dioxus-cli-config = { workspace = true }
 dioxus-cli-opt = { workspace = true }
 dioxus-fullstack = { workspace = true }
 dioxus-dx-wire-format = { workspace = true }
+wasm-split-cli = { workspace = true }

 clap = { workspace = true, features = ["derive", "cargo"] }
 convert_case = { workspace = true }
@@ -126,6 +127,7 @@ tar = "0.4.43"
 local-ip-address = "0.6.3"
 dircpy = "0.3.19"
 plist = "1.7.0"
+memoize = "0.5.1"

 [build-dependencies]
 built = { version = "=0.7.4", features = ["git2"] }

+ 194 - 108
packages/cli/src/build/bundle.rs

@@ -1,17 +1,16 @@
 use super::prerender::pre_render_static_routes;
 use super::templates::InfoPlistData;
-use crate::wasm_bindgen::WasmBindgen;
-use crate::{BuildRequest, Platform};
+use crate::{BuildRequest, Platform, WasmOptConfig};
 use crate::{Result, TraceSrc};
 use anyhow::Context;
 use dioxus_cli_opt::{process_file_to, AssetManifest};
 use manganis::{AssetOptions, JsAssetOptions};
 use rayon::prelude::{IntoParallelRefIterator, ParallelIterator};
-use std::collections::HashSet;
 use std::future::Future;
 use std::path::{Path, PathBuf};
 use std::pin::Pin;
 use std::sync::atomic::Ordering;
+use std::{collections::HashSet, io::Write};
 use std::{sync::atomic::AtomicUsize, time::Duration};
 use tokio::process::Command;

@@ -326,21 +325,7 @@ impl AppBundle {
             //                        logo.png
             // ```
             Platform::Web => {
-                // Run wasm-bindgen and drop its output into the assets folder under "dioxus"
-                self.build.status_wasm_bindgen_start();
-                self.run_wasm_bindgen(&self.app.exe.with_extension("wasm"))
-                    .await?;
-
-                // Only run wasm-opt if the feature is enabled
-                // Wasm-opt has an expensive build script that makes it annoying to keep enabled for iterative dev
-                // We put it behind the "wasm-opt" feature flag so that it can be disabled when iterating on the cli
-                self.run_wasm_opt(&self.build.exe_dir())?;
-
-                // Write the index.html file with the pre-configured contents we got from pre-rendering
-                std::fs::write(
-                    self.build.root_dir().join("index.html"),
-                    self.prepare_html()?,
-                )?;
+                self.bundle_web().await?;
             }

             // this will require some extra oomf to get the multi architecture builds...
@@ -413,6 +398,7 @@ impl AppBundle {
                 if keep_bundled_output_paths.contains(canon_path.as_path()) {
                     return Ok(());
                 }
+
                 // Otherwise, if it is a directory, we need to walk it and remove child files
                 if path.is_dir() {
                     for entry in std::fs::read_dir(path)?.flatten() {
@@ -427,11 +413,16 @@ impl AppBundle {
                     // If it is a file, remove it
                     tokio::fs::remove_file(path).await?;
                 }
+
                 Ok(())
             })
         }

         tracing::debug!("Removing old assets");
+        tracing::trace!(
+            "Keeping bundled output paths: {:#?}",
+            keep_bundled_output_paths
+        );
         remove_old_assets(&asset_dir, &keep_bundled_output_paths).await?;

         // todo(jon): we also want to eventually include options for each asset's optimization and compression, which we currently aren't
@@ -441,7 +432,16 @@ impl AppBundle {
         for (asset, bundled) in &self.app.assets.assets {
             let from = asset.clone();
             let to = asset_dir.join(bundled.bundled_path());
-            tracing::debug!("Copying asset {from:?} to {to:?}");
+
+            // prefer to log using a shorter path relative to the workspace dir by trimming the workspace dir
+            let from_ = from
+                .strip_prefix(self.build.krate.workspace_dir())
+                .unwrap_or(from.as_path());
+            let to_ = from
+                .strip_prefix(self.build.krate.workspace_dir())
+                .unwrap_or(to.as_path());
+
+            tracing::debug!("Copying asset {from_:?} to {to_:?}");
             assets_to_transfer.push((from, to, *bundled.options()));
         }

@@ -459,13 +459,17 @@ impl AppBundle {

         // Parallel Copy over the assets and keep track of progress with an atomic counter
         let progress = self.build.progress.clone();
+        let ws_dir = self.build.krate.workspace_dir();
         // Optimizing assets is expensive and blocking, so we do it in a tokio spawn blocking task
         tokio::task::spawn_blocking(move || {
             assets_to_transfer
                 .par_iter()
                 .try_for_each(|(from, to, options)| {
                     let processing = started_processing.fetch_add(1, Ordering::SeqCst);
-                    tracing::trace!("Starting asset copy {processing}/{asset_count} from {from:?}");
+                    let from_ = from.strip_prefix(&ws_dir).unwrap_or(from);
+                    tracing::trace!(
+                        "Starting asset copy {processing}/{asset_count} from {from_:?}"
+                    );
 
 
                     let res = process_file_to(options, from, to);
                     if let Err(err) = res.as_ref() {
@@ -486,8 +490,8 @@ impl AppBundle {
         .await
         .map_err(|e| anyhow::anyhow!("A task failed while trying to copy assets: {e}"))??;

-        // Remove the wasm bindgen output directory if it exists
-        _ = std::fs::remove_dir_all(self.build.wasm_bindgen_out_dir());
+        // // Remove the wasm bindgen output directory if it exists
+        // _ = std::fs::remove_dir_all(self.build.wasm_bindgen_out_dir());
 
 
         // Write the version file so we know what version of the optimizer we used
         std::fs::write(
@@ -575,6 +579,7 @@ impl AppBundle {
                     .krate
                     .should_pre_compress_web_assets(self.build.build.release);

+                self.build.status_compressing_assets();
                 let asset_dir = self.build.asset_dir();
                 tokio::task::spawn_blocking(move || {
                     crate::fastfs::pre_compress_folder(&asset_dir, pre_compress)
@@ -613,115 +618,196 @@ impl AppBundle {
         None
     }

-    pub(crate) async fn run_wasm_bindgen(&mut self, input_path: &Path) -> anyhow::Result<()> {
-        tracing::debug!(dx_src = ?TraceSrc::Bundle, "Running wasm-bindgen");
-
-        let input_path = input_path.to_path_buf();
-        // Make sure the bindgen output directory exists
+    /// Bundle the web app
+    /// - Run wasm-bindgen
+    /// - Bundle split
+    /// - Run wasm-opt
+    /// - Register the .wasm and .js files with the asset system
+    async fn bundle_web(&mut self) -> Result<()> {
+        use crate::{wasm_bindgen::WasmBindgen, wasm_opt};
+        use std::fmt::Write;
+
+        // Locate the output of the build files and the bindgen output
+        // We'll fill these in a second if they don't already exist
         let bindgen_outdir = self.build.wasm_bindgen_out_dir();
-        std::fs::create_dir_all(&bindgen_outdir)?;
-
-        let name = self.build.krate.executable_name().to_string();
-        let keep_debug =
-            // if we're in debug mode, or we're generating debug symbols, keep debug info
-            (self.build.krate.config.web.wasm_opt.debug || self.build.build.debug_symbols)
-            // but only if we're not in release mode
-            && !self.build.build.release;
-
-        let start = std::time::Instant::now();
-
+        let prebindgen = self.app.exe.clone();
+        let post_bindgen_wasm = self.build.wasm_bindgen_wasm_output_file();
+        let should_bundle_split = self.build.build.experimental_wasm_split;
+        let rustc_exe = self.app.exe.with_extension("wasm");
         let bindgen_version = self
             .build
             .krate
             .wasm_bindgen_version()
             .expect("this should have been checked by tool verification");

+        // Prepare any work dirs
+        std::fs::create_dir_all(&bindgen_outdir)?;
+
+        // Prepare our configuration
+        //
+        // we turn off debug symbols in dev mode but leave them on in release mode (weird!) since
+        // wasm-opt and wasm-split need them to do better optimizations.
+        //
+        // We leave demangling to false since it's faster and these tools seem to prefer the raw symbols.
+        // todo(jon): investigate if the chrome extension needs them demangled or demangles them automatically.
+        let will_wasm_opt = (self.build.build.release || self.build.build.experimental_wasm_split)
+            && crate::wasm_opt::wasm_opt_available();
+        let keep_debug = self.build.krate.config.web.wasm_opt.debug
+            || self.build.build.debug_symbols
+            || self.build.build.experimental_wasm_split
+            || !self.build.build.release
+            || will_wasm_opt;
+        let demangle = false;
+        let wasm_opt_options = WasmOptConfig {
+            memory_packing: self.build.build.experimental_wasm_split,
+            debug: self.build.build.debug_symbols,
+            ..self.build.krate.config.web.wasm_opt.clone()
+        };
+
+        // Run wasm-bindgen. Some of the options are not "optimal" but will be fixed up by wasm-opt
+        //
+        // There's performance implications here. Running with --debug is slower than without
+        // We're keeping around lld sections and names but wasm-opt will fix them
+        // todo(jon): investigate a good balance of wiping debug symbols during dev (or doing a double build?)
+        self.build.status_wasm_bindgen_start();
+        tracing::debug!(dx_src = ?TraceSrc::Bundle, "Running wasm-bindgen");
+        let start = std::time::Instant::now();
         WasmBindgen::new(&bindgen_version)
-            .input_path(&input_path)
+            .input_path(&rustc_exe)
             .target("web")
             .target("web")
             .debug(keep_debug)
             .debug(keep_debug)
-            .demangle(keep_debug)
+            .demangle(demangle)
             .keep_debug(keep_debug)
-            .remove_name_section(!keep_debug)
-            .remove_producers_section(!keep_debug)
-            .out_name(&name)
+            .keep_lld_sections(true)
+            .out_name(self.build.krate.executable_name())
             .out_dir(&bindgen_outdir)
+            .remove_name_section(!will_wasm_opt)
+            .remove_producers_section(!will_wasm_opt)
             .run()
             .await
             .context("Failed to generate wasm-bindgen bindings")?;
-
-        // After running wasm-bindgen, add the js and wasm asset to the manifest
-        let js_output_path = self.build.wasm_bindgen_js_output_file();
-        let wasm_output_path = self.build.wasm_bindgen_wasm_output_file();
-        let new_assets = [
-            (
-                js_output_path,
-                AssetOptions::Js(JsAssetOptions::new().with_minify(true).with_preload(true)),
-            ),
-            (wasm_output_path, AssetOptions::Unknown),
-        ];
-        for (asset_path, options) in new_assets {
-            let hash = manganis_core::hash::AssetHash::hash_file_contents(&asset_path)?;
-            let output_path_str = asset_path.to_str().ok_or(anyhow::anyhow!(
-                "Failed to convert wasm bindgen output path to string"
-            ))?;
-            let bundled_asset = manganis::macro_helpers::create_bundled_asset(
-                output_path_str,
-                hash.bytes(),
-                options,
-            );
-            self.app.assets.assets.insert(asset_path, bundled_asset);
-        }
-
         tracing::debug!(dx_src = ?TraceSrc::Bundle, "wasm-bindgen complete in {:?}", start.elapsed());
         tracing::debug!(dx_src = ?TraceSrc::Bundle, "wasm-bindgen complete in {:?}", start.elapsed());
 
 
-        Ok(())
-    }
+        // Run bundle splitting if the user has requested it
+        // It's pretty expensive but because of rayon should be running separate threads, hopefully
+        // not blocking this thread. Dunno if that's true
+        if should_bundle_split {
+            self.build.status_splitting_bundle();
+
+            if !will_wasm_opt {
+                return Err(anyhow::anyhow!(
+                    "Bundle splitting requires wasm-opt to be installed or the CLI to be built with `--features optimizations`. Please install wasm-opt and try again."
+                )
+                .into());
+            }
 
 
-    #[allow(unused)]
-    pub(crate) fn run_wasm_opt(&self, bindgen_outdir: &std::path::Path) -> Result<()> {
-        if !self.build.build.release {
-            return Ok(());
-        };
-        self.build.status_optimizing_wasm();
+            // Load the contents of these binaries since we need both of them
+            // We're going to use the default makeLoad glue from wasm-split
+            let original = std::fs::read(&prebindgen)?;
+            let bindgened = std::fs::read(&post_bindgen_wasm)?;
+            let mut glue = wasm_split_cli::MAKE_LOAD_JS.to_string();
+
+            // Run the emitter
+            let splitter = wasm_split_cli::Splitter::new(&original, &bindgened);
+            let modules = splitter
+                .context("Failed to parse wasm for splitter")?
+                .emit()
+                .context("Failed to emit wasm split modules")?;
+
+            // Write the chunks that contain shared imports
+            // These will be in the format of chunk_0_modulename.wasm - this is hardcoded in wasm-split
+            tracing::debug!("Writing split chunks to disk");
+            for (idx, chunk) in modules.chunks.iter().enumerate() {
+                let path = bindgen_outdir.join(format!("chunk_{}_{}.wasm", idx, chunk.module_name));
+                wasm_opt::write_wasm(&chunk.bytes, &path, &wasm_opt_options).await?;
+                writeln!(
+                    glue, "export const __wasm_split_load_chunk_{idx} = makeLoad(\"/assets/{url}\", [], fusedImports);",
+                    url = self
+                        .app
+                        .assets
+                        .register_asset(&path, AssetOptions::Unknown)?.bundled_path(),
+                )?;
+            }
 
 
-        #[cfg(feature = "optimizations")]
-        {
-            use crate::config::WasmOptLevel;
+            // Write the modules that contain the entrypoints
+            tracing::debug!("Writing split modules to disk");
+            for (idx, module) in modules.modules.iter().enumerate() {
+                let comp_name = module
+                    .component_name
+                    .as_ref()
+                    .context("generated bindgen module has no name?")?;
+
+                let path = bindgen_outdir.join(format!("module_{}_{}.wasm", idx, comp_name));
+                wasm_opt::write_wasm(&module.bytes, &path, &wasm_opt_options).await?;
+
+                let hash_id = module.hash_id.as_ref().unwrap();
+
+                writeln!(
+                    glue,
+                    "export const __wasm_split_load_{module}_{hash_id}_{comp_name} = makeLoad(\"/assets/{url}\", [{deps}], fusedImports);",
+                    module = module.module_name,
+
+
+                    // Again, register this wasm with the asset system
+                    url = self
+                        .app
+                        .assets
+                        .register_asset(&path, AssetOptions::Unknown)?.bundled_path(),
+
+                    // This time, make sure to write the dependencies of this chunk
+                    // The names here are again, hardcoded in wasm-split - fix this eventually.
+                    deps = module
+                        .relies_on_chunks
+                        .iter()
+                        .map(|idx| format!("__wasm_split_load_chunk_{idx}"))
+                        .collect::<Vec<_>>()
+                        .join(", ")
+                )?;
+            }
 
 
-            tracing::info!(dx_src = ?TraceSrc::Build, "Running optimization with wasm-opt...");
+            // Write the js binding
+            // It's not registered as an asset since it will get included in the main.js file
+            let js_output_path = bindgen_outdir.join("__wasm_split.js");
+            std::fs::write(&js_output_path, &glue)?;
+
+            // Make sure to write some entropy to the main.js file so it gets a new hash
+            // If we don't do this, the main.js file will be cached and never pick up the chunk names
+            let uuid = uuid::Uuid::new_v5(&uuid::Uuid::NAMESPACE_URL, glue.as_bytes());
+            std::fs::OpenOptions::new()
+                .append(true)
+                .open(self.build.wasm_bindgen_js_output_file())
+                .context("Failed to open main.js file")?
+                .write_all(format!("/*{uuid}*/").as_bytes())?;
+
+            // Write the main wasm_bindgen file and register it with the asset system
+            // This will overwrite the file in place
+            // We will wasm-opt it in just a second...
+            std::fs::write(&post_bindgen_wasm, modules.main.bytes)?;
+        }
 
 
-            let mut options = match self.build.krate.config.web.wasm_opt.level {
-                WasmOptLevel::Z => {
-                    wasm_opt::OptimizationOptions::new_optimize_for_size_aggressively()
-                }
-                WasmOptLevel::S => wasm_opt::OptimizationOptions::new_optimize_for_size(),
-                WasmOptLevel::Zero => wasm_opt::OptimizationOptions::new_opt_level_0(),
-                WasmOptLevel::One => wasm_opt::OptimizationOptions::new_opt_level_1(),
-                WasmOptLevel::Two => wasm_opt::OptimizationOptions::new_opt_level_2(),
-                WasmOptLevel::Three => wasm_opt::OptimizationOptions::new_opt_level_3(),
-                WasmOptLevel::Four => wasm_opt::OptimizationOptions::new_opt_level_4(),
-            };
-            let wasm_file =
-                bindgen_outdir.join(format!("{}_bg.wasm", self.build.krate.executable_name()));
-            let old_size = wasm_file.metadata()?.len();
-            options
-                // WASM bindgen relies on reference types
-                .enable_feature(wasm_opt::Feature::ReferenceTypes)
-                .debug_info(self.build.krate.config.web.wasm_opt.debug)
-                .run(&wasm_file, &wasm_file)
-                .map_err(|err| crate::Error::Other(anyhow::anyhow!(err)))?;
-
-            let new_size = wasm_file.metadata()?.len();
-            tracing::debug!(
-                dx_src = ?TraceSrc::Build,
-                "wasm-opt reduced WASM size from {} to {} ({:2}%)",
-                old_size,
-                new_size,
-                (new_size as f64 - old_size as f64) / old_size as f64 * 100.0
-            );
+        // Make sure to optimize the main wasm file if requested or if bundle splitting
+        if should_bundle_split || self.build.build.release {
+            self.build.status_optimizing_wasm();
+            wasm_opt::optimize(&post_bindgen_wasm, &post_bindgen_wasm, &wasm_opt_options).await?;
         }

+        // Make sure to register the main wasm file with the asset system
+        self.app
+            .assets
+            .register_asset(&post_bindgen_wasm, AssetOptions::Unknown)?;
+
+        // Register the main.js with the asset system so it bundles in the snippets and optimizes
+        self.app.assets.register_asset(
+            &self.build.wasm_bindgen_js_output_file(),
+            AssetOptions::Js(JsAssetOptions::new().with_minify(true).with_preload(true)),
+        )?;
+
+        // Write the index.html file with the pre-configured contents we got from pre-rendering
+        std::fs::write(
+            self.build.root_dir().join("index.html"),
+            self.prepare_html()?,
+        )?;
+
         Ok(())
     }
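
To summarize the split step buried in `bundle_web` above: the CLI feeds the splitter both the relocatable rustc output and the wasm-bindgen output, then writes the emitted shared chunks, per-component modules, and the rewritten main module to disk. A condensed sketch of that flow, using only the `wasm_split_cli` calls that appear in the diff; the standalone function, paths, and error handling are illustrative:

```rust
use std::path::Path;

use anyhow::Context;

// Condensed, illustrative version of the split step in bundle_web above.
fn split_wasm(pre_bindgen: &Path, post_bindgen: &Path, out_dir: &Path) -> anyhow::Result<()> {
    // The splitter needs both binaries: the rustc output still carries relocation data
    // (see the `-Clink-args=--emit-relocs` flag added in request.rs below).
    let original = std::fs::read(pre_bindgen)?;
    let bindgened = std::fs::read(post_bindgen)?;

    let modules = wasm_split_cli::Splitter::new(&original, &bindgened)
        .context("Failed to parse wasm for splitter")?
        .emit()
        .context("Failed to emit wasm split modules")?;

    // Shared chunks and per-component modules follow the naming scheme hardcoded in wasm-split.
    for (idx, chunk) in modules.chunks.iter().enumerate() {
        let path = out_dir.join(format!("chunk_{}_{}.wasm", idx, chunk.module_name));
        std::fs::write(path, &chunk.bytes)?;
    }
    for (idx, module) in modules.modules.iter().enumerate() {
        let name = module.component_name.as_ref().context("generated module has no name?")?;
        let path = out_dir.join(format!("module_{}_{}.wasm", idx, name));
        std::fs::write(path, &module.bytes)?;
    }

    // The main module is rewritten in place and then handed to wasm-opt.
    std::fs::write(post_bindgen, &modules.main.bytes)?;
    Ok(())
}
```

In the real implementation each write goes through `wasm_opt::write_wasm`, every output is registered with the asset manifest, and a `__wasm_split.js` glue file of `makeLoad(...)` exports is appended so the router can lazily fetch each chunk and module.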
 
 

+ 12 - 0
packages/cli/src/build/progress.rs

@@ -23,6 +23,12 @@ impl BuildRequest {
         });
     }

+    pub(crate) fn status_splitting_bundle(&self) {
+        _ = self.progress.unbounded_send(BuildUpdate::Progress {
+            stage: BuildStage::SplittingBundle,
+        });
+    }
+
     pub(crate) fn status_start_bundle(&self) {
         _ = self.progress.unbounded_send(BuildUpdate::Progress {
             stage: BuildStage::Bundling {},
@@ -102,6 +108,12 @@ impl BuildRequest {
         });
         });
     }
     }
 
 
+    pub(crate) fn status_compressing_assets(&self) {
+        _ = self.progress.unbounded_send(BuildUpdate::Progress {
+            stage: BuildStage::CompressingAssets,
+        });
+    }
+
     pub(crate) fn is_server(&self) -> bool {
     pub(crate) fn is_server(&self) -> bool {
         self.build.platform() == Platform::Server
         self.build.platform() == Platform::Server
     }
     }

+ 9 - 1
packages/cli/src/build/request.rs

@@ -340,6 +340,14 @@ impl BuildRequest {
 
 
         cargo_args.push(self.krate.executable_name().to_string());
         cargo_args.push(self.krate.executable_name().to_string());
 
 
+        // the bundle splitter needs relocation data
+        // we'll trim these out if we don't need them during the bundling process
+        // todo(jon): for wasm binary patching we might want to leave these on all the time.
+        if self.build.platform() == Platform::Web && self.build.experimental_wasm_split {
+            cargo_args.push("--".to_string());
+            cargo_args.push("-Clink-args=--emit-relocs".to_string());
+        }
+
         tracing::debug!(dx_src = ?TraceSrc::Build, "cargo args: {:?}", cargo_args);
         tracing::debug!(dx_src = ?TraceSrc::Build, "cargo args: {:?}", cargo_args);
 
 
         cargo_args
         cargo_args
@@ -658,7 +666,7 @@ impl BuildRequest {
 
 
     /// Get the path to the wasm bindgen temporary output folder
     /// Get the path to the wasm bindgen temporary output folder
     pub fn wasm_bindgen_out_dir(&self) -> PathBuf {
     pub fn wasm_bindgen_out_dir(&self) -> PathBuf {
-        self.root_dir().join("wasm-bindgen")
+        self.root_dir().join("wasm")
     }
     }
 
 
     /// Get the path to the wasm bindgen javascript output file
     /// Get the path to the wasm bindgen javascript output file

+ 17 - 13
packages/cli/src/build/web.rs

@@ -140,19 +140,23 @@ impl AppBundle {
         // If not, insert the script
         // If not, insert the script
         *html = html.replace(
         *html = html.replace(
             "</body",
             "</body",
-            r#"<script>
-            // We can't use a module script here because we need to start the script immediately when streaming
-            import("/{base_path}/{js_path}").then(
-                ({ default: init }) => {
-                init("/{base_path}/{wasm_path}").then((wasm) => {
-                    if (wasm.__wbindgen_start == undefined) {
-                    wasm.main();
-                    }
-                });
-                }
-            );
-            </script>
-            {DX_TOAST_UTILITIES}
+r#" <script>
+  // We can't use a module script here because we need to start the script immediately when streaming
+  import("/{base_path}/{js_path}").then(
+    ({ default: init, initSync }) => {
+      // export initSync in case a split module needs to initialize
+      window.__wasm_split_main_initSync = initSync;
+
+      // Actually perform the load
+      init("/{base_path}/{wasm_path}").then((wasm) => {
+        if (wasm.__wbindgen_start == undefined) {
+            wasm.main();
+        }
+      });
+    }
+  );
+  </script>
+  {DX_TOAST_UTILITIES}
             </body"#,
             </body"#,
         );
         );
 
 

+ 4 - 0
packages/cli/src/cli/build.rs

@@ -51,6 +51,10 @@ pub(crate) struct BuildArgs {
     #[clap(long, default_value_t = true)]
     #[clap(long, default_value_t = true)]
     pub(crate) inject_loading_scripts: bool,
     pub(crate) inject_loading_scripts: bool,
 
 
+    /// Experimental: Bundle split the wasm binary into multiple chunks based on `#[wasm_split]` annotations [default: false]
+    #[clap(long, default_value_t = false)]
+    pub(crate) experimental_wasm_split: bool,
+
     /// Generate debug symbols for the wasm binary [default: true]
     /// Generate debug symbols for the wasm binary [default: true]
     ///
     ///
     /// This will make the binary larger and take longer to compile, but will allow you to debug the
     /// This will make the binary larger and take longer to compile, but will allow you to debug the

+ 8 - 4
packages/cli/src/config/web.rs

@@ -19,7 +19,7 @@ pub(crate) struct WebConfig {
     pub(crate) https: WebHttpsConfig,
     pub(crate) https: WebHttpsConfig,
 
 
     /// Whether to enable pre-compression of assets and wasm during a web build in release mode
     /// Whether to enable pre-compression of assets and wasm during a web build in release mode
-    #[serde(default = "true_bool")]
+    #[serde(default = "false_bool")]
     pub(crate) pre_compress: bool,
     pub(crate) pre_compress: bool,
 
 
     /// The wasm-opt configuration
     /// The wasm-opt configuration
@@ -30,7 +30,7 @@ pub(crate) struct WebConfig {
 impl Default for WebConfig {
 impl Default for WebConfig {
     fn default() -> Self {
     fn default() -> Self {
         Self {
         Self {
-            pre_compress: true_bool(),
+            pre_compress: false_bool(),
             app: Default::default(),
             app: Default::default(),
             https: Default::default(),
             https: Default::default(),
             wasm_opt: Default::default(),
             wasm_opt: Default::default(),
@@ -58,13 +58,18 @@ pub(crate) struct WasmOptConfig {
     /// Keep debug symbols in the wasm file
     /// Keep debug symbols in the wasm file
     #[serde(default = "false_bool")]
     #[serde(default = "false_bool")]
     pub(crate) debug: bool,
     pub(crate) debug: bool,
+
+    /// Enable memory packing
+    #[serde(default = "false_bool")]
+    pub(crate) memory_packing: bool,
 }
 }
 
 
-/// The wasm-opt level to use for release web builds [default: 4]
+/// The wasm-opt level to use for release web builds [default: Z]
 #[derive(Default, Debug, Copy, Clone, Serialize, Deserialize)]
 #[derive(Default, Debug, Copy, Clone, Serialize, Deserialize)]
 pub(crate) enum WasmOptLevel {
 pub(crate) enum WasmOptLevel {
     /// Optimize aggressively for size
     /// Optimize aggressively for size
     #[serde(rename = "z")]
     #[serde(rename = "z")]
+    #[default]
     Z,
     Z,
     /// Optimize for size
     /// Optimize for size
     #[serde(rename = "s")]
     #[serde(rename = "s")]
@@ -83,7 +88,6 @@ pub(crate) enum WasmOptLevel {
     Three,
     Three,
     /// Optimize aggressively for speed
     /// Optimize aggressively for speed
     #[serde(rename = "4")]
     #[serde(rename = "4")]
-    #[default]
     Four,
     Four,
 }
 }
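
Note: a minimal, self-contained sketch of how the reworked `WasmOptConfig` behaves under serde, assuming standalone copies of the types and the `toml` crate purely for illustration; only the field names, renames, and defaults mirror the diff above.

    use serde::Deserialize;

    #[derive(Debug, Default, Deserialize)]
    #[serde(default)]
    struct WasmOptConfig {
        level: WasmOptLevel,
        debug: bool,
        memory_packing: bool, // new flag used by the bundle splitter
    }

    #[derive(Debug, Default, Deserialize)]
    enum WasmOptLevel {
        #[default]
        #[serde(rename = "z")]
        Z, // "z" is now the default level instead of "4"
        #[serde(rename = "s")]
        S,
        #[serde(rename = "4")]
        Four,
    }

    fn main() {
        // Omitted keys fall back to the defaults shown in the diff.
        let cfg: WasmOptConfig = toml::from_str("memory_packing = true").unwrap();
        assert!(cfg.memory_packing && !cfg.debug);
        println!("{cfg:?}");
    }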
 
 

+ 1 - 0
packages/cli/src/fastfs.rs

@@ -50,6 +50,7 @@ pub(crate) fn pre_compress_folder(path: &Path, pre_compress: bool) -> std::io::R
         let entry_path = entry.path();
         let entry_path = entry.path();
         if entry_path.is_file() {
         if entry_path.is_file() {
             if pre_compress {
             if pre_compress {
+                tracing::info!("Pre-compressing file {}", entry_path.display());
                 if let Err(err) = pre_compress_file(entry_path) {
                 if let Err(err) = pre_compress_file(entry_path) {
                     tracing::error!("Failed to pre-compress file {entry_path:?}: {err}");
                     tracing::error!("Failed to pre-compress file {entry_path:?}: {err}");
                 }
                 }

+ 15 - 6
packages/cli/src/logging.rs

@@ -68,10 +68,12 @@ impl TraceController {
         let filter = if env::var(LOG_ENV).is_ok() {
         let filter = if env::var(LOG_ENV).is_ok() {
             EnvFilter::from_env(LOG_ENV)
             EnvFilter::from_env(LOG_ENV)
         } else if matches!(args.action, Commands::Serve(_)) {
         } else if matches!(args.action, Commands::Serve(_)) {
-            EnvFilter::new("error,dx=trace,dioxus-cli=trace,manganis-cli-support=trace")
+            EnvFilter::new(
+                "error,dx=trace,dioxus_cli=trace,manganis_cli_support=trace,wasm_split_cli=trace",
+            )
         } else {
         } else {
             EnvFilter::new(format!(
             EnvFilter::new(format!(
-                "error,dx={our_level},dioxus-cli={our_level},manganis-cli-support={our_level}",
+                "error,dx={our_level},dioxus_cli={our_level},manganis_cli_support={our_level},,wasm_split_cli={our_level}",
                 our_level = if args.verbosity.verbose {
                 our_level = if args.verbosity.verbose {
                     "debug"
                     "debug"
                 } else {
                 } else {
@@ -131,10 +133,13 @@ impl TraceController {
     }
     }
 
 
     /// Get a handle to the trace controller.
     /// Get a handle to the trace controller.
-    pub fn redirect() -> Self {
+    pub fn redirect(interactive: bool) -> Self {
         let (tui_tx, tui_rx) = unbounded();
         let (tui_tx, tui_rx) = unbounded();
-        TUI_ACTIVE.store(true, Ordering::Relaxed);
-        TUI_TX.set(tui_tx.clone()).unwrap();
+
+        if interactive {
+            TUI_ACTIVE.store(true, Ordering::Relaxed);
+            TUI_TX.set(tui_tx.clone()).unwrap();
+        }
 
 
         Self { tui_rx }
         Self { tui_rx }
     }
     }
@@ -142,7 +147,11 @@ impl TraceController {
     /// Wait for the internal logger to send a message
     /// Wait for the internal logger to send a message
     pub(crate) async fn wait(&mut self) -> ServeUpdate {
     pub(crate) async fn wait(&mut self) -> ServeUpdate {
         use futures_util::StreamExt;
         use futures_util::StreamExt;
-        let log = self.tui_rx.next().await.expect("tracer should never die");
+
+        let Some(log) = self.tui_rx.next().await else {
+            return std::future::pending().await;
+        };
+
         ServeUpdate::TracingLog { log }
         ServeUpdate::TracingLog { log }
     }
     }
 }
 }
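
Note: the target rename above (hyphens to underscores) matters because tracing targets default to the Rust module path; a small hedged sketch assuming `tracing` plus `tracing-subscriber` with the `env-filter` feature.

    use tracing_subscriber::EnvFilter;

    fn main() {
        // Directives must name targets the way Rust sees the crate: `dioxus_cli`,
        // not `dioxus-cli`, since an event's default target is its module path.
        let filter = EnvFilter::new("error,dx=trace,dioxus_cli=trace,wasm_split_cli=trace");
        tracing_subscriber::fmt().with_env_filter(filter).init();
        tracing::info!(target: "dioxus_cli", "this event passes the filter");
    }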

+ 1 - 0
packages/cli/src/main.rs

@@ -19,6 +19,7 @@ mod rustc;
 mod serve;
 mod serve;
 mod settings;
 mod settings;
 mod wasm_bindgen;
 mod wasm_bindgen;
+mod wasm_opt;
 
 
 pub(crate) use build::*;
 pub(crate) use build::*;
 pub(crate) use cli::*;
 pub(crate) use cli::*;

+ 1 - 1
packages/cli/src/serve/handle.rs

@@ -760,7 +760,7 @@ We checked the folder: {}
                 tracing::error!("Failed to push .env file to device: {e}");
                 tracing::error!("Failed to push .env file to device: {e}");
             }
             }
 
 
-            // eventually, use the user's MainAcitivty, not our MainAcitivty
+            // eventually, use the user's MainActivity, not our MainActivity
             // adb shell am start -n dev.dioxus.main/dev.dioxus.main.MainActivity
             // adb shell am start -n dev.dioxus.main/dev.dioxus.main.MainActivity
             let activity_name = format!("{}/dev.dioxus.main.MainActivity", full_mobile_app_name,);
             let activity_name = format!("{}/dev.dioxus.main.MainActivity", full_mobile_app_name,);
 
 

+ 1 - 1
packages/cli/src/serve/mod.rs

@@ -39,7 +39,7 @@ pub(crate) use watcher::*;
 ///   to a dynamic one on the fly.
 ///   to a dynamic one on the fly.
 pub(crate) async fn serve_all(mut args: ServeArgs) -> Result<()> {
 pub(crate) async fn serve_all(mut args: ServeArgs) -> Result<()> {
     // Redirect all logging the cli logger
     // Redirect all logging the cli logger
-    let mut tracer = TraceController::redirect();
+    let mut tracer = TraceController::redirect(args.is_interactive_tty());
 
 
     // Load the krate and resolve the server args against it - this might log so do it after we turn on the tracer first
     // Load the krate and resolve the server args against it - this might log so do it after we turn on the tracer first
     let krate = args.load_krate().await?;
     let krate = args.load_krate().await?;

+ 2 - 0
packages/cli/src/serve/output.rs

@@ -533,6 +533,8 @@ impl Output {
                 lines.push(krate.as_str().dark_gray())
                 lines.push(krate.as_str().dark_gray())
             }
             }
             BuildStage::OptimizingWasm {} => lines.push("Optimizing wasm".yellow()),
             BuildStage::OptimizingWasm {} => lines.push("Optimizing wasm".yellow()),
+            BuildStage::SplittingBundle {} => lines.push("Splitting bundle".yellow()),
+            BuildStage::CompressingAssets => lines.push("Compressing assets".yellow()),
             BuildStage::PrerenderingRoutes {} => lines.push("Prerendering static routes".yellow()),
             BuildStage::PrerenderingRoutes {} => lines.push("Prerendering static routes".yellow()),
             BuildStage::RunningBindgen {} => lines.push("Running wasm-bindgen".yellow()),
             BuildStage::RunningBindgen {} => lines.push("Running wasm-bindgen".yellow()),
             BuildStage::RunningGradle {} => lines.push("Running gradle assemble".yellow()),
             BuildStage::RunningGradle {} => lines.push("Running gradle assemble".yellow()),

+ 26 - 11
packages/cli/src/wasm_bindgen.rs

@@ -18,10 +18,11 @@ pub(crate) struct WasmBindgen {
     demangle: bool,
     demangle: bool,
     remove_name_section: bool,
     remove_name_section: bool,
     remove_producers_section: bool,
     remove_producers_section: bool,
+    keep_lld_exports: bool,
 }
 }
 
 
 impl WasmBindgen {
 impl WasmBindgen {
-    pub fn new(version: &str) -> Self {
+    pub(crate) fn new(version: &str) -> Self {
         Self {
         Self {
             version: version.to_string(),
             version: version.to_string(),
             input_path: PathBuf::new(),
             input_path: PathBuf::new(),
@@ -33,65 +34,73 @@ impl WasmBindgen {
             demangle: true,
             demangle: true,
             remove_name_section: false,
             remove_name_section: false,
             remove_producers_section: false,
             remove_producers_section: false,
+            keep_lld_exports: false,
         }
         }
     }
     }
 
 
-    pub fn input_path(self, input_path: &Path) -> Self {
+    pub(crate) fn input_path(self, input_path: &Path) -> Self {
         Self {
         Self {
             input_path: input_path.to_path_buf(),
             input_path: input_path.to_path_buf(),
             ..self
             ..self
         }
         }
     }
     }
 
 
-    pub fn out_dir(self, out_dir: &Path) -> Self {
+    pub(crate) fn out_dir(self, out_dir: &Path) -> Self {
         Self {
         Self {
             out_dir: out_dir.to_path_buf(),
             out_dir: out_dir.to_path_buf(),
             ..self
             ..self
         }
         }
     }
     }
 
 
-    pub fn out_name(self, out_name: &str) -> Self {
+    pub(crate) fn out_name(self, out_name: &str) -> Self {
         Self {
         Self {
             out_name: out_name.to_string(),
             out_name: out_name.to_string(),
             ..self
             ..self
         }
         }
     }
     }
 
 
-    pub fn target(self, target: &str) -> Self {
+    pub(crate) fn target(self, target: &str) -> Self {
         Self {
         Self {
             target: target.to_string(),
             target: target.to_string(),
             ..self
             ..self
         }
         }
     }
     }
 
 
-    pub fn debug(self, debug: bool) -> Self {
+    pub(crate) fn debug(self, debug: bool) -> Self {
         Self { debug, ..self }
         Self { debug, ..self }
     }
     }
 
 
-    pub fn keep_debug(self, keep_debug: bool) -> Self {
+    pub(crate) fn keep_debug(self, keep_debug: bool) -> Self {
         Self { keep_debug, ..self }
         Self { keep_debug, ..self }
     }
     }
 
 
-    pub fn demangle(self, demangle: bool) -> Self {
+    pub(crate) fn demangle(self, demangle: bool) -> Self {
         Self { demangle, ..self }
         Self { demangle, ..self }
     }
     }
 
 
-    pub fn remove_name_section(self, remove_name_section: bool) -> Self {
+    pub(crate) fn remove_name_section(self, remove_name_section: bool) -> Self {
         Self {
         Self {
             remove_name_section,
             remove_name_section,
             ..self
             ..self
         }
         }
     }
     }
 
 
-    pub fn remove_producers_section(self, remove_producers_section: bool) -> Self {
+    pub(crate) fn remove_producers_section(self, remove_producers_section: bool) -> Self {
         Self {
         Self {
             remove_producers_section,
             remove_producers_section,
             ..self
             ..self
         }
         }
     }
     }
 
 
+    pub(crate) fn keep_lld_sections(self, keep_lld_sections: bool) -> Self {
+        Self {
+            keep_lld_exports: keep_lld_sections,
+            ..self
+        }
+    }
+
     /// Run the bindgen command with the current settings
     /// Run the bindgen command with the current settings
-    pub async fn run(&self) -> Result<()> {
+    pub(crate) async fn run(&self) -> Result<()> {
         let binary = self.get_binary_path().await?;
         let binary = self.get_binary_path().await?;
 
 
         let mut args = Vec::new();
         let mut args = Vec::new();
@@ -121,6 +130,10 @@ impl WasmBindgen {
             args.push("--remove-producers-section");
             args.push("--remove-producers-section");
         }
         }
 
 
+        if self.keep_lld_exports {
+            args.push("--keep-lld-exports");
+        }
+
         // Out name
         // Out name
         args.push("--out-name");
         args.push("--out-name");
         args.push(&self.out_name);
         args.push(&self.out_name);
@@ -144,6 +157,8 @@ impl WasmBindgen {
             .expect("input_path should be valid utf8");
             .expect("input_path should be valid utf8");
         args.push(input_path);
         args.push(input_path);
 
 
+        tracing::debug!("wasm-bindgen args: {:#?}", args);
+
         // Run bindgen
         // Run bindgen
         Command::new(binary)
         Command::new(binary)
             .args(args)
             .args(args)
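
Note: a hedged sketch of how the builder might be driven for a bundle-split build from inside the CLI crate; only the builder methods (including the new `keep_lld_sections`) come from the diff above, while the version string and paths are illustrative.

    use std::path::Path;

    async fn run_bindgen_for_split_build() -> crate::Result<()> {
        crate::wasm_bindgen::WasmBindgen::new("0.2.99")
            .input_path(Path::new("target/wasm32-unknown-unknown/release/app.wasm"))
            .out_dir(Path::new("public/wasm"))
            .out_name("main")
            .target("web")
            .debug(false)
            .demangle(true)
            // keep the lld-generated exports so the splitter can still find the split points
            .keep_lld_sections(true)
            .run()
            .await
    }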

+ 102 - 0
packages/cli/src/wasm_opt.rs

@@ -0,0 +1,102 @@
+use crate::config::WasmOptLevel;
+use crate::{Result, WasmOptConfig};
+use std::path::Path;
+
+#[memoize::memoize(SharedCache)]
+pub fn wasm_opt_available() -> bool {
+    if cfg!(feature = "optimizations") {
+        return true;
+    }
+
+    which::which("wasm-opt").is_ok()
+}
+
+/// Write these wasm bytes with a particular set of optimizations
+pub async fn write_wasm(bytes: &[u8], output_path: &Path, cfg: &WasmOptConfig) -> Result<()> {
+    tokio::fs::write(output_path, bytes).await?;
+    optimize(output_path, output_path, cfg).await?;
+    Ok(())
+}
+
+#[allow(unreachable_code)]
+pub async fn optimize(input_path: &Path, output_path: &Path, cfg: &WasmOptConfig) -> Result<()> {
+    #[cfg(feature = "optimizations")]
+    return run_from_lib(input_path, output_path, cfg).await;
+
+    // It's okay not to run wasm-opt but we should *really* try it
+    if which::which("wasm-opt").is_err() {
+        tracing::warn!("wasm-opt not found and CLI is compiled without optimizations. Skipping optimization for {}", input_path.display());
+        return Ok(());
+    }
+
+    run_locally(input_path, output_path, cfg).await?;
+
+    Ok(())
+}
+
+async fn run_locally(input_path: &Path, output_path: &Path, cfg: &WasmOptConfig) -> Result<()> {
+    let mut args = vec![
+        // needed by wasm-bindgen
+        "--enable-reference-types",
+    ];
+
+    if cfg.memory_packing {
+        // needed for our current approach to bundle splitting to work properly
+        // todo(jon): emit the main module's data section in chunks instead of all at once
+        args.push("--memory-packing");
+    }
+
+    if !cfg.debug {
+        args.push("--strip-debug");
+    } else {
+        args.push("--debuginfo");
+    }
+
+    let level = match cfg.level {
+        WasmOptLevel::Z => "-Oz",
+        WasmOptLevel::S => "-Os",
+        WasmOptLevel::Zero => "-O0",
+        WasmOptLevel::One => "-O1",
+        WasmOptLevel::Two => "-O2",
+        WasmOptLevel::Three => "-O3",
+        WasmOptLevel::Four => "-O4",
+    };
+
+    tokio::process::Command::new("wasm-opt")
+        .arg(input_path)
+        .arg(level)
+        .arg("-o")
+        .arg(output_path)
+        .args(args)
+        .output()
+        .await?;
+
+    Ok(())
+}
+
+/// Use the `wasm_opt` crate
+#[cfg(feature = "optimizations")]
+async fn run_from_lib(
+    input_path: &Path,
+    output_path: &Path,
+    options: &WasmOptConfig,
+) -> Result<()> {
+    let mut level = match options.level {
+        WasmOptLevel::Z => wasm_opt::OptimizationOptions::new_optimize_for_size_aggressively(),
+        WasmOptLevel::S => wasm_opt::OptimizationOptions::new_optimize_for_size(),
+        WasmOptLevel::Zero => wasm_opt::OptimizationOptions::new_opt_level_0(),
+        WasmOptLevel::One => wasm_opt::OptimizationOptions::new_opt_level_1(),
+        WasmOptLevel::Two => wasm_opt::OptimizationOptions::new_opt_level_2(),
+        WasmOptLevel::Three => wasm_opt::OptimizationOptions::new_opt_level_3(),
+        WasmOptLevel::Four => wasm_opt::OptimizationOptions::new_opt_level_4(),
+    };
+
+    level
+        .enable_feature(wasm_opt::Feature::ReferenceTypes)
+        .add_pass(wasm_opt::Pass::MemoryPacking)
+        .debug_info(options.debug)
+        .run(input_path, output_path)
+        .map_err(|err| crate::Error::Other(anyhow::anyhow!(err)))?;
+
+    Ok(())
+}
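
Note: a hedged sketch of a caller inside the CLI; `optimize` and the config types come from the new module above, while the path and the option values are illustrative.

    async fn optimize_main_module() -> crate::Result<()> {
        let cfg = crate::WasmOptConfig {
            // memory packing is required by the current bundle-splitting approach
            memory_packing: true,
            ..Default::default()
        };
        let wasm = std::path::Path::new("public/wasm/main_bg.wasm");
        crate::wasm_opt::optimize(wasm, wasm, &cfg).await
    }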

+ 17 - 0
packages/config-macros/Cargo.toml

@@ -0,0 +1,17 @@
+[package]
+name = "dioxus-config-macros"
+edition = "2021"
+version.workspace = true
+authors = ["Dioxus Labs"]
+description = "Macros used internally by codegen"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/DioxusLabs/dioxus/"
+homepage = "https://dioxuslabs.com"
+keywords = ["web", "desktop", "mobile", "gui", "wasm"]
+rust-version = "1.79.0"
+
+[dependencies]
+
+[features]
+default = []
+wasm-split = []

+ 15 - 0
packages/config-macros/README.md

@@ -0,0 +1,15 @@
+# Dioxus Config Macros
+
+These macros are used internally by codegen and are not intended for general use.
+
+Dioxus will export its feature flags into this crate, allowing downstream codegen to use them under the "dioxus" namespace.
+
+## License
+
+This project is licensed under the [MIT license].
+
+[mit license]: https://github.com/dioxuslabs/dioxus/blob/main/LICENSE-MIT
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in Dioxus by you shall be licensed as MIT without any additional
+terms or conditions.

+ 39 - 0
packages/config-macros/src/lib.rs

@@ -0,0 +1,39 @@
+/// A macro for deciding whether or not to split the wasm bundle.
+/// Used by the internal router-macro code. The contents here are considered to be semver exempt.
+///
+/// Only on wasm32 with the `wasm-split` feature enabled are the left-hand (`wasm_split`) tokens
+/// emitted; otherwise the right-hand (non-wasm-split) tokens are emitted.
+#[doc(hidden)]
+#[cfg(all(feature = "wasm-split", target_arch = "wasm32"))]
+#[macro_export]
+macro_rules! maybe_wasm_split {
+    (
+        if wasm_split {
+            $left:tt
+        } else {
+            $right:tt
+        }
+    ) => {
+        $left
+    };
+}
+
+/// A macro for deciding whether or not to split the wasm bundle.
+/// Used by the internal router-macro code. The contents here are considered to be semver exempt.
+///
+/// Only on wasm32 with the `wasm-split` feature enabled are the left-hand (`wasm_split`) tokens
+/// emitted; otherwise the right-hand (non-wasm-split) tokens are emitted.
+#[doc(hidden)]
+#[cfg(any(not(feature = "wasm-split"), not(target_arch = "wasm32")))]
+#[macro_export]
+macro_rules! maybe_wasm_split {
+    (
+        if wasm_split {
+            $left:tt
+        } else {
+            $right:tt
+        }
+    ) => {
+        $right
+    };
+}
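
Note: an illustrative call site (the function and strings are invented); each branch must be a single token tree, and only the branch matching the build configuration survives expansion.

    fn route_body() -> &'static str {
        // On wasm32 with the `wasm-split` feature the left branch is kept,
        // otherwise the right branch is kept; the other disappears entirely.
        let body = dioxus_config_macros::maybe_wasm_split! {
            if wasm_split {
                "loaded from a split chunk"
            } else {
                "compiled into the main binary"
            }
        };
        body
    }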

+ 129 - 4
packages/core-macro/src/component.rs

@@ -1,18 +1,29 @@
 use proc_macro2::TokenStream;
 use proc_macro2::TokenStream;
-use quote::{quote, ToTokens, TokenStreamExt};
+use quote::{format_ident, quote, ToTokens, TokenStreamExt};
 use syn::parse::{Parse, ParseStream};
 use syn::parse::{Parse, ParseStream};
 use syn::spanned::Spanned;
 use syn::spanned::Spanned;
 use syn::*;
 use syn::*;
 
 
 pub struct ComponentBody {
 pub struct ComponentBody {
     pub item_fn: ItemFn,
     pub item_fn: ItemFn,
+    pub options: ComponentMacroOptions,
 }
 }
 
 
 impl Parse for ComponentBody {
 impl Parse for ComponentBody {
     fn parse(input: ParseStream) -> Result<Self> {
     fn parse(input: ParseStream) -> Result<Self> {
         let item_fn: ItemFn = input.parse()?;
         let item_fn: ItemFn = input.parse()?;
         validate_component_fn(&item_fn)?;
         validate_component_fn(&item_fn)?;
-        Ok(Self { item_fn })
+        Ok(Self {
+            item_fn,
+            options: ComponentMacroOptions::default(),
+        })
+    }
+}
+
+impl ComponentBody {
+    pub fn with_options(mut self, options: ComponentMacroOptions) -> Self {
+        self.options = options;
+        self
     }
     }
 }
 }
 
 
@@ -100,16 +111,98 @@ impl ComponentBody {
             quote! { #struct_ident { #(#struct_field_names),* }: #struct_ident #impl_generics }
             quote! { #struct_ident { #(#struct_field_names),* }: #struct_ident #impl_generics }
         };
         };
 
 
+        // Defer to the lazy_body if we're using lazy
+        let body: TokenStream = if self.options.lazy {
+            self.lazy_body(
+                &struct_ident,
+                generics,
+                &impl_generics,
+                fn_output,
+                where_clause,
+                &inlined_props_argument,
+                block,
+            )
+        } else {
+            quote! { #block }
+        };
+
+        // We need a props type to exist even if the inputs are empty with lazy components
+        let emit_props = if self.options.lazy {
+            if inputs.is_empty() {
+                quote! {props: ()}
+            } else {
+                quote!(props: #struct_ident #impl_generics)
+            }
+        } else {
+            inlined_props_argument
+        };
+
         // The extra nest is for the snake case warning to kick back in
         // The extra nest is for the snake case warning to kick back in
         parse_quote! {
         parse_quote! {
             #(#attrs)*
             #(#attrs)*
             #(#props_docs)*
             #(#props_docs)*
             #[allow(non_snake_case)]
             #[allow(non_snake_case)]
-            #vis fn #fn_ident #generics (#inlined_props_argument) #fn_output #where_clause {
+            #vis fn #fn_ident #generics (#emit_props) #fn_output #where_clause {
                 {
                 {
                     // In debug mode we can detect if the user is calling the component like a function
                     // In debug mode we can detect if the user is calling the component like a function
                     dioxus_core::internal::verify_component_called_as_component(#fn_ident #generics_turbofish);
                     dioxus_core::internal::verify_component_called_as_component(#fn_ident #generics_turbofish);
-                    #block
+                    #body
+                }
+            }
+        }
+    }
+
+    /// Generate the body of the lazy component
+    ///
+    /// This extracts the body into a new component that is wrapped in a lazy loader
+    #[allow(clippy::too_many_arguments)]
+    fn lazy_body(
+        &self,
+        struct_ident: &Ident,
+        generics: &Generics,
+        impl_generics: &TypeGenerics,
+        fn_output: &ReturnType,
+        where_clause: &Option<WhereClause>,
+        inlined_props_argument: &TokenStream,
+        block: &Block,
+    ) -> TokenStream {
+        let fn_ident = &self.item_fn.sig.ident;
+        let inputs = &self.item_fn.sig.inputs;
+
+        let lazy_name = format_ident!("Lazy{fn_ident}");
+        let out_ty = match &self.item_fn.sig.output {
+            ReturnType::Default => quote! { () },
+            ReturnType::Type(_, ty) => quote! { #ty },
+        };
+        let props_ty = if inputs.is_empty() {
+            quote! { () }
+        } else {
+            quote! { #struct_ident #impl_generics }
+        };
+        let anon_props = if inputs.is_empty() {
+            quote! { props: () }
+        } else {
+            quote! { #inlined_props_argument}
+        };
+
+        quote! {
+            fn #lazy_name #generics (#anon_props) #fn_output #where_clause {
+                #block
+            }
+
+            dioxus::config_macros::maybe_wasm_split! {
+                if wasm_split {
+                    {
+                        static __MODULE: wasm_split::LazyLoader<#props_ty, #out_ty> =
+                            wasm_split::lazy_loader!(extern "lazy" fn #lazy_name(props: #props_ty,) -> #out_ty);
+
+                        use_resource(|| async move { __MODULE.load().await }).suspend()?;
+                        __MODULE.call(props).unwrap()
+                    }
+                } else {
+                    {
+                        #lazy_name(props)
+                    }
                 }
                 }
             }
             }
         }
         }
@@ -467,3 +560,35 @@ fn allow_camel_case_for_fn_ident(item_fn: &ItemFn) -> ItemFn {
 
 
     clone
     clone
 }
 }
+
+#[derive(Default)]
+pub struct ComponentMacroOptions {
+    pub lazy: bool,
+}
+
+impl Parse for ComponentMacroOptions {
+    fn parse(input: ParseStream) -> Result<Self> {
+        let mut lazy_load = false;
+
+        while !input.is_empty() {
+            let ident = input.parse::<Ident>()?;
+            let ident_name = ident.to_string();
+            if ident_name == "lazy" {
+                lazy_load = true;
+            } else if ident_name == "no_case_check" {
+                // we used to have this?
+            } else {
+                return Err(Error::new(
+                    ident.span(),
+                    "Unknown option for component macro",
+                ));
+            }
+
+            if input.peek(Token![,]) {
+                input.parse::<Token![,]>()?;
+            }
+        }
+
+        Ok(Self { lazy: lazy_load })
+    }
+}
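
Note: a hedged, user-facing sketch of the new `lazy` option (component name and props are invented); the real expansion is the generated `Lazy*` inner function plus the `wasm_split::LazyLoader` shown in `lazy_body` above.

    use dioxus::prelude::*;

    // With `lazy`, the body below is moved into a generated `LazyCounter` function
    // behind a wasm-split lazy loader; callers keep using `Counter` as before.
    #[component(lazy)]
    fn Counter(start: i32) -> Element {
        let mut count = use_signal(move || start);
        rsx! {
            button { onclick: move |_| count += 1, "count: {count}" }
        }
    }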

+ 2 - 1
packages/core-macro/src/lib.rs

@@ -2,7 +2,7 @@
 #![doc(html_logo_url = "https://avatars.githubusercontent.com/u/79236386")]
 #![doc(html_logo_url = "https://avatars.githubusercontent.com/u/79236386")]
 #![doc(html_favicon_url = "https://avatars.githubusercontent.com/u/79236386")]
 #![doc(html_favicon_url = "https://avatars.githubusercontent.com/u/79236386")]
 
 
-use component::ComponentBody;
+use component::{ComponentBody, ComponentMacroOptions};
 use proc_macro::TokenStream;
 use proc_macro::TokenStream;
 use quote::ToTokens;
 use quote::ToTokens;
 use syn::parse_macro_input;
 use syn::parse_macro_input;
@@ -36,6 +36,7 @@ pub fn rsx(tokens: TokenStream) -> TokenStream {
 #[proc_macro_attribute]
 #[proc_macro_attribute]
 pub fn component(_args: TokenStream, input: TokenStream) -> TokenStream {
 pub fn component(_args: TokenStream, input: TokenStream) -> TokenStream {
     parse_macro_input!(input as ComponentBody)
     parse_macro_input!(input as ComponentBody)
+        .with_options(parse_macro_input!(_args as ComponentMacroOptions))
         .into_token_stream()
         .into_token_stream()
         .into()
         .into()
 }
 }

+ 19 - 11
packages/core/src/any_props.rs

@@ -75,19 +75,27 @@ impl<F: ComponentFunction<P, M> + Clone, P: Clone + 'static, M: 'static> AnyProp
     }
     }
 
 
     fn render(&self) -> Element {
     fn render(&self) -> Element {
-        let res = std::panic::catch_unwind(AssertUnwindSafe(move || {
-            self.render_fn.rebuild(self.props.clone())
-        }));
-
-        match res {
-            Ok(node) => node,
-            Err(err) => {
-                let component_name = self.name;
-                tracing::error!("Panic while rendering component `{component_name}`: {err:?}");
-                let panic = CapturedPanic { error: err };
-                Element::Err(panic.into())
+        fn render_inner(name: &str, res: Result<Element, Box<dyn Any + Send>>) -> Element {
+            match res {
+                Ok(node) => node,
+                Err(err) => {
+                    // on wasm this massively bloats binary sizes and we can't even capture the panic
+                    // so do nothing
+                    #[cfg(not(target_arch = "wasm32"))]
+                    {
+                        tracing::error!("Panic while rendering component `{name}`: {err:?}");
+                    }
+                    Element::Err(CapturedPanic { error: err }.into())
+                }
             }
             }
         }
         }
+
+        render_inner(
+            self.name,
+            std::panic::catch_unwind(AssertUnwindSafe(move || {
+                self.render_fn.rebuild(self.props.clone())
+            })),
+        )
     }
     }
 
 
     fn duplicate(&self) -> BoxedAnyProps {
     fn duplicate(&self) -> BoxedAnyProps {

+ 10 - 0
packages/core/src/scope_context.rs

@@ -450,6 +450,16 @@ impl Scope {
             hooks.push(Box::new(initializer()));
             hooks.push(Box::new(initializer()));
         }
         }
 
 
+        self.use_hook_inner::<State>(hooks, cur_hook)
+    }
+
+    // The interior version that gets monomorphized by the `State` type but not the `initializer` type.
+    // This helps trim down binary sizes
+    fn use_hook_inner<State: Clone + 'static>(
+        &self,
+        hooks: std::cell::RefMut<Vec<Box<dyn std::any::Any>>>,
+        cur_hook: usize,
+    ) -> State {
         hooks
         hooks
             .get(cur_hook)
             .get(cur_hook)
             .and_then(|inn| {
             .and_then(|inn| {

+ 11 - 1
packages/core/src/tasks.rs

@@ -162,6 +162,16 @@ impl Runtime {
         scope: ScopeId,
         scope: ScopeId,
         task: impl Future<Output = ()> + 'static,
         task: impl Future<Output = ()> + 'static,
         ty: TaskType,
         ty: TaskType,
+    ) -> Task {
+        self.spawn_task_of_type_inner(scope, Box::pin(task), ty)
+    }
+
+    // a non-monomorphic version of spawn_task_of_type, helps with binary sizes
+    fn spawn_task_of_type_inner(
+        &self,
+        scope: ScopeId,
+        pinned_task: Pin<Box<dyn Future<Output = ()>>>,
+        ty: TaskType,
     ) -> Task {
     ) -> Task {
         // Insert the task, temporarily holding a borrow on the tasks map
         // Insert the task, temporarily holding a borrow on the tasks map
         let (task, task_id) = {
         let (task, task_id) = {
@@ -176,7 +186,7 @@ impl Runtime {
                     scope,
                     scope,
                     active: Cell::new(true),
                     active: Cell::new(true),
                     parent: self.current_task(),
                     parent: self.current_task(),
-                    task: RefCell::new(Box::pin(task)),
+                    task: RefCell::new(pinned_task),
                     waker: futures_util::task::waker(Arc::new(LocalTaskHandle {
                     waker: futures_util::task::waker(Arc::new(LocalTaskHandle {
                         id: task_id.id,
                         id: task_id.id,
                         tx: self.sender.clone(),
                         tx: self.sender.clone(),
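
Note: the edits to `any_props.rs`, `scope_context.rs`, and `tasks.rs` above all apply the same size trick; this is a standalone sketch of the pattern, not the dioxus-core code.

    use std::future::Future;
    use std::pin::Pin;

    // The outer function is generic over every caller's future type but only does the
    // cheap conversion; the heavy body lives in the non-generic inner function, so it
    // is compiled once rather than once per call site.
    fn spawn<F: Future<Output = ()> + 'static>(fut: F) {
        spawn_inner(Box::pin(fut));
    }

    fn spawn_inner(fut: Pin<Box<dyn Future<Output = ()>>>) {
        // ...store the task, hook up a waker, etc.
        let _ = fut;
    }

    fn main() {
        spawn(async {});
        spawn(async { println!("two instantiations of `spawn`, one of `spawn_inner`") });
    }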

+ 3 - 0
packages/dioxus/Cargo.toml

@@ -12,6 +12,7 @@ rust-version = "1.79.0"
 
 
 [dependencies]
 [dependencies]
 dioxus-core = { workspace = true }
 dioxus-core = { workspace = true }
+dioxus-config-macros = { workspace = true }
 dioxus-html = { workspace = true, default-features = false, optional = true }
 dioxus-html = { workspace = true, default-features = false, optional = true }
 dioxus-document = { workspace = true, optional = true }
 dioxus-document = { workspace = true, optional = true }
 dioxus-history = { workspace = true, optional = true }
 dioxus-history = { workspace = true, optional = true }
@@ -29,6 +30,7 @@ dioxus-ssr = { workspace = true, optional = true }
 manganis = { workspace = true, features = ["dioxus"], optional = true }
 manganis = { workspace = true, features = ["dioxus"], optional = true }
 dioxus-logger = { workspace = true, optional = true }
 dioxus-logger = { workspace = true, optional = true }
 warnings = { workspace = true, optional = true }
 warnings = { workspace = true, optional = true }
+wasm-split = { workspace = true, optional = true }
 
 
 serde = { workspace = true, optional = true }
 serde = { workspace = true, optional = true }
 dioxus-cli-config = { workspace = true, optional = true }
 dioxus-cli-config = { workspace = true, optional = true }
@@ -51,6 +53,7 @@ document = ["dioxus-web?/document", "dep:dioxus-document", "dep:dioxus-history"]
 logger = ["dep:dioxus-logger"]
 logger = ["dep:dioxus-logger"]
 cli-config = ["dep:dioxus-cli-config"]
 cli-config = ["dep:dioxus-cli-config"]
 warnings = ["dep:warnings"]
 warnings = ["dep:warnings"]
+wasm-split = ["dep:wasm-split", "dioxus-config-macros/wasm-split"] # note: to turn on the router splitter, you need to manually enable wasm-split on the router
 
 
 launch = ["dep:dioxus-config-macro"]
 launch = ["dep:dioxus-config-macro"]
 router = ["dep:dioxus-router"]
 router = ["dep:dioxus-router"]

+ 10 - 0
packages/dioxus/src/lib.rs

@@ -74,6 +74,10 @@ pub use dioxus_logger as logger;
 #[cfg_attr(docsrs, doc(cfg(feature = "cli-config")))]
 #[cfg_attr(docsrs, doc(cfg(feature = "cli-config")))]
 pub use dioxus_cli_config as cli_config;
 pub use dioxus_cli_config as cli_config;
 
 
+#[cfg(feature = "wasm-split")]
+#[cfg_attr(docsrs, doc(cfg(feature = "wasm-split")))]
+pub use wasm_split;
+
 pub mod prelude {
 pub mod prelude {
     #[cfg(feature = "document")]
     #[cfg(feature = "document")]
     #[cfg_attr(docsrs, doc(cfg(feature = "document")))]
     #[cfg_attr(docsrs, doc(cfg(feature = "document")))]
@@ -138,6 +142,10 @@ pub mod prelude {
     #[cfg(feature = "asset")]
     #[cfg(feature = "asset")]
     #[cfg_attr(docsrs, doc(cfg(feature = "asset")))]
     #[cfg_attr(docsrs, doc(cfg(feature = "asset")))]
     pub use manganis::{self, *};
     pub use manganis::{self, *};
+
+    #[cfg(feature = "wasm-split")]
+    #[cfg_attr(docsrs, doc(cfg(feature = "wasm-split")))]
+    pub use wasm_split;
 }
 }
 
 
 #[cfg(feature = "web")]
 #[cfg(feature = "web")]
@@ -171,3 +179,5 @@ pub use dioxus_ssr as ssr;
 #[cfg(feature = "warnings")]
 #[cfg(feature = "warnings")]
 #[cfg_attr(docsrs, doc(cfg(feature = "warnings")))]
 #[cfg_attr(docsrs, doc(cfg(feature = "warnings")))]
 pub use warnings;
 pub use warnings;
+
+pub use dioxus_config_macros as config_macros;

+ 2 - 0
packages/dx-wire-format/src/lib.rs

@@ -56,6 +56,7 @@ pub enum BuildStage {
         krate: String,
         krate: String,
     },
     },
     RunningBindgen,
     RunningBindgen,
+    SplittingBundle,
     OptimizingWasm,
     OptimizingWasm,
     PrerenderingRoutes,
     PrerenderingRoutes,
     CopyingAssets {
     CopyingAssets {
@@ -69,4 +70,5 @@ pub enum BuildStage {
     Failed,
     Failed,
     Aborted,
     Aborted,
     Restarting,
     Restarting,
+    CompressingAssets,
 }
 }

+ 10 - 1
packages/playwright-tests/playwright.config.js

@@ -142,11 +142,20 @@ module.exports = defineConfig({
       cwd: path.join(process.cwd(), "cli-optimization"),
       cwd: path.join(process.cwd(), "cli-optimization"),
       // Remove the cache folder for the cli-optimization build to force a full cache reset
       // Remove the cache folder for the cli-optimization build to force a full cache reset
       command:
       command:
-        'cargo run --package dioxus-cli --release -- serve --addr "127.0.0.1" --port 8989',
+        'cargo run --package dioxus-cli --release --features optimizations -- serve --addr "127.0.0.1" --port 8989',
       port: 8989,
       port: 8989,
       timeout: 50 * 60 * 1000,
       timeout: 50 * 60 * 1000,
       reuseExistingServer: !process.env.CI,
       reuseExistingServer: !process.env.CI,
       stdout: "pipe",
       stdout: "pipe",
     },
     },
+    {
+      cwd: path.join(process.cwd(), "wasm-split-harness"),
+      command:
+        'cargo run --package dioxus-cli --release --features optimizations -- serve --bin wasm-split-harness --platform web --addr "127.0.0.1" --port 8001 --experimental-wasm-split --profile wasm-split-release',
+      port: 8001,
+      timeout: 50 * 60 * 1000,
+      reuseExistingServer: !process.env.CI,
+      stdout: "pipe",
+    },
   ],
   ],
 });
 });

+ 21 - 0
packages/playwright-tests/wasm-split-harness/Cargo.toml

@@ -0,0 +1,21 @@
+[package]
+name = "wasm-split-harness"
+version = "0.1.0"
+edition = "2021"
+publish = false
+authors = ["Jonathan Kelley"]
+
+[dependencies]
+dioxus = { workspace = true, features = ["web", "router", "wasm-split"] }
+dioxus-router = { workspace = true, features = ["wasm-split"] }
+anyhow = { workspace = true }
+async-compression = { workspace = true, features = ["futures-io", "gzip", "brotli"] }
+futures = { workspace = true }
+js-sys = { workspace = true }
+wasm-bindgen = { workspace = true }
+wasm-bindgen-futures = { workspace = true }
+web-sys = { workspace = true, features = ["Document", "Window", "HtmlElement", "Text", "DomRectReadOnly", "console"] }
+once_cell = { workspace = true }
+getrandom = { workspace = true, features = ["js"] }
+reqwest = { workspace = true, features = ["json"] }
+

+ 1 - 0
packages/playwright-tests/wasm-split-harness/data/.gitignore

@@ -0,0 +1 @@
+harness/

+ 13 - 0
packages/playwright-tests/wasm-split-harness/data/index.html

@@ -0,0 +1,13 @@
+<html>
+    <head>
+        <meta charset="utf-8">
+        <title>Hello wasm-bindgen</title>
+    </head>
+    <body>
+        <div id="main"></div>
+    </body>
+    <script type="module">
+        import init from "/harness/split/main.js";
+        init();
+    </script>
+</html>

+ 14 - 0
packages/playwright-tests/wasm-split-harness/docsite.sh

@@ -0,0 +1,14 @@
+cargo build --bin wasm-split-cli
+CLI=./target/debug/wasm-split-cli
+
+rm -rf docsite/chunks
+
+# Run the wasm-split-cli on the with_body.wasm file
+${CLI} split docsite/input.wasm docsite/bindgen/main_bg.wasm docsite/chunks
+
+# copy the contents of the wasm_bindgen folder to the docsite folder
+mv docsite/chunks/main.wasm docsite/chunks/main_bg.wasm # rename the main wasm file
+cp -r docsite/bindgen/snippets docsite/chunks/snippets
+cp docsite/bindgen/main.js docsite/chunks/main.js
+
+python3 -m http.server 8080 --directory docsite

+ 82 - 0
packages/playwright-tests/wasm-split-harness/run.sh

@@ -0,0 +1,82 @@
+# This file is a simple shell script that runs the bundle split process manually without the CLI involved
+# it's not necessarily meant to work on your machine (sorry!)
+#
+# To hack on harness you need the `wasm-tools` CLI installed
+# `cargo binstall wasm-tools`
+#
+# This script is also sensitive to where it's run from, so you *need* to be in the harness folder (running as `./run.sh`)
+
+TARGET_DIR=../../../target
+
+# build the harness
+cargo rustc --package wasm-split-harness --target wasm32-unknown-unknown --profile wasm-split-release -- -Clink-args=--emit-relocs
+
+# for a much smaller compile, you can crank up the flags. However, dioxus relies heavily on location detail, so we can't disable that
+#
+# -Zlocation-detail=none - we could compile with location detail off but it breaks our signals...
+#
+# cargo +nightly rustc \
+#   -Z build-std=std,panic_abort \
+#   -Z build-std-features="optimize_for_size" \
+#   -Z build-std-features=panic_immediate_abort \
+#   --target wasm32-unknown-unknown \
+#   --no-default-features \
+#   --profile wasm-split-release \
+#   -- -Clink-args=--emit-relocs
+
+# Build the wasm-split-cli. We are going to call it directly since it's so noisy to build it multiple times
+cargo build --package wasm-split-cli --bin wasm-split-cli
+CLI=$TARGET_DIR/debug/wasm-split-cli
+
+# clear the workdir and assemble the new structure
+rm -rf data/harness
+mkdir -p data/harness/split
+mkdir -p data/harness/split_not
+
+# copy the output wasm file to the harness dir
+cp $TARGET_DIR/wasm32-unknown-unknown/wasm-split-release/wasm-split-harness.wasm data/harness/input.wasm
+
+# Run wasm-bindgen on this module, without splitting it
+wasm-bindgen data/harness/input.wasm --out-dir data/harness/split_not --target web --out-name main --no-demangle --no-typescript --keep-lld-exports --keep-debug
+
+# Run the wasm-split-cli on the with_body.wasm file
+${CLI} split data/harness/input.wasm data/harness/split_not/main_bg.wasm data/harness/chunks
+
+# copy over the chunks
+paths=$(ls data/harness/chunks/ | grep "\.wasm")
+for path in $paths
+do
+
+    path_without_ext=${path%.*}
+    wasm-opt -Oz data/harness/chunks/$path -o data/harness/split/$path --enable-reference-types --memory-packing --debuginfo
+
+    # remove stuff like manganis, etc
+    wasm-tools strip data/harness/split/$path -o data/harness/split/$path
+
+    # if you don't want names (making it harder to debug the outputs) use `--all`
+    # wasm-tools strip data/harness/split/$path -o data/harness/split/$path --all
+done
+
+
+# rename the main chunk
+mv data/harness/split/main.wasm data/harness/split/main_bg.wasm
+cp data/harness/split_not/main.js data/harness/split/main.js
+cp -r data/harness/split_not/snippets data/harness/split/snippets
+cp data/harness/chunks/__wasm_split.js data/harness/split/__wasm_split.js
+
+wasm-opt -Oz data/harness/split_not/main_bg.wasm -o data/harness/split_not/main_bg_opt.wasm --enable-reference-types --memory-packing --debuginfo
+
+# Run wasm-strip to strip out the debug symbols
+wasm-tools strip data/harness/split_not/main_bg_opt.wasm -o data/harness/split_not/main_bg_opt.wasm
+
+# if you don't want names (making it harder to debug the outputs) use `--all`
+# wasm-tools strip data/harness/split_not/main_bg_opt.wasm -o data/harness/split_not/main_bg_opt.wasm --all
+
+echo "===========================================================================\n"
+ls -l data/harness/split_not/main_bg_opt.wasm | awk '{ printf("%07d -> ", $5);print $9}'
+echo ""
+ls -l data/harness/split | grep "\.wasm" | awk '{ printf("%07d -> ", $5);print $9}'
+echo "\n==========================================================================="
+
+# hope you have python3 installed :)
+python3 -m http.server 9876 --directory data

+ 296 - 0
packages/playwright-tests/wasm-split-harness/src/main.rs

@@ -0,0 +1,296 @@
+//! This file is a fuzz-test for the wasm-split engine to ensure that it works as expected.
+//! The docsite is a better target for this, but we try to boil down the complexity into this small
+//! test file.
+
+#![allow(non_snake_case)]
+
+use dioxus::prelude::*;
+use futures::AsyncReadExt;
+use js_sys::Date;
+use std::pin::Pin;
+use wasm_bindgen::prelude::*;
+use wasm_split::lazy_loader;
+
+fn main() {
+    dioxus::launch(|| {
+        rsx! {
+            Router::<Route> {}
+        }
+    });
+}
+
+#[derive(Routable, PartialEq, Eq, Debug, Clone)]
+enum Route {
+    #[layout(Nav)]
+    #[route("/")]
+    Home,
+    #[route("/child")]
+    ChildSplit,
+}
+
+fn Nav() -> Element {
+    rsx! {
+        div {
+            Link { id: "link-home", to: Route::Home, "Home" }
+            Link { id: "link-child", to: Route::ChildSplit, "Child" }
+            Outlet::<Route> {}
+        }
+    }
+}
+
+pub(crate) static GLOBAL_COUNTER: GlobalSignal<usize> = Signal::global(|| 0);
+
+fn Home() -> Element {
+    let mut count = use_signal(|| 1);
+    let mut res = use_signal(|| "hello".to_string());
+
+    rsx! {
+        h1 { "Hello bundle split 456" }
+        h3 { id: "counter-display", "Count: {count}" }
+        h3 { id: "global-counter", "Global Counter: {GLOBAL_COUNTER}" }
+        button {
+            id: "increment-counter",
+            onclick: move |_| count += 1,
+            "Click me"
+        }
+        button {
+            id: "increment-counter-global",
+            onclick: move |_| *GLOBAL_COUNTER.write() += 1,
+            "Click me"
+        }
+        button {
+            id: "add-body-text",
+            onclick: move |_| add_body_text(),
+            "Add body text"
+        }
+        button {
+            id: "add-body-element",
+            onclick: move |_| async move {
+                add_body_element().await;
+                count += 1;
+            },
+            "Add body element"
+        }
+        button {
+            id: "gzip-it",
+            onclick: move |_| async move {
+                gzip_it().await;
+            },
+            "GZIP it"
+        }
+        button {
+            id: "brotli-it",
+            onclick: move |_| async move {
+                brotli_it(&[0u8; 10]).await;
+            },
+            "Brotli It"
+        }
+        button {
+            id: "make-request",
+            onclick: move |_| async move {
+                let res_ = make_request().await.unwrap();
+                res.set(res_);
+            },
+            "Make Request!"
+        }
+        button {
+            id: "make-local-request",
+            onclick: move |_| async move {
+                let client = reqwest::Client::new();
+                let response = client
+                    .get("https://dog.ceo/api/breeds/image/random")
+                    .send()
+                    .await?;
+                let body = response.text().await?;
+                *res.write() = body;
+                Ok(())
+            },
+            "local request"
+        }
+        LazyComponent {
+            abc: 0
+        }
+        div { "Response: {res}" }
+        div { id: "output-box" }
+    }
+}
+
+#[wasm_split::wasm_split(one)]
+async fn add_body_text() {
+    let window = web_sys::window().unwrap_throw();
+    let document = window.document().unwrap_throw();
+    let output = document.create_text_node("Rendered!");
+    let output_box = document.get_element_by_id("output-box").unwrap_throw();
+    output_box.append_child(&output).unwrap_throw();
+    *GLOBAL_COUNTER.write() += 1;
+}
+
+#[wasm_split::wasm_split(two)]
+async fn add_body_element() {
+    let window = web_sys::window().unwrap_throw();
+    let document = window.document().unwrap_throw();
+    let output = document.create_element("div").unwrap_throw();
+    output.set_text_content(Some("Some inner div"));
+    let output_box = document.get_element_by_id("output-box").unwrap_throw();
+    output_box.append_child(&output).unwrap_throw();
+
+    dioxus::prelude::queue_effect(move || {
+        web_sys::console::log_1(&"add body async internal!".into());
+        *GLOBAL_COUNTER.write() += 2;
+    });
+}
+
+#[wasm_split::wasm_split(four)]
+async fn gzip_it() {
+    static DATA: &[u8] = &[0u8; 10];
+    let reader = Box::pin(futures::io::BufReader::new(DATA));
+    let reader: Pin<Box<dyn futures::io::AsyncBufRead>> = reader;
+
+    dioxus::prelude::spawn(async move {
+        let mut fut = Box::pin(async_compression::futures::bufread::GzipDecoder::new(
+            reader,
+        ));
+        if fut.read_to_end(&mut Vec::new()).await.is_err() {
+            web_sys::console::log_1(&"error reading gzip".into());
+        }
+        *GLOBAL_COUNTER.write() += 3;
+
+        let res: Result<String, anyhow::Error> = Box::pin(async move {
+            let client = reqwest::Client::new();
+            let response = client
+                .get("https://dog.ceo/api/breeds/image/random")
+                .send()
+                .await?;
+            let body = response.text().await?;
+            Ok(body)
+        })
+        .await;
+
+        if res.is_err() {
+            web_sys::console::log_1(&"error making request".into());
+        }
+    });
+}
+
+#[wasm_split::wasm_split(three)]
+async fn brotli_it(data: &'static [u8]) {
+    let reader = Box::pin(futures::io::BufReader::new(data));
+    let reader: Pin<Box<dyn futures::io::AsyncBufRead>> = reader;
+
+    dioxus::prelude::spawn(async move {
+        let mut fut = Box::pin(async_compression::futures::bufread::BrotliDecoder::new(
+            reader,
+        ));
+        if fut.read_to_end(&mut Vec::new()).await.is_err() {
+            web_sys::console::log_1(&"error reading brotli".into());
+        }
+        *GLOBAL_COUNTER.write() += 4;
+    });
+}
+
+#[wasm_split::wasm_split(eleven)]
+async fn make_request() -> Result<String, anyhow::Error> {
+    let client = reqwest::Client::new();
+    let response = client
+        .get("https://dog.ceo/api/breeds/image/random")
+        .send()
+        .await?;
+    let body = response.text().await?;
+    Ok(body)
+}
+
+#[component(lazy)]
+fn LazyComponent(abc: i32) -> Element {
+    rsx! {
+        div {
+            "This is a lazy component! {abc}"
+        }
+    }
+}
+
+fn ChildSplit() -> Element {
+    pub(crate) static DATE: GlobalSignal<Date> = Signal::global(Date::new_0);
+
+    static LOADER: wasm_split::LazyLoader<(), Element> =
+        lazy_loader!(extern "five" fn InnerChild(props: ()) -> Element);
+
+    fn InnerChild(_props: ()) -> Element {
+        static LOADER2: wasm_split::LazyLoader<Signal<String>, Element> =
+            lazy_loader!(extern "fortytwo" fn InnerChild3(props: Signal<String>) -> Element);
+
+        fn InnerChild3(count: Signal<String>) -> Element {
+            pub(crate) static ICONCHECK: Component<()> = |_| {
+                rsx! {
+                    svg {
+                        class: "octicon octicon-check js-clipboard-check-icon d-inline-block d-none",
+                        fill: "rgb(26, 127, 55)",
+                        height: "24",
+                        version: "1.1",
+                        "aria_hidden": "true",
+                        width: "24",
+                        view_box: "0 0 16 16",
+                        "data_view_component": "true",
+                        path {
+                            d: "M13.78 4.22a.75.75 0 010 1.06l-7.25 7.25a.75.75 0 01-1.06 0L2.22 9.28a.75.75 0 011.06-1.06L6 10.94l6.72-6.72a.75.75 0 011.06 0z",
+                            fill_rule: "evenodd",
+                        }
+                    }
+                    button {
+                        onclick: move |_| {
+                            *DATE.write() = Date::new_0();
+                        },
+                        "Update Date"
+                    }
+                }
+            };
+
+            let now = DATE.read().clone();
+
+            rsx! {
+                h1 { "Some other child" }
+                h3 { "Global Counter: {GLOBAL_COUNTER}" }
+                h3 { "Date: {now.to_date_string()}" }
+                h3 { "count: {count}" }
+                ICONCHECK {}
+            }
+        }
+
+        #[wasm_bindgen(module = "/src/stars.js")]
+        extern "C" {
+            pub(crate) fn get_stars(name: String) -> Option<usize>;
+            pub(crate) fn set_stars(name: String, stars: usize);
+        }
+
+        let num = get_stars("stars".to_string()).unwrap_or(0);
+
+        use_resource(|| async move { LOADER2.load().await }).suspend()?;
+        let mut count = use_signal(|| "hello".to_string());
+
+        let fp = LOADER2.call(count).unwrap();
+
+        rsx! {
+            h1 { "Some huge child?" }
+            p { "Stars: {num}" }
+            button {
+                onclick: move |_| {
+                    set_stars("stars".to_string(), num + 1);
+                    dioxus::prelude::needs_update();
+                },
+                "Add Star"
+            }
+            {fp}
+            h3 { id: "nested-child-count", "Count: {count}" }
+            button {
+                id: "nested-child-add-world",
+                onclick: move |_| {
+                    *count.write() += " world";
+                },
+                "Add World"
+            }
+        }
+    }
+
+    use_resource(|| async move { LOADER.load().await }).suspend()?;
+
+    LOADER.call(()).unwrap()
+}

+ 29 - 0
packages/playwright-tests/wasm-split-harness/src/stars.js

@@ -0,0 +1,29 @@
+// Handle caching of github stars
+// This file is part of the fuzz test to prove we can handle linked js files
+
+// Two days
+const STAR_EXPIRE_TIME = 172800000;
+
+export function get_stars(name) {
+  let item = localStorage.getItem(name);
+  let data = JSON.parse(item);
+
+  if (!data) {
+    return null;
+  }
+
+  if (data.expires <= Date.now()) {
+    localStorage.removeItem(name);
+    return null;
+  }
+
+  return data.stars;
+}
+
+export function set_stars(name, value) {
+  let expires = Date.now() + STAR_EXPIRE_TIME;
+  let data = { stars: value, expires };
+
+  let converted = JSON.stringify(data);
+  localStorage.setItem(name, converted);
+}

+ 49 - 0
packages/playwright-tests/wasm-split.spec.js

@@ -0,0 +1,49 @@
+// @ts-check
+const { test, expect } = require("@playwright/test");
+
+test("wasm-split page is functional", async ({ page }) => {
+  // Wait for the dev server to load
+  await page.goto("http://localhost:8001");
+
+  // Make sure the local button works - no broken wasm
+  const counter = page.locator("#counter-display");
+  await expect(counter).toContainText("Count: 1");
+  await page.locator("#increment-counter").click();
+  await expect(counter).toContainText("Count: 2");
+
+  // Make sure the global button works - no broken wasm
+  const counterGlobal = page.locator("#global-counter");
+  await expect(counterGlobal).toContainText("Global Counter: 0");
+  await page.locator("#increment-counter-global").click();
+  await expect(counterGlobal).toContainText("Global Counter: 1");
+
+  // Trigger one of the wasm modules to load. This should update the counter and add some text
+  const addBodyTextButton = page.locator("#add-body-text");
+  await addBodyTextButton.click();
+  await expect(counterGlobal).toContainText("Global Counter: 2");
+  const outputBox = page.locator("#output-box");
+  await expect(outputBox).toContainText("Rendered!");
+
+  // The other wasm module
+  const addBodyElementButton = page.locator("#add-body-element");
+  await addBodyElementButton.click();
+  await expect(counterGlobal).toContainText("Global Counter: 4");
+  await expect(outputBox).toContainText("Some inner div");
+
+  // Load the gzip and brotli modules
+  const gzipButton = page.locator("#gzip-it");
+  await gzipButton.click();
+  await expect(counterGlobal).toContainText("Global Counter: 7");
+  const brotliButton = page.locator("#brotli-it");
+  await brotliButton.click();
+  await expect(counterGlobal).toContainText("Global Counter: 11");
+
+  // Ignore the requests in CI
+  // Load the other router module
+  const childRouteButton = page.locator("#link-child");
+  await childRouteButton.click();
+  const nestedChildCounter = page.locator("#nested-child-count");
+  await expect(nestedChildCounter).toContainText("Count: hello");
+  await page.locator("#nested-child-add-world").click();
+  await expect(nestedChildCounter).toContainText("Count: hello world");
+});

+ 4 - 1
packages/router-macro/Cargo.toml

@@ -19,13 +19,16 @@ syn = { workspace = true, features = ["extra-traits", "full"] }
 quote = { workspace = true }
 proc-macro2 = { workspace = true }
 slab = { workspace = true }
+base16 = { workspace = true }
+digest = { workspace = true }
+sha2 = { workspace = true }
 
 [dev-dependencies]
 dioxus = { workspace = true, features = ["router"] }
 
 [features]
 default = []
-web = []
+web = [] #todo(jon) remove this before releasing 0.7!
 
 [package.metadata.docs.rs]
 cargo-args = ["-Zunstable-options", "-Zrustdoc-scrape-examples"]

+ 1 - 74
packages/router-macro/src/lib.rs

@@ -475,82 +475,9 @@ impl RouteEnum {
             site_map,
         };
 
-        // If we're on the web, only the URL history is preserved between navigation. We need to warn the user that the segment is not present in the URL.
-        if cfg!(feature = "web") {
-            for variant in &data.variants {
-                for field in &variant.fields {
-                    if !myself.field_present_in_url(field.ident.as_ref().unwrap()) {
-                        return Err(syn::Error::new_spanned(
-                            field.ident.as_ref().unwrap(),
-                            format!("The `{}` field must be present in the url for the web history. You can include the field in the url by using the `#[route(\"/:{}\")]` attribute on the enum variant.", field.ident.as_ref().unwrap(), field.ident.as_ref().unwrap()),
-                        ));
-                    }
-                }
-            }
-        }
-
         Ok(myself)
     }
 
-    fn field_present_in_url(&self, field: &Ident) -> bool {
-        let mut from_route = false;
-
-        for nest in &self.nests {
-            if nest.dynamic_segments_names().any(|i| &i == field) {
-                from_route = true
-            }
-        }
-        for route in &self.endpoints {
-            match route {
-                RouteEndpoint::Route(route) => match &route.ty {
-                    RouteType::Child(child) => {
-                        if let Some(child) = child.ident.as_ref() {
-                            if child == "child" {
-                                from_route = true
-                            }
-                        }
-                    }
-                    RouteType::Leaf { .. } => {
-                        for segment in &route.segments {
-                            if segment.name().as_ref() == Some(field) {
-                                from_route = true
-                            }
-                        }
-                        if let Some(query) = &route.query {
-                            if query.contains_ident(field) {
-                                from_route = true
-                            }
-                        }
-                        if let Some(hash) = &route.hash {
-                            if hash.contains_ident(field) {
-                                from_route = true
-                            }
-                        }
-                    }
-                },
-                RouteEndpoint::Redirect(redirect) => {
-                    for segment in &redirect.segments {
-                        if segment.name().as_ref() == Some(field) {
-                            from_route = true
-                        }
-                    }
-                    if let Some(query) = &redirect.query {
-                        if query.contains_ident(field) {
-                            from_route = true
-                        }
-                    }
-                    if let Some(hash) = &redirect.hash {
-                        if hash.contains_ident(field) {
-                            from_route = true
-                        }
-                    }
-                }
-            }
-        }
-
-        from_route
-    }
-
     fn impl_display(&self) -> TokenStream2 {
         let mut display_match = Vec::new();
 
@@ -737,7 +664,7 @@ impl RouteEnum {
         // Collect all routes matches
         for route in &self.endpoints {
             if let RouteEndpoint::Route(route) = route {
-                matches.push(route.routable_match(&self.layouts, &self.nests));
+                matches.push(route.routable_match(&self.layouts, &self.nests, name));
             }
         }
 

+ 95 - 11
packages/router-macro/src/route.rs

@@ -64,7 +64,7 @@ pub(crate) struct Route {
 }
 
 impl Route {
-    pub fn parse(
+    pub(crate) fn parse(
         nests: Vec<NestId>,
         layouts: Vec<LayoutId>,
         variant: syn::Variant,
@@ -167,7 +167,7 @@ impl Route {
         })
     }
 
-    pub fn display_match(&self, nests: &[Nest]) -> TokenStream2 {
+    pub(crate) fn display_match(&self, nests: &[Nest]) -> TokenStream2 {
         let name = &self.route_name;
         let dynamic_segments = self.dynamic_segments();
         let write_query: Option<TokenStream2> = self.query.as_ref().map(|q| q.write());
@@ -211,7 +211,12 @@ impl Route {
         }
     }
 
-    pub fn routable_match(&self, layouts: &[Layout], nests: &[Nest]) -> TokenStream2 {
+    pub(crate) fn routable_match(
+        &self,
+        layouts: &[Layout],
+        nests: &[Nest],
+        router_name: &Ident,
+    ) -> TokenStream2 {
         let name = &self.route_name;
 
         let mut tokens = TokenStream2::new();
@@ -265,12 +270,91 @@ impl Route {
             RouteType::Leaf { component } => {
                 let dynamic_segments = self.dynamic_segments();
                 let dynamic_segments_from_route = self.dynamic_segments();
+
+                /*
+                The implementation of this is pretty gnarly/gross.
+
+                We achieve the bundle splitting by wrapping the incoming function in a new component
+                that suspends based on an internal lazy loader. This lets us use suspense features
+                without breaking the rules of hooks. The router derive is quite complex so this shoves
+                the complexity towards the "leaf" of the codegen rather to its core. In the future though,
+                the complexity towards the "leaf" of the codegen rather than its core. In the future though,
+                makes up nearly 30-40% of the binary size in the dioxus docsite.
+                */
+                use sha2::Digest;
+                let dynamic_segments_receiver = self.dynamic_segments();
+                let dynamic_segments_from_route_ = self.dynamic_segments();
+                let dynamic_segments_from_route__ = self.dynamic_segments();
+                let unique_identifier = base16::encode_lower(
+                    &sha2::Sha256::digest(format!("{name} {span:?}", span = name.span()))[..16],
+                );
+                let module_name = format_ident!("module{}{unique_identifier}", name).to_string();
+                let comp_name = format_ident!("route{}{unique_identifier}", name);
+
                 quote! {
                     #[allow(unused)]
                     (#last_index, Self::#name { #(#dynamic_segments,)* }) => {
-                        rsx! {
-                            #component {
-                                #(#dynamic_segments_from_route: #dynamic_segments_from_route,)*
+                        dioxus::config_macros::maybe_wasm_split! {
+                            if wasm_split {
+                                {
+                                    fn #comp_name(args: #router_name) -> Element {
+                                        match args {
+                                            #router_name::#name { #(#dynamic_segments_from_route_,)* } => {
+                                                rsx! {
+                                                    #component {
+                                                        #(#dynamic_segments_from_route__: #dynamic_segments_from_route__,)*
+                                                    }
+                                                }
+                                            }
+                                            _ => unreachable!()
+                                        }
+                                    }
+
+                                    #[component]
+                                    fn LoaderInner(args: NoPartialEq<#router_name>) -> Element {
+                                        static MODULE: wasm_split::LazyLoader<#router_name, Element> =
+                                            wasm_split::lazy_loader!(extern #module_name fn #comp_name(props: #router_name) -> Element);
+
+                                        use_resource(|| async move { MODULE.load().await }).suspend()?;
+                                        MODULE.call(args.0).unwrap()
+                                    }
+
+                                    struct NoPartialEq<T>(T);
+
+                                    impl<T: Clone> Clone for NoPartialEq<T> {
+                                        fn clone(&self) -> Self {
+                                            Self(self.0.clone())
+                                        }
+                                    }
+
+                                    impl<T: std::fmt::Display> std::fmt::Display for NoPartialEq<T> {
+                                        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+                                            self.0.fmt(f)
+                                        }
+                                    }
+
+                                    impl<T> PartialEq for NoPartialEq<T> {
+                                        fn eq(&self, _other: &Self) -> bool {
+                                            false
+                                        }
+                                    }
+
+                                    rsx! {
+                                        LoaderInner {
+                                            args: NoPartialEq(#router_name::#name { #(#dynamic_segments_receiver,)* } )
+                                        }
+                                    }
+                                }
+                            } else {
+                                {
+                                    rsx! {
+                                        #component {
+                                            #(#dynamic_segments_from_route: #dynamic_segments_from_route,)*
+                                        }
+                                    }
+                                }
                             }
                         }
                     }
@@ -287,7 +371,7 @@ impl Route {
         })
     }
 
-    pub fn construct(&self, nests: &[Nest], enum_name: Ident) -> TokenStream2 {
+    pub(crate) fn construct(&self, nests: &[Nest], enum_name: Ident) -> TokenStream2 {
         let segments = self.fields.iter().map(|(name, _)| {
             let mut from_route = false;
 
@@ -343,11 +427,11 @@ impl Route {
         }
     }
 
-    pub fn error_ident(&self) -> Ident {
+    pub(crate) fn error_ident(&self) -> Ident {
         format_ident!("{}ParseError", self.route_name)
     }
 
-    pub fn error_type(&self) -> TokenStream2 {
+    pub(crate) fn error_type(&self) -> TokenStream2 {
         let error_name = self.error_ident();
         let child_type = match &self.ty {
             RouteType::Child(field) => Some(&field.ty),
@@ -357,14 +441,14 @@ impl Route {
         create_error_type(&self.route, error_name, &self.segments, child_type)
     }
 
-    pub fn parse_query(&self) -> TokenStream2 {
+    pub(crate) fn parse_query(&self) -> TokenStream2 {
         match &self.query {
             Some(query) => query.parse(),
             None => quote! {},
         }
     }
 
-    pub fn parse_hash(&self) -> TokenStream2 {
+    pub(crate) fn parse_hash(&self) -> TokenStream2 {
         match &self.hash {
             Some(hash) => hash.parse(),
             None => quote! {},

+ 1 - 0
packages/router/Cargo.toml

@@ -21,6 +21,7 @@ rustversion = "1.0.17"
 
 [features]
 default = []
+wasm-split = []
 
 [dev-dependencies]
 axum = { workspace = true, features = ["ws"] }

+ 26 - 0
packages/router/README.md

@@ -97,6 +97,32 @@ fn BlogPost(blog_id: usize) -> Element {
 
 You need to enable the right features for the platform you're targeting since these are not determined automatically!
 
+## Bundle Splitting
+
+The Dioxus Router supports automatic bundle splitting along route variants. To enable this, you need to explicitly turn on the `wasm-split` feature on the dioxus-router crate:
+
+```toml
+[dependencies]
+dioxus = { version = "*", features = ["router", "wasm-split"] }
+dioxus-router = { version = "*", features = ["wasm-split"] }
+```
+
+Note that `wasm-split` must also be turned on in dioxus since the macro uses the re-exported `wasm-split` from the dioxus prelude.
+
+Enabling splitting disconnects the call graph, so running your app with a plain `dx serve` won't work. When serving with router splitting enabled, you need to pass `--experimental-wasm-split`:
+
+```sh
+dx serve --experimental-wasm-split
+```
+
+In practice, we recommend passing `dioxus-router?/wasm-split` as a feature only when bundling:
+
+```sh
+dx bundle --features "dioxus-router?/wasm-split" --experimental-wasm-split
+```
+
+Note that the router will call `.suspend()`, so you should add a `SuspenseBoundary` above the `Outlet` to prevent suspending the entire page.
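+
+For example, a minimal sketch (assuming `Route` is your app's route enum and `Layout` is a layout component referenced from it) of wrapping the `Outlet` in a `SuspenseBoundary`:
+
+```rust
+#[component]
+fn Layout() -> Element {
+    rsx! {
+        SuspenseBoundary {
+            // Shown while a split route's wasm chunk is still being fetched
+            fallback: |_| rsx! { "Loading..." },
+            Outlet::<Route> {}
+        }
+    }
+}
+```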
+
 ## Contributing
 
 - Report issues on our [issue tracker](https://github.com/dioxuslabs/dioxus/issues).

+ 10 - 0
packages/wasm-split/README.md

@@ -0,0 +1,10 @@
+# This folder contains the wasm-split sub-workspace
+
+wasm-split is a tool that allows you to split a wasm binary into multiple chunks that are lazily loaded on demand.
+
+This workspace is composed of:
+- the harness that we test against
+- the wasm-split user-facing crate
+- the wasm-split-macro crate, which is used to generate the wasm-split loader
+- the wasm-split-cli crate, which is imported by the dioxus-cli and used when building the wasm modules
+- the wasm-used crate, which provides a custom walrus `Used` struct that makes it easier to debug why an `emit_wasm` call might be failing
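+
+The user-facing crate is consumed roughly like this (a sketch based on the harness in this PR; the module and function names are illustrative):
+
+```rust
+use dioxus::prelude::*;
+
+// The function that gets split out into its own wasm module.
+fn render_split(_props: ()) -> Element {
+    rsx! { "Hello from the split module!" }
+}
+
+// Declare a lazy loader for it. "my_split_module" is just an illustrative module name.
+static LOADER: wasm_split::LazyLoader<(), Element> =
+    wasm_split::lazy_loader!(extern "my_split_module" fn render_split(props: ()) -> Element);
+
+#[component]
+fn SplitChild() -> Element {
+    // Suspend until the split module has been fetched and instantiated...
+    use_resource(|| async move { LOADER.load().await }).suspend()?;
+    // ...then call into it.
+    LOADER.call(()).unwrap()
+}
+```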

+ 16 - 0
packages/wasm-split/wasm-split-cli/Cargo.toml

@@ -0,0 +1,16 @@
+[package]
+name = "wasm-split-cli"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+anyhow = { workspace = true }
+itertools = { workspace = true }
+walrus = { workspace = true, features = ["parallel"] }
+wasmparser = { workspace = true }
+id-arena = { workspace = true }
+rayon = { workspace = true }
+tracing-subscriber = { workspace = true, features = ["env-filter", "fmt"] }
+tracing = { workspace = true }
+clap = { workspace = true, features = ["derive"] }
+wasm-used = { path = "../wasm-used" }

+ 4 - 0
packages/wasm-split/wasm-split-cli/data/.gitignore

@@ -0,0 +1,4 @@
+# this folder contains the wasm output for the test harness, so ignore it
+
+*.wasm
+bindgen/

+ 63 - 0
packages/wasm-split/wasm-split-cli/src/__wasm_split.js

@@ -0,0 +1,63 @@
+// When running the harness, we need to make sure to uncomment this...
+
+export function makeLoad(url, deps, fusedImports, initIt) {
+  let alreadyLoaded = false;
+  return async (callbackIndex, callbackData) => {
+    await Promise.all(deps.map((dep) => dep()));
+    if (alreadyLoaded) return;
+    try {
+      const response = await fetch(url);
+      const initSync = initIt || window.__wasm_split_main_initSync;
+      const mainExports = initSync(undefined, undefined);
+
+      let imports = {
+        env: {
+          memory: mainExports.memory,
+        },
+        __wasm_split: {
+          __indirect_function_table: mainExports.__indirect_function_table,
+          __stack_pointer: mainExports.__stack_pointer,
+          __tls_base: mainExports.__tls_base,
+          memory: mainExports.memory,
+        },
+      };
+
+      for (let mainExport in mainExports) {
+        imports["__wasm_split"][mainExport] = mainExports[mainExport];
+      }
+
+      for (let name in fusedImports) {
+        imports["__wasm_split"][name] = fusedImports[name];
+      }
+
+      let new_exports = await WebAssembly.instantiateStreaming(
+        response,
+        imports
+      );
+
+      alreadyLoaded = true;
+
+      for (let name in new_exports.instance.exports) {
+        fusedImports[name] = new_exports.instance.exports[name];
+      }
+
+      if (callbackIndex !== undefined) {
+        mainExports.__indirect_function_table.get(callbackIndex)(
+          callbackData,
+          true
+        );
+      }
+    } catch (e) {
+      console.error(
+        "Failed to load wasm-split module",
+        e,
+        url,
+        deps,
+        fusedImports
+      );
+      return;
+    }
+  };
+}
+
+let fusedImports = {};

+ 1535 - 0
packages/wasm-split/wasm-split-cli/src/lib.rs

@@ -0,0 +1,1535 @@
+use anyhow::{Context, Result};
+use itertools::Itertools;
+use rayon::prelude::{IntoParallelIterator, ParallelIterator};
+use std::{
+    collections::{BTreeMap, BTreeSet, HashMap, HashSet, VecDeque},
+    hash::Hash,
+    ops::Range,
+    sync::{Arc, RwLock},
+};
+use walrus::{
+    ir::{self, dfs_in_order, Visitor},
+    ConstExpr, DataKind, ElementItems, ElementKind, ExportId, ExportItem, FunctionBuilder,
+    FunctionId, FunctionKind, GlobalKind, ImportId, ImportKind, Module, ModuleConfig, RefType,
+    TableId, TypeId,
+};
+use wasmparser::{
+    BinaryReader, Linking, LinkingSectionReader, Payload, RelocSectionReader, RelocationEntry,
+    SymbolInfo,
+};
+
+pub const MAKE_LOAD_JS: &str = include_str!("./__wasm_split.js");
+
+/// A parsed wasm module with additional metadata and functionality for splitting and patching.
+///
+/// This struct assumes that relocations will be present in the incoming wasm binary.
+/// Upon construction, all the required metadata is gathered.
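+///
+/// Rough usage (a sketch; the two arguments are the raw `.wasm` bytes from before and after wasm-bindgen):
+///
+/// ```ignore
+/// let splitter = Splitter::new(&original_bytes, &bindgened_bytes)?;
+/// let output = splitter.emit()?;
+/// ```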
+pub struct Splitter<'a> {
+    /// The original module we use as a reference
+    source_module: Module,
+
+    // The byte sources of the pre and post wasm-bindgen .wasm files
+    // We need the original around since wasm-bindgen ruins the relocation locations.
+    original: &'a [u8],
+    bindgened: &'a [u8],
+
+    // Mapping of indices of source functions
+    // This lets us use a much faster approach to emitting split modules simply by maintaining a mapping
+    // between the original Module and the new Module. Ideally we could just index the new module
+    // with old FunctionIds but the underlying IndexMap actually checks that a key belongs to a particular
+    // arena.
+    fns_to_ids: HashMap<FunctionId, usize>,
+    _ids_to_fns: Vec<FunctionId>,
+
+    shared_symbols: BTreeSet<Node>,
+    split_points: Vec<SplitPoint>,
+    chunks: Vec<HashSet<Node>>,
+    data_symbols: BTreeMap<usize, DataSymbol>,
+    main_graph: HashSet<Node>,
+    call_graph: HashMap<Node, HashSet<Node>>,
+    parent_graph: HashMap<Node, HashSet<Node>>,
+}
+
+/// The results of splitting the wasm module with some additional metadata for later use.
+pub struct OutputModules {
+    /// The main chunk
+    pub main: SplitModule,
+
+    /// The modules of the wasm module that were split.
+    pub modules: Vec<SplitModule>,
+
+    /// The chunks that might be imported by the main modules
+    pub chunks: Vec<SplitModule>,
+}
+
+/// A wasm module that was split from the main module.
+///
+/// All IDs here correspond to *this* module - not the parent main module
+pub struct SplitModule {
+    pub module_name: String,
+    pub hash_id: Option<String>,
+    pub component_name: Option<String>,
+    pub bytes: Vec<u8>,
+    pub relies_on_chunks: HashSet<usize>,
+}
+
+impl<'a> Splitter<'a> {
+    /// Create a new "splitter" instance using the original wasm and the wasm from the output of wasm-bindgen.
+    ///
+    /// This will use the relocation data from the original module to create a call graph that we
+    /// then use with the post-bindgened module to create the split modules.
+    ///
+    /// It's important to compile the wasm with --emit-relocs such that the relocations are available
+    /// to construct the callgraph.
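+    /// (With a typical Rust build this means passing the flag through to the linker, e.g.
+    /// `RUSTFLAGS="-Clink-arg=--emit-relocs"`, though the exact invocation depends on the build setup.)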
+    pub fn new(original: &'a [u8], bindgened: &'a [u8]) -> Result<Self> {
+        let (module, ids, fns_to_ids) = parse_module_with_ids(bindgened)?;
+
+        let split_points = accumulate_split_points(&module);
+
+        // Note that we can't trust the normal symbols - just the data symbols - and we can't use the data offset
+        // since that's not reliable after bindgening
+        let raw_data = parse_bytes_to_data_segment(bindgened)?;
+
+        let mut module = Self {
+            source_module: module,
+            original,
+            bindgened,
+            split_points,
+            data_symbols: raw_data.data_symbols,
+            _ids_to_fns: ids,
+            fns_to_ids,
+            main_graph: Default::default(),
+            chunks: Default::default(),
+            call_graph: Default::default(),
+            parent_graph: Default::default(),
+            shared_symbols: Default::default(),
+        };
+
+        module.build_call_graph()?;
+        module.build_split_chunks();
+
+        Ok(module)
+    }
+
+    /// Split the module into multiple modules at the boundaries of split points.
+    ///
+    /// Note that the binaries might still be "large" at the end of this process. In practice, you
+    /// need to push these binaries through wasm-bindgen and wasm-opt to take advantage of the
+    /// optimizations and splitting. We perform a few steps like zero-ing out the data segments
+    /// that will only be removed by the memory-packing step of wasm-opt.
+    ///
+    /// This returns the list of chunks, an import map, and some javascript to link everything together.
+    pub fn emit(self) -> Result<OutputModules> {
+        tracing::info!("Emitting split modules.");
+
+        let chunks = (0..self.chunks.len())
+            .into_par_iter()
+            .map(|idx| self.emit_split_chunk(idx))
+            .collect::<Result<Vec<SplitModule>>>()?;
+
+        let modules = (0..self.split_points.len())
+            .into_par_iter()
+            .map(|idx| self.emit_split_module(idx))
+            .collect::<Result<Vec<SplitModule>>>()?;
+
+        // Emit the main module, consuming self since we take the source module out of it
+        let main = self.emit_main_module()?;
+
+        Ok(OutputModules {
+            modules,
+            chunks,
+            main,
+        })
+    }
+
+    /// Emit the main module.
+    ///
+    /// This will analyze the call graph and then perform some transformations on the module.
+    /// - Clear out active segments that the split modules will initialize
+    /// - Wipe away unused functions and data symbols
+    /// - Re-export the memories, globals, and other items that the split modules will need
+    /// - Convert the split module import functions to real functions that call the indirect function
+    ///
+    /// Once this is done, all the split module functions will have been removed, making the main module smaller.
+    ///
+    /// Emitting the main module is conceptually pretty simple. Emitting the split modules is more
+    /// complex.
+    fn emit_main_module(mut self) -> Result<SplitModule> {
+        tracing::info!("Emitting main bundle split module");
+
+        // Perform some analysis of the module before we start messing with it
+        let unused_symbols = self.unused_main_symbols();
+
+        // Use the original module that contains all the right ids
+        let mut out = std::mem::take(&mut self.source_module);
+
+        // 1. Clear out the active segments that try to initialize functions for modules we just split off.
+        //    When the side modules load, they will initialize functions into the table where the "holes" are.
+        self.replace_segments_with_holes(&mut out, &unused_symbols);
+
+        // 2. Wipe away the unused functions and data symbols
+        self.prune_main_symbols(&mut out, &unused_symbols)?;
+
+        // 3. Change the functions called from split modules to be local functions that call the indirect function
+        self.create_ifunc_table(&mut out);
+
+        // 4. Re-export the memories, globals, and other stuff
+        self.re_export_items(&mut out);
+
+        // 5. Remove the reloc and linking custom sections
+        self.remove_custom_sections(&mut out);
+
+        // 6. Run the garbage collector to remove unused functions
+        walrus::passes::gc::run(&mut out);
+
+        Ok(SplitModule {
+            module_name: "main".to_string(),
+            component_name: None,
+            bytes: out.emit_wasm(),
+            relies_on_chunks: Default::default(),
+            hash_id: None,
+        })
+    }
+
+    /// Write the contents of the split modules to the output
+    fn emit_split_module(&self, split_idx: usize) -> Result<SplitModule> {
+        let split = self.split_points[split_idx].clone();
+
+        // These are the symbols that will only exist in this module and not in the main module.
+        let mut unique_symbols = split
+            .reachable_graph
+            .difference(&self.main_graph)
+            .cloned()
+            .collect::<HashSet<_>>();
+
+        // The functions we'll need to import
+        let mut symbols_to_import: HashSet<_> = split
+            .reachable_graph
+            .intersection(&self.main_graph)
+            .cloned()
+            .collect();
+
+        // Identify the functions we'll delete
+        let symbols_to_delete: HashSet<_> = self
+            .main_graph
+            .difference(&split.reachable_graph)
+            .cloned()
+            .collect();
+
+        // Convert split chunk functions to imports
+        let mut relies_on_chunks = HashSet::new();
+        for (idx, chunk) in self.chunks.iter().enumerate() {
+            let nodes_to_extract = unique_symbols
+                .intersection(chunk)
+                .cloned()
+                .collect::<Vec<_>>();
+            for node in nodes_to_extract {
+                if !self.main_graph.contains(&node) {
+                    unique_symbols.remove(&node);
+                    symbols_to_import.insert(node);
+                    relies_on_chunks.insert(idx);
+                }
+            }
+        }
+
+        tracing::info!(
+            "Emitting module {}/{} {}: {:?}",
+            split_idx,
+            self.split_points.len(),
+            split.module_name,
+            relies_on_chunks
+        );
+
+        let (mut out, ids_to_fns, _fns_to_ids) = parse_module_with_ids(self.bindgened)?;
+
+        // Remap the graph to our module's IDs
+        let shared_funcs = self
+            .shared_symbols
+            .iter()
+            .map(|f| self.remap_id(&ids_to_fns, f))
+            .collect::<Vec<_>>();
+
+        let unique_symbols = self.remap_ids(&unique_symbols, &ids_to_fns);
+        let symbols_to_delete = self.remap_ids(&symbols_to_delete, &ids_to_fns);
+        let symbols_to_import = self.remap_ids(&symbols_to_import, &ids_to_fns);
+        let split_export_func = ids_to_fns[self.fns_to_ids[&split.export_func]];
+
+        // Do some basic cleanup of the module to make it smaller
+        // This removes exports, imports, and the start function
+        self.prune_split_module(&mut out);
+
+        // Clear away the data segments
+        self.clear_data_segments(&mut out, &unique_symbols);
+
+        // Clear out the element segments and then add in the initializers for the shared imports
+        self.create_ifunc_initialzers(&mut out, &unique_symbols);
+
+        // Convert our split module's functions to real functions that call the indirect function
+        self.add_split_imports(
+            &mut out,
+            split.index,
+            split_export_func,
+            split.export_name,
+            &symbols_to_import,
+            &shared_funcs,
+        );
+
+        // Delete all the functions that are not reachable from the main module
+        self.delete_main_funcs_from_split(&mut out, &symbols_to_delete);
+
+        // Remove the reloc and linking custom sections
+        self.remove_custom_sections(&mut out);
+
+        // Run the gc to remove unused functions - also validates the module to ensure we can emit it properly
+        // todo(jon): prefer to delete the items as we go so we don't need to run a gc pass - it's quite slow
+        walrus::passes::gc::run(&mut out);
+
+        Ok(SplitModule {
+            bytes: out.emit_wasm(),
+            module_name: split.module_name.clone(),
+            component_name: Some(split.component_name.clone()),
+            relies_on_chunks,
+            hash_id: Some(split.hash_name.clone()),
+        })
+    }
+
+    /// Write a split chunk - this is a chunk with no special functions, just exports + initializers
+    fn emit_split_chunk(&self, idx: usize) -> Result<SplitModule> {
+        tracing::info!("emitting chunk {}", idx);
+
+        let unique_symbols = &self.chunks[idx];
+
+        // The functions we'll need to import
+        let symbols_to_import: HashSet<_> = unique_symbols
+            .intersection(&self.main_graph)
+            .cloned()
+            .collect();
+
+        // Delete everything except the symbols that are reachable from this module
+        let symbols_to_delete: HashSet<_> = self
+            .main_graph
+            .difference(unique_symbols)
+            .cloned()
+            .collect();
+
+        // Make sure to remap any ids from the main module to this module
+        let (mut out, ids_to_fns, _fns_to_ids) = parse_module_with_ids(self.bindgened)?;
+
+        // Remap the graph to our module's IDs
+        let shared_funcs = self
+            .shared_symbols
+            .iter()
+            .map(|f| self.remap_id(&ids_to_fns, f))
+            .collect::<Vec<_>>();
+
+        let unique_symbols = self.remap_ids(unique_symbols, &ids_to_fns);
+        let symbols_to_import = self.remap_ids(&symbols_to_import, &ids_to_fns);
+        let symbols_to_delete = self.remap_ids(&symbols_to_delete, &ids_to_fns);
+
+        self.prune_split_module(&mut out);
+
+        // Clear away the data segments
+        self.clear_data_segments(&mut out, &unique_symbols);
+
+        // Clear out the element segments and then add in the initializers for the shared imports
+        self.create_ifunc_initialzers(&mut out, &unique_symbols);
+
+        // We have to make sure our table layout matches that of the other modules even though we don't call them.
+        let ifunc_table_id = self.load_funcref_table(&mut out);
+        let segment_start = self
+            .expand_ifunc_table_max(
+                &mut out,
+                ifunc_table_id,
+                self.split_points.len() + shared_funcs.len(),
+            )
+            .unwrap();
+
+        self.convert_shared_to_imports(&mut out, segment_start, &shared_funcs, &symbols_to_import);
+
+        // Make sure we haven't deleted anything important....
+        self.delete_main_funcs_from_split(&mut out, &symbols_to_delete);
+
+        // Remove the reloc and linking custom sections
+        self.remove_custom_sections(&mut out);
+
+        // Run the gc to remove unused functions - also validates the module to ensure we can emit it properly
+        walrus::passes::gc::run(&mut out);
+
+        Ok(SplitModule {
+            bytes: out.emit_wasm(),
+            module_name: "split".to_string(),
+            component_name: None,
+            relies_on_chunks: Default::default(),
+            hash_id: None,
+        })
+    }
+
+    /// Convert functions coming in from outside the module to indirect calls to the ifunc table created in the main module
+    fn convert_shared_to_imports(
+        &self,
+        out: &mut Module,
+        segment_start: usize,
+        ifuncs: &Vec<Node>,
+        symbols_to_import: &HashSet<Node>,
+    ) {
+        let ifunc_table_id = self.load_funcref_table(out);
+
+        let mut idx = self.split_points.len();
+        for node in ifuncs {
+            if let Node::Function(ifunc) = node {
+                if symbols_to_import.contains(node) {
+                    let ty_id = out.funcs.get(*ifunc).ty();
+                    let stub = (idx + segment_start) as _;
+                    out.funcs.get_mut(*ifunc).kind =
+                        self.make_stub_funcs(out, ifunc_table_id, ty_id, stub);
+                }
+
+                idx += 1;
+            }
+        }
+    }
+
+    /// Convert split import functions to local functions that call an indirect function that will
+    /// be filled in from the loaded split module.
+    ///
+    /// This is because these imports are going to be delayed until the split module is loaded
+    /// and loading in the main module these as imports won't be possible since the imports won't
+    /// be resolved until the split module is loaded.
+    fn create_ifunc_table(&self, out: &mut Module) {
+        let ifunc_table = self.load_funcref_table(out);
+        let dummy_func = self.make_dummy_func(out);
+
+        out.exports.add("__indirect_function_table", ifunc_table);
+
+        // Expand the ifunc table to accommodate the new ifuncs
+        let segment_start = self
+            .expand_ifunc_table_max(
+                out,
+                ifunc_table,
+                self.split_points.len() + self.shared_symbols.len(),
+            )
+            .expect("failed to expand ifunc table");
+
+        // Delete the split import functions and replace them with local functions
+        //
+        // Build up the list of ifuncs, starting with the split entry points.
+        // The shared imports are appended afterwards and don't require an additional stub function.
+        let mut ifuncs = vec![];
+
+        // Push the split import functions into the list
+        for idx in 0..self.split_points.len() {
+            // this is okay since we're in the main module
+            let import_func = self.split_points[idx].import_func;
+            let import_id = self.split_points[idx].import_id;
+            let ty_id = out.funcs.get(import_func).ty();
+            let stub_idx = segment_start + ifuncs.len();
+
+            // Replace the import function with a local function that calls the indirect function
+            out.funcs.get_mut(import_func).kind =
+                self.make_stub_funcs(out, ifunc_table, ty_id, stub_idx as _);
+
+            // And remove the corresponding import
+            out.imports.delete(import_id);
+
+            // Push into the list the properly typed dummy func so the entry is populated
+            // unclear if the typing is important here
+            ifuncs.push(dummy_func);
+        }
+
+        // Add the stub functions to the ifunc table
+        // The callers of these functions will call the stub instead of the import
+        let mut _idx = 0;
+        for func in self.shared_symbols.iter() {
+            if let Node::Function(id) = func {
+                ifuncs.push(*id);
+                _idx += 1;
+            }
+        }
+
+        // Now add segments to the ifunc table
+        out.tables
+            .get_mut(ifunc_table)
+            .elem_segments
+            .insert(out.elements.add(
+                ElementKind::Active {
+                    table: ifunc_table,
+                    offset: ConstExpr::Value(ir::Value::I32(segment_start as _)),
+                },
+                ElementItems::Functions(ifuncs),
+            ));
+    }
+
+    /// Re-export the memories, globals, and other items from the main module to the side modules
+    fn re_export_items(&self, out: &mut Module) {
+        // Re-export memories
+        for (idx, memory) in out.memories.iter().enumerate() {
+            let name = memory
+                .name
+                .clone()
+                .unwrap_or_else(|| format!("__memory_{}", idx));
+            out.exports.add(&name, memory.id());
+        }
+
+        // Re-export globals
+        for (idx, global) in out.globals.iter().enumerate() {
+            let global_name = format!("__global__{idx}");
+            out.exports.add(&global_name, global.id());
+        }
+
+        // Export any tables
+        for (idx, table) in out.tables.iter().enumerate() {
+            if table.element_ty != RefType::Funcref {
+                let table_name = format!("__imported_table_{}", idx);
+                out.exports.add(&table_name, table.id());
+            }
+        }
+    }
+
+    fn prune_main_symbols(&self, out: &mut Module, unused_symbols: &HashSet<Node>) -> Result<()> {
+        // Wipe the split point exports
+        for split in self.split_points.iter() {
+            // it's okay that we're not re-mapping IDs since this is just used by the main module
+            out.exports.delete(split.export_id);
+        }
+
+        // And then any actual symbols from the callgraph
+        for symbol in unused_symbols.iter().cloned() {
+            match symbol {
+                // Simply delete functions
+                Node::Function(id) => {
+                    out.funcs.delete(id);
+                }
+
+                // Otherwise, zero out the data segment, which should lead to elimination by wasm-opt
+                Node::DataSymbol(id) => {
+                    let symbol = self
+                        .data_symbols
+                        .get(&id)
+                        .context("Failed to find data symbol")?;
+
+                    // VERY IMPORTANT
+                    //
+                    // apparently wasm-bindgen creates data segments other than the main one, and
+                    // even *touching* those will break the vtable / binding layer
+                    // We can only interact with the first data segment - the rest need to stay available
+                    // for the `.js` to interact with.
+                    if symbol.which_data_segment == 0 {
+                        let data_id = out.data.iter().nth(symbol.which_data_segment).unwrap().id();
+                        let data = out.data.get_mut(data_id);
+                        for i in symbol.segment_offset..symbol.segment_offset + symbol.symbol_size {
+                            data.value[i] = 0;
+                        }
+                    }
+                }
+            }
+        }
+
+        Ok(())
+    }
+
+    // 2.1 Create a dummy func that will be overridden later as modules pop in
+    // 2.2 swap the segment entries with the dummy func, leaving a hole in its place that will be filled in later
+    fn replace_segments_with_holes(&self, out: &mut Module, unused_symbols: &HashSet<Node>) {
+        let dummy_func = self.make_dummy_func(out);
+        for element in out.elements.iter_mut() {
+            match &mut element.items {
+                ElementItems::Functions(vec) => {
+                    for item in vec.iter_mut() {
+                        if unused_symbols.contains(&Node::Function(*item)) {
+                            *item = dummy_func;
+                        }
+                    }
+                }
+                ElementItems::Expressions(_ref_type, const_exprs) => {
+                    for item in const_exprs.iter_mut() {
+                        if let &mut ConstExpr::RefFunc(id) = item {
+                            if unused_symbols.contains(&Node::Function(id)) {
+                                *item = ConstExpr::RefFunc(dummy_func);
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    /// Creates the jump points
+    fn create_ifunc_initialzers(&self, out: &mut Module, unique_symbols: &HashSet<Node>) {
+        let ifunc_table = self.load_funcref_table(out);
+
+        let mut initializers = HashMap::new();
+        for segment in out.elements.iter_mut() {
+            let ElementKind::Active { offset, .. } = &mut segment.kind else {
+                continue;
+            };
+
+            let ConstExpr::Value(ir::Value::I32(offset)) = offset else {
+                continue;
+            };
+
+            match &segment.items {
+                ElementItems::Functions(vec) => {
+                    for (idx, id) in vec.iter().enumerate() {
+                        if unique_symbols.contains(&Node::Function(*id)) {
+                            initializers
+                                .insert(*offset + idx as i32, ElementItems::Functions(vec![*id]));
+                        }
+                    }
+                }
+
+                ElementItems::Expressions(ref_type, const_exprs) => {
+                    for (idx, expr) in const_exprs.iter().enumerate() {
+                        if let ConstExpr::RefFunc(id) = expr {
+                            if unique_symbols.contains(&Node::Function(*id)) {
+                                initializers.insert(
+                                    *offset + idx as i32,
+                                    ElementItems::Expressions(
+                                        *ref_type,
+                                        vec![ConstExpr::RefFunc(*id)],
+                                    ),
+                                );
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        // Wipe away references to these segments
+        for table in out.tables.iter_mut() {
+            table.elem_segments.clear();
+        }
+
+        // Wipe away the element segments themselves
+        let segments_to_delete: Vec<_> = out.elements.iter().map(|e| e.id()).collect();
+        for id in segments_to_delete {
+            out.elements.delete(id);
+        }
+
+        // Add in our new segments
+        let ifunc_table_ = out.tables.get_mut(ifunc_table);
+        for (offset, items) in initializers {
+            let kind = ElementKind::Active {
+                table: ifunc_table,
+                offset: ConstExpr::Value(ir::Value::I32(offset)),
+            };
+
+            ifunc_table_
+                .elem_segments
+                .insert(out.elements.add(kind, items));
+        }
+    }
+
+    fn add_split_imports(
+        &self,
+        out: &mut Module,
+        split_idx: usize,
+        split_export_func: FunctionId,
+        split_export_name: String,
+        symbols_to_import: &HashSet<Node>,
+        ifuncs: &Vec<Node>,
+    ) {
+        let ifunc_table_id = self.load_funcref_table(out);
+        let segment_start = self
+            .expand_ifunc_table_max(out, ifunc_table_id, self.split_points.len() + ifuncs.len())
+            .unwrap();
+
+        // Make sure to re-export the split func
+        out.exports.add(&split_export_name, split_export_func);
+
+        // Add the elements back to the table
+        out.tables
+            .get_mut(ifunc_table_id)
+            .elem_segments
+            .insert(out.elements.add(
+                ElementKind::Active {
+                    table: ifunc_table_id,
+                    offset: ConstExpr::Value(ir::Value::I32((segment_start + split_idx) as i32)),
+                },
+                ElementItems::Functions(vec![split_export_func]),
+            ));
+
+        self.convert_shared_to_imports(out, segment_start, ifuncs, symbols_to_import);
+    }
+
+    fn delete_main_funcs_from_split(&self, out: &mut Module, symbols_to_delete: &HashSet<Node>) {
+        for node in symbols_to_delete {
+            if let Node::Function(id) = *node {
+                // if out.exports.get_exported_func(id).is_none() {
+                out.funcs.delete(id);
+                // }
+            }
+        }
+    }
+
+    /// Remove un-needed items from the split module and convert its tables, memories, and globals into imports from the main module
+    fn prune_split_module(&self, out: &mut Module) {
+        // Clear the module's start/main
+        if let Some(start) = out.start.take() {
+            if let Some(export) = out.exports.get_exported_func(start) {
+                out.exports.delete(export.id());
+            }
+        }
+
+        // We're going to import the funcref table, so wipe it altogether
+        for table in out.tables.iter_mut() {
+            table.elem_segments.clear();
+        }
+
+        // Wipe all our imports - we're going to use a different set of imports
+        let all_imports: HashSet<_> = out.imports.iter().map(|i| i.id()).collect();
+        for import_id in all_imports {
+            out.imports.delete(import_id);
+        }
+
+        // Wipe away memories
+        let all_memories: Vec<_> = out.memories.iter().map(|m| m.id()).collect();
+        for memory_id in all_memories {
+            out.memories.get_mut(memory_id).data_segments.clear();
+        }
+
+        // Wipe away the existing exports - the split export gets re-added later
+        let exports = out.exports.iter().map(|e| e.id()).collect::<Vec<_>>();
+        for export_id in exports {
+            out.exports.delete(export_id);
+        }
+
+        // Convert the tables to imports.
+        // Should be as simple as adding a new import and then writing the `.import` field
+        for (idx, table) in out.tables.iter_mut().enumerate() {
+            let name = table.name.clone().unwrap_or_else(|| {
+                if table.element_ty == RefType::Funcref {
+                    "__indirect_function_table".to_string()
+                } else {
+                    format!("__imported_table_{}", idx)
+                }
+            });
+            let import = out.imports.add("__wasm_split", &name, table.id());
+            table.import = Some(import);
+        }
+
+        // Convert the memories to imports
+        // Should be as simple as adding a new import and then writing the `.import` field
+        for (idx, memory) in out.memories.iter_mut().enumerate() {
+            let name = memory
+                .name
+                .clone()
+                .unwrap_or_else(|| format!("__memory_{}", idx));
+            let import = out.imports.add("__wasm_split", &name, memory.id());
+            memory.import = Some(import);
+        }
+
+        // Convert the globals to imports
+        // We might not use every global, but unused ones get cleaned up later
+        let global_ids: Vec<_> = out.globals.iter().map(|t| t.id()).collect();
+        for (idx, global_id) in global_ids.into_iter().enumerate() {
+            let global = out.globals.get_mut(global_id);
+            let global_name = format!("__global__{idx}");
+            let import = out.imports.add("__wasm_split", &global_name, global.id());
+            global.kind = GlobalKind::Import(import);
+        }
+    }
+
+    fn make_dummy_func(&self, out: &mut Module) -> FunctionId {
+        let mut b = FunctionBuilder::new(&mut out.types, &[], &[]);
+        b.name("dummy".into()).func_body().unreachable();
+        b.finish(vec![], &mut out.funcs)
+    }
+
+    fn clear_data_segments(&self, out: &mut Module, unique_symbols: &HashSet<Node>) {
+        // Preserve the data symbols for this module and then clear them away
+        let data_ids: Vec<_> = out.data.iter().map(|t| t.id()).collect();
+        for (idx, data_id) in data_ids.into_iter().enumerate() {
+            let data = out.data.get_mut(data_id);
+
+            // Take the data out of the vec - zeroing it out unless we patch it in manually
+            let contents = data.value.split_off(0);
+
+            // Zero out the non-primary data segments
+            if idx != 0 {
+                continue;
+            }
+
+            let DataKind::Active { memory, offset } = data.kind else {
+                continue;
+            };
+
+            let ConstExpr::Value(ir::Value::I32(data_offset)) = offset else {
+                continue;
+            };
+
+            // And then assign chunks of the data to new data entries that will override the individual slots
+            for unique in unique_symbols {
+                if let Node::DataSymbol(id) = unique {
+                    if let Some(symbol) = self.data_symbols.get(id) {
+                        if symbol.which_data_segment == idx {
+                            let range =
+                                symbol.segment_offset..symbol.segment_offset + symbol.symbol_size;
+                            let offset = ConstExpr::Value(ir::Value::I32(
+                                data_offset + symbol.segment_offset as i32,
+                            ));
+                            out.data.add(
+                                DataKind::Active { memory, offset },
+                                contents[range].to_vec(),
+                            );
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    /// Load the funcref table from the main module. This *should* exist for all modules created by
+    /// Rustc or Wasm-Bindgen, but we create it if it doesn't exist.
+    fn load_funcref_table(&self, out: &mut Module) -> TableId {
+        let ifunc_table = out
+            .tables
+            .iter()
+            .find(|t| t.element_ty == RefType::Funcref)
+            .map(|t| t.id());
+
+        if let Some(table) = ifunc_table {
+            table
+        } else {
+            out.tables.add_local(false, 0, None, RefType::Funcref)
+        }
+    }
+
+    /// Convert the imported function to a local function that calls an indirect function from the table
+    ///
+    /// This will enable the main module (and split modules) to call functions from outside their own module.
+    /// The functions might not exist when the main module is loaded, so we'll register some elements
+    /// that fill those in eventually.
+    fn make_stub_funcs(
+        &self,
+        out: &mut Module,
+        table: TableId,
+        ty_id: TypeId,
+        table_idx: i32,
+    ) -> FunctionKind {
+        // Convert the import function to a local function that calls the indirect function from the table
+        let ty = out.types.get(ty_id);
+
+        let params = ty.params().to_vec();
+        let results = ty.results().to_vec();
+        let args: Vec<_> = params.iter().map(|ty| out.locals.add(*ty)).collect();
+
+        // New function that calls the indirect function
+        let mut builder = FunctionBuilder::new(&mut out.types, &params, &results);
+        let mut body = builder.name("stub".into()).func_body();
+
+        // Push the params onto the stack
+        for arg in args.iter() {
+            body.local_get(*arg);
+        }
+
+        // And then the address of the indirect function
+        body.instr(ir::Instr::Const(ir::Const {
+            value: ir::Value::I32(table_idx),
+        }));
+
+        // And call it
+        body.instr(ir::Instr::CallIndirect(ir::CallIndirect {
+            ty: ty_id,
+            table,
+        }));
+
+        FunctionKind::Local(builder.local_func(args))
+    }
+
+    /// Expand the ifunc table to accommodate the new ifuncs
+    ///
+    /// returns the old maximum
+    fn expand_ifunc_table_max(
+        &self,
+        out: &mut Module,
+        table: TableId,
+        num_ifuncs: usize,
+    ) -> Option<usize> {
+        let ifunc_table_ = out.tables.get_mut(table);
+
+        if let Some(max) = ifunc_table_.maximum {
+            ifunc_table_.maximum = Some(max + num_ifuncs as u64);
+            ifunc_table_.initial += num_ifuncs as u64;
+            return Some(max as usize);
+        }
+
+        None
+    }
+
+    // only keep the target-features and names section so wasm-opt can use it to optimize the output
+    fn remove_custom_sections(&self, out: &mut Module) {
+        let sections_to_delete = out
+            .customs
+            .iter()
+            .filter_map(|(id, section)| {
+                if section.name() == "target_features" {
+                    None
+                } else {
+                    Some(id)
+                }
+            })
+            .collect::<Vec<_>>();
+
+        for id in sections_to_delete {
+            out.customs.delete(id);
+        }
+    }
+
+    /// Accumulate any shared funcs between multiple chunks into a single residual chunk.
+    /// This prevents duplicates from being downloaded.
+    /// Eventually we need to group the chunks into smarter "communities" - e.g. via the Louvain algorithm.
+    ///
+    /// Todo: we could chunk up the main module itself! We're not doing that now, but it would enable parallel downloads of the main chunk.
+    fn build_split_chunks(&mut self) {
+        // create a single chunk that contains all functions used by multiple modules
+        let mut funcs_used_by_chunks: HashMap<Node, HashSet<usize>> = HashMap::new();
+        for split in self.split_points.iter() {
+            for item in split.reachable_graph.iter() {
+                if self.main_graph.contains(item) {
+                    continue;
+                }
+                // Record which split modules reach this symbol
+                funcs_used_by_chunks
+                    .entry(item.clone())
+                    .or_default()
+                    .insert(split.index);
+            }
+        }
+
+        // Only consider funcs that are used by multiple modules - otherwise they can just stay in their respective module
+        funcs_used_by_chunks.retain(|_, v| v.len() > 1);
+
+        // todo: break down this chunk if it exceeds a certain size (100kb?) by identifying different groups
+
+        self.chunks
+            .push(funcs_used_by_chunks.keys().cloned().collect());
+    }
+
+    fn unused_main_symbols(&self) -> HashSet<Node> {
+        self.split_points
+            .iter()
+            .flat_map(|split| split.reachable_graph.iter())
+            .filter(|sym| {
+                // Make sure the symbol isn't in the main graph
+                if self.main_graph.contains(sym) {
+                    return false;
+                }
+
+                // And ensure we aren't also exporting it
+                match sym {
+                    Node::Function(u) => self.source_module.exports.get_exported_func(*u).is_none(),
+                    _ => true,
+                }
+            })
+            .cloned()
+            .collect()
+    }
+
+    /// Accumulate the relocations from the original module, create a relocation map, and then convert
+    /// that to our *new* module's symbols.
+    fn build_call_graph(&mut self) -> Result<()> {
+        let original = ModuleWithRelocations::new(self.original)?;
+
+        let old_names: HashMap<String, FunctionId> = original
+            .module
+            .funcs
+            .iter()
+            .flat_map(|f| Some((f.name.clone()?, f.id())))
+            .collect();
+
+        let new_names: HashMap<String, FunctionId> = self
+            .source_module
+            .funcs
+            .iter()
+            .flat_map(|f| Some((f.name.clone()?, f.id())))
+            .collect();
+
+        let mut old_to_new = HashMap::new();
+        let mut new_call_graph: HashMap<Node, HashSet<Node>> = HashMap::new();
+
+        for (new_name, new_func) in new_names.iter() {
+            if let Some(old_func) = old_names.get(new_name) {
+                old_to_new.insert(*old_func, new_func);
+            } else {
+                new_call_graph.insert(Node::Function(*new_func), HashSet::new());
+            }
+        }
+
+        let get_old = |old: &Node| -> Option<Node> {
+            match old {
+                Node::Function(id) => old_to_new.get(id).map(|new_id| Node::Function(**new_id)),
+                Node::DataSymbol(id) => Some(Node::DataSymbol(*id)),
+            }
+        };
+
+        // The symbols that we can't find in the original module touch functions that, unfortunately,
+        // we can't place precisely in the call graph.
+        //
+        // We're going to walk and find every child we possibly can and then add it to the call graph
+        // at the root.
+        //
+        // wasm-bindgen will dissolve describe functions into the shim functions, but we don't have a
+        // way of lining up old to new, so we just assume everything ends up in the main chunk.
+        let mut lost_children = HashSet::new();
+        self.call_graph = original
+            .call_graph
+            .iter()
+            .flat_map(|(old, children)| {
+                // If the old function isn't in the new module, we need to move all its descendants into the main chunk
+                let Some(new) = get_old(old) else {
+                    for child in children {
+                        fn descend(
+                            lost_children: &mut HashSet<Node>,
+                            old_graph: &HashMap<Node, HashSet<Node>>,
+                            node: Node,
+                        ) {
+                            if !lost_children.insert(node) {
+                                return;
+                            }
+
+                            if let Some(children) = old_graph.get(&node) {
+                                for child in children {
+                                    descend(lost_children, old_graph, *child);
+                                }
+                            }
+                        }
+
+                        descend(&mut lost_children, &original.call_graph, *child);
+                    }
+                    return None;
+                };
+
+                let mut new_children = HashSet::new();
+                for child in children {
+                    if let Some(new) = get_old(child) {
+                        new_children.insert(new);
+                    }
+                }
+
+                Some((new, new_children))
+            })
+            .collect();
+
+        let mut recovered_children = HashSet::new();
+        for lost in lost_children {
+            match lost {
+                // Functions need to be found - the wasm describe functions are usually completely dissolved
+                Node::Function(id) => {
+                    let func = original.module.funcs.get(id);
+                    let name = func.name.as_ref().unwrap();
+                    if let Some(entry) = new_names.get(name) {
+                        recovered_children.insert(Node::Function(*entry));
+                    }
+                }
+
+                // Data symbols are unchanged and fine to remap
+                Node::DataSymbol(id) => {
+                    recovered_children.insert(Node::DataSymbol(id));
+                }
+            }
+        }
+
+        // We're going to attach the recovered children to the main function
+        let main_fn = self.source_module.funcs.by_name("main").context("Failed to find `main` function - was this built with LTO, --emit-relocs, and debug symbols?")?;
+        let main_fn_entry = new_call_graph.entry(Node::Function(main_fn)).or_default();
+        main_fn_entry.extend(recovered_children);
+
+        // Also attach any truly new symbols to the main function. Usually these are the shim functions
+        for (name, new) in new_names.iter() {
+            if !old_names.contains_key(name) {
+                main_fn_entry.insert(Node::Function(*new));
+            }
+        }
+
+        // Walk the functions and try to patch up any holes in the call graph manually.
+        // This will attempt to resolve any of the new symbols, like the shim functions.
+        for func in self.source_module.funcs.iter() {
+            struct CallGrapher<'a> {
+                cur: FunctionId,
+                call_graph: &'a mut HashMap<Node, HashSet<Node>>,
+            }
+            impl<'a> Visitor<'a> for CallGrapher<'a> {
+                fn visit_function_id(&mut self, function: &walrus::FunctionId) {
+                    self.call_graph
+                        .entry(Node::Function(self.cur))
+                        .or_default()
+                        .insert(Node::Function(*function));
+                }
+            }
+            if let FunctionKind::Local(local) = &func.kind {
+                let mut call_grapher = CallGrapher {
+                    cur: func.id(),
+                    call_graph: &mut self.call_graph,
+                };
+                dfs_in_order(&mut call_grapher, local, local.entry_block());
+            }
+        }
+
+        // Fill in the parent graph
+        for (parent, children) in self.call_graph.iter() {
+            for child in children {
+                self.parent_graph.entry(*child).or_default().insert(*parent);
+            }
+        }
+
+        // Now go fill in the reachability graph for each of the split points
+        // We want to be as narrow as possible since we've reparented any new symbols to the main module
+        self.split_points.iter_mut().for_each(|split| {
+            let roots: HashSet<_> = [Node::Function(split.export_func)].into();
+            split.reachable_graph = reachable_graph(&self.call_graph, &roots);
+        });
+
+        // And then the reachability graph for main
+        self.main_graph = reachable_graph(&self.call_graph, &self.main_roots());
+
+        // And then the symbols shared between all
+        self.shared_symbols = {
+            let mut shared_funcs = HashSet::new();
+
+            // Add all the symbols shared between the various modules
+            for split in self.split_points.iter() {
+                shared_funcs.extend(self.main_graph.intersection(&split.reachable_graph));
+            }
+
+            // And then all our imports will be callable via the ifunc table too
+            for import in self.source_module.imports.iter() {
+                if let ImportKind::Function(id) = import.kind {
+                    shared_funcs.insert(Node::Function(id));
+                }
+            }
+
+            // Make sure to make this *ordered*
+            shared_funcs.into_iter().collect()
+        };
+
+        Ok(())
+    }
+
+    fn main_roots(&self) -> HashSet<Node> {
+        // Accumulate all the split entrypoints
+        // This will include wasm_bindgen functions too
+        let exported_splits = self
+            .split_points
+            .iter()
+            .map(|f| f.export_func)
+            .collect::<HashSet<_>>();
+
+        // The roots are the module's exports, minus the split entrypoints, plus the start function if present
+        let mut roots = self
+            .source_module
+            .exports
+            .iter()
+            .filter_map(|e| match e.item {
+                ExportItem::Function(id) if !exported_splits.contains(&id) => {
+                    Some(Node::Function(id))
+                }
+                _ => None,
+            })
+            .chain(self.source_module.start.map(Node::Function))
+            .collect::<HashSet<Node>>();
+
+        // Also add "imports" to the roots
+        for import in self.source_module.imports.iter() {
+            if let ImportKind::Function(id) = import.kind {
+                roots.insert(Node::Function(id));
+            }
+        }
+
+        roots
+    }
+
+    /// Convert this set of nodes to reference the new module
+    fn remap_ids(&self, set: &HashSet<Node>, ids_to_fns: &[FunctionId]) -> HashSet<Node> {
+        let mut out = HashSet::with_capacity(set.len());
+        for node in set {
+            out.insert(self.remap_id(ids_to_fns, node));
+        }
+        out
+    }
+
+    fn remap_id(&self, ids_to_fns: &[id_arena::Id<walrus::Function>], node: &Node) -> Node {
+        match node {
+            // Remap the function IDs
+            Node::Function(id) => Node::Function(ids_to_fns[self.fns_to_ids[id]]),
+            // data symbols don't need remapping
+            Node::DataSymbol(id) => Node::DataSymbol(*id),
+        }
+    }
+}
+
+/// Parse a module and return the mapping of index to FunctionId.
+/// We'll use this mapping to remap function IDs between the pre- and post-bindgen modules.
+fn parse_module_with_ids(
+    bindgened: &[u8],
+) -> Result<(Module, Vec<FunctionId>, HashMap<FunctionId, usize>)> {
+    let ids = Arc::new(RwLock::new(Vec::new()));
+    let ids_ = ids.clone();
+    let module = Module::from_buffer_with_config(
+        bindgened,
+        ModuleConfig::new().on_parse(move |_m, our_ids| {
+            let mut ids = ids_.write().expect("No shared writers");
+            let mut idx = 0;
+            while let Ok(entry) = our_ids.get_func(idx) {
+                ids.push(entry);
+                idx += 1;
+            }
+
+            Ok(())
+        }),
+    )?;
+    let mut ids_ = ids.write().expect("No shared writers");
+    let mut ids = vec![];
+    std::mem::swap(&mut ids, &mut *ids_);
+
+    let mut fns_to_ids = HashMap::new();
+    for (idx, id) in ids.iter().enumerate() {
+        fns_to_ids.insert(*id, idx);
+    }
+
+    Ok((module, ids, fns_to_ids))
+}
+
+struct ModuleWithRelocations<'a> {
+    module: Module,
+    symbols: Vec<SymbolInfo<'a>>,
+    names_to_funcs: HashMap<String, FunctionId>,
+    call_graph: HashMap<Node, HashSet<Node>>,
+    parents: HashMap<Node, HashSet<Node>>,
+    relocation_map: HashMap<Node, Vec<RelocationEntry>>,
+    data_symbols: BTreeMap<usize, DataSymbol>,
+    data_section_range: Range<usize>,
+}
+
+impl<'a> ModuleWithRelocations<'a> {
+    fn new(bytes: &'a [u8]) -> Result<Self> {
+        let module = Module::from_buffer(bytes)?;
+        let raw_data = parse_bytes_to_data_segment(bytes)?;
+        let names_to_funcs = module
+            .funcs
+            .iter()
+            .flat_map(|f| Some((f.name.clone()?, f.id())))
+            .collect();
+
+        let mut module = Self {
+            module,
+            data_symbols: raw_data.data_symbols,
+            data_section_range: raw_data.data_range,
+            symbols: raw_data.symbols,
+            names_to_funcs,
+            call_graph: Default::default(),
+            relocation_map: Default::default(),
+            parents: Default::default(),
+        };
+
+        module.build_code_call_graph()?;
+        module.build_data_call_graph()?;
+
+        for (func, children) in module.call_graph.iter() {
+            for child in children {
+                module.parents.entry(*child).or_default().insert(*func);
+            }
+        }
+
+        Ok(module)
+    }
+
+    fn build_code_call_graph(&mut self) -> Result<()> {
+        let codes_relocations = self.collect_relocations_from_section("reloc.CODE")?;
+        let mut relocations = codes_relocations.iter().peekable();
+
+        for (func_id, local) in self.module.funcs.iter_local() {
+            let range = local
+                .original_range
+                .clone()
+                .context("local function has no range")?;
+
+            // Walk with relocation
+            while let Some(entry) =
+                relocations.next_if(|entry| entry.relocation_range().start < range.end)
+            {
+                let reloc_range = entry.relocation_range();
+                assert!(reloc_range.start >= range.start);
+                assert!(reloc_range.end <= range.end);
+
+                if let Some(target) = self.get_symbol_dep_node(entry.index as usize)? {
+                    let us = Node::Function(func_id);
+                    self.call_graph.entry(us).or_default().insert(target);
+                    self.relocation_map.entry(us).or_default().push(*entry);
+                }
+            }
+        }
+
+        assert!(relocations.next().is_none());
+
+        Ok(())
+    }
+
+    fn build_data_call_graph(&mut self) -> Result<()> {
+        let data_relocations = self.collect_relocations_from_section("reloc.DATA")?;
+        let mut relocations = data_relocations.iter().peekable();
+
+        let symbols_sorted = self
+            .data_symbols
+            .values()
+            .sorted_by(|a, b| a.range.start.cmp(&b.range.start));
+
+        for symbol in symbols_sorted {
+            let start = symbol.range.start - self.data_section_range.start;
+            let end = symbol.range.end - self.data_section_range.start;
+            let range = start..end;
+
+            while let Some(entry) =
+                relocations.next_if(|entry| entry.relocation_range().start < range.end)
+            {
+                let reloc_range = entry.relocation_range();
+                assert!(reloc_range.start >= range.start);
+                assert!(reloc_range.end <= range.end);
+
+                if let Some(target) = self.get_symbol_dep_node(entry.index as usize)? {
+                    let dep = Node::DataSymbol(symbol.index);
+                    self.call_graph.entry(dep).or_default().insert(target);
+                    self.relocation_map.entry(dep).or_default().push(*entry);
+                }
+            }
+        }
+
+        assert!(relocations.next().is_none());
+
+        Ok(())
+    }
+
+    /// Accumulate all relocations from a section.
+    ///
+    /// Parses the section using the RelocSectionReader and returns a vector of relocation entries.
+    fn collect_relocations_from_section(&self, name: &str) -> Result<Vec<RelocationEntry>> {
+        let (_reloc_id, code_reloc) = self
+            .module
+            .customs
+            .iter()
+            .find(|(_, c)| c.name() == name)
+            .context("Module does not contain the reloc section")?;
+
+        let code_reloc_data = code_reloc.data(&Default::default());
+        let reader = BinaryReader::new(&code_reloc_data, 0);
+        let relocations = RelocSectionReader::new(reader)
+            .context("failed to parse reloc section")?
+            .entries()
+            .into_iter()
+            .flatten()
+            .collect();
+
+        Ok(relocations)
+    }
+
+    /// Get the symbol's corresponding entry in the call graph
+    ///
+    /// This might panic if the source module isn't built properly. Make sure to enable LTO and `--emit-relocs`
+    /// when building the source module.
+    fn get_symbol_dep_node(&self, index: usize) -> Result<Option<Node>> {
+        let res = match self.symbols[index] {
+            SymbolInfo::Data { .. } => Some(Node::DataSymbol(index)),
+            SymbolInfo::Func { name, .. } => Some(Node::Function(
+                *self
+                    .names_to_funcs
+                    .get(name.expect("local func symbol without name?"))
+                    .unwrap(),
+            )),
+
+            _ => None,
+        };
+
+        Ok(res)
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct SplitPoint {
+    module_name: String,
+    import_id: ImportId,
+    export_id: ExportId,
+    import_func: FunctionId,
+    export_func: FunctionId,
+    component_name: String,
+    index: usize,
+    reachable_graph: HashSet<Node>,
+    hash_name: String,
+
+    #[allow(unused)]
+    import_name: String,
+
+    #[allow(unused)]
+    export_name: String,
+}
+
+/// Search the module's imports and exports for functions marked as split points.
+///
+/// These will be in the form of:
+///
+/// __wasm_split_00___<module>___00_<import|export>_<hash>_<function>
+///
+/// For a function named `SomeRoute2` in the module `add_body_element`, the pairings would be:
+///
+/// __wasm_split_00___add_body_element___00_import_abef5ee3ebe66ff17677c56ee392b4c2_SomeRoute2
+/// __wasm_split_00___add_body_element___00_export_abef5ee3ebe66ff17677c56ee392b4c2_SomeRoute2
+///
+fn accumulate_split_points(module: &Module) -> Vec<SplitPoint> {
+    let mut index = 0;
+
+    module
+        .imports
+        .iter()
+        .sorted_by(|a, b| a.name.cmp(&b.name))
+        .flat_map(|import| {
+            if !import.name.starts_with("__wasm_split_00") {
+                return None;
+            }
+
+            let ImportKind::Function(import_func) = import.kind else {
+                return None;
+            };
+
+            // Parse the import name to get the module name, the hash, and the function name
+            let remain = import.name.trim_start_matches("__wasm_split_00___");
+            let (module_name, rest) = remain.split_once("___00").unwrap();
+            let (hash, fn_name) = rest.trim_start_matches("_import_").split_once("_").unwrap();
+
+            // Look for the export with the same name
+            let export_name =
+                format!("__wasm_split_00___{module_name}___00_export_{hash}_{fn_name}");
+            let export_func = module
+                .exports
+                .get_func(&export_name)
+                .expect("Could not find export");
+            let export = module.exports.get_exported_func(export_func).unwrap();
+
+            let our_index = index;
+            index += 1;
+
+            Some(SplitPoint {
+                export_id: export.id(),
+                import_id: import.id(),
+                module_name: module_name.to_string(),
+                import_name: import.name.clone(),
+                import_func,
+                export_func,
+                export_name,
+                hash_name: hash.to_string(),
+                component_name: fn_name.to_string(),
+                index: our_index,
+                reachable_graph: Default::default(),
+            })
+        })
+        .collect()
+}
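For reference, a minimal sketch (not part of this diff) showing how one of these paired names decomposes using the same string operations as `accumulate_split_points` above; the hash value is illustrative:

fn decompose_example() {
    let import = "__wasm_split_00___add_body_element___00_import_abef5ee3ebe66ff17677c56ee392b4c2_SomeRoute2";
    let remain = import.trim_start_matches("__wasm_split_00___");
    let (module_name, rest) = remain.split_once("___00").unwrap();
    let (hash, fn_name) = rest.trim_start_matches("_import_").split_once("_").unwrap();
    assert_eq!(module_name, "add_body_element");
    assert_eq!(hash, "abef5ee3ebe66ff17677c56ee392b4c2");
    assert_eq!(fn_name, "SomeRoute2");
}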
+
+#[derive(Debug, PartialEq, Eq, Hash, Copy, PartialOrd, Ord, Clone)]
+pub enum Node {
+    Function(FunctionId),
+    DataSymbol(usize),
+}
+
+fn reachable_graph(deps: &HashMap<Node, HashSet<Node>>, roots: &HashSet<Node>) -> HashSet<Node> {
+    let mut queue: VecDeque<Node> = roots.iter().copied().collect();
+    let mut reachable = HashSet::<Node>::new();
+    let mut parents = HashMap::<Node, Node>::new();
+
+    while let Some(node) = queue.pop_front() {
+        reachable.insert(node);
+        let Some(children) = deps.get(&node) else {
+            continue;
+        };
+        for child in children {
+            if reachable.contains(child) {
+                continue;
+            }
+            parents.entry(*child).or_insert(node);
+            queue.push_back(*child);
+        }
+    }
+
+    reachable
+}
+
+struct RawDataSection<'a> {
+    data_range: Range<usize>,
+    symbols: Vec<SymbolInfo<'a>>,
+    data_symbols: BTreeMap<usize, DataSymbol>,
+}
+
+#[derive(Debug)]
+struct DataSymbol {
+    index: usize,
+    range: Range<usize>,
+    segment_offset: usize,
+    symbol_size: usize,
+    which_data_segment: usize,
+}
+
+/// Manually parse the data section from a wasm module
+///
+/// We need to do this for data symbols because walrus doesn't provide the right range and offset
+/// information for data segments. Fortunately, it provides it for code sections, so we only need to
+/// do a small amount extra of parsing here.
+fn parse_bytes_to_data_segment(bytes: &[u8]) -> Result<RawDataSection> {
+    let parser = wasmparser::Parser::new(0);
+    let mut parser = parser.parse_all(bytes);
+    let mut segments = vec![];
+    let mut data_range = 0..0;
+    let mut symbols = vec![];
+
+    // Process the payloads in the raw wasm file so we can extract the specific sections we need
+    while let Some(Ok(payload)) = parser.next() {
+        match payload {
+            Payload::DataSection(section) => {
+                data_range = section.range();
+                segments = section.into_iter().collect::<Result<Vec<_>, _>>()?
+            }
+            Payload::CustomSection(section) if section.name() == "linking" => {
+                let reader = BinaryReader::new(section.data(), 0);
+                let reader = LinkingSectionReader::new(reader)?;
+                for subsection in reader.subsections() {
+                    if let Linking::SymbolTable(map) = subsection? {
+                        symbols = map.into_iter().collect::<Result<Vec<_>, _>>()?;
+                    }
+                }
+            }
+            _ => {}
+        }
+    }
+
+    // Accumulate the data symbols into a btreemap for later use
+    let mut data_symbols = BTreeMap::new();
+    for (index, symbol) in symbols.iter().enumerate() {
+        let SymbolInfo::Data {
+            symbol: Some(symbol),
+            ..
+        } = symbol
+        else {
+            continue;
+        };
+
+        if symbol.size == 0 {
+            continue;
+        }
+
+        let data_segment = segments
+            .get(symbol.index as usize)
+            .context("Failed to find data segment")?;
+        let offset: usize =
+            data_segment.range.end - data_segment.data.len() + (symbol.offset as usize);
+        let range = offset..(offset + symbol.size as usize);
+
+        data_symbols.insert(
+            index,
+            DataSymbol {
+                index,
+                range,
+                segment_offset: symbol.offset as usize,
+                symbol_size: symbol.size as usize,
+                which_data_segment: symbol.index as usize,
+            },
+        );
+    }
+
+    Ok(RawDataSection {
+        data_range,
+        symbols,
+        data_symbols,
+    })
+}

+ 178 - 0
packages/wasm-split/wasm-split-cli/src/main.rs

@@ -0,0 +1,178 @@
+use clap::Parser;
+use std::path::PathBuf;
+use wasm_split_cli::SplitModule;
+
+fn main() {
+    tracing_subscriber::fmt()
+        .without_time()
+        .compact()
+        .with_env_filter("debug,walrus=info")
+        .init();
+
+    match Commands::parse() {
+        Commands::Split(split_args) => split(split_args),
+        Commands::Validate(validate_args) => validate(validate_args),
+    }
+}
+
+#[derive(Parser)]
+enum Commands {
+    /// Split a wasm module into multiple chunks
+    #[clap(name = "split")]
+    Split(SplitArgs),
+
+    /// Validate the main module of a wasm module
+    #[clap(name = "validate")]
+    Validate(ValidateArgs),
+}
+
+#[derive(Parser)]
+struct SplitArgs {
+    /// The wasm module emitted by rustc
+    original: PathBuf,
+
+    /// The wasm module emitted by wasm-bindgen
+    bindgened: PathBuf,
+
+    /// The output *directory* to write the split wasm files to
+    out_dir: PathBuf,
+}
+
+fn split(args: SplitArgs) {
+    let original = std::fs::read(&args.original).expect("failed to read input file");
+    let bindgened = std::fs::read(&args.bindgened).expect("failed to read input file");
+
+    _ = std::fs::remove_dir_all(&args.out_dir);
+    std::fs::create_dir_all(&args.out_dir).expect("failed to create output dir");
+
+    tracing::info!("Building split module");
+
+    let module = wasm_split_cli::Splitter::new(&original, &bindgened).unwrap();
+
+    let mut chunks = module.emit().unwrap();
+
+    // Write out the main module
+    tracing::info!(
+        "Writing main module to {}",
+        args.out_dir.join("main.wasm").display()
+    );
+    std::fs::write(args.out_dir.join("main.wasm"), &chunks.main.bytes).unwrap();
+
+    // Write the js module
+    std::fs::write(
+        args.out_dir.join("__wasm_split.js"),
+        emit_js(&chunks.chunks, &chunks.modules),
+    )
+    .expect("failed to write js module");
+
+    for (idx, chunk) in chunks.chunks.iter().enumerate() {
+        tracing::info!(
+            "Writing chunk {} to {}",
+            idx,
+            args.out_dir
+                .join(format!("chunk_{}_{}.wasm", idx, chunk.module_name))
+                .display()
+        );
+        std::fs::write(
+            args.out_dir
+                .join(format!("chunk_{}_{}.wasm", idx, chunk.module_name)),
+            &chunk.bytes,
+        )
+        .expect("failed to write chunk");
+    }
+
+    for (idx, module) in chunks.modules.iter_mut().enumerate() {
+        tracing::info!(
+            "Writing module {} to {}",
+            idx,
+            args.out_dir
+                .join(format!(
+                    "module_{}_{}.wasm",
+                    idx,
+                    module.component_name.as_ref().unwrap()
+                ))
+                .display()
+        );
+        std::fs::write(
+            args.out_dir.join(format!(
+                "module_{}_{}.wasm",
+                idx,
+                module.component_name.as_ref().unwrap()
+            )),
+            &module.bytes,
+        )
+        .expect("failed to write chunk");
+    }
+}
+
+fn emit_js(chunks: &[SplitModule], modules: &[SplitModule]) -> String {
+    use std::fmt::Write;
+    let mut glue = format!(
+        r#"import {{ initSync }} from "./main.js";
+{}"#,
+        include_str!("./__wasm_split.js")
+    );
+
+    for (idx, chunk) in chunks.iter().enumerate() {
+        tracing::debug!("emitting chunk: {:?}", chunk.module_name);
+        writeln!(
+                glue,
+                "export const __wasm_split_load_chunk_{idx} = makeLoad(\"/harness/split/chunk_{idx}_{module}.wasm\", [], fusedImports, initSync);",
+                module = chunk.module_name
+            ).expect("failed to write to string");
+    }
+
+    // Now write the modules
+    for (idx, module) in modules.iter().enumerate() {
+        let deps = module
+            .relies_on_chunks
+            .iter()
+            .map(|idx| format!("__wasm_split_load_chunk_{idx}"))
+            .collect::<Vec<_>>()
+            .join(", ");
+        let hash_id = module.hash_id.as_ref().unwrap();
+
+        writeln!(
+                glue,
+                "export const __wasm_split_load_{module}_{hash_id}_{cname} = makeLoad(\"/harness/split/module_{idx}_{cname}.wasm\", [{deps}], fusedImports, initSync);",
+                module = module.module_name,
+                idx = idx,
+                cname = module.component_name.as_ref().unwrap(),
+                deps = deps
+            )
+            .expect("failed to write to string");
+    }
+
+    glue
+}
+
+#[derive(Parser)]
+struct ValidateArgs {
+    /// The input wasm file to validate
+    main: PathBuf,
+
+    chunks: Vec<PathBuf>,
+}
+
+fn validate(args: ValidateArgs) {
+    let bytes = std::fs::read(&args.main).expect("failed to read input file");
+    let main_module = walrus::Module::from_buffer(&bytes).unwrap();
+
+    for chunk in args.chunks {
+        let bytes = std::fs::read(chunk).expect("failed to read input file");
+        let chunk_module = walrus::Module::from_buffer(&bytes).unwrap();
+
+        assert!(chunk_module.tables.iter().count() == 1);
+
+        for import in chunk_module.imports.iter() {
+            let matching = main_module.exports.iter().find(|e| e.name == import.name);
+
+            let Some(matching) = matching else {
+                tracing::error!("Could not find matching export for import {import:#?}");
+                continue;
+            };
+
+            tracing::debug!("import: {:?}", matching.name);
+        }
+    }
+}
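For reference, an assumed invocation of this CLI (the binary name and paths are illustrative): `wasm-split-cli split target/app.wasm target/bindgen/app_bg.wasm dist/split` writes `main.wasm`, the chunks, the split modules, and `__wasm_split.js` into `dist/split`; afterwards, `wasm-split-cli validate dist/split/main.wasm dist/split/chunk_0_shared.wasm` checks that each chunk's imports have a matching export in the main module.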

+ 15 - 0
packages/wasm-split/wasm-split-macro/Cargo.toml

@@ -0,0 +1,15 @@
+[package]
+name = "wasm-split-macro"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+syn = { workspace = true, features = ["full"] }
+base16 = { workspace = true }
+digest = { workspace = true }
+quote = { workspace = true }
+sha2 = { workspace = true }
+proc-macro2 = { workspace = true }
+
+[lib]
+proc-macro = true

+ 226 - 0
packages/wasm-split/wasm-split-macro/src/lib.rs

@@ -0,0 +1,226 @@
+use proc_macro::TokenStream;
+
+use digest::Digest;
+use quote::{format_ident, quote};
+use syn::{parse_macro_input, parse_quote, FnArg, Ident, ItemFn, ReturnType, Signature};
+
+#[proc_macro_attribute]
+pub fn wasm_split(args: TokenStream, input: TokenStream) -> TokenStream {
+    let module_ident = parse_macro_input!(args as Ident);
+    let item_fn = parse_macro_input!(input as ItemFn);
+
+    if item_fn.sig.asyncness.is_none() {
+        panic!("wasm_split functions must be async. Use a LazyLoader with synchronous functions instead.");
+    }
+
+    let LoaderNames {
+        split_loader_ident,
+        impl_import_ident,
+        impl_export_ident,
+        load_module_ident,
+        ..
+    } = LoaderNames::new(item_fn.sig.ident.clone(), module_ident.to_string());
+
+    let mut desugared_async_sig = item_fn.sig.clone();
+    desugared_async_sig.asyncness = None;
+    desugared_async_sig.output = match &desugared_async_sig.output {
+        ReturnType::Default => {
+            parse_quote! { -> std::pin::Pin<Box<dyn std::future::Future<Output = ()>>> }
+        }
+        ReturnType::Type(_, ty) => {
+            parse_quote! { -> std::pin::Pin<Box<dyn std::future::Future<Output = #ty>>> }
+        }
+    };
+
+    let import_sig = Signature {
+        ident: impl_import_ident.clone(),
+        ..desugared_async_sig.clone()
+    };
+
+    let export_sig = Signature {
+        ident: impl_export_ident.clone(),
+        ..desugared_async_sig.clone()
+    };
+
+    let default_item = item_fn.clone();
+
+    let mut wrapper_sig = item_fn.sig;
+    wrapper_sig.asyncness = Some(Default::default());
+
+    let mut args = Vec::new();
+    for (i, param) in wrapper_sig.inputs.iter_mut().enumerate() {
+        match param {
+            syn::FnArg::Receiver(_) => args.push(format_ident!("self")),
+            syn::FnArg::Typed(pat_type) => {
+                let param_ident = format_ident!("__wasm_split_arg_{i}");
+                args.push(param_ident.clone());
+                pat_type.pat = Box::new(syn::Pat::Ident(syn::PatIdent {
+                    attrs: vec![],
+                    by_ref: None,
+                    mutability: None,
+                    ident: param_ident,
+                    subpat: None,
+                }));
+            }
+        }
+    }
+
+    let attrs = &item_fn.attrs;
+    let stmts = &item_fn.block.stmts;
+
+    quote! {
+        #[cfg(target_arch = "wasm32")]
+        #wrapper_sig {
+            #(#attrs)*
+            #[allow(improper_ctypes_definitions)]
+            #[no_mangle]
+            pub extern "C" #export_sig {
+                Box::pin(async move { #(#stmts)* })
+            }
+
+            #[link(wasm_import_module = "./__wasm_split.js")]
+            extern "C" {
+                #[no_mangle]
+                fn #load_module_ident (
+                    callback: unsafe extern "C" fn(*const ::std::ffi::c_void, bool),
+                    data: *const ::std::ffi::c_void
+                );
+
+                #[allow(improper_ctypes)]
+                #[no_mangle]
+                #import_sig;
+            }
+
+            thread_local! {
+                static #split_loader_ident: wasm_split::LazySplitLoader = unsafe {
+                    wasm_split::LazySplitLoader::new(#load_module_ident)
+                };
+            }
+
+            // Initiate the download by calling the load_module_ident function which will kick-off the loader
+            if !wasm_split::LazySplitLoader::ensure_loaded(&#split_loader_ident).await {
+                panic!("Failed to load wasm-split module");
+            }
+
+            unsafe { #impl_import_ident( #(#args),* ) }.await
+        }
+
+        #[cfg(not(target_arch = "wasm32"))]
+        #default_item
+    }
+    .into()
+}
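A minimal usage sketch (assumed from the macro above, not part of this diff; the module and function names are illustrative). The attribute takes a module identifier and requires the annotated function to be async:

use wasm_split::wasm_split;

// On wasm32, the body is re-exported under a `__wasm_split_00___extras___00_export_<hash>_...`
// symbol and the generated wrapper awaits the split loader before calling the imported stub.
#[wasm_split(extras)]
async fn render_extras(count: usize) -> String {
    format!("loaded lazily: {count}")
}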
+
+/// Create a lazy loader for a given function. Meant to be used in statics. Designed for libraries to
+/// integrate with.
+///
+/// ```rust, no_run
+/// fn SomeFunction(args: Args) -> Ret {}
+///
+/// static LOADER: wasm_split::LazyLoader<Args, Ret> =
+///     wasm_split::lazy_loader!(extern "auto" fn SomeFunction(args: Args) -> Ret);
+///
+/// // later, in an async context:
+/// LOADER.load().await;
+/// LOADER.call(args)
+/// ```
+#[proc_macro]
+pub fn lazy_loader(input: TokenStream) -> TokenStream {
+    // We can only accept idents/paths that will be the source function
+    let sig = parse_macro_input!(input as Signature);
+    let params = sig.inputs.clone();
+    let outputs = sig.output.clone();
+    let Some(FnArg::Typed(arg)) = params.first().cloned() else {
+        panic!(
+            "Lazy Loader must define a single input argument to satisfy the LazyLoader signature"
+        )
+    };
+    let arg_ty = arg.ty.clone();
+    let LoaderNames {
+        name,
+        split_loader_ident,
+        impl_import_ident,
+        impl_export_ident,
+        load_module_ident,
+        ..
+    } = LoaderNames::new(
+        sig.ident.clone(),
+        sig.abi
+            .as_ref()
+            .and_then(|abi| abi.name.as_ref().map(|f| f.value()))
+            .expect("abi to be module name")
+            .to_string(),
+    );
+
+    quote! {
+        {
+            #[cfg(target_arch = "wasm32")]
+            {
+                #[link(wasm_import_module = "./__wasm_split.js")]
+                extern "C" {
+                    // The function we'll use to initiate the download of the module
+                    #[no_mangle]
+                    fn #load_module_ident(
+                        callback: unsafe extern "C" fn(*const ::std::ffi::c_void, bool),
+                        data: *const ::std::ffi::c_void,
+                    );
+
+                    #[allow(improper_ctypes)]
+                    #[no_mangle]
+                    fn #impl_import_ident(arg: #arg_ty) #outputs;
+                }
+
+
+                #[allow(improper_ctypes_definitions)]
+                #[no_mangle]
+                pub extern "C" fn #impl_export_ident(arg: #arg_ty) #outputs {
+                    #name(arg)
+                }
+
+                thread_local! {
+                    static #split_loader_ident: wasm_split::LazySplitLoader = unsafe {
+                        wasm_split::LazySplitLoader::new(#load_module_ident)
+                    };
+                };
+
+                unsafe {
+                    wasm_split::LazyLoader::new(#impl_import_ident, &#split_loader_ident)
+                }
+            }
+
+            #[cfg(not(target_arch = "wasm32"))]
+            {
+                wasm_split::LazyLoader::preloaded(#name)
+            }
+        }
+    }
+    .into()
+}
+
+struct LoaderNames {
+    name: Ident,
+    split_loader_ident: Ident,
+    impl_import_ident: Ident,
+    impl_export_ident: Ident,
+    load_module_ident: Ident,
+}
+
+impl LoaderNames {
+    fn new(name: Ident, module: String) -> Self {
+        let unique_identifier = base16::encode_lower(
+            &sha2::Sha256::digest(format!("{name} {span:?}", name = name, span = name.span()))
+                [..16],
+        );
+
+        Self {
+            split_loader_ident: format_ident!("__wasm_split_loader_{module}"),
+            impl_export_ident: format_ident!(
+                "__wasm_split_00___{module}___00_export_{unique_identifier}_{name}"
+            ),
+            impl_import_ident: format_ident!(
+                "__wasm_split_00___{module}___00_import_{unique_identifier}_{name}"
+            ),
+            load_module_ident: format_ident!(
+                "__wasm_split_load_{module}_{unique_identifier}_{name}"
+            ),
+            name,
+        }
+    }
+}

+ 8 - 0
packages/wasm-split/wasm-split/Cargo.toml

@@ -0,0 +1,8 @@
+[package]
+name = "wasm-split"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+async-once-cell = { workspace = true, features = ["std"] }
+wasm-split-macro = { workspace = true }

+ 214 - 0
packages/wasm-split/wasm-split/src/lib.rs

@@ -0,0 +1,214 @@
+use std::{
+    cell::Cell,
+    ffi::c_void,
+    future::Future,
+    pin::Pin,
+    rc::Rc,
+    task::{Context, Poll, Waker},
+    thread::LocalKey,
+};
+
+pub use wasm_split_macro::{lazy_loader, wasm_split};
+
+pub type Result<T> = std::result::Result<T, SplitLoaderError>;
+
+#[non_exhaustive]
+#[derive(Debug, Clone)]
+pub enum SplitLoaderError {
+    FailedToLoad,
+}
+impl std::fmt::Display for SplitLoaderError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            SplitLoaderError::FailedToLoad => write!(f, "Failed to load wasm-split module"),
+        }
+    }
+}
+
+/// A lazy loader that can be used to load a function from a split out `.wasm` file.
+///
+/// # Example
+///
+/// To use the split loader, you must first create the loader using the `lazy_loader` macro. This macro
+/// requires the complete signature of the function you want to load. The extern abi string denotes
+/// which module the function should be loaded from. If you don't know which module to use, use `auto`
+/// and wasm-split will automatically combine all the modules into one.
+///
+/// ```rust, no_run
+/// static LOADER: wasm_split::LazyLoader<Args, Ret> = wasm_split::lazy_loader!(extern "auto" fn SomeFunction(args: Args) -> Ret);
+///
+/// fn SomeFunction(args: Args) -> Ret {
+///     // Implementation
+/// }
+/// ```
+///
+/// ## The `#[component(lazy)]` macro
+///
+/// If you're using wasm-split with Dioxus, the `#[component(lazy)]` macro is provided that wraps
+/// the lazy loader with suspense. This means that the component will suspense until its body has
+/// been loaded.
+///
+/// ```rust, ignore
+/// fn app() -> Element {
+///     rsx! {
+///         Suspense {
+///             fallback: rsx! { "Loading..." },
+///             LazyComponent { abc: 0 }
+///         }
+///     }
+/// }
+///
+/// #[component(lazy)]
+/// fn LazyComponent(abc: i32) -> Element {
+///     rsx! {
+///         div {
+///             "This is a lazy component! {abc}"
+///         }
+///     }
+/// }
+/// ```
+pub struct LazyLoader<Args, Ret> {
+    imported: unsafe extern "C" fn(arg: Args) -> Ret,
+    key: &'static LocalKey<LazySplitLoader>,
+}
+
+impl<Args, Ret> LazyLoader<Args, Ret> {
+    /// Create a new lazy loader from a lazy imported function and a LazySplitLoader
+    ///
+    /// # Safety
+    /// This is unsafe because we're taking an arbitrary function pointer and using it as the loader.
+    /// This function is likely not instantiated when passed here, so it should never be called directly.
+    #[doc(hidden)]
+    pub const unsafe fn new(
+        imported: unsafe extern "C" fn(arg: Args) -> Ret,
+        key: &'static LocalKey<LazySplitLoader>,
+    ) -> Self {
+        Self { imported, key }
+    }
+
+    /// Create a new lazy loader that is already resolved.
+    pub const fn preloaded(f: fn(Args) -> Ret) -> Self {
+        let imported =
+            unsafe { std::mem::transmute::<fn(Args) -> Ret, unsafe extern "C" fn(Args) -> Ret>(f) };
+
+        thread_local! {
+            static LAZY: LazySplitLoader = LazySplitLoader::preloaded();
+        };
+
+        Self {
+            imported,
+            key: &LAZY,
+        }
+    }
+
+    /// Load the lazy loader, returning a boolean indicating whether it loaded successfully
+    pub async fn load(&'static self) -> bool {
+        *self.key.with(|inner| inner.lazy.clone()).as_ref().await
+    }
+
+    /// Call the lazy loader with the given arguments
+    pub fn call(&'static self, args: Args) -> Result<Ret> {
+        let Some(true) = self.key.with(|inner| inner.lazy.try_get().copied()) else {
+            return Err(SplitLoaderError::FailedToLoad);
+        };
+
+        Ok(unsafe { (self.imported)(args) })
+    }
+}
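A small call-site sketch (an assumption, not part of this diff) for a loader built with `lazy_loader!`: `load` must complete successfully before `call` returns `Ok`.

async fn call_lazily(loader: &'static wasm_split::LazyLoader<i32, i32>) -> Option<i32> {
    // Downloads and instantiates the split module on first use
    if loader.load().await {
        // Safe to call now that the module has been resolved
        loader.call(5).ok()
    } else {
        None
    }
}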
+
+type Lazy = async_once_cell::Lazy<bool, SplitLoaderFuture>;
+type LoadCallbackFn = unsafe extern "C" fn(*const c_void, bool) -> ();
+type LoadFn = unsafe extern "C" fn(LoadCallbackFn, *const c_void) -> ();
+
+pub struct LazySplitLoader {
+    lazy: Pin<Rc<Lazy>>,
+}
+
+impl LazySplitLoader {
+    /// Create a new lazy split loader from a load function that is generated by the wasm-split macro
+    ///
+    /// # Safety
+    ///
+    /// This is unsafe because we're taking an arbitrary function pointer and using it as the loader.
+    /// It is likely not instantiated when passed here, so it should never be called directly.
+    #[doc(hidden)]
+    pub unsafe fn new(load: LoadFn) -> Self {
+        Self {
+            lazy: Rc::pin(Lazy::new({
+                SplitLoaderFuture {
+                    loader: Rc::new(SplitLoader {
+                        state: Cell::new(SplitLoaderState::Deferred(load)),
+                        waker: Cell::new(None),
+                    }),
+                }
+            })),
+        }
+    }
+
+    fn preloaded() -> Self {
+        Self {
+            lazy: Rc::pin(Lazy::new({
+                SplitLoaderFuture {
+                    loader: Rc::new(SplitLoader {
+                        state: Cell::new(SplitLoaderState::Completed(true)),
+                        waker: Cell::new(None),
+                    }),
+                }
+            })),
+        }
+    }
+
+    /// Wait for the lazy loader to load
+    pub async fn ensure_loaded(loader: &'static std::thread::LocalKey<LazySplitLoader>) -> bool {
+        *loader.with(|inner| inner.lazy.clone()).as_ref().await
+    }
+}
+
+struct SplitLoader {
+    state: Cell<SplitLoaderState>,
+    waker: Cell<Option<Waker>>,
+}
+
+#[derive(Clone, Copy)]
+enum SplitLoaderState {
+    Deferred(LoadFn),
+    Pending,
+    Completed(bool),
+}
+
+struct SplitLoaderFuture {
+    loader: Rc<SplitLoader>,
+}
+
+impl Future for SplitLoaderFuture {
+    type Output = bool;
+
+    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<bool> {
+        unsafe extern "C" fn load_callback(loader: *const c_void, success: bool) {
+            let loader = unsafe { Rc::from_raw(loader as *const SplitLoader) };
+            loader.state.set(SplitLoaderState::Completed(success));
+            if let Some(waker) = loader.waker.take() {
+                waker.wake()
+            }
+        }
+
+        match self.loader.state.get() {
+            SplitLoaderState::Deferred(load) => {
+                self.loader.state.set(SplitLoaderState::Pending);
+                self.loader.waker.set(Some(cx.waker().clone()));
+                unsafe {
+                    load(
+                        load_callback,
+                        Rc::<SplitLoader>::into_raw(self.loader.clone()) as *const c_void,
+                    )
+                };
+                Poll::Pending
+            }
+            SplitLoaderState::Pending => {
+                self.loader.waker.set(Some(cx.waker().clone()));
+                Poll::Pending
+            }
+            SplitLoaderState::Completed(value) => Poll::Ready(value),
+        }
+    }
+}

+ 9 - 0
packages/wasm-split/wasm-used/Cargo.toml

@@ -0,0 +1,9 @@
+[package]
+name = "wasm-used"
+edition = "2021"
+version.workspace = true
+
+[dependencies]
+walrus = { workspace = true, features = ["parallel"] }
+id-arena = { workspace = true }
+tracing = { workspace = true }

+ 346 - 0
packages/wasm-split/wasm-used/src/lib.rs

@@ -0,0 +1,346 @@
+use std::collections::HashSet;
+
+use id_arena::Id;
+use walrus::{ir::*, ExportId};
+use walrus::{ConstExpr, Data, DataId, DataKind, Element, ExportItem, Function};
+use walrus::{ElementId, ElementItems, ElementKind, Module, RefType, Type, TypeId};
+use walrus::{FunctionId, FunctionKind, Global, GlobalId};
+use walrus::{GlobalKind, Memory, MemoryId, Table, TableId};
+
+type IdHashSet<T> = HashSet<Id<T>>;
+
+/// Set of all root used items in a wasm module.
+#[derive(Debug, Default)]
+pub struct Roots {
+    tables: Vec<TableId>,
+    funcs: Vec<(FunctionId, Location)>,
+    globals: Vec<GlobalId>,
+    memories: Vec<MemoryId>,
+    data: Vec<DataId>,
+    elements: Vec<ElementId>,
+    used: Used,
+}
+
+#[allow(dead_code)]
+#[derive(Debug)]
+pub enum Location {
+    Start,
+    Export { export: ExportId },
+    Table { table: TableId },
+    Memory { memory: MemoryId },
+    Global { global: GlobalId },
+    Data,
+    Element { element: ElementId },
+    Code { func: FunctionId },
+}
+
+impl Roots {
+    /// Creates a new set of empty roots.
+    pub fn new() -> Roots {
+        Roots::default()
+    }
+
+    /// Adds a new function to the set of roots
+    pub fn push_func(&mut self, func: FunctionId, from: Location) -> &mut Roots {
+        if self.used.funcs.insert(func) {
+            // log::trace!("function is used: {:?}", func);
+            self.funcs.push((func, from));
+        }
+        self
+    }
+
+    /// Adds a new table to the set of roots
+    pub fn push_table(&mut self, table: TableId) -> &mut Roots {
+        if self.used.tables.insert(table) {
+            // log::trace!("table is used: {:?}", table);
+            self.tables.push(table);
+        }
+        self
+    }
+
+    /// Adds a new memory to the set of roots
+    pub fn push_memory(&mut self, memory: MemoryId) -> &mut Roots {
+        if self.used.memories.insert(memory) {
+            // log::trace!("memory is used: {:?}", memory);
+            self.memories.push(memory);
+        }
+        self
+    }
+
+    /// Adds a new global to the set of roots
+    pub fn push_global(&mut self, global: GlobalId) -> &mut Roots {
+        if self.used.globals.insert(global) {
+            // log::trace!("global is used: {:?}", global);
+            self.globals.push(global);
+        }
+        self
+    }
+
+    fn push_data(&mut self, data: DataId) -> &mut Roots {
+        if self.used.data.insert(data) {
+            // log::trace!("data is used: {:?}", data);
+            self.data.push(data);
+        }
+        self
+    }
+
+    fn push_element(&mut self, element: ElementId) -> &mut Roots {
+        if self.used.elements.insert(element) {
+            // log::trace!("element is used: {:?}", element);
+            self.elements.push(element);
+        }
+        self
+    }
+}
+
+/// Finds the things within a module that are used.
+///
+/// This is useful for implementing something like a linker's `--gc-sections` so
+/// that our emitted `.wasm` binaries are small and don't contain things that
+/// are not used.
+#[derive(Debug, Default)]
+pub struct Used {
+    /// The module's used tables.
+    pub tables: IdHashSet<Table>,
+    /// The module's used types.
+    pub types: IdHashSet<Type>,
+    /// The module's used functions.
+    pub funcs: IdHashSet<Function>,
+    /// The module's used globals.
+    pub globals: IdHashSet<Global>,
+    /// The module's used memories.
+    pub memories: IdHashSet<Memory>,
+    /// The module's used passive element segments.
+    pub elements: IdHashSet<Element>,
+    /// The module's used passive data segments.
+    pub data: IdHashSet<Data>,
+}
+
+impl Used {
+    /// Construct a new `Used` set for the given module.
+    pub fn new(module: &Module, deleted: &HashSet<FunctionId>) -> Used {
+        // log::debug!("starting to calculate used set");
+        let mut stack = Roots::default();
+
+        // All exports are roots
+        for export in module.exports.iter() {
+            match export.item {
+                ExportItem::Function(f) => stack.push_func(
+                    f,
+                    Location::Export {
+                        export: export.id(),
+                    },
+                ),
+                ExportItem::Table(t) => stack.push_table(t),
+                ExportItem::Memory(m) => stack.push_memory(m),
+                ExportItem::Global(g) => stack.push_global(g),
+            };
+        }
+
+        // The start function is an implicit root as well
+        if let Some(f) = module.start {
+            stack.push_func(f, Location::Start);
+        }
+
+        // Initialization of memories or tables is a side-effectful operation
+        // because they can be out-of-bounds, so keep all active segments.
+        for data in module.data.iter() {
+            if let DataKind::Active { .. } = &data.kind {
+                stack.push_data(data.id());
+            }
+        }
+        for elem in module.elements.iter() {
+            match elem.kind {
+                // Active segments are rooted because they initialize imported
+                // tables.
+                ElementKind::Active { table, .. } => {
+                    if module.tables.get(table).import.is_some() {
+                        stack.push_element(elem.id());
+                    }
+                }
+                // Declared segments can probably get gc'd but for now we're
+                // conservative and we root them
+                ElementKind::Declared => {
+                    stack.push_element(elem.id());
+                }
+                ElementKind::Passive => {}
+            }
+        }
+
+        // // And finally ask custom sections for their roots
+        // for (_id, section) in module.customs.iter() {
+        //     section.add_gc_roots(&mut stack);
+        // }
+        // tracing::info!("Used roots: {:#?}", stack);
+
+        // Iteratively visit all items until our stack is empty
+        while !stack.funcs.is_empty()
+            || !stack.tables.is_empty()
+            || !stack.memories.is_empty()
+            || !stack.globals.is_empty()
+            || !stack.data.is_empty()
+            || !stack.elements.is_empty()
+        {
+            while let Some((f, _loc)) = stack.funcs.pop() {
+                if deleted.contains(&f) {
+                    let func = module.funcs.get(f);
+                    let name = func
+                        .name
+                        .as_ref()
+                        .cloned()
+                        .unwrap_or_else(|| format!("unknown - {}", f.index()));
+                    // panic!(
+                    //     "Found a function that should be deleted but is still used: {:?} - {:?}",
+                    //     name, f
+                    // );
+                    tracing::error!(
+                        "Found a function that should be deleted but is still used: {:?} - {:?} - {:?}",
+                        name,
+                        f,
+                        _loc
+                    );
+                    if let Location::Code { func } = _loc {
+                        let func_name = module.funcs.get(func).name.as_ref().unwrap();
+                        tracing::error!("Function {:?} is used by {:?}", f, func_name);
+                    }
+
+                    // continue;
+                }
+
+                let func = module.funcs.get(f);
+                stack.used.types.insert(func.ty());
+
+                match &func.kind {
+                    FunctionKind::Local(func) => {
+                        let mut visitor = UsedVisitor {
+                            cur_func: f,
+                            stack: &mut stack,
+                        };
+                        dfs_in_order(&mut visitor, func, func.entry_block());
+                    }
+                    FunctionKind::Import(_) => {}
+                    FunctionKind::Uninitialized(_) => unreachable!(),
+                }
+            }
+
+            while let Some(t) = stack.tables.pop() {
+                for elem in module.tables.get(t).elem_segments.iter() {
+                    stack.push_element(*elem);
+                }
+            }
+
+            while let Some(t) = stack.globals.pop() {
+                match &module.globals.get(t).kind {
+                    GlobalKind::Import(_) => {}
+                    GlobalKind::Local(ConstExpr::Global(global)) => {
+                        stack.push_global(*global);
+                    }
+                    GlobalKind::Local(ConstExpr::RefFunc(func)) => {
+                        stack.push_func(*func, Location::Global { global: t });
+                    }
+                    GlobalKind::Local(ConstExpr::Value(_))
+                    | GlobalKind::Local(ConstExpr::RefNull(_)) => {}
+                }
+            }
+
+            while let Some(t) = stack.memories.pop() {
+                for data in &module.memories.get(t).data_segments {
+                    stack.push_data(*data);
+                }
+            }
+
+            while let Some(d) = stack.data.pop() {
+                let d = module.data.get(d);
+                if let DataKind::Active { memory, offset } = &d.kind {
+                    stack.push_memory(*memory);
+                    if let ConstExpr::Global(g) = offset {
+                        stack.push_global(*g);
+                    }
+                }
+            }
+
+            while let Some(e) = stack.elements.pop() {
+                let e = module.elements.get(e);
+                if let ElementItems::Functions(function_ids) = &e.items {
+                    function_ids.iter().for_each(|f| {
+                        stack.push_func(*f, Location::Element { element: e.id() });
+                    });
+                }
+                if let ElementItems::Expressions(RefType::Funcref, items) = &e.items {
+                    for item in items {
+                        match item {
+                            ConstExpr::Global(g) => {
+                                stack.push_global(*g);
+                            }
+                            ConstExpr::RefFunc(f) => {
+                                stack.push_func(*f, Location::Element { element: e.id() });
+                            }
+                            _ => {}
+                        }
+                    }
+                }
+                if let ElementKind::Active { offset, table } = &e.kind {
+                    if let ConstExpr::Global(g) = offset {
+                        stack.push_global(*g);
+                    }
+                    stack.push_table(*table);
+                }
+            }
+        }
+
+        // Wabt seems to have weird behavior where a `data` segment, if present
+        // even if passive, requires a `memory` declaration. Our GC pass is
+        // pretty aggressive and if you have a passive data segment and only
+        // `data.drop` instructions you technically don't need the `memory`.
+        // Let's keep `wabt` passing though and just say that if there are data
+        // segments kept, but no memories, then we try to add the first memory,
+        // if any, to the used set.
+        if !stack.used.data.is_empty() && stack.used.memories.is_empty() {
+            if let Some(mem) = module.memories.iter().next() {
+                stack.used.memories.insert(mem.id());
+            }
+        }
+
+        stack.used
+    }
+}
+
+struct UsedVisitor<'a> {
+    cur_func: FunctionId,
+    stack: &'a mut Roots,
+}
+
+impl Visitor<'_> for UsedVisitor<'_> {
+    fn visit_function_id(&mut self, &func: &FunctionId) {
+        self.stack.push_func(
+            func,
+            Location::Code {
+                func: self.cur_func,
+            },
+        );
+    }
+
+    fn visit_memory_id(&mut self, &m: &MemoryId) {
+        self.stack.push_memory(m);
+    }
+
+    fn visit_global_id(&mut self, &g: &GlobalId) {
+        self.stack.push_global(g);
+    }
+
+    fn visit_table_id(&mut self, &t: &TableId) {
+        self.stack.push_table(t);
+    }
+
+    fn visit_type_id(&mut self, &t: &TypeId) {
+        self.stack.used.types.insert(t);
+    }
+
+    fn visit_data_id(&mut self, &d: &DataId) {
+        self.stack.push_data(d);
+    }
+
+    fn visit_element_id(&mut self, &e: &ElementId) {
+        self.stack.push_element(e);
+    }
+}