
Merge pull request #2055 from DioxusLabs/jk/fix-hotreloading-issues

Fix hotreloading issues, clean up the CLI a bit
Jonathan Kelley, 1 year ago
Parent
Commit 01a0917223
59 files changed, with 1943 additions and 1316 deletions:
  1. .vscode/settings.json (+1, -1)
  2. Cargo.lock (+45, -44)
  3. Cargo.toml (+38, -15)
  4. packages/autofmt/Cargo.toml (+2, -2)
  5. packages/check/Cargo.toml (+4, -4)
  6. packages/check/src/check.rs (+2, -2)
  7. packages/cli-config/src/config.rs (+38, -0)
  8. packages/cli/Cargo.toml (+7, -5)
  9. packages/cli/build.rs (+0, -45)
  10. packages/cli/rustfmt.toml (+1, -0)
  11. packages/cli/src/assets/autoreload.js (+11, -6)
  12. packages/cli/src/builder.rs (+30, -15)
  13. packages/cli/src/cli/build.rs (+4, -5)
  14. packages/cli/src/cli/bundle.rs (+3, -0)
  15. packages/cli/src/cli/cfg.rs (+3, -3)
  16. packages/cli/src/cli/create.rs (+1, -1)
  17. packages/cli/src/cli/mod.rs (+5, -10)
  18. packages/cli/src/cli/serve.rs (+7, -16)
  19. packages/cli/src/cli/version.rs (+0, -76)
  20. packages/cli/src/lib.rs (+0, -2)
  21. packages/cli/src/main.rs (+34, -37)
  22. packages/cli/src/server/desktop/mod.rs (+18, -15)
  23. packages/cli/src/server/mod.rs (+314, -114)
  24. packages/cli/src/server/output.rs (+45, -20)
  25. packages/cli/src/server/web/hot_reload.rs (+60, -39)
  26. packages/cli/src/server/web/mod.rs (+71, -310)
  27. packages/cli/src/server/web/server.rs (+243, -0)
  28. packages/cli/tests/fmt.rs (+4, -0)
  29. packages/config-macro/Cargo.toml (+1, -1)
  30. packages/core-macro/Cargo.toml (+2, -2)
  31. packages/core/src/virtual_dom.rs (+27, -7)
  32. packages/desktop/src/app.rs (+6, -0)
  33. packages/desktop/src/protocol.rs (+4, -1)
  34. packages/desktop/src/webview.rs (+8, -0)
  35. packages/fullstack/examples/static-hydrated/src/main.rs (+2, -1)
  36. packages/fullstack/src/hooks/server_future.rs (+1, -0)
  37. packages/fullstack/src/hot_reload.rs (+1, -0)
  38. packages/hot-reload/Cargo.toml (+1, -1)
  39. packages/hot-reload/src/file_watcher.rs (+236, -197)
  40. packages/hot-reload/src/lib.rs (+25, -18)
  41. packages/html-internal-macro/Cargo.toml (+1, -1)
  42. packages/liveview/src/pool.rs (+2, -0)
  43. packages/router-macro/Cargo.toml (+4, -4)
  44. packages/rsx-rosetta/Cargo.toml (+3, -3)
  45. packages/rsx/Cargo.toml (+5, -5)
  46. packages/rsx/src/hot_reload/hot_reload_diff.rs (+133, -122)
  47. packages/rsx/src/hot_reload/hot_reloading_file_map.rs (+324, -140)
  48. packages/rsx/src/hot_reload/mod.rs (+2, -0)
  49. packages/rsx/src/lib.rs (+1, -0)
  50. packages/rsx/tests/hotreloads.rs (+43, -0)
  51. packages/rsx/tests/invalid/changedexpr.new.rsx (+9, -0)
  52. packages/rsx/tests/invalid/changedexpr.old.rsx (+11, -0)
  53. packages/rsx/tests/valid/expr.new.rsx (+17, -0)
  54. packages/rsx/tests/valid/expr.old.rsx (+17, -0)
  55. packages/rsx/tests/valid/let.new.rsx (+12, -0)
  56. packages/rsx/tests/valid/let.old.rsx (+12, -0)
  57. packages/server-macro/Cargo.toml (+1, -1)
  58. packages/web/Cargo.toml (+12, -20)
  59. packages/web/src/hot_reload.rs (+29, -5)

+ 1 - 1
.vscode/settings.json

@@ -3,8 +3,8 @@
   "[toml]": {
     "editor.formatOnSave": false
   },
-  "rust-analyzer.check.workspace": false,
   // "rust-analyzer.check.workspace": true,
+  "rust-analyzer.check.workspace": false,
   "rust-analyzer.check.features": "all",
   "rust-analyzer.cargo.features": "all",
   "rust-analyzer.check.allTargets": true

+ 45 - 44
Cargo.lock

@@ -1149,7 +1149,7 @@ dependencies = [
  "clap 4.4.18",
  "console",
  "dialoguer",
- "env_logger",
+ "env_logger 0.10.2",
  "fs-err",
  "git2",
  "gix-config",
@@ -1200,20 +1200,6 @@ dependencies = [
  "serde",
 ]
 
-[[package]]
-name = "cargo_metadata"
-version = "0.15.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eee4243f1f26fc7a42710e7439c149e2b10b05472f88090acce52632f231a73a"
-dependencies = [
- "camino",
- "cargo-platform",
- "semver",
- "serde",
- "serde_json",
- "thiserror",
-]
-
 [[package]]
 name = "cargo_metadata"
 version = "0.17.0"
@@ -1295,15 +1281,6 @@ dependencies = [
  "uuid",
 ]
 
-[[package]]
-name = "cfg-expr"
-version = "0.12.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bbc13bf6290a6b202cc3efb36f7ec2b739a80634215630c8053a313edf6abef"
-dependencies = [
- "smallvec",
-]
-
 [[package]]
 name = "cfg-expr"
 version = "0.15.7"
@@ -2121,7 +2098,7 @@ dependencies = [
  "dioxus-signals",
  "dioxus-ssr",
  "dioxus-web",
- "env_logger",
+ "env_logger 0.10.2",
  "futures-util",
  "rand 0.8.5",
  "serde",
@@ -2152,7 +2129,7 @@ dependencies = [
  "pretty_assertions",
  "proc-macro2",
  "quote",
- "syn 1.0.109",
+ "syn 2.0.52",
 ]
 
 [[package]]
@@ -2179,6 +2156,7 @@ dependencies = [
  "dioxus-html",
  "dioxus-rsx",
  "dirs",
+ "env_logger 0.11.3",
  "fern",
  "flate2",
  "fs_extra",
@@ -2190,7 +2168,7 @@ dependencies = [
  "hyper-util",
  "ignore",
  "indicatif",
- "interprocess-docfix",
+ "interprocess",
  "lazy_static",
  "log",
  "manganis-cli-support",
@@ -2431,7 +2409,7 @@ dependencies = [
  "dioxus-rsx",
  "execute",
  "ignore",
- "interprocess-docfix",
+ "interprocess",
  "notify",
  "once_cell",
  "serde",
@@ -2885,6 +2863,16 @@ dependencies = [
  "syn 2.0.52",
 ]
 
+[[package]]
+name = "env_filter"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a009aa4810eb158359dda09d0c87378e4bbb89b5a801f016885a4707ba24f7ea"
+dependencies = [
+ "log",
+ "regex",
+]
+
 [[package]]
 name = "env_logger"
 version = "0.10.2"
@@ -2898,6 +2886,19 @@ dependencies = [
  "termcolor",
 ]
 
+[[package]]
+name = "env_logger"
+version = "0.11.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38b35839ba51819680ba087cd351788c9a3c476841207e0b8cee0b04722343b9"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "env_filter",
+ "humantime",
+ "log",
+]
+
 [[package]]
 name = "equivalent"
 version = "1.0.1"
@@ -4920,10 +4921,10 @@ dependencies = [
 ]
 
 [[package]]
-name = "interprocess-docfix"
-version = "1.2.2"
+name = "interprocess"
+version = "1.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4b84ee245c606aeb0841649a9288e3eae8c61b853a8cd5c0e14450e96d53d28f"
+checksum = "81f2533f3be42fffe3b5e63b71aeca416c1c3bc33e4e27be018521e76b1f38fb"
 dependencies = [
  "blocking",
  "cfg-if",
@@ -5182,12 +5183,12 @@ dependencies = [
 
 [[package]]
 name = "krates"
-version = "0.12.6"
+version = "0.16.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "942c43a6cba1c201dfe81a943c89fa5c9140b34993e0c027f542c80b92e319a7"
+checksum = "320d34cfe880f2c6243b4cfff8aab3e34eab6325d0a26729f23356418fbdc809"
 dependencies = [
- "cargo_metadata 0.15.4",
- "cfg-expr 0.12.0",
+ "cargo_metadata 0.18.1",
+ "cfg-expr",
  "petgraph",
  "semver",
 ]
@@ -5362,9 +5363,9 @@ dependencies = [
 
 [[package]]
 name = "lightningcss"
-version = "1.0.0-alpha.54"
+version = "1.0.0-alpha.55"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07d306844e5af1753490c420c0d6ae3d814b00725092d106332762827ca8f0fe"
+checksum = "3bd5bed3814fb631bfc1e24c2be6f7e86a9837c660909acab79a38374dcb8798"
 dependencies = [
  "ahash 0.8.11",
  "bitflags 2.4.2",
@@ -6363,9 +6364,9 @@ checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
 
 [[package]]
 name = "owo-colors"
-version = "3.5.0"
+version = "4.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f"
+checksum = "caff54706df99d2a78a5a4e3455ff45448d81ef1bb63c22cd14052ca0e993a3f"
 dependencies = [
  "supports-color",
 ]
@@ -6928,7 +6929,7 @@ version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "865724d4dbe39d9f3dd3b52b88d859d66bcb2d6a0acfd5ea68a65fb66d4bdc1c"
 dependencies = [
- "env_logger",
+ "env_logger 0.10.2",
  "log",
 ]
 
@@ -8646,11 +8647,11 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc"
 
 [[package]]
 name = "supports-color"
-version = "1.3.1"
+version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8ba6faf2ca7ee42fdd458f4347ae0a9bd6bcc445ad7cb57ad82b383f18870d6f"
+checksum = "d6398cde53adc3c4557306a96ce67b302968513830a77a95b2b17305d9719a89"
 dependencies = [
- "atty",
+ "is-terminal",
  "is_ci",
 ]
 
@@ -8721,7 +8722,7 @@ version = "6.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e8e9199467bcbc77c6a13cc6e32a6af21721ab8c96aa0261856c4fda5a4433f0"
 dependencies = [
- "cfg-expr 0.15.7",
+ "cfg-expr",
  "heck 0.4.1",
  "pkg-config",
  "toml 0.8.11",

+ 38 - 15
Cargo.toml

@@ -44,7 +44,7 @@ members = [
     "packages/playwright-tests/web",
     "packages/playwright-tests/fullstack",
 ]
-exclude = ["examples/mobile_demo", "examples/openid_connect_demo",]
+exclude = ["examples/mobile_demo", "examples/openid_connect_demo"]
 
 [workspace.package]
 version = "0.5.0-alpha.0"
@@ -56,27 +56,27 @@ dioxus-lib = { path = "packages/dioxus-lib", version = "0.5.0-alpha.0" }
 dioxus-core = { path = "packages/core", version = "0.5.0-alpha.0" }
 dioxus-core-macro = { path = "packages/core-macro", version = "0.5.0-alpha.0" }
 dioxus-config-macro = { path = "packages/config-macro", version = "0.5.0-alpha.0" }
-dioxus-router = { path = "packages/router", version = "0.5.0-alpha.0"  }
+dioxus-router = { path = "packages/router", version = "0.5.0-alpha.0" }
 dioxus-router-macro = { path = "packages/router-macro", version = "0.5.0-alpha.0" }
-dioxus-html = { path = "packages/html", version = "0.5.0-alpha.0"  }
-dioxus-html-internal-macro = { path = "packages/html-internal-macro", version = "0.5.0-alpha.0"  }
+dioxus-html = { path = "packages/html", version = "0.5.0-alpha.0" }
+dioxus-html-internal-macro = { path = "packages/html-internal-macro", version = "0.5.0-alpha.0" }
 dioxus-hooks = { path = "packages/hooks", version = "0.5.0-alpha.0" }
 dioxus-web = { path = "packages/web", version = "0.5.0-alpha.0" }
 dioxus-ssr = { path = "packages/ssr", version = "0.5.0-alpha.0", default-features = false }
 dioxus-desktop = { path = "packages/desktop", version = "0.5.0-alpha.0" }
-dioxus-mobile = { path = "packages/mobile", version = "0.5.0-alpha.0"  }
+dioxus-mobile = { path = "packages/mobile", version = "0.5.0-alpha.0" }
 dioxus-interpreter-js = { path = "packages/interpreter", version = "0.5.0-alpha.0" }
-dioxus-liveview = { path = "packages/liveview", version = "0.5.0-alpha.0"  }
-dioxus-autofmt = { path = "packages/autofmt", version = "0.5.0-alpha.0"  }
-dioxus-check = { path = "packages/check", version = "0.5.0-alpha.0"  }
-dioxus-rsx = { path = "packages/rsx", version = "0.5.0-alpha.0"  }
+dioxus-liveview = { path = "packages/liveview", version = "0.5.0-alpha.0" }
+dioxus-autofmt = { path = "packages/autofmt", version = "0.5.0-alpha.0" }
+dioxus-check = { path = "packages/check", version = "0.5.0-alpha.0" }
+dioxus-rsx = { path = "packages/rsx", version = "0.5.0-alpha.0" }
 rsx-rosetta = { path = "packages/rsx-rosetta", version = "0.5.0-alpha.0" }
 dioxus-signals = { path = "packages/signals", version = "0.5.0-alpha.0" }
 dioxus-cli-config = { path = "packages/cli-config", version = "0.5.0-alpha.0" }
 generational-box = { path = "packages/generational-box", version = "0.5.0-alpha.0" }
 dioxus-hot-reload = { path = "packages/hot-reload", version = "0.5.0-alpha.0" }
 dioxus-fullstack = { path = "packages/fullstack", version = "0.5.0-alpha.0" }
-dioxus_server_macro = { path = "packages/server-macro", version = "0.5.0-alpha.0", default-features = false}
+dioxus_server_macro = { path = "packages/server-macro", version = "0.5.0-alpha.0", default-features = false }
 dioxus-ext = { path = "packages/extension", version = "0.4.0" }
 tracing = "0.1.37"
 tracing-futures = "0.2.5"
@@ -97,10 +97,13 @@ manganis-cli-support = { version = "0.2.1", features = [
 ] }
 manganis = { version = "0.2.1" }
 
+interprocess = { version = "1.2.1" }
+# interprocess = { git = "https://github.com/kotauskas/interprocess" }
+
 lru = "0.12.2"
 async-trait = "0.1.77"
 axum = "0.7.0"
-axum-server = {version = "0.6.0", default-features = false}
+axum-server = { version = "0.6.0", default-features = false }
 tower = "0.4.13"
 http = "1.0.0"
 tower-http = "0.5.1"
@@ -108,10 +111,24 @@ hyper = "1.0.0"
 hyper-rustls = "0.26.0"
 serde_json = "1.0.61"
 serde = "1.0.61"
+syn = "2.0"
+quote = "1.0"
+proc-macro2 = "1.0"
 axum_session = "0.12.1"
 axum_session_auth = "0.12.1"
 axum-extra = "0.9.2"
 reqwest = "0.11.24"
+owo-colors = "4.0.0"
+
+# Enable a small amount of optimization in debug mode
+[profile.cli-dev]
+inherits = "dev"
+opt-level = 1
+
+# Enable high optimizations for dependencies (incl. Bevy), but not for our code:
+[profile.cli-dev.package."*"]
+opt-level = 3
+
 
 # This is a "virtual package"
 # It is not meant to be published, but is used so "cargo run --example XYZ" works properly
@@ -130,9 +147,9 @@ rust-version = "1.60.0"
 publish = false
 
 [dependencies]
-manganis = { workspace = true, optional = true}
-reqwest = { version = "0.11.9", features = ["json"], optional = true}
-http-range = {version = "0.1.5", optional = true }
+manganis = { workspace = true, optional = true }
+reqwest = { version = "0.11.9", features = ["json"], optional = true }
+http-range = { version = "0.1.5", optional = true }
 
 [dev-dependencies]
 dioxus = { workspace = true, features = ["router"] }
@@ -146,7 +163,13 @@ form_urlencoded = "1.2.0"
 
 [target.'cfg(target_arch = "wasm32")'.dev-dependencies]
 getrandom = { version = "0.2.12", features = ["js"] }
-tokio = { version = "1.16.1", default-features = false, features = ["sync", "macros", "io-util", "rt", "time"] }
+tokio = { version = "1.16.1", default-features = false, features = [
+    "sync",
+    "macros",
+    "io-util",
+    "rt",
+    "time",
+] }
 
 [target.'cfg(not(target_arch = "wasm32"))'.dev-dependencies]
 tokio = { version = "1.16.1", features = ["full"] }

+ 2 - 2
packages/autofmt/Cargo.toml

@@ -13,8 +13,8 @@ keywords = ["dom", "ui", "gui", "react"]
 [dependencies]
 dioxus-rsx = { workspace = true }
 proc-macro2 = { version = "1.0.6", features = ["span-locations"] }
-quote = "1.0"
-syn = { version = "2.0", features = ["full", "extra-traits", "visit"] }
+quote = { workspace = true }
+syn = { workspace = true, features = ["full", "extra-traits", "visit"] }
 serde = { version = "1.0.136", features = ["derive"] }
 prettyplease = { workspace = true }
 

+ 4 - 4
packages/check/Cargo.toml

@@ -11,10 +11,10 @@ keywords = ["dom", "ui", "gui", "react"]
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-proc-macro2 = { version = "1.0.6", features = ["span-locations"] }
-quote = "1.0"
-syn = { version = "1.0.11", features = ["full", "extra-traits", "visit"] }
-owo-colors = { version = "3.5.0", features = ["supports-colors"] }
+proc-macro2 = { workspace = true, features = ["span-locations"] }
+quote = {workspace = true }
+syn = { workspace = true, features = ["full", "extra-traits", "visit"] }
+owo-colors = { workspace = true, features = ["supports-colors"] }
 
 [dev-dependencies]
 indoc = "2.0.3"

+ 2 - 2
packages/check/src/check.rs

@@ -77,8 +77,8 @@ fn is_component_fn(item_fn: &syn::ItemFn) -> bool {
 fn get_closure_hook_body(local: &syn::Local) -> Option<&syn::Expr> {
     if let Pat::Ident(ident) = &local.pat {
         if is_hook_ident(&ident.ident) {
-            if let Some((_, expr)) = &local.init {
-                if let syn::Expr::Closure(closure) = &**expr {
+            if let Some(init) = &local.init {
+                if let syn::Expr::Closure(closure) = init.expr.as_ref() {
                     return Some(&closure.body);
                 }
             }
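
Context for the change above: syn 2.0 replaced the tuple-shaped `Local::init` (`Option<(Eq, Box<Expr>)>`) with a `LocalInit` struct, so the match now goes through its `expr` field. A minimal sketch of the new shape, not taken from this diff:

    // syn 1.x: `Some((_, expr))` destructured the (Eq, Box<Expr>) tuple.
    // syn 2.x: `LocalInit { eq_token, expr, diverge }` - only `expr` is needed here.
    if let Some(init) = &local.init {
        if let syn::Expr::Closure(closure) = init.expr.as_ref() {
            // `closure.body` is the hook body being inspected
        }
    }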

+ 38 - 0
packages/cli-config/src/config.rs

@@ -7,12 +7,17 @@ use std::path::PathBuf;
 #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Debug)]
 #[cfg_attr(feature = "cli", derive(clap::ValueEnum))]
 pub enum Platform {
+    /// Targeting the web platform using WASM
     #[cfg_attr(feature = "cli", clap(name = "web"))]
     #[serde(rename = "web")]
     Web,
+
+    /// Targeting the desktop platform using Tao/Wry-based webview
     #[cfg_attr(feature = "cli", clap(name = "desktop"))]
     #[serde(rename = "desktop")]
     Desktop,
+
+    /// Targeting the server platform using Axum and Dioxus-Fullstack
     #[cfg_attr(feature = "cli", clap(name = "fullstack"))]
     #[serde(rename = "fullstack")]
     Fullstack,
@@ -220,10 +225,13 @@ impl Default for DioxusConfig {
 pub struct ApplicationConfig {
     #[serde(default = "default_name")]
     pub name: String,
+
     #[serde(default = "default_platform")]
     pub default_platform: Platform,
+
     #[serde(default = "out_dir_default")]
     pub out_dir: PathBuf,
+
     #[serde(default = "asset_dir_default")]
     pub asset_dir: PathBuf,
 
@@ -301,8 +309,10 @@ pub struct WebProxyConfig {
 pub struct WebWatcherConfig {
     #[serde(default = "watch_path_default")]
     pub watch_path: Vec<PathBuf>,
+
     #[serde(default)]
     pub reload_html: bool,
+
     #[serde(default = "true_bool")]
     pub index_on_404: bool,
 }
@@ -531,6 +541,34 @@ impl CrateConfig {
         self.cargo_args = cargo_args;
         self
     }
+
+    pub fn add_features(&mut self, feature: Vec<String>) -> &mut Self {
+        if let Some(features) = &mut self.features {
+            features.extend(feature);
+        } else {
+            self.features = Some(feature);
+        }
+        self
+    }
+
+    #[cfg(feature = "cli")]
+    pub fn extend_with_platform(&mut self, platform: Platform) -> &mut Self {
+        let manifest = &self.manifest;
+        let features = match platform {
+            Platform::Web if manifest.features.contains_key("web") => {
+                vec!["web".to_string()]
+            }
+            Platform::Desktop if manifest.features.contains_key("desktop") => {
+                vec!["desktop".to_string()]
+            }
+            _ => {
+                // fullstack has its own feature insertion - we use a different featureset for the client and server
+                vec![]
+            }
+        };
+        self.add_features(features);
+        self
+    }
 }
 
 fn true_bool() -> bool {
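
A rough sketch of how the new helpers above are meant to be called from a CLI command; the surrounding setup (`crate_config`, `platform`) is assumed rather than shown in this diff, and the extra feature name is hypothetical:

    // `platform` would come from CLI args or Dioxus.toml's default_platform.
    crate_config.add_features(vec!["my-extra-feature".to_string()]); // hypothetical feature name
    crate_config.extend_with_platform(platform); // adds "web"/"desktop" only if the crate declares that feature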

+ 7 - 5
packages/cli/Cargo.toml

@@ -10,7 +10,7 @@ keywords = ["react", "gui", "cli", "dioxus", "wasm"]
 
 [dependencies]
 # cli core
-clap = { version = "4.2", features = ["derive"] }
+clap = { version = "4.2", features = ["derive", "cargo"] }
 thiserror = { workspace = true }
 wasm-bindgen-cli-support = "0.2"
 colored = "2.0.0"
@@ -21,10 +21,10 @@ log = "0.4.14"
 fern = { version = "0.6.0", features = ["colored"] }
 serde = { version = "1.0.136", features = ["derive"] }
 serde_json = "1.0.79"
-toml = {workspace = true}
+toml = { workspace = true }
 fs_extra = "1.2.0"
 cargo_toml = "0.18.0"
-futures-util = { workspace = true }
+futures-util = { workspace = true, features = ["async-await-macro"] }
 notify = { version = "5.0.0-pre.16", features = ["serde"] }
 html_parser = { workspace = true }
 cargo_metadata = "0.18.1"
@@ -78,7 +78,7 @@ toml_edit = "0.21.0"
 tauri-bundler = { version = "=1.4.*", features = ["native-tls-vendored"] }
 
 # formatting
-syn = { version = "2.0" }
+syn = { workspace = true }
 prettyplease = { workspace = true }
 
 manganis-cli-support = { workspace = true, features = ["webp", "html"] }
@@ -90,8 +90,10 @@ dioxus-rsx = { workspace = true }
 dioxus-html = { workspace = true, features = ["hot-reload-context"] }
 dioxus-core = { workspace = true, features = ["serialize"] }
 dioxus-hot-reload = { workspace = true }
-interprocess-docfix = { version = "1.2.2" }
+interprocess = { workspace = true }
+# interprocess-docfix = { version = "1.2.2" }
 ignore = "0.4.22"
+env_logger = "0.11.3"
 
 [features]
 default = []

+ 0 - 45
packages/cli/build.rs

@@ -1,45 +0,0 @@
-//! Construct version in the `commit-hash date channel` format
-
-use std::{env, path::PathBuf, process::Command};
-
-fn main() {
-    set_rerun();
-    set_commit_info();
-}
-
-fn set_rerun() {
-    let mut manifest_dir = PathBuf::from(
-        env::var("CARGO_MANIFEST_DIR").expect("`CARGO_MANIFEST_DIR` is always set by cargo."),
-    );
-
-    while manifest_dir.parent().is_some() {
-        let head_ref = manifest_dir.join(".git/HEAD");
-        if head_ref.exists() {
-            println!("cargo:rerun-if-changed={}", head_ref.display());
-            return;
-        }
-
-        manifest_dir.pop();
-    }
-
-    println!("cargo:warning=Could not find `.git/HEAD` from manifest dir!");
-}
-
-fn set_commit_info() {
-    let output = match Command::new("git")
-        .arg("log")
-        .arg("-1")
-        .arg("--date=short")
-        .arg("--format=%H %h %cd")
-        .output()
-    {
-        Ok(output) if output.status.success() => output,
-        _ => return,
-    };
-    let stdout = String::from_utf8(output.stdout).unwrap();
-    let mut parts = stdout.split_whitespace();
-    let mut next = || parts.next().unwrap();
-    println!("cargo:rustc-env=RA_COMMIT_HASH={}", next());
-    println!("cargo:rustc-env=RA_COMMIT_SHORT_HASH={}", next());
-    println!("cargo:rustc-env=RA_COMMIT_DATE={}", next())
-}

+ 1 - 0
packages/cli/rustfmt.toml

@@ -0,0 +1 @@
+imports_granularity = "Crate"

+ 11 - 6
packages/cli/src/assets/autoreload.js

@@ -4,7 +4,8 @@
 (function () {
   var protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
   var url = protocol + '//' + window.location.host + '/_dioxus/ws';
-  var poll_interval = 8080;
+    var poll_interval = 8080;
+
   var reload_upon_connect = () => {
       window.setTimeout(
           () => {
@@ -15,11 +16,15 @@
           poll_interval);
   };
 
-  var ws = new WebSocket(url);
-  ws.onmessage = (ev) => {
-      if (ev.data == "reload") {
-          window.location.reload();
-      }
+    var ws = new WebSocket(url);
+
+    ws.onmessage = (ev) => {
+        console.log("Received message: ", ev, ev.data);
+
+        if (ev.data == "reload") {
+            window.location.reload();
+        }
   };
+
   ws.onclose = reload_upon_connect;
 })()

+ 30 - 15
packages/cli/src/builder.rs

@@ -3,10 +3,9 @@ use crate::{
     error::{Error, Result},
     tools::Tool,
 };
+use anyhow::Context;
 use cargo_metadata::{diagnostic::Diagnostic, Message};
-use dioxus_cli_config::crate_root;
-use dioxus_cli_config::CrateConfig;
-use dioxus_cli_config::ExecutableType;
+use dioxus_cli_config::{crate_root, CrateConfig, ExecutableType};
 use indicatif::{ProgressBar, ProgressStyle};
 use lazy_static::lazy_static;
 use manganis_cli_support::{AssetManifest, ManganisSupportGuard};
@@ -16,6 +15,7 @@ use std::{
     io::Read,
     panic,
     path::PathBuf,
+    process::Command,
     time::Duration,
 };
 use wasm_bindgen_cli_support::Bindgen;
@@ -65,9 +65,8 @@ impl ExecWithRustFlagsSetter for subprocess::Exec {
 
 /// Build client (WASM).
 /// Note: `rust_flags` argument is only used for the fullstack platform.
-pub fn build(
+pub fn build_web(
     config: &CrateConfig,
-    _: bool,
     skip_assets: bool,
     rust_flags: Option<String>,
 ) -> Result<BuildResult> {
@@ -100,15 +99,17 @@ pub fn build(
     // [1] Build the .wasm module
     log::info!("🚅 Running build command...");
 
-    let wasm_check_command = std::process::Command::new("rustup")
-        .args(["show"])
-        .output()?;
-    let wasm_check_output = String::from_utf8(wasm_check_command.stdout).unwrap();
-    if !wasm_check_output.contains("wasm32-unknown-unknown") {
-        log::info!("wasm32-unknown-unknown target not detected, installing..");
-        let _ = std::process::Command::new("rustup")
-            .args(["target", "add", "wasm32-unknown-unknown"])
-            .output()?;
+    // If the user has rustup, we can check if the wasm32-unknown-unknown target is installed
+    // Otherwise we can just assume it is installed - which is not great...
+    // Eventually we can poke at the errors and let the user know they need to install the target
+    if let Ok(wasm_check_command) = Command::new("rustup").args(["show"]).output() {
+        let wasm_check_output = String::from_utf8(wasm_check_command.stdout).unwrap();
+        if !wasm_check_output.contains("wasm32-unknown-unknown") {
+            log::info!("wasm32-unknown-unknown target not detected, installing..");
+            let _ = Command::new("rustup")
+                .args(["target", "add", "wasm32-unknown-unknown"])
+                .output()?;
+        }
     }
 
     let cmd = subprocess::Exec::cmd("cargo")
@@ -163,9 +164,10 @@ pub fn build(
     let input_path = warning_messages
         .output_location
         .as_ref()
-        .unwrap()
+        .context("No output location found")?
         .with_extension("wasm");
 
+    log::info!("Running wasm-bindgen");
     let bindgen_result = panic::catch_unwind(move || {
         // [3] Bindgen the final binary for use easy linking
         let mut bindgen_builder = Bindgen::new();
@@ -183,11 +185,13 @@ pub fn build(
             .generate(&bindgen_outdir)
             .unwrap();
     });
+
     if bindgen_result.is_err() {
         return Err(Error::BuildFailed("Bindgen build failed! \nThis is probably due to the Bindgen version, dioxus-cli using `0.2.81` Bindgen crate.".to_string()));
     }
 
     // check binaryen:wasm-opt tool
+    log::info!("Running optimization with wasm-opt...");
     let dioxus_tools = dioxus_config.application.tools.clone();
     if dioxus_tools.contains_key("binaryen") {
         let info = dioxus_tools.get("binaryen").unwrap();
@@ -221,6 +225,8 @@ pub fn build(
                 "Binaryen tool not found, you can use `dx tool add binaryen` to install it."
             );
         }
+    } else {
+        log::info!("Skipping optimization with wasm-opt, binaryen tool not found.");
     }
 
     // [5][OPTIONAL] If tailwind is enabled and installed we run it to generate the CSS
@@ -271,6 +277,8 @@ pub fn build(
         content_only: false,
         depth: 0,
     };
+
+    log::info!("Copying public assets to the output directory...");
     if asset_dir.is_dir() {
         for entry in std::fs::read_dir(config.asset_dir())?.flatten() {
             let path = entry.path();
@@ -294,6 +302,7 @@ pub fn build(
         }
     }
 
+    log::info!("Processing assets");
     let assets = if !skip_assets {
         let assets = asset_manifest(executable.executable(), config);
         process_assets(config, &assets)?;
@@ -326,6 +335,12 @@ pub fn build_desktop(
     let _manganis_support = ManganisSupportGuard::default();
     let _guard = AssetConfigDropGuard::new();
 
+    // set the asset dir via cli args
+    env::set_var(
+        "DIOXUS_ASSET_DIR",
+        config.asset_dir().canonicalize().unwrap(),
+    );
+
     let mut cmd = subprocess::Exec::cmd("cargo")
         .set_rust_flags(rust_flags)
         .env("CARGO_TARGET_DIR", &config.target_dir)

+ 4 - 5
packages/cli/src/cli/build.rs

@@ -1,5 +1,4 @@
-use crate::assets::AssetConfigDropGuard;
-use crate::server::fullstack;
+use crate::{assets::AssetConfigDropGuard, server::fullstack};
 use dioxus_cli_config::Platform;
 
 use super::*;
@@ -51,6 +50,7 @@ impl Build {
         }
 
         crate_config.set_cargo_args(self.build.cargo_args.clone());
+        crate_config.extend_with_platform(platform);
 
         // #[cfg(feature = "plugin")]
         // let _ = crate::plugin::PluginManager::on_build_start(&crate_config, &platform);
@@ -58,7 +58,7 @@ impl Build {
         let build_result = match platform {
             Platform::Web => {
                 // `rust_flags` are used by fullstack's client build.
-                crate::builder::build(&crate_config, false, self.build.skip_assets, rust_flags)?
+                crate::builder::build_web(&crate_config, self.build.skip_assets, rust_flags)?
             }
             Platform::Desktop => {
                 // Since desktop platform doesn't use `rust_flags`, this
@@ -81,9 +81,8 @@ impl Build {
                         }
                         None => web_config.features = Some(vec![web_feature]),
                     };
-                    crate::builder::build(
+                    crate::builder::build_web(
                         &web_config,
-                        false,
                         self.build.skip_assets,
                         Some(client_rust_flags),
                     )?;

+ 3 - 0
packages/cli/src/cli/bundle.rs

@@ -82,6 +82,9 @@ impl Bundle {
         }
 
         crate_config.set_cargo_args(self.build.cargo_args);
+        if let Some(platform) = self.build.platform {
+            crate_config.extend_with_platform(platform);
+        }
 
         // build the desktop app
         // Since the `bundle()` function is only run for the desktop platform,

+ 3 - 3
packages/cli/src/cli/cfg.rs

@@ -85,8 +85,8 @@ pub struct ConfigOptsServe {
     #[clap(default_value_t = 8080)]
     pub port: u16,
 
-    /// Open the app in the default browser [default: false]
-    #[clap(long)]
+    /// Open the app in the default browser [default: true]
+    #[clap(long, default_value_t = true)]
     #[serde(default)]
     pub open: bool,
 
@@ -178,7 +178,7 @@ pub struct ConfigOptsBundle {
 
     /// Build platform: support Web & Desktop [default: "default_platform"]
     #[clap(long)]
-    pub platform: Option<String>,
+    pub platform: Option<Platform>,
 
     /// Space separated list of features to activate
     #[clap(long)]

+ 1 - 1
packages/cli/src/cli/create.rs

@@ -2,7 +2,7 @@ use super::*;
 use cargo_generate::{GenerateArgs, TemplatePath};
 
 #[derive(Clone, Debug, Default, Deserialize, Parser)]
-#[clap(name = "create")]
+#[clap(name = "new")]
 pub struct Create {
     /// Template path
     #[clap(default_value = "gh:dioxuslabs/dioxus-template", long)]

+ 5 - 10
packages/cli/src/cli/mod.rs

@@ -10,7 +10,6 @@ pub mod init;
 pub mod plugin;
 pub mod serve;
 pub mod translate;
-pub mod version;
 
 use crate::{
     cfg::{ConfigOptsBuild, ConfigOptsServe},
@@ -57,10 +56,11 @@ pub enum Commands {
     /// Build, watch & serve the Rust WASM app and all of its assets.
     Serve(serve::Serve),
 
-    /// Create a new project for Dioxus.
-    Create(create::Create),
+    /// Create a new project for Dioxus.
+    New(create::Create),
 
-    /// Init a new project for Dioxus
+    /// Init a new project for Dioxus in an existing directory.
+    /// Will attempt to keep your project in a good state
     Init(init::Init),
 
     /// Clean output artifacts.
@@ -69,10 +69,6 @@ pub enum Commands {
     /// Bundle the Rust desktop app and all of its assets.
     Bundle(bundle::Bundle),
 
-    /// Print the version of this extension
-    #[clap(name = "version")]
-    Version(version::Version),
-
     /// Format some rsx
     #[clap(name = "fmt")]
     Autoformat(autoformat::Autoformat),
@@ -97,11 +93,10 @@ impl Display for Commands {
             Commands::Build(_) => write!(f, "build"),
             Commands::Translate(_) => write!(f, "translate"),
             Commands::Serve(_) => write!(f, "serve"),
-            Commands::Create(_) => write!(f, "create"),
+            Commands::New(_) => write!(f, "create"),
             Commands::Init(_) => write!(f, "init"),
             Commands::Clean(_) => write!(f, "clean"),
             Commands::Config(_) => write!(f, "config"),
-            Commands::Version(_) => write!(f, "version"),
             Commands::Autoformat(_) => write!(f, "fmt"),
             Commands::Check(_) => write!(f, "check"),
             Commands::Bundle(_) => write!(f, "bundle"),

+ 7 - 16
packages/cli/src/cli/serve.rs

@@ -60,25 +60,16 @@ impl Serve {
         }
 
         let platform = platform.unwrap_or(crate_config.dioxus_config.application.default_platform);
+        crate_config.extend_with_platform(platform);
 
+        // start the develop server
+        use server::{desktop, fullstack, web};
         match platform {
-            Platform::Web => {
-                // start the develop server
-                server::web::startup(
-                    self.serve.port,
-                    crate_config.clone(),
-                    self.serve.open,
-                    self.serve.skip_assets,
-                )
-                .await?;
-            }
-            Platform::Desktop => {
-                server::desktop::startup(crate_config.clone(), &serve_cfg).await?;
-            }
-            Platform::Fullstack => {
-                server::fullstack::startup(crate_config.clone(), &serve_cfg).await?;
-            }
+            Platform::Web => web::startup(crate_config.clone(), &serve_cfg).await?,
+            Platform::Desktop => desktop::startup(crate_config.clone(), &serve_cfg).await?,
+            Platform::Fullstack => fullstack::startup(crate_config.clone(), &serve_cfg).await?,
         }
+
         Ok(())
     }
 

+ 0 - 76
packages/cli/src/cli/version.rs

@@ -1,76 +0,0 @@
-use super::*;
-
-/// Print the version of this extension
-#[derive(Clone, Debug, Parser)]
-#[clap(name = "version")]
-pub struct Version {}
-
-impl Version {
-    pub fn version(self) -> VersionInfo {
-        version()
-    }
-}
-
-use std::fmt;
-
-/// Information about the git repository where rust-analyzer was built from.
-pub struct CommitInfo {
-    pub short_commit_hash: &'static str,
-    pub commit_hash: &'static str,
-    pub commit_date: &'static str,
-}
-
-/// Cargo's version.
-pub struct VersionInfo {
-    /// rust-analyzer's version, such as "1.57.0", "1.58.0-beta.1", "1.59.0-nightly", etc.
-    pub version: &'static str,
-
-    /// The release channel we were built for (stable/beta/nightly/dev).
-    ///
-    /// `None` if not built via rustbuild.
-    pub release_channel: Option<&'static str>,
-
-    /// Information about the Git repository we may have been built from.
-    ///
-    /// `None` if not built from a git repo.
-    pub commit_info: Option<CommitInfo>,
-}
-
-impl fmt::Display for VersionInfo {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.version)?;
-
-        if let Some(ci) = &self.commit_info {
-            write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
-        };
-        Ok(())
-    }
-}
-
-/// Returns information about cargo's version.
-pub const fn version() -> VersionInfo {
-    let version = match option_env!("CARGO_PKG_VERSION") {
-        Some(x) => x,
-        None => "0.0.0",
-    };
-
-    let release_channel = option_env!("CFG_RELEASE_CHANNEL");
-    let commit_info = match (
-        option_env!("RA_COMMIT_SHORT_HASH"),
-        option_env!("RA_COMMIT_HASH"),
-        option_env!("RA_COMMIT_DATE"),
-    ) {
-        (Some(short_commit_hash), Some(commit_hash), Some(commit_date)) => Some(CommitInfo {
-            short_commit_hash,
-            commit_hash,
-            commit_date,
-        }),
-        _ => None,
-    };
-
-    VersionInfo {
-        version,
-        release_channel,
-        commit_info,
-    }
-}
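
With the build script and `version` subcommand removed, version reporting presumably falls back on clap's `cargo` feature (enabled in the CLI's Cargo.toml above) and `crate_version!()`. A hedged sketch of that pattern; the real `Cli` struct's attributes are not part of this diff:

    // clap's "cargo" feature provides crate_version!(), which reads
    // CARGO_PKG_VERSION at compile time - no build.rs required.
    #[derive(clap::Parser)]
    #[clap(name = "dx", version = clap::crate_version!())]
    struct Cli {
        // subcommands, --bin flag, etc.
    }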

+ 0 - 2
packages/cli/src/lib.rs

@@ -2,8 +2,6 @@
 #![doc(html_logo_url = "https://avatars.githubusercontent.com/u/79236386")]
 #![doc(html_favicon_url = "https://avatars.githubusercontent.com/u/79236386")]
 
-pub const DIOXUS_CLI_VERSION: &str = "0.4.1";
-
 mod assets;
 pub mod builder;
 pub mod server;

+ 34 - 37
packages/cli/src/main.rs

@@ -7,47 +7,21 @@ use dioxus_cli::*;
 
 use Commands::*;
 
-fn get_bin(bin: Option<String>) -> Result<PathBuf> {
-    let metadata = cargo_metadata::MetadataCommand::new()
-        .exec()
-        .map_err(Error::CargoMetadata)?;
-    let package = if let Some(bin) = bin {
-        metadata
-            .workspace_packages()
-            .into_iter()
-            .find(|p| p.name == bin)
-            .ok_or(Error::CargoError(format!("no such package: {}", bin)))?
-    } else {
-        metadata
-            .root_package()
-            .ok_or(Error::CargoError("no root package?".to_string()))?
-    };
-
-    let crate_dir = package
-        .manifest_path
-        .parent()
-        .ok_or(Error::CargoError("couldn't take parent dir".to_string()))?;
-
-    Ok(crate_dir.into())
-}
-
-/// Simplifies error messages that use the same pattern.
-fn error_wrapper(message: &str) -> String {
-    format!("🚫 {message}:")
-}
-
 #[tokio::main]
 async fn main() -> anyhow::Result<()> {
     let args = Cli::parse();
 
-    set_up_logging();
+    #[cfg(debug_assertions)]
+    env_logger::init();
+
+    // set_up_logging();
 
     match args.action {
         Translate(opts) => opts
             .translate()
             .context(error_wrapper("Translation of HTML into RSX failed")),
 
-        Create(opts) => opts
+        New(opts) => opts
             .create()
             .context(error_wrapper("Creating new project failed")),
 
@@ -74,12 +48,6 @@ async fn main() -> anyhow::Result<()> {
             .await
             .context(error_wrapper("Error checking RSX")),
 
-        Version(opt) => {
-            let version = opt.version();
-            println!("{}", version);
-
-            Ok(())
-        }
         action => {
             let bin = get_bin(args.bin)?;
             let _dioxus_config = DioxusConfig::load(Some(bin.clone()))
@@ -119,3 +87,32 @@ async fn main() -> anyhow::Result<()> {
         }
     }
 }
+
+fn get_bin(bin: Option<String>) -> Result<PathBuf> {
+    let metadata = cargo_metadata::MetadataCommand::new()
+        .exec()
+        .map_err(Error::CargoMetadata)?;
+    let package = if let Some(bin) = bin {
+        metadata
+            .workspace_packages()
+            .into_iter()
+            .find(|p| p.name == bin)
+            .ok_or(Error::CargoError(format!("no such package: {}", bin)))?
+    } else {
+        metadata
+            .root_package()
+            .ok_or(Error::CargoError("no root package?".to_string()))?
+    };
+
+    let crate_dir = package
+        .manifest_path
+        .parent()
+        .ok_or(Error::CargoError("couldn't take parent dir".to_string()))?;
+
+    Ok(crate_dir.into())
+}
+
+/// Simplifies error messages that use the same pattern.
+fn error_wrapper(message: &str) -> String {
+    format!("🚫 {message}:")
+}
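
The logging change above swaps the fern-based setup for `env_logger` in debug builds, so verbosity is chosen at runtime via `RUST_LOG`. A small sketch of that behavior; the module filter name is an assumption:

    // In debug builds, standard env_logger filtering applies, e.g.
    //   RUST_LOG=info dx serve
    //   RUST_LOG=dioxus_cli=debug dx serve   (module name assumed)
    #[cfg(debug_assertions)]
    env_logger::init();
    log::info!("shown whenever RUST_LOG permits the `info` level");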

+ 18 - 15
packages/cli/src/server/desktop/mod.rs

@@ -1,20 +1,18 @@
-use crate::server::Platform;
 use crate::{
     cfg::ConfigOptsServe,
     server::{
         output::{print_console_info, PrettierOptions},
-        setup_file_watcher,
+        setup_file_watcher, Platform,
     },
     BuildResult, Result,
 };
 use dioxus_cli_config::CrateConfig;
-
 use dioxus_hot_reload::HotReloadMsg;
 use dioxus_html::HtmlCtx;
 use dioxus_rsx::hot_reload::*;
-use interprocess_docfix::local_socket::LocalSocketListener;
-use std::fs::create_dir_all;
+use interprocess::local_socket::LocalSocketListener;
 use std::{
+    fs::create_dir_all,
     process::{Child, Command},
     sync::{Arc, Mutex, RwLock},
 };
@@ -33,13 +31,7 @@ pub(crate) async fn startup_with_platform<P: Platform + Send + 'static>(
     config: CrateConfig,
     serve_cfg: &ConfigOptsServe,
 ) -> Result<()> {
-    // ctrl-c shutdown checker
-    let _crate_config = config.clone();
-    let _ = ctrlc::set_handler(move || {
-        #[cfg(feature = "plugin")]
-        let _ = PluginManager::on_serve_shutdown(&_crate_config);
-        std::process::exit(0);
-    });
+    set_ctrl_c(&config);
 
     let hot_reload_state = match config.hot_reload {
         true => {
@@ -67,6 +59,16 @@ pub(crate) async fn startup_with_platform<P: Platform + Send + 'static>(
     Ok(())
 }
 
+fn set_ctrl_c(config: &CrateConfig) {
+    // ctrl-c shutdown checker
+    let _crate_config = config.clone();
+    let _ = ctrlc::set_handler(move || {
+        #[cfg(feature = "plugin")]
+        let _ = PluginManager::on_serve_shutdown(&_crate_config);
+        std::process::exit(0);
+    });
+}
+
 /// Start the server without hot reload
 async fn serve<P: Platform + Send + 'static>(
     config: CrateConfig,
@@ -136,7 +138,7 @@ async fn start_desktop_hot_reload(hot_reload_state: HotReloadState) -> Result<()
                                         .unwrap()
                                         .map
                                         .values()
-                                        .filter_map(|(_, template_slot)| *template_slot)
+                                        .flat_map(|v| v.templates.values().copied())
                                         .collect()
                                 };
                                 for template in templates {
@@ -169,12 +171,13 @@ async fn start_desktop_hot_reload(hot_reload_state: HotReloadState) -> Result<()
 
             let mut hot_reload_rx = hot_reload_state.messages.subscribe();
 
-            while let Ok(template) = hot_reload_rx.recv().await {
+            while let Ok(msg) = hot_reload_rx.recv().await {
                 let channels = &mut *channels.lock().unwrap();
                 let mut i = 0;
+
                 while i < channels.len() {
                     let channel = &mut channels[i];
-                    if send_msg(HotReloadMsg::UpdateTemplate(template), channel) {
+                    if send_msg(msg.clone(), channel) {
                         i += 1;
                     } else {
                         channels.remove(i);

+ 314 - 114
packages/cli/src/server/mod.rs

@@ -3,10 +3,15 @@ use dioxus_cli_config::CrateConfig;
 
 use cargo_metadata::diagnostic::Diagnostic;
 use dioxus_core::Template;
+use dioxus_hot_reload::HotReloadMsg;
 use dioxus_html::HtmlCtx;
 use dioxus_rsx::hot_reload::*;
+use fs_extra::dir::CopyOptions;
 use notify::{RecommendedWatcher, Watcher};
-use std::sync::{Arc, Mutex};
+use std::{
+    path::PathBuf,
+    sync::{Arc, Mutex},
+};
 use tokio::sync::broadcast::{self};
 
 mod output;
@@ -15,7 +20,31 @@ pub mod desktop;
 pub mod fullstack;
 pub mod web;
 
-/// Sets up a file watcher
+#[derive(Clone)]
+pub struct HotReloadState {
+    /// Pending hotreload updates to be sent to all connected clients
+    pub messages: broadcast::Sender<HotReloadMsg>,
+
+    /// The file map that tracks the state of the project
+    pub file_map: SharedFileMap,
+}
+type SharedFileMap = Arc<Mutex<FileMap<HtmlCtx>>>;
+
+impl HotReloadState {
+    pub fn all_templates(&self) -> Vec<Template> {
+        self.file_map
+            .lock()
+            .unwrap()
+            .map
+            .values()
+            .flat_map(|v| v.templates.values().copied())
+            .collect()
+    }
+}
+
+/// Sets up a file watcher.
+///
+/// Will attempt to hotreload HTML, RSX (.rs), and CSS
 async fn setup_file_watcher<F: Fn() -> Result<BuildResult> + Send + 'static>(
     build_with: F,
     config: &CrateConfig,
@@ -25,124 +54,265 @@ async fn setup_file_watcher<F: Fn() -> Result<BuildResult> + Send + 'static>(
     let mut last_update_time = chrono::Local::now().timestamp();
 
     // file watcher: check file change
-    let allow_watch_path = config.dioxus_config.web.watcher.watch_path.clone();
-
-    let watcher_config = config.clone();
-    let mut watcher = notify::recommended_watcher(move |info: notify::Result<notify::Event>| {
-        let config = watcher_config.clone();
-        if let Ok(e) = info {
-            match e.kind {
-                notify::EventKind::Create(_)
-                | notify::EventKind::Remove(_)
-                | notify::EventKind::Modify(_) => {
-                    if chrono::Local::now().timestamp() > last_update_time {
-                        let mut needs_full_rebuild;
-                        if let Some(hot_reload) = &hot_reload {
-                            // find changes to the rsx in the file
-                            let mut rsx_file_map = hot_reload.file_map.lock().unwrap();
-                            let mut messages: Vec<Template> = Vec::new();
-
-                            // In hot reload mode, we only need to rebuild if non-rsx code is changed
-                            needs_full_rebuild = false;
-
-                            for path in &e.paths {
-                                // if this is not a rust file, rebuild the whole project
-                                let path_extension = path.extension().and_then(|p| p.to_str());
-                                if path_extension != Some("rs") {
-                                    needs_full_rebuild = true;
-                                    // if backup file generated will impact normal hot-reload, so ignore it
-                                    if path_extension == Some("rs~") {
-                                        needs_full_rebuild = false;
-                                    }
-                                    break;
-                                }
-
-                                // Workaround for notify and vscode-like editor:
-                                // when edit & save a file in vscode, there will be two notifications,
-                                // the first one is a file with empty content.
-                                // filter the empty file notification to avoid false rebuild during hot-reload
-                                if let Ok(metadata) = fs::metadata(path) {
-                                    if metadata.len() == 0 {
-                                        continue;
-                                    }
-                                }
-
-                                match rsx_file_map.update_rsx(path, &config.crate_dir) {
-                                    Ok(UpdateResult::UpdatedRsx(msgs)) => {
-                                        messages.extend(msgs);
-                                        needs_full_rebuild = false;
-                                    }
-                                    Ok(UpdateResult::NeedsRebuild) => {
-                                        needs_full_rebuild = true;
-                                    }
-                                    Err(err) => {
-                                        log::error!("{}", err);
-                                    }
-                                }
-                            }
-
-                            if needs_full_rebuild {
-                                // Reset the file map to the new state of the project
-                                let FileMapBuildResult {
-                                    map: new_file_map,
-                                    errors,
-                                } = FileMap::<HtmlCtx>::create(config.crate_dir.clone()).unwrap();
-
-                                for err in errors {
-                                    log::error!("{}", err);
-                                }
-
-                                *rsx_file_map = new_file_map;
-                            } else {
-                                for msg in messages {
-                                    let _ = hot_reload.messages.send(msg);
-                                }
-                            }
-                        } else {
-                            needs_full_rebuild = true;
-                        }
-
-                        if needs_full_rebuild {
-                            match build_with() {
-                                Ok(res) => {
-                                    last_update_time = chrono::Local::now().timestamp();
-
-                                    #[allow(clippy::redundant_clone)]
-                                    print_console_info(
-                                        &config,
-                                        PrettierOptions {
-                                            changed: e.paths.clone(),
-                                            warnings: res.warnings,
-                                            elapsed_time: res.elapsed_time,
-                                        },
-                                        web_info.clone(),
-                                    );
-                                }
-                                Err(e) => {
-                                    last_update_time = chrono::Local::now().timestamp();
-                                    log::error!("{:?}", e);
-                                }
-                            }
-                        }
-                    }
-                }
-                _ => {}
-            }
+    let mut allow_watch_path = config.dioxus_config.web.watcher.watch_path.clone();
+
+    // Extend the watch path to include the assets directory - this is so we can hotreload CSS and other assets
+    allow_watch_path.push(config.dioxus_config.application.asset_dir.clone());
+
+    // Create the file watcher
+    let mut watcher = notify::recommended_watcher({
+        let watcher_config = config.clone();
+        move |info: notify::Result<notify::Event>| {
+            let Ok(e) = info else {
+                return;
+            };
+
+            watch_event(
+                e,
+                &mut last_update_time,
+                &hot_reload,
+                &watcher_config,
+                &build_with,
+                &web_info,
+            );
         }
     })
-    .unwrap();
+    .expect("Failed to create file watcher - please ensure you have the required permissions to watch the specified directories.");
 
+    // Watch the specified paths
     for sub_path in allow_watch_path {
-        if let Err(err) = watcher.watch(
-            &config.crate_dir.join(sub_path),
-            notify::RecursiveMode::Recursive,
-        ) {
+        let path = &config.crate_dir.join(sub_path);
+        let mode = notify::RecursiveMode::Recursive;
+
+        if let Err(err) = watcher.watch(path, mode) {
             log::warn!("Failed to watch path: {}", err);
         }
     }
+
     Ok(watcher)
 }
 
+fn watch_event<F>(
+    event: notify::Event,
+    last_update_time: &mut i64,
+    hot_reload: &Option<HotReloadState>,
+    config: &CrateConfig,
+    build_with: &F,
+    web_info: &Option<WebServerInfo>,
+) where
+    F: Fn() -> Result<BuildResult> + Send + 'static,
+{
+    // Ensure that we're tracking only modifications
+    if !matches!(
+        event.kind,
+        notify::EventKind::Create(_) | notify::EventKind::Remove(_) | notify::EventKind::Modify(_)
+    ) {
+        return;
+    }
+
+    // Ensure that we're not rebuilding too frequently
+    if chrono::Local::now().timestamp() <= *last_update_time {
+        return;
+    }
+
+    // By default we want to not do a full rebuild, and instead let the hot reload system invalidate it
+    let mut needs_full_rebuild = false;
+
+    if let Some(hot_reload) = &hot_reload {
+        hotreload_files(hot_reload, &mut needs_full_rebuild, &event, config);
+    }
+
+    if needs_full_rebuild {
+        full_rebuild(build_with, last_update_time, config, event, web_info);
+    }
+}
+
+fn full_rebuild<F>(
+    build_with: &F,
+    last_update_time: &mut i64,
+    config: &CrateConfig,
+    event: notify::Event,
+    web_info: &Option<WebServerInfo>,
+) where
+    F: Fn() -> Result<BuildResult> + Send + 'static,
+{
+    match build_with() {
+        Ok(res) => {
+            *last_update_time = chrono::Local::now().timestamp();
+
+            #[allow(clippy::redundant_clone)]
+            print_console_info(
+                config,
+                PrettierOptions {
+                    changed: event.paths.clone(),
+                    warnings: res.warnings,
+                    elapsed_time: res.elapsed_time,
+                },
+                web_info.clone(),
+            );
+        }
+        Err(e) => {
+            *last_update_time = chrono::Local::now().timestamp();
+            log::error!("{:?}", e);
+        }
+    }
+}
+
+fn hotreload_files(
+    hot_reload: &HotReloadState,
+    needs_full_rebuild: &mut bool,
+    event: &notify::Event,
+    config: &CrateConfig,
+) {
+    // find changes to the rsx in the file
+    let mut rsx_file_map = hot_reload.file_map.lock().unwrap();
+    let mut messages: Vec<HotReloadMsg> = Vec::new();
+
+    for path in &event.paths {
+        // Attempt to hotreload this file
+        let is_potentially_reloadable = hotreload_file(
+            path,
+            config,
+            &rsx_file_map,
+            &mut messages,
+            needs_full_rebuild,
+        );
+
+        // If the file was not hotreloaded, continue
+        if is_potentially_reloadable.is_none() {
+            continue;
+        }
+
+        // If the file was hotreloaded, update the file map in place
+        match rsx_file_map.update_rsx(path, &config.crate_dir) {
+            Ok(UpdateResult::UpdatedRsx(msgs)) => {
+                messages.extend(msgs.into_iter().map(HotReloadMsg::UpdateTemplate));
+            }
+
+            // If the file was not updated, we need to do a full rebuild
+            Ok(UpdateResult::NeedsRebuild) => {
+                log::trace!("Needs full rebuild because file changed: {:?}", path);
+                *needs_full_rebuild = true;
+            }
+
+            // Not necessarily a fatal error, but we should log it
+            Err(err) => log::error!("{}", err),
+        }
+    }
+
+    // If full rebuild, extend the file map with the new file map
+    // This will wipe away any previous cached changed templates
+    if *needs_full_rebuild {
+        // Reset the file map to the new state of the project
+        let FileMapBuildResult {
+            map: new_file_map,
+            errors,
+        } = FileMap::<HtmlCtx>::create(config.crate_dir.clone()).unwrap();
+
+        for err in errors {
+            log::error!("{}", err);
+        }
+
+        *rsx_file_map = new_file_map;
+
+        return;
+    }
+
+    for msg in messages {
+        let _ = hot_reload.messages.send(msg);
+    }
+}
+
+fn hotreload_file(
+    path: &Path,
+    config: &CrateConfig,
+    rsx_file_map: &std::sync::MutexGuard<'_, FileMap<HtmlCtx>>,
+    messages: &mut Vec<HotReloadMsg>,
+    needs_full_rebuild: &mut bool,
+) -> Option<()> {
+    // For various assets that might be linked in, we just try to hotreload them forcefully.
+    // That is, unless they appear in an include! macro, in which case we need to do a full rebuild.
+    let ext = path.extension().and_then(|v| v.to_str())?;
+
+    // Workaround for notify and vscode-like editor:
+    // when edit & save a file in vscode, there will be two notifications,
+    // the first one is a file with empty content.
+    // filter the empty file notification to avoid false rebuild during hot-reload
+    if let Ok(metadata) = fs::metadata(path) {
+        if metadata.len() == 0 {
+            return None;
+        }
+    }
+
+    // If the extension is a backup file, or a hidden file, ignore it completely (no rebuilds)
+    if is_backup_file(path) {
+        log::trace!("Ignoring backup file: {:?}", path);
+        return None;
+    }
+
+    // Attempt to hotreload css in the asset directory
+    // Currently no other assets are hotreloaded, but in theory we could hotreload pngs/jpegs, etc
+    //
+    // All potential hotreloadable mime types:
+    // "bin" |"css" | "csv" | "html" | "ico" | "js" | "json" | "jsonld" | "mjs" | "rtf" | "svg" | "mp4"
+    if ext == "css" {
+        let asset_dir = config
+            .crate_dir
+            .join(&config.dioxus_config.application.asset_dir);
+
+        // Only if the CSS is in the asset directory, and we're tracking it, do we hotreload it
+        // Otherwise, we need to do a full rebuild since the user might be doing an include_str! on it
+        if attempt_css_reload(path, asset_dir, rsx_file_map, config, messages).is_none() {
+            *needs_full_rebuild = true;
+        }
+
+        return None;
+    }
+
+    // Anything that is neither rsx (.rs) nor css can't be hotreloaded, so request a full rebuild
+    if ext != "rs" && ext != "css" {
+        *needs_full_rebuild = true;
+        return None;
+    }
+
+    Some(())
+}
+
+fn attempt_css_reload(
+    path: &Path,
+    asset_dir: PathBuf,
+    rsx_file_map: &std::sync::MutexGuard<'_, FileMap<HtmlCtx>>,
+    config: &CrateConfig,
+    messages: &mut Vec<HotReloadMsg>,
+) -> Option<()> {
+    // If the path is not in the asset directory, return
+    if !path.starts_with(asset_dir) {
+        return None;
+    }
+
+    // Get the local path of the asset (ie var.css or some_dir/var.css as long as the dir is under the asset dir)
+    let local_path = local_path_of_asset(path)?;
+
+    // Make sure we're actually tracking this asset...
+    _ = rsx_file_map.is_tracking_asset(&local_path)?;
+
+    // copy the asset over to the output directory
+    // todo: this whole css hotreloading should be less hacky and more robust
+    _ = fs_extra::copy_items(
+        &[path],
+        config.out_dir(),
+        &CopyOptions::new().overwrite(true),
+    );
+
+    messages.push(HotReloadMsg::UpdateAsset(local_path));
+
+    Some(())
+}
+
+fn local_path_of_asset(path: &Path) -> Option<PathBuf> {
+    path.file_name()?.to_str()?.to_string().parse().ok()
+}
+
 pub(crate) trait Platform {
     fn start(config: &CrateConfig, serve: &ConfigOptsServe) -> Result<Self>
     where
@@ -150,8 +320,38 @@ pub(crate) trait Platform {
     fn rebuild(&mut self, config: &CrateConfig) -> Result<BuildResult>;
 }
 
-#[derive(Clone)]
-pub struct HotReloadState {
-    pub messages: broadcast::Sender<Template>,
-    pub file_map: Arc<Mutex<FileMap<HtmlCtx>>>,
+fn is_backup_file(path: &Path) -> bool {
+    // If there's a tilde at the end of the file, it's a backup file
+    if let Some(name) = path.file_name() {
+        if let Some(name) = name.to_str() {
+            if name.ends_with('~') {
+                return true;
+            }
+        }
+    }
+
+    // if the file is hidden, it's a backup file
+    if let Some(name) = path.file_name() {
+        if let Some(name) = name.to_str() {
+            if name.starts_with('.') {
+                return true;
+            }
+        }
+    }
+
+    false
+}
+
+#[test]
+fn test_is_backup_file() {
+    assert!(is_backup_file(&PathBuf::from("examples/test.rs~")));
+    assert!(is_backup_file(&PathBuf::from("examples/.back")));
+    assert!(is_backup_file(&PathBuf::from("test.rs~")));
+    assert!(is_backup_file(&PathBuf::from(".back")));
+
+    assert!(!is_backup_file(&PathBuf::from("val.rs")));
+    assert!(!is_backup_file(&PathBuf::from(
+        "/Users/jonkelley/Development/Tinkering/basic_05_example/src/lib.rs"
+    )));
+    assert!(!is_backup_file(&PathBuf::from("exmaples/val.rs")));
 }
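
A side note on the helpers above: `local_path_of_asset` keeps only the final path component, so a nested asset path collapses to its bare file name before being matched against the tracked assets. A minimal sketch of that behavior (the test name and paths are made up for illustration):

    #[test]
    fn local_path_keeps_only_the_file_name() {
        // Path::file_name() drops every parent component, so a nested stylesheet
        // is looked up (and hot reloaded) by its bare file name.
        assert_eq!(
            local_path_of_asset(Path::new("assets/style/main.css")),
            Some(PathBuf::from("main.css"))
        );
    }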

+ 45 - 20
packages/cli/src/server/output.rs

@@ -1,9 +1,7 @@
 use crate::server::Diagnostic;
 use colored::Colorize;
-use dioxus_cli_config::crate_root;
-use dioxus_cli_config::CrateConfig;
-use std::path::PathBuf;
-use std::process::Command;
+use dioxus_cli_config::{crate_root, CrateConfig};
+use std::{path::PathBuf, process::Command};
 
 #[derive(Debug, Default)]
 pub struct PrettierOptions {
@@ -48,7 +46,7 @@ pub fn print_console_info(
     let custom_html_file = if crate_root.join("index.html").is_file() {
         "Custom [index.html]"
     } else {
-        "Default"
+        "None"
     };
     let url_rewrite = if config.dioxus_config.web.watcher.index_on_404 {
         "True"
@@ -60,9 +58,9 @@ pub fn print_console_info(
 
     if options.changed.is_empty() {
         println!(
-            "{} @ v{} [{}] \n",
+            "{} @ v{} [{}]",
             "Dioxus".bold().green(),
-            crate::DIOXUS_CLI_VERSION,
+            clap::crate_version!(),
             chrono::Local::now().format("%H:%M:%S").to_string().dimmed()
         );
     } else {
@@ -81,40 +79,67 @@ pub fn print_console_info(
     if let Some(WebServerInfo { ip, port }) = web_info {
         if config.dioxus_config.web.https.enabled == Some(true) {
             println!(
-                "\t> Local : {}",
+                "    > Local address: {}",
                 format!("https://localhost:{}/", port).blue()
             );
             println!(
-                "\t> Network : {}",
+                "    > Network address: {}",
                 format!("https://{}:{}/", ip, port).blue()
             );
-            println!("\t> HTTPS : {}", "Enabled".to_string().green());
+            println!("    > HTTPS: {}", "Enabled".to_string().green());
         } else {
             println!(
-                "\t> Local : {}",
+                "    > Local address: {}",
                 format!("http://localhost:{}/", port).blue()
             );
             println!(
-                "\t> Network : {}",
+                "    > Network address: {}",
                 format!("http://{}:{}/", ip, port).blue()
             );
-            println!("\t> HTTPS : {}", "Disabled".to_string().red());
+            println!("    > HTTPS status: {}", "Disabled".to_string().red());
         }
     }
     println!();
-    println!("\t> Profile : {}", profile.green());
-    println!("\t> Hot Reload : {}", hot_reload.cyan());
+
+    println!("    > Hot Reload Mode: {}", hot_reload.cyan());
+
+    println!(
+        "    > Watching: [ {} ]",
+        config
+            .dioxus_config
+            .web
+            .watcher
+            .watch_path
+            .iter()
+            .cloned()
+            .chain(Some(config.dioxus_config.application.asset_dir.clone()))
+            .map(|f| f.display().to_string())
+            .collect::<Vec<String>>()
+            .join(", ")
+            .cyan()
+    );
+
     if !proxies.is_empty() {
-        println!("\t> Proxies :");
+        println!("    > Proxies :");
         for proxy in proxies {
-            println!("\t\t- {}", proxy.backend.blue());
+            println!("    - {}", proxy.backend.blue());
         }
     }
-    println!("\t> Index Template : {}", custom_html_file.green());
-    println!("\t> URL Rewrite [index_on_404] : {}", url_rewrite.purple());
+    println!("    > Custom index.html: {}", custom_html_file.green());
+    println!("    > Serve index.html on 404: {}", url_rewrite.purple());
     println!();
     println!(
-        "\t> Build Time Use : {} millis",
+        "    > Build Features: [ {} ]",
+        config
+            .features
+            .clone()
+            .unwrap_or_default()
+            .join(", ")
+            .green()
+    );
+    println!("    > Build Profile: {}", profile.green());
+    println!(
+        "    > Build took: {} millis",
         options.elapsed_time.to_string().green().bold()
     );
     println!();
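
For readers skimming the new "Watching" line: it is just the configured watch paths with the asset directory appended. A tiny standalone sketch of that iterator chain, with made-up paths:

    use std::path::PathBuf;

    fn main() {
        let watch_path = vec![PathBuf::from("src"), PathBuf::from("examples")];
        let asset_dir = PathBuf::from("public");

        // Option<T> iterates zero or one items, so chaining it appends the asset dir
        let watching = watch_path
            .iter()
            .cloned()
            .chain(Some(asset_dir))
            .map(|p| p.display().to_string())
            .collect::<Vec<_>>()
            .join(", ");

        assert_eq!(watching, "src, examples, public");
    }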

+ 60 - 39
packages/cli/src/server/web/hot_reload.rs

@@ -1,54 +1,75 @@
 use crate::server::HotReloadState;
 use axum::{
-    extract::{ws::Message, WebSocketUpgrade},
+    extract::{
+        ws::{Message, WebSocket},
+        WebSocketUpgrade,
+    },
     response::IntoResponse,
     Extension,
 };
+use dioxus_hot_reload::HotReloadMsg;
+use futures_util::{pin_mut, FutureExt};
 
 pub async fn hot_reload_handler(
     ws: WebSocketUpgrade,
     Extension(state): Extension<HotReloadState>,
 ) -> impl IntoResponse {
-    ws.on_upgrade(|mut socket| async move {
-        log::info!("🔥 Hot Reload WebSocket connected");
-        {
-            // update any rsx calls that changed before the websocket connected.
-            {
-                log::info!("🔮 Finding updates since last compile...");
-                let templates: Vec<_> = {
-                    state
-                        .file_map
-                        .lock()
-                        .unwrap()
-                        .map
-                        .values()
-                        .filter_map(|(_, template_slot)| *template_slot)
-                        .collect()
-                };
-                for template in templates {
-                    if socket
-                        .send(Message::Text(serde_json::to_string(&template).unwrap()))
-                        .await
-                        .is_err()
-                    {
-                        return;
-                    }
-                }
-            }
-            log::info!("finished");
+    ws.on_upgrade(|socket| async move {
+        let err = hotreload_loop(socket, state).await;
+
+        if let Err(err) = err {
+            log::error!("Hotreload receiver failed: {}", err);
         }
+    })
+}
+
+async fn hotreload_loop(mut socket: WebSocket, state: HotReloadState) -> anyhow::Result<()> {
+    log::info!("🔥 Hot Reload WebSocket connected");
+
+    // update any rsx calls that changed before the websocket connected.
+    // These templates will be sent down immediately so the page is in sync with the hotreloaded version
+    // The compiled version will be different from the one we actually want to present
+    for template in state.all_templates() {
+        socket
+            .send(Message::Text(serde_json::to_string(&template).unwrap()))
+            .await?;
+    }
+
+    let mut rx = state.messages.subscribe();
+
+    loop {
+        let msg = {
+            // Poll both the receiver and the socket
+            //
+            // This shuts us down if the connection is closed.
+            let mut _socket = socket.recv().fuse();
+            let mut _rx = rx.recv().fuse();
+
+            pin_mut!(_socket, _rx);
 
-        let mut rx = state.messages.subscribe();
-        loop {
-            if let Ok(rsx) = rx.recv().await {
-                if socket
-                    .send(Message::Text(serde_json::to_string(&rsx).unwrap()))
-                    .await
-                    .is_err()
-                {
+            let msg = futures_util::select! {
+                msg = _rx => msg,
+                _ = _socket => break,
+            };
+
+            let Ok(msg) = msg else { break };
+
+            match msg {
+                HotReloadMsg::UpdateTemplate(template) => {
+                    Message::Text(serde_json::to_string(&template).unwrap())
+                }
+                HotReloadMsg::UpdateAsset(asset) => {
+                    Message::Text(format!("reload-asset: {}", asset.display()))
+                }
+                HotReloadMsg::Shutdown => {
+                    log::info!("🔥 Hot Reload WebSocket shutting down");
                     break;
-                };
+                }
             }
-        }
-    })
+        };
+
+        socket.send(msg).await?;
+    }
+
+    Ok(())
 }
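
The wire format above is intentionally simple: template updates go out as JSON-serialized `Template`s, asset updates as a plain `reload-asset: <path>` string. Below is a sketch of how a client might dispatch on those frames; the handler parameters are placeholders, and leaking the text is one way to satisfy the `'static` deserialize bound on `Template` in a dev-only path:

    use dioxus_core::Template;

    fn handle_frame(text: String, apply: impl Fn(Template), reload_asset: impl Fn(&str)) {
        // Asset updates are a plain-text prefix plus the changed path
        if let Some(path) = text.strip_prefix("reload-asset: ") {
            reload_asset(path);
            return;
        }

        // Everything else is a JSON-serialized Template; leak the frame so the
        // &'static strs inside the deserialized template have something to borrow from
        let leaked: &'static str = Box::leak(text.into_boxed_str());
        if let Ok(template) = serde_json::from_str::<Template>(leaked) {
            apply(template);
        }
    }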

+ 71 - 310
packages/cli/src/server/web/mod.rs

@@ -1,5 +1,6 @@
 use crate::{
     builder,
+    cfg::ConfigOptsServe,
     serve::Serve,
     server::{
         output::{print_console_info, PrettierOptions, WebServerInfo},
@@ -7,111 +8,51 @@ use crate::{
     },
     BuildResult, Result,
 };
-use axum::{
-    body::Body,
-    extract::{ws::Message, Extension, WebSocketUpgrade},
-    http::{
-        self,
-        header::{HeaderName, HeaderValue},
-        Method, Response, StatusCode,
-    },
-    response::IntoResponse,
-    routing::{get, get_service},
-    Router,
-};
-use axum_server::tls_rustls::RustlsConfig;
 use dioxus_cli_config::CrateConfig;
-use dioxus_cli_config::WebHttpsConfig;
-
-use dioxus_html::HtmlCtx;
 use dioxus_rsx::hot_reload::*;
 use std::{
-    net::UdpSocket,
-    process::Command,
+    net::{SocketAddr, UdpSocket},
     sync::{Arc, Mutex},
 };
-use tokio::sync::broadcast::{self, Sender};
-use tower::ServiceBuilder;
-use tower_http::services::fs::{ServeDir, ServeFileSystemResponseBody};
-use tower_http::{
-    cors::{Any, CorsLayer},
-    ServiceBuilderExt,
-};
-
-#[cfg(feature = "plugin")]
-use crate::plugin::PluginManager;
+use tokio::sync::broadcast;
 
+mod hot_reload;
 mod proxy;
+mod server;
 
-mod hot_reload;
-use hot_reload::*;
+use server::*;
 
-struct WsReloadState {
+pub struct WsReloadState {
     update: broadcast::Sender<()>,
 }
 
-pub async fn startup(
-    port: u16,
-    config: CrateConfig,
-    start_browser: bool,
-    skip_assets: bool,
-) -> Result<()> {
-    // ctrl-c shutdown checker
-    let _crate_config = config.clone();
-    let _ = ctrlc::set_handler(move || {
-        #[cfg(feature = "plugin")]
-        let _ = PluginManager::on_serve_shutdown(&_crate_config);
-        std::process::exit(0);
-    });
+pub async fn startup(config: CrateConfig, serve_cfg: &ConfigOptsServe) -> Result<()> {
+    set_ctrlc_handler(&config);
 
     let ip = get_ip().unwrap_or(String::from("0.0.0.0"));
 
-    let hot_reload_state = match config.hot_reload {
-        true => {
-            let FileMapBuildResult { map, errors } =
-                FileMap::<HtmlCtx>::create(config.crate_dir.clone()).unwrap();
+    let mut hot_reload_state = None;
 
-            for err in errors {
-                log::error!("{}", err);
-            }
-
-            let file_map = Arc::new(Mutex::new(map));
-
-            let hot_reload_tx = broadcast::channel(100).0;
-
-            Some(HotReloadState {
-                messages: hot_reload_tx.clone(),
-                file_map: file_map.clone(),
-            })
-        }
-        false => None,
-    };
-
-    serve(
-        ip,
-        port,
-        config,
-        start_browser,
-        skip_assets,
-        hot_reload_state,
-    )
-    .await?;
+    if config.hot_reload {
+        hot_reload_state = Some(build_hotreload_filemap(&config));
+    }
 
-    Ok(())
+    serve(ip, config, hot_reload_state, serve_cfg).await
 }
 
 /// Start the server without hot reload
 pub async fn serve(
     ip: String,
-    port: u16,
     config: CrateConfig,
-    start_browser: bool,
-    skip_assets: bool,
     hot_reload_state: Option<HotReloadState>,
+    opts: &ConfigOptsServe,
 ) -> Result<()> {
+    let skip_assets = opts.skip_assets;
+    let port = opts.port;
+
     // Since web platform doesn't use `rust_flags`, this argument is explicitly
     // set to `None`.
-    let first_build_result = crate::builder::build(&config, false, skip_assets, None)?;
+    let first_build_result = crate::builder::build_web(&config, skip_assets, None)?;
 
     // generate dev-index page
     Serve::regen_dev_page(&config, first_build_result.assets.as_ref())?;
@@ -154,7 +95,7 @@ pub async fn serve(
             warnings: first_build_result.warnings,
             elapsed_time: first_build_result.elapsed_time,
         },
-        Some(crate::server::output::WebServerInfo {
+        Some(WebServerInfo {
             ip: ip.clone(),
             port,
         }),
@@ -164,230 +105,43 @@ pub async fn serve(
     let router = setup_router(config.clone(), ws_reload_state, hot_reload_state).await?;
 
     // Start server
-    start_server(port, router, start_browser, rustls_config, &config).await?;
+    start_server(port, router, opts.open, rustls_config, &config).await?;
 
     Ok(())
 }
 
-const DEFAULT_KEY_PATH: &str = "ssl/key.pem";
-const DEFAULT_CERT_PATH: &str = "ssl/cert.pem";
-
-/// Returns an enum of rustls config and a bool if mkcert isn't installed
-async fn get_rustls(config: &CrateConfig) -> Result<Option<RustlsConfig>> {
-    let web_config = &config.dioxus_config.web.https;
-    if web_config.enabled != Some(true) {
-        return Ok(None);
-    }
-
-    let (cert_path, key_path) = if let Some(true) = web_config.mkcert {
-        // mkcert, use it
-        get_rustls_with_mkcert(web_config)?
-    } else {
-        // if mkcert not specified or false, don't use it
-        get_rustls_without_mkcert(web_config)?
-    };
-
-    Ok(Some(
-        RustlsConfig::from_pem_file(cert_path, key_path).await?,
-    ))
-}
-
-fn get_rustls_with_mkcert(web_config: &WebHttpsConfig) -> Result<(String, String)> {
-    // Get paths to store certs, otherwise use ssl/item.pem
-    let key_path = web_config
-        .key_path
-        .clone()
-        .unwrap_or(DEFAULT_KEY_PATH.to_string());
-
-    let cert_path = web_config
-        .cert_path
-        .clone()
-        .unwrap_or(DEFAULT_CERT_PATH.to_string());
-
-    // Create ssl directory if using defaults
-    if key_path == DEFAULT_KEY_PATH && cert_path == DEFAULT_CERT_PATH {
-        _ = fs::create_dir("ssl");
-    }
-
-    let cmd = Command::new("mkcert")
-        .args([
-            "-install",
-            "-key-file",
-            &key_path,
-            "-cert-file",
-            &cert_path,
-            "localhost",
-            "::1",
-            "127.0.0.1",
-        ])
-        .spawn();
-
-    match cmd {
-        Err(e) => {
-            match e.kind() {
-                io::ErrorKind::NotFound => log::error!("mkcert is not installed. See https://github.com/FiloSottile/mkcert#installation for installation instructions."),
-                e => log::error!("an error occured while generating mkcert certificates: {}", e.to_string()),
-            };
-            return Err("failed to generate mkcert certificates".into());
-        }
-        Ok(mut cmd) => {
-            cmd.wait()?;
-        }
-    }
-
-    Ok((cert_path, key_path))
-}
-
-fn get_rustls_without_mkcert(web_config: &WebHttpsConfig) -> Result<(String, String)> {
-    // get paths to cert & key
-    if let (Some(key), Some(cert)) = (web_config.key_path.clone(), web_config.cert_path.clone()) {
-        Ok((cert, key))
-    } else {
-        // missing cert or key
-        Err("https is enabled but cert or key path is missing".into())
-    }
-}
-
-/// Sets up and returns a router
-async fn setup_router(
-    config: CrateConfig,
-    ws_reload: Arc<WsReloadState>,
-    hot_reload: Option<HotReloadState>,
-) -> Result<Router> {
-    // Setup cors
-    let cors = CorsLayer::new()
-        // allow `GET` and `POST` when accessing the resource
-        .allow_methods([Method::GET, Method::POST])
-        // allow requests from any origin
-        .allow_origin(Any)
-        .allow_headers(Any);
-
-    let (coep, coop) = if config.cross_origin_policy {
-        (
-            HeaderValue::from_static("require-corp"),
-            HeaderValue::from_static("same-origin"),
-        )
-    } else {
-        (
-            HeaderValue::from_static("unsafe-none"),
-            HeaderValue::from_static("unsafe-none"),
-        )
-    };
-
-    // Create file service
-    let file_service_config = config.clone();
-    let file_service = ServiceBuilder::new()
-        .override_response_header(
-            HeaderName::from_static("cross-origin-embedder-policy"),
-            coep,
-        )
-        .override_response_header(HeaderName::from_static("cross-origin-opener-policy"), coop)
-        .and_then(
-            move |response: Response<ServeFileSystemResponseBody>| async move {
-                let mut response = if file_service_config.dioxus_config.web.watcher.index_on_404
-                    && response.status() == StatusCode::NOT_FOUND
-                {
-                    let body = Body::from(
-                        // TODO: Cache/memoize this.
-                        std::fs::read_to_string(file_service_config.out_dir().join("index.html"))
-                            .ok()
-                            .unwrap(),
-                    );
-                    Response::builder()
-                        .status(StatusCode::OK)
-                        .body(body)
-                        .unwrap()
-                } else {
-                    response.into_response()
-                };
-                let headers = response.headers_mut();
-                headers.insert(
-                    http::header::CACHE_CONTROL,
-                    HeaderValue::from_static("no-cache"),
-                );
-                headers.insert(http::header::PRAGMA, HeaderValue::from_static("no-cache"));
-                headers.insert(http::header::EXPIRES, HeaderValue::from_static("0"));
-                Ok(response)
-            },
-        )
-        .service(ServeDir::new(config.out_dir()));
-
-    // Setup websocket
-    let mut router = Router::new().route("/_dioxus/ws", get(ws_handler));
-
-    // Setup proxy
-    for proxy_config in config.dioxus_config.web.proxy {
-        router = proxy::add_proxy(router, &proxy_config)?;
-    }
-
-    // Route file service
-    router = router.fallback(get_service(file_service).handle_error(
-        |error: std::convert::Infallible| async move {
-            (
-                StatusCode::INTERNAL_SERVER_ERROR,
-                format!("Unhandled internal error: {}", error),
-            )
-        },
-    ));
-
-    router = if let Some(base_path) = config.dioxus_config.web.app.base_path.clone() {
-        let base_path = format!("/{}", base_path.trim_matches('/'));
-        Router::new()
-            .route(&base_path, axum::routing::any_service(router))
-            .fallback(get(move || {
-                let base_path = base_path.clone();
-                async move { format!("Outside of the base path: {}", base_path) }
-            }))
-    } else {
-        router
-    };
-
-    // Setup routes
-    router = router
-        .route("/_dioxus/hot_reload", get(hot_reload_handler))
-        .layer(cors)
-        .layer(Extension(ws_reload));
-
-    if let Some(hot_reload) = hot_reload {
-        router = router.layer(Extension(hot_reload))
-    }
-
-    Ok(router)
-}
-
 /// Starts dx serve with no hot reload
 async fn start_server(
     port: u16,
-    router: Router,
+    router: axum::Router,
     start_browser: bool,
-    rustls: Option<RustlsConfig>,
+    rustls: Option<axum_server::tls_rustls::RustlsConfig>,
     _config: &CrateConfig,
 ) -> Result<()> {
     // If plugins, call on_serve_start event
     #[cfg(feature = "plugin")]
-    PluginManager::on_serve_start(_config)?;
+    crate::plugin::PluginManager::on_serve_start(_config)?;
 
     // Bind the server to `[::]` and it will LISTEN for both IPv4 and IPv6. (required IPv6 dual stack)
-    let addr = format!("[::]:{}", port).parse().unwrap();
+    let addr: SocketAddr = format!("0.0.0.0:{}", port).parse().unwrap();
 
     // Open the browser
     if start_browser {
         match rustls {
-            Some(_) => _ = open::that(format!("https://{}", addr)),
-            None => _ = open::that(format!("http://{}", addr)),
+            Some(_) => _ = open::that(format!("https://localhost:{port}")),
+            None => _ = open::that(format!("http://localhost:{port}")),
         }
     }
 
+    let svc = router.into_make_service();
+
     // Start the server with or without rustls
     match rustls {
-        Some(rustls) => {
-            axum_server::bind_rustls(addr, rustls)
-                .serve(router.into_make_service())
-                .await?
-        }
+        Some(rustls) => axum_server::bind_rustls(addr, rustls).serve(svc).await?,
         None => {
+            // Create a TCP listener bound to the address
             let listener = tokio::net::TcpListener::bind(&addr).await?;
-            axum::serve(listener, router.into_make_service()).await?
+            axum::serve(listener, svc).await?
         }
     }
 
@@ -412,43 +166,50 @@ fn get_ip() -> Option<String> {
     }
 }
 
-/// Handle websockets
-async fn ws_handler(
-    ws: WebSocketUpgrade,
-    Extension(state): Extension<Arc<WsReloadState>>,
-) -> impl IntoResponse {
-    ws.on_upgrade(|mut socket| async move {
-        let mut rx = state.update.subscribe();
-        let reload_watcher = tokio::spawn(async move {
-            loop {
-                rx.recv().await.unwrap();
-                // ignore the error
-                if socket
-                    .send(Message::Text(String::from("reload")))
-                    .await
-                    .is_err()
-                {
-                    break;
-                }
-
-                // flush the errors after recompling
-                rx = rx.resubscribe();
-            }
-        });
-
-        reload_watcher.await.unwrap();
-    })
-}
-
-fn build(config: &CrateConfig, reload_tx: &Sender<()>, skip_assets: bool) -> Result<BuildResult> {
+fn build(
+    config: &CrateConfig,
+    reload_tx: &broadcast::Sender<()>,
+    skip_assets: bool,
+) -> Result<BuildResult> {
     // Since web platform doesn't use `rust_flags`, this argument is explicitly
     // set to `None`.
-    let result = builder::build(config, true, skip_assets, None)?;
+    let result = std::panic::catch_unwind(|| builder::build_web(config, skip_assets, None))
+        .map_err(|e| anyhow::anyhow!("Build failed: {e:?}"))?;
+
     // change the websocket reload state to true;
     // the page will auto-reload.
     if config.dioxus_config.web.watcher.reload_html {
-        let _ = Serve::regen_dev_page(config, result.assets.as_ref());
+        if let Ok(assets) = result.as_ref().map(|x| x.assets.as_ref()) {
+            let _ = Serve::regen_dev_page(config, assets);
+        }
     }
+
     let _ = reload_tx.send(());
-    Ok(result)
+
+    result
+}
+
+fn set_ctrlc_handler(config: &CrateConfig) {
+    // ctrl-c shutdown checker
+    let _crate_config = config.clone();
+
+    let _ = ctrlc::set_handler(move || {
+        #[cfg(feature = "plugin")]
+        let _ = crate::plugin::PluginManager::on_serve_shutdown(&_crate_config);
+
+        std::process::exit(0);
+    });
+}
+
+fn build_hotreload_filemap(config: &CrateConfig) -> HotReloadState {
+    let FileMapBuildResult { map, errors } = FileMap::create(config.crate_dir.clone()).unwrap();
+
+    for err in errors {
+        log::error!("{}", err);
+    }
+
+    HotReloadState {
+        messages: broadcast::channel(100).0.clone(),
+        file_map: Arc::new(Mutex::new(map)).clone(),
+    }
}
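
The `catch_unwind` wrapper in `build` above is what keeps a panicking build from taking down the whole dev server. A generic sketch of the pattern, with a stand-in build function:

    use std::panic::catch_unwind;

    fn fallible_build() -> anyhow::Result<&'static str> {
        Ok("built")
    }

    fn build_without_killing_the_server() -> anyhow::Result<&'static str> {
        // catch_unwind yields Err(Box<dyn Any + Send>) if the closure panics;
        // mapping that into an error lets the caller report it and keep serving
        let result = catch_unwind(fallible_build)
            .map_err(|e| anyhow::anyhow!("Build failed: {e:?}"))?;
        result
    }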

+ 243 - 0
packages/cli/src/server/web/server.rs

@@ -0,0 +1,243 @@
+use super::{hot_reload::*, WsReloadState};
+use crate::{server::HotReloadState, Result};
+use axum::{
+    body::Body,
+    extract::{
+        ws::{Message, WebSocket},
+        Extension, WebSocketUpgrade,
+    },
+    http::{
+        self,
+        header::{HeaderName, HeaderValue},
+        Method, Response, StatusCode,
+    },
+    response::IntoResponse,
+    routing::{get, get_service},
+    Router,
+};
+use axum_server::tls_rustls::RustlsConfig;
+use dioxus_cli_config::{CrateConfig, WebHttpsConfig};
+use std::{fs, io, process::Command, sync::Arc};
+use tower::ServiceBuilder;
+use tower_http::{
+    cors::{Any, CorsLayer},
+    services::fs::{ServeDir, ServeFileSystemResponseBody},
+    ServiceBuilderExt,
+};
+
+/// Sets up and returns a router
+pub async fn setup_router(
+    config: CrateConfig,
+    ws_reload: Arc<WsReloadState>,
+    hot_reload: Option<HotReloadState>,
+) -> Result<Router> {
+    // Setup cors
+    let cors = CorsLayer::new()
+        // allow `GET` and `POST` when accessing the resource
+        .allow_methods([Method::GET, Method::POST])
+        // allow requests from any origin
+        .allow_origin(Any)
+        .allow_headers(Any);
+
+    let (coep, coop) = if config.cross_origin_policy {
+        (
+            HeaderValue::from_static("require-corp"),
+            HeaderValue::from_static("same-origin"),
+        )
+    } else {
+        (
+            HeaderValue::from_static("unsafe-none"),
+            HeaderValue::from_static("unsafe-none"),
+        )
+    };
+
+    // Create file service
+    let file_service_config = config.clone();
+    let file_service = ServiceBuilder::new()
+        .override_response_header(
+            HeaderName::from_static("cross-origin-embedder-policy"),
+            coep,
+        )
+        .override_response_header(HeaderName::from_static("cross-origin-opener-policy"), coop)
+        .and_then(move |response| async move { Ok(no_cache(file_service_config, response)) })
+        .service(ServeDir::new(config.out_dir()));
+
+    // Setup websocket
+    let mut router = Router::new().route("/_dioxus/ws", get(ws_handler));
+
+    // Setup proxy
+    for proxy_config in config.dioxus_config.web.proxy {
+        router = super::proxy::add_proxy(router, &proxy_config)?;
+    }
+
+    // Route file service
+    router = router.fallback(get_service(file_service).handle_error(
+        |error: std::convert::Infallible| async move {
+            (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                format!("Unhandled internal error: {}", error),
+            )
+        },
+    ));
+
+    router = if let Some(base_path) = config.dioxus_config.web.app.base_path.clone() {
+        let base_path = format!("/{}", base_path.trim_matches('/'));
+        Router::new()
+            .route(&base_path, axum::routing::any_service(router))
+            .fallback(get(move || {
+                let base_path = base_path.clone();
+                async move { format!("Outside of the base path: {}", base_path) }
+            }))
+    } else {
+        router
+    };
+
+    // Setup routes
+    router = router
+        .route("/_dioxus/hot_reload", get(hot_reload_handler))
+        .layer(cors)
+        .layer(Extension(ws_reload));
+
+    if let Some(hot_reload) = hot_reload {
+        router = router.layer(Extension(hot_reload))
+    }
+
+    Ok(router)
+}
+
+fn no_cache(
+    file_service_config: CrateConfig,
+    response: Response<ServeFileSystemResponseBody>,
+) -> Response<Body> {
+    let mut response = if file_service_config.dioxus_config.web.watcher.index_on_404
+        && response.status() == StatusCode::NOT_FOUND
+    {
+        let body = Body::from(
+            // TODO: Cache/memoize this.
+            std::fs::read_to_string(file_service_config.out_dir().join("index.html"))
+                .ok()
+                .unwrap(),
+        );
+        Response::builder()
+            .status(StatusCode::OK)
+            .body(body)
+            .unwrap()
+    } else {
+        response.into_response()
+    };
+    let headers = response.headers_mut();
+    headers.insert(
+        http::header::CACHE_CONTROL,
+        HeaderValue::from_static("no-cache"),
+    );
+    headers.insert(http::header::PRAGMA, HeaderValue::from_static("no-cache"));
+    headers.insert(http::header::EXPIRES, HeaderValue::from_static("0"));
+    response
+}
+
+/// Handle websockets
+async fn ws_handler(
+    ws: WebSocketUpgrade,
+    Extension(state): Extension<Arc<WsReloadState>>,
+) -> impl IntoResponse {
+    ws.on_upgrade(move |socket| ws_reload_handler(socket, state))
+}
+
+async fn ws_reload_handler(mut socket: WebSocket, state: Arc<WsReloadState>) {
+    let mut rx = state.update.subscribe();
+
+    let reload_watcher = tokio::spawn(async move {
+        loop {
+            rx.recv().await.unwrap();
+
+            let _ = socket.send(Message::Text(String::from("reload"))).await;
+
+            // the send error is ignored; a failed send just means the page has gone away
+            println!("forcing reload");
+
+            // flush the errors after recompiling
+            rx = rx.resubscribe();
+        }
+    });
+
+    reload_watcher.await.unwrap();
+}
+
+const DEFAULT_KEY_PATH: &str = "ssl/key.pem";
+const DEFAULT_CERT_PATH: &str = "ssl/cert.pem";
+
+/// Returns an enum of rustls config and a bool if mkcert isn't installed
+pub async fn get_rustls(config: &CrateConfig) -> Result<Option<RustlsConfig>> {
+    let web_config = &config.dioxus_config.web.https;
+    if web_config.enabled != Some(true) {
+        return Ok(None);
+    }
+
+    let (cert_path, key_path) = if let Some(true) = web_config.mkcert {
+        // mkcert, use it
+        get_rustls_with_mkcert(web_config)?
+    } else {
+        // if mkcert not specified or false, don't use it
+        get_rustls_without_mkcert(web_config)?
+    };
+
+    Ok(Some(
+        RustlsConfig::from_pem_file(cert_path, key_path).await?,
+    ))
+}
+
+pub fn get_rustls_with_mkcert(web_config: &WebHttpsConfig) -> Result<(String, String)> {
+    // Get paths to store certs, otherwise use ssl/item.pem
+    let key_path = web_config
+        .key_path
+        .clone()
+        .unwrap_or(DEFAULT_KEY_PATH.to_string());
+
+    let cert_path = web_config
+        .cert_path
+        .clone()
+        .unwrap_or(DEFAULT_CERT_PATH.to_string());
+
+    // Create ssl directory if using defaults
+    if key_path == DEFAULT_KEY_PATH && cert_path == DEFAULT_CERT_PATH {
+        _ = fs::create_dir("ssl");
+    }
+
+    let cmd = Command::new("mkcert")
+        .args([
+            "-install",
+            "-key-file",
+            &key_path,
+            "-cert-file",
+            &cert_path,
+            "localhost",
+            "::1",
+            "127.0.0.1",
+        ])
+        .spawn();
+
+    match cmd {
+        Err(e) => {
+            match e.kind() {
+                io::ErrorKind::NotFound => log::error!("mkcert is not installed. See https://github.com/FiloSottile/mkcert#installation for installation instructions."),
+                e => log::error!("an error occured while generating mkcert certificates: {}", e.to_string()),
+            };
+            return Err("failed to generate mkcert certificates".into());
+        }
+        Ok(mut cmd) => {
+            cmd.wait()?;
+        }
+    }
+
+    Ok((cert_path, key_path))
+}
+
+pub fn get_rustls_without_mkcert(web_config: &WebHttpsConfig) -> Result<(String, String)> {
+    // get paths to cert & key
+    if let (Some(key), Some(cert)) = (web_config.key_path.clone(), web_config.cert_path.clone()) {
+        Ok((cert, key))
+    } else {
+        // missing cert or key
+        Err("https is enabled but cert or key path is missing".into())
+    }
+}
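
On the `no_cache` wrapper above: dev builds change constantly, so the server forces the browser to revalidate every static response; otherwise a cached wasm or css file would defeat hot reload. The same header set, pulled out in isolation as a minimal sketch:

    use axum::body::Body;
    use axum::http::{header, HeaderValue, Response};

    // Mark a response as never cacheable, mirroring the headers set in no_cache above
    fn mark_uncacheable(mut response: Response<Body>) -> Response<Body> {
        let headers = response.headers_mut();
        headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("no-cache"));
        headers.insert(header::PRAGMA, HeaderValue::from_static("no-cache"));
        headers.insert(header::EXPIRES, HeaderValue::from_static("0"));
        response
    }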

+ 4 - 0
packages/cli/tests/fmt.rs

@@ -0,0 +1,4 @@
+//! Test that autoformatting works on files/folders/etc
+
+#[tokio::test]
+async fn formats() {}

+ 1 - 1
packages/config-macro/Cargo.toml

@@ -14,7 +14,7 @@ proc-macro = true
 
 [dependencies]
 proc-macro2 = { version = "1.0" }
-quote = "1.0"
+quote = { workspace = true }
 
 [features]
 default = []

+ 2 - 2
packages/core-macro/Cargo.toml

@@ -14,8 +14,8 @@ proc-macro = true
 
 [dependencies]
 proc-macro2 = { version = "1.0" }
-quote = "1.0"
-syn = { version = "2.0", features = ["full", "extra-traits", "visit"] }
+quote = { workspace = true }
+syn = { workspace = true, features = ["full", "extra-traits", "visit"] }
 dioxus-rsx = { workspace = true }
 constcat = "0.3.0"
 convert_case = "^0.6.0"

+ 27 - 7
packages/core/src/virtual_dom.rs

@@ -2,20 +2,19 @@
 //!
 //! This module provides the primary mechanics to create a hook-based, concurrent VDOM for Rust.
 
-use crate::innerlude::{DirtyTasks, ScopeOrder};
 use crate::Task;
 use crate::{
     any_props::AnyProps,
     arena::ElementId,
     innerlude::{
-        ElementRef, ErrorBoundary, NoOpMutations, SchedulerMsg, ScopeState, VNodeMount, VProps,
-        WriteMutations,
+        DirtyTasks, ElementRef, ErrorBoundary, NoOpMutations, SchedulerMsg, ScopeOrder, ScopeState,
+        VNodeMount, VProps, WriteMutations,
     },
     nodes::RenderReturn,
     nodes::{Template, TemplateId},
     runtime::{Runtime, RuntimeGuard},
     scopes::ScopeId,
-    AttributeValue, ComponentFunction, Element, Event, Mutations,
+    AttributeValue, ComponentFunction, Element, Event, Mutations, VNode,
 };
 use futures_util::StreamExt;
 use rustc_hash::FxHashMap;
@@ -534,17 +533,38 @@ impl VirtualDom {
     #[instrument(skip(self), level = "trace", name = "VirtualDom::replace_template")]
     pub fn replace_template(&mut self, template: Template) {
         self.register_template_first_byte_index(template);
+
         // iterating a slab is very inefficient, but this is a rare operation that will only happen during development so it's fine
         let mut dirty = Vec::new();
         for (id, scope) in self.scopes.iter() {
+            // Recurse into the dynamic nodes of the existing mounted node to see if the template is alive in the tree
+            fn check_node_for_templates(node: &VNode, template: Template) -> bool {
+                let this_template_name = node.template.get().name.rsplit_once(':').unwrap().0;
+
+                if this_template_name == template.name.rsplit_once(':').unwrap().0 {
+                    return true;
+                }
+
+                for dynamic in node.dynamic_nodes.iter() {
+                    if let crate::DynamicNode::Fragment(nodes) = dynamic {
+                        for node in nodes {
+                            if check_node_for_templates(node, template) {
+                                return true;
+                            }
+                        }
+                    }
+                }
+
+                false
+            }
+
             if let Some(RenderReturn::Ready(sync)) = scope.try_root_node() {
-                if sync.template.get().name.rsplit_once(':').unwrap().0
-                    == template.name.rsplit_once(':').unwrap().0
-                {
+                if check_node_for_templates(sync, template) {
                     dirty.push(ScopeId(id));
                 }
             }
         }
+
         for dirty in dirty {
             self.mark_dirty(dirty);
         }
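
The name comparison in `check_node_for_templates` leans on the template-name convention: everything up to the last `:` identifies the rsx! call site, and only the trailing segment differs between hot-reloaded revisions. A sketch of that comparison with made-up names (the exact name shape is an assumption drawn from the `rsplit_once(':')` usage above):

    // Strip the trailing segment so two revisions of the same rsx! call site compare equal
    fn call_site(template_name: &str) -> &str {
        template_name
            .rsplit_once(':')
            .map(|(site, _)| site)
            .unwrap_or(template_name)
    }

    #[test]
    fn revisions_share_a_call_site() {
        assert_eq!(call_site("src/main.rs:14:5:0"), call_site("src/main.rs:14:5:1"));
        assert_ne!(call_site("src/main.rs:14:5:0"), call_site("src/app.rs:3:9:0"));
    }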

+ 6 - 0
packages/desktop/src/app.rs

@@ -277,6 +277,12 @@ impl App {
             dioxus_hot_reload::HotReloadMsg::Shutdown => {
                 self.control_flow = ControlFlow::Exit;
             }
+
+            dioxus_hot_reload::HotReloadMsg::UpdateAsset(_) => {
+                for webview in self.webviews.values_mut() {
+                    webview.kick_stylsheets();
+                }
+            }
         }
     }
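
For context on the three variants handled here, a minimal consumer of the hot-reload channel might look like the sketch below, using the `connect` callback API that appears later in this diff (hot-reload/src/lib.rs); the print statements stand in for real handlers:

    use dioxus_hot_reload::{connect, HotReloadMsg};

    fn wire_up_hot_reload() {
        connect(move |msg| match msg {
            // A single rsx! template changed; swap it into the running VirtualDom
            HotReloadMsg::UpdateTemplate(template) => println!("template: {}", template.name),
            // A tracked asset (e.g. css) changed; the consumer decides how to refresh it
            HotReloadMsg::UpdateAsset(path) => println!("asset: {}", path.display()),
            // The CLI is doing a full rebuild; exit so the fresh binary can take over
            HotReloadMsg::Shutdown => std::process::exit(0),
        });
    }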
 

+ 4 - 1
packages/desktop/src/protocol.rs

@@ -221,7 +221,10 @@ fn get_asset_root() -> Option<PathBuf> {
     // If running under cargo, there's no bundle!
     // There might be a smarter/more resilient way of doing this
     if std::env::var_os("CARGO").is_some() {
-        return None;
+        return dioxus_cli_config::CURRENT_CONFIG
+            .as_ref()
+            .map(|c| c.out_dir())
+            .ok();
     }
 
     #[cfg(target_os = "macos")]

+ 8 - 0
packages/desktop/src/webview.rs

@@ -221,4 +221,12 @@ impl WebviewInstance {
             self.desktop_context.send_edits();
         }
     }
+
+    pub fn kick_stylsheets(&self) {
+        // run eval in the webview to kick the stylesheets by appending a query string
+        // we should do something less clunky than this
+        _ = self.desktop_context
+            .webview
+            .evaluate_script("document.querySelectorAll('link[rel=\"stylesheet\"]').forEach((el) => el.href = el.href + \"?\" + Math.random());");
+    }
 }

+ 2 - 1
packages/fullstack/examples/static-hydrated/src/main.rs

@@ -36,8 +36,9 @@ async fn main() {
 }
 
 // Hydrate the page
-#[cfg(all(feature = "web", not(feature = "server")))]
+#[cfg(not(feature = "server"))]
 fn main() {
+    #[cfg(all(feature = "web", not(feature = "server")))]
     dioxus_web::launch_with_props(
         dioxus_fullstack::router::RouteWithCfg::<Route>,
         dioxus_fullstack::prelude::get_root_props_from_document()

+ 1 - 0
packages/fullstack/src/hooks/server_future.rs

@@ -63,6 +63,7 @@ where
     }
 }
 
+#[cfg(feature = "web")]
 #[inline]
 fn kick_future<F, T>(user_fut: F)
 where

+ 1 - 0
packages/fullstack/src/hot_reload.rs

@@ -35,6 +35,7 @@ impl Default for HotReloadState {
                 dioxus_hot_reload::HotReloadMsg::Shutdown => {
                     std::process::exit(0);
                 }
+                _ => {}
             }
         });
 

+ 1 - 1
packages/hot-reload/Cargo.toml

@@ -14,7 +14,7 @@ dioxus-rsx = { workspace = true }
 dioxus-core = { workspace = true, features = ["serialize"] }
 dioxus-html = { workspace = true, optional = true }
 
-interprocess-docfix = { version = "1.2.2" }
+interprocess = { workspace = true }
 notify = { version = "5.0.0", optional = true }
 chrono = { version = "0.4.24", default-features = false, features = ["clock"], optional = true }
 serde_json = "1.0.91"

+ 236 - 197
packages/hot-reload/src/file_watcher.rs

@@ -10,7 +10,7 @@ use dioxus_rsx::{
     hot_reload::{FileMap, FileMapBuildResult, UpdateResult},
     HotReloadingContext,
 };
-use interprocess_docfix::local_socket::LocalSocketListener;
+use interprocess::local_socket::LocalSocketListener;
 use notify::{RecommendedWatcher, RecursiveMode, Watcher};
 
 #[cfg(feature = "file_watcher")]
@@ -109,232 +109,271 @@ impl<Ctx: HotReloadingContext> Config<Ctx> {
 }
 
 /// Initialize the hot reloading listener
+///
+/// This is designed to be called by hot_reload_init!(), which will pass in information about the project
+///
+/// Notes:
+/// - We don't want to watch the target directory, since it's huge and can exhaust the watcher's file handles
 pub fn init<Ctx: HotReloadingContext + Send + 'static>(cfg: Config<Ctx>) {
     let Config {
+        mut rebuild_with,
         root_path,
         listening_paths,
         log,
-        mut rebuild_with,
         excluded_paths,
-        phantom: _,
+        ..
     } = cfg;
 
-    if let Ok(crate_dir) = PathBuf::from_str(root_path) {
-        // try to find the gitignore file
-        let gitignore_file_path = crate_dir.join(".gitignore");
-        let (gitignore, _) = ignore::gitignore::Gitignore::new(gitignore_file_path);
-
-        // convert the excluded paths to absolute paths
-        let excluded_paths = excluded_paths
-            .iter()
-            .map(|path| crate_dir.join(PathBuf::from(path)))
-            .collect::<Vec<_>>();
-
-        let channels = Arc::new(Mutex::new(Vec::new()));
-        let FileMapBuildResult {
-            map: file_map,
-            errors,
-        } = FileMap::<Ctx>::create_with_filter(crate_dir.clone(), |path| {
-            // skip excluded paths
-            excluded_paths.iter().any(|p| path.starts_with(p)) ||
-                // respect .gitignore
-                gitignore
-                    .matched_path_or_any_parents(path, path.is_dir())
-                    .is_ignore()
-        })
-        .unwrap();
-        for err in errors {
-            if log {
-                println!("hot reloading failed to initialize:\n{err:?}");
-            }
+    let Ok(crate_dir) = PathBuf::from_str(root_path) else {
+        return;
+    };
+
+    // try to find the gitignore file
+    let gitignore_file_path = crate_dir.join(".gitignore");
+    let (gitignore, _) = ignore::gitignore::Gitignore::new(gitignore_file_path);
+
+    // convert the excluded paths to absolute paths
+    let excluded_paths = excluded_paths
+        .iter()
+        .map(|path| crate_dir.join(PathBuf::from(path)))
+        .collect::<Vec<_>>();
+
+    let channels = Arc::new(Mutex::new(Vec::new()));
+    let FileMapBuildResult {
+        map: file_map,
+        errors,
+    } = FileMap::<Ctx>::create_with_filter(crate_dir.clone(), |path| {
+        // skip excluded paths
+        excluded_paths.iter().any(|p| path.starts_with(p)) ||
+            // respect .gitignore
+            gitignore
+                .matched_path_or_any_parents(path, path.is_dir())
+                .is_ignore()
+    })
+    .unwrap();
+
+    for err in errors {
+        if log {
+            println!("hot reloading failed to initialize:\n{err:?}");
         }
-        let file_map = Arc::new(Mutex::new(file_map));
-
-        let target_dir = crate_dir.join("target");
-        let hot_reload_socket_path = target_dir.join("dioxusin");
-
-        #[cfg(unix)]
-        {
-            // On unix, if you force quit the application, it can leave the file socket open
-            // This will cause the local socket listener to fail to open
-            // We check if the file socket is already open from an old session and then delete it
-            if hot_reload_socket_path.exists() {
-                let _ = std::fs::remove_file(hot_reload_socket_path.clone());
-            }
+    }
+
+    let file_map = Arc::new(Mutex::new(file_map));
+
+    let target_dir = crate_dir.join("target");
+    let hot_reload_socket_path = target_dir.join("dioxusin");
+
+    #[cfg(unix)]
+    {
+        // On unix, if you force quit the application, it can leave the file socket open
+        // This will cause the local socket listener to fail to open
+        // We check if the file socket is already open from an old session and then delete it
+        if hot_reload_socket_path.exists() {
+            let _ = std::fs::remove_file(hot_reload_socket_path.clone());
         }
+    }
 
-        match LocalSocketListener::bind(hot_reload_socket_path) {
-            Ok(local_socket_stream) => {
-                let aborted = Arc::new(Mutex::new(false));
-
-                // listen for connections
-                std::thread::spawn({
-                    let file_map = file_map.clone();
-                    let channels = channels.clone();
-                    let aborted = aborted.clone();
-                    let _ = local_socket_stream.set_nonblocking(true);
-                    move || {
-                        loop {
-                            if let Ok(mut connection) = local_socket_stream.accept() {
-                                // send any templates than have changed before the socket connected
-                                let templates: Vec<_> = {
-                                    file_map
-                                        .lock()
-                                        .unwrap()
-                                        .map
-                                        .values()
-                                        .filter_map(|(_, template_slot)| *template_slot)
-                                        .collect()
-                                };
-                                for template in templates {
-                                    if !send_msg(
-                                        HotReloadMsg::UpdateTemplate(template),
-                                        &mut connection,
-                                    ) {
-                                        continue;
-                                    }
-                                }
-                                channels.lock().unwrap().push(connection);
-                                if log {
-                                    println!("Connected to hot reloading 🚀");
-                                }
-                            }
-                            if *aborted.lock().unwrap() {
-                                break;
-                            }
+    let local_socket_stream = match LocalSocketListener::bind(hot_reload_socket_path) {
+        Ok(local_socket_stream) => local_socket_stream,
+        Err(err) => {
+            println!("failed to connect to hot reloading\n{err}");
+            return;
+        }
+    };
+
+    let aborted = Arc::new(Mutex::new(false));
+
+    // listen for connections
+    std::thread::spawn({
+        let file_map = file_map.clone();
+        let channels = channels.clone();
+        let aborted = aborted.clone();
+        let _ = local_socket_stream.set_nonblocking(true);
+        move || {
+            loop {
+                if let Ok(mut connection) = local_socket_stream.accept() {
+                    // send any templates that have changed before the socket connected
+                    let templates: Vec<_> = {
+                        file_map
+                            .lock()
+                            .unwrap()
+                            .map
+                            .values()
+                            .flat_map(|v| v.templates.values().copied())
+                            .collect()
+                    };
+
+                    for template in templates {
+                        if !send_msg(HotReloadMsg::UpdateTemplate(template), &mut connection) {
+                            continue;
                         }
                     }
-                });
+                    channels.lock().unwrap().push(connection);
+                    if log {
+                        println!("Connected to hot reloading 🚀");
+                    }
+                }
+                if *aborted.lock().unwrap() {
+                    break;
+                }
+            }
+        }
+    });
 
-                // watch for changes
-                std::thread::spawn(move || {
-                    let mut last_update_time = chrono::Local::now().timestamp();
+    // watch for changes
+    std::thread::spawn(move || {
+        let mut last_update_time = chrono::Local::now().timestamp();
 
-                    let (tx, rx) = std::sync::mpsc::channel();
+        let (tx, rx) = std::sync::mpsc::channel();
 
-                    let mut watcher =
-                        RecommendedWatcher::new(tx, notify::Config::default()).unwrap();
+        let mut watcher = RecommendedWatcher::new(tx, notify::Config::default()).unwrap();
 
-                    for path in listening_paths {
-                        let full_path = crate_dir.join(path);
-                        if let Err(err) = watcher.watch(&full_path, RecursiveMode::Recursive) {
-                            if log {
-                                println!(
-                                    "hot reloading failed to start watching {full_path:?}:\n{err:?}",
-                                );
-                            }
-                        }
+        let mut listening_pathbufs = vec![];
+
+        // We're attempting to watch the root path... which contains a target directory...
+        // And on some platforms the target directory is really really large and can cause the watcher to crash
+        // since it runs out of file handles
+        // So we're going to iterate through its children and watch them instead of the root path, skipping the target
+        // directory.
+        //
+        // In reality, this whole approach of doing embedded file watching is kinda hairy since you want full knowledge
+        // of where rust code is. We could just use the filemap we generated above as an indication of where the rust
+        // code is in this project and deduce the subfolders under the root path from that.
+        //
+        // FIXME: use a more robust system here for embedded discovery
+        //
+        // https://github.com/DioxusLabs/dioxus/issues/1914
+        if listening_paths == &[""] {
+            for entry in std::fs::read_dir(&crate_dir)
+                .expect("failed to read rust crate directory. Are you running with cargo?")
+            {
+                let entry = entry.expect("failed to read directory entry");
+                let path = entry.path();
+                if path.is_dir() {
+                    if path == target_dir {
+                        continue;
                     }
+                    listening_pathbufs.push(path);
+                }
+            }
+        } else {
+            for path in listening_paths {
+                let full_path = crate_dir.join(path);
+                listening_pathbufs.push(full_path);
+            }
+        }
 
-                    let mut rebuild = {
-                        let aborted = aborted.clone();
-                        let channels = channels.clone();
-                        move || {
-                            if let Some(rebuild_callback) = &mut rebuild_with {
-                                if log {
-                                    println!("Rebuilding the application...");
-                                }
-                                let shutdown = rebuild_callback();
+        for full_path in listening_pathbufs {
+            if let Err(err) = watcher.watch(&full_path, RecursiveMode::Recursive) {
+                if log {
+                    println!("hot reloading failed to start watching {full_path:?}:\n{err:?}",);
+                }
+            }
+        }
 
-                                if shutdown {
-                                    *aborted.lock().unwrap() = true;
-                                }
+        let mut rebuild = {
+            let aborted = aborted.clone();
+            let channels = channels.clone();
+            move || {
+                if let Some(rebuild_callback) = &mut rebuild_with {
+                    if log {
+                        println!("Rebuilding the application...");
+                    }
+                    let shutdown = rebuild_callback();
 
-                                for channel in &mut *channels.lock().unwrap() {
-                                    send_msg(HotReloadMsg::Shutdown, channel);
-                                }
+                    if shutdown {
+                        *aborted.lock().unwrap() = true;
+                    }
 
-                                return shutdown;
-                            } else if log {
-                                println!(
-                                    "Rebuild needed... shutting down hot reloading.\nManually rebuild the application to view further changes."
-                                );
-                            }
-                            true
-                        }
-                    };
+                    for channel in &mut *channels.lock().unwrap() {
+                        send_msg(HotReloadMsg::Shutdown, channel);
+                    }
 
-                    for evt in rx {
-                        if chrono::Local::now().timestamp_millis() >= last_update_time {
-                            if let Ok(evt) = evt {
-                                let real_paths = evt
-                                    .paths
-                                    .iter()
-                                    .filter(|path| {
-                                        // skip non rust files
-                                        matches!(
-                                            path.extension().and_then(|p| p.to_str()),
-                                            Some("rs" | "toml" | "css" | "html" | "js")
-                                        ) &&
-                                        // skip excluded paths
-                                        !excluded_paths.iter().any(|p| path.starts_with(p)) &&
-                                        // respect .gitignore
-                                        !gitignore
-                                            .matched_path_or_any_parents(path, false)
-                                            .is_ignore()
-                                    })
-                                    .collect::<Vec<_>>();
-
-                                // Give time for the change to take effect before reading the file
-                                if !real_paths.is_empty() {
-                                    std::thread::sleep(std::time::Duration::from_millis(10));
-                                }
+                    return shutdown;
+                } else if log {
+                    println!("Rebuild needed... shutting down hot reloading.\nManually rebuild the application to view further changes.");
+                }
+                true
+            }
+        };
 
-                                let mut channels = channels.lock().unwrap();
-                                for path in real_paths {
-                                    // if this file type cannot be hot reloaded, rebuild the application
-                                    if path.extension().and_then(|p| p.to_str()) != Some("rs")
-                                        && rebuild()
-                                    {
-                                        return;
-                                    }
-                                    // find changes to the rsx in the file
-                                    match file_map
-                                        .lock()
-                                        .unwrap()
-                                        .update_rsx(path, crate_dir.as_path())
-                                    {
-                                        Ok(UpdateResult::UpdatedRsx(msgs)) => {
-                                            for msg in msgs {
-                                                let mut i = 0;
-                                                while i < channels.len() {
-                                                    let channel = &mut channels[i];
-                                                    if send_msg(
-                                                        HotReloadMsg::UpdateTemplate(msg),
-                                                        channel,
-                                                    ) {
-                                                        i += 1;
-                                                    } else {
-                                                        channels.remove(i);
-                                                    }
-                                                }
-                                            }
-                                        }
-                                        Ok(UpdateResult::NeedsRebuild) => {
-                                            drop(channels);
-                                            if rebuild() {
-                                                return;
-                                            }
-                                            break;
-                                        }
-                                        Err(err) => {
-                                            if log {
-                                                println!(
-                                                    "hot reloading failed to update rsx:\n{err:?}"
-                                                );
-                                            }
-                                        }
-                                    }
+        for evt in rx {
+            if chrono::Local::now().timestamp_millis() < last_update_time {
+                continue;
+            }
+
+            let Ok(evt) = evt else {
+                continue;
+            };
+
+            let real_paths = evt
+                .paths
+                .iter()
+                .filter(|path| {
+                    // skip non rust files
+                    matches!(
+                        path.extension().and_then(|p| p.to_str()),
+                        Some("rs" | "toml" | "css" | "html" | "js")
+                    ) &&
+                    // skip excluded paths
+                    !excluded_paths.iter().any(|p| path.starts_with(p)) &&
+                    // respect .gitignore
+                    !gitignore
+                        .matched_path_or_any_parents(path, false)
+                        .is_ignore()
+                })
+                .collect::<Vec<_>>();
+
+            // Give time for the change to take effect before reading the file
+            if !real_paths.is_empty() {
+                std::thread::sleep(std::time::Duration::from_millis(10));
+            }
+
+            let mut channels = channels.lock().unwrap();
+            for path in real_paths {
+                // if this file type cannot be hot reloaded, rebuild the application
+                if path.extension().and_then(|p| p.to_str()) != Some("rs") && rebuild() {
+                    return;
+                }
+                // find changes to the rsx in the file
+                let changes = file_map
+                    .lock()
+                    .unwrap()
+                    .update_rsx(path, crate_dir.as_path());
+
+                match changes {
+                    Ok(UpdateResult::UpdatedRsx(msgs)) => {
+                        for msg in msgs {
+                            let mut i = 0;
+                            while i < channels.len() {
+                                let channel = &mut channels[i];
+                                if send_msg(HotReloadMsg::UpdateTemplate(msg), channel) {
+                                    i += 1;
+                                } else {
+                                    channels.remove(i);
                                 }
                             }
-                            last_update_time = chrono::Local::now().timestamp_millis();
                         }
                     }
-                });
+
+                    Ok(UpdateResult::NeedsRebuild) => {
+                        drop(channels);
+                        if rebuild() {
+                            return;
+                        }
+                        break;
+                    }
+                    Err(err) => {
+                        if log {
+                            println!("hot reloading failed to update rsx:\n{err:?}");
+                        }
+                    }
+                }
             }
-            Err(error) => println!("failed to connect to hot reloading\n{error}"),
+
+            last_update_time = chrono::Local::now().timestamp_millis();
         }
-    }
+    });
 }
 
 fn send_msg(msg: HotReloadMsg, channel: &mut impl Write) -> bool {
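Note: the body of `send_msg` is cut off in this hunk. Judging from the line-oriented reader in `connect` (see `packages/hot-reload/src/lib.rs` below), a compatible implementation would write one JSON-encoded message per line and report failure so the caller can drop the channel. A minimal sketch under that assumption, not the code from this commit:

    // Sketch only: one newline-delimited JSON message per call, mirroring the
    // BufReader::read_line + serde_json::from_str loop on the client side.
    fn send_msg(msg: HotReloadMsg, channel: &mut impl std::io::Write) -> bool {
        let Ok(mut line) = serde_json::to_string(&msg) else {
            return false;
        };
        line.push('\n');
        // A failed write means the client hung up; returning false tells the
        // watcher loop to remove this channel.
        channel.write_all(line.as_bytes()).is_ok()
    }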

+ 25 - 18
packages/hot-reload/src/lib.rs

@@ -6,7 +6,7 @@ use std::{
 use dioxus_core::Template;
 #[cfg(feature = "file_watcher")]
 pub use dioxus_html::HtmlCtx;
-use interprocess_docfix::local_socket::LocalSocketStream;
+use interprocess::local_socket::LocalSocketStream;
 use serde::{Deserialize, Serialize};
 
 #[cfg(feature = "custom_file_watcher")]
@@ -15,36 +15,43 @@ mod file_watcher;
 pub use file_watcher::*;
 
 /// A message the hot reloading server sends to the client
-#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
+#[derive(Debug, Serialize, Deserialize, Clone)]
 #[serde(bound(deserialize = "'de: 'static"))]
 pub enum HotReloadMsg {
     /// A template has been updated
     UpdateTemplate(Template),
+
+    /// An asset discovered by rsx! has been updated
+    UpdateAsset(PathBuf),
+
     /// The program needs to be recompiled, and the client should shut down
     Shutdown,
 }
 
 /// Connect to the hot reloading listener. The callback provided will be called every time a template change is detected
-pub fn connect(mut f: impl FnMut(HotReloadMsg) + Send + 'static) {
+pub fn connect(mut callback: impl FnMut(HotReloadMsg) + Send + 'static) {
     std::thread::spawn(move || {
         let path = PathBuf::from("./").join("target").join("dioxusin");
-        if let Ok(socket) = LocalSocketStream::connect(path) {
-            let mut buf_reader = BufReader::new(socket);
-            loop {
-                let mut buf = String::new();
-                match buf_reader.read_line(&mut buf) {
-                    Ok(_) => {
-                        let template: HotReloadMsg =
-                            serde_json::from_str(Box::leak(buf.into_boxed_str())).unwrap();
-                        f(template);
-                    }
-                    Err(err) => {
-                        if err.kind() != std::io::ErrorKind::WouldBlock {
-                            break;
-                        }
-                    }
+
+        let socket =
+            LocalSocketStream::connect(path).expect("Could not connect to hot reloading server.");
+
+        let mut buf_reader = BufReader::new(socket);
+
+        loop {
+            let mut buf = String::new();
+
+            if let Err(err) = buf_reader.read_line(&mut buf) {
+                if err.kind() != std::io::ErrorKind::WouldBlock {
+                    break;
                 }
             }
+
+            let template = serde_json::from_str(Box::leak(buf.into_boxed_str())).expect(
+                "Could not parse hot reloading message - make sure your client is up to date",
+            );
+
+            callback(template);
         }
     });
 }
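For context, renderers consume this API with a single callback per process; the liveview change later in this diff handles the same three variants. A rough usage sketch (`init_hot_reload` is a hypothetical wrapper, and the comments stand in for renderer-specific work):

    use dioxus_hot_reload::{connect, HotReloadMsg};

    fn init_hot_reload() {
        connect(move |msg| match msg {
            HotReloadMsg::UpdateTemplate(template) => {
                // hand the new template to the running VirtualDom
                let _ = template;
            }
            HotReloadMsg::UpdateAsset(path) => {
                // refresh or re-serve the asset at `path`
                let _ = path;
            }
            HotReloadMsg::Shutdown => std::process::exit(0),
        });
    }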

+ 1 - 1
packages/html-internal-macro/Cargo.toml

@@ -13,7 +13,7 @@ description = "HTML function macros for Dioxus"
 
 [dependencies]
 proc-macro2 = "1.0.66"
-syn = { version = "2", features = ["full"] }
+syn = { workspace = true, features = ["full"] }
 quote = "^1.0.26"
 convert_case = "^0.6.0"
 

+ 2 - 0
packages/liveview/src/pool.rs

@@ -218,6 +218,8 @@ pub async fn run(mut vdom: VirtualDom, ws: impl LiveViewSocket) -> Result<(), Li
                     dioxus_hot_reload::HotReloadMsg::UpdateTemplate(new_template) => {
                         vdom.replace_template(new_template);
                     }
+                    // todo: enable hotreloading in liveview
+                    dioxus_hot_reload::HotReloadMsg::UpdateAsset(_) => {}
                     dioxus_hot_reload::HotReloadMsg::Shutdown => {
                         std::process::exit(0);
                     },

+ 4 - 4
packages/router-macro/Cargo.toml

@@ -15,10 +15,10 @@ keywords = ["dom", "ui", "gui", "react", "router"]
 proc-macro = true
 
 [dependencies]
-syn = { version = "2.0", features = ["extra-traits", "full"] }
-quote = "1.0"
-proc-macro2 = "1.0.56"
-slab = "0.4"
+syn = { workspace = true, features = ["extra-traits", "full"] }
+quote = { workspace = true }
+proc-macro2 = { workspace = true }
+slab = { workspace = true }
 
 [features]
 default = []

+ 3 - 3
packages/rsx-rosetta/Cargo.toml

@@ -17,9 +17,9 @@ dioxus-autofmt = { workspace = true }
 dioxus-rsx = { workspace = true }
 dioxus-html = { workspace = true, features = ["html-to-rsx"]}
 html_parser = { workspace = true }
-proc-macro2 = "1.0.49"
-quote = "1.0.23"
-syn = { version = "2.0", features = ["full"] }
+proc-macro2 = { workspace = true }
+quote = { workspace = true }
+syn = { workspace = true, features = ["full"] }
 convert_case = "0.5.0"
 
 # [features]

+ 5 - 5
packages/rsx/Cargo.toml

@@ -13,13 +13,13 @@ keywords = ["dom", "ui", "gui", "react"]
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-proc-macro2 = { version = "1.0", features = ["span-locations"] }
+quote = { workspace = true }
+proc-macro2 = { workspace = true, features = ["span-locations"] }
 dioxus-core = { workspace = true, optional = true }
-syn = { version = "2.0", features = ["full", "extra-traits"] }
-quote = { version = "1.0" }
-serde = { version = "1.0", features = ["derive"], optional = true }
+syn = { workspace = true, features = ["full", "extra-traits"] }
+serde = { workspace = true, features = ["derive"], optional = true }
 internment = { version = "0.7.0", optional = true }
-krates = { version = "0.12.6", optional = true }
+krates = { version = "0.16.6", optional = true }
 tracing = { workspace = true }
 
 [features]

+ 133 - 122
packages/rsx/src/hot_reload/hot_reload_diff.rs

@@ -1,15 +1,38 @@
 use proc_macro2::TokenStream;
 use quote::ToTokens;
-use syn::{File, Macro};
+use syn::{Expr, File, Item, Macro, Stmt, TraitItem};
 
+#[derive(Debug)]
 pub enum DiffResult {
-    CodeChanged,
-    RsxChanged(Vec<(Macro, TokenStream)>),
+    /// Non-rsx was changed in the file
+    CodeChanged(NotreloadableReason),
+
+    /// Rsx was changed in the file
+    ///
+    /// Contains a list of macro invocations that were changed
+    RsxChanged { rsx_calls: Vec<ChangedRsx> },
+}
+
+#[derive(Debug)]
+pub enum NotreloadableReason {
+    RootMismatch,
+
+    RsxMismatch,
+}
+
+#[derive(Debug)]
+pub struct ChangedRsx {
+    /// The macro that was changed
+    pub old: Macro,
+
+    /// The new tokens for the macro
+    pub new: TokenStream,
 }
 
 /// Find any rsx calls in the given file and return a list of all the rsx calls that have changed.
-pub fn find_rsx(new: &File, old: &File) -> DiffResult {
+pub fn diff_rsx(new: &File, old: &File) -> DiffResult {
     let mut rsx_calls = Vec::new();
+
     if new.items.len() != old.items.len() {
         tracing::trace!(
             "found not hot reload-able change {:#?} != {:#?}",
@@ -22,8 +45,9 @@ pub fn find_rsx(new: &File, old: &File) -> DiffResult {
                 .map(|i| i.to_token_stream().to_string())
                 .collect::<Vec<_>>()
         );
-        return DiffResult::CodeChanged;
+        return DiffResult::CodeChanged(NotreloadableReason::RootMismatch);
     }
+
     for (new, old) in new.items.iter().zip(old.items.iter()) {
         if find_rsx_item(new, old, &mut rsx_calls) {
             tracing::trace!(
@@ -31,20 +55,18 @@ pub fn find_rsx(new: &File, old: &File) -> DiffResult {
                 new.to_token_stream().to_string(),
                 old.to_token_stream().to_string()
             );
-            return DiffResult::CodeChanged;
+
+            return DiffResult::CodeChanged(NotreloadableReason::RsxMismatch);
         }
     }
+
     tracing::trace!("found hot reload-able changes {:#?}", rsx_calls);
-    DiffResult::RsxChanged(rsx_calls)
+    DiffResult::RsxChanged { rsx_calls }
 }
 
-fn find_rsx_item(
-    new: &syn::Item,
-    old: &syn::Item,
-    rsx_calls: &mut Vec<(Macro, TokenStream)>,
-) -> bool {
+fn find_rsx_item(new: &Item, old: &Item, rsx_calls: &mut Vec<ChangedRsx>) -> bool {
     match (new, old) {
-        (syn::Item::Const(new_item), syn::Item::Const(old_item)) => {
+        (Item::Const(new_item), Item::Const(old_item)) => {
             find_rsx_expr(&new_item.expr, &old_item.expr, rsx_calls)
                 || new_item.attrs != old_item.attrs
                 || new_item.vis != old_item.vis
@@ -55,7 +77,7 @@ fn find_rsx_item(
                 || new_item.eq_token != old_item.eq_token
                 || new_item.semi_token != old_item.semi_token
         }
-        (syn::Item::Enum(new_item), syn::Item::Enum(old_item)) => {
+        (Item::Enum(new_item), Item::Enum(old_item)) => {
             if new_item.variants.len() != old_item.variants.len() {
                 return true;
             }
@@ -84,17 +106,15 @@ fn find_rsx_item(
                 || new_item.generics != old_item.generics
                 || new_item.brace_token != old_item.brace_token
         }
-        (syn::Item::ExternCrate(new_item), syn::Item::ExternCrate(old_item)) => {
-            old_item != new_item
-        }
-        (syn::Item::Fn(new_item), syn::Item::Fn(old_item)) => {
+        (Item::ExternCrate(new_item), Item::ExternCrate(old_item)) => old_item != new_item,
+        (Item::Fn(new_item), Item::Fn(old_item)) => {
             find_rsx_block(&new_item.block, &old_item.block, rsx_calls)
                 || new_item.attrs != old_item.attrs
                 || new_item.vis != old_item.vis
                 || new_item.sig != old_item.sig
         }
-        (syn::Item::ForeignMod(new_item), syn::Item::ForeignMod(old_item)) => old_item != new_item,
-        (syn::Item::Impl(new_item), syn::Item::Impl(old_item)) => {
+        (Item::ForeignMod(new_item), Item::ForeignMod(old_item)) => old_item != new_item,
+        (Item::Impl(new_item), Item::Impl(old_item)) => {
             if new_item.items.len() != old_item.items.len() {
                 return true;
             }
@@ -129,13 +149,13 @@ fn find_rsx_item(
                 || new_item.self_ty != old_item.self_ty
                 || new_item.brace_token != old_item.brace_token
         }
-        (syn::Item::Macro(new_item), syn::Item::Macro(old_item)) => {
+        (Item::Macro(new_item), Item::Macro(old_item)) => {
             find_rsx_macro(&new_item.mac, &old_item.mac, rsx_calls)
                 || new_item.attrs != old_item.attrs
                 || new_item.semi_token != old_item.semi_token
                 || new_item.ident != old_item.ident
         }
-        (syn::Item::Mod(new_item), syn::Item::Mod(old_item)) => {
+        (Item::Mod(new_item), Item::Mod(old_item)) => {
             match (&new_item.content, &old_item.content) {
                 (Some((_, new_items)), Some((_, old_items))) => {
                     if new_items.len() != old_items.len() {
@@ -162,7 +182,7 @@ fn find_rsx_item(
                 _ => true,
             }
         }
-        (syn::Item::Static(new_item), syn::Item::Static(old_item)) => {
+        (Item::Static(new_item), Item::Static(old_item)) => {
             find_rsx_expr(&new_item.expr, &old_item.expr, rsx_calls)
                 || new_item.attrs != old_item.attrs
                 || new_item.vis != old_item.vis
@@ -174,15 +194,16 @@ fn find_rsx_item(
                 || new_item.eq_token != old_item.eq_token
                 || new_item.semi_token != old_item.semi_token
         }
-        (syn::Item::Struct(new_item), syn::Item::Struct(old_item)) => old_item != new_item,
-        (syn::Item::Trait(new_item), syn::Item::Trait(old_item)) => {
+        (Item::Struct(new_item), Item::Struct(old_item)) => old_item != new_item,
+        (Item::Trait(new_item), Item::Trait(old_item)) => {
             find_rsx_trait(new_item, old_item, rsx_calls)
         }
-        (syn::Item::TraitAlias(new_item), syn::Item::TraitAlias(old_item)) => old_item != new_item,
-        (syn::Item::Type(new_item), syn::Item::Type(old_item)) => old_item != new_item,
-        (syn::Item::Union(new_item), syn::Item::Union(old_item)) => old_item != new_item,
-        (syn::Item::Use(new_item), syn::Item::Use(old_item)) => old_item != new_item,
-        (syn::Item::Verbatim(_), syn::Item::Verbatim(_)) => false,
+        (Item::TraitAlias(new_item), Item::TraitAlias(old_item)) => old_item != new_item,
+        (Item::Type(new_item), Item::Type(old_item)) => old_item != new_item,
+        (Item::Union(new_item), Item::Union(old_item)) => old_item != new_item,
+        (Item::Use(new_item), Item::Use(old_item)) => old_item != new_item,
+        (Item::Verbatim(_), Item::Verbatim(_)) => false,
+
         _ => true,
     }
 }
@@ -190,14 +211,14 @@ fn find_rsx_item(
 fn find_rsx_trait(
     new_item: &syn::ItemTrait,
     old_item: &syn::ItemTrait,
-    rsx_calls: &mut Vec<(Macro, TokenStream)>,
+    rsx_calls: &mut Vec<ChangedRsx>,
 ) -> bool {
     if new_item.items.len() != old_item.items.len() {
         return true;
     }
     for (new_item, old_item) in new_item.items.iter().zip(old_item.items.iter()) {
         if match (new_item, old_item) {
-            (syn::TraitItem::Const(new_item), syn::TraitItem::Const(old_item)) => {
+            (TraitItem::Const(new_item), TraitItem::Const(old_item)) => {
                 if let (Some((_, new_expr)), Some((_, old_expr))) =
                     (&new_item.default, &old_item.default)
                 {
@@ -206,7 +227,7 @@ fn find_rsx_trait(
                     true
                 }
             }
-            (syn::TraitItem::Fn(new_item), syn::TraitItem::Fn(old_item)) => {
+            (TraitItem::Fn(new_item), TraitItem::Fn(old_item)) => {
                 match (&new_item.default, &old_item.default) {
                     (Some(new_block), Some(old_block)) => {
                         find_rsx_block(new_block, old_block, rsx_calls)
@@ -215,13 +236,9 @@ fn find_rsx_trait(
                     _ => true,
                 }
             }
-            (syn::TraitItem::Type(new_item), syn::TraitItem::Type(old_item)) => {
-                old_item != new_item
-            }
-            (syn::TraitItem::Macro(new_item), syn::TraitItem::Macro(old_item)) => {
-                old_item != new_item
-            }
-            (syn::TraitItem::Verbatim(stream), syn::TraitItem::Verbatim(stream2)) => {
+            (TraitItem::Type(new_item), TraitItem::Type(old_item)) => old_item != new_item,
+            (TraitItem::Macro(new_item), TraitItem::Macro(old_item)) => old_item != new_item,
+            (TraitItem::Verbatim(stream), TraitItem::Verbatim(stream2)) => {
                 stream.to_string() != stream2.to_string()
             }
             _ => true,
@@ -229,6 +246,7 @@ fn find_rsx_trait(
             return true;
         }
     }
+
     new_item.attrs != old_item.attrs
         || new_item.vis != old_item.vis
         || new_item.unsafety != old_item.unsafety
@@ -243,30 +261,28 @@ fn find_rsx_trait(
 fn find_rsx_block(
     new_block: &syn::Block,
     old_block: &syn::Block,
-    rsx_calls: &mut Vec<(Macro, TokenStream)>,
+    rsx_calls: &mut Vec<ChangedRsx>,
 ) -> bool {
     if new_block.stmts.len() != old_block.stmts.len() {
         return true;
     }
+
     for (new_stmt, old_stmt) in new_block.stmts.iter().zip(old_block.stmts.iter()) {
         if find_rsx_stmt(new_stmt, old_stmt, rsx_calls) {
             return true;
         }
     }
+
     new_block.brace_token != old_block.brace_token
 }
 
-fn find_rsx_stmt(
-    new_stmt: &syn::Stmt,
-    old_stmt: &syn::Stmt,
-    rsx_calls: &mut Vec<(Macro, TokenStream)>,
-) -> bool {
+fn find_rsx_stmt(new_stmt: &Stmt, old_stmt: &Stmt, rsx_calls: &mut Vec<ChangedRsx>) -> bool {
     match (new_stmt, old_stmt) {
-        (syn::Stmt::Local(new_local), syn::Stmt::Local(old_local)) => {
+        (Stmt::Local(new_local), Stmt::Local(old_local)) => {
             (match (&new_local.init, &old_local.init) {
                 (Some(new_local), Some(old_local)) => {
                     find_rsx_expr(&new_local.expr, &old_local.expr, rsx_calls)
-                        || new_local != old_local
+                        || new_local.diverge != old_local.diverge
                 }
                 (None, None) => false,
                 _ => true,
@@ -275,13 +291,13 @@ fn find_rsx_stmt(
                 || new_local.pat != old_local.pat
                 || new_local.semi_token != old_local.semi_token)
         }
-        (syn::Stmt::Item(new_item), syn::Stmt::Item(old_item)) => {
+        (Stmt::Item(new_item), Stmt::Item(old_item)) => {
             find_rsx_item(new_item, old_item, rsx_calls)
         }
-        (syn::Stmt::Expr(new_expr, _), syn::Stmt::Expr(old_expr, _)) => {
+        (Stmt::Expr(new_expr, _), Stmt::Expr(old_expr, _)) => {
             find_rsx_expr(new_expr, old_expr, rsx_calls)
         }
-        (syn::Stmt::Macro(new_macro), syn::Stmt::Macro(old_macro)) => {
+        (Stmt::Macro(new_macro), Stmt::Macro(old_macro)) => {
             find_rsx_macro(&new_macro.mac, &old_macro.mac, rsx_calls)
                 || new_macro.attrs != old_macro.attrs
                 || new_macro.semi_token != old_macro.semi_token
@@ -290,13 +306,9 @@ fn find_rsx_stmt(
     }
 }
 
-fn find_rsx_expr(
-    new_expr: &syn::Expr,
-    old_expr: &syn::Expr,
-    rsx_calls: &mut Vec<(Macro, TokenStream)>,
-) -> bool {
+fn find_rsx_expr(new_expr: &Expr, old_expr: &Expr, rsx_calls: &mut Vec<ChangedRsx>) -> bool {
     match (new_expr, old_expr) {
-        (syn::Expr::Array(new_expr), syn::Expr::Array(old_expr)) => {
+        (Expr::Array(new_expr), Expr::Array(old_expr)) => {
             if new_expr.elems.len() != old_expr.elems.len() {
                 return true;
             }
@@ -307,52 +319,50 @@ fn find_rsx_expr(
             }
             new_expr.attrs != old_expr.attrs || new_expr.bracket_token != old_expr.bracket_token
         }
-        (syn::Expr::Assign(new_expr), syn::Expr::Assign(old_expr)) => {
+        (Expr::Assign(new_expr), Expr::Assign(old_expr)) => {
             find_rsx_expr(&new_expr.left, &old_expr.left, rsx_calls)
                 || find_rsx_expr(&new_expr.right, &old_expr.right, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.eq_token != old_expr.eq_token
         }
-        (syn::Expr::Async(new_expr), syn::Expr::Async(old_expr)) => {
+        (Expr::Async(new_expr), Expr::Async(old_expr)) => {
             find_rsx_block(&new_expr.block, &old_expr.block, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.async_token != old_expr.async_token
                 || new_expr.capture != old_expr.capture
         }
-        (syn::Expr::Await(new_expr), syn::Expr::Await(old_expr)) => {
+        (Expr::Await(new_expr), Expr::Await(old_expr)) => {
             find_rsx_expr(&new_expr.base, &old_expr.base, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.dot_token != old_expr.dot_token
                 || new_expr.await_token != old_expr.await_token
         }
-        (syn::Expr::Binary(new_expr), syn::Expr::Binary(old_expr)) => {
+        (Expr::Binary(new_expr), Expr::Binary(old_expr)) => {
             find_rsx_expr(&new_expr.left, &old_expr.left, rsx_calls)
                 || find_rsx_expr(&new_expr.right, &old_expr.right, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.op != old_expr.op
         }
-        (syn::Expr::Block(new_expr), syn::Expr::Block(old_expr)) => {
+        (Expr::Block(new_expr), Expr::Block(old_expr)) => {
             find_rsx_block(&new_expr.block, &old_expr.block, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.label != old_expr.label
         }
-        (syn::Expr::Break(new_expr), syn::Expr::Break(old_expr)) => {
-            match (&new_expr.expr, &old_expr.expr) {
-                (Some(new_inner), Some(old_inner)) => {
-                    find_rsx_expr(new_inner, old_inner, rsx_calls)
-                        || new_expr.attrs != old_expr.attrs
-                        || new_expr.break_token != old_expr.break_token
-                        || new_expr.label != old_expr.label
-                }
-                (None, None) => {
-                    new_expr.attrs != old_expr.attrs
-                        || new_expr.break_token != old_expr.break_token
-                        || new_expr.label != old_expr.label
-                }
-                _ => true,
+        (Expr::Break(new_expr), Expr::Break(old_expr)) => match (&new_expr.expr, &old_expr.expr) {
+            (Some(new_inner), Some(old_inner)) => {
+                find_rsx_expr(new_inner, old_inner, rsx_calls)
+                    || new_expr.attrs != old_expr.attrs
+                    || new_expr.break_token != old_expr.break_token
+                    || new_expr.label != old_expr.label
             }
-        }
-        (syn::Expr::Call(new_expr), syn::Expr::Call(old_expr)) => {
+            (None, None) => {
+                new_expr.attrs != old_expr.attrs
+                    || new_expr.break_token != old_expr.break_token
+                    || new_expr.label != old_expr.label
+            }
+            _ => true,
+        },
+        (Expr::Call(new_expr), Expr::Call(old_expr)) => {
             find_rsx_expr(&new_expr.func, &old_expr.func, rsx_calls);
             if new_expr.args.len() != old_expr.args.len() {
                 return true;
@@ -364,13 +374,13 @@ fn find_rsx_expr(
             }
             new_expr.attrs != old_expr.attrs || new_expr.paren_token != old_expr.paren_token
         }
-        (syn::Expr::Cast(new_expr), syn::Expr::Cast(old_expr)) => {
+        (Expr::Cast(new_expr), Expr::Cast(old_expr)) => {
             find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.as_token != old_expr.as_token
                 || new_expr.ty != old_expr.ty
         }
-        (syn::Expr::Closure(new_expr), syn::Expr::Closure(old_expr)) => {
+        (Expr::Closure(new_expr), Expr::Closure(old_expr)) => {
             find_rsx_expr(&new_expr.body, &old_expr.body, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.movability != old_expr.movability
@@ -381,19 +391,19 @@ fn find_rsx_expr(
                 || new_expr.or2_token != old_expr.or2_token
                 || new_expr.output != old_expr.output
         }
-        (syn::Expr::Const(new_expr), syn::Expr::Const(old_expr)) => {
+        (Expr::Const(new_expr), Expr::Const(old_expr)) => {
             find_rsx_block(&new_expr.block, &old_expr.block, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.const_token != old_expr.const_token
         }
-        (syn::Expr::Continue(new_expr), syn::Expr::Continue(old_expr)) => old_expr != new_expr,
-        (syn::Expr::Field(new_expr), syn::Expr::Field(old_expr)) => {
+        (Expr::Continue(new_expr), Expr::Continue(old_expr)) => old_expr != new_expr,
+        (Expr::Field(new_expr), Expr::Field(old_expr)) => {
             find_rsx_expr(&new_expr.base, &old_expr.base, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.dot_token != old_expr.dot_token
                 || new_expr.member != old_expr.member
         }
-        (syn::Expr::ForLoop(new_expr), syn::Expr::ForLoop(old_expr)) => {
+        (Expr::ForLoop(new_expr), Expr::ForLoop(old_expr)) => {
             find_rsx_block(&new_expr.body, &old_expr.body, rsx_calls)
                 || find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
@@ -402,10 +412,10 @@ fn find_rsx_expr(
                 || new_expr.pat != old_expr.pat
                 || new_expr.in_token != old_expr.in_token
         }
-        (syn::Expr::Group(new_expr), syn::Expr::Group(old_expr)) => {
+        (Expr::Group(new_expr), Expr::Group(old_expr)) => {
             find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
         }
-        (syn::Expr::If(new_expr), syn::Expr::If(old_expr)) => {
+        (Expr::If(new_expr), Expr::If(old_expr)) => {
             if find_rsx_expr(&new_expr.cond, &old_expr.cond, rsx_calls)
                 || find_rsx_block(&new_expr.then_branch, &old_expr.then_branch, rsx_calls)
             {
@@ -427,32 +437,32 @@ fn find_rsx_expr(
                 _ => true,
             }
         }
-        (syn::Expr::Index(new_expr), syn::Expr::Index(old_expr)) => {
+        (Expr::Index(new_expr), Expr::Index(old_expr)) => {
             find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
                 || find_rsx_expr(&new_expr.index, &old_expr.index, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.bracket_token != old_expr.bracket_token
         }
-        (syn::Expr::Infer(new_expr), syn::Expr::Infer(old_expr)) => new_expr != old_expr,
-        (syn::Expr::Let(new_expr), syn::Expr::Let(old_expr)) => {
+        (Expr::Infer(new_expr), Expr::Infer(old_expr)) => new_expr != old_expr,
+        (Expr::Let(new_expr), Expr::Let(old_expr)) => {
             find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.let_token != old_expr.let_token
                 || new_expr.pat != old_expr.pat
                 || new_expr.eq_token != old_expr.eq_token
         }
-        (syn::Expr::Lit(new_expr), syn::Expr::Lit(old_expr)) => old_expr != new_expr,
-        (syn::Expr::Loop(new_expr), syn::Expr::Loop(old_expr)) => {
+        (Expr::Lit(new_expr), Expr::Lit(old_expr)) => old_expr != new_expr,
+        (Expr::Loop(new_expr), Expr::Loop(old_expr)) => {
             find_rsx_block(&new_expr.body, &old_expr.body, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.label != old_expr.label
                 || new_expr.loop_token != old_expr.loop_token
         }
-        (syn::Expr::Macro(new_expr), syn::Expr::Macro(old_expr)) => {
+        (Expr::Macro(new_expr), Expr::Macro(old_expr)) => {
             find_rsx_macro(&new_expr.mac, &old_expr.mac, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
         }
-        (syn::Expr::Match(new_expr), syn::Expr::Match(old_expr)) => {
+        (Expr::Match(new_expr), Expr::Match(old_expr)) => {
             if find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls) {
                 return true;
             }
@@ -479,7 +489,7 @@ fn find_rsx_expr(
                 || new_expr.match_token != old_expr.match_token
                 || new_expr.brace_token != old_expr.brace_token
         }
-        (syn::Expr::MethodCall(new_expr), syn::Expr::MethodCall(old_expr)) => {
+        (Expr::MethodCall(new_expr), Expr::MethodCall(old_expr)) => {
             if find_rsx_expr(&new_expr.receiver, &old_expr.receiver, rsx_calls) {
                 return true;
             }
@@ -494,13 +504,13 @@ fn find_rsx_expr(
                 || new_expr.turbofish != old_expr.turbofish
                 || new_expr.paren_token != old_expr.paren_token
         }
-        (syn::Expr::Paren(new_expr), syn::Expr::Paren(old_expr)) => {
+        (Expr::Paren(new_expr), Expr::Paren(old_expr)) => {
             find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.paren_token != old_expr.paren_token
         }
-        (syn::Expr::Path(new_expr), syn::Expr::Path(old_expr)) => old_expr != new_expr,
-        (syn::Expr::Range(new_expr), syn::Expr::Range(old_expr)) => {
+        (Expr::Path(new_expr), Expr::Path(old_expr)) => old_expr != new_expr,
+        (Expr::Range(new_expr), Expr::Range(old_expr)) => {
             match (&new_expr.start, &old_expr.start) {
                 (Some(new_expr), Some(old_expr)) => {
                     if find_rsx_expr(new_expr, old_expr, rsx_calls) {
@@ -522,20 +532,20 @@ fn find_rsx_expr(
                 _ => true,
             }
         }
-        (syn::Expr::Reference(new_expr), syn::Expr::Reference(old_expr)) => {
+        (Expr::Reference(new_expr), Expr::Reference(old_expr)) => {
             find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.and_token != old_expr.and_token
                 || new_expr.mutability != old_expr.mutability
         }
-        (syn::Expr::Repeat(new_expr), syn::Expr::Repeat(old_expr)) => {
+        (Expr::Repeat(new_expr), Expr::Repeat(old_expr)) => {
             find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
                 || find_rsx_expr(&new_expr.len, &old_expr.len, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.bracket_token != old_expr.bracket_token
                 || new_expr.semi_token != old_expr.semi_token
         }
-        (syn::Expr::Return(new_expr), syn::Expr::Return(old_expr)) => {
+        (Expr::Return(new_expr), Expr::Return(old_expr)) => {
             match (&new_expr.expr, &old_expr.expr) {
                 (Some(new_inner), Some(old_inner)) => {
                     find_rsx_expr(new_inner, old_inner, rsx_calls)
@@ -549,7 +559,7 @@ fn find_rsx_expr(
                 _ => true,
             }
         }
-        (syn::Expr::Struct(new_expr), syn::Expr::Struct(old_expr)) => {
+        (Expr::Struct(new_expr), Expr::Struct(old_expr)) => {
             match (&new_expr.rest, &old_expr.rest) {
                 (Some(new_expr), Some(old_expr)) => {
                     if find_rsx_expr(new_expr, old_expr, rsx_calls) {
@@ -573,17 +583,17 @@ fn find_rsx_expr(
                 || new_expr.brace_token != old_expr.brace_token
                 || new_expr.dot2_token != old_expr.dot2_token
         }
-        (syn::Expr::Try(new_expr), syn::Expr::Try(old_expr)) => {
+        (Expr::Try(new_expr), Expr::Try(old_expr)) => {
             find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.question_token != old_expr.question_token
         }
-        (syn::Expr::TryBlock(new_expr), syn::Expr::TryBlock(old_expr)) => {
+        (Expr::TryBlock(new_expr), Expr::TryBlock(old_expr)) => {
             find_rsx_block(&new_expr.block, &old_expr.block, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.try_token != old_expr.try_token
         }
-        (syn::Expr::Tuple(new_expr), syn::Expr::Tuple(old_expr)) => {
+        (Expr::Tuple(new_expr), Expr::Tuple(old_expr)) => {
             for (new_el, old_el) in new_expr.elems.iter().zip(old_expr.elems.iter()) {
                 if find_rsx_expr(new_el, old_el, rsx_calls) {
                     return true;
@@ -591,37 +601,35 @@ fn find_rsx_expr(
             }
             new_expr.attrs != old_expr.attrs || new_expr.paren_token != old_expr.paren_token
         }
-        (syn::Expr::Unary(new_expr), syn::Expr::Unary(old_expr)) => {
+        (Expr::Unary(new_expr), Expr::Unary(old_expr)) => {
             find_rsx_expr(&new_expr.expr, &old_expr.expr, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.op != old_expr.op
         }
-        (syn::Expr::Unsafe(new_expr), syn::Expr::Unsafe(old_expr)) => {
+        (Expr::Unsafe(new_expr), Expr::Unsafe(old_expr)) => {
             find_rsx_block(&new_expr.block, &old_expr.block, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.unsafe_token != old_expr.unsafe_token
         }
-        (syn::Expr::While(new_expr), syn::Expr::While(old_expr)) => {
+        (Expr::While(new_expr), Expr::While(old_expr)) => {
             find_rsx_expr(&new_expr.cond, &old_expr.cond, rsx_calls)
                 || find_rsx_block(&new_expr.body, &old_expr.body, rsx_calls)
                 || new_expr.attrs != old_expr.attrs
                 || new_expr.label != old_expr.label
                 || new_expr.while_token != old_expr.while_token
         }
-        (syn::Expr::Yield(new_expr), syn::Expr::Yield(old_expr)) => {
-            match (&new_expr.expr, &old_expr.expr) {
-                (Some(new_inner), Some(old_inner)) => {
-                    find_rsx_expr(new_inner, old_inner, rsx_calls)
-                        || new_expr.attrs != old_expr.attrs
-                        || new_expr.yield_token != old_expr.yield_token
-                }
-                (None, None) => {
-                    new_expr.attrs != old_expr.attrs || new_expr.yield_token != old_expr.yield_token
-                }
-                _ => true,
+        (Expr::Yield(new_expr), Expr::Yield(old_expr)) => match (&new_expr.expr, &old_expr.expr) {
+            (Some(new_inner), Some(old_inner)) => {
+                find_rsx_expr(new_inner, old_inner, rsx_calls)
+                    || new_expr.attrs != old_expr.attrs
+                    || new_expr.yield_token != old_expr.yield_token
             }
-        }
-        (syn::Expr::Verbatim(stream), syn::Expr::Verbatim(stream2)) => {
+            (None, None) => {
+                new_expr.attrs != old_expr.attrs || new_expr.yield_token != old_expr.yield_token
+            }
+            _ => true,
+        },
+        (Expr::Verbatim(stream), Expr::Verbatim(stream2)) => {
             stream.to_string() != stream2.to_string()
         }
         _ => true,
@@ -631,7 +639,7 @@ fn find_rsx_expr(
 fn find_rsx_macro(
     new_mac: &syn::Macro,
     old_mac: &syn::Macro,
-    rsx_calls: &mut Vec<(Macro, TokenStream)>,
+    rsx_calls: &mut Vec<ChangedRsx>,
 ) -> bool {
     if matches!(
         new_mac
@@ -648,7 +656,10 @@ fn find_rsx_macro(
             .as_deref(),
         Some("rsx" | "render")
     ) {
-        rsx_calls.push((old_mac.clone(), new_mac.tokens.clone()));
+        rsx_calls.push(ChangedRsx {
+            old: old_mac.clone(),
+            new: new_mac.tokens.clone(),
+        });
         false
     } else {
         new_mac != old_mac
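`diff_rsx` (renamed from `find_rsx`) is exercised directly by the tests added at the end of this PR. A condensed sketch of that calling pattern, assuming the `dioxus_rsx::hot_reload` re-exports the tests use:

    use dioxus_rsx::hot_reload::{diff_rsx, ChangedRsx, DiffResult};

    // Sketch: decide whether a file edit is hot reloadable. `None` means the
    // caller should fall back to a full rebuild.
    fn hot_reloadable_calls(old_src: &str, new_src: &str) -> Option<Vec<ChangedRsx>> {
        let old = syn::parse_file(old_src).ok()?;
        let new = syn::parse_file(new_src).ok()?;

        match diff_rsx(&new, &old) {
            DiffResult::RsxChanged { rsx_calls } => Some(rsx_calls),
            DiffResult::CodeChanged(_) => None,
        }
    }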

+ 324 - 140
packages/rsx/src/hot_reload/hot_reloading_file_map.rs

@@ -1,21 +1,28 @@
 use crate::{CallBody, HotReloadingContext};
-use dioxus_core::Template;
+use dioxus_core::{
+    prelude::{TemplateAttribute, TemplateNode},
+    Template,
+};
 use krates::cm::MetadataCommand;
 use krates::Cmd;
 pub use proc_macro2::TokenStream;
 pub use std::collections::HashMap;
-use std::path::PathBuf;
 pub use std::sync::Mutex;
 pub use std::time::SystemTime;
+use std::{collections::HashSet, path::PathBuf};
 pub use std::{fs, io, path::Path};
 pub use std::{fs::File, io::Read};
 pub use syn::__private::ToTokens;
 use syn::spanned::Spanned;
 
-use super::hot_reload_diff::{find_rsx, DiffResult};
+use super::{
+    hot_reload_diff::{diff_rsx, DiffResult},
+    ChangedRsx,
+};
 
 pub enum UpdateResult {
     UpdatedRsx(Vec<Template>),
+
     NeedsRebuild,
 }
 
@@ -23,16 +30,29 @@ pub enum UpdateResult {
 pub struct FileMapBuildResult<Ctx: HotReloadingContext> {
     /// The FileMap that was built
     pub map: FileMap<Ctx>,
+
     /// Any errors that occurred while building the FileMap that were not fatal
     pub errors: Vec<io::Error>,
 }
 
 pub struct FileMap<Ctx: HotReloadingContext> {
-    pub map: HashMap<PathBuf, (String, Option<Template>)>,
+    pub map: HashMap<PathBuf, CachedSynFile>,
+
     in_workspace: HashMap<PathBuf, Option<PathBuf>>,
+
     phantom: std::marker::PhantomData<Ctx>,
 }
 
+/// A cached file that has been parsed
+///
+/// We store the templates found in this file
+pub struct CachedSynFile {
+    pub raw: String,
+    pub path: PathBuf,
+    pub templates: HashMap<&'static str, Template>,
+    pub tracked_assets: HashSet<PathBuf>,
+}
+
 impl<Ctx: HotReloadingContext> FileMap<Ctx> {
     /// Create a new FileMap from a crate directory
     pub fn create(path: PathBuf) -> io::Result<FileMapBuildResult<Ctx>> {
@@ -41,165 +61,329 @@ impl<Ctx: HotReloadingContext> FileMap<Ctx> {
 
     /// Create a new FileMap from a crate directory
     pub fn create_with_filter(
-        path: PathBuf,
+        crate_dir: PathBuf,
         mut filter: impl FnMut(&Path) -> bool,
     ) -> io::Result<FileMapBuildResult<Ctx>> {
-        struct FileMapSearchResult {
-            map: HashMap<PathBuf, (String, Option<Template>)>,
-            errors: Vec<io::Error>,
-        }
-        fn find_rs_files(
-            root: PathBuf,
-            filter: &mut impl FnMut(&Path) -> bool,
-        ) -> FileMapSearchResult {
-            let mut files = HashMap::new();
-            let mut errors = Vec::new();
-            if root.is_dir() {
-                let read_dir = match fs::read_dir(root) {
-                    Ok(read_dir) => read_dir,
-                    Err(err) => {
-                        errors.push(err);
-                        return FileMapSearchResult { map: files, errors };
-                    }
-                };
-                for entry in read_dir.flatten() {
-                    let path = entry.path();
-                    if !filter(&path) {
-                        let FileMapSearchResult {
-                            map,
-                            errors: child_errors,
-                        } = find_rs_files(path, filter);
-                        errors.extend(child_errors);
-                        files.extend(map);
-                    }
-                }
-            } else if root.extension().and_then(|s| s.to_str()) == Some("rs") {
-                if let Ok(mut file) = File::open(root.clone()) {
-                    let mut src = String::new();
-                    match file.read_to_string(&mut src) {
-                        Ok(_) => {
-                            files.insert(root, (src, None));
-                        }
-                        Err(err) => {
-                            errors.push(err);
-                        }
-                    }
-                }
-            }
-            FileMapSearchResult { map: files, errors }
-        }
+        let FileMapSearchResult { map, errors } = find_rs_files(crate_dir.clone(), &mut filter);
 
-        let FileMapSearchResult { map, errors } = find_rs_files(path, &mut filter);
-        let result = Self {
+        let mut map = Self {
             map,
             in_workspace: HashMap::new(),
             phantom: std::marker::PhantomData,
         };
-        Ok(FileMapBuildResult {
-            map: result,
-            errors,
-        })
+
+        map.load_assets(crate_dir.as_path());
+
+        Ok(FileMapBuildResult { errors, map })
+    }
+
+    /// Start watching assets for changes
+    ///
+    /// This just diffs every file against itself and populates the tracked assets as it goes
+    pub fn load_assets(&mut self, crate_dir: &Path) {
+        let keys = self.map.keys().cloned().collect::<Vec<_>>();
+        for file in keys {
+            _ = self.update_rsx(file.as_path(), crate_dir);
+        }
     }
 
     /// Try to update the rsx in a file
-    pub fn update_rsx(&mut self, file_path: &Path, crate_dir: &Path) -> io::Result<UpdateResult> {
+    pub fn update_rsx(
+        &mut self,
+        file_path: &Path,
+        crate_dir: &Path,
+    ) -> Result<UpdateResult, HotreloadError> {
         let mut file = File::open(file_path)?;
         let mut src = String::new();
         file.read_to_string(&mut src)?;
-        if let Ok(syntax) = syn::parse_file(&src) {
-            let in_workspace = self.child_in_workspace(crate_dir)?;
-            if let Some((old_src, template_slot)) = self.map.get_mut(file_path) {
-                if let Ok(old) = syn::parse_file(old_src) {
-                    match find_rsx(&syntax, &old) {
-                        DiffResult::CodeChanged => {
-                            self.map.insert(file_path.to_path_buf(), (src, None));
-                        }
-                        DiffResult::RsxChanged(changed) => {
-                            let mut messages: Vec<Template> = Vec::new();
-                            for (old, new) in changed.into_iter() {
-                                let old_start = old.span().start();
-
-                                if let (Ok(old_call_body), Ok(new_call_body)) = (
-                                    syn::parse2::<CallBody>(old.tokens),
-                                    syn::parse2::<CallBody>(new),
-                                ) {
-                                    // if the file!() macro is invoked in a workspace, the path is relative to the workspace root, otherwise it's relative to the crate root
-                                    // we need to check if the file is in a workspace or not and strip the prefix accordingly
-                                    let prefix = if let Some(workspace) = &in_workspace {
-                                        workspace
-                                    } else {
-                                        crate_dir
-                                    };
-                                    if let Ok(file) = file_path.strip_prefix(prefix) {
-                                        let line = old_start.line;
-                                        let column = old_start.column + 1;
-                                        let location = file.display().to_string()
-                                        + ":"
-                                        + &line.to_string()
-                                        + ":"
-                                        + &column.to_string()
-                                        // the byte index doesn't matter, but dioxus needs it
-                                        + ":0";
-
-                                        if let Some(template) = new_call_body
-                                            .update_template::<Ctx>(
-                                                Some(old_call_body),
-                                                Box::leak(location.into_boxed_str()),
-                                            )
-                                        {
-                                            // dioxus cannot handle empty templates
-                                            if template.roots.is_empty() {
-                                                return Ok(UpdateResult::NeedsRebuild);
-                                            } else {
-                                                // if the template is the same, don't send it
-                                                if let Some(old_template) = template_slot {
-                                                    if old_template == &template {
-                                                        continue;
-                                                    }
-                                                }
-                                                *template_slot = Some(template);
-                                                messages.push(template);
-                                            }
-                                        } else {
-                                            return Ok(UpdateResult::NeedsRebuild);
-                                        }
-                                    }
-                                }
-                            }
-                            return Ok(UpdateResult::UpdatedRsx(messages));
+
+        // If we can't parse the contents we want to pass it off to the build system to tell the user that there's a syntax error
+        let syntax = syn::parse_file(&src).map_err(|_err| HotreloadError::Parse)?;
+
+        let in_workspace = self.child_in_workspace(crate_dir)?;
+
+        // Get the cached file if it exists, otherwise try to create it
+        let Some(old_cached) = self.map.get_mut(file_path) else {
+            // if this is a new file, rebuild the project
+            let FileMapBuildResult { map, mut errors } =
+                FileMap::<Ctx>::create(crate_dir.to_path_buf())?;
+
+            if let Some(err) = errors.pop() {
+                return Err(HotreloadError::Failure(err));
+            }
+
+            // merge the new map into the old map
+            self.map.extend(map.map);
+
+            return Ok(UpdateResult::NeedsRebuild);
+        };
+
+        // If the cached file is not a valid rsx file, rebuild the project, forcing errors
+        // TODO: in theory the error is confined to the rsx CallBody. We could attempt to parse it using partial expansion
+        // and collect its errors instead of falling back to a full rebuild
+        let old = syn::parse_file(&old_cached.raw).map_err(|_e| HotreloadError::Parse)?;
+
+        let instances = match diff_rsx(&syntax, &old) {
+            // If the changes were just some rsx, we can just update the template
+            //
+            // However... if the changes involved code in the rsx itself, this should actually be a CodeChanged
+            DiffResult::RsxChanged {
+                rsx_calls: instances,
+            } => instances,
+
+            // If the changes were some code, we should insert the file into the map and rebuild
+            // todo: not sure we even need to put the cached file into the map, but whatever
+            DiffResult::CodeChanged(_) => {
+                let cached_file = CachedSynFile {
+                    raw: src.clone(),
+                    path: file_path.to_path_buf(),
+                    templates: HashMap::new(),
+                    tracked_assets: HashSet::new(),
+                };
+
+                self.map.insert(file_path.to_path_buf(), cached_file);
+                return Ok(UpdateResult::NeedsRebuild);
+            }
+        };
+
+        let mut messages: Vec<Template> = Vec::new();
+
+        for calls in instances.into_iter() {
+            let ChangedRsx { old, new } = calls;
+
+            let old_start = old.span().start();
+
+            let old_parsed = syn::parse2::<CallBody>(old.tokens);
+            let new_parsed = syn::parse2::<CallBody>(new);
+            let (Ok(old_call_body), Ok(new_call_body)) = (old_parsed, new_parsed) else {
+                continue;
+            };
+
+            // if the file!() macro is invoked in a workspace, the path is relative to the workspace root, otherwise it's relative to the crate root
+            // we need to check if the file is in a workspace or not and strip the prefix accordingly
+            let prefix = match in_workspace {
+                Some(ref workspace) => workspace,
+                _ => crate_dir,
+            };
+
+            let Ok(file) = file_path.strip_prefix(prefix) else {
+                continue;
+            };
+
+            // We leak the template since templates are a compiletime value
+            // This is not ideal, but also not a huge deal for hot reloading
+            // TODO: we could consider arena allocating the templates and dropping them when the connection is closed
+            let leaked_location = Box::leak(template_location(old_start, file).into_boxed_str());
+
+            // Returns Some(template) if the template is hotreloadable
+            // dynamic changes are not hot reloadable and force a rebuild
+            let hotreloadable_template =
+                new_call_body.update_template::<Ctx>(Some(old_call_body), leaked_location);
+
+            // if the template is not hotreloadable, we need to do a full rebuild
+            let Some(template) = hotreloadable_template else {
+                return Ok(UpdateResult::NeedsRebuild);
+            };
+
+            // dioxus cannot handle empty templates...
+            // todo: I think it can? or we just skip them now
+            if template.roots.is_empty() {
+                continue;
+            }
+
+            // if the template is the same, don't send it
+            if let Some(old_template) = old_cached.templates.get(template.name) {
+                if old_template == &template {
+                    continue;
+                }
+            };
+
+            // update the cached file
+            old_cached.templates.insert(template.name, template);
+
+            // Track any new assets
+            old_cached
+                .tracked_assets
+                .extend(Self::populate_assets(template));
+
+            messages.push(template);
+        }
+
+        Ok(UpdateResult::UpdatedRsx(messages))
+    }
+
+    fn populate_assets(template: Template) -> HashSet<PathBuf> {
+        fn collect_assetlike_attrs(node: &TemplateNode, asset_urls: &mut HashSet<PathBuf>) {
+            if let TemplateNode::Element {
+                attrs, children, ..
+            } = node
+            {
+                for attr in attrs.iter() {
+                    if let TemplateAttribute::Static { name, value, .. } = attr {
+                        if *name == "src" || *name == "href" {
+                            asset_urls.insert(PathBuf::from(*value));
                         }
                     }
                 }
-            } else {
-                // if this is a new file, rebuild the project
-                let FileMapBuildResult { map, mut errors } =
-                    FileMap::create(crate_dir.to_path_buf())?;
-                if let Some(err) = errors.pop() {
-                    return Err(err);
+
+                for child in children.iter() {
+                    collect_assetlike_attrs(child, asset_urls);
                 }
-                *self = map;
             }
         }
-        Ok(UpdateResult::NeedsRebuild)
+
+        let mut asset_urls = HashSet::new();
+
+        for node in template.roots {
+            collect_assetlike_attrs(node, &mut asset_urls);
+        }
+
+        asset_urls
+    }
+
+    /// add the template to an existing file in the filemap if it exists
+    /// create a new file if it doesn't exist
+    pub fn insert(&mut self, path: PathBuf, template: Template) {
+        let tracked_assets = Self::populate_assets(template);
+
+        if self.map.contains_key(&path) {
+            let entry = self.map.get_mut(&path).unwrap();
+            entry.tracked_assets.extend(tracked_assets);
+            entry.templates.insert(template.name, template);
+        } else {
+            self.map.insert(
+                path.clone(),
+                CachedSynFile {
+                    raw: String::new(),
+                    path,
+                    tracked_assets,
+                    templates: HashMap::from([(template.name, template)]),
+                },
+            );
+        }
+    }
+
+    pub fn tracked_assets(&self) -> HashSet<PathBuf> {
+        self.map
+            .values()
+            .flat_map(|file| file.tracked_assets.iter().cloned())
+            .collect()
+    }
+
+    pub fn is_tracking_asset(&self, path: &PathBuf) -> Option<&CachedSynFile> {
+        self.map
+            .values()
+            .find(|file| file.tracked_assets.contains(path))
     }
 
     fn child_in_workspace(&mut self, crate_dir: &Path) -> io::Result<Option<PathBuf>> {
         if let Some(in_workspace) = self.in_workspace.get(crate_dir) {
-            Ok(in_workspace.clone())
-        } else {
-            let mut cmd = Cmd::new();
-            let manafest_path = crate_dir.join("Cargo.toml");
-            cmd.manifest_path(&manafest_path);
-            let cmd: MetadataCommand = cmd.into();
-            let metadata = cmd
-                .exec()
-                .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?;
-
-            let in_workspace = metadata.workspace_root != crate_dir;
-            let workspace_path = in_workspace.then(|| metadata.workspace_root.into());
-            self.in_workspace
-                .insert(crate_dir.to_path_buf(), workspace_path.clone());
-            Ok(workspace_path)
+            return Ok(in_workspace.clone());
         }
+
+        let mut cmd = Cmd::new();
+        let manafest_path = crate_dir.join("Cargo.toml");
+        cmd.manifest_path(&manafest_path);
+        let cmd: MetadataCommand = cmd.into();
+        let metadata = cmd
+            .exec()
+            .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?;
+
+        let in_workspace = metadata.workspace_root != crate_dir;
+        let workspace_path = in_workspace.then(|| metadata.workspace_root.into());
+        self.in_workspace
+            .insert(crate_dir.to_path_buf(), workspace_path.clone());
+        Ok(workspace_path)
+    }
+}
+
+fn template_location(old_start: proc_macro2::LineColumn, file: &Path) -> String {
+    let line = old_start.line;
+    let column = old_start.column + 1;
+    let location = file.display().to_string()
+        + ":"
+        + &line.to_string()
+        + ":"
+        + &column.to_string()
+        // the byte index doesn't matter, but dioxus needs it
+        + ":0";
+    location
+}
+
+struct FileMapSearchResult {
+    map: HashMap<PathBuf, CachedSynFile>,
+    errors: Vec<io::Error>,
+}
+
+// todo: we could just steal the mod logic from rustc itself
+fn find_rs_files(root: PathBuf, filter: &mut impl FnMut(&Path) -> bool) -> FileMapSearchResult {
+    let mut files = HashMap::new();
+    let mut errors = Vec::new();
+
+    if root.is_dir() {
+        let read_dir = match fs::read_dir(root) {
+            Ok(read_dir) => read_dir,
+            Err(err) => {
+                errors.push(err);
+                return FileMapSearchResult { map: files, errors };
+            }
+        };
+        for entry in read_dir.flatten() {
+            let path = entry.path();
+            if !filter(&path) {
+                let FileMapSearchResult {
+                    map,
+                    errors: child_errors,
+                } = find_rs_files(path, filter);
+                errors.extend(child_errors);
+                files.extend(map);
+            }
+        }
+    } else if root.extension().and_then(|s| s.to_str()) == Some("rs") {
+        if let Ok(mut file) = File::open(root.clone()) {
+            let mut src = String::new();
+            match file.read_to_string(&mut src) {
+                Ok(_) => {
+                    let cached_file = CachedSynFile {
+                        raw: src.clone(),
+                        path: root.clone(),
+                        templates: HashMap::new(),
+                        tracked_assets: HashSet::new(),
+                    };
+
+                    // track assets while we're here
+
+                    files.insert(root, cached_file);
+                }
+                Err(err) => {
+                    errors.push(err);
+                }
+            }
+        }
+    }
+
+    FileMapSearchResult { map: files, errors }
+}
+
+#[derive(Debug)]
+pub enum HotreloadError {
+    Failure(io::Error),
+    Parse,
+    NoPreviousBuild,
+}
+
+impl std::fmt::Display for HotreloadError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::Failure(err) => write!(f, "Failed to read file: {}", err),
+            Self::Parse => write!(f, "Failed to parse file"),
+            Self::NoPreviousBuild => write!(f, "No previous build found"),
+        }
+    }
+}
+
+impl From<io::Error> for HotreloadError {
+    fn from(err: io::Error) -> Self {
+        HotreloadError::Failure(err)
     }
 }
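
For context on the new file walker above: `find_rs_files` recurses through a directory, caching every readable `.rs` source into a `CachedSynFile`, and the caller-supplied filter decides which paths to skip (returning `true` means "don't descend"). `template_location`, meanwhile, builds the `file:line:column:0` string that dioxus uses to match a hot-reloaded template back to its call site. A minimal usage sketch for the walker — not part of this patch; the root path and the filter closure are assumptions, and both items are crate-private, so this would live in the same module:

use std::path::{Path, PathBuf};

fn build_initial_file_map() {
    let root = PathBuf::from("packages/my-app/src");

    // Skip build output and hidden directories; everything else gets walked.
    let mut filter = |path: &Path| {
        path.components().any(|c| {
            let c = c.as_os_str().to_string_lossy();
            c == "target" || c.starts_with('.')
        })
    };

    let FileMapSearchResult { map, errors } = find_rs_files(root, &mut filter);
    for err in &errors {
        eprintln!("skipping unreadable file: {err}");
    }
    println!("cached {} rust source files", map.len());
}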

+ 2 - 0
packages/rsx/src/hot_reload/mod.rs

@@ -1,6 +1,8 @@
 mod hot_reload_diff;
 pub use hot_reload_diff::*;
+
 mod hot_reloading_context;
 pub use hot_reloading_context::*;
+
 mod hot_reloading_file_map;
 pub use hot_reloading_file_map::*;

+ 1 - 0
packages/rsx/src/lib.rs

@@ -74,6 +74,7 @@ impl CallBody {
             roots: &self.roots,
             location: None,
         };
+
         renderer.update_template::<Ctx>(template, location)
     }
 

+ 43 - 0
packages/rsx/tests/hotreloads.rs

@@ -0,0 +1,43 @@
+use dioxus_rsx::hot_reload::{diff_rsx, DiffResult};
+use syn::File;
+
+fn load_files(old: &str, new: &str) -> (File, File) {
+    let old = syn::parse_file(old).unwrap();
+    let new = syn::parse_file(new).unwrap();
+    (old, new)
+}
+
+#[test]
+fn hotreloads() {
+    let (old, new) = load_files(
+        include_str!("./valid/expr.old.rsx"),
+        include_str!("./valid/expr.new.rsx"),
+    );
+
+    assert!(matches!(
+        diff_rsx(&new, &old),
+        DiffResult::RsxChanged { .. }
+    ));
+
+    let (old, new) = load_files(
+        include_str!("./valid/let.old.rsx"),
+        include_str!("./valid/let.new.rsx"),
+    );
+
+    assert!(matches!(
+        diff_rsx(&new, &old),
+        DiffResult::RsxChanged { .. }
+    ));
+}
+
+#[test]
+fn doesnt_hotreload() {
+    let (old, new) = load_files(
+        include_str!("./invalid/changedexpr.old.rsx"),
+        include_str!("./invalid/changedexpr.new.rsx"),
+    );
+
+    let res = diff_rsx(&new, &old);
+    dbg!(&res);
+    assert!(matches!(res, DiffResult::CodeChanged(_)));
+}
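
These tests pin down the contract of `diff_rsx`: edits confined to `rsx!` bodies (the fixture pairs below only touch markup) come back as `RsxChanged` and can be hot-patched, while any change to the surrounding Rust — such as the `let a = 123;` dropped from the `changedexpr` pair — comes back as `CodeChanged` and needs a full rebuild. A rough sketch of how a file watcher might branch on that result; not part of this patch, and `send_templates` / `queue_full_rebuild` are hypothetical stand-ins for the CLI's real plumbing:

use dioxus_rsx::hot_reload::{diff_rsx, DiffResult};

fn send_templates() { /* push updated templates over the hot-reload socket */ }
fn queue_full_rebuild() { /* ask the build system to recompile the crate */ }

fn on_source_changed(old: &syn::File, new: &syn::File) {
    if matches!(diff_rsx(new, old), DiffResult::RsxChanged { .. }) {
        // only rsx! bodies moved: hot-patch the running app in place
        send_templates();
    } else {
        // real Rust changed: the compiled binary is stale
        queue_full_rebuild();
    }
}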

+ 9 - 0
packages/rsx/tests/invalid/changedexpr.new.rsx

@@ -0,0 +1,9 @@
+use dioxus::prelude::*;
+
+pub fn CoolChild() -> Element {
+    rsx! {
+        div {
+            {some_expr()}
+        }
+    }
+}

+ 11 - 0
packages/rsx/tests/invalid/changedexpr.old.rsx

@@ -0,0 +1,11 @@
+use dioxus::prelude::*;
+
+pub fn CoolChild() -> Element {
+    let a = 123;
+
+    rsx! {
+        div {
+            {some_expr()}
+        }
+    }
+}

+ 17 - 0
packages/rsx/tests/valid/expr.new.rsx

@@ -0,0 +1,17 @@
+use dioxus::prelude::*;
+
+pub fn CoolChild() -> Element {
+    let head_ = rsx! {
+        div {
+            div { "asasddasdasd" }
+            div { "asasdd1asaassdd23asasddasd" }
+            div { "aasdsdsaasdsddasd" }
+        }
+    };
+
+    rsx! {
+        div {
+            {head_}
+        }
+    }
+}

+ 17 - 0
packages/rsx/tests/valid/expr.old.rsx

@@ -0,0 +1,17 @@
+use dioxus::prelude::*;
+
+pub fn CoolChild() -> Element {
+    let head_ = rsx! {
+        div {
+            div { "asasddasdasd" }
+            div { "asasdd1asaassdd23asasddasd" }
+            // div { "aasdsdsaasdsddasd" }
+        }
+    };
+
+    rsx! {
+        div {
+            {head_}
+        }
+    }
+}

+ 12 - 0
packages/rsx/tests/valid/let.new.rsx

@@ -0,0 +1,12 @@
+use dioxus::prelude::*;
+
+pub fn CoolChild() -> Element {
+    let head_ = rsx! {
+        div {
+            div { "asasddasdasd" }
+            div { "asasdd1asaassdd23asasddasd" }
+        }
+    };
+
+    head_
+}

+ 12 - 0
packages/rsx/tests/valid/let.old.rsx

@@ -0,0 +1,12 @@
+use dioxus::prelude::*;
+
+pub fn CoolChild() -> Element {
+    let head_ = rsx! {
+        div {
+            div { "asasddasdasd" }
+            div { "asasdd1asaassdd23asasddasdasd" }
+        }
+    };
+
+    head_
+}

+ 1 - 1
packages/server-macro/Cargo.toml

@@ -15,7 +15,7 @@ description = "Server function macros for Dioxus"
 [dependencies]
 proc-macro2 = "^1.0.63"
 quote = "^1.0.26"
-syn = { version = "2", features = ["full"] }
+syn = { workspace = true, features = ["full"] }
 convert_case = "^0.6.0"
 server_fn_macro = "^0.6.5"
 

+ 12 - 20
packages/web/Cargo.toml

@@ -24,7 +24,11 @@ wasm-bindgen-futures = "0.4.29"
 tracing = { workspace = true }
 rustc-hash = { workspace = true }
 console_error_panic_hook = { version = "0.1.7", optional = true }
-futures-util = { workspace = true, features = ["std", "async-await", "async-await-macro"] }
+futures-util = { workspace = true, features = [
+    "std",
+    "async-await",
+    "async-await-macro",
+] }
 futures-channel = { workspace = true }
 serde_json = { version = "1.0" }
 serde = { version = "1.0" }
@@ -43,35 +47,23 @@ features = [
     "Text",
     "Window",
     "DataTransfer",
-    "console"
+    "console",
+    "NodeList",
 ]
 
 [features]
 default = ["panic_hook", "mounted", "file_engine", "hot_reload", "eval"]
 panic_hook = ["console_error_panic_hook"]
-hydrate = [
-    "web-sys/Comment",
-]
-mounted = [
-    "web-sys/Element",
-    "dioxus-html/mounted"
-]
+hydrate = ["web-sys/Comment"]
+mounted = ["web-sys/Element", "dioxus-html/mounted"]
 file_engine = [
     "web-sys/File",
     "web-sys/FileList",
     "web-sys/FileReader",
-    "async-trait"
-]
-hot_reload = [
-    "web-sys/MessageEvent",
-    "web-sys/WebSocket",
-    "web-sys/Location",
-]
-eval = [
-    "dioxus-html/eval",
-    "serde-wasm-bindgen",
-    "async-trait"
+    "async-trait",
 ]
+hot_reload = ["web-sys/MessageEvent", "web-sys/WebSocket", "web-sys/Location"]
+eval = ["dioxus-html/eval", "serde-wasm-bindgen", "async-trait"]
 
 [dev-dependencies]
 dioxus = { workspace = true }
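
The newly enabled `NodeList` feature above is there for the hot-reload client in the next diff: when a message arrives that isn't a serialized `Template`, it is treated as an asset change, and every `link[rel=stylesheet]` gets its href rewritten with a random query string so the browser refetches the CSS. Note that the patch re-reads the already-rewritten `href`, so query strings accumulate across reloads; here is a sketch of the same idea as a standalone helper that trims any previous query first — not part of this patch, and it assumes the same `web_sys` / `js_sys` setup as `hot_reload.rs`:

use wasm_bindgen::JsCast;
use web_sys::Element;

// Hypothetical helper: refresh every stylesheet with a single cache-busting query.
fn bust_stylesheet_cache() {
    let document = web_sys::window().unwrap().document().unwrap();
    let links = document.query_selector_all("link[rel=stylesheet]").unwrap();

    for i in 0..links.length() {
        let link: Element = links.get(i).unwrap().unchecked_into();
        if let Some(href) = link.get_attribute("href") {
            // strip any query string left over from a previous reload
            let base = href.split('?').next().unwrap_or(&href);
            let _ = link.set_attribute("href", &format!("{}?{}", base, js_sys::Math::random()));
        }
    }
}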

+ 29 - 5
packages/web/src/hot_reload.rs

@@ -3,6 +3,7 @@
 use futures_channel::mpsc::UnboundedReceiver;
 
 use dioxus_core::Template;
+use web_sys::{console, Element};
 
 pub(crate) fn init() -> UnboundedReceiver<Template> {
     use wasm_bindgen::closure::Closure;
@@ -29,13 +30,36 @@ pub(crate) fn init() -> UnboundedReceiver<Template> {
 
     // change the rsx when new data is received
     let cl = Closure::wrap(Box::new(move |e: MessageEvent| {
+        console::log_1(&e.clone().into());
+
         if let Ok(text) = e.data().dyn_into::<js_sys::JsString>() {
             let string: String = text.into();
-            let val = serde_json::from_str::<serde_json::Value>(&string).unwrap();
-            // leak the value
-            let val: &'static serde_json::Value = Box::leak(Box::new(val));
-            let template: Template = Template::deserialize(val).unwrap();
-            tx.unbounded_send(template).unwrap();
+
+            if let Ok(val) = serde_json::from_str::<serde_json::Value>(&string) {
+                // leak the value
+                let val: &'static serde_json::Value = Box::leak(Box::new(val));
+                let template: Template = Template::deserialize(val).unwrap();
+                tx.unbounded_send(template).unwrap();
+            } else {
+                // it might be triggering a reload of assets
+                // invalidate all the stylesheets on the page
+                let links = web_sys::window()
+                    .unwrap()
+                    .document()
+                    .unwrap()
+                    .query_selector_all("link[rel=stylesheet]")
+                    .unwrap();
+
+                console::log_1(&links.clone().into());
+
+                for x in 0..links.length() {
+                    console::log_1(&x.into());
+
+                    let link: Element = links.get(x).unwrap().unchecked_into();
+                    let href = link.get_attribute("href").unwrap();
+                    _ = link.set_attribute("href", &format!("{}?{}", href, js_sys::Math::random()));
+                }
+            }
         }
     }) as Box<dyn FnMut(MessageEvent)>);