
wip: major cleanups to scheduler

Jonathan Kelley · 3 years ago
parent
commit
2933e4b

+ 1 - 1
examples/borrowed.rs

@@ -21,7 +21,7 @@ fn main() {
 }
 
 fn App((cx, props): Scope<()>) -> Element {
-    let text: &mut Vec<String> = cx.use_hook(|_| vec![String::from("abc=def")], |f| f, |_| {});
+    let text: &mut Vec<String> = cx.use_hook(|_| vec![String::from("abc=def")], |f| f);
 
     let first = text.get_mut(0).unwrap();
 

+ 13 - 11
examples/crm.rs

@@ -90,15 +90,17 @@ static App: FC<()> = |(cx, _)| {
         }
     };
 
-    rsx!(cx, body {
-        link {
-            rel: "stylesheet"
-            href: "https://unpkg.com/purecss@2.0.6/build/pure-min.css"
-            integrity: "sha384-Uu6IeWbM+gzNVXJcM9XV3SohHtmWE+3VGi496jvgX1jyvDTXfdK+rfZc8C1Aehk5"
-            crossorigin: "anonymous"
-        }
-        margin_left: "35%"
-        h1 {"Dioxus CRM Example"}
-        {scene}
-    })
+    cx.render(rsx!(
+        body {
+           link {
+               rel: "stylesheet"
+               href: "https://unpkg.com/purecss@2.0.6/build/pure-min.css"
+               integrity: "sha384-Uu6IeWbM+gzNVXJcM9XV3SohHtmWE+3VGi496jvgX1jyvDTXfdK+rfZc8C1Aehk5"
+               crossorigin: "anonymous"
+           }
+           margin_left: "35%"
+           h1 {"Dioxus CRM Example"}
+           {scene}
+       }
+    ))
 };

+ 9 - 5
examples/rsx_usage.rs

@@ -102,12 +102,9 @@ pub static Example: FC<()> = |(cx, props)| {
             }}
 
             // Matching
-            // Matching will throw a Rust error about "no two closures are the same type"
-            // To fix this, call "render" method or use the "in" syntax to produce VNodes.
-            // There's nothing we can do about it, sorry :/ (unless you want *really* unhygienic macros)
             {match true {
-                true => rsx!(cx, h1 {"Top text"}),
-                false => cx.render(rsx!( h1 {"Bottom text"}))
+                true => rsx!( h1 {"Top text"}),
+                false => rsx!( h1 {"Bottom text"})
             }}
 
             // Conditional rendering
@@ -174,10 +171,17 @@ pub static Example: FC<()> = |(cx, props)| {
 
             // Can take children too!
             Taller { a: "asd", div {"hello world!"} }
+
+            // helper functions
+            {helper(cx, "hello world!")}
         }
     })
 };
 
+fn helper(cx: Context, text: &str) -> Element {
+    rsx!(cx, p { "{text}" })
+}
+
 mod baller {
     use super::*;
     #[derive(Props, PartialEq)]

+ 1 - 1
examples/ssr.rs

@@ -21,6 +21,6 @@ static App: FC<()> = |(cx, props)| {
 struct MyProps<'a> {
     text: &'a str,
 }
-fn App2<'a>(cx: Context<'a>, props: &'a MyProps) -> Element<'a> {
+fn App2(cx: Context, props: &MyProps) -> Element {
     None
 }

+ 6 - 4
examples/weather_app.rs

@@ -24,14 +24,16 @@ static App: FC<()> = |(cx, props)| {
                 .await
                 .unwrap();
         },
-        |cx, props| {
+        |props| {
             //
-            rsx!(WeatherDisplay {})
+            cx.render(rsx!(WeatherDisplay {}))
         },
     );
 
-    rsx!(cx, div {
-        {body}
+    cx.render(rsx! {
+        div {
+            {body}
+        }
     })
 };
 

+ 2 - 2
packages/core-macro/src/rsx/element.rs

@@ -210,12 +210,12 @@ impl ToTokens for ElementAttrNamed {
 
             ElementAttr::EventClosure { name, closure } => {
                 quote! {
-                    dioxus::events::on::#name(__cx, #closure)
+                    dioxus_elements::on::#name(__cx, #closure)
                 }
             }
             ElementAttr::EventTokens { name, tokens } => {
                 quote! {
-                    dioxus::events::on::#name(__cx, #tokens)
+                    dioxus_elements::on::#name(__cx, #tokens)
                 }
             }
         };

+ 71 - 0
packages/core/architecture.md

@@ -99,3 +99,74 @@ Internally, the flow of suspense works like this:
 9. diff that node with the new node with a low priority on its own fiber
 10. return the patches back to the event loop
 11. apply the patches to the real dom
+
+/*
+Welcome to Dioxus's cooperative, priority-based scheduler.
+
+I hope you enjoy your stay.
+
+Some essential reading:
+- https://github.com/facebook/react/blob/main/packages/scheduler/src/forks/Scheduler.js#L197-L200
+- https://github.com/facebook/react/blob/main/packages/scheduler/src/forks/Scheduler.js#L440
+- https://github.com/WICG/is-input-pending
+- https://web.dev/rail/
+- https://indepth.dev/posts/1008/inside-fiber-in-depth-overview-of-the-new-reconciliation-algorithm-in-react
+
+# What's going on?
+
+Dioxus is a framework for "user experience" - not just "user interfaces." Part of the "experience" is keeping the UI
+snappy and "jank free" even under heavy workloads. Dioxus already has the "speed" part figured out - but there's no
+point in being "fast" if you can't also be "responsive."
+
+As such, Dioxus can manually decide on what work is most important at any given moment in time. With a properly tuned
+priority system, Dioxus can ensure that user interaction is prioritized and committed as soon as possible (sub 100ms).
+The controller responsible for this priority management is called the "scheduler" and is responsible for juggling many
+different types of work simultaneously.
+
+# How does it work?
+
+Per the RAIL guide, we want to make sure that A) inputs are handled ASAP and B) animations are not blocked.
+React-three-fiber is a testament to how amazing this can be - a ThreeJS scene is threaded in between work periods of
+React, and the UI still stays snappy!
+
+While it's straightforward to run code ASAP and be as "fast as possible", what's _not_ straightforward is how to do
+this while not blocking the main thread. The current prevailing thought is to stop working periodically so the browser
+has time to paint and run animations. When the browser is finished, we can step in and continue our work.
+
+React-Fiber uses the "Fiber" concept to achieve a pause-resume functionality. This is worth reading up on, but not
+necessary to understand what we're doing here. In Dioxus, our DiffMachine is guided by DiffInstructions - essentially
+"commands" that guide the Diffing algorithm through the tree. Our "diff_scope" method is async - we can literally pause
+our DiffMachine "mid-sentence" (so to speak) by just stopping the poll on the future. The DiffMachine periodically yields
+so Rust's async machinery can take over, allowing us to customize when exactly to pause it.
+
+React's "should_yield" method is more complex than ours, and I assume we'll move in that direction as Dioxus matures. For
+now, Dioxus just assumes a TimeoutFuture, and selects! on both the Diff algorithm and timeout. If the DiffMachine finishes
+before the timeout, then Dioxus will work on any pending work in the interim. If there is no pending work, then the changes
+are committed, and coroutines are polled during the idle period. However, if the timeout expires, then the DiffMachine
+future is paused and saved (self-referentially).
+
+# Priority System
+
+So far, we've been able to thread our Dioxus work between animation frames - the main thread is not blocked! But that
+doesn't help us _under load_. How do we still stay snappy... even if we're doing a lot of work? Well, that's where
+priorities come into play. The goal with priorities is to schedule shorter work as a "high" priority and longer work as
+a "lower" priority. That way, we can interrupt long-running low-priority work with short-running high-priority work.
+
+React's priority system is quite complex.
+
+There are 5 levels of priority and 2 distinctions between UI events (discrete, continuous). I believe React really only
+uses 3 priority levels and "idle" priority isn't used... Regardless, there's some batching going on.
+
+For Dioxus, we're going with a 4 tier priority system:
+- Sync: Things that need to be done by the next frame, like TextInput on controlled elements
+- High: for events that block all others - clicks, keyboard, and hovers
+- Medium: for UI events caused by the user but not directly - scrolls/forms/focus (all other events)
+- Low: set_state called asynchronously, and anything generated by suspense
+
+In "Sync" state, we abort our "idle wait" future, and resolve the sync queue immediately and escape. Because we completed
+work before the next rAF, any edits can be immediately processed before the frame ends. Generally though, we want to leave
+as much time to rAF as possible. "Sync" is currently only used by onInput - we'll leave some docs telling people not to
+do anything too arduous from onInput.
+
+For the rest, we defer to the rIC period and work down each queue from high to low.
+*/
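
A minimal, self-contained sketch of the pause/resume idea described above: a hand-written future that yields between units of "diff" work, hand-polled against a frame budget. This is not Dioxus's API - `fake_diff`, `YieldOnce`, and the 4ms budget are illustrative stand-ins, and the only assumed dependency is the `futures` crate for its no-op waker.

```rust
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::time::{Duration, Instant};

use futures::task::noop_waker_ref;

/// Stand-in for the DiffMachine: does one slice of "diffing", then yields so
/// the caller can decide whether to keep polling or pause until next frame.
async fn fake_diff(units: u32) {
    for unit in 0..units {
        std::hint::black_box(unit); // pretend this is one unit of diff work
        YieldOnce(false).await;
    }
}

/// A future that returns Pending exactly once before completing.
struct YieldOnce(bool);

impl Future for YieldOnce {
    type Output = ();
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {
        if self.0 {
            Poll::Ready(())
        } else {
            self.0 = true;
            cx.waker().wake_by_ref();
            Poll::Pending
        }
    }
}

fn main() {
    // The in-flight diff "lane", kept between frames much like SavedDiffWork.
    let mut lane: Pin<Box<dyn Future<Output = ()>>> = Box::pin(fake_diff(1_000_000));
    let mut cx = Context::from_waker(noop_waker_ref());

    // One simulated frame: keep polling the diff until it either finishes or
    // the frame budget expires. Resuming next frame is just polling the same
    // boxed future again - that is the whole "pause mid-sentence" story.
    let deadline = Instant::now() + Duration::from_millis(4);
    loop {
        if let Poll::Ready(()) = lane.as_mut().poll(&mut cx) {
            println!("diff finished inside the budget");
            break;
        }
        if Instant::now() >= deadline {
            println!("budget spent - diff paused, will resume next frame");
            break;
        }
    }
}
```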

+ 43 - 0
packages/core/examples/works.rs

@@ -0,0 +1,43 @@
+use dioxus::prelude::*;
+use dioxus_core as dioxus;
+use dioxus_core_macro::*;
+use dioxus_html as dioxus_elements;
+
+fn main() {
+    let _ = VirtualDom::new(Parent);
+}
+
+fn Parent((cx, _): Scope<()>) -> Element {
+    let value = cx.use_hook(|_| String::new(), |f| &*f);
+
+    cx.render(rsx! {
+        div {
+            Child { name: value }
+        }
+    })
+}
+
+#[derive(Props)]
+struct ChildProps<'a> {
+    name: &'a str,
+}
+
+fn Child((cx, props): Scope<ChildProps>) -> Element {
+    cx.render(rsx! {
+        div {
+            h1 { "it's nested" }
+            Child2 { name: props.name }
+        }
+    })
+}
+
+#[derive(Props)]
+struct Grandchild<'a> {
+    name: &'a str,
+}
+
+fn Child2((cx, props): Scope<Grandchild>) -> Element {
+    cx.render(rsx! {
+        div { "Hello {props.name}!" }
+    })
+}

+ 6 - 3
packages/core/src/debug_dom.rs

@@ -1,4 +1,4 @@
-use crate::{innerlude::ScopeInner, virtual_dom::VirtualDom, VNode};
+use crate::{innerlude::ScopeState, virtual_dom::VirtualDom, VNode};
 
 impl std::fmt::Display for VirtualDom {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@@ -24,7 +24,7 @@ impl std::fmt::Display for VirtualDom {
 pub(crate) struct ScopeRenderer<'a> {
     pub skip_components: bool,
     pub show_fragments: bool,
-    pub _scope: &'a ScopeInner,
+    pub _scope: &'a ScopeState,
     pub _pre_render: bool,
     pub _newline: bool,
     pub _indent: bool,
@@ -48,6 +48,9 @@ impl<'a> ScopeRenderer<'a> {
         };
 
         match &node {
+            VNode::Linked(_) => {
+                write!(f, "Linked").unwrap();
+            }
             VNode::Text(text) => {
                 write_indent(f, il);
                 writeln!(f, "\"{}\"", text.text)?
@@ -115,7 +118,7 @@ impl<'a> ScopeRenderer<'a> {
             VNode::Component(vcomp) => {
                 let idx = vcomp.associated_scope.get().unwrap();
                 if !self.skip_components {
-                    let new_node = vdom.get_scope(idx).unwrap().root_node();
+                    let new_node = vdom.get_scope(&idx).unwrap().root_node();
                     self.render(vdom, new_node, f, il)?;
                 }
             }

+ 50 - 12
packages/core/src/diff.rs

@@ -178,7 +178,7 @@ impl<'bump> VirtualDom {
     // }
 
     pub fn diff_scope(&'bump self, state: &mut DiffState<'bump>, id: ScopeId) {
-        if let Some(component) = self.get_scope_mut(&id) {
+        if let Some(component) = self.scopes.get(&id) {
             let (old, new) = (component.frames.wip_head(), component.frames.fin_head());
             state.stack.push(DiffInstruction::Diff { new, old });
             self.work(state, || false);
@@ -230,6 +230,16 @@ impl<'bump> VirtualDom {
                 1
             }
 
+            VNode::Linked(linked) => {
+                todo!("load linked");
+                0
+                // let num_on_stack = linked.children.iter().map(|child| {
+                //     self.push_all_nodes(state, child)
+                // }).sum();
+                // state.mutations.push_root(node.mounted_id());
+                // num_on_stack + 1
+            }
+
             VNode::Fragment(_) | VNode::Component(_) => {
                 //
                 let mut added = 0;
@@ -302,6 +312,7 @@ impl<'bump> VirtualDom {
             VNode::Element(element) => self.create_element_node(state, element, node),
             VNode::Fragment(frag) => self.create_fragment_node(state, frag),
             VNode::Component(component) => self.create_component_node(state, component),
+            VNode::Linked(linked) => self.create_linked_node(state, linked),
         }
     }
 
@@ -371,7 +382,7 @@ impl<'bump> VirtualDom {
         state.stack.add_child_count(1);
 
         if let Some(cur_scope_id) = state.stack.current_scope() {
-            let scope = self.get_scope_mut(&cur_scope_id).unwrap();
+            let scope = self.scopes.get(&cur_scope_id).unwrap();
 
             for listener in *listeners {
                 self.attach_listener_to_scope(state, listener, scope);
@@ -434,7 +445,7 @@ impl<'bump> VirtualDom {
         vcomponent.associated_scope.set(Some(new_idx));
 
         if !vcomponent.can_memoize {
-            let cur_scope = self.get_scope_mut(&parent_idx).unwrap();
+            let cur_scope = self.scopes.get(&parent_idx).unwrap();
             let extended = vcomponent as *const VComponent;
             let extended: *const VComponent<'static> = unsafe { std::mem::transmute(extended) };
 
@@ -445,7 +456,7 @@ impl<'bump> VirtualDom {
         //  add noderefs to current noderef list Noderefs
         //  add effects to current effect list Effects
 
-        let new_component = self.get_scope_mut(&new_idx).unwrap();
+        let new_component = self.scopes.get(&new_idx).unwrap();
 
         log::debug!(
             "initializing component {:?} with height {:?}",
@@ -477,6 +488,10 @@ impl<'bump> VirtualDom {
         state.seen_scopes.insert(new_idx);
     }
 
+    fn create_linked_node(&'bump self, state: &mut DiffState<'bump>, link: &'bump NodeLink) {
+        todo!()
+    }
+
     // =================================
     //  Tools for diffing nodes
     // =================================
@@ -500,11 +515,14 @@ impl<'bump> VirtualDom {
             (Element(old), Element(new)) => {
                 self.diff_element_nodes(state, old, new, old_node, new_node)
             }
+            (Linked(old), Linked(new)) => self.diff_linked_nodes(state, old, new),
 
             // Anything else is just a basic replace and create
             (
-                Component(_) | Fragment(_) | Text(_) | Element(_) | Anchor(_) | Suspended(_),
-                Component(_) | Fragment(_) | Text(_) | Element(_) | Anchor(_) | Suspended(_),
+                Linked(_) | Component(_) | Fragment(_) | Text(_) | Element(_) | Anchor(_)
+                | Suspended(_),
+                Linked(_) | Component(_) | Fragment(_) | Text(_) | Element(_) | Anchor(_)
+                | Suspended(_),
             ) => state
                 .stack
                 .create_node(new_node, MountType::Replace { old: old_node }),
@@ -588,7 +606,7 @@ impl<'bump> VirtualDom {
         //
         // TODO: take a more efficient path than this
         if let Some(cur_scope_id) = state.stack.current_scope() {
-            let scope = self.get_scope_mut(&cur_scope_id).unwrap();
+            let scope = self.scopes.get(&cur_scope_id).unwrap();
 
             if old.listeners.len() == new.listeners.len() {
                 for (old_l, new_l) in old.listeners.iter().zip(new.listeners.iter()) {
@@ -646,7 +664,7 @@ impl<'bump> VirtualDom {
             new.associated_scope.set(Some(scope_addr));
 
             // make sure the component's caller function is up to date
-            let scope = self.get_scope_mut(&scope_addr).unwrap();
+            let scope = self.scopes.get(&scope_addr).unwrap();
             scope.update_vcomp(new);
 
             // React doesn't automatically memoize, but we do.
@@ -699,6 +717,17 @@ impl<'bump> VirtualDom {
         self.attach_suspended_node_to_scope(state, new);
     }
 
+    fn diff_linked_nodes(
+        &'bump self,
+        state: &mut DiffState<'bump>,
+        old: &'bump NodeLink,
+        new: &'bump NodeLink,
+    ) {
+        todo!();
+        // new.dom_id.set(old.dom_id.get());
+        // self.attach_linked_node_to_scope(state, new);
+    }
+
     // =============================================
     //  Utilities for creating new diff instructions
     // =============================================
@@ -1168,7 +1197,9 @@ impl<'bump> VirtualDom {
                 VNode::Element(t) => break t.dom_id.get(),
                 VNode::Suspended(t) => break t.dom_id.get(),
                 VNode::Anchor(t) => break t.dom_id.get(),
-
+                VNode::Linked(_) => {
+                    todo!()
+                }
                 VNode::Fragment(frag) => {
                     search_node = frag.children.last();
                 }
@@ -1199,6 +1230,9 @@ impl<'bump> VirtualDom {
                     let scope = self.get_scope(&scope_id).unwrap();
                     search_node = Some(scope.root_node());
                 }
+                VNode::Linked(link) => {
+                    todo!("linked")
+                }
                 VNode::Text(t) => break t.dom_id.get(),
                 VNode::Element(t) => break t.dom_id.get(),
                 VNode::Suspended(t) => break t.dom_id.get(),
@@ -1272,9 +1306,13 @@ impl<'bump> VirtualDom {
                     self.remove_nodes(state, f.children, gen_muts);
                 }
 
+                VNode::Linked(l) => {
+                    todo!()
+                }
+
                 VNode::Component(c) => {
                     let scope_id = c.associated_scope.get().unwrap();
-                    let scope = self.get_scope_mut(&scope_id).unwrap();
+                    let scope = self.scopes.get(&scope_id).unwrap();
                     let root = scope.root_node();
                     self.remove_nodes(state, Some(root), gen_muts);
 
@@ -1310,7 +1348,7 @@ impl<'bump> VirtualDom {
         &'bump self,
         state: &mut DiffState<'bump>,
         listener: &'bump Listener<'bump>,
-        scope: &ScopeInner,
+        scope: &ScopeState,
     ) {
         let long_listener: &'bump Listener<'static> = unsafe { std::mem::transmute(listener) };
         scope
@@ -1328,7 +1366,7 @@ impl<'bump> VirtualDom {
         if let Some(scope) = state
             .stack
             .current_scope()
-            .and_then(|id| self.get_scope_mut(&id))
+            .and_then(|id| self.scopes.get(&id))
         {
             // safety: this lifetime is managed by the logic on scope
             let extended: &VSuspended<'static> = unsafe { std::mem::transmute(suspended) };

+ 0 - 84
packages/core/src/events.rs

@@ -1,84 +0,0 @@
-//! An event system that's less confusing than Traits + RC;
-//! This should hopefully make it easier to port to other platforms.
-//!
-//! Unfortunately, it is less efficient than the original, but hopefully it's negligible.
-
-use crate::{
-    innerlude::Listener,
-    innerlude::{ElementId, NodeFactory, ScopeId},
-};
-use bumpalo::boxed::Box as BumpBox;
-use std::{
-    any::Any,
-    cell::{Cell, RefCell},
-    fmt::Debug,
-};
-
-#[derive(Debug)]
-pub struct UserEvent {
-    /// The originator of the event trigger
-    pub scope: ScopeId,
-
-    /// The optional real node associated with the trigger
-    pub mounted_dom_id: Option<ElementId>,
-
-    /// The event type IE "onclick" or "onmouseover"
-    ///
-    /// The name that the renderer will use to mount the listener.
-    pub name: &'static str,
-
-    /// The type of event
-    pub event: Box<dyn Any + Send>,
-}
-
-/// Priority of Event Triggers.
-///
-/// Internally, Dioxus will abort work that's taking too long if new, more important work arrives. Unlike React, Dioxus
-/// won't be afraid to pause work or flush changes to the RealDOM. This is called "cooperative scheduling". Some Renderers
-/// implement this form of scheduling internally, however Dioxus will perform its own scheduling as well.
-///
-/// The ultimate goal of the scheduler is to manage latency of changes, prioritizing "flashier" changes over "subtler" changes.
-///
-/// React has a 5-tier priority system. However, they break things into "Continuous" and "Discrete" priority. For now,
-/// we keep it simple, and just use a 3-tier priority system.
-///
-/// - NoPriority = 0
-/// - LowPriority = 1
-/// - NormalPriority = 2
-/// - UserBlocking = 3
-/// - HighPriority = 4
-/// - ImmediatePriority = 5
-///
-/// We still have a concept of discrete vs continuous though - discrete events won't be batched, but continuous events will.
-/// This means that multiple "scroll" events will be processed in a single frame, but multiple "click" events will be
-/// flushed before proceeding. Multiple discrete events is highly unlikely, though.
-#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, PartialOrd, Ord)]
-pub enum EventPriority {
-    /// Work that must be completed during the EventHandler phase.
-    ///
-    /// Currently this is reserved for controlled inputs.
-    Immediate = 3,
-
-    /// "High Priority" work will not interrupt other high priority work, but will interrupt medium and low priority work.
-    ///
-    /// This is typically reserved for things like user interaction.
-    ///
-    /// React calls these "discrete" events, but with an extra category of "user-blocking" (Immediate).
-    High = 2,
-
-    /// "Medium priority" work is generated by page events not triggered by the user. These types of events are less important
-    /// than "High Priority" events and will take precedence over low priority events.
-    ///
-    /// This is typically reserved for VirtualEvents that are not related to keyboard or mouse input.
-    ///
-    /// React calls these "continuous" events (e.g. mouse move, mouse wheel, touch move, etc).
-    Medium = 1,
-
-    /// "Low Priority" work will always be preempted unless the work is significantly delayed, in which case it will be
-    /// advanced to the front of the work queue until completed.
-    ///
-    /// The primary user of Low Priority work is the asynchronous work system (Suspense).
-    ///
-    /// This is considered "idle" work or "background" work.
-    Low = 0,
-}

+ 1 - 1
packages/core/src/lazynodes.rs

@@ -177,7 +177,7 @@ fn round_to_words(len: usize) -> usize {
 fn it_works() {
     let bump = bumpalo::Bump::new();
 
-    simple_logger::init();
+    simple_logger::init().unwrap();
 
     let factory = NodeFactory { bump: &bump };
 

+ 5 - 7
packages/core/src/lib.rs

@@ -16,7 +16,6 @@ pub(crate) mod bumpframe;
 pub(crate) mod component;
 pub(crate) mod diff;
 pub(crate) mod diff_stack;
-pub(crate) mod events;
 pub(crate) mod hooklist;
 pub(crate) mod hooks;
 pub(crate) mod lazynodes;
@@ -36,7 +35,6 @@ pub(crate) mod innerlude {
     pub use crate::component::*;
     pub(crate) use crate::diff::*;
     pub use crate::diff_stack::*;
-    pub use crate::events::*;
     pub(crate) use crate::hooklist::*;
     pub use crate::hooks::*;
     pub use crate::lazynodes::*;
@@ -48,14 +46,14 @@ pub(crate) mod innerlude {
     pub use crate::util::*;
     pub use crate::virtual_dom::*;
 
-    pub type Element = Option<CachedNode>;
-    pub type FC<P> = for<'a> fn(Scope<'a, P>) -> CachedNode;
+    pub type Element = Option<NodeLink>;
+    pub type FC<P> = for<'a> fn(Scope<'a, P>) -> Element;
 }
 
 pub use crate::innerlude::{
-    Context, DioxusElement, DomEdit, Element, ElementId, EventPriority, LazyNodes, MountType,
-    Mutations, NodeFactory, Properties, ScopeChildren, ScopeId, TestDom, UserEvent, VNode,
-    VirtualDom, FC,
+    Attribute, Context, DioxusElement, DomEdit, Element, ElementId, EventPriority, LazyNodes,
+    Listener, MountType, Mutations, NodeFactory, Properties, SchedulerMsg, ScopeChildren, ScopeId,
+    TestDom, UserEvent, VAnchor, VElement, VFragment, VNode, VSuspended, VirtualDom, FC,
 };
 
 pub mod prelude {

+ 52 - 5
packages/core/src/nodes.rs

@@ -4,7 +4,7 @@
 //! cheap and *very* fast to construct - building a full tree should be quick.
 
 use crate::{
-    innerlude::{empty_cell, Context, Element, ElementId, Properties, Scope, ScopeId, ScopeInner},
+    innerlude::{empty_cell, Context, Element, ElementId, Properties, Scope, ScopeId, ScopeState},
     lazynodes::LazyNodes,
 };
 use bumpalo::{boxed::Box as BumpBox, Bump};
@@ -20,7 +20,7 @@ use std::{
 ///
 /// It is used during the diffing/rendering process as a runtime key into an existing set of nodes. The "render" key
 /// is essentially a unique key to guarantee safe usage of the Node.
-pub struct CachedNode {
+pub struct NodeLink {
     frame_id: u32,
     gen_id: u32,
     scope_id: ScopeId,
@@ -130,6 +130,13 @@ pub enum VNode<'src> {
     /// }
     /// ```
     Anchor(&'src VAnchor),
+
+    /// A type of node that links this node to another scope or render cycle
+    ///
+    /// Is essentially a "pointer" to a "rendered" node in a particular scope
+    ///
+    /// Used in portals
+    Linked(NodeLink),
 }
 
 impl<'src> VNode<'src> {
@@ -139,9 +146,11 @@ impl<'src> VNode<'src> {
             VNode::Element(el) => el.key,
             VNode::Component(c) => c.key,
             VNode::Fragment(f) => f.key,
+
             VNode::Text(_t) => None,
             VNode::Suspended(_s) => None,
             VNode::Anchor(_f) => None,
+            VNode::Linked(_c) => None,
         }
     }
 
@@ -161,6 +170,7 @@ impl<'src> VNode<'src> {
             VNode::Element(el) => el.dom_id.get(),
             VNode::Anchor(el) => el.dom_id.get(),
             VNode::Suspended(el) => el.dom_id.get(),
+            VNode::Linked(_) => None,
             VNode::Fragment(_) => None,
             VNode::Component(_) => None,
         }
@@ -186,6 +196,11 @@ impl<'src> VNode<'src> {
                 children: f.children,
                 key: f.key,
             }),
+            VNode::Linked(c) => VNode::Linked(NodeLink {
+                frame_id: c.frame_id,
+                gen_id: c.gen_id,
+                scope_id: c.scope_id,
+            }),
         }
     }
 }
@@ -205,6 +220,11 @@ impl Debug for VNode<'_> {
             VNode::Fragment(frag) => write!(s, "VFragment {{ children: {:?} }}", frag.children),
             VNode::Suspended { .. } => write!(s, "VSuspended"),
             VNode::Component(comp) => write!(s, "VComponent {{ fc: {:?}}}", comp.user_fc),
+            VNode::Linked(c) => write!(
+                s,
+                "VCached {{ frame_id: {}, gen_id: {}, scope_id: {:?} }}",
+                c.frame_id, c.gen_id, c.scope_id
+            ),
         }
     }
 }
@@ -347,8 +367,8 @@ pub struct VComponent<'src> {
 }
 
 pub enum VCompCaller<'src> {
-    Borrowed(BumpBox<'src, dyn for<'b> Fn(&'b ScopeInner) -> Element + 'src>),
-    Owned(Box<dyn for<'b> Fn(&'b ScopeInner) -> Element>),
+    Borrowed(BumpBox<'src, dyn for<'b> Fn(&'b ScopeState) -> Element + 'src>),
+    Owned(Box<dyn for<'b> Fn(&'b ScopeState) -> Element>),
 }
 
 pub struct VSuspended<'a> {
@@ -491,7 +511,7 @@ impl<'a> NodeFactory<'a> {
         }
     }
 
-    pub fn component<P, P1>(
+    pub fn component<P>(
         &self,
         component: fn(Scope<'a, P>) -> Element,
         props: P,
@@ -605,6 +625,18 @@ impl<'a> NodeFactory<'a> {
         // }))
     }
 
+    pub fn listener(
+        self,
+        event: &'static str,
+        callback: BumpBox<'a, dyn FnMut(Box<dyn Any + Send>) + 'a>,
+    ) -> Listener<'a> {
+        Listener {
+            mounted_node: Cell::new(None),
+            event,
+            callback: RefCell::new(Some(callback)),
+        }
+    }
+
     pub fn fragment_from_iter(
         self,
         node_iter: impl IntoIterator<Item = impl IntoVNode<'a>>,
@@ -713,6 +745,21 @@ impl IntoVNode<'_> for Option<()> {
     }
 }
 
+// Conveniently, we also support "None"
+impl IntoVNode<'_> for Option<NodeLink> {
+    fn into_vnode(self, cx: NodeFactory) -> VNode {
+        todo!()
+        // cx.fragment_from_iter(None as Option<VNode>)
+    }
+}
+// Conveniently, we also support "None"
+impl IntoVNode<'_> for NodeLink {
+    fn into_vnode(self, cx: NodeFactory) -> VNode {
+        todo!()
+        // cx.fragment_from_iter(None as Option<VNode>)
+    }
+}
+
 impl<'a> IntoVNode<'a> for Option<VNode<'a>> {
     fn into_vnode(self, cx: NodeFactory<'a>) -> VNode<'a> {
         self.unwrap_or_else(|| cx.fragment_from_iter(None as Option<VNode>))

+ 15 - 0
packages/core/src/old/events.rs

@@ -0,0 +1,15 @@
+//! An event system that's less confusing than Traits + RC;
+//! This should hopefully make it easier to port to other platforms.
+//!
+//! Unfortunately, it is less efficient than the original, but hopefully it's negligible.
+
+use crate::{
+    innerlude::Listener,
+    innerlude::{ElementId, NodeFactory, ScopeId},
+};
+use bumpalo::boxed::Box as BumpBox;
+use std::{
+    any::Any,
+    cell::{Cell, RefCell},
+    fmt::Debug,
+};

+ 0 - 0
packages/core/src/noderef.rs → packages/core/src/old/noderef.rs


+ 0 - 0
packages/core/src/resources.rs → packages/core/src/old/resources.rs


+ 10 - 5
packages/core/src/scope.rs

@@ -30,7 +30,7 @@ use bumpalo::{boxed::Box as BumpBox, Bump};
 ///     cx.render(rsx!{ div {"Hello, {props.name}"} })
 /// }
 /// ```
-pub type Context<'a> = &'a ScopeInner;
+pub type Context<'a> = &'a ScopeState;
 
 /// Every component in Dioxus is represented by a `Scope`.
 ///
@@ -41,13 +41,13 @@ pub type Context<'a> = &'a ScopeInner;
 ///
 /// We expose the `Scope` type so downstream users can traverse the Dioxus VirtualDOM for whatever
 /// use case they might have.
-pub struct ScopeInner {
+pub struct ScopeState {
     // Book-keeping about our spot in the arena
 
     // safety:
     //
     // pointers to scopes are *always* valid since they are bump allocated and never freed until this scope is also freed
-    pub(crate) parent_scope: Option<*mut ScopeInner>,
+    pub(crate) parent_scope: Option<*mut ScopeState>,
 
     pub(crate) our_arena_idx: ScopeId,
 
@@ -80,6 +80,11 @@ pub struct ScopeInner {
 }
 
 pub struct SelfReferentialItems<'a> {
+    // nodes stored by "cx.render"
+    pub(crate) cached_nodes: Vec<VNode<'a>>,
+
+    pub(crate) generation: u32,
+
     pub(crate) listeners: Vec<*const Listener<'a>>,
     pub(crate) borrowed_props: Vec<*const VComponent<'a>>,
     pub(crate) suspended_nodes: FxHashMap<u64, *const VSuspended<'a>>,
@@ -91,7 +96,7 @@ pub struct ScopeVcomp {
     // important things
 }
 
-impl ScopeInner {
+impl ScopeState {
     /// This method cleans up any references to data held within our hook list. This prevents mutable aliasing from
     /// causing UB in our tree.
     ///
@@ -366,7 +371,7 @@ impl ScopeInner {
     ///     cx.render(lazy_tree)
     /// }
     ///```
-    pub fn render<'src>(&'src self, lazy_nodes: Option<LazyNodes<'src, '_>>) -> Option<CachedNode> {
+    pub fn render<'src>(&'src self, lazy_nodes: Option<LazyNodes<'src, '_>>) -> Option<NodeLink> {
         todo!()
         // ) -> Option<VNode<'src>> {
         // let bump = &self.frames.wip_frame().bump;

+ 6 - 4
packages/core/src/scopearena.rs

@@ -19,7 +19,7 @@ pub struct Heuristic {
 // has an internal heuristics engine to pre-allocate arenas to the right size
 pub(crate) struct ScopeArena {
     bump: Bump,
-    scopes: Vec<*mut ScopeInner>,
+    scopes: Vec<*mut ScopeState>,
     free_scopes: Vec<ScopeId>,
 }
 
@@ -32,7 +32,7 @@ impl ScopeArena {
         }
     }
 
-    pub fn get_mut(&self, id: &ScopeId) -> Option<&ScopeInner> {
+    pub fn get(&self, id: &ScopeId) -> Option<&ScopeState> {
         unsafe { Some(&*self.scopes[id.0]) }
     }
 
@@ -40,7 +40,7 @@ impl ScopeArena {
         &mut self,
         fc_ptr: *const (),
         vcomp: &VComponent,
-        parent_scope: Option<*mut ScopeInner>,
+        parent_scope: Option<*mut ScopeState>,
         height: u32,
         subtree: u32,
         sender: UnboundedSender<SchedulerMsg>,
@@ -56,7 +56,7 @@ impl ScopeArena {
 
             let vcomp = unsafe { std::mem::transmute(vcomp as *const VComponent) };
 
-            let new_scope = ScopeInner {
+            let new_scope = ScopeState {
                 sender,
                 parent_scope,
                 our_arena_idx: id,
@@ -75,6 +75,8 @@ impl ScopeArena {
                     suspended_nodes: Default::default(),
                     tasks: Default::default(),
                     pending_effects: Default::default(),
+                    cached_nodes: Default::default(),
+                    generation: Default::default(),
                 }),
             };
 

+ 302 - 477
packages/core/src/virtual_dom.rs

@@ -80,31 +80,20 @@ pub struct VirtualDom {
 
     pub(crate) scopes: ScopeArena,
 
-    pub receiver: UnboundedReceiver<SchedulerMsg>,
-    pub sender: UnboundedSender<SchedulerMsg>,
+    receiver: UnboundedReceiver<SchedulerMsg>,
+    pub(crate) sender: UnboundedSender<SchedulerMsg>,
 
     // Every component that has futures that need to be polled
-    pub pending_futures: FxHashSet<ScopeId>,
+    pending_futures: FxHashSet<ScopeId>,
+    pending_messages: VecDeque<SchedulerMsg>,
+    dirty_scopes: IndexSet<ScopeId>,
 
-    pub ui_events: VecDeque<UserEvent>,
+    saved_state: Option<SavedDiffWork<'static>>,
 
-    pub pending_immediates: VecDeque<ScopeId>,
-
-    pub dirty_scopes: IndexSet<ScopeId>,
-
-    pub(crate) saved_state: Option<SavedDiffWork<'static>>,
-
-    pub in_progress: bool,
-}
-
-pub enum SchedulerMsg {
-    // events from the host
-    UiEvent(UserEvent),
-
-    // setstate
-    Immediate(ScopeId),
+    in_progress: bool,
 }
 
+// Methods to create the VirtualDom
 impl VirtualDom {
     /// Create a new VirtualDOM with a component that does not have special props.
     ///
@@ -198,87 +187,61 @@ impl VirtualDom {
             root_props: todo!(),
             _root_caller: todo!(),
 
-            ui_events: todo!(),
-            pending_immediates: todo!(),
-
+            pending_messages: VecDeque::new(),
             pending_futures: Default::default(),
             dirty_scopes: Default::default(),
 
-            saved_state: Some(SavedDiffWork {
-                mutations: Mutations::new(),
-                stack: DiffStack::new(),
-                seen_scopes: Default::default(),
-            }),
+            saved_state: None,
             in_progress: false,
         }
     }
+}
 
-    /// Get the [`Scope`] for the root component.
+// Public utility methods
+impl VirtualDom {
+    /// Get the [`ScopeState`] for the root component.
     ///
     /// This is useful for traversing the tree from the root for heuristics or alternative renderers that use Dioxus
     /// directly.
-    pub fn base_scope(&self) -> &ScopeInner {
-        todo!()
-        // self.get_scope(&self.base_scope).unwrap()
+    ///
+    /// # Example
+    pub fn base_scope(&self) -> &ScopeState {
+        self.get_scope(&self.base_scope).unwrap()
     }
 
-    /// Get the [`Scope`] for a component given its [`ScopeId`]
-    pub fn get_scope(&self, id: &ScopeId) -> Option<&ScopeInner> {
-        todo!()
-        // self.get_scope(&id)
+    /// Get the [`ScopeState`] for a component given its [`ScopeId`]
+    ///
+    /// # Example
+    ///
+    ///
+    ///
+    pub fn get_scope<'a>(&'a self, id: &ScopeId) -> Option<&'a ScopeState> {
+        self.scopes.get(&id)
     }
 
-    /// Update the root props of this VirtualDOM.
+    /// Get an [`UnboundedSender`] handle to the channel used by the scheduler.
+    ///
+    /// # Example
     ///
-    /// This method returns None if the old props could not be removed. The entire VirtualDOM will be rebuilt immediately,
-    /// so calling this method will block the main thread until computation is done.
     ///
-    /// ## Example
+    ///    
+    pub fn get_scheduler_channel(&self) -> futures_channel::mpsc::UnboundedSender<SchedulerMsg> {
+        self.sender.clone()
+    }
+
+    /// Check if the [`VirtualDom`] has any pending updates or work to be done.
     ///
-    /// ```rust
-    /// #[derive(Props, PartialEq)]
-    /// struct AppProps {
-    ///     route: &'static str
-    /// }
-    /// static App: FC<AppProps> = |(cx, props)|cx.render(rsx!{ "route is {cx.route}" });
+    /// # Example
     ///
-    /// let mut dom = VirtualDom::new_with_props(App, AppProps { route: "start" });
     ///
-    /// let mutations = dom.update_root_props(AppProps { route: "end" }).unwrap();
-    /// ```
-    pub fn update_root_props<P>(&mut self, root_props: P) -> Option<Mutations>
-    where
-        P: 'static,
-    {
-        let base = self.base_scope;
-        let root_scope = self.get_scope_mut(&base).unwrap();
-
-        // Pre-emptively drop any downstream references of the old props
-        root_scope.ensure_drop_safety();
-
-        let mut root_props: Rc<dyn Any> = Rc::new(root_props);
-
-        if let Some(props_ptr) = root_props.downcast_ref::<P>().map(|p| p as *const P) {
-            // Swap the old props and new props
-            std::mem::swap(&mut self.root_props, &mut root_props);
-
-            let root = *self.root_fc.downcast_ref::<FC<P>>().unwrap();
-
-            let root_caller: Box<dyn Fn(&ScopeInner) -> Element> =
-                Box::new(move |scope: &ScopeInner| unsafe {
-                    let props: &'_ P = &*(props_ptr as *const P);
-                    Some(root((scope, props)))
-                    // std::mem::transmute()
-                });
-
-            drop(root_props);
-
-            Some(self.rebuild())
-        } else {
-            None
-        }
+    ///
+    pub fn has_any_work(&self) -> bool {
+        !(self.dirty_scopes.is_empty() && self.pending_messages.is_empty())
     }
+}
 
+// Methods to actually run the VirtualDOM
+impl VirtualDom {
     /// Performs a *full* rebuild of the virtual dom, returning every edit required to generate the actual dom from scratch
     ///
     /// The diff machine expects the RealDom's stack to be the root of the application.
@@ -290,64 +253,76 @@ impl VirtualDom {
     ///
     /// # Example
     /// ```
-    /// static App: FC<()> = |(cx, props)|cx.render(rsx!{ "hello world" });
+    /// static App: FC<()> = |(cx, props)| cx.render(rsx!{ "hello world" });
     /// let mut dom = VirtualDom::new(App);
     /// let edits = dom.rebuild();
     ///
     /// apply_edits(edits);
     /// ```
     pub fn rebuild(&mut self) -> Mutations {
-        todo!()
-        // self.rebuild(self.base_scope)
-    }
-
-    /// Compute a manual diff of the VirtualDOM between states.
-    ///
-    /// This can be useful when state inside the DOM is remotely changed from the outside, but not propagated as an event.
-    ///
-    /// In this case, every component will be diffed, even if their props are memoized. This method is intended to be used
-    /// to force an update of the DOM when the state of the app is changed outside of the app.
-    ///
-    ///
-    /// # Example
-    /// ```rust
-    /// #[derive(PartialEq, Props)]
-    /// struct AppProps {
-    ///     value: Shared<&'static str>,
-    /// }
-    ///
-    /// static App: FC<AppProps> = |(cx, props)|{
-    ///     let val = cx.value.borrow();
-    ///     cx.render(rsx! { div { "{val}" } })
-    /// };
-    ///
-    /// let value = Rc::new(RefCell::new("Hello"));
-    /// let mut dom = VirtualDom::new_with_props(
-    ///     App,
-    ///     AppProps {
-    ///         value: value.clone(),
-    ///     },
-    /// );
-    ///
-    /// let _ = dom.rebuild();
-    ///
-    /// *value.borrow_mut() = "goodbye";
-    ///
-    /// let edits = dom.diff();
-    /// ```
-    pub fn diff(&mut self) -> Mutations {
-        self.hard_diff(self.base_scope)
+        self.hard_diff(&self.base_scope)
     }
 
-    /// Runs the virtualdom immediately, not waiting for any suspended nodes to complete.
-    ///
-    /// This method will not wait for any suspended nodes to complete. If there is no pending work, then this method will
-    /// return "None"
-    pub fn run_immediate(&mut self) -> Option<Vec<Mutations>> {
+    /// Waits for the scheduler to have work
+    /// This lets us poll async tasks during idle periods without blocking the main thread.
+    pub async fn wait_for_work(&mut self) {
+        // todo: poll the events once even if there is work to do to prevent starvation
         if self.has_any_work() {
-            Some(self.work_sync())
-        } else {
-            None
+            return;
+        }
+
+        struct PollTasks<'a> {
+            pending_futures: &'a FxHashSet<ScopeId>,
+            scopes: &'a ScopeArena,
+        }
+
+        impl<'a> Future for PollTasks<'a> {
+            type Output = ();
+
+            fn poll(self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll<Self::Output> {
+                let mut all_pending = true;
+
+                // Poll every scope manually
+                for fut in self.pending_futures.iter() {
+                    let scope = self.scopes.get(fut).expect("Scope should never be moved");
+
+                    let mut items = scope.items.borrow_mut();
+                    for task in items.tasks.iter_mut() {
+                        let task = task.as_mut();
+
+                        // todo: does this make sense?
+                        // I don't usually write futures by hand
+                        let unpinned = unsafe { Pin::new_unchecked(task) };
+                        match unpinned.poll(cx) {
+                            Poll::Ready(_) => {
+                                all_pending = false;
+                            }
+                            Poll::Pending => {}
+                        }
+                    }
+                }
+
+                // Resolve the future if any task is ready
+                match all_pending {
+                    true => Poll::Pending,
+                    false => Poll::Ready(()),
+                }
+            }
+        }
+
+        let scheduler_fut = self.receiver.next();
+        let tasks_fut = PollTasks {
+            pending_futures: &self.pending_futures,
+            scopes: &self.scopes,
+        };
+
+        use futures_util::future::{select, Either};
+        match select(tasks_fut, scheduler_fut).await {
+            // Tasks themselves don't generate work
+            Either::Left((_, _)) => {}
+
+            // Save these messages in FIFO to be processed later
+            Either::Right((msg, _)) => self.pending_messages.push_front(msg.unwrap()),
         }
     }
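
The `wait_for_work` body above races manual task polling against the scheduler channel and stashes any message for the next work pass. A minimal sketch of that "select and stash" shape using only the `futures` crate; `Msg` and `pending_messages` here are stand-ins for `SchedulerMsg` and the VirtualDom's queue, not the real types.

```rust
use std::collections::VecDeque;

use futures::channel::mpsc;
use futures::executor::block_on;
use futures::future::{select, Either};
use futures::StreamExt;

#[derive(Debug)]
enum Msg {
    Immediate(usize),
}

fn main() {
    let (sender, mut receiver) = mpsc::unbounded::<Msg>();
    let mut pending_messages: VecDeque<Msg> = VecDeque::new();

    // Something else (a listener, a set_state call, ...) queues a message.
    sender.unbounded_send(Msg::Immediate(0)).unwrap();

    block_on(async {
        // Stand-in for PollTasks: a future that never resolves, so here the
        // channel side of the race is what wakes us up.
        let tasks_fut = futures::future::pending::<()>();
        let scheduler_fut = receiver.next();

        match select(tasks_fut, scheduler_fut).await {
            // A task made progress; nothing needs to be queued.
            Either::Left((_, _)) => {}
            // A scheduler message arrived; stash it FIFO for the next pass.
            Either::Right((msg, _)) => pending_messages.push_front(msg.unwrap()),
        }
    });

    println!("queued for next frame: {:?}", pending_messages);
}
```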
 
@@ -394,122 +369,161 @@ impl VirtualDom {
     /// applied the edits.
     ///
     /// Mutations are the only link between the RealDOM and the VirtualDOM.
-    pub fn run_with_deadline(&mut self, deadline: impl FnMut() -> bool) -> Vec<Mutations<'_>> {
-        self.work_with_deadline(deadline)
-    }
+    pub fn work_with_deadline<'a>(
+        &'a mut self,
+        mut deadline: impl FnMut() -> bool,
+    ) -> Vec<Mutations<'a>> {
+        /*
+        Strategy:
+        - When called, check for any UI events that might've been received since the last frame.
+        - Dump all UI events into a "pending discrete" queue and a "pending continuous" queue.
 
-    pub fn get_event_sender(&self) -> futures_channel::mpsc::UnboundedSender<SchedulerMsg> {
-        self.sender.clone()
-    }
+        - If there are any pending discrete events, then elevate our priority level. If our priority level is already "high,"
+            then we need to finish the high priority work first. If the current work is "low" then analyze what scopes
+            will be invalidated by this new work. If this interferes with any in-flight medium or low work, then we need
+            to bump the other work out of the way, or choose to process it so we don't have any conflicts.
+            'static components have a leg up here since their work can be re-used among multiple scopes.
+            "High priority" is only for blocking! Should only be used on "clicks"
 
-    /// Waits for the scheduler to have work
-    /// This lets us poll async tasks during idle periods without blocking the main thread.
-    pub async fn wait_for_work(&mut self) {
-        // todo: poll the events once even if there is work to do to prevent starvation
-        if self.has_any_work() {
-            return;
-        }
+        - If there are no pending discrete events, then check for continuous events. These can be completely batched
 
-        use futures_util::StreamExt;
-
-        // Wait for any new events if we have nothing to do
-
-        // let tasks_fut = self.async_tasks.next();
-        // let scheduler_fut = self.receiver.next();
-
-        // use futures_util::future::{select, Either};
-        // match select(tasks_fut, scheduler_fut).await {
-        //     // poll the internal futures
-        //     Either::Left((_id, _)) => {
-        //         //
-        //     }
-
-        //     // wait for an external event
-        //     Either::Right((msg, _)) => match msg.unwrap() {
-        //         SchedulerMsg::Task(t) => {
-        //             self.handle_task(t);
-        //         }
-        //         SchedulerMsg::Immediate(im) => {
-        //             self.dirty_scopes.insert(im);
-        //         }
-        //         SchedulerMsg::UiEvent(evt) => {
-        //             self.ui_events.push_back(evt);
-        //         }
-        //     },
-        // }
-    }
-}
+        - we batch completely until we run into a discrete event
+        - all continuous events are batched together
+        - so D C C C C C would be two separate events - D and C. IE onclick and onscroll
+        - D C C C C C C D C C C D would be D C D C D in 5 distinct phases.
 
-/*
-Welcome to Dioxus's cooperative, priority-based scheduler.
+        - !listener bubbling is not currently implemented properly and will need to be implemented somehow in the future
+            - we need to keep track of element parents to be able to traverse properly
 
-I hope you enjoy your stay.
 
-Some essential reading:
-- https://github.com/facebook/react/blob/main/packages/scheduler/src/forks/Scheduler.js#L197-L200
-- https://github.com/facebook/react/blob/main/packages/scheduler/src/forks/Scheduler.js#L440
-- https://github.com/WICG/is-input-pending
-- https://web.dev/rail/
-- https://indepth.dev/posts/1008/inside-fiber-in-depth-overview-of-the-new-reconciliation-algorithm-in-react
+        Open questions:
+        - what if we get two clicks from the component during the same slice?
+            - should we batch?
+            - react says no - they are continuous
+            - but if we received both - then we don't need to diff, do we? run as many as we can and then finally diff?
+        */
+        let mut committed_mutations = Vec::<Mutations<'static>>::new();
 
-# What's going on?
+        while self.has_any_work() {
+            while let Ok(Some(msg)) = self.receiver.try_next() {
+                match msg {
+                    SchedulerMsg::Immediate(im) => {
+                        self.dirty_scopes.insert(im);
+                    }
+                    SchedulerMsg::UiEvent(evt) => {
+                        self.ui_events.push_back(evt);
+                    }
+                }
+            }
 
-Dioxus is a framework for "user experience" - not just "user interfaces." Part of the "experience" is keeping the UI
-snappy and "jank free" even under heavy work loads. Dioxus already has the "speed" part figured out - but there's no
-point in being "fast" if you can't also be "responsive."
+            // switch our priority, pop off any work
+            while let Some(event) = self.ui_events.pop_front() {
+                if let Some(scope) = self.get_scope_mut(&event.scope) {
+                    if let Some(element) = event.mounted_dom_id {
+                        log::info!("Calling listener {:?}, {:?}", event.scope, element);
 
-As such, Dioxus can manually decide on what work is most important at any given moment in time. With a properly tuned
-priority system, Dioxus can ensure that user interaction is prioritized and committed as soon as possible (sub 100ms).
-The controller responsible for this priority management is called the "scheduler" and is responsible for juggling many
-different types of work simultaneously.
+                        // TODO: bubble properly here
+                        scope.call_listener(event, element);
 
-# How does it work?
+                        while let Ok(Some(dirty_scope)) = self.receiver.try_next() {
+                            match dirty_scope {
+                                SchedulerMsg::Immediate(im) => {
+                                    self.dirty_scopes.insert(im);
+                                }
+                                SchedulerMsg::UiEvent(e) => self.ui_events.push_back(e),
+                            }
+                        }
+                    }
+                }
+            }
 
-Per the RAIL guide, we want to make sure that A) inputs are handled ASAP and B) animations are not blocked.
-React-three-fiber is a testament to how amazing this can be - a ThreeJS scene is threaded in between work periods of
-React, and the UI still stays snappy!
+            let work_complete = self.work_on_current_lane(&mut deadline, &mut committed_mutations);
 
-While it's straightforward to run code ASAP and be as "fast as possible", what's not  _not_ straightforward is how to do
-this while not blocking the main thread. The current prevailing thought is to stop working periodically so the browser
-has time to paint and run animations. When the browser is finished, we can step in and continue our work.
+            if !work_complete {
+                return committed_mutations;
+            }
+        }
+
+        committed_mutations
+    }
+}
+
+pub enum SchedulerMsg {
+    // events from the host
+    UiEvent(UserEvent),
 
-React-Fiber uses the "Fiber" concept to achieve a pause-resume functionality. This is worth reading up on, but not
-necessary to understand what we're doing here. In Dioxus, our DiffMachine is guided by DiffInstructions - essentially
-"commands" that guide the Diffing algorithm through the tree. Our "diff_scope" method is async - we can literally pause
-our DiffMachine "mid-sentence" (so to speak) by just stopping the poll on the future. The DiffMachine periodically yields
-so Rust's async machinery can take over, allowing us to customize when exactly to pause it.
+    // setstate
+    Immediate(ScopeId),
+}
 
-React's "should_yield" method is more complex than ours, and I assume we'll move in that direction as Dioxus matures. For
-now, Dioxus just assumes a TimeoutFuture, and selects! on both the Diff algorithm and timeout. If the DiffMachine finishes
-before the timeout, then Dioxus will work on any pending work in the interim. If there is no pending work, then the changes
-are committed, and coroutines are polled during the idle period. However, if the timeout expires, then the DiffMachine
-future is paused and saved (self-referentially).
+#[derive(Debug)]
+pub struct UserEvent {
+    /// The originator of the event trigger
+    pub scope: ScopeId,
 
-# Priority System
+    /// The optional real node associated with the trigger
+    pub mounted_dom_id: Option<ElementId>,
 
-So far, we've been able to thread our Dioxus work between animation frames - the main thread is not blocked! But that
-doesn't help us _under load_. How do we still stay snappy... even if we're doing a lot of work? Well, that's where
-priorities come into play. The goal with priorities is to schedule shorter work as a "high" priority and longer work as
-a "lower" priority. That way, we can interrupt long-running low-priority work with short-running high-priority work.
+    /// The event type IE "onclick" or "onmouseover"
+    ///
+    /// The name that the renderer will use to mount the listener.
+    pub name: &'static str,
 
-React's priority system is quite complex.
+    /// The type of event
+    pub event: Box<dyn Any + Send>,
+}
 
-There are 5 levels of priority and 2 distinctions between UI events (discrete, continuous). I believe React really only
-uses 3 priority levels and "idle" priority isn't used... Regardless, there's some batching going on.
+/// Priority of Event Triggers.
+///
+/// Internally, Dioxus will abort work that's taking too long if new, more important work arrives. Unlike React, Dioxus
+/// won't be afraid to pause work or flush changes to the RealDOM. This is called "cooperative scheduling". Some Renderers
+/// implement this form of scheduling internally, however Dioxus will perform its own scheduling as well.
+///
+/// The ultimate goal of the scheduler is to manage latency of changes, prioritizing "flashier" changes over "subtler" changes.
+///
+/// React has a 5-tier priority system. However, they break things into "Continuous" and "Discrete" priority. For now,
+/// we keep it simple, and just use a 3-tier priority system.
+///
+/// - NoPriority = 0
+/// - LowPriority = 1
+/// - NormalPriority = 2
+/// - UserBlocking = 3
+/// - HighPriority = 4
+/// - ImmediatePriority = 5
+///
+/// We still have a concept of discrete vs continuous though - discrete events won't be batched, but continuous events will.
+/// This means that multiple "scroll" events will be processed in a single frame, but multiple "click" events will be
+/// flushed before proceeding. Multiple discrete events is highly unlikely, though.
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, PartialOrd, Ord)]
+pub enum EventPriority {
+    /// Work that must be completed during the EventHandler phase.
+    ///
+    /// Currently this is reserved for controlled inputs.
+    Immediate = 3,
 
-For Dioxus, we're going with a 4 tier priority system:
-- Sync: Things that need to be done by the next frame, like TextInput on controlled elements
-- High: for events that block all others - clicks, keyboard, and hovers
-- Medium: for UI events caused by the user but not directly - scrolls/forms/focus (all other events)
-- Low: set_state called asynchronously, and anything generated by suspense
+    /// "High Priority" work will not interrupt other high priority work, but will interrupt medium and low priority work.
+    ///
+    /// This is typically reserved for things like user interaction.
+    ///
+    /// React calls these "discrete" events, but with an extra category of "user-blocking" (Immediate).
+    High = 2,
 
-In "Sync" state, we abort our "idle wait" future, and resolve the sync queue immediately and escape. Because we completed
-work before the next rAF, any edits can be immediately processed before the frame ends. Generally though, we want to leave
-as much time to rAF as possible. "Sync" is currently only used by onInput - we'll leave some docs telling people not to
-do anything too arduous from onInput.
+    /// "Medium priority" work is generated by page events not triggered by the user. These types of events are less important
+    /// than "High Priority" events and will take precedence over low priority events.
+    ///
+    /// This is typically reserved for VirtualEvents that are not related to keyboard or mouse input.
+    ///
+    /// React calls these "continuous" events (e.g. mouse move, mouse wheel, touch move, etc).
+    Medium = 1,
 
-For the rest, we defer to the rIC period and work down each queue from high to low.
-*/
+    /// "Low Priority" work will always be preempted unless the work is significantly delayed, in which case it will be
+    /// advanced to the front of the work queue until completed.
+    ///
+    /// The primary user of Low Priority work is the asynchronous work system (Suspense).
+    ///
+    /// This is considered "idle" work or "background" work.
+    Low = 0,
+}
 
 /// The scheduler holds basically everything around "working"
 ///
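As a rough illustration of how these levels might map onto incoming UI events, following the Sync/High/Medium/Low split described in the scheduler notes: the event names below are hypothetical, the real classification is not part of this commit, and the sketch assumes the `EventPriority` enum defined above is in scope.

```rust
// Hypothetical mapping from renderer event names to the EventPriority enum
// above; the actual table Dioxus uses is not shown in this commit.
fn classify(event_name: &str) -> EventPriority {
    match event_name {
        // controlled inputs must be committed before the next frame ("Sync")
        "input" => EventPriority::Immediate,
        // discrete, user-blocking interactions: clicks, keyboard, hovers
        "click" | "keydown" | "keyup" | "mouseenter" => EventPriority::High,
        // continuous events caused by the user but safe to batch
        "scroll" | "wheel" | "focus" | "change" => EventPriority::Medium,
        // async set_state, suspense, and everything else
        _ => EventPriority::Low,
    }
}
```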
@@ -532,84 +546,6 @@ For the rest, we defer to the rIC period and work down each queue from high to l
 ///
 ///
 impl VirtualDom {
-    // returns true if the event is discrete
-    pub fn handle_ui_event(&mut self, event: UserEvent) -> bool {
-        // let (discrete, priority) = event_meta(&event);
-
-        if let Some(scope) = self.get_scope_mut(&event.scope) {
-            if let Some(element) = event.mounted_dom_id {
-                // TODO: bubble properly here
-                scope.call_listener(event, element);
-
-                while let Ok(Some(dirty_scope)) = self.receiver.try_next() {
-                    //
-                    //     self.add_dirty_scope(dirty_scope, trigger.priority)
-                }
-            }
-        }
-
-        // use EventPriority::*;
-
-        // match priority {
-        //     Immediate => todo!(),
-        //     High => todo!(),
-        //     Medium => todo!(),
-        //     Low => todo!(),
-        // }
-
-        todo!()
-        // discrete
-    }
-
-    fn prepare_work(&mut self) {
-        // while let Some(trigger) = self.ui_events.pop_back() {
-        //     if let Some(scope) = self.get_scope_mut(&trigger.scope) {}
-        // }
-    }
-
-    // nothing to do, no events on channels, no work
-    pub fn has_any_work(&self) -> bool {
-        !(self.dirty_scopes.is_empty() && self.ui_events.is_empty())
-    }
-
-    /// re-balance the work lanes, ensuring high-priority work properly bumps away low priority work
-    fn balance_lanes(&mut self) {}
-
-    fn save_work(&mut self, lane: SavedDiffWork) {
-        let saved: SavedDiffWork<'static> = unsafe { std::mem::transmute(lane) };
-        self.saved_state = Some(saved);
-    }
-
-    unsafe fn load_work(&mut self) -> SavedDiffWork<'static> {
-        self.saved_state.take().unwrap().extend()
-    }
-
-    pub fn handle_channel_msg(&mut self, msg: SchedulerMsg) {
-        match msg {
-            SchedulerMsg::Immediate(_) => todo!(),
-
-            SchedulerMsg::UiEvent(event) => {
-                //
-
-                // let (discrete, priority) = event_meta(&event);
-
-                if let Some(scope) = self.get_scope_mut(&event.scope) {
-                    if let Some(element) = event.mounted_dom_id {
-                        // TODO: bubble properly here
-                        scope.call_listener(event, element);
-
-                        while let Ok(Some(dirty_scope)) = self.receiver.try_next() {
-                            //
-                            //     self.add_dirty_scope(dirty_scope, trigger.priority)
-                        }
-                    }
-                }
-
-                // discrete;
-            }
-        }
-    }
-
     /// Load the current lane, and work on it, periodically checking in if the deadline has been reached.
     ///
     /// Returns true if the lane is finished before the deadline could be met.
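
As an aside (not part of the patch): the deadline here is just a closure the scheduler polls between units of work. A minimal sketch of building one from std timers, with illustrative names:

use std::time::{Duration, Instant};

// Returns a closure that reports true once the budget has elapsed.
// This matches the impl FnMut() -> bool shape the scheduler entry points accept.
fn deadline_after(budget: Duration) -> impl FnMut() -> bool {
    let start = Instant::now();
    move || start.elapsed() >= budget
}

// e.g. give the current lane roughly one frame of budget:
// let finished = dom.work_on_current_lane(&mut deadline_after(Duration::from_millis(10)), &mut mutations);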
@@ -699,150 +635,74 @@ impl VirtualDom {
         }
     }
 
-    /// The primary workhorse of the VirtualDOM.
+    /// Compute a manual diff of the VirtualDOM between states.
     ///
-    /// Uses some fairly complex logic to schedule what work should be produced.
+    /// This can be useful when state referenced by the VirtualDom is changed from outside the app but not propagated as an event.
     ///
-    /// Returns a list of successful mutations.
-    pub fn work_with_deadline<'a>(
-        &'a mut self,
-        mut deadline: impl FnMut() -> bool,
-    ) -> Vec<Mutations<'a>> {
-        /*
-        Strategy:
-        - When called, check for any UI events that might've been received since the last frame.
-        - Dump all UI events into a "pending discrete" queue and a "pending continuous" queue.
-
-        - If there are any pending discrete events, then elevate our priority level. If our priority level is already "high,"
-            then we need to finish the high priority work first. If the current work is "low" then analyze what scopes
-            will be invalidated by this new work. If this interferes with any in-flight medium or low work, then we need
-            to bump the other work out of the way, or choose to process it so we don't have any conflicts.
-            'static components have a leg up here since their work can be re-used among multiple scopes.
-            "High priority" is only for blocking! Should only be used on "clicks"
-
-        - If there are no pending discrete events, then check for continuous events. These can be completely batched
-
-        - we batch completely until we run into a discrete event
-        - all continuous events are batched together
-        - so D C C C C C would be two separate events - D and C. IE onclick and onscroll
-        - D C C C C C C D C C C D would be D C D C D in 5 distinct phases.
-
-        - !listener bubbling is not currently implemented properly and will need to be implemented somehow in the future
-            - we need to keep track of element parents to be able to traverse properly
-
-
-        Open questions:
-        - what if we get two clicks from the component during the same slice?
-            - should we batch?
-            - react says no - they are continuous
-            - but if we received both - then we don't need to diff, do we? run as many as we can and then finally diff?
-        */
-        let mut committed_mutations = Vec::<Mutations<'static>>::new();
-
-        while self.has_any_work() {
-            while let Ok(Some(msg)) = self.receiver.try_next() {
-                match msg {
-                    SchedulerMsg::Immediate(im) => {
-                        self.dirty_scopes.insert(im);
-                    }
-                    SchedulerMsg::UiEvent(evt) => {
-                        self.ui_events.push_back(evt);
-                    }
-                }
-            }
-
-            // switch our priority, pop off any work
-            while let Some(event) = self.ui_events.pop_front() {
-                if let Some(scope) = self.get_scope_mut(&event.scope) {
-                    if let Some(element) = event.mounted_dom_id {
-                        log::info!("Calling listener {:?}, {:?}", event.scope, element);
-
-                        // TODO: bubble properly here
-                        scope.call_listener(event, element);
-
-                        while let Ok(Some(dirty_scope)) = self.receiver.try_next() {
-                            match dirty_scope {
-                                SchedulerMsg::Immediate(im) => {
-                                    self.dirty_scopes.insert(im);
-                                }
-                                SchedulerMsg::UiEvent(e) => self.ui_events.push_back(e),
-                            }
-                        }
-                    }
-                }
-            }
-
-            let work_complete = self.work_on_current_lane(&mut deadline, &mut committed_mutations);
-
-            if !work_complete {
-                return committed_mutations;
-            }
-        }
-
-        committed_mutations
-    }
-
-    /// Work the scheduler down, not polling any ongoing tasks.
+    /// In this case, every component will be diffed, even if its props are memoized. This method is intended to be used
+    /// to force an update of the DOM when app state has been changed from outside the app.
     ///
-    /// Will use the standard priority-based scheduling, batching, etc, but just won't interact with the async reactor.
-    pub fn work_sync<'a>(&'a mut self) -> Vec<Mutations<'a>> {
-        let mut committed_mutations = Vec::new();
-
-        while let Ok(Some(msg)) = self.receiver.try_next() {
-            self.handle_channel_msg(msg);
-        }
-
-        if !self.has_any_work() {
-            return committed_mutations;
-        }
-
-        while self.has_any_work() {
-            self.prepare_work();
-            self.work_on_current_lane(|| false, &mut committed_mutations);
-        }
-
-        committed_mutations
-    }
-
-    /// Restart the entire VirtualDOM from scratch, wiping away any old state and components.
     ///
-    /// Typically used to kickstart the VirtualDOM after initialization.
-    pub fn rebuild_inner(&mut self, base_scope: ScopeId) -> Mutations {
-        // TODO: drain any in-flight work
-
-        // We run the component. If it succeeds, then we can diff it and add the changes to the dom.
-        if self.run_scope(&base_scope) {
-            let cur_component = self
-                .get_scope_mut(&base_scope)
-                .expect("The base scope should never be moved");
-
-            log::debug!("rebuild {:?}", base_scope);
-
-            let mut diff_machine = DiffState::new(Mutations::new());
-            diff_machine
-                .stack
-                .create_node(cur_component.frames.fin_head(), MountType::Append);
-
-            diff_machine.stack.scope_stack.push(base_scope);
-
-            todo!()
-            // self.work(&mut diff_machine, || false);
-            // diff_machine.work(|| false);
-        } else {
-            // todo: should this be a hard error?
-            log::warn!(
-                "Component failed to run successfully during rebuild.
-                This does not result in a failed rebuild, but indicates a logic failure within your app."
-            );
-        }
+    /// # Example
+    /// ```rust
+    /// #[derive(PartialEq, Props)]
+    /// struct AppProps {
+    ///     value: Shared<&'static str>,
+    /// }
+    ///
+    /// static App: FC<AppProps> = |(cx, props)|{
+    ///     let val = props.value.borrow();
+    ///     cx.render(rsx! { div { "{val}" } })
+    /// };
+    ///
+    /// let value = Rc::new(RefCell::new("Hello"));
+    /// let mut dom = VirtualDom::new_with_props(
+    ///     App,
+    ///     AppProps {
+    ///         value: value.clone(),
+    ///     },
+    /// );
+    ///
+    /// let _ = dom.rebuild();
+    ///
+    /// *value.borrow_mut() = "goodbye";
+    ///
+    /// let edits = dom.hard_diff(&ScopeId(0));
+    /// ```
+    pub fn hard_diff<'a>(&'a mut self, base_scope: &ScopeId) -> Mutations<'a> {
+        // // TODO: drain any in-flight work
+        // // We run the component. If it succeeds, then we can diff it and add the changes to the dom.
+        // if self.run_scope(&base_scope) {
+        //     let cur_component = self
+        //         .get_scope_mut(&base_scope)
+        //         .expect("The base scope should never be moved");
+
+        //     log::debug!("rebuild {:?}", base_scope);
+
+        //     let mut diff_machine = DiffState::new(Mutations::new());
+        //     diff_machine
+        //         .stack
+        //         .create_node(cur_component.frames.fin_head(), MountType::Append);
+
+        //     diff_machine.stack.scope_stack.push(base_scope);
+
+        //     todo!()
+        //     // self.work(&mut diff_machine, || false);
+        //     // diff_machine.work(|| false);
+        // } else {
+        //     // todo: should this be a hard error?
+        //     log::warn!(
+        //         "Component failed to run successfully during rebuild.
+        //         This does not result in a failed rebuild, but indicates a logic failure within your app."
+        //     );
+        // }
 
-        todo!()
-        // unsafe { std::mem::transmute(diff_machine.mutations) }
-    }
+        // todo!()
+        // // unsafe { std::mem::transmute(diff_machine.mutations) }
 
-    pub fn hard_diff(&mut self, base_scope: ScopeId) -> Mutations {
         let cur_component = self
-            .get_scope_mut(&base_scope)
+            .scopes
+            .get(&base_scope)
             .expect("The base scope should never be moved");
 
         log::debug!("hard diff {:?}", base_scope);
@@ -858,13 +718,10 @@ impl VirtualDom {
         }
     }
 
-    pub fn get_scope_mut<'a>(&'a self, id: &ScopeId) -> Option<&'a ScopeInner> {
-        self.scopes.get_mut(id)
-    }
-
     pub fn run_scope(&mut self, id: &ScopeId) -> bool {
         let scope = self
-            .get_scope_mut(id)
+            .scopes
+            .get(id)
             .expect("The base scope should never be moved");
 
         // Cycle to the next frame and then reset it
@@ -897,7 +754,7 @@ impl VirtualDom {
         // temporarily cast the vcomponent to the right lifetime
         let vcomp = scope.load_vcomp();
 
-        let render: &dyn Fn(&ScopeInner) -> Element = todo!();
+        let render: &dyn Fn(&ScopeState) -> Element = todo!();
 
         // Todo: see if we can add stronger guarantees around internal bookkeeping and failed component renders.
         if let Some(builder) = render(scope) {
@@ -926,39 +783,7 @@ impl VirtualDom {
         todo!()
     }
 
-    pub fn try_remove(&self, id: &ScopeId) -> Option<ScopeInner> {
+    pub fn try_remove(&self, id: &ScopeId) -> Option<ScopeState> {
         todo!()
     }
 }
-
-// impl<'a> Future for PollAllTasks<'a> {
-//     type Output = ();
-
-//     fn poll(self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll<Self::Output> {
-//         let mut all_pending = true;
-
-//         for fut in self.pending_futures.iter() {
-//             let scope = self
-//                 .pool
-//                 .get_scope_mut(&fut)
-//                 .expect("Scope should never be moved");
-
-//             let items = scope.items.get_mut();
-//             for task in items.tasks.iter_mut() {
-//                 let t = task.as_mut();
-//                 let g = unsafe { Pin::new_unchecked(t) };
-//                 match g.poll(cx) {
-//                     Poll::Ready(r) => {
-//                         all_pending = false;
-//                     }
-//                     Poll::Pending => {}
-//                 }
-//             }
-//         }
-
-//         match all_pending {
-//             true => Poll::Pending,
-//             false => Poll::Ready(()),
-//         }
-//     }
-// }

+ 3 - 3
packages/core/tests/borrowedstate.rs

@@ -11,7 +11,7 @@ fn test_borrowed_state() {
 }
 
 fn Parent((cx, _): Scope<()>) -> Element {
-    let value = cx.use_hook(|_| String::new(), |f| &*f, |_| {});
+    let value = cx.use_hook(|_| String::new(), |f| &*f);
 
     cx.render(rsx! {
         div {
@@ -28,7 +28,7 @@ struct ChildProps<'a> {
     name: &'a str,
 }
 
-fn Child<'a>((cx, props): Scope<'a, ChildProps>) -> Element<'a> {
+fn Child((cx, props): Scope<ChildProps>) -> Element {
     cx.render(rsx! {
         div {
             h1 { "it's nested" }
@@ -42,7 +42,7 @@ struct Grandchild<'a> {
     name: &'a str,
 }
 
-fn Child2<'a>((cx, props): Scope<'a, Grandchild>) -> Element<'a> {
+fn Child2((cx, props): Scope<Grandchild>) -> Element {
     cx.render(rsx! {
         div { "Hello {props.name}!" }
     })

+ 1 - 1
packages/core/tests/create_dom.rs

@@ -303,7 +303,7 @@ fn anchors() {
 #[test]
 fn suspended() {
     static App: FC<()> = |(cx, props)| {
-        let val = use_suspense(cx, || async {}, |cx, p| todo!());
+        let val = use_suspense(cx, || async {}, |p| todo!());
 
         cx.render(rsx! { {val} })
     };

+ 1 - 1
packages/core/tests/diffing.rs

@@ -5,7 +5,7 @@
 //!
 //! It does not validated that component lifecycles work properly. This is done in another test file.
 
-use dioxus::{nodes::VSuspended, prelude::*, DomEdit, TestDom};
+use dioxus::{prelude::*, DomEdit, TestDom, VSuspended};
 use dioxus_core as dioxus;
 use dioxus_core_macro::*;
 use dioxus_html as dioxus_elements;

+ 1 - 1
packages/desktop/Cargo.toml

@@ -24,6 +24,7 @@ tokio = { version = "1.12.0", features = [
     "rt",
 ], optional = true, default-features = false }
 dioxus-core-macro = { path = "../core-macro" }
+dioxus-html = { path = "../html", features = ["serialize"] }
 
 [features]
 default = ["tokio_runtime"]
@@ -31,6 +32,5 @@ tokio_runtime = ["tokio"]
 
 
 [dev-dependencies]
-dioxus-html = { path = "../html" }
 dioxus-hooks = { path = "../hooks" }
 simple_logger = "1.13.0"

+ 1 - 4
packages/desktop/src/desktop_context.rs

@@ -57,7 +57,7 @@ pub struct WebviewWindowProps<'a> {
 ///
 ///
 ///
-pub fn WebviewWindow<'a>((cx, props): Scope<'a, WebviewWindowProps>) -> Element<'a> {
+pub fn WebviewWindow((cx, props): Scope<WebviewWindowProps>) -> Element {
     let dtcx = cx.consume_state::<RefCell<DesktopContext>>()?;
 
     cx.use_hook(
@@ -67,9 +67,6 @@ pub fn WebviewWindow<'a>((cx, props): Scope<'a, WebviewWindowProps>) -> Element<
         |state| {
             //
         },
-        |hook| {
-            //
-        },
     );
 
     // render the children directly

+ 1 - 0
packages/desktop/src/err.rs

@@ -0,0 +1 @@
+

+ 2 - 3
packages/desktop/src/events.rs

@@ -4,7 +4,8 @@
 use std::sync::Arc;
 use std::{any::Any, rc::Rc};
 
-use dioxus_core::{events::on::MouseEvent, ElementId, EventPriority, ScopeId, UserEvent};
+use dioxus_core::{ElementId, EventPriority, ScopeId, UserEvent};
+use dioxus_html::on::*;
 
 #[derive(serde::Serialize, serde::Deserialize)]
 struct ImEvent {
@@ -38,8 +39,6 @@ pub fn trigger_from_serialized(val: serde_json::Value) -> UserEvent {
 }
 
 fn make_synthetic_event(name: &str, val: serde_json::Value) -> Box<dyn Any + Send> {
-    use dioxus_core::events::on::*;
-
     match name {
         "copy" | "cut" | "paste" => {
             //

+ 0 - 1
packages/desktop/src/lib.rs

@@ -13,7 +13,6 @@ use std::sync::mpsc::channel;
 use std::sync::{Arc, RwLock};
 
 use cfg::DesktopConfig;
-use dioxus_core::scheduler::SchedulerMsg;
 use dioxus_core::*;
 use serde::{Deserialize, Serialize};
 

+ 9 - 8
packages/hooks/src/use_shared_state.rs

@@ -82,13 +82,6 @@ pub fn use_shared_state<'a, T: 'static>(cx: Context<'a>) -> Option<UseSharedStat
                 _ => None,
             }
         },
-        |f| {
-            // we need to unsubscribe when our component is unounted
-            if let Some(root) = &f.root {
-                let mut root = root.borrow_mut();
-                root.consumers.remove(&f.scope_id);
-            }
-        },
     )
 }
 
@@ -98,6 +91,15 @@ struct SharedStateInner<T: 'static> {
     scope_id: ScopeId,
     needs_notification: Cell<bool>,
 }
+impl<T> Drop for SharedStateInner<T> {
+    fn drop(&mut self) {
+        // we need to unsubscribe when our component is unmounted
+        if let Some(root) = &self.root {
+            let mut root = root.borrow_mut();
+            root.consumers.remove(&self.scope_id);
+        }
+    }
+}
 
 pub struct UseSharedState<'a, T: 'static> {
     pub(crate) cx: Context<'a>,
@@ -172,6 +174,5 @@ pub fn use_provide_state<'a, T: 'static>(cx: Context<'a>, f: impl FnOnce() -> T)
             cx.provide_state(state)
         },
         |inner| {},
-        |_| {},
     )
 }

+ 0 - 1
packages/hooks/src/useref.rs

@@ -16,7 +16,6 @@ pub fn use_ref<T: 'static>(cx: Context, f: impl FnOnce() -> T) -> UseRef<T> {
             inner.update_scheuled.set(false);
             UseRef { inner }
         },
-        |_| {},
     )
 }
 

+ 0 - 1
packages/hooks/src/usestate.rs

@@ -69,7 +69,6 @@ pub fn use_state<'a, T: 'static>(
 
             UseState { inner: &*hook }
         },
-        |_| {},
     )
 }
 struct UseStateInner<T: 'static> {

+ 8 - 0
packages/html/Cargo.toml

@@ -10,3 +10,11 @@ description = "HTML Element pack for Dioxus - a concurrent renderer-agnostic Vir
 
 [dependencies]
 dioxus-core = { path = "../core", version = "0.1.3" }
+# Serialize the Edits for use in Webview/Liveview instances
+serde = { version = "1", features = ["derive"], optional = true }
+serde_repr = { version = "0.1.7", optional = true }
+
+
+[features]
+default = []
+serialize = ["serde", "serde_repr"]

+ 4 - 0
packages/html/src/elements.rs

@@ -1,3 +1,7 @@
+use crate::{GlobalAttributes, SvgAttributes};
+use dioxus_core::*;
+use std::fmt::Arguments;
+
 macro_rules! builder_constructors {
     (
         $(

+ 6 - 6
packages/html/src/events.rs

@@ -1,3 +1,7 @@
+use bumpalo::boxed::Box as BumpBox;
+use dioxus_core::exports::bumpalo;
+use dioxus_core::*;
+use std::any::Any;
 
 pub mod on {
     use super::*;
@@ -39,11 +43,7 @@ pub mod on {
                         // ie copy
                         let shortname: &'static str = &event_name[2..];
 
-                        Listener {
-                            event: shortname,
-                            mounted_node: Cell::new(None),
-                            callback: RefCell::new(Some(callback)),
-                        }
+                        c.listener(shortname, callback)
                     }
                 )*
             )*
@@ -1062,7 +1062,7 @@ impl KeyCode {
     }
 }
 
-pub(crate) fn event_meta(event: &UserEvent) -> (bool, EventPriority) {
+pub(crate) fn _event_meta(event: &UserEvent) -> (bool, EventPriority) {
     use EventPriority::*;
 
     match event.name {

+ 3 - 0
packages/html/src/global_attributes.rs

@@ -1,3 +1,6 @@
+use dioxus_core::*;
+use std::fmt::Arguments;
+
 macro_rules! no_namespace_trait_methods {
     (
         $(

+ 5 - 8
src/lib.rs

@@ -170,8 +170,11 @@
 #[cfg(feature = "core")]
 pub use dioxus_core as core;
 
-#[cfg(feature = "core")]
-pub use dioxus_core::events;
+#[cfg(feature = "hooks")]
+pub use dioxus_hooks as hooks;
+
+#[cfg(feature = "ssr")]
+pub use dioxus_ssr as ssr;
 
 #[cfg(feature = "web")]
 pub use dioxus_web as web;
@@ -179,12 +182,6 @@ pub use dioxus_web as web;
 #[cfg(feature = "mobile")]
 pub use dioxus_mobile as mobile;
 
-#[cfg(feature = "ssr")]
-pub use dioxus_ssr as ssr;
-
-#[cfg(feature = "hooks")]
-pub use dioxus_hooks as hooks;
-
 #[cfg(feature = "desktop")]
 pub use dioxus_desktop as desktop;