//! scopearena.rs — a slab-like arena of component scopes, backed by a bump allocator.
  1. use bumpalo::Bump;
  2. use futures_channel::mpsc::UnboundedSender;
  3. use fxhash::FxHashMap;
  4. use slab::Slab;
  5. use std::cell::{Cell, RefCell};
  6. use crate::innerlude::*;
/// Type-erased pointer identifying a function component; used as the key for
/// per-component allocation heuristics.
pub type FcSlot = *const ();

/// Recorded arena-size estimates for a single function component, used to
/// pre-allocate hook and node arenas at the right capacity (see `new_with_key`).
pub struct Heuristic {
    // capacity hint for the scope's hook list
    hook_arena_size: usize,
    // capacity hint for the scope's node bump frames
    node_arena_size: usize,
}
// a slab-like arena with stable references even when new scopes are allocated
// uses a bump arena as a backing
//
// has an internal heuristics engine to pre-allocate arenas to the right size
pub(crate) struct ScopeArena {
    // backing allocator for `Scope` structs; allocations live as long as the arena
    bump: Bump,
    // monotonically increasing counter used to mint fresh `ScopeId`s
    scope_counter: Cell<usize>,
    // live scopes, addressed by id; raw pointers keep references stable across inserts
    scopes: RefCell<FxHashMap<ScopeId, *mut Scope>>,
    // per-component size estimates, keyed by the component function pointer
    pub heuristics: RefCell<FxHashMap<FcSlot, Heuristic>>,
    // scopes removed via `try_remove`, held for reuse by `new_with_key`
    free_scopes: RefCell<Vec<*mut Scope>>,
    // ElementId -> VNode lookup; slot 0 is always the root container (see `new`)
    nodes: RefCell<Slab<*const VNode<'static>>>,
    // channel back to the scheduler; cloned into each new `Scope`
    pub(crate) sender: UnboundedSender<SchedulerMsg>,
}
impl ScopeArena {
    /// Build a new arena, pre-seeding the node slab with a synthetic root element
    /// so that `ElementId(0)` always refers to the root container.
    pub(crate) fn new(sender: UnboundedSender<SchedulerMsg>) -> Self {
        let bump = Bump::new();

        // allocate a container for the root element
        // this will *never* show up in the diffing process
        let el = bump.alloc(VElement {
            tag_name: "root",
            namespace: None,
            key: None,
            dom_id: Cell::new(Some(ElementId(0))),
            parent_id: Default::default(),
            listeners: &[],
            attributes: &[],
            children: &[],
        });
        let node = bump.alloc(VNode::Element(el));
        let mut nodes = Slab::new();
        // the transmute erases the node's lifetime so it can be stored as a
        // `*const VNode<'static>`; the backing bump is owned by `self`, so the
        // pointer stays valid for the arena's lifetime
        let root_id = nodes.insert(unsafe { std::mem::transmute(node as *const _) });
        // the root must land in slot 0 so that ElementId(0) is the root container
        debug_assert_eq!(root_id, 0);

        Self {
            scope_counter: Cell::new(0),
            bump,
            scopes: RefCell::new(FxHashMap::default()),
            heuristics: RefCell::new(FxHashMap::default()),
            free_scopes: RefCell::new(Vec::new()),
            nodes: RefCell::new(nodes),
            sender,
        }
    }

    /// Safety:
    /// - Obtaining a mutable reference to any Scope is unsafe
    /// - Scopes use interior mutability when sharing data into components
    pub(crate) fn get_scope(&self, id: &ScopeId) -> Option<&Scope> {
        unsafe { self.scopes.borrow().get(id).map(|f| &**f) }
    }

    /// Look up the raw pointer for a scope without dereferencing it.
    ///
    /// # Safety
    /// The caller must not use the pointer after the scope has been removed.
    pub(crate) unsafe fn get_scope_raw(&self, id: &ScopeId) -> Option<*mut Scope> {
        self.scopes.borrow().get(id).copied()
    }

    /// Obtain a mutable reference to a scope.
    ///
    /// # Safety
    /// The caller must guarantee no other references (mutable or shared) to this
    /// scope are live — scopes are shared into components via interior mutability.
    pub(crate) unsafe fn get_scope_mut(&self, id: &ScopeId) -> Option<&mut Scope> {
        self.scopes.borrow().get(id).map(|s| &mut **s)
    }

    /// Create a new scope for a component instance, reusing a previously freed
    /// scope when one is available, otherwise allocating a fresh one sized by the
    /// recorded heuristics for `fc_ptr`. Returns the freshly minted `ScopeId`.
    ///
    /// Each of the scope's two bump frames is seeded with a placeholder empty
    /// text node so that frame slot 0 always exists.
    pub(crate) fn new_with_key(
        &self,
        fc_ptr: *const (),
        caller: *const dyn Fn(&Scope) -> Element,
        parent_scope: Option<*mut Scope>,
        container: ElementId,
        height: u32,
        subtree: u32,
    ) -> ScopeId {
        // mint a fresh, never-reused id even when the scope storage is recycled
        let new_scope_id = ScopeId(self.scope_counter.get());
        self.scope_counter.set(self.scope_counter.get() + 1);

        if let Some(old_scope) = self.free_scopes.borrow_mut().pop() {
            // recycle a scope that `try_remove` already cleared out
            let scope = unsafe { &mut *old_scope };
            log::debug!(
                "reusing scope {:?} as {:?}",
                scope.our_arena_idx,
                new_scope_id
            );
            scope.caller = caller;
            scope.parent_scope = parent_scope;
            scope.height = height;
            scope.subtree = Cell::new(subtree);
            scope.our_arena_idx = new_scope_id;
            scope.container = container;

            // seed frame 0 with a placeholder empty text node; the transmute
            // erases the bump frame's lifetime so the pointer can be stored
            scope.frames[0].nodes.get_mut().push({
                let vnode = scope.frames[0]
                    .bump
                    .alloc(VNode::Text(scope.frames[0].bump.alloc(VText {
                        dom_id: Default::default(),
                        is_static: false,
                        text: "",
                    })));
                unsafe { std::mem::transmute(vnode as *mut VNode) }
            });
            // seed frame 1 the same way
            scope.frames[1].nodes.get_mut().push({
                let vnode = scope.frames[1]
                    .bump
                    .alloc(VNode::Text(scope.frames[1].bump.alloc(VText {
                        dom_id: Default::default(),
                        is_static: false,
                        text: "",
                    })));
                unsafe { std::mem::transmute(vnode as *mut VNode) }
            });

            let any_item = self.scopes.borrow_mut().insert(new_scope_id, scope);
            // ids are never reused, so this insert must not displace anything
            debug_assert!(any_item.is_none());
            new_scope_id
        } else {
            // no recycled scope available — size the new one from the heuristics
            // recorded for this component, falling back to zero capacity
            let (node_capacity, hook_capacity) = {
                let heuristics = self.heuristics.borrow();
                if let Some(heuristic) = heuristics.get(&fc_ptr) {
                    (heuristic.node_arena_size, heuristic.hook_arena_size)
                } else {
                    (0, 0)
                }
            };

            let mut frames = [BumpFrame::new(node_capacity), BumpFrame::new(node_capacity)];

            // seed both frames with a placeholder empty text node, mirroring the
            // reuse path above
            frames[0].nodes.get_mut().push({
                let vnode = frames[0]
                    .bump
                    .alloc(VNode::Text(frames[0].bump.alloc(VText {
                        dom_id: Default::default(),
                        is_static: false,
                        text: "",
                    })));
                unsafe { std::mem::transmute(vnode as *mut VNode) }
            });
            frames[1].nodes.get_mut().push({
                let vnode = frames[1]
                    .bump
                    .alloc(VNode::Text(frames[1].bump.alloc(VText {
                        dom_id: Default::default(),
                        is_static: false,
                        text: "",
                    })));
                unsafe { std::mem::transmute(vnode as *mut VNode) }
            });

            // allocate the scope itself in the arena's bump so its address is stable
            let scope = self.bump.alloc(Scope {
                sender: self.sender.clone(),
                container,
                our_arena_idx: new_scope_id,
                parent_scope,
                height,
                frames,
                subtree: Cell::new(subtree),
                is_subtree_root: Cell::new(false),
                caller,
                generation: 0.into(),
                hooks: HookList::new(hook_capacity),
                shared_contexts: Default::default(),
                items: RefCell::new(SelfReferentialItems {
                    listeners: Default::default(),
                    borrowed_props: Default::default(),
                    suspended_nodes: Default::default(),
                    tasks: Default::default(),
                    pending_effects: Default::default(),
                }),
            });

            let any_item = self.scopes.borrow_mut().insert(new_scope_id, scope);
            debug_assert!(any_item.is_none());
            new_scope_id
        }
    }

    /// Remove a scope from the arena, clearing all of its state and pushing its
    /// storage onto `free_scopes` for reuse. Returns `Some(())` on success.
    ///
    /// Panics if `id` is not present in the arena.
    pub fn try_remove(&self, id: &ScopeId) -> Option<()> {
        // drop any borrowed props / listeners first so no references dangle
        self.ensure_drop_safety(id);
        log::debug!("removing scope {:?}", id);

        // Safety:
        // - ensure_drop_safety ensures that no references to this scope are in use
        // - this raw pointer is removed from the map
        let scope = unsafe { &mut *self.scopes.borrow_mut().remove(id).unwrap() };

        // we're just reusing scopes so we need to clear it out
        scope.hooks.clear();
        scope.shared_contexts.get_mut().clear();
        scope.parent_scope = None;
        scope.generation.set(0);
        scope.is_subtree_root.set(false);
        scope.subtree.set(0);

        // reset both frames: drop the node pointers, then reclaim bump memory
        scope.frames[0].nodes.get_mut().clear();
        scope.frames[1].nodes.get_mut().clear();
        scope.frames[0].bump.reset();
        scope.frames[1].bump.reset();

        // destructure so a newly added field becomes a compile error here
        let SelfReferentialItems {
            borrowed_props,
            listeners,
            pending_effects,
            suspended_nodes,
            tasks,
        } = scope.items.get_mut();
        borrowed_props.clear();
        listeners.clear();
        pending_effects.clear();
        suspended_nodes.clear();
        tasks.clear();

        self.free_scopes.borrow_mut().push(scope);
        Some(())
    }

    /// Reserve a slot in the node slab for `node` and return its new `ElementId`.
    pub fn reserve_node(&self, node: &VNode) -> ElementId {
        let mut els = self.nodes.borrow_mut();
        let entry = els.vacant_entry();
        let key = entry.key();
        let id = ElementId(key);
        let node: *const VNode = node as *const _;
        // lifetime-erasing transmute: the slab stores `*const VNode<'static>`
        let node = unsafe { std::mem::transmute::<*const VNode, *const VNode>(node) };
        entry.insert(node);
        id
    }

    /// Point an existing reservation at a (possibly re-allocated) node.
    ///
    /// Panics if `id` has no reservation in the slab.
    pub fn update_reservation(&self, node: &VNode, id: ElementId) {
        // lifetime-erasing transmute, same as in `reserve_node`
        let node = unsafe { std::mem::transmute::<*const VNode, *const VNode>(node) };
        *self.nodes.borrow_mut().get_mut(id.0).unwrap() = node;
    }

    /// Release the slab slot for `id`; the id may later be handed out again.
    pub fn collect_garbage(&self, id: ElementId) {
        self.nodes.borrow_mut().remove(id.0);
    }

    // These methods would normally exist on `scope` but they need access to *all* of the scopes

    /// This method cleans up any references to data held within our hook list. This prevents mutable aliasing from
    /// causing UB in our tree.
    ///
    /// This works by cleaning up our references from the bottom of the tree to the top. The directed graph of components
    /// essentially forms a dependency tree that we can traverse from the bottom to the top. As we traverse, we remove
    /// any possible references to the data in the hook list.
    ///
    /// References to hook data can only be stored in listeners and component props. During diffing, we make sure to log
    /// all listeners and borrowed props so we can clear them here.
    ///
    /// This also makes sure that drop order is consistent and predictable. All resources that rely on being dropped will
    /// be dropped.
    pub(crate) fn ensure_drop_safety(&self, scope_id: &ScopeId) {
        let scope = self.get_scope(scope_id).unwrap();
        let mut items = scope.items.borrow_mut();

        // make sure we drop all borrowed props manually to guarantee that their drop implementation is called before we
        // run the hooks (which hold an &mut Reference)
        // recursively call ensure_drop_safety on all children
        items.borrowed_props.drain(..).for_each(|comp| {
            let scope_id = comp
                .associated_scope
                .get()
                .expect("VComponents should be associated with a valid Scope");
            // children first: bottom-up traversal of the component tree
            self.ensure_drop_safety(&scope_id);
            let mut drop_props = comp.drop_props.borrow_mut().take().unwrap();
            drop_props();
        });

        // Now that all the references are gone, we can safely drop our own references in our listeners.
        items
            .listeners
            .drain(..)
            .for_each(|listener| drop(listener.callback.borrow_mut().take()));
    }

    /// Run a scope's component function, writing its output into the scope's
    /// work-in-progress frame. Returns `true` if the component produced a node,
    /// `false` if it rendered `None`.
    ///
    /// Panics if the scope cannot be found or if the returned node link belongs
    /// to a different scope.
    pub(crate) fn run_scope(&self, id: &ScopeId) -> bool {
        // Cycle to the next frame and then reset it
        // This breaks any latent references, invalidating every pointer referencing into it.
        // Remove all the outdated listeners
        self.ensure_drop_safety(id);

        let scope = unsafe { &mut *self.get_scope_mut(id).expect("could not find scope") };
        log::debug!("found scope, about to run: {:?}", id);

        // Safety:
        // - We dropped the listeners, so no more &mut T can be used while these are held
        // - All children nodes that rely on &mut T are replaced with a new reference
        unsafe { scope.hooks.reset() };

        // Safety:
        // - We've dropped all references to the wip bump frame with "ensure_drop_safety"
        unsafe { scope.reset_wip_frame() };

        {
            let mut items = scope.items.borrow_mut();

            // just forget about our suspended nodes while we're at it
            items.suspended_nodes.clear();
            items.tasks.clear();
            items.pending_effects.clear();

            // guarantee that we haven't screwed up - there should be no latent references anywhere
            debug_assert!(items.listeners.is_empty());
            debug_assert!(items.borrowed_props.is_empty());
            debug_assert!(items.suspended_nodes.is_empty());
            debug_assert!(items.tasks.is_empty());
            debug_assert!(items.pending_effects.is_empty());

            // Todo: see if we can add stronger guarantees around internal bookkeeping and failed component renders.
            scope.wip_frame().nodes.borrow_mut().clear();
        }

        // re-materialize the component function from its type-erased pointer
        let render: &dyn Fn(&Scope) -> Element = unsafe { &*scope.caller };
        if let Some(link) = render(scope) {
            // right now, it's a panic to render a nodelink from another scope
            // todo: enable this. it should (reasonably) work even if it doesnt make much sense
            assert_eq!(link.scope_id.get(), Some(*id));

            // nodelinks are not assigned when called and must be done so through the create/diff phase
            // however, we need to link this one up since it will never be used in diffing
            scope.wip_frame().assign_nodelink(&link);
            debug_assert_eq!(scope.wip_frame().nodes.borrow().len(), 1);

            if !scope.items.borrow().tasks.is_empty() {
                // self.
            }

            // make the "wip frame" contents the "finished frame"
            // any future dipping into completed nodes after "render" will go through "fin head"
            scope.cycle_frame();
            true
        } else {
            false
        }
    }

    // The head of the bumpframe is the first linked NodeLink
    pub fn wip_head(&self, id: &ScopeId) -> &VNode {
        let scope = self.get_scope(id).unwrap();
        let frame = scope.wip_frame();
        let nodes = frame.nodes.borrow();
        // slot 0 always exists: frames are seeded with a placeholder in `new_with_key`
        let node: &VNode = unsafe { &**nodes.get(0).unwrap() };
        // detach the borrow's lifetime so the reference can outlive the Ref guard
        unsafe { std::mem::transmute::<&VNode, &VNode>(node) }
    }

    // The head of the bumpframe is the first linked NodeLink
    pub fn fin_head(&self, id: &ScopeId) -> &VNode {
        let scope = self.get_scope(id).unwrap();
        let frame = scope.fin_frame();
        let nodes = frame.nodes.borrow();
        let node: &VNode = unsafe { &**nodes.get(0).unwrap() };
        // same lifetime-detaching transmute as in `wip_head`
        unsafe { std::mem::transmute::<&VNode, &VNode>(node) }
    }

    /// The root node of a scope is the head of its finished frame.
    pub fn root_node(&self, id: &ScopeId) -> &VNode {
        self.fin_head(id)
    }
}