scopearena.rs

use bumpalo::Bump;
use futures_channel::mpsc::UnboundedSender;
use fxhash::{FxHashMap, FxHashSet};
use slab::Slab;
use std::{
    borrow::Borrow,
    cell::{Cell, RefCell},
};

use crate::innerlude::*;

pub(crate) type FcSlot = *const ();

pub(crate) struct Heuristic {
    hook_arena_size: usize,
    node_arena_size: usize,
}

// A slab-like arena with stable references, even as new scopes are allocated.
// Uses a bump arena as its backing storage.
//
// Has an internal heuristics engine, keyed by component function pointer, to
// pre-allocate each component's arenas to the right size.
pub(crate) struct ScopeArena {
    bump: Bump,
    pub pending_futures: RefCell<FxHashSet<ScopeId>>,
    scope_counter: Cell<usize>,
    pub scopes: RefCell<FxHashMap<ScopeId, *mut ScopeState>>,
    pub heuristics: RefCell<FxHashMap<FcSlot, Heuristic>>,
    free_scopes: RefCell<Vec<*mut ScopeState>>,
    nodes: RefCell<Slab<*const VNode<'static>>>,
    pub(crate) sender: UnboundedSender<SchedulerMsg>,
}

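// Scopes are handed out as raw `*mut ScopeState` pointers into `bump`, which
// keeps them stable while new scopes are allocated; the `RefCell` wrappers
// give the arena interior mutability behind shared references.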
impl ScopeArena {
    pub(crate) fn new(sender: UnboundedSender<SchedulerMsg>) -> Self {
        let bump = Bump::new();

        // Allocate a container for the root element.
        // This will *never* show up in the diffing process.
        let el = bump.alloc(VElement {
            tag_name: "root",
            namespace: None,
            key: None,
            dom_id: Cell::new(Some(ElementId(0))),
            parent_id: Default::default(),
            listeners: &[],
            attributes: &[],
            children: &[],
        });

        let node = bump.alloc(VNode::Element(el));
        let mut nodes = Slab::new();
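        // The transmute erases the borrow of `bump`, letting the root node be
        // stored in the slab as a `*const VNode<'static>`; this is sound only
        // because the root node lives in `self.bump` for the arena's lifetime.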
        let root_id = nodes.insert(unsafe { std::mem::transmute(node as *const _) });
        debug_assert_eq!(root_id, 0);

        Self {
            scope_counter: Cell::new(0),
            bump,
            pending_futures: RefCell::new(FxHashSet::default()),
            scopes: RefCell::new(FxHashMap::default()),
            heuristics: RefCell::new(FxHashMap::default()),
            free_scopes: RefCell::new(Vec::new()),
            nodes: RefCell::new(nodes),
            sender,
        }
    }
    /// Safety:
    /// - Obtaining a mutable reference to any Scope is unsafe
    /// - Scopes use interior mutability when sharing data into components
    pub(crate) fn get_scope(&self, id: &ScopeId) -> Option<&ScopeState> {
        unsafe { self.scopes.borrow().get(id).map(|f| &**f) }
    }

    pub(crate) unsafe fn get_scope_raw(&self, id: &ScopeId) -> Option<*mut ScopeState> {
        self.scopes.borrow().get(id).copied()
    }

    pub(crate) unsafe fn get_scope_mut(&self, id: &ScopeId) -> Option<&mut ScopeState> {
        self.scopes.borrow().get(id).map(|s| &mut **s)
    }

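    /// Create a new scope for a component, recycling a previously freed scope
    /// allocation when one is available rather than bumping fresh memory.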
    pub(crate) fn new_with_key(
        &self,
        fc_ptr: *const (),
        caller: *const dyn Fn(&ScopeState) -> Element,
        parent_scope: Option<*mut ScopeState>,
        container: ElementId,
        height: u32,
        subtree: u32,
    ) -> ScopeId {
        let new_scope_id = ScopeId(self.scope_counter.get());
        self.scope_counter.set(self.scope_counter.get() + 1);

        if let Some(old_scope) = self.free_scopes.borrow_mut().pop() {
            let scope = unsafe { &mut *old_scope };
            scope.caller = caller;
            scope.parent_scope = parent_scope;
            scope.height = height;
            scope.subtree = Cell::new(subtree);
            scope.our_arena_idx = new_scope_id;
            scope.container = container;
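            // Seed both frames with a placeholder empty text node so diffing
            // always finds a valid node at the head of a frame, even before
            // the component has rendered real output.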
            scope.frames[0].nodes.get_mut().push({
                let vnode = scope.frames[0]
                    .bump
                    .alloc(VNode::Text(scope.frames[0].bump.alloc(VText {
                        dom_id: Default::default(),
                        is_static: false,
                        text: "",
                    })));
                unsafe { std::mem::transmute(vnode as *mut VNode) }
            });

            scope.frames[1].nodes.get_mut().push({
                let vnode = scope.frames[1]
                    .bump
                    .alloc(VNode::Text(scope.frames[1].bump.alloc(VText {
                        dom_id: Default::default(),
                        is_static: false,
                        text: "",
                    })));
                unsafe { std::mem::transmute(vnode as *mut VNode) }
            });

            let any_item = self.scopes.borrow_mut().insert(new_scope_id, scope);
            debug_assert!(any_item.is_none());
        } else {
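            // Consult the heuristics engine: if this component type has been
            // seen before, pre-size its node and hook arenas accordingly;
            // otherwise start at zero capacity and let them grow.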
            let (node_capacity, hook_capacity) = {
                let heuristics = self.heuristics.borrow();
                if let Some(heuristic) = heuristics.get(&fc_ptr) {
                    (heuristic.node_arena_size, heuristic.hook_arena_size)
                } else {
                    (0, 0)
                }
            };

            let mut frames = [BumpFrame::new(node_capacity), BumpFrame::new(node_capacity)];

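            // Fresh frames are seeded with the same placeholder text node as
            // the recycled path above.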
            frames[0].nodes.get_mut().push({
                let vnode = frames[0]
                    .bump
                    .alloc(VNode::Text(frames[0].bump.alloc(VText {
                        dom_id: Default::default(),
                        is_static: false,
                        text: "",
                    })));
                unsafe { std::mem::transmute(vnode as *mut VNode) }
            });

            frames[1].nodes.get_mut().push({
                let vnode = frames[1]
                    .bump
                    .alloc(VNode::Text(frames[1].bump.alloc(VText {
                        dom_id: Default::default(),
                        is_static: false,
                        text: "",
                    })));
                unsafe { std::mem::transmute(vnode as *mut VNode) }
            });

            let scope = self.bump.alloc(ScopeState {
                sender: self.sender.clone(),
                container,
                our_arena_idx: new_scope_id,
                parent_scope,
                height,
                frames,
                subtree: Cell::new(subtree),
                is_subtree_root: Cell::new(false),
                caller,
                generation: 0.into(),
                shared_contexts: Default::default(),
                items: RefCell::new(SelfReferentialItems {
                    listeners: Default::default(),
                    borrowed_props: Default::default(),
                    tasks: Default::default(),
                }),
                hook_arena: Bump::new(),
                hook_vals: RefCell::new(smallvec::SmallVec::with_capacity(hook_capacity)),
                hook_idx: Default::default(),
            });

            let any_item = self.scopes.borrow_mut().insert(new_scope_id, scope);
            debug_assert!(any_item.is_none());
        }

        new_scope_id
    }

    pub fn try_remove(&self, id: &ScopeId) -> Option<()> {
        self.ensure_drop_safety(id);

        // Safety:
        // - ensure_drop_safety ensures that no references to this scope are in use
        // - this raw pointer is removed from the map
        let scope = unsafe { &mut *self.scopes.borrow_mut().remove(id).unwrap() };

        // We reuse scope allocations, so everything must be cleared out first.
        scope.hook_vals.get_mut().drain(..).for_each(|state| {
            let as_mut = unsafe { &mut *state };
            let boxed = unsafe { bumpalo::boxed::Box::from_raw(as_mut) };
            drop(boxed);
        });

        scope.hook_idx.set(0);
        scope.hook_arena.reset();
        scope.shared_contexts.get_mut().clear();
        scope.parent_scope = None;
        scope.generation.set(0);
        scope.is_subtree_root.set(false);
        scope.subtree.set(0);

        scope.frames[0].nodes.get_mut().clear();
        scope.frames[1].nodes.get_mut().clear();
        scope.frames[0].bump.reset();
        scope.frames[1].bump.reset();

        let SelfReferentialItems {
            borrowed_props,
            listeners,
            tasks,
        } = scope.items.get_mut();

        borrowed_props.clear();
        listeners.clear();
        tasks.clear();

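        // The cleared scope goes back on the free list so `new_with_key` can
        // recycle its allocation.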
        self.free_scopes.borrow_mut().push(scope);

        Some(())
    }

    pub fn reserve_node(&self, node: &VNode) -> ElementId {
        let mut els = self.nodes.borrow_mut();
        let entry = els.vacant_entry();
        let key = entry.key();
        let id = ElementId(key);
        let node: *const VNode = node as *const _;
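        // The transmute below only erases the node's lifetime so it can be
        // stored as the slab's `*const VNode<'static>`; callers presumably
        // must collect_garbage the id before the backing frame is reset.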
        let node = unsafe { std::mem::transmute::<*const VNode, *const VNode>(node) };
        entry.insert(node);
        id
    }

    pub fn update_node(&self, node: &VNode, id: ElementId) {
        let node = unsafe { std::mem::transmute::<*const VNode, *const VNode>(node) };
        *self.nodes.borrow_mut().get_mut(id.0).unwrap() = node;
    }

    pub fn collect_garbage(&self, id: ElementId) {
        self.nodes.borrow_mut().remove(id.0);
    }

    // These methods would normally exist on `scope`, but they need access to *all* of the scopes.

    /// This method cleans up any references to data held within our hook list. This prevents mutable aliasing from
    /// causing UB in our tree.
    ///
    /// This works by cleaning up our references from the bottom of the tree to the top. The directed graph of components
    /// essentially forms a dependency tree that we can traverse from the bottom to the top. As we traverse, we remove
    /// any possible references to the data in the hook list.
    ///
    /// References to hook data can only be stored in listeners and component props. During diffing, we make sure to log
    /// all listeners and borrowed props so we can clear them here.
    ///
    /// This also makes sure that drop order is consistent and predictable. All resources that rely on being dropped will
    /// be dropped.
    pub(crate) fn ensure_drop_safety(&self, scope_id: &ScopeId) {
        if let Some(scope) = self.get_scope(scope_id) {
            let mut items = scope.items.borrow_mut();

            // Make sure we drop all borrowed props manually to guarantee that their drop implementation is called
            // before we run the hooks (which hold an &mut reference), recursively calling ensure_drop_safety on all
            // children along the way.
            items.borrowed_props.drain(..).for_each(|comp| {
                let scope_id = comp
                    .associated_scope
                    .get()
                    .expect("VComponents should be associated with a valid Scope");
                self.ensure_drop_safety(&scope_id);

                let mut drop_props = comp.drop_props.borrow_mut().take().unwrap();
                drop_props();
            });

            // Now that all the references are gone, we can safely drop our own references in our listeners.
            items
                .listeners
                .drain(..)
                .for_each(|listener| drop(listener.callback.callback.borrow_mut().take()));
        }
    }

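    /// Run the scope's component function, rendering into its work-in-progress
    /// frame. Returns true if the component produced output and the frames
    /// were cycled; false if the render returned None.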
    pub(crate) fn run_scope(&self, id: &ScopeId) -> bool {
        // Cycle to the next frame and then reset it.
        // This breaks any latent references, invalidating every pointer referencing into it.
        // Remove all the outdated listeners.
        self.ensure_drop_safety(id);

        let scope = unsafe { &mut *self.get_scope_mut(id).expect("could not find scope") };

        // Safety:
        // - We dropped the listeners, so no more &mut T can be used while these are held
        // - All children nodes that rely on &mut T are replaced with a new reference
        scope.hook_idx.set(0);

        // Safety:
        // - We've dropped all references to the wip bump frame with "ensure_drop_safety"
        unsafe { scope.reset_wip_frame() };

        {
            let mut items = scope.items.borrow_mut();

            // Just forget about our suspended nodes while we're at it.
            items.tasks.clear();

            // Guarantee that we haven't screwed up - there should be no latent references anywhere.
            debug_assert!(items.listeners.is_empty());
            debug_assert!(items.borrowed_props.is_empty());
            debug_assert!(items.tasks.is_empty());

            // TODO: see if we can add stronger guarantees around internal bookkeeping and failed component renders.
            scope.wip_frame().nodes.borrow_mut().clear();
        }

        let render: &dyn Fn(&ScopeState) -> Element = unsafe { &*scope.caller };

        if let Some(link) = render(scope) {
            if !scope.items.borrow().tasks.is_empty() {
                self.pending_futures.borrow_mut().insert(*id);
            }

            // Make the "wip frame" contents the "finished frame";
            // anything dipping into completed nodes after "render" will go through "fin head".
            scope.cycle_frame();
            true
        } else {
            false
        }
    }

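    /// Invoke every listener registered for this event on the target element,
    /// then walk up the parent chain and repeat - a bubbling traversal in the
    /// style of DOM events.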
    pub fn call_listener_with_bubbling(&self, event: UserEvent, element: ElementId) {
        let nodes = self.nodes.borrow();
        let mut cur_el = Some(element);

        while let Some(id) = cur_el.take() {
            if let Some(el) = nodes.get(id.0) {
                let real_el = unsafe { &**el };
                if let VNode::Element(real_el) = real_el {
                    for listener in real_el.listeners.borrow().iter() {
                        if listener.event == event.name {
                            let mut cb = listener.callback.callback.borrow_mut();
                            if let Some(cb) = cb.as_mut() {
                                (cb)(event.data.clone());
                            }
                        }
                    }

                    cur_el = real_el.parent_id.get();
                }
            }
        }
    }

    // The head of the bumpframe is the first linked NodeLink.
    pub fn wip_head(&self, id: &ScopeId) -> &VNode {
        let scope = self.get_scope(id).unwrap();
        let frame = scope.wip_frame();
        let nodes = frame.nodes.borrow();
        let node: &VNode = unsafe { &**nodes.get(0).unwrap() };
        unsafe { std::mem::transmute::<&VNode, &VNode>(node) }
    }

    // The head of the bumpframe is the first linked NodeLink.
    pub fn fin_head(&self, id: &ScopeId) -> &VNode {
        let scope = self.get_scope(id).unwrap();
        let frame = scope.fin_frame();
        let nodes = frame.nodes.borrow();
        let node: &VNode = unsafe { &**nodes.get(0).unwrap() };
        unsafe { std::mem::transmute::<&VNode, &VNode>(node) }
    }

    pub fn root_node(&self, id: &ScopeId) -> &VNode {
        self.fin_head(id)
    }
}