diff --git a/collector/compile-benchmarks/README.md b/collector/compile-benchmarks/README.md
index bfd2b7c86..8172790d9 100644
--- a/collector/compile-benchmarks/README.md
+++ b/collector/compile-benchmarks/README.md
@@ -151,8 +151,6 @@ Rust code being written today.
   obligation processing.
 - **regex**: See above. This is an older version of the crate.
 - **piston-image**: See above. This is an older version of the `image` crate.
-- **style-servo**: An old version of Servo's `style` crate. A large crate, and
-  one used by old versions of Firefox. Built with `--features=gecko`.
 - **syn**: See above. This is an older version (0.11.11) of the crate.
 - **tokio-webpush-simple**: A simple web server built with a very old version of
   tokio. Uses futures a lot, but doesn't use `async`/`await`.
diff --git a/collector/compile-benchmarks/REUSE.toml b/collector/compile-benchmarks/REUSE.toml
index e16ba1dc9..e48f31752 100644
--- a/collector/compile-benchmarks/REUSE.toml
+++ b/collector/compile-benchmarks/REUSE.toml
@@ -240,11 +240,6 @@ path = "stm32f4-0.15.1/**"
 SPDX-FileCopyrightText = "stm32-rs contributors"
 SPDX-License-Identifier = "MIT OR Apache-2.0"
 
-[[annotations]]
-path = "style-servo/**"
-SPDX-FileCopyrightText = "The Servo Project Developers"
-SPDX-License-Identifier = "MPL-2.0"
-
 [[annotations]]
 path = "syn/**"
 SPDX-FileCopyrightText = "syn contributors"
diff --git a/collector/compile-benchmarks/style-servo/0-println.patch b/collector/compile-benchmarks/style-servo/0-println.patch
deleted file mode 100644
index de0efb37c..000000000
--- a/collector/compile-benchmarks/style-servo/0-println.patch
+++ /dev/null
@@ -1,11 +0,0 @@
-diff --git a/components/style/matching.rs b/components/style/matching.rs
---- a/components/style/matching.rs
-+++ b/components/style/matching.rs
-@@ -788,6 +788,7 @@ pub trait MatchMethods : TElement {
-         // Non-animation restyle hints will be processed in a subsequent
-         // normal traversal.
-         if replacements.intersects(RestyleHint::for_animations()) {
-+            println!("{:?}", context.shared.traversal_flags.for_animation_only());
-             debug_assert!(context.shared.traversal_flags.for_animation_only());
- 
-             if replacements.contains(RESTYLE_SMIL) {
diff --git a/collector/compile-benchmarks/style-servo/1-b9b3e592dd-cherry-picked.patch b/collector/compile-benchmarks/style-servo/1-b9b3e592dd-cherry-picked.patch
deleted file mode 100644
index 733ac8f15..000000000
--- a/collector/compile-benchmarks/style-servo/1-b9b3e592dd-cherry-picked.patch
+++ /dev/null
@@ -1,947 +0,0 @@
-commit 30b7b62091b002a56ffaaedb26da95e2e45669c6
-Author: Emilio Cobos Álvarez
-Date: Thu Oct 12 13:32:40 2017 +0200
-
-    style: Split the invalidation collection from the invalidator step.
-
-    This is the first step in reusing the invalidation machinery for other stuff,
-    potentially including QuerySelector / QuerySelectorAll.
- -diff --git a/components/style/data.rs b/components/style/data.rs -index bc3f0b1749..c1ca632411 100644 ---- a/components/style/data.rs -+++ b/components/style/data.rs -@@ -244,6 +244,7 @@ impl ElementData { - return InvalidationResult::empty(); - } - -+ use invalidation::element::collector::StateAndAttrInvalidationCollector; - use invalidation::element::invalidator::TreeStyleInvalidator; - - debug!("invalidate_style_if_needed: {:?}, flags: {:?}, has_snapshot: {}, \ -@@ -266,7 +267,8 @@ impl ElementData { - nth_index_cache, - ); - -- let result = invalidator.invalidate(); -+ let result = -+ invalidator.invalidate::(); - unsafe { element.set_handled_snapshot() } - debug_assert!(element.handled_snapshot()); - result -diff --git a/components/style/invalidation/element/collector.rs b/components/style/invalidation/element/collector.rs -new file mode 100644 -index 0000000000..a496e4220d ---- /dev/null -+++ b/components/style/invalidation/element/collector.rs -@@ -0,0 +1,418 @@ -+/* This Source Code Form is subject to the terms of the Mozilla Public -+ * License, v. 2.0. If a copy of the MPL was not distributed with this -+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ -+ -+//! A collector for invalidations due to state and attribute changes. -+ -+use Atom; -+use context::{QuirksMode, SharedStyleContext}; -+use data::ElementData; -+use dom::TElement; -+use element_state::{ElementState, IN_VISITED_OR_UNVISITED_STATE}; -+use invalidation::element::element_wrapper::{ElementSnapshot, ElementWrapper}; -+use invalidation::element::invalidation_map::*; -+use invalidation::element::invalidator::{InvalidationVector, Invalidation, InvalidationCollector}; -+use invalidation::element::restyle_hints::*; -+use selector_map::SelectorMap; -+use selector_parser::Snapshot; -+use selectors::NthIndexCache; -+use selectors::attr::CaseSensitivity; -+use selectors::matching::{MatchingContext, MatchingMode, VisitedHandlingMode}; -+use selectors::matching::matches_selector; -+use smallvec::SmallVec; -+ -+#[derive(Debug, PartialEq)] -+enum VisitedDependent { -+ Yes, -+ No, -+} -+ -+/// The collector implementation. -+struct Collector<'a, 'b: 'a, E> -+where -+ E: TElement, -+{ -+ element: E, -+ wrapper: ElementWrapper<'b, E>, -+ nth_index_cache: Option<&'a mut NthIndexCache>, -+ snapshot: &'a Snapshot, -+ quirks_mode: QuirksMode, -+ lookup_element: E, -+ removed_id: Option<&'a Atom>, -+ added_id: Option<&'a Atom>, -+ classes_removed: &'a SmallVec<[Atom; 8]>, -+ classes_added: &'a SmallVec<[Atom; 8]>, -+ state_changes: ElementState, -+ descendant_invalidations: &'a mut InvalidationVector, -+ sibling_invalidations: &'a mut InvalidationVector, -+ invalidates_self: bool, -+} -+ -+/// A collector for state and attribute invalidations. 
-+pub struct StateAndAttrInvalidationCollector; -+ -+impl InvalidationCollector for StateAndAttrInvalidationCollector { -+ fn collect_invalidations( -+ element: E, -+ mut data: Option<&mut ElementData>, -+ nth_index_cache: Option<&mut NthIndexCache>, -+ shared_context: &SharedStyleContext, -+ descendant_invalidations: &mut InvalidationVector, -+ sibling_invalidations: &mut InvalidationVector, -+ ) -> bool -+ where -+ E: TElement, -+ { -+ debug_assert!(element.has_snapshot(), "Why bothering?"); -+ debug_assert!(data.is_some(), "How exactly?"); -+ -+ let wrapper = -+ ElementWrapper::new(element, &*shared_context.snapshot_map); -+ -+ let state_changes = wrapper.state_changes(); -+ let snapshot = wrapper.snapshot().expect("has_snapshot lied"); -+ -+ if !snapshot.has_attrs() && state_changes.is_empty() { -+ return false; -+ } -+ -+ // If we are sensitive to visitedness and the visited state changed, we -+ // force a restyle here. Matching doesn't depend on the actual visited -+ // state at all, so we can't look at matching results to decide what to -+ // do for this case. -+ if state_changes.intersects(IN_VISITED_OR_UNVISITED_STATE) { -+ trace!(" > visitedness change, force subtree restyle"); -+ // We can't just return here because there may also be attribute -+ // changes as well that imply additional hints. -+ let data = data.as_mut().unwrap(); -+ data.hint.insert(RestyleHint::restyle_subtree()); -+ } -+ -+ let mut classes_removed = SmallVec::<[Atom; 8]>::new(); -+ let mut classes_added = SmallVec::<[Atom; 8]>::new(); -+ if snapshot.class_changed() { -+ // TODO(emilio): Do this more efficiently! -+ snapshot.each_class(|c| { -+ if !element.has_class(c, CaseSensitivity::CaseSensitive) { -+ classes_removed.push(c.clone()) -+ } -+ }); -+ -+ element.each_class(|c| { -+ if !snapshot.has_class(c, CaseSensitivity::CaseSensitive) { -+ classes_added.push(c.clone()) -+ } -+ }) -+ } -+ -+ let mut id_removed = None; -+ let mut id_added = None; -+ if snapshot.id_changed() { -+ let old_id = snapshot.id_attr(); -+ let current_id = element.get_id(); -+ -+ if old_id != current_id { -+ id_removed = old_id; -+ id_added = current_id; -+ } -+ } -+ -+ let lookup_element = -+ if element.implemented_pseudo_element().is_some() { -+ element.pseudo_element_originating_element().unwrap() -+ } else { -+ element -+ }; -+ -+ let invalidated_self = { -+ let mut collector = Collector { -+ wrapper, -+ lookup_element, -+ nth_index_cache, -+ state_changes, -+ element, -+ snapshot: &snapshot, -+ quirks_mode: shared_context.quirks_mode(), -+ removed_id: id_removed.as_ref(), -+ added_id: id_added.as_ref(), -+ classes_removed: &classes_removed, -+ classes_added: &classes_added, -+ descendant_invalidations, -+ sibling_invalidations, -+ invalidates_self: false, -+ }; -+ -+ shared_context.stylist.each_invalidation_map(|invalidation_map| { -+ collector.collect_dependencies_in_invalidation_map(invalidation_map); -+ }); -+ -+ // TODO(emilio): Consider storing dependencies from the UA sheet in -+ // a different map. If we do that, we can skip the stuff on the -+ // shared stylist iff cut_off_inheritance is true, and we can look -+ // just at that map. -+ let _cut_off_inheritance = -+ element.each_xbl_stylist(|stylist| { -+ // FIXME(emilio): Replace with assert / remove when we -+ // figure out what to do with the quirks mode mismatches -+ // (that is, when bug 1406875 is properly fixed). 
-+ collector.quirks_mode = stylist.quirks_mode(); -+ stylist.each_invalidation_map(|invalidation_map| { -+ collector.collect_dependencies_in_invalidation_map(invalidation_map); -+ }); -+ }); -+ -+ collector.invalidates_self -+ }; -+ -+ if invalidated_self { -+ if let Some(ref mut data) = data { -+ data.hint.insert(RESTYLE_SELF); -+ } -+ } -+ -+ invalidated_self -+ } -+} -+ -+impl<'a, 'b, E> Collector<'a, 'b, E> -+where -+ E: TElement, -+{ -+ fn collect_dependencies_in_invalidation_map( -+ &mut self, -+ map: &InvalidationMap, -+ ) { -+ let quirks_mode = self.quirks_mode; -+ let removed_id = self.removed_id; -+ if let Some(ref id) = removed_id { -+ if let Some(deps) = map.id_to_selector.get(id, quirks_mode) { -+ for dep in deps { -+ self.scan_dependency(dep, VisitedDependent::No); -+ } -+ } -+ } -+ -+ let added_id = self.added_id; -+ if let Some(ref id) = added_id { -+ if let Some(deps) = map.id_to_selector.get(id, quirks_mode) { -+ for dep in deps { -+ self.scan_dependency(dep, VisitedDependent::No); -+ } -+ } -+ } -+ -+ for class in self.classes_added.iter().chain(self.classes_removed.iter()) { -+ if let Some(deps) = map.class_to_selector.get(class, quirks_mode) { -+ for dep in deps { -+ self.scan_dependency(dep, VisitedDependent::No); -+ } -+ } -+ } -+ -+ let should_examine_attribute_selector_map = -+ self.snapshot.other_attr_changed() || -+ (self.snapshot.class_changed() && map.has_class_attribute_selectors) || -+ (self.snapshot.id_changed() && map.has_id_attribute_selectors); -+ -+ if should_examine_attribute_selector_map { -+ self.collect_dependencies_in_map( -+ &map.other_attribute_affecting_selectors -+ ) -+ } -+ -+ let state_changes = self.state_changes; -+ if !state_changes.is_empty() { -+ self.collect_state_dependencies( -+ &map.state_affecting_selectors, -+ state_changes, -+ ) -+ } -+ } -+ -+ fn collect_dependencies_in_map( -+ &mut self, -+ map: &SelectorMap, -+ ) { -+ map.lookup_with_additional( -+ self.lookup_element, -+ self.quirks_mode, -+ self.removed_id, -+ self.classes_removed, -+ &mut |dependency| { -+ self.scan_dependency(dependency, VisitedDependent::No); -+ true -+ }, -+ ); -+ } -+ -+ fn collect_state_dependencies( -+ &mut self, -+ map: &SelectorMap, -+ state_changes: ElementState, -+ ) { -+ map.lookup_with_additional( -+ self.lookup_element, -+ self.quirks_mode, -+ self.removed_id, -+ self.classes_removed, -+ &mut |dependency| { -+ if !dependency.state.intersects(state_changes) { -+ return true; -+ } -+ let visited_dependent = -+ if dependency.state.intersects(IN_VISITED_OR_UNVISITED_STATE) { -+ VisitedDependent::Yes -+ } else { -+ VisitedDependent::No -+ }; -+ self.scan_dependency(&dependency.dep, visited_dependent); -+ true -+ }, -+ ); -+ } -+ -+ /// Check whether a dependency should be taken into account, using a given -+ /// visited handling mode. 
-+ fn check_dependency( -+ &mut self, -+ visited_handling_mode: VisitedHandlingMode, -+ dependency: &Dependency, -+ relevant_link_found: &mut bool, -+ ) -> bool { -+ let (matches_now, relevant_link_found_now) = { -+ let mut context = MatchingContext::new_for_visited( -+ MatchingMode::Normal, -+ None, -+ self.nth_index_cache.as_mut().map(|c| &mut **c), -+ visited_handling_mode, -+ self.quirks_mode, -+ ); -+ -+ let matches_now = matches_selector( -+ &dependency.selector, -+ dependency.selector_offset, -+ None, -+ &self.element, -+ &mut context, -+ &mut |_, _| {}, -+ ); -+ -+ (matches_now, context.relevant_link_found) -+ }; -+ -+ let (matched_then, relevant_link_found_then) = { -+ let mut context = MatchingContext::new_for_visited( -+ MatchingMode::Normal, -+ None, -+ self.nth_index_cache.as_mut().map(|c| &mut **c), -+ visited_handling_mode, -+ self.quirks_mode, -+ ); -+ -+ let matched_then = matches_selector( -+ &dependency.selector, -+ dependency.selector_offset, -+ None, -+ &self.wrapper, -+ &mut context, -+ &mut |_, _| {}, -+ ); -+ -+ (matched_then, context.relevant_link_found) -+ }; -+ -+ *relevant_link_found = relevant_link_found_now; -+ -+ // Check for mismatches in both the match result and also the status -+ // of whether a relevant link was found. -+ matched_then != matches_now || -+ relevant_link_found_now != relevant_link_found_then -+ } -+ -+ fn scan_dependency( -+ &mut self, -+ dependency: &Dependency, -+ is_visited_dependent: VisitedDependent, -+ ) { -+ debug!("TreeStyleInvalidator::scan_dependency({:?}, {:?}, {:?})", -+ self.element, -+ dependency, -+ is_visited_dependent); -+ -+ if !self.dependency_may_be_relevant(dependency) { -+ return; -+ } -+ -+ let mut relevant_link_found = false; -+ -+ let should_account_for_dependency = self.check_dependency( -+ VisitedHandlingMode::AllLinksUnvisited, -+ dependency, -+ &mut relevant_link_found, -+ ); -+ -+ if should_account_for_dependency { -+ return self.note_dependency(dependency); -+ } -+ -+ // If there is a relevant link, then we also matched in visited -+ // mode. -+ // -+ // Match again in this mode to ensure this also matches. -+ // -+ // Note that we never actually match directly against the element's true -+ // visited state at all, since that would expose us to timing attacks. -+ // -+ // The matching process only considers the relevant link state and -+ // visited handling mode when deciding if visited matches. Instead, we -+ // are rematching here in case there is some :visited selector whose -+ // matching result changed for some other state or attribute change of -+ // this element (for example, for things like [foo]:visited). -+ // -+ // NOTE: This thing is actually untested because testing it is flaky, -+ // see the tests that were added and then backed out in bug 1328509. 
-+ if is_visited_dependent == VisitedDependent::Yes && relevant_link_found { -+ let should_account_for_dependency = self.check_dependency( -+ VisitedHandlingMode::RelevantLinkVisited, -+ dependency, -+ &mut false, -+ ); -+ -+ if should_account_for_dependency { -+ return self.note_dependency(dependency); -+ } -+ } -+ } -+ -+ fn note_dependency(&mut self, dependency: &Dependency) { -+ if dependency.affects_self() { -+ self.invalidates_self = true; -+ } -+ -+ if dependency.affects_descendants() { -+ debug_assert_ne!(dependency.selector_offset, 0); -+ debug_assert!(!dependency.affects_later_siblings()); -+ self.descendant_invalidations.push(Invalidation::new( -+ dependency.selector.clone(), -+ dependency.selector_offset, -+ )); -+ } else if dependency.affects_later_siblings() { -+ debug_assert_ne!(dependency.selector_offset, 0); -+ self.sibling_invalidations.push(Invalidation::new( -+ dependency.selector.clone(), -+ dependency.selector_offset, -+ )); -+ } -+ } -+ -+ /// Returns whether `dependency` may cause us to invalidate the style of -+ /// more elements than what we've already invalidated. -+ fn dependency_may_be_relevant(&self, dependency: &Dependency) -> bool { -+ if dependency.affects_descendants() || dependency.affects_later_siblings() { -+ return true; -+ } -+ -+ debug_assert!(dependency.affects_self()); -+ !self.invalidates_self -+ } -+} -diff --git a/components/style/invalidation/element/invalidator.rs b/components/style/invalidation/element/invalidator.rs -index 80d5f70436..012b9feeda 100644 ---- a/components/style/invalidation/element/invalidator.rs -+++ b/components/style/invalidation/element/invalidator.rs -@@ -5,29 +5,34 @@ - //! The struct that takes care of encapsulating all the logic on where and how - //! element styles need to be invalidated. - --use Atom; - use context::{SharedStyleContext, StackLimitChecker}; - use data::ElementData; - use dom::{TElement, TNode}; --use element_state::{ElementState, IN_VISITED_OR_UNVISITED_STATE}; --use invalidation::element::element_wrapper::{ElementSnapshot, ElementWrapper}; --use invalidation::element::invalidation_map::*; - use invalidation::element::restyle_hints::*; --use selector_map::SelectorMap; --use selector_parser::{SelectorImpl, Snapshot}; -+use selector_parser::SelectorImpl; - use selectors::NthIndexCache; --use selectors::attr::CaseSensitivity; - use selectors::matching::{MatchingContext, MatchingMode, VisitedHandlingMode}; --use selectors::matching::{matches_selector, matches_compound_selector}; - use selectors::matching::CompoundSelectorMatchingResult; -+use selectors::matching::matches_compound_selector; - use selectors::parser::{Combinator, Component, Selector}; - use smallvec::SmallVec; - use std::fmt; - --#[derive(Debug, PartialEq)] --enum VisitedDependent { -- Yes, -- No, -+/// A trait to abstract the collection of invalidations for a given pass. -+pub trait InvalidationCollector { -+ /// Collect invalidations for a given element's descendants and siblings. -+ /// -+ /// Returns whether the element itself was invalidated. 
-+ fn collect_invalidations( -+ element: E, -+ data: Option<&mut ElementData>, -+ nth_index_cache: Option<&mut NthIndexCache>, -+ shared_context: &SharedStyleContext, -+ descendant_invalidations: &mut InvalidationVector, -+ sibling_invalidations: &mut InvalidationVector, -+ ) -> bool -+ where -+ E: TElement; - } - - /// The struct that takes care of encapsulating all the logic on where and how -@@ -51,7 +56,8 @@ pub struct TreeStyleInvalidator<'a, 'b: 'a, E> - nth_index_cache: Option<&'a mut NthIndexCache>, - } - --type InvalidationVector = SmallVec<[Invalidation; 10]>; -+/// A vector of invalidations, optimized for small invalidation sets. -+pub type InvalidationVector = SmallVec<[Invalidation; 10]>; - - /// The kind of invalidation we're processing. - /// -@@ -71,7 +77,7 @@ enum InvalidationKind { - /// must be restyled if the compound selector matches. Otherwise, if - /// describes which descendants (or later siblings) must be restyled. - #[derive(Clone)] --struct Invalidation { -+pub struct Invalidation { - selector: Selector, - offset: usize, - /// Whether the invalidation was already matched by any previous sibling or -@@ -84,6 +90,15 @@ struct Invalidation { - } - - impl Invalidation { -+ /// Create a new invalidation for a given selector and offset. -+ pub fn new(selector: Selector, offset: usize) -> Self { -+ Self { -+ selector, -+ offset, -+ matched_by_any_previous: false, -+ } -+ } -+ - /// Whether this invalidation is effective for the next sibling or - /// descendant after us. - fn effective_for_next(&self) -> bool { -@@ -188,117 +203,25 @@ impl<'a, 'b: 'a, E> TreeStyleInvalidator<'a, 'b, E> - } - - /// Perform the invalidation pass. -- pub fn invalidate(mut self) -> InvalidationResult { -+ pub fn invalidate(mut self) -> InvalidationResult { - debug!("StyleTreeInvalidator::invalidate({:?})", self.element); -- debug_assert!(self.element.has_snapshot(), "Why bothering?"); -- debug_assert!(self.data.is_some(), "How exactly?"); -- -- let shared_context = self.shared_context; -- -- let wrapper = -- ElementWrapper::new(self.element, shared_context.snapshot_map); -- let state_changes = wrapper.state_changes(); -- let snapshot = wrapper.snapshot().expect("has_snapshot lied"); -- -- if !snapshot.has_attrs() && state_changes.is_empty() { -- return InvalidationResult::empty(); -- } -- -- // If we are sensitive to visitedness and the visited state changed, we -- // force a restyle here. Matching doesn't depend on the actual visited -- // state at all, so we can't look at matching results to decide what to -- // do for this case. -- if state_changes.intersects(IN_VISITED_OR_UNVISITED_STATE) { -- trace!(" > visitedness change, force subtree restyle"); -- // We can't just return here because there may also be attribute -- // changes as well that imply additional hints. -- let data = self.data.as_mut().unwrap(); -- data.hint.insert(RestyleHint::restyle_subtree()); -- } -- -- let mut classes_removed = SmallVec::<[Atom; 8]>::new(); -- let mut classes_added = SmallVec::<[Atom; 8]>::new(); -- if snapshot.class_changed() { -- // TODO(emilio): Do this more efficiently! 
-- snapshot.each_class(|c| { -- if !self.element.has_class(c, CaseSensitivity::CaseSensitive) { -- classes_removed.push(c.clone()) -- } -- }); -- -- self.element.each_class(|c| { -- if !snapshot.has_class(c, CaseSensitivity::CaseSensitive) { -- classes_added.push(c.clone()) -- } -- }) -- } -- -- let mut id_removed = None; -- let mut id_added = None; -- if snapshot.id_changed() { -- let old_id = snapshot.id_attr(); -- let current_id = self.element.get_id(); -- -- if old_id != current_id { -- id_removed = old_id; -- id_added = current_id; -- } -- } -- -- let lookup_element = -- if self.element.implemented_pseudo_element().is_some() { -- self.element.pseudo_element_originating_element().unwrap() -- } else { -- self.element -- }; - - let mut descendant_invalidations = InvalidationVector::new(); - let mut sibling_invalidations = InvalidationVector::new(); -- let invalidated_self = { -- let mut collector = InvalidationCollector { -- wrapper, -- lookup_element, -- nth_index_cache: self.nth_index_cache.as_mut().map(|c| &mut **c), -- state_changes, -- element: self.element, -- snapshot: &snapshot, -- shared_context: self.shared_context, -- removed_id: id_removed.as_ref(), -- added_id: id_added.as_ref(), -- classes_removed: &classes_removed, -- classes_added: &classes_added, -- descendant_invalidations: &mut descendant_invalidations, -- sibling_invalidations: &mut sibling_invalidations, -- invalidates_self: false, -- }; -- -- shared_context.stylist.each_invalidation_map(|invalidation_map| { -- collector.collect_dependencies_in_invalidation_map(invalidation_map); -- }); -- -- // TODO(emilio): Consider storing dependencies from the UA sheet in -- // a different map. If we do that, we can skip the stuff on the -- // shared stylist iff cut_off_inheritance is true, and we can look -- // just at that map. 
-- let _cut_off_inheritance = -- self.element.each_xbl_stylist(|stylist| { -- stylist.each_invalidation_map(|invalidation_map| { -- collector.collect_dependencies_in_invalidation_map(invalidation_map); -- }); -- }); -- -- collector.invalidates_self -- }; - -- if invalidated_self { -- if let Some(ref mut data) = self.data { -- data.hint.insert(RESTYLE_SELF); -- } -- } -+ let invalidated_self = C::collect_invalidations( -+ self.element, -+ self.data.as_mut().map(|d| &mut **d), -+ self.nth_index_cache.as_mut().map(|c| &mut **c), -+ self.shared_context, -+ &mut descendant_invalidations, -+ &mut sibling_invalidations, -+ ); - - debug!("Collected invalidations (self: {}): ", invalidated_self); - debug!(" > descendants: {:?}", descendant_invalidations); - debug!(" > siblings: {:?}", sibling_invalidations); -+ - let invalidated_descendants = self.invalidate_descendants(&descendant_invalidations); - let invalidated_siblings = self.invalidate_siblings(&mut sibling_invalidations); - -@@ -816,265 +739,3 @@ impl<'a, 'b: 'a, E> TreeStyleInvalidator<'a, 'b, E> - } - } - --struct InvalidationCollector<'a, 'b: 'a, E> -- where E: TElement, --{ -- element: E, -- wrapper: ElementWrapper<'b, E>, -- nth_index_cache: Option<&'a mut NthIndexCache>, -- snapshot: &'a Snapshot, -- shared_context: &'a SharedStyleContext<'b>, -- lookup_element: E, -- removed_id: Option<&'a Atom>, -- added_id: Option<&'a Atom>, -- classes_removed: &'a SmallVec<[Atom; 8]>, -- classes_added: &'a SmallVec<[Atom; 8]>, -- state_changes: ElementState, -- descendant_invalidations: &'a mut InvalidationVector, -- sibling_invalidations: &'a mut InvalidationVector, -- invalidates_self: bool, --} -- --impl<'a, 'b: 'a, E> InvalidationCollector<'a, 'b, E> -- where E: TElement, --{ -- fn collect_dependencies_in_invalidation_map( -- &mut self, -- map: &InvalidationMap, -- ) { -- let quirks_mode = self.shared_context.quirks_mode(); -- let removed_id = self.removed_id; -- if let Some(ref id) = removed_id { -- if let Some(deps) = map.id_to_selector.get(id, quirks_mode) { -- for dep in deps { -- self.scan_dependency(dep, VisitedDependent::No); -- } -- } -- } -- -- let added_id = self.added_id; -- if let Some(ref id) = added_id { -- if let Some(deps) = map.id_to_selector.get(id, quirks_mode) { -- for dep in deps { -- self.scan_dependency(dep, VisitedDependent::No); -- } -- } -- } -- -- for class in self.classes_added.iter().chain(self.classes_removed.iter()) { -- if let Some(deps) = map.class_to_selector.get(class, quirks_mode) { -- for dep in deps { -- self.scan_dependency(dep, VisitedDependent::No); -- } -- } -- } -- -- let should_examine_attribute_selector_map = -- self.snapshot.other_attr_changed() || -- (self.snapshot.class_changed() && map.has_class_attribute_selectors) || -- (self.snapshot.id_changed() && map.has_id_attribute_selectors); -- -- if should_examine_attribute_selector_map { -- self.collect_dependencies_in_map( -- &map.other_attribute_affecting_selectors -- ) -- } -- -- let state_changes = self.state_changes; -- if !state_changes.is_empty() { -- self.collect_state_dependencies( -- &map.state_affecting_selectors, -- state_changes, -- ) -- } -- } -- -- fn collect_dependencies_in_map( -- &mut self, -- map: &SelectorMap, -- ) { -- map.lookup_with_additional( -- self.lookup_element, -- self.shared_context.quirks_mode(), -- self.removed_id, -- self.classes_removed, -- &mut |dependency| { -- self.scan_dependency(dependency, VisitedDependent::No); -- true -- }, -- ); -- } -- -- fn collect_state_dependencies( -- &mut self, -- map: &SelectorMap, -- 
state_changes: ElementState, -- ) { -- map.lookup_with_additional( -- self.lookup_element, -- self.shared_context.quirks_mode(), -- self.removed_id, -- self.classes_removed, -- &mut |dependency| { -- if !dependency.state.intersects(state_changes) { -- return true; -- } -- let visited_dependent = -- if dependency.state.intersects(IN_VISITED_OR_UNVISITED_STATE) { -- VisitedDependent::Yes -- } else { -- VisitedDependent::No -- }; -- self.scan_dependency(&dependency.dep, visited_dependent); -- true -- }, -- ); -- } -- -- /// Check whether a dependency should be taken into account, using a given -- /// visited handling mode. -- fn check_dependency( -- &mut self, -- visited_handling_mode: VisitedHandlingMode, -- dependency: &Dependency, -- relevant_link_found: &mut bool, -- ) -> bool { -- let (matches_now, relevant_link_found_now) = { -- let mut context = MatchingContext::new_for_visited( -- MatchingMode::Normal, -- None, -- self.nth_index_cache.as_mut().map(|c| &mut **c), -- visited_handling_mode, -- self.shared_context.quirks_mode(), -- ); -- -- let matches_now = matches_selector( -- &dependency.selector, -- dependency.selector_offset, -- None, -- &self.element, -- &mut context, -- &mut |_, _| {}, -- ); -- -- (matches_now, context.relevant_link_found) -- }; -- -- let (matched_then, relevant_link_found_then) = { -- let mut context = MatchingContext::new_for_visited( -- MatchingMode::Normal, -- None, -- self.nth_index_cache.as_mut().map(|c| &mut **c), -- visited_handling_mode, -- self.shared_context.quirks_mode(), -- ); -- -- let matched_then = matches_selector( -- &dependency.selector, -- dependency.selector_offset, -- None, -- &self.wrapper, -- &mut context, -- &mut |_, _| {}, -- ); -- -- (matched_then, context.relevant_link_found) -- }; -- -- *relevant_link_found = relevant_link_found_now; -- -- // Check for mismatches in both the match result and also the status -- // of whether a relevant link was found. -- matched_then != matches_now || -- relevant_link_found_now != relevant_link_found_then -- } -- -- fn scan_dependency( -- &mut self, -- dependency: &Dependency, -- is_visited_dependent: VisitedDependent, -- ) { -- debug!("TreeStyleInvalidator::scan_dependency({:?}, {:?}, {:?})", -- self.element, -- dependency, -- is_visited_dependent); -- -- if !self.dependency_may_be_relevant(dependency) { -- return; -- } -- -- let mut relevant_link_found = false; -- -- let should_account_for_dependency = self.check_dependency( -- VisitedHandlingMode::AllLinksUnvisited, -- dependency, -- &mut relevant_link_found, -- ); -- -- if should_account_for_dependency { -- return self.note_dependency(dependency); -- } -- -- // If there is a relevant link, then we also matched in visited -- // mode. -- // -- // Match again in this mode to ensure this also matches. -- // -- // Note that we never actually match directly against the element's true -- // visited state at all, since that would expose us to timing attacks. -- // -- // The matching process only considers the relevant link state and -- // visited handling mode when deciding if visited matches. Instead, we -- // are rematching here in case there is some :visited selector whose -- // matching result changed for some other state or attribute change of -- // this element (for example, for things like [foo]:visited). -- // -- // NOTE: This thing is actually untested because testing it is flaky, -- // see the tests that were added and then backed out in bug 1328509. 
-- if is_visited_dependent == VisitedDependent::Yes && relevant_link_found { -- let should_account_for_dependency = self.check_dependency( -- VisitedHandlingMode::RelevantLinkVisited, -- dependency, -- &mut false, -- ); -- -- if should_account_for_dependency { -- return self.note_dependency(dependency); -- } -- } -- } -- -- fn note_dependency(&mut self, dependency: &Dependency) { -- if dependency.affects_self() { -- self.invalidates_self = true; -- } -- -- if dependency.affects_descendants() { -- debug_assert_ne!(dependency.selector_offset, 0); -- debug_assert!(!dependency.affects_later_siblings()); -- self.descendant_invalidations.push(Invalidation { -- selector: dependency.selector.clone(), -- offset: dependency.selector_offset, -- matched_by_any_previous: false, -- }); -- } else if dependency.affects_later_siblings() { -- debug_assert_ne!(dependency.selector_offset, 0); -- self.sibling_invalidations.push(Invalidation { -- selector: dependency.selector.clone(), -- offset: dependency.selector_offset, -- matched_by_any_previous: false, -- }); -- } -- } -- -- /// Returns whether `dependency` may cause us to invalidate the style of -- /// more elements than what we've already invalidated. -- fn dependency_may_be_relevant(&self, dependency: &Dependency) -> bool { -- if dependency.affects_descendants() || dependency.affects_later_siblings() { -- return true; -- } -- -- debug_assert!(dependency.affects_self()); -- !self.invalidates_self -- } --} -diff --git a/components/style/invalidation/element/mod.rs b/components/style/invalidation/element/mod.rs -index 61aae2ffcd..3947a2d057 100644 ---- a/components/style/invalidation/element/mod.rs -+++ b/components/style/invalidation/element/mod.rs -@@ -4,6 +4,7 @@ - - //! Invalidation of element styles due to attribute or style changes. - -+pub mod collector; - pub mod element_wrapper; - pub mod invalidation_map; - pub mod invalidator; diff --git a/collector/compile-benchmarks/style-servo/Cargo.lock b/collector/compile-benchmarks/style-servo/Cargo.lock deleted file mode 100644 index 05edfeddc..000000000 --- a/collector/compile-benchmarks/style-servo/Cargo.lock +++ /dev/null @@ -1,1518 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 3 - -[[package]] -name = "aho-corasick" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "500909c4f87a9e52355b26626d890833e9e1d53ac566db76c36faa984b889699" -dependencies = [ - "memchr", -] - -[[package]] -name = "android_injected_glue" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ec08bc5e100186b5223a24dcfe5655d1488aed9eafeb44fb9a0f67a4f53d0fc" - -[[package]] -name = "ansi_term" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6" - -[[package]] -name = "app_units" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed0a4de09a3b8449515e649f3bb84f72ea15fc2d10639beb0776a09b7d308074" -dependencies = [ - "heapsize", - "num-traits", - "rustc-serialize", - "serde", -] - -[[package]] -name = "arrayvec" -version = "0.3.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "699e63a93b79d717e8c3b5eb1b28b7780d0d6d9e59a72eb769291c83b0c8dc67" -dependencies = [ - "nodrop", - "odds", -] - -[[package]] -name = "aster" -version = "0.41.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ccfdf7355d9db158df68f976ed030ab0f6578af811f5a7bb6dcf221ec24e0e0" -dependencies = [ - "syntex_syntax", -] - -[[package]] -name = "atomic_refcell" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7209e59a3bed898705353140d965172ab13bd310d089bcdadbe1d11f2e22536" - -[[package]] -name = "atty" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d912da0db7fa85514874458ca3651fe2cddace8d0b0505571dbdcd41ab490159" -dependencies = [ - "kernel32-sys", - "libc", - "winapi", -] - -[[package]] -name = "bincode" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4a2d3bab374f96192eade8f41914373d7b5fcf030ca2f45141f1e939057259" -dependencies = [ - "byteorder", - "num-traits", - "serde", -] - -[[package]] -name = "bindgen" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c338079dafc81bef7d581f494b906603d12359c4306979eae6ca081925a4984" -dependencies = [ - "aster", - "cexpr", - "cfg-if", - "clang-sys", - "clap", - "env_logger", - "lazy_static", - "log", - "peeking_take_while", - "quasi", - "quasi_codegen", - "regex", - "syntex_syntax", -] - -[[package]] -name = "bitflags" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d" - -[[package]] -name = "bitflags" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1370e9fc2a6ae53aea8b7a5110edbd08836ed87c88736dfabccade1c2b44bff4" - -[[package]] -name = "bitflags" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5" - -[[package]] -name = "byteorder" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff81738b726f5d099632ceaffe7fb65b90212e8dce59d518729e7e8634032d3d" - -[[package]] -name = "cexpr" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdbb21df6ff3497a61df5059994297f746267020ba38ce237aad9c875f7b4313" -dependencies = [ - 
"nom", -] - -[[package]] -name = "cfg-if" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4c819a1287eb618df47cc647173c5c4c66ba19d888a6e50d605672aed3140de" - -[[package]] -name = "clang-sys" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "611ec2e3a7623afd8a8c0d027887b6b55759d894abbf5fe11b9dc11b50d5b49a" -dependencies = [ - "bitflags 0.9.1", - "glob", - "libc", - "libloading", -] - -[[package]] -name = "clap" -version = "2.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b8f69e518f967224e628896b54e41ff6acfb4dcfefc5076325c36525dac900f" -dependencies = [ - "ansi_term", - "atty", - "bitflags 0.8.2", - "strsim", - "term_size", - "unicode-segmentation", - "unicode-width", - "vec_map", -] - -[[package]] -name = "coco" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06169f5beb7e31c7c67ebf5540b8b472d23e3eade3b2ec7d1f5b504a85f91bd" -dependencies = [ - "either", - "scopeguard", -] - -[[package]] -name = "core-foundation" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5909502e547762013619f4c4e01cc7393c20fe2d52d7fa471c1210adb2320dc7" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation-sys" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc9fb3d6cb663e6fd7cf1c63f9b144ee2b1e4a78595a0451dd34bff85b9a3387" -dependencies = [ - "libc", -] - -[[package]] -name = "core-graphics" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fd47addfc77b7e574d24e5434f95bb64a863769dfd4f1d451ca4ff5530ba01a" -dependencies = [ - "bitflags 0.9.1", - "core-foundation", - "libc", -] - -[[package]] -name = "cssparser" -version = "0.22.1" -dependencies = [ - "cssparser-macros", - "dtoa-short", - "heapsize", - "itoa", - "matches", - "phf", - "procedural-masquerade", - "quote 0.3.15", - "serde", - "smallvec", - "syn 0.11.11", -] - -[[package]] -name = "cssparser-macros" -version = "0.3.6" -dependencies = [ - "phf_codegen", - "proc-macro2", - "procedural-masquerade", - "quote 1.0.10", - "syn 1.0.80", -] - -[[package]] -name = "darling" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9861a8495606435477df581bc858ccf15a3469747edf175b94a4704fd9aaedac" -dependencies = [ - "darling_core", - "darling_macro", -] - -[[package]] -name = "darling_core" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1486a8b00b45062c997f767738178b43219133dd0c8c826cb811e60563810821" -dependencies = [ - "ident_case", - "lazy_static", - "quote 0.3.15", - "syn 0.11.11", -] - -[[package]] -name = "darling_macro" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a86ec160aa0c3dd492dd4a14ec8104ad8f1a9400a820624db857998cc1f80f9" -dependencies = [ - "darling_core", - "quote 0.3.15", - "syn 0.11.11", -] - -[[package]] -name = "debug_unreachable" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a032eac705ca39214d169f83e3d3da290af06d8d1d344d1baad2fd002dca4b3" -dependencies = [ - "unreachable 0.1.1", -] - -[[package]] -name = "dtoa" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab" - -[[package]] 
-name = "dtoa-short" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "068d4026697c1a18f0b0bb8cfcad1b0c151b90d8edb9bf4c235ad68128920d1d" -dependencies = [ - "dtoa", -] - -[[package]] -name = "dwrote" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36e3b27cd0b8a68e00f07e8d8e1e4f4d8a6b8b873290a734f63bd56d792d23e1" -dependencies = [ - "gdi32-sys", - "kernel32-sys", - "lazy_static", - "libc", - "serde", - "serde_derive", - "winapi", -] - -[[package]] -name = "either" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18785c1ba806c258137c937e44ada9ee7e69a37e3c72077542cd2f069d78562a" - -[[package]] -name = "encoding" -version = "0.2.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b0d943856b990d12d3b55b359144ff341533e516d94098b1d3fc1ac666d36ec" -dependencies = [ - "encoding-index-japanese", - "encoding-index-korean", - "encoding-index-simpchinese", - "encoding-index-singlebyte", - "encoding-index-tradchinese", -] - -[[package]] -name = "encoding-index-japanese" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04e8b2ff42e9a05335dbf8b5c6f7567e5591d0d916ccef4e0b1710d32a0d0c91" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-korean" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dc33fb8e6bcba213fe2f14275f0963fd16f0a02c878e3095ecfdf5bee529d81" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-simpchinese" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d87a7194909b9118fc707194baa434a4e3b0fb6a5a757c73c3adb07aa25031f7" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-singlebyte" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3351d5acffb224af9ca265f435b859c7c01537c0849754d3db3fdf2bfe2ae84a" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding-index-tradchinese" -version = "1.20141219.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd0e20d5688ce3cab59eb3ef3a2083a5c77bf496cb798dc6fcdb75f323890c18" -dependencies = [ - "encoding_index_tests", -] - -[[package]] -name = "encoding_index_tests" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a246d82be1c9d791c5dfde9a2bd045fc3cbba3fa2b11ad558f27d01712f00569" - -[[package]] -name = "env_logger" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b" -dependencies = [ - "log", - "regex", -] - -[[package]] -name = "euclid" -version = "0.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7be9fcb1ce77782eb620253eb02bc1f000545f3c360841a26cda572f10fad4ff" -dependencies = [ - "heapsize", - "log", - "num-traits", - "serde", -] - -[[package]] -name = "fallible" -version = "0.0.1" -dependencies = [ - "hashglobe", - "smallvec", -] - -[[package]] -name = "fnv" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6cc484842f1e2884faf56f529f960cc12ad8c71ce96cc7abba0a067c98fee344" - -[[package]] -name = "futf" -version = "0.1.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "51f93f3de6ba1794dcd5810b3546d004600a59a98266487c8407bc4b24e398f3" -dependencies = [ - "debug_unreachable", - "mac", -] - -[[package]] -name = "futures" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b63a4792d4f8f686defe3b39b92127fea6344de5d38202b2ee5a11bbbf29d6a" - -[[package]] -name = "gdi32-sys" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0912515a8ff24ba900422ecda800b52f4016a56251922d397c576bf92c690518" -dependencies = [ - "winapi", - "winapi-build", -] - -[[package]] -name = "getopts" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685" - -[[package]] -name = "glob" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" - -[[package]] -name = "hashglobe" -version = "0.1.0" -dependencies = [ - "heapsize", - "libc", - "rand", -] - -[[package]] -name = "heapsize" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54fab2624374e5137ae4df13bf32b0b269cb804df42d13a51221bbd431d1a237" -dependencies = [ - "kernel32-sys", -] - -[[package]] -name = "heapsize_derive" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46f96d52fb1564059fc97b85ef6165728cc30198ab60073bf114c66c4c89bb5d" -dependencies = [ - "quote 0.3.15", - "syn 0.11.11", - "synstructure", -] - -[[package]] -name = "html5ever" -version = "0.20.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bfb46978eb757a603b7dfe2dafb1c62cb4dee3428d8ac1de734d83d6b022d06" -dependencies = [ - "log", - "mac", - "markup5ever", - "quote 0.3.15", - "syn 0.11.11", -] - -[[package]] -name = "ident_case" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c9826188e666f2ed92071d2dadef6edc430b11b158b5b2b3f4babbcc891eaaa" - -[[package]] -name = "idna" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d" -dependencies = [ - "matches", - "unicode-bidi", - "unicode-normalization", -] - -[[package]] -name = "itertools" -version = "0.5.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4833d6978da405305126af4ac88569b5d71ff758581ce5a987dbfa3755f694fc" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c" - -[[package]] -name = "kernel32-sys" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" -dependencies = [ - "winapi", - "winapi-build", -] - -[[package]] -name = "lazy_static" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b37545ab726dd833ec6420aaba8231c5b320814b9029ad585555d2a03e94fbf" - -[[package]] -name = "libc" -version = "0.2.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb7b49972ee23d8aa1026c365a5b440ba08e35075f18c459980c7395c221ec48" - -[[package]] -name = "libloading" -version = "0.4.0" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be99f814beb3e9503a786a592c909692bb6d4fc5a695f6ed7987223acfbd5194" -dependencies = [ - "kernel32-sys", - "lazy_static", - "winapi", -] - -[[package]] -name = "log" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "880f77541efa6e5cc74e76910c9884d9859683118839d6a1dc3b11e63512565b" - -[[package]] -name = "lru_cache" -version = "0.0.1" -dependencies = [ - "arrayvec", -] - -[[package]] -name = "mac" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" - -[[package]] -name = "malloc_size_of" -version = "0.0.1" -dependencies = [ - "app_units", - "cssparser", - "euclid", - "hashglobe", - "servo_arc", - "smallbitvec", - "smallvec", -] - -[[package]] -name = "malloc_size_of_derive" -version = "0.0.1" -dependencies = [ - "quote 0.3.15", - "syn 0.11.11", - "synstructure", -] - -[[package]] -name = "markup5ever" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "047150a0e03b57e638fc45af33a0b63a0362305d5b9f92ecef81df472a4cceb0" -dependencies = [ - "phf", - "phf_codegen", - "rustc-serialize", - "string_cache", - "string_cache_codegen", - "tendril", -] - -[[package]] -name = "matches" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "100aabe6b8ff4e4a7e32c1c13523379802df0772b82466207ac25b013f193376" - -[[package]] -name = "memchr" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4" -dependencies = [ - "libc", -] - -[[package]] -name = "nodrop" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52cd74cd09beba596430cc6e3091b74007169a56246e1262f0ba451ea95117b2" -dependencies = [ - "odds", -] - -[[package]] -name = "nom" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06989cbd367e06f787a451f3bc67d8c3e0eaa10b461cc01152ffab24261a31b1" -dependencies = [ - "memchr", -] - -[[package]] -name = "nsstring_vendor" -version = "0.1.0" -dependencies = [ - "bitflags 0.8.2", -] - -[[package]] -name = "num-integer" -version = "0.1.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1452e8b06e448a07f0e6ebb0bb1d92b8890eea63288c0b627331d53514d0fba" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.1.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99843c856d68d8b4313b03a17e33c4bb42ae8f6610ea81b28abe076ac721b9b0" - -[[package]] -name = "num_cpus" -version = "1.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aec53c34f2d0247c5ca5d32cca1478762f301740468ee9ee6dcb7a0dd7a0c584" -dependencies = [ - "libc", -] - -[[package]] -name = "odds" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3df9b730298cea3a1c3faa90b7e2f9df3a9c400d0936d6015e6165734eefcba" - -[[package]] -name = "ordered-float" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da12c96037889ae0be29dd2bdd260e5a62a7df24e6466d5a15bb8131c1c200a8" -dependencies = [ - "num-traits", - "unreachable 0.1.1", -] - -[[package]] -name = "owning_ref" -version = "0.3.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37" -dependencies = [ - "stable_deref_trait", -] - -[[package]] -name = "parking_lot" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37f364e2ce5efa24c7d0b6646d5bb61145551a0112f107ffd7499f1a3e322fbd" -dependencies = [ - "owning_ref", - "parking_lot_core", - "thread-id", -] - -[[package]] -name = "parking_lot_core" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ad2c4d148942b3560034785bf19df586ebba53351e8c78f84984147d5795eef" -dependencies = [ - "kernel32-sys", - "libc", - "rand", - "smallvec", - "winapi", -] - -[[package]] -name = "pdqsort" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ceca1642c89148ca05611cc775a0c383abef355fc4907c4e95f49f7b09d6287c" - -[[package]] -name = "peeking_take_while" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" - -[[package]] -name = "percent-encoding" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356" - -[[package]] -name = "phf" -version = "0.7.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc" -dependencies = [ - "phf_shared", -] - -[[package]] -name = "phf_codegen" -version = "0.7.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f" -dependencies = [ - "phf_generator", - "phf_shared", -] - -[[package]] -name = "phf_generator" -version = "0.7.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03" -dependencies = [ - "phf_shared", - "rand", -] - -[[package]] -name = "phf_shared" -version = "0.7.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e24b0ca9643bdecd0632f2b3da6b1b89bbb0030e0b992afc1113b23a7bc2f2" -dependencies = [ - "siphasher", -] - -[[package]] -name = "precomputed-hash" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" - -[[package]] -name = "proc-macro2" -version = "1.0.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edc3358ebc67bc8b7fa0c007f945b0b18226f78437d61bec735a9eb96b61ee70" -dependencies = [ - "unicode-xid 0.2.2", -] - -[[package]] -name = "procedural-masquerade" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c93cdc1fb30af9ddf3debc4afbdb0f35126cbd99daa229dd76cdd5349b41d989" - -[[package]] -name = "quasi" -version = "0.32.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18c45c4854d6d1cf5d531db97c75880feb91c958b0720f4ec1057135fec358b3" -dependencies = [ - "syntex_errors", - "syntex_syntax", -] - -[[package]] -name = "quasi_codegen" -version = "0.32.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51b9e25fa23c044c1803f43ca59c98dac608976dd04ce799411edd58ece776d4" -dependencies = [ - "aster", - "syntex", - "syntex_errors", - "syntex_syntax", -] - -[[package]] 
-name = "quote" -version = "0.3.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" - -[[package]] -name = "quote" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "rand" -version = "0.3.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "022e0636ec2519ddae48154b028864bdce4eaf7d35226ab8e65c611be97b189d" -dependencies = [ - "libc", -] - -[[package]] -name = "rayon" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b614fe08b6665cb9a231d07ac1364b0ef3cb3698f1239ee0c4c3a88a524f54c8" -dependencies = [ - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7febc28567082c345f10cddc3612c6ea020fc3297a1977d472cf9fdb73e6e493" -dependencies = [ - "coco", - "futures", - "lazy_static", - "libc", - "num_cpus", - "rand", -] - -[[package]] -name = "redox_syscall" -version = "0.1.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80dcf663dc552529b9bfc7bdb30ea12e5fa5d3545137d850a91ad410053f68e9" - -[[package]] -name = "regex" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", - "thread_local", - "utf8-ranges", -] - -[[package]] -name = "regex-syntax" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db" - -[[package]] -name = "rustc-serialize" -version = "0.3.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe834bc780604f4674073badbad26d7219cadfb4a2275802db12cbae17498401" - -[[package]] -name = "same-file" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7" -dependencies = [ - "kernel32-sys", - "winapi", -] - -[[package]] -name = "scopeguard" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c79eb2c3ac4bc2507cda80e7f3ac5b88bd8eae4c0914d5663e6a8933994be918" - -[[package]] -name = "selectors" -version = "0.19.0" -dependencies = [ - "bitflags 0.7.0", - "cssparser", - "fnv", - "log", - "malloc_size_of", - "malloc_size_of_derive", - "matches", - "phf", - "phf_codegen", - "precomputed-hash", - "servo_arc", - "size_of_test", - "smallvec", -] - -[[package]] -name = "serde" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "433d7d9f8530d5a939ad5e0e72a6243d2e42a24804f70bf592c679363dcacb2f" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b707cf0d4cab852084f573058def08879bb467fda89d99052485e7d00edd624" -dependencies = [ - "quote 0.3.15", - "serde_derive_internals", - "syn 0.11.11", -] - -[[package]] -name = "serde_derive_internals" -version = "0.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37aee4e0da52d801acfbc0cc219eb1eda7142112339726e427926a6f6ee65d3a" 
-dependencies = [ - "syn 0.11.11", - "synom", -] - -[[package]] -name = "servo_arc" -version = "0.0.1" -dependencies = [ - "heapsize", - "nodrop", - "serde", - "stable_deref_trait", -] - -[[package]] -name = "servo_atoms" -version = "0.0.1" -dependencies = [ - "string_cache", - "string_cache_codegen", -] - -[[package]] -name = "servo_config" -version = "0.0.1" -dependencies = [ - "android_injected_glue", - "env_logger", - "euclid", - "getopts", - "lazy_static", - "log", - "num_cpus", - "rustc-serialize", - "serde", - "servo_geometry", - "servo_url", - "url", - "xdg", -] - -[[package]] -name = "servo_geometry" -version = "0.0.1" -dependencies = [ - "app_units", - "euclid", - "heapsize", -] - -[[package]] -name = "servo_rand" -version = "0.0.1" -dependencies = [ - "lazy_static", - "log", - "rand", -] - -[[package]] -name = "servo_url" -version = "0.0.1" -dependencies = [ - "heapsize", - "heapsize_derive", - "serde", - "servo_rand", - "url", - "url_serde", - "uuid", -] - -[[package]] -name = "siphasher" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0df90a788073e8d0235a67e50441d47db7c8ad9debd91cbf43736a2a92d36537" - -[[package]] -name = "size_of_test" -version = "0.0.1" - -[[package]] -name = "smallbitvec" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f92c6cb77d695877f9162aeda68316634da35f0fec35e8516a6ed81cffe9f0de" - -[[package]] -name = "smallvec" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee4f357e8cd37bf8822e1b964e96fd39e2cb5a0424f8aaa284ccaccc2162411c" -dependencies = [ - "heapsize", -] - -[[package]] -name = "stable_deref_trait" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b" - -[[package]] -name = "string_cache" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fa69b90c5398217fb0414706d1becea9325ad21ed5d87bd6dda82127911f324" -dependencies = [ - "debug_unreachable", - "heapsize", - "lazy_static", - "phf_shared", - "precomputed-hash", - "serde", - "string_cache_codegen", - "string_cache_shared", -] - -[[package]] -name = "string_cache_codegen" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479cde50c3539481f33906a387f2bd17c8e87cb848c35b6021d41fb81ff9b4d7" -dependencies = [ - "phf_generator", - "phf_shared", - "quote 0.3.15", - "string_cache_shared", -] - -[[package]] -name = "string_cache_shared" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1884d1bc09741d466d9b14e6d37ac89d6909cbcac41dd9ae982d4d063bbedfc" - -[[package]] -name = "strsim" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694" - -[[package]] -name = "style" -version = "0.0.1" -dependencies = [ - "app_units", - "arrayvec", - "atomic_refcell", - "bindgen", - "bitflags 0.7.0", - "byteorder", - "cfg-if", - "cssparser", - "encoding", - "euclid", - "fallible", - "fnv", - "hashglobe", - "heapsize", - "heapsize_derive", - "html5ever", - "itertools", - "itoa", - "kernel32-sys", - "lazy_static", - "log", - "lru_cache", - "malloc_size_of", - "malloc_size_of_derive", - "matches", - "nsstring_vendor", - "num-integer", - "num-traits", - "num_cpus", - "ordered-float", - "owning_ref", - "parking_lot", - "pdqsort", - 
"precomputed-hash", - "rayon", - "regex", - "selectors", - "serde", - "servo_arc", - "servo_atoms", - "servo_config", - "servo_url", - "smallbitvec", - "smallvec", - "style_derive", - "style_traits", - "time", - "toml", - "unicode-bidi", - "unicode-segmentation", - "walkdir", -] - -[[package]] -name = "style_derive" -version = "0.0.1" -dependencies = [ - "darling", - "quote 0.3.15", - "syn 0.11.11", - "synstructure", -] - -[[package]] -name = "style_traits" -version = "0.0.1" -dependencies = [ - "app_units", - "bitflags 0.7.0", - "cssparser", - "euclid", - "heapsize", - "heapsize_derive", - "malloc_size_of", - "malloc_size_of_derive", - "selectors", - "serde", - "servo_arc", - "servo_atoms", - "webrender_api", -] - -[[package]] -name = "syn" -version = "0.11.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad" -dependencies = [ - "quote 0.3.15", - "synom", - "unicode-xid 0.0.4", -] - -[[package]] -name = "syn" -version = "1.0.80" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d010a1623fbd906d51d650a9916aaefc05ffa0e4053ff7fe601167f3e715d194" -dependencies = [ - "proc-macro2", - "quote 1.0.10", - "unicode-xid 0.2.2", -] - -[[package]] -name = "synom" -version = "0.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6" -dependencies = [ - "unicode-xid 0.0.4", -] - -[[package]] -name = "synstructure" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf318c34a2f8381a4f3d4db2c91b45bca2b1cd8cbe56caced900647be164800c" -dependencies = [ - "quote 0.3.15", - "syn 0.11.11", -] - -[[package]] -name = "syntex" -version = "0.58.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f5e3aaa79319573d19938ea38d068056b826db9883a5d47f86c1cecc688f0e" -dependencies = [ - "syntex_errors", - "syntex_syntax", -] - -[[package]] -name = "syntex_errors" -version = "0.58.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "867cc5c2d7140ae7eaad2ae9e8bf39cb18a67ca651b7834f88d46ca98faadb9c" -dependencies = [ - "libc", - "rustc-serialize", - "syntex_pos", - "term", - "unicode-xid 0.0.4", -] - -[[package]] -name = "syntex_pos" -version = "0.58.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13ad4762fe52abc9f4008e85c4fb1b1fe3aa91ccb99ff4826a439c7c598e1047" -dependencies = [ - "rustc-serialize", -] - -[[package]] -name = "syntex_syntax" -version = "0.58.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e0e4dbae163dd98989464c23dd503161b338790640e11537686f2ef0f25c791" -dependencies = [ - "bitflags 0.8.2", - "log", - "rustc-serialize", - "syntex_errors", - "syntex_pos", - "unicode-xid 0.0.4", -] - -[[package]] -name = "tendril" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9de21546595a0873061940d994bbbc5c35f024ae4fd61ec5c5b159115684f508" -dependencies = [ - "futf", - "mac", - "utf-8", -] - -[[package]] -name = "term" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1" -dependencies = [ - "kernel32-sys", - "winapi", -] - -[[package]] -name = "term_size" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e2b6b55df3198cc93372e85dd2ed817f0e38ce8cc0f22eb32391bfad9c4bf209" -dependencies = [ - "kernel32-sys", - "libc", - "winapi", -] - -[[package]] -name = "thread-id" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2af4d6289a69a35c4d3aea737add39685f2784122c28119a7713165a63d68c9d" -dependencies = [ - "kernel32-sys", - "libc", - "redox_syscall", -] - -[[package]] -name = "thread_local" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1697c4b57aeeb7a536b647165a2825faddffb1d3bad386d507709bd51a90bb14" -dependencies = [ - "lazy_static", - "unreachable 1.0.0", -] - -[[package]] -name = "time" -version = "0.1.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5d788d3aa77bc0ef3e9621256885555368b47bd495c13dd2e7413c89f845520" -dependencies = [ - "kernel32-sys", - "libc", - "redox_syscall", - "winapi", -] - -[[package]] -name = "toml" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4" - -[[package]] -name = "unicode-bidi" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5" -dependencies = [ - "matches", -] - -[[package]] -name = "unicode-normalization" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51ccda9ef9efa3f7ef5d91e8f9b83bbe6955f9bf86aec89d5cce2c874625920f" - -[[package]] -name = "unicode-segmentation" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8083c594e02b8ae1654ae26f0ade5158b119bd88ad0e8227a5d8fcd72407946" - -[[package]] -name = "unicode-width" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f" - -[[package]] -name = "unicode-xid" -version = "0.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc" - -[[package]] -name = "unicode-xid" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" - -[[package]] -name = "unreachable" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91" -dependencies = [ - "void", -] - -[[package]] -name = "unreachable" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" -dependencies = [ - "void", -] - -[[package]] -name = "url" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eeb819346883532a271eb626deb43c4a1bb4c4dd47c519bd78137c3e72a4fe27" -dependencies = [ - "heapsize", - "idna", - "matches", - "percent-encoding", -] - -[[package]] -name = "url_serde" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea" -dependencies = [ - "serde", - "url", -] - -[[package]] -name = "utf-8" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b6f923c601c7ac48ef1d66f7d5b5b2d9a7ba9c51333ab75a3ddf8d0309185a56" -dependencies = [ - "matches", -] - -[[package]] -name = "utf8-ranges" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122" - -[[package]] -name = "uuid" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcc7e3b898aa6f6c08e5295b6c89258d1331e9ac578cc992fb818759951bdc22" -dependencies = [ - "rand", - "serde", -] - -[[package]] -name = "vec_map" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c" - -[[package]] -name = "void" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" - -[[package]] -name = "walkdir" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb08f9e670fab86099470b97cd2b252d6527f0b3cc1401acdb595ffc9dd288ff" -dependencies = [ - "kernel32-sys", - "same-file", - "winapi", -] - -[[package]] -name = "webrender_api" -version = "0.52.1" -source = "git+https://github.com/servo/webrender#c5265b9eed76903da9066ebadbeda5bb908b401d" -dependencies = [ - "app_units", - "bincode", - "bitflags 0.9.1", - "byteorder", - "core-foundation", - "core-graphics", - "dwrote", - "euclid", - "heapsize", - "serde", - "time", -] - -[[package]] -name = "winapi" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" - -[[package]] -name = "winapi-build" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" - -[[package]] -name = "xdg" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a66b7c2281ebde13cf4391d70d4c7e5946c3c25e72a7b859ca8f677dcd0b0c61" diff --git a/collector/compile-benchmarks/style-servo/Cargo.toml b/collector/compile-benchmarks/style-servo/Cargo.toml deleted file mode 100644 index 2cdced1d4..000000000 --- a/collector/compile-benchmarks/style-servo/Cargo.toml +++ /dev/null @@ -1,6 +0,0 @@ -[workspace] -members = ["components/style"] - -[patch.crates-io] -cssparser = { path = "./rust-cssparser" } -cssparser-macros = { path = "./cssparser-macros" } diff --git a/collector/compile-benchmarks/style-servo/components/atoms/Cargo.toml b/collector/compile-benchmarks/style-servo/components/atoms/Cargo.toml deleted file mode 100644 index ff149db72..000000000 --- a/collector/compile-benchmarks/style-servo/components/atoms/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "servo_atoms" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false -build = "build.rs" - -[lib] -path = "lib.rs" - -[dependencies] -string_cache = {version = "0.6", features = ["heapsize"]} - -[build-dependencies] -string_cache_codegen = "0.4" diff --git a/collector/compile-benchmarks/style-servo/components/atoms/build.rs b/collector/compile-benchmarks/style-servo/components/atoms/build.rs deleted file mode 100644 index 86a41a67e..000000000 --- a/collector/compile-benchmarks/style-servo/components/atoms/build.rs +++ /dev/null @@ -1,33 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 
2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -extern crate string_cache_codegen; - -use std::env; -use std::fs::File; -use std::io::{BufRead, BufReader}; -use std::path::Path; - -fn main() { - let static_atoms = Path::new(&env::var("CARGO_MANIFEST_DIR").unwrap()).join("static_atoms.txt"); - let static_atoms = BufReader::new(File::open(&static_atoms).unwrap()); - let mut atom_type = string_cache_codegen::AtomType::new("Atom", "atom!"); - - macro_rules! predefined { - ($($name: expr,)+) => { - { - $( - atom_type.atom($name); - )+ - } - } - } - include!("../style/counter_style/predefined.rs"); - - atom_type - .atoms(static_atoms.lines().map(Result::unwrap)) - .write_to_file(&Path::new(&env::var("OUT_DIR").unwrap()).join("atom.rs")) - .unwrap(); - println!("cargo:rerun-if-changed=build.rs"); -} diff --git a/collector/compile-benchmarks/style-servo/components/atoms/lib.rs b/collector/compile-benchmarks/style-servo/components/atoms/lib.rs deleted file mode 100644 index c1da7167b..000000000 --- a/collector/compile-benchmarks/style-servo/components/atoms/lib.rs +++ /dev/null @@ -1,7 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -extern crate string_cache; - -include!(concat!(env!("OUT_DIR"), "/atom.rs")); diff --git a/collector/compile-benchmarks/style-servo/components/atoms/static_atoms.txt b/collector/compile-benchmarks/style-servo/components/atoms/static_atoms.txt deleted file mode 100644 index efded7c65..000000000 --- a/collector/compile-benchmarks/style-servo/components/atoms/static_atoms.txt +++ /dev/null @@ -1,79 +0,0 @@ -abort -activate -beforeunload -button -canplay -canplaythrough -center -change -characteristicvaluechanged -checkbox -click -close -controllerchange -cursive -date -datetime -datetime-local -dir -DOMContentLoaded -email -emptied -error -fantasy -fetch -file -fullscreenchange -fullscreenerror -gattserverdisconnected -hidden -image -input -invalid -keydown -keypress -left -load -loadeddata -loadedmetadata -loadend -loadstart -message -message -monospace -month -mouseover -none -number -onchange -open -password -pause -play -playing -print -progress -radio -readystatechange -reftest-wait -reset -right -sans-serif -screen -search -select -serif -statechange -storage -submit -suspend -tel -text -time -timeupdate -toggle -transitionend -url -waiting -webglcontextcreationerror -week diff --git a/collector/compile-benchmarks/style-servo/components/bluetooth/Cargo.toml b/collector/compile-benchmarks/style-servo/components/bluetooth/Cargo.toml deleted file mode 100644 index 45a1d381d..000000000 --- a/collector/compile-benchmarks/style-servo/components/bluetooth/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -name = "bluetooth" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "bluetooth" -path = "lib.rs" - -[dependencies] -bitflags = "0.7" -bluetooth_traits = {path = "../bluetooth_traits"} -device = {git = "https://github.com/servo/devices", features = ["bluetooth-test"]} -ipc-channel = "0.8" -servo_config = {path = "../config"} -servo_rand = {path = "../rand"} -uuid = {version = "0.5", features = ["v4"]} - -[target.'cfg(target_os = "linux")'.dependencies] -tinyfiledialogs = "2.5.9" diff --git a/collector/compile-benchmarks/style-servo/components/bluetooth/lib.rs 
b/collector/compile-benchmarks/style-servo/components/bluetooth/lib.rs deleted file mode 100644 index ac41040fe..000000000 --- a/collector/compile-benchmarks/style-servo/components/bluetooth/lib.rs +++ /dev/null @@ -1,956 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -#[macro_use] -extern crate bitflags; -extern crate bluetooth_traits; -extern crate device; -extern crate ipc_channel; -extern crate servo_config; -extern crate servo_rand; -#[cfg(target_os = "linux")] -extern crate tinyfiledialogs; -extern crate uuid; - -pub mod test; - -use bluetooth_traits::{BluetoothCharacteristicMsg, BluetoothDescriptorMsg, BluetoothServiceMsg}; -use bluetooth_traits::{BluetoothDeviceMsg, BluetoothRequest, BluetoothResponse, GATTType}; -use bluetooth_traits::{BluetoothError, BluetoothResponseResult, BluetoothResult}; -use bluetooth_traits::blocklist::{uuid_is_blocklisted, Blocklist}; -use bluetooth_traits::scanfilter::{BluetoothScanfilter, BluetoothScanfilterSequence, RequestDeviceoptions}; -use device::bluetooth::{BluetoothAdapter, BluetoothDevice, BluetoothGATTCharacteristic}; -use device::bluetooth::{BluetoothGATTDescriptor, BluetoothGATTService}; -use ipc_channel::ipc::{self, IpcReceiver, IpcSender}; -#[cfg(target_os = "linux")] -use servo_config::opts; -use servo_config::prefs::PREFS; -use servo_rand::Rng; -use std::borrow::ToOwned; -use std::collections::{HashMap, HashSet}; -use std::string::String; -use std::thread; -use std::time::Duration; - -// A transaction not completed within 30 seconds shall time out. Such a transaction shall be considered to have failed. -// https://www.bluetooth.org/DocMan/handlers/DownloadDoc.ashx?doc_id=286439 (Vol. 3, page 480) -const MAXIMUM_TRANSACTION_TIME: u8 = 30; -const CONNECTION_TIMEOUT_MS: u64 = 1000; -// The discovery session needs some time to find any nearby devices -const DISCOVERY_TIMEOUT_MS: u64 = 1500; -#[cfg(target_os = "linux")] -const DIALOG_TITLE: &'static str = "Choose a device"; -#[cfg(target_os = "linux")] -const DIALOG_COLUMN_ID: &'static str = "Id"; -#[cfg(target_os = "linux")] -const DIALOG_COLUMN_NAME: &'static str = "Name"; - -bitflags! { - flags Flags: u32 { - const BROADCAST = 0b000000001, - const READ = 0b000000010, - const WRITE_WITHOUT_RESPONSE = 0b000000100, - const WRITE = 0b000001000, - const NOTIFY = 0b000010000, - const INDICATE = 0b000100000, - const AUTHENTICATED_SIGNED_WRITES = 0b001000000, - const RELIABLE_WRITE = 0b010000000, - const WRITABLE_AUXILIARIES = 0b100000000, - } -} - -macro_rules! return_if_cached( - ($cache:expr, $key:expr) => ( - if $cache.contains_key($key) { - return $cache.get($key); - } - ); -); - -pub trait BluetoothThreadFactory { - fn new() -> Self; -} - -impl BluetoothThreadFactory for IpcSender { - fn new() -> IpcSender { - let (sender, receiver) = ipc::channel().unwrap(); - let adapter = if Some(true) == PREFS.get("dom.bluetooth.enabled").as_boolean() { - BluetoothAdapter::init() - } else { - BluetoothAdapter::init_mock() - }.ok(); - thread::Builder::new().name("BluetoothThread".to_owned()).spawn(move || { - BluetoothManager::new(receiver, adapter).start(); - }).expect("Thread spawning failed"); - sender - } -} - -// https://webbluetoothcg.github.io/web-bluetooth/#matches-a-filter -fn matches_filter(device: &BluetoothDevice, filter: &BluetoothScanfilter) -> bool { - if filter.is_empty_or_invalid() { - return false; - } - - // Step 1. 
- if let Some(name) = filter.get_name() { - if device.get_name().ok() != Some(name.to_string()) { - return false; - } - } - - // Step 2. - if !filter.get_name_prefix().is_empty() { - if let Ok(device_name) = device.get_name() { - if !device_name.starts_with(filter.get_name_prefix()) { - return false; - } - } else { - return false; - } - } - - // Step 3. - if !filter.get_services().is_empty() { - if let Ok(device_uuids) = device.get_uuids() { - for service in filter.get_services() { - if device_uuids.iter().find(|x| x == &service).is_none() { - return false; - } - } - } - } - - // Step 4. - if let Some(ref manufacturer_data) = filter.get_manufacturer_data() { - let advertised_manufacturer_data = match device.get_manufacturer_data() { - Ok(data) => data, - Err(_) => return false, - }; - for (ref id, &(ref prefix, ref mask)) in manufacturer_data.iter() { - if let Some(advertised_data) = advertised_manufacturer_data.get(id) { - if !data_filter_matches(advertised_data, prefix, mask) { - return false; - } - } else { - return false; - } - } - } - - // Step 5. - if let Some(ref service_data) = filter.get_service_data() { - let advertised_service_data = match device.get_service_data() { - Ok(data) => data, - Err(_) => return false, - }; - for (uuid, &(ref prefix, ref mask)) in service_data.iter() { - if let Some(advertised_data) = advertised_service_data.get(uuid.as_str()) { - if !data_filter_matches(advertised_data, prefix, mask) { - return false; - } - } else { - return false; - } - } - } - - // Step 6. - true -} - -// https://webbluetoothcg.github.io/web-bluetooth/#bluetoothdatafilterinit-matches -fn data_filter_matches(data: &[u8], prefix: &[u8], mask: &[u8]) -> bool { - // Step 1-2: No need to copy the bytes here. - // Step 3. - if data.len() < prefix.len() { - return false; - } - - // Step 4. - for ((data, mask), prefix) in data.iter().zip(mask.iter()).zip(prefix.iter()) { - if data & mask != prefix & mask { - return false; - } - } - - // Step 5. 
- true -} - -fn matches_filters(device: &BluetoothDevice, filters: &BluetoothScanfilterSequence) -> bool { - if filters.has_empty_or_invalid_filter() { - return false; - } - - return filters.iter().any(|f| matches_filter(device, f)) -} - -fn is_mock_adapter(adapter: &BluetoothAdapter) -> bool { - match adapter { - &BluetoothAdapter::Mock(_) => true, - _ => false, - } -} - -pub struct BluetoothManager { - receiver: IpcReceiver, - adapter: Option, - address_to_id: HashMap, - service_to_device: HashMap, - characteristic_to_service: HashMap, - descriptor_to_characteristic: HashMap, - cached_devices: HashMap, - cached_services: HashMap, - cached_characteristics: HashMap, - cached_descriptors: HashMap, - allowed_services: HashMap>, -} - -impl BluetoothManager { - pub fn new(receiver: IpcReceiver, adapter: Option) -> BluetoothManager { - BluetoothManager { - receiver: receiver, - adapter: adapter, - address_to_id: HashMap::new(), - service_to_device: HashMap::new(), - characteristic_to_service: HashMap::new(), - descriptor_to_characteristic: HashMap::new(), - cached_devices: HashMap::new(), - cached_services: HashMap::new(), - cached_characteristics: HashMap::new(), - cached_descriptors: HashMap::new(), - allowed_services: HashMap::new(), - } - } - - fn start(&mut self) { - while let Ok(msg) = self.receiver.recv() { - match msg { - BluetoothRequest::RequestDevice(options, sender) => { - let _ = sender.send(self.request_device(options)); - }, - BluetoothRequest::GATTServerConnect(device_id, sender) => { - let _ = sender.send(self.gatt_server_connect(device_id)); - }, - BluetoothRequest::GATTServerDisconnect(device_id, sender) => { - let _ = sender.send(self.gatt_server_disconnect(device_id)); - }, - BluetoothRequest::GetGATTChildren(id, uuid, single, child_type, sender) => { - let _ = sender.send(self.get_gatt_children(id, uuid, single, child_type)); - }, - BluetoothRequest::ReadValue(id, sender) => { - let _ = sender.send(self.read_value(id)); - }, - BluetoothRequest::WriteValue(id, value, sender) => { - let _ = sender.send(self.write_value(id, value)); - }, - BluetoothRequest::EnableNotification(id, enable, sender) => { - let _ = sender.send(self.enable_notification(id, enable)); - }, - BluetoothRequest::WatchAdvertisements(id, sender) => { - let _ = sender.send(self.watch_advertisements(id)); - }, - BluetoothRequest::Test(data_set_name, sender) => { - let _ = sender.send(self.test(data_set_name)); - }, - BluetoothRequest::SetRepresentedToNull(service_ids, characteristic_ids, descriptor_ids) => { - self.remove_ids_from_caches(service_ids, characteristic_ids, descriptor_ids) - }, - BluetoothRequest::IsRepresentedDeviceNull(id, sender) => { - let _ = sender.send(!self.device_is_cached(&id)); - }, - BluetoothRequest::GetAvailability(sender) => { - let _ = sender.send(self.get_availability()); - }, - BluetoothRequest::MatchesFilter(id, filters, sender) => { - let _ = sender.send(self.device_matches_filter(&id, &filters)); - }, - BluetoothRequest::Exit => { - break - }, - } - } - } - - // Test - - fn test(&mut self, data_set_name: String) -> BluetoothResult<()> { - self.address_to_id.clear(); - self.service_to_device.clear(); - self.characteristic_to_service.clear(); - self.descriptor_to_characteristic.clear(); - self.cached_devices.clear(); - self.cached_services.clear(); - self.cached_characteristics.clear(); - self.cached_descriptors.clear(); - self.allowed_services.clear(); - self.adapter = BluetoothAdapter::init_mock().ok(); - match test::test(self, data_set_name) { - Ok(_) => return Ok(()), - 
Err(error) => Err(BluetoothError::Type(error.description().to_owned())), - } - } - - fn remove_ids_from_caches(&mut self, - service_ids: Vec, - characteristic_ids: Vec, - descriptor_ids: Vec) { - for id in service_ids { - self.cached_services.remove(&id); - self.service_to_device.remove(&id); - } - - for id in characteristic_ids { - self.cached_characteristics.remove(&id); - self.characteristic_to_service.remove(&id); - } - - for id in descriptor_ids { - self.cached_descriptors.remove(&id); - self.descriptor_to_characteristic.remove(&id); - } - } - - // Adapter - - pub fn get_or_create_adapter(&mut self) -> Option { - let adapter_valid = self.adapter.as_ref().map_or(false, |a| a.get_address().is_ok()); - if !adapter_valid { - self.adapter = BluetoothAdapter::init().ok(); - } - - let adapter = match self.adapter.as_ref() { - Some(adapter) => adapter, - None => return None, - }; - - if is_mock_adapter(adapter) && !adapter.is_present().unwrap_or(false) { - return None; - } - - self.adapter.clone() - } - - fn get_adapter(&mut self) -> BluetoothResult { - match self.get_or_create_adapter() { - Some(adapter) => { - if !adapter.is_powered().unwrap_or(false) { - return Err(BluetoothError::NotFound); - } - return Ok(adapter); - }, - None => return Err(BluetoothError::NotFound), - } - } - - // Device - - fn get_and_cache_devices(&mut self, adapter: &mut BluetoothAdapter) -> Vec { - let devices = adapter.get_devices().unwrap_or(vec!()); - for device in &devices { - if let Ok(address) = device.get_address() { - if !self.address_to_id.contains_key(&address) { - let generated_id = self.generate_device_id(); - self.address_to_id.insert(address, generated_id.clone()); - self.cached_devices.insert(generated_id.clone(), device.clone()); - self.allowed_services.insert(generated_id, HashSet::new()); - } - } - } - self.cached_devices.iter().map(|(_, d)| d.clone()).collect() - } - - fn get_device(&mut self, adapter: &mut BluetoothAdapter, device_id: &str) -> Option<&BluetoothDevice> { - return_if_cached!(self.cached_devices, device_id); - self.get_and_cache_devices(adapter); - return_if_cached!(self.cached_devices, device_id); - None - } - - #[cfg(target_os = "linux")] - fn select_device(&mut self, devices: Vec, adapter: &BluetoothAdapter) -> Option { - if is_mock_adapter(adapter) || opts::get().headless { - for device in devices { - if let Ok(address) = device.get_address() { - return Some(address); - } - } - return None; - } - - let mut dialog_rows: Vec = vec!(); - for device in devices { - dialog_rows.extend_from_slice(&[device.get_address().unwrap_or("".to_string()), - device.get_name().unwrap_or("".to_string())]); - } - let dialog_rows: Vec<&str> = dialog_rows.iter() - .map(|s| s.as_ref()) - .collect(); - let dialog_rows: &[&str] = dialog_rows.as_slice(); - - if let Some(device) = tinyfiledialogs::list_dialog(DIALOG_TITLE, - &[DIALOG_COLUMN_ID, DIALOG_COLUMN_NAME], - Some(dialog_rows)) { - // The device string format will be "Address|Name". We need the first part of it. 
- return device.split("|").next().map(|s| s.to_string()); - } - None - } - - #[cfg(not(target_os = "linux"))] - fn select_device(&mut self, devices: Vec, _adapter: &BluetoothAdapter) -> Option { - for device in devices { - if let Ok(address) = device.get_address() { - return Some(address); - } - } - None - } - - fn generate_device_id(&mut self) -> String { - let mut device_id; - let mut rng = servo_rand::thread_rng(); - loop { - device_id = rng.gen::().to_string(); - if !self.cached_devices.contains_key(&device_id) { - break; - } - } - device_id - } - - fn device_from_service_id(&self, service_id: &str) -> Option { - let device_id = match self.service_to_device.get(service_id) { - Some(id) => id, - None => return None, - }; - match self.cached_devices.get(device_id) { - Some(d) => Some(d.clone()), - None => None, - } - } - - fn device_is_cached(&self, device_id: &str) -> bool { - self.cached_devices.contains_key(device_id) && self.address_to_id.values().any(|v| v == device_id) - } - - fn device_matches_filter(&mut self, - device_id: &str, - filters: &BluetoothScanfilterSequence) - -> BluetoothResult { - let mut adapter = self.get_adapter()?; - match self.get_device(&mut adapter, device_id) { - Some(ref device) => Ok(matches_filters(device, filters)), - None => Ok(false), - } - } - - // Service - - fn get_and_cache_gatt_services(&mut self, - adapter: &mut BluetoothAdapter, - device_id: &str) - -> Vec { - let mut services = match self.get_device(adapter, device_id) { - Some(d) => d.get_gatt_services().unwrap_or(vec!()), - None => vec!(), - }; - - services.retain(|s| !uuid_is_blocklisted(&s.get_uuid().unwrap_or(String::new()), Blocklist::All) && - self.allowed_services - .get(device_id) - .map_or(false, |uuids| uuids.contains(&s.get_uuid().unwrap_or(String::new())))); - for service in &services { - self.cached_services.insert(service.get_id(), service.clone()); - self.service_to_device.insert(service.get_id(), device_id.to_owned()); - } - services - } - - fn get_gatt_service(&mut self, adapter: &mut BluetoothAdapter, service_id: &str) -> Option<&BluetoothGATTService> { - return_if_cached!(self.cached_services, service_id); - let device_id = match self.service_to_device.get(service_id) { - Some(d) => d.clone(), - None => return None, - }; - self.get_and_cache_gatt_services(adapter, &device_id); - return_if_cached!(self.cached_services, service_id); - None - } - - fn service_is_cached(&self, service_id: &str) -> bool { - self.cached_services.contains_key(service_id) && self.service_to_device.contains_key(service_id) - } - - // Characteristic - - fn get_and_cache_gatt_characteristics(&mut self, - adapter: &mut BluetoothAdapter, - service_id: &str) - -> Vec { - let mut characteristics = match self.get_gatt_service(adapter, service_id) { - Some(s) => s.get_gatt_characteristics().unwrap_or(vec!()), - None => vec!(), - }; - - characteristics.retain(|c| !uuid_is_blocklisted(&c.get_uuid().unwrap_or(String::new()), Blocklist::All)); - for characteristic in &characteristics { - self.cached_characteristics.insert(characteristic.get_id(), characteristic.clone()); - self.characteristic_to_service.insert(characteristic.get_id(), service_id.to_owned()); - } - characteristics - } - - fn get_gatt_characteristic(&mut self, - adapter: &mut BluetoothAdapter, - characteristic_id: &str) - -> Option<&BluetoothGATTCharacteristic> { - return_if_cached!(self.cached_characteristics, characteristic_id); - let service_id = match self.characteristic_to_service.get(characteristic_id) { - Some(s) => s.clone(), - None => 
return None, - }; - self.get_and_cache_gatt_characteristics(adapter, &service_id); - return_if_cached!(self.cached_characteristics, characteristic_id); - None - } - - fn get_characteristic_properties(&self, characteristic: &BluetoothGATTCharacteristic) -> Flags { - let mut props: Flags = Flags::empty(); - let flags = characteristic.get_flags().unwrap_or(vec!()); - for flag in flags { - match flag.as_ref() { - "broadcast" => props.insert(BROADCAST), - "read" => props.insert(READ), - "write-without-response" => props.insert(WRITE_WITHOUT_RESPONSE), - "write" => props.insert(WRITE), - "notify" => props.insert(NOTIFY), - "indicate" => props.insert(INDICATE), - "authenticated-signed-writes" => props.insert(AUTHENTICATED_SIGNED_WRITES), - "reliable-write" => props.insert(RELIABLE_WRITE), - "writable-auxiliaries" => props.insert(WRITABLE_AUXILIARIES), - _ => (), - } - } - props - } - - fn characteristic_is_cached(&self, characteristic_id: &str) -> bool { - self.cached_characteristics.contains_key(characteristic_id) && - self.characteristic_to_service.contains_key(characteristic_id) - } - - // Descriptor - - fn get_and_cache_gatt_descriptors(&mut self, - adapter: &mut BluetoothAdapter, - characteristic_id: &str) - -> Vec { - let mut descriptors = match self.get_gatt_characteristic(adapter, characteristic_id) { - Some(c) => c.get_gatt_descriptors().unwrap_or(vec!()), - None => vec!(), - }; - - descriptors.retain(|d| !uuid_is_blocklisted(&d.get_uuid().unwrap_or(String::new()), Blocklist::All)); - for descriptor in &descriptors { - self.cached_descriptors.insert(descriptor.get_id(), descriptor.clone()); - self.descriptor_to_characteristic.insert(descriptor.get_id(), characteristic_id.to_owned()); - } - descriptors - } - - fn get_gatt_descriptor(&mut self, - adapter: &mut BluetoothAdapter, - descriptor_id: &str) - -> Option<&BluetoothGATTDescriptor> { - return_if_cached!(self.cached_descriptors, descriptor_id); - let characteristic_id = match self.descriptor_to_characteristic.get(descriptor_id) { - Some(c) => c.clone(), - None => return None, - }; - self.get_and_cache_gatt_descriptors(adapter, &characteristic_id); - return_if_cached!(self.cached_descriptors, descriptor_id); - None - } - - // Methods - - // https://webbluetoothcg.github.io/web-bluetooth/#request-bluetooth-devices - fn request_device(&mut self, - options: RequestDeviceoptions) - -> BluetoothResponseResult { - // Step 6. - let mut adapter = self.get_adapter()?; - - // Step 7. - // Note: There are no requiredServiceUUIDS, we scan for all devices. - if let Ok(ref session) = adapter.create_discovery_session() { - if session.start_discovery().is_ok() { - if !is_mock_adapter(&adapter) { - thread::sleep(Duration::from_millis(DISCOVERY_TIMEOUT_MS)); - } - } - let _ = session.stop_discovery(); - } - - let mut matched_devices = self.get_and_cache_devices(&mut adapter); - - // Step 8. - if !options.is_accepting_all_devices() { - matched_devices = matched_devices.into_iter() - .filter(|d| matches_filters(d, options.get_filters())) - .collect(); - } - - // Step 9. 
- if let Some(address) = self.select_device(matched_devices, &adapter) { - let device_id = match self.address_to_id.get(&address) { - Some(id) => id.clone(), - None => return Err(BluetoothError::NotFound), - }; - let mut services = options.get_services_set(); - if let Some(services_set) = self.allowed_services.get(&device_id) { - services = services_set | &services; - } - self.allowed_services.insert(device_id.clone(), services); - if let Some(device) = self.get_device(&mut adapter, &device_id) { - let message = BluetoothDeviceMsg { - id: device_id, - name: device.get_name().ok(), - }; - return Ok(BluetoothResponse::RequestDevice(message)); - } - } - // Step 10. - return Err(BluetoothError::NotFound); - // Step 12: Missing, because it is optional. - } - - // https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetoothremotegattserver-connect - fn gatt_server_connect(&mut self, device_id: String) -> BluetoothResponseResult { - // Step 2. - if !self.device_is_cached(&device_id) { - return Err(BluetoothError::Network); - } - let mut adapter = self.get_adapter()?; - - // Step 5.1.1. - match self.get_device(&mut adapter, &device_id) { - Some(d) => { - if d.is_connected().unwrap_or(false) { - return Ok(BluetoothResponse::GATTServerConnect(true)); - } - let _ = d.connect(); - for _ in 0..MAXIMUM_TRANSACTION_TIME { - match d.is_connected().unwrap_or(false) { - true => return Ok(BluetoothResponse::GATTServerConnect(true)), - false => { - if is_mock_adapter(&adapter) { - break; - } - thread::sleep(Duration::from_millis(CONNECTION_TIMEOUT_MS)); - }, - } - // TODO: Step 5.1.4: Use the exchange MTU procedure. - } - // Step 5.1.3. - return Err(BluetoothError::Network); - }, - None => return Err(BluetoothError::NotFound), - } - } - - // https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetoothremotegattserver-disconnect - fn gatt_server_disconnect(&mut self, device_id: String) -> BluetoothResult<()> { - let mut adapter = self.get_adapter()?; - match self.get_device(&mut adapter, &device_id) { - Some(d) => { - // Step 2. - if !d.is_connected().unwrap_or(true) { - return Ok(()); - } - let _ = d.disconnect(); - for _ in 0..MAXIMUM_TRANSACTION_TIME { - match d.is_connected().unwrap_or(true) { - true => thread::sleep(Duration::from_millis(CONNECTION_TIMEOUT_MS)), - false => return Ok(()), - } - } - return Err(BluetoothError::Network); - }, - None => return Err(BluetoothError::NotFound), - } - } - - // https://webbluetoothcg.github.io/web-bluetooth/#getgattchildren - fn get_gatt_children(&mut self, - id: String, - uuid: Option, - single: bool, - child_type: GATTType) - -> BluetoothResponseResult { - let mut adapter = self.get_adapter()?; - match child_type { - GATTType::PrimaryService => { - // Step 5. - if !self.device_is_cached(&id) { - return Err(BluetoothError::InvalidState); - } - // Step 6. - if let Some(ref uuid) = uuid { - if !self.allowed_services.get(&id).map_or(false, |s| s.contains(uuid)) { - return Err(BluetoothError::Security); - } - } - let mut services = self.get_and_cache_gatt_services(&mut adapter, &id); - if let Some(uuid) = uuid { - services.retain(|ref e| e.get_uuid().unwrap_or(String::new()) == uuid); - } - let mut services_vec = vec!(); - for service in services { - if service.is_primary().unwrap_or(false) { - if let Ok(uuid) = service.get_uuid() { - services_vec.push( - BluetoothServiceMsg { - uuid: uuid, - is_primary: true, - instance_id: service.get_id(), - } - ); - } - } - } - // Step 7. 
- if services_vec.is_empty() { - return Err(BluetoothError::NotFound); - } - - return Ok(BluetoothResponse::GetPrimaryServices(services_vec, single)); - }, - GATTType::Characteristic => { - // Step 5. - if !self.service_is_cached(&id) { - return Err(BluetoothError::InvalidState); - } - // Step 6. - let mut characteristics = self.get_and_cache_gatt_characteristics(&mut adapter, &id); - if let Some(uuid) = uuid { - characteristics.retain(|ref e| e.get_uuid().unwrap_or(String::new()) == uuid); - } - let mut characteristics_vec = vec!(); - for characteristic in characteristics { - if let Ok(uuid) = characteristic.get_uuid() { - let properties = self.get_characteristic_properties(&characteristic); - characteristics_vec.push( - BluetoothCharacteristicMsg { - uuid: uuid, - instance_id: characteristic.get_id(), - broadcast: properties.contains(BROADCAST), - read: properties.contains(READ), - write_without_response: properties.contains(WRITE_WITHOUT_RESPONSE), - write: properties.contains(WRITE), - notify: properties.contains(NOTIFY), - indicate: properties.contains(INDICATE), - authenticated_signed_writes: properties.contains(AUTHENTICATED_SIGNED_WRITES), - reliable_write: properties.contains(RELIABLE_WRITE), - writable_auxiliaries: properties.contains(WRITABLE_AUXILIARIES), - } - ); - } - } - - // Step 7. - if characteristics_vec.is_empty() { - return Err(BluetoothError::NotFound); - } - - return Ok(BluetoothResponse::GetCharacteristics(characteristics_vec, single)); - }, - GATTType::IncludedService => { - // Step 5. - if !self.service_is_cached(&id) { - return Err(BluetoothError::InvalidState); - } - // Step 6. - let device = match self.device_from_service_id(&id) { - Some(device) => device, - None => return Err(BluetoothError::NotFound), - }; - let primary_service = match self.get_gatt_service(&mut adapter, &id) { - Some(s) => s, - None => return Err(BluetoothError::NotFound), - }; - let services = primary_service.get_includes(device).unwrap_or(vec!()); - let mut services_vec = vec!(); - for service in services { - if let Ok(service_uuid) = service.get_uuid() { - services_vec.push( - BluetoothServiceMsg { - uuid: service_uuid, - is_primary: service.is_primary().unwrap_or(false), - instance_id: service.get_id(), - } - ); - } - } - if let Some(uuid) = uuid { - services_vec.retain(|ref s| s.uuid == uuid); - } - services_vec.retain(|s| !uuid_is_blocklisted(&s.uuid, Blocklist::All)); - - // Step 7. - if services_vec.is_empty() { - return Err(BluetoothError::NotFound); - } - - return Ok(BluetoothResponse::GetIncludedServices(services_vec, single)); - }, - GATTType::Descriptor => { - // Step 5. - if !self.characteristic_is_cached(&id) { - return Err(BluetoothError::InvalidState); - } - // Step 6. - let mut descriptors = self.get_and_cache_gatt_descriptors(&mut adapter, &id); - if let Some(uuid) = uuid { - descriptors.retain(|ref e| e.get_uuid().unwrap_or(String::new()) == uuid); - } - let mut descriptors_vec = vec!(); - for descriptor in descriptors { - if let Ok(uuid) = descriptor.get_uuid() { - descriptors_vec.push( - BluetoothDescriptorMsg { - uuid: uuid, - instance_id: descriptor.get_id(), - } - ); - } - } - - // Step 7. 
- if descriptors_vec.is_empty() { - return Err(BluetoothError::NotFound); - } - return Ok(BluetoothResponse::GetDescriptors(descriptors_vec, single)); - }, - } - } - - // https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetoothremotegattcharacteristic-readvalue - // https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetoothremotegattdescriptor-readvalue - fn read_value(&mut self, id: String) -> BluetoothResponseResult { - // (Characteristic) Step 5.2: Missing because it is optional. - // (Descriptor) Step 5.1: Missing because it is optional. - let mut adapter = self.get_adapter()?; - - // (Characteristic) Step 5.3. - let mut value = self.get_gatt_characteristic(&mut adapter, &id) - .map(|c| c.read_value().unwrap_or(vec![])); - - // (Characteristic) TODO: Step 5.4: Handle all the errors returned from the read_value call. - - // (Descriptor) Step 5.2. - if value.is_none() { - value = self.get_gatt_descriptor(&mut adapter, &id) - .map(|d| d.read_value().unwrap_or(vec![])); - } - - // (Descriptor) TODO: Step 5.3: Handle all the errors returned from the read_value call. - - match value { - // (Characteristic) Step 5.5.4. - // (Descriptor) Step 5.4.3. - Some(v) => return Ok(BluetoothResponse::ReadValue(v)), - - // (Characteristic) Step 4. - // (Descriptor) Step 4. - None => return Err(BluetoothError::InvalidState), - } - } - - // https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetoothremotegattcharacteristic-writevalue - // https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetoothremotegattdescriptor-writevalue - fn write_value(&mut self, id: String, value: Vec) -> BluetoothResponseResult { - // (Characteristic) Step 7.2: Missing because it is optional. - // (Descriptor) Step 7.1: Missing because it is optional. - let mut adapter = self.get_adapter()?; - - // (Characteristic) Step 7.3. - let mut result = self.get_gatt_characteristic(&mut adapter, &id) - .map(|c| c.write_value(value.clone())); - - // (Characteristic) TODO: Step 7.4: Handle all the errors returned from the write_value call. - - // (Descriptor) Step 7.2. - if result.is_none() { - result = self.get_gatt_descriptor(&mut adapter, &id) - .map(|d| d.write_value(value.clone())); - } - - // (Descriptor) TODO: Step 7.3: Handle all the errors returned from the write_value call. - - match result { - Some(v) => match v { - // (Characteristic) Step 7.5.3. - // (Descriptor) Step 7.4.3. - Ok(_) => return Ok(BluetoothResponse::WriteValue(value)), - - // (Characteristic) Step 7.1. - Err(_) => return Err(BluetoothError::NotSupported), - }, - - // (Characteristic) Step 6. - // (Descriptor) Step 6. - None => return Err(BluetoothError::InvalidState), - } - } - - // https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetoothremotegattcharacteristic-startnotifications - // https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetoothremotegattcharacteristic-stopnotifications - fn enable_notification(&mut self, id: String, enable: bool) -> BluetoothResponseResult { - // (StartNotifications) Step 3 - 4. - // (StopNotifications) Step 1 - 2. - if !self.characteristic_is_cached(&id) { - return Err(BluetoothError::InvalidState); - } - - // (StartNotification) TODO: Step 7: Missing because it is optional. - let mut adapter = self.get_adapter()?; - match self.get_gatt_characteristic(&mut adapter, &id) { - Some(c) => { - let result = match enable { - // (StartNotification) Step 8. - // TODO: Handle all the errors returned from the start_notify call. - true => c.start_notify(), - - // (StopNotification) Step 4. 
- false => c.stop_notify(), - }; - match result { - // (StartNotification) Step 11. - // (StopNotification) Step 5. - Ok(_) => return Ok(BluetoothResponse::EnableNotification(())), - - // (StartNotification) Step 5. - Err(_) => return Err(BluetoothError::NotSupported), - } - }, - // (StartNotification) Step 4. - None => return Err(BluetoothError::InvalidState), - } - } - - // https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetoothdevice-watchadvertisements - fn watch_advertisements(&mut self, _device_id: String) -> BluetoothResponseResult { - // Step 2. - // TODO: Implement this when supported in lower level - return Err(BluetoothError::NotSupported); - } - - // https://webbluetoothcg.github.io/web-bluetooth/#dom-bluetooth-getavailability - fn get_availability(&mut self) -> BluetoothResponseResult { - Ok(BluetoothResponse::GetAvailability(self.get_adapter().is_ok())) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/bluetooth/test.rs b/collector/compile-benchmarks/style-servo/components/bluetooth/test.rs deleted file mode 100644 index 29541123d..000000000 --- a/collector/compile-benchmarks/style-servo/components/bluetooth/test.rs +++ /dev/null @@ -1,522 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use BluetoothManager; -use device::bluetooth::{BluetoothAdapter, BluetoothDevice}; -use device::bluetooth::{BluetoothGATTCharacteristic, BluetoothGATTDescriptor, BluetoothGATTService}; -use std::borrow::ToOwned; -use std::cell::RefCell; -use std::collections::{HashSet, HashMap}; -use std::error::Error; -use std::string::String; -use uuid::Uuid; - -thread_local!(pub static CACHED_IDS: RefCell> = RefCell::new(HashSet::new())); - -const ADAPTER_ERROR: &'static str = "No adapter found"; -const WRONG_DATA_SET_ERROR: &'static str = "Wrong data set name was provided"; -const READ_FLAG: &'static str = "read"; -const WRITE_FLAG: &'static str = "write"; -const NOTIFY_FLAG: &'static str = "notify"; - -// Adapter names -// https://cs.chromium.org/chromium/src/content/shell/browser/layout_test/layout_test_bluetooth_adapter_provider.h?l=65 -const NOT_PRESENT_ADAPTER: &'static str = "NotPresentAdapter"; -// https://cs.chromium.org/chromium/src/content/shell/browser/layout_test/layout_test_bluetooth_adapter_provider.h?l=83 -const NOT_POWERED_ADAPTER: &'static str = "NotPoweredAdapter"; -// https://cs.chromium.org/chromium/src/content/shell/browser/layout_test/layout_test_bluetooth_adapter_provider.h?l=118 -const EMPTY_ADAPTER: &'static str = "EmptyAdapter"; -// https://cs.chromium.org/chromium/src/content/shell/browser/layout_test/layout_test_bluetooth_adapter_provider.h?l=126 -const GLUCOSE_HEART_RATE_ADAPTER: &'static str = "GlucoseHeartRateAdapter"; -// https://cs.chromium.org/chromium/src/content/shell/browser/layout_test/layout_test_bluetooth_adapter_provider.h?l=135 -const UNICODE_DEVICE_ADAPTER: &'static str = "UnicodeDeviceAdapter"; -// https://cs.chromium.org/chromium/src/content/shell/browser/layout_test/layout_test_bluetooth_adapter_provider.h?l=205 -const MISSING_SERVICE_HEART_RATE_ADAPTER: &'static str = "MissingServiceHeartRateAdapter"; -// https://cs.chromium.org/chromium/src/content/shell/browser/layout_test/layout_test_bluetooth_adapter_provider.h?l=219 -const MISSING_CHARACTERISTIC_HEART_RATE_ADAPTER: &'static str = "MissingCharacteristicHeartRateAdapter"; -const 
MISSING_DESCRIPTOR_HEART_RATE_ADAPTER: &'static str = "MissingDescriptorHeartRateAdapter"; -// https://cs.chromium.org/chromium/src/content/shell/browser/layout_test/layout_test_bluetooth_adapter_provider.h?l=234 -const HEART_RATE_ADAPTER: &'static str = "HeartRateAdapter"; -// https://cs.chromium.org/chromium/src/content/shell/browser/layout_test/layout_test_bluetooth_adapter_provider.h?l=250 -const EMPTY_NAME_HEART_RATE_ADAPTER: &'static str = "EmptyNameHeartRateAdapter"; -// https://cs.chromium.org/chromium/src/content/shell/browser/layout_test/layout_test_bluetooth_adapter_provider.h?l=267 -const NO_NAME_HEART_RATE_ADAPTER: &'static str = "NoNameHeartRateAdapter"; -// https://cs.chromium.org/chromium/src/content/shell/browser/layout_test/layout_test_bluetooth_adapter_provider.h?l=284 -const TWO_HEART_RATE_SERVICES_ADAPTER: &'static str = "TwoHeartRateServicesAdapter"; -const BLOCKLIST_TEST_ADAPTER: &'static str = "BlocklistTestAdapter"; - -// Device names -const CONNECTABLE_DEVICE_NAME: &'static str = "Connectable Device"; -const EMPTY_DEVICE_NAME: &'static str = ""; -// https://webbluetoothcg.github.io/web-bluetooth/tests.html#glucosedevice -const GLUCOSE_DEVICE_NAME: &'static str = "Glucose Device"; -// https://webbluetoothcg.github.io/web-bluetooth/tests.html#heartratedevice -const HEART_RATE_DEVICE_NAME: &'static str = "Heart Rate Device"; -const UNICODE_DEVICE_NAME: &'static str = "❤❤❤❤❤❤❤❤❤"; - -// Device addresses -const CONNECTABLE_DEVICE_ADDRESS: &'static str = "00:00:00:00:00:04"; -// https://webbluetoothcg.github.io/web-bluetooth/tests.html#glucosedevice -const GLUCOSE_DEVICE_ADDRESS: &'static str = "00:00:00:00:00:02"; -// https://webbluetoothcg.github.io/web-bluetooth/tests.html#heartratedevice -const HEART_RATE_DEVICE_ADDRESS: &'static str = "00:00:00:00:00:03"; -const UNICODE_DEVICE_ADDRESS: &'static str = "00:00:00:00:00:01"; - -// Service UUIDs -const BLOCKLIST_TEST_SERVICE_UUID: &'static str = "611c954a-263b-4f4a-aab6-01ddb953f985"; -// https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.device_information.xml -const DEVICE_INFORMATION_UUID: &'static str = "0000180a-0000-1000-8000-00805f9b34fb"; -// https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.generic_access.xml -const GENERIC_ACCESS_SERVICE_UUID: &'static str = "00001800-0000-1000-8000-00805f9b34fb"; -// https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.glucose.xml -const GLUCOSE_SERVICE_UUID: &'static str = "00001808-0000-1000-8000-00805f9b34fb"; -// https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.heart_rate.xml -const HEART_RATE_SERVICE_UUID: &'static str = "0000180d-0000-1000-8000-00805f9b34fb"; -// https://www.bluetooth.com/specifications/gatt/ -// viewer?attributeXmlFile=org.bluetooth.service.human_interface_device.xml -const HUMAN_INTERFACE_DEVICE_SERVICE_UUID: &'static str = "00001812-0000-1000-8000-00805f9b34fb"; -// https://www.bluetooth.com/specifications/gatt/viewer?attributeXmlFile=org.bluetooth.service.tx_power.xml -const TX_POWER_SERVICE_UUID: &'static str = "00001804-0000-1000-8000-00805f9b34fb"; - -// Characteristic UUIDs -const BLOCKLIST_EXCLUDE_READS_CHARACTERISTIC_UUID: &'static str = "bad1c9a2-9a5b-4015-8b60-1579bbbf2135"; -// https://www.bluetooth.com/specifications/gatt/ -// viewer?attributeXmlFile=org.bluetooth.characteristic.body_sensor_location.xml -const BODY_SENSOR_LOCATION_CHARACTERISTIC_UUID: &'static str = 
"00002a38-0000-1000-8000-00805f9b34fb"; -// https://www.bluetooth.com/specifications/gatt/ -// viewer?attributeXmlFile=org.bluetooth.characteristic.gap.device_name.xml -const DEVICE_NAME_CHARACTERISTIC_UUID: &'static str = "00002a00-0000-1000-8000-00805f9b34fb"; -// https://www.bluetooth.com/specifications/gatt/ -// viewer?attributeXmlFile=org.bluetooth.characteristic.heart_rate_measurement.xml -const HEART_RATE_MEASUREMENT_CHARACTERISTIC_UUID: &'static str = "00002a37-0000-1000-8000-00805f9b34fb"; -// https://www.bluetooth.com/specifications/gatt/ -// viewer?attributeXmlFile=org.bluetooth.characteristic.gap.peripheral_privacy_flag.xml -const PERIPHERAL_PRIVACY_FLAG_CHARACTERISTIC_UUID: &'static str = "00002a02-0000-1000-8000-00805f9b34fb"; -// https://www.bluetooth.com/specifications/gatt/ -// viewer?attributeXmlFile=org.bluetooth.characteristic.serial_number_string.xml -const SERIAL_NUMBER_STRING_UUID: &'static str = "00002a25-0000-1000-8000-00805f9b34fb"; - -// Descriptor UUIDs -const BLOCKLIST_EXCLUDE_READS_DESCRIPTOR_UUID: &'static str = "aaaaaaaa-aaaa-1181-0510-810819516110"; -const BLOCKLIST_DESCRIPTOR_UUID: &'static str = "07711111-6104-0970-7011-1107105110aa"; -// https://www.bluetooth.com/specifications/gatt/ -// viewer?attributeXmlFile=org.bluetooth.descriptor.gatt.characteristic_user_description.xml -const CHARACTERISTIC_USER_DESCRIPTION_UUID: &'static str = "00002901-0000-1000-8000-00805f9b34fb"; -// https://www.bluetooth.com/specifications/gatt/ -// viewer?attributeXmlFile=org.bluetooth.descriptor.gatt.client_characteristic_configuration.xml -const CLIENT_CHARACTERISTIC_CONFIGURATION_UUID: &'static str = "00002902-0000-1000-8000-00805f9b34fb"; -// https://www.bluetooth.com/specifications/gatt/ -// viewer?attributeXmlFile=org.bluetooth.descriptor.number_of_digitals.xml -const NUMBER_OF_DIGITALS_UUID: &'static str = "00002909-0000-1000-8000-00805f9b34fb"; - -const HEART_RATE_DEVICE_NAME_DESCRIPTION: &'static str = "The name of this device."; - -fn generate_id() -> Uuid { - let mut id = Uuid::nil(); - let mut generated = false; - while !generated { - id = Uuid::new_v4(); - CACHED_IDS.with(|cache| - if !cache.borrow().contains(&id) { - cache.borrow_mut().insert(id.clone()); - generated = true; - } - ); - } - id -} - -// Set the adapter's name, is_powered and is_discoverable attributes -fn set_adapter(adapter: &BluetoothAdapter, adapter_name: String) -> Result<(), Box> { - adapter.set_name(adapter_name)?; - adapter.set_powered(true)?; - adapter.set_discoverable(true)?; - Ok(()) -} - -// Create Device -fn create_device(adapter: &BluetoothAdapter, - name: String, - address: String) - -> Result> { - let device = BluetoothDevice::create_mock_device(adapter.clone(), generate_id().to_string())?; - device.set_name(Some(name))?; - device.set_address(address)?; - device.set_connectable(true)?; - Ok(device) -} - -// Create Device with UUIDs -fn create_device_with_uuids(adapter: &BluetoothAdapter, - name: String, - address: String, - uuids: Vec) - -> Result> { - let device = create_device(adapter, name, address)?; - device.set_uuids(uuids)?; - Ok(device) -} - -// Create Service -fn create_service(device: &BluetoothDevice, - uuid: String) - -> Result> { - let service = BluetoothGATTService::create_mock_service(device.clone(), generate_id().to_string())?; - service.set_uuid(uuid)?; - Ok(service) -} - -// Create Characteristic -fn create_characteristic(service: &BluetoothGATTService, - uuid: String) - -> Result> { - let characteristic = - 
BluetoothGATTCharacteristic::create_mock_characteristic(service.clone(), generate_id().to_string())?; - characteristic.set_uuid(uuid)?; - Ok(characteristic) -} - -// Create Characteristic with value -fn create_characteristic_with_value(service: &BluetoothGATTService, - uuid: String, - value: Vec) - -> Result> { - let characteristic = create_characteristic(service, uuid)?; - characteristic.set_value(value)?; - Ok(characteristic) -} - -// Create Descriptor -fn create_descriptor(characteristic: &BluetoothGATTCharacteristic, - uuid: String) - -> Result> { - let descriptor = - BluetoothGATTDescriptor::create_mock_descriptor(characteristic.clone(), generate_id().to_string())?; - descriptor.set_uuid(uuid)?; - Ok(descriptor) -} - -// Create Descriptor with value -fn create_descriptor_with_value(characteristic: &BluetoothGATTCharacteristic, - uuid: String, - value: Vec) - -> Result> { - let descriptor = create_descriptor(characteristic, uuid)?; - descriptor.set_value(value)?; - Ok(descriptor) -} - -fn create_heart_rate_service(device: &BluetoothDevice, - empty: bool) - -> Result> { - // Heart Rate Service - let heart_rate_service = create_service(device, HEART_RATE_SERVICE_UUID.to_owned())?; - - if empty { - return Ok(heart_rate_service) - } - - // Heart Rate Measurement Characteristic - let heart_rate_measurement_characteristic = - create_characteristic_with_value(&heart_rate_service, - HEART_RATE_MEASUREMENT_CHARACTERISTIC_UUID.to_owned(), - vec![0])?; - heart_rate_measurement_characteristic.set_flags(vec![NOTIFY_FLAG.to_string(), - READ_FLAG.to_string(), - WRITE_FLAG.to_string()])?; - - // Body Sensor Location Characteristic 1 - let body_sensor_location_characteristic_1 = - create_characteristic_with_value(&heart_rate_service, - BODY_SENSOR_LOCATION_CHARACTERISTIC_UUID.to_owned(), - vec![49])?; - body_sensor_location_characteristic_1.set_flags(vec![READ_FLAG.to_string(), WRITE_FLAG.to_string()])?; - - // Body Sensor Location Characteristic 2 - let body_sensor_location_characteristic_2 = - create_characteristic_with_value(&heart_rate_service, - BODY_SENSOR_LOCATION_CHARACTERISTIC_UUID.to_owned(), - vec![50])?; - body_sensor_location_characteristic_2.set_flags(vec![READ_FLAG.to_string(), WRITE_FLAG.to_string()])?; - Ok(heart_rate_service) -} - -fn create_generic_access_service(device: &BluetoothDevice, - empty: bool) - -> Result> { - // Generic Access Service - let generic_access_service = - create_service(device, GENERIC_ACCESS_SERVICE_UUID.to_owned())?; - - if empty { - return Ok(generic_access_service) - } - - // Device Name Characteristic - let device_name_characteristic = - create_characteristic_with_value(&generic_access_service, - DEVICE_NAME_CHARACTERISTIC_UUID.to_owned(), - HEART_RATE_DEVICE_NAME.as_bytes().to_vec())?; - device_name_characteristic.set_flags(vec![READ_FLAG.to_string(), WRITE_FLAG.to_string()])?; - - // Number of Digitals descriptor - let number_of_digitals_descriptor_1 = - create_descriptor_with_value(&device_name_characteristic, - NUMBER_OF_DIGITALS_UUID.to_owned(), - vec![49])?; - number_of_digitals_descriptor_1.set_flags(vec![READ_FLAG.to_string(), WRITE_FLAG.to_string()])?; - - let number_of_digitals_descriptor_2 = - create_descriptor_with_value(&device_name_characteristic, - NUMBER_OF_DIGITALS_UUID.to_owned(), - vec![50])?; - number_of_digitals_descriptor_2.set_flags(vec![READ_FLAG.to_string(), WRITE_FLAG.to_string()])?; - - // Characteristic User Description Descriptor - let _characteristic_user_description = - 
create_descriptor_with_value(&device_name_characteristic, - CHARACTERISTIC_USER_DESCRIPTION_UUID.to_owned(), - HEART_RATE_DEVICE_NAME_DESCRIPTION.as_bytes().to_vec())?; - - // Client Characteristic Configuration descriptor - let _client_characteristic_configuration = - create_descriptor_with_value(&device_name_characteristic, - CLIENT_CHARACTERISTIC_CONFIGURATION_UUID.to_owned(), - vec![0])?; - - // Peripheral Privacy Flag Characteristic - let peripheral_privacy_flag_characteristic = - create_characteristic(&generic_access_service, PERIPHERAL_PRIVACY_FLAG_CHARACTERISTIC_UUID.to_owned())?; - peripheral_privacy_flag_characteristic - .set_flags(vec![READ_FLAG.to_string(), WRITE_FLAG.to_string()])?; - Ok(generic_access_service) -} - -// Create Heart Rate Device -fn create_heart_rate_device(adapter: &BluetoothAdapter, - empty: bool) - -> Result> { - // Heart Rate Device - let heart_rate_device = - create_device_with_uuids(adapter, - HEART_RATE_DEVICE_NAME.to_owned(), - HEART_RATE_DEVICE_ADDRESS.to_owned(), - vec![GENERIC_ACCESS_SERVICE_UUID.to_owned(), - HEART_RATE_SERVICE_UUID.to_owned()])?; - - if empty { - return Ok(heart_rate_device); - } - - // Generic Access Service - let _generic_access_service = create_generic_access_service(&heart_rate_device, false)?; - - // Heart Rate Service - let _heart_rate_service = create_heart_rate_service(&heart_rate_device, false)?; - - Ok(heart_rate_device) -} - -fn create_missing_characterisitc_heart_rate_device(adapter: &BluetoothAdapter) -> Result<(), Box> { - let heart_rate_device_empty = create_heart_rate_device(adapter, true)?; - - let _generic_access_service_empty = create_generic_access_service(&heart_rate_device_empty, true)?; - - let _heart_rate_service_empty = create_heart_rate_service(&heart_rate_device_empty, true)?; - - Ok(()) -} - -fn create_missing_descriptor_heart_rate_device(adapter: &BluetoothAdapter) -> Result<(), Box> { - let heart_rate_device_empty = create_heart_rate_device(adapter, true)?; - - let generic_access_service_empty = create_generic_access_service(&heart_rate_device_empty, true)?; - - let _device_name_characteristic = - create_characteristic_with_value(&generic_access_service_empty, - DEVICE_NAME_CHARACTERISTIC_UUID.to_owned(), - HEART_RATE_DEVICE_NAME.as_bytes().to_vec())?; - - let peripheral_privacy_flag_characteristic = - create_characteristic(&generic_access_service_empty, - PERIPHERAL_PRIVACY_FLAG_CHARACTERISTIC_UUID.to_owned())?; - peripheral_privacy_flag_characteristic.set_flags(vec![READ_FLAG.to_string(), WRITE_FLAG.to_string()])?; - - let _heart_rate_service = create_heart_rate_service(&heart_rate_device_empty, false)?; - - Ok(()) -} - -fn create_two_heart_rate_services_device(adapter: &BluetoothAdapter) -> Result<(), Box> { - let heart_rate_device_empty = create_heart_rate_device(adapter, true)?; - - heart_rate_device_empty.set_uuids(vec![GENERIC_ACCESS_SERVICE_UUID.to_owned(), - HEART_RATE_SERVICE_UUID.to_owned(), - HEART_RATE_SERVICE_UUID.to_owned()])?; - - let _generic_access_service = create_generic_access_service(&heart_rate_device_empty, false)?; - - let heart_rate_service_empty_1 = create_heart_rate_service(&heart_rate_device_empty, true)?; - - let heart_rate_service_empty_2 = create_heart_rate_service(&heart_rate_device_empty, true)?; - - let heart_rate_measurement_characteristic = - create_characteristic_with_value(&heart_rate_service_empty_1, - HEART_RATE_MEASUREMENT_CHARACTERISTIC_UUID.to_owned(), - vec![0])?; - heart_rate_measurement_characteristic.set_flags(vec![NOTIFY_FLAG.to_string()])?; - - let 
_body_sensor_location_characteristic_1 = - create_characteristic_with_value(&heart_rate_service_empty_1, - BODY_SENSOR_LOCATION_CHARACTERISTIC_UUID.to_owned(), - vec![49])?; - - let _body_sensor_location_characteristic_2 = - create_characteristic_with_value(&heart_rate_service_empty_2, - BODY_SENSOR_LOCATION_CHARACTERISTIC_UUID.to_owned(), - vec![50])?; - Ok(()) -} - -fn create_blocklisted_device(adapter: &BluetoothAdapter) -> Result<(), Box> { - let connectable_device = - create_device_with_uuids(adapter, - CONNECTABLE_DEVICE_NAME.to_owned(), - CONNECTABLE_DEVICE_ADDRESS.to_owned(), - vec![BLOCKLIST_TEST_SERVICE_UUID.to_owned(), - DEVICE_INFORMATION_UUID.to_owned(), - GENERIC_ACCESS_SERVICE_UUID.to_owned(), - HEART_RATE_SERVICE_UUID.to_owned(), - HUMAN_INTERFACE_DEVICE_SERVICE_UUID.to_owned()])?; - - let blocklist_test_service = create_service(&connectable_device, BLOCKLIST_TEST_SERVICE_UUID.to_owned())?; - - let blocklist_exclude_reads_characteristic = - create_characteristic(&blocklist_test_service, - BLOCKLIST_EXCLUDE_READS_CHARACTERISTIC_UUID.to_owned())?; - blocklist_exclude_reads_characteristic - .set_flags(vec![READ_FLAG.to_string(), WRITE_FLAG.to_string()])?; - - let _blocklist_exclude_reads_descriptor = - create_descriptor_with_value(&blocklist_exclude_reads_characteristic, - BLOCKLIST_EXCLUDE_READS_DESCRIPTOR_UUID.to_owned(), - vec![54; 3])?; - - let _blocklist_descriptor = - create_descriptor_with_value(&blocklist_exclude_reads_characteristic, - BLOCKLIST_DESCRIPTOR_UUID.to_owned(), - vec![54; 3])?; - - let device_information_service = create_service(&connectable_device, DEVICE_INFORMATION_UUID.to_owned())?; - - let _serial_number_string_characteristic = - create_characteristic(&device_information_service, SERIAL_NUMBER_STRING_UUID.to_owned())?; - - let _generic_access_service = create_generic_access_service(&connectable_device, false)?; - - let _heart_rate_service = create_heart_rate_service(&connectable_device, false)?; - - let _human_interface_device_service = - create_service(&connectable_device, HUMAN_INTERFACE_DEVICE_SERVICE_UUID.to_owned())?; - Ok(()) -} - -fn create_glucose_heart_rate_devices(adapter: &BluetoothAdapter) -> Result<(), Box> { - let glucose_devie = create_device_with_uuids(adapter, - GLUCOSE_DEVICE_NAME.to_owned(), - GLUCOSE_DEVICE_ADDRESS.to_owned(), - vec![GLUCOSE_SERVICE_UUID.to_owned(), - TX_POWER_SERVICE_UUID.to_owned()])?; - - let heart_rate_device_empty = create_heart_rate_device(adapter, true)?; - - let mut manufacturer_dta = HashMap::new(); - manufacturer_dta.insert(17, vec![1, 2, 3]); - glucose_devie.set_manufacturer_data(manufacturer_dta)?; - - let mut service_data = HashMap::new(); - service_data.insert(GLUCOSE_SERVICE_UUID.to_owned(), vec![1, 2, 3]); - glucose_devie.set_service_data(service_data)?; - - service_data = HashMap::new(); - service_data.insert(HEART_RATE_SERVICE_UUID.to_owned(), vec![1, 2, 3]); - heart_rate_device_empty.set_service_data(service_data)?; - Ok(()) -} - -pub fn test(manager: &mut BluetoothManager, data_set_name: String) -> Result<(), Box> { - let may_existing_adapter = manager.get_or_create_adapter(); - let adapter = match may_existing_adapter.as_ref() { - Some(adapter) => adapter, - None => return Err(Box::from(ADAPTER_ERROR.to_string())), - }; - match data_set_name.as_str() { - NOT_PRESENT_ADAPTER => { - set_adapter(adapter, NOT_PRESENT_ADAPTER.to_owned())?; - adapter.set_present(false)?; - }, - NOT_POWERED_ADAPTER => { - set_adapter(adapter, NOT_POWERED_ADAPTER.to_owned())?; - adapter.set_powered(false)?; - }, - 
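The mock devices, services, characteristics and descriptors built above are all keyed by `generate_id`, which retries `Uuid::new_v4` until it produces a value not already present in a thread-local cache. A minimal standalone sketch of that pattern follows; it assumes the `uuid` crate with its `v4` feature, and `SEEN_IDS` / `generate_unique_id` are hypothetical names standing in for the file's `CACHED_IDS` and `generate_id`.

use std::cell::RefCell;
use std::collections::HashSet;
use uuid::Uuid;

thread_local! {
    // Stand-in for the CACHED_IDS thread-local used by the original file.
    static SEEN_IDS: RefCell<HashSet<Uuid>> = RefCell::new(HashSet::new());
}

// Keep drawing random v4 UUIDs until one has not been handed out on this thread yet.
fn generate_unique_id() -> Uuid {
    loop {
        let id = Uuid::new_v4();
        // HashSet::insert returns true only if the value was not already cached.
        let fresh = SEEN_IDS.with(|cache| cache.borrow_mut().insert(id));
        if fresh {
            return id;
        }
    }
}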
EMPTY_ADAPTER => { - set_adapter(adapter, EMPTY_ADAPTER.to_owned())?; - }, - GLUCOSE_HEART_RATE_ADAPTER => { - set_adapter(adapter, GLUCOSE_HEART_RATE_ADAPTER.to_owned())?; - let _ = create_glucose_heart_rate_devices(adapter)?; - }, - UNICODE_DEVICE_ADAPTER => { - set_adapter(adapter, UNICODE_DEVICE_ADAPTER.to_owned())?; - - let _unicode_device = create_device(adapter, - UNICODE_DEVICE_NAME.to_owned(), - UNICODE_DEVICE_ADDRESS.to_owned())?; - }, - MISSING_SERVICE_HEART_RATE_ADAPTER => { - set_adapter(adapter, MISSING_SERVICE_HEART_RATE_ADAPTER.to_owned())?; - - let _heart_rate_device_empty = create_heart_rate_device(adapter, true)?; - }, - MISSING_CHARACTERISTIC_HEART_RATE_ADAPTER => { - set_adapter(adapter, MISSING_CHARACTERISTIC_HEART_RATE_ADAPTER.to_owned())?; - - let _ = create_missing_characterisitc_heart_rate_device(adapter)?; - }, - MISSING_DESCRIPTOR_HEART_RATE_ADAPTER => { - set_adapter(adapter, MISSING_DESCRIPTOR_HEART_RATE_ADAPTER.to_owned())?; - - let _ = create_missing_descriptor_heart_rate_device(adapter)?; - }, - HEART_RATE_ADAPTER => { - set_adapter(adapter, HEART_RATE_ADAPTER.to_owned())?; - - let _heart_rate_device = create_heart_rate_device(adapter, false)?; - }, - EMPTY_NAME_HEART_RATE_ADAPTER => { - set_adapter(adapter, EMPTY_NAME_HEART_RATE_ADAPTER.to_owned())?; - - let heart_rate_device = create_heart_rate_device(adapter, false)?; - heart_rate_device.set_name(Some(EMPTY_DEVICE_NAME.to_owned()))?; - }, - NO_NAME_HEART_RATE_ADAPTER => { - set_adapter(adapter, NO_NAME_HEART_RATE_ADAPTER.to_owned())?; - - let heart_rate_device = create_heart_rate_device(adapter, false)?; - heart_rate_device.set_name(None)?; - }, - TWO_HEART_RATE_SERVICES_ADAPTER => { - set_adapter(adapter, TWO_HEART_RATE_SERVICES_ADAPTER.to_owned())?; - - let _ = create_two_heart_rate_services_device(adapter)?; - }, - BLOCKLIST_TEST_ADAPTER => { - set_adapter(adapter, BLOCKLIST_TEST_ADAPTER.to_owned())?; - - let _ = create_blocklisted_device(adapter)?; - }, - _ => return Err(Box::from(WRONG_DATA_SET_ERROR.to_string())), - } - return Ok(()); -} diff --git a/collector/compile-benchmarks/style-servo/components/bluetooth_traits/Cargo.toml b/collector/compile-benchmarks/style-servo/components/bluetooth_traits/Cargo.toml deleted file mode 100644 index d67faf596..000000000 --- a/collector/compile-benchmarks/style-servo/components/bluetooth_traits/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "bluetooth_traits" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "bluetooth_traits" -path = "lib.rs" - -[dependencies] -ipc-channel = "0.8" -regex = "0.2" -serde = "1.0" -servo_config = {path = "../config"} diff --git a/collector/compile-benchmarks/style-servo/components/bluetooth_traits/blocklist.rs b/collector/compile-benchmarks/style-servo/components/bluetooth_traits/blocklist.rs deleted file mode 100644 index 26df3413c..000000000 --- a/collector/compile-benchmarks/style-servo/components/bluetooth_traits/blocklist.rs +++ /dev/null @@ -1,124 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -use regex::Regex; -use servo_config::resource_files::read_resource_file; -use std::cell::RefCell; -use std::collections::HashMap; -use std::io::BufRead; -use std::string::String; - -const BLOCKLIST_FILE: &'static str = "gatt_blocklist.txt"; -const BLOCKLIST_FILE_NOT_FOUND: &'static str = "Could not find gatt_blocklist.txt file"; -const EXCLUDE_READS: &'static str = "exclude-reads"; -const EXCLUDE_WRITES: &'static str = "exclude-writes"; -const VALID_UUID_REGEX: &'static str = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}"; - -thread_local!(pub static BLUETOOTH_BLOCKLIST: RefCell = - RefCell::new(BluetoothBlocklist(parse_blocklist()))); - -pub fn uuid_is_blocklisted(uuid: &str, exclude_type: Blocklist) -> bool { - BLUETOOTH_BLOCKLIST.with(|blist| { - match exclude_type { - Blocklist::All => { - blist.borrow().is_blocklisted(uuid) - }, - Blocklist::Reads => { - blist.borrow().is_blocklisted_for_reads(uuid) - } - Blocklist::Writes => { - blist.borrow().is_blocklisted_for_writes(uuid) - } - } - }) -} - -pub struct BluetoothBlocklist(Option>); - -#[derive(Eq, PartialEq)] -pub enum Blocklist { - All, // Read and Write - Reads, - Writes, -} - -impl BluetoothBlocklist { - // https://webbluetoothcg.github.io/web-bluetooth/#blocklisted - pub fn is_blocklisted(&self, uuid: &str) -> bool { - match self.0 { - Some(ref map) => map.get(uuid).map_or(false, |et| et.eq(&Blocklist::All)), - None => false, - } - } - - // https://webbluetoothcg.github.io/web-bluetooth/#blocklisted-for-reads - pub fn is_blocklisted_for_reads(&self, uuid: &str) -> bool { - match self.0 { - Some(ref map) => map.get(uuid).map_or(false, |et| et.eq(&Blocklist::All) || - et.eq(&Blocklist::Reads)), - None => false, - } - } - - // https://webbluetoothcg.github.io/web-bluetooth/#blocklisted-for-writes - pub fn is_blocklisted_for_writes(&self, uuid: &str) -> bool { - match self.0 { - Some(ref map) => map.get(uuid).map_or(false, |et| et.eq(&Blocklist::All) || - et.eq(&Blocklist::Writes)), - None => false, - } - } -} - -// https://webbluetoothcg.github.io/web-bluetooth/#parsing-the-blocklist -fn parse_blocklist() -> Option> { - // Step 1 missing, currently we parse ./resources/gatt_blocklist.txt. - let valid_uuid_regex = Regex::new(VALID_UUID_REGEX).unwrap(); - let content = read_resource_file(BLOCKLIST_FILE).expect(BLOCKLIST_FILE_NOT_FOUND); - // Step 3 - let mut result = HashMap::new(); - // Step 2 and 4 - for line in content.lines() { - let line = match line { - Ok(l) => l, - Err(_) => return None, - }; - // Step 4.1 - if line.is_empty() || line.starts_with('#') { - continue; - } - let mut exclude_type = Blocklist::All; - let mut words = line.split_whitespace(); - let uuid = match words.next() { - Some(uuid) => uuid, - None => continue, - }; - if !valid_uuid_regex.is_match(uuid) { - return None; - } - match words.next() { - // Step 4.2 We already have an initialized exclude_type variable with Blocklist::All. 
- None => {}, - // Step 4.3 - Some(EXCLUDE_READS) => { - exclude_type = Blocklist::Reads; - }, - Some(EXCLUDE_WRITES) => { - exclude_type = Blocklist::Writes; - }, - // Step 4.4 - _ => { - return None; - }, - } - // Step 4.5 - if result.contains_key(uuid) { - return None; - } - // Step 4.6 - result.insert(uuid.to_string(), exclude_type); - } - // Step 5 - return Some(result); -} diff --git a/collector/compile-benchmarks/style-servo/components/bluetooth_traits/lib.rs b/collector/compile-benchmarks/style-servo/components/bluetooth_traits/lib.rs deleted file mode 100644 index 31e644bd8..000000000 --- a/collector/compile-benchmarks/style-servo/components/bluetooth_traits/lib.rs +++ /dev/null @@ -1,112 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -extern crate ipc_channel; -extern crate regex; -#[macro_use] extern crate serde; -extern crate servo_config; - -pub mod blocklist; -pub mod scanfilter; - -use ipc_channel::ipc::IpcSender; -use scanfilter::{BluetoothScanfilterSequence, RequestDeviceoptions}; - -#[derive(Deserialize, Serialize)] -pub enum BluetoothError { - Type(String), - Network, - NotFound, - NotSupported, - Security, - InvalidState, -} - -#[derive(Deserialize, Serialize)] -pub enum GATTType { - PrimaryService, - Characteristic, - IncludedService, - Descriptor, -} - -#[derive(Deserialize, Serialize)] -pub struct BluetoothDeviceMsg { - // Bluetooth Device properties - pub id: String, - pub name: Option, -} - -#[derive(Deserialize, Serialize)] -pub struct BluetoothServiceMsg { - pub uuid: String, - pub is_primary: bool, - pub instance_id: String, -} - -#[derive(Deserialize, Serialize)] -pub struct BluetoothCharacteristicMsg { - // Characteristic - pub uuid: String, - pub instance_id: String, - // Characteristic properties - pub broadcast: bool, - pub read: bool, - pub write_without_response: bool, - pub write: bool, - pub notify: bool, - pub indicate: bool, - pub authenticated_signed_writes: bool, - pub reliable_write: bool, - pub writable_auxiliaries: bool, -} - -#[derive(Deserialize, Serialize)] -pub struct BluetoothDescriptorMsg { - pub uuid: String, - pub instance_id: String, -} - -pub type BluetoothServicesMsg = Vec; - -pub type BluetoothCharacteristicsMsg = Vec; - -pub type BluetoothDescriptorsMsg = Vec; - -pub type BluetoothResult = Result; - -pub type BluetoothResponseResult = Result; - -#[derive(Deserialize, Serialize)] -pub enum BluetoothRequest { - RequestDevice(RequestDeviceoptions, IpcSender), - GATTServerConnect(String, IpcSender), - GATTServerDisconnect(String, IpcSender>), - GetGATTChildren(String, Option, bool, GATTType, IpcSender), - ReadValue(String, IpcSender), - WriteValue(String, Vec, IpcSender), - EnableNotification(String, bool, IpcSender), - WatchAdvertisements(String, IpcSender), - SetRepresentedToNull(Vec, Vec, Vec), - IsRepresentedDeviceNull(String, IpcSender), - GetAvailability(IpcSender), - MatchesFilter(String, BluetoothScanfilterSequence, IpcSender>), - Test(String, IpcSender>), - Exit, -} - -#[derive(Deserialize, Serialize)] -pub enum BluetoothResponse { - RequestDevice(BluetoothDeviceMsg), - GATTServerConnect(bool), - GetPrimaryServices(BluetoothServicesMsg, bool), - GetIncludedServices(BluetoothServicesMsg, bool), - GetCharacteristics(BluetoothCharacteristicsMsg, bool), - GetDescriptors(BluetoothDescriptorsMsg, bool), - ReadValue(Vec), - WriteValue(Vec), - EnableNotification(()), - 
WatchAdvertisements(()), - GetAvailability(bool), -} diff --git a/collector/compile-benchmarks/style-servo/components/bluetooth_traits/scanfilter.rs b/collector/compile-benchmarks/style-servo/components/bluetooth_traits/scanfilter.rs deleted file mode 100644 index a3b33f684..000000000 --- a/collector/compile-benchmarks/style-servo/components/bluetooth_traits/scanfilter.rs +++ /dev/null @@ -1,137 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use std::collections::{HashMap, HashSet}; -use std::slice::Iter; - -// A device name can never be longer than 29 bytes. An adv packet is at most -// 31 bytes long. The length and identifier of the length field take 2 bytes. -// That leaves 29 bytes for the name. -const MAX_NAME_LENGTH: usize = 29; - -#[derive(Deserialize, Serialize)] -pub struct ServiceUUIDSequence(Vec); - -impl ServiceUUIDSequence { - pub fn new(vec: Vec) -> ServiceUUIDSequence { - ServiceUUIDSequence(vec) - } - - fn get_services_set(&self) -> HashSet { - self.0.iter().map(String::clone).collect() - } -} - -type ManufacturerData = HashMap, Vec)>; -type ServiceData = HashMap, Vec)>; - -#[derive(Deserialize, Serialize)] -pub struct BluetoothScanfilter { - name: Option, - name_prefix: String, - services: ServiceUUIDSequence, - manufacturer_data: Option, - service_data: Option, -} - -impl BluetoothScanfilter { - pub fn new(name: Option, - name_prefix: String, - services: Vec, - manufacturer_data: Option, - service_data: Option) - -> BluetoothScanfilter { - BluetoothScanfilter { - name: name, - name_prefix: name_prefix, - services: ServiceUUIDSequence::new(services), - manufacturer_data: manufacturer_data, - service_data: service_data, - } - } - - pub fn get_name(&self) -> Option<&str> { - self.name.as_ref().map(|s| s.as_str()) - } - - pub fn get_name_prefix(&self) -> &str { - &self.name_prefix - } - - pub fn get_services(&self) -> &[String] { - &self.services.0 - } - - pub fn get_manufacturer_data(&self) -> Option<&ManufacturerData> { - self.manufacturer_data.as_ref() - } - - pub fn get_service_data(&self) -> Option<&ServiceData> { - self.service_data.as_ref() - } - - pub fn is_empty_or_invalid(&self) -> bool { - (self.name.is_none() && - self.name_prefix.is_empty() && - self.get_services().is_empty() && - self.manufacturer_data.is_none() && - self.service_data.is_none()) || - self.get_name().unwrap_or("").len() > MAX_NAME_LENGTH || - self.name_prefix.len() > MAX_NAME_LENGTH - } -} - -#[derive(Deserialize, Serialize)] -pub struct BluetoothScanfilterSequence(Vec); - -impl BluetoothScanfilterSequence { - pub fn new(vec: Vec) -> BluetoothScanfilterSequence { - BluetoothScanfilterSequence(vec) - } - - pub fn has_empty_or_invalid_filter(&self) -> bool { - self.0.iter().any(BluetoothScanfilter::is_empty_or_invalid) - } - - pub fn iter(&self) -> Iter { - self.0.iter() - } - - fn get_services_set(&self) -> HashSet { - self.iter().flat_map(|filter| filter.services.get_services_set()).collect() - } - - fn is_empty(&self) -> bool { - self.0.is_empty() - } -} - -#[derive(Deserialize, Serialize)] -pub struct RequestDeviceoptions { - filters: BluetoothScanfilterSequence, - optional_services: ServiceUUIDSequence, -} - -impl RequestDeviceoptions { - pub fn new(filters: BluetoothScanfilterSequence, - services: ServiceUUIDSequence) - -> RequestDeviceoptions { - RequestDeviceoptions { - filters: filters, - optional_services: services, - } 
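`is_empty_or_invalid` above rejects any filter whose name or name prefix exceeds `MAX_NAME_LENGTH`, for the reason spelled out in the comment: a 31-byte advertising packet minus the 2-byte length/type header leaves 29 bytes for the name. A small sketch of just that bound; `name_fits_in_adv_packet` is a hypothetical helper, and the limit is in UTF-8 bytes, which is what `str::len()` measures.

// A 31-byte advertising packet minus the 2-byte length/type header leaves
// 29 bytes for the device name.
const MAX_NAME_LENGTH: usize = 29;

// The cap is on UTF-8 bytes, so a name with multi-byte characters can hit it
// well before reaching 29 characters.
fn name_fits_in_adv_packet(name: &str) -> bool {
    name.len() <= MAX_NAME_LENGTH
}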
- } - - pub fn get_filters(&self) -> &BluetoothScanfilterSequence { - &self.filters - } - - pub fn get_services_set(&self) -> HashSet { - &self.filters.get_services_set() | &self.optional_services.get_services_set() - } - - pub fn is_accepting_all_devices(&self) -> bool { - self.filters.is_empty() - } -} diff --git a/collector/compile-benchmarks/style-servo/components/canvas/Cargo.toml b/collector/compile-benchmarks/style-servo/components/canvas/Cargo.toml deleted file mode 100644 index 00eb30236..000000000 --- a/collector/compile-benchmarks/style-servo/components/canvas/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -[package] -name = "canvas" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "canvas" -path = "lib.rs" - -[dependencies] -azure = {git = "https://github.com/servo/rust-azure"} -canvas_traits = {path = "../canvas_traits"} -compositing = {path = "../compositing"} -cssparser = "0.22.0" -euclid = "0.15" -fnv = "1.0" -gleam = "0.4" -ipc-channel = "0.8" -log = "0.3.5" -num-traits = "0.1.32" -offscreen_gl_context = { version = "0.11", features = ["serde", "osmesa"] } -webrender = {git = "https://github.com/servo/webrender"} -webrender_api = {git = "https://github.com/servo/webrender", features = ["ipc"]} diff --git a/collector/compile-benchmarks/style-servo/components/canvas/canvas_paint_thread.rs b/collector/compile-benchmarks/style-servo/components/canvas/canvas_paint_thread.rs deleted file mode 100644 index 28b7fec54..000000000 --- a/collector/compile-benchmarks/style-servo/components/canvas/canvas_paint_thread.rs +++ /dev/null @@ -1,1088 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -use azure::azure::AzFloat; -use azure::azure_hl::{AntialiasMode, CapStyle, CompositionOp, JoinStyle}; -use azure::azure_hl::{BackendType, DrawOptions, DrawTarget, Pattern, StrokeOptions, SurfaceFormat}; -use azure::azure_hl::{Color, ColorPattern, DrawSurfaceOptions, Filter, PathBuilder}; -use azure::azure_hl::{ExtendMode, GradientStop, LinearGradientPattern, RadialGradientPattern}; -use azure::azure_hl::SurfacePattern; -use canvas_traits::canvas::*; -use cssparser::RGBA; -use euclid::{Transform2D, Point2D, Vector2D, Rect, Size2D}; -use ipc_channel::ipc::{self, IpcSender}; -use num_traits::ToPrimitive; -use std::borrow::ToOwned; -use std::mem; -use std::sync::Arc; -use std::thread; -use webrender_api; - -impl<'a> CanvasPaintThread<'a> { - /// It reads image data from the canvas - /// canvas_size: The size of the canvas we're reading from - /// read_rect: The area of the canvas we want to read from - fn read_pixels(&self, read_rect: Rect, canvas_size: Size2D) -> Vec{ - let canvas_size = canvas_size.to_i32(); - let canvas_rect = Rect::new(Point2D::new(0i32, 0i32), canvas_size); - let src_read_rect = canvas_rect.intersection(&read_rect).unwrap_or(Rect::zero()); - - let mut image_data = vec![]; - if src_read_rect.is_empty() || canvas_size.width <= 0 && canvas_size.height <= 0 { - return image_data; - } - - let data_surface = self.drawtarget.snapshot().get_data_surface(); - let mut src_data = Vec::new(); - data_surface.with_data(|element| { src_data = element.to_vec(); }); - let stride = data_surface.stride(); - - //start offset of the copyable rectangle - let mut src = (src_read_rect.origin.y * stride + src_read_rect.origin.x * 4) as usize; - //copy the data to the destination vector - for _ in 0..src_read_rect.size.height { - let row = &src_data[src .. src + (4 * src_read_rect.size.width) as usize]; - image_data.extend_from_slice(row); - src += stride as usize; - } - - image_data - } -} - -pub struct CanvasPaintThread<'a> { - drawtarget: DrawTarget, - /// TODO(pcwalton): Support multiple paths. - path_builder: PathBuilder, - state: CanvasPaintState<'a>, - saved_states: Vec>, - webrender_api: webrender_api::RenderApi, - image_key: Option, - /// An old webrender image key that can be deleted when the next epoch ends. - old_image_key: Option, - /// An old webrender image key that can be deleted when the current epoch ends. - very_old_image_key: Option, -} - -#[derive(Clone)] -struct CanvasPaintState<'a> { - draw_options: DrawOptions, - fill_style: Pattern, - stroke_style: Pattern, - stroke_opts: StrokeOptions<'a>, - /// The current 2D transform matrix. 
- transform: Transform2D, - shadow_offset_x: f64, - shadow_offset_y: f64, - shadow_blur: f64, - shadow_color: Color, -} - -impl<'a> CanvasPaintState<'a> { - fn new(antialias: AntialiasMode) -> CanvasPaintState<'a> { - CanvasPaintState { - draw_options: DrawOptions::new(1.0, CompositionOp::Over, antialias), - fill_style: Pattern::Color(ColorPattern::new(Color::black())), - stroke_style: Pattern::Color(ColorPattern::new(Color::black())), - stroke_opts: StrokeOptions::new(1.0, JoinStyle::MiterOrBevel, CapStyle::Butt, 10.0, &[]), - transform: Transform2D::identity(), - shadow_offset_x: 0.0, - shadow_offset_y: 0.0, - shadow_blur: 0.0, - shadow_color: Color::transparent(), - } - } -} - -impl<'a> CanvasPaintThread<'a> { - fn new(size: Size2D, - webrender_api_sender: webrender_api::RenderApiSender, - antialias: AntialiasMode) -> CanvasPaintThread<'a> { - let draw_target = CanvasPaintThread::create(size); - let path_builder = draw_target.create_path_builder(); - let webrender_api = webrender_api_sender.create_api(); - CanvasPaintThread { - drawtarget: draw_target, - path_builder: path_builder, - state: CanvasPaintState::new(antialias), - saved_states: vec![], - webrender_api: webrender_api, - image_key: None, - old_image_key: None, - very_old_image_key: None, - } - } - - /// Creates a new `CanvasPaintThread` and returns an `IpcSender` to - /// communicate with it. - pub fn start(size: Size2D, - webrender_api_sender: webrender_api::RenderApiSender, - antialias: bool) - -> IpcSender { - let (sender, receiver) = ipc::channel::().unwrap(); - let antialias = if antialias { - AntialiasMode::Default - } else { - AntialiasMode::None - }; - thread::Builder::new().name("CanvasThread".to_owned()).spawn(move || { - let mut painter = CanvasPaintThread::new(size, webrender_api_sender, antialias); - loop { - let msg = receiver.recv(); - match msg.unwrap() { - CanvasMsg::Canvas2d(message) => { - match message { - Canvas2dMsg::FillText(text, x, y, max_width) => painter.fill_text(text, x, y, max_width), - Canvas2dMsg::FillRect(ref rect) => painter.fill_rect(rect), - Canvas2dMsg::StrokeRect(ref rect) => painter.stroke_rect(rect), - Canvas2dMsg::ClearRect(ref rect) => painter.clear_rect(rect), - Canvas2dMsg::BeginPath => painter.begin_path(), - Canvas2dMsg::ClosePath => painter.close_path(), - Canvas2dMsg::Fill => painter.fill(), - Canvas2dMsg::Stroke => painter.stroke(), - Canvas2dMsg::Clip => painter.clip(), - Canvas2dMsg::IsPointInPath(x, y, fill_rule, chan) => { - painter.is_point_in_path(x, y, fill_rule, chan) - }, - Canvas2dMsg::DrawImage(imagedata, image_size, dest_rect, source_rect, - smoothing_enabled) => { - painter.draw_image(imagedata, image_size, dest_rect, source_rect, smoothing_enabled) - } - Canvas2dMsg::DrawImageSelf(image_size, dest_rect, source_rect, smoothing_enabled) => { - painter.draw_image_self(image_size, dest_rect, source_rect, smoothing_enabled) - } - Canvas2dMsg::DrawImageInOther( - renderer, image_size, dest_rect, source_rect, smoothing, sender - ) => { - painter.draw_image_in_other( - renderer, image_size, dest_rect, source_rect, smoothing, sender) - } - Canvas2dMsg::MoveTo(ref point) => painter.move_to(point), - Canvas2dMsg::LineTo(ref point) => painter.line_to(point), - Canvas2dMsg::Rect(ref rect) => painter.rect(rect), - Canvas2dMsg::QuadraticCurveTo(ref cp, ref pt) => { - painter.quadratic_curve_to(cp, pt) - } - Canvas2dMsg::BezierCurveTo(ref cp1, ref cp2, ref pt) => { - painter.bezier_curve_to(cp1, cp2, pt) - } - Canvas2dMsg::Arc(ref center, radius, start, end, ccw) => { - 
painter.arc(center, radius, start, end, ccw) - } - Canvas2dMsg::ArcTo(ref cp1, ref cp2, radius) => { - painter.arc_to(cp1, cp2, radius) - } - Canvas2dMsg::Ellipse(ref center, radius_x, radius_y, rotation, start, end, ccw) => { - painter.ellipse(center, radius_x, radius_y, rotation, start, end, ccw) - } - Canvas2dMsg::RestoreContext => painter.restore_context_state(), - Canvas2dMsg::SaveContext => painter.save_context_state(), - Canvas2dMsg::SetFillStyle(style) => painter.set_fill_style(style), - Canvas2dMsg::SetStrokeStyle(style) => painter.set_stroke_style(style), - Canvas2dMsg::SetLineWidth(width) => painter.set_line_width(width), - Canvas2dMsg::SetLineCap(cap) => painter.set_line_cap(cap), - Canvas2dMsg::SetLineJoin(join) => painter.set_line_join(join), - Canvas2dMsg::SetMiterLimit(limit) => painter.set_miter_limit(limit), - Canvas2dMsg::SetTransform(ref matrix) => painter.set_transform(matrix), - Canvas2dMsg::SetGlobalAlpha(alpha) => painter.set_global_alpha(alpha), - Canvas2dMsg::SetGlobalComposition(op) => painter.set_global_composition(op), - Canvas2dMsg::GetImageData(dest_rect, canvas_size, chan) - => painter.image_data(dest_rect, canvas_size, chan), - Canvas2dMsg::PutImageData(imagedata, offset, image_data_size, dirty_rect) - => painter.put_image_data(imagedata, offset, image_data_size, dirty_rect), - Canvas2dMsg::SetShadowOffsetX(value) => painter.set_shadow_offset_x(value), - Canvas2dMsg::SetShadowOffsetY(value) => painter.set_shadow_offset_y(value), - Canvas2dMsg::SetShadowBlur(value) => painter.set_shadow_blur(value), - Canvas2dMsg::SetShadowColor(ref color) => painter.set_shadow_color(color.to_azure_style()), - } - }, - CanvasMsg::Close => break, - CanvasMsg::Recreate(size) => painter.recreate(size), - CanvasMsg::FromScript(message) => { - match message { - FromScriptMsg::SendPixels(chan) => { - painter.send_pixels(chan) - } - } - } - CanvasMsg::FromLayout(message) => { - match message { - FromLayoutMsg::SendData(chan) => { - painter.send_data(chan) - } - } - } - } - } - }).expect("Thread spawning failed"); - - sender - } - - fn save_context_state(&mut self) { - self.saved_states.push(self.state.clone()); - } - - fn restore_context_state(&mut self) { - if let Some(state) = self.saved_states.pop() { - mem::replace(&mut self.state, state); - self.drawtarget.set_transform(&self.state.transform); - self.drawtarget.pop_clip(); - } - } - - fn fill_text(&self, text: String, x: f64, y: f64, max_width: Option) { - error!("Unimplemented canvas2d.fillText. Values received: {}, {}, {}, {:?}.", text, x, y, max_width); - } - - fn fill_rect(&self, rect: &Rect) { - if is_zero_size_gradient(&self.state.fill_style) { - return; // Paint nothing if gradient size is zero. 
- } - - let draw_rect = Rect::new(rect.origin, - match self.state.fill_style { - Pattern::Surface(ref surface) => { - let surface_size = surface.size(); - match (surface.repeat_x, surface.repeat_y) { - (true, true) => rect.size, - (true, false) => Size2D::new(rect.size.width, surface_size.height as f32), - (false, true) => Size2D::new(surface_size.width as f32, rect.size.height), - (false, false) => Size2D::new(surface_size.width as f32, surface_size.height as f32), - } - }, - _ => rect.size, - } - ); - - if self.need_to_draw_shadow() { - self.draw_with_shadow(&draw_rect, |new_draw_target: &DrawTarget| { - new_draw_target.fill_rect(&draw_rect, self.state.fill_style.to_pattern_ref(), - Some(&self.state.draw_options)); - }); - } else { - self.drawtarget.fill_rect(&draw_rect, self.state.fill_style.to_pattern_ref(), - Some(&self.state.draw_options)); - } - } - - fn clear_rect(&self, rect: &Rect) { - self.drawtarget.clear_rect(rect); - } - - fn stroke_rect(&self, rect: &Rect) { - if is_zero_size_gradient(&self.state.stroke_style) { - return; // Paint nothing if gradient size is zero. - } - - if self.need_to_draw_shadow() { - self.draw_with_shadow(&rect, |new_draw_target: &DrawTarget| { - new_draw_target.stroke_rect(rect, self.state.stroke_style.to_pattern_ref(), - &self.state.stroke_opts, &self.state.draw_options); - }); - } else if rect.size.width == 0. || rect.size.height == 0. { - let cap = match self.state.stroke_opts.line_join { - JoinStyle::Round => CapStyle::Round, - _ => CapStyle::Butt - }; - - let stroke_opts = - StrokeOptions::new(self.state.stroke_opts.line_width, - self.state.stroke_opts.line_join, - cap, - self.state.stroke_opts.miter_limit, - self.state.stroke_opts.mDashPattern); - self.drawtarget.stroke_line(rect.origin, rect.bottom_right(), - self.state.stroke_style.to_pattern_ref(), - &stroke_opts, &self.state.draw_options); - } else { - self.drawtarget.stroke_rect(rect, self.state.stroke_style.to_pattern_ref(), - &self.state.stroke_opts, &self.state.draw_options); - } - } - - fn begin_path(&mut self) { - self.path_builder = self.drawtarget.create_path_builder() - } - - fn close_path(&self) { - self.path_builder.close() - } - - fn fill(&self) { - if is_zero_size_gradient(&self.state.fill_style) { - return; // Paint nothing if gradient size is zero. - } - - self.drawtarget.fill(&self.path_builder.finish(), - self.state.fill_style.to_pattern_ref(), - &self.state.draw_options); - } - - fn stroke(&self) { - if is_zero_size_gradient(&self.state.stroke_style) { - return; // Paint nothing if gradient size is zero. 
- } - - self.drawtarget.stroke(&self.path_builder.finish(), - self.state.stroke_style.to_pattern_ref(), - &self.state.stroke_opts, - &self.state.draw_options); - } - - fn clip(&self) { - self.drawtarget.push_clip(&self.path_builder.finish()); - } - - fn is_point_in_path(&mut self, x: f64, y: f64, - _fill_rule: FillRule, chan: IpcSender) { - let path = self.path_builder.finish(); - let result = path.contains_point(x, y, &self.state.transform); - self.path_builder = path.copy_to_builder(); - chan.send(result).unwrap(); - } - - fn draw_image(&self, image_data: Vec, image_size: Size2D, - dest_rect: Rect, source_rect: Rect, smoothing_enabled: bool) { - // We round up the floating pixel values to draw the pixels - let source_rect = source_rect.ceil(); - // It discards the extra pixels (if any) that won't be painted - let image_data = crop_image(image_data, image_size, source_rect); - - if self.need_to_draw_shadow() { - let rect = Rect::new(Point2D::new(dest_rect.origin.x as f32, dest_rect.origin.y as f32), - Size2D::new(dest_rect.size.width as f32, dest_rect.size.height as f32)); - - self.draw_with_shadow(&rect, |new_draw_target: &DrawTarget| { - write_image(&new_draw_target, image_data, source_rect.size, dest_rect, - smoothing_enabled, self.state.draw_options.composition, - self.state.draw_options.alpha); - }); - } else { - write_image(&self.drawtarget, image_data, source_rect.size, dest_rect, - smoothing_enabled, self.state.draw_options.composition, - self.state.draw_options.alpha); - } - } - - fn draw_image_self(&self, image_size: Size2D, - dest_rect: Rect, source_rect: Rect, - smoothing_enabled: bool) { - // Reads pixels from source image - // In this case source and target are the same canvas - let image_data = self.read_pixels(source_rect.to_i32(), image_size); - - if self.need_to_draw_shadow() { - let rect = Rect::new(Point2D::new(dest_rect.origin.x as f32, dest_rect.origin.y as f32), - Size2D::new(dest_rect.size.width as f32, dest_rect.size.height as f32)); - - self.draw_with_shadow(&rect, |new_draw_target: &DrawTarget| { - write_image(&new_draw_target, image_data, source_rect.size, dest_rect, - smoothing_enabled, self.state.draw_options.composition, - self.state.draw_options.alpha); - }); - } else { - // Writes on target canvas - write_image(&self.drawtarget, image_data, image_size, dest_rect, - smoothing_enabled, self.state.draw_options.composition, - self.state.draw_options.alpha); - } - } - - fn draw_image_in_other(&self, - renderer: IpcSender, - image_size: Size2D, - dest_rect: Rect, - source_rect: Rect, - smoothing_enabled: bool, - sender: IpcSender<()>) { - let mut image_data = self.read_pixels(source_rect.to_i32(), image_size); - // TODO: avoid double byte_swap. - byte_swap(&mut image_data); - - let msg = CanvasMsg::Canvas2d(Canvas2dMsg::DrawImage( - image_data, source_rect.size, dest_rect, source_rect, smoothing_enabled)); - renderer.send(msg).unwrap(); - // We acknowledge to the caller here that the data was sent to the - // other canvas so that if JS immediately afterwards try to get the - // pixels of the other one, it won't retrieve the other values. 
- sender.send(()).unwrap(); - } - - fn move_to(&self, point: &Point2D) { - self.path_builder.move_to(*point) - } - - fn line_to(&self, point: &Point2D) { - self.path_builder.line_to(*point) - } - - fn rect(&self, rect: &Rect) { - self.path_builder.move_to(Point2D::new(rect.origin.x, rect.origin.y)); - self.path_builder.line_to(Point2D::new(rect.origin.x + rect.size.width, rect.origin.y)); - self.path_builder.line_to(Point2D::new(rect.origin.x + rect.size.width, - rect.origin.y + rect.size.height)); - self.path_builder.line_to(Point2D::new(rect.origin.x, rect.origin.y + rect.size.height)); - self.path_builder.close(); - } - - fn quadratic_curve_to(&self, - cp: &Point2D, - endpoint: &Point2D) { - self.path_builder.quadratic_curve_to(cp, endpoint) - } - - fn bezier_curve_to(&self, - cp1: &Point2D, - cp2: &Point2D, - endpoint: &Point2D) { - self.path_builder.bezier_curve_to(cp1, cp2, endpoint) - } - - fn arc(&self, - center: &Point2D, - radius: AzFloat, - start_angle: AzFloat, - end_angle: AzFloat, - ccw: bool) { - self.path_builder.arc(*center, radius, start_angle, end_angle, ccw) - } - - fn arc_to(&self, - cp1: &Point2D, - cp2: &Point2D, - radius: AzFloat) { - let cp0 = self.path_builder.get_current_point(); - let cp1 = *cp1; - let cp2 = *cp2; - - if (cp0.x == cp1.x && cp0.y == cp1.y) || cp1 == cp2 || radius == 0.0 { - self.line_to(&cp1); - return; - } - - // if all three control points lie on a single straight line, - // connect the first two by a straight line - let direction = (cp2.x - cp1.x) * (cp0.y - cp1.y) + (cp2.y - cp1.y) * (cp1.x - cp0.x); - if direction == 0.0 { - self.line_to(&cp1); - return; - } - - // otherwise, draw the Arc - let a2 = (cp0.x - cp1.x).powi(2) + (cp0.y - cp1.y).powi(2); - let b2 = (cp1.x - cp2.x).powi(2) + (cp1.y - cp2.y).powi(2); - let d = { - let c2 = (cp0.x - cp2.x).powi(2) + (cp0.y - cp2.y).powi(2); - let cosx = (a2 + b2 - c2) / (2.0 * (a2 * b2).sqrt()); - let sinx = (1.0 - cosx.powi(2)).sqrt(); - radius / ((1.0 - cosx) / sinx) - }; - - // first tangent point - let anx = (cp1.x - cp0.x) / a2.sqrt(); - let any = (cp1.y - cp0.y) / a2.sqrt(); - let tp1 = Point2D::new(cp1.x - anx * d, cp1.y - any * d); - - // second tangent point - let bnx = (cp1.x - cp2.x) / b2.sqrt(); - let bny = (cp1.y - cp2.y) / b2.sqrt(); - let tp2 = Point2D::new(cp1.x - bnx * d, cp1.y - bny * d); - - // arc center and angles - let anticlockwise = direction < 0.0; - let cx = tp1.x + any * radius * if anticlockwise { 1.0 } else { -1.0 }; - let cy = tp1.y - anx * radius * if anticlockwise { 1.0 } else { -1.0 }; - let angle_start = (tp1.y - cy).atan2(tp1.x - cx); - let angle_end = (tp2.y - cy).atan2(tp2.x - cx); - - self.line_to(&tp1); - if [cx, cy, angle_start, angle_end].iter().all(|x| x.is_finite()) { - self.arc(&Point2D::new(cx, cy), radius, - angle_start, angle_end, anticlockwise); - } - } - - fn ellipse(&mut self, - center: &Point2D, - radius_x: AzFloat, - radius_y: AzFloat, - rotation_angle: AzFloat, - start_angle: AzFloat, - end_angle: AzFloat, - ccw: bool) { - self.path_builder.ellipse(*center, radius_x, radius_y, rotation_angle, start_angle, end_angle, ccw); - } - - fn set_fill_style(&mut self, style: FillOrStrokeStyle) { - if let Some(pattern) = style.to_azure_pattern(&self.drawtarget) { - self.state.fill_style = pattern - } - } - - fn set_stroke_style(&mut self, style: FillOrStrokeStyle) { - if let Some(pattern) = style.to_azure_pattern(&self.drawtarget) { - self.state.stroke_style = pattern - } - } - - fn set_line_width(&mut self, width: f32) { - self.state.stroke_opts.line_width 
= width; - } - - fn set_line_cap(&mut self, cap: LineCapStyle) { - self.state.stroke_opts.line_cap = cap.to_azure_style(); - } - - fn set_line_join(&mut self, join: LineJoinStyle) { - self.state.stroke_opts.line_join = join.to_azure_style(); - } - - fn set_miter_limit(&mut self, limit: f32) { - self.state.stroke_opts.miter_limit = limit; - } - - fn set_transform(&mut self, transform: &Transform2D) { - self.state.transform = transform.clone(); - self.drawtarget.set_transform(transform) - } - - fn set_global_alpha(&mut self, alpha: f32) { - self.state.draw_options.alpha = alpha; - } - - fn set_global_composition(&mut self, op: CompositionOrBlending) { - self.state.draw_options.set_composition_op(op.to_azure_style()); - } - - fn create(size: Size2D) -> DrawTarget { - DrawTarget::new(BackendType::Skia, size, SurfaceFormat::B8G8R8A8) - } - - fn recreate(&mut self, size: Size2D) { - // TODO: clear the thread state. https://github.com/servo/servo/issues/17533 - self.drawtarget = CanvasPaintThread::create(size); - self.state = CanvasPaintState::new(self.state.draw_options.antialias); - self.saved_states.clear(); - // Webrender doesn't let images change size, so we clear the webrender image key. - // TODO: there is an annying race condition here: the display list builder - // might still be using the old image key. Really, we should be scheduling the image - // for later deletion, not deleting it immediately. - // https://github.com/servo/servo/issues/17534 - if let Some(image_key) = self.image_key.take() { - // If this executes, then we are in a new epoch since we last recreated the canvas, - // so `old_image_key` must be `None`. - debug_assert!(self.old_image_key.is_none()); - self.old_image_key = Some(image_key); - } - } - - fn send_pixels(&mut self, chan: IpcSender>>) { - self.drawtarget.snapshot().get_data_surface().with_data(|element| { - chan.send(Some(element.into())).unwrap(); - }) - } - - fn send_data(&mut self, chan: IpcSender) { - self.drawtarget.snapshot().get_data_surface().with_data(|element| { - let size = self.drawtarget.get_size(); - - let descriptor = webrender_api::ImageDescriptor { - width: size.width as u32, - height: size.height as u32, - stride: None, - format: webrender_api::ImageFormat::BGRA8, - offset: 0, - is_opaque: false, - }; - let data = webrender_api::ImageData::Raw(Arc::new(element.into())); - - let mut updates = webrender_api::ResourceUpdates::new(); - - match self.image_key { - Some(image_key) => { - debug!("Updating image {:?}.", image_key); - updates.update_image(image_key, - descriptor, - data, - None); - } - None => { - self.image_key = Some(self.webrender_api.generate_image_key()); - debug!("New image {:?}.", self.image_key); - updates.add_image(self.image_key.unwrap(), - descriptor, - data, - None); - } - } - - if let Some(image_key) = mem::replace(&mut self.very_old_image_key, self.old_image_key.take()) { - updates.delete_image(image_key); - } - - self.webrender_api.update_resources(updates); - - let data = CanvasImageData { - image_key: self.image_key.unwrap(), - }; - chan.send(data).unwrap(); - }) - } - - fn image_data(&self, dest_rect: Rect, canvas_size: Size2D, chan: IpcSender>) { - let mut dest_data = self.read_pixels(dest_rect, canvas_size); - - // bgra -> rgba - byte_swap(&mut dest_data); - chan.send(dest_data).unwrap(); - } - - // https://html.spec.whatwg.org/multipage/#dom-context-2d-putimagedata - fn put_image_data(&mut self, imagedata: Vec, - offset: Vector2D, - image_data_size: Size2D, - mut dirty_rect: Rect) { - if image_data_size.width <= 0.0 
|| image_data_size.height <= 0.0 { - return - } - - assert!(image_data_size.width * image_data_size.height * 4.0 == imagedata.len() as f64); - - // Step 1. TODO (neutered data) - - // Step 2. - if dirty_rect.size.width < 0.0f64 { - dirty_rect.origin.x += dirty_rect.size.width; - dirty_rect.size.width = -dirty_rect.size.width; - } - - if dirty_rect.size.height < 0.0f64 { - dirty_rect.origin.y += dirty_rect.size.height; - dirty_rect.size.height = -dirty_rect.size.height; - } - - // Step 3. - if dirty_rect.origin.x < 0.0f64 { - dirty_rect.size.width += dirty_rect.origin.x; - dirty_rect.origin.x = 0.0f64; - } - - if dirty_rect.origin.y < 0.0f64 { - dirty_rect.size.height += dirty_rect.origin.y; - dirty_rect.origin.y = 0.0f64; - } - - // Step 4. - if dirty_rect.max_x() > image_data_size.width { - dirty_rect.size.width = image_data_size.width - dirty_rect.origin.x; - } - - if dirty_rect.max_y() > image_data_size.height { - dirty_rect.size.height = image_data_size.height - dirty_rect.origin.y; - } - - // 5) If either dirtyWidth or dirtyHeight is negative or zero, - // stop without affecting any bitmaps - if dirty_rect.size.width <= 0.0 || dirty_rect.size.height <= 0.0 { - return - } - - // Step 6. - let dest_rect = dirty_rect.translate(&offset).to_i32(); - - // azure_hl operates with integers. We need to cast the image size - let image_size = image_data_size.to_i32(); - - let first_pixel = dest_rect.origin - offset.to_i32(); - let mut src_line = (first_pixel.y * (image_size.width * 4) + first_pixel.x * 4) as usize; - - let mut dest = - Vec::with_capacity((dest_rect.size.width * dest_rect.size.height * 4) as usize); - - for _ in 0 .. dest_rect.size.height { - let mut src_offset = src_line; - for _ in 0 .. dest_rect.size.width { - let alpha = imagedata[src_offset + 3] as u16; - // add 127 before dividing for more accurate rounding - let premultiply_channel = |channel: u8| (((channel as u16 * alpha) + 127) / 255) as u8; - dest.push(premultiply_channel(imagedata[src_offset + 2])); - dest.push(premultiply_channel(imagedata[src_offset + 1])); - dest.push(premultiply_channel(imagedata[src_offset + 0])); - dest.push(imagedata[src_offset + 3]); - src_offset += 4; - } - src_line += (image_size.width * 4) as usize; - } - - if let Some(source_surface) = self.drawtarget.create_source_surface_from_data( - &dest, - dest_rect.size, - dest_rect.size.width * 4, - SurfaceFormat::B8G8R8A8) { - self.drawtarget.copy_surface(source_surface, - Rect::new(Point2D::new(0, 0), dest_rect.size), - dest_rect.origin); - } - } - - fn set_shadow_offset_x(&mut self, value: f64) { - self.state.shadow_offset_x = value; - } - - fn set_shadow_offset_y(&mut self, value: f64) { - self.state.shadow_offset_y = value; - } - - fn set_shadow_blur(&mut self, value: f64) { - self.state.shadow_blur = value; - } - - fn set_shadow_color(&mut self, value: Color) { - self.state.shadow_color = value; - } - - // https://html.spec.whatwg.org/multipage/#when-shadows-are-drawn - fn need_to_draw_shadow(&self) -> bool { - self.state.shadow_color.a != 0.0f32 && - (self.state.shadow_offset_x != 0.0f64 || - self.state.shadow_offset_y != 0.0f64 || - self.state.shadow_blur != 0.0f64) - } - - fn create_draw_target_for_shadow(&self, source_rect: &Rect) -> DrawTarget { - let draw_target = self.drawtarget.create_similar_draw_target(&Size2D::new(source_rect.size.width as i32, - source_rect.size.height as i32), - self.drawtarget.get_format()); - let matrix = Transform2D::identity() - .pre_translate(-source_rect.origin.to_vector().cast().unwrap()) - 
.pre_mul(&self.state.transform); - draw_target.set_transform(&matrix); - draw_target - } - - fn draw_with_shadow(&self, rect: &Rect, draw_shadow_source: F) - where F: FnOnce(&DrawTarget) - { - let shadow_src_rect = self.state.transform.transform_rect(rect); - let new_draw_target = self.create_draw_target_for_shadow(&shadow_src_rect); - draw_shadow_source(&new_draw_target); - self.drawtarget.draw_surface_with_shadow(new_draw_target.snapshot(), - &Point2D::new(shadow_src_rect.origin.x as AzFloat, - shadow_src_rect.origin.y as AzFloat), - &self.state.shadow_color, - &Vector2D::new(self.state.shadow_offset_x as AzFloat, - self.state.shadow_offset_y as AzFloat), - (self.state.shadow_blur / 2.0f64) as AzFloat, - self.state.draw_options.composition); - } -} - -impl<'a> Drop for CanvasPaintThread<'a> { - fn drop(&mut self) { - let mut updates = webrender_api::ResourceUpdates::new(); - - if let Some(image_key) = self.old_image_key.take() { - updates.delete_image(image_key); - } - if let Some(image_key) = self.very_old_image_key.take() { - updates.delete_image(image_key); - } - - self.webrender_api.update_resources(updates); - } -} - -/// Used by drawImage to get rid of the extra pixels of the image data that -/// won't be copied to the canvas -/// image_data: Color pixel data of the image -/// image_size: Image dimensions -/// crop_rect: It determines the area of the image we want to keep -fn crop_image(image_data: Vec, - image_size: Size2D, - crop_rect: Rect) -> Vec{ - // We're going to iterate over a pixel values array so we need integers - let crop_rect = crop_rect.to_i32(); - let image_size = image_size.to_i32(); - // Assuming 4 bytes per pixel and row-major order for storage - // (consecutive elements in a pixel row of the image are contiguous in memory) - let stride = image_size.width * 4; - let image_bytes_length = image_size.height * image_size.width * 4; - let crop_area_bytes_length = crop_rect.size.height * crop_rect.size.width * 4; - // If the image size is less or equal than the crop area we do nothing - if image_bytes_length <= crop_area_bytes_length { - return image_data; - } - - let mut new_image_data = Vec::new(); - let mut src = (crop_rect.origin.y * stride + crop_rect.origin.x * 4) as usize; - for _ in 0..crop_rect.size.height { - let row = &image_data[src .. src + (4 * crop_rect.size.width) as usize]; - new_image_data.extend_from_slice(row); - src += stride as usize; - } - new_image_data -} - -/// It writes an image to the destination target -/// draw_target: the destination target where the image_data will be copied -/// image_data: Pixel information of the image to be written. It takes RGBA8 -/// image_size: The size of the image to be written -/// dest_rect: Area of the destination target where the pixels will be copied -/// smoothing_enabled: It determines if smoothing is applied to the image result -fn write_image(draw_target: &DrawTarget, - mut image_data: Vec, - image_size: Size2D, - dest_rect: Rect, - smoothing_enabled: bool, - composition_op: CompositionOp, - global_alpha: f32) { - if image_data.is_empty() { - return - } - let image_rect = Rect::new(Point2D::zero(), image_size); - // rgba -> bgra - byte_swap(&mut image_data); - - // From spec https://html.spec.whatwg.org/multipage/#dom-context-2d-drawimage - // When scaling up, if the imageSmoothingEnabled attribute is set to true, the user agent should attempt - // to apply a smoothing algorithm to the image data when it is scaled. - // Otherwise, the image must be rendered using nearest-neighbor interpolation. 
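`put_image_data` above premultiplies each colour channel by alpha and adds 127 before the integer division by 255, so the result rounds to the nearest value instead of truncating. A self-contained sketch of that rounding trick; `premultiply` is a hypothetical name for what the original does with its local `premultiply_channel` closure.

// (channel * alpha + 127) / 255 rounds channel * alpha / 255 to the nearest
// integer; adding nothing would truncate and systematically darken the result.
fn premultiply(channel: u8, alpha: u8) -> u8 {
    ((channel as u16 * alpha as u16 + 127) / 255) as u8
}

#[test]
fn premultiply_rounds_to_nearest() {
    assert_eq!(premultiply(255, 255), 255);
    assert_eq!(premultiply(0, 255), 0);
    // 128 * 1 / 255 is about 0.502: rounding gives 1, plain truncation would give 0.
    assert_eq!(premultiply(128, 1), 1);
}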
- let filter = if smoothing_enabled { - Filter::Linear - } else { - Filter::Point - }; - // azure_hl operates with integers. We need to cast the image size - let image_size = image_size.to_i32(); - - if let Some(source_surface) = - draw_target.create_source_surface_from_data(&image_data, - image_size, - image_size.width * 4, - SurfaceFormat::B8G8R8A8) { - let draw_surface_options = DrawSurfaceOptions::new(filter, true); - let draw_options = DrawOptions::new(global_alpha, composition_op, AntialiasMode::None); - - draw_target.draw_surface(source_surface, - dest_rect.to_azure_style(), - image_rect.to_azure_style(), - draw_surface_options, - draw_options); - } -} - -fn is_zero_size_gradient(pattern: &Pattern) -> bool { - if let &Pattern::LinearGradient(ref gradient) = pattern { - if gradient.is_zero_size() { - return true; - } - } - false -} - -pub trait PointToi32 { - fn to_i32(&self) -> Point2D; -} - -impl PointToi32 for Point2D { - fn to_i32(&self) -> Point2D { - Point2D::new(self.x.to_i32().unwrap(), - self.y.to_i32().unwrap()) - } -} - -pub trait SizeToi32 { - fn to_i32(&self) -> Size2D; -} - -impl SizeToi32 for Size2D { - fn to_i32(&self) -> Size2D { - Size2D::new(self.width.to_i32().unwrap(), - self.height.to_i32().unwrap()) - } -} - -pub trait RectToi32 { - fn to_i32(&self) -> Rect; - fn ceil(&self) -> Rect; -} - -impl RectToi32 for Rect { - fn to_i32(&self) -> Rect { - Rect::new(Point2D::new(self.origin.x.to_i32().unwrap(), - self.origin.y.to_i32().unwrap()), - Size2D::new(self.size.width.to_i32().unwrap(), - self.size.height.to_i32().unwrap())) - } - - fn ceil(&self) -> Rect { - Rect::new(Point2D::new(self.origin.x.ceil(), - self.origin.y.ceil()), - Size2D::new(self.size.width.ceil(), - self.size.height.ceil())) - } - -} - -pub trait ToAzureStyle { - type Target; - fn to_azure_style(self) -> Self::Target; -} - -impl ToAzureStyle for Rect { - type Target = Rect; - - fn to_azure_style(self) -> Rect { - Rect::new(Point2D::new(self.origin.x as AzFloat, self.origin.y as AzFloat), - Size2D::new(self.size.width as AzFloat, self.size.height as AzFloat)) - } -} - - -impl ToAzureStyle for LineCapStyle { - type Target = CapStyle; - - fn to_azure_style(self) -> CapStyle { - match self { - LineCapStyle::Butt => CapStyle::Butt, - LineCapStyle::Round => CapStyle::Round, - LineCapStyle::Square => CapStyle::Square, - } - } -} - -impl ToAzureStyle for LineJoinStyle { - type Target = JoinStyle; - - fn to_azure_style(self) -> JoinStyle { - match self { - LineJoinStyle::Round => JoinStyle::Round, - LineJoinStyle::Bevel => JoinStyle::Bevel, - LineJoinStyle::Miter => JoinStyle::Miter, - } - } -} - -impl ToAzureStyle for CompositionStyle { - type Target = CompositionOp; - - fn to_azure_style(self) -> CompositionOp { - match self { - CompositionStyle::SrcIn => CompositionOp::In, - CompositionStyle::SrcOut => CompositionOp::Out, - CompositionStyle::SrcOver => CompositionOp::Over, - CompositionStyle::SrcAtop => CompositionOp::Atop, - CompositionStyle::DestIn => CompositionOp::DestIn, - CompositionStyle::DestOut => CompositionOp::DestOut, - CompositionStyle::DestOver => CompositionOp::DestOver, - CompositionStyle::DestAtop => CompositionOp::DestAtop, - CompositionStyle::Copy => CompositionOp::Source, - CompositionStyle::Lighter => CompositionOp::Add, - CompositionStyle::Xor => CompositionOp::Xor, - } - } -} - -impl ToAzureStyle for BlendingStyle { - type Target = CompositionOp; - - fn to_azure_style(self) -> CompositionOp { - match self { - BlendingStyle::Multiply => CompositionOp::Multiply, - 
BlendingStyle::Screen => CompositionOp::Screen, - BlendingStyle::Overlay => CompositionOp::Overlay, - BlendingStyle::Darken => CompositionOp::Darken, - BlendingStyle::Lighten => CompositionOp::Lighten, - BlendingStyle::ColorDodge => CompositionOp::ColorDodge, - BlendingStyle::ColorBurn => CompositionOp::ColorBurn, - BlendingStyle::HardLight => CompositionOp::HardLight, - BlendingStyle::SoftLight => CompositionOp::SoftLight, - BlendingStyle::Difference => CompositionOp::Difference, - BlendingStyle::Exclusion => CompositionOp::Exclusion, - BlendingStyle::Hue => CompositionOp::Hue, - BlendingStyle::Saturation => CompositionOp::Saturation, - BlendingStyle::Color => CompositionOp::Color, - BlendingStyle::Luminosity => CompositionOp::Luminosity, - } - } -} - -impl ToAzureStyle for CompositionOrBlending { - type Target = CompositionOp; - - fn to_azure_style(self) -> CompositionOp { - match self { - CompositionOrBlending::Composition(op) => op.to_azure_style(), - CompositionOrBlending::Blending(op) => op.to_azure_style(), - } - } -} - -pub trait ToAzurePattern { - fn to_azure_pattern(&self, drawtarget: &DrawTarget) -> Option; -} - -impl ToAzurePattern for FillOrStrokeStyle { - fn to_azure_pattern(&self, drawtarget: &DrawTarget) -> Option { - match *self { - FillOrStrokeStyle::Color(ref color) => { - Some(Pattern::Color(ColorPattern::new(color.to_azure_style()))) - }, - FillOrStrokeStyle::LinearGradient(ref linear_gradient_style) => { - let gradient_stops: Vec = linear_gradient_style.stops.iter().map(|s| { - GradientStop { - offset: s.offset as AzFloat, - color: s.color.to_azure_style() - } - }).collect(); - - Some(Pattern::LinearGradient(LinearGradientPattern::new( - &Point2D::new(linear_gradient_style.x0 as AzFloat, linear_gradient_style.y0 as AzFloat), - &Point2D::new(linear_gradient_style.x1 as AzFloat, linear_gradient_style.y1 as AzFloat), - drawtarget.create_gradient_stops(&gradient_stops, ExtendMode::Clamp), - &Transform2D::identity()))) - }, - FillOrStrokeStyle::RadialGradient(ref radial_gradient_style) => { - let gradient_stops: Vec = radial_gradient_style.stops.iter().map(|s| { - GradientStop { - offset: s.offset as AzFloat, - color: s.color.to_azure_style() - } - }).collect(); - - Some(Pattern::RadialGradient(RadialGradientPattern::new( - &Point2D::new(radial_gradient_style.x0 as AzFloat, radial_gradient_style.y0 as AzFloat), - &Point2D::new(radial_gradient_style.x1 as AzFloat, radial_gradient_style.y1 as AzFloat), - radial_gradient_style.r0 as AzFloat, radial_gradient_style.r1 as AzFloat, - drawtarget.create_gradient_stops(&gradient_stops, ExtendMode::Clamp), - &Transform2D::identity()))) - }, - FillOrStrokeStyle::Surface(ref surface_style) => { - drawtarget.create_source_surface_from_data(&surface_style.surface_data, - surface_style.surface_size, - surface_style.surface_size.width * 4, - SurfaceFormat::B8G8R8A8) - .map(|source_surface| { - Pattern::Surface(SurfacePattern::new( - source_surface.azure_source_surface, - surface_style.repeat_x, - surface_style.repeat_y, - &Transform2D::identity())) - }) - } - } - } -} - -impl ToAzureStyle for RGBA { - type Target = Color; - - fn to_azure_style(self) -> Color { - Color::rgba(self.red_f32() as AzFloat, - self.green_f32() as AzFloat, - self.blue_f32() as AzFloat, - self.alpha_f32() as AzFloat) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/canvas/gl_context.rs b/collector/compile-benchmarks/style-servo/components/canvas/gl_context.rs deleted file mode 100644 index ae7446e2e..000000000 --- 
a/collector/compile-benchmarks/style-servo/components/canvas/gl_context.rs +++ /dev/null @@ -1,203 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use canvas_traits::webgl::WebGLCommand; -use compositing::compositor_thread::{CompositorProxy, self}; -use euclid::Size2D; -use gleam::gl; -use offscreen_gl_context::{ColorAttachmentType, GLContext, GLContextAttributes, GLContextDispatcher, GLLimits}; -use offscreen_gl_context::{NativeGLContext, NativeGLContextHandle, NativeGLContextMethods}; -use offscreen_gl_context::{OSMesaContext, OSMesaContextHandle}; -use std::sync::{Arc, Mutex}; -use super::webgl_thread::WebGLImpl; - -/// The GLContextFactory is used to create shared GL contexts with the main thread GL context. -/// Currently, shared textures are used to render WebGL textures into the WR compositor. -/// In order to create a shared context, the GLContextFactory stores the handle of the main GL context. -pub enum GLContextFactory { - Native(NativeGLContextHandle, Option), - OSMesa(OSMesaContextHandle), -} - -impl GLContextFactory { - /// Creates a new GLContextFactory that uses the currently bound GL context to create shared contexts. - pub fn current_native_handle(proxy: &CompositorProxy) -> Option { - NativeGLContext::current_handle().map(|handle| { - if cfg!(target_os = "windows") { - // Used to dispatch functions from the GLContext thread to the main thread's event loop. - // Required to allow WGL GLContext sharing in Windows. - GLContextFactory::Native(handle, Some(MainThreadDispatcher::new(proxy.clone()))) - } else { - GLContextFactory::Native(handle, None) - } - }) - } - - /// Creates a new GLContextFactory that uses the currently bound OSMesa context to create shared contexts. - pub fn current_osmesa_handle() -> Option { - OSMesaContext::current_handle().map(GLContextFactory::OSMesa) - } - - /// Creates a new shared GLContext with the main GLContext - pub fn new_shared_context(&self, - size: Size2D, - attributes: GLContextAttributes) -> Result { - match *self { - GLContextFactory::Native(ref handle, ref dispatcher) => { - let dispatcher = dispatcher.as_ref().map(|d| Box::new(d.clone()) as Box<_>); - let ctx = GLContext::::new_shared_with_dispatcher(size, - attributes, - ColorAttachmentType::Texture, - gl::GlType::default(), - Some(handle), - dispatcher); - ctx.map(GLContextWrapper::Native) - } - GLContextFactory::OSMesa(ref handle) => { - let ctx = GLContext::::new_shared_with_dispatcher(size.to_untyped(), - attributes, - ColorAttachmentType::Texture, - gl::GlType::default(), - Some(handle), - None); - ctx.map(GLContextWrapper::OSMesa) - } - } - } - - /// Creates a new non-shared GLContext - pub fn new_context(&self, - size: Size2D, - attributes: GLContextAttributes) -> Result { - match *self { - GLContextFactory::Native(..) 
=> { - let ctx = GLContext::::new_shared_with_dispatcher(size, - attributes, - ColorAttachmentType::Texture, - gl::GlType::default(), - None, - None); - ctx.map(GLContextWrapper::Native) - } - GLContextFactory::OSMesa(_) => { - let ctx = GLContext::::new_shared_with_dispatcher(size.to_untyped(), - attributes, - ColorAttachmentType::Texture, - gl::GlType::default(), - None, - None); - ctx.map(GLContextWrapper::OSMesa) - } - } - } -} - - -/// GLContextWrapper used to abstract NativeGLContext and OSMesaContext types -pub enum GLContextWrapper { - Native(GLContext), - OSMesa(GLContext), -} - -impl GLContextWrapper { - pub fn make_current(&self) { - match *self { - GLContextWrapper::Native(ref ctx) => { - ctx.make_current().unwrap(); - } - GLContextWrapper::OSMesa(ref ctx) => { - ctx.make_current().unwrap(); - } - } - } - - pub fn unbind(&self) { - match *self { - GLContextWrapper::Native(ref ctx) => { - ctx.unbind().unwrap(); - } - GLContextWrapper::OSMesa(ref ctx) => { - ctx.unbind().unwrap(); - } - } - } - - pub fn apply_command(&self, cmd: WebGLCommand) { - match *self { - GLContextWrapper::Native(ref ctx) => { - WebGLImpl::apply(ctx, cmd); - } - GLContextWrapper::OSMesa(ref ctx) => { - WebGLImpl::apply(ctx, cmd); - } - } - } - - pub fn gl(&self) -> &gl::Gl { - match *self { - GLContextWrapper::Native(ref ctx) => { - ctx.gl() - } - GLContextWrapper::OSMesa(ref ctx) => { - ctx.gl() - } - } - } - - pub fn get_info(&self) -> (Size2D, u32, GLLimits) { - match *self { - GLContextWrapper::Native(ref ctx) => { - let (real_size, texture_id) = { - let draw_buffer = ctx.borrow_draw_buffer().unwrap(); - (draw_buffer.size(), draw_buffer.get_bound_texture_id().unwrap()) - }; - - let limits = ctx.borrow_limits().clone(); - - (real_size, texture_id, limits) - } - GLContextWrapper::OSMesa(ref ctx) => { - let (real_size, texture_id) = { - let draw_buffer = ctx.borrow_draw_buffer().unwrap(); - (draw_buffer.size(), draw_buffer.get_bound_texture_id().unwrap()) - }; - - let limits = ctx.borrow_limits().clone(); - - (real_size, texture_id, limits) - } - } - } - - pub fn resize(&mut self, size: Size2D) -> Result<(), &'static str> { - match *self { - GLContextWrapper::Native(ref mut ctx) => { - ctx.resize(size) - } - GLContextWrapper::OSMesa(ref mut ctx) => { - ctx.resize(size) - } - } - } -} - -/// Implements GLContextDispatcher to dispatch functions from GLContext threads to the main thread's event loop. -/// It's used in Windows to allow WGL GLContext sharing. -#[derive(Clone)] -pub struct MainThreadDispatcher { - compositor_proxy: Arc> -} - -impl MainThreadDispatcher { - fn new(proxy: CompositorProxy) -> Self { - Self { - compositor_proxy: Arc::new(Mutex::new(proxy)), - } - } -} -impl GLContextDispatcher for MainThreadDispatcher { - fn dispatch(&self, f: Box) { - self.compositor_proxy.lock().unwrap().send(compositor_thread::Msg::Dispatch(f)); - } -} diff --git a/collector/compile-benchmarks/style-servo/components/canvas/lib.rs b/collector/compile-benchmarks/style-servo/components/canvas/lib.rs deleted file mode 100644 index e26d8c2b5..000000000 --- a/collector/compile-benchmarks/style-servo/components/canvas/lib.rs +++ /dev/null @@ -1,24 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -#![deny(unsafe_code)] - -extern crate azure; -extern crate canvas_traits; -extern crate compositing; -extern crate cssparser; -extern crate euclid; -extern crate fnv; -extern crate gleam; -extern crate ipc_channel; -#[macro_use] extern crate log; -extern crate num_traits; -extern crate offscreen_gl_context; -extern crate webrender; -extern crate webrender_api; - -pub mod canvas_paint_thread; -pub mod gl_context; -mod webgl_mode; -pub mod webgl_thread; diff --git a/collector/compile-benchmarks/style-servo/components/canvas/webgl_mode/inprocess.rs b/collector/compile-benchmarks/style-servo/components/canvas/webgl_mode/inprocess.rs deleted file mode 100644 index 71ab8d8a9..000000000 --- a/collector/compile-benchmarks/style-servo/components/canvas/webgl_mode/inprocess.rs +++ /dev/null @@ -1,107 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use ::gl_context::GLContextFactory; -use ::webgl_thread::{WebGLExternalImageApi, WebGLExternalImageHandler, WebGLThreadObserver, WebGLThread}; -use canvas_traits::webgl::{WebGLChan, WebGLContextId, WebGLMsg, WebGLPipeline, WebGLReceiver}; -use canvas_traits::webgl::{WebGLSender, WebVRCommand, WebVRRenderHandler}; -use canvas_traits::webgl::webgl_channel; -use euclid::Size2D; -use gleam::gl; -use std::marker::PhantomData; -use std::rc::Rc; -use webrender; -use webrender_api; - -/// WebGL Threading API entry point that lives in the constellation. -pub struct WebGLThreads(WebGLSender); - -impl WebGLThreads { - /// Creates a new WebGLThreads object - pub fn new(gl_factory: GLContextFactory, - webrender_gl: Rc, - webrender_api_sender: webrender_api::RenderApiSender, - webvr_compositor: Option>) - -> (WebGLThreads, Box) { - // This implementation creates a single `WebGLThread` for all the pipelines. - let channel = WebGLThread::start(gl_factory, - webrender_api_sender, - webvr_compositor.map(|c| WebVRRenderWrapper(c)), - PhantomData); - let external = WebGLExternalImageHandler::new(WebGLExternalImages::new(webrender_gl, channel.clone())); - (WebGLThreads(channel), Box::new(external)) - } - - /// Gets the WebGLThread handle for each script pipeline. - pub fn pipeline(&self) -> WebGLPipeline { - // This mode creates a single thread, so the existing WebGLChan is just cloned. - WebGLPipeline(WebGLChan(self.0.clone())) - } - - /// Sends a exit message to close the WebGLThreads and release all WebGLContexts. - pub fn exit(&self) -> Result<(), &'static str> { - self.0.send(WebGLMsg::Exit).map_err(|_| "Failed to send Exit message") - } -} - -/// Bridge between the webrender::ExternalImage callbacks and the WebGLThreads. -struct WebGLExternalImages { - webrender_gl: Rc, - webgl_channel: WebGLSender, - // Used to avoid creating a new channel on each received WebRender request. - lock_channel: (WebGLSender<(u32, Size2D, usize)>, WebGLReceiver<(u32, Size2D, usize)>), -} - -impl WebGLExternalImages { - fn new(webrender_gl: Rc, channel: WebGLSender) -> Self { - Self { - webrender_gl, - webgl_channel: channel, - lock_channel: webgl_channel().unwrap(), - } - } -} - -impl WebGLExternalImageApi for WebGLExternalImages { - fn lock(&mut self, ctx_id: WebGLContextId) -> (u32, Size2D) { - // WebGL Thread has it's own GL command queue that we need to synchronize with the WR GL command queue. - // The WebGLMsg::Lock message inserts a fence in the WebGL command queue. 
- self.webgl_channel.send(WebGLMsg::Lock(ctx_id, self.lock_channel.0.clone())).unwrap(); - let (image_id, size, gl_sync) = self.lock_channel.1.recv().unwrap(); - // The next glWaitSync call is run on the WR thread and it's used to synchronize the two - // flows of OpenGL commands in order to avoid WR using a semi-ready WebGL texture. - // glWaitSync doesn't block WR thread, it affects only internal OpenGL subsystem. - self.webrender_gl.wait_sync(gl_sync as gl::GLsync, 0, gl::TIMEOUT_IGNORED); - (image_id, size) - } - - fn unlock(&mut self, ctx_id: WebGLContextId) { - self.webgl_channel.send(WebGLMsg::Unlock(ctx_id)).unwrap(); - } -} - -/// Custom observer used in a `WebGLThread`. -impl WebGLThreadObserver for PhantomData<()> { - fn on_context_create(&mut self, ctx_id: WebGLContextId, texture_id: u32, size: Size2D) { - debug!("WebGLContext created (ctx_id: {:?} texture_id: {:?} size: {:?}", ctx_id, texture_id, size); - } - - fn on_context_resize(&mut self, ctx_id: WebGLContextId, texture_id: u32, size: Size2D) { - debug!("WebGLContext resized (ctx_id: {:?} texture_id: {:?} size: {:?}", ctx_id, texture_id, size); - } - - fn on_context_delete(&mut self, ctx_id: WebGLContextId) { - debug!("WebGLContext deleted (ctx_id: {:?})", ctx_id); - } -} - - -/// Wrapper to send WebVR commands used in `WebGLThread`. -struct WebVRRenderWrapper(Box); - -impl WebVRRenderHandler for WebVRRenderWrapper { - fn handle(&mut self, command: WebVRCommand, texture: Option<(u32, Size2D)>) { - self.0.handle(command, texture); - } -} diff --git a/collector/compile-benchmarks/style-servo/components/canvas/webgl_mode/mod.rs b/collector/compile-benchmarks/style-servo/components/canvas/webgl_mode/mod.rs deleted file mode 100644 index 660818fb0..000000000 --- a/collector/compile-benchmarks/style-servo/components/canvas/webgl_mode/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -mod inprocess; -pub use self::inprocess::WebGLThreads; diff --git a/collector/compile-benchmarks/style-servo/components/canvas/webgl_thread.rs b/collector/compile-benchmarks/style-servo/components/canvas/webgl_thread.rs deleted file mode 100644 index 2bb907795..000000000 --- a/collector/compile-benchmarks/style-servo/components/canvas/webgl_thread.rs +++ /dev/null @@ -1,1212 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use canvas_traits::canvas::byte_swap; -use canvas_traits::webgl::*; -use euclid::Size2D; -use fnv::FnvHashMap; -use gleam::gl; -use offscreen_gl_context::{GLContext, GLContextAttributes, GLLimits, NativeGLContextMethods}; -use std::thread; -use super::gl_context::{GLContextFactory, GLContextWrapper}; -use webrender; -use webrender_api; - -/// WebGL Threading API entry point that lives in the constellation. -/// It allows to get a WebGLThread handle for each script pipeline. -pub use ::webgl_mode::WebGLThreads; - -/// A WebGLThread manages the life cycle and message multiplexing of -/// a set of WebGLContexts living in the same thread. -pub struct WebGLThread { - /// Factory used to create a new GLContext shared with the WR/Main thread. - gl_factory: GLContextFactory, - /// Channel used to generate/update or delete `webrender_api::ImageKey`s. 
- webrender_api: webrender_api::RenderApi, - /// Map of live WebGLContexts. - contexts: FnvHashMap, - /// Cached information for WebGLContexts. - cached_context_info: FnvHashMap, - /// Current bound context. - bound_context_id: Option, - /// Id generator for new WebGLContexts. - next_webgl_id: usize, - /// Handler user to send WebVR commands. - webvr_compositor: Option, - /// Generic observer that listens WebGLContext creation, resize or removal events. - observer: OB, -} - -impl WebGLThread { - pub fn new(gl_factory: GLContextFactory, - webrender_api_sender: webrender_api::RenderApiSender, - webvr_compositor: Option, - observer: OB) -> Self { - WebGLThread { - gl_factory, - webrender_api: webrender_api_sender.create_api(), - contexts: Default::default(), - cached_context_info: Default::default(), - bound_context_id: None, - next_webgl_id: 0, - webvr_compositor, - observer: observer, - } - } - - /// Creates a new `WebGLThread` and returns a Sender to - /// communicate with it. - pub fn start(gl_factory: GLContextFactory, - webrender_api_sender: webrender_api::RenderApiSender, - webvr_compositor: Option, - observer: OB) - -> WebGLSender { - let (sender, receiver) = webgl_channel::().unwrap(); - let result = sender.clone(); - thread::Builder::new().name("WebGLThread".to_owned()).spawn(move || { - let mut renderer = WebGLThread::new(gl_factory, - webrender_api_sender, - webvr_compositor, - observer); - let webgl_chan = WebGLChan(sender); - loop { - let msg = receiver.recv().unwrap(); - let exit = renderer.handle_msg(msg, &webgl_chan); - if exit { - return; - } - } - }).expect("Thread spawning failed"); - - result - } - - /// Handles a generic WebGLMsg message - #[inline] - fn handle_msg(&mut self, msg: WebGLMsg, webgl_chan: &WebGLChan) -> bool { - match msg { - WebGLMsg::CreateContext(size, attributes, result_sender) => { - let result = self.create_webgl_context(size, attributes); - result_sender.send(result.map(|(id, limits, share_mode)| - WebGLCreateContextResult { - sender: WebGLMsgSender::new(id, webgl_chan.clone()), - limits: limits, - share_mode: share_mode, - } - )).unwrap(); - }, - WebGLMsg::ResizeContext(ctx_id, size, sender) => { - self.resize_webgl_context(ctx_id, size, sender); - }, - WebGLMsg::RemoveContext(ctx_id) => { - self.remove_webgl_context(ctx_id); - }, - WebGLMsg::WebGLCommand(ctx_id, command) => { - self.handle_webgl_command(ctx_id, command); - }, - WebGLMsg::WebVRCommand(ctx_id, command) => { - self.handle_webvr_command(ctx_id, command); - }, - WebGLMsg::Lock(ctx_id, sender) => { - self.handle_lock(ctx_id, sender); - }, - WebGLMsg::Unlock(ctx_id) => { - self.handle_unlock(ctx_id); - }, - WebGLMsg::UpdateWebRenderImage(ctx_id, sender) => { - self.handle_update_wr_image(ctx_id, sender); - }, - WebGLMsg::Exit => { - return true; - } - } - - false - } - - /// Handles a WebGLCommand for a specific WebGLContext - fn handle_webgl_command(&mut self, context_id: WebGLContextId, command: WebGLCommand) { - if let Some(ctx) = Self::make_current_if_needed(context_id, &self.contexts, &mut self.bound_context_id) { - ctx.apply_command(command); - } - } - - /// Handles a WebVRCommand for a specific WebGLContext - fn handle_webvr_command(&mut self, context_id: WebGLContextId, command: WebVRCommand) { - Self::make_current_if_needed(context_id, &self.contexts, &mut self.bound_context_id); - let texture = match command { - WebVRCommand::SubmitFrame(..) 
=> { - self.cached_context_info.get(&context_id) - }, - _ => None - }; - self.webvr_compositor.as_mut().unwrap().handle(command, texture.map(|t| (t.texture_id, t.size))); - } - - /// Handles a lock external callback received from webrender::ExternalImageHandler - fn handle_lock(&mut self, context_id: WebGLContextId, sender: WebGLSender<(u32, Size2D, usize)>) { - let ctx = Self::make_current_if_needed(context_id, &self.contexts, &mut self.bound_context_id) - .expect("WebGLContext not found in a WebGLMsg::Lock message"); - let info = self.cached_context_info.get_mut(&context_id).unwrap(); - // Insert a OpenGL Fence sync object that sends a signal when all the WebGL commands are finished. - // The related gl().wait_sync call is performed in the WR thread. See WebGLExternalImageApi for mor details. - let gl_sync = ctx.gl().fence_sync(gl::SYNC_GPU_COMMANDS_COMPLETE, 0); - info.gl_sync = Some(gl_sync); - // It is important that the fence sync is properly flushed into the GPU's command queue. - // Without proper flushing, the sync object may never be signaled. - ctx.gl().flush(); - - sender.send((info.texture_id, info.size, gl_sync as usize)).unwrap(); - } - - /// Handles an unlock external callback received from webrender::ExternalImageHandler - fn handle_unlock(&mut self, context_id: WebGLContextId) { - let ctx = Self::make_current_if_needed(context_id, &self.contexts, &mut self.bound_context_id) - .expect("WebGLContext not found in a WebGLMsg::Unlock message"); - let info = self.cached_context_info.get_mut(&context_id).unwrap(); - if let Some(gl_sync) = info.gl_sync.take() { - // Release the GLSync object. - ctx.gl().delete_sync(gl_sync); - } - } - - /// Creates a new WebGLContext - fn create_webgl_context(&mut self, - size: Size2D, - attributes: GLContextAttributes) - -> Result<(WebGLContextId, GLLimits, WebGLContextShareMode), String> { - // First try to create a shared context for the best performance. - // Fallback to readback mode if the shared context creation fails. - let result = self.gl_factory.new_shared_context(size, attributes) - .map(|r| (r, WebGLContextShareMode::SharedTexture)) - .or_else(|_| { - let ctx = self.gl_factory.new_context(size, attributes); - ctx.map(|r| (r, WebGLContextShareMode::Readback)) - }); - - // Creating a new GLContext may make the current bound context_id dirty. - // Clear it to ensure that make_current() is called in subsequent commands. - self.bound_context_id = None; - - match result { - Ok((ctx, share_mode)) => { - let id = WebGLContextId(self.next_webgl_id); - let (size, texture_id, limits) = ctx.get_info(); - self.next_webgl_id += 1; - self.contexts.insert(id, ctx); - self.cached_context_info.insert(id, WebGLContextInfo { - texture_id, - size, - alpha: attributes.alpha, - image_key: None, - share_mode, - gl_sync: None, - }); - - self.observer.on_context_create(id, texture_id, size); - - Ok((id, limits, share_mode)) - }, - Err(msg) => { - Err(msg.to_owned()) - } - } - } - - /// Resizes a WebGLContext - fn resize_webgl_context(&mut self, - context_id: WebGLContextId, - size: Size2D, - sender: WebGLSender>) { - let ctx = Self::make_current_if_needed_mut(context_id, &mut self.contexts, &mut self.bound_context_id); - match ctx.resize(size) { - Ok(_) => { - let (real_size, texture_id, _) = ctx.get_info(); - self.observer.on_context_resize(context_id, texture_id, real_size); - - let info = self.cached_context_info.get_mut(&context_id).unwrap(); - // Update webgl texture size. Texture id may change too. 
- info.texture_id = texture_id; - info.size = real_size; - // Update WR image if needed. Resize image updates are only required for SharedTexture mode. - // Readback mode already updates the image every frame to send the raw pixels. - // See `handle_update_wr_image`. - match (info.image_key, info.share_mode) { - (Some(image_key), WebGLContextShareMode::SharedTexture) => { - Self::update_wr_external_image(&self.webrender_api, - info.size, - info.alpha, - context_id, - image_key); - }, - _ => {} - } - - sender.send(Ok(())).unwrap(); - }, - Err(msg) => { - sender.send(Err(msg.into())).unwrap(); - } - } - } - - /// Removes a WebGLContext and releases attached resources. - fn remove_webgl_context(&mut self, context_id: WebGLContextId) { - // Release webrender image keys. - if let Some(info) = self.cached_context_info.remove(&context_id) { - let mut updates = webrender_api::ResourceUpdates::new(); - - if let Some(image_key) = info.image_key { - updates.delete_image(image_key); - } - - self.webrender_api.update_resources(updates) - } - - // Release GL context. - if self.contexts.remove(&context_id).is_some() { - self.observer.on_context_delete(context_id); - } - - // Removing a GLContext may make the current bound context_id dirty. - self.bound_context_id = None; - } - - /// Handles the creation/update of webrender_api::ImageKeys for a specific WebGLContext. - /// This method is invoked from a UpdateWebRenderImage message sent by the layout thread. - /// If SharedTexture is used the UpdateWebRenderImage message is sent only after a WebGLContext creation. - /// If Readback is used UpdateWebRenderImage message is sent always on each layout iteration in order to - /// submit the updated raw pixels. - fn handle_update_wr_image(&mut self, context_id: WebGLContextId, sender: WebGLSender) { - let info = self.cached_context_info.get_mut(&context_id).unwrap(); - let webrender_api = &self.webrender_api; - - let image_key = match info.share_mode { - WebGLContextShareMode::SharedTexture => { - let size = info.size; - let alpha = info.alpha; - // Reuse existing ImageKey or generate a new one. - // When using a shared texture ImageKeys are only generated after a WebGLContext creation. - *info.image_key.get_or_insert_with(|| { - Self::create_wr_external_image(webrender_api, size, alpha, context_id) - }) - }, - WebGLContextShareMode::Readback => { - let pixels = Self::raw_pixels(&self.contexts[&context_id], info.size); - match info.image_key.clone() { - Some(image_key) => { - // ImageKey was already created, but WR Images must - // be updated every frame in readback mode to send the new raw pixels. - Self::update_wr_readback_image(webrender_api, - info.size, - info.alpha, - image_key, - pixels); - - image_key - }, - None => { - // Generate a new ImageKey for Readback mode. - let image_key = Self::create_wr_readback_image(webrender_api, - info.size, - info.alpha, - pixels); - info.image_key = Some(image_key); - image_key - } - } - } - }; - - // Send the ImageKey to the Layout thread. - sender.send(image_key).unwrap(); - } - - /// Gets a reference to a GLContextWrapper for a given WebGLContextId and makes it current if required. 
- fn make_current_if_needed<'a>(context_id: WebGLContextId, - contexts: &'a FnvHashMap, - bound_id: &mut Option) -> Option<&'a GLContextWrapper> { - contexts.get(&context_id).and_then(|ctx| { - if Some(context_id) != *bound_id { - ctx.make_current(); - *bound_id = Some(context_id); - } - - Some(ctx) - }) - } - - /// Gets a mutable reference to a GLContextWrapper for a WebGLContextId and makes it current if required. - fn make_current_if_needed_mut<'a>(context_id: WebGLContextId, - contexts: &'a mut FnvHashMap, - bound_id: &mut Option) -> &'a mut GLContextWrapper { - let ctx = contexts.get_mut(&context_id).expect("WebGLContext not found!"); - if Some(context_id) != *bound_id { - ctx.make_current(); - *bound_id = Some(context_id); - } - ctx - } - - /// Creates a `webrender_api::ImageKey` that uses shared textures. - fn create_wr_external_image(webrender_api: &webrender_api::RenderApi, - size: Size2D, - alpha: bool, - context_id: WebGLContextId) -> webrender_api::ImageKey { - let descriptor = Self::image_descriptor(size, alpha); - let data = Self::external_image_data(context_id); - - let image_key = webrender_api.generate_image_key(); - let mut updates = webrender_api::ResourceUpdates::new(); - updates.add_image(image_key, - descriptor, - data, - None); - webrender_api.update_resources(updates); - - image_key - } - - /// Updates a `webrender_api::ImageKey` that uses shared textures. - fn update_wr_external_image(webrender_api: &webrender_api::RenderApi, - size: Size2D, - alpha: bool, - context_id: WebGLContextId, - image_key: webrender_api::ImageKey) { - let descriptor = Self::image_descriptor(size, alpha); - let data = Self::external_image_data(context_id); - - let mut updates = webrender_api::ResourceUpdates::new(); - updates.update_image(image_key, - descriptor, - data, - None); - webrender_api.update_resources(updates); - } - - /// Creates a `webrender_api::ImageKey` that uses raw pixels. - fn create_wr_readback_image(webrender_api: &webrender_api::RenderApi, - size: Size2D, - alpha: bool, - data: Vec) -> webrender_api::ImageKey { - let descriptor = Self::image_descriptor(size, alpha); - let data = webrender_api::ImageData::new(data); - - let image_key = webrender_api.generate_image_key(); - let mut updates = webrender_api::ResourceUpdates::new(); - updates.add_image(image_key, - descriptor, - data, - None); - webrender_api.update_resources(updates); - - image_key - } - - /// Updates a `webrender_api::ImageKey` that uses raw pixels. - fn update_wr_readback_image(webrender_api: &webrender_api::RenderApi, - size: Size2D, - alpha: bool, - image_key: webrender_api::ImageKey, - data: Vec) { - let descriptor = Self::image_descriptor(size, alpha); - let data = webrender_api::ImageData::new(data); - - let mut updates = webrender_api::ResourceUpdates::new(); - updates.update_image(image_key, - descriptor, - data, - None); - webrender_api.update_resources(updates); - } - - /// Helper function to create a `webrender_api::ImageDescriptor`. - fn image_descriptor(size: Size2D, alpha: bool) -> webrender_api::ImageDescriptor { - webrender_api::ImageDescriptor { - width: size.width as u32, - height: size.height as u32, - stride: None, - format: if alpha { webrender_api::ImageFormat::BGRA8 } else { webrender_api::ImageFormat::RGB8 }, - offset: 0, - is_opaque: !alpha, - } - } - - /// Helper function to create a `webrender_api::ImageData::External` instance. 
- fn external_image_data(context_id: WebGLContextId) -> webrender_api::ImageData { - let data = webrender_api::ExternalImageData { - id: webrender_api::ExternalImageId(context_id.0 as u64), - channel_index: 0, - image_type: webrender_api::ExternalImageType::Texture2DHandle, - }; - webrender_api::ImageData::External(data) - } - - /// Helper function to fetch the raw pixels used in readback mode. - fn raw_pixels(context: &GLContextWrapper, size: Size2D) -> Vec { - let width = size.width as usize; - let height = size.height as usize; - - let mut pixels = context.gl().read_pixels(0, 0, - size.width as gl::GLsizei, - size.height as gl::GLsizei, - gl::RGBA, gl::UNSIGNED_BYTE); - // flip image vertically (texture is upside down) - let orig_pixels = pixels.clone(); - let stride = width * 4; - for y in 0..height { - let dst_start = y * stride; - let src_start = (height - y - 1) * stride; - let src_slice = &orig_pixels[src_start .. src_start + stride]; - (&mut pixels[dst_start .. dst_start + stride]).clone_from_slice(&src_slice[..stride]); - } - byte_swap(&mut pixels); - pixels - } -} - -impl Drop for WebGLThread { - fn drop(&mut self) { - // Call remove_context functions in order to correctly delete WebRender image keys. - let context_ids: Vec = self.contexts.keys().map(|id| *id).collect(); - for id in context_ids { - self.remove_webgl_context(id); - } - } -} - -/// Helper struct to store cached WebGLContext information. -struct WebGLContextInfo { - /// Render to texture identifier used by the WebGLContext. - texture_id: u32, - /// Size of the WebGLContext. - size: Size2D, - /// True if the WebGLContext uses an alpha channel. - alpha: bool, - /// Currently used WebRender image key. - image_key: Option, - /// The sharing mode used to send the image to WebRender. - share_mode: WebGLContextShareMode, - /// GLSync Object used for a correct synchronization with Webrender external image callbacks. - gl_sync: Option, -} - -/// Trait used to observe events in a WebGL Thread. -/// Used in webrender::ExternalImageHandler when multiple WebGL threads are used. -pub trait WebGLThreadObserver: Send + 'static { - fn on_context_create(&mut self, ctx_id: WebGLContextId, texture_id: u32, size: Size2D); - fn on_context_resize(&mut self, ctx_id: WebGLContextId, texture_id: u32, size: Size2D); - fn on_context_delete(&mut self, ctx_id: WebGLContextId); -} - -/// This trait is used as a bridge between the `WebGLThreads` implementation and -/// the WR ExternalImageHandler API implemented in the `WebGLExternalImageHandler` struct. -/// `WebGLExternalImageHandler` takes care of type conversions between WR and WebGL info (e.g keys, uvs). -/// It uses this trait to notify lock/unlock messages and get the required info that WR needs. -/// `WebGLThreads` receives lock/unlock message notifications and takes care of sending -/// the unlock/lock messages to the appropiate `WebGLThread`. -pub trait WebGLExternalImageApi { - fn lock(&mut self, ctx_id: WebGLContextId) -> (u32, Size2D); - fn unlock(&mut self, ctx_id: WebGLContextId); -} - -/// WebRender External Image Handler implementation -pub struct WebGLExternalImageHandler { - handler: T, -} - -impl WebGLExternalImageHandler { - pub fn new(handler: T) -> Self { - Self { - handler: handler - } - } -} - -impl webrender::ExternalImageHandler for WebGLExternalImageHandler { - /// Lock the external image. Then, WR could start to read the image content. - /// The WR client should not change the image content until the unlock() call. 
- fn lock(&mut self, - key: webrender_api::ExternalImageId, - _channel_index: u8) -> webrender::ExternalImage { - let ctx_id = WebGLContextId(key.0 as _); - let (texture_id, size) = self.handler.lock(ctx_id); - - webrender::ExternalImage { - u0: 0.0, - u1: size.width as f32, - v1: 0.0, - v0: size.height as f32, - source: webrender::ExternalImageSource::NativeTexture(texture_id), - } - - } - /// Unlock the external image. The WR should not read the image content - /// after this call. - fn unlock(&mut self, - key: webrender_api::ExternalImageId, - _channel_index: u8) { - let ctx_id = WebGLContextId(key.0 as _); - self.handler.unlock(ctx_id); - } -} - -/// WebGL Commands Implementation -pub struct WebGLImpl; - -impl WebGLImpl { - pub fn apply(ctx: &GLContext, command: WebGLCommand) { - match command { - WebGLCommand::GetContextAttributes(sender) => - sender.send(*ctx.borrow_attributes()).unwrap(), - WebGLCommand::ActiveTexture(target) => - ctx.gl().active_texture(target), - WebGLCommand::AttachShader(program_id, shader_id) => - ctx.gl().attach_shader(program_id.get(), shader_id.get()), - WebGLCommand::DetachShader(program_id, shader_id) => - ctx.gl().detach_shader(program_id.get(), shader_id.get()), - WebGLCommand::BindAttribLocation(program_id, index, name) => - ctx.gl().bind_attrib_location(program_id.get(), index, &name), - WebGLCommand::BlendColor(r, g, b, a) => - ctx.gl().blend_color(r, g, b, a), - WebGLCommand::BlendEquation(mode) => - ctx.gl().blend_equation(mode), - WebGLCommand::BlendEquationSeparate(mode_rgb, mode_alpha) => - ctx.gl().blend_equation_separate(mode_rgb, mode_alpha), - WebGLCommand::BlendFunc(src, dest) => - ctx.gl().blend_func(src, dest), - WebGLCommand::BlendFuncSeparate(src_rgb, dest_rgb, src_alpha, dest_alpha) => - ctx.gl().blend_func_separate(src_rgb, dest_rgb, src_alpha, dest_alpha), - WebGLCommand::BufferData(buffer_type, data, usage) => - gl::buffer_data(ctx.gl(), buffer_type, &data, usage), - WebGLCommand::BufferSubData(buffer_type, offset, data) => - gl::buffer_sub_data(ctx.gl(), buffer_type, offset, &data), - WebGLCommand::Clear(mask) => - ctx.gl().clear(mask), - WebGLCommand::ClearColor(r, g, b, a) => - ctx.gl().clear_color(r, g, b, a), - WebGLCommand::ClearDepth(depth) => - ctx.gl().clear_depth(depth), - WebGLCommand::ClearStencil(stencil) => - ctx.gl().clear_stencil(stencil), - WebGLCommand::ColorMask(r, g, b, a) => - ctx.gl().color_mask(r, g, b, a), - WebGLCommand::CopyTexImage2D(target, level, internal_format, x, y, width, height, border) => - ctx.gl().copy_tex_image_2d(target, level, internal_format, x, y, width, height, border), - WebGLCommand::CopyTexSubImage2D(target, level, xoffset, yoffset, x, y, width, height) => - ctx.gl().copy_tex_sub_image_2d(target, level, xoffset, yoffset, x, y, width, height), - WebGLCommand::CullFace(mode) => - ctx.gl().cull_face(mode), - WebGLCommand::DepthFunc(func) => - ctx.gl().depth_func(func), - WebGLCommand::DepthMask(flag) => - ctx.gl().depth_mask(flag), - WebGLCommand::DepthRange(near, far) => - ctx.gl().depth_range(near, far), - WebGLCommand::Disable(cap) => - ctx.gl().disable(cap), - WebGLCommand::Enable(cap) => - ctx.gl().enable(cap), - WebGLCommand::FramebufferRenderbuffer(target, attachment, renderbuffertarget, rb) => - ctx.gl().framebuffer_renderbuffer(target, attachment, renderbuffertarget, - rb.map_or(0, WebGLRenderbufferId::get)), - WebGLCommand::FramebufferTexture2D(target, attachment, textarget, texture, level) => - ctx.gl().framebuffer_texture_2d(target, attachment, textarget, - texture.map_or(0, 
WebGLTextureId::get), level), - WebGLCommand::FrontFace(mode) => - ctx.gl().front_face(mode), - WebGLCommand::DisableVertexAttribArray(attrib_id) => - ctx.gl().disable_vertex_attrib_array(attrib_id), - WebGLCommand::DrawArrays(mode, first, count) => - ctx.gl().draw_arrays(mode, first, count), - WebGLCommand::DrawElements(mode, count, type_, offset) => - ctx.gl().draw_elements(mode, count, type_, offset as u32), - WebGLCommand::EnableVertexAttribArray(attrib_id) => - ctx.gl().enable_vertex_attrib_array(attrib_id), - WebGLCommand::Hint(name, val) => - ctx.gl().hint(name, val), - WebGLCommand::IsEnabled(cap, chan) => - chan.send(ctx.gl().is_enabled(cap) != 0).unwrap(), - WebGLCommand::LineWidth(width) => - ctx.gl().line_width(width), - WebGLCommand::PixelStorei(name, val) => - ctx.gl().pixel_store_i(name, val), - WebGLCommand::PolygonOffset(factor, units) => - ctx.gl().polygon_offset(factor, units), - WebGLCommand::ReadPixels(x, y, width, height, format, pixel_type, chan) => - Self::read_pixels(ctx.gl(), x, y, width, height, format, pixel_type, chan), - WebGLCommand::RenderbufferStorage(target, format, width, height) => - ctx.gl().renderbuffer_storage(target, format, width, height), - WebGLCommand::SampleCoverage(value, invert) => - ctx.gl().sample_coverage(value, invert), - WebGLCommand::Scissor(x, y, width, height) => - ctx.gl().scissor(x, y, width, height), - WebGLCommand::StencilFunc(func, ref_, mask) => - ctx.gl().stencil_func(func, ref_, mask), - WebGLCommand::StencilFuncSeparate(face, func, ref_, mask) => - ctx.gl().stencil_func_separate(face, func, ref_, mask), - WebGLCommand::StencilMask(mask) => - ctx.gl().stencil_mask(mask), - WebGLCommand::StencilMaskSeparate(face, mask) => - ctx.gl().stencil_mask_separate(face, mask), - WebGLCommand::StencilOp(fail, zfail, zpass) => - ctx.gl().stencil_op(fail, zfail, zpass), - WebGLCommand::StencilOpSeparate(face, fail, zfail, zpass) => - ctx.gl().stencil_op_separate(face, fail, zfail, zpass), - WebGLCommand::GetActiveAttrib(program_id, index, chan) => - Self::active_attrib(ctx.gl(), program_id, index, chan), - WebGLCommand::GetActiveUniform(program_id, index, chan) => - Self::active_uniform(ctx.gl(), program_id, index, chan), - WebGLCommand::GetAttribLocation(program_id, name, chan) => - Self::attrib_location(ctx.gl(), program_id, name, chan), - WebGLCommand::GetVertexAttrib(index, pname, chan) => - Self::vertex_attrib(ctx.gl(), index, pname, chan), - WebGLCommand::GetVertexAttribOffset(index, pname, chan) => - Self::vertex_attrib_offset(ctx.gl(), index, pname, chan), - WebGLCommand::GetBufferParameter(target, param_id, chan) => - Self::buffer_parameter(ctx.gl(), target, param_id, chan), - WebGLCommand::GetParameter(param_id, chan) => - Self::parameter(ctx.gl(), param_id, chan), - WebGLCommand::GetProgramParameter(program_id, param_id, chan) => - Self::program_parameter(ctx.gl(), program_id, param_id, chan), - WebGLCommand::GetShaderParameter(shader_id, param_id, chan) => - Self::shader_parameter(ctx.gl(), shader_id, param_id, chan), - WebGLCommand::GetShaderPrecisionFormat(shader_type, precision_type, chan) => - Self::shader_precision_format(ctx.gl(), shader_type, precision_type, chan), - WebGLCommand::GetExtensions(chan) => - Self::get_extensions(ctx.gl(), chan), - WebGLCommand::GetUniformLocation(program_id, name, chan) => - Self::uniform_location(ctx.gl(), program_id, name, chan), - WebGLCommand::GetShaderInfoLog(shader_id, chan) => - Self::shader_info_log(ctx.gl(), shader_id, chan), - WebGLCommand::GetProgramInfoLog(program_id, chan) => - 
Self::program_info_log(ctx.gl(), program_id, chan), - WebGLCommand::CompileShader(shader_id, source) => - Self::compile_shader(ctx.gl(), shader_id, source), - WebGLCommand::CreateBuffer(chan) => - Self::create_buffer(ctx.gl(), chan), - WebGLCommand::CreateFramebuffer(chan) => - Self::create_framebuffer(ctx.gl(), chan), - WebGLCommand::CreateRenderbuffer(chan) => - Self::create_renderbuffer(ctx.gl(), chan), - WebGLCommand::CreateTexture(chan) => - Self::create_texture(ctx.gl(), chan), - WebGLCommand::CreateProgram(chan) => - Self::create_program(ctx.gl(), chan), - WebGLCommand::CreateShader(shader_type, chan) => - Self::create_shader(ctx.gl(), shader_type, chan), - WebGLCommand::DeleteBuffer(id) => - ctx.gl().delete_buffers(&[id.get()]), - WebGLCommand::DeleteFramebuffer(id) => - ctx.gl().delete_framebuffers(&[id.get()]), - WebGLCommand::DeleteRenderbuffer(id) => - ctx.gl().delete_renderbuffers(&[id.get()]), - WebGLCommand::DeleteTexture(id) => - ctx.gl().delete_textures(&[id.get()]), - WebGLCommand::DeleteProgram(id) => - ctx.gl().delete_program(id.get()), - WebGLCommand::DeleteShader(id) => - ctx.gl().delete_shader(id.get()), - WebGLCommand::BindBuffer(target, id) => - ctx.gl().bind_buffer(target, id.map_or(0, WebGLBufferId::get)), - WebGLCommand::BindFramebuffer(target, request) => - Self::bind_framebuffer(ctx.gl(), target, request, ctx), - WebGLCommand::BindRenderbuffer(target, id) => - ctx.gl().bind_renderbuffer(target, id.map_or(0, WebGLRenderbufferId::get)), - WebGLCommand::BindTexture(target, id) => - ctx.gl().bind_texture(target, id.map_or(0, WebGLTextureId::get)), - WebGLCommand::LinkProgram(program_id) => - ctx.gl().link_program(program_id.get()), - WebGLCommand::Uniform1f(uniform_id, v) => - ctx.gl().uniform_1f(uniform_id, v), - WebGLCommand::Uniform1fv(uniform_id, v) => - ctx.gl().uniform_1fv(uniform_id, &v), - WebGLCommand::Uniform1i(uniform_id, v) => - ctx.gl().uniform_1i(uniform_id, v), - WebGLCommand::Uniform1iv(uniform_id, v) => - ctx.gl().uniform_1iv(uniform_id, &v), - WebGLCommand::Uniform2f(uniform_id, x, y) => - ctx.gl().uniform_2f(uniform_id, x, y), - WebGLCommand::Uniform2fv(uniform_id, v) => - ctx.gl().uniform_2fv(uniform_id, &v), - WebGLCommand::Uniform2i(uniform_id, x, y) => - ctx.gl().uniform_2i(uniform_id, x, y), - WebGLCommand::Uniform2iv(uniform_id, v) => - ctx.gl().uniform_2iv(uniform_id, &v), - WebGLCommand::Uniform3f(uniform_id, x, y, z) => - ctx.gl().uniform_3f(uniform_id, x, y, z), - WebGLCommand::Uniform3fv(uniform_id, v) => - ctx.gl().uniform_3fv(uniform_id, &v), - WebGLCommand::Uniform3i(uniform_id, x, y, z) => - ctx.gl().uniform_3i(uniform_id, x, y, z), - WebGLCommand::Uniform3iv(uniform_id, v) => - ctx.gl().uniform_3iv(uniform_id, &v), - WebGLCommand::Uniform4f(uniform_id, x, y, z, w) => - ctx.gl().uniform_4f(uniform_id, x, y, z, w), - WebGLCommand::Uniform4fv(uniform_id, v) => - ctx.gl().uniform_4fv(uniform_id, &v), - WebGLCommand::Uniform4i(uniform_id, x, y, z, w) => - ctx.gl().uniform_4i(uniform_id, x, y, z, w), - WebGLCommand::Uniform4iv(uniform_id, v) => - ctx.gl().uniform_4iv(uniform_id, &v), - WebGLCommand::UniformMatrix2fv(uniform_id, transpose, v) => - ctx.gl().uniform_matrix_2fv(uniform_id, transpose, &v), - WebGLCommand::UniformMatrix3fv(uniform_id, transpose, v) => - ctx.gl().uniform_matrix_3fv(uniform_id, transpose, &v), - WebGLCommand::UniformMatrix4fv(uniform_id, transpose, v) => - ctx.gl().uniform_matrix_4fv(uniform_id, transpose, &v), - WebGLCommand::UseProgram(program_id) => - ctx.gl().use_program(program_id.get()), - 
WebGLCommand::ValidateProgram(program_id) => - ctx.gl().validate_program(program_id.get()), - WebGLCommand::VertexAttrib(attrib_id, x, y, z, w) => - ctx.gl().vertex_attrib_4f(attrib_id, x, y, z, w), - WebGLCommand::VertexAttribPointer2f(attrib_id, size, normalized, stride, offset) => - ctx.gl().vertex_attrib_pointer_f32(attrib_id, size, normalized, stride, offset), - WebGLCommand::VertexAttribPointer(attrib_id, size, data_type, normalized, stride, offset) => - ctx.gl().vertex_attrib_pointer(attrib_id, size, data_type, normalized, stride, offset), - WebGLCommand::Viewport(x, y, width, height) => - ctx.gl().viewport(x, y, width, height), - WebGLCommand::TexImage2D(target, level, internal, width, height, format, data_type, data) => - ctx.gl().tex_image_2d(target, level, internal, width, height, - /*border*/0, format, data_type, Some(&data)), - WebGLCommand::TexParameteri(target, name, value) => - ctx.gl().tex_parameter_i(target, name, value), - WebGLCommand::TexParameterf(target, name, value) => - ctx.gl().tex_parameter_f(target, name, value), - WebGLCommand::TexSubImage2D(target, level, xoffset, yoffset, x, y, width, height, data) => - ctx.gl().tex_sub_image_2d(target, level, xoffset, yoffset, x, y, width, height, &data), - WebGLCommand::DrawingBufferWidth(sender) => - sender.send(ctx.borrow_draw_buffer().unwrap().size().width).unwrap(), - WebGLCommand::DrawingBufferHeight(sender) => - sender.send(ctx.borrow_draw_buffer().unwrap().size().height).unwrap(), - WebGLCommand::Finish(sender) => - Self::finish(ctx.gl(), sender), - WebGLCommand::Flush => - ctx.gl().flush(), - WebGLCommand::GenerateMipmap(target) => - ctx.gl().generate_mipmap(target), - WebGLCommand::CreateVertexArray(chan) => - Self::create_vertex_array(ctx.gl(), chan), - WebGLCommand::DeleteVertexArray(id) => - ctx.gl().delete_vertex_arrays(&[id.get()]), - WebGLCommand::BindVertexArray(id) => - ctx.gl().bind_vertex_array(id.map_or(0, WebGLVertexArrayId::get)), - } - - // TODO: update test expectations in order to enable debug assertions - //if cfg!(debug_assertions) { - let error = ctx.gl().get_error(); - assert!(error == gl::NO_ERROR, "Unexpected WebGL error: 0x{:x} ({})", error, error); - //} - } - - fn read_pixels(gl: &gl::Gl, x: i32, y: i32, width: i32, height: i32, format: u32, pixel_type: u32, - chan: WebGLSender>) { - let result = gl.read_pixels(x, y, width, height, format, pixel_type); - chan.send(result).unwrap() - } - - fn active_attrib(gl: &gl::Gl, - program_id: WebGLProgramId, - index: u32, - chan: WebGLSender>) { - let result = if index >= gl.get_program_iv(program_id.get(), gl::ACTIVE_ATTRIBUTES) as u32 { - Err(WebGLError::InvalidValue) - } else { - Ok(gl.get_active_attrib(program_id.get(), index)) - }; - chan.send(result).unwrap(); - } - - fn active_uniform(gl: &gl::Gl, - program_id: WebGLProgramId, - index: u32, - chan: WebGLSender>) { - let result = if index >= gl.get_program_iv(program_id.get(), gl::ACTIVE_UNIFORMS) as u32 { - Err(WebGLError::InvalidValue) - } else { - Ok(gl.get_active_uniform(program_id.get(), index)) - }; - chan.send(result).unwrap(); - } - - fn attrib_location(gl: &gl::Gl, - program_id: WebGLProgramId, - name: String, - chan: WebGLSender> ) { - let attrib_location = gl.get_attrib_location(program_id.get(), &name); - - let attrib_location = if attrib_location == -1 { - None - } else { - Some(attrib_location) - }; - - chan.send(attrib_location).unwrap(); - } - - fn parameter(gl: &gl::Gl, - param_id: u32, - chan: WebGLSender>) { - let result = match param_id { - gl::ACTIVE_TEXTURE | - 
gl::ALPHA_BITS | - gl::BLEND_DST_ALPHA | - gl::BLEND_DST_RGB | - gl::BLEND_EQUATION_ALPHA | - gl::BLEND_EQUATION_RGB | - gl::BLEND_SRC_ALPHA | - gl::BLEND_SRC_RGB | - gl::BLUE_BITS | - gl::CULL_FACE_MODE | - gl::DEPTH_BITS | - gl::DEPTH_FUNC | - gl::FRONT_FACE | - //gl::GENERATE_MIPMAP_HINT | - gl::GREEN_BITS | - //gl::IMPLEMENTATION_COLOR_READ_FORMAT | - //gl::IMPLEMENTATION_COLOR_READ_TYPE | - gl::MAX_COMBINED_TEXTURE_IMAGE_UNITS | - gl::MAX_CUBE_MAP_TEXTURE_SIZE | - //gl::MAX_FRAGMENT_UNIFORM_VECTORS | - gl::MAX_RENDERBUFFER_SIZE | - gl::MAX_TEXTURE_IMAGE_UNITS | - gl::MAX_TEXTURE_SIZE | - //gl::MAX_VARYING_VECTORS | - gl::MAX_VERTEX_ATTRIBS | - gl::MAX_VERTEX_TEXTURE_IMAGE_UNITS | - //gl::MAX_VERTEX_UNIFORM_VECTORS | - gl::PACK_ALIGNMENT | - gl::RED_BITS | - gl::SAMPLE_BUFFERS | - gl::SAMPLES | - gl::STENCIL_BACK_FAIL | - gl::STENCIL_BACK_FUNC | - gl::STENCIL_BACK_PASS_DEPTH_FAIL | - gl::STENCIL_BACK_PASS_DEPTH_PASS | - gl::STENCIL_BACK_REF | - gl::STENCIL_BACK_VALUE_MASK | - gl::STENCIL_BACK_WRITEMASK | - gl::STENCIL_BITS | - gl::STENCIL_CLEAR_VALUE | - gl::STENCIL_FAIL | - gl::STENCIL_FUNC | - gl::STENCIL_PASS_DEPTH_FAIL | - gl::STENCIL_PASS_DEPTH_PASS | - gl::STENCIL_REF | - gl::STENCIL_VALUE_MASK | - gl::STENCIL_WRITEMASK | - gl::SUBPIXEL_BITS | - gl::UNPACK_ALIGNMENT | - gl::FRAGMENT_SHADER_DERIVATIVE_HINT => - //gl::UNPACK_COLORSPACE_CONVERSION_WEBGL => - Ok(WebGLParameter::Int(gl.get_integer_v(param_id))), - - gl::BLEND | - gl::CULL_FACE | - gl::DEPTH_TEST | - gl::DEPTH_WRITEMASK | - gl::DITHER | - gl::POLYGON_OFFSET_FILL | - gl::SAMPLE_COVERAGE_INVERT | - gl::STENCIL_TEST => - //gl::UNPACK_FLIP_Y_WEBGL | - //gl::UNPACK_PREMULTIPLY_ALPHA_WEBGL => - Ok(WebGLParameter::Bool(gl.get_boolean_v(param_id) != 0)), - - gl::DEPTH_CLEAR_VALUE | - gl::LINE_WIDTH | - gl::POLYGON_OFFSET_FACTOR | - gl::POLYGON_OFFSET_UNITS | - gl::SAMPLE_COVERAGE_VALUE => - Ok(WebGLParameter::Float(gl.get_float_v(param_id))), - - gl::VERSION => Ok(WebGLParameter::String("WebGL 1.0".to_owned())), - gl::RENDERER | - gl::VENDOR => Ok(WebGLParameter::String("Mozilla/Servo".to_owned())), - gl::SHADING_LANGUAGE_VERSION => Ok(WebGLParameter::String("WebGL GLSL ES 1.0".to_owned())), - - // TODO(zbarsky, emilio): Implement support for the following valid parameters - // Float32Array - gl::ALIASED_LINE_WIDTH_RANGE | - //gl::ALIASED_POINT_SIZE_RANGE | - //gl::BLEND_COLOR | - gl::COLOR_CLEAR_VALUE | - gl::DEPTH_RANGE | - - // WebGLBuffer - gl::ARRAY_BUFFER_BINDING | - gl::ELEMENT_ARRAY_BUFFER_BINDING | - - // WebGLFrameBuffer - gl::FRAMEBUFFER_BINDING | - - // WebGLRenderBuffer - gl::RENDERBUFFER_BINDING | - - // WebGLProgram - gl::CURRENT_PROGRAM | - - // WebGLTexture - gl::TEXTURE_BINDING_2D | - gl::TEXTURE_BINDING_CUBE_MAP | - - // sequence - gl::COLOR_WRITEMASK | - - // Uint32Array - gl::COMPRESSED_TEXTURE_FORMATS | - - // Int32Array - gl::MAX_VIEWPORT_DIMS | - gl::SCISSOR_BOX | - gl::VIEWPORT => Err(WebGLError::InvalidEnum), - - // Invalid parameters - _ => Err(WebGLError::InvalidEnum) - }; - - chan.send(result).unwrap(); - } - - fn finish(gl: &gl::Gl, chan: WebGLSender<()>) { - gl.finish(); - chan.send(()).unwrap(); - } - - fn vertex_attrib(gl: &gl::Gl, - index: u32, - pname: u32, - chan: WebGLSender>) { - let result = if index >= gl.get_integer_v(gl::MAX_VERTEX_ATTRIBS) as u32 { - Err(WebGLError::InvalidValue) - } else { - match pname { - gl::VERTEX_ATTRIB_ARRAY_ENABLED | - gl::VERTEX_ATTRIB_ARRAY_NORMALIZED => - Ok(WebGLParameter::Bool(gl.get_vertex_attrib_iv(index, pname) != 0)), - gl::VERTEX_ATTRIB_ARRAY_SIZE | 
- gl::VERTEX_ATTRIB_ARRAY_STRIDE | - gl::VERTEX_ATTRIB_ARRAY_TYPE => - Ok(WebGLParameter::Int(gl.get_vertex_attrib_iv(index, pname))), - gl::CURRENT_VERTEX_ATTRIB => - Ok(WebGLParameter::FloatArray(gl.get_vertex_attrib_fv(index, pname))), - // gl::VERTEX_ATTRIB_ARRAY_BUFFER_BINDING should return WebGLBuffer - _ => Err(WebGLError::InvalidEnum), - } - }; - - chan.send(result).unwrap(); - } - - fn vertex_attrib_offset(gl: &gl::Gl, - index: u32, - pname: u32, - chan: WebGLSender>) { - let result = match pname { - gl::VERTEX_ATTRIB_ARRAY_POINTER => Ok(gl.get_vertex_attrib_pointer_v(index, pname)), - _ => Err(WebGLError::InvalidEnum), - }; - - chan.send(result).unwrap(); - } - - fn buffer_parameter(gl: &gl::Gl, - target: u32, - param_id: u32, - chan: WebGLSender>) { - let result = match param_id { - gl::BUFFER_SIZE | - gl::BUFFER_USAGE => - Ok(WebGLParameter::Int(gl.get_buffer_parameter_iv(target, param_id))), - _ => Err(WebGLError::InvalidEnum), - }; - - chan.send(result).unwrap(); - } - - fn program_parameter(gl: &gl::Gl, - program_id: WebGLProgramId, - param_id: u32, - chan: WebGLSender>) { - let result = match param_id { - gl::DELETE_STATUS | - gl::LINK_STATUS | - gl::VALIDATE_STATUS => - Ok(WebGLParameter::Bool(gl.get_program_iv(program_id.get(), param_id) != 0)), - gl::ATTACHED_SHADERS | - gl::ACTIVE_ATTRIBUTES | - gl::ACTIVE_UNIFORMS => - Ok(WebGLParameter::Int(gl.get_program_iv(program_id.get(), param_id))), - _ => Err(WebGLError::InvalidEnum), - }; - - chan.send(result).unwrap(); - } - - fn shader_parameter(gl: &gl::Gl, - shader_id: WebGLShaderId, - param_id: u32, - chan: WebGLSender>) { - let result = match param_id { - gl::SHADER_TYPE => - Ok(WebGLParameter::Int(gl.get_shader_iv(shader_id.get(), param_id))), - gl::DELETE_STATUS | - gl::COMPILE_STATUS => - Ok(WebGLParameter::Bool(gl.get_shader_iv(shader_id.get(), param_id) != 0)), - _ => Err(WebGLError::InvalidEnum), - }; - - chan.send(result).unwrap(); - } - - fn shader_precision_format(gl: &gl::Gl, - shader_type: u32, - precision_type: u32, - chan: WebGLSender>) { - let result = match precision_type { - gl::LOW_FLOAT | - gl::MEDIUM_FLOAT | - gl::HIGH_FLOAT | - gl::LOW_INT | - gl::MEDIUM_INT | - gl::HIGH_INT => { - Ok(gl.get_shader_precision_format(shader_type, precision_type)) - }, - _=> { - Err(WebGLError::InvalidEnum) - } - }; - - chan.send(result).unwrap(); - } - - fn get_extensions(gl: &gl::Gl, chan: WebGLSender) { - chan.send(gl.get_string(gl::EXTENSIONS)).unwrap(); - } - - fn uniform_location(gl: &gl::Gl, - program_id: WebGLProgramId, - name: String, - chan: WebGLSender>) { - let location = gl.get_uniform_location(program_id.get(), &name); - let location = if location == -1 { - None - } else { - Some(location) - }; - - chan.send(location).unwrap(); - } - - - fn shader_info_log(gl: &gl::Gl, shader_id: WebGLShaderId, chan: WebGLSender) { - let log = gl.get_shader_info_log(shader_id.get()); - chan.send(log).unwrap(); - } - - fn program_info_log(gl: &gl::Gl, program_id: WebGLProgramId, chan: WebGLSender) { - let log = gl.get_program_info_log(program_id.get()); - chan.send(log).unwrap(); - } - - #[allow(unsafe_code)] - fn create_buffer(gl: &gl::Gl, chan: WebGLSender>) { - let buffer = gl.gen_buffers(1)[0]; - let buffer = if buffer == 0 { - None - } else { - Some(unsafe { WebGLBufferId::new(buffer) }) - }; - chan.send(buffer).unwrap(); - } - - #[allow(unsafe_code)] - fn create_framebuffer(gl: &gl::Gl, chan: WebGLSender>) { - let framebuffer = gl.gen_framebuffers(1)[0]; - let framebuffer = if framebuffer == 0 { - None - } else { - 
Some(unsafe { WebGLFramebufferId::new(framebuffer) }) - }; - chan.send(framebuffer).unwrap(); - } - - #[allow(unsafe_code)] - fn create_renderbuffer(gl: &gl::Gl, chan: WebGLSender>) { - let renderbuffer = gl.gen_renderbuffers(1)[0]; - let renderbuffer = if renderbuffer == 0 { - None - } else { - Some(unsafe { WebGLRenderbufferId::new(renderbuffer) }) - }; - chan.send(renderbuffer).unwrap(); - } - - #[allow(unsafe_code)] - fn create_texture(gl: &gl::Gl, chan: WebGLSender>) { - let texture = gl.gen_textures(1)[0]; - let texture = if texture == 0 { - None - } else { - Some(unsafe { WebGLTextureId::new(texture) }) - }; - chan.send(texture).unwrap(); - } - - #[allow(unsafe_code)] - fn create_program(gl: &gl::Gl, chan: WebGLSender>) { - let program = gl.create_program(); - let program = if program == 0 { - None - } else { - Some(unsafe { WebGLProgramId::new(program) }) - }; - chan.send(program).unwrap(); - } - - #[allow(unsafe_code)] - fn create_shader(gl: &gl::Gl, shader_type: u32, chan: WebGLSender>) { - let shader = gl.create_shader(shader_type); - let shader = if shader == 0 { - None - } else { - Some(unsafe { WebGLShaderId::new(shader) }) - }; - chan.send(shader).unwrap(); - } - - #[allow(unsafe_code)] - fn create_vertex_array(gl: &gl::Gl, chan: WebGLSender>) { - let vao = gl.gen_vertex_arrays(1)[0]; - let vao = if vao == 0 { - None - } else { - Some(unsafe { WebGLVertexArrayId::new(vao) }) - }; - chan.send(vao).unwrap(); - } - - #[inline] - fn bind_framebuffer(gl: &gl::Gl, - target: u32, - request: WebGLFramebufferBindingRequest, - ctx: &GLContext) { - let id = match request { - WebGLFramebufferBindingRequest::Explicit(id) => id.get(), - WebGLFramebufferBindingRequest::Default => - ctx.borrow_draw_buffer().unwrap().get_framebuffer(), - }; - - gl.bind_framebuffer(target, id); - } - - - #[inline] - fn compile_shader(gl: &gl::Gl, shader_id: WebGLShaderId, source: String) { - gl.shader_source(shader_id.get(), &[source.as_bytes()]); - gl.compile_shader(shader_id.get()); - } -} diff --git a/collector/compile-benchmarks/style-servo/components/canvas_traits/Cargo.toml b/collector/compile-benchmarks/style-servo/components/canvas_traits/Cargo.toml deleted file mode 100644 index 091c01dc6..000000000 --- a/collector/compile-benchmarks/style-servo/components/canvas_traits/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -name = "canvas_traits" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "canvas_traits" -path = "lib.rs" - -[dependencies] -cssparser = "0.22.0" -euclid = "0.15" -heapsize = "0.4" -heapsize_derive = "0.1" -ipc-channel = "0.8" -lazy_static = "0.2" -offscreen_gl_context = { version = "0.11", features = ["serde"] } -serde = "1.0" -servo_config = {path = "../config"} -webrender_api = {git = "https://github.com/servo/webrender", features = ["ipc"]} diff --git a/collector/compile-benchmarks/style-servo/components/canvas_traits/canvas.rs b/collector/compile-benchmarks/style-servo/components/canvas_traits/canvas.rs deleted file mode 100644 index 06194052b..000000000 --- a/collector/compile-benchmarks/style-servo/components/canvas_traits/canvas.rs +++ /dev/null @@ -1,411 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -use cssparser::RGBA; -use euclid::{Transform2D, Point2D, Vector2D, Rect, Size2D}; -use ipc_channel::ipc::IpcSender; -use std::default::Default; -use std::str::FromStr; -use webrender_api; - -#[derive(Clone, Deserialize, Serialize)] -pub enum FillRule { - Nonzero, - Evenodd, -} - -#[derive(Clone, Deserialize, Serialize)] -pub enum CanvasMsg { - Canvas2d(Canvas2dMsg), - FromLayout(FromLayoutMsg), - FromScript(FromScriptMsg), - Recreate(Size2D), - Close, -} - -#[derive(Clone, Deserialize, Serialize)] -pub struct CanvasImageData { - pub image_key: webrender_api::ImageKey, -} - -#[derive(Clone, Deserialize, Serialize)] -pub enum Canvas2dMsg { - Arc(Point2D, f32, f32, f32, bool), - ArcTo(Point2D, Point2D, f32), - DrawImage(Vec, Size2D, Rect, Rect, bool), - DrawImageSelf(Size2D, Rect, Rect, bool), - DrawImageInOther( - IpcSender, Size2D, Rect, Rect, bool, IpcSender<()>), - BeginPath, - BezierCurveTo(Point2D, Point2D, Point2D), - ClearRect(Rect), - Clip, - ClosePath, - Ellipse(Point2D, f32, f32, f32, f32, f32, bool), - Fill, - FillText(String, f64, f64, Option), - FillRect(Rect), - GetImageData(Rect, Size2D, IpcSender>), - IsPointInPath(f64, f64, FillRule, IpcSender), - LineTo(Point2D), - MoveTo(Point2D), - PutImageData(Vec, Vector2D, Size2D, Rect), - QuadraticCurveTo(Point2D, Point2D), - Rect(Rect), - RestoreContext, - SaveContext, - StrokeRect(Rect), - Stroke, - SetFillStyle(FillOrStrokeStyle), - SetStrokeStyle(FillOrStrokeStyle), - SetLineWidth(f32), - SetLineCap(LineCapStyle), - SetLineJoin(LineJoinStyle), - SetMiterLimit(f32), - SetGlobalAlpha(f32), - SetGlobalComposition(CompositionOrBlending), - SetTransform(Transform2D), - SetShadowOffsetX(f64), - SetShadowOffsetY(f64), - SetShadowBlur(f64), - SetShadowColor(RGBA), -} - -#[derive(Clone, Deserialize, Serialize)] -pub enum FromLayoutMsg { - SendData(IpcSender), -} - -#[derive(Clone, Deserialize, Serialize)] -pub enum FromScriptMsg { - SendPixels(IpcSender>>), -} - -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct CanvasGradientStop { - pub offset: f64, - pub color: RGBA, -} - -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct LinearGradientStyle { - pub x0: f64, - pub y0: f64, - pub x1: f64, - pub y1: f64, - pub stops: Vec -} - -impl LinearGradientStyle { - pub fn new(x0: f64, y0: f64, x1: f64, y1: f64, stops: Vec) - -> LinearGradientStyle { - LinearGradientStyle { - x0: x0, - y0: y0, - x1: x1, - y1: y1, - stops: stops, - } - } -} - -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct RadialGradientStyle { - pub x0: f64, - pub y0: f64, - pub r0: f64, - pub x1: f64, - pub y1: f64, - pub r1: f64, - pub stops: Vec -} - -impl RadialGradientStyle { - pub fn new(x0: f64, y0: f64, r0: f64, x1: f64, y1: f64, r1: f64, stops: Vec) - -> RadialGradientStyle { - RadialGradientStyle { - x0: x0, - y0: y0, - r0: r0, - x1: x1, - y1: y1, - r1: r1, - stops: stops, - } - } -} - -#[derive(Clone, Deserialize, Serialize)] -pub struct SurfaceStyle { - pub surface_data: Vec, - pub surface_size: Size2D, - pub repeat_x: bool, - pub repeat_y: bool, -} - -impl SurfaceStyle { - pub fn new(surface_data: Vec, surface_size: Size2D, repeat_x: bool, repeat_y: bool) - -> SurfaceStyle { - SurfaceStyle { - surface_data: surface_data, - surface_size: surface_size, - repeat_x: repeat_x, - repeat_y: repeat_y, - } - } -} - - -#[derive(Clone, Deserialize, Serialize)] -pub enum FillOrStrokeStyle { - Color(RGBA), - LinearGradient(LinearGradientStyle), - RadialGradient(RadialGradientStyle), - Surface(SurfaceStyle), -} - -#[derive(Clone, 
Copy, Deserialize, HeapSizeOf, PartialEq, Serialize)] -pub enum LineCapStyle { - Butt = 0, - Round = 1, - Square = 2, -} - -impl FromStr for LineCapStyle { - type Err = (); - - fn from_str(string: &str) -> Result { - match string { - "butt" => Ok(LineCapStyle::Butt), - "round" => Ok(LineCapStyle::Round), - "square" => Ok(LineCapStyle::Square), - _ => Err(()), - } - } -} - -#[derive(Clone, Copy, Deserialize, HeapSizeOf, PartialEq, Serialize)] -pub enum LineJoinStyle { - Round = 0, - Bevel = 1, - Miter = 2, -} - -impl FromStr for LineJoinStyle { - type Err = (); - - fn from_str(string: &str) -> Result { - match string { - "round" => Ok(LineJoinStyle::Round), - "bevel" => Ok(LineJoinStyle::Bevel), - "miter" => Ok(LineJoinStyle::Miter), - _ => Err(()), - } - } -} - -#[derive(Clone, Copy, Deserialize, PartialEq, Serialize)] -pub enum RepetitionStyle { - Repeat, - RepeatX, - RepeatY, - NoRepeat, -} - -impl FromStr for RepetitionStyle { - type Err = (); - - fn from_str(string: &str) -> Result { - match string { - "repeat" => Ok(RepetitionStyle::Repeat), - "repeat-x" => Ok(RepetitionStyle::RepeatX), - "repeat-y" => Ok(RepetitionStyle::RepeatY), - "no-repeat" => Ok(RepetitionStyle::NoRepeat), - _ => Err(()), - } - } -} - -#[derive(Clone, Copy, Deserialize, HeapSizeOf, PartialEq, Serialize)] -pub enum CompositionStyle { - SrcIn, - SrcOut, - SrcOver, - SrcAtop, - DestIn, - DestOut, - DestOver, - DestAtop, - Copy, - Lighter, - Xor, -} - -impl FromStr for CompositionStyle { - type Err = (); - - fn from_str(string: &str) -> Result { - match string { - "source-in" => Ok(CompositionStyle::SrcIn), - "source-out" => Ok(CompositionStyle::SrcOut), - "source-over" => Ok(CompositionStyle::SrcOver), - "source-atop" => Ok(CompositionStyle::SrcAtop), - "destination-in" => Ok(CompositionStyle::DestIn), - "destination-out" => Ok(CompositionStyle::DestOut), - "destination-over" => Ok(CompositionStyle::DestOver), - "destination-atop" => Ok(CompositionStyle::DestAtop), - "copy" => Ok(CompositionStyle::Copy), - "lighter" => Ok(CompositionStyle::Lighter), - "xor" => Ok(CompositionStyle::Xor), - _ => Err(()) - } - } -} - -impl CompositionStyle { - pub fn to_str(&self) -> &str { - match *self { - CompositionStyle::SrcIn => "source-in", - CompositionStyle::SrcOut => "source-out", - CompositionStyle::SrcOver => "source-over", - CompositionStyle::SrcAtop => "source-atop", - CompositionStyle::DestIn => "destination-in", - CompositionStyle::DestOut => "destination-out", - CompositionStyle::DestOver => "destination-over", - CompositionStyle::DestAtop => "destination-atop", - CompositionStyle::Copy => "copy", - CompositionStyle::Lighter => "lighter", - CompositionStyle::Xor => "xor", - } - } -} - -#[derive(Clone, Copy, Deserialize, HeapSizeOf, PartialEq, Serialize)] -pub enum BlendingStyle { - Multiply, - Screen, - Overlay, - Darken, - Lighten, - ColorDodge, - ColorBurn, - HardLight, - SoftLight, - Difference, - Exclusion, - Hue, - Saturation, - Color, - Luminosity, -} - -impl FromStr for BlendingStyle { - type Err = (); - - fn from_str(string: &str) -> Result { - match string { - "multiply" => Ok(BlendingStyle::Multiply), - "screen" => Ok(BlendingStyle::Screen), - "overlay" => Ok(BlendingStyle::Overlay), - "darken" => Ok(BlendingStyle::Darken), - "lighten" => Ok(BlendingStyle::Lighten), - "color-dodge" => Ok(BlendingStyle::ColorDodge), - "color-burn" => Ok(BlendingStyle::ColorBurn), - "hard-light" => Ok(BlendingStyle::HardLight), - "soft-light" => Ok(BlendingStyle::SoftLight), - "difference" => Ok(BlendingStyle::Difference), - 
"exclusion" => Ok(BlendingStyle::Exclusion), - "hue" => Ok(BlendingStyle::Hue), - "saturation" => Ok(BlendingStyle::Saturation), - "color" => Ok(BlendingStyle::Color), - "luminosity" => Ok(BlendingStyle::Luminosity), - _ => Err(()) - } - } -} - -impl BlendingStyle { - pub fn to_str(&self) -> &str { - match *self { - BlendingStyle::Multiply => "multiply", - BlendingStyle::Screen => "screen", - BlendingStyle::Overlay => "overlay", - BlendingStyle::Darken => "darken", - BlendingStyle::Lighten => "lighten", - BlendingStyle::ColorDodge => "color-dodge", - BlendingStyle::ColorBurn => "color-burn", - BlendingStyle::HardLight => "hard-light", - BlendingStyle::SoftLight => "soft-light", - BlendingStyle::Difference => "difference", - BlendingStyle::Exclusion => "exclusion", - BlendingStyle::Hue => "hue", - BlendingStyle::Saturation => "saturation", - BlendingStyle::Color => "color", - BlendingStyle::Luminosity => "luminosity", - } - } -} - -#[derive(Clone, Copy, Deserialize, HeapSizeOf, PartialEq, Serialize)] -pub enum CompositionOrBlending { - Composition(CompositionStyle), - Blending(BlendingStyle), -} - -impl Default for CompositionOrBlending { - fn default() -> CompositionOrBlending { - CompositionOrBlending::Composition(CompositionStyle::SrcOver) - } -} - -impl FromStr for CompositionOrBlending { - type Err = (); - - fn from_str(string: &str) -> Result { - if let Ok(op) = CompositionStyle::from_str(string) { - return Ok(CompositionOrBlending::Composition(op)); - } - - if let Ok(op) = BlendingStyle::from_str(string) { - return Ok(CompositionOrBlending::Blending(op)); - } - - Err(()) - } -} - -// TODO(pcwalton): Speed up with SIMD, or better yet, find some way to not do this. -pub fn byte_swap(data: &mut [u8]) { - let length = data.len(); - // FIXME(rust #27741): Range::step_by is not stable yet as of this writing. - let mut i = 0; - while i < length { - let r = data[i + 2]; - data[i + 2] = data[i + 0]; - data[i + 0] = r; - i += 4; - } -} - -pub fn multiply_u8_pixel(a: u8, b: u8) -> u8 { - return (a as u32 * b as u32 / 255) as u8; -} - -pub fn byte_swap_and_premultiply(data: &mut [u8]) { - let length = data.len(); - - let mut i = 0; - while i < length { - let r = data[i + 2]; - let g = data[i + 1]; - let b = data[i + 0]; - let a = data[i + 3]; - - data[i + 0] = multiply_u8_pixel(r, a); - data[i + 1] = multiply_u8_pixel(g, a); - data[i + 2] = multiply_u8_pixel(b, a); - - i += 4; - } -} diff --git a/collector/compile-benchmarks/style-servo/components/canvas_traits/lib.rs b/collector/compile-benchmarks/style-servo/components/canvas_traits/lib.rs deleted file mode 100644 index 7830d669e..000000000 --- a/collector/compile-benchmarks/style-servo/components/canvas_traits/lib.rs +++ /dev/null @@ -1,25 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -#![crate_name = "canvas_traits"] -#![crate_type = "rlib"] -#![feature(nonzero)] - -#![deny(unsafe_code)] - -extern crate core; -extern crate cssparser; -extern crate euclid; -extern crate heapsize; -#[macro_use] extern crate heapsize_derive; -extern crate ipc_channel; -#[macro_use] extern crate lazy_static; -extern crate offscreen_gl_context; -#[macro_use] extern crate serde; -extern crate servo_config; -extern crate webrender_api; - -pub mod canvas; -pub mod webgl; -mod webgl_channel; diff --git a/collector/compile-benchmarks/style-servo/components/canvas_traits/webgl.rs b/collector/compile-benchmarks/style-servo/components/canvas_traits/webgl.rs deleted file mode 100644 index 92a66013f..000000000 --- a/collector/compile-benchmarks/style-servo/components/canvas_traits/webgl.rs +++ /dev/null @@ -1,506 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use core::nonzero::NonZero; -use euclid::Size2D; -use offscreen_gl_context::{GLContextAttributes, GLLimits}; -use std::fmt; -use webrender_api; - -/// Sender type used in WebGLCommands. -pub use ::webgl_channel::WebGLSender; -/// Receiver type used in WebGLCommands. -pub use ::webgl_channel::WebGLReceiver; -/// Result type for send()/recv() calls in in WebGLCommands. -pub use ::webgl_channel::WebGLSendResult; -/// Helper function that creates a WebGL channel (WebGLSender, WebGLReceiver) to be used in WebGLCommands. -pub use ::webgl_channel::webgl_channel; -/// Entry point type used in a Script Pipeline to get the WebGLChan to be used in that thread. -pub use ::webgl_channel::WebGLPipeline; -/// Entry point channel type used for sending WebGLMsg messages to the WebGL renderer. -pub use ::webgl_channel::WebGLChan; - -/// WebGL Message API -#[derive(Clone, Deserialize, Serialize)] -pub enum WebGLMsg { - /// Creates a new WebGLContext. - CreateContext(Size2D, GLContextAttributes, WebGLSender>), - /// Resizes a WebGLContext. - ResizeContext(WebGLContextId, Size2D, WebGLSender>), - /// Drops a WebGLContext. - RemoveContext(WebGLContextId), - /// Runs a WebGLCommand in a specific WebGLContext. - WebGLCommand(WebGLContextId, WebGLCommand), - /// Runs a WebVRCommand in a specific WebGLContext. - WebVRCommand(WebGLContextId, WebVRCommand), - /// Locks a specific WebGLContext. Lock messages are used for a correct synchronization - /// with WebRender external image API. - /// WR locks a external texture when it wants to use the shared texture contents. - /// The WR client should not change the shared texture content until the Unlock call. - /// Currently OpenGL Sync Objects are used to implement the synchronization mechanism. - Lock(WebGLContextId, WebGLSender<(u32, Size2D, usize)>), - /// Unlocks a specific WebGLContext. Unlock messages are used for a correct synchronization - /// with WebRender external image API. - /// The WR unlocks a context when it finished reading the shared texture contents. - /// Unlock messages are always sent after a Lock message. - Unlock(WebGLContextId), - /// Creates or updates the image keys required for WebRender. - UpdateWebRenderImage(WebGLContextId, WebGLSender), - /// Frees all resources and closes the thread. 
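// Sketch (hypothetical names, std::sync::mpsc instead of the real WebGL channel types): the
// Lock/Unlock messages documented above form a request/response handshake. The WebRender
// side asks the WebGL thread to lock a context, waits on a reply channel for the shared
// texture info, reads the texture, and only then sends Unlock. A minimal in-process
// analogue of that protocol could look like this:
use std::sync::mpsc;
use std::thread;

struct ContextId(usize);

enum Msg {
    // The reply carries (texture id, width, height) for the locked context.
    Lock(ContextId, mpsc::Sender<(u32, u32, u32)>),
    Unlock(ContextId),
}

fn main() {
    let (to_webgl, webgl_rx) = mpsc::channel();

    // The "WebGL thread": owns the textures and answers Lock requests.
    let webgl = thread::spawn(move || {
        for msg in webgl_rx {
            match msg {
                Msg::Lock(ContextId(id), reply) => {
                    // In Servo this is where a GL sync object would be issued.
                    reply.send((42, 800, 600)).unwrap();
                    println!("locked context {}", id);
                }
                Msg::Unlock(ContextId(id)) => println!("unlocked context {}", id),
            }
        }
    });

    // The "WebRender side": lock, use the shared texture, then unlock.
    let (reply_tx, reply_rx) = mpsc::channel();
    to_webgl.send(Msg::Lock(ContextId(0), reply_tx)).unwrap();
    let (texture, w, h) = reply_rx.recv().unwrap();
    println!("using texture {} ({}x{})", texture, w, h);
    to_webgl.send(Msg::Unlock(ContextId(0))).unwrap();

    drop(to_webgl); // close the channel so the WebGL thread exits
    webgl.join().unwrap();
}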
- Exit, -} - -/// Contains the WebGLCommand sender and information about a WebGLContext -#[derive(Clone, Deserialize, Serialize)] -pub struct WebGLCreateContextResult { - /// Sender instance to send commands to the specific WebGLContext - pub sender: WebGLMsgSender, - /// Information about the internal GL Context. - pub limits: GLLimits, - /// How the WebGLContext is shared with WebRender. - pub share_mode: WebGLContextShareMode, -} - -#[derive(Clone, Copy, Deserialize, HeapSizeOf, Serialize)] -pub enum WebGLContextShareMode { - /// Fast: a shared texture_id is used in WebRender. - SharedTexture, - /// Slow: glReadPixels is used to send pixels to WebRender each frame. - Readback, -} - -/// Helper struct to send WebGLCommands to a specific WebGLContext. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct WebGLMsgSender { - ctx_id: WebGLContextId, - #[ignore_heap_size_of = "channels are hard"] - sender: WebGLChan, -} - -impl WebGLMsgSender { - pub fn new(id: WebGLContextId, sender: WebGLChan) -> Self { - WebGLMsgSender { - ctx_id: id, - sender: sender, - } - } - - /// Send a WebGLCommand message - #[inline] - pub fn send(&self, command: WebGLCommand) -> WebGLSendResult { - self.sender.send(WebGLMsg::WebGLCommand(self.ctx_id, command)) - } - - /// Send a WebVRCommand message - #[inline] - pub fn send_vr(&self, command: WebVRCommand) -> WebGLSendResult { - self.sender.send(WebGLMsg::WebVRCommand(self.ctx_id, command)) - } - - /// Send a resize message - #[inline] - pub fn send_resize(&self, - size: Size2D, - sender: WebGLSender>) - -> WebGLSendResult { - self.sender.send(WebGLMsg::ResizeContext(self.ctx_id, size, sender)) - } - - #[inline] - pub fn send_remove(&self) -> WebGLSendResult { - self.sender.send(WebGLMsg::RemoveContext(self.ctx_id)) - } - - #[inline] - pub fn send_update_wr_image(&self, sender: WebGLSender) -> WebGLSendResult { - self.sender.send(WebGLMsg::UpdateWebRenderImage(self.ctx_id, sender)) - } -} - -/// WebGL Commands for a specific WebGLContext -#[derive(Clone, Deserialize, Serialize)] -pub enum WebGLCommand { - GetContextAttributes(WebGLSender), - ActiveTexture(u32), - BlendColor(f32, f32, f32, f32), - BlendEquation(u32), - BlendEquationSeparate(u32, u32), - BlendFunc(u32, u32), - BlendFuncSeparate(u32, u32, u32, u32), - AttachShader(WebGLProgramId, WebGLShaderId), - DetachShader(WebGLProgramId, WebGLShaderId), - BindAttribLocation(WebGLProgramId, u32, String), - BufferData(u32, Vec, u32), - BufferSubData(u32, isize, Vec), - Clear(u32), - ClearColor(f32, f32, f32, f32), - ClearDepth(f64), - ClearStencil(i32), - ColorMask(bool, bool, bool, bool), - CullFace(u32), - FrontFace(u32), - DepthFunc(u32), - DepthMask(bool), - DepthRange(f64, f64), - Enable(u32), - Disable(u32), - CompileShader(WebGLShaderId, String), - CopyTexImage2D(u32, i32, u32, i32, i32, i32, i32, i32), - CopyTexSubImage2D(u32, i32, i32, i32, i32, i32, i32, i32), - CreateBuffer(WebGLSender>), - CreateFramebuffer(WebGLSender>), - CreateRenderbuffer(WebGLSender>), - CreateTexture(WebGLSender>), - CreateProgram(WebGLSender>), - CreateShader(u32, WebGLSender>), - DeleteBuffer(WebGLBufferId), - DeleteFramebuffer(WebGLFramebufferId), - DeleteRenderbuffer(WebGLRenderbufferId), - DeleteTexture(WebGLTextureId), - DeleteProgram(WebGLProgramId), - DeleteShader(WebGLShaderId), - BindBuffer(u32, Option), - BindFramebuffer(u32, WebGLFramebufferBindingRequest), - BindRenderbuffer(u32, Option), - BindTexture(u32, Option), - DisableVertexAttribArray(u32), - DrawArrays(u32, i32, i32), - DrawElements(u32, i32, 
u32, i64), - EnableVertexAttribArray(u32), - FramebufferRenderbuffer(u32, u32, u32, Option), - FramebufferTexture2D(u32, u32, u32, Option, i32), - GetBufferParameter(u32, u32, WebGLSender>), - GetExtensions(WebGLSender), - GetParameter(u32, WebGLSender>), - GetProgramParameter(WebGLProgramId, u32, WebGLSender>), - GetShaderParameter(WebGLShaderId, u32, WebGLSender>), - GetShaderPrecisionFormat(u32, u32, WebGLSender>), - GetActiveAttrib(WebGLProgramId, u32, WebGLSender>), - GetActiveUniform(WebGLProgramId, u32, WebGLSender>), - GetAttribLocation(WebGLProgramId, String, WebGLSender>), - GetUniformLocation(WebGLProgramId, String, WebGLSender>), - GetVertexAttrib(u32, u32, WebGLSender>), - GetVertexAttribOffset(u32, u32, WebGLSender>), - GetShaderInfoLog(WebGLShaderId, WebGLSender), - GetProgramInfoLog(WebGLProgramId, WebGLSender), - PolygonOffset(f32, f32), - RenderbufferStorage(u32, u32, i32, i32), - ReadPixels(i32, i32, i32, i32, u32, u32, WebGLSender>), - SampleCoverage(f32, bool), - Scissor(i32, i32, i32, i32), - StencilFunc(u32, i32, u32), - StencilFuncSeparate(u32, u32, i32, u32), - StencilMask(u32), - StencilMaskSeparate(u32, u32), - StencilOp(u32, u32, u32), - StencilOpSeparate(u32, u32, u32, u32), - Hint(u32, u32), - IsEnabled(u32, WebGLSender), - LineWidth(f32), - PixelStorei(u32, i32), - LinkProgram(WebGLProgramId), - Uniform1f(i32, f32), - Uniform1fv(i32, Vec), - Uniform1i(i32, i32), - Uniform1iv(i32, Vec), - Uniform2f(i32, f32, f32), - Uniform2fv(i32, Vec), - Uniform2i(i32, i32, i32), - Uniform2iv(i32, Vec), - Uniform3f(i32, f32, f32, f32), - Uniform3fv(i32, Vec), - Uniform3i(i32, i32, i32, i32), - Uniform3iv(i32, Vec), - Uniform4f(i32, f32, f32, f32, f32), - Uniform4fv(i32, Vec), - Uniform4i(i32, i32, i32, i32, i32), - Uniform4iv(i32, Vec), - UniformMatrix2fv(i32, bool, Vec), - UniformMatrix3fv(i32, bool, Vec), - UniformMatrix4fv(i32, bool, Vec), - UseProgram(WebGLProgramId), - ValidateProgram(WebGLProgramId), - VertexAttrib(u32, f32, f32, f32, f32), - VertexAttribPointer(u32, i32, u32, bool, i32, u32), - VertexAttribPointer2f(u32, i32, bool, i32, u32), - Viewport(i32, i32, i32, i32), - TexImage2D(u32, i32, i32, i32, i32, u32, u32, Vec), - TexParameteri(u32, u32, i32), - TexParameterf(u32, u32, f32), - TexSubImage2D(u32, i32, i32, i32, i32, i32, u32, u32, Vec), - DrawingBufferWidth(WebGLSender), - DrawingBufferHeight(WebGLSender), - Finish(WebGLSender<()>), - Flush, - GenerateMipmap(u32), - CreateVertexArray(WebGLSender>), - DeleteVertexArray(WebGLVertexArrayId), - BindVertexArray(Option), -} - -macro_rules! define_resource_id_struct { - ($name:ident) => { - #[derive(Clone, Copy, Eq, Hash, PartialEq)] - pub struct $name(NonZero); - - impl $name { - #[allow(unsafe_code)] - #[inline] - pub unsafe fn new(id: u32) -> Self { - $name(NonZero::new_unchecked(id)) - } - - #[inline] - pub fn get(self) -> u32 { - self.0.get() - } - } - - }; -} - -macro_rules! 
define_resource_id { - ($name:ident) => { - define_resource_id_struct!($name); - - #[allow(unsafe_code)] - impl<'de> ::serde::Deserialize<'de> for $name { - fn deserialize(deserializer: D) -> Result - where D: ::serde::Deserializer<'de> - { - let id = try!(u32::deserialize(deserializer)); - if id == 0 { - Err(::serde::de::Error::custom("expected a non-zero value")) - } else { - Ok(unsafe { $name::new(id) }) - } - } - } - - impl ::serde::Serialize for $name { - fn serialize(&self, serializer: S) -> Result - where S: ::serde::Serializer - { - self.get().serialize(serializer) - } - } - - impl ::std::fmt::Debug for $name { - fn fmt(&self, fmt: &mut ::std::fmt::Formatter) - -> Result<(), ::std::fmt::Error> { - fmt.debug_tuple(stringify!($name)) - .field(&self.get()) - .finish() - } - } - - impl ::std::fmt::Display for $name { - fn fmt(&self, fmt: &mut ::std::fmt::Formatter) - -> Result<(), ::std::fmt::Error> { - write!(fmt, "{}", self.get()) - } - } - - impl ::heapsize::HeapSizeOf for $name { - fn heap_size_of_children(&self) -> usize { 0 } - } - } -} - -define_resource_id!(WebGLBufferId); -define_resource_id!(WebGLFramebufferId); -define_resource_id!(WebGLRenderbufferId); -define_resource_id!(WebGLTextureId); -define_resource_id!(WebGLProgramId); -define_resource_id!(WebGLShaderId); -define_resource_id!(WebGLVertexArrayId); - -#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize)] -pub struct WebGLContextId(pub usize); - -impl ::heapsize::HeapSizeOf for WebGLContextId { - fn heap_size_of_children(&self) -> usize { 0 } -} - -#[derive(Clone, Copy, Debug, Deserialize, PartialEq, Serialize)] -pub enum WebGLError { - InvalidEnum, - InvalidFramebufferOperation, - InvalidOperation, - InvalidValue, - OutOfMemory, - ContextLost, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub enum WebGLFramebufferBindingRequest { - Explicit(WebGLFramebufferId), - Default, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub enum WebGLParameter { - Int(i32), - Bool(bool), - String(String), - Float(f32), - FloatArray(Vec), - Invalid, -} - -pub type WebGLResult = Result; - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub enum WebGLShaderParameter { - Int(i32), - Bool(bool), - Invalid, -} - -pub type WebVRDeviceId = u32; - -// WebVR commands that must be called in the WebGL render thread. -#[derive(Clone, Deserialize, Serialize)] -pub enum WebVRCommand { - /// Start presenting to a VR device. - Create(WebVRDeviceId), - /// Synchronize the pose information to be used in the frame. - SyncPoses(WebVRDeviceId, f64, f64, WebGLSender, ()>>), - /// Submit the frame to a VR device using the specified texture coordinates. - SubmitFrame(WebVRDeviceId, [f32; 4], [f32; 4]), - /// Stop presenting to a VR device - Release(WebVRDeviceId) -} - -// Trait object that handles WebVR commands. -// Receives the texture id and size associated to the WebGLContext. -pub trait WebVRRenderHandler: Send { - fn handle(&mut self, command: WebVRCommand, texture: Option<(u32, Size2D)>); -} - -impl fmt::Debug for WebGLCommand { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - use self::WebGLCommand::*; - let name = match *self { - GetContextAttributes(..) => "GetContextAttributes", - ActiveTexture(..) => "ActiveTexture", - BlendColor(..) => "BlendColor", - BlendEquation(..) => "BlendEquation", - BlendEquationSeparate(..) => "BlendEquationSeparate", - BlendFunc(..) => "BlendFunc", - BlendFuncSeparate(..) => "BlendFuncSeparate", - AttachShader(..) => "AttachShader", - DetachShader(..) 
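// Modern-Rust analogue (not the deleted macro itself): the `define_resource_id!` macros
// above wrap each GL resource handle in a NonZero integer so that Option<Id> costs no
// extra space, and their hand-written Deserialize rejects zero. With today's
// std::num::NonZeroU32 and the serde crate (already a dependency of the original code),
// the same idea for a single id type looks roughly like this:
use serde::de::Error;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::num::NonZeroU32;

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct WebGLBufferId(NonZeroU32);

impl WebGLBufferId {
    pub fn new(id: u32) -> Option<Self> {
        NonZeroU32::new(id).map(WebGLBufferId)
    }
    pub fn get(self) -> u32 {
        self.0.get()
    }
}

impl Serialize for WebGLBufferId {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        self.get().serialize(serializer)
    }
}

impl<'de> Deserialize<'de> for WebGLBufferId {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let id = u32::deserialize(deserializer)?;
        WebGLBufferId::new(id).ok_or_else(|| D::Error::custom("expected a non-zero value"))
    }
}

fn main() {
    // Option<WebGLBufferId> is the same size as a bare u32 thanks to the NonZeroU32
    // niche, which is the point of the original NonZero wrapper.
    assert_eq!(
        std::mem::size_of::<Option<WebGLBufferId>>(),
        std::mem::size_of::<u32>()
    );
}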
=> "DetachShader", - BindAttribLocation(..) => "BindAttribLocation", - BufferData(..) => "BufferData", - BufferSubData(..) => "BufferSubData", - Clear(..) => "Clear", - ClearColor(..) => "ClearColor", - ClearDepth(..) => "ClearDepth", - ClearStencil(..) => "ClearStencil", - ColorMask(..) => "ColorMask", - CopyTexImage2D(..) => "CopyTexImage2D", - CopyTexSubImage2D(..) => "CopyTexSubImage2D", - CullFace(..) => "CullFace", - FrontFace(..) => "FrontFace", - DepthFunc(..) => "DepthFunc", - DepthMask(..) => "DepthMask", - DepthRange(..) => "DepthRange", - Enable(..) => "Enable", - Disable(..) => "Disable", - CompileShader(..) => "CompileShader", - CreateBuffer(..) => "CreateBuffer", - CreateFramebuffer(..) => "CreateFramebuffer", - CreateRenderbuffer(..) => "CreateRenderbuffer", - CreateTexture(..) => "CreateTexture", - CreateProgram(..) => "CreateProgram", - CreateShader(..) => "CreateShader", - DeleteBuffer(..) => "DeleteBuffer", - DeleteFramebuffer(..) => "DeleteFramebuffer", - DeleteRenderbuffer(..) => "DeleteRenderBuffer", - DeleteTexture(..) => "DeleteTexture", - DeleteProgram(..) => "DeleteProgram", - DeleteShader(..) => "DeleteShader", - BindBuffer(..) => "BindBuffer", - BindFramebuffer(..) => "BindFramebuffer", - BindRenderbuffer(..) => "BindRenderbuffer", - BindTexture(..) => "BindTexture", - DisableVertexAttribArray(..) => "DisableVertexAttribArray", - DrawArrays(..) => "DrawArrays", - DrawElements(..) => "DrawElements", - EnableVertexAttribArray(..) => "EnableVertexAttribArray", - FramebufferRenderbuffer(..) => "FramebufferRenderbuffer", - FramebufferTexture2D(..) => "FramebufferTexture2D", - GetBufferParameter(..) => "GetBufferParameter", - GetExtensions(..) => "GetExtensions", - GetParameter(..) => "GetParameter", - GetProgramParameter(..) => "GetProgramParameter", - GetShaderParameter(..) => "GetShaderParameter", - GetShaderPrecisionFormat(..) => "GetShaderPrecisionFormat", - GetActiveAttrib(..) => "GetActiveAttrib", - GetActiveUniform(..) => "GetActiveUniform", - GetAttribLocation(..) => "GetAttribLocation", - GetUniformLocation(..) => "GetUniformLocation", - GetShaderInfoLog(..) => "GetShaderInfoLog", - GetProgramInfoLog(..) => "GetProgramInfoLog", - GetVertexAttrib(..) => "GetVertexAttrib", - GetVertexAttribOffset(..) => "GetVertexAttribOffset", - PolygonOffset(..) => "PolygonOffset", - ReadPixels(..) => "ReadPixels", - RenderbufferStorage(..) => "RenderbufferStorage", - SampleCoverage(..) => "SampleCoverage", - Scissor(..) => "Scissor", - StencilFunc(..) => "StencilFunc", - StencilFuncSeparate(..) => "StencilFuncSeparate", - StencilMask(..) => "StencilMask", - StencilMaskSeparate(..) => "StencilMaskSeparate", - StencilOp(..) => "StencilOp", - StencilOpSeparate(..) => "StencilOpSeparate", - Hint(..) => "Hint", - IsEnabled(..) => "IsEnabled", - LineWidth(..) => "LineWidth", - PixelStorei(..) => "PixelStorei", - LinkProgram(..) => "LinkProgram", - Uniform1f(..) => "Uniform1f", - Uniform1fv(..) => "Uniform1fv", - Uniform1i(..) => "Uniform1i", - Uniform1iv(..) => "Uniform1iv", - Uniform2f(..) => "Uniform2f", - Uniform2fv(..) => "Uniform2fv", - Uniform2i(..) => "Uniform2i", - Uniform2iv(..) => "Uniform2iv", - Uniform3f(..) => "Uniform3f", - Uniform3fv(..) => "Uniform3fv", - Uniform3i(..) => "Uniform3i", - Uniform3iv(..) => "Uniform3iv", - Uniform4f(..) => "Uniform4f", - Uniform4fv(..) => "Uniform4fv", - Uniform4i(..) => "Uniform4i", - Uniform4iv(..) => "Uniform4iv", - UniformMatrix2fv(..) => "UniformMatrix2fv", - UniformMatrix3fv(..) => "UniformMatrix3fv", - UniformMatrix4fv(..) 
=> "UniformMatrix4fv", - UseProgram(..) => "UseProgram", - ValidateProgram(..) => "ValidateProgram", - VertexAttrib(..) => "VertexAttrib", - VertexAttribPointer2f(..) => "VertexAttribPointer2f", - VertexAttribPointer(..) => "VertexAttribPointer", - Viewport(..) => "Viewport", - TexImage2D(..) => "TexImage2D", - TexParameteri(..) => "TexParameteri", - TexParameterf(..) => "TexParameterf", - TexSubImage2D(..) => "TexSubImage2D", - DrawingBufferWidth(..) => "DrawingBufferWidth", - DrawingBufferHeight(..) => "DrawingBufferHeight", - Finish(..) => "Finish", - Flush => "Flush", - GenerateMipmap(..) => "GenerateMipmap", - CreateVertexArray(..) => "CreateVertexArray", - DeleteVertexArray(..) => "DeleteVertexArray", - BindVertexArray(..) => "BindVertexArray" - }; - - write!(f, "CanvasWebGLMsg::{}(..)", name) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/canvas_traits/webgl_channel/ipc.rs b/collector/compile-benchmarks/style-servo/components/canvas_traits/webgl_channel/ipc.rs deleted file mode 100644 index ac3020bbc..000000000 --- a/collector/compile-benchmarks/style-servo/components/canvas_traits/webgl_channel/ipc.rs +++ /dev/null @@ -1,15 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use ipc_channel; -use serde::{Deserialize, Serialize}; -use std::io; - -pub type WebGLSender = ipc_channel::ipc::IpcSender; -pub type WebGLReceiver = ipc_channel::ipc::IpcReceiver; - -pub fn webgl_channel Deserialize<'de>>() - -> Result<(WebGLSender, WebGLReceiver), io::Error> { - ipc_channel::ipc::channel() -} diff --git a/collector/compile-benchmarks/style-servo/components/canvas_traits/webgl_channel/mod.rs b/collector/compile-benchmarks/style-servo/components/canvas_traits/webgl_channel/mod.rs deleted file mode 100644 index 1ac4ce15c..000000000 --- a/collector/compile-benchmarks/style-servo/components/canvas_traits/webgl_channel/mod.rs +++ /dev/null @@ -1,87 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! Enum wrappers to be able to select different channel implementations at runtime. - -mod ipc; -mod mpsc; - -use ::webgl::WebGLMsg; -use serde::{Deserialize, Serialize}; -use servo_config::opts; - -lazy_static! 
{ - static ref IS_MULTIPROCESS: bool = { - opts::multiprocess() - }; -} - -#[derive(Clone, Deserialize, Serialize)] -pub enum WebGLSender { - Ipc(ipc::WebGLSender), - Mpsc(mpsc::WebGLSender), -} - -impl WebGLSender { - #[inline] - pub fn send(&self, msg: T) -> WebGLSendResult { - match *self { - WebGLSender::Ipc(ref sender) => { - sender.send(msg).map_err(|_| ()) - }, - WebGLSender::Mpsc(ref sender) => { - sender.send(msg).map_err(|_| ()) - } - } - } -} - -pub type WebGLSendResult = Result<(), ()>; - -pub enum WebGLReceiver where T: for<'de> Deserialize<'de> + Serialize { - Ipc(ipc::WebGLReceiver), - Mpsc(mpsc::WebGLReceiver), -} - -impl WebGLReceiver where T: for<'de> Deserialize<'de> + Serialize { - pub fn recv(&self) -> Result { - match *self { - WebGLReceiver::Ipc(ref receiver) => { - receiver.recv().map_err(|_| ()) - }, - WebGLReceiver::Mpsc(ref receiver) => { - receiver.recv().map_err(|_| ()) - } - } - } -} - -pub fn webgl_channel() -> Result<(WebGLSender, WebGLReceiver), ()> - where T: for<'de> Deserialize<'de> + Serialize { - if *IS_MULTIPROCESS { - ipc::webgl_channel().map(|(tx, rx)| (WebGLSender::Ipc(tx), WebGLReceiver::Ipc(rx))) - .map_err(|_| ()) - } else { - mpsc::webgl_channel().map(|(tx, rx)| (WebGLSender::Mpsc(tx), WebGLReceiver::Mpsc(rx))) - } -} - -#[derive(Clone, Deserialize, Serialize)] -pub struct WebGLChan(pub WebGLSender); - -impl WebGLChan { - #[inline] - pub fn send(&self, msg: WebGLMsg) -> WebGLSendResult { - self.0.send(msg) - } -} - -#[derive(Clone, Deserialize, Serialize)] -pub struct WebGLPipeline(pub WebGLChan); - -impl WebGLPipeline { - pub fn channel(&self) -> WebGLChan { - self.0.clone() - } -} diff --git a/collector/compile-benchmarks/style-servo/components/canvas_traits/webgl_channel/mpsc.rs b/collector/compile-benchmarks/style-servo/components/canvas_traits/webgl_channel/mpsc.rs deleted file mode 100644 index b0fe29241..000000000 --- a/collector/compile-benchmarks/style-servo/components/canvas_traits/webgl_channel/mpsc.rs +++ /dev/null @@ -1,51 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use serde::{Deserialize, Serialize}; -use serde::{Deserializer, Serializer}; -use std::sync::mpsc; - -#[macro_use] -macro_rules! 
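// Simplified, std-only sketch of the pattern described in the module comment above: the
// webgl_channel module wraps two concrete channel implementations (ipc-channel in
// multiprocess mode, in-process mpsc otherwise) in one enum so callers see a single sender
// type and a single Result<(), ()>. Here two std channel flavours stand in for the real
// backends, and the `multiprocess` flag stands in for the IS_MULTIPROCESS switch.
use std::sync::mpsc;

pub type SendResult = Result<(), ()>;

pub enum Sender<T> {
    Async(mpsc::Sender<T>),
    Bounded(mpsc::SyncSender<T>),
}

impl<T> Sender<T> {
    pub fn send(&self, msg: T) -> SendResult {
        // Each backend has its own error type; collapse both to (), as WebGLSendResult does.
        match *self {
            Sender::Async(ref s) => s.send(msg).map_err(|_| ()),
            Sender::Bounded(ref s) => s.send(msg).map_err(|_| ()),
        }
    }
}

pub fn channel<T>(multiprocess: bool) -> (Sender<T>, mpsc::Receiver<T>) {
    if multiprocess {
        let (tx, rx) = mpsc::sync_channel(16);
        (Sender::Bounded(tx), rx)
    } else {
        let (tx, rx) = mpsc::channel();
        (Sender::Async(tx), rx)
    }
}

fn main() {
    let (tx, rx) = channel::<&str>(false);
    tx.send("hello from the unified sender").unwrap();
    println!("{}", rx.recv().unwrap());
}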
unreachable_serializable { - ($name:ident) => { - impl Serialize for $name { - fn serialize(&self, _: S) -> Result { - unreachable!(); - } - } - - impl<'a, T> Deserialize<'a> for $name { - fn deserialize(_: D) -> Result<$name, D::Error> - where D: Deserializer<'a> { - unreachable!(); - } - } - }; -} - -#[derive(Clone)] -pub struct WebGLSender(mpsc::Sender); -pub struct WebGLReceiver(mpsc::Receiver); - -impl WebGLSender { - #[inline] - pub fn send(&self, data: T) -> Result<(), mpsc::SendError> { - self.0.send(data) - } -} - -impl WebGLReceiver { - #[inline] - pub fn recv(&self) -> Result { - self.0.recv() - } -} - -pub fn webgl_channel() -> Result<(WebGLSender, WebGLReceiver), ()> { - let (sender, receiver) = mpsc::channel(); - Ok((WebGLSender(sender), WebGLReceiver(receiver))) -} - -unreachable_serializable!(WebGLReceiver); -unreachable_serializable!(WebGLSender); diff --git a/collector/compile-benchmarks/style-servo/components/compositing/Cargo.toml b/collector/compile-benchmarks/style-servo/components/compositing/Cargo.toml deleted file mode 100644 index 40e5b724f..000000000 --- a/collector/compile-benchmarks/style-servo/components/compositing/Cargo.toml +++ /dev/null @@ -1,29 +0,0 @@ -[package] -name = "compositing" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "compositing" -path = "lib.rs" - -[dependencies] -euclid = "0.15" -gfx_traits = {path = "../gfx_traits"} -gleam = "0.4" -image = "0.16" -ipc-channel = "0.8" -log = "0.3.5" -msg = {path = "../msg"} -net_traits = {path = "../net_traits"} -profile_traits = {path = "../profile_traits"} -script_traits = {path = "../script_traits"} -servo_config = {path = "../config"} -servo_geometry = {path = "../geometry", features = ["servo"]} -servo_url = {path = "../url"} -style_traits = {path = "../style_traits"} -time = "0.1.17" -webrender = {git = "https://github.com/servo/webrender"} -webrender_api = {git = "https://github.com/servo/webrender", features = ["ipc"]} diff --git a/collector/compile-benchmarks/style-servo/components/compositing/compositor.rs b/collector/compile-benchmarks/style-servo/components/compositing/compositor.rs deleted file mode 100644 index 35e8a9340..000000000 --- a/collector/compile-benchmarks/style-servo/components/compositing/compositor.rs +++ /dev/null @@ -1,1538 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -use CompositionPipeline; -use SendableFrameTree; -use compositor_thread::{CompositorProxy, CompositorReceiver}; -use compositor_thread::{InitialCompositorState, Msg, RenderListener}; -use core::nonzero::NonZero; -use euclid::{Point2D, TypedPoint2D, TypedVector2D, ScaleFactor}; -use gfx_traits::Epoch; -use gleam::gl; -use image::{DynamicImage, ImageFormat, RgbImage}; -use ipc_channel::ipc::{self, IpcSharedMemory}; -use msg::constellation_msg::{PipelineId, PipelineIndex, PipelineNamespaceId}; -use net_traits::image::base::{Image, PixelFormat}; -use profile_traits::time::{self, ProfilerCategory, profile}; -use script_traits::{AnimationState, AnimationTickType, ConstellationControlMsg}; -use script_traits::{ConstellationMsg, LayoutControlMsg, MouseButton}; -use script_traits::{MouseEventType, ScrollState}; -use script_traits::{TouchpadPressurePhase, TouchEventType, TouchId, WindowSizeData, WindowSizeType}; -use script_traits::CompositorEvent::{self, MouseMoveEvent, MouseButtonEvent, TouchEvent, TouchpadPressureEvent}; -use servo_config::opts; -use servo_config::prefs::PREFS; -use servo_geometry::DeviceIndependentPixel; -use std::collections::HashMap; -use std::fs::File; -use std::rc::Rc; -use std::sync::mpsc::Sender; -use std::time::{Duration, Instant}; -use style_traits::{CSSPixel, DevicePixel, PinchZoomFactor}; -use style_traits::viewport::ViewportConstraints; -use time::{precise_time_ns, precise_time_s}; -use touch::{TouchHandler, TouchAction}; -use webrender; -use webrender_api::{self, ClipId, DeviceUintRect, DeviceUintSize, LayoutPoint, LayoutVector2D}; -use webrender_api::{ScrollEventPhase, ScrollLocation, ScrollClamping}; -use windowing::{self, MouseWindowEvent, WebRenderDebugOption, WindowMethods}; - -#[derive(Debug, PartialEq)] -enum UnableToComposite { - WindowUnprepared, - NotReadyToPaintImage(NotReadyToPaint), -} - -#[derive(Debug, PartialEq)] -enum NotReadyToPaint { - AnimationsActive, - JustNotifiedConstellation, - WaitingOnConstellation, -} - -// Default viewport constraints -const MAX_ZOOM: f32 = 8.0; -const MIN_ZOOM: f32 = 0.1; - -trait ConvertPipelineIdFromWebRender { - fn from_webrender(&self) -> PipelineId; -} - -impl ConvertPipelineIdFromWebRender for webrender_api::PipelineId { - fn from_webrender(&self) -> PipelineId { - PipelineId { - namespace_id: PipelineNamespaceId(self.0), - index: PipelineIndex(NonZero::new(self.1).expect("Webrender pipeline zero?")), - } - } -} - -/// Holds the state when running reftests that determines when it is -/// safe to save the output image. -#[derive(Clone, Copy, PartialEq)] -enum ReadyState { - Unknown, - WaitingForConstellationReply, - ReadyToSaveImage, -} - -#[derive(Clone, Copy, Debug, Eq, PartialEq)] -struct FrameTreeId(u32); - -impl FrameTreeId { - pub fn next(&mut self) { - self.0 += 1; - } -} - -/// One pixel in layer coordinate space. -/// -/// This unit corresponds to a "pixel" in layer coordinate space, which after scaling and -/// transformation becomes a device pixel. -#[derive(Clone, Copy, Debug)] -enum LayerPixel {} - -/// NB: Never block on the constellation, because sometimes the constellation blocks on us. -pub struct IOCompositor { - /// The application window. - pub window: Rc, - - /// The port on which we receive messages. - port: CompositorReceiver, - - /// The root pipeline. - root_pipeline: Option, - - /// Tracks details about each active pipeline that the compositor knows about. - pipeline_details: HashMap, - - /// The scene scale, to allow for zooming and high-resolution painting. 
- scale: ScaleFactor, - - /// The size of the rendering area. - frame_size: DeviceUintSize, - - /// The position and size of the window within the rendering area. - window_rect: DeviceUintRect, - - /// "Mobile-style" zoom that does not reflow the page. - viewport_zoom: PinchZoomFactor, - - /// Viewport zoom constraints provided by @viewport. - min_viewport_zoom: Option, - max_viewport_zoom: Option, - - /// "Desktop-style" zoom that resizes the viewport to fit the window. - page_zoom: ScaleFactor, - - /// The device pixel ratio for this window. - scale_factor: ScaleFactor, - - channel_to_self: CompositorProxy, - - /// The type of composition to perform - composite_target: CompositeTarget, - - /// Tracks whether we should composite this frame. - composition_request: CompositionRequest, - - /// Tracks whether we are in the process of shutting down, or have shut down and should close - /// the compositor. - pub shutdown_state: ShutdownState, - - /// Tracks the last composite time. - last_composite_time: u64, - - /// Tracks whether the zoom action has happened recently. - zoom_action: bool, - - /// The time of the last zoom action has started. - zoom_time: f64, - - /// The current frame tree ID (used to reject old paint buffers) - frame_tree_id: FrameTreeId, - - /// The channel on which messages can be sent to the constellation. - constellation_chan: Sender, - - /// The channel on which messages can be sent to the time profiler. - time_profiler_chan: time::ProfilerChan, - - /// Touch input state machine - touch_handler: TouchHandler, - - /// Pending scroll/zoom events. - pending_scroll_zoom_events: Vec, - - /// Whether we're waiting on a recomposite after dispatching a scroll. - waiting_for_results_of_scroll: bool, - - /// Used by the logic that determines when it is safe to output an - /// image for the reftest framework. - ready_to_save_state: ReadyState, - - /// Whether a scroll is in progress; i.e. whether the user's fingers are down. - scroll_in_progress: bool, - - in_scroll_transaction: Option, - - /// The webrender renderer. - webrender: webrender::Renderer, - - /// The active webrender document. - webrender_document: webrender_api::DocumentId, - - /// The webrender interface, if enabled. - webrender_api: webrender_api::RenderApi, - - /// GL functions interface (may be GL or GLES) - gl: Rc, - - /// Map of the pending paint metrics per layout thread. - /// The layout thread for each specific pipeline expects the compositor to - /// paint frames with specific given IDs (epoch). Once the compositor paints - /// these frames, it records the paint time for each of them and sends the - /// metric to the corresponding layout thread. - pending_paint_metrics: HashMap, -} - -#[derive(Clone, Copy)] -struct ScrollZoomEvent { - /// Change the pinch zoom level by this factor - magnification: f32, - /// Scroll by this offset, or to Start or End - scroll_location: ScrollLocation, - /// Apply changes to the frame at this location - cursor: TypedPoint2D, - /// The scroll event phase. - phase: ScrollEventPhase, - /// The number of OS events that have been coalesced together into this one event. - event_count: u32, -} - -#[derive(Debug, PartialEq)] -enum CompositionRequest { - NoCompositingNecessary, - CompositeNow(CompositingReason), -} - -#[derive(Clone, Copy, Debug, PartialEq)] -pub enum ShutdownState { - NotShuttingDown, - ShuttingDown, - FinishedShuttingDown, -} - -struct PipelineDetails { - /// The pipeline associated with this PipelineDetails object. 
- pipeline: Option, - - /// Whether animations are running - animations_running: bool, - - /// Whether there are animation callbacks - animation_callbacks_running: bool, - - /// Whether this pipeline is visible - visible: bool, -} - -impl PipelineDetails { - fn new() -> PipelineDetails { - PipelineDetails { - pipeline: None, - animations_running: false, - animation_callbacks_running: false, - visible: true, - } - } -} - -#[derive(Clone, Copy, Debug, PartialEq)] -enum CompositeTarget { - /// Normal composition to a window - Window, - - /// Compose as normal, but also return a PNG of the composed output - WindowAndPng, - - /// Compose to a PNG, write it to disk, and then exit the browser (used for reftests) - PngFile -} - -struct RenderTargetInfo { - framebuffer_ids: Vec, - renderbuffer_ids: Vec, - texture_ids: Vec, -} - -impl RenderTargetInfo { - fn empty() -> RenderTargetInfo { - RenderTargetInfo { - framebuffer_ids: Vec::new(), - renderbuffer_ids: Vec::new(), - texture_ids: Vec::new(), - } - } -} - -fn initialize_png(gl: &gl::Gl, width: usize, height: usize) -> RenderTargetInfo { - let framebuffer_ids = gl.gen_framebuffers(1); - gl.bind_framebuffer(gl::FRAMEBUFFER, framebuffer_ids[0]); - - let texture_ids = gl.gen_textures(1); - gl.bind_texture(gl::TEXTURE_2D, texture_ids[0]); - - gl.tex_image_2d(gl::TEXTURE_2D, 0, gl::RGB as gl::GLint, width as gl::GLsizei, - height as gl::GLsizei, 0, gl::RGB, gl::UNSIGNED_BYTE, None); - gl.tex_parameter_i(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, gl::NEAREST as gl::GLint); - gl.tex_parameter_i(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, gl::NEAREST as gl::GLint); - - gl.framebuffer_texture_2d(gl::FRAMEBUFFER, gl::COLOR_ATTACHMENT0, gl::TEXTURE_2D, - texture_ids[0], 0); - - gl.bind_texture(gl::TEXTURE_2D, 0); - - let renderbuffer_ids = gl.gen_renderbuffers(1); - let depth_rb = renderbuffer_ids[0]; - gl.bind_renderbuffer(gl::RENDERBUFFER, depth_rb); - gl.renderbuffer_storage(gl::RENDERBUFFER, - gl::DEPTH_COMPONENT24, - width as gl::GLsizei, - height as gl::GLsizei); - gl.framebuffer_renderbuffer(gl::FRAMEBUFFER, - gl::DEPTH_ATTACHMENT, - gl::RENDERBUFFER, - depth_rb); - - RenderTargetInfo { - framebuffer_ids: framebuffer_ids, - renderbuffer_ids: renderbuffer_ids, - texture_ids: texture_ids, - } -} - -struct RenderNotifier { - compositor_proxy: CompositorProxy, -} - -impl RenderNotifier { - fn new(compositor_proxy: CompositorProxy, - _: Sender) -> RenderNotifier { - RenderNotifier { - compositor_proxy: compositor_proxy, - } - } -} - -impl webrender_api::RenderNotifier for RenderNotifier { - fn new_frame_ready(&mut self) { - self.compositor_proxy.recomposite(CompositingReason::NewWebRenderFrame); - } - - fn new_scroll_frame_ready(&mut self, composite_needed: bool) { - self.compositor_proxy.send(Msg::NewScrollFrameReady(composite_needed)); - } -} - -impl IOCompositor { - fn new(window: Rc, state: InitialCompositorState) - -> IOCompositor { - let frame_size = window.framebuffer_size(); - let window_rect = window.window_rect(); - let scale_factor = window.hidpi_factor(); - let composite_target = match opts::get().output_file { - Some(_) => CompositeTarget::PngFile, - None => CompositeTarget::Window - }; - - IOCompositor { - gl: window.gl(), - window: window, - port: state.receiver, - root_pipeline: None, - pipeline_details: HashMap::new(), - frame_size: frame_size, - window_rect: window_rect, - scale: ScaleFactor::new(1.0), - scale_factor: scale_factor, - channel_to_self: state.sender.clone(), - composition_request: CompositionRequest::NoCompositingNecessary, - 
touch_handler: TouchHandler::new(), - pending_scroll_zoom_events: Vec::new(), - waiting_for_results_of_scroll: false, - composite_target: composite_target, - shutdown_state: ShutdownState::NotShuttingDown, - page_zoom: ScaleFactor::new(1.0), - viewport_zoom: PinchZoomFactor::new(1.0), - min_viewport_zoom: None, - max_viewport_zoom: None, - zoom_action: false, - zoom_time: 0f64, - frame_tree_id: FrameTreeId(0), - constellation_chan: state.constellation_chan, - time_profiler_chan: state.time_profiler_chan, - last_composite_time: 0, - ready_to_save_state: ReadyState::Unknown, - scroll_in_progress: false, - in_scroll_transaction: None, - webrender: state.webrender, - webrender_document: state.webrender_document, - webrender_api: state.webrender_api, - pending_paint_metrics: HashMap::new(), - } - } - - pub fn create(window: Rc, state: InitialCompositorState) -> IOCompositor { - let mut compositor = IOCompositor::new(window, state); - - let compositor_proxy_for_webrender = compositor.channel_to_self - .clone(); - let render_notifier = RenderNotifier::new(compositor_proxy_for_webrender, - compositor.constellation_chan.clone()); - compositor.webrender.set_render_notifier(Box::new(render_notifier)); - - // Set the size of the root layer. - compositor.update_zoom_transform(); - - // Tell the constellation about the initial window size. - compositor.send_window_size(WindowSizeType::Initial); - - compositor - } - - pub fn deinit(self) { - self.webrender.deinit(); - } - - pub fn maybe_start_shutting_down(&mut self) { - if self.shutdown_state == ShutdownState::NotShuttingDown { - debug!("Shutting down the constellation for WindowEvent::Quit"); - self.start_shutting_down(); - } - } - - fn start_shutting_down(&mut self) { - debug!("Compositor sending Exit message to Constellation"); - if let Err(e) = self.constellation_chan.send(ConstellationMsg::Exit) { - warn!("Sending exit message to constellation failed ({}).", e); - } - - self.shutdown_state = ShutdownState::ShuttingDown; - } - - fn finish_shutting_down(&mut self) { - debug!("Compositor received message that constellation shutdown is complete"); - - // Drain compositor port, sometimes messages contain channels that are blocking - // another thread from finishing (i.e. SetFrameTree). - while self.port.try_recv_compositor_msg().is_some() {} - - // Tell the profiler, memory profiler, and scrolling timer to shut down. 
- if let Ok((sender, receiver)) = ipc::channel() { - self.time_profiler_chan.send(time::ProfilerMsg::Exit(sender)); - let _ = receiver.recv(); - } - - self.shutdown_state = ShutdownState::FinishedShuttingDown; - } - - fn handle_browser_message(&mut self, msg: Msg) -> bool { - match (msg, self.shutdown_state) { - (_, ShutdownState::FinishedShuttingDown) => { - error!("compositor shouldn't be handling messages after shutting down"); - return false - } - - (Msg::Exit, _) => { - self.start_shutting_down(); - } - - (Msg::ShutdownComplete, _) => { - self.finish_shutting_down(); - return false; - } - - (Msg::ChangeRunningAnimationsState(pipeline_id, animation_state), - ShutdownState::NotShuttingDown) => { - self.change_running_animations_state(pipeline_id, animation_state); - } - - (Msg::SetFrameTree(frame_tree), - ShutdownState::NotShuttingDown) => { - self.set_frame_tree(&frame_tree); - self.send_viewport_rects(); - } - - (Msg::ScrollFragmentPoint(scroll_root_id, point, _), - ShutdownState::NotShuttingDown) => { - self.scroll_fragment_to_point(scroll_root_id, point); - } - - (Msg::Recomposite(reason), ShutdownState::NotShuttingDown) => { - self.composition_request = CompositionRequest::CompositeNow(reason) - } - - - (Msg::TouchEventProcessed(result), ShutdownState::NotShuttingDown) => { - self.touch_handler.on_event_processed(result); - } - - (Msg::CreatePng(reply), ShutdownState::NotShuttingDown) => { - let res = self.composite_specific_target(CompositeTarget::WindowAndPng); - if let Err(ref e) = res { - info!("Error retrieving PNG: {:?}", e); - } - let img = res.unwrap_or(None); - if let Err(e) = reply.send(img) { - warn!("Sending reply to create png failed ({}).", e); - } - } - - (Msg::ViewportConstrained(pipeline_id, constraints), - ShutdownState::NotShuttingDown) => { - self.constrain_viewport(pipeline_id, constraints); - } - - (Msg::IsReadyToSaveImageReply(is_ready), ShutdownState::NotShuttingDown) => { - assert!(self.ready_to_save_state == ReadyState::WaitingForConstellationReply); - if is_ready { - self.ready_to_save_state = ReadyState::ReadyToSaveImage; - if opts::get().is_running_problem_test { - println!("ready to save image!"); - } - } else { - self.ready_to_save_state = ReadyState::Unknown; - if opts::get().is_running_problem_test { - println!("resetting ready_to_save_state!"); - } - } - self.composite_if_necessary(CompositingReason::Headless); - } - - (Msg::PipelineVisibilityChanged(pipeline_id, visible), ShutdownState::NotShuttingDown) => { - self.pipeline_details(pipeline_id).visible = visible; - if visible { - self.process_animations(); - } - } - - (Msg::PipelineExited(pipeline_id, sender), _) => { - debug!("Compositor got pipeline exited: {:?}", pipeline_id); - self.remove_pipeline_root_layer(pipeline_id); - let _ = sender.send(()); - } - - (Msg::NewScrollFrameReady(recomposite_needed), ShutdownState::NotShuttingDown) => { - self.waiting_for_results_of_scroll = false; - if recomposite_needed { - self.composition_request = CompositionRequest::CompositeNow( - CompositingReason::NewWebRenderScrollFrame); - } - } - - (Msg::Dispatch(func), ShutdownState::NotShuttingDown) => { - // The functions sent here right now are really dumb, so they can't panic. - // But if we start running more complex code here, we should really catch panic here. - func(); - } - - (Msg::LoadComplete(_), ShutdownState::NotShuttingDown) => { - // If we're painting in headless mode, schedule a recomposite. 
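// Sketch (made-up message and state names): handle_browser_message above matches on the
// *pair* (message, shutdown state), so "already finished shutting down" and "currently
// shutting down" are handled by blanket arms instead of a check inside every branch. The
// same shape, reduced to two messages and three states:
enum ShutdownState {
    NotShuttingDown,
    ShuttingDown,
    FinishedShuttingDown,
}

enum Msg {
    Exit,
    Recomposite,
}

fn handle(msg: Msg, state: ShutdownState) -> bool {
    match (msg, state) {
        (_, ShutdownState::FinishedShuttingDown) => {
            eprintln!("compositor shouldn't be handling messages after shutting down");
            false
        }
        (Msg::Exit, _) => {
            println!("start shutting down");
            true
        }
        (Msg::Recomposite, ShutdownState::NotShuttingDown) => {
            println!("composite now");
            true
        }
        // While shutting down, skip work that could touch already torn-down resources.
        (_, ShutdownState::ShuttingDown) => true,
    }
}

fn main() {
    assert!(handle(Msg::Recomposite, ShutdownState::NotShuttingDown));
    assert!(!handle(Msg::Recomposite, ShutdownState::FinishedShuttingDown));
}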
- if opts::get().output_file.is_some() || opts::get().exit_after_load { - self.composite_if_necessary(CompositingReason::Headless); - } - }, - - (Msg::PendingPaintMetric(pipeline_id, epoch), _) => { - self.pending_paint_metrics.insert(pipeline_id, epoch); - } - - // When we are shutting_down, we need to avoid performing operations - // such as Paint that may crash because we have begun tearing down - // the rest of our resources. - (_, ShutdownState::ShuttingDown) => {} - } - - true - } - - /// Sets or unsets the animations-running flag for the given pipeline, and schedules a - /// recomposite if necessary. - fn change_running_animations_state(&mut self, - pipeline_id: PipelineId, - animation_state: AnimationState) { - match animation_state { - AnimationState::AnimationsPresent => { - let visible = self.pipeline_details(pipeline_id).visible; - self.pipeline_details(pipeline_id).animations_running = true; - if visible { - self.composite_if_necessary(CompositingReason::Animation); - } - } - AnimationState::AnimationCallbacksPresent => { - let visible = self.pipeline_details(pipeline_id).visible; - self.pipeline_details(pipeline_id).animation_callbacks_running = true; - if visible { - self.tick_animations_for_pipeline(pipeline_id); - } - } - AnimationState::NoAnimationsPresent => { - self.pipeline_details(pipeline_id).animations_running = false; - } - AnimationState::NoAnimationCallbacksPresent => { - self.pipeline_details(pipeline_id).animation_callbacks_running = false; - } - } - } - - fn pipeline_details(&mut self, pipeline_id: PipelineId) -> &mut PipelineDetails { - if !self.pipeline_details.contains_key(&pipeline_id) { - self.pipeline_details.insert(pipeline_id, PipelineDetails::new()); - } - self.pipeline_details.get_mut(&pipeline_id).expect("Insert then get failed!") - } - - pub fn pipeline(&self, pipeline_id: PipelineId) -> Option<&CompositionPipeline> { - match self.pipeline_details.get(&pipeline_id) { - Some(ref details) => details.pipeline.as_ref(), - None => { - warn!("Compositor layer has an unknown pipeline ({:?}).", pipeline_id); - None - } - } - } - - fn set_frame_tree(&mut self, frame_tree: &SendableFrameTree) { - debug!("Setting the frame tree for pipeline {}", frame_tree.pipeline.id); - - self.root_pipeline = Some(frame_tree.pipeline.clone()); - - let pipeline_id = frame_tree.pipeline.id.to_webrender(); - self.webrender_api.set_root_pipeline(self.webrender_document, pipeline_id); - self.webrender_api.generate_frame(self.webrender_document, None); - - self.create_pipeline_details_for_frame_tree(&frame_tree); - - self.send_window_size(WindowSizeType::Initial); - - self.frame_tree_id.next(); - } - - fn create_pipeline_details_for_frame_tree(&mut self, frame_tree: &SendableFrameTree) { - self.pipeline_details(frame_tree.pipeline.id).pipeline = Some(frame_tree.pipeline.clone()); - - for kid in &frame_tree.children { - self.create_pipeline_details_for_frame_tree(kid); - } - } - - fn remove_pipeline_root_layer(&mut self, pipeline_id: PipelineId) { - self.pipeline_details.remove(&pipeline_id); - } - - fn send_window_size(&self, size_type: WindowSizeType) { - let dppx = self.page_zoom * self.hidpi_factor(); - - self.webrender_api.set_window_parameters(self.webrender_document, - self.frame_size, - self.window_rect, - self.hidpi_factor().get()); - - let initial_viewport = self.window_rect.size.to_f32() / dppx; - - let data = WindowSizeData { - device_pixel_ratio: dppx, - initial_viewport: initial_viewport, - }; - let top_level_browsing_context_id = match self.root_pipeline { - 
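// Aside, not from the original code: the `pipeline_details` accessor above does a
// contains_key / insert / get_mut dance; std's HashMap entry API expresses the same
// "insert a default if missing, then borrow mutably" in a single lookup. Field and type
// names below are stand-ins for the real struct.
use std::collections::HashMap;

#[derive(Default)]
struct PipelineDetails {
    visible: bool,
}

fn pipeline_details(
    map: &mut HashMap<u32, PipelineDetails>,
    pipeline_id: u32,
) -> &mut PipelineDetails {
    map.entry(pipeline_id).or_insert_with(PipelineDetails::default)
}

fn main() {
    let mut map = HashMap::new();
    pipeline_details(&mut map, 7).visible = true;
    assert!(map[&7].visible);
}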
Some(ref pipeline) => pipeline.top_level_browsing_context_id, - None => return warn!("Window resize without root pipeline."), - }; - let msg = ConstellationMsg::WindowSize(top_level_browsing_context_id, data, size_type); - - if let Err(e) = self.constellation_chan.send(msg) { - warn!("Sending window resize to constellation failed ({}).", e); - } - } - - fn scroll_fragment_to_point(&mut self, id: ClipId, point: Point2D) { - self.webrender_api.scroll_node_with_id(self.webrender_document, - LayoutPoint::from_untyped(&point), - id, - ScrollClamping::ToContentBounds); - } - - pub fn on_resize_window_event(&mut self, new_size: DeviceUintSize) { - debug!("compositor resizing to {:?}", new_size.to_untyped()); - - // A size change could also mean a resolution change. - let new_scale_factor = self.window.hidpi_factor(); - if self.scale_factor != new_scale_factor { - self.scale_factor = new_scale_factor; - self.update_zoom_transform(); - } - - let new_window_rect = self.window.window_rect(); - let new_frame_size = self.window.framebuffer_size(); - - if self.window_rect == new_window_rect && - self.frame_size == new_frame_size { - return; - } - - self.frame_size = new_size; - self.window_rect = new_window_rect; - - self.send_window_size(WindowSizeType::Resize); - } - - pub fn on_mouse_window_event_class(&mut self, mouse_window_event: MouseWindowEvent) { - if opts::get().convert_mouse_to_touch { - match mouse_window_event { - MouseWindowEvent::Click(_, _) => {} - MouseWindowEvent::MouseDown(_, p) => self.on_touch_down(TouchId(0), p), - MouseWindowEvent::MouseUp(_, p) => self.on_touch_up(TouchId(0), p), - } - return - } - - self.dispatch_mouse_window_event_class(mouse_window_event); - } - - fn dispatch_mouse_window_event_class(&mut self, mouse_window_event: MouseWindowEvent) { - let point = match mouse_window_event { - MouseWindowEvent::Click(_, p) => p, - MouseWindowEvent::MouseDown(_, p) => p, - MouseWindowEvent::MouseUp(_, p) => p, - }; - - let root_pipeline_id = match self.get_root_pipeline_id() { - Some(root_pipeline_id) => root_pipeline_id, - None => return, - }; - - if let Some(pipeline) = self.pipeline(root_pipeline_id) { - let dppx = self.page_zoom * self.hidpi_factor(); - let translated_point = (point / dppx).to_untyped(); - let event_to_send = match mouse_window_event { - MouseWindowEvent::Click(button, _) => { - MouseButtonEvent(MouseEventType::Click, button, translated_point) - } - MouseWindowEvent::MouseDown(button, _) => { - MouseButtonEvent(MouseEventType::MouseDown, button, translated_point) - } - MouseWindowEvent::MouseUp(button, _) => { - MouseButtonEvent(MouseEventType::MouseUp, button, translated_point) - } - }; - let msg = ConstellationControlMsg::SendEvent(root_pipeline_id, event_to_send); - if let Err(e) = pipeline.script_chan.send(msg) { - warn!("Sending control event to script failed ({}).", e); - } - } - } - - pub fn on_mouse_window_move_event_class(&mut self, cursor: TypedPoint2D) { - if opts::get().convert_mouse_to_touch { - self.on_touch_move(TouchId(0), cursor); - return - } - - self.dispatch_mouse_window_move_event_class(cursor); - } - - fn dispatch_mouse_window_move_event_class(&mut self, cursor: TypedPoint2D) { - let root_pipeline_id = match self.get_root_pipeline_id() { - Some(root_pipeline_id) => root_pipeline_id, - None => return, - }; - if self.pipeline(root_pipeline_id).is_none() { - return; - } - - let dppx = self.page_zoom * self.hidpi_factor(); - let event_to_send = MouseMoveEvent(Some((cursor / dppx).to_untyped())); - let msg = 
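// Sketch (plain f32 tuples instead of euclid's typed points): the mouse and touch handlers
// above all translate a window-space position into CSS pixels by dividing by
// dppx = page_zoom * hidpi_factor, and the MIN_ZOOM / MAX_ZOOM constants declared near the
// top of the compositor presumably bound the zoom factor that feeds into that product.
const MIN_ZOOM: f32 = 0.1;
const MAX_ZOOM: f32 = 8.0;

fn device_to_css(point: (f32, f32), page_zoom: f32, hidpi_factor: f32) -> (f32, f32) {
    let dppx = page_zoom * hidpi_factor;
    (point.0 / dppx, point.1 / dppx)
}

fn clamp_zoom(zoom: f32) -> f32 {
    zoom.max(MIN_ZOOM).min(MAX_ZOOM)
}

fn main() {
    // A click at (400, 300) device pixels on a 2x HiDPI screen at 1.0 page zoom
    // lands at (200, 150) in CSS pixels.
    assert_eq!(device_to_css((400.0, 300.0), 1.0, 2.0), (200.0, 150.0));
    assert_eq!(clamp_zoom(12.0), MAX_ZOOM);
}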
ConstellationControlMsg::SendEvent(root_pipeline_id, event_to_send); - if let Some(pipeline) = self.pipeline(root_pipeline_id) { - if let Err(e) = pipeline.script_chan.send(msg) { - warn!("Sending mouse control event to script failed ({}).", e); - } - } - } - - fn send_event_to_root_pipeline(&self, event: CompositorEvent) { - let root_pipeline_id = match self.get_root_pipeline_id() { - Some(root_pipeline_id) => root_pipeline_id, - None => return, - }; - - if let Some(pipeline) = self.pipeline(root_pipeline_id) { - let msg = ConstellationControlMsg::SendEvent(root_pipeline_id, event); - if let Err(e) = pipeline.script_chan.send(msg) { - warn!("Sending control event to script failed ({}).", e); - } - } - } - - pub fn on_touch_event(&mut self, - event_type: TouchEventType, - identifier: TouchId, - location: TypedPoint2D) { - match event_type { - TouchEventType::Down => self.on_touch_down(identifier, location), - TouchEventType::Move => self.on_touch_move(identifier, location), - TouchEventType::Up => self.on_touch_up(identifier, location), - TouchEventType::Cancel => self.on_touch_cancel(identifier, location), - } - } - - fn on_touch_down(&mut self, identifier: TouchId, point: TypedPoint2D) { - self.touch_handler.on_touch_down(identifier, point); - let dppx = self.page_zoom * self.hidpi_factor(); - let translated_point = (point / dppx).to_untyped(); - self.send_event_to_root_pipeline(TouchEvent(TouchEventType::Down, - identifier, - translated_point)); - } - - fn on_touch_move(&mut self, identifier: TouchId, point: TypedPoint2D) { - match self.touch_handler.on_touch_move(identifier, point) { - TouchAction::Scroll(delta) => { - match point.cast() { - Some(point) => self.on_scroll_window_event( - ScrollLocation::Delta( - LayoutVector2D::from_untyped(&delta.to_untyped()) - ), - point - ), - None => error!("Point cast failed."), - } - } - TouchAction::Zoom(magnification, scroll_delta) => { - let cursor = TypedPoint2D::new(-1, -1); // Make sure this hits the base layer. - self.pending_scroll_zoom_events.push(ScrollZoomEvent { - magnification: magnification, - scroll_location: ScrollLocation::Delta(webrender_api::LayoutVector2D::from_untyped( - &scroll_delta.to_untyped())), - cursor: cursor, - phase: ScrollEventPhase::Move(true), - event_count: 1, - }); - } - TouchAction::DispatchEvent => { - let dppx = self.page_zoom * self.hidpi_factor(); - let translated_point = (point / dppx).to_untyped(); - self.send_event_to_root_pipeline(TouchEvent(TouchEventType::Move, - identifier, - translated_point)); - } - _ => {} - } - } - - fn on_touch_up(&mut self, identifier: TouchId, point: TypedPoint2D) { - let dppx = self.page_zoom * self.hidpi_factor(); - let translated_point = (point / dppx).to_untyped(); - self.send_event_to_root_pipeline(TouchEvent(TouchEventType::Up, - identifier, - translated_point)); - if let TouchAction::Click = self.touch_handler.on_touch_up(identifier, point) { - self.simulate_mouse_click(point); - } - } - - fn on_touch_cancel(&mut self, identifier: TouchId, point: TypedPoint2D) { - // Send the event to script. 
- self.touch_handler.on_touch_cancel(identifier, point); - let dppx = self.page_zoom * self.hidpi_factor(); - let translated_point = (point / dppx).to_untyped(); - self.send_event_to_root_pipeline(TouchEvent(TouchEventType::Cancel, - identifier, - translated_point)); - } - - pub fn on_touchpad_pressure_event(&self, - point: TypedPoint2D, - pressure: f32, - phase: TouchpadPressurePhase) { - if let Some(true) = PREFS.get("dom.forcetouch.enabled").as_boolean() { - let dppx = self.page_zoom * self.hidpi_factor(); - let translated_point = (point / dppx).to_untyped(); - self.send_event_to_root_pipeline(TouchpadPressureEvent(translated_point, - pressure, - phase)); - } - } - - /// http://w3c.github.io/touch-events/#mouse-events - fn simulate_mouse_click(&mut self, p: TypedPoint2D) { - let button = MouseButton::Left; - self.dispatch_mouse_window_move_event_class(p); - self.dispatch_mouse_window_event_class(MouseWindowEvent::MouseDown(button, p)); - self.dispatch_mouse_window_event_class(MouseWindowEvent::MouseUp(button, p)); - self.dispatch_mouse_window_event_class(MouseWindowEvent::Click(button, p)); - } - - pub fn on_scroll_event(&mut self, - delta: ScrollLocation, - cursor: TypedPoint2D, - phase: TouchEventType) { - match phase { - TouchEventType::Move => self.on_scroll_window_event(delta, cursor), - TouchEventType::Up | TouchEventType::Cancel => { - self.on_scroll_end_window_event(delta, cursor); - } - TouchEventType::Down => { - self.on_scroll_start_window_event(delta, cursor); - } - } - } - - fn on_scroll_window_event(&mut self, - scroll_location: ScrollLocation, - cursor: TypedPoint2D) { - let event_phase = match (self.scroll_in_progress, self.in_scroll_transaction) { - (false, None) => ScrollEventPhase::Start, - (false, Some(last_scroll)) if last_scroll.elapsed() > Duration::from_millis(80) => - ScrollEventPhase::Start, - (_, _) => ScrollEventPhase::Move(self.scroll_in_progress), - }; - self.in_scroll_transaction = Some(Instant::now()); - self.pending_scroll_zoom_events.push(ScrollZoomEvent { - magnification: 1.0, - scroll_location: scroll_location, - cursor: cursor, - phase: event_phase, - event_count: 1, - }); - } - - fn on_scroll_start_window_event(&mut self, - scroll_location: ScrollLocation, - cursor: TypedPoint2D) { - self.scroll_in_progress = true; - self.pending_scroll_zoom_events.push(ScrollZoomEvent { - magnification: 1.0, - scroll_location: scroll_location, - cursor: cursor, - phase: ScrollEventPhase::Start, - event_count: 1, - }); - } - - fn on_scroll_end_window_event(&mut self, - scroll_location: ScrollLocation, - cursor: TypedPoint2D) { - self.scroll_in_progress = false; - self.pending_scroll_zoom_events.push(ScrollZoomEvent { - magnification: 1.0, - scroll_location: scroll_location, - cursor: cursor, - phase: ScrollEventPhase::End, - event_count: 1, - }); - } - - fn process_pending_scroll_events(&mut self) { - let had_events = self.pending_scroll_zoom_events.len() > 0; - - // Batch up all scroll events into one, or else we'll do way too much painting. - let mut last_combined_event: Option = None; - for scroll_event in self.pending_scroll_zoom_events.drain(..) { - let this_cursor = scroll_event.cursor; - - let this_delta = match scroll_event.scroll_location { - ScrollLocation::Delta(delta) => delta, - ScrollLocation::Start | ScrollLocation::End => { - // If this is an event which is scrolling to the start or end of the page, - // disregard other pending events and exit the loop. 
- last_combined_event = Some(scroll_event); - break; - } - }; - - if let Some(combined_event) = last_combined_event { - if combined_event.phase != scroll_event.phase { - let combined_delta = match combined_event.scroll_location { - ScrollLocation::Delta(delta) => delta, - ScrollLocation::Start | ScrollLocation::End => { - // If this is an event which is scrolling to the start or end of the page, - // disregard other pending events and exit the loop. - last_combined_event = Some(scroll_event); - break; - } - }; - // TODO: units don't match! - let delta = combined_delta / self.scale.get(); - - let cursor = - (combined_event.cursor.to_f32() / self.scale).to_untyped(); - let location = webrender_api::ScrollLocation::Delta(delta); - let cursor = webrender_api::WorldPoint::from_untyped(&cursor); - self.webrender_api.scroll(self.webrender_document, location, cursor, combined_event.phase); - last_combined_event = None - } - } - - match (&mut last_combined_event, scroll_event.phase) { - (last_combined_event @ &mut None, _) => { - *last_combined_event = Some(ScrollZoomEvent { - magnification: scroll_event.magnification, - scroll_location: ScrollLocation::Delta(webrender_api::LayoutVector2D::from_untyped( - &this_delta.to_untyped())), - cursor: this_cursor, - phase: scroll_event.phase, - event_count: 1, - }) - } - (&mut Some(ref mut last_combined_event), - ScrollEventPhase::Move(false)) => { - // Mac OS X sometimes delivers scroll events out of vsync during a - // fling. This causes events to get bunched up occasionally, causing - // nasty-looking "pops". To mitigate this, during a fling we average - // deltas instead of summing them. - if let ScrollLocation::Delta(delta) = last_combined_event.scroll_location { - let old_event_count = - ScaleFactor::new(last_combined_event.event_count as f32); - last_combined_event.event_count += 1; - let new_event_count = - ScaleFactor::new(last_combined_event.event_count as f32); - last_combined_event.scroll_location = ScrollLocation::Delta( - (delta * old_event_count + this_delta) / - new_event_count); - } - } - (&mut Some(ref mut last_combined_event), _) => { - if let ScrollLocation::Delta(delta) = last_combined_event.scroll_location { - last_combined_event.scroll_location = ScrollLocation::Delta(delta + this_delta); - last_combined_event.event_count += 1 - } - } - } - } - - // TODO(gw): Support zoom (WR issue #28). - if let Some(combined_event) = last_combined_event { - let scroll_location = match combined_event.scroll_location { - ScrollLocation::Delta(delta) => { - let scaled_delta = (TypedVector2D::from_untyped(&delta.to_untyped()) / self.scale) - .to_untyped(); - let calculated_delta = webrender_api::LayoutVector2D::from_untyped(&scaled_delta); - ScrollLocation::Delta(calculated_delta) - }, - // Leave ScrollLocation unchanged if it is Start or End location. - sl @ ScrollLocation::Start | sl @ ScrollLocation::End => sl, - }; - let cursor = (combined_event.cursor.to_f32() / self.scale).to_untyped(); - let cursor = webrender_api::WorldPoint::from_untyped(&cursor); - self.webrender_api.scroll(self.webrender_document, scroll_location, cursor, combined_event.phase); - self.waiting_for_results_of_scroll = true - } - - if had_events { - self.send_viewport_rects(); - } - } - - /// If there are any animations running, dispatches appropriate messages to the constellation. 
- fn process_animations(&mut self) { - let mut pipeline_ids = vec![]; - for (pipeline_id, pipeline_details) in &self.pipeline_details { - if (pipeline_details.animations_running || - pipeline_details.animation_callbacks_running) && - pipeline_details.visible { - pipeline_ids.push(*pipeline_id); - } - } - let animation_state = if pipeline_ids.is_empty() { - windowing::AnimationState::Idle - } else { - windowing::AnimationState::Animating - }; - self.window.set_animation_state(animation_state); - for pipeline_id in &pipeline_ids { - self.tick_animations_for_pipeline(*pipeline_id) - } - } - - fn tick_animations_for_pipeline(&mut self, pipeline_id: PipelineId) { - let animation_callbacks_running = self.pipeline_details(pipeline_id).animation_callbacks_running; - if animation_callbacks_running { - let msg = ConstellationMsg::TickAnimation(pipeline_id, AnimationTickType::Script); - if let Err(e) = self.constellation_chan.send(msg) { - warn!("Sending tick to constellation failed ({}).", e); - } - } - - // We may need to tick animations in layout. (See #12749.) - let animations_running = self.pipeline_details(pipeline_id).animations_running; - if animations_running { - let msg = ConstellationMsg::TickAnimation(pipeline_id, AnimationTickType::Layout); - if let Err(e) = self.constellation_chan.send(msg) { - warn!("Sending tick to constellation failed ({}).", e); - } - } - } - - fn constrain_viewport(&mut self, pipeline_id: PipelineId, constraints: ViewportConstraints) { - let is_root = self.root_pipeline.as_ref().map_or(false, |root_pipeline| { - root_pipeline.id == pipeline_id - }); - - if is_root { - self.viewport_zoom = constraints.initial_zoom; - self.min_viewport_zoom = constraints.min_zoom; - self.max_viewport_zoom = constraints.max_zoom; - self.update_zoom_transform(); - } - } - - fn hidpi_factor(&self) -> ScaleFactor { - match opts::get().device_pixels_per_px { - Some(device_pixels_per_px) => ScaleFactor::new(device_pixels_per_px), - None => match opts::get().output_file { - Some(_) => ScaleFactor::new(1.0), - None => self.scale_factor - } - } - } - - fn device_pixels_per_page_px(&self) -> ScaleFactor { - self.page_zoom * self.hidpi_factor() - } - - fn update_zoom_transform(&mut self) { - let scale = self.device_pixels_per_page_px(); - self.scale = ScaleFactor::new(scale.get()); - } - - pub fn on_zoom_reset_window_event(&mut self) { - self.page_zoom = ScaleFactor::new(1.0); - self.update_zoom_transform(); - self.send_window_size(WindowSizeType::Resize); - self.update_page_zoom_for_webrender(); - } - - pub fn on_zoom_window_event(&mut self, magnification: f32) { - self.page_zoom = ScaleFactor::new((self.page_zoom.get() * magnification) - .max(MIN_ZOOM).min(MAX_ZOOM)); - self.update_zoom_transform(); - self.send_window_size(WindowSizeType::Resize); - self.update_page_zoom_for_webrender(); - } - - fn update_page_zoom_for_webrender(&mut self) { - let page_zoom = webrender_api::ZoomFactor::new(self.page_zoom.get()); - self.webrender_api.set_page_zoom(self.webrender_document, page_zoom); - } - - /// Simulate a pinch zoom - pub fn on_pinch_zoom_window_event(&mut self, magnification: f32) { - self.pending_scroll_zoom_events.push(ScrollZoomEvent { - magnification: magnification, - scroll_location: ScrollLocation::Delta(TypedVector2D::zero()), // TODO: Scroll to keep the center in view? - cursor: TypedPoint2D::new(-1, -1), // Make sure this hits the base layer. 
- phase: ScrollEventPhase::Move(true), - event_count: 1, - }); - } - - fn send_viewport_rects(&self) { - let mut scroll_states_per_pipeline = HashMap::new(); - for scroll_layer_state in self.webrender_api.get_scroll_node_state(self.webrender_document) { - if scroll_layer_state.id.external_id().is_none() && - !scroll_layer_state.id.is_root_scroll_node() { - continue; - } - - let scroll_state = ScrollState { - scroll_root_id: scroll_layer_state.id, - scroll_offset: scroll_layer_state.scroll_offset.to_untyped(), - }; - - scroll_states_per_pipeline.entry(scroll_layer_state.id.pipeline_id()) - .or_insert(vec![]) - .push(scroll_state); - } - - for (pipeline_id, scroll_states) in scroll_states_per_pipeline { - if let Some(pipeline) = self.pipeline(pipeline_id.from_webrender()) { - let msg = LayoutControlMsg::SetScrollStates(scroll_states); - let _ = pipeline.layout_chan.send(msg); - } - } - } - - // Check if any pipelines currently have active animations or animation callbacks. - fn animations_active(&self) -> bool { - for (_, details) in &self.pipeline_details { - // If animations are currently running, then don't bother checking - // with the constellation if the output image is stable. - if details.animations_running { - return true; - } - if details.animation_callbacks_running { - return true; - } - } - - false - } - - /// Query the constellation to see if the current compositor - /// output matches the current frame tree output, and if the - /// associated script threads are idle. - fn is_ready_to_paint_image_output(&mut self) -> Result<(), NotReadyToPaint> { - match self.ready_to_save_state { - ReadyState::Unknown => { - // Unsure if the output image is stable. - - // Collect the currently painted epoch of each pipeline that is - // complete (i.e. has *all* layers painted to the requested epoch). - // This gets sent to the constellation for comparison with the current - // frame tree. - let mut pipeline_epochs = HashMap::new(); - for (id, _) in &self.pipeline_details { - let webrender_pipeline_id = id.to_webrender(); - if let Some(webrender_api::Epoch(epoch)) = self.webrender - .current_epoch(webrender_pipeline_id) { - let epoch = Epoch(epoch); - pipeline_epochs.insert(*id, epoch); - } - } - - // Pass the pipeline/epoch states to the constellation and check - // if it's safe to output the image. - let msg = ConstellationMsg::IsReadyToSaveImage(pipeline_epochs); - if let Err(e) = self.constellation_chan.send(msg) { - warn!("Sending ready to save to constellation failed ({}).", e); - } - self.ready_to_save_state = ReadyState::WaitingForConstellationReply; - Err(NotReadyToPaint::JustNotifiedConstellation) - } - ReadyState::WaitingForConstellationReply => { - // If waiting on a reply from the constellation to the last - // query if the image is stable, then assume not ready yet. - Err(NotReadyToPaint::WaitingOnConstellation) - } - ReadyState::ReadyToSaveImage => { - // Constellation has replied at some point in the past - // that the current output image is stable and ready - // for saving. - // Reset the flag so that we check again in the future - // TODO: only reset this if we load a new document? 
- if opts::get().is_running_problem_test { - println!("was ready to save, resetting ready_to_save_state"); - } - self.ready_to_save_state = ReadyState::Unknown; - Ok(()) - } - } - } - - pub fn composite(&mut self) { - let target = self.composite_target; - match self.composite_specific_target(target) { - Ok(_) => if opts::get().output_file.is_some() || opts::get().exit_after_load { - println!("Shutting down the Constellation after generating an output file or exit flag specified"); - self.start_shutting_down(); - }, - Err(e) => if opts::get().is_running_problem_test { - if e != UnableToComposite::NotReadyToPaintImage(NotReadyToPaint::WaitingOnConstellation) { - println!("not ready to composite: {:?}", e); - } - }, - } - } - - /// Composite either to the screen or to a png image or both. - /// Returns Ok if composition was performed or Err if it was not possible to composite - /// for some reason. If CompositeTarget is Window or Png no image data is returned; - /// in the latter case the image is written directly to a file. If CompositeTarget - /// is WindowAndPng Ok(Some(png::Image)) is returned. - fn composite_specific_target(&mut self, - target: CompositeTarget) - -> Result, UnableToComposite> { - let (width, height) = - (self.frame_size.width as usize, self.frame_size.height as usize); - if !self.window.prepare_for_composite(width, height) { - return Err(UnableToComposite::WindowUnprepared) - } - - self.webrender.update(); - - let wait_for_stable_image = match target { - CompositeTarget::WindowAndPng | CompositeTarget::PngFile => true, - CompositeTarget::Window => opts::get().exit_after_load, - }; - - if wait_for_stable_image { - // The current image may be ready to output. However, if there are animations active, - // tick those instead and continue waiting for the image output to be stable AND - // all active animations to complete. - if self.animations_active() { - self.process_animations(); - return Err(UnableToComposite::NotReadyToPaintImage(NotReadyToPaint::AnimationsActive)); - } - if let Err(result) = self.is_ready_to_paint_image_output() { - return Err(UnableToComposite::NotReadyToPaintImage(result)) - } - } - - let render_target_info = match target { - CompositeTarget::Window => RenderTargetInfo::empty(), - _ => initialize_png(&*self.gl, width, height) - }; - - profile(ProfilerCategory::Compositing, None, self.time_profiler_chan.clone(), || { - debug!("compositor: compositing"); - - // Paint the scene. - // TODO(gw): Take notice of any errors the renderer returns! - self.webrender.render(self.frame_size).ok(); - }); - - // If there are pending paint metrics, we check if any of the painted epochs is - // one of the ones that the paint metrics recorder is expecting . In that case, - // we get the current time, inform the layout thread about it and remove the - // pending metric from the list. 
- if !self.pending_paint_metrics.is_empty() { - let paint_time = precise_time_ns() as f64; - let mut to_remove = Vec::new(); - // For each pending paint metrics pipeline id - for (id, pending_epoch) in &self.pending_paint_metrics { - // we get the last painted frame id from webrender - if let Some(webrender_api::Epoch(epoch)) = self.webrender.current_epoch(id.to_webrender()) { - // and check if it is the one the layout thread is expecting, - let epoch = Epoch(epoch); - if *pending_epoch != epoch { - continue; - } - // in which case, we remove it from the list of pending metrics, - to_remove.push(id.clone()); - if let Some(pipeline) = self.pipeline(*id) { - // and inform the layout thread with the measured paint time. - let msg = LayoutControlMsg::PaintMetric(epoch, paint_time); - if let Err(e) = pipeline.layout_chan.send(msg) { - warn!("Sending PaintMetric message to layout failed ({}).", e); - } - } - } - } - for id in to_remove.iter() { - self.pending_paint_metrics.remove(id); - } - } - - let rv = match target { - CompositeTarget::Window => None, - CompositeTarget::WindowAndPng => { - let img = self.draw_img(render_target_info, - width, - height); - Some(Image { - width: img.width(), - height: img.height(), - format: PixelFormat::RGB8, - bytes: IpcSharedMemory::from_bytes(&*img), - id: None, - }) - } - CompositeTarget::PngFile => { - profile(ProfilerCategory::ImageSaving, None, self.time_profiler_chan.clone(), || { - match opts::get().output_file.as_ref() { - Some(path) => match File::create(path) { - Ok(mut file) => { - let img = self.draw_img(render_target_info, width, height); - let dynamic_image = DynamicImage::ImageRgb8(img); - if let Err(e) = dynamic_image.save(&mut file, ImageFormat::PNG) { - error!("Failed to save {} ({}).", path, e); - } - }, - Err(e) => error!("Failed to create {} ({}).", path, e), - }, - None => error!("No file specified."), - } - }); - None - } - }; - - // Perform the page flip. This will likely block for a while. - self.window.present(); - - self.last_composite_time = precise_time_ns(); - - self.composition_request = CompositionRequest::NoCompositingNecessary; - - self.process_animations(); - self.start_scrolling_bounce_if_necessary(); - self.waiting_for_results_of_scroll = false; - - Ok(rv) - } - - fn draw_img(&self, - render_target_info: RenderTargetInfo, - width: usize, - height: usize) - -> RgbImage { - let mut pixels = self.gl.read_pixels(0, 0, - width as gl::GLsizei, - height as gl::GLsizei, - gl::RGB, gl::UNSIGNED_BYTE); - - self.gl.bind_framebuffer(gl::FRAMEBUFFER, 0); - - self.gl.delete_buffers(&render_target_info.texture_ids); - self.gl.delete_renderbuffers(&render_target_info.renderbuffer_ids); - self.gl.delete_framebuffers(&render_target_info.framebuffer_ids); - - // flip image vertically (texture is upside down) - let orig_pixels = pixels.clone(); - let stride = width * 3; - for y in 0..height { - let dst_start = y * stride; - let src_start = (height - y - 1) * stride; - let src_slice = &orig_pixels[src_start .. src_start + stride]; - (&mut pixels[dst_start .. 
dst_start + stride]).clone_from_slice(&src_slice[..stride]); - } - RgbImage::from_raw(width as u32, height as u32, pixels).expect("Flipping image failed!") - } - - fn composite_if_necessary(&mut self, reason: CompositingReason) { - if self.composition_request == CompositionRequest::NoCompositingNecessary { - if opts::get().is_running_problem_test { - println!("updating composition_request ({:?})", reason); - } - self.composition_request = CompositionRequest::CompositeNow(reason) - } else if opts::get().is_running_problem_test { - println!("composition_request is already {:?}", self.composition_request); - } - } - - fn get_root_pipeline_id(&self) -> Option { - self.root_pipeline.as_ref().map(|pipeline| pipeline.id) - } - - fn start_scrolling_bounce_if_necessary(&mut self) { - if self.scroll_in_progress { - return - } - - if self.webrender.layers_are_bouncing_back() { - self.webrender_api.tick_scrolling_bounce_animations(self.webrender_document); - self.send_viewport_rects() - } - } - - pub fn receive_messages(&mut self) -> bool { - // Check for new messages coming from the other threads in the system. - let mut compositor_messages = vec![]; - let mut found_recomposite_msg = false; - while let Some(msg) = self.port.try_recv_compositor_msg() { - match msg { - Msg::Recomposite(_) if found_recomposite_msg => {} - Msg::Recomposite(_) => { - found_recomposite_msg = true; - compositor_messages.push(msg) - } - _ => compositor_messages.push(msg), - } - } - for msg in compositor_messages { - if !self.handle_browser_message(msg) { - return false - } - } - true - } - - pub fn perform_updates(&mut self) -> bool { - if self.shutdown_state == ShutdownState::FinishedShuttingDown { - return false; - } - - // If a pinch-zoom happened recently, ask for tiles at the new resolution - if self.zoom_action && precise_time_s() - self.zoom_time > 0.3 { - self.zoom_action = false; - } - - match self.composition_request { - CompositionRequest::NoCompositingNecessary => {} - CompositionRequest::CompositeNow(_) => { - self.composite() - } - } - - if !self.pending_scroll_zoom_events.is_empty() && !self.waiting_for_results_of_scroll { - self.process_pending_scroll_events() - } - self.shutdown_state != ShutdownState::FinishedShuttingDown - } - - /// Repaints and recomposites synchronously. You must be careful when calling this, as if a - /// paint is not scheduled the compositor will hang forever. - /// - /// This is used when resizing the window. - pub fn repaint_synchronously(&mut self) { - while self.shutdown_state != ShutdownState::ShuttingDown { - let msg = self.port.recv_compositor_msg(); - let need_recomposite = match msg { - Msg::Recomposite(_) => true, - _ => false, - }; - let keep_going = self.handle_browser_message(msg); - if need_recomposite { - self.composite(); - break - } - if !keep_going { - break - } - } - } - - pub fn pinch_zoom_level(&self) -> f32 { - // TODO(gw): Access via WR. - 1.0 - } - - pub fn toggle_webrender_debug(&mut self, option: WebRenderDebugOption) { - let mut flags = self.webrender.get_debug_flags(); - let flag = match option { - WebRenderDebugOption::Profiler => webrender::PROFILER_DBG, - WebRenderDebugOption::TextureCacheDebug => webrender::TEXTURE_CACHE_DBG, - WebRenderDebugOption::RenderTargetDebug => webrender::RENDER_TARGET_DBG, - }; - flags.toggle(flag); - self.webrender.set_debug_flags(flags); - self.webrender_api.generate_frame(self.webrender_document, None); - } -} - -/// Why we performed a composite. This is used for debugging. 
-#[derive(Clone, Copy, Debug, PartialEq)] -pub enum CompositingReason { - /// We hit the delayed composition timeout. (See `delayed_composition.rs`.) - DelayedCompositeTimeout, - /// The window has been scrolled and we're starting the first recomposite. - Scroll, - /// A scroll has continued and we need to recomposite again. - ContinueScroll, - /// We're performing the single composite in headless mode. - Headless, - /// We're performing a composite to run an animation. - Animation, - /// A new frame tree has been loaded. - NewFrameTree, - /// New painted buffers have been received. - NewPaintedBuffers, - /// The window has been zoomed. - Zoom, - /// A new WebRender frame has arrived. - NewWebRenderFrame, - /// WebRender has processed a scroll event and has generated a new frame. - NewWebRenderScrollFrame, -} diff --git a/collector/compile-benchmarks/style-servo/components/compositing/compositor_thread.rs b/collector/compile-benchmarks/style-servo/components/compositing/compositor_thread.rs deleted file mode 100644 index 03a7b46c7..000000000 --- a/collector/compile-benchmarks/style-servo/components/compositing/compositor_thread.rs +++ /dev/null @@ -1,260 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! Communication with the compositor thread. - -use SendableFrameTree; -use compositor::CompositingReason; -use euclid::{Point2D, Size2D}; -use gfx_traits::Epoch; -use ipc_channel::ipc::IpcSender; -use msg::constellation_msg::{Key, KeyModifiers, KeyState, PipelineId, TopLevelBrowsingContextId}; -use net_traits::image::base::Image; -use profile_traits::mem; -use profile_traits::time; -use script_traits::{AnimationState, ConstellationMsg, EventResult, LoadData}; -use servo_url::ServoUrl; -use std::fmt::{Debug, Error, Formatter}; -use std::sync::mpsc::{Receiver, Sender}; -use style_traits::cursor::Cursor; -use style_traits::viewport::ViewportConstraints; -use webrender; -use webrender_api; - - -/// Used to wake up the event loop, provided by the servo port/embedder. -pub trait EventLoopWaker : 'static + Send { - fn clone(&self) -> Box; - fn wake(&self); -} - -/// Sends messages to the embedder. -pub struct EmbedderProxy { - pub sender: Sender, - pub event_loop_waker: Box, -} - -impl EmbedderProxy { - pub fn send(&self, msg: EmbedderMsg) { - // Send a message and kick the OS event loop awake. - if let Err(err) = self.sender.send(msg) { - warn!("Failed to send response ({}).", err); - } - self.event_loop_waker.wake(); - } -} - -impl Clone for EmbedderProxy { - fn clone(&self) -> EmbedderProxy { - EmbedderProxy { - sender: self.sender.clone(), - event_loop_waker: self.event_loop_waker.clone(), - } - } -} - -/// The port that the embedder receives messages on. -pub struct EmbedderReceiver { - pub receiver: Receiver -} - -impl EmbedderReceiver { - pub fn try_recv_embedder_msg(&mut self) -> Option { - self.receiver.try_recv().ok() - } - pub fn recv_embedder_msg(&mut self) -> EmbedderMsg { - self.receiver.recv().unwrap() - } -} - -/// Sends messages to the compositor. -pub struct CompositorProxy { - pub sender: Sender, - pub event_loop_waker: Box, -} - -impl CompositorProxy { - pub fn send(&self, msg: Msg) { - // Send a message and kick the OS event loop awake. 
- if let Err(err) = self.sender.send(msg) { - warn!("Failed to send response ({}).", err); - } - self.event_loop_waker.wake(); - } -} - -impl Clone for CompositorProxy { - fn clone(&self) -> CompositorProxy { - CompositorProxy { - sender: self.sender.clone(), - event_loop_waker: self.event_loop_waker.clone(), - } - } -} - -/// The port that the compositor receives messages on. -pub struct CompositorReceiver { - pub receiver: Receiver -} - -impl CompositorReceiver { - pub fn try_recv_compositor_msg(&mut self) -> Option { - self.receiver.try_recv().ok() - } - pub fn recv_compositor_msg(&mut self) -> Msg { - self.receiver.recv().unwrap() - } -} - -pub trait RenderListener { - fn recomposite(&mut self, reason: CompositingReason); -} - -impl RenderListener for CompositorProxy { - fn recomposite(&mut self, reason: CompositingReason) { - self.send(Msg::Recomposite(reason)); - } -} - -pub enum EmbedderMsg { - /// A status message to be displayed by the browser chrome. - Status(TopLevelBrowsingContextId, Option), - /// Alerts the embedder that the current page has changed its title. - ChangePageTitle(TopLevelBrowsingContextId, Option), - /// Move the window to a point - MoveTo(TopLevelBrowsingContextId, Point2D), - /// Resize the window to size - ResizeTo(TopLevelBrowsingContextId, Size2D), - /// Get Window Informations size and position - GetClientWindow(TopLevelBrowsingContextId, IpcSender<(Size2D, Point2D)>), - /// Wether or not to follow a link - AllowNavigation(TopLevelBrowsingContextId, ServoUrl, IpcSender), - /// Sends an unconsumed key event back to the embedder. - KeyEvent(Option, Option, Key, KeyState, KeyModifiers), - /// Changes the cursor. - SetCursor(Cursor), - /// A favicon was detected - NewFavicon(TopLevelBrowsingContextId, ServoUrl), - /// tag finished parsing - HeadParsed(TopLevelBrowsingContextId), - /// The history state has changed. - HistoryChanged(TopLevelBrowsingContextId, Vec, usize), - /// Enter or exit fullscreen - SetFullscreenState(TopLevelBrowsingContextId, bool), - /// The load of a page has begun - LoadStart(TopLevelBrowsingContextId), - /// The load of a page has completed - LoadComplete(TopLevelBrowsingContextId), -} - -/// Messages from the painting thread and the constellation thread to the compositor thread. -pub enum Msg { - /// Requests that the compositor shut down. - Exit, - - /// Informs the compositor that the constellation has completed shutdown. - /// Required because the constellation can have pending calls to make - /// (e.g. SetFrameTree) at the time that we send it an ExitMsg. - ShutdownComplete, - - /// Scroll a page in a window - ScrollFragmentPoint(webrender_api::ClipId, Point2D, bool), - /// Alerts the compositor that the given pipeline has changed whether it is running animations. - ChangeRunningAnimationsState(PipelineId, AnimationState), - /// Replaces the current frame tree, typically called during main frame navigation. - SetFrameTree(SendableFrameTree), - /// Composite. - Recomposite(CompositingReason), - /// Script has handled a touch event, and either prevented or allowed default actions. - TouchEventProcessed(EventResult), - /// Composite to a PNG file and return the Image over a passed channel. - CreatePng(IpcSender>), - /// Alerts the compositor that the viewport has been constrained in some manner - ViewportConstrained(PipelineId, ViewportConstraints), - /// A reply to the compositor asking if the output image is stable. 
- IsReadyToSaveImageReply(bool), - /// Pipeline visibility changed - PipelineVisibilityChanged(PipelineId, bool), - /// WebRender has successfully processed a scroll. The boolean specifies whether a composite is - /// needed. - NewScrollFrameReady(bool), - /// A pipeline was shut down. - // This message acts as a synchronization point between the constellation, - // when it shuts down a pipeline, to the compositor; when the compositor - // sends a reply on the IpcSender, the constellation knows it's safe to - // tear down the other threads associated with this pipeline. - PipelineExited(PipelineId, IpcSender<()>), - /// Runs a closure in the compositor thread. - /// It's used to dispatch functions from webrender to the main thread's event loop. - /// Required to allow WGL GLContext sharing in Windows. - Dispatch(Box), - /// Indicates to the compositor that it needs to record the time when the frame with - /// the given ID (epoch) is painted and report it to the layout thread of the given - /// pipeline ID. - PendingPaintMetric(PipelineId, Epoch), - /// The load of a page has completed - LoadComplete(TopLevelBrowsingContextId), -} - -impl Debug for Msg { - fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { - match *self { - Msg::Exit => write!(f, "Exit"), - Msg::ShutdownComplete => write!(f, "ShutdownComplete"), - Msg::ScrollFragmentPoint(..) => write!(f, "ScrollFragmentPoint"), - Msg::ChangeRunningAnimationsState(..) => write!(f, "ChangeRunningAnimationsState"), - Msg::SetFrameTree(..) => write!(f, "SetFrameTree"), - Msg::Recomposite(..) => write!(f, "Recomposite"), - Msg::TouchEventProcessed(..) => write!(f, "TouchEventProcessed"), - Msg::CreatePng(..) => write!(f, "CreatePng"), - Msg::ViewportConstrained(..) => write!(f, "ViewportConstrained"), - Msg::IsReadyToSaveImageReply(..) => write!(f, "IsReadyToSaveImageReply"), - Msg::PipelineVisibilityChanged(..) => write!(f, "PipelineVisibilityChanged"), - Msg::PipelineExited(..) => write!(f, "PipelineExited"), - Msg::NewScrollFrameReady(..) => write!(f, "NewScrollFrameReady"), - Msg::Dispatch(..) => write!(f, "Dispatch"), - Msg::PendingPaintMetric(..) => write!(f, "PendingPaintMetric"), - Msg::LoadComplete(..) => write!(f, "LoadComplete"), - } - } -} - -impl Debug for EmbedderMsg { - fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { - match *self { - EmbedderMsg::Status(..) => write!(f, "Status"), - EmbedderMsg::ChangePageTitle(..) => write!(f, "ChangePageTitle"), - EmbedderMsg::MoveTo(..) => write!(f, "MoveTo"), - EmbedderMsg::ResizeTo(..) => write!(f, "ResizeTo"), - EmbedderMsg::GetClientWindow(..) => write!(f, "GetClientWindow"), - EmbedderMsg::AllowNavigation(..) => write!(f, "AllowNavigation"), - EmbedderMsg::KeyEvent(..) => write!(f, "KeyEvent"), - EmbedderMsg::SetCursor(..) => write!(f, "SetCursor"), - EmbedderMsg::NewFavicon(..) => write!(f, "NewFavicon"), - EmbedderMsg::HeadParsed(..) => write!(f, "HeadParsed"), - EmbedderMsg::HistoryChanged(..) => write!(f, "HistoryChanged"), - EmbedderMsg::SetFullscreenState(..) => write!(f, "SetFullscreenState"), - EmbedderMsg::LoadStart(..) => write!(f, "LoadStart"), - EmbedderMsg::LoadComplete(..) => write!(f, "LoadComplete"), - } - } -} - -/// Data used to construct a compositor. -pub struct InitialCompositorState { - /// A channel to the compositor. - pub sender: CompositorProxy, - /// A port on which messages inbound to the compositor can be received. - pub receiver: CompositorReceiver, - /// A channel to the constellation. 
- pub constellation_chan: Sender, - /// A channel to the time profiler thread. - pub time_profiler_chan: time::ProfilerChan, - /// A channel to the memory profiler thread. - pub mem_profiler_chan: mem::ProfilerChan, - /// Instance of webrender API - pub webrender: webrender::Renderer, - pub webrender_document: webrender_api::DocumentId, - pub webrender_api: webrender_api::RenderApi, -} diff --git a/collector/compile-benchmarks/style-servo/components/compositing/lib.rs b/collector/compile-benchmarks/style-servo/components/compositing/lib.rs deleted file mode 100644 index 03188affb..000000000 --- a/collector/compile-benchmarks/style-servo/components/compositing/lib.rs +++ /dev/null @@ -1,57 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -#![deny(unsafe_code)] -#![feature(box_syntax)] -#![feature(nonzero)] - -extern crate core; -extern crate euclid; -extern crate gfx_traits; -extern crate gleam; -extern crate image; -extern crate ipc_channel; -#[macro_use] -extern crate log; -extern crate msg; -extern crate net_traits; -extern crate profile_traits; -extern crate script_traits; -extern crate servo_config; -extern crate servo_geometry; -extern crate servo_url; -extern crate style_traits; -extern crate time; -extern crate webrender; -extern crate webrender_api; - -pub use compositor_thread::CompositorProxy; -pub use compositor::IOCompositor; -pub use compositor::ShutdownState; -use euclid::TypedSize2D; -use ipc_channel::ipc::IpcSender; -use msg::constellation_msg::PipelineId; -use msg::constellation_msg::TopLevelBrowsingContextId; -use script_traits::{ConstellationControlMsg, LayoutControlMsg}; -use style_traits::CSSPixel; - -mod compositor; -pub mod compositor_thread; -mod touch; -pub mod windowing; - -pub struct SendableFrameTree { - pub pipeline: CompositionPipeline, - pub size: Option>, - pub children: Vec, -} - -/// The subset of the pipeline that is needed for layer composition. -#[derive(Clone)] -pub struct CompositionPipeline { - pub id: PipelineId, - pub top_level_browsing_context_id: TopLevelBrowsingContextId, - pub script_chan: IpcSender, - pub layout_chan: IpcSender, -} diff --git a/collector/compile-benchmarks/style-servo/components/compositing/touch.rs b/collector/compile-benchmarks/style-servo/components/compositing/touch.rs deleted file mode 100644 index 0d6a7561a..000000000 --- a/collector/compile-benchmarks/style-servo/components/compositing/touch.rs +++ /dev/null @@ -1,230 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use euclid::{TypedPoint2D, TypedVector2D}; -use euclid::ScaleFactor; -use script_traits::{EventResult, TouchId}; -use self::TouchState::*; -use style_traits::DevicePixel; - -/// Minimum number of `DeviceIndependentPixel` to begin touch scrolling. -const TOUCH_PAN_MIN_SCREEN_PX: f32 = 20.0; - -pub struct TouchHandler { - pub state: TouchState, - pub active_touch_points: Vec, -} - -#[derive(Clone, Copy, Debug)] -pub struct TouchPoint { - pub id: TouchId, - pub point: TypedPoint2D -} - -impl TouchPoint { - pub fn new(id: TouchId, point: TypedPoint2D) -> Self { - TouchPoint { id: id, point: point } - } -} - -/// The states of the touch input state machine. 
-/// -/// TODO: Add support for "flinging" (scrolling inertia) -#[derive(Clone, Copy, Debug, Eq, PartialEq)] -pub enum TouchState { - /// Not tracking any touch point - Nothing, - /// A touchstart event was dispatched to the page, but the response wasn't received yet. - /// Contains the initial touch point. - WaitingForScript, - /// Script is consuming the current touch sequence; don't perform default actions. - DefaultPrevented, - /// A single touch point is active and may perform click or pan default actions. - /// Contains the initial touch location. - Touching, - /// A single touch point is active and has started panning. - Panning, - /// A two-finger pinch zoom gesture is active. - Pinching, - /// A multi-touch gesture is in progress. Contains the number of active touch points. - MultiTouch, -} - -/// The action to take in response to a touch event -#[derive(Clone, Copy, Debug)] -pub enum TouchAction { - /// Simulate a mouse click. - Click, - /// Scroll by the provided offset. - Scroll(TypedVector2D), - /// Zoom by a magnification factor and scroll by the provided offset. - Zoom(f32, TypedVector2D), - /// Send a JavaScript event to content. - DispatchEvent, - /// Don't do anything. - NoAction, -} - -impl TouchHandler { - pub fn new() -> Self { - TouchHandler { - state: Nothing, - active_touch_points: Vec::new(), - } - } - - pub fn on_touch_down(&mut self, id: TouchId, point: TypedPoint2D) { - let point = TouchPoint::new(id, point); - self.active_touch_points.push(point); - - self.state = match self.state { - Nothing => WaitingForScript, - Touching | Panning => Pinching, - WaitingForScript => WaitingForScript, - DefaultPrevented => DefaultPrevented, - Pinching | MultiTouch => MultiTouch, - }; - } - - pub fn on_touch_move(&mut self, id: TouchId, point: TypedPoint2D) - -> TouchAction { - let idx = match self.active_touch_points.iter_mut().position(|t| t.id == id) { - Some(i) => i, - None => { - warn!("Got a touchmove event for a non-active touch point"); - return TouchAction::NoAction; - } - }; - let old_point = self.active_touch_points[idx].point; - - let action = match self.state { - Touching => { - let delta = point - old_point; - - if delta.x.abs() > TOUCH_PAN_MIN_SCREEN_PX || - delta.y.abs() > TOUCH_PAN_MIN_SCREEN_PX - { - self.state = Panning; - TouchAction::Scroll(delta) - } else { - TouchAction::NoAction - } - } - Panning => { - let delta = point - old_point; - TouchAction::Scroll(delta) - } - DefaultPrevented => { - TouchAction::DispatchEvent - } - Pinching => { - let (d0, c0) = self.pinch_distance_and_center(); - self.active_touch_points[idx].point = point; - let (d1, c1) = self.pinch_distance_and_center(); - - let magnification = d1 / d0; - let scroll_delta = c1 - c0 * ScaleFactor::new(magnification); - - TouchAction::Zoom(magnification, scroll_delta) - } - WaitingForScript => TouchAction::NoAction, - MultiTouch => TouchAction::NoAction, - Nothing => unreachable!(), - }; - - // If we're still waiting to see whether this is a click or pan, remember the original - // location. Otherwise, update the touch point with the latest location. 
- if self.state != Touching && self.state != WaitingForScript { - self.active_touch_points[idx].point = point; - } - action - } - - pub fn on_touch_up(&mut self, id: TouchId, _point: TypedPoint2D) - -> TouchAction { - match self.active_touch_points.iter().position(|t| t.id == id) { - Some(i) => { - self.active_touch_points.swap_remove(i); - } - None => { - warn!("Got a touch up event for a non-active touch point"); - } - } - match self.state { - Touching => { - // FIXME: If the duration exceeds some threshold, send a contextmenu event instead. - // FIXME: Don't send a click if preventDefault is called on the touchend event. - self.state = Nothing; - TouchAction::Click - } - Nothing | Panning => { - self.state = Nothing; - TouchAction::NoAction - } - Pinching => { - self.state = Panning; - TouchAction::NoAction - } - WaitingForScript | DefaultPrevented | MultiTouch => { - if self.active_touch_points.is_empty() { - self.state = Nothing; - } - TouchAction::NoAction - } - } - } - - pub fn on_touch_cancel(&mut self, id: TouchId, _point: TypedPoint2D) { - match self.active_touch_points.iter().position(|t| t.id == id) { - Some(i) => { - self.active_touch_points.swap_remove(i); - } - None => { - warn!("Got a touchcancel event for a non-active touch point"); - return; - } - } - match self.state { - Nothing => {} - Touching | Panning => { - self.state = Nothing; - } - Pinching => { - self.state = Panning; - } - WaitingForScript | DefaultPrevented | MultiTouch => { - if self.active_touch_points.is_empty() { - self.state = Nothing; - } - } - } - } - - pub fn on_event_processed(&mut self, result: EventResult) { - if let WaitingForScript = self.state { - self.state = match result { - EventResult::DefaultPrevented => DefaultPrevented, - EventResult::DefaultAllowed => match self.touch_count() { - 1 => Touching, - 2 => Pinching, - _ => MultiTouch, - } - } - } - } - - fn touch_count(&self) -> usize { - self.active_touch_points.len() - } - - fn pinch_distance_and_center(&self) -> (f32, TypedPoint2D) { - debug_assert!(self.touch_count() == 2); - let p0 = self.active_touch_points[0].point; - let p1 = self.active_touch_points[1].point; - let center = p0.lerp(p1, 0.5); - let distance = (p0 - p1).length(); - - (distance, center) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/compositing/windowing.rs b/collector/compile-benchmarks/style-servo/components/compositing/windowing.rs deleted file mode 100644 index edff7cf85..000000000 --- a/collector/compile-benchmarks/style-servo/components/compositing/windowing.rs +++ /dev/null @@ -1,191 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! Abstract windowing methods. The concrete implementations of these can be found in `platform/`. 
- -use compositor_thread::EventLoopWaker; -use euclid::{Point2D, Size2D}; -use euclid::{ScaleFactor, TypedPoint2D, TypedSize2D}; -use gleam::gl; -use ipc_channel::ipc::IpcSender; -use msg::constellation_msg::{Key, KeyModifiers, KeyState, TopLevelBrowsingContextId, TraversalDirection}; -use net_traits::net_error_list::NetError; -use script_traits::{LoadData, MouseButton, TouchEventType, TouchId, TouchpadPressurePhase}; -use servo_geometry::DeviceIndependentPixel; -use servo_url::ServoUrl; -use std::fmt::{Debug, Error, Formatter}; -use std::rc::Rc; -use style_traits::DevicePixel; -use style_traits::cursor::Cursor; -use webrender_api::{DeviceUintSize, DeviceUintRect, ScrollLocation}; - -#[derive(Clone)] -pub enum MouseWindowEvent { - Click(MouseButton, TypedPoint2D), - MouseDown(MouseButton, TypedPoint2D), - MouseUp(MouseButton, TypedPoint2D), -} - -/// Various debug and profiling flags that WebRender supports. -#[derive(Clone)] -pub enum WebRenderDebugOption { - Profiler, - TextureCacheDebug, - RenderTargetDebug, -} - -/// Events that the windowing system sends to Servo. -#[derive(Clone)] -pub enum WindowEvent { - /// Sent when no message has arrived, but the event loop was kicked for some reason (perhaps - /// by another Servo subsystem). - /// - /// FIXME(pcwalton): This is kind of ugly and may not work well with multiprocess Servo. - /// It's possible that this should be something like - /// `CompositorMessageWindowEvent(compositor_thread::Msg)` instead. - Idle, - /// Sent when part of the window is marked dirty and needs to be redrawn. Before sending this - /// message, the window must make the same GL context as in `PrepareRenderingEvent` current. - Refresh, - /// Sent when the window is resized. - Resize(DeviceUintSize), - /// Touchpad Pressure - TouchpadPressure(TypedPoint2D, f32, TouchpadPressurePhase), - /// Sent when a new URL is to be loaded. - LoadUrl(TopLevelBrowsingContextId, ServoUrl), - /// Sent when a mouse hit test is to be performed. - MouseWindowEventClass(MouseWindowEvent), - /// Sent when a mouse move. - MouseWindowMoveEventClass(TypedPoint2D), - /// Touch event: type, identifier, point - Touch(TouchEventType, TouchId, TypedPoint2D), - /// Sent when the user scrolls. The first point is the delta and the second point is the - /// origin. - Scroll(ScrollLocation, TypedPoint2D, TouchEventType), - /// Sent when the user zooms. - Zoom(f32), - /// Simulated "pinch zoom" gesture for non-touch platforms (e.g. ctrl-scrollwheel). - PinchZoom(f32), - /// Sent when the user resets zoom to default. - ResetZoom, - /// Sent when the user uses chrome navigation (i.e. backspace or shift-backspace). - Navigation(TopLevelBrowsingContextId, TraversalDirection), - /// Sent when the user quits the application - Quit, - /// Sent when a key input state changes - KeyEvent(Option, Key, KeyState, KeyModifiers), - /// Sent when Ctr+R/Apple+R is called to reload the current page. - Reload(TopLevelBrowsingContextId), - /// Create a new top level browsing context - NewBrowser(ServoUrl, IpcSender), - /// Close a top level browsing context - CloseBrowser(TopLevelBrowsingContextId), - /// Make a top level browsing context visible, hiding the previous - /// visible one. 
- SelectBrowser(TopLevelBrowsingContextId), - /// Toggles a debug flag in WebRender - ToggleWebRenderDebug(WebRenderDebugOption), -} - -impl Debug for WindowEvent { - fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { - match *self { - WindowEvent::Idle => write!(f, "Idle"), - WindowEvent::Refresh => write!(f, "Refresh"), - WindowEvent::Resize(..) => write!(f, "Resize"), - WindowEvent::TouchpadPressure(..) => write!(f, "TouchpadPressure"), - WindowEvent::KeyEvent(..) => write!(f, "Key"), - WindowEvent::LoadUrl(..) => write!(f, "LoadUrl"), - WindowEvent::MouseWindowEventClass(..) => write!(f, "Mouse"), - WindowEvent::MouseWindowMoveEventClass(..) => write!(f, "MouseMove"), - WindowEvent::Touch(..) => write!(f, "Touch"), - WindowEvent::Scroll(..) => write!(f, "Scroll"), - WindowEvent::Zoom(..) => write!(f, "Zoom"), - WindowEvent::PinchZoom(..) => write!(f, "PinchZoom"), - WindowEvent::ResetZoom => write!(f, "ResetZoom"), - WindowEvent::Navigation(..) => write!(f, "Navigation"), - WindowEvent::Quit => write!(f, "Quit"), - WindowEvent::Reload(..) => write!(f, "Reload"), - WindowEvent::NewBrowser(..) => write!(f, "NewBrowser"), - WindowEvent::CloseBrowser(..) => write!(f, "CloseBrowser"), - WindowEvent::SelectBrowser(..) => write!(f, "SelectBrowser"), - WindowEvent::ToggleWebRenderDebug(..) => write!(f, "ToggleWebRenderDebug"), - } - } -} - -#[derive(Clone, Copy, Debug, PartialEq)] -pub enum AnimationState { - Idle, - Animating, -} - -pub trait WindowMethods { - /// Returns the rendering area size in hardware pixels. - fn framebuffer_size(&self) -> DeviceUintSize; - /// Returns the position and size of the window within the rendering area. - fn window_rect(&self) -> DeviceUintRect; - /// Returns the size of the window in density-independent "px" units. - fn size(&self) -> TypedSize2D; - /// Presents the window to the screen (perhaps by page flipping). - fn present(&self); - - /// Return the size of the window with head and borders and position of the window values - fn client_window(&self, ctx: TopLevelBrowsingContextId) -> (Size2D, Point2D); - /// Set the size inside of borders and head - fn set_inner_size(&self, ctx: TopLevelBrowsingContextId, size: Size2D); - /// Set the window position - fn set_position(&self, ctx: TopLevelBrowsingContextId, point: Point2D); - /// Set fullscreen state - fn set_fullscreen_state(&self, ctx: TopLevelBrowsingContextId, state: bool); - - /// Sets the page title for the current page. - fn set_page_title(&self, ctx: TopLevelBrowsingContextId, title: Option); - /// Called when the browser chrome should display a status message. - fn status(&self, ctx: TopLevelBrowsingContextId, Option); - /// Called when the browser has started loading a frame. - fn load_start(&self, ctx: TopLevelBrowsingContextId); - /// Called when the browser is done loading a frame. - fn load_end(&self, ctx: TopLevelBrowsingContextId); - /// Called when the browser encounters an error while loading a URL - fn load_error(&self, ctx: TopLevelBrowsingContextId, code: NetError, url: String); - /// Wether or not to follow a link - fn allow_navigation(&self, ctx: TopLevelBrowsingContextId, url: ServoUrl, IpcSender); - /// Called when the tag has finished parsing - fn head_parsed(&self, ctx: TopLevelBrowsingContextId); - /// Called when the history state has changed. - fn history_changed(&self, ctx: TopLevelBrowsingContextId, Vec, usize); - - /// Returns the scale factor of the system (device pixels / device independent pixels). 
- fn hidpi_factor(&self) -> ScaleFactor; - - /// Returns a thread-safe object to wake up the window's event loop. - fn create_event_loop_waker(&self) -> Box; - - /// Requests that the window system prepare a composite. Typically this will involve making - /// some type of platform-specific graphics context current. Returns true if the composite may - /// proceed and false if it should not. - fn prepare_for_composite(&self, width: usize, height: usize) -> bool; - - /// Sets the cursor to be used in the window. - fn set_cursor(&self, cursor: Cursor); - - /// Process a key event. - fn handle_key(&self, ctx: Option, ch: Option, key: Key, mods: KeyModifiers); - - /// Does this window support a clipboard - fn supports_clipboard(&self) -> bool; - - /// Add a favicon - fn set_favicon(&self, ctx: TopLevelBrowsingContextId, url: ServoUrl); - - /// Return the GL function pointer trait. - fn gl(&self) -> Rc; - - /// Set whether the application is currently animating. - /// Typically, when animations are active, the window - /// will want to avoid blocking on UI events, and just - /// run the event loop at the vsync interval. - fn set_animation_state(&self, _state: AnimationState) {} -} diff --git a/collector/compile-benchmarks/style-servo/components/config/Cargo.toml b/collector/compile-benchmarks/style-servo/components/config/Cargo.toml deleted file mode 100644 index 2c98f114f..000000000 --- a/collector/compile-benchmarks/style-servo/components/config/Cargo.toml +++ /dev/null @@ -1,31 +0,0 @@ -[package] -name = "servo_config" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "servo_config" -path = "lib.rs" - -[dependencies] -euclid = "0.15" -getopts = "0.2.11" -lazy_static = "0.2" -log = "0.3.5" -num_cpus = "1.1.0" -rustc-serialize = "0.3" -serde = "1.0" -servo_geometry = {path = "../geometry"} -servo_url = {path = "../url"} -url = "1.2" - -[dev-dependencies] -env_logger = "0.4" - -[target.'cfg(all(unix, not(target_os = "macos"), not(target_os = "ios"), not(target_os = "android")))'.dependencies] -xdg = "2.0" - -[target.'cfg(target_os = "android")'.dependencies] -android_injected_glue = "0.2" diff --git a/collector/compile-benchmarks/style-servo/components/config/basedir.rs b/collector/compile-benchmarks/style-servo/components/config/basedir.rs deleted file mode 100644 index 521eb4e9d..000000000 --- a/collector/compile-benchmarks/style-servo/components/config/basedir.rs +++ /dev/null @@ -1,90 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! Contains routines for retrieving default config directories. -//! For linux based platforms, it uses the XDG base directory spec but provides -//! similar abstractions for non-linux platforms. 
- -#[cfg(target_os = "android")] -use android_injected_glue; -#[cfg(any(target_os = "macos", target_os = "windows"))] -use std::env; -#[cfg(target_os = "android")] -use std::ffi::CStr; -use std::path::PathBuf; -#[cfg(all(unix, not(target_os = "macos"), not(target_os = "ios"), not(target_os = "android")))] -use xdg; - -#[cfg(all(unix, not(target_os = "macos"), not(target_os = "ios"), not(target_os = "android")))] -pub fn default_config_dir() -> Option { - let xdg_dirs = xdg::BaseDirectories::with_profile("servo", "default").unwrap(); - let config_dir = xdg_dirs.get_config_home(); - Some(config_dir) -} - -#[cfg(target_os = "android")] -#[allow(unsafe_code)] -pub fn default_config_dir() -> Option { - let dir = unsafe { - CStr::from_ptr((*android_injected_glue::get_app().activity).externalDataPath) - }; - Some(PathBuf::from(dir.to_str().unwrap())) -} - -#[cfg(all(unix, not(target_os = "macos"), not(target_os = "ios"), not(target_os = "android")))] -pub fn default_data_dir() -> Option { - let xdg_dirs = xdg::BaseDirectories::with_profile("servo", "default").unwrap(); - let data_dir = xdg_dirs.get_data_home(); - Some(data_dir) -} - -#[cfg(target_os = "android")] -#[allow(unsafe_code)] -pub fn default_data_dir() -> Option { - let dir = unsafe { - CStr::from_ptr((*android_injected_glue::get_app().activity).internalDataPath) - }; - Some(PathBuf::from(dir.to_str().unwrap())) -} - -#[cfg(all(unix, not(target_os = "macos"), not(target_os = "ios"), not(target_os = "android")))] -pub fn default_cache_dir() -> Option { - let xdg_dirs = xdg::BaseDirectories::with_profile("servo", "default").unwrap(); - let cache_dir = xdg_dirs.get_cache_home(); - Some(cache_dir) -} - -#[cfg(target_os = "android")] -#[allow(unsafe_code)] -pub fn default_cache_dir() -> Option { - // TODO: Use JNI to call context.getCacheDir(). - // There is no equivalent function in NDK/NativeActivity. - let dir = unsafe { - CStr::from_ptr((*android_injected_glue::get_app().activity).externalDataPath) - }; - Some(PathBuf::from(dir.to_str().unwrap())) -} - -#[cfg(target_os = "macos")] -pub fn default_config_dir() -> Option { - let mut config_dir = env::home_dir().unwrap(); - config_dir.push("Library"); - config_dir.push("Application Support"); - config_dir.push("Servo"); - Some(config_dir) -} - -#[cfg(target_os = "windows")] -pub fn default_config_dir() -> Option { - let mut config_dir = match env::var("APPDATA") { - Ok(appdata_path) => PathBuf::from(appdata_path), - Err(_) => { let mut dir = env::home_dir().unwrap(); - dir.push("Appdata"); - dir.push("Roaming"); - dir - } - }; - config_dir.push("Servo"); - Some(config_dir) -} diff --git a/collector/compile-benchmarks/style-servo/components/config/lib.rs b/collector/compile-benchmarks/style-servo/components/config/lib.rs deleted file mode 100644 index 55c33e25b..000000000 --- a/collector/compile-benchmarks/style-servo/components/config/lib.rs +++ /dev/null @@ -1,34 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -#![deny(unsafe_code)] - -#[cfg(target_os = "android")] -extern crate android_injected_glue; -extern crate euclid; -extern crate getopts; -#[macro_use] extern crate lazy_static; -#[macro_use] extern crate log; -extern crate num_cpus; -extern crate rustc_serialize; -#[macro_use] extern crate serde; -extern crate servo_geometry; -extern crate servo_url; -extern crate url; -#[cfg(all(unix, not(target_os = "macos"), not(target_os = "ios"), not(target_os = "android")))] -extern crate xdg; - -pub mod basedir; -#[allow(unsafe_code)] pub mod opts; -pub mod prefs; -pub mod resource_files; - -pub fn servo_version() -> String { - let cargo_version = env!("CARGO_PKG_VERSION"); - let git_info = option_env!("GIT_INFO"); - match git_info { - Some(info) => format!("Servo {}{}", cargo_version, info), - None => format!("Servo {}", cargo_version), - } -} diff --git a/collector/compile-benchmarks/style-servo/components/config/opts.rs b/collector/compile-benchmarks/style-servo/components/config/opts.rs deleted file mode 100644 index aa9a4bd0a..000000000 --- a/collector/compile-benchmarks/style-servo/components/config/opts.rs +++ /dev/null @@ -1,955 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! Configuration options for a single run of the servo application. Created -//! from command line arguments. - -use euclid::TypedSize2D; -use getopts::Options; -use num_cpus; -use prefs::{self, PrefValue, PREFS}; -use resource_files::set_resources_path; -use servo_geometry::DeviceIndependentPixel; -use servo_url::ServoUrl; -use std::borrow::Cow; -use std::cmp; -use std::default::Default; -use std::env; -use std::fs::{self, File}; -use std::io::{self, Read, Write}; -use std::path::{Path, PathBuf}; -use std::process; -use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT, Ordering}; -use url::{self, Url}; - - -/// Global flags for Servo, currently set on the command line. -#[derive(Clone, Deserialize, Serialize)] -pub struct Opts { - pub is_running_problem_test: bool, - - /// The initial URL to load. - pub url: Option, - - /// The maximum size of each tile in pixels (`-s`). - pub tile_size: usize, - - /// The ratio of device pixels per px at the default scale. If unspecified, will use the - /// platform default setting. - pub device_pixels_per_px: Option, - - /// `None` to disable the time profiler or `Some` to enable it with: - /// - an interval in seconds to cause it to produce output on that interval. - /// (`i.e. -p 5`). - /// - a file path to write profiling info to a TSV file upon Servo's termination. - /// (`i.e. -p out.tsv`). - /// - an InfluxDB hostname to store profiling info upon Servo's termination. - /// (`i.e. -p http://localhost:8086`) - pub time_profiling: Option, - - /// When the profiler is enabled, this is an optional path to dump a self-contained HTML file - /// visualizing the traces as a timeline. - pub time_profiler_trace_path: Option, - - /// `None` to disable the memory profiler or `Some` with an interval in seconds to enable it - /// and cause it to produce output on that interval (`-m`). - pub mem_profiler_period: Option, - - pub nonincremental_layout: bool, - - /// Where to load userscripts from, if any. 
An empty string will load from - /// the resources/user-agent-js directory, and if the option isn't passed userscripts - /// won't be loaded - pub userscripts: Option, - - pub user_stylesheets: Vec<(Vec, ServoUrl)>, - - pub output_file: Option, - - /// Replace unpaires surrogates in DOM strings with U+FFFD. - /// See https://github.com/servo/servo/issues/6564 - pub replace_surrogates: bool, - - /// Log GC passes and their durations. - pub gc_profile: bool, - - /// Load web fonts synchronously to avoid non-deterministic network-driven reflows. - pub load_webfonts_synchronously: bool, - - pub headless: bool, - pub hard_fail: bool, - - /// True if we should bubble intrinsic widths sequentially (`-b`). If this is true, then - /// intrinsic widths are computed as a separate pass instead of during flow construction. You - /// may wish to turn this flag on in order to benchmark style recalculation against other - /// browser engines. - pub bubble_inline_sizes_separately: bool, - - /// True if we should show borders on all fragments for debugging purposes - /// (`--show-debug-fragment-borders`). - pub show_debug_fragment_borders: bool, - - /// True if we should paint borders around flows based on which thread painted them. - pub show_debug_parallel_layout: bool, - - /// If set with --disable-text-aa, disable antialiasing on fonts. This is primarily useful for reftests - /// where pixel perfect results are required when using fonts such as the Ahem - /// font for layout tests. - pub enable_text_antialiasing: bool, - - /// If set with --disable-subpixel, use subpixel antialiasing for glyphs. In the future - /// this will likely become the default, but for now it's opt-in while we work - /// out any bugs and improve the implementation. - pub enable_subpixel_text_antialiasing: bool, - - /// If set with --disable-canvas-aa, disable antialiasing on the HTML canvas element. - /// Like --disable-text-aa, this is useful for reftests where pixel perfect results are required. - pub enable_canvas_antialiasing: bool, - - /// True if each step of layout is traced to an external JSON file - /// for debugging purposes. Settings this implies sequential layout - /// and paint. - pub trace_layout: bool, - - /// Periodically print out on which events script threads spend their processing time. - pub profile_script_events: bool, - - /// Enable all heartbeats for profiling. - pub profile_heartbeats: bool, - - /// `None` to disable debugger or `Some` with a port number to start a server to listen to - /// remote Firefox debugger connections. - pub debugger_port: Option, - - /// `None` to disable devtools or `Some` with a port number to start a server to listen to - /// remote Firefox devtools connections. - pub devtools_port: Option, - - /// `None` to disable WebDriver or `Some` with a port number to start a server to listen to - /// remote WebDriver commands. - pub webdriver_port: Option, - - /// The initial requested size of the window. - pub initial_window_size: TypedSize2D, - - /// An optional string allowing the user agent to be set for testing. - pub user_agent: Cow<'static, str>, - - /// Whether we're running in multiprocess mode. - pub multiprocess: bool, - - /// Whether we're running inside the sandbox. - pub sandbox: bool, - - /// Probability of randomly closing a pipeline, - /// used for testing the hardening of the constellation. - pub random_pipeline_closure_probability: Option, - - /// The seed for the RNG used to randomly close pipelines, - /// used for testing the hardening of the constellation. 
- pub random_pipeline_closure_seed: Option, - - /// Dumps the DOM after restyle. - pub dump_style_tree: bool, - - /// Dumps the rule tree. - pub dump_rule_tree: bool, - - /// Dumps the flow tree after a layout. - pub dump_flow_tree: bool, - - /// Dumps the display list after a layout. - pub dump_display_list: bool, - - /// Dumps the display list in JSON form after a layout. - pub dump_display_list_json: bool, - - /// Emits notifications when there is a relayout. - pub relayout_event: bool, - - /// Whether Style Sharing Cache is used - pub disable_share_style_cache: bool, - - /// Whether to show in stdout style sharing cache stats after a restyle. - pub style_sharing_stats: bool, - - /// Translate mouse input into touch events. - pub convert_mouse_to_touch: bool, - - /// True to exit after the page load (`-x`). - pub exit_after_load: bool, - - /// Do not use native titlebar - pub no_native_titlebar: bool, - - /// Enable vsync in the compositor - pub enable_vsync: bool, - - /// True to show webrender profiling stats on screen. - pub webrender_stats: bool, - - /// True to show webrender debug on screen. - pub webrender_debug: bool, - - /// True if webrender recording should be enabled. - pub webrender_record: bool, - - /// True if webrender is allowed to batch draw calls as instances. - pub webrender_batch: bool, - - /// True to compile all webrender shaders at init time. This is mostly - /// useful when modifying the shaders, to ensure they all compile - /// after each change is made. - pub precache_shaders: bool, - - /// True if WebRender should use multisample antialiasing. - pub use_msaa: bool, - - /// Directory for a default config directory - pub config_dir: Option, - - // don't skip any backtraces on panic - pub full_backtraces: bool, - - /// True to use OS native signposting facilities. This makes profiling events (script activity, - /// reflow, compositing, etc.) appear in Instruments.app on macOS. - pub signpost: bool, - - /// Print the version and exit. - pub is_printing_version: bool, - - /// Path to SSL certificates. - pub certificate_path: Option, - - /// Unminify Javascript. - pub unminify_js: bool, - - /// Print Progressive Web Metrics to console. - pub print_pwm: bool, -} - -fn print_usage(app: &str, opts: &Options) { - let message = format!("Usage: {} [ options ... ] [URL]\n\twhere options include", app); - println!("{}", opts.usage(&message)); -} - - -/// Debug options for Servo, currently set on the command line with -Z -#[derive(Default)] -pub struct DebugOptions { - /// List all the debug options. - pub help: bool, - - /// Bubble intrinsic widths separately like other engines. - pub bubble_widths: bool, - - /// Disable antialiasing of rendered text. - pub disable_text_aa: bool, - - /// Disable subpixel antialiasing of rendered text. - pub disable_subpixel_aa: bool, - - /// Disable antialiasing of rendered text on the HTML canvas element. - pub disable_canvas_aa: bool, - - /// Print the DOM after each restyle. - pub dump_style_tree: bool, - - /// Dumps the rule tree. - pub dump_rule_tree: bool, - - /// Print the flow tree after each layout. - pub dump_flow_tree: bool, - - /// Print the display list after each layout. - pub dump_display_list: bool, - - /// Print the display list in JSON form. - pub dump_display_list_json: bool, - - /// Print notifications when there is a relayout. - pub relayout_event: bool, - - /// Profile which events script threads spend their time on. - pub profile_script_events: bool, - - /// Enable all heartbeats for profiling. 
-    pub profile_heartbeats: bool,
-
-    /// Paint borders along fragment boundaries.
-    pub show_fragment_borders: bool,
-
-    /// Mark which thread laid each flow out with colors.
-    pub show_parallel_layout: bool,
-
-    /// Write layout trace to an external file for debugging.
-    pub trace_layout: bool,
-
-    /// Disable the style sharing cache.
-    pub disable_share_style_cache: bool,
-
-    /// Whether to show in stdout style sharing cache stats after a restyle.
-    pub style_sharing_stats: bool,
-
-    /// Translate mouse input into touch events.
-    pub convert_mouse_to_touch: bool,
-
-    /// Replace unpaired surrogates in DOM strings with U+FFFD.
-    /// See https://github.com/servo/servo/issues/6564
-    pub replace_surrogates: bool,
-
-    /// Log GC passes and their durations.
-    pub gc_profile: bool,
-
-    /// Load web fonts synchronously to avoid non-deterministic network-driven reflows.
-    pub load_webfonts_synchronously: bool,
-
-    /// Disable vsync in the compositor
-    pub disable_vsync: bool,
-
-    /// Show webrender profiling stats on screen.
-    pub webrender_stats: bool,
-
-    /// Show webrender debug on screen.
-    pub webrender_debug: bool,
-
-    /// Enable webrender recording.
-    pub webrender_record: bool,
-
-    /// Disable webrender instanced draw call batching.
-    pub webrender_disable_batch: bool,
-
-    /// Use multisample antialiasing in WebRender.
-    pub use_msaa: bool,
-
-    // don't skip any backtraces on panic
-    pub full_backtraces: bool,
-
-    /// True to compile all webrender shaders at init time. This is mostly
-    /// useful when modifying the shaders, to ensure they all compile
-    /// after each change is made.
-    pub precache_shaders: bool,
-
-    /// True to use OS native signposting facilities. This makes profiling events (script activity,
-    /// reflow, compositing, etc.) appear in Instruments.app on macOS.
-    pub signpost: bool,
-}
-
-
-impl DebugOptions {
-    pub fn extend(&mut self, debug_string: String) -> Result<(), String> {
-        for option in debug_string.split(',') {
-            match option {
-                "help" => self.help = true,
-                "bubble-widths" => self.bubble_widths = true,
-                "disable-text-aa" => self.disable_text_aa = true,
-                "disable-subpixel-aa" => self.disable_subpixel_aa = true,
-                "disable-canvas-aa" => self.disable_canvas_aa = true,
-                "dump-style-tree" => self.dump_style_tree = true,
-                "dump-rule-tree" => self.dump_rule_tree = true,
-                "dump-flow-tree" => self.dump_flow_tree = true,
-                "dump-display-list" => self.dump_display_list = true,
-                "dump-display-list-json" => self.dump_display_list_json = true,
-                "relayout-event" => self.relayout_event = true,
-                "profile-script-events" => self.profile_script_events = true,
-                "profile-heartbeats" => self.profile_heartbeats = true,
-                "show-fragment-borders" => self.show_fragment_borders = true,
-                "show-parallel-layout" => self.show_parallel_layout = true,
-                "trace-layout" => self.trace_layout = true,
-                "disable-share-style-cache" => self.disable_share_style_cache = true,
-                "style-sharing-stats" => self.style_sharing_stats = true,
-                "convert-mouse-to-touch" => self.convert_mouse_to_touch = true,
-                "replace-surrogates" => self.replace_surrogates = true,
-                "gc-profile" => self.gc_profile = true,
-                "load-webfonts-synchronously" => self.load_webfonts_synchronously = true,
-                "disable-vsync" => self.disable_vsync = true,
-                "wr-stats" => self.webrender_stats = true,
-                "wr-debug" => self.webrender_debug = true,
-                "wr-record" => self.webrender_record = true,
-                "wr-no-batch" => self.webrender_disable_batch = true,
-                "msaa" => self.use_msaa = true,
-                "full-backtraces" => self.full_backtraces = true,
-                "precache-shaders" => self.precache_shaders = true,
-                "signpost" => self.signpost = true,
-                "" => {},
-                _ => return Err(String::from(option)),
-            };
-        };
-        Ok(())
-    }
-}
-
-
-fn print_debug_usage(app: &str) -> !
{ - fn print_option(name: &str, description: &str) { - println!("\t{:<35} {}", name, description); - } - - println!("Usage: {} debug option,[options,...]\n\twhere options include\n\nOptions:", app); - - print_option("bubble-widths", "Bubble intrinsic widths separately like other engines."); - print_option("disable-text-aa", "Disable antialiasing of rendered text."); - print_option("disable-canvas-aa", "Disable antialiasing on the HTML canvas element."); - print_option("dump-style-tree", "Print the DOM with computed styles after each restyle."); - print_option("dump-flow-tree", "Print the flow tree after each layout."); - print_option("dump-display-list", "Print the display list after each layout."); - print_option("dump-display-list-json", "Print the display list in JSON form."); - print_option("relayout-event", "Print notifications when there is a relayout."); - print_option("profile-script-events", "Enable profiling of script-related events."); - print_option("profile-heartbeats", "Enable heartbeats for all thread categories."); - print_option("show-fragment-borders", "Paint borders along fragment boundaries."); - print_option("show-parallel-layout", "Mark which thread laid each flow out with colors."); - print_option("trace-layout", "Write layout trace to an external file for debugging."); - print_option("disable-share-style-cache", - "Disable the style sharing cache."); - print_option("parallel-display-list-building", "Build display lists in parallel."); - print_option("convert-mouse-to-touch", "Send touch events instead of mouse events"); - print_option("replace-surrogates", "Replace unpaires surrogates in DOM strings with U+FFFD. \ - See https://github.com/servo/servo/issues/6564"); - print_option("gc-profile", "Log GC passes and their durations."); - print_option("load-webfonts-synchronously", - "Load web fonts synchronously to avoid non-deterministic network-driven reflows"); - print_option("disable-vsync", - "Disable vsync mode in the compositor to allow profiling at more than monitor refresh rate"); - print_option("wr-stats", "Show WebRender profiler on screen."); - print_option("msaa", "Use multisample antialiasing in WebRender."); - print_option("full-backtraces", "Print full backtraces for all errors"); - print_option("wr-debug", "Display webrender tile borders."); - print_option("wr-no-batch", "Disable webrender instanced batching."); - print_option("precache-shaders", "Compile all shaders during init."); - print_option("signpost", "Emit native OS signposts for profile events (currently macOS only)"); - - println!(""); - - process::exit(0) -} - -#[derive(Clone, Deserialize, Serialize)] -pub enum OutputOptions { - /// Database connection config (hostname, name, user, pass) - DB(ServoUrl, Option, Option, Option), - FileName(String), - Stdout(f64), -} - -fn args_fail(msg: &str) -> ! 
{ - writeln!(io::stderr(), "{}", msg).unwrap(); - process::exit(1) -} - -static MULTIPROCESS: AtomicBool = ATOMIC_BOOL_INIT; - -#[inline] -pub fn multiprocess() -> bool { - MULTIPROCESS.load(Ordering::Relaxed) -} - -enum UserAgent { - Desktop, - Android, - #[allow(non_camel_case_types)] - iOS -} - -fn default_user_agent_string(agent: UserAgent) -> &'static str { - #[cfg(all(target_os = "linux", target_arch = "x86_64"))] - const DESKTOP_UA_STRING: &'static str = - "Mozilla/5.0 (X11; Linux x86_64; rv:55.0) Servo/1.0 Firefox/55.0"; - #[cfg(all(target_os = "linux", not(target_arch = "x86_64")))] - const DESKTOP_UA_STRING: &'static str = - "Mozilla/5.0 (X11; Linux i686; rv:55.0) Servo/1.0 Firefox/55.0"; - - #[cfg(all(target_os = "windows", target_arch = "x86_64"))] - const DESKTOP_UA_STRING: &'static str = - "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:55.0) Servo/1.0 Firefox/55.0"; - #[cfg(all(target_os = "windows", not(target_arch = "x86_64")))] - const DESKTOP_UA_STRING: &'static str = - "Mozilla/5.0 (Windows NT 6.1; rv:55.0) Servo/1.0 Firefox/55.0"; - - #[cfg(not(any(target_os = "linux", target_os = "windows")))] - // Neither Linux nor Windows, so maybe OS X, and if not then OS X is an okay fallback. - const DESKTOP_UA_STRING: &'static str = - "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:55.0) Servo/1.0 Firefox/55.0"; - - - match agent { - UserAgent::Desktop => { - DESKTOP_UA_STRING - } - UserAgent::Android => { - "Mozilla/5.0 (Android; Mobile; rv:55.0) Servo/1.0 Firefox/55.0" - } - UserAgent::iOS => { - "Mozilla/5.0 (iPhone; CPU iPhone OS 8_3 like Mac OS X; rv:55.0) Servo/1.0 Firefox/55.0" - } - } -} - -#[cfg(target_os = "android")] -const DEFAULT_USER_AGENT: UserAgent = UserAgent::Android; - -#[cfg(target_os = "ios")] -const DEFAULT_USER_AGENT: UserAgent = UserAgent::iOS; - -#[cfg(not(any(target_os = "android", target_os = "ios")))] -const DEFAULT_USER_AGENT: UserAgent = UserAgent::Desktop; - -pub fn default_opts() -> Opts { - Opts { - is_running_problem_test: false, - url: None, - tile_size: 512, - device_pixels_per_px: None, - time_profiling: None, - time_profiler_trace_path: None, - mem_profiler_period: None, - nonincremental_layout: false, - userscripts: None, - user_stylesheets: Vec::new(), - output_file: None, - replace_surrogates: false, - gc_profile: false, - load_webfonts_synchronously: false, - headless: false, - hard_fail: true, - bubble_inline_sizes_separately: false, - show_debug_fragment_borders: false, - show_debug_parallel_layout: false, - enable_text_antialiasing: true, - enable_subpixel_text_antialiasing: true, - enable_canvas_antialiasing: true, - trace_layout: false, - debugger_port: None, - devtools_port: None, - webdriver_port: None, - initial_window_size: TypedSize2D::new(1024, 740), - user_agent: default_user_agent_string(DEFAULT_USER_AGENT).into(), - multiprocess: false, - random_pipeline_closure_probability: None, - random_pipeline_closure_seed: None, - sandbox: false, - dump_style_tree: false, - dump_rule_tree: false, - dump_flow_tree: false, - dump_display_list: false, - dump_display_list_json: false, - relayout_event: false, - profile_script_events: false, - profile_heartbeats: false, - disable_share_style_cache: false, - style_sharing_stats: false, - convert_mouse_to_touch: false, - exit_after_load: false, - no_native_titlebar: false, - enable_vsync: true, - webrender_stats: false, - use_msaa: false, - config_dir: None, - full_backtraces: false, - is_printing_version: false, - webrender_debug: false, - webrender_record: false, - webrender_batch: true, - 
precache_shaders: false, - signpost: false, - certificate_path: None, - unminify_js: false, - print_pwm: false, - } -} - -pub fn from_cmdline_args(args: &[String]) -> ArgumentParsingResult { - let (app_name, args) = args.split_first().unwrap(); - - let mut opts = Options::new(); - opts.optflag("c", "cpu", "CPU painting"); - opts.optflag("g", "gpu", "GPU painting"); - opts.optopt("o", "output", "Output file", "output.png"); - opts.optopt("s", "size", "Size of tiles", "512"); - opts.optopt("", "device-pixel-ratio", "Device pixels per px", ""); - opts.optopt("t", "threads", "Number of paint threads", "1"); - opts.optflagopt("p", "profile", "Time profiler flag and either a TSV output filename \ - OR an interval for output to Stdout (blank for Stdout with interval of 5s)", "10 \ - OR time.tsv"); - opts.optflagopt("", "profiler-trace-path", - "Path to dump a self-contained HTML timeline of profiler traces", - ""); - opts.optflagopt("m", "memory-profile", "Memory profiler flag and output interval", "10"); - opts.optflag("x", "exit", "Exit after load flag"); - opts.optopt("y", "layout-threads", "Number of threads to use for layout", "1"); - opts.optflag("i", "nonincremental-layout", "Enable to turn off incremental layout."); - opts.optflagopt("", "userscripts", - "Uses userscripts in resources/user-agent-js, or a specified full path", ""); - opts.optmulti("", "user-stylesheet", - "A user stylesheet to be added to every document", "file.css"); - opts.optflag("z", "headless", "Headless mode"); - opts.optflag("f", "hard-fail", "Exit on thread failure instead of displaying about:failure"); - opts.optflag("F", "soft-fail", "Display about:failure on thread failure instead of exiting"); - opts.optflagopt("", "remote-debugging-port", "Start remote debugger server on port", "2794"); - opts.optflagopt("", "devtools", "Start remote devtools server on port", "6000"); - opts.optflagopt("", "webdriver", "Start remote WebDriver server on port", "7000"); - opts.optopt("", "resolution", "Set window resolution.", "1024x740"); - opts.optopt("u", - "user-agent", - "Set custom user agent string (or ios / android / desktop for platform default)", - "NCSA Mosaic/1.0 (X11;SunOS 4.1.4 sun4m)"); - opts.optflag("M", "multiprocess", "Run in multiprocess mode"); - opts.optflag("S", "sandbox", "Run in a sandbox if multiprocess"); - opts.optopt("", - "random-pipeline-closure-probability", - "Probability of randomly closing a pipeline (for testing constellation hardening).", - "0.0"); - opts.optopt("", "random-pipeline-closure-seed", "A fixed seed for repeatbility of random pipeline closure.", ""); - opts.optmulti("Z", "debug", - "A comma-separated string of debug options. 
Pass help to show available options.", ""); - opts.optflag("h", "help", "Print this message"); - opts.optopt("", "resources-path", "Path to find static resources", "/home/servo/resources"); - opts.optopt("", "certificate-path", "Path to find SSL certificates", "/home/servo/resources/certs"); - opts.optopt("", "content-process" , "Run as a content process and connect to the given pipe", - "servo-ipc-channel.abcdefg"); - opts.optmulti("", "pref", - "A preference to set to enable", "dom.mozbrowser.enabled"); - opts.optflag("b", "no-native-titlebar", "Do not use native titlebar"); - opts.optflag("w", "webrender", "Use webrender backend"); - opts.optopt("G", "graphics", "Select graphics backend (gl or es2)", "gl"); - opts.optopt("", "config-dir", - "config directory following xdg spec on linux platform", ""); - opts.optflag("v", "version", "Display servo version information"); - opts.optflag("", "unminify-js", "Unminify Javascript"); - opts.optopt("", "profiler-db-user", "Profiler database user", ""); - opts.optopt("", "profiler-db-pass", "Profiler database password", ""); - opts.optopt("", "profiler-db-name", "Profiler database name", ""); - opts.optflag("", "print-pwm", "Print Progressive Web Metrics"); - - let opt_match = match opts.parse(args) { - Ok(m) => m, - Err(f) => args_fail(&f.to_string()), - }; - - set_resources_path(opt_match.opt_str("resources-path")); - - if opt_match.opt_present("h") || opt_match.opt_present("help") { - print_usage(app_name, &opts); - process::exit(0); - }; - - // If this is the content process, we'll receive the real options over IPC. So just fill in - // some dummy options for now. - if let Some(content_process) = opt_match.opt_str("content-process") { - MULTIPROCESS.store(true, Ordering::SeqCst); - return ArgumentParsingResult::ContentProcess(content_process); - } - - let mut debug_options = DebugOptions::default(); - - for debug_string in opt_match.opt_strs("Z") { - if let Err(e) = debug_options.extend(debug_string) { - args_fail(&format!("error: unrecognized debug option: {}", e)); - } - } - - if debug_options.help { - print_debug_usage(app_name) - } - - let cwd = env::current_dir().unwrap(); - let url_opt = if !opt_match.free.is_empty() { - Some(&opt_match.free[0][..]) - } else { - None - }; - let is_running_problem_test = - url_opt - .as_ref() - .map_or(false, |url| - url.starts_with("http://web-platform.test:8000/2dcontext/drawing-images-to-the-canvas/") || - url.starts_with("http://web-platform.test:8000/_mozilla/mozilla/canvas/") || - url.starts_with("http://web-platform.test:8000/_mozilla/css/canvas_over_area.html")); - - let url_opt = url_opt.and_then(|url_string| parse_url_or_filename(&cwd, url_string) - .or_else(|error| { - warn!("URL parsing failed ({:?}).", error); - Err(error) - }).ok()); - - let tile_size: usize = match opt_match.opt_str("s") { - Some(tile_size_str) => tile_size_str.parse() - .unwrap_or_else(|err| args_fail(&format!("Error parsing option: -s ({})", err))), - None => 512, - }; - - let device_pixels_per_px = opt_match.opt_str("device-pixel-ratio").map(|dppx_str| - dppx_str.parse() - .unwrap_or_else(|err| args_fail(&format!("Error parsing option: --device-pixel-ratio ({})", err))) - ); - - // If only the flag is present, default to a 5 second period for both profilers - let time_profiling = if opt_match.opt_present("p") { - match opt_match.opt_str("p") { - Some(argument) => match argument.parse::() { - Ok(interval) => Some(OutputOptions::Stdout(interval)) , - Err(_) => { - match ServoUrl::parse(&argument) { - Ok(url) => 
Some(OutputOptions::DB(url, opt_match.opt_str("profiler-db-name"), - opt_match.opt_str("profiler-db-user"), - opt_match.opt_str("profiler-db-pass"))), - Err(_) => Some(OutputOptions::FileName(argument)), - } - } - }, - None => Some(OutputOptions::Stdout(5.0 as f64)), - } - } else { - // if the p option doesn't exist: - None - }; - - if let Some(ref time_profiler_trace_path) = opt_match.opt_str("profiler-trace-path") { - let mut path = PathBuf::from(time_profiler_trace_path); - path.pop(); - if let Err(why) = fs::create_dir_all(&path) { - error!("Couldn't create/open {:?}: {:?}", - Path::new(time_profiler_trace_path).to_string_lossy(), why); - } - } - - let mem_profiler_period = opt_match.opt_default("m", "5").map(|period| { - period.parse().unwrap_or_else(|err| args_fail(&format!("Error parsing option: -m ({})", err))) - }); - - let mut layout_threads: Option = opt_match.opt_str("y") - .map(|layout_threads_str| { - layout_threads_str.parse() - .unwrap_or_else(|err| args_fail(&format!("Error parsing option: -y ({})", err))) - }); - - let nonincremental_layout = opt_match.opt_present("i"); - - let random_pipeline_closure_probability = opt_match.opt_str("random-pipeline-closure-probability").map(|prob| - prob.parse().unwrap_or_else(|err| { - args_fail(&format!("Error parsing option: --random-pipeline-closure-probability ({})", err)) - }) - ); - - let random_pipeline_closure_seed = opt_match.opt_str("random-pipeline-closure-seed").map(|seed| - seed.parse().unwrap_or_else(|err| { - args_fail(&format!("Error parsing option: --random-pipeline-closure-seed ({})", err)) - }) - ); - - let mut bubble_inline_sizes_separately = debug_options.bubble_widths; - if debug_options.trace_layout { - layout_threads = Some(1); - bubble_inline_sizes_separately = true; - } - - let debugger_port = opt_match.opt_default("remote-debugging-port", "2794").map(|port| { - port.parse() - .unwrap_or_else(|err| args_fail(&format!("Error parsing option: --remote-debugging-port ({})", err))) - }); - - let devtools_port = opt_match.opt_default("devtools", "6000").map(|port| { - port.parse().unwrap_or_else(|err| args_fail(&format!("Error parsing option: --devtools ({})", err))) - }); - - let webdriver_port = opt_match.opt_default("webdriver", "7000").map(|port| { - port.parse().unwrap_or_else(|err| args_fail(&format!("Error parsing option: --webdriver ({})", err))) - }); - - let initial_window_size = match opt_match.opt_str("resolution") { - Some(res_string) => { - let res: Vec = res_string.split('x').map(|r| { - r.parse().unwrap_or_else(|err| args_fail(&format!("Error parsing option: --resolution ({})", err))) - }).collect(); - TypedSize2D::new(res[0], res[1]) - } - None => { - TypedSize2D::new(1024, 740) - } - }; - - if opt_match.opt_present("M") { - MULTIPROCESS.store(true, Ordering::SeqCst) - } - - let user_agent = match opt_match.opt_str("u") { - Some(ref ua) if ua == "ios" => default_user_agent_string(UserAgent::iOS).into(), - Some(ref ua) if ua == "android" => default_user_agent_string(UserAgent::Android).into(), - Some(ref ua) if ua == "desktop" => default_user_agent_string(UserAgent::Desktop).into(), - Some(ua) => ua.into(), - None => default_user_agent_string(DEFAULT_USER_AGENT).into(), - }; - - let user_stylesheets = opt_match.opt_strs("user-stylesheet").iter().map(|filename| { - let path = cwd.join(filename); - let url = ServoUrl::from_url(Url::from_file_path(&path).unwrap()); - let mut contents = Vec::new(); - File::open(path) - .unwrap_or_else(|err| args_fail(&format!("Couldn't open {}: {}", filename, err))) - 
.read_to_end(&mut contents) - .unwrap_or_else(|err| args_fail(&format!("Couldn't read {}: {}", filename, err))); - (contents, url) - }).collect(); - - let do_not_use_native_titlebar = - opt_match.opt_present("b") || - !PREFS.get("shell.native-titlebar.enabled").as_boolean().unwrap(); - - let is_printing_version = opt_match.opt_present("v") || opt_match.opt_present("version"); - - let opts = Opts { - is_running_problem_test: is_running_problem_test, - url: url_opt, - tile_size: tile_size, - device_pixels_per_px: device_pixels_per_px, - time_profiling: time_profiling, - time_profiler_trace_path: opt_match.opt_str("profiler-trace-path"), - mem_profiler_period: mem_profiler_period, - nonincremental_layout: nonincremental_layout, - userscripts: opt_match.opt_default("userscripts", ""), - user_stylesheets: user_stylesheets, - output_file: opt_match.opt_str("o"), - replace_surrogates: debug_options.replace_surrogates, - gc_profile: debug_options.gc_profile, - load_webfonts_synchronously: debug_options.load_webfonts_synchronously, - headless: opt_match.opt_present("z"), - hard_fail: opt_match.opt_present("f") && !opt_match.opt_present("F"), - bubble_inline_sizes_separately: bubble_inline_sizes_separately, - profile_script_events: debug_options.profile_script_events, - profile_heartbeats: debug_options.profile_heartbeats, - trace_layout: debug_options.trace_layout, - debugger_port: debugger_port, - devtools_port: devtools_port, - webdriver_port: webdriver_port, - initial_window_size: initial_window_size, - user_agent: user_agent, - multiprocess: opt_match.opt_present("M"), - sandbox: opt_match.opt_present("S"), - random_pipeline_closure_probability: random_pipeline_closure_probability, - random_pipeline_closure_seed: random_pipeline_closure_seed, - show_debug_fragment_borders: debug_options.show_fragment_borders, - show_debug_parallel_layout: debug_options.show_parallel_layout, - enable_text_antialiasing: !debug_options.disable_text_aa, - enable_subpixel_text_antialiasing: !debug_options.disable_subpixel_aa, - enable_canvas_antialiasing: !debug_options.disable_canvas_aa, - dump_style_tree: debug_options.dump_style_tree, - dump_rule_tree: debug_options.dump_rule_tree, - dump_flow_tree: debug_options.dump_flow_tree, - dump_display_list: debug_options.dump_display_list, - dump_display_list_json: debug_options.dump_display_list_json, - relayout_event: debug_options.relayout_event, - disable_share_style_cache: debug_options.disable_share_style_cache, - style_sharing_stats: debug_options.style_sharing_stats, - convert_mouse_to_touch: debug_options.convert_mouse_to_touch, - exit_after_load: opt_match.opt_present("x"), - no_native_titlebar: do_not_use_native_titlebar, - enable_vsync: !debug_options.disable_vsync, - webrender_stats: debug_options.webrender_stats, - use_msaa: debug_options.use_msaa, - config_dir: opt_match.opt_str("config-dir").map(Into::into), - full_backtraces: debug_options.full_backtraces, - is_printing_version: is_printing_version, - webrender_debug: debug_options.webrender_debug, - webrender_record: debug_options.webrender_record, - webrender_batch: !debug_options.webrender_disable_batch, - precache_shaders: debug_options.precache_shaders, - signpost: debug_options.signpost, - certificate_path: opt_match.opt_str("certificate-path"), - unminify_js: opt_match.opt_present("unminify-js"), - print_pwm: opt_match.opt_present("print-pwm"), - }; - - set_defaults(opts); - - // These must happen after setting the default options, since the prefs rely on - // on the resource path. 
-    // Note that command line preferences have the highest precedence
-
-    prefs::add_user_prefs();
-
-    for pref in opt_match.opt_strs("pref").iter() {
-        parse_pref_from_command_line(pref);
-    }
-
-    if let Some(layout_threads) = layout_threads {
-        PREFS.set("layout.threads", PrefValue::Number(layout_threads as f64));
-    } else if let Some(layout_threads) = PREFS.get("layout.threads").as_string() {
-        PREFS.set("layout.threads", PrefValue::Number(layout_threads.parse::<f64>().unwrap()));
-    } else if *PREFS.get("layout.threads") == PrefValue::Missing {
-        let layout_threads = cmp::max(num_cpus::get() * 3 / 4, 1);
-        PREFS.set("layout.threads", PrefValue::Number(layout_threads as f64));
-    }
-
-    ArgumentParsingResult::ChromeProcess
-}
-
-pub enum ArgumentParsingResult {
-    ChromeProcess,
-    ContentProcess(String),
-}
-
-// Make Opts available globally. This saves having to clone and pass
-// opts everywhere it is used, which gets particularly cumbersome
-// when passing through the DOM structures.
-static mut DEFAULT_OPTIONS: *mut Opts = 0 as *mut Opts;
-const INVALID_OPTIONS: *mut Opts = 0x01 as *mut Opts;
-
-lazy_static! {
-    static ref OPTIONS: Opts = {
-        unsafe {
-            let initial = if !DEFAULT_OPTIONS.is_null() {
-                let opts = Box::from_raw(DEFAULT_OPTIONS);
-                *opts
-            } else {
-                default_opts()
-            };
-            DEFAULT_OPTIONS = INVALID_OPTIONS;
-            initial
-        }
-    };
-}
-
-pub fn set_defaults(opts: Opts) {
-    unsafe {
-        assert!(DEFAULT_OPTIONS.is_null());
-        assert!(DEFAULT_OPTIONS != INVALID_OPTIONS);
-        let box_opts = Box::new(opts);
-        DEFAULT_OPTIONS = Box::into_raw(box_opts);
-    }
-}
-
-pub fn parse_pref_from_command_line(pref: &str) {
-    let split: Vec<&str> = pref.splitn(2, '=').collect();
-    let pref_name = split[0];
-    let value = split.get(1);
-    match value {
-        Some(&"false") => PREFS.set(pref_name, PrefValue::Boolean(false)),
-        Some(&"true") | None => PREFS.set(pref_name, PrefValue::Boolean(true)),
-        Some(value) => match value.parse::<f64>() {
-            Ok(v) => PREFS.set(pref_name, PrefValue::Number(v)),
-            Err(_) => PREFS.set(pref_name, PrefValue::String(value.to_string()))
-        }
-    };
-}
-
-#[inline]
-pub fn get() -> &'static Opts {
-    &OPTIONS
-}
-
-pub fn parse_url_or_filename(cwd: &Path, input: &str) -> Result<ServoUrl, ()> {
-    match ServoUrl::parse(input) {
-        Ok(url) => Ok(url),
-        Err(url::ParseError::RelativeUrlWithoutBase) => {
-            Url::from_file_path(&*cwd.join(input)).map(ServoUrl::from_url)
-        }
-        Err(_) => Err(()),
-    }
-}
-
-impl Opts {
-    pub fn should_use_osmesa(&self) -> bool {
-        self.headless
-    }
-}
diff --git a/collector/compile-benchmarks/style-servo/components/config/prefs.rs b/collector/compile-benchmarks/style-servo/components/config/prefs.rs
deleted file mode 100644
index 3bfb94772..000000000
--- a/collector/compile-benchmarks/style-servo/components/config/prefs.rs
+++ /dev/null
@@ -1,276 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-use basedir::default_config_dir;
-use num_cpus;
-use opts;
-use resource_files::resources_dir_path;
-use rustc_serialize::json::{Json, ToJson};
-use std::borrow::ToOwned;
-use std::cmp::max;
-use std::collections::HashMap;
-use std::fs::File;
-use std::io::{Read, Write, stderr};
-use std::path::PathBuf;
-use std::sync::{Arc, RwLock};
-
-lazy_static!
{ - pub static ref PREFS: Preferences = { - let defaults = default_prefs(); - if let Ok(prefs) = read_prefs() { - defaults.extend(prefs); - } - defaults - }; -} - -#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)] -pub enum PrefValue { - Boolean(bool), - String(String), - Number(f64), - Missing -} - -impl PrefValue { - pub fn from_json(data: Json) -> Result { - let value = match data { - Json::Boolean(x) => PrefValue::Boolean(x), - Json::String(x) => PrefValue::String(x), - Json::F64(x) => PrefValue::Number(x), - Json::I64(x) => PrefValue::Number(x as f64), - Json::U64(x) => PrefValue::Number(x as f64), - _ => return Err(()) - }; - Ok(value) - } - - pub fn as_boolean(&self) -> Option { - match *self { - PrefValue::Boolean(value) => { - Some(value) - }, - _ => None - } - } - - pub fn as_string(&self) -> Option<&str> { - match *self { - PrefValue::String(ref value) => { - Some(&value) - }, - _ => None - } - } - - pub fn as_i64(&self) -> Option { - match *self { - PrefValue::Number(x) => Some(x as i64), - _ => None, - } - } - - pub fn as_u64(&self) -> Option { - match *self { - PrefValue::Number(x) => Some(x as u64), - _ => None, - } - } -} - -impl ToJson for PrefValue { - fn to_json(&self) -> Json { - match *self { - PrefValue::Boolean(x) => { - Json::Boolean(x) - }, - PrefValue::String(ref x) => { - Json::String(x.clone()) - }, - PrefValue::Number(x) => { - Json::F64(x) - }, - PrefValue::Missing => Json::Null - } - } -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub enum Pref { - NoDefault(Arc), - WithDefault(Arc, Option>) -} - - -impl Pref { - pub fn new(value: PrefValue) -> Pref { - Pref::NoDefault(Arc::new(value)) - } - - fn new_default(value: PrefValue) -> Pref { - Pref::WithDefault(Arc::new(value), None) - } - - fn from_json(data: Json) -> Result { - let value = PrefValue::from_json(data)?; - Ok(Pref::new_default(value)) - } - - pub fn value(&self) -> &Arc { - match *self { - Pref::NoDefault(ref x) => x, - Pref::WithDefault(ref default, ref override_value) => { - match *override_value { - Some(ref x) => x, - None => default - } - } - } - } - - fn set(&mut self, value: PrefValue) { - // TODO - this should error if we try to override a pref of one type - // with a value of a different type - match *self { - Pref::NoDefault(ref mut pref_value) => { - *pref_value = Arc::new(value) - }, - Pref::WithDefault(_, ref mut override_value) => { - *override_value = Some(Arc::new(value)) - } - } - } -} - -impl ToJson for Pref { - fn to_json(&self) -> Json { - self.value().to_json() - } -} - -pub fn default_prefs() -> Preferences { - let prefs = Preferences(Arc::new(RwLock::new(HashMap::new()))); - prefs.set("layout.threads", PrefValue::Number( - max(num_cpus::get() * 3 / 4, 1) as f64)); - prefs -} - -pub fn read_prefs_from_file(mut file: T) - -> Result, ()> where T: Read { - let json = Json::from_reader(&mut file).or_else(|e| { - println!("Ignoring invalid JSON in preferences: {:?}.", e); - Err(()) - })?; - - let mut prefs = HashMap::new(); - if let Json::Object(obj) = json { - for (name, value) in obj.into_iter() { - match Pref::from_json(value) { - Ok(x) => { - prefs.insert(name, x); - }, - Err(_) => println!("Ignoring non-boolean/string/i64 preference value for {:?}", name), - } - } - } - Ok(prefs) -} - -pub fn add_user_prefs() { - match opts::get().config_dir { - Some(ref config_path) => { - let mut path = PathBuf::from(config_path); - init_user_prefs(&mut path); - } - None => { - let mut path = default_config_dir().unwrap(); - if path.join("prefs.json").exists() { - 
init_user_prefs(&mut path); - } - } - } -} - -fn init_user_prefs(path: &mut PathBuf) { - path.push("prefs.json"); - if let Ok(file) = File::open(path) { - if let Ok(prefs) = read_prefs_from_file(file) { - PREFS.extend(prefs); - } - } else { - writeln!(&mut stderr(), "Error opening prefs.json from config directory") - .expect("failed printing to stderr"); - } -} - -fn read_prefs() -> Result, ()> { - let mut path = resources_dir_path().map_err(|_| ())?; - path.push("prefs.json"); - - let file = File::open(path).or_else(|e| { - writeln!(&mut stderr(), "Error opening preferences: {:?}.", e) - .expect("failed printing to stderr"); - Err(()) - })?; - - read_prefs_from_file(file) -} - -pub struct Preferences(Arc>>); - -impl Preferences { - pub fn get(&self, name: &str) -> Arc { - self.0.read().unwrap().get(name).map_or(Arc::new(PrefValue::Missing), |x| x.value().clone()) - } - - pub fn cloned(&self) -> HashMap { - self.0.read().unwrap().clone() - } - - pub fn is_mozbrowser_enabled(&self) -> bool { - self.get("dom.mozbrowser.enabled").as_boolean().unwrap_or(false) - } - - pub fn set(&self, name: &str, value: PrefValue) { - let mut prefs = self.0.write().unwrap(); - if let Some(pref) = prefs.get_mut(name) { - pref.set(value); - return; - } - prefs.insert(name.to_owned(), Pref::new(value)); - } - - pub fn reset(&self, name: &str) -> Arc { - let mut prefs = self.0.write().unwrap(); - let result = match prefs.get_mut(name) { - None => return Arc::new(PrefValue::Missing), - Some(&mut Pref::NoDefault(_)) => Arc::new(PrefValue::Missing), - Some(&mut Pref::WithDefault(ref default, ref mut set_value)) => { - *set_value = None; - default.clone() - }, - }; - if *result == PrefValue::Missing { - prefs.remove(name); - } - result - } - - pub fn reset_all(&self) { - let names = { - self.0.read().unwrap().keys().cloned().collect::>() - }; - for name in names.iter() { - self.reset(name); - } - } - - pub fn extend(&self, extension: HashMap) { - self.0.write().unwrap().extend(extension); - } - - pub fn is_webvr_enabled(&self) -> bool { - self.get("dom.webvr.enabled").as_boolean().unwrap_or(false) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/config/resource_files.rs b/collector/compile-benchmarks/style-servo/components/config/resource_files.rs deleted file mode 100644 index e8fc58328..000000000 --- a/collector/compile-benchmarks/style-servo/components/config/resource_files.rs +++ /dev/null @@ -1,83 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -#[cfg(target_os = "android")] -use android_injected_glue; -#[cfg(not(target_os = "android"))] -use std::env; -#[cfg(target_os = "android")] -use std::ffi::CStr; -use std::fs::File; -use std::io::{self, Read}; -use std::path::{Path, PathBuf}; -use std::sync::{Arc, Mutex}; - -lazy_static! 
{ - static ref CMD_RESOURCE_DIR: Arc>> = { - Arc::new(Mutex::new(None)) - }; -} - -pub fn set_resources_path(path: Option) { - let mut dir = CMD_RESOURCE_DIR.lock().unwrap(); - *dir = path; -} - -#[cfg(target_os = "android")] -#[allow(unsafe_code)] -pub fn resources_dir_path() -> io::Result { - let mut dir = CMD_RESOURCE_DIR.lock().unwrap(); - - if let Some(ref path) = *dir { - return Ok(PathBuf::from(path)); - } - - let data_path = unsafe { - CStr::from_ptr((*android_injected_glue::get_app().activity).externalDataPath) - }; - let path = PathBuf::from(data_path.to_str().unwrap()); - *dir = Some(path.to_str().unwrap().to_owned()); - Ok(path) -} - -#[cfg(not(target_os = "android"))] -pub fn resources_dir_path() -> io::Result { - let mut dir = CMD_RESOURCE_DIR.lock().unwrap(); - - if let Some(ref path) = *dir { - return Ok(PathBuf::from(path)); - } - - // FIXME: Find a way to not rely on the executable being - // under `[/$target_triple]/target/debug` - // or `[/$target_triple]/target/release`. - let mut path = env::current_exe()?; - // Follow symlink - path = path.canonicalize()?; - - while path.pop() { - path.push("resources"); - if path.is_dir() { - break; - } - path.pop(); - // Check for Resources on mac when using a case sensitive filesystem. - path.push("Resources"); - if path.is_dir() { - break; - } - path.pop(); - } - *dir = Some(path.to_str().unwrap().to_owned()); - Ok(path) -} - -pub fn read_resource_file>(relative_path: P) -> io::Result> { - let mut path = resources_dir_path()?; - path.push(relative_path); - let mut file = File::open(&path)?; - let mut data = Vec::new(); - file.read_to_end(&mut data)?; - Ok(data) -} diff --git a/collector/compile-benchmarks/style-servo/components/constellation/Cargo.toml b/collector/compile-benchmarks/style-servo/components/constellation/Cargo.toml deleted file mode 100644 index 2d659bd43..000000000 --- a/collector/compile-benchmarks/style-servo/components/constellation/Cargo.toml +++ /dev/null @@ -1,45 +0,0 @@ -[package] -name = "constellation" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "constellation" -path = "lib.rs" - -[dependencies] -backtrace = "0.3" -bluetooth_traits = { path = "../bluetooth_traits" } -canvas = {path = "../canvas"} -clipboard = "0.4" -canvas_traits = {path = "../canvas_traits"} -compositing = {path = "../compositing"} -debugger = {path = "../debugger"} -devtools_traits = {path = "../devtools_traits"} -euclid = "0.15" -gfx = {path = "../gfx"} -gfx_traits = {path = "../gfx_traits"} -hyper = "0.10" -ipc-channel = "0.8" -itertools = "0.5" -layout_traits = {path = "../layout_traits"} -log = "0.3.5" -metrics = {path = "../metrics"} -msg = {path = "../msg"} -net = {path = "../net"} -net_traits = {path = "../net_traits"} -profile_traits = {path = "../profile_traits"} -script_traits = {path = "../script_traits"} -serde = "1.0" -style_traits = {path = "../style_traits"} -servo_config = {path = "../config"} -servo_rand = {path = "../rand"} -servo_remutex = {path = "../remutex"} -servo_url = {path = "../url"} -webvr_traits = {path = "../webvr_traits"} -webrender_api = {git = "https://github.com/servo/webrender", features = ["ipc"]} - -[target.'cfg(all(not(target_os = "windows"), not(target_os = "ios")))'.dependencies] -gaol = {git = "https://github.com/servo/gaol"} diff --git a/collector/compile-benchmarks/style-servo/components/constellation/browsingcontext.rs b/collector/compile-benchmarks/style-servo/components/constellation/browsingcontext.rs deleted file 
mode 100644 index 2d124b3b6..000000000 --- a/collector/compile-benchmarks/style-servo/components/constellation/browsingcontext.rs +++ /dev/null @@ -1,235 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use euclid::TypedSize2D; -use msg::constellation_msg::{BrowsingContextId, TopLevelBrowsingContextId, PipelineId}; -use pipeline::Pipeline; -use script_traits::LoadData; -use std::collections::HashMap; -use std::iter::once; -use std::mem::replace; -use std::time::Instant; -use style_traits::CSSPixel; - -/// The constellation's view of a browsing context. -/// Each browsing context has a session history, caused by -/// navigation and traversing the history. Each browsing context has its -/// current entry, plus past and future entries. The past is sorted -/// chronologically, the future is sorted reverse chronologically: -/// in particular prev.pop() is the latest past entry, and -/// next.pop() is the earliest future entry. -pub struct BrowsingContext { - /// The browsing context id. - pub id: BrowsingContextId, - - /// The top-level browsing context ancestor - pub top_level_id: TopLevelBrowsingContextId, - - /// The size of the frame. - pub size: Option>, - - /// The timestamp for the current session history entry. - pub instant: Instant, - - /// The pipeline for the current session history entry. - pub pipeline_id: PipelineId, - - /// The load data for the current session history entry. - pub load_data: LoadData, - - /// The past session history, ordered chronologically. - pub prev: Vec, - - /// The future session history, ordered reverse chronologically. - pub next: Vec, -} - -impl BrowsingContext { - /// Create a new browsing context. - /// Note this just creates the browsing context, it doesn't add it to the constellation's set of browsing contexts. - pub fn new(id: BrowsingContextId, - top_level_id: TopLevelBrowsingContextId, - pipeline_id: PipelineId, - load_data: LoadData) - -> BrowsingContext - { - BrowsingContext { - id: id, - top_level_id: top_level_id, - size: None, - pipeline_id: pipeline_id, - instant: Instant::now(), - load_data: load_data, - prev: vec!(), - next: vec!(), - } - } - - /// Get the current session history entry. - pub fn current(&self) -> SessionHistoryEntry { - SessionHistoryEntry { - instant: self.instant, - browsing_context_id: self.id, - pipeline_id: Some(self.pipeline_id), - load_data: self.load_data.clone(), - } - } - - /// Set the current session history entry, and push the current frame entry into the past. - pub fn load(&mut self, pipeline_id: PipelineId, load_data: LoadData) { - let current = self.current(); - self.prev.push(current); - self.instant = Instant::now(); - self.pipeline_id = pipeline_id; - self.load_data = load_data; - } - - /// Set the future to be empty. - pub fn remove_forward_entries(&mut self) -> Vec { - replace(&mut self.next, vec!()) - } - - /// Update the current entry of the BrowsingContext from an entry that has been traversed to. - pub fn update_current(&mut self, pipeline_id: PipelineId, entry: SessionHistoryEntry) { - self.pipeline_id = pipeline_id; - self.instant = entry.instant; - self.load_data = entry.load_data; - } - - /// Is this a top-level browsing context? - pub fn is_top_level(&self) -> bool { - self.id == self.top_level_id - } -} - -/// An entry in a browsing context's session history. 
-/// Each entry stores the pipeline id for a document in the session history. -/// -/// When we operate on the joint session history, entries are sorted chronologically, -/// so we timestamp the entries by when the entry was added to the session history. -/// -/// https://html.spec.whatwg.org/multipage/#session-history-entry -#[derive(Clone)] -pub struct SessionHistoryEntry { - /// The timestamp for when the session history entry was created - pub instant: Instant, - - /// The pipeline for the document in the session history, - /// None if the entry has been discarded - pub pipeline_id: Option, - - /// The load data for this entry, used to reload the pipeline if it has been discarded - pub load_data: LoadData, - - /// The frame that this session history entry is part of - pub browsing_context_id: BrowsingContextId, -} - -/// Represents a pending change in a session history, that will be applied -/// once the new pipeline has loaded and completed initial layout / paint. -pub struct SessionHistoryChange { - /// The browsing context to change. - pub browsing_context_id: BrowsingContextId, - - /// The top-level browsing context ancestor. - pub top_level_browsing_context_id: TopLevelBrowsingContextId, - - /// The pipeline for the document being loaded. - pub new_pipeline_id: PipelineId, - - /// The data for the document being loaded. - pub load_data: LoadData, - - /// Is the new document replacing the current document (e.g. a reload) - /// or pushing it into the session history (e.g. a navigation)? - /// If it is replacing an existing entry, we store its timestamp. - pub replace_instant: Option, -} - -/// An iterator over browsing contexts, returning the descendant -/// contexts whose active documents are fully active, in depth-first -/// order. -pub struct FullyActiveBrowsingContextsIterator<'a> { - /// The browsing contexts still to iterate over. - pub stack: Vec, - - /// The set of all browsing contexts. - pub browsing_contexts: &'a HashMap, - - /// The set of all pipelines. We use this to find the active - /// children of a frame, which are the iframes in the currently - /// active document. - pub pipelines: &'a HashMap, -} - -impl<'a> Iterator for FullyActiveBrowsingContextsIterator<'a> { - type Item = &'a BrowsingContext; - fn next(&mut self) -> Option<&'a BrowsingContext> { - loop { - let browsing_context_id = match self.stack.pop() { - Some(browsing_context_id) => browsing_context_id, - None => return None, - }; - let browsing_context = match self.browsing_contexts.get(&browsing_context_id) { - Some(browsing_context) => browsing_context, - None => { - warn!("BrowsingContext {:?} iterated after closure.", browsing_context_id); - continue; - }, - }; - let pipeline = match self.pipelines.get(&browsing_context.pipeline_id) { - Some(pipeline) => pipeline, - None => { - warn!("Pipeline {:?} iterated after closure.", browsing_context.pipeline_id); - continue; - }, - }; - self.stack.extend(pipeline.children.iter()); - return Some(browsing_context) - } - } -} - -/// An iterator over browsing contexts, returning all descendant -/// contexts in depth-first order. Note that this iterator returns all -/// contexts, not just the fully active ones. -pub struct AllBrowsingContextsIterator<'a> { - /// The browsing contexts still to iterate over. - pub stack: Vec, - - /// The set of all browsing contexts. - pub browsing_contexts: &'a HashMap, - - /// The set of all pipelines. We use this to find the - /// children of a browsing context, which are the iframes in all documents - /// in the session history. 
- pub pipelines: &'a HashMap, -} - -impl<'a> Iterator for AllBrowsingContextsIterator<'a> { - type Item = &'a BrowsingContext; - fn next(&mut self) -> Option<&'a BrowsingContext> { - let pipelines = self.pipelines; - loop { - let browsing_context_id = match self.stack.pop() { - Some(browsing_context_id) => browsing_context_id, - None => return None, - }; - let browsing_context = match self.browsing_contexts.get(&browsing_context_id) { - Some(browsing_context) => browsing_context, - None => { - warn!("BrowsingContext {:?} iterated after closure.", browsing_context_id); - continue; - }, - }; - let child_browsing_context_ids = browsing_context.prev.iter().chain(browsing_context.next.iter()) - .filter_map(|entry| entry.pipeline_id) - .chain(once(browsing_context.pipeline_id)) - .filter_map(|pipeline_id| pipelines.get(&pipeline_id)) - .flat_map(|pipeline| pipeline.children.iter()); - self.stack.extend(child_browsing_context_ids); - return Some(browsing_context) - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/constellation/constellation.rs b/collector/compile-benchmarks/style-servo/components/constellation/constellation.rs deleted file mode 100644 index 4141e04d8..000000000 --- a/collector/compile-benchmarks/style-servo/components/constellation/constellation.rs +++ /dev/null @@ -1,3063 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! The `Constellation`, Servo's Grand Central Station -//! -//! The constellation tracks all information kept globally by the -//! browser engine, which includes: -//! -//! * The set of all `EventLoop` objects. Each event loop is -//! the constellation's view of a script thread. The constellation -//! interacts with a script thread by message-passing. -//! -//! * The set of all `Pipeline` objects. Each pipeline gives the -//! constellation's view of a `Window`, with its script thread and -//! layout threads. Pipelines may share script threads, but not -//! layout threads. -//! -//! * The set of all `BrowsingContext` objects. Each browsing context -//! gives the constellation's view of a `WindowProxy`. -//! Each browsing context stores an independent -//! session history, created by navigation. The session -//! history can be traversed, for example by the back and forwards UI, -//! so each session history maintains a list of past and future pipelines, -//! as well as the current active pipeline. -//! -//! There are two kinds of browsing context: top-level ones (for -//! example tabs in a browser UI), and nested ones (typically caused -//! by `iframe` elements). Browsing contexts have a hierarchy -//! (typically caused by `iframe`s containing `iframe`s), giving rise -//! to a forest whose roots are top-level browsing context. The logical -//! relationship between these types is: -//! -//! ``` -//! +------------+ +------------+ +---------+ -//! | Browsing | ------parent?------> | Pipeline | --event_loop--> | Event | -//! | Context | ------current------> | | | Loop | -//! | | ------prev*--------> | | <---pipeline*-- | | -//! | | ------next*--------> | | +---------+ -//! | | | | -//! | | <-top_level--------- | | -//! | | <-browsing_context-- | | -//! +------------+ +------------+ -//! ``` -// -//! Complicating matters, there are also mozbrowser iframes, which are top-level -//! iframes with a parent. -//! -//! 
The constellation also maintains channels to threads, including: -//! -//! * The script and layout threads. -//! * The graphics compositor. -//! * The font cache, image cache, and resource manager, which load -//! and cache shared fonts, images, or other resources. -//! * The service worker manager. -//! * The devtools, debugger and webdriver servers. -//! -//! The constellation passes messages between the threads, and updates its state -//! to track the evolving state of the browsing context tree. -//! -//! The constellation acts as a logger, tracking any `warn!` messages from threads, -//! and converting any `error!` or `panic!` into a crash report, which is filed -//! using an appropriate `mozbrowsererror` event. -//! -//! Since there is only one constellation, and its responsibilities include crash reporting, -//! it is very important that it does not panic. - -use backtrace::Backtrace; -use bluetooth_traits::BluetoothRequest; -use browsingcontext::{BrowsingContext, SessionHistoryChange, SessionHistoryEntry}; -use browsingcontext::{FullyActiveBrowsingContextsIterator, AllBrowsingContextsIterator}; -use canvas::canvas_paint_thread::CanvasPaintThread; -use canvas::webgl_thread::WebGLThreads; -use canvas_traits::canvas::CanvasMsg; -use clipboard::{ClipboardContext, ClipboardProvider}; -use compositing::SendableFrameTree; -use compositing::compositor_thread::{CompositorProxy, EmbedderMsg, EmbedderProxy}; -use compositing::compositor_thread::Msg as ToCompositorMsg; -use debugger; -use devtools_traits::{ChromeToDevtoolsControlMsg, DevtoolsControlMsg}; -use euclid::{Size2D, TypedSize2D, ScaleFactor}; -use event_loop::EventLoop; -use gfx::font_cache_thread::FontCacheThread; -use gfx_traits::Epoch; -use ipc_channel::{Error as IpcError}; -use ipc_channel::ipc::{self, IpcSender, IpcReceiver}; -use ipc_channel::router::ROUTER; -use itertools::Itertools; -use layout_traits::LayoutThreadFactory; -use log::{Log, LogLevel, LogLevelFilter, LogMetadata, LogRecord}; -use msg::constellation_msg::{BrowsingContextId, TopLevelBrowsingContextId, FrameType, PipelineId}; -use msg::constellation_msg::{Key, KeyModifiers, KeyState}; -use msg::constellation_msg::{PipelineNamespace, PipelineNamespaceId, TraversalDirection}; -use net_traits::{self, IpcSend, FetchResponseMsg, ResourceThreads}; -use net_traits::pub_domains::reg_host; -use net_traits::request::RequestInit; -use net_traits::storage_thread::{StorageThreadMsg, StorageType}; -use network_listener::NetworkListener; -use pipeline::{InitialPipelineState, Pipeline}; -use profile_traits::mem; -use profile_traits::time; -use script_traits::{AnimationState, AnimationTickType, CompositorEvent}; -use script_traits::{ConstellationControlMsg, ConstellationMsg as FromCompositorMsg, DiscardBrowsingContext}; -use script_traits::{DocumentActivity, DocumentState, LayoutControlMsg, LoadData}; -use script_traits::{IFrameLoadInfo, IFrameLoadInfoWithData, IFrameSandboxState, TimerSchedulerMsg}; -use script_traits::{LayoutMsg as FromLayoutMsg, ScriptMsg as FromScriptMsg, ScriptThreadFactory}; -use script_traits::{LogEntry, ScriptToConstellationChan, ServiceWorkerMsg, webdriver_msg}; -use script_traits::{MozBrowserErrorType, MozBrowserEvent, WebDriverCommandMsg, WindowSizeData}; -use script_traits::{SWManagerMsg, ScopeThings, UpdatePipelineIdReason, WindowSizeType}; -use serde::{Deserialize, Serialize}; -use servo_config::opts; -use servo_config::prefs::PREFS; -use servo_rand::{Rng, SeedableRng, ServoRng, random}; -use servo_remutex::ReentrantMutex; -use servo_url::{Host, 
ImmutableOrigin, ServoUrl}; -use std::borrow::ToOwned; -use std::cmp::Ordering; -use std::collections::{HashMap, VecDeque}; -use std::iter::once; -use std::marker::PhantomData; -use std::process; -use std::rc::{Rc, Weak}; -use std::sync::Arc; -use std::sync::mpsc::{Receiver, Sender, channel}; -use std::thread; -use style_traits::CSSPixel; -use style_traits::cursor::Cursor; -use style_traits::viewport::ViewportConstraints; -use timer_scheduler::TimerScheduler; -use webrender_api; -use webvr_traits::{WebVREvent, WebVRMsg}; - -/// The `Constellation` itself. In the servo browser, there is one -/// constellation, which maintains all of the browser global data. -/// In embedded applications, there may be more than one constellation, -/// which are independent of each other. -/// -/// The constellation may be in a different process from the pipelines, -/// and communicates using IPC. -/// -/// It is parameterized over a `LayoutThreadFactory` and a -/// `ScriptThreadFactory` (which in practice are implemented by -/// `LayoutThread` in the `layout` crate, and `ScriptThread` in -/// the `script` crate). Script and layout communicate using a `Message` -/// type. -pub struct Constellation { - /// An IPC channel for script threads to send messages to the constellation. - /// This is the script threads' view of `script_receiver`. - script_sender: IpcSender<(PipelineId, FromScriptMsg)>, - - /// A channel for the constellation to receive messages from script threads. - /// This is the constellation's view of `script_sender`. - script_receiver: Receiver>, - - /// An IPC channel for layout threads to send messages to the constellation. - /// This is the layout threads' view of `layout_receiver`. - layout_sender: IpcSender, - - /// A channel for the constellation to receive messages from layout threads. - /// This is the constellation's view of `layout_sender`. - layout_receiver: Receiver>, - - /// A channel for network listener to send messages to the constellation. - network_listener_sender: Sender<(PipelineId, FetchResponseMsg)>, - - /// A channel for the constellation to receive messages from network listener. - network_listener_receiver: Receiver<(PipelineId, FetchResponseMsg)>, - - /// A channel for the constellation to receive messages from the compositor thread. - compositor_receiver: Receiver, - - /// A channel through which messages can be sent to the embedder. - embedder_proxy: EmbedderProxy, - - /// A channel (the implementation of which is port-specific) for the - /// constellation to send messages to the compositor thread. - compositor_proxy: CompositorProxy, - - /// The last frame tree sent to WebRender. - active_browser_id: Option, - - /// Channels for the constellation to send messages to the public - /// resource-related threads. There are two groups of resource - /// threads: one for public browsing, and one for private - /// browsing. - public_resource_threads: ResourceThreads, - - /// Channels for the constellation to send messages to the private - /// resource-related threads. There are two groups of resource - /// threads: one for public browsing, and one for private - /// browsing. - private_resource_threads: ResourceThreads, - - /// A channel for the constellation to send messages to the font - /// cache thread. - font_cache_thread: FontCacheThread, - - /// A channel for the constellation to send messages to the - /// debugger thread. - debugger_chan: Option, - - /// A channel for the constellation to send messages to the - /// devtools thread. 
- devtools_chan: Option>, - - /// An IPC channel for the constellation to send messages to the - /// bluetooth thread. - bluetooth_thread: IpcSender, - - /// An IPC channel for the constellation to send messages to the - /// Service Worker Manager thread. - swmanager_chan: Option>, - - /// An IPC channel for Service Worker Manager threads to send - /// messages to the constellation. This is the SW Manager thread's - /// view of `swmanager_receiver`. - swmanager_sender: IpcSender, - - /// A channel for the constellation to receive messages from the - /// Service Worker Manager thread. This is the constellation's view of - /// `swmanager_sender`. - swmanager_receiver: Receiver>, - - /// A channel for the constellation to send messages to the - /// time profiler thread. - time_profiler_chan: time::ProfilerChan, - - /// A channel for the constellation to send messages to the - /// memory profiler thread. - mem_profiler_chan: mem::ProfilerChan, - - /// A channel for the constellation to send messages to the - /// timer thread. - scheduler_chan: IpcSender, - - /// A single WebRender document the constellation operates on. - webrender_document: webrender_api::DocumentId, - - /// A channel for the constellation to send messages to the - /// WebRender thread. - webrender_api_sender: webrender_api::RenderApiSender, - - /// The set of all event loops in the browser. We generate a new - /// event loop for each registered domain name (aka eTLD+1) in - /// each top-level browsing context. We store the event loops in a map - /// indexed by top-level browsing context id - /// (as a `TopLevelBrowsingContextId`) and registered - /// domain name (as a `Host`) to event loops. This double - /// indirection ensures that separate tabs do not share event - /// loops, even if the same domain is loaded in each. - /// It is important that scripts with the same eTLD+1 - /// share an event loop, since they can use `document.domain` - /// to become same-origin, at which point they can share DOM objects. - event_loops: HashMap>>, - - /// The set of all the pipelines in the browser. - /// (See the `pipeline` module for more details.) - pipelines: HashMap, - - /// The set of all the browsing contexts in the browser. - browsing_contexts: HashMap, - - /// When a navigation is performed, we do not immediately update - /// the session history, instead we ask the event loop to begin loading - /// the new document, and do not update the browsing context until the - /// document is active. Between starting the load and it activating, - /// we store a `SessionHistoryChange` object for the navigation in progress. - pending_changes: Vec, - - /// The currently focused pipeline for key events. - focus_pipeline_id: Option, - - /// Pipeline IDs are namespaced in order to avoid name collisions, - /// and the namespaces are allocated by the constellation. - next_pipeline_namespace_id: PipelineNamespaceId, - - /// The size of the top-level window. - window_size: WindowSizeData, - - /// Means of accessing the clipboard - clipboard_ctx: Option, - - /// Bits of state used to interact with the webdriver implementation - webdriver: WebDriverData, - - /// Document states for loaded pipelines (used only when writing screenshots). - document_states: HashMap, - - /// Are we shutting down? - shutting_down: bool, - - /// Have we seen any warnings? Hopefully always empty! - /// The buffer contains `(thread_name, reason)` entries. - handled_warnings: VecDeque<(Option, String)>, - - /// The random number generator and probability for closing pipelines. 
- /// This is for testing the hardening of the constellation. - random_pipeline_closure: Option<(ServoRng, f32)>, - - /// Phantom data that keeps the Rust type system happy. - phantom: PhantomData<(Message, LTF, STF)>, - - /// Entry point to create and get channels to a WebGLThread. - webgl_threads: WebGLThreads, - - /// A channel through which messages can be sent to the webvr thread. - webvr_chan: Option>, -} - -/// State needed to construct a constellation. -pub struct InitialConstellationState { - /// A channel through which messages can be sent to the embedder. - pub embedder_proxy: EmbedderProxy, - - /// A channel through which messages can be sent to the compositor. - pub compositor_proxy: CompositorProxy, - - /// A channel to the debugger, if applicable. - pub debugger_chan: Option, - - /// A channel to the developer tools, if applicable. - pub devtools_chan: Option>, - - /// A channel to the bluetooth thread. - pub bluetooth_thread: IpcSender, - - /// A channel to the font cache thread. - pub font_cache_thread: FontCacheThread, - - /// A channel to the resource thread. - pub public_resource_threads: ResourceThreads, - - /// A channel to the resource thread. - pub private_resource_threads: ResourceThreads, - - /// A channel to the time profiler thread. - pub time_profiler_chan: time::ProfilerChan, - - /// A channel to the memory profiler thread. - pub mem_profiler_chan: mem::ProfilerChan, - - /// Webrender document ID. - pub webrender_document: webrender_api::DocumentId, - - /// Webrender API. - pub webrender_api_sender: webrender_api::RenderApiSender, - - /// Entry point to create and get channels to a WebGLThread. - pub webgl_threads: WebGLThreads, - - /// A channel to the webgl thread. - pub webvr_chan: Option>, - - /// Whether the constellation supports the clipboard. - /// TODO: this field is not used, remove it? - pub supports_clipboard: bool, -} - -/// Data needed for webdriver -struct WebDriverData { - load_channel: Option<(PipelineId, IpcSender)>, - resize_channel: Option>, -} - -impl WebDriverData { - fn new() -> WebDriverData { - WebDriverData { - load_channel: None, - resize_channel: None, - } - } -} - -/// When we are running reftests, we save an image to compare against a reference. -/// This enum gives the possible states of preparing such an image. -#[derive(Debug, PartialEq)] -enum ReadyToSave { - NoTopLevelBrowsingContext, - PendingChanges, - WebFontNotLoaded, - DocumentLoading, - EpochMismatch, - PipelineUnknown, - Ready, -} - -/// When we are exiting a pipeline, we can either force exiting or not. -/// A normal exit waits for the compositor to update its state before -/// exiting, and delegates layout exit to script. A forced exit does -/// not notify the compositor, and exits layout without involving script. -#[derive(Clone, Copy)] -enum ExitPipelineMode { - Normal, - Force, -} - -/// The constellation uses logging to perform crash reporting. -/// The constellation receives all `warn!`, `error!` and `panic!` messages, -/// and generates a crash report when it receives a panic. - -/// A logger directed at the constellation from content processes -#[derive(Clone)] -pub struct FromScriptLogger { - /// A channel to the constellation - pub script_to_constellation_chan: Arc>, -} - -impl FromScriptLogger { - /// Create a new constellation logger. 
- pub fn new(script_to_constellation_chan: ScriptToConstellationChan) -> FromScriptLogger { - FromScriptLogger { - script_to_constellation_chan: Arc::new(ReentrantMutex::new(script_to_constellation_chan)) - } - } - - /// The maximum log level the constellation logger is interested in. - pub fn filter(&self) -> LogLevelFilter { - LogLevelFilter::Warn - } -} - -impl Log for FromScriptLogger { - fn enabled(&self, metadata: &LogMetadata) -> bool { - metadata.level() <= LogLevel::Warn - } - - fn log(&self, record: &LogRecord) { - if let Some(entry) = log_entry(record) { - debug!("Sending log entry {:?}.", entry); - let thread_name = thread::current().name().map(ToOwned::to_owned); - let msg = FromScriptMsg::LogEntry(thread_name, entry); - let chan = self.script_to_constellation_chan.lock().unwrap_or_else(|err| err.into_inner()); - let _ = chan.send(msg); - } - } -} - -/// A logger directed at the constellation from the compositor -#[derive(Clone)] -pub struct FromCompositorLogger { - /// A channel to the constellation - pub constellation_chan: Arc>>, -} - -impl FromCompositorLogger { - /// Create a new constellation logger. - pub fn new(constellation_chan: Sender) -> FromCompositorLogger { - FromCompositorLogger { - constellation_chan: Arc::new(ReentrantMutex::new(constellation_chan)) - } - } - - /// The maximum log level the constellation logger is interested in. - pub fn filter(&self) -> LogLevelFilter { - LogLevelFilter::Warn - } -} - -impl Log for FromCompositorLogger { - fn enabled(&self, metadata: &LogMetadata) -> bool { - metadata.level() <= LogLevel::Warn - } - - fn log(&self, record: &LogRecord) { - if let Some(entry) = log_entry(record) { - debug!("Sending log entry {:?}.", entry); - let top_level_id = TopLevelBrowsingContextId::installed(); - let thread_name = thread::current().name().map(ToOwned::to_owned); - let msg = FromCompositorMsg::LogEntry(top_level_id, thread_name, entry); - let chan = self.constellation_chan.lock().unwrap_or_else(|err| err.into_inner()); - let _ = chan.send(msg); - } - } -} - -/// Rust uses `LogRecord` for storing logging, but servo converts that to -/// a `LogEntry`. We do this so that we can record panics as well as log -/// messages, and because `LogRecord` does not implement serde (de)serialization, -/// so cannot be used over an IPC channel. -fn log_entry(record: &LogRecord) -> Option { - match record.level() { - LogLevel::Error if thread::panicking() => Some(LogEntry::Panic( - format!("{}", record.args()), - format!("{:?}", Backtrace::new()) - )), - LogLevel::Error => Some(LogEntry::Error( - format!("{}", record.args()) - )), - LogLevel::Warn => Some(LogEntry::Warn( - format!("{}", record.args()) - )), - _ => None, - } -} - -/// The number of warnings to include in each crash report. -const WARNINGS_BUFFER_SIZE: usize = 32; - -/// Route an ipc receiver to an mpsc receiver, preserving any errors. -/// This is the same as `route_ipc_receiver_to_new_mpsc_receiver`, -/// but does not panic on deserializtion errors. -fn route_ipc_receiver_to_new_mpsc_receiver_preserving_errors(ipc_receiver: IpcReceiver) - -> Receiver> - where T: for<'de> Deserialize<'de> + Serialize + Send + 'static -{ - let (mpsc_sender, mpsc_receiver) = channel(); - ROUTER.add_route(ipc_receiver.to_opaque(), Box::new(move |message| { - drop(mpsc_sender.send(message.to::())) - })); - mpsc_receiver -} - -impl Constellation - where LTF: LayoutThreadFactory, - STF: ScriptThreadFactory -{ - /// Create a new constellation thread. 
- pub fn start(state: InitialConstellationState) -> (Sender, IpcSender) { - let (compositor_sender, compositor_receiver) = channel(); - - // service worker manager to communicate with constellation - let (swmanager_sender, swmanager_receiver) = ipc::channel().expect("ipc channel failure"); - let sw_mgr_clone = swmanager_sender.clone(); - - thread::Builder::new().name("Constellation".to_owned()).spawn(move || { - let (ipc_script_sender, ipc_script_receiver) = ipc::channel().expect("ipc channel failure"); - let script_receiver = route_ipc_receiver_to_new_mpsc_receiver_preserving_errors(ipc_script_receiver); - - let (ipc_layout_sender, ipc_layout_receiver) = ipc::channel().expect("ipc channel failure"); - let layout_receiver = route_ipc_receiver_to_new_mpsc_receiver_preserving_errors(ipc_layout_receiver); - - let (network_listener_sender, network_listener_receiver) = channel(); - - let swmanager_receiver = route_ipc_receiver_to_new_mpsc_receiver_preserving_errors(swmanager_receiver); - - PipelineNamespace::install(PipelineNamespaceId(0)); - - let mut constellation: Constellation = Constellation { - script_sender: ipc_script_sender, - layout_sender: ipc_layout_sender, - script_receiver: script_receiver, - compositor_receiver: compositor_receiver, - layout_receiver: layout_receiver, - network_listener_sender: network_listener_sender, - network_listener_receiver: network_listener_receiver, - embedder_proxy: state.embedder_proxy, - compositor_proxy: state.compositor_proxy, - active_browser_id: None, - debugger_chan: state.debugger_chan, - devtools_chan: state.devtools_chan, - bluetooth_thread: state.bluetooth_thread, - public_resource_threads: state.public_resource_threads, - private_resource_threads: state.private_resource_threads, - font_cache_thread: state.font_cache_thread, - swmanager_chan: None, - swmanager_receiver: swmanager_receiver, - swmanager_sender: sw_mgr_clone, - event_loops: HashMap::new(), - pipelines: HashMap::new(), - browsing_contexts: HashMap::new(), - pending_changes: vec!(), - // We initialize the namespace at 1, since we reserved namespace 0 for the constellation - next_pipeline_namespace_id: PipelineNamespaceId(1), - focus_pipeline_id: None, - time_profiler_chan: state.time_profiler_chan, - mem_profiler_chan: state.mem_profiler_chan, - window_size: WindowSizeData { - initial_viewport: opts::get().initial_window_size.to_f32() * - ScaleFactor::new(1.0), - device_pixel_ratio: - ScaleFactor::new(opts::get().device_pixels_per_px.unwrap_or(1.0)), - }, - phantom: PhantomData, - clipboard_ctx: if state.supports_clipboard { - match ClipboardContext::new() { - Ok(c) => Some(c), - Err(e) => { - warn!("Error creating clipboard context ({})", e); - None - }, - } - } else { - None - }, - webdriver: WebDriverData::new(), - scheduler_chan: TimerScheduler::start(), - document_states: HashMap::new(), - webrender_document: state.webrender_document, - webrender_api_sender: state.webrender_api_sender, - shutting_down: false, - handled_warnings: VecDeque::new(), - random_pipeline_closure: opts::get().random_pipeline_closure_probability.map(|prob| { - let seed = opts::get().random_pipeline_closure_seed.unwrap_or_else(random); - let rng = ServoRng::from_seed(&[seed]); - warn!("Randomly closing pipelines."); - info!("Using seed {} for random pipeline closure.", seed); - (rng, prob) - }), - webgl_threads: state.webgl_threads, - webvr_chan: state.webvr_chan, - }; - - constellation.run(); - }).expect("Thread spawning failed"); - - (compositor_sender, swmanager_sender) - } - - /// The main event loop 
for the constellation. - fn run(&mut self) { - while !self.shutting_down || !self.pipelines.is_empty() { - // Randomly close a pipeline if --random-pipeline-closure-probability is set - // This is for testing the hardening of the constellation. - self.maybe_close_random_pipeline(); - self.handle_request(); - } - self.handle_shutdown(); - } - - /// Generate a new pipeline id namespace. - fn next_pipeline_namespace_id(&mut self) -> PipelineNamespaceId { - let namespace_id = self.next_pipeline_namespace_id; - let PipelineNamespaceId(ref mut i) = self.next_pipeline_namespace_id; - *i += 1; - namespace_id - } - - /// Helper function for creating a pipeline - fn new_pipeline(&mut self, - pipeline_id: PipelineId, - browsing_context_id: BrowsingContextId, - top_level_browsing_context_id: TopLevelBrowsingContextId, - parent_info: Option<(PipelineId, FrameType)>, - initial_window_size: Option>, - // TODO: we have to provide ownership of the LoadData - // here, because it will be send on an ipc channel, - // and ipc channels take onership of their data. - // https://github.com/servo/ipc-channel/issues/138 - load_data: LoadData, - sandbox: IFrameSandboxState, - is_private: bool) { - if self.shutting_down { return; } - - debug!("Creating new pipeline {} in browsing context {}.", pipeline_id, browsing_context_id); - - let (event_loop, host) = match sandbox { - IFrameSandboxState::IFrameSandboxed => (None, None), - IFrameSandboxState::IFrameUnsandboxed => { - // If this is an about:blank load, it must share the creator's event loop. - // This must match the logic in the script thread when determining the proper origin. - if load_data.url.as_str() != "about:blank" { - match reg_host(&load_data.url) { - None => (None, None), - Some(host) => { - let event_loop = self.event_loops.get(&top_level_browsing_context_id) - .and_then(|map| map.get(&host)) - .and_then(|weak| weak.upgrade()); - match event_loop { - None => (None, Some(host)), - Some(event_loop) => (Some(event_loop.clone()), None), - } - }, - } - } else if let Some(parent) = parent_info - .and_then(|(pipeline_id, _)| self.pipelines.get(&pipeline_id)) { - (Some(parent.event_loop.clone()), None) - } else if let Some(creator) = load_data.creator_pipeline_id - .and_then(|pipeline_id| self.pipelines.get(&pipeline_id)) { - (Some(creator.event_loop.clone()), None) - } else { - (None, None) - } - }, - }; - - let resource_threads = if is_private { - self.private_resource_threads.clone() - } else { - self.public_resource_threads.clone() - }; - - let parent_visibility = parent_info - .and_then(|(parent_pipeline_id, _)| self.pipelines.get(&parent_pipeline_id)) - .map(|pipeline| pipeline.visible); - - let prev_visibility = self.browsing_contexts.get(&browsing_context_id) - .and_then(|browsing_context| self.pipelines.get(&browsing_context.pipeline_id)) - .map(|pipeline| pipeline.visible) - .or(parent_visibility); - - let result = Pipeline::spawn::(InitialPipelineState { - id: pipeline_id, - browsing_context_id, - top_level_browsing_context_id, - parent_info, - script_to_constellation_chan: ScriptToConstellationChan { - sender: self.script_sender.clone(), - pipeline_id: pipeline_id, - }, - layout_to_constellation_chan: self.layout_sender.clone(), - scheduler_chan: self.scheduler_chan.clone(), - compositor_proxy: self.compositor_proxy.clone(), - devtools_chan: self.devtools_chan.clone(), - bluetooth_thread: self.bluetooth_thread.clone(), - swmanager_thread: self.swmanager_sender.clone(), - font_cache_thread: self.font_cache_thread.clone(), - resource_threads, - 
time_profiler_chan: self.time_profiler_chan.clone(), - mem_profiler_chan: self.mem_profiler_chan.clone(), - window_size: initial_window_size, - event_loop, - load_data, - device_pixel_ratio: self.window_size.device_pixel_ratio, - pipeline_namespace_id: self.next_pipeline_namespace_id(), - prev_visibility, - webrender_api_sender: self.webrender_api_sender.clone(), - webrender_document: self.webrender_document, - is_private, - webgl_chan: self.webgl_threads.pipeline(), - webvr_chan: self.webvr_chan.clone() - }); - - let pipeline = match result { - Ok(result) => result, - Err(e) => return self.handle_send_error(pipeline_id, e), - }; - - if let Some(host) = host { - debug!("Adding new host entry {} for top-level browsing context {}.", host, top_level_browsing_context_id); - self.event_loops.entry(top_level_browsing_context_id) - .or_insert_with(HashMap::new) - .insert(host, Rc::downgrade(&pipeline.event_loop)); - } - - assert!(!self.pipelines.contains_key(&pipeline_id)); - self.pipelines.insert(pipeline_id, pipeline); - } - - /// Get an iterator for the fully active browsing contexts in a subtree. - fn fully_active_descendant_browsing_contexts_iter(&self, browsing_context_id: BrowsingContextId) - -> FullyActiveBrowsingContextsIterator - { - FullyActiveBrowsingContextsIterator { - stack: vec!(browsing_context_id), - pipelines: &self.pipelines, - browsing_contexts: &self.browsing_contexts, - } - } - - /// Get an iterator for the fully active browsing contexts in a tree. - fn fully_active_browsing_contexts_iter(&self, top_level_browsing_context_id: TopLevelBrowsingContextId) - -> FullyActiveBrowsingContextsIterator - { - self.fully_active_descendant_browsing_contexts_iter(BrowsingContextId::from(top_level_browsing_context_id)) - } - - /// Get an iterator for the browsing contexts in a subtree. - fn all_descendant_browsing_contexts_iter(&self, browsing_context_id: BrowsingContextId) - -> AllBrowsingContextsIterator - { - AllBrowsingContextsIterator { - stack: vec!(browsing_context_id), - pipelines: &self.pipelines, - browsing_contexts: &self.browsing_contexts, - } - } - - /// Get an iterator for the browsing contexts in a tree. - fn all_browsing_contexts_iter(&self, top_level_browsing_context_id: TopLevelBrowsingContextId) - -> AllBrowsingContextsIterator - { - self.all_descendant_browsing_contexts_iter(BrowsingContextId::from(top_level_browsing_context_id)) - } - - /// The joint session future is the merge of the session future of every - /// browsing_context, sorted chronologically. - fn joint_session_future<'a>(&'a self, top_level_browsing_context_id: TopLevelBrowsingContextId) - -> impl Iterator + 'a - { - self.all_browsing_contexts_iter(top_level_browsing_context_id) - .map(|browsing_context| browsing_context.next.iter().rev()) - .kmerge_by(|a, b| a.instant.cmp(&b.instant) == Ordering::Less) - } - - /// Is the joint session future empty? - fn joint_session_future_is_empty(&self, top_level_browsing_context_id: TopLevelBrowsingContextId) -> bool { - self.all_browsing_contexts_iter(top_level_browsing_context_id) - .all(|browsing_context| browsing_context.next.is_empty()) - } - - /// The joint session past is the merge of the session past of every - /// browsing_context, sorted reverse chronologically. 
- fn joint_session_past<'a>(&'a self, top_level_browsing_context_id: TopLevelBrowsingContextId) - -> impl Iterator + 'a - { - self.all_browsing_contexts_iter(top_level_browsing_context_id) - .map(|browsing_context| browsing_context.prev.iter().rev() - .scan(browsing_context.instant, |prev_instant, entry| { - let instant = *prev_instant; - *prev_instant = entry.instant; - Some((instant, entry)) - })) - .kmerge_by(|a, b| a.0.cmp(&b.0) == Ordering::Greater) - .map(|(_, entry)| entry) - } - - /// Is the joint session past empty? - fn joint_session_past_is_empty(&self, top_level_browsing_context_id: TopLevelBrowsingContextId) -> bool { - self.all_browsing_contexts_iter(top_level_browsing_context_id) - .all(|browsing_context| browsing_context.prev.is_empty()) - } - - /// Create a new browsing context and update the internal bookkeeping. - fn new_browsing_context(&mut self, - browsing_context_id: BrowsingContextId, - top_level_id: TopLevelBrowsingContextId, - pipeline_id: PipelineId, - load_data: LoadData) { - debug!("Creating new browsing context {}", browsing_context_id); - let browsing_context = BrowsingContext::new(browsing_context_id, top_level_id, pipeline_id, load_data); - self.browsing_contexts.insert(browsing_context_id, browsing_context); - - // If a child browsing_context, add it to the parent pipeline. - let parent_info = self.pipelines.get(&pipeline_id) - .and_then(|pipeline| pipeline.parent_info); - if let Some((parent_id, _)) = parent_info { - if let Some(parent) = self.pipelines.get_mut(&parent_id) { - parent.add_child(browsing_context_id); - } - } - } - - fn add_pending_change(&mut self, change: SessionHistoryChange) { - self.handle_load_start_msg(change.top_level_browsing_context_id, change.new_pipeline_id); - self.pending_changes.push(change); - } - - /// Handles loading pages, navigation, and granting access to the compositor - #[allow(unsafe_code)] - fn handle_request(&mut self) { - enum Request { - Script((PipelineId, FromScriptMsg)), - Compositor(FromCompositorMsg), - Layout(FromLayoutMsg), - NetworkListener((PipelineId, FetchResponseMsg)), - FromSWManager(SWManagerMsg), - } - - // Get one incoming request. - // This is one of the few places where the compositor is - // allowed to panic. If one of the receiver.recv() calls - // fails, it is because the matching sender has been - // reclaimed, but this can't happen in normal execution - // because the constellation keeps a pointer to the sender, - // so it should never be reclaimed. A possible scenario in - // which receiver.recv() fails is if some unsafe code - // produces undefined behaviour, resulting in the destructor - // being called. If this happens, there's not much we can do - // other than panic. - let request = { - let receiver_from_script = &self.script_receiver; - let receiver_from_compositor = &self.compositor_receiver; - let receiver_from_layout = &self.layout_receiver; - let receiver_from_network_listener = &self.network_listener_receiver; - let receiver_from_swmanager = &self.swmanager_receiver; - select! 
{ - msg = receiver_from_script.recv() => - msg.expect("Unexpected script channel panic in constellation").map(Request::Script), - msg = receiver_from_compositor.recv() => - Ok(Request::Compositor(msg.expect("Unexpected compositor channel panic in constellation"))), - msg = receiver_from_layout.recv() => - msg.expect("Unexpected layout channel panic in constellation").map(Request::Layout), - msg = receiver_from_network_listener.recv() => - Ok(Request::NetworkListener( - msg.expect("Unexpected network listener channel panic in constellation") - )), - msg = receiver_from_swmanager.recv() => - msg.expect("Unexpected panic channel panic in constellation").map(Request::FromSWManager) - } - }; - - let request = match request { - Ok(request) => request, - Err(err) => return error!("Deserialization failed ({}).", err), - }; - - match request { - Request::Compositor(message) => { - self.handle_request_from_compositor(message) - }, - Request::Script(message) => { - self.handle_request_from_script(message); - }, - Request::Layout(message) => { - self.handle_request_from_layout(message); - }, - Request::NetworkListener(message) => { - self.handle_request_from_network_listener(message); - }, - Request::FromSWManager(message) => { - self.handle_request_from_swmanager(message); - } - } - } - - fn handle_request_from_network_listener(&mut self, message: (PipelineId, FetchResponseMsg)) { - let (id, message_) = message; - let result = match self.pipelines.get(&id) { - Some(pipeline) => { - let msg = ConstellationControlMsg::NavigationResponse(id, message_); - pipeline.event_loop.send(msg) - }, - None => { - return warn!("Pipeline {:?} got fetch data after closure!", id); - }, - }; - if let Err(e) = result { - self.handle_send_error(id, e); - } - } - - fn handle_request_from_swmanager(&mut self, message: SWManagerMsg) { - match message { - SWManagerMsg::OwnSender(sw_sender) => { - // store service worker manager for communicating with it. - self.swmanager_chan = Some(sw_sender); - } - } - } - - fn handle_request_from_compositor(&mut self, message: FromCompositorMsg) { - match message { - FromCompositorMsg::Exit => { - debug!("constellation exiting"); - self.handle_exit(); - } - FromCompositorMsg::GetBrowsingContext(pipeline_id, resp_chan) => { - debug!("constellation got get browsing context message"); - self.handle_get_browsing_context(pipeline_id, resp_chan); - } - FromCompositorMsg::GetPipeline(browsing_context_id, resp_chan) => { - debug!("constellation got get pipeline message"); - self.handle_get_pipeline(browsing_context_id, resp_chan); - } - FromCompositorMsg::GetFocusTopLevelBrowsingContext(resp_chan) => { - debug!("constellation got get focus browsing context message"); - let focus_browsing_context = self.focus_pipeline_id - .and_then(|pipeline_id| self.pipelines.get(&pipeline_id)) - .map(|pipeline| pipeline.top_level_browsing_context_id); - let _ = resp_chan.send(focus_browsing_context); - } - FromCompositorMsg::KeyEvent(ch, key, state, modifiers) => { - debug!("constellation got key event message"); - self.handle_key_msg(ch, key, state, modifiers); - } - // Load a new page from a typed url - // If there is already a pending page (self.pending_changes), it will not be overridden; - // However, if the id is not encompassed by another change, it will be. 
- FromCompositorMsg::LoadUrl(top_level_browsing_context_id, url) => { - debug!("constellation got URL load message from compositor"); - let load_data = LoadData::new(url, None, None, None); - let ctx_id = BrowsingContextId::from(top_level_browsing_context_id); - let pipeline_id = match self.browsing_contexts.get(&ctx_id) { - Some(ctx) => ctx.pipeline_id, - None => return warn!("LoadUrl for unknow browsing context: {:?}", top_level_browsing_context_id), - }; - self.handle_load_url_msg(top_level_browsing_context_id, pipeline_id, load_data, false); - } - FromCompositorMsg::IsReadyToSaveImage(pipeline_states) => { - let is_ready = self.handle_is_ready_to_save_image(pipeline_states); - debug!("Ready to save image {:?}.", is_ready); - if opts::get().is_running_problem_test { - println!("got ready to save image query, result is {:?}", is_ready); - } - let is_ready = is_ready == ReadyToSave::Ready; - self.compositor_proxy.send(ToCompositorMsg::IsReadyToSaveImageReply(is_ready)); - if opts::get().is_running_problem_test { - println!("sent response"); - } - } - // Create a new top level browsing context. Will use response_chan to return - // the browsing context id. - FromCompositorMsg::NewBrowser(url, response_chan) => { - debug!("constellation got NewBrowser message"); - self.handle_new_top_level_browsing_context(url, response_chan); - } - // Close a top level browsing context. - FromCompositorMsg::CloseBrowser(top_level_browsing_context_id) => { - debug!("constellation got CloseBrowser message"); - self.handle_close_top_level_browsing_context(top_level_browsing_context_id); - } - // Send frame tree to WebRender. Make it visible. - FromCompositorMsg::SelectBrowser(top_level_browsing_context_id) => { - self.send_frame_tree(top_level_browsing_context_id); - } - // Handle a forward or back request - FromCompositorMsg::TraverseHistory(top_level_browsing_context_id, direction) => { - debug!("constellation got traverse history message from compositor"); - self.handle_traverse_history_msg(top_level_browsing_context_id, direction); - } - FromCompositorMsg::WindowSize(top_level_browsing_context_id, new_size, size_type) => { - debug!("constellation got window resize message"); - self.handle_window_size_msg(top_level_browsing_context_id, new_size, size_type); - } - FromCompositorMsg::TickAnimation(pipeline_id, tick_type) => { - self.handle_tick_animation(pipeline_id, tick_type) - } - FromCompositorMsg::WebDriverCommand(command) => { - debug!("constellation got webdriver command message"); - self.handle_webdriver_msg(command); - } - FromCompositorMsg::Reload(top_level_browsing_context_id) => { - debug!("constellation got reload message"); - self.handle_reload_msg(top_level_browsing_context_id); - } - FromCompositorMsg::LogEntry(top_level_browsing_context_id, thread_name, entry) => { - self.handle_log_entry(top_level_browsing_context_id, thread_name, entry); - } - FromCompositorMsg::WebVREvents(pipeline_ids, events) => { - debug!("constellation got {:?} WebVR events", events.len()); - self.handle_webvr_events(pipeline_ids, events); - } - } - } - - fn handle_request_from_script(&mut self, message: (PipelineId, FromScriptMsg)) { - let (source_pipeline_id, content) = message; - let source_top_ctx_id = match self.pipelines.get(&source_pipeline_id) - .map(|pipeline| pipeline.top_level_browsing_context_id) { - None => return warn!("ScriptMsg from closed pipeline {:?}.", source_pipeline_id), - Some(ctx) => ctx, - }; - - let source_is_top_level_pipeline = self.browsing_contexts - 
.get(&BrowsingContextId::from(source_top_ctx_id)) - .map(|ctx| ctx.pipeline_id == source_pipeline_id) - .unwrap_or(false); - - match content { - FromScriptMsg::PipelineExited => { - self.handle_pipeline_exited(source_pipeline_id); - } - FromScriptMsg::InitiateNavigateRequest(req_init) => { - debug!("constellation got initiate navigate request message"); - self.handle_navigate_request(source_pipeline_id, req_init); - } - FromScriptMsg::ScriptLoadedURLInIFrame(load_info) => { - debug!("constellation got iframe URL load message {:?} {:?} {:?}", - load_info.info.parent_pipeline_id, - load_info.old_pipeline_id, - load_info.info.new_pipeline_id); - self.handle_script_loaded_url_in_iframe_msg(load_info); - } - FromScriptMsg::ScriptNewIFrame(load_info, layout_sender) => { - debug!("constellation got loaded `about:blank` in iframe message {:?} {:?}", - load_info.parent_pipeline_id, - load_info.new_pipeline_id); - self.handle_script_new_iframe(load_info, layout_sender); - } - FromScriptMsg::ChangeRunningAnimationsState(animation_state) => { - self.handle_change_running_animations_state(source_pipeline_id, animation_state) - } - // Load a new page from a mouse click - // If there is already a pending page (self.pending_changes), it will not be overridden; - // However, if the id is not encompassed by another change, it will be. - FromScriptMsg::LoadUrl(load_data, replace) => { - debug!("constellation got URL load message from script"); - self.handle_load_url_msg(source_top_ctx_id, source_pipeline_id, load_data, replace); - } - FromScriptMsg::AbortLoadUrl => { - debug!("constellation got abort URL load message from script"); - self.handle_abort_load_url_msg(source_pipeline_id); - } - // A page loaded has completed all parsing, script, and reflow messages have been sent. - FromScriptMsg::LoadComplete => { - debug!("constellation got load complete message"); - self.handle_load_complete_msg(source_top_ctx_id, source_pipeline_id) - } - // Handle a forward or back request - FromScriptMsg::TraverseHistory(direction) => { - debug!("constellation got traverse history message from script"); - self.handle_traverse_history_msg(source_top_ctx_id, direction); - } - // Handle a joint session history length request. - FromScriptMsg::JointSessionHistoryLength(sender) => { - debug!("constellation got joint session history length message from script"); - self.handle_joint_session_history_length(source_top_ctx_id, sender); - } - // Notification that the new document is ready to become active - FromScriptMsg::ActivateDocument => { - debug!("constellation got activate document message"); - self.handle_activate_document_msg(source_pipeline_id); - } - // Update pipeline url after redirections - FromScriptMsg::SetFinalUrl(final_url) => { - // The script may have finished loading after we already started shutting down. 
- if let Some(ref mut pipeline) = self.pipelines.get_mut(&source_pipeline_id) { - debug!("constellation got set final url message"); - pipeline.url = final_url; - } else { - warn!("constellation got set final url message for dead pipeline"); - } - } - FromScriptMsg::PostMessage(browsing_context_id, origin, data) => { - debug!("constellation got postMessage message"); - self.handle_post_message_msg(browsing_context_id, origin, data); - } - FromScriptMsg::MozBrowserEvent(pipeline_id, event) => { - debug!("constellation got mozbrowser event message"); - self.handle_mozbrowser_event_msg(pipeline_id, source_top_ctx_id, event); - } - FromScriptMsg::Focus => { - debug!("constellation got focus message"); - self.handle_focus_msg(source_pipeline_id); - } - FromScriptMsg::ForwardEvent(dest_id, event) => { - let msg = ConstellationControlMsg::SendEvent(dest_id, event); - let result = match self.pipelines.get(&dest_id) { - None => { debug!("Pipeline {:?} got event after closure.", dest_id); return; } - Some(pipeline) => pipeline.event_loop.send(msg), - }; - if let Err(e) = result { - self.handle_send_error(dest_id, e); - } - } - FromScriptMsg::GetClipboardContents(sender) => { - let contents = match self.clipboard_ctx { - Some(ref mut ctx) => match ctx.get_contents() { - Ok(c) => c, - Err(e) => { - warn!("Error getting clipboard contents ({}), defaulting to empty string", e); - "".to_owned() - }, - }, - None => "".to_owned(), - }; - if let Err(e) = sender.send(contents.to_owned()) { - warn!("Failed to send clipboard ({})", e); - } - } - FromScriptMsg::SetClipboardContents(s) => { - if let Some(ref mut ctx) = self.clipboard_ctx { - if let Err(e) = ctx.set_contents(s) { - warn!("Error setting clipboard contents ({})", e); - } - } - } - FromScriptMsg::SetVisible(visible) => { - debug!("constellation got set visible messsage"); - self.handle_set_visible_msg(source_pipeline_id, visible); - } - FromScriptMsg::VisibilityChangeComplete(visible) => { - debug!("constellation got set visibility change complete message"); - self.handle_visibility_change_complete(source_pipeline_id, visible); - } - FromScriptMsg::RemoveIFrame(browsing_context_id, sender) => { - debug!("constellation got remove iframe message"); - let removed_pipeline_ids = self.handle_remove_iframe_msg(browsing_context_id); - if let Err(e) = sender.send(removed_pipeline_ids) { - warn!("Error replying to remove iframe ({})", e); - } - } - FromScriptMsg::NewFavicon(url) => { - debug!("constellation got new favicon message"); - if source_is_top_level_pipeline { - self.embedder_proxy.send(EmbedderMsg::NewFavicon(source_top_ctx_id, url)); - } - } - FromScriptMsg::HeadParsed => { - debug!("constellation got head parsed message"); - if source_is_top_level_pipeline { - self.embedder_proxy.send(EmbedderMsg::HeadParsed(source_top_ctx_id)); - } - } - FromScriptMsg::CreateCanvasPaintThread(size, sender) => { - debug!("constellation got create-canvas-paint-thread message"); - self.handle_create_canvas_paint_thread_msg(&size, sender) - } - FromScriptMsg::NodeStatus(message) => { - debug!("constellation got NodeStatus message"); - self.embedder_proxy.send(EmbedderMsg::Status(source_top_ctx_id, message)); - } - FromScriptMsg::SetDocumentState(state) => { - debug!("constellation got SetDocumentState message"); - self.document_states.insert(source_pipeline_id, state); - } - FromScriptMsg::Alert(message, sender) => { - debug!("constellation got Alert message"); - self.handle_alert(source_top_ctx_id, message, sender); - } - - 
FromScriptMsg::ScrollFragmentPoint(scroll_root_id, point, smooth) => { - self.compositor_proxy.send(ToCompositorMsg::ScrollFragmentPoint(scroll_root_id, - point, - smooth)); - } - - FromScriptMsg::GetClientWindow(send) => { - self.embedder_proxy.send(EmbedderMsg::GetClientWindow(source_top_ctx_id, send)); - } - - FromScriptMsg::MoveTo(point) => { - self.embedder_proxy.send(EmbedderMsg::MoveTo(source_top_ctx_id, point)); - } - - FromScriptMsg::ResizeTo(size) => { - self.embedder_proxy.send(EmbedderMsg::ResizeTo(source_top_ctx_id, size)); - } - - FromScriptMsg::Exit => { - self.compositor_proxy.send(ToCompositorMsg::Exit); - } - FromScriptMsg::LogEntry(thread_name, entry) => { - self.handle_log_entry(Some(source_top_ctx_id), thread_name, entry); - } - - FromScriptMsg::SetTitle(title) => { - if source_is_top_level_pipeline { - self.embedder_proxy.send(EmbedderMsg::ChangePageTitle(source_top_ctx_id, title)) - } - } - - FromScriptMsg::SendKeyEvent(ch, key, key_state, key_modifiers) => { - let event = EmbedderMsg::KeyEvent(Some(source_top_ctx_id), ch, key, key_state, key_modifiers); - self.embedder_proxy.send(event); - } - - FromScriptMsg::TouchEventProcessed(result) => { - self.compositor_proxy.send(ToCompositorMsg::TouchEventProcessed(result)) - } - FromScriptMsg::GetBrowsingContextId(pipeline_id, sender) => { - let result = self.pipelines.get(&pipeline_id).map(|pipeline| pipeline.browsing_context_id); - if let Err(e) = sender.send(result) { - warn!("Sending reply to get browsing context failed ({:?}).", e); - } - } - FromScriptMsg::GetParentInfo(pipeline_id, sender) => { - let result = self.pipelines.get(&pipeline_id).and_then(|pipeline| pipeline.parent_info); - if let Err(e) = sender.send(result) { - warn!("Sending reply to get parent info failed ({:?}).", e); - } - } - FromScriptMsg::RegisterServiceWorker(scope_things, scope) => { - debug!("constellation got store registration scope message"); - self.handle_register_serviceworker(scope_things, scope); - } - FromScriptMsg::ForwardDOMMessage(msg_vec, scope_url) => { - if let Some(ref mgr) = self.swmanager_chan { - let _ = mgr.send(ServiceWorkerMsg::ForwardDOMMessage(msg_vec, scope_url)); - } else { - warn!("Unable to forward DOMMessage for postMessage call"); - } - } - FromScriptMsg::BroadcastStorageEvent(storage, url, key, old_value, new_value) => { - self.handle_broadcast_storage_event(source_pipeline_id, storage, url, key, old_value, new_value); - } - FromScriptMsg::SetFullscreenState(state) => { - self.embedder_proxy.send(EmbedderMsg::SetFullscreenState(source_top_ctx_id, state)); - } - } - } - - fn handle_request_from_layout(&mut self, message: FromLayoutMsg) { - match message { - FromLayoutMsg::ChangeRunningAnimationsState(pipeline_id, animation_state) => { - self.handle_change_running_animations_state(pipeline_id, animation_state) - } - // Layout sends new sizes for all subframes. This needs to be reflected by all - // frame trees in the navigation context containing the subframe. 
- FromLayoutMsg::IFrameSizes(iframe_sizes) => { - debug!("constellation got iframe size message"); - self.handle_iframe_size_msg(iframe_sizes); - } - FromLayoutMsg::PendingPaintMetric(pipeline_id, epoch) => { - debug!("constellation got a pending paint metric message"); - self.handle_pending_paint_metric(pipeline_id, epoch); - } - FromLayoutMsg::SetCursor(cursor) => { - self.handle_set_cursor_msg(cursor) - } - FromLayoutMsg::ViewportConstrained(pipeline_id, constraints) => { - debug!("constellation got viewport-constrained event message"); - self.handle_viewport_constrained_msg(pipeline_id, constraints); - } - } - } - - fn handle_register_serviceworker(&self, scope_things: ScopeThings, scope: ServoUrl) { - if let Some(ref mgr) = self.swmanager_chan { - let _ = mgr.send(ServiceWorkerMsg::RegisterServiceWorker(scope_things, scope)); - } else { - warn!("sending scope info to service worker manager failed"); - } - } - - fn handle_broadcast_storage_event(&self, pipeline_id: PipelineId, storage: StorageType, url: ServoUrl, - key: Option, old_value: Option, new_value: Option) { - let origin = url.origin(); - for pipeline in self.pipelines.values() { - if (pipeline.id != pipeline_id) && (pipeline.url.origin() == origin) { - let msg = ConstellationControlMsg::DispatchStorageEvent( - pipeline.id, storage, url.clone(), key.clone(), old_value.clone(), new_value.clone() - ); - if let Err(err) = pipeline.event_loop.send(msg) { - warn!("Failed to broadcast storage event to pipeline {} ({:?}).", pipeline.id, err); - } - } - } - } - - fn handle_exit(&mut self) { - // TODO: add a timer, which forces shutdown if threads aren't responsive. - if self.shutting_down { return; } - self.shutting_down = true; - - self.mem_profiler_chan.send(mem::ProfilerMsg::Exit); - - // Close the top-level browsing contexts - let browsing_context_ids: Vec = self.browsing_contexts.values() - .filter(|browsing_context| browsing_context.is_top_level()) - .map(|browsing_context| browsing_context.id) - .collect(); - for browsing_context_id in browsing_context_ids { - debug!("Removing top-level browsing context {}.", browsing_context_id); - self.close_browsing_context(browsing_context_id, ExitPipelineMode::Normal); - } - - // Close any pending changes and pipelines - while let Some(pending) = self.pending_changes.pop() { - debug!("Removing pending browsing context {}.", pending.browsing_context_id); - self.close_browsing_context(pending.browsing_context_id, ExitPipelineMode::Normal); - debug!("Removing pending pipeline {}.", pending.new_pipeline_id); - self.close_pipeline(pending.new_pipeline_id, DiscardBrowsingContext::Yes, ExitPipelineMode::Normal); - } - - // In case there are browsing contexts which weren't attached, we close them. - let browsing_context_ids: Vec = self.browsing_contexts.keys().cloned().collect(); - for browsing_context_id in browsing_context_ids { - debug!("Removing detached browsing context {}.", browsing_context_id); - self.close_browsing_context(browsing_context_id, ExitPipelineMode::Normal); - } - - // In case there are pipelines which weren't attached to the pipeline tree, we close them. - let pipeline_ids: Vec = self.pipelines.keys().cloned().collect(); - for pipeline_id in pipeline_ids { - debug!("Removing detached pipeline {}.", pipeline_id); - self.close_pipeline(pipeline_id, DiscardBrowsingContext::Yes, ExitPipelineMode::Normal); - } - } - - fn handle_shutdown(&mut self) { - // At this point, there are no active pipelines, - // so we can safely block on other threads, without worrying about deadlock. 
- // Channels to receive signals when threads are done exiting. - let (core_sender, core_receiver) = ipc::channel().expect("Failed to create IPC channel!"); - let (storage_sender, storage_receiver) = ipc::channel().expect("Failed to create IPC channel!"); - - debug!("Exiting core resource threads."); - if let Err(e) = self.public_resource_threads.send(net_traits::CoreResourceMsg::Exit(core_sender)) { - warn!("Exit resource thread failed ({})", e); - } - - if let Some(ref chan) = self.debugger_chan { - debugger::shutdown_server(chan); - } - - if let Some(ref chan) = self.devtools_chan { - debug!("Exiting devtools."); - let msg = DevtoolsControlMsg::FromChrome(ChromeToDevtoolsControlMsg::ServerExitMsg); - if let Err(e) = chan.send(msg) { - warn!("Exit devtools failed ({})", e); - } - } - - debug!("Exiting storage resource threads."); - if let Err(e) = self.public_resource_threads.send(StorageThreadMsg::Exit(storage_sender)) { - warn!("Exit storage thread failed ({})", e); - } - - debug!("Exiting bluetooth thread."); - if let Err(e) = self.bluetooth_thread.send(BluetoothRequest::Exit) { - warn!("Exit bluetooth thread failed ({})", e); - } - - debug!("Exiting service worker manager thread."); - if let Some(mgr) = self.swmanager_chan.as_ref() { - if let Err(e) = mgr.send(ServiceWorkerMsg::Exit) { - warn!("Exit service worker manager failed ({})", e); - } - } - - debug!("Exiting WebGL thread."); - if let Err(e) = self.webgl_threads.exit() { - warn!("Exit WebGL Thread failed ({})", e); - } - - if let Some(chan) = self.webvr_chan.as_ref() { - debug!("Exiting WebVR thread."); - if let Err(e) = chan.send(WebVRMsg::Exit) { - warn!("Exit WebVR thread failed ({})", e); - } - } - - debug!("Exiting timer scheduler."); - if let Err(e) = self.scheduler_chan.send(TimerSchedulerMsg::Exit) { - warn!("Exit timer scheduler failed ({})", e); - } - - debug!("Exiting font cache thread."); - self.font_cache_thread.exit(); - - // Receive exit signals from threads. - if let Err(e) = core_receiver.recv() { - warn!("Exit resource thread failed ({})", e); - } - if let Err(e) = storage_receiver.recv() { - warn!("Exit storage thread failed ({})", e); - } - - debug!("Asking compositor to complete shutdown."); - self.compositor_proxy.send(ToCompositorMsg::ShutdownComplete); - } - - fn handle_pipeline_exited(&mut self, pipeline_id: PipelineId) { - debug!("Pipeline {:?} exited.", pipeline_id); - self.pipelines.remove(&pipeline_id); - } - - fn handle_send_error(&mut self, pipeline_id: PipelineId, err: IpcError) { - // Treat send error the same as receiving a panic message - error!("Pipeline {} send error ({}).", pipeline_id, err); - let top_level_browsing_context_id = self.pipelines.get(&pipeline_id) - .map(|pipeline| pipeline.top_level_browsing_context_id); - if let Some(top_level_browsing_context_id) = top_level_browsing_context_id { - let reason = format!("Send failed ({})", err); - self.handle_panic(top_level_browsing_context_id, reason, None); - } - } - - fn handle_panic(&mut self, - top_level_browsing_context_id: TopLevelBrowsingContextId, - reason: String, - backtrace: Option) - { - if opts::get().hard_fail { - // It's quite difficult to make Servo exit cleanly if some threads have failed. - // Hard fail exists for test runners so we crash and that's good enough. - println!("Pipeline failed in hard-fail mode. 
Crashing!"); - process::exit(1); - } - - debug!("Panic handler for top-level browsing context {}: {}.", top_level_browsing_context_id, reason); - - let browsing_context_id = BrowsingContextId::from(top_level_browsing_context_id); - - // Notify the browser chrome that the pipeline has failed - self.trigger_mozbrowsererror(top_level_browsing_context_id, reason, backtrace); - - let (window_size, pipeline_id) = { - let browsing_context = self.browsing_contexts.get(&browsing_context_id); - let window_size = browsing_context.and_then(|browsing_context| browsing_context.size); - let pipeline_id = browsing_context.map(|browsing_context| browsing_context.pipeline_id); - (window_size, pipeline_id) - }; - - let (pipeline_url, parent_info) = { - let pipeline = pipeline_id.and_then(|id| self.pipelines.get(&id)); - let pipeline_url = pipeline.map(|pipeline| pipeline.url.clone()); - let parent_info = pipeline.and_then(|pipeline| pipeline.parent_info); - (pipeline_url, parent_info) - }; - - self.close_browsing_context_children(browsing_context_id, - DiscardBrowsingContext::No, - ExitPipelineMode::Force); - - let failure_url = ServoUrl::parse("about:failure").expect("infallible"); - - if let Some(pipeline_url) = pipeline_url { - if pipeline_url == failure_url { - return error!("about:failure failed"); - } - } - - warn!("creating replacement pipeline for about:failure"); - - let new_pipeline_id = PipelineId::new(); - let load_data = LoadData::new(failure_url, None, None, None); - let sandbox = IFrameSandboxState::IFrameSandboxed; - self.new_pipeline(new_pipeline_id, browsing_context_id, top_level_browsing_context_id, parent_info, - window_size, load_data.clone(), sandbox, false); - self.add_pending_change(SessionHistoryChange { - top_level_browsing_context_id: top_level_browsing_context_id, - browsing_context_id: browsing_context_id, - new_pipeline_id: new_pipeline_id, - load_data: load_data, - replace_instant: None, - }); - } - - fn handle_log_entry(&mut self, - top_level_browsing_context_id: Option, - thread_name: Option, - entry: LogEntry) - { - debug!("Received log entry {:?}.", entry); - match (entry, top_level_browsing_context_id) { - (LogEntry::Panic(reason, backtrace), Some(top_level_browsing_context_id)) => { - self.handle_panic(top_level_browsing_context_id, reason, Some(backtrace)); - }, - (LogEntry::Panic(reason, _), _) | (LogEntry::Error(reason), _) | (LogEntry::Warn(reason), _) => { - // VecDeque::truncate is unstable - if WARNINGS_BUFFER_SIZE <= self.handled_warnings.len() { - self.handled_warnings.pop_front(); - } - self.handled_warnings.push_back((thread_name, reason)); - }, - } - } - - fn handle_webvr_events(&mut self, ids: Vec, events: Vec) { - for id in ids { - match self.pipelines.get_mut(&id) { - Some(ref pipeline) => { - // Notify script thread - let _ = pipeline.event_loop.send(ConstellationControlMsg::WebVREvents(id, events.clone())); - }, - None => warn!("constellation got webvr event for dead pipeline") - } - } - } - - fn handle_new_top_level_browsing_context(&mut self, url: ServoUrl, reply: IpcSender) { - let window_size = self.window_size.initial_viewport; - let pipeline_id = PipelineId::new(); - let top_level_browsing_context_id = TopLevelBrowsingContextId::new(); - if let Err(e) = reply.send(top_level_browsing_context_id) { - warn!("Failed to send newly created top level browsing context ({}).", e); - } - let browsing_context_id = BrowsingContextId::from(top_level_browsing_context_id); - let load_data = LoadData::new(url.clone(), None, None, None); - let sandbox = 
IFrameSandboxState::IFrameUnsandboxed; - if self.focus_pipeline_id.is_none() { - self.focus_pipeline_id = Some(pipeline_id); - } - self.new_pipeline(pipeline_id, - browsing_context_id, - top_level_browsing_context_id, - None, - Some(window_size), - load_data.clone(), - sandbox, - false); - self.add_pending_change(SessionHistoryChange { - top_level_browsing_context_id: top_level_browsing_context_id, - browsing_context_id: browsing_context_id, - new_pipeline_id: pipeline_id, - load_data: load_data, - replace_instant: None, - }); - } - - fn handle_close_top_level_browsing_context(&mut self, top_level_browsing_context_id: TopLevelBrowsingContextId) { - let browsing_context_id = BrowsingContextId::from(top_level_browsing_context_id); - self.close_browsing_context(browsing_context_id, ExitPipelineMode::Normal); - } - - fn handle_iframe_size_msg(&mut self, - iframe_sizes: Vec<(BrowsingContextId, TypedSize2D)>) { - for (browsing_context_id, size) in iframe_sizes { - let window_size = WindowSizeData { - initial_viewport: size, - device_pixel_ratio: self.window_size.device_pixel_ratio, - }; - - self.resize_browsing_context(window_size, WindowSizeType::Initial, browsing_context_id); - } - } - - fn handle_subframe_loaded(&mut self, pipeline_id: PipelineId) { - let (browsing_context_id, parent_id) = match self.pipelines.get(&pipeline_id) { - Some(pipeline) => match pipeline.parent_info { - Some((parent_id, _)) => (pipeline.browsing_context_id, parent_id), - None => return debug!("Pipeline {} has no parent.", pipeline_id), - }, - None => return warn!("Pipeline {} loaded after closure.", pipeline_id), - }; - let msg = ConstellationControlMsg::DispatchIFrameLoadEvent { - target: browsing_context_id, - parent: parent_id, - child: pipeline_id, - }; - let result = match self.pipelines.get(&parent_id) { - Some(parent) => parent.event_loop.send(msg), - None => return warn!("Parent {} browsing context loaded after closure.", parent_id), - }; - if let Err(e) = result { - self.handle_send_error(parent_id, e); - } - } - - fn handle_navigate_request(&self, - id: PipelineId, - req_init: RequestInit) { - let listener = NetworkListener::new( - req_init, - id, - self.public_resource_threads.clone(), - self.network_listener_sender.clone()); - - listener.initiate_fetch(); - } - - // The script thread associated with pipeline_id has loaded a URL in an iframe via script. This - // will result in a new pipeline being spawned and a child being added to - // the parent pipeline. This message is never the result of a - // page navigation. - fn handle_script_loaded_url_in_iframe_msg(&mut self, load_info: IFrameLoadInfoWithData) { - let (load_data, window_size, is_private) = { - let old_pipeline = load_info.old_pipeline_id - .and_then(|old_pipeline_id| self.pipelines.get(&old_pipeline_id)); - - let source_pipeline = match self.pipelines.get(&load_info.info.parent_pipeline_id) { - Some(source_pipeline) => source_pipeline, - None => return warn!("Script loaded url in closed iframe {}.", load_info.info.parent_pipeline_id), - }; - - // If no url is specified, reload. 
- let load_data = load_info.load_data.unwrap_or_else(|| { - let url = match old_pipeline { - Some(old_pipeline) => old_pipeline.url.clone(), - None => ServoUrl::parse("about:blank").expect("infallible"), - }; - - // TODO - loaddata here should have referrer info (not None, None) - LoadData::new(url, Some(source_pipeline.id), None, None) - }); - - let is_private = load_info.info.is_private || source_pipeline.is_private; - - let window_size = self.browsing_contexts.get(&load_info.info.browsing_context_id) - .and_then(|browsing_context| browsing_context.size); - - (load_data, window_size, is_private) - }; - - let replace_instant = if load_info.info.replace { - self.browsing_contexts.get(&load_info.info.browsing_context_id) - .map(|browsing_context| browsing_context.instant) - } else { - None - }; - - // Create the new pipeline, attached to the parent and push to pending changes - self.new_pipeline(load_info.info.new_pipeline_id, - load_info.info.browsing_context_id, - load_info.info.top_level_browsing_context_id, - Some((load_info.info.parent_pipeline_id, load_info.info.frame_type)), - window_size, - load_data.clone(), - load_info.sandbox, - is_private); - self.add_pending_change(SessionHistoryChange { - top_level_browsing_context_id: load_info.info.top_level_browsing_context_id, - browsing_context_id: load_info.info.browsing_context_id, - new_pipeline_id: load_info.info.new_pipeline_id, - load_data: load_data, - replace_instant: replace_instant, - }); - } - - fn handle_script_new_iframe(&mut self, - load_info: IFrameLoadInfo, - layout_sender: IpcSender) { - let IFrameLoadInfo { - parent_pipeline_id, - new_pipeline_id, - frame_type, - replace, - browsing_context_id, - top_level_browsing_context_id, - is_private, - } = load_info; - - let url = ServoUrl::parse("about:blank").expect("infallible"); - - let pipeline = { - let parent_pipeline = match self.pipelines.get(&parent_pipeline_id) { - Some(parent_pipeline) => parent_pipeline, - None => return warn!("Script loaded url in closed iframe {}.", parent_pipeline_id), - }; - - let script_sender = parent_pipeline.event_loop.clone(); - - Pipeline::new(new_pipeline_id, - browsing_context_id, - top_level_browsing_context_id, - Some((parent_pipeline_id, frame_type)), - script_sender, - layout_sender, - self.compositor_proxy.clone(), - is_private || parent_pipeline.is_private, - url.clone(), - parent_pipeline.visible) - }; - - // TODO: Referrer? 
- let load_data = LoadData::new(url, Some(parent_pipeline_id), None, None); - - let replace_instant = if replace { - self.browsing_contexts.get(&browsing_context_id).map(|browsing_context| browsing_context.instant) - } else { - None - }; - - assert!(!self.pipelines.contains_key(&new_pipeline_id)); - self.pipelines.insert(new_pipeline_id, pipeline); - - self.add_pending_change(SessionHistoryChange { - top_level_browsing_context_id: top_level_browsing_context_id, - browsing_context_id: browsing_context_id, - new_pipeline_id: new_pipeline_id, - load_data: load_data, - replace_instant: replace_instant, - }); - } - - fn handle_pending_paint_metric(&self, pipeline_id: PipelineId, epoch: Epoch) { - self.compositor_proxy.send(ToCompositorMsg::PendingPaintMetric(pipeline_id, epoch)) - } - - fn handle_set_cursor_msg(&mut self, cursor: Cursor) { - self.embedder_proxy.send(EmbedderMsg::SetCursor(cursor)) - } - - fn handle_change_running_animations_state(&mut self, - pipeline_id: PipelineId, - animation_state: AnimationState) { - self.compositor_proxy.send(ToCompositorMsg::ChangeRunningAnimationsState(pipeline_id, - animation_state)) - } - - fn handle_tick_animation(&mut self, pipeline_id: PipelineId, tick_type: AnimationTickType) { - let result = match tick_type { - AnimationTickType::Script => { - let msg = ConstellationControlMsg::TickAllAnimations(pipeline_id); - match self.pipelines.get(&pipeline_id) { - Some(pipeline) => pipeline.event_loop.send(msg), - None => return warn!("Pipeline {:?} got script tick after closure.", pipeline_id), - } - } - AnimationTickType::Layout => { - let msg = LayoutControlMsg::TickAnimations; - match self.pipelines.get(&pipeline_id) { - Some(pipeline) => pipeline.layout_chan.send(msg), - None => return warn!("Pipeline {:?} got layout tick after closure.", pipeline_id), - } - } - }; - if let Err(e) = result { - self.handle_send_error(pipeline_id, e); - } - } - - fn handle_alert(&mut self, - top_level_browsing_context_id: TopLevelBrowsingContextId, - message: String, - sender: IpcSender) { - let browser_pipeline_id = self.browsing_contexts.get(&BrowsingContextId::from(top_level_browsing_context_id)) - .and_then(|browsing_context| self.pipelines.get(&browsing_context.pipeline_id)) - .and_then(|pipeline| pipeline.parent_info) - .map(|(browser_pipeline_id, _)| browser_pipeline_id); - let mozbrowser_modal_prompt = PREFS.is_mozbrowser_enabled() && browser_pipeline_id.is_some(); - - if mozbrowser_modal_prompt { - // https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowsershowmodalprompt - let prompt_type = String::from("alert"); - let title = String::from("Alert"); - let return_value = String::from(""); - let event = MozBrowserEvent::ShowModalPrompt(prompt_type, title, message, return_value); - match browser_pipeline_id.and_then(|id| self.pipelines.get(&id)) { - None => warn!("Alert sent after browser pipeline closure."), - Some(pipeline) => pipeline.trigger_mozbrowser_event(Some(top_level_browsing_context_id), event), - } - } - - let result = sender.send(!mozbrowser_modal_prompt); - if let Err(e) = result { - let ctx_id = BrowsingContextId::from(top_level_browsing_context_id); - let pipeline_id = match self.browsing_contexts.get(&ctx_id) { - Some(ctx) => ctx.pipeline_id, - None => return warn!("Alert sent for unknown browsing context."), - }; - self.handle_send_error(pipeline_id, e); - } - } - - fn handle_load_url_msg(&mut self, top_level_browsing_context_id: TopLevelBrowsingContextId, source_id: PipelineId, - load_data: LoadData, replace: bool) { - 
self.load_url(top_level_browsing_context_id, source_id, load_data, replace); - } - - fn load_url(&mut self, top_level_browsing_context_id: TopLevelBrowsingContextId, source_id: PipelineId, - load_data: LoadData, replace: bool) -> Option { - // Allow the embedder to handle the url itself - let (chan, port) = ipc::channel().expect("Failed to create IPC channel!"); - let msg = EmbedderMsg::AllowNavigation(top_level_browsing_context_id, load_data.url.clone(), chan); - self.embedder_proxy.send(msg); - if let Ok(false) = port.recv() { - return None; - } - - debug!("Loading {} in pipeline {}.", load_data.url, source_id); - // If this load targets an iframe, its framing element may exist - // in a separate script thread than the framed document that initiated - // the new load. The framing element must be notified about the - // requested change so it can update its internal state. - // - // If replace is true, the current entry is replaced instead of a new entry being added. - let (browsing_context_id, parent_info) = match self.pipelines.get(&source_id) { - Some(pipeline) => (pipeline.browsing_context_id, pipeline.parent_info), - None => { - warn!("Pipeline {} loaded after closure.", source_id); - return None; - } - }; - match parent_info { - Some((parent_pipeline_id, _)) => { - // Find the script thread for the pipeline containing the iframe - // and issue an iframe load through there. - let msg = ConstellationControlMsg::Navigate(parent_pipeline_id, - browsing_context_id, - load_data, - replace); - let result = match self.pipelines.get(&parent_pipeline_id) { - Some(parent_pipeline) => parent_pipeline.event_loop.send(msg), - None => { - warn!("Pipeline {:?} child loaded after closure", parent_pipeline_id); - return None; - }, - }; - if let Err(e) = result { - self.handle_send_error(parent_pipeline_id, e); - } - None - } - None => { - // Make sure no pending page would be overridden. - for change in &self.pending_changes { - if change.browsing_context_id == browsing_context_id { - // id that sent load msg is being changed already; abort - return None; - } - } - - if self.get_activity(source_id) == DocumentActivity::Inactive { - // Disregard this load if the navigating pipeline is not actually - // active. This could be caused by a delayed navigation (eg. from - // a timer) or a race between multiple navigations (such as an - // onclick handler on an anchor element). - return None; - } - - // Being here means either there are no pending changes, or none of the pending - // changes would be overridden by changing the subframe associated with source_id. 
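// Illustrative sketch, not part of the deleted Servo sources: the two guards in
// load_url above can be modelled with plain types. A navigation is dropped when the
// target browsing context already has a pending change, or when the requesting
// document is no longer active (e.g. a stale timer or a racing click handler).
// All names below (PendingChange, Activity, should_start_navigation) are hypothetical.

#[derive(Clone, Copy, PartialEq, Eq)]
struct BrowsingContextId(u32);

#[derive(Clone, Copy, PartialEq, Eq)]
enum Activity { FullyActive, Active, Inactive }

struct PendingChange { browsing_context_id: BrowsingContextId }

fn should_start_navigation(
    pending: &[PendingChange],
    target: BrowsingContextId,
    source_activity: Activity,
) -> bool {
    // A pending change for the same context means another navigation already owns it.
    if pending.iter().any(|c| c.browsing_context_id == target) {
        return false;
    }
    // Loads from inactive documents are silently disregarded.
    source_activity != Activity::Inactive
}

fn main() {
    let pending = vec![PendingChange { browsing_context_id: BrowsingContextId(1) }];
    assert!(!should_start_navigation(&pending, BrowsingContextId(1), Activity::FullyActive));
    assert!(should_start_navigation(&pending, BrowsingContextId(2), Activity::FullyActive));
    assert!(!should_start_navigation(&[], BrowsingContextId(2), Activity::Inactive));
}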
- - // Create the new pipeline - let (top_level_id, window_size, timestamp) = match self.browsing_contexts.get(&browsing_context_id) { - Some(context) => (context.top_level_id, context.size, context.instant), - None => { - warn!("Browsing context {} loaded after closure.", browsing_context_id); - return None; - } - }; - let new_pipeline_id = PipelineId::new(); - let sandbox = IFrameSandboxState::IFrameUnsandboxed; - let replace_instant = if replace { Some(timestamp) } else { None }; - self.new_pipeline(new_pipeline_id, - browsing_context_id, - top_level_id, - None, - window_size, - load_data.clone(), - sandbox, - false); - self.add_pending_change(SessionHistoryChange { - top_level_browsing_context_id: top_level_id, - browsing_context_id: browsing_context_id, - new_pipeline_id: new_pipeline_id, - load_data: load_data, - replace_instant: replace_instant, - }); - Some(new_pipeline_id) - } - } - } - - fn handle_abort_load_url_msg(&mut self, new_pipeline_id: PipelineId) { - let pending_index = self.pending_changes.iter().rposition(|change| { - change.new_pipeline_id == new_pipeline_id - }); - - // If it is found, remove it from the pending changes. - if let Some(pending_index) = pending_index { - self.pending_changes.remove(pending_index); - self.close_pipeline(new_pipeline_id, DiscardBrowsingContext::No, ExitPipelineMode::Normal); - } - } - - fn handle_load_start_msg(&mut self, top_level_browsing_context_id: TopLevelBrowsingContextId, - pipeline_id: PipelineId) { - if self.pipelines.get(&pipeline_id).and_then(|p| p.parent_info).is_none() { - // Notify embedder top level document started loading. - self.embedder_proxy.send(EmbedderMsg::LoadStart(top_level_browsing_context_id)); - } - } - - fn handle_load_complete_msg(&mut self, top_level_browsing_context_id: TopLevelBrowsingContextId, - pipeline_id: PipelineId) { - let mut webdriver_reset = false; - if let Some((expected_pipeline_id, ref reply_chan)) = self.webdriver.load_channel { - debug!("Sending load to WebDriver"); - if expected_pipeline_id == pipeline_id { - let _ = reply_chan.send(webdriver_msg::LoadStatus::LoadComplete); - webdriver_reset = true; - } - } - if webdriver_reset { - self.webdriver.load_channel = None; - } - - // Notify the embedder that the TopLevelBrowsingContext current document - // has finished loading. - // We need to make sure the pipeline that has finished loading is the current - // pipeline and that no pending pipeline will replace the current one. - let pipeline_is_top_level_pipeline = self.browsing_contexts - .get(&BrowsingContextId::from(top_level_browsing_context_id)) - .map(|ctx| ctx.pipeline_id == pipeline_id) - .unwrap_or(false); - if pipeline_is_top_level_pipeline { - // Is there any pending pipeline that will replace the current top level pipeline - let current_top_level_pipeline_will_be_replaced = self.pending_changes.iter() - .any(|change| change.browsing_context_id == top_level_browsing_context_id); - - if !current_top_level_pipeline_will_be_replaced { - // Notify embedder and compositor top level document finished loading. 
- self.compositor_proxy.send(ToCompositorMsg::LoadComplete(top_level_browsing_context_id)); - self.embedder_proxy.send(EmbedderMsg::LoadComplete(top_level_browsing_context_id)); - } - } - self.handle_subframe_loaded(pipeline_id); - } - - fn handle_traverse_history_msg(&mut self, - top_level_browsing_context_id: TopLevelBrowsingContextId, - direction: TraversalDirection) - { - let mut size = 0; - let mut table = HashMap::new(); - - match direction { - TraversalDirection::Forward(delta) => { - for entry in self.joint_session_future(top_level_browsing_context_id).take(delta) { - size = size + 1; - table.insert(entry.browsing_context_id, entry.clone()); - } - if size < delta { - return debug!("Traversing forward too much."); - } - }, - TraversalDirection::Back(delta) => { - for entry in self.joint_session_past(top_level_browsing_context_id).take(delta) { - size = size + 1; - table.insert(entry.browsing_context_id, entry.clone()); - } - if size < delta { - return debug!("Traversing back too much."); - } - }, - } - - for (_, entry) in table { - self.traverse_to_entry(entry); - } - } - - fn handle_joint_session_history_length(&self, - top_level_browsing_context_id: TopLevelBrowsingContextId, - sender: IpcSender) - { - // Initialize length at 1 to count for the current active entry - let mut length = 1; - for browsing_context in self.all_browsing_contexts_iter(top_level_browsing_context_id) { - length += browsing_context.next.len(); - length += browsing_context.prev.len(); - } - let _ = sender.send(length as u32); - } - - fn handle_key_msg(&mut self, ch: Option, key: Key, state: KeyState, mods: KeyModifiers) { - // Send to the explicitly focused pipeline. If it doesn't exist, fall back to sending to - // the compositor. - match self.focus_pipeline_id { - Some(pipeline_id) => { - let event = CompositorEvent::KeyEvent(ch, key, state, mods); - let msg = ConstellationControlMsg::SendEvent(pipeline_id, event); - let result = match self.pipelines.get(&pipeline_id) { - Some(pipeline) => pipeline.event_loop.send(msg), - None => return debug!("Pipeline {:?} got key event after closure.", pipeline_id), - }; - if let Err(e) = result { - self.handle_send_error(pipeline_id, e); - } - }, - None => { - let event = EmbedderMsg::KeyEvent(None, ch, key, state, mods); - self.embedder_proxy.clone().send(event); - } - } - } - - fn handle_reload_msg(&mut self, top_level_browsing_context_id: TopLevelBrowsingContextId) { - let browsing_context_id = BrowsingContextId::from(top_level_browsing_context_id); - let pipeline_id = match self.browsing_contexts.get(&browsing_context_id) { - Some(browsing_context) => browsing_context.pipeline_id, - None => return warn!("Browsing context {} got reload event after closure.", browsing_context_id), - }; - let msg = ConstellationControlMsg::Reload(pipeline_id); - let result = match self.pipelines.get(&pipeline_id) { - None => return warn!("Pipeline {} got reload event after closure.", pipeline_id), - Some(pipeline) => pipeline.event_loop.send(msg), - }; - if let Err(e) = result { - self.handle_send_error(pipeline_id, e); - } - } - - fn handle_post_message_msg(&mut self, - browsing_context_id: BrowsingContextId, - origin: Option, - data: Vec) - { - let pipeline_id = match self.browsing_contexts.get(&browsing_context_id) { - None => return warn!("postMessage to closed browsing_context {}.", browsing_context_id), - Some(browsing_context) => browsing_context.pipeline_id, - }; - let msg = ConstellationControlMsg::PostMessage(pipeline_id, origin, data); - let result = match 
self.pipelines.get(&pipeline_id) { - Some(pipeline) => pipeline.event_loop.send(msg), - None => return warn!("postMessage to closed pipeline {}.", pipeline_id), - }; - if let Err(e) = result { - self.handle_send_error(pipeline_id, e); - } - } - - fn handle_mozbrowser_event_msg(&mut self, - pipeline_id: PipelineId, - top_level_browsing_context_id: TopLevelBrowsingContextId, - event: MozBrowserEvent) { - assert!(PREFS.is_mozbrowser_enabled()); - - // Find the script channel for the given parent pipeline, - // and pass the event to that script thread. - // If the pipeline lookup fails, it is because we have torn down the pipeline, - // so it is reasonable to silently ignore the event. - match self.pipelines.get(&pipeline_id) { - Some(pipeline) => pipeline.trigger_mozbrowser_event(Some(top_level_browsing_context_id), event), - None => warn!("Pipeline {:?} handling mozbrowser event after closure.", pipeline_id), - } - } - - fn handle_get_pipeline(&mut self, - browsing_context_id: BrowsingContextId, - resp_chan: IpcSender>) { - let current_pipeline_id = self.browsing_contexts.get(&browsing_context_id) - .map(|browsing_context| browsing_context.pipeline_id); - let pipeline_id_loaded = self.pending_changes.iter().rev() - .find(|x| x.browsing_context_id == browsing_context_id) - .map(|x| x.new_pipeline_id) - .or(current_pipeline_id); - if let Err(e) = resp_chan.send(pipeline_id_loaded) { - warn!("Failed get_pipeline response ({}).", e); - } - } - - fn handle_get_browsing_context(&mut self, - pipeline_id: PipelineId, - resp_chan: IpcSender>) { - let browsing_context_id = self.pipelines.get(&pipeline_id).map(|pipeline| pipeline.browsing_context_id); - if let Err(e) = resp_chan.send(browsing_context_id) { - warn!("Failed get_browsing_context response ({}).", e); - } - } - - fn focus_parent_pipeline(&mut self, pipeline_id: PipelineId) { - let (browsing_context_id, parent_info) = match self.pipelines.get(&pipeline_id) { - Some(pipeline) => (pipeline.browsing_context_id, pipeline.parent_info), - None => return warn!("Pipeline {:?} focus parent after closure.", pipeline_id), - }; - let (parent_pipeline_id, _) = match parent_info { - Some(info) => info, - None => return debug!("Pipeline {:?} focus has no parent.", pipeline_id), - }; - - // Send a message to the parent of the provided pipeline (if it exists) - // telling it to mark the iframe element as focused. 
- let msg = ConstellationControlMsg::FocusIFrame(parent_pipeline_id, browsing_context_id); - let result = match self.pipelines.get(&parent_pipeline_id) { - Some(pipeline) => pipeline.event_loop.send(msg), - None => return warn!("Pipeline {:?} focus after closure.", parent_pipeline_id), - }; - if let Err(e) = result { - self.handle_send_error(parent_pipeline_id, e); - } - self.focus_parent_pipeline(parent_pipeline_id); - } - - fn handle_focus_msg(&mut self, pipeline_id: PipelineId) { - self.focus_pipeline_id = Some(pipeline_id); - - // Focus parent iframes recursively - self.focus_parent_pipeline(pipeline_id); - } - - fn handle_remove_iframe_msg(&mut self, browsing_context_id: BrowsingContextId) -> Vec { - let result = self.all_descendant_browsing_contexts_iter(browsing_context_id) - .flat_map(|browsing_context| browsing_context.next.iter().chain(browsing_context.prev.iter()) - .filter_map(|entry| entry.pipeline_id) - .chain(once(browsing_context.pipeline_id))) - .collect(); - self.close_browsing_context(browsing_context_id, ExitPipelineMode::Normal); - result - } - - fn handle_set_visible_msg(&mut self, pipeline_id: PipelineId, visible: bool) { - let browsing_context_id = match self.pipelines.get(&pipeline_id) { - Some(pipeline) => pipeline.browsing_context_id, - None => return warn!("No browsing context associated with pipeline {:?}", pipeline_id), - }; - - let child_pipeline_ids: Vec = self.all_descendant_browsing_contexts_iter(browsing_context_id) - .flat_map(|browsing_context| browsing_context.prev.iter().chain(browsing_context.next.iter()) - .filter_map(|entry| entry.pipeline_id) - .chain(once(browsing_context.pipeline_id))) - .collect(); - - for id in child_pipeline_ids { - if let Some(pipeline) = self.pipelines.get_mut(&id) { - pipeline.change_visibility(visible); - } - } - } - - fn handle_visibility_change_complete(&mut self, pipeline_id: PipelineId, visibility: bool) { - let (browsing_context_id, parent_pipeline_info) = match self.pipelines.get(&pipeline_id) { - None => return warn!("Visibity change for closed pipeline {:?}.", pipeline_id), - Some(pipeline) => (pipeline.browsing_context_id, pipeline.parent_info), - }; - if let Some((parent_pipeline_id, _)) = parent_pipeline_info { - let visibility_msg = ConstellationControlMsg::NotifyVisibilityChange(parent_pipeline_id, - browsing_context_id, - visibility); - let result = match self.pipelines.get(&parent_pipeline_id) { - None => return warn!("Parent pipeline {:?} closed", parent_pipeline_id), - Some(parent_pipeline) => parent_pipeline.event_loop.send(visibility_msg), - }; - - if let Err(e) = result { - self.handle_send_error(parent_pipeline_id, e); - } - } - } - - fn handle_create_canvas_paint_thread_msg( - &mut self, - size: &Size2D, - response_sender: IpcSender>) { - let webrender_api = self.webrender_api_sender.clone(); - let sender = CanvasPaintThread::start(*size, webrender_api, - opts::get().enable_canvas_antialiasing); - if let Err(e) = response_sender.send(sender) { - warn!("Create canvas paint thread response failed ({})", e); - } - } - - fn handle_webdriver_msg(&mut self, msg: WebDriverCommandMsg) { - // Find the script channel for the given parent pipeline, - // and pass the event to that script thread. 
- match msg { - WebDriverCommandMsg::GetWindowSize(_, reply) => { - let _ = reply.send(self.window_size); - }, - WebDriverCommandMsg::SetWindowSize(top_level_browsing_context_id, size, reply) => { - self.webdriver.resize_channel = Some(reply); - self.embedder_proxy.send(EmbedderMsg::ResizeTo(top_level_browsing_context_id, size)); - }, - WebDriverCommandMsg::LoadUrl(top_level_browsing_context_id, load_data, reply) => { - self.load_url_for_webdriver(top_level_browsing_context_id, load_data, reply, false); - }, - WebDriverCommandMsg::Refresh(top_level_browsing_context_id, reply) => { - let browsing_context_id = BrowsingContextId::from(top_level_browsing_context_id); - let load_data = match self.browsing_contexts.get(&browsing_context_id) { - Some(browsing_context) => browsing_context.load_data.clone(), - None => return warn!("Browsing context {} Refresh after closure.", browsing_context_id), - }; - self.load_url_for_webdriver(top_level_browsing_context_id, load_data, reply, true); - } - WebDriverCommandMsg::ScriptCommand(browsing_context_id, cmd) => { - let pipeline_id = match self.browsing_contexts.get(&browsing_context_id) { - Some(browsing_context) => browsing_context.pipeline_id, - None => return warn!("Browsing context {} ScriptCommand after closure.", browsing_context_id), - }; - let control_msg = ConstellationControlMsg::WebDriverScriptCommand(pipeline_id, cmd); - let result = match self.pipelines.get(&pipeline_id) { - Some(pipeline) => pipeline.event_loop.send(control_msg), - None => return warn!("Pipeline {:?} ScriptCommand after closure.", pipeline_id), - }; - if let Err(e) = result { - self.handle_send_error(pipeline_id, e); - } - }, - WebDriverCommandMsg::SendKeys(browsing_context_id, cmd) => { - let pipeline_id = match self.browsing_contexts.get(&browsing_context_id) { - Some(browsing_context) => browsing_context.pipeline_id, - None => return warn!("Browsing context {} SendKeys after closure.", browsing_context_id), - }; - let event_loop = match self.pipelines.get(&pipeline_id) { - Some(pipeline) => pipeline.event_loop.clone(), - None => return warn!("Pipeline {} SendKeys after closure.", pipeline_id), - }; - for (key, mods, state) in cmd { - let event = CompositorEvent::KeyEvent(None, key, state, mods); - let control_msg = ConstellationControlMsg::SendEvent(pipeline_id, event); - if let Err(e) = event_loop.send(control_msg) { - return self.handle_send_error(pipeline_id, e); - } - } - }, - WebDriverCommandMsg::TakeScreenshot(_, reply) => { - self.compositor_proxy.send(ToCompositorMsg::CreatePng(reply)); - }, - } - } - - // https://html.spec.whatwg.org/multipage/#traverse-the-history - fn traverse_to_entry(&mut self, entry: SessionHistoryEntry) { - // Step 1. - let browsing_context_id = entry.browsing_context_id; - let pipeline_id = match entry.pipeline_id { - Some(pipeline_id) => pipeline_id, - None => { - // If there is no pipeline, then the document for this - // entry has been discarded, so we navigate to the entry - // URL instead. When the document has activated, it will - // traverse to the entry, but with the new pipeline id. - debug!("Reloading document {} in browsing context {}.", entry.load_data.url, entry.browsing_context_id); - // TODO: save the sandbox state so it can be restored here. 
- let sandbox = IFrameSandboxState::IFrameUnsandboxed; - let new_pipeline_id = PipelineId::new(); - let load_data = entry.load_data; - let (top_level_id, parent_info, window_size, is_private) = - match self.browsing_contexts.get(&browsing_context_id) - { - Some(browsing_context) => match self.pipelines.get(&browsing_context.pipeline_id) { - Some(pipeline) => (browsing_context.top_level_id, - pipeline.parent_info, - browsing_context.size, - pipeline.is_private), - None => (browsing_context.top_level_id, - None, - browsing_context.size, - false), - }, - None => return warn!("no browsing context to traverse"), - }; - self.new_pipeline(new_pipeline_id, browsing_context_id, top_level_id, parent_info, - window_size, load_data.clone(), sandbox, is_private); - self.add_pending_change(SessionHistoryChange { - top_level_browsing_context_id: top_level_id, - browsing_context_id: browsing_context_id, - new_pipeline_id: new_pipeline_id, - load_data: load_data, - replace_instant: Some(entry.instant), - }); - return; - } - }; - - // Check if the currently focused pipeline is the pipeline being replaced - // (or a child of it). This has to be done here, before the current - // frame tree is modified below. - let update_focus_pipeline = self.focused_pipeline_is_descendant_of(entry.browsing_context_id); - - let (old_pipeline_id, replaced_pipeline_id, top_level_id) = - match self.browsing_contexts.get_mut(&browsing_context_id) - { - Some(browsing_context) => { - let old_pipeline_id = browsing_context.pipeline_id; - let top_level_id = browsing_context.top_level_id; - let mut curr_entry = browsing_context.current(); - - if entry.instant > browsing_context.instant { - // We are traversing to the future. - while let Some(next) = browsing_context.next.pop() { - browsing_context.prev.push(curr_entry); - curr_entry = next; - if entry.instant <= curr_entry.instant { break; } - } - } else if entry.instant < browsing_context.instant { - // We are traversing to the past. - while let Some(prev) = browsing_context.prev.pop() { - browsing_context.next.push(curr_entry); - curr_entry = prev; - if entry.instant >= curr_entry.instant { break; } - } - } - - debug_assert_eq!(entry.instant, curr_entry.instant); - - let replaced_pipeline_id = curr_entry.pipeline_id; - - browsing_context.update_current(pipeline_id, entry); - - (old_pipeline_id, replaced_pipeline_id, top_level_id) - }, - None => return warn!("no browsing context to traverse"), - }; - - let parent_info = self.pipelines.get(&old_pipeline_id) - .and_then(|pipeline| pipeline.parent_info); - - // If the currently focused pipeline is the one being changed (or a child - // of the pipeline being changed) then update the focus pipeline to be - // the replacement. - if update_focus_pipeline { - self.focus_pipeline_id = Some(pipeline_id); - } - - // If we replaced a pipeline, close it. - if let Some(replaced_pipeline_id) = replaced_pipeline_id { - if replaced_pipeline_id != pipeline_id { - self.close_pipeline(replaced_pipeline_id, DiscardBrowsingContext::No, ExitPipelineMode::Normal); - } - } - - // Deactivate the old pipeline, and activate the new one. - self.update_activity(old_pipeline_id); - self.update_activity(pipeline_id); - self.notify_history_changed(top_level_id); - - self.update_frame_tree_if_active(top_level_id); - - // Update the owning iframe to point to the new pipeline id. - // This makes things like contentDocument work correctly. 
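// Illustrative sketch, not part of the deleted Servo sources: the traversal in
// traverse_to_entry above moves the "current" entry between two stacks (past and
// future) until it reaches the entry with the requested timestamp. The History type
// and its field names here are hypothetical simplifications.

struct Entry { instant: u64 }

struct History { prev: Vec<Entry>, current: Entry, next: Vec<Entry> }

impl History {
    fn traverse_to(&mut self, instant: u64) {
        if instant > self.current.instant {
            // Traverse into the future: the nearest future entry sits at the end of `next`.
            while let Some(next) = self.next.pop() {
                let old = std::mem::replace(&mut self.current, next);
                self.prev.push(old);
                if instant <= self.current.instant { break; }
            }
        } else if instant < self.current.instant {
            // Traverse into the past.
            while let Some(prev) = self.prev.pop() {
                let old = std::mem::replace(&mut self.current, prev);
                self.next.push(old);
                if instant >= self.current.instant { break; }
            }
        }
    }
}

fn main() {
    let e = |instant| Entry { instant };
    let mut h = History {
        prev: vec![e(1), e(2)],
        current: e(3),
        next: vec![e(5), e(4)], // nearest future entry kept at the end
    };
    h.traverse_to(1);
    assert_eq!(h.current.instant, 1);
    h.traverse_to(5);
    assert_eq!(h.current.instant, 5);
}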
- if let Some((parent_pipeline_id, frame_type)) = parent_info { - let msg = ConstellationControlMsg::UpdatePipelineId(parent_pipeline_id, - browsing_context_id, pipeline_id, UpdatePipelineIdReason::Traversal); - let result = match self.pipelines.get(&parent_pipeline_id) { - None => return warn!("Pipeline {:?} child traversed after closure.", parent_pipeline_id), - Some(pipeline) => pipeline.event_loop.send(msg), - }; - if let Err(e) = result { - self.handle_send_error(parent_pipeline_id, e); - } - - // If this is a mozbrowser iframe, send a mozbrowser location change event. - // This is the result of a back/forward traversal. - if frame_type == FrameType::MozBrowserIFrame { - self.trigger_mozbrowserlocationchange(top_level_id); - } - } - } - - fn notify_history_changed(&self, top_level_browsing_context_id: TopLevelBrowsingContextId) { - // Send a flat projection of the history. - // The final vector is a concatenation of the LoadData of the past entries, - // the current entry and the future entries. - // LoadData of inner frames are ignored and replaced with the LoadData of the parent. - - // Ignore LoadData of non-top-level browsing contexts. - let keep_load_data_if_top_browsing_context = |entry: &SessionHistoryEntry| { - match entry.pipeline_id { - None => Some(entry.load_data.clone()), - Some(pipeline_id) => { - match self.pipelines.get(&pipeline_id) { - None => Some(entry.load_data.clone()), - Some(pipeline) => match pipeline.parent_info { - None => Some(entry.load_data.clone()), - Some(_) => None, - } - } - } - } - }; - - // If LoadData was ignored, use the LoadData of the previous SessionHistoryEntry, which - // is the LoadData of the parent browsing context. - let resolve_load_data = |previous_load_data: &mut LoadData, load_data| { - let load_data = match load_data { - None => previous_load_data.clone(), - Some(load_data) => load_data, - }; - *previous_load_data = load_data.clone(); - Some(load_data) - }; - - let browsing_context_id = BrowsingContextId::from(top_level_browsing_context_id); - let current_load_data = match self.browsing_contexts.get(&browsing_context_id) { - Some(browsing_context) => browsing_context.load_data.clone(), - None => return warn!("notify_history_changed error after top-level browsing context closed."), - }; - - let mut entries: Vec = self.joint_session_past(top_level_browsing_context_id) - .map(&keep_load_data_if_top_browsing_context) - .scan(current_load_data.clone(), &resolve_load_data) - .collect(); - - entries.reverse(); - - let current_index = entries.len(); - - entries.push(current_load_data.clone()); - - entries.extend(self.joint_session_future(top_level_browsing_context_id) - .map(&keep_load_data_if_top_browsing_context) - .scan(current_load_data.clone(), &resolve_load_data)); - - let msg = EmbedderMsg::HistoryChanged(top_level_browsing_context_id, entries, current_index); - self.embedder_proxy.send(msg); - } - - fn load_url_for_webdriver(&mut self, - top_level_browsing_context_id: TopLevelBrowsingContextId, - load_data: LoadData, - reply: IpcSender, - replace: bool) - { - let browsing_context_id = BrowsingContextId::from(top_level_browsing_context_id); - let pipeline_id = match self.browsing_contexts.get(&browsing_context_id) { - Some(browsing_context) => browsing_context.pipeline_id, - None => return warn!("Webdriver load for closed browsing context {}.", browsing_context_id), - }; - if let Some(new_pipeline_id) = self.load_url(top_level_browsing_context_id, pipeline_id, load_data, replace) { - self.webdriver.load_channel = 
Some((new_pipeline_id, reply)); - } - } - - fn change_session_history(&mut self, change: SessionHistoryChange) { - debug!("Setting browsing context {} to be pipeline {}.", change.browsing_context_id, change.new_pipeline_id); - - // If the currently focused pipeline is the one being changed (or a child - // of the pipeline being changed) then update the focus pipeline to be - // the replacement. - if self.focused_pipeline_is_descendant_of(change.browsing_context_id) { - self.focus_pipeline_id = Some(change.new_pipeline_id); - } - - let (evicted_id, new_context, navigated) = if let Some(instant) = change.replace_instant { - debug!("Replacing pipeline in existing browsing context with timestamp {:?}.", instant); - let entry = SessionHistoryEntry { - browsing_context_id: change.browsing_context_id, - pipeline_id: Some(change.new_pipeline_id), - load_data: change.load_data.clone(), - instant: instant, - }; - self.traverse_to_entry(entry); - (None, false, None) - } else if let Some(browsing_context) = self.browsing_contexts.get_mut(&change.browsing_context_id) { - debug!("Adding pipeline to existing browsing context."); - let old_pipeline_id = browsing_context.pipeline_id; - browsing_context.load(change.new_pipeline_id, change.load_data.clone()); - let evicted_id = browsing_context.prev.len() - .checked_sub(PREFS.get("session-history.max-length").as_u64().unwrap_or(20) as usize) - .and_then(|index| browsing_context.prev.get_mut(index)) - .and_then(|entry| entry.pipeline_id.take()); - (evicted_id, false, Some(old_pipeline_id)) - } else { - debug!("Adding pipeline to new browsing context."); - (None, true, None) - }; - - if let Some(evicted_id) = evicted_id { - self.close_pipeline(evicted_id, DiscardBrowsingContext::No, ExitPipelineMode::Normal); - } - - if new_context { - self.new_browsing_context(change.browsing_context_id, - change.top_level_browsing_context_id, - change.new_pipeline_id, - change.load_data); - self.update_activity(change.new_pipeline_id); - self.notify_history_changed(change.top_level_browsing_context_id); - }; - - if let Some(old_pipeline_id) = navigated { - // Deactivate the old pipeline, and activate the new one. - self.update_activity(old_pipeline_id); - self.update_activity(change.new_pipeline_id); - // Clear the joint session future - self.clear_joint_session_future(change.top_level_browsing_context_id); - self.notify_history_changed(change.top_level_browsing_context_id); - } - - // If the navigation is for a top-level browsing context, inform mozbrowser - if change.browsing_context_id == change.top_level_browsing_context_id { - self.trigger_mozbrowserlocationchange(change.top_level_browsing_context_id); - } - - self.update_frame_tree_if_active(change.top_level_browsing_context_id); - } - - fn handle_activate_document_msg(&mut self, pipeline_id: PipelineId) { - debug!("Document ready to activate {}", pipeline_id); - - // Notify the parent (if there is one). - if let Some(pipeline) = self.pipelines.get(&pipeline_id) { - if let Some((parent_pipeline_id, _)) = pipeline.parent_info { - if let Some(parent_pipeline) = self.pipelines.get(&parent_pipeline_id) { - let msg = ConstellationControlMsg::UpdatePipelineId(parent_pipeline_id, - pipeline.browsing_context_id, pipeline_id, UpdatePipelineIdReason::Navigation); - let _ = parent_pipeline.event_loop.send(msg); - } - } - } - - // Find the pending change whose new pipeline id is pipeline_id. 
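// Illustrative sketch, not part of the deleted Servo sources: the eviction step in
// change_session_history above keeps only a bounded number of back-history pipelines
// alive. Because each navigation adds one entry, it is enough to take the pipeline of
// the single entry that just fell outside the window. Names below are hypothetical,
// and entries are reduced to Option<pipeline id> instead of full session entries.

fn evict_history_pipeline(prev: &mut Vec<Option<u32>>, max_length: usize) -> Option<u32> {
    prev.len()
        .checked_sub(max_length)          // index of the entry that fell out of the window
        .and_then(|index| prev.get_mut(index))
        .and_then(|entry| entry.take())   // discard its pipeline, keep the history entry
}

fn main() {
    let mut prev = vec![Some(10), Some(11)];
    // Only two back entries: still within a window of 3, nothing to evict.
    assert_eq!(evict_history_pipeline(&mut prev, 3), None);
    prev.push(Some(12));
    // Three back entries: the oldest pipeline now falls out of the window.
    assert_eq!(evict_history_pipeline(&mut prev, 3), Some(10));
    assert_eq!(prev[0], None);
}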
- let pending_index = self.pending_changes.iter().rposition(|change| { - change.new_pipeline_id == pipeline_id - }); - - // If it is found, remove it from the pending changes, and make it - // the active document of its frame. - if let Some(pending_index) = pending_index { - let change = self.pending_changes.swap_remove(pending_index); - self.change_session_history(change); - } - } - - /// Called when the window is resized. - fn handle_window_size_msg(&mut self, - top_level_browsing_context_id: TopLevelBrowsingContextId, - new_size: WindowSizeData, - size_type: WindowSizeType) - { - debug!("handle_window_size_msg: {:?}", new_size.initial_viewport.to_untyped()); - - let browsing_context_id = BrowsingContextId::from(top_level_browsing_context_id); - self.resize_browsing_context(new_size, size_type, browsing_context_id); - - if let Some(resize_channel) = self.webdriver.resize_channel.take() { - let _ = resize_channel.send(new_size); - } - - self.window_size = new_size; - } - - /// Handle updating actual viewport / zoom due to @viewport rules - fn handle_viewport_constrained_msg(&mut self, - pipeline_id: PipelineId, - constraints: ViewportConstraints) { - self.compositor_proxy.send(ToCompositorMsg::ViewportConstrained(pipeline_id, constraints)); - } - - /// Checks the state of all script and layout pipelines to see if they are idle - /// and compares the current layout state to what the compositor has. This is used - /// to check if the output image is "stable" and can be written as a screenshot - /// for reftests. - /// Since this function is only used in reftests, we do not harden it against panic. - fn handle_is_ready_to_save_image(&mut self, pipeline_states: HashMap) -> ReadyToSave { - // Note that this function can panic, due to ipc-channel creation failure. - // avoiding this panic would require a mechanism for dealing - // with low-resource scenarios. - // - // If there is no focus browsing context yet, the initial page has - // not loaded, so there is nothing to save yet. - let top_level_browsing_context_id = self.focus_pipeline_id - .and_then(|pipeline_id| self.pipelines.get(&pipeline_id)) - .map(|pipeline| pipeline.top_level_browsing_context_id); - let top_level_browsing_context_id = match top_level_browsing_context_id { - Some(id) => id, - None => return ReadyToSave::NoTopLevelBrowsingContext, - }; - - // If there are pending loads, wait for those to complete. - if !self.pending_changes.is_empty() { - return ReadyToSave::PendingChanges; - } - - let (state_sender, state_receiver) = ipc::channel().expect("Failed to create IPC channel!"); - let (epoch_sender, epoch_receiver) = ipc::channel().expect("Failed to create IPC channel!"); - - // Step through the fully active browsing contexts, checking that the script - // thread is idle, and that the current epoch of the layout thread - // matches what the compositor has painted. If all these conditions - // are met, then the output image should not change and a reftest - // screenshot can safely be written. - for browsing_context in self.fully_active_browsing_contexts_iter(top_level_browsing_context_id) { - let pipeline_id = browsing_context.pipeline_id; - debug!("Checking readiness of browsing context {}, pipeline {}.", browsing_context.id, pipeline_id); - - let pipeline = match self.pipelines.get(&pipeline_id) { - None => { - warn!("Pipeline {} screenshot while closing.", pipeline_id); - continue; - }, - Some(pipeline) => pipeline, - }; - - // Check to see if there are any webfonts still loading. 
- // - // If GetWebFontLoadState returns false, either there are no - // webfonts loading, or there's a WebFontLoaded message waiting in - // script_chan's message queue. Therefore, we need to check this - // before we check whether the document is ready; otherwise, - // there's a race condition where a webfont has finished loading, - // but hasn't yet notified the document. - let msg = LayoutControlMsg::GetWebFontLoadState(state_sender.clone()); - if let Err(e) = pipeline.layout_chan.send(msg) { - warn!("Get web font failed ({})", e); - } - if state_receiver.recv().unwrap_or(true) { - return ReadyToSave::WebFontNotLoaded; - } - - // See if this pipeline has reached idle script state yet. - match self.document_states.get(&browsing_context.pipeline_id) { - Some(&DocumentState::Idle) => {} - Some(&DocumentState::Pending) | None => { - return ReadyToSave::DocumentLoading; - } - } - - // Check the visible rectangle for this pipeline. If the constellation has received a - // size for the pipeline, then its painting should be up to date. If the constellation - // *hasn't* received a size, it could be that the layer was hidden by script before the - // compositor discovered it, so we just don't check the layer. - if let Some(size) = browsing_context.size { - // If the rectangle for this pipeline is zero sized, it will - // never be painted. In this case, don't query the layout - // thread as it won't contribute to the final output image. - if size == TypedSize2D::zero() { - continue; - } - - // Get the epoch that the compositor has drawn for this pipeline. - let compositor_epoch = pipeline_states.get(&browsing_context.pipeline_id); - match compositor_epoch { - Some(compositor_epoch) => { - // Synchronously query the layout thread to see if the current - // epoch matches what the compositor has drawn. If they match - // (and script is idle) then this pipeline won't change again - // and can be considered stable. - let message = LayoutControlMsg::GetCurrentEpoch(epoch_sender.clone()); - if let Err(e) = pipeline.layout_chan.send(message) { - warn!("Failed to send GetCurrentEpoch ({}).", e); - } - match epoch_receiver.recv() { - Err(e) => warn!("Failed to receive current epoch ({}).", e), - Ok(layout_thread_epoch) => if layout_thread_epoch != *compositor_epoch { - return ReadyToSave::EpochMismatch; - }, - } - } - None => { - // The compositor doesn't know about this pipeline yet. - // Assume it hasn't rendered yet. - return ReadyToSave::PipelineUnknown; - } - } - } - } - - // All script threads are idle and layout epochs match compositor, so output image! - ReadyToSave::Ready - } - - /// Get the current activity of a pipeline. - fn get_activity(&self, pipeline_id: PipelineId) -> DocumentActivity { - let mut ancestor_id = pipeline_id; - loop { - if let Some(ancestor) = self.pipelines.get(&ancestor_id) { - if let Some(browsing_context) = self.browsing_contexts.get(&ancestor.browsing_context_id) { - if browsing_context.pipeline_id == ancestor_id { - if let Some((parent_id, FrameType::IFrame)) = ancestor.parent_info { - ancestor_id = parent_id; - continue; - } else { - return DocumentActivity::FullyActive; - } - } - } - } - if pipeline_id == ancestor_id { - return DocumentActivity::Inactive; - } else { - return DocumentActivity::Active; - } - } - } - - /// Set the current activity of a pipeline. 
- fn set_activity(&self, pipeline_id: PipelineId, activity: DocumentActivity) { - debug!("Setting activity of {} to be {:?}.", pipeline_id, activity); - if let Some(pipeline) = self.pipelines.get(&pipeline_id) { - pipeline.set_activity(activity); - let child_activity = if activity == DocumentActivity::Inactive { - DocumentActivity::Active - } else { - activity - }; - for child_id in &pipeline.children { - if let Some(child) = self.browsing_contexts.get(child_id) { - self.set_activity(child.pipeline_id, child_activity); - } - } - } - } - - /// Update the current activity of a pipeline. - fn update_activity(&self, pipeline_id: PipelineId) { - self.set_activity(pipeline_id, self.get_activity(pipeline_id)); - } - - /// Handle updating the size of a browsing context. - /// This notifies every pipeline in the context of the new size. - fn resize_browsing_context(&mut self, - new_size: WindowSizeData, - size_type: WindowSizeType, - browsing_context_id: BrowsingContextId) - { - if let Some(browsing_context) = self.browsing_contexts.get_mut(&browsing_context_id) { - browsing_context.size = Some(new_size.initial_viewport); - } - - if let Some(browsing_context) = self.browsing_contexts.get(&browsing_context_id) { - // Send Resize (or ResizeInactive) messages to each - // pipeline in the frame tree. - let pipeline_id = browsing_context.pipeline_id; - let pipeline = match self.pipelines.get(&pipeline_id) { - None => return warn!("Pipeline {:?} resized after closing.", pipeline_id), - Some(pipeline) => pipeline, - }; - let _ = pipeline.event_loop.send(ConstellationControlMsg::Resize( - pipeline.id, - new_size, - size_type - )); - let pipelines = browsing_context.prev.iter().chain(browsing_context.next.iter()) - .filter_map(|entry| entry.pipeline_id) - .filter_map(|pipeline_id| self.pipelines.get(&pipeline_id)); - for pipeline in pipelines { - let _ = pipeline.event_loop.send(ConstellationControlMsg::ResizeInactive( - pipeline.id, - new_size - )); - } - } - - // Send resize message to any pending pipelines that aren't loaded yet. 
- for change in &self.pending_changes { - let pipeline_id = change.new_pipeline_id; - let pipeline = match self.pipelines.get(&pipeline_id) { - None => { warn!("Pending pipeline {:?} is closed", pipeline_id); continue; } - Some(pipeline) => pipeline, - }; - if pipeline.browsing_context_id == browsing_context_id { - let _ = pipeline.event_loop.send(ConstellationControlMsg::Resize( - pipeline.id, - new_size, - size_type - )); - } - } - } - - fn clear_joint_session_future(&mut self, top_level_browsing_context_id: TopLevelBrowsingContextId) { - let browsing_context_ids: Vec = - self.all_browsing_contexts_iter(top_level_browsing_context_id) - .map(|browsing_context| browsing_context.id) - .collect(); - for browsing_context_id in browsing_context_ids { - let evicted = match self.browsing_contexts.get_mut(&browsing_context_id) { - Some(browsing_context) => browsing_context.remove_forward_entries(), - None => continue, - }; - for entry in evicted { - if let Some(pipeline_id) = entry.pipeline_id { - self.close_pipeline(pipeline_id, DiscardBrowsingContext::No, ExitPipelineMode::Normal); - } - } - } - } - - // Close a browsing context (and all children) - fn close_browsing_context(&mut self, browsing_context_id: BrowsingContextId, exit_mode: ExitPipelineMode) { - debug!("Closing browsing context {}.", browsing_context_id); - - self.close_browsing_context_children(browsing_context_id, DiscardBrowsingContext::Yes, exit_mode); - - let browsing_context = match self.browsing_contexts.remove(&browsing_context_id) { - None => return warn!("Closing browsing context {:?} twice.", browsing_context_id), - Some(browsing_context) => browsing_context, - }; - - if BrowsingContextId::from(browsing_context.top_level_id) == browsing_context_id { - self.event_loops.remove(&browsing_context.top_level_id); - } - - let parent_info = self.pipelines.get(&browsing_context.pipeline_id) - .and_then(|pipeline| pipeline.parent_info); - - if let Some((parent_pipeline_id, _)) = parent_info { - match self.pipelines.get_mut(&parent_pipeline_id) { - None => return warn!("Pipeline {:?} child closed after parent.", parent_pipeline_id), - Some(parent_pipeline) => parent_pipeline.remove_child(browsing_context_id), - }; - } - debug!("Closed browsing context {:?}.", browsing_context_id); - } - - // Close the children of a browsing context - fn close_browsing_context_children(&mut self, - browsing_context_id: BrowsingContextId, - dbc: DiscardBrowsingContext, - exit_mode: ExitPipelineMode) - { - debug!("Closing browsing context children {}.", browsing_context_id); - // Store information about the pipelines to be closed. Then close the - // pipelines, before removing ourself from the browsing_contexts hash map. This - // ordering is vital - so that if close_pipeline() ends up closing - // any child browsing contexts, they can be removed from the parent browsing context correctly. 
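// Illustrative sketch, not part of the deleted Servo sources: the "collect ids first,
// then close" shape used by close_browsing_context_children and close_pipeline above
// avoids holding a borrow of the pipelines/browsing-contexts maps while the (possibly
// recursive) close calls mutate them. All types and names below are hypothetical.

use std::collections::HashMap;

#[derive(Default)]
struct Constellation {
    // browsing context id -> pipeline ids that live in it
    contexts: HashMap<u32, Vec<u32>>,
    pipelines: HashMap<u32, String>,
}

impl Constellation {
    fn close_context(&mut self, ctx: u32) {
        // 1. Copy out everything we intend to close while the map is borrowed immutably.
        let to_close: Vec<u32> = self.contexts.get(&ctx).cloned().unwrap_or_default();
        // 2. Now mutate freely; nested closes cannot invalidate an outstanding borrow.
        for pipeline in to_close {
            self.close_pipeline(pipeline);
        }
        self.contexts.remove(&ctx);
    }

    fn close_pipeline(&mut self, id: u32) {
        self.pipelines.remove(&id);
    }
}

fn main() {
    let mut c = Constellation::default();
    c.contexts.insert(1, vec![10, 11]);
    c.pipelines.insert(10, "a".into());
    c.pipelines.insert(11, "b".into());
    c.close_context(1);
    assert!(c.pipelines.is_empty() && c.contexts.is_empty());
}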
- let mut pipelines_to_close: Vec = self.pending_changes.iter() - .filter(|change| change.browsing_context_id == browsing_context_id) - .map(|change| change.new_pipeline_id) - .collect(); - - if let Some(browsing_context) = self.browsing_contexts.get(&browsing_context_id) { - pipelines_to_close.extend(browsing_context.next.iter().filter_map(|state| state.pipeline_id)); - pipelines_to_close.push(browsing_context.pipeline_id); - pipelines_to_close.extend(browsing_context.prev.iter().filter_map(|state| state.pipeline_id)); - } - - for pipeline_id in pipelines_to_close { - self.close_pipeline(pipeline_id, dbc, exit_mode); - } - - debug!("Closed browsing context children {}.", browsing_context_id); - } - - // Close all pipelines at and beneath a given browsing context - fn close_pipeline(&mut self, pipeline_id: PipelineId, dbc: DiscardBrowsingContext, exit_mode: ExitPipelineMode) { - debug!("Closing pipeline {:?}.", pipeline_id); - // Store information about the browsing contexts to be closed. Then close the - // browsing contexts, before removing ourself from the pipelines hash map. This - // ordering is vital - so that if close_browsing_context() ends up closing - // any child pipelines, they can be removed from the parent pipeline correctly. - let browsing_contexts_to_close = { - let mut browsing_contexts_to_close = vec!(); - - if let Some(pipeline) = self.pipelines.get(&pipeline_id) { - browsing_contexts_to_close.extend_from_slice(&pipeline.children); - } - - browsing_contexts_to_close - }; - - // Remove any child browsing contexts - for child_browsing_context in &browsing_contexts_to_close { - self.close_browsing_context(*child_browsing_context, exit_mode); - } - - // Note, we don't remove the pipeline now, we wait for the message to come back from - // the pipeline. - let pipeline = match self.pipelines.get(&pipeline_id) { - Some(pipeline) => pipeline, - None => return warn!("Closing pipeline {:?} twice.", pipeline_id), - }; - - // Remove this pipeline from pending changes if it hasn't loaded yet. - let pending_index = self.pending_changes.iter().position(|change| { - change.new_pipeline_id == pipeline_id - }); - if let Some(pending_index) = pending_index { - self.pending_changes.remove(pending_index); - } - - // Inform script, compositor that this pipeline has exited. - match exit_mode { - ExitPipelineMode::Normal => pipeline.exit(dbc), - ExitPipelineMode::Force => pipeline.force_exit(dbc), - } - debug!("Closed pipeline {:?}.", pipeline_id); - } - - // Randomly close a pipeline -if --random-pipeline-closure-probability is set - fn maybe_close_random_pipeline(&mut self) { - match self.random_pipeline_closure { - Some((ref mut rng, probability)) => if probability <= rng.gen::() { return }, - _ => return, - }; - // In order to get repeatability, we sort the pipeline ids. - let mut pipeline_ids: Vec<&PipelineId> = self.pipelines.keys().collect(); - pipeline_ids.sort(); - if let Some((ref mut rng, _)) = self.random_pipeline_closure { - if let Some(pipeline_id) = rng.choose(&*pipeline_ids) { - if let Some(pipeline) = self.pipelines.get(pipeline_id) { - // Don't kill the mozbrowser pipeline - if PREFS.is_mozbrowser_enabled() && pipeline.parent_info.is_none() { - info!("Not closing mozbrowser pipeline {}.", pipeline_id); - } else { - // Note that we deliberately do not do any of the tidying up - // associated with closing a pipeline. The constellation should cope! 
- warn!("Randomly closing pipeline {}.", pipeline_id); - pipeline.force_exit(DiscardBrowsingContext::No); - } - } - } - } - } - - // Convert a browsing context to a sendable form to pass to the compositor - fn browsing_context_to_sendable(&self, browsing_context_id: BrowsingContextId) -> Option { - self.browsing_contexts.get(&browsing_context_id).and_then(|browsing_context| { - self.pipelines.get(&browsing_context.pipeline_id).map(|pipeline| { - let mut frame_tree = SendableFrameTree { - pipeline: pipeline.to_sendable(), - size: browsing_context.size, - children: vec!(), - }; - - for child_browsing_context_id in &pipeline.children { - if let Some(child) = self.browsing_context_to_sendable(*child_browsing_context_id) { - frame_tree.children.push(child); - } - } - - frame_tree - }) - }) - } - - /// Re-send the frame tree to the compositor. - fn update_frame_tree_if_active(&mut self, mut top_level_browsing_context_id: TopLevelBrowsingContextId) { - // This might be a mozbrowser iframe, so we need to climb the parent hierarchy, - // even though it's a top-level browsing context. - // FIXME(paul): to remove once mozbrowser API is removed. - let browsing_context_id = BrowsingContextId::from(top_level_browsing_context_id); - let mut pipeline_id = match self.browsing_contexts.get(&browsing_context_id) { - Some(browsing_context) => browsing_context.pipeline_id, - None => return warn!("Sending frame tree for discarded browsing context {}.", browsing_context_id), - }; - - while let Some(pipeline) = self.pipelines.get(&pipeline_id) { - match pipeline.parent_info { - Some((parent_id, _)) => pipeline_id = parent_id, - None => { - top_level_browsing_context_id = pipeline.top_level_browsing_context_id; - break; - }, - } - } - - // Only send the frame tree if it's the active one or if no frame tree - // has been sent yet. - if self.active_browser_id.is_none() || Some(top_level_browsing_context_id) == self.active_browser_id { - self.send_frame_tree(top_level_browsing_context_id); - } - - } - - /// Send the current frame tree to compositor - fn send_frame_tree(&mut self, top_level_browsing_context_id: TopLevelBrowsingContextId) { - self.active_browser_id = Some(top_level_browsing_context_id); - let browsing_context_id = BrowsingContextId::from(top_level_browsing_context_id); - - // Note that this function can panic, due to ipc-channel creation failure. - // avoiding this panic would require a mechanism for dealing - // with low-resource scenarios. 
- debug!("Sending frame tree for browsing context {}.", browsing_context_id); - if let Some(frame_tree) = self.browsing_context_to_sendable(browsing_context_id) { - self.compositor_proxy.send(ToCompositorMsg::SetFrameTree(frame_tree)); - } - } - - // https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowserlocationchange - // Note that this is a no-op if the pipeline is not a mozbrowser iframe - fn trigger_mozbrowserlocationchange(&self, - top_level_browsing_context_id: TopLevelBrowsingContextId) - { - let browsing_context_id = BrowsingContextId::from(top_level_browsing_context_id); - let pipeline_id = match self.browsing_contexts.get(&browsing_context_id) { - Some(browsing_context) => browsing_context.pipeline_id, - None => return warn!("mozbrowser location change on closed browsing context {}.", browsing_context_id), - }; - let (url, parent_info) = match self.pipelines.get(&pipeline_id) { - Some(pipeline) => (pipeline.url.clone(), pipeline.parent_info), - None => return warn!("mozbrowser location change on closed pipeline {}.", pipeline_id), - }; - let parent_id = match parent_info { - Some((parent_id, FrameType::MozBrowserIFrame)) => parent_id, - _ => return debug!("mozbrowser location change on a regular iframe {}", browsing_context_id), - }; - let can_go_forward = !self.joint_session_future_is_empty(top_level_browsing_context_id); - let can_go_back = !self.joint_session_past_is_empty(top_level_browsing_context_id); - let event = MozBrowserEvent::LocationChange(url.to_string(), can_go_back, can_go_forward); - match self.pipelines.get(&parent_id) { - Some(parent) => parent.trigger_mozbrowser_event(Some(top_level_browsing_context_id), event), - None => return warn!("mozbrowser location change on closed parent {}", parent_id), - }; - } - - // https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowsererror - fn trigger_mozbrowsererror(&mut self, - top_level_browsing_context_id: TopLevelBrowsingContextId, - reason: String, - backtrace: Option) - { - if !PREFS.is_mozbrowser_enabled() { return; } - - let mut report = String::new(); - for (thread_name, warning) in self.handled_warnings.drain(..) 
{ - report.push_str("\nWARNING: "); - if let Some(thread_name) = thread_name { - report.push_str("<"); - report.push_str(&*thread_name); - report.push_str(">: "); - } - report.push_str(&*warning); - } - report.push_str("\nERROR: "); - report.push_str(&*reason); - if let Some(backtrace) = backtrace { - report.push_str("\n\n"); - report.push_str(&*backtrace); - } - - let event = MozBrowserEvent::Error(MozBrowserErrorType::Fatal, reason, report); - let browsing_context_id = BrowsingContextId::from(top_level_browsing_context_id); - let pipeline_id = match self.browsing_contexts.get(&browsing_context_id) { - Some(browsing_context) => browsing_context.pipeline_id, - None => return warn!("Mozbrowser error after top-level browsing context closed."), - }; - let parent_id = match self.pipelines.get(&pipeline_id) { - Some(pipeline) => match pipeline.parent_info { - Some((parent_id, FrameType::MozBrowserIFrame)) => parent_id, - _ => return pipeline.trigger_mozbrowser_event(None, event), - }, - None => return warn!("Mozbrowser error on a closed pipeline {}", pipeline_id), - }; - match self.pipelines.get(&parent_id) { - None => warn!("Mozbrowser error after parent pipeline {} closed.", parent_id), - Some(parent) => parent.trigger_mozbrowser_event(Some(top_level_browsing_context_id), event), - }; - } - - fn focused_pipeline_is_descendant_of(&self, browsing_context_id: BrowsingContextId) -> bool { - self.focus_pipeline_id.map_or(false, |pipeline_id| { - self.fully_active_descendant_browsing_contexts_iter(browsing_context_id) - .any(|browsing_context| browsing_context.pipeline_id == pipeline_id) - }) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/constellation/event_loop.rs b/collector/compile-benchmarks/style-servo/components/constellation/event_loop.rs deleted file mode 100644 index c20a3e2da..000000000 --- a/collector/compile-benchmarks/style-servo/components/constellation/event_loop.rs +++ /dev/null @@ -1,46 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! This module contains the `EventLoop` type, which is the constellation's -//! view of a script thread. When an `EventLoop` is dropped, an `ExitScriptThread` -//! message is sent to the script thread, asking it to shut down. - -use ipc_channel::Error; -use ipc_channel::ipc::IpcSender; -use script_traits::ConstellationControlMsg; -use std::marker::PhantomData; -use std::rc::Rc; - -/// https://html.spec.whatwg.org/multipage/#event-loop -pub struct EventLoop { - script_chan: IpcSender, - dont_send_or_sync: PhantomData>, -} - -impl Drop for EventLoop { - fn drop(&mut self) { - let _ = self.script_chan.send(ConstellationControlMsg::ExitScriptThread); - } -} - -impl EventLoop { - /// Create a new event loop from the channel to its script thread. - pub fn new(script_chan: IpcSender) -> Rc { - Rc::new(EventLoop { - script_chan: script_chan, - dont_send_or_sync: PhantomData, - }) - } - - /// Send a message to the event loop. - pub fn send(&self, msg: ConstellationControlMsg) -> Result<(), Error> { - self.script_chan.send(msg) - } - - /// The underlying channel to the script thread. 
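// Illustrative sketch, not part of the deleted Servo sources: the EventLoop type in
// the deleted event_loop.rs above exists so that dropping the constellation's last
// handle to a script thread automatically asks that thread to shut down. This sketch
// substitutes std::sync::mpsc for ipc-channel; names here are hypothetical.

use std::rc::Rc;
use std::sync::mpsc::{channel, SendError, Sender};

#[derive(Debug, PartialEq)]
enum ControlMsg { DoWork, ExitScriptThread }

struct EventLoop { chan: Sender<ControlMsg> }

impl Drop for EventLoop {
    fn drop(&mut self) {
        // Best-effort: the script thread may already be gone.
        let _ = self.chan.send(ControlMsg::ExitScriptThread);
    }
}

impl EventLoop {
    fn new(chan: Sender<ControlMsg>) -> Rc<EventLoop> {
        Rc::new(EventLoop { chan })
    }
    fn send(&self, msg: ControlMsg) -> Result<(), SendError<ControlMsg>> {
        self.chan.send(msg)
    }
}

fn main() {
    let (tx, rx) = channel();
    let event_loop = EventLoop::new(tx);
    event_loop.send(ControlMsg::DoWork).unwrap();
    drop(event_loop); // last handle gone -> ExitScriptThread is sent automatically
    assert_eq!(rx.recv().unwrap(), ControlMsg::DoWork);
    assert_eq!(rx.recv().unwrap(), ControlMsg::ExitScriptThread);
}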
- pub fn sender(&self) -> IpcSender { - self.script_chan.clone() - } -} - diff --git a/collector/compile-benchmarks/style-servo/components/constellation/lib.rs b/collector/compile-benchmarks/style-servo/components/constellation/lib.rs deleted file mode 100644 index 48365cdef..000000000 --- a/collector/compile-benchmarks/style-servo/components/constellation/lib.rs +++ /dev/null @@ -1,56 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -#![deny(unsafe_code)] -#![feature(box_syntax)] -#![feature(conservative_impl_trait)] -#![feature(mpsc_select)] - -extern crate backtrace; -extern crate bluetooth_traits; -extern crate canvas; -extern crate canvas_traits; -extern crate clipboard; -extern crate compositing; -extern crate debugger; -extern crate devtools_traits; -extern crate euclid; -#[cfg(all(not(target_os = "windows"), not(target_os = "ios")))] -extern crate gaol; -extern crate gfx; -extern crate gfx_traits; -extern crate hyper; -extern crate ipc_channel; -extern crate itertools; -extern crate layout_traits; -#[macro_use] -extern crate log; -extern crate metrics; -extern crate msg; -extern crate net; -extern crate net_traits; -extern crate profile_traits; -extern crate script_traits; -#[macro_use] extern crate serde; -extern crate servo_config; -extern crate servo_rand; -extern crate servo_remutex; -extern crate servo_url; -extern crate style_traits; -extern crate webrender_api; -extern crate webvr_traits; - -mod browsingcontext; -mod constellation; -mod event_loop; -mod network_listener; -mod pipeline; -#[cfg(all(not(target_os = "windows"), not(target_os = "ios")))] -mod sandboxing; -mod timer_scheduler; - -pub use constellation::{Constellation, FromCompositorLogger, FromScriptLogger, InitialConstellationState}; -pub use pipeline::UnprivilegedPipelineContent; -#[cfg(all(not(target_os = "windows"), not(target_os = "ios")))] -pub use sandboxing::content_process_sandbox_profile; diff --git a/collector/compile-benchmarks/style-servo/components/constellation/network_listener.rs b/collector/compile-benchmarks/style-servo/components/constellation/network_listener.rs deleted file mode 100644 index 038d304fa..000000000 --- a/collector/compile-benchmarks/style-servo/components/constellation/network_listener.rs +++ /dev/null @@ -1,134 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! The listener that encapsulates all state for an in-progress document request. -//! Any redirects that are encountered are followed. Whenever a non-redirect -//! response is received, it is forwarded to the appropriate script thread. 
- -use hyper::header::Location; -use ipc_channel::ipc; -use ipc_channel::router::ROUTER; -use msg::constellation_msg::PipelineId; -use net::http_loader::{set_default_accept, set_default_accept_language}; -use net_traits::{CoreResourceMsg, FetchMetadata, FetchResponseMsg}; -use net_traits::{IpcSend, NetworkError, ResourceThreads}; -use net_traits::request::{Destination, RequestInit, Type}; -use net_traits::response::ResponseInit; -use std::sync::mpsc::Sender; - -pub struct NetworkListener { - res_init: Option, - req_init: RequestInit, - pipeline_id: PipelineId, - resource_threads: ResourceThreads, - sender: Sender<(PipelineId, FetchResponseMsg)>, - should_send: bool, -} - -impl NetworkListener { - pub fn new(req_init: RequestInit, - pipeline_id: PipelineId, - resource_threads: ResourceThreads, - sender: Sender<(PipelineId, FetchResponseMsg)>) -> NetworkListener { - NetworkListener { - res_init: None, - req_init, - pipeline_id, - resource_threads, - sender, - should_send: false - } - } - - pub fn initiate_fetch(&self) { - let (ipc_sender, ipc_receiver) = ipc::channel().expect("Failed to create IPC channel!"); - - let mut listener = NetworkListener { - res_init: self.res_init.clone(), - req_init: self.req_init.clone(), - resource_threads: self.resource_threads.clone(), - sender: self.sender.clone(), - pipeline_id: self.pipeline_id.clone(), - should_send: false, - }; - - let msg = match self.res_init { - Some(ref res_init_) => CoreResourceMsg::FetchRedirect( - self.req_init.clone(), - res_init_.clone(), - ipc_sender), - None => { - set_default_accept(Type::None, Destination::Document, &mut listener.req_init.headers); - set_default_accept_language(&mut listener.req_init.headers); - - CoreResourceMsg::Fetch( - listener.req_init.clone(), - ipc_sender) - } - }; - - ROUTER.add_route(ipc_receiver.to_opaque(), box move |message| { - let msg = message.to(); - match msg { - Ok(FetchResponseMsg::ProcessResponse(res)) => listener.check_redirect(res), - Ok(msg_) => listener.send(msg_), - Err(e) => warn!("Error while receiving network listener message: {}", e), - }; - }); - - if let Err(e) = self.resource_threads.sender().send(msg) { - warn!("Resource thread unavailable ({})", e); - } - } - - fn check_redirect(&mut self, - message: Result<(FetchMetadata), NetworkError>) { - match message { - Ok(res_metadata) => { - let metadata = match res_metadata { - FetchMetadata::Filtered { ref unsafe_, .. } => unsafe_, - FetchMetadata::Unfiltered(ref m) => m, - }; - - match metadata.headers { - Some(ref headers) if headers.has::() => { - if self.req_init.url_list.is_empty() { - self.req_init.url_list.push(self.req_init.url.clone()); - } - self.req_init.url_list.push(metadata.final_url.clone()); - - self.req_init.referrer_url = metadata.referrer.clone(); - self.req_init.referrer_policy = metadata.referrer_policy; - - self.res_init = Some(ResponseInit { - url: metadata.final_url.clone(), - location_url: metadata.location_url.clone(), - headers: headers.clone().into_inner(), - referrer: metadata.referrer.clone(), - }); - - self.initiate_fetch(); - }, - _ => { - // Response should be processed by script thread. 
- self.should_send = true; - self.send(FetchResponseMsg::ProcessResponse(Ok(res_metadata.clone()))); - } - }; - }, - Err(e) => { - self.should_send = true; - self.send(FetchResponseMsg::ProcessResponse(Err(e))) - } - }; - } - - fn send(&mut self, msg: FetchResponseMsg) { - if self.should_send { - if let Err(e) = self.sender.send((self.pipeline_id, msg)) { - warn!("Failed to forward network message to pipeline {}: {:?}", self.pipeline_id, e); - } - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/constellation/pipeline.rs b/collector/compile-benchmarks/style-servo/components/constellation/pipeline.rs deleted file mode 100644 index 805df28c3..000000000 --- a/collector/compile-benchmarks/style-servo/components/constellation/pipeline.rs +++ /dev/null @@ -1,650 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use bluetooth_traits::BluetoothRequest; -use canvas_traits::webgl::WebGLPipeline; -use compositing::CompositionPipeline; -use compositing::CompositorProxy; -use compositing::compositor_thread::Msg as CompositorMsg; -use devtools_traits::{DevtoolsControlMsg, ScriptToDevtoolsControlMsg}; -use euclid::{TypedSize2D, ScaleFactor}; -use event_loop::EventLoop; -use gfx::font_cache_thread::FontCacheThread; -use ipc_channel::Error; -use ipc_channel::ipc::{self, IpcReceiver, IpcSender}; -use ipc_channel::router::ROUTER; -use layout_traits::LayoutThreadFactory; -use metrics::PaintTimeMetrics; -use msg::constellation_msg::{BrowsingContextId, TopLevelBrowsingContextId, FrameType, PipelineId, PipelineNamespaceId}; -use net::image_cache::ImageCacheImpl; -use net_traits::{IpcSend, ResourceThreads}; -use net_traits::image_cache::ImageCache; -use profile_traits::mem as profile_mem; -use profile_traits::time; -use script_traits::{ConstellationControlMsg, DiscardBrowsingContext, ScriptToConstellationChan}; -use script_traits::{DocumentActivity, InitialScriptState}; -use script_traits::{LayoutControlMsg, LayoutMsg, LoadData, MozBrowserEvent}; -use script_traits::{NewLayoutInfo, SWManagerMsg, SWManagerSenders}; -use script_traits::{ScriptThreadFactory, TimerSchedulerMsg, WindowSizeData}; -use servo_config::opts::{self, Opts}; -use servo_config::prefs::{PREFS, Pref}; -use servo_url::ServoUrl; -use std::collections::HashMap; -#[cfg(not(windows))] -use std::env; -use std::ffi::OsStr; -use std::process; -use std::rc::Rc; -use std::sync::Arc; -use std::sync::mpsc::Sender; -use style_traits::CSSPixel; -use style_traits::DevicePixel; -use webrender_api; -use webvr_traits::WebVRMsg; - -/// A `Pipeline` is the constellation's view of a `Document`. Each pipeline has an -/// event loop (executed by a script thread) and a layout thread. A script thread -/// may be responsible for many pipelines, but a layout thread is only responsible -/// for one. -pub struct Pipeline { - /// The ID of the pipeline. - pub id: PipelineId, - - /// The ID of the browsing context that contains this Pipeline. - pub browsing_context_id: BrowsingContextId, - - /// The ID of the top-level browsing context that contains this Pipeline. - pub top_level_browsing_context_id: TopLevelBrowsingContextId, - - /// The parent pipeline of this one. `None` if this is a root pipeline. - /// Note that because of mozbrowser iframes, even top-level pipelines - /// may have a parent (in which case the frame type will be - /// `MozbrowserIFrame`). 
- /// TODO: move this field to `BrowsingContext`. - pub parent_info: Option<(PipelineId, FrameType)>, - - /// The event loop handling this pipeline. - pub event_loop: Rc, - - /// A channel to layout, for performing reflows and shutdown. - pub layout_chan: IpcSender, - - /// A channel to the compositor. - pub compositor_proxy: CompositorProxy, - - /// The most recently loaded URL in this pipeline. - /// Note that this URL can change, for example if the page navigates - /// to a hash URL. - pub url: ServoUrl, - - /// Whether this pipeline is currently running animations. Pipelines that are running - /// animations cause composites to be continually scheduled. - pub running_animations: bool, - - /// The child browsing contexts of this pipeline (these are iframes in the document). - pub children: Vec, - - /// Whether this pipeline is in private browsing mode. - /// TODO: move this field to `BrowsingContext`. - pub is_private: bool, - - /// Whether this pipeline should be treated as visible for the purposes of scheduling and - /// resource management. - pub visible: bool, -} - -/// Initial setup data needed to construct a pipeline. -/// -/// *DO NOT* add any Senders to this unless you absolutely know what you're doing, or pcwalton will -/// have to rewrite your code. Use IPC senders instead. -pub struct InitialPipelineState { - /// The ID of the pipeline to create. - pub id: PipelineId, - - /// The ID of the browsing context that contains this Pipeline. - pub browsing_context_id: BrowsingContextId, - - /// The ID of the top-level browsing context that contains this Pipeline. - pub top_level_browsing_context_id: TopLevelBrowsingContextId, - - /// The ID of the parent pipeline and frame type, if any. - /// If `None`, this is the root. - pub parent_info: Option<(PipelineId, FrameType)>, - - /// A channel to the associated constellation. - pub script_to_constellation_chan: ScriptToConstellationChan, - - /// A channel for the layout thread to send messages to the constellation. - pub layout_to_constellation_chan: IpcSender, - - /// A channel to schedule timer events. - pub scheduler_chan: IpcSender, - - /// A channel to the compositor. - pub compositor_proxy: CompositorProxy, - - /// A channel to the developer tools, if applicable. - pub devtools_chan: Option>, - - /// A channel to the bluetooth thread. - pub bluetooth_thread: IpcSender, - - /// A channel to the service worker manager thread - pub swmanager_thread: IpcSender, - - /// A channel to the font cache thread. - pub font_cache_thread: FontCacheThread, - - /// Channels to the resource-related threads. - pub resource_threads: ResourceThreads, - - /// A channel to the time profiler thread. - pub time_profiler_chan: time::ProfilerChan, - - /// A channel to the memory profiler thread. - pub mem_profiler_chan: profile_mem::ProfilerChan, - - /// Information about the initial window size. - pub window_size: Option>, - - /// Information about the device pixel ratio. - pub device_pixel_ratio: ScaleFactor, - - /// The event loop to run in, if applicable. - pub event_loop: Option>, - - /// Information about the page to load. - pub load_data: LoadData, - - - /// The ID of the pipeline namespace for this script thread. - pub pipeline_namespace_id: PipelineNamespaceId, - - /// Pipeline visibility to be inherited - pub prev_visibility: Option, - - /// Webrender api. - pub webrender_api_sender: webrender_api::RenderApiSender, - - /// The ID of the document processed by this script thread. 
- pub webrender_document: webrender_api::DocumentId, - - /// Whether this pipeline is considered private. - pub is_private: bool, - - /// A channel to the webgl thread. - pub webgl_chan: WebGLPipeline, - - /// A channel to the webvr thread. - pub webvr_chan: Option>, -} - -impl Pipeline { - /// Starts a layout thread, and possibly a script thread, in - /// a new process if requested. - pub fn spawn(state: InitialPipelineState) -> Result - where LTF: LayoutThreadFactory, - STF: ScriptThreadFactory - { - // Note: we allow channel creation to panic, since recovering from this - // probably requires a general low-memory strategy. - let (pipeline_chan, pipeline_port) = ipc::channel() - .expect("Pipeline main chan"); - - let (layout_content_process_shutdown_chan, layout_content_process_shutdown_port) = - ipc::channel().expect("Pipeline layout content shutdown chan"); - - let device_pixel_ratio = state.device_pixel_ratio; - let window_size = state.window_size.map(|size| { - WindowSizeData { - initial_viewport: size, - device_pixel_ratio: device_pixel_ratio, - } - }); - - let url = state.load_data.url.clone(); - - let script_chan = match state.event_loop { - Some(script_chan) => { - let new_layout_info = NewLayoutInfo { - parent_info: state.parent_info, - new_pipeline_id: state.id, - browsing_context_id: state.browsing_context_id, - top_level_browsing_context_id: state.top_level_browsing_context_id, - load_data: state.load_data, - window_size: window_size, - pipeline_port: pipeline_port, - content_process_shutdown_chan: Some(layout_content_process_shutdown_chan.clone()), - layout_threads: PREFS.get("layout.threads").as_u64().expect("count") as usize, - }; - - if let Err(e) = script_chan.send(ConstellationControlMsg::AttachLayout(new_layout_info)) { - warn!("Sending to script during pipeline creation failed ({})", e); - } - script_chan - } - None => { - let (script_chan, script_port) = ipc::channel().expect("Pipeline script chan"); - - // Route messages coming from content to devtools as appropriate. 
- let script_to_devtools_chan = state.devtools_chan.as_ref().map(|devtools_chan| { - let (script_to_devtools_chan, script_to_devtools_port) = ipc::channel() - .expect("Pipeline script to devtools chan"); - let devtools_chan = (*devtools_chan).clone(); - ROUTER.add_route(script_to_devtools_port.to_opaque(), box move |message| { - match message.to::() { - Err(e) => error!("Cast to ScriptToDevtoolsControlMsg failed ({}).", e), - Ok(message) => if let Err(e) = devtools_chan.send(DevtoolsControlMsg::FromScript(message)) { - warn!("Sending to devtools failed ({})", e) - }, - } - }); - script_to_devtools_chan - }); - - let (script_content_process_shutdown_chan, script_content_process_shutdown_port) = - ipc::channel().expect("Pipeline script content process shutdown chan"); - - let unprivileged_pipeline_content = UnprivilegedPipelineContent { - id: state.id, - browsing_context_id: state.browsing_context_id, - top_level_browsing_context_id: state.top_level_browsing_context_id, - parent_info: state.parent_info, - script_to_constellation_chan: state.script_to_constellation_chan.clone(), - scheduler_chan: state.scheduler_chan, - devtools_chan: script_to_devtools_chan, - bluetooth_thread: state.bluetooth_thread, - swmanager_thread: state.swmanager_thread, - font_cache_thread: state.font_cache_thread, - resource_threads: state.resource_threads, - time_profiler_chan: state.time_profiler_chan, - mem_profiler_chan: state.mem_profiler_chan, - window_size: window_size, - layout_to_constellation_chan: state.layout_to_constellation_chan, - script_chan: script_chan.clone(), - load_data: state.load_data, - script_port: script_port, - opts: (*opts::get()).clone(), - prefs: PREFS.cloned(), - pipeline_port: pipeline_port, - pipeline_namespace_id: state.pipeline_namespace_id, - layout_content_process_shutdown_chan: layout_content_process_shutdown_chan, - layout_content_process_shutdown_port: layout_content_process_shutdown_port, - script_content_process_shutdown_chan: script_content_process_shutdown_chan, - script_content_process_shutdown_port: script_content_process_shutdown_port, - webrender_api_sender: state.webrender_api_sender, - webrender_document: state.webrender_document, - webgl_chan: state.webgl_chan, - webvr_chan: state.webvr_chan, - }; - - // Spawn the child process. - // - // Yes, that's all there is to it! - if opts::multiprocess() { - let _ = unprivileged_pipeline_content.spawn_multiprocess()?; - } else { - unprivileged_pipeline_content.start_all::(false); - } - - EventLoop::new(script_chan) - } - }; - - Ok(Pipeline::new(state.id, - state.browsing_context_id, - state.top_level_browsing_context_id, - state.parent_info, - script_chan, - pipeline_chan, - state.compositor_proxy, - state.is_private, - url, - state.prev_visibility.unwrap_or(true))) - } - - /// Creates a new `Pipeline`, after the script and layout threads have been - /// spawned. 
- pub fn new(id: PipelineId, - browsing_context_id: BrowsingContextId, - top_level_browsing_context_id: TopLevelBrowsingContextId, - parent_info: Option<(PipelineId, FrameType)>, - event_loop: Rc, - layout_chan: IpcSender, - compositor_proxy: CompositorProxy, - is_private: bool, - url: ServoUrl, - visible: bool) - -> Pipeline { - let pipeline = Pipeline { - id: id, - browsing_context_id: browsing_context_id, - top_level_browsing_context_id: top_level_browsing_context_id, - parent_info: parent_info, - event_loop: event_loop, - layout_chan: layout_chan, - compositor_proxy: compositor_proxy, - url: url, - children: vec!(), - running_animations: false, - visible: visible, - is_private: is_private, - }; - - pipeline.notify_visibility(); - - pipeline - } - - /// A normal exit of the pipeline, which waits for the compositor, - /// and delegates layout shutdown to the script thread. - pub fn exit(&self, discard_bc: DiscardBrowsingContext) { - debug!("pipeline {:?} exiting", self.id); - - // The compositor wants to know when pipelines shut down too. - // It may still have messages to process from these other threads - // before they can be safely shut down. - // It's OK for the constellation to block on the compositor, - // since the compositor never blocks on the constellation. - if let Ok((sender, receiver)) = ipc::channel() { - self.compositor_proxy.send(CompositorMsg::PipelineExited(self.id, sender)); - if let Err(e) = receiver.recv() { - warn!("Sending exit message failed ({}).", e); - } - } - - // Script thread handles shutting down layout, and layout handles shutting down the painter. - // For now, if the script thread has failed, we give up on clean shutdown. - let msg = ConstellationControlMsg::ExitPipeline(self.id, discard_bc); - if let Err(e) = self.event_loop.send(msg) { - warn!("Sending script exit message failed ({}).", e); - } - } - - /// A forced exit of the shutdown, which does not wait for the compositor, - /// or for the script thread to shut down layout. - pub fn force_exit(&self, discard_bc: DiscardBrowsingContext) { - let msg = ConstellationControlMsg::ExitPipeline(self.id, discard_bc); - if let Err(e) = self.event_loop.send(msg) { - warn!("Sending script exit message failed ({}).", e); - } - if let Err(e) = self.layout_chan.send(LayoutControlMsg::ExitNow) { - warn!("Sending layout exit message failed ({}).", e); - } - } - - /// Notify this pipeline of its activity. - pub fn set_activity(&self, activity: DocumentActivity) { - let msg = ConstellationControlMsg::SetDocumentActivity(self.id, activity); - if let Err(e) = self.event_loop.send(msg) { - warn!("Sending activity message failed ({}).", e); - } - } - - /// The compositor's view of a pipeline. - pub fn to_sendable(&self) -> CompositionPipeline { - CompositionPipeline { - id: self.id.clone(), - top_level_browsing_context_id: self.top_level_browsing_context_id.clone(), - script_chan: self.event_loop.sender(), - layout_chan: self.layout_chan.clone(), - } - } - - /// Add a new child browsing context. - pub fn add_child(&mut self, browsing_context_id: BrowsingContextId) { - self.children.push(browsing_context_id); - } - - /// Remove a child browsing context. - pub fn remove_child(&mut self, browsing_context_id: BrowsingContextId) { - match self.children.iter().position(|id| *id == browsing_context_id) { - None => return warn!("Pipeline remove child already removed ({:?}).", browsing_context_id), - Some(index) => self.children.remove(index), - }; - } - - /// Send a mozbrowser event to the script thread for this pipeline. 
- /// This will cause an event to be fired on an iframe in the document, - /// or on the `Window` if no frame is given. - pub fn trigger_mozbrowser_event(&self, - child_id: Option, - event: MozBrowserEvent) { - assert!(PREFS.is_mozbrowser_enabled()); - - let event = ConstellationControlMsg::MozBrowserEvent(self.id, - child_id, - event); - if let Err(e) = self.event_loop.send(event) { - warn!("Sending mozbrowser event to script failed ({}).", e); - } - } - - /// Notify the script thread that this pipeline is visible. - fn notify_visibility(&self) { - let script_msg = ConstellationControlMsg::ChangeFrameVisibilityStatus(self.id, self.visible); - let compositor_msg = CompositorMsg::PipelineVisibilityChanged(self.id, self.visible); - let err = self.event_loop.send(script_msg); - if let Err(e) = err { - warn!("Sending visibility change failed ({}).", e); - } - self.compositor_proxy.send(compositor_msg); - } - - /// Change the visibility of this pipeline. - pub fn change_visibility(&mut self, visible: bool) { - if visible == self.visible { - return; - } - self.visible = visible; - self.notify_visibility(); - } - -} - -/// Creating a new pipeline may require creating a new event loop. -/// This is the data used to initialize the event loop. -/// TODO: simplify this, and unify it with `InitialPipelineState` if possible. -#[derive(Deserialize, Serialize)] -pub struct UnprivilegedPipelineContent { - id: PipelineId, - top_level_browsing_context_id: TopLevelBrowsingContextId, - browsing_context_id: BrowsingContextId, - parent_info: Option<(PipelineId, FrameType)>, - script_to_constellation_chan: ScriptToConstellationChan, - layout_to_constellation_chan: IpcSender, - scheduler_chan: IpcSender, - devtools_chan: Option>, - bluetooth_thread: IpcSender, - swmanager_thread: IpcSender, - font_cache_thread: FontCacheThread, - resource_threads: ResourceThreads, - time_profiler_chan: time::ProfilerChan, - mem_profiler_chan: profile_mem::ProfilerChan, - window_size: Option, - script_chan: IpcSender, - load_data: LoadData, - script_port: IpcReceiver, - opts: Opts, - prefs: HashMap, - pipeline_port: IpcReceiver, - pipeline_namespace_id: PipelineNamespaceId, - layout_content_process_shutdown_chan: IpcSender<()>, - layout_content_process_shutdown_port: IpcReceiver<()>, - script_content_process_shutdown_chan: IpcSender<()>, - script_content_process_shutdown_port: IpcReceiver<()>, - webrender_api_sender: webrender_api::RenderApiSender, - webrender_document: webrender_api::DocumentId, - webgl_chan: WebGLPipeline, - webvr_chan: Option>, -} - -impl UnprivilegedPipelineContent { - pub fn start_all(self, wait_for_completion: bool) - where LTF: LayoutThreadFactory, - STF: ScriptThreadFactory - { - let image_cache = Arc::new(ImageCacheImpl::new(self.webrender_api_sender.create_api())); - let paint_time_metrics = PaintTimeMetrics::new(self.id, - self.time_profiler_chan.clone(), - self.layout_to_constellation_chan.clone(), - self.script_chan.clone()); - let layout_pair = STF::create(InitialScriptState { - id: self.id, - browsing_context_id: self.browsing_context_id, - top_level_browsing_context_id: self.top_level_browsing_context_id, - parent_info: self.parent_info, - control_chan: self.script_chan.clone(), - control_port: self.script_port, - script_to_constellation_chan: self.script_to_constellation_chan.clone(), - layout_to_constellation_chan: self.layout_to_constellation_chan.clone(), - scheduler_chan: self.scheduler_chan, - bluetooth_thread: self.bluetooth_thread, - resource_threads: self.resource_threads, - image_cache: 
image_cache.clone(), - time_profiler_chan: self.time_profiler_chan.clone(), - mem_profiler_chan: self.mem_profiler_chan.clone(), - devtools_chan: self.devtools_chan, - window_size: self.window_size, - pipeline_namespace_id: self.pipeline_namespace_id, - content_process_shutdown_chan: self.script_content_process_shutdown_chan, - webgl_chan: self.webgl_chan, - webvr_chan: self.webvr_chan, - }, self.load_data.clone()); - - LTF::create(self.id, - self.top_level_browsing_context_id, - self.load_data.url, - self.parent_info.is_some(), - layout_pair, - self.pipeline_port, - self.layout_to_constellation_chan, - self.script_chan, - image_cache.clone(), - self.font_cache_thread, - self.time_profiler_chan, - self.mem_profiler_chan, - Some(self.layout_content_process_shutdown_chan), - self.webrender_api_sender, - self.webrender_document, - self.prefs.get("layout.threads").expect("exists").value() - .as_u64().expect("count") as usize, - paint_time_metrics); - - if wait_for_completion { - let _ = self.script_content_process_shutdown_port.recv(); - let _ = self.layout_content_process_shutdown_port.recv(); - } - } - - #[cfg(all(not(target_os = "windows"), not(target_os = "ios")))] - pub fn spawn_multiprocess(self) -> Result<(), Error> { - use gaol::sandbox::{self, Sandbox, SandboxMethods}; - use ipc_channel::ipc::IpcOneShotServer; - use sandboxing::content_process_sandbox_profile; - - impl CommandMethods for sandbox::Command { - fn arg(&mut self, arg: T) - where T: AsRef { - self.arg(arg); - } - - fn env(&mut self, key: T, val: U) - where T: AsRef, U: AsRef { - self.env(key, val); - } - } - - // Note that this function can panic, due to process creation, - // avoiding this panic would require a mechanism for dealing - // with low-resource scenarios. - let (server, token) = - IpcOneShotServer::>::new() - .expect("Failed to create IPC one-shot server."); - - // If there is a sandbox, use the `gaol` API to create the child process. 
- if opts::get().sandbox { - let mut command = sandbox::Command::me().expect("Failed to get current sandbox."); - self.setup_common(&mut command, token); - - let profile = content_process_sandbox_profile(); - let _ = Sandbox::new(profile) - .start(&mut command) - .expect("Failed to start sandboxed child process!"); - } else { - let path_to_self = env::current_exe() - .expect("Failed to get current executor."); - let mut child_process = process::Command::new(path_to_self); - self.setup_common(&mut child_process, token); - let _ = child_process.spawn().expect("Failed to start unsandboxed child process!"); - } - - let (_receiver, sender) = server.accept().expect("Server failed to accept."); - sender.send(self)?; - - Ok(()) - } - - #[cfg(any(target_os = "windows", target_os = "ios"))] - pub fn spawn_multiprocess(self) -> Result<(), Error> { - error!("Multiprocess is not supported on Windows or iOS."); - process::exit(1); - } - - #[cfg(not(windows))] - fn setup_common(&self, command: &mut C, token: String) { - C::arg(command, "--content-process"); - C::arg(command, token); - - if let Ok(value) = env::var("RUST_BACKTRACE") { - C::env(command, "RUST_BACKTRACE", value); - } - - if let Ok(value) = env::var("RUST_LOG") { - C::env(command, "RUST_LOG", value); - } - } - - pub fn script_to_constellation_chan(&self) -> &ScriptToConstellationChan { - &self.script_to_constellation_chan - } - - pub fn opts(&self) -> Opts { - self.opts.clone() - } - - pub fn prefs(&self) -> HashMap { - self.prefs.clone() - } - - pub fn swmanager_senders(&self) -> SWManagerSenders { - SWManagerSenders { - swmanager_sender: self.swmanager_thread.clone(), - resource_sender: self.resource_threads.sender() - } - } -} - -/// A trait to unify commands launched as multiprocess with or without a sandbox. -trait CommandMethods { - /// A command line argument. - fn arg(&mut self, arg: T) - where T: AsRef; - - /// An environment variable. - fn env(&mut self, key: T, val: U) - where T: AsRef, U: AsRef; -} - -impl CommandMethods for process::Command { - fn arg(&mut self, arg: T) - where T: AsRef { - self.arg(arg); - } - - fn env(&mut self, key: T, val: U) - where T: AsRef, U: AsRef { - self.env(key, val); - } -} diff --git a/collector/compile-benchmarks/style-servo/components/constellation/sandboxing.rs b/collector/compile-benchmarks/style-servo/components/constellation/sandboxing.rs deleted file mode 100644 index a51fd4b8e..000000000 --- a/collector/compile-benchmarks/style-servo/components/constellation/sandboxing.rs +++ /dev/null @@ -1,42 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use gaol::profile::{Operation, PathPattern, Profile}; -use servo_config::resource_files; -use std::path::PathBuf; - -/// Our content process sandbox profile on Mac. As restrictive as possible. 
-#[cfg(target_os = "macos")] -pub fn content_process_sandbox_profile() -> Profile { - use gaol::platform; - Profile::new(vec![ - Operation::FileReadAll(PathPattern::Literal(PathBuf::from("/dev/urandom"))), - Operation::FileReadAll(PathPattern::Subpath(resource_files::resources_dir_path() - .expect("Cannot find resource dir"))), - Operation::FileReadAll(PathPattern::Subpath(PathBuf::from("/Library/Fonts"))), - Operation::FileReadAll(PathPattern::Subpath(PathBuf::from("/System/Library/Fonts"))), - Operation::FileReadAll(PathPattern::Subpath(PathBuf::from( - "/System/Library/Frameworks/ApplicationServices.framework"))), - Operation::FileReadAll(PathPattern::Subpath(PathBuf::from( - "/System/Library/Frameworks/CoreGraphics.framework"))), - Operation::FileReadMetadata(PathPattern::Literal(PathBuf::from("/"))), - Operation::FileReadMetadata(PathPattern::Literal(PathBuf::from("/Library"))), - Operation::FileReadMetadata(PathPattern::Literal(PathBuf::from("/System"))), - Operation::FileReadMetadata(PathPattern::Literal(PathBuf::from("/etc"))), - Operation::SystemInfoRead, - Operation::PlatformSpecific(platform::macos::Operation::MachLookup( - b"com.apple.FontServer".to_vec())), - ]).expect("Failed to create sandbox profile!") -} - -/// Our content process sandbox profile on Linux. As restrictive as possible. -#[cfg(not(target_os = "macos"))] -pub fn content_process_sandbox_profile() -> Profile { - Profile::new(vec![ - Operation::FileReadAll(PathPattern::Literal(PathBuf::from("/dev/urandom"))), - Operation::FileReadAll(PathPattern::Subpath(resource_files::resources_dir_path() - .expect("Cannot find resource dir"))), - ]).expect("Failed to create sandbox profile!") -} - diff --git a/collector/compile-benchmarks/style-servo/components/constellation/timer_scheduler.rs b/collector/compile-benchmarks/style-servo/components/constellation/timer_scheduler.rs deleted file mode 100644 index 2ed000595..000000000 --- a/collector/compile-benchmarks/style-servo/components/constellation/timer_scheduler.rs +++ /dev/null @@ -1,128 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use ipc_channel::ipc::{self, IpcSender}; -use script_traits::{TimerEvent, TimerEventRequest, TimerSchedulerMsg}; -use std::cmp::{self, Ord}; -use std::collections::BinaryHeap; -use std::sync::mpsc; -use std::sync::mpsc::TryRecvError::{Disconnected, Empty}; -use std::thread; -use std::time::{Duration, Instant}; - -pub struct TimerScheduler; - -struct ScheduledEvent { - request: TimerEventRequest, - for_time: Instant, -} - -impl Ord for ScheduledEvent { - fn cmp(&self, other: &ScheduledEvent) -> cmp::Ordering { - self.for_time.cmp(&other.for_time).reverse() - } -} - -impl PartialOrd for ScheduledEvent { - fn partial_cmp(&self, other: &ScheduledEvent) -> Option { - Some(self.cmp(other)) - } -} - -impl Eq for ScheduledEvent {} -impl PartialEq for ScheduledEvent { - fn eq(&self, other: &ScheduledEvent) -> bool { - self as *const ScheduledEvent == other as *const ScheduledEvent - } -} - -impl TimerScheduler { - pub fn start() -> IpcSender { - let (req_ipc_sender, req_ipc_receiver) = ipc::channel().expect("Channel creation failed."); - let (req_sender, req_receiver) = mpsc::sync_channel(1); - - // We could do this much more directly with recv_timeout - // (https://github.com/rust-lang/rfcs/issues/962). 
- - // util::thread doesn't give us access to the JoinHandle, which we need for park/unpark, - // so we use the builder directly. - let timeout_thread = thread::Builder::new() - .name(String::from("TimerScheduler")) - .spawn(move || { - // We maintain a priority queue of future events, sorted by due time. - let mut scheduled_events = BinaryHeap::::new(); - loop { - let now = Instant::now(); - // Dispatch any events whose due time is past - loop { - match scheduled_events.peek() { - // Dispatch the event if its due time is past - Some(event) if event.for_time <= now => { - let TimerEventRequest(ref sender, source, id, _) = event.request; - let _ = sender.send(TimerEvent(source, id)); - }, - // Otherwise, we're done dispatching events - _ => break, - } - // Remove the event from the priority queue - // (Note this only executes when the first event has been dispatched - scheduled_events.pop(); - } - // Look to see if there are any incoming events - match req_receiver.try_recv() { - // If there is an event, add it to the priority queue - Ok(TimerSchedulerMsg::Request(req)) => { - let TimerEventRequest(_, _, _, delay) = req; - let schedule = Instant::now() + Duration::from_millis(delay.get()); - let event = ScheduledEvent { request: req, for_time: schedule }; - scheduled_events.push(event); - }, - // If there is no incoming event, park the thread, - // it will either be unparked when a new event arrives, - // or by a timeout. - Err(Empty) => match scheduled_events.peek() { - None => thread::park(), - Some(event) => thread::park_timeout(event.for_time - now), - }, - // If the channel is closed or we are shutting down, we are done. - Ok(TimerSchedulerMsg::Exit) | - Err(Disconnected) => break, - } - } - // This thread can terminate if the req_ipc_sender is dropped. - warn!("TimerScheduler thread terminated."); - }) - .expect("Thread creation failed.") - .thread() - .clone(); - - // A proxy that just routes incoming IPC requests over the MPSC channel to the timeout thread, - // and unparks the timeout thread each time. Note that if unpark is called while the timeout - // thread isn't parked, this causes the next call to thread::park by the timeout thread - // not to block. This means that the timeout thread won't park when there is a request - // waiting in the MPSC channel buffer. - thread::Builder::new() - .name(String::from("TimerProxy")) - .spawn(move || { - while let Ok(req) = req_ipc_receiver.recv() { - let mut shutting_down = false; - match req { - TimerSchedulerMsg::Exit => shutting_down = true, - _ => {} - } - let _ = req_sender.send(req); - timeout_thread.unpark(); - if shutting_down { - break; - } - } - // This thread can terminate if the req_ipc_sender is dropped. 
- warn!("TimerProxy thread terminated."); - }) - .expect("Thread creation failed."); - - // Return the IPC sender - req_ipc_sender - } -} diff --git a/collector/compile-benchmarks/style-servo/components/debugger/Cargo.toml b/collector/compile-benchmarks/style-servo/components/debugger/Cargo.toml deleted file mode 100644 index d09814852..000000000 --- a/collector/compile-benchmarks/style-servo/components/debugger/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "debugger" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "debugger" -path = "lib.rs" -crate_type = ["rlib"] - -[dependencies] -log = "0.3.5" -ws = "0.6" diff --git a/collector/compile-benchmarks/style-servo/components/debugger/lib.rs b/collector/compile-benchmarks/style-servo/components/debugger/lib.rs deleted file mode 100644 index ec0e5ba0e..000000000 --- a/collector/compile-benchmarks/style-servo/components/debugger/lib.rs +++ /dev/null @@ -1,68 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -#[macro_use] -extern crate log; -extern crate ws; - -use std::sync::mpsc; -use std::sync::mpsc::channel; -use std::thread; -use ws::{Builder, CloseCode, Handler, Handshake}; - -enum Message { - ShutdownServer, -} - -pub struct Sender(mpsc::Sender); - -struct Connection { - sender: ws::Sender -} - -impl Handler for Connection { - fn on_open(&mut self, _: Handshake) -> ws::Result<()> { - debug!("Connection opened."); - Ok(()) - } - - fn on_close(&mut self, _: CloseCode, _: &str) { - debug!("Connection closed."); - } - - fn on_message(&mut self, message: ws::Message) -> ws::Result<()> { - self.sender.send(message) - } -} - -pub fn start_server(port: u16) -> Sender { - debug!("Starting server."); - let (sender, receiver) = channel(); - thread::Builder::new().name("debugger".to_owned()).spawn(move || { - let socket = Builder::new().build(|sender: ws::Sender| { - Connection { sender: sender } - }).unwrap(); - let sender = socket.broadcaster(); - thread::Builder::new().name("debugger-websocket".to_owned()).spawn(move || { - socket.listen(("127.0.0.1", port)).unwrap(); - }).expect("Thread spawning failed"); - while let Ok(message) = receiver.recv() { - match message { - Message::ShutdownServer => { - break; - } - } - } - sender.shutdown().unwrap(); - }).expect("Thread spawning failed"); - Sender(sender) -} - -pub fn shutdown_server(sender: &Sender) { - debug!("Shutting down server."); - let &Sender(ref sender) = sender; - if let Err(_) = sender.send(Message::ShutdownServer) { - warn!("Failed to shut down server."); - } -} diff --git a/collector/compile-benchmarks/style-servo/components/deny_public_fields/Cargo.toml b/collector/compile-benchmarks/style-servo/components/deny_public_fields/Cargo.toml deleted file mode 100644 index 6b2c567e2..000000000 --- a/collector/compile-benchmarks/style-servo/components/deny_public_fields/Cargo.toml +++ /dev/null @@ -1,14 +0,0 @@ -[package] -name = "deny_public_fields" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -path = "lib.rs" -proc-macro = true - -[dependencies] -syn = "0.11" -synstructure = "0.5" diff --git a/collector/compile-benchmarks/style-servo/components/deny_public_fields/lib.rs b/collector/compile-benchmarks/style-servo/components/deny_public_fields/lib.rs deleted file mode 100644 index 
e9b8b7938..000000000 --- a/collector/compile-benchmarks/style-servo/components/deny_public_fields/lib.rs +++ /dev/null @@ -1,27 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -extern crate proc_macro; -extern crate syn; -extern crate synstructure; - -#[proc_macro_derive(DenyPublicFields)] -pub fn expand_token_stream(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - expand_string(&input.to_string()).parse().unwrap() -} - -fn expand_string(input: &str) -> String { - let type_ = syn::parse_macro_input(input).unwrap(); - - let style = synstructure::BindStyle::Ref.into(); - synstructure::each_field(&type_, &style, |binding| { - if binding.field.vis != syn::Visibility::Inherited { - panic!("Field `{}` should not be public", - binding.field.ident.as_ref().unwrap_or(&binding.ident)); - } - "".to_owned() - }); - - "".to_owned() -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/Cargo.toml b/collector/compile-benchmarks/style-servo/components/devtools/Cargo.toml deleted file mode 100644 index 2ce43e5eb..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/Cargo.toml +++ /dev/null @@ -1,21 +0,0 @@ -[package] -name = "devtools" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "devtools" -path = "lib.rs" - -[dependencies] -devtools_traits = {path = "../devtools_traits"} -hyper = "0.10" -hyper_serde = "0.7" -ipc-channel = "0.8" -log = "0.3.5" -msg = {path = "../msg"} -serde = "1.0" -serde_json = "1.0" -time = "0.1" diff --git a/collector/compile-benchmarks/style-servo/components/devtools/actor.rs b/collector/compile-benchmarks/style-servo/components/devtools/actor.rs deleted file mode 100644 index f2ee6398e..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/actor.rs +++ /dev/null @@ -1,189 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -/// General actor system infrastructure. - -use devtools_traits::PreciseTime; -use serde_json::{Map, Value}; -use std::any::Any; -use std::cell::{Cell, RefCell}; -use std::collections::HashMap; -use std::mem::replace; -use std::net::TcpStream; -use std::sync::{Arc, Mutex}; - -#[derive(PartialEq)] -pub enum ActorMessageStatus { - Processed, - Ignored, -} - -/// A common trait for all devtools actors that encompasses an immutable name -/// and the ability to process messages that are directed to particular actors. -/// TODO: ensure the name is immutable -pub trait Actor: Any + ActorAsAny { - fn handle_message(&self, - registry: &ActorRegistry, - msg_type: &str, - msg: &Map, - stream: &mut TcpStream) -> Result; - fn name(&self) -> String; -} - -pub trait ActorAsAny { - fn actor_as_any(&self) -> &Any; - fn actor_as_any_mut(&mut self) -> &mut Any; -} - -impl ActorAsAny for T { - fn actor_as_any(&self) -> &Any { self } - fn actor_as_any_mut(&mut self) -> &mut Any { self } -} - -/// A list of known, owned actors. -pub struct ActorRegistry { - actors: HashMap>, - new_actors: RefCell>>, - old_actors: RefCell>, - script_actors: RefCell>, - shareable: Option>>, - next: Cell, - start_stamp: PreciseTime, -} - -impl ActorRegistry { - /// Create an empty registry. 
- pub fn new() -> ActorRegistry { - ActorRegistry { - actors: HashMap::new(), - new_actors: RefCell::new(vec!()), - old_actors: RefCell::new(vec!()), - script_actors: RefCell::new(HashMap::new()), - shareable: None, - next: Cell::new(0), - start_stamp: PreciseTime::now(), - } - } - - /// Creating shareable registry - pub fn create_shareable(self) -> Arc> { - if let Some(shareable) = self.shareable { - return shareable; - } - - let shareable = Arc::new(Mutex::new(self)); - { - let mut lock = shareable.lock(); - let registry = lock.as_mut().unwrap(); - registry.shareable = Some(shareable.clone()); - } - shareable - } - - /// Get shareable registry through threads - pub fn shareable(&self) -> Arc> { - self.shareable.as_ref().unwrap().clone() - } - - /// Get start stamp when registry was started - pub fn start_stamp(&self) -> PreciseTime { - self.start_stamp.clone() - } - - pub fn register_script_actor(&self, script_id: String, actor: String) { - debug!("registering {} ({})", actor, script_id); - let mut script_actors = self.script_actors.borrow_mut(); - script_actors.insert(script_id, actor); - } - - pub fn script_to_actor(&self, script_id: String) -> String { - if script_id.is_empty() { - return "".to_owned(); - } - self.script_actors.borrow().get(&script_id).unwrap().clone() - } - - pub fn script_actor_registered(&self, script_id: String) -> bool { - self.script_actors.borrow().contains_key(&script_id) - } - - pub fn actor_to_script(&self, actor: String) -> String { - for (key, value) in &*self.script_actors.borrow() { - debug!("checking {}", value); - if *value == actor { - return key.to_owned(); - } - } - panic!("couldn't find actor named {}", actor) - } - - /// Create a unique name based on a monotonically increasing suffix - pub fn new_name(&self, prefix: &str) -> String { - let suffix = self.next.get(); - self.next.set(suffix + 1); - format!("{}{}", prefix, suffix) - } - - /// Add an actor to the registry of known actors that can receive messages. - pub fn register(&mut self, actor: Box) { - self.actors.insert(actor.name(), actor); - } - - pub fn register_later(&self, actor: Box) { - let mut actors = self.new_actors.borrow_mut(); - actors.push(actor); - } - - /// Find an actor by registered name - pub fn find<'a, T: Any>(&'a self, name: &str) -> &'a T { - let actor = self.actors.get(name).unwrap(); - actor.actor_as_any().downcast_ref::().unwrap() - } - - /// Find an actor by registered name - pub fn find_mut<'a, T: Any>(&'a mut self, name: &str) -> &'a mut T { - let actor = self.actors.get_mut(name).unwrap(); - actor.actor_as_any_mut().downcast_mut::().unwrap() - } - - /// Attempt to process a message as directed by its `to` property. If the actor is not - /// found or does not indicate that it knew how to process the message, ignore the failure. - pub fn handle_message(&mut self, - msg: &Map, - stream: &mut TcpStream) - -> Result<(), ()> { - let to = msg.get("to").unwrap().as_str().unwrap(); - - match self.actors.get(to) { - None => debug!("message received for unknown actor \"{}\"", to), - Some(actor) => { - let msg_type = msg.get("type").unwrap().as_str().unwrap(); - if actor.handle_message(self, msg_type, msg, stream)? 
- != ActorMessageStatus::Processed { - debug!("unexpected message type \"{}\" found for actor \"{}\"", - msg_type, to); - } - } - } - let new_actors = replace(&mut *self.new_actors.borrow_mut(), vec!()); - for actor in new_actors.into_iter() { - self.actors.insert(actor.name().to_owned(), actor); - } - - let old_actors = replace(&mut *self.old_actors.borrow_mut(), vec!()); - for name in old_actors { - self.drop_actor(name); - } - Ok(()) - } - - pub fn drop_actor(&mut self, name: String) { - self.actors.remove(&name); - } - - pub fn drop_actor_later(&self, name: String) { - let mut actors = self.old_actors.borrow_mut(); - actors.push(name); - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/actors/console.rs b/collector/compile-benchmarks/style-servo/components/devtools/actors/console.rs deleted file mode 100644 index c94c98601..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/actors/console.rs +++ /dev/null @@ -1,257 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! Liberally derived from the [Firefox JS implementation] -//! (http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/actors/webconsole.js). -//! Mediates interaction between the remote web console and equivalent functionality (object -//! inspection, JS evaluation, autocompletion) in Servo. - -use actor::{Actor, ActorMessageStatus, ActorRegistry}; -use actors::object::ObjectActor; -use devtools_traits::{CONSOLE_API, CachedConsoleMessageTypes, DevtoolScriptControlMsg, PAGE_ERROR}; -use devtools_traits::CachedConsoleMessage; -use devtools_traits::EvaluateJSReply::{ActorValue, BooleanValue, StringValue}; -use devtools_traits::EvaluateJSReply::{NullValue, NumberValue, VoidValue}; -use ipc_channel::ipc::{self, IpcSender}; -use msg::constellation_msg::PipelineId; -use protocol::JsonPacketStream; -use serde_json::{self, Map, Number, Value}; -use std::cell::RefCell; -use std::net::TcpStream; - -trait EncodableConsoleMessage { - fn encode(&self) -> serde_json::Result; -} - -impl EncodableConsoleMessage for CachedConsoleMessage { - fn encode(&self) -> serde_json::Result { - match *self { - CachedConsoleMessage::PageError(ref a) => serde_json::to_string(a), - CachedConsoleMessage::ConsoleAPI(ref a) => serde_json::to_string(a), - } - } -} - -#[derive(Serialize)] -struct StartedListenersTraits { - customNetworkRequest: bool, -} - -#[derive(Serialize)] -struct StartedListenersReply { - from: String, - nativeConsoleAPI: bool, - startedListeners: Vec, - traits: StartedListenersTraits, -} - -#[derive(Serialize)] -struct GetCachedMessagesReply { - from: String, - messages: Vec>, -} - -#[derive(Serialize)] -struct StopListenersReply { - from: String, - stoppedListeners: Vec, -} - -#[derive(Serialize)] -struct AutocompleteReply { - from: String, - matches: Vec, - matchProp: String, -} - -#[derive(Serialize)] -struct EvaluateJSReply { - from: String, - input: String, - result: Value, - timestamp: u64, - exception: Value, - exceptionMessage: String, - helperResult: Value, -} - -#[derive(Serialize)] -struct SetPreferencesReply { - from: String, - updated: Vec, -} - -pub struct ConsoleActor { - pub name: String, - pub pipeline: PipelineId, - pub script_chan: IpcSender, - pub streams: RefCell>, -} - -impl Actor for ConsoleActor { - fn name(&self) -> String { - self.name.clone() - } - - fn handle_message(&self, - 
registry: &ActorRegistry, - msg_type: &str, - msg: &Map, - stream: &mut TcpStream) -> Result { - Ok(match msg_type { - "getCachedMessages" => { - let str_types = msg.get("messageTypes").unwrap().as_array().unwrap().into_iter().map(|json_type| { - json_type.as_str().unwrap() - }); - let mut message_types = CachedConsoleMessageTypes::empty(); - for str_type in str_types { - match str_type { - "PageError" => message_types.insert(PAGE_ERROR), - "ConsoleAPI" => message_types.insert(CONSOLE_API), - s => debug!("unrecognized message type requested: \"{}\"", s), - }; - }; - let (chan, port) = ipc::channel().unwrap(); - self.script_chan.send(DevtoolScriptControlMsg::GetCachedMessages( - self.pipeline, message_types, chan)).unwrap(); - let messages = port.recv().map_err(|_| ())?.into_iter().map(|message| { - let json_string = message.encode().unwrap(); - let json = serde_json::from_str::(&json_string).unwrap(); - json.as_object().unwrap().to_owned() - }).collect(); - - let msg = GetCachedMessagesReply { - from: self.name(), - messages: messages, - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - "startListeners" => { - //TODO: actually implement listener filters that support starting/stopping - let msg = StartedListenersReply { - from: self.name(), - nativeConsoleAPI: true, - startedListeners: - vec!("PageError".to_owned(), "ConsoleAPI".to_owned()), - traits: StartedListenersTraits { - customNetworkRequest: true, - } - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - "stopListeners" => { - //TODO: actually implement listener filters that support starting/stopping - let msg = StopListenersReply { - from: self.name(), - stoppedListeners: msg.get("listeners") - .unwrap() - .as_array() - .unwrap_or(&vec!()) - .iter() - .map(|listener| listener.as_str().unwrap().to_owned()) - .collect(), - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - //TODO: implement autocompletion like onAutocomplete in - // http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/actors/webconsole.js - "autocomplete" => { - let msg = AutocompleteReply { - from: self.name(), - matches: vec!(), - matchProp: "".to_owned(), - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - "evaluateJS" => { - let input = msg.get("text").unwrap().as_str().unwrap().to_owned(); - let (chan, port) = ipc::channel().unwrap(); - self.script_chan.send(DevtoolScriptControlMsg::EvaluateJS( - self.pipeline, input.clone(), chan)).unwrap(); - - //TODO: extract conversion into protocol module or some other useful place - let result = match port.recv().map_err(|_| ())? { - VoidValue => { - let mut m = Map::new(); - m.insert("type".to_owned(), Value::String("undefined".to_owned())); - Value::Object(m) - } - NullValue => { - let mut m = Map::new(); - m.insert("type".to_owned(), Value::String("null".to_owned())); - Value::Object(m) - } - BooleanValue(val) => Value::Bool(val), - NumberValue(val) => { - if val.is_nan() { - let mut m = Map::new(); - m.insert("type".to_owned(), Value::String("NaN".to_owned())); - Value::Object(m) - } else if val.is_infinite() { - let mut m = Map::new(); - if val < 0. { - m.insert("type".to_owned(), Value::String("-Infinity".to_owned())); - } else { - m.insert("type".to_owned(), Value::String("Infinity".to_owned())); - } - Value::Object(m) - } else if val == 0. 
&& val.is_sign_negative() { - let mut m = Map::new(); - m.insert("type".to_owned(), Value::String("-0".to_owned())); - Value::Object(m) - } else { - Value::Number(Number::from_f64(val).unwrap()) - } - } - StringValue(s) => Value::String(s), - ActorValue { class, uuid } => { - //TODO: make initial ActorValue message include these properties? - let mut m = Map::new(); - let actor = ObjectActor::new(registry, uuid); - - m.insert("type".to_owned(), Value::String("object".to_owned())); - m.insert("class".to_owned(), Value::String(class)); - m.insert("actor".to_owned(), Value::String(actor)); - m.insert("extensible".to_owned(), Value::Bool(true)); - m.insert("frozen".to_owned(), Value::Bool(false)); - m.insert("sealed".to_owned(), Value::Bool(false)); - Value::Object(m) - } - }; - - //TODO: catch and return exception values from JS evaluation - let msg = EvaluateJSReply { - from: self.name(), - input: input, - result: result, - timestamp: 0, - exception: Value::Object(Map::new()), - exceptionMessage: "".to_owned(), - helperResult: Value::Object(Map::new()), - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - "setPreferences" => { - let msg = SetPreferencesReply { - from: self.name(), - updated: vec![], - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - _ => ActorMessageStatus::Ignored - }) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/actors/framerate.rs b/collector/compile-benchmarks/style-servo/components/devtools/actors/framerate.rs deleted file mode 100644 index e5edc70c6..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/actors/framerate.rs +++ /dev/null @@ -1,100 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -use actor::{Actor, ActorMessageStatus, ActorRegistry}; -use actors::timeline::HighResolutionStamp; -use devtools_traits::DevtoolScriptControlMsg; -use ipc_channel::ipc::IpcSender; -use msg::constellation_msg::PipelineId; -use serde_json::{Map, Value}; -use std::mem; -use std::net::TcpStream; -use time::precise_time_ns; - -pub struct FramerateActor { - name: String, - pipeline: PipelineId, - script_sender: IpcSender, - start_time: Option, - is_recording: bool, - ticks: Vec, -} - -impl Actor for FramerateActor { - fn name(&self) -> String { - self.name.clone() - } - - - fn handle_message(&self, - _registry: &ActorRegistry, - _msg_type: &str, - _msg: &Map, - _stream: &mut TcpStream) -> Result { - Ok(ActorMessageStatus::Ignored) - } -} - -impl FramerateActor { - /// return name of actor - pub fn create(registry: &ActorRegistry, - pipeline_id: PipelineId, - script_sender: IpcSender) -> String { - let actor_name = registry.new_name("framerate"); - let mut actor = FramerateActor { - name: actor_name.clone(), - pipeline: pipeline_id, - script_sender: script_sender, - start_time: None, - is_recording: false, - ticks: Vec::new(), - }; - - actor.start_recording(); - registry.register_later(box actor); - actor_name - } - - pub fn add_tick(&mut self, tick: f64) { - self.ticks.push(HighResolutionStamp::wrap(tick)); - - if self.is_recording { - let msg = DevtoolScriptControlMsg::RequestAnimationFrame(self.pipeline, - self.name()); - self.script_sender.send(msg).unwrap(); - } - } - - pub fn take_pending_ticks(&mut self) -> Vec { - mem::replace(&mut self.ticks, Vec::new()) - } - - fn start_recording(&mut self) { - if self.is_recording { - return; - } - - self.start_time = Some(precise_time_ns()); - self.is_recording = true; - - let msg = DevtoolScriptControlMsg::RequestAnimationFrame(self.pipeline, - self.name()); - self.script_sender.send(msg).unwrap(); - } - - fn stop_recording(&mut self) { - if !self.is_recording { - return; - } - self.is_recording = false; - self.start_time = None; - } - -} - -impl Drop for FramerateActor { - fn drop(&mut self) { - self.stop_recording(); - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/actors/inspector.rs b/collector/compile-benchmarks/style-servo/components/devtools/actors/inspector.rs deleted file mode 100644 index 370d90128..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/actors/inspector.rs +++ /dev/null @@ -1,629 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! Liberally derived from the [Firefox JS implementation] -//! (http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/actors/inspector.js). 
- -use actor::{Actor, ActorMessageStatus, ActorRegistry}; -use devtools_traits::{ComputedNodeLayout, DevtoolScriptControlMsg, NodeInfo}; -use devtools_traits::DevtoolScriptControlMsg::{GetChildren, GetDocumentElement, GetRootNode}; -use devtools_traits::DevtoolScriptControlMsg::{GetLayout, ModifyAttribute}; -use ipc_channel::ipc::{self, IpcSender}; -use msg::constellation_msg::PipelineId; -use protocol::JsonPacketStream; -use serde_json::{self, Map, Value}; -use std::cell::RefCell; -use std::net::TcpStream; - -pub struct InspectorActor { - pub name: String, - pub walker: RefCell>, - pub pageStyle: RefCell>, - pub highlighter: RefCell>, - pub script_chan: IpcSender, - pub pipeline: PipelineId, -} - -#[derive(Serialize)] -struct GetHighlighterReply { - highligter: HighlighterMsg, // sic. - from: String, -} - -#[derive(Serialize)] -struct HighlighterMsg { - actor: String, -} - -struct HighlighterActor { - name: String, -} - -pub struct NodeActor { - pub name: String, - script_chan: IpcSender, - pipeline: PipelineId, -} - -#[derive(Serialize)] -struct ShowBoxModelReply { - from: String, -} - -#[derive(Serialize)] -struct HideBoxModelReply { - from: String, -} - -impl Actor for HighlighterActor { - fn name(&self) -> String { - self.name.clone() - } - - fn handle_message(&self, - _registry: &ActorRegistry, - msg_type: &str, - _msg: &Map, - stream: &mut TcpStream) -> Result { - Ok(match msg_type { - "showBoxModel" => { - let msg = ShowBoxModelReply { - from: self.name(), - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - "hideBoxModel" => { - let msg = HideBoxModelReply { - from: self.name(), - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - _ => ActorMessageStatus::Ignored, - }) - } -} - -#[derive(Serialize)] -struct ModifyAttributeReply { - from: String, -} - -impl Actor for NodeActor { - fn name(&self) -> String { - self.name.clone() - } - - fn handle_message(&self, - registry: &ActorRegistry, - msg_type: &str, - msg: &Map, - stream: &mut TcpStream) -> Result { - Ok(match msg_type { - "modifyAttributes" => { - let target = msg.get("to").unwrap().as_str().unwrap(); - let mods = msg.get("modifications").unwrap().as_array().unwrap(); - let modifications = mods.iter().map(|json_mod| { - serde_json::from_str(&serde_json::to_string(json_mod).unwrap()).unwrap() - }).collect(); - - self.script_chan.send(ModifyAttribute(self.pipeline, - registry.actor_to_script(target.to_owned()), - modifications)) - .unwrap(); - let reply = ModifyAttributeReply { - from: self.name(), - }; - stream.write_json_packet(&reply); - ActorMessageStatus::Processed - } - - _ => ActorMessageStatus::Ignored, - }) - } -} - -#[derive(Serialize)] -struct GetWalkerReply { - from: String, - walker: WalkerMsg, -} - -#[derive(Serialize)] -struct WalkerMsg { - actor: String, - root: NodeActorMsg, -} - -#[derive(Serialize)] -struct AttrMsg { - namespace: String, - name: String, - value: String, -} - -#[derive(Serialize)] -struct NodeActorMsg { - actor: String, - baseURI: String, - parent: String, - nodeType: u16, - namespaceURI: String, - nodeName: String, - numChildren: usize, - - name: String, - publicId: String, - systemId: String, - - attrs: Vec, - - pseudoClassLocks: Vec, - - isDisplayed: bool, - - hasEventListeners: bool, - - isDocumentElement: bool, - - shortValue: String, - incompleteValue: bool, -} - -trait NodeInfoToProtocol { - fn encode(self, - actors: &ActorRegistry, - display: bool, - script_chan: IpcSender, - pipeline: PipelineId) -> NodeActorMsg; -} - -impl 
NodeInfoToProtocol for NodeInfo { - fn encode(self, - actors: &ActorRegistry, - display: bool, - script_chan: IpcSender, - pipeline: PipelineId) -> NodeActorMsg { - let actor_name = if !actors.script_actor_registered(self.uniqueId.clone()) { - let name = actors.new_name("node"); - let node_actor = NodeActor { - name: name.clone(), - script_chan: script_chan, - pipeline: pipeline.clone(), - }; - actors.register_script_actor(self.uniqueId, name.clone()); - actors.register_later(box node_actor); - name - } else { - actors.script_to_actor(self.uniqueId) - }; - - NodeActorMsg { - actor: actor_name, - baseURI: self.baseURI, - parent: actors.script_to_actor(self.parent.clone()), - nodeType: self.nodeType, - namespaceURI: self.namespaceURI, - nodeName: self.nodeName, - numChildren: self.numChildren, - - name: self.name, - publicId: self.publicId, - systemId: self.systemId, - - attrs: self.attrs.into_iter().map(|attr| { - AttrMsg { - namespace: attr.namespace, - name: attr.name, - value: attr.value, - } - }).collect(), - - pseudoClassLocks: vec!(), //TODO get this data from script - - isDisplayed: display, - - hasEventListeners: false, //TODO get this data from script - - isDocumentElement: self.isDocumentElement, - - shortValue: self.shortValue, - incompleteValue: self.incompleteValue, - } - } -} - -struct WalkerActor { - name: String, - script_chan: IpcSender, - pipeline: PipelineId, -} - -#[derive(Serialize)] -struct QuerySelectorReply { - from: String, -} - -#[derive(Serialize)] -struct DocumentElementReply { - from: String, - node: NodeActorMsg, -} - -#[derive(Serialize)] -struct ClearPseudoclassesReply { - from: String, -} - -#[derive(Serialize)] -struct ChildrenReply { - hasFirst: bool, - hasLast: bool, - nodes: Vec, - from: String, -} - -impl Actor for WalkerActor { - fn name(&self) -> String { - self.name.clone() - } - - fn handle_message(&self, - registry: &ActorRegistry, - msg_type: &str, - msg: &Map, - stream: &mut TcpStream) -> Result { - Ok(match msg_type { - "querySelector" => { - let msg = QuerySelectorReply { - from: self.name(), - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - "documentElement" => { - let (tx, rx) = ipc::channel().unwrap(); - self.script_chan.send(GetDocumentElement(self.pipeline, tx)).unwrap(); - let doc_elem_info = rx.recv().unwrap().ok_or(())?; - let node = doc_elem_info.encode(registry, true, self.script_chan.clone(), self.pipeline); - - let msg = DocumentElementReply { - from: self.name(), - node: node, - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - "clearPseudoClassLocks" => { - let msg = ClearPseudoclassesReply { - from: self.name(), - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - "children" => { - let target = msg.get("node").unwrap().as_str().unwrap(); - let (tx, rx) = ipc::channel().unwrap(); - self.script_chan.send(GetChildren(self.pipeline, - registry.actor_to_script(target.to_owned()), - tx)) - .unwrap(); - let children = rx.recv().unwrap().ok_or(())?; - - let msg = ChildrenReply { - hasFirst: true, - hasLast: true, - nodes: children.into_iter().map(|child| { - child.encode(registry, true, self.script_chan.clone(), self.pipeline) - }).collect(), - from: self.name(), - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - _ => ActorMessageStatus::Ignored, - }) - } -} - -#[derive(Serialize)] -struct GetPageStyleReply { - from: String, - pageStyle: PageStyleMsg, -} - -#[derive(Serialize)] -struct PageStyleMsg { - actor: String, -} - -struct 
PageStyleActor { - name: String, - script_chan: IpcSender, - pipeline: PipelineId, -} - -#[derive(Serialize)] -struct GetAppliedReply { - entries: Vec, - rules: Vec, - sheets: Vec, - from: String, -} - -#[derive(Serialize)] -struct GetComputedReply { - computed: Vec, //XXX all css props - from: String, -} - -#[derive(Serialize)] -struct AppliedEntry { - rule: String, - pseudoElement: Value, - isSystem: bool, - matchedSelectors: Vec, -} - -#[derive(Serialize)] -struct AppliedRule { - actor: String, - #[serde(rename = "type")] - type_: String, - href: String, - cssText: String, - line: u32, - column: u32, - parentStyleSheet: String, -} - -#[derive(Serialize)] -struct AppliedSheet { - actor: String, - href: String, - nodeHref: String, - disabled: bool, - title: String, - system: bool, - styleSheetIndex: isize, - ruleCount: usize, -} - -#[derive(Serialize)] -struct GetLayoutReply { - from: String, - - display: String, - position: String, - #[serde(rename = "z-index")] - zIndex: String, - #[serde(rename = "box-sizing")] - boxSizing: String, - - // Would be nice to use a proper struct, blocked by - // https://github.com/serde-rs/serde/issues/43 - autoMargins: serde_json::value::Value, - #[serde(rename = "margin-top")] - marginTop: String, - #[serde(rename = "margin-right")] - marginRight: String, - #[serde(rename = "margin-bottom")] - marginBottom: String, - #[serde(rename = "margin-left")] - marginLeft: String, - - #[serde(rename = "border-top-width")] - borderTopWidth: String, - #[serde(rename = "border-right-width")] - borderRightWidth: String, - #[serde(rename = "border-bottom-width")] - borderBottomWidth: String, - #[serde(rename = "border-left-width")] - borderLeftWidth: String, - - #[serde(rename = "padding-top")] - paddingTop: String, - #[serde(rename = "padding-right")] - paddingRight: String, - #[serde(rename = "padding-bottom")] - paddingBottom: String, - #[serde(rename = "padding-left")] - paddingLeft: String, - - width: f32, - height: f32, -} - -impl Actor for PageStyleActor { - fn name(&self) -> String { - self.name.clone() - } - - fn handle_message(&self, - registry: &ActorRegistry, - msg_type: &str, - msg: &Map, - stream: &mut TcpStream) -> Result { - Ok(match msg_type { - "getApplied" => { - //TODO: query script for relevant applied styles to node (msg.node) - let msg = GetAppliedReply { - entries: vec!(), - rules: vec!(), - sheets: vec!(), - from: self.name(), - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - "getComputed" => { - //TODO: query script for relevant computed styles on node (msg.node) - let msg = GetComputedReply { - computed: vec!(), - from: self.name(), - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - //TODO: query script for box layout properties of node (msg.node) - "getLayout" => { - let target = msg.get("node").unwrap().as_str().unwrap(); - let (tx, rx) = ipc::channel().unwrap(); - self.script_chan.send(GetLayout(self.pipeline, - registry.actor_to_script(target.to_owned()), - tx)) - .unwrap(); - let ComputedNodeLayout { - display, position, zIndex, boxSizing, - autoMargins, marginTop, marginRight, marginBottom, marginLeft, - borderTopWidth, borderRightWidth, borderBottomWidth, borderLeftWidth, - paddingTop, paddingRight, paddingBottom, paddingLeft, - width, height, - } = rx.recv().unwrap().ok_or(())?; - - let auto_margins = msg.get("autoMargins") - .and_then(&Value::as_bool).unwrap_or(false); - - // http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/actors/styles.js - let msg = 
GetLayoutReply { - from: self.name(), - display: display, - position: position, - zIndex: zIndex, - boxSizing: boxSizing, - autoMargins: if auto_margins { - let mut m = Map::new(); - let auto = serde_json::value::Value::String("auto".to_owned()); - if autoMargins.top { m.insert("top".to_owned(), auto.clone()); } - if autoMargins.right { m.insert("right".to_owned(), auto.clone()); } - if autoMargins.bottom { m.insert("bottom".to_owned(), auto.clone()); } - if autoMargins.left { m.insert("left".to_owned(), auto.clone()); } - serde_json::value::Value::Object(m) - } else { - serde_json::value::Value::Null - }, - marginTop: marginTop, - marginRight: marginRight, - marginBottom: marginBottom, - marginLeft: marginLeft, - borderTopWidth: borderTopWidth, - borderRightWidth: borderRightWidth, - borderBottomWidth: borderBottomWidth, - borderLeftWidth: borderLeftWidth, - paddingTop: paddingTop, - paddingRight: paddingRight, - paddingBottom: paddingBottom, - paddingLeft: paddingLeft, - width: width, - height: height, - }; - let msg = serde_json::to_string(&msg).unwrap(); - let msg = serde_json::from_str::(&msg).unwrap(); - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - _ => ActorMessageStatus::Ignored, - }) - } -} - -impl Actor for InspectorActor { - fn name(&self) -> String { - self.name.clone() - } - - fn handle_message(&self, - registry: &ActorRegistry, - msg_type: &str, - _msg: &Map, - stream: &mut TcpStream) -> Result { - Ok(match msg_type { - "getWalker" => { - if self.walker.borrow().is_none() { - let walker = WalkerActor { - name: registry.new_name("walker"), - script_chan: self.script_chan.clone(), - pipeline: self.pipeline, - }; - let mut walker_name = self.walker.borrow_mut(); - *walker_name = Some(walker.name()); - registry.register_later(box walker); - } - - let (tx, rx) = ipc::channel().unwrap(); - self.script_chan.send(GetRootNode(self.pipeline, tx)).unwrap(); - let root_info = rx.recv().unwrap().ok_or(())?; - - let node = root_info.encode(registry, false, self.script_chan.clone(), self.pipeline); - - let msg = GetWalkerReply { - from: self.name(), - walker: WalkerMsg { - actor: self.walker.borrow().clone().unwrap(), - root: node, - } - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - "getPageStyle" => { - if self.pageStyle.borrow().is_none() { - let style = PageStyleActor { - name: registry.new_name("pageStyle"), - script_chan: self.script_chan.clone(), - pipeline: self.pipeline, - }; - let mut pageStyle = self.pageStyle.borrow_mut(); - *pageStyle = Some(style.name()); - registry.register_later(box style); - } - - let msg = GetPageStyleReply { - from: self.name(), - pageStyle: PageStyleMsg { - actor: self.pageStyle.borrow().clone().unwrap(), - }, - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - //TODO: this is an old message; try adding highlightable to the root traits instead - // and support getHighlighter instead - //"highlight" => {} - "getHighlighter" => { - if self.highlighter.borrow().is_none() { - let highlighter_actor = HighlighterActor { - name: registry.new_name("highlighter"), - }; - let mut highlighter = self.highlighter.borrow_mut(); - *highlighter = Some(highlighter_actor.name()); - registry.register_later(box highlighter_actor); - } - - let msg = GetHighlighterReply { - from: self.name(), - highligter: HighlighterMsg { - actor: self.highlighter.borrow().clone().unwrap(), - }, - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - _ => ActorMessageStatus::Ignored, - 
}) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/actors/memory.rs b/collector/compile-benchmarks/style-servo/components/devtools/actors/memory.rs deleted file mode 100644 index 25dc2733b..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/actors/memory.rs +++ /dev/null @@ -1,66 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use actor::{Actor, ActorMessageStatus, ActorRegistry}; -use serde_json::{Map, Value}; -use std::net::TcpStream; - -#[derive(Serialize)] -pub struct TimelineMemoryReply { - jsObjectSize: u64, - jsStringSize: u64, - jsOtherSize: u64, - domSize: u64, - styleSize: u64, - otherSize: u64, - totalSize: u64, - jsMilliseconds: f64, - nonJSMilliseconds: f64, -} - -pub struct MemoryActor { - pub name: String, -} - -impl Actor for MemoryActor { - fn name(&self) -> String { - self.name.clone() - } - - fn handle_message(&self, - _registry: &ActorRegistry, - _msg_type: &str, - _msg: &Map, - _stream: &mut TcpStream) -> Result { - Ok(ActorMessageStatus::Ignored) - } -} - -impl MemoryActor { - /// return name of actor - pub fn create(registry: &ActorRegistry) -> String { - let actor_name = registry.new_name("memory"); - let actor = MemoryActor { - name: actor_name.clone() - }; - - registry.register_later(box actor); - actor_name - } - - pub fn measure(&self) -> TimelineMemoryReply { - //TODO: - TimelineMemoryReply { - jsObjectSize: 1, - jsStringSize: 1, - jsOtherSize: 1, - domSize: 1, - styleSize: 1, - otherSize: 1, - totalSize: 1, - jsMilliseconds: 1.1, - nonJSMilliseconds: 1.1, - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/actors/network_event.rs b/collector/compile-benchmarks/style-servo/components/devtools/actors/network_event.rs deleted file mode 100644 index 090697182..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/actors/network_event.rs +++ /dev/null @@ -1,469 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! Liberally derived from the [Firefox JS implementation] -//! (http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/actors/webconsole.js). -//! Handles interaction with the remote web console on network events (HTTP requests, responses) in Servo. 
- -use actor::{Actor, ActorMessageStatus, ActorRegistry}; -use devtools_traits::HttpRequest as DevtoolsHttpRequest; -use devtools_traits::HttpResponse as DevtoolsHttpResponse; -use hyper::header::{ContentType, Cookie}; -use hyper::header::Headers; -use hyper::http::RawStatus; -use hyper::method::Method; -use protocol::JsonPacketStream; -use serde_json::{Map, Value}; -use std::borrow::Cow; -use std::net::TcpStream; -use time; -use time::Tm; - -struct HttpRequest { - url: String, - method: Method, - headers: Headers, - body: Option>, - startedDateTime: Tm, - timeStamp: i64, - connect_time: u64, - send_time: u64, -} - -struct HttpResponse { - headers: Option, - status: Option, - body: Option> -} - -pub struct NetworkEventActor { - pub name: String, - request: HttpRequest, - response: HttpResponse, - is_xhr: bool, -} - -#[derive(Serialize)] -pub struct EventActor { - pub actor: String, - pub url: String, - pub method: String, - pub startedDateTime: String, - pub timeStamp: i64, - pub isXHR: bool, - pub private: bool -} - -#[derive(Serialize)] -pub struct ResponseCookiesMsg { - pub cookies: usize, -} - -#[derive(Serialize)] -pub struct ResponseStartMsg { - pub httpVersion: String, - pub remoteAddress: String, - pub remotePort: u32, - pub status: String, - pub statusText: String, - pub headersSize: usize, - pub discardResponseBody: bool, -} - -#[derive(Serialize)] -pub struct ResponseContentMsg { - pub mimeType: String, - pub contentSize: u32, - pub transferredSize: u32, - pub discardResponseBody: bool, -} - - -#[derive(Serialize)] -pub struct ResponseHeadersMsg { - pub headers: usize, - pub headersSize: usize, -} - - -#[derive(Serialize)] -pub struct RequestCookiesMsg { - pub cookies: usize, -} - -#[derive(Serialize)] -pub struct RequestHeadersMsg { - headers: usize, - headersSize: usize, -} - -#[derive(Serialize)] -struct GetRequestHeadersReply { - from: String, - headers: Vec
, - headerSize: usize, - rawHeaders: String -} - -#[derive(Serialize)] -struct Header { - name: String, - value: String, -} - -#[derive(Serialize)] -struct GetResponseHeadersReply { - from: String, - headers: Vec
, - headerSize: usize, - rawHeaders: String -} - -#[derive(Serialize)] -struct GetResponseContentReply { - from: String, - content: Option>, - contentDiscarded: bool, -} - -#[derive(Serialize)] -struct GetRequestPostDataReply { - from: String, - postData: Option>, - postDataDiscarded: bool -} - -#[derive(Serialize)] -struct GetRequestCookiesReply { - from: String, - cookies: Vec -} - -#[derive(Serialize)] -struct GetResponseCookiesReply { - from: String, - cookies: Vec -} - -#[derive(Serialize)] -struct Timings { - blocked: u32, - dns: u32, - connect: u64, - send: u64, - wait: u32, - receive: u32, -} - -#[derive(Serialize)] -struct GetEventTimingsReply { - from: String, - timings: Timings, - totalTime: u64, -} - -#[derive(Serialize)] -struct SecurityInfo { - state: String, -} - -#[derive(Serialize)] -struct GetSecurityInfoReply { - from: String, - securityInfo: SecurityInfo, -} - -impl Actor for NetworkEventActor { - fn name(&self) -> String { - self.name.clone() - } - - fn handle_message(&self, - _registry: &ActorRegistry, - msg_type: &str, - _msg: &Map, - stream: &mut TcpStream) -> Result { - Ok(match msg_type { - "getRequestHeaders" => { - let mut headers = Vec::new(); - let mut rawHeadersString = "".to_owned(); - let mut headersSize = 0; - for item in self.request.headers.iter() { - let name = item.name(); - let value = item.value_string(); - rawHeadersString = rawHeadersString + name + ":" + &value + "\r\n"; - headersSize += name.len() + value.len(); - headers.push(Header { name: name.to_owned(), value: value.to_owned() }); - } - let msg = GetRequestHeadersReply { - from: self.name(), - headers: headers, - headerSize: headersSize, - rawHeaders: rawHeadersString, - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - "getRequestCookies" => { - let mut cookies = Vec::new(); - if let Some(req_cookies) = self.request.headers.get_raw("Cookie") { - for cookie in &*req_cookies { - if let Ok(cookie_value) = String::from_utf8(cookie.clone()) { - cookies = cookie_value.into_bytes(); - } - } - } - - let msg = GetRequestCookiesReply { - from: self.name(), - cookies: cookies, - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - "getRequestPostData" => { - let msg = GetRequestPostDataReply { - from: self.name(), - postData: self.request.body.clone(), - postDataDiscarded: false, - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - "getResponseHeaders" => { - if let Some(ref response_headers) = self.response.headers { - let mut headers = vec![]; - let mut rawHeadersString = "".to_owned(); - let mut headersSize = 0; - for item in response_headers.iter() { - let name = item.name(); - let value = item.value_string(); - headers.push(Header { - name: name.to_owned(), - value: value.clone(), - }); - headersSize += name.len() + value.len(); - rawHeadersString.push_str(name); - rawHeadersString.push_str(":"); - rawHeadersString.push_str(&value); - rawHeadersString.push_str("\r\n"); - } - let msg = GetResponseHeadersReply { - from: self.name(), - headers: headers, - headerSize: headersSize, - rawHeaders: rawHeadersString, - }; - stream.write_json_packet(&msg); - } - ActorMessageStatus::Processed - } - "getResponseCookies" => { - let mut cookies = Vec::new(); - if let Some(res_cookies) = self.request.headers.get_raw("set-cookie") { - for cookie in &*res_cookies { - if let Ok(cookie_value) = String::from_utf8(cookie.clone()) { - cookies = cookie_value.into_bytes(); - } - } - } - - let msg = GetResponseCookiesReply { - from: self.name(), - 
cookies: cookies, - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - "getResponseContent" => { - let msg = GetResponseContentReply { - from: self.name(), - content: self.response.body.clone(), - contentDiscarded: self.response.body.is_none(), - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - "getEventTimings" => { - // TODO: This is a fake timings msg - let timingsObj = Timings { - blocked: 0, - dns: 0, - connect: self.request.connect_time, - send: self.request.send_time, - wait: 0, - receive: 0, - }; - let total = timingsObj.connect + timingsObj.send; - // TODO: Send the correct values for all these fields. - let msg = GetEventTimingsReply { - from: self.name(), - timings: timingsObj, - totalTime: total, - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - "getSecurityInfo" => { - // TODO: Send the correct values for securityInfo. - let msg = GetSecurityInfoReply { - from: self.name(), - securityInfo: SecurityInfo { - state: "insecure".to_owned() - }, - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - _ => ActorMessageStatus::Ignored - }) - } -} - -impl NetworkEventActor { - pub fn new(name: String) -> NetworkEventActor { - NetworkEventActor { - name: name, - request: HttpRequest { - url: String::new(), - method: Method::Get, - headers: Headers::new(), - body: None, - startedDateTime: time::now(), - timeStamp: time::get_time().sec, - send_time: 0, - connect_time: 0, - }, - response: HttpResponse { - headers: None, - status: None, - body: None, - }, - is_xhr: false, - } - } - - pub fn add_request(&mut self, request: DevtoolsHttpRequest) { - self.request.url = request.url.as_str().to_owned(); - self.request.method = request.method.clone(); - self.request.headers = request.headers.clone(); - self.request.body = request.body; - self.request.startedDateTime = request.startedDateTime; - self.request.timeStamp = request.timeStamp; - self.request.connect_time = request.connect_time; - self.request.send_time = request.send_time; - self.is_xhr = request.is_xhr; - } - - pub fn add_response(&mut self, response: DevtoolsHttpResponse) { - self.response.headers = response.headers.clone(); - self.response.status = response.status.as_ref().map(|&(s, ref st)| { - let status_text = String::from_utf8_lossy(st).into_owned(); - RawStatus(s, Cow::from(status_text)) - }); - self.response.body = response.body.clone(); - } - - pub fn event_actor(&self) -> EventActor { - // TODO: Send the correct values for startedDateTime, isXHR, private - EventActor { - actor: self.name(), - url: self.request.url.clone(), - method: format!("{}", self.request.method), - startedDateTime: format!("{}", self.request.startedDateTime.rfc3339()), - timeStamp: self.request.timeStamp, - isXHR: self.is_xhr, - private: false, - } - } - - pub fn response_start(&self) -> ResponseStartMsg { - // TODO: Send the correct values for all these fields. - let hSizeOption = self.response.headers.as_ref().map(|headers| headers.len()); - let hSize = hSizeOption.unwrap_or(0); - let (status_code, status_message) = self.response.status.as_ref(). - map_or((0, "".to_owned()), |&RawStatus(ref code, ref text)| (*code, text.clone().into_owned())); - // TODO: Send the correct values for remoteAddress and remotePort and http_version. 
- ResponseStartMsg { - httpVersion: "HTTP/1.1".to_owned(), - remoteAddress: "63.245.217.43".to_owned(), - remotePort: 443, - status: status_code.to_string(), - statusText: status_message, - headersSize: hSize, - discardResponseBody: false - } - } - - pub fn response_content(&self) -> ResponseContentMsg { - let mut mString = "".to_owned(); - if let Some(ref headers) = self.response.headers { - mString = match headers.get() { - Some(&ContentType(ref mime)) => mime.to_string(), - None => "".to_owned() - }; - } - // TODO: Set correct values when response's body is sent to the devtools in http_loader. - ResponseContentMsg { - mimeType: mString, - contentSize: 0, - transferredSize: 0, - discardResponseBody: true, - } - } - - pub fn response_cookies(&self) -> ResponseCookiesMsg { - let mut cookies_size = 0; - if let Some(ref headers) = self.response.headers { - cookies_size = match headers.get() { - Some(&Cookie(ref cookie)) => cookie.len(), - None => 0 - }; - } - ResponseCookiesMsg { - cookies: cookies_size, - } - } - - pub fn response_headers(&self) -> ResponseHeadersMsg { - let mut headers_size = 0; - let mut headers_byte_count = 0; - if let Some(ref headers) = self.response.headers { - headers_size = headers.len(); - for item in headers.iter() { - headers_byte_count += item.name().len() + item.value_string().len(); - } - - } - ResponseHeadersMsg { - headers: headers_size, - headersSize: headers_byte_count, - } - } - - pub fn request_headers(&self) -> RequestHeadersMsg { - let size = self.request - .headers - .iter() - .fold(0, |acc, h| acc + h.name().len() + h.value_string().len()); - RequestHeadersMsg { - headers: self.request.headers.len(), - headersSize: size, - } - } - - pub fn request_cookies(&self) -> RequestCookiesMsg { - let cookies_size = match self.request.headers.get() { - Some(&Cookie(ref cookie)) => cookie.len(), - None => 0 - }; - RequestCookiesMsg { - cookies: cookies_size, - } - } - - pub fn total_time(&self) -> u64 { - self.request.connect_time + self.request.send_time - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/actors/object.rs b/collector/compile-benchmarks/style-servo/components/devtools/actors/object.rs deleted file mode 100644 index 81f972a76..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/actors/object.rs +++ /dev/null @@ -1,44 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -use actor::{Actor, ActorMessageStatus, ActorRegistry}; -use serde_json::{Map, Value}; -use std::net::TcpStream; - -pub struct ObjectActor { - pub name: String, - pub uuid: String, -} - -impl Actor for ObjectActor { - fn name(&self) -> String { - self.name.clone() - } - fn handle_message(&self, - _: &ActorRegistry, - _: &str, - _: &Map, - _: &mut TcpStream) -> Result { - Ok(ActorMessageStatus::Ignored) - } -} - -impl ObjectActor { - pub fn new(registry: &ActorRegistry, uuid: String) -> String { - if !registry.script_actor_registered(uuid.clone()) { - let name = registry.new_name("object"); - let actor = ObjectActor { - name: name.clone(), - uuid: uuid.clone(), - }; - - registry.register_script_actor(uuid, name.clone()); - registry.register_later(box actor); - - name - } else { - registry.script_to_actor(uuid) - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/actors/performance.rs b/collector/compile-benchmarks/style-servo/components/devtools/actors/performance.rs deleted file mode 100644 index c246cb26f..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/actors/performance.rs +++ /dev/null @@ -1,118 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use actor::{Actor, ActorMessageStatus, ActorRegistry}; -use protocol::{ActorDescription, JsonPacketStream, Method}; -use serde_json::{Map, Value}; -use std::net::TcpStream; - -pub struct PerformanceActor { - name: String, -} - -#[derive(Serialize)] -struct PerformanceFeatures { - withMarkers: bool, - withMemory: bool, - withTicks: bool, - withAllocations: bool, - withJITOptimizations: bool, -} - -#[derive(Serialize)] -struct PerformanceTraits { - features: PerformanceFeatures, -} - -#[derive(Serialize)] -struct ConnectReply { - from: String, - traits: PerformanceTraits, -} - -#[derive(Serialize)] -struct CanCurrentlyRecordReply { - from: String, - value: SuccessMsg, -} - -#[derive(Serialize)] -struct SuccessMsg { - success: bool, - errors: Vec, -} - -#[derive(Serialize)] -enum Error {} - -impl Actor for PerformanceActor { - fn name(&self) -> String { - self.name.clone() - } - - fn handle_message(&self, - _registry: &ActorRegistry, - msg_type: &str, - _msg: &Map, - stream: &mut TcpStream) -> Result { - Ok(match msg_type { - "connect" => { - let msg = ConnectReply { - from: self.name(), - traits: PerformanceTraits { - features: PerformanceFeatures { - withMarkers: true, - withMemory: true, - withTicks: true, - withAllocations: true, - withJITOptimizations: true, - }, - }, - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - }, - "canCurrentlyRecord" => { - let msg = CanCurrentlyRecordReply { - from: self.name(), - value: SuccessMsg { - success: true, - errors: vec![], - } - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - _ => ActorMessageStatus::Ignored, - }) - } -} - -impl PerformanceActor { - pub fn new(name: String) -> PerformanceActor { - PerformanceActor { - name: name, - } - } - - pub fn description() -> ActorDescription { - ActorDescription { - category: "actor", - typeName: "performance", - methods: vec![ - Method { - name: "canCurrentlyRecord", - request: Value::Object(vec![ - ("type".to_owned(), Value::String("canCurrentlyRecord".to_owned())), - ].into_iter().collect()), - response: Value::Object(vec![ - ("value".to_owned(), Value::Object(vec![ - 
("_retval".to_owned(), Value::String("json".to_owned())), - ].into_iter().collect())), - ].into_iter().collect()), - }, - ], - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/actors/profiler.rs b/collector/compile-benchmarks/style-servo/components/devtools/actors/profiler.rs deleted file mode 100644 index a80e3c001..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/actors/profiler.rs +++ /dev/null @@ -1,33 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use actor::{Actor, ActorMessageStatus, ActorRegistry}; -use serde_json::{Map, Value}; -use std::net::TcpStream; - -pub struct ProfilerActor { - name: String, -} - -impl Actor for ProfilerActor { - fn name(&self) -> String { - self.name.clone() - } - - fn handle_message(&self, - _registry: &ActorRegistry, - _msg_type: &str, - _msg: &Map, - _stream: &mut TcpStream) -> Result { - Ok(ActorMessageStatus::Ignored) - } -} - -impl ProfilerActor { - pub fn new(name: String) -> ProfilerActor { - ProfilerActor { - name: name, - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/actors/root.rs b/collector/compile-benchmarks/style-servo/components/devtools/actors/root.rs deleted file mode 100644 index 20629ea7a..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/actors/root.rs +++ /dev/null @@ -1,125 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -/// Liberally derived from the [Firefox JS implementation] -/// (http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/actors/root.js). -/// Connection point for all new remote devtools interactions, providing lists of know actors -/// that perform more specific actions (tabs, addons, browser chrome, etc.) 
- -use actor::{Actor, ActorMessageStatus, ActorRegistry}; -use actors::performance::PerformanceActor; -use actors::tab::{TabActor, TabActorMsg}; -use protocol::{ActorDescription, JsonPacketStream}; -use serde_json::{Map, Value}; -use std::net::TcpStream; - -#[derive(Serialize)] -struct ActorTraits { - sources: bool, - highlightable: bool, - customHighlighters: bool, - networkMonitor: bool, -} - -#[derive(Serialize)] -struct ListAddonsReply { - from: String, - addons: Vec, -} - -#[derive(Serialize)] -enum AddonMsg {} - -#[derive(Serialize)] -struct ListTabsReply { - from: String, - selected: u32, - tabs: Vec, -} - -#[derive(Serialize)] -pub struct RootActorMsg { - from: String, - applicationType: String, - traits: ActorTraits, -} - -#[derive(Serialize)] -pub struct ProtocolDescriptionReply { - from: String, - types: Types, -} - -#[derive(Serialize)] -pub struct Types { - performance: ActorDescription, -} - -pub struct RootActor { - pub tabs: Vec, -} - -impl Actor for RootActor { - fn name(&self) -> String { - "root".to_owned() - } - - fn handle_message(&self, - registry: &ActorRegistry, - msg_type: &str, - _msg: &Map, - stream: &mut TcpStream) -> Result { - Ok(match msg_type { - "listAddons" => { - let actor = ListAddonsReply { - from: "root".to_owned(), - addons: vec![], - }; - stream.write_json_packet(&actor); - ActorMessageStatus::Processed - } - - //https://wiki.mozilla.org/Remote_Debugging_Protocol#Listing_Browser_Tabs - "listTabs" => { - let actor = ListTabsReply { - from: "root".to_owned(), - selected: 0, - tabs: self.tabs.iter().map(|tab| { - registry.find::(tab).encodable() - }).collect() - }; - stream.write_json_packet(&actor); - ActorMessageStatus::Processed - } - - "protocolDescription" => { - let msg = ProtocolDescriptionReply { - from: self.name(), - types: Types { - performance: PerformanceActor::description(), - }, - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - _ => ActorMessageStatus::Ignored - }) - } -} - -impl RootActor { - pub fn encodable(&self) -> RootActorMsg { - RootActorMsg { - from: "root".to_owned(), - applicationType: "browser".to_owned(), - traits: ActorTraits { - sources: true, - highlightable: true, - customHighlighters: true, - networkMonitor: true - }, - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/actors/tab.rs b/collector/compile-benchmarks/style-servo/components/devtools/actors/tab.rs deleted file mode 100644 index 916c53193..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/actors/tab.rs +++ /dev/null @@ -1,169 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! Liberally derived from the [Firefox JS implementation] -//! (http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/actors/webbrowser.js). -//! Connection point for remote devtools that wish to investigate a particular tab's contents. -//! Supports dynamic attaching and detaching which control notifications of navigation, etc. 
- -use actor::{Actor, ActorMessageStatus, ActorRegistry}; -use actors::console::ConsoleActor; -use devtools_traits::DevtoolScriptControlMsg::{self, WantsLiveNotifications}; -use protocol::JsonPacketStream; -use serde_json::{Map, Value}; -use std::net::TcpStream; - -#[derive(Serialize)] -struct TabTraits; - -#[derive(Serialize)] -struct TabAttachedReply { - from: String, - #[serde(rename = "type")] - type_: String, - threadActor: String, - cacheDisabled: bool, - javascriptEnabled: bool, - traits: TabTraits, -} - -#[derive(Serialize)] -struct TabDetachedReply { - from: String, - #[serde(rename = "type")] - type_: String, -} - -#[derive(Serialize)] -struct ReconfigureReply { - from: String -} - -#[derive(Serialize)] -struct ListFramesReply { - from: String, - frames: Vec, -} - -#[derive(Serialize)] -struct FrameMsg { - id: u32, - url: String, - title: String, - parentID: u32, -} - -#[derive(Serialize)] -pub struct TabActorMsg { - actor: String, - title: String, - url: String, - outerWindowID: u32, - consoleActor: String, - inspectorActor: String, - timelineActor: String, - profilerActor: String, - performanceActor: String, -} - -pub struct TabActor { - pub name: String, - pub title: String, - pub url: String, - pub console: String, - pub inspector: String, - pub timeline: String, - pub profiler: String, - pub performance: String, - pub thread: String, -} - -impl Actor for TabActor { - fn name(&self) -> String { - self.name.clone() - } - - fn handle_message(&self, - registry: &ActorRegistry, - msg_type: &str, - msg: &Map, - stream: &mut TcpStream) -> Result { - Ok(match msg_type { - "reconfigure" => { - if let Some(options) = msg.get("options").and_then(|o| o.as_object()) { - if let Some(val) = options.get("performReload") { - if val.as_bool().unwrap_or(false) { - let console_actor = registry.find::(&self.console); - let _ = console_actor.script_chan.send( - DevtoolScriptControlMsg::Reload(console_actor.pipeline)); - } - } - } - stream.write_json_packet(&ReconfigureReply { from: self.name() }); - ActorMessageStatus::Processed - } - - // https://wiki.mozilla.org/Remote_Debugging_Protocol#Listing_Browser_Tabs - // (see "To attach to a _tabActor_") - "attach" => { - let msg = TabAttachedReply { - from: self.name(), - type_: "tabAttached".to_owned(), - threadActor: self.thread.clone(), - cacheDisabled: false, - javascriptEnabled: true, - traits: TabTraits, - }; - let console_actor = registry.find::(&self.console); - console_actor.streams.borrow_mut().push(stream.try_clone().unwrap()); - stream.write_json_packet(&msg); - console_actor.script_chan.send( - WantsLiveNotifications(console_actor.pipeline, true)).unwrap(); - ActorMessageStatus::Processed - } - - //FIXME: The current implementation won't work for multiple connections. Need to ensure 105 - // that the correct stream is removed. 
- "detach" => { - let msg = TabDetachedReply { - from: self.name(), - type_: "detached".to_owned(), - }; - let console_actor = registry.find::(&self.console); - console_actor.streams.borrow_mut().pop(); - stream.write_json_packet(&msg); - console_actor.script_chan.send( - WantsLiveNotifications(console_actor.pipeline, false)).unwrap(); - ActorMessageStatus::Processed - } - - "listFrames" => { - let msg = ListFramesReply { - from: self.name(), - frames: vec!(), - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - _ => ActorMessageStatus::Ignored - }) - } -} - -impl TabActor { - pub fn encodable(&self) -> TabActorMsg { - TabActorMsg { - actor: self.name(), - title: self.title.clone(), - url: self.url.clone(), - outerWindowID: 0, //FIXME: this should probably be the pipeline id - consoleActor: self.console.clone(), - inspectorActor: self.inspector.clone(), - timelineActor: self.timeline.clone(), - profilerActor: self.profiler.clone(), - performanceActor: self.performance.clone(), - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/actors/thread.rs b/collector/compile-benchmarks/style-servo/components/devtools/actors/thread.rs deleted file mode 100644 index 7be5bc55f..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/actors/thread.rs +++ /dev/null @@ -1,111 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use actor::{Actor, ActorMessageStatus, ActorRegistry}; -use protocol::JsonPacketStream; -use serde_json::{Map, Value}; -use std::net::TcpStream; - -#[derive(Serialize)] -struct ThreadAttachedReply { - from: String, - #[serde(rename = "type")] - type_: String, - actor: String, - poppedFrames: Vec, - why: WhyMsg, -} - -#[derive(Serialize)] -enum PoppedFrameMsg {} - -#[derive(Serialize)] -struct WhyMsg { - #[serde(rename = "type")] - type_: String, -} - -#[derive(Serialize)] -struct ThreadResumedReply { - from: String, - #[serde(rename = "type")] - type_: String, -} - -#[derive(Serialize)] -struct ReconfigureReply { - from: String -} - -#[derive(Serialize)] -struct SourcesReply { - from: String, - sources: Vec, -} - -#[derive(Serialize)] -enum Source {} - -pub struct ThreadActor { - name: String, -} - -impl ThreadActor { - pub fn new(name: String) -> ThreadActor { - ThreadActor { - name: name, - } - } -} - -impl Actor for ThreadActor { - fn name(&self) -> String { - self.name.clone() - } - - fn handle_message(&self, - registry: &ActorRegistry, - msg_type: &str, - _msg: &Map, - stream: &mut TcpStream) -> Result { - Ok(match msg_type { - "attach" => { - let msg = ThreadAttachedReply { - from: self.name(), - type_: "paused".to_owned(), - actor: registry.new_name("pause"), - poppedFrames: vec![], - why: WhyMsg { type_: "attached".to_owned() }, - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - }, - - "resume" => { - let msg = ThreadResumedReply { - from: self.name(), - type_: "resumed".to_owned(), - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - }, - - "reconfigure" => { - stream.write_json_packet(&ReconfigureReply { from: self.name() }); - ActorMessageStatus::Processed - } - - "sources" => { - let msg = SourcesReply { - from: self.name(), - sources: vec![], - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - _ => ActorMessageStatus::Ignored, - }) - } -} diff --git 
a/collector/compile-benchmarks/style-servo/components/devtools/actors/timeline.rs b/collector/compile-benchmarks/style-servo/components/devtools/actors/timeline.rs deleted file mode 100644 index 0ea57ad8d..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/actors/timeline.rs +++ /dev/null @@ -1,326 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use actor::{Actor, ActorMessageStatus, ActorRegistry}; -use actors::framerate::FramerateActor; -use actors::memory::{MemoryActor, TimelineMemoryReply}; -use devtools_traits::{PreciseTime, TimelineMarker, TimelineMarkerType}; -use devtools_traits::DevtoolScriptControlMsg; -use devtools_traits::DevtoolScriptControlMsg::{DropTimelineMarkers, SetTimelineMarkers}; -use ipc_channel::ipc::{self, IpcReceiver, IpcSender}; -use msg::constellation_msg::PipelineId; -use protocol::JsonPacketStream; -use serde::{Serialize, Serializer}; -use serde_json::{Map, Value}; -use std::cell::RefCell; -use std::net::TcpStream; -use std::sync::{Arc, Mutex}; -use std::thread; -use std::time::Duration; - -pub struct TimelineActor { - name: String, - script_sender: IpcSender, - marker_types: Vec, - pipeline: PipelineId, - is_recording: Arc>, - stream: RefCell>, - - framerate_actor: RefCell>, - memory_actor: RefCell>, -} - -struct Emitter { - from: String, - stream: TcpStream, - registry: Arc>, - start_stamp: PreciseTime, - - framerate_actor: Option, - memory_actor: Option, -} - -#[derive(Serialize)] -struct IsRecordingReply { - from: String, - value: bool -} - -#[derive(Serialize)] -struct StartReply { - from: String, - value: HighResolutionStamp, -} - -#[derive(Serialize)] -struct StopReply { - from: String, - value: HighResolutionStamp, -} - -#[derive(Serialize)] -struct TimelineMarkerReply { - name: String, - start: HighResolutionStamp, - end: HighResolutionStamp, - stack: Option>, - endStack: Option>, -} - -#[derive(Serialize)] -struct MarkersEmitterReply { - #[serde(rename = "type")] - type_: String, - markers: Vec, - from: String, - endTime: HighResolutionStamp, -} - -#[derive(Serialize)] -struct MemoryEmitterReply { - #[serde(rename = "type")] - type_: String, - from: String, - delta: HighResolutionStamp, - measurement: TimelineMemoryReply, -} - -#[derive(Serialize)] -struct FramerateEmitterReply { - #[serde(rename = "type")] - type_: String, - from: String, - delta: HighResolutionStamp, - timestamps: Vec, -} - -/// HighResolutionStamp is struct that contains duration in milliseconds -/// with accuracy to microsecond that shows how much time has passed since -/// actor registry inited -/// analog https://w3c.github.io/hr-time/#sec-DOMHighResTimeStamp -pub struct HighResolutionStamp(f64); - -impl HighResolutionStamp { - pub fn new(start_stamp: PreciseTime, time: PreciseTime) -> HighResolutionStamp { - let duration = start_stamp.to(time).num_microseconds() - .expect("Too big duration in microseconds"); - HighResolutionStamp(duration as f64 / 1000 as f64) - } - - pub fn wrap(time: f64) -> HighResolutionStamp { - HighResolutionStamp(time) - } -} - -impl Serialize for HighResolutionStamp { - fn serialize(&self, s: S) -> Result { - self.0.serialize(s) - } -} - -static DEFAULT_TIMELINE_DATA_PULL_TIMEOUT: u64 = 200; //ms - -impl TimelineActor { - pub fn new(name: String, - pipeline: PipelineId, - script_sender: IpcSender) -> TimelineActor { - let marker_types = 
vec!(TimelineMarkerType::Reflow, - TimelineMarkerType::DOMEvent); - - TimelineActor { - name: name, - pipeline: pipeline, - marker_types: marker_types, - script_sender: script_sender, - is_recording: Arc::new(Mutex::new(false)), - stream: RefCell::new(None), - - framerate_actor: RefCell::new(None), - memory_actor: RefCell::new(None), - } - } - - fn pull_timeline_data(&self, receiver: IpcReceiver>, mut emitter: Emitter) { - let is_recording = self.is_recording.clone(); - - if !*is_recording.lock().unwrap() { - return; - } - - thread::Builder::new().name("PullTimelineMarkers".to_owned()).spawn(move || { - loop { - if !*is_recording.lock().unwrap() { - break; - } - - let mut markers = vec![]; - while let Ok(Some(marker)) = receiver.try_recv() { - markers.push(emitter.marker(marker)); - } - emitter.send(markers); - - thread::sleep(Duration::from_millis(DEFAULT_TIMELINE_DATA_PULL_TIMEOUT)); - } - }).expect("Thread spawning failed"); - } -} - -impl Actor for TimelineActor { - fn name(&self) -> String { - self.name.clone() - } - - fn handle_message(&self, - registry: &ActorRegistry, - msg_type: &str, - msg: &Map, - stream: &mut TcpStream) -> Result { - Ok(match msg_type { - "start" => { - **self.is_recording.lock().as_mut().unwrap() = true; - - let (tx, rx) = ipc::channel::>().unwrap(); - self.script_sender.send(SetTimelineMarkers(self.pipeline, - self.marker_types.clone(), - tx)).unwrap(); - - *self.stream.borrow_mut() = stream.try_clone().ok(); - - // init memory actor - if let Some(with_memory) = msg.get("withMemory") { - if let Some(true) = with_memory.as_bool() { - *self.memory_actor.borrow_mut() = Some(MemoryActor::create(registry)); - } - } - - // init framerate actor - if let Some(with_ticks) = msg.get("withTicks") { - if let Some(true) = with_ticks.as_bool() { - let framerate_actor = Some(FramerateActor::create( - registry, - self.pipeline.clone(), - self.script_sender.clone())); - *self.framerate_actor.borrow_mut() = framerate_actor; - } - } - - let emitter = Emitter::new(self.name(), registry.shareable(), - registry.start_stamp(), - stream.try_clone().unwrap(), - self.memory_actor.borrow().clone(), - self.framerate_actor.borrow().clone()); - - self.pull_timeline_data(rx, emitter); - - let msg = StartReply { - from: self.name(), - value: HighResolutionStamp::new(registry.start_stamp(), PreciseTime::now()), - }; - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - "stop" => { - let msg = StopReply { - from: self.name(), - value: HighResolutionStamp::new(registry.start_stamp(), PreciseTime::now()), - }; - - stream.write_json_packet(&msg); - self.script_sender.send(DropTimelineMarkers(self.pipeline, self.marker_types.clone())).unwrap(); - - if let Some(ref actor_name) = *self.framerate_actor.borrow() { - registry.drop_actor_later(actor_name.clone()); - } - - if let Some(ref actor_name) = *self.memory_actor.borrow() { - registry.drop_actor_later(actor_name.clone()); - } - - **self.is_recording.lock().as_mut().unwrap() = false; - self.stream.borrow_mut().take(); - ActorMessageStatus::Processed - } - - "isRecording" => { - let msg = IsRecordingReply { - from: self.name(), - value: self.is_recording.lock().unwrap().clone() - }; - - stream.write_json_packet(&msg); - ActorMessageStatus::Processed - } - - _ => { - ActorMessageStatus::Ignored - } - }) - } -} - -impl Emitter { - pub fn new(name: String, - registry: Arc>, - start_stamp: PreciseTime, - stream: TcpStream, - memory_actor_name: Option, - framerate_actor_name: Option) -> Emitter { - Emitter { - from: name, - stream: 
stream, - registry: registry, - start_stamp: start_stamp, - - framerate_actor: framerate_actor_name, - memory_actor: memory_actor_name, - } - } - - fn marker(&self, payload: TimelineMarker) -> TimelineMarkerReply { - TimelineMarkerReply { - name: payload.name, - start: HighResolutionStamp::new(self.start_stamp, payload.start_time), - end: HighResolutionStamp::new(self.start_stamp, payload.end_time), - stack: payload.start_stack, - endStack: payload.end_stack, - } - } - - fn send(&mut self, markers: Vec) { - let end_time = PreciseTime::now(); - let reply = MarkersEmitterReply { - type_: "markers".to_owned(), - markers: markers, - from: self.from.clone(), - endTime: HighResolutionStamp::new(self.start_stamp, end_time), - }; - self.stream.write_json_packet(&reply); - - if let Some(ref actor_name) = self.framerate_actor { - let mut lock = self.registry.lock(); - let registry = lock.as_mut().unwrap(); - let framerate_actor = registry.find_mut::(actor_name); - let framerateReply = FramerateEmitterReply { - type_: "framerate".to_owned(), - from: framerate_actor.name(), - delta: HighResolutionStamp::new(self.start_stamp, end_time), - timestamps: framerate_actor.take_pending_ticks(), - }; - self.stream.write_json_packet(&framerateReply); - } - - if let Some(ref actor_name) = self.memory_actor { - let registry = self.registry.lock().unwrap(); - let memory_actor = registry.find::(actor_name); - let memoryReply = MemoryEmitterReply { - type_: "memory".to_owned(), - from: memory_actor.name(), - delta: HighResolutionStamp::new(self.start_stamp, end_time), - measurement: memory_actor.measure(), - }; - self.stream.write_json_packet(&memoryReply); - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/actors/worker.rs b/collector/compile-benchmarks/style-servo/components/devtools/actors/worker.rs deleted file mode 100644 index 1beead719..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/actors/worker.rs +++ /dev/null @@ -1,27 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use actor::{Actor, ActorMessageStatus, ActorRegistry}; -use devtools_traits::WorkerId; -use serde_json::{Map, Value}; -use std::net::TcpStream; - -pub struct WorkerActor { - pub name: String, - pub console: String, - pub id: WorkerId, -} - -impl Actor for WorkerActor { - fn name(&self) -> String { - self.name.clone() - } - fn handle_message(&self, - _: &ActorRegistry, - _: &str, - _: &Map, - _: &mut TcpStream) -> Result { - Ok(ActorMessageStatus::Processed) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/lib.rs b/collector/compile-benchmarks/style-servo/components/devtools/lib.rs deleted file mode 100644 index 6a25696ea..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/lib.rs +++ /dev/null @@ -1,542 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! An actor-based remote devtools server implementation. Only tested with -//! nightly Firefox versions at time of writing. Largely based on -//! reverse-engineering of Firefox chrome devtool logs and reading of -//! [code](http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/). 
- -#![crate_name = "devtools"] -#![crate_type = "rlib"] - -#![allow(non_snake_case)] -#![deny(unsafe_code)] -#![feature(box_syntax)] - -extern crate devtools_traits; -extern crate hyper; -extern crate ipc_channel; -#[macro_use] -extern crate log; -extern crate msg; -#[macro_use] extern crate serde; -extern crate serde_json; -extern crate time; - -use actor::{Actor, ActorRegistry}; -use actors::console::ConsoleActor; -use actors::framerate::FramerateActor; -use actors::inspector::InspectorActor; -use actors::network_event::{EventActor, NetworkEventActor, ResponseStartMsg}; -use actors::performance::PerformanceActor; -use actors::profiler::ProfilerActor; -use actors::root::RootActor; -use actors::tab::TabActor; -use actors::thread::ThreadActor; -use actors::timeline::TimelineActor; -use actors::worker::WorkerActor; -use devtools_traits::{ChromeToDevtoolsControlMsg, ConsoleMessage, DevtoolsControlMsg}; -use devtools_traits::{DevtoolScriptControlMsg, DevtoolsPageInfo, LogLevel, NetworkEvent}; -use devtools_traits::{ScriptToDevtoolsControlMsg, WorkerId}; -use ipc_channel::ipc::IpcSender; -use msg::constellation_msg::PipelineId; -use protocol::JsonPacketStream; -use std::borrow::ToOwned; -use std::cell::RefCell; -use std::collections::HashMap; -use std::collections::hash_map::Entry::{Occupied, Vacant}; -use std::net::{Shutdown, TcpListener, TcpStream}; -use std::sync::{Arc, Mutex}; -use std::sync::mpsc::{Receiver, Sender, channel}; -use std::thread; -use time::precise_time_ns; - -mod actor; -/// Corresponds to http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/actors/ -mod actors { - pub mod console; - pub mod framerate; - pub mod inspector; - pub mod memory; - pub mod network_event; - pub mod object; - pub mod performance; - pub mod profiler; - pub mod root; - pub mod tab; - pub mod thread; - pub mod timeline; - pub mod worker; -} -mod protocol; - -#[derive(Serialize)] -struct ConsoleAPICall { - from: String, - #[serde(rename = "type")] - type_: String, - message: ConsoleMsg, -} - -#[derive(Serialize)] -struct ConsoleMsg { - level: String, - timeStamp: u64, - arguments: Vec, - filename: String, - lineNumber: usize, - columnNumber: usize, -} - -#[derive(Serialize)] -struct NetworkEventMsg { - from: String, - #[serde(rename = "type")] - type_: String, - eventActor: EventActor, -} - -#[derive(Serialize)] -struct NetworkEventUpdateMsg { - from: String, - #[serde(rename = "type")] - type_: String, - updateType: String, -} - -#[derive(Serialize)] -struct EventTimingsUpdateMsg { - totalTime: u64, -} - -#[derive(Serialize)] -struct SecurityInfoUpdateMsg { - state: String, -} - -#[derive(Serialize)] -struct ResponseStartUpdateMsg { - from: String, - #[serde(rename = "type")] - type_: String, - updateType: String, - response: ResponseStartMsg, -} - -/// Spin up a devtools server that listens for connections on the specified port. 
-pub fn start_server(port: u16) -> Sender { - let (sender, receiver) = channel(); - { - let sender = sender.clone(); - thread::Builder::new().name("Devtools".to_owned()).spawn(move || { - run_server(sender, receiver, port) - }).expect("Thread spawning failed"); - } - sender -} - -fn run_server(sender: Sender, - receiver: Receiver, - port: u16) { - let listener = TcpListener::bind(&("127.0.0.1", port)).unwrap(); - - let mut registry = ActorRegistry::new(); - - let root = box RootActor { - tabs: vec!(), - }; - - registry.register(root); - registry.find::("root"); - - let actors = registry.create_shareable(); - - let mut accepted_connections: Vec = Vec::new(); - - let mut actor_pipelines: HashMap = HashMap::new(); - let mut actor_requests: HashMap = HashMap::new(); - - let mut actor_workers: HashMap<(PipelineId, WorkerId), String> = HashMap::new(); - - - /// Process the input from a single devtools client until EOF. - fn handle_client(actors: Arc>, mut stream: TcpStream) { - debug!("connection established to {}", stream.peer_addr().unwrap()); - { - let actors = actors.lock().unwrap(); - let msg = actors.find::("root").encodable(); - stream.write_json_packet(&msg); - } - - 'outer: loop { - match stream.read_json_packet() { - Ok(Some(json_packet)) => { - if let Err(()) = actors.lock().unwrap().handle_message(json_packet.as_object().unwrap(), - &mut stream) { - debug!("error: devtools actor stopped responding"); - let _ = stream.shutdown(Shutdown::Both); - break 'outer - } - } - Ok(None) => { - debug!("error: EOF"); - break 'outer - } - Err(err_msg) => { - debug!("error: {}", err_msg); - break 'outer - } - } - } - } - - fn handle_framerate_tick(actors: Arc>, actor_name: String, tick: f64) { - let mut actors = actors.lock().unwrap(); - let framerate_actor = actors.find_mut::(&actor_name); - framerate_actor.add_tick(tick); - } - - // We need separate actor representations for each script global that exists; - // clients can theoretically connect to multiple globals simultaneously. - // TODO: move this into the root or tab modules? 
- fn handle_new_global(actors: Arc>, - ids: (PipelineId, Option), - script_sender: IpcSender, - actor_pipelines: &mut HashMap, - actor_workers: &mut HashMap<(PipelineId, WorkerId), String>, - page_info: DevtoolsPageInfo) { - let mut actors = actors.lock().unwrap(); - - let (pipeline, worker_id) = ids; - - //TODO: move all this actor creation into a constructor method on TabActor - let (tab, console, inspector, timeline, profiler, performance, thread) = { - let console = ConsoleActor { - name: actors.new_name("console"), - script_chan: script_sender.clone(), - pipeline: pipeline, - streams: RefCell::new(Vec::new()), - }; - let inspector = InspectorActor { - name: actors.new_name("inspector"), - walker: RefCell::new(None), - pageStyle: RefCell::new(None), - highlighter: RefCell::new(None), - script_chan: script_sender.clone(), - pipeline: pipeline, - }; - - let timeline = TimelineActor::new(actors.new_name("timeline"), - pipeline, - script_sender); - - let profiler = ProfilerActor::new(actors.new_name("profiler")); - let performance = PerformanceActor::new(actors.new_name("performance")); - - let thread = ThreadActor::new(actors.new_name("context")); - - let DevtoolsPageInfo { title, url } = page_info; - let tab = TabActor { - name: actors.new_name("tab"), - title: String::from(title), - url: url.into_string(), - console: console.name(), - inspector: inspector.name(), - timeline: timeline.name(), - profiler: profiler.name(), - performance: performance.name(), - thread: thread.name(), - }; - - let root = actors.find_mut::("root"); - root.tabs.push(tab.name.clone()); - - (tab, console, inspector, timeline, profiler, performance, thread) - }; - - if let Some(id) = worker_id { - let worker = WorkerActor { - name: actors.new_name("worker"), - console: console.name(), - id: id, - }; - actor_workers.insert((pipeline, id), worker.name.clone()); - actors.register(box worker); - } - - actor_pipelines.insert(pipeline, tab.name.clone()); - actors.register(box tab); - actors.register(box console); - actors.register(box inspector); - actors.register(box timeline); - actors.register(box profiler); - actors.register(box performance); - actors.register(box thread); - } - - fn handle_console_message(actors: Arc>, - id: PipelineId, - worker_id: Option, - console_message: ConsoleMessage, - actor_pipelines: &HashMap, - actor_workers: &HashMap<(PipelineId, WorkerId), String>) { - let console_actor_name = match find_console_actor(actors.clone(), id, worker_id, actor_workers, - actor_pipelines) { - Some(name) => name, - None => return, - }; - let actors = actors.lock().unwrap(); - let console_actor = actors.find::(&console_actor_name); - let msg = ConsoleAPICall { - from: console_actor.name.clone(), - type_: "consoleAPICall".to_owned(), - message: ConsoleMsg { - level: match console_message.logLevel { - LogLevel::Debug => "debug", - LogLevel::Info => "info", - LogLevel::Warn => "warn", - LogLevel::Error => "error", - _ => "log" - }.to_owned(), - timeStamp: precise_time_ns(), - arguments: vec!(console_message.message), - filename: console_message.filename, - lineNumber: console_message.lineNumber, - columnNumber: console_message.columnNumber, - }, - }; - for stream in &mut *console_actor.streams.borrow_mut() { - stream.write_json_packet(&msg); - } - } - - fn find_console_actor(actors: Arc>, - id: PipelineId, - worker_id: Option, - actor_workers: &HashMap<(PipelineId, WorkerId), String>, - actor_pipelines: &HashMap) -> Option { - let actors = actors.lock().unwrap(); - if let Some(worker_id) = worker_id { - let 
actor_name = match (*actor_workers).get(&(id, worker_id)) { - Some(name) => name, - None => return None, - }; - Some(actors.find::(actor_name).console.clone()) - } else { - let actor_name = match (*actor_pipelines).get(&id) { - Some(name) => name, - None => return None, - }; - Some(actors.find::(actor_name).console.clone()) - } - } - - fn handle_network_event(actors: Arc>, - mut connections: Vec, - actor_pipelines: &HashMap, - actor_requests: &mut HashMap, - actor_workers: &HashMap<(PipelineId, WorkerId), String>, - pipeline_id: PipelineId, - request_id: String, - network_event: NetworkEvent) { - let console_actor_name = match find_console_actor(actors.clone(), pipeline_id, None, - actor_workers, actor_pipelines) { - Some(name) => name, - None => return, - }; - let netevent_actor_name = find_network_event_actor(actors.clone(), actor_requests, request_id.clone()); - let mut actors = actors.lock().unwrap(); - let actor = actors.find_mut::(&netevent_actor_name); - - match network_event { - NetworkEvent::HttpRequest(httprequest) => { - //Store the request information in the actor - actor.add_request(httprequest); - - //Send a networkEvent message to the client - let msg = NetworkEventMsg { - from: console_actor_name, - type_: "networkEvent".to_owned(), - eventActor: actor.event_actor(), - }; - for stream in &mut connections { - stream.write_json_packet(&msg); - } - - } - NetworkEvent::HttpResponse(httpresponse) => { - //Store the response information in the actor - actor.add_response(httpresponse); - - let msg = NetworkEventUpdateMsg { - from: netevent_actor_name.clone(), - type_: "networkEventUpdate".to_owned(), - updateType: "requestHeaders".to_owned(), - }; - for stream in &mut connections { - stream.write_merged_json_packet(&msg, &actor.request_headers()); - } - - let msg = NetworkEventUpdateMsg { - from: netevent_actor_name.clone(), - type_: "networkEventUpdate".to_owned(), - updateType: "requestCookies".to_owned(), - }; - for stream in &mut connections { - stream.write_merged_json_packet(&msg, &actor.request_cookies()); - } - - //Send a networkEventUpdate (responseStart) to the client - let msg = ResponseStartUpdateMsg { - from: netevent_actor_name.clone(), - type_: "networkEventUpdate".to_owned(), - updateType: "responseStart".to_owned(), - response: actor.response_start() - }; - - for stream in &mut connections { - stream.write_json_packet(&msg); - } - let msg = NetworkEventUpdateMsg { - from: netevent_actor_name.clone(), - type_: "networkEventUpdate".to_owned(), - updateType: "eventTimings".to_owned(), - }; - let extra = EventTimingsUpdateMsg { - totalTime: actor.total_time(), - }; - for stream in &mut connections { - stream.write_merged_json_packet(&msg, &extra); - } - - let msg = NetworkEventUpdateMsg { - from: netevent_actor_name.clone(), - type_: "networkEventUpdate".to_owned(), - updateType: "securityInfo".to_owned(), - }; - let extra = SecurityInfoUpdateMsg { - state: "insecure".to_owned(), - }; - for stream in &mut connections { - stream.write_merged_json_packet(&msg, &extra); - } - - let msg = NetworkEventUpdateMsg { - from: netevent_actor_name.clone(), - type_: "networkEventUpdate".to_owned(), - updateType: "responseContent".to_owned(), - }; - for stream in &mut connections { - stream.write_merged_json_packet(&msg, &actor.response_content()); - } - - let msg = NetworkEventUpdateMsg { - from: netevent_actor_name.clone(), - type_: "networkEventUpdate".to_owned(), - updateType: "responseCookies".to_owned(), - }; - for stream in &mut connections { - 
stream.write_merged_json_packet(&msg, &actor.response_cookies()); - } - - let msg = NetworkEventUpdateMsg { - from: netevent_actor_name.clone(), - type_: "networkEventUpdate".to_owned(), - updateType: "responseHeaders".to_owned(), - }; - for stream in &mut connections { - stream.write_merged_json_packet(&msg, &actor.response_headers()); - } - } - } - } - - // Find the name of NetworkEventActor corresponding to request_id - // Create a new one if it does not exist, add it to the actor_requests hashmap - fn find_network_event_actor(actors: Arc>, - actor_requests: &mut HashMap, - request_id: String) -> String { - let mut actors = actors.lock().unwrap(); - match (*actor_requests).entry(request_id) { - Occupied(name) => { - //TODO: Delete from map like Firefox does? - name.into_mut().clone() - } - Vacant(entry) => { - let actor_name = actors.new_name("netevent"); - let actor = NetworkEventActor::new(actor_name.clone()); - entry.insert(actor_name.clone()); - actors.register(box actor); - actor_name - } - } - } - - let sender_clone = sender.clone(); - thread::Builder::new().name("DevtoolsClientAcceptor".to_owned()).spawn(move || { - // accept connections and process them, spawning a new thread for each one - for stream in listener.incoming() { - // connection succeeded - sender_clone.send(DevtoolsControlMsg::FromChrome( - ChromeToDevtoolsControlMsg::AddClient(stream.unwrap()))).unwrap(); - } - }).expect("Thread spawning failed"); - - while let Ok(msg) = receiver.recv() { - match msg { - DevtoolsControlMsg::FromChrome(ChromeToDevtoolsControlMsg::AddClient(stream)) => { - let actors = actors.clone(); - accepted_connections.push(stream.try_clone().unwrap()); - thread::Builder::new().name("DevtoolsClientHandler".to_owned()).spawn(move || { - handle_client(actors, stream.try_clone().unwrap()) - }).expect("Thread spawning failed"); - } - DevtoolsControlMsg::FromScript(ScriptToDevtoolsControlMsg::FramerateTick( - actor_name, tick)) => - handle_framerate_tick(actors.clone(), actor_name, tick), - DevtoolsControlMsg::FromScript(ScriptToDevtoolsControlMsg::NewGlobal( - ids, script_sender, pageinfo)) => - handle_new_global(actors.clone(), ids, script_sender, &mut actor_pipelines, - &mut actor_workers, pageinfo), - DevtoolsControlMsg::FromScript(ScriptToDevtoolsControlMsg::ConsoleAPI( - id, - console_message, - worker_id)) => - handle_console_message(actors.clone(), id, worker_id, console_message, - &actor_pipelines, &actor_workers), - DevtoolsControlMsg::FromScript(ScriptToDevtoolsControlMsg::ReportCSSError( - id, - css_error)) => { - let console_message = ConsoleMessage { - message: css_error.msg, - logLevel: LogLevel::Warn, - filename: css_error.filename, - lineNumber: css_error.line as usize, - columnNumber: css_error.column as usize, - }; - handle_console_message(actors.clone(), id, None, console_message, - &actor_pipelines, &actor_workers) - }, - DevtoolsControlMsg::FromChrome(ChromeToDevtoolsControlMsg::NetworkEvent( - request_id, network_event)) => { - // copy the accepted_connections vector - let mut connections = Vec::::new(); - for stream in &accepted_connections { - connections.push(stream.try_clone().unwrap()); - } - - let pipeline_id = match network_event { - NetworkEvent::HttpResponse(ref response) => response.pipeline_id, - NetworkEvent::HttpRequest(ref request) => request.pipeline_id, - }; - handle_network_event(actors.clone(), connections, &actor_pipelines, &mut actor_requests, - &actor_workers, pipeline_id, request_id, network_event); - }, - 
DevtoolsControlMsg::FromChrome(ChromeToDevtoolsControlMsg::ServerExitMsg) => break - } - } - for connection in &mut accepted_connections { - let _ = connection.shutdown(Shutdown::Both); - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools/protocol.rs b/collector/compile-benchmarks/style-servo/components/devtools/protocol.rs deleted file mode 100644 index f0bfce4b3..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools/protocol.rs +++ /dev/null @@ -1,89 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! Low-level wire protocol implementation. Currently only supports -//! [JSON packets] -//! (https://wiki.mozilla.org/Remote_Debugging_Protocol_Stream_Transport#JSON_Packets). - -use serde::Serialize; -use serde_json::{self, Value}; -use std::error::Error; -use std::io::{Read, Write}; -use std::net::TcpStream; - -#[derive(Serialize)] -pub struct ActorDescription { - pub category: &'static str, - pub typeName: &'static str, - pub methods: Vec, -} - -#[derive(Serialize)] -pub struct Method { - pub name: &'static str, - pub request: Value, - pub response: Value, -} - -pub trait JsonPacketStream { - fn write_json_packet(&mut self, obj: &T); - fn write_merged_json_packet(&mut self, base: &T, extra: &U); - fn read_json_packet(&mut self) -> Result, String>; -} - -impl JsonPacketStream for TcpStream { - fn write_json_packet(&mut self, obj: &T) { - let s = serde_json::to_string(obj).unwrap(); - debug!("<- {}", s); - write!(self, "{}:{}", s.len(), s).unwrap(); - } - - fn write_merged_json_packet(&mut self, base: &T, extra: &U) { - let mut obj = serde_json::to_value(base).unwrap(); - let obj = obj.as_object_mut().unwrap(); - let extra = serde_json::to_value(extra).unwrap(); - let extra = extra.as_object().unwrap(); - - for (key, value) in extra { - obj.insert(key.to_owned(), value.to_owned()); - } - - self.write_json_packet(obj); - } - - fn read_json_packet(&mut self) -> Result, String> { - // https://wiki.mozilla.org/Remote_Debugging_Protocol_Stream_Transport - // In short, each JSON packet is [ascii length]:[JSON data of given length] - let mut buffer = vec!(); - loop { - let mut buf = [0]; - let byte = match self.read(&mut buf) { - Ok(0) => return Ok(None), // EOF - Ok(1) => buf[0], - Ok(_) => unreachable!(), - Err(e) => return Err(e.description().to_owned()), - }; - match byte { - b':' => { - let packet_len_str = match String::from_utf8(buffer) { - Ok(packet_len) => packet_len, - Err(_) => return Err("nonvalid UTF8 in packet length".to_owned()), - }; - let packet_len = match u64::from_str_radix(&packet_len_str, 10) { - Ok(packet_len) => packet_len, - Err(_) => return Err("packet length missing / not parsable".to_owned()), - }; - let mut packet = String::new(); - self.take(packet_len).read_to_string(&mut packet).unwrap(); - debug!("{}", packet); - return match serde_json::from_str(&packet) { - Ok(json) => Ok(Some(json)), - Err(err) => Err(err.description().to_owned()), - }; - }, - c => buffer.push(c), - } - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/devtools_traits/Cargo.toml b/collector/compile-benchmarks/style-servo/components/devtools_traits/Cargo.toml deleted file mode 100644 index fe0cc20c6..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools_traits/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -name = 
"devtools_traits" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "devtools_traits" -path = "lib.rs" - -[dependencies] -bitflags = "0.7" -heapsize = "0.4" -heapsize_derive = "0.1" -hyper = "0.10" -hyper_serde = "0.7" -ipc-channel = "0.8" -msg = {path = "../msg"} -serde = "1.0" -servo_url = {path = "../url"} -time = "0.1" diff --git a/collector/compile-benchmarks/style-servo/components/devtools_traits/lib.rs b/collector/compile-benchmarks/style-servo/components/devtools_traits/lib.rs deleted file mode 100644 index d703b27e6..000000000 --- a/collector/compile-benchmarks/style-servo/components/devtools_traits/lib.rs +++ /dev/null @@ -1,359 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! This module contains shared types and messages for use by devtools/script. -//! The traits are here instead of in script so that the devtools crate can be -//! modified independently of the rest of Servo. - -#![crate_name = "devtools_traits"] -#![crate_type = "rlib"] - -#![allow(non_snake_case)] -#![deny(unsafe_code)] - -#[macro_use] -extern crate bitflags; -extern crate heapsize; -#[macro_use] extern crate heapsize_derive; -extern crate hyper; -extern crate ipc_channel; -extern crate msg; -#[macro_use] extern crate serde; -extern crate servo_url; -extern crate time; - -use hyper::header::Headers; -use hyper::method::Method; -use ipc_channel::ipc::IpcSender; -use msg::constellation_msg::PipelineId; -use servo_url::ServoUrl; -use std::net::TcpStream; -use time::Duration; -use time::Tm; - -// Information would be attached to NewGlobal to be received and show in devtools. -// Extend these fields if we need more information. -#[derive(Debug, Deserialize, Serialize)] -pub struct DevtoolsPageInfo { - pub title: String, - pub url: ServoUrl, -} - -#[derive(Clone, Debug, Deserialize, HeapSizeOf, Serialize)] -pub struct CSSError { - pub filename: String, - pub line: u32, - pub column: u32, - pub msg: String -} - -/// Messages to instruct the devtools server to update its known actors/state -/// according to changes in the browser. -#[derive(Debug)] -pub enum DevtoolsControlMsg { - /// Messages from threads in the chrome process (resource/constellation/devtools) - FromChrome(ChromeToDevtoolsControlMsg), - /// Messages from script threads - FromScript(ScriptToDevtoolsControlMsg), -} - -/// Events that the devtools server must act upon. -#[derive(Debug)] -pub enum ChromeToDevtoolsControlMsg { - /// A new client has connected to the server. - AddClient(TcpStream), - /// The browser is shutting down. - ServerExitMsg, - /// A network event occurred (request, reply, etc.). The actor with the - /// provided name should be notified. - NetworkEvent(String, NetworkEvent), -} - -#[derive(Debug, Deserialize, Serialize)] -/// Events that the devtools server must act upon. -pub enum ScriptToDevtoolsControlMsg { - /// A new global object was created, associated with a particular pipeline. - /// The means of communicating directly with it are provided. - NewGlobal((PipelineId, Option), - IpcSender, - DevtoolsPageInfo), - /// A particular page has invoked the console API. - ConsoleAPI(PipelineId, ConsoleMessage, Option), - /// An animation frame with the given timestamp was processed in a script thread. - /// The actor with the provided name should be notified. 
- FramerateTick(String, f64), - - /// Report a CSS parse error for the given pipeline - ReportCSSError(PipelineId, CSSError), -} - -/// Serialized JS return values -/// TODO: generalize this beyond the EvaluateJS message? -#[derive(Debug, Deserialize, Serialize)] -pub enum EvaluateJSReply { - VoidValue, - NullValue, - BooleanValue(bool), - NumberValue(f64), - StringValue(String), - ActorValue { class: String, uuid: String }, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct AttrInfo { - pub namespace: String, - pub name: String, - pub value: String, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct NodeInfo { - pub uniqueId: String, - pub baseURI: String, - pub parent: String, - pub nodeType: u16, - pub namespaceURI: String, - pub nodeName: String, - pub numChildren: usize, - - pub name: String, - pub publicId: String, - pub systemId: String, - - pub attrs: Vec, - - pub isDocumentElement: bool, - - pub shortValue: String, - pub incompleteValue: bool, -} - -pub struct StartedTimelineMarker { - name: String, - start_time: PreciseTime, - start_stack: Option>, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct TimelineMarker { - pub name: String, - pub start_time: PreciseTime, - pub start_stack: Option>, - pub end_time: PreciseTime, - pub end_stack: Option>, -} - -#[derive(Clone, Debug, Deserialize, Eq, Hash, HeapSizeOf, PartialEq, Serialize)] -pub enum TimelineMarkerType { - Reflow, - DOMEvent, -} - -/// The properties of a DOM node as computed by layout. -#[derive(Debug, Deserialize, Serialize)] -pub struct ComputedNodeLayout { - pub display: String, - pub position: String, - pub zIndex: String, - pub boxSizing: String, - - pub autoMargins: AutoMargins, - pub marginTop: String, - pub marginRight: String, - pub marginBottom: String, - pub marginLeft: String, - - pub borderTopWidth: String, - pub borderRightWidth: String, - pub borderBottomWidth: String, - pub borderLeftWidth: String, - - pub paddingTop: String, - pub paddingRight: String, - pub paddingBottom: String, - pub paddingLeft: String, - - pub width: f32, - pub height: f32, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct AutoMargins { - pub top: bool, - pub right: bool, - pub bottom: bool, - pub left: bool, -} - -/// Messages to process in a particular script thread, as instructed by a devtools client. -/// TODO: better error handling, e.g. if pipeline id lookup fails? -#[derive(Debug, Deserialize, Serialize)] -pub enum DevtoolScriptControlMsg { - /// Evaluate a JS snippet in the context of the global for the given pipeline. - EvaluateJS(PipelineId, String, IpcSender), - /// Retrieve the details of the root node (ie. the document) for the given pipeline. - GetRootNode(PipelineId, IpcSender>), - /// Retrieve the details of the document element for the given pipeline. - GetDocumentElement(PipelineId, IpcSender>), - /// Retrieve the details of the child nodes of the given node in the given pipeline. - GetChildren(PipelineId, String, IpcSender>>), - /// Retrieve the computed layout properties of the given node in the given pipeline. - GetLayout(PipelineId, String, IpcSender>), - /// Retrieve all stored console messages for the given pipeline. - GetCachedMessages(PipelineId, CachedConsoleMessageTypes, IpcSender>), - /// Update a given node's attributes with a list of modifications. - ModifyAttribute(PipelineId, String, Vec), - /// Request live console messages for a given pipeline (true if desired, false otherwise). 
- WantsLiveNotifications(PipelineId, bool), - /// Request live notifications for a given set of timeline events for a given pipeline. - SetTimelineMarkers(PipelineId, Vec, IpcSender>), - /// Withdraw request for live timeline notifications for a given pipeline. - DropTimelineMarkers(PipelineId, Vec), - /// Request a callback directed at the given actor name from the next animation frame - /// executed in the given pipeline. - RequestAnimationFrame(PipelineId, String), - /// Direct the given pipeline to reload the current page. - Reload(PipelineId), -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct Modification { - pub attributeName: String, - pub newValue: Option, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub enum LogLevel { - Log, - Debug, - Info, - Warn, - Error, -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct ConsoleMessage { - pub message: String, - pub logLevel: LogLevel, - pub filename: String, - pub lineNumber: usize, - pub columnNumber: usize, -} - -bitflags! { - #[derive(Deserialize, Serialize)] - pub flags CachedConsoleMessageTypes: u8 { - const PAGE_ERROR = 1 << 0, - const CONSOLE_API = 1 << 1, - } -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct PageError { - #[serde(rename = "_type")] - pub type_: String, - pub errorMessage: String, - pub sourceName: String, - pub lineText: String, - pub lineNumber: u32, - pub columnNumber: u32, - pub category: String, - pub timeStamp: u64, - pub error: bool, - pub warning: bool, - pub exception: bool, - pub strict: bool, - pub private: bool, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct ConsoleAPI { - #[serde(rename = "_type")] - pub type_: String, - pub level: String, - pub filename: String, - pub lineNumber: u32, - pub functionName: String, - pub timeStamp: u64, - pub private: bool, - pub arguments: Vec, -} - -#[derive(Debug, Deserialize, Serialize)] -pub enum CachedConsoleMessage { - PageError(PageError), - ConsoleAPI(ConsoleAPI), -} - -#[derive(Debug, PartialEq)] -pub struct HttpRequest { - pub url: ServoUrl, - pub method: Method, - pub headers: Headers, - pub body: Option>, - pub pipeline_id: PipelineId, - pub startedDateTime: Tm, - pub timeStamp: i64, - pub connect_time: u64, - pub send_time: u64, - pub is_xhr: bool, -} - -#[derive(Debug, PartialEq)] -pub struct HttpResponse { - pub headers: Option, - pub status: Option<(u16, Vec)>, - pub body: Option>, - pub pipeline_id: PipelineId, -} - -#[derive(Debug)] -pub enum NetworkEvent { - HttpRequest(HttpRequest), - HttpResponse(HttpResponse), -} - -impl TimelineMarker { - pub fn start(name: String) -> StartedTimelineMarker { - StartedTimelineMarker { - name: name, - start_time: PreciseTime::now(), - start_stack: None, - } - } -} - -impl StartedTimelineMarker { - pub fn end(self) -> TimelineMarker { - TimelineMarker { - name: self.name, - start_time: self.start_time, - start_stack: self.start_stack, - end_time: PreciseTime::now(), - end_stack: None, - } - } -} - -/// A replacement for `time::PreciseTime` that isn't opaque, so we can serialize it. -/// -/// The reason why this doesn't go upstream is that `time` is slated to be part of Rust's standard -/// library, which definitely can't have any dependencies on `serde`. But `serde` can't implement -/// `Deserialize` and `Serialize` itself, because `time::PreciseTime` is opaque! A Catch-22. So I'm -/// duplicating the definition here. 
-#[derive(Clone, Copy, Debug, Deserialize, Serialize)] -pub struct PreciseTime(u64); - -impl PreciseTime { - pub fn now() -> PreciseTime { - PreciseTime(time::precise_time_ns()) - } - - pub fn to(&self, later: PreciseTime) -> Duration { - Duration::nanoseconds((later.0 - self.0) as i64) - } -} - -#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, HeapSizeOf, PartialEq, Serialize)] -pub struct WorkerId(pub u32); diff --git a/collector/compile-benchmarks/style-servo/components/dom_struct/Cargo.toml b/collector/compile-benchmarks/style-servo/components/dom_struct/Cargo.toml deleted file mode 100644 index 7a76375ac..000000000 --- a/collector/compile-benchmarks/style-servo/components/dom_struct/Cargo.toml +++ /dev/null @@ -1,10 +0,0 @@ -[package] -name = "dom_struct" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -path = "lib.rs" -proc-macro = true diff --git a/collector/compile-benchmarks/style-servo/components/dom_struct/lib.rs b/collector/compile-benchmarks/style-servo/components/dom_struct/lib.rs deleted file mode 100644 index 33801de8c..000000000 --- a/collector/compile-benchmarks/style-servo/components/dom_struct/lib.rs +++ /dev/null @@ -1,23 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -#![feature(proc_macro)] - -extern crate proc_macro; - -use proc_macro::{TokenStream, quote}; -use std::iter; - -#[proc_macro_attribute] -pub fn dom_struct(args: TokenStream, input: TokenStream) -> TokenStream { - if !args.is_empty() { - panic!("#[dom_struct] takes no arguments"); - } - let attributes = quote! { - #[derive(DenyPublicFields, DomObject, HeapSizeOf, JSTraceable)] - #[must_root] - #[repr(C)] - }; - iter::once(attributes).chain(iter::once(input)).collect() -} diff --git a/collector/compile-benchmarks/style-servo/components/domobject_derive/Cargo.toml b/collector/compile-benchmarks/style-servo/components/domobject_derive/Cargo.toml deleted file mode 100644 index 5b9f8f4aa..000000000 --- a/collector/compile-benchmarks/style-servo/components/domobject_derive/Cargo.toml +++ /dev/null @@ -1,14 +0,0 @@ -[package] -name = "domobject_derive" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -path = "lib.rs" -proc-macro = true - -[dependencies] -syn = "0.11" -quote = "0.3.15" diff --git a/collector/compile-benchmarks/style-servo/components/domobject_derive/lib.rs b/collector/compile-benchmarks/style-servo/components/domobject_derive/lib.rs deleted file mode 100644 index c718b514f..000000000 --- a/collector/compile-benchmarks/style-servo/components/domobject_derive/lib.rs +++ /dev/null @@ -1,104 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -extern crate proc_macro; -#[macro_use] extern crate quote; -extern crate syn; - -#[proc_macro_derive(DomObject)] -pub fn expand_token_stream(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - expand_string(&input.to_string()).parse().unwrap() -} - -fn expand_string(input: &str) -> String { - let type_ = syn::parse_macro_input(input).unwrap(); - - let fields = if let syn::Body::Struct(syn::VariantData::Struct(ref fields)) = type_.body { - fields - } else { - panic!("#[derive(DomObject)] should only be applied on proper structs") - }; - - let (first_field, fields) = fields - .split_first() - .expect("#[derive(DomObject)] should not be applied on empty structs"); - let first_field_name = first_field.ident.as_ref().unwrap(); - let mut field_types = vec![]; - for field in fields { - if !field_types.contains(&&field.ty) { - field_types.push(&field.ty); - } - } - - let name = &type_.ident; - let (impl_generics, ty_generics, where_clause) = type_.generics.split_for_impl(); - - let mut items = quote! { - impl #impl_generics ::js::conversions::ToJSValConvertible for #name #ty_generics #where_clause { - #[allow(unsafe_code)] - unsafe fn to_jsval(&self, - cx: *mut ::js::jsapi::JSContext, - rval: ::js::jsapi::MutableHandleValue) { - let object = ::dom::bindings::reflector::DomObject::reflector(self).get_jsobject(); - object.to_jsval(cx, rval) - } - } - - impl #impl_generics ::dom::bindings::reflector::DomObject for #name #ty_generics #where_clause { - #[inline] - fn reflector(&self) -> &::dom::bindings::reflector::Reflector { - self.#first_field_name.reflector() - } - } - - impl #impl_generics ::dom::bindings::reflector::MutDomObject for #name #ty_generics #where_clause { - fn init_reflector(&mut self, obj: *mut ::js::jsapi::JSObject) { - self.#first_field_name.init_reflector(obj); - } - } - }; - - let mut params = quote::Tokens::new(); - params.append_separated(type_.generics.ty_params.iter().map(|param| ¶m.ident), ", "); - - // For each field in the struct, we implement ShouldNotImplDomObject for a - // pair of all the type parameters of the DomObject and and the field type. - // This allows us to support parameterized DOM objects - // such as IteratorIterable. - items.append_all(field_types.iter().map(|ty| { - quote! { - impl #impl_generics ShouldNotImplDomObject for ((#params), #ty) #where_clause {} - } - })); - - let bound = syn::TyParamBound::Trait( - syn::PolyTraitRef { - bound_lifetimes: vec![], - trait_ref: syn::parse_path("::dom::bindings::reflector::DomObject").unwrap(), - }, - syn::TraitBoundModifier::None - ); - - let mut generics = type_.generics.clone(); - generics.ty_params.push(syn::TyParam { - attrs: vec![], - ident: "__T".into(), - bounds: vec![bound], - default: None, - }); - let (impl_generics, _, where_clause) = generics.split_for_impl(); - - items.append(quote! { - trait ShouldNotImplDomObject {} - impl #impl_generics ShouldNotImplDomObject for ((#params), __T) #where_clause {} - }.as_str()); - - let dummy_const = syn::Ident::new(format!("_IMPL_DOMOBJECT_FOR_{}", name)); - let tokens = quote! 
{ - #[allow(non_upper_case_globals)] - const #dummy_const: () = { #items }; - }; - - tokens.to_string() -} diff --git a/collector/compile-benchmarks/style-servo/components/fallible/Cargo.toml b/collector/compile-benchmarks/style-servo/components/fallible/Cargo.toml deleted file mode 100644 index eae094d57..000000000 --- a/collector/compile-benchmarks/style-servo/components/fallible/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -[package] -name = "fallible" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "fallible" -path = "lib.rs" - -[dependencies] -smallvec = "0.4" -hashglobe = { path = "../hashglobe" } - -# This crate effectively does nothing except if the `known_system_malloc` -# feature is specified. -# -# In that case, we actually call the system malloc functions to reserve space, -# otherwise we just let rust do its thing (aborting on OOM). -# -# This is effectively a stop-gap measure until we can do this properly in -# stable rust. -[features] -known_system_malloc = [] diff --git a/collector/compile-benchmarks/style-servo/components/fallible/lib.rs b/collector/compile-benchmarks/style-servo/components/fallible/lib.rs deleted file mode 100644 index c220699a7..000000000 --- a/collector/compile-benchmarks/style-servo/components/fallible/lib.rs +++ /dev/null @@ -1,171 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -extern crate hashglobe; -extern crate smallvec; - -use hashglobe::FailedAllocationError; -#[cfg(feature = "known_system_malloc")] -use hashglobe::alloc; -use smallvec::Array; -use smallvec::SmallVec; -use std::vec::Vec; - -pub trait FallibleVec { - /// Append |val| to the end of |vec|. Returns Ok(()) on success, - /// Err(reason) if it fails, with |reason| describing the failure. - fn try_push(&mut self, value: T) -> Result<(), FailedAllocationError>; -} - - -///////////////////////////////////////////////////////////////// -// Vec - -impl FallibleVec for Vec { - #[inline(always)] - fn try_push(&mut self, val: T) -> Result<(), FailedAllocationError> { - #[cfg(feature = "known_system_malloc")] - { - if self.capacity() == self.len() { - try_double_vec(self)?; - debug_assert!(self.capacity() > self.len()); - } - } - self.push(val); - Ok(()) - } -} - -// Double the capacity of |vec|, or fail to do so due to lack of memory. -// Returns Ok(()) on success, Err(..) on failure. -#[cfg(feature = "known_system_malloc")] -#[inline(never)] -#[cold] -fn try_double_vec(vec: &mut Vec) -> Result<(), FailedAllocationError> { - use std::mem; - - let old_ptr = vec.as_mut_ptr(); - let old_len = vec.len(); - - let old_cap: usize = vec.capacity(); - let new_cap: usize = if old_cap == 0 { - 4 - } else { - old_cap.checked_mul(2).ok_or(FailedAllocationError::new( - "capacity overflow for Vec", - ))? 
- }; - - let new_size_bytes = new_cap.checked_mul(mem::size_of::()).ok_or( - FailedAllocationError::new("capacity overflow for Vec"), - )?; - - let new_ptr = unsafe { - if old_cap == 0 { - alloc::alloc(new_size_bytes, 0) - } else { - alloc::realloc(old_ptr as *mut u8, new_size_bytes) - } - }; - - if new_ptr.is_null() { - return Err(FailedAllocationError::new( - "out of memory when allocating Vec", - )); - } - - let new_vec = unsafe { - Vec::from_raw_parts(new_ptr as *mut T, old_len, new_cap) - }; - - mem::forget(mem::replace(vec, new_vec)); - Ok(()) -} - - -///////////////////////////////////////////////////////////////// -// SmallVec - -impl FallibleVec for SmallVec { - #[inline(always)] - fn try_push(&mut self, val: T::Item) -> Result<(), FailedAllocationError> { - #[cfg(feature = "known_system_malloc")] - { - if self.capacity() == self.len() { - try_double_small_vec(self)?; - debug_assert!(self.capacity() > self.len()); - } - } - self.push(val); - Ok(()) - } -} - -// Double the capacity of |svec|, or fail to do so due to lack of memory. -// Returns Ok(()) on success, Err(..) on failure. -#[cfg(feature = "known_system_malloc")] -#[inline(never)] -#[cold] -fn try_double_small_vec(svec: &mut SmallVec) --> Result<(), FailedAllocationError> -where - T: Array, -{ - use std::mem; - use std::ptr::copy_nonoverlapping; - - let old_ptr = svec.as_mut_ptr(); - let old_len = svec.len(); - - let old_cap: usize = svec.capacity(); - let new_cap: usize = if old_cap == 0 { - 4 - } else { - old_cap.checked_mul(2).ok_or(FailedAllocationError::new( - "capacity overflow for SmallVec", - ))? - }; - - // This surely shouldn't fail, if |old_cap| was previously accepted as a - // valid value. But err on the side of caution. - let old_size_bytes = old_cap.checked_mul(mem::size_of::()).ok_or( - FailedAllocationError::new("capacity overflow for SmallVec"), - )?; - - let new_size_bytes = new_cap.checked_mul(mem::size_of::()).ok_or( - FailedAllocationError::new("capacity overflow for SmallVec"), - )?; - - let new_ptr; - if svec.spilled() { - // There's an old block to free, and, presumably, old contents to - // copy. realloc takes care of both aspects. - unsafe { - new_ptr = alloc::realloc(old_ptr as *mut u8, new_size_bytes); - } - } else { - // There's no old block to free. There may be old contents to copy. 
- unsafe { - new_ptr = alloc::alloc(new_size_bytes, 0); - if !new_ptr.is_null() && old_size_bytes > 0 { - copy_nonoverlapping(old_ptr as *const u8, - new_ptr as *mut u8, old_size_bytes); - } - } - } - - if new_ptr.is_null() { - return Err(FailedAllocationError::new( - "out of memory when allocating SmallVec", - )); - } - - let new_vec = unsafe { - Vec::from_raw_parts(new_ptr as *mut T::Item, old_len, new_cap) - }; - - let new_svec = SmallVec::from_vec(new_vec); - mem::forget(mem::replace(svec, new_svec)); - Ok(()) -} diff --git a/collector/compile-benchmarks/style-servo/components/geometry/Cargo.toml b/collector/compile-benchmarks/style-servo/components/geometry/Cargo.toml deleted file mode 100644 index 956337a06..000000000 --- a/collector/compile-benchmarks/style-servo/components/geometry/Cargo.toml +++ /dev/null @@ -1,19 +0,0 @@ -[package] -name = "servo_geometry" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "servo_geometry" -path = "lib.rs" - -[features] -# servo as opposed to geckolib -servo = ["euclid/unstable"] - -[dependencies] -app_units = "0.5" -euclid = "0.15" -heapsize = "0.4" diff --git a/collector/compile-benchmarks/style-servo/components/geometry/lib.rs b/collector/compile-benchmarks/style-servo/components/geometry/lib.rs deleted file mode 100644 index c71b687e8..000000000 --- a/collector/compile-benchmarks/style-servo/components/geometry/lib.rs +++ /dev/null @@ -1,51 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -extern crate app_units; -extern crate euclid; -#[macro_use] extern crate heapsize; - -use app_units::{Au, MAX_AU, MIN_AU}; -use euclid::{Point2D, Rect, Size2D}; - -// Units for use with euclid::length and euclid::scale_factor. - -/// A normalized "pixel" at the default resolution for the display. -/// -/// Like the CSS "px" unit, the exact physical size of this unit may vary between devices, but it -/// should approximate a device-independent reference length. This unit corresponds to Android's -/// "density-independent pixel" (dip), Mac OS X's "point", and Windows "device-independent pixel." -/// -/// The relationship between DevicePixel and DeviceIndependentPixel is defined by the OS. On most low-dpi -/// screens, one DeviceIndependentPixel is equal to one DevicePixel. But on high-density screens it can be -/// some larger number. For example, by default on Apple "retina" displays, one DeviceIndependentPixel equals -/// two DevicePixels. On Android "MDPI" displays, one DeviceIndependentPixel equals 1.5 device pixels. -/// -/// The ratio between DeviceIndependentPixel and DevicePixel for a given display be found by calling -/// `servo::windowing::WindowMethods::hidpi_factor`. -#[derive(Clone, Copy, Debug)] -pub enum DeviceIndependentPixel {} - -known_heap_size!(0, DeviceIndependentPixel); - -// An Au is an "App Unit" and represents 1/60th of a CSS pixel. It was -// originally proposed in 2002 as a standard unit of measure in Gecko. -// See https://bugzilla.mozilla.org/show_bug.cgi?id=177805 for more info. - -#[inline(always)] -pub fn max_rect() -> Rect { - Rect::new(Point2D::new(MIN_AU / 2, MIN_AU / 2), Size2D::new(MAX_AU, MAX_AU)) -} - -/// A helper function to convert a rect of `f32` pixels to a rect of app units. 
-pub fn f32_rect_to_au_rect(rect: Rect) -> Rect { - Rect::new(Point2D::new(Au::from_f32_px(rect.origin.x), Au::from_f32_px(rect.origin.y)), - Size2D::new(Au::from_f32_px(rect.size.width), Au::from_f32_px(rect.size.height))) -} - -/// A helper function to convert a rect of `Au` pixels to a rect of f32 units. -pub fn au_rect_to_f32_rect(rect: Rect) -> Rect { - Rect::new(Point2D::new(rect.origin.x.to_f32_px(), rect.origin.y.to_f32_px()), - Size2D::new(rect.size.width.to_f32_px(), rect.size.height.to_f32_px())) -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/Cargo.toml b/collector/compile-benchmarks/style-servo/components/gfx/Cargo.toml deleted file mode 100644 index 3d7aea1b5..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/Cargo.toml +++ /dev/null @@ -1,65 +0,0 @@ -[package] - -name = "gfx" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "gfx" -path = "lib.rs" - -[dependencies] -app_units = "0.5" -bitflags = "0.7" -euclid = "0.15" -fnv = "1.0" -fontsan = {git = "https://github.com/servo/fontsan"} -gfx_traits = {path = "../gfx_traits"} -harfbuzz-sys = "0.1" -heapsize = "0.4" -heapsize_derive = "0.1" -ipc-channel = "0.8" -lazy_static = "0.2" -libc = "0.2" -log = "0.3.5" -msg = {path = "../msg"} -net_traits = {path = "../net_traits"} -ordered-float = "0.4" -range = {path = "../range"} -serde = "1.0" -servo_arc = {path = "../servo_arc"} -servo_atoms = {path = "../atoms"} -servo_geometry = {path = "../geometry"} -servo_url = {path = "../url"} -smallvec = "0.4" -style = {path = "../style"} -style_traits = {path = "../style_traits"} -time = "0.1.12" -unicode-bidi = {version = "0.3", features = ["with_serde"]} -unicode-script = {version = "0.1", features = ["harfbuzz"]} -webrender_api = {git = "https://github.com/servo/webrender", features = ["ipc"]} -xi-unicode = "0.1.0" - -[target.'cfg(target_os = "macos")'.dependencies] -byteorder = "1.0" -core-foundation = "0.4" -core-graphics = "0.9" -core-text = "7.0" - -[target.'cfg(any(target_os = "linux", target_os = "android"))'.dependencies] -freetype = "0.3" - -[target.'cfg(target_os = "linux")'.dependencies] -servo-fontconfig = "0.2.1" - -[target.'cfg(target_os = "android")'.dependencies] -xml5ever = {version = "0.10"} - -[target.'cfg(any(target_feature = "sse2", target_feature = "neon"))'.dependencies] -simd = "0.2.0" - -[target.'cfg(target_os = "windows")'.dependencies] -dwrote = "0.4" -truetype = "0.26" diff --git a/collector/compile-benchmarks/style-servo/components/gfx/display_list/mod.rs b/collector/compile-benchmarks/style-servo/components/gfx/display_list/mod.rs deleted file mode 100644 index 4e0aae447..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/display_list/mod.rs +++ /dev/null @@ -1,1451 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! Servo heavily uses display lists, which are retained-mode lists of painting commands to -//! perform. Using a list instead of painting elements in immediate mode allows transforms, hit -//! testing, and invalidation to be performed using the same primitives as painting. It also allows -//! Servo to aggressively cull invisible and out-of-bounds painting elements, to reduce overdraw. -//! Finally, display lists allow tiles to be farmed out onto multiple CPUs and painted in parallel -//! 
(although this benefit does not apply to GPU-based painting). -//! -//! Display items describe relatively high-level drawing operations (for example, entire borders -//! and shadows instead of lines and blur operations), to reduce the amount of allocation required. -//! They are therefore not exactly analogous to constructs like Skia pictures, which consist of -//! low-level drawing primitives. - -use app_units::Au; -use euclid::{Transform3D, Point2D, Vector2D, Rect, Size2D, TypedRect, SideOffsets2D}; -use euclid::num::{One, Zero}; -use gfx_traits::StackingContextId; -use gfx_traits::print_tree::PrintTree; -use ipc_channel::ipc::IpcSharedMemory; -use msg::constellation_msg::PipelineId; -use net_traits::image::base::{Image, PixelFormat}; -use range::Range; -use servo_geometry::max_rect; -use std::cmp::{self, Ordering}; -use std::collections::HashMap; -use std::fmt; -use std::sync::Arc; -use style::computed_values::{border_style, image_rendering}; -use style::values::computed::Filter; -use style_traits::cursor::Cursor; -use text::TextRun; -use text::glyph::ByteIndex; -use webrender_api::{self, ClipAndScrollInfo, ClipId, ColorF, GradientStop, LocalClip}; -use webrender_api::{MixBlendMode, ScrollPolicy, ScrollSensitivity, StickyFrameInfo}; -use webrender_api::TransformStyle; - -pub use style::dom::OpaqueNode; - -/// The factor that we multiply the blur radius by in order to inflate the boundaries of display -/// items that involve a blur. This ensures that the display item boundaries include all the ink. -pub static BLUR_INFLATION_FACTOR: i32 = 3; - -#[derive(Deserialize, HeapSizeOf, Serialize)] -pub struct DisplayList { - pub list: Vec, -} - -struct ScrollOffsetLookup<'a> { - parents: &'a mut HashMap, - calculated_total_offsets: ScrollOffsetMap, - raw_offsets: &'a ScrollOffsetMap, -} - -impl<'a> ScrollOffsetLookup<'a> { - fn new(parents: &'a mut HashMap, - raw_offsets: &'a ScrollOffsetMap) - -> ScrollOffsetLookup<'a> { - ScrollOffsetLookup { - parents: parents, - calculated_total_offsets: HashMap::new(), - raw_offsets: raw_offsets, - } - } - - fn new_for_reference_frame(&mut self, - clip_id: ClipId, - transform: &Transform3D, - point: &mut Point2D) - -> Option { - // If a transform function causes the current transformation matrix of an object - // to be non-invertible, the object and its content do not get displayed. 
- let inv_transform = match transform.inverse() { - Some(transform) => transform, - None => return None, - }; - - let scroll_offset = self.full_offset_for_clip_scroll_node(&clip_id); - *point = Point2D::new(point.x - Au::from_f32_px(scroll_offset.x), - point.y - Au::from_f32_px(scroll_offset.y)); - let frac_point = inv_transform.transform_point2d(&Point2D::new(point.x.to_f32_px(), - point.y.to_f32_px())); - *point = Point2D::new(Au::from_f32_px(frac_point.x), Au::from_f32_px(frac_point.y)); - - let mut sublookup = ScrollOffsetLookup { - parents: &mut self.parents, - calculated_total_offsets: HashMap::new(), - raw_offsets: self.raw_offsets, - }; - sublookup.calculated_total_offsets.insert(clip_id, Vector2D::zero()); - Some(sublookup) - } - - fn add_clip_scroll_node(&mut self, clip_scroll_node: &ClipScrollNode) { - self.parents.insert(clip_scroll_node.id, clip_scroll_node.parent_id); - } - - fn full_offset_for_clip_scroll_node(&mut self, id: &ClipId) -> Vector2D { - if let Some(offset) = self.calculated_total_offsets.get(id) { - return *offset; - } - - let parent_offset = if !id.is_root_scroll_node() { - let parent_id = *self.parents.get(id).unwrap(); - self.full_offset_for_clip_scroll_node(&parent_id) - } else { - Vector2D::zero() - }; - - let offset = parent_offset + - self.raw_offsets.get(id).cloned().unwrap_or_else(Vector2D::zero); - self.calculated_total_offsets.insert(*id, offset); - offset - } -} - -impl DisplayList { - /// Return the bounds of this display list based on the dimensions of the root - /// stacking context. - pub fn bounds(&self) -> Rect { - match self.list.get(0) { - Some(&DisplayItem::PushStackingContext(ref item)) => item.stacking_context.bounds, - Some(_) => unreachable!("Root element of display list not stacking context."), - None => Rect::zero(), - } - } - - // Returns the text index within a node for the point of interest. 
- pub fn text_index(&self, - node: OpaqueNode, - client_point: &Point2D, - scroll_offsets: &ScrollOffsetMap) - -> Option { - let mut result = Vec::new(); - let mut traversal = DisplayListTraversal::new(self); - self.text_index_contents(node, - &mut traversal, - client_point, - &mut ScrollOffsetLookup::new(&mut HashMap::new(), scroll_offsets), - &mut result); - result.pop() - } - - fn text_index_contents<'a>(&self, - node: OpaqueNode, - traversal: &mut DisplayListTraversal<'a>, - point: &Point2D, - offset_lookup: &mut ScrollOffsetLookup, - result: &mut Vec) { - while let Some(item) = traversal.next() { - match item { - &DisplayItem::PushStackingContext(ref context_item) => { - self.text_index_stacking_context(&context_item.stacking_context, - item.scroll_node_id(), - node, - traversal, - point, - offset_lookup, - result); - } - &DisplayItem::DefineClipScrollNode(ref item) => { - offset_lookup.add_clip_scroll_node(&item.node); - } - &DisplayItem::PopStackingContext(_) => return, - &DisplayItem::Text(ref text) => { - let base = item.base(); - if base.metadata.node == node { - let offset = *point - text.baseline_origin; - let index = text.text_run.range_index_of_advance(&text.range, offset.x); - result.push(index); - } - }, - _ => {}, - } - } - } - - fn text_index_stacking_context<'a>(&self, - stacking_context: &StackingContext, - clip_id: ClipId, - node: OpaqueNode, - traversal: &mut DisplayListTraversal<'a>, - point: &Point2D, - offset_lookup: &mut ScrollOffsetLookup, - result: &mut Vec) { - let mut point = *point - stacking_context.bounds.origin.to_vector(); - if stacking_context.scroll_policy == ScrollPolicy::Fixed { - let old_offset = offset_lookup.calculated_total_offsets.get(&clip_id).cloned(); - offset_lookup.calculated_total_offsets.insert(clip_id, Vector2D::zero()); - - self.text_index_contents(node, traversal, &point, offset_lookup, result); - - match old_offset { - Some(offset) => offset_lookup.calculated_total_offsets.insert(clip_id, offset), - None => offset_lookup.calculated_total_offsets.remove(&clip_id), - }; - } else if let Some(transform) = stacking_context.transform { - if let Some(ref mut sublookup) = - offset_lookup.new_for_reference_frame(clip_id, &transform, &mut point) { - self.text_index_contents(node, traversal, &point, sublookup, result); - } - } else { - self.text_index_contents(node, traversal, &point, offset_lookup, result); - } - } - - // Return all nodes containing the point of interest, bottommost first, and - // respecting the `pointer-events` CSS property. 
- pub fn hit_test(&self, - point: &Point2D, - scroll_offsets: &ScrollOffsetMap) - -> Vec { - let mut result = Vec::new(); - let mut traversal = DisplayListTraversal::new(self); - self.hit_test_contents(&mut traversal, - point, - &mut ScrollOffsetLookup::new(&mut HashMap::new(), scroll_offsets), - &mut result); - result - } - - fn hit_test_contents<'a>(&self, - traversal: &mut DisplayListTraversal<'a>, - point: &Point2D, - offset_lookup: &mut ScrollOffsetLookup, - result: &mut Vec) { - while let Some(item) = traversal.next() { - match item { - &DisplayItem::PushStackingContext(ref context_item) => { - self.hit_test_stacking_context(&context_item.stacking_context, - item.scroll_node_id(), - traversal, - point, - offset_lookup, - result); - } - &DisplayItem::PopStackingContext(_) => return, - &DisplayItem::DefineClipScrollNode(ref item) => { - offset_lookup.add_clip_scroll_node(&item.node); - } - _ => { - if let Some(meta) = item.hit_test(*point, offset_lookup) { - result.push(meta); - } - } - } - } - } - - fn hit_test_stacking_context<'a>(&self, - stacking_context: &StackingContext, - clip_id: ClipId, - traversal: &mut DisplayListTraversal<'a>, - point: &Point2D, - offset_lookup: &mut ScrollOffsetLookup, - result: &mut Vec) { - debug_assert!(stacking_context.context_type == StackingContextType::Real); - - let mut point = *point - stacking_context.bounds.origin.to_vector(); - if stacking_context.scroll_policy == ScrollPolicy::Fixed { - let old_offset = offset_lookup.calculated_total_offsets.get(&clip_id).cloned(); - offset_lookup.calculated_total_offsets.insert(clip_id, Vector2D::zero()); - - self.hit_test_contents(traversal, &point, offset_lookup, result); - - match old_offset { - Some(offset) => offset_lookup.calculated_total_offsets.insert(clip_id, offset), - None => offset_lookup.calculated_total_offsets.remove(&clip_id), - }; - } else if let Some(transform) = stacking_context.transform { - if let Some(ref mut sublookup) = - offset_lookup.new_for_reference_frame(clip_id, &transform, &mut point) { - self.hit_test_contents(traversal, &point, sublookup, result); - } - } else { - self.hit_test_contents(traversal, &point, offset_lookup, result); - } - } - - pub fn print(&self) { - let mut print_tree = PrintTree::new("Display List".to_owned()); - self.print_with_tree(&mut print_tree); - } - - pub fn print_with_tree(&self, print_tree: &mut PrintTree) { - print_tree.new_level("Items".to_owned()); - for item in &self.list { - print_tree.add_item(format!("{:?} StackingContext: {:?} {:?}", - item, - item.base().stacking_context_id, - item.clip_and_scroll_info())); - } - print_tree.end_level(); - } -} - -pub struct DisplayListTraversal<'a> { - pub display_list: &'a DisplayList, - pub next_item_index: usize, - pub first_item_index: usize, - pub last_item_index: usize, -} - -impl<'a> DisplayListTraversal<'a> { - pub fn new(display_list: &'a DisplayList) -> DisplayListTraversal { - DisplayListTraversal { - display_list: display_list, - next_item_index: 0, - first_item_index: 0, - last_item_index: display_list.list.len(), - } - } - - pub fn new_partial(display_list: &'a DisplayList, - stacking_context_id: StackingContextId, - start: usize, - end: usize) - -> DisplayListTraversal { - debug_assert!(start <= end); - debug_assert!(display_list.list.len() > start); - debug_assert!(display_list.list.len() > end); - - let stacking_context_start = display_list.list[0..start].iter().rposition(|item| - match item { - &DisplayItem::PushStackingContext(ref item) => - item.stacking_context.id == stacking_context_id, 
- _ => false, - }).unwrap_or(start); - debug_assert!(stacking_context_start <= start); - - DisplayListTraversal { - display_list: display_list, - next_item_index: stacking_context_start, - first_item_index: start, - last_item_index: end + 1, - } - } - - pub fn previous_item_id(&self) -> usize { - self.next_item_index - 1 - } - - pub fn skip_to_end_of_stacking_context(&mut self, id: StackingContextId) { - self.next_item_index = self.display_list.list[self.next_item_index..].iter() - .position(|item| { - match item { - &DisplayItem::PopStackingContext(ref item) => item.stacking_context_id == id, - _ => false - } - }).unwrap_or(self.display_list.list.len()); - debug_assert!(self.next_item_index < self.last_item_index); - } -} - -impl<'a> Iterator for DisplayListTraversal<'a> { - type Item = &'a DisplayItem; - - fn next(&mut self) -> Option<&'a DisplayItem> { - while self.next_item_index < self.last_item_index { - debug_assert!(self.next_item_index <= self.last_item_index); - - let reached_first_item = self.next_item_index >= self.first_item_index; - let item = &self.display_list.list[self.next_item_index]; - - self.next_item_index += 1; - - if reached_first_item { - return Some(item) - } - - // Before we reach the starting item, we only emit stacking context boundaries. This - // is to ensure that we properly position items when we are processing a display list - // slice that is relative to a certain stacking context. - match item { - &DisplayItem::PushStackingContext(_) | - &DisplayItem::PopStackingContext(_) => return Some(item), - _ => {} - } - } - - None - } -} - -/// Display list sections that make up a stacking context. Each section here refers -/// to the steps in CSS 2.1 Appendix E. -/// -#[derive(Clone, Copy, Debug, Deserialize, Eq, HeapSizeOf, Ord, PartialEq, PartialOrd, Serialize)] -pub enum DisplayListSection { - BackgroundAndBorders, - BlockBackgroundsAndBorders, - Content, - Outlines, -} - -#[derive(Clone, Copy, Debug, Deserialize, Eq, HeapSizeOf, Ord, PartialEq, PartialOrd, Serialize)] -pub enum StackingContextType { - Real, - PseudoPositioned, - PseudoFloat, -} - -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -/// Represents one CSS stacking context, which may or may not have a hardware layer. -pub struct StackingContext { - /// The ID of this StackingContext for uniquely identifying it. - pub id: StackingContextId, - - /// The type of this StackingContext. Used for collecting and sorting. - pub context_type: StackingContextType, - - /// The position and size of this stacking context. - pub bounds: Rect, - - /// The overflow rect for this stacking context in its coordinate system. - pub overflow: Rect, - - /// The `z-index` for this stacking context. - pub z_index: i32, - - /// CSS filters to be applied to this stacking context (including opacity). - pub filters: Vec, - - /// The blend mode with which this stacking context blends with its backdrop. - pub mix_blend_mode: MixBlendMode, - - /// A transform to be applied to this stacking context. - pub transform: Option>, - - /// The transform style of this stacking context. - pub transform_style: TransformStyle, - - /// The perspective matrix to be applied to children. - pub perspective: Option>, - - /// The scroll policy of this layer. - pub scroll_policy: ScrollPolicy, - - /// The clip and scroll info for this StackingContext. - pub parent_clip_and_scroll_info: ClipAndScrollInfo, -} - -impl StackingContext { - /// Creates a new stacking context. 
- #[inline] - pub fn new(id: StackingContextId, - context_type: StackingContextType, - bounds: &Rect, - overflow: &Rect, - z_index: i32, - filters: Vec, - mix_blend_mode: MixBlendMode, - transform: Option>, - transform_style: TransformStyle, - perspective: Option>, - scroll_policy: ScrollPolicy, - parent_clip_and_scroll_info: ClipAndScrollInfo) - -> StackingContext { - StackingContext { - id: id, - context_type: context_type, - bounds: *bounds, - overflow: *overflow, - z_index: z_index, - filters: filters, - mix_blend_mode: mix_blend_mode, - transform: transform, - transform_style: transform_style, - perspective: perspective, - scroll_policy: scroll_policy, - parent_clip_and_scroll_info: parent_clip_and_scroll_info, - } - } - - #[inline] - pub fn root(pipeline_id: PipelineId) -> StackingContext { - StackingContext::new(StackingContextId::root(), - StackingContextType::Real, - &Rect::zero(), - &Rect::zero(), - 0, - vec![], - MixBlendMode::Normal, - None, - TransformStyle::Flat, - None, - ScrollPolicy::Scrollable, - pipeline_id.root_clip_and_scroll_info()) - } - - pub fn to_display_list_items(self, pipeline_id: PipelineId) -> (DisplayItem, DisplayItem) { - let mut base_item = BaseDisplayItem::empty(pipeline_id); - base_item.stacking_context_id = self.id; - base_item.clip_and_scroll_info = self.parent_clip_and_scroll_info; - - let pop_item = DisplayItem::PopStackingContext(Box::new( - PopStackingContextItem { - base: base_item.clone(), - stacking_context_id: self.id, - } - )); - - let push_item = DisplayItem::PushStackingContext(Box::new( - PushStackingContextItem { - base: base_item, - stacking_context: self, - } - )); - - (push_item, pop_item) - } -} - -impl Ord for StackingContext { - fn cmp(&self, other: &Self) -> Ordering { - if self.z_index != 0 || other.z_index != 0 { - return self.z_index.cmp(&other.z_index); - } - - match (self.context_type, other.context_type) { - (StackingContextType::PseudoFloat, StackingContextType::PseudoFloat) => Ordering::Equal, - (StackingContextType::PseudoFloat, _) => Ordering::Less, - (_, StackingContextType::PseudoFloat) => Ordering::Greater, - (_, _) => Ordering::Equal, - } - } -} - -impl PartialOrd for StackingContext { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Eq for StackingContext {} -impl PartialEq for StackingContext { - fn eq(&self, other: &Self) -> bool { - self.id == other.id - } -} - -impl fmt::Debug for StackingContext { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let type_string = if self.context_type == StackingContextType::Real { - "StackingContext" - } else { - "Pseudo-StackingContext" - }; - - write!(f, "{} at {:?} with overflow {:?}: {:?}", - type_string, - self.bounds, - self.overflow, - self.id) - } -} - -#[derive(Clone, Debug, Deserialize, HeapSizeOf, Serialize)] -pub enum ClipScrollNodeType { - ScrollFrame(ScrollSensitivity), - StickyFrame(StickyFrameInfo), - Clip, -} - -/// Defines a clip scroll node. -#[derive(Clone, Debug, Deserialize, HeapSizeOf, Serialize)] -pub struct ClipScrollNode { - /// The WebRender clip id of this scroll root based on the source of this clip - /// and information about the fragment. - pub id: ClipId, - - /// The unique ID of the parent of this ClipScrollNode. - pub parent_id: ClipId, - - /// The position of this scroll root's frame in the parent stacking context. - pub clip: ClippingRegion, - - /// The rect of the contents that can be scrolled inside of the scroll root. - pub content_rect: Rect, - - /// The type of this ClipScrollNode. 
- pub node_type: ClipScrollNodeType, -} - -impl ClipScrollNode { - pub fn to_define_item(&self, pipeline_id: PipelineId) -> DisplayItem { - DisplayItem::DefineClipScrollNode(box DefineClipScrollNodeItem { - base: BaseDisplayItem::empty(pipeline_id), - node: self.clone(), - }) - } -} - - -/// One drawing command in the list. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub enum DisplayItem { - SolidColor(Box), - Text(Box), - Image(Box), - Border(Box), - Gradient(Box), - RadialGradient(Box), - Line(Box), - BoxShadow(Box), - PushTextShadow(Box), - PopTextShadow(Box), - Iframe(Box), - PushStackingContext(Box), - PopStackingContext(Box), - DefineClipScrollNode(Box), -} - -/// Information common to all display items. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct BaseDisplayItem { - /// The boundaries of the display item, in layer coordinates. - pub bounds: Rect, - - /// Metadata attached to this display item. - pub metadata: DisplayItemMetadata, - - /// The local clip for this item. - pub local_clip: LocalClip, - - /// The section of the display list that this item belongs to. - pub section: DisplayListSection, - - /// The id of the stacking context this item belongs to. - pub stacking_context_id: StackingContextId, - - /// The clip and scroll info for this item. - pub clip_and_scroll_info: ClipAndScrollInfo, -} - -impl BaseDisplayItem { - #[inline(always)] - pub fn new(bounds: &Rect, - metadata: DisplayItemMetadata, - local_clip: LocalClip, - section: DisplayListSection, - stacking_context_id: StackingContextId, - clip_and_scroll_info: ClipAndScrollInfo) - -> BaseDisplayItem { - BaseDisplayItem { - bounds: *bounds, - metadata: metadata, - local_clip: local_clip, - section: section, - stacking_context_id: stacking_context_id, - clip_and_scroll_info: clip_and_scroll_info, - } - } - - #[inline(always)] - pub fn empty(pipeline_id: PipelineId) -> BaseDisplayItem { - BaseDisplayItem { - bounds: TypedRect::zero(), - metadata: DisplayItemMetadata { - node: OpaqueNode(0), - pointing: None, - }, - local_clip: LocalClip::from(max_rect().to_rectf()), - section: DisplayListSection::Content, - stacking_context_id: StackingContextId::root(), - clip_and_scroll_info: pipeline_id.root_clip_and_scroll_info(), - } - } -} - -/// A clipping region for a display item. Currently, this can describe rectangles, rounded -/// rectangles (for `border-radius`), or arbitrary intersections of the two. Arbitrary transforms -/// are not supported because those are handled by the higher-level `StackingContext` abstraction. -#[derive(Clone, Deserialize, HeapSizeOf, PartialEq, Serialize)] -pub struct ClippingRegion { - /// The main rectangular region. This does not include any corners. - pub main: Rect, - /// Any complex regions. - /// - /// TODO(pcwalton): Atomically reference count these? Not sure if it's worth the trouble. - /// Measure and follow up. - pub complex: Vec, -} - -/// A complex clipping region. These don't as easily admit arbitrary intersection operations, so -/// they're stored in a list over to the side. Currently a complex clipping region is just a -/// rounded rectangle, but the CSS WGs will probably make us throw more stuff in here eventually. -#[derive(Clone, Debug, Deserialize, HeapSizeOf, PartialEq, Serialize)] -pub struct ComplexClippingRegion { - /// The boundaries of the rectangle. - pub rect: Rect, - /// Border radii of this rectangle. 
- pub radii: BorderRadii, -} - -impl ClippingRegion { - /// Returns an empty clipping region that, if set, will result in no pixels being visible. - #[inline] - pub fn empty() -> ClippingRegion { - ClippingRegion { - main: Rect::zero(), - complex: Vec::new(), - } - } - - /// Returns an all-encompassing clipping region that clips no pixels out. - #[inline] - pub fn max() -> ClippingRegion { - ClippingRegion { - main: max_rect(), - complex: Vec::new(), - } - } - - /// Returns a clipping region that represents the given rectangle. - #[inline] - pub fn from_rect(rect: &Rect) -> ClippingRegion { - ClippingRegion { - main: *rect, - complex: Vec::new(), - } - } - - /// Mutates this clipping region to intersect with the given rectangle. - /// - /// TODO(pcwalton): This could more eagerly eliminate complex clipping regions, at the cost of - /// complexity. - #[inline] - pub fn intersect_rect(&mut self, rect: &Rect) { - self.main = self.main.intersection(rect).unwrap_or(Rect::zero()) - } - - /// Returns true if this clipping region might be nonempty. This can return false positives, - /// but never false negatives. - #[inline] - pub fn might_be_nonempty(&self) -> bool { - !self.main.is_empty() - } - - /// Returns true if this clipping region might contain the given point and false otherwise. - /// This is a quick, not a precise, test; it can yield false positives. - #[inline] - pub fn might_intersect_point(&self, point: &Point2D) -> bool { - self.main.contains(point) && - self.complex.iter().all(|complex| complex.rect.contains(point)) - } - - /// Returns true if this clipping region might intersect the given rectangle and false - /// otherwise. This is a quick, not a precise, test; it can yield false positives. - #[inline] - pub fn might_intersect_rect(&self, rect: &Rect) -> bool { - self.main.intersects(rect) && - self.complex.iter().all(|complex| complex.rect.intersects(rect)) - } - - /// Returns true if this clipping region completely surrounds the given rect. - #[inline] - pub fn does_not_clip_rect(&self, rect: &Rect) -> bool { - self.main.contains(&rect.origin) && self.main.contains(&rect.bottom_right()) && - self.complex.iter().all(|complex| { - complex.rect.contains(&rect.origin) && complex.rect.contains(&rect.bottom_right()) - }) - } - - /// Returns a bounding rect that surrounds this entire clipping region. - #[inline] - pub fn bounding_rect(&self) -> Rect { - let mut rect = self.main; - for complex in &*self.complex { - rect = rect.union(&complex.rect) - } - rect - } - - /// Intersects this clipping region with the given rounded rectangle. - #[inline] - pub fn intersect_with_rounded_rect(&mut self, rect: &Rect, radii: &BorderRadii) { - let new_complex_region = ComplexClippingRegion { - rect: *rect, - radii: *radii, - }; - - // FIXME(pcwalton): This is O(n²) worst case for disjoint clipping regions. Is that OK? - // They're slow anyway… - // - // Possibly relevant if we want to do better: - // - // http://www.inrg.csie.ntu.edu.tw/algorithm2014/presentation/D&C%20Lee-84.pdf - for existing_complex_region in &mut self.complex { - if existing_complex_region.completely_encloses(&new_complex_region) { - *existing_complex_region = new_complex_region; - return - } - if new_complex_region.completely_encloses(existing_complex_region) { - return - } - } - - self.complex.push(ComplexClippingRegion { - rect: *rect, - radii: *radii, - }); - } - - /// Translates this clipping region by the given vector. 
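// Editorial sketch (not part of the original patch): the conservative point
// test performed by `might_intersect_point` above, using plain f32 rects
// instead of the Au-based euclid types. A point must fall inside the main
// rect and inside the bounding rect of every complex (rounded) region; the
// rounded corners themselves are not examined, so the test can yield false
// positives but never false negatives.
#[derive(Clone, Copy)]
struct SimpleRect {
    x: f32,
    y: f32,
    w: f32,
    h: f32,
}

impl SimpleRect {
    fn contains(&self, px: f32, py: f32) -> bool {
        px >= self.x && px < self.x + self.w && py >= self.y && py < self.y + self.h
    }
}

fn might_intersect_point(main: &SimpleRect, complex: &[SimpleRect], px: f32, py: f32) -> bool {
    main.contains(px, py) && complex.iter().all(|r| r.contains(px, py))
}

fn main() {
    let main = SimpleRect { x: 0.0, y: 0.0, w: 100.0, h: 100.0 };
    let rounded = [SimpleRect { x: 10.0, y: 10.0, w: 50.0, h: 50.0 }];
    assert!(might_intersect_point(&main, &rounded, 20.0, 20.0));
    // Outside the rounded region's bounding rect: definitely clipped out.
    assert!(!might_intersect_point(&main, &rounded, 80.0, 80.0));
}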
- #[inline] - pub fn translate(&self, delta: &Vector2D) -> ClippingRegion { - ClippingRegion { - main: self.main.translate(delta), - complex: self.complex.iter().map(|complex| { - ComplexClippingRegion { - rect: complex.rect.translate(delta), - radii: complex.radii, - } - }).collect(), - } - } - - #[inline] - pub fn is_max(&self) -> bool { - self.main == max_rect() && self.complex.is_empty() - } -} - -impl fmt::Debug for ClippingRegion { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - if *self == ClippingRegion::max() { - write!(f, "ClippingRegion::Max") - } else if *self == ClippingRegion::empty() { - write!(f, "ClippingRegion::Empty") - } else if self.main == max_rect() { - write!(f, "ClippingRegion(Complex={:?})", self.complex) - } else { - write!(f, "ClippingRegion(Rect={:?}, Complex={:?})", self.main, self.complex) - } - } -} - -impl ComplexClippingRegion { - // TODO(pcwalton): This could be more aggressive by considering points that touch the inside of - // the border radius ellipse. - fn completely_encloses(&self, other: &ComplexClippingRegion) -> bool { - let left = cmp::max(self.radii.top_left.width, self.radii.bottom_left.width); - let top = cmp::max(self.radii.top_left.height, self.radii.top_right.height); - let right = cmp::max(self.radii.top_right.width, self.radii.bottom_right.width); - let bottom = cmp::max(self.radii.bottom_left.height, self.radii.bottom_right.height); - let interior = Rect::new(Point2D::new(self.rect.origin.x + left, self.rect.origin.y + top), - Size2D::new(self.rect.size.width - left - right, - self.rect.size.height - top - bottom)); - interior.origin.x <= other.rect.origin.x && interior.origin.y <= other.rect.origin.y && - interior.max_x() >= other.rect.max_x() && interior.max_y() >= other.rect.max_y() - } -} - -/// Metadata attached to each display item. This is useful for performing auxiliary threads with -/// the display list involving hit testing: finding the originating DOM node and determining the -/// cursor to use when the element is hovered over. -#[derive(Clone, Copy, Deserialize, HeapSizeOf, Serialize)] -pub struct DisplayItemMetadata { - /// The DOM node from which this display item originated. - pub node: OpaqueNode, - /// The value of the `cursor` property when the mouse hovers over this display item. If `None`, - /// this display item is ineligible for pointer events (`pointer-events: none`). - pub pointing: Option, -} - -/// Paints a solid color. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct SolidColorDisplayItem { - /// Fields common to all display items. - pub base: BaseDisplayItem, - - /// The color. - pub color: ColorF, -} - -/// Paints text. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct TextDisplayItem { - /// Fields common to all display items. - pub base: BaseDisplayItem, - - /// The text run. - #[ignore_heap_size_of = "Because it is non-owning"] - pub text_run: Arc, - - /// The range of text within the text run. - pub range: Range, - - /// The color of the text. - pub text_color: ColorF, - - /// The position of the start of the baseline of this text. - pub baseline_origin: Point2D, - - /// The orientation of the text: upright or sideways left/right. - pub orientation: TextOrientation, -} - -#[derive(Clone, Deserialize, Eq, HeapSizeOf, PartialEq, Serialize)] -pub enum TextOrientation { - Upright, - SidewaysLeft, - SidewaysRight, -} - -/// Paints an image. 
-#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct ImageDisplayItem { - pub base: BaseDisplayItem, - - pub webrender_image: WebRenderImageInfo, - - #[ignore_heap_size_of = "Because it is non-owning"] - pub image_data: Option>, - - /// The dimensions to which the image display item should be stretched. If this is smaller than - /// the bounds of this display item, then the image will be repeated in the appropriate - /// direction to tile the entire bounds. - pub stretch_size: Size2D, - - /// The amount of space to add to the right and bottom part of each tile, when the image - /// is tiled. - pub tile_spacing: Size2D, - - /// The algorithm we should use to stretch the image. See `image_rendering` in CSS-IMAGES-3 § - /// 5.3. - pub image_rendering: image_rendering::T, -} -/// Paints an iframe. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct IframeDisplayItem { - pub base: BaseDisplayItem, - pub iframe: PipelineId, -} - -/// Paints a gradient. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct Gradient { - /// The start point of the gradient (computed during display list construction). - pub start_point: Point2D, - - /// The end point of the gradient (computed during display list construction). - pub end_point: Point2D, - - /// A list of color stops. - pub stops: Vec, - - /// True if gradient repeats infinitly. - pub repeating: bool, -} - -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct GradientDisplayItem { - /// Fields common to all display item. - pub base: BaseDisplayItem, - - /// Contains all gradient data. Included start, end point and color stops. - pub gradient: Gradient, -} - -/// Paints a radial gradient. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct RadialGradient { - /// The center point of the gradient. - pub center: Point2D, - - /// The radius of the gradient with an x and an y component. - pub radius: Size2D, - - /// A list of color stops. - pub stops: Vec, - - /// True if gradient repeats infinitly. - pub repeating: bool, -} - -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct RadialGradientDisplayItem { - /// Fields common to all display item. - pub base: BaseDisplayItem, - - /// Contains all gradient data. - pub gradient: RadialGradient, -} - -/// A normal border, supporting CSS border styles. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct NormalBorder { - /// Border colors. - pub color: SideOffsets2D, - - /// Border styles. - pub style: SideOffsets2D, - - /// Border radii. - /// - /// TODO(pcwalton): Elliptical radii. - pub radius: BorderRadii, -} - -/// A border that is made of image segments. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct ImageBorder { - /// The image this border uses, border-image-source. - pub image: WebRenderImageInfo, - - /// How to slice the image, as per border-image-slice. - pub slice: SideOffsets2D, - - /// Outsets for the border, as per border-image-outset. - pub outset: SideOffsets2D, - - /// If fill is true, draw the center patch of the image. - pub fill: bool, - - /// How to repeat or stretch horizontal edges (border-image-repeat). - pub repeat_horizontal: webrender_api::RepeatMode, - - /// How to repeat or stretch vertical edges (border-image-repeat). - pub repeat_vertical: webrender_api::RepeatMode, -} - -/// A border that is made of linear gradient -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct GradientBorder { - /// The gradient info that this border uses, border-image-source. 
- pub gradient: Gradient, - - /// Outsets for the border, as per border-image-outset. - pub outset: SideOffsets2D, -} - -/// A border that is made of radial gradient -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct RadialGradientBorder { - /// The gradient info that this border uses, border-image-source. - pub gradient: RadialGradient, - - /// Outsets for the border, as per border-image-outset. - pub outset: SideOffsets2D, -} - -/// Specifies the type of border -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub enum BorderDetails { - Normal(NormalBorder), - Image(ImageBorder), - Gradient(GradientBorder), - RadialGradient(RadialGradientBorder), -} - -/// Paints a border. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct BorderDisplayItem { - /// Fields common to all display items. - pub base: BaseDisplayItem, - - /// Border widths. - pub border_widths: SideOffsets2D, - - /// Details for specific border type - pub details: BorderDetails, -} - -/// Information about the border radii. -/// -/// TODO(pcwalton): Elliptical radii. -#[derive(Clone, Copy, Debug, Deserialize, HeapSizeOf, PartialEq, Serialize)] -pub struct BorderRadii { - pub top_left: Size2D, - pub top_right: Size2D, - pub bottom_right: Size2D, - pub bottom_left: Size2D, -} - -impl Default for BorderRadii where T: Default, T: Clone { - fn default() -> Self { - let top_left = Size2D::new(Default::default(), - Default::default()); - let top_right = Size2D::new(Default::default(), - Default::default()); - let bottom_left = Size2D::new(Default::default(), - Default::default()); - let bottom_right = Size2D::new(Default::default(), - Default::default()); - BorderRadii { top_left: top_left, - top_right: top_right, - bottom_left: bottom_left, - bottom_right: bottom_right } - } -} - -impl BorderRadii { - // Scale the border radii by the specified factor - pub fn scale_by(&self, s: f32) -> BorderRadii { - BorderRadii { top_left: BorderRadii::scale_corner_by(self.top_left, s), - top_right: BorderRadii::scale_corner_by(self.top_right, s), - bottom_left: BorderRadii::scale_corner_by(self.bottom_left, s), - bottom_right: BorderRadii::scale_corner_by(self.bottom_right, s) } - } - - // Scale the border corner radius by the specified factor - pub fn scale_corner_by(corner: Size2D, s: f32) -> Size2D { - Size2D::new(corner.width.scale_by(s), corner.height.scale_by(s)) - } -} - -impl BorderRadii where T: PartialEq + Zero { - /// Returns true if all the radii are zero. - pub fn is_square(&self) -> bool { - let zero = Zero::zero(); - self.top_left == zero && self.top_right == zero && self.bottom_right == zero && - self.bottom_left == zero - } -} - -impl BorderRadii where T: PartialEq + Zero + Clone { - /// Returns a set of border radii that all have the given value. - pub fn all_same(value: T) -> BorderRadii { - BorderRadii { - top_left: Size2D::new(value.clone(), value.clone()), - top_right: Size2D::new(value.clone(), value.clone()), - bottom_right: Size2D::new(value.clone(), value.clone()), - bottom_left: Size2D::new(value.clone(), value.clone()), - } - } -} - -/// Paints a line segment. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct LineDisplayItem { - pub base: BaseDisplayItem, - - /// The line segment color. - pub color: ColorF, - - /// The line segment style. - #[ignore_heap_size_of = "enum type in webrender"] - pub style: webrender_api::LineStyle, -} - -/// Paints a box shadow per CSS-BACKGROUNDS. 
-#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct BoxShadowDisplayItem { - /// Fields common to all display items. - pub base: BaseDisplayItem, - - /// The dimensions of the box that we're placing a shadow around. - pub box_bounds: Rect, - - /// The offset of this shadow from the box. - pub offset: Vector2D, - - /// The color of this shadow. - pub color: ColorF, - - /// The blur radius for this shadow. - pub blur_radius: Au, - - /// The spread radius of this shadow. - pub spread_radius: Au, - - /// The border radius of this shadow. - /// - /// TODO(pcwalton): Elliptical radii; different radii for each corner. - pub border_radius: Au, - - /// How we should clip the result. - pub clip_mode: BoxShadowClipMode, -} - -/// Defines a text shadow that affects all items until the paired PopTextShadow. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct PushTextShadowDisplayItem { - /// Fields common to all display items. - pub base: BaseDisplayItem, - - /// The offset of this shadow from the text. - pub offset: Vector2D, - - /// The color of this shadow. - pub color: ColorF, - - /// The blur radius for this shadow. - pub blur_radius: Au, -} - -/// Defines a text shadow that affects all items until the next PopTextShadow. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct PopTextShadowDisplayItem { - /// Fields common to all display items. - pub base: BaseDisplayItem, -} - -/// Defines a stacking context. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct PushStackingContextItem { - /// Fields common to all display items. - pub base: BaseDisplayItem, - - pub stacking_context: StackingContext, -} - -/// Defines a stacking context. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct PopStackingContextItem { - /// Fields common to all display items. - pub base: BaseDisplayItem, - - pub stacking_context_id: StackingContextId, -} - -/// Starts a group of items inside a particular scroll root. -#[derive(Clone, Deserialize, HeapSizeOf, Serialize)] -pub struct DefineClipScrollNodeItem { - /// Fields common to all display items. - pub base: BaseDisplayItem, - - /// The scroll root that this item starts. - pub node: ClipScrollNode, -} - -/// How a box shadow should be clipped. -#[derive(Clone, Copy, Debug, Deserialize, HeapSizeOf, PartialEq, Serialize)] -pub enum BoxShadowClipMode { - /// No special clipping should occur. This is used for (shadowed) text decorations. - None, - /// The area inside `box_bounds` should be clipped out. Corresponds to the normal CSS - /// `box-shadow`. - Outset, - /// The area outside `box_bounds` should be clipped out. Corresponds to the `inset` flag on CSS - /// `box-shadow`. 
- Inset, -} - -impl DisplayItem { - pub fn base(&self) -> &BaseDisplayItem { - match *self { - DisplayItem::SolidColor(ref solid_color) => &solid_color.base, - DisplayItem::Text(ref text) => &text.base, - DisplayItem::Image(ref image_item) => &image_item.base, - DisplayItem::Border(ref border) => &border.base, - DisplayItem::Gradient(ref gradient) => &gradient.base, - DisplayItem::RadialGradient(ref gradient) => &gradient.base, - DisplayItem::Line(ref line) => &line.base, - DisplayItem::BoxShadow(ref box_shadow) => &box_shadow.base, - DisplayItem::PushTextShadow(ref push_text_shadow) => &push_text_shadow.base, - DisplayItem::PopTextShadow(ref pop_text_shadow) => &pop_text_shadow.base, - DisplayItem::Iframe(ref iframe) => &iframe.base, - DisplayItem::PushStackingContext(ref stacking_context) => &stacking_context.base, - DisplayItem::PopStackingContext(ref item) => &item.base, - DisplayItem::DefineClipScrollNode(ref item) => &item.base, - } - } - - pub fn scroll_node_id(&self) -> ClipId { - self.base().clip_and_scroll_info.scroll_node_id - } - - pub fn clip_and_scroll_info(&self) -> ClipAndScrollInfo { - self.base().clip_and_scroll_info - } - - pub fn stacking_context_id(&self) -> StackingContextId { - self.base().stacking_context_id - } - - pub fn section(&self) -> DisplayListSection { - self.base().section - } - - pub fn bounds(&self) -> Rect { - self.base().bounds - } - - pub fn debug_with_level(&self, level: u32) { - let mut indent = String::new(); - for _ in 0..level { - indent.push_str("| ") - } - println!("{}+ {:?}", indent, self); - } - - fn hit_test(&self, - point: Point2D, - offset_lookup: &mut ScrollOffsetLookup) - -> Option { - // TODO(pcwalton): Use a precise algorithm here. This will allow us to properly hit - // test elements with `border-radius`, for example. - let base_item = self.base(); - - let scroll_offset = offset_lookup.full_offset_for_clip_scroll_node(&self.scroll_node_id()); - let point = Point2D::new(point.x - Au::from_f32_px(scroll_offset.x), - point.y - Au::from_f32_px(scroll_offset.y)); - - if !base_item.local_clip.clip_rect().contains(&point.to_pointf()) { - // Clipped out. - return None; - } - if !self.bounds().contains(&point) { - // Can't possibly hit. - return None; - } - if base_item.metadata.pointing.is_none() { - // `pointer-events` is `none`. Ignore this item. - return None; - } - - match *self { - DisplayItem::Border(ref border) => { - // If the point is inside the border, it didn't hit the border! - let interior_rect = - Rect::new( - Point2D::new(border.base.bounds.origin.x + - border.border_widths.left, - border.base.bounds.origin.y + - border.border_widths.top), - Size2D::new(border.base.bounds.size.width - - (border.border_widths.left + - border.border_widths.right), - border.base.bounds.size.height - - (border.border_widths.top + - border.border_widths.bottom))); - if interior_rect.contains(&point) { - return None; - } - } - DisplayItem::BoxShadow(_) => { - // Box shadows can never be hit. 
- return None; - } - _ => {} - } - - Some(base_item.metadata) - } -} - -impl fmt::Debug for DisplayItem { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - if let DisplayItem::PushStackingContext(ref item) = *self { - return write!(f, "PushStackingContext({:?})", item.stacking_context); - } - - if let DisplayItem::PopStackingContext(ref item) = *self { - return write!(f, "PopStackingContext({:?}", item.stacking_context_id); - } - - if let DisplayItem::DefineClipScrollNode(ref item) = *self { - return write!(f, "DefineClipScrollNode({:?}", item.node); - } - - write!(f, "{} @ {:?} {:?}", - match *self { - DisplayItem::SolidColor(ref solid_color) => - format!("SolidColor rgba({}, {}, {}, {})", - solid_color.color.r, - solid_color.color.g, - solid_color.color.b, - solid_color.color.a), - DisplayItem::Text(ref text) => { - format!("Text ({:?})", - &text.text_run.text[ - text.range.begin().0 as usize..(text.range.begin().0 + text.range.length().0) as usize]) - } - DisplayItem::Image(_) => "Image".to_owned(), - DisplayItem::Border(_) => "Border".to_owned(), - DisplayItem::Gradient(_) => "Gradient".to_owned(), - DisplayItem::RadialGradient(_) => "RadialGradient".to_owned(), - DisplayItem::Line(_) => "Line".to_owned(), - DisplayItem::BoxShadow(_) => "BoxShadow".to_owned(), - DisplayItem::PushTextShadow(_) => "PushTextShadow".to_owned(), - DisplayItem::PopTextShadow(_) => "PopTextShadow".to_owned(), - DisplayItem::Iframe(_) => "Iframe".to_owned(), - DisplayItem::PushStackingContext(_) | - DisplayItem::PopStackingContext(_) | - DisplayItem::DefineClipScrollNode(_) => "".to_owned(), - }, - self.bounds(), - self.base().local_clip - ) - } -} - -#[derive(Clone, Copy, Deserialize, HeapSizeOf, Serialize)] -pub struct WebRenderImageInfo { - pub width: u32, - pub height: u32, - pub format: PixelFormat, - pub key: Option, -} - -impl WebRenderImageInfo { - #[inline] - pub fn from_image(image: &Image) -> WebRenderImageInfo { - WebRenderImageInfo { - width: image.width, - height: image.height, - format: image.format, - key: image.id, - } - } -} - -/// The type of the scroll offset list. This is only populated if WebRender is in use. 
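// Editorial sketch (not part of the original patch): how the hit-test path
// above (`DisplayItem::hit_test`) shifts the test point by the accumulated
// scroll offset of the item's clip/scroll node before any bounds checks.
// The u64 node id and f32 points are hypothetical simplifications of
// ClipId and Au.
use std::collections::HashMap;

fn adjust_for_scroll(point: (f32, f32), node: u64, offsets: &HashMap<u64, (f32, f32)>) -> (f32, f32) {
    // Nodes that have never been scrolled simply have no entry in the map.
    let (dx, dy) = offsets.get(&node).copied().unwrap_or((0.0, 0.0));
    (point.0 - dx, point.1 - dy)
}

fn main() {
    let mut offsets = HashMap::new();
    offsets.insert(7u64, (0.0, 120.0)); // node 7 is scrolled down by 120px
    // A click at y = 150 in viewport space lands at y = 30 in node 7's content.
    assert_eq!(adjust_for_scroll((40.0, 150.0), 7, &offsets), (40.0, 30.0));
    // An unscrolled node leaves the point unchanged.
    assert_eq!(adjust_for_scroll((40.0, 150.0), 9, &offsets), (40.0, 150.0));
}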
-pub type ScrollOffsetMap = HashMap>; - - -pub trait SimpleMatrixDetection { - fn is_identity_or_simple_translation(&self) -> bool; -} - -impl SimpleMatrixDetection for Transform3D { - #[inline] - fn is_identity_or_simple_translation(&self) -> bool { - let (_0, _1) = (Zero::zero(), One::one()); - self.m11 == _1 && self.m12 == _0 && self.m13 == _0 && self.m14 == _0 && - self.m21 == _0 && self.m22 == _1 && self.m23 == _0 && self.m24 == _0 && - self.m31 == _0 && self.m32 == _0 && self.m33 == _1 && self.m34 == _0 && - self.m44 == _1 - } -} - -trait ToPointF { - fn to_pointf(&self) -> webrender_api::LayoutPoint; -} - -impl ToPointF for Point2D { - fn to_pointf(&self) -> webrender_api::LayoutPoint { - webrender_api::LayoutPoint::new(self.x.to_f32_px(), self.y.to_f32_px()) - } -} - -trait ToRectF { - fn to_rectf(&self) -> webrender_api::LayoutRect; -} - -impl ToRectF for Rect { - fn to_rectf(&self) -> webrender_api::LayoutRect { - let x = self.origin.x.to_f32_px(); - let y = self.origin.y.to_f32_px(); - let w = self.size.width.to_f32_px(); - let h = self.size.height.to_f32_px(); - let point = webrender_api::LayoutPoint::new(x, y); - let size = webrender_api::LayoutSize::new(w, h); - webrender_api::LayoutRect::new(point, size) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/font.rs b/collector/compile-benchmarks/style-servo/components/gfx/font.rs deleted file mode 100644 index 9c59f71fb..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/font.rs +++ /dev/null @@ -1,331 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use app_units::Au; -use euclid::{Point2D, Rect, Size2D}; -use font_template::FontTemplateDescriptor; -use ordered_float::NotNaN; -use platform::font::{FontHandle, FontTable}; -use platform::font_context::FontContextHandle; -use platform::font_template::FontTemplateData; -use smallvec::SmallVec; -use std::ascii::AsciiExt; -use std::borrow::ToOwned; -use std::cell::RefCell; -use std::collections::HashMap; -use std::rc::Rc; -use std::str; -use std::sync::Arc; -use std::sync::atomic::{ATOMIC_USIZE_INIT, AtomicUsize, Ordering}; -use style::computed_values::{font_stretch, font_variant_caps, font_weight}; -use text::Shaper; -use text::glyph::{ByteIndex, GlyphData, GlyphId, GlyphStore}; -use text::shaping::ShaperMethods; -use time; -use unicode_script::Script; -use webrender_api; - -macro_rules! ot_tag { - ($t1:expr, $t2:expr, $t3:expr, $t4:expr) => ( - (($t1 as u32) << 24) | (($t2 as u32) << 16) | (($t3 as u32) << 8) | ($t4 as u32) - ); -} - -pub const GPOS: u32 = ot_tag!('G', 'P', 'O', 'S'); -pub const GSUB: u32 = ot_tag!('G', 'S', 'U', 'B'); -pub const KERN: u32 = ot_tag!('k', 'e', 'r', 'n'); - -static TEXT_SHAPING_PERFORMANCE_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT; - -// FontHandle encapsulates access to the platform's font API, -// e.g. quartz, FreeType. It provides access to metrics and tables -// needed by the text shaper as well as access to the underlying font -// resources needed by the graphics layer to draw glyphs. 
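// Editorial sketch (not part of the original patch): the big-endian packing
// performed by the `ot_tag!` macro above, written out as a plain function
// (`ot_tag` here is a hypothetical helper name). Each OpenType table tag is
// four ASCII bytes packed into a u32, and unpacking the bytes recovers the
// printable tag, as `FontTableTagConversions::tag_to_str` does further down.
fn ot_tag(t1: char, t2: char, t3: char, t4: char) -> u32 {
    ((t1 as u32) << 24) | ((t2 as u32) << 16) | ((t3 as u32) << 8) | (t4 as u32)
}

fn main() {
    // 'k' = 0x6B, 'e' = 0x65, 'r' = 0x72, 'n' = 0x6E
    assert_eq!(ot_tag('k', 'e', 'r', 'n'), 0x6B65_726E);

    let tag = ot_tag('G', 'P', 'O', 'S');
    let bytes = [(tag >> 24) as u8, (tag >> 16) as u8, (tag >> 8) as u8, tag as u8];
    assert_eq!(std::str::from_utf8(&bytes).unwrap(), "GPOS");
}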
- -pub trait FontHandleMethods: Sized { - fn new_from_template(fctx: &FontContextHandle, template: Arc, pt_size: Option) - -> Result; - fn template(&self) -> Arc; - fn family_name(&self) -> String; - fn face_name(&self) -> Option; - fn is_italic(&self) -> bool; - fn boldness(&self) -> font_weight::T; - fn stretchiness(&self) -> font_stretch::T; - - fn glyph_index(&self, codepoint: char) -> Option; - fn glyph_h_advance(&self, GlyphId) -> Option; - fn glyph_h_kerning(&self, glyph0: GlyphId, glyph1: GlyphId) -> FractionalPixel; - /// Can this font do basic horizontal LTR shaping without Harfbuzz? - fn can_do_fast_shaping(&self) -> bool; - fn metrics(&self) -> FontMetrics; - fn table_for_tag(&self, FontTableTag) -> Option; -} - -// Used to abstract over the shaper's choice of fixed int representation. -pub type FractionalPixel = f64; - -pub type FontTableTag = u32; - -trait FontTableTagConversions { - fn tag_to_str(&self) -> String; -} - -impl FontTableTagConversions for FontTableTag { - fn tag_to_str(&self) -> String { - let bytes = [(self >> 24) as u8, - (self >> 16) as u8, - (self >> 8) as u8, - (self >> 0) as u8]; - str::from_utf8(&bytes).unwrap().to_owned() - } -} - -pub trait FontTableMethods { - fn buffer(&self) -> &[u8]; -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct FontMetrics { - pub underline_size: Au, - pub underline_offset: Au, - pub strikeout_size: Au, - pub strikeout_offset: Au, - pub leading: Au, - pub x_height: Au, - pub em_size: Au, - pub ascent: Au, - pub descent: Au, - pub max_advance: Au, - pub average_advance: Au, - pub line_gap: Au, -} - -#[derive(Debug)] -pub struct Font { - pub handle: FontHandle, - pub metrics: FontMetrics, - pub variant: font_variant_caps::T, - pub descriptor: FontTemplateDescriptor, - pub requested_pt_size: Au, - pub actual_pt_size: Au, - shaper: Option, - shape_cache: RefCell>>, - glyph_advance_cache: RefCell>, - pub font_key: webrender_api::FontInstanceKey, -} - -impl Font { - pub fn new(handle: FontHandle, - variant: font_variant_caps::T, - descriptor: FontTemplateDescriptor, - requested_pt_size: Au, - actual_pt_size: Au, - font_key: webrender_api::FontInstanceKey) -> Font { - let metrics = handle.metrics(); - Font { - handle: handle, - shaper: None, - variant: variant, - descriptor: descriptor, - requested_pt_size: requested_pt_size, - actual_pt_size: actual_pt_size, - metrics: metrics, - shape_cache: RefCell::new(HashMap::new()), - glyph_advance_cache: RefCell::new(HashMap::new()), - font_key: font_key, - } - } -} - -bitflags! { - pub flags ShapingFlags: u8 { - #[doc = "Set if the text is entirely whitespace."] - const IS_WHITESPACE_SHAPING_FLAG = 0x01, - #[doc = "Set if we are to ignore ligatures."] - const IGNORE_LIGATURES_SHAPING_FLAG = 0x02, - #[doc = "Set if we are to disable kerning."] - const DISABLE_KERNING_SHAPING_FLAG = 0x04, - #[doc = "Text direction is right-to-left."] - const RTL_FLAG = 0x08, - #[doc = "Set if word-break is set to keep-all."] - const KEEP_ALL_FLAG = 0x10, - } -} - -/// Various options that control text shaping. -#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] -pub struct ShapingOptions { - /// Spacing to add between each letter. Corresponds to the CSS 2.1 `letter-spacing` property. - /// NB: You will probably want to set the `IGNORE_LIGATURES_SHAPING_FLAG` if this is non-null. - pub letter_spacing: Option, - /// Spacing to add between each word. Corresponds to the CSS 2.1 `word-spacing` property. 
- pub word_spacing: (Au, NotNaN), - /// The Unicode script property of the characters in this run. - pub script: Script, - /// Various flags. - pub flags: ShapingFlags, -} - -/// An entry in the shape cache. -#[derive(Clone, Debug, Eq, Hash, PartialEq)] -struct ShapeCacheEntry { - text: String, - options: ShapingOptions, -} - -impl Font { - pub fn shape_text(&mut self, text: &str, options: &ShapingOptions) -> Arc { - let this = self as *const Font; - let mut shaper = self.shaper.take(); - - let lookup_key = ShapeCacheEntry { - text: text.to_owned(), - options: *options, - }; - let result = self.shape_cache.borrow_mut().entry(lookup_key).or_insert_with(|| { - let start_time = time::precise_time_ns(); - let mut glyphs = GlyphStore::new(text.len(), - options.flags.contains(IS_WHITESPACE_SHAPING_FLAG), - options.flags.contains(RTL_FLAG)); - - if self.can_do_fast_shaping(text, options) { - debug!("shape_text: Using ASCII fast path."); - self.shape_text_fast(text, options, &mut glyphs); - } else { - debug!("shape_text: Using Harfbuzz."); - if shaper.is_none() { - shaper = Some(Shaper::new(this)); - } - shaper.as_ref().unwrap().shape_text(text, options, &mut glyphs); - } - - let end_time = time::precise_time_ns(); - TEXT_SHAPING_PERFORMANCE_COUNTER.fetch_add((end_time - start_time) as usize, - Ordering::Relaxed); - Arc::new(glyphs) - }).clone(); - self.shaper = shaper; - result - } - - fn can_do_fast_shaping(&self, text: &str, options: &ShapingOptions) -> bool { - options.script == Script::Latin && - !options.flags.contains(RTL_FLAG) && - self.handle.can_do_fast_shaping() && - text.is_ascii() - } - - /// Fast path for ASCII text that only needs simple horizontal LTR kerning. - fn shape_text_fast(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore) { - let mut prev_glyph_id = None; - for (i, byte) in text.bytes().enumerate() { - let character = byte as char; - let glyph_id = match self.glyph_index(character) { - Some(id) => id, - None => continue, - }; - - let mut advance = Au::from_f64_px(self.glyph_h_advance(glyph_id)); - if character == ' ' { - // https://drafts.csswg.org/css-text-3/#word-spacing-property - let (length, percent) = options.word_spacing; - advance = (advance + length) + Au((advance.0 as f32 * percent.into_inner()) as i32); - } - if let Some(letter_spacing) = options.letter_spacing { - advance += letter_spacing; - } - let offset = prev_glyph_id.map(|prev| { - let h_kerning = Au::from_f64_px(self.glyph_h_kerning(prev, glyph_id)); - advance += h_kerning; - Point2D::new(h_kerning, Au(0)) - }); - - let glyph = GlyphData::new(glyph_id, advance, offset, true, true); - glyphs.add_glyph_for_byte_index(ByteIndex(i as isize), character, &glyph); - prev_glyph_id = Some(glyph_id); - } - glyphs.finalize_changes(); - } - - pub fn table_for_tag(&self, tag: FontTableTag) -> Option { - let result = self.handle.table_for_tag(tag); - let status = if result.is_some() { "Found" } else { "Didn't find" }; - - debug!("{} font table[{}] with family={}, face={}", - status, tag.tag_to_str(), - self.handle.family_name(), self.handle.face_name().unwrap_or("unavailable".to_owned())); - - result - } - - #[inline] - pub fn glyph_index(&self, codepoint: char) -> Option { - let codepoint = match self.variant { - font_variant_caps::T::small_caps => codepoint.to_uppercase().next().unwrap(), //FIXME: #5938 - font_variant_caps::T::normal => codepoint, - }; - self.handle.glyph_index(codepoint) - } - - pub fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId) - -> FractionalPixel { - 
self.handle.glyph_h_kerning(first_glyph, second_glyph) - } - - pub fn glyph_h_advance(&self, glyph: GlyphId) -> FractionalPixel { - *self.glyph_advance_cache.borrow_mut().entry(glyph).or_insert_with(|| { - match self.handle.glyph_h_advance(glyph) { - Some(adv) => adv, - None => 10f64 as FractionalPixel // FIXME: Need fallback strategy - } - }) - } -} - -#[derive(Debug)] -pub struct FontGroup { - pub fonts: SmallVec<[Rc>; 8]>, -} - -impl FontGroup { - pub fn new(fonts: SmallVec<[Rc>; 8]>) -> FontGroup { - FontGroup { - fonts: fonts, - } - } -} - -pub struct RunMetrics { - // may be negative due to negative width (i.e., kerning of '.' in 'P.T.') - pub advance_width: Au, - pub ascent: Au, // nonzero - pub descent: Au, // nonzero - // this bounding box is relative to the left origin baseline. - // so, bounding_box.position.y = -ascent - pub bounding_box: Rect -} - -impl RunMetrics { - pub fn new(advance: Au, ascent: Au, descent: Au) -> RunMetrics { - let bounds = Rect::new(Point2D::new(Au(0), -ascent), - Size2D::new(advance, ascent + descent)); - - // TODO(Issue #125): support loose and tight bounding boxes; using the - // ascent+descent and advance is sometimes too generous and - // looking at actual glyph extents can yield a tighter box. - - RunMetrics { - advance_width: advance, - bounding_box: bounds, - ascent: ascent, - descent: descent, - } - } -} - -pub fn get_and_reset_text_shaping_performance_counter() -> usize { - let value = TEXT_SHAPING_PERFORMANCE_COUNTER.load(Ordering::SeqCst); - TEXT_SHAPING_PERFORMANCE_COUNTER.store(0, Ordering::SeqCst); - value -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/font_cache_thread.rs b/collector/compile-benchmarks/style-servo/components/gfx/font_cache_thread.rs deleted file mode 100644 index 39fd21009..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/font_cache_thread.rs +++ /dev/null @@ -1,541 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use app_units::Au; -use font_template::{FontTemplate, FontTemplateDescriptor}; -use fontsan; -use ipc_channel::ipc::{self, IpcReceiver, IpcSender}; -use net_traits::{CoreResourceThread, FetchResponseMsg, fetch_async}; -use net_traits::request::{Destination, RequestInit, Type as RequestType}; -use platform::font_context::FontContextHandle; -use platform::font_list::SANS_SERIF_FONT_FAMILY; -use platform::font_list::for_each_available_family; -use platform::font_list::for_each_variation; -use platform::font_list::last_resort_font_families; -use platform::font_list::system_default_family; -use platform::font_template::FontTemplateData; -use servo_atoms::Atom; -use servo_url::ServoUrl; -use std::borrow::ToOwned; -use std::collections::HashMap; -use std::fmt; -use std::mem; -use std::ops::Deref; -use std::sync::{Arc, Mutex}; -use std::thread; -use std::u32; -use style::font_face::{EffectiveSources, Source}; -use style::properties::longhands::font_family::computed_value::{FontFamily, FamilyName}; -use webrender_api; - -/// A list of font templates that make up a given font family. 
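// Editorial sketch (not part of the original patch): the word-spacing
// adjustment applied to a space glyph's advance in the ASCII fast path above
// (`shape_text_fast`). Per css-text-3, `word_spacing` is a (fixed length,
// percentage) pair: the length is added outright and the percentage is taken
// of the glyph's own advance. Plain i32 app units stand in for Au here.
fn space_advance(advance: i32, word_spacing_length: i32, word_spacing_percent: f32) -> i32 {
    (advance + word_spacing_length) + (advance as f32 * word_spacing_percent) as i32
}

fn main() {
    // A 600-unit space with `word-spacing` of 120 units plus 50% becomes
    // (600 + 120) + 600 * 0.5 = 1020 units.
    assert_eq!(space_advance(600, 120, 0.5), 1020);
    // With no word-spacing the advance is unchanged.
    assert_eq!(space_advance(600, 0, 0.0), 600);
}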
-struct FontTemplates { - templates: Vec, -} - -#[derive(Debug, Deserialize, Serialize)] -pub struct FontTemplateInfo { - pub font_template: Arc, - pub font_key: webrender_api::FontKey, -} - -impl FontTemplates { - fn new() -> FontTemplates { - FontTemplates { - templates: vec!(), - } - } - - /// Find a font in this family that matches a given descriptor. - fn find_font_for_style(&mut self, desc: &FontTemplateDescriptor, fctx: &FontContextHandle) - -> Option> { - // TODO(Issue #189): optimize lookup for - // regular/bold/italic/bolditalic with fixed offsets and a - // static decision table for fallback between these values. - for template in &mut self.templates { - let maybe_template = template.data_for_descriptor(fctx, desc); - if maybe_template.is_some() { - return maybe_template; - } - } - - // We didn't find an exact match. Do more expensive fuzzy matching. - // TODO(#190): Do a better job. - let (mut best_template_data, mut best_distance) = (None, u32::MAX); - for template in &mut self.templates { - if let Some((template_data, distance)) = - template.data_for_approximate_descriptor(fctx, desc) { - if distance < best_distance { - best_template_data = Some(template_data); - best_distance = distance - } - } - } - if best_template_data.is_some() { - return best_template_data - } - - // If a request is made for a font family that exists, - // pick the first valid font in the family if we failed - // to find an exact match for the descriptor. - for template in &mut self.templates { - let maybe_template = template.get(); - if maybe_template.is_some() { - return maybe_template; - } - } - - None - } - - fn add_template(&mut self, identifier: Atom, maybe_data: Option>) { - for template in &self.templates { - if *template.identifier() == identifier { - return; - } - } - - if let Ok(template) = FontTemplate::new(identifier, maybe_data) { - self.templates.push(template); - } - } -} - -/// Commands that the FontContext sends to the font cache thread. -#[derive(Debug, Deserialize, Serialize)] -pub enum Command { - GetFontTemplate(FontFamily, FontTemplateDescriptor, IpcSender), - GetLastResortFontTemplate(FontTemplateDescriptor, IpcSender), - GetFontInstance(webrender_api::FontKey, Au, IpcSender), - AddWebFont(LowercaseString, EffectiveSources, IpcSender<()>), - AddDownloadedWebFont(LowercaseString, ServoUrl, Vec, IpcSender<()>), - Exit(IpcSender<()>), -} - -/// Reply messages sent from the font cache thread to the FontContext caller. -#[derive(Debug, Deserialize, Serialize)] -pub enum Reply { - GetFontTemplateReply(Option), -} - -/// The font cache thread itself. It maintains a list of reference counted -/// font templates that are currently in use. 
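// Editorial sketch (not part of the original patch): the request/reply shape
// of the `Command`/`Reply` protocol above and the command loop in
// `FontCache::run`, reduced to std::sync::mpsc instead of ipc-channel.
// `FakeCommand`/`FakeReply` are hypothetical stand-ins, not Servo types.
use std::sync::mpsc;
use std::thread;

enum FakeCommand {
    GetFontTemplate(String, mpsc::Sender<FakeReply>),
    Exit(mpsc::Sender<()>),
}

enum FakeReply {
    GetFontTemplateReply(Option<String>),
}

fn main() {
    let (chan, port) = mpsc::channel();

    // The "font cache thread": loop on commands and answer on the sender
    // embedded in each request.
    let worker = thread::spawn(move || {
        while let Ok(msg) = port.recv() {
            match msg {
                FakeCommand::GetFontTemplate(family, reply) => {
                    let found = if family == "serif" {
                        Some("Times New Roman".to_owned())
                    } else {
                        None
                    };
                    let _ = reply.send(FakeReply::GetFontTemplateReply(found));
                }
                FakeCommand::Exit(reply) => {
                    let _ = reply.send(());
                    break;
                }
            }
        }
    });

    // The "FontContext" side: every query is a blocking round trip.
    let (reply_chan, reply_port) = mpsc::channel();
    chan.send(FakeCommand::GetFontTemplate("serif".to_owned(), reply_chan)).unwrap();
    match reply_port.recv().unwrap() {
        FakeReply::GetFontTemplateReply(template) => {
            assert_eq!(template.as_deref(), Some("Times New Roman"));
        }
    }

    let (exit_chan, exit_port) = mpsc::channel();
    chan.send(FakeCommand::Exit(exit_chan)).unwrap();
    exit_port.recv().unwrap();
    worker.join().unwrap();
}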
-struct FontCache { - port: IpcReceiver, - channel_to_self: IpcSender, - generic_fonts: HashMap, - local_families: HashMap, - web_families: HashMap, - font_context: FontContextHandle, - core_resource_thread: CoreResourceThread, - webrender_api: webrender_api::RenderApi, - webrender_fonts: HashMap, - font_instances: HashMap<(webrender_api::FontKey, Au), webrender_api::FontInstanceKey>, -} - -fn populate_generic_fonts() -> HashMap { - let mut generic_fonts = HashMap::with_capacity(5); - - append_map(&mut generic_fonts, FontFamily::Generic(atom!("serif")), "Times New Roman"); - append_map(&mut generic_fonts, FontFamily::Generic(atom!("sans-serif")), SANS_SERIF_FONT_FAMILY); - append_map(&mut generic_fonts, FontFamily::Generic(atom!("cursive")), "Apple Chancery"); - append_map(&mut generic_fonts, FontFamily::Generic(atom!("fantasy")), "Papyrus"); - append_map(&mut generic_fonts, FontFamily::Generic(atom!("monospace")), "Menlo"); - - fn append_map(generic_fonts: &mut HashMap, - font_family: FontFamily, - mapped_name: &str) { - let family_name = { - let opt_system_default = system_default_family(font_family.name()); - match opt_system_default { - Some(system_default) => LowercaseString::new(&system_default), - None => LowercaseString::new(mapped_name) - } - }; - - generic_fonts.insert(font_family, family_name); - } - - - generic_fonts -} - -impl FontCache { - fn run(&mut self) { - loop { - let msg = self.port.recv().unwrap(); - - match msg { - Command::GetFontTemplate(family, descriptor, result) => { - let maybe_font_template = self.find_font_template(&family, &descriptor); - let _ = result.send(Reply::GetFontTemplateReply(maybe_font_template)); - } - Command::GetLastResortFontTemplate(descriptor, result) => { - let font_template = self.last_resort_font_template(&descriptor); - let _ = result.send(Reply::GetFontTemplateReply(Some(font_template))); - } - Command::GetFontInstance(font_key, size, result) => { - let webrender_api = &self.webrender_api; - - let instance_key = *self.font_instances - .entry((font_key, size)) - .or_insert_with(|| { - let key = webrender_api.generate_font_instance_key(); - let mut updates = webrender_api::ResourceUpdates::new(); - updates.add_font_instance(key, - font_key, - size, - None, - None, - Vec::new()); - webrender_api.update_resources(updates); - key - }); - - let _ = result.send(instance_key); - } - Command::AddWebFont(family_name, sources, result) => { - self.handle_add_web_font(family_name, sources, result); - } - Command::AddDownloadedWebFont(family_name, url, bytes, result) => { - let templates = &mut self.web_families.get_mut(&family_name).unwrap(); - templates.add_template(Atom::from(url.to_string()), Some(bytes)); - drop(result.send(())); - } - Command::Exit(result) => { - let _ = result.send(()); - break; - } - } - } - } - - fn handle_add_web_font(&mut self, - family_name: LowercaseString, - mut sources: EffectiveSources, - sender: IpcSender<()>) { - let src = if let Some(src) = sources.next() { - src - } else { - sender.send(()).unwrap(); - return; - }; - - if !self.web_families.contains_key(&family_name) { - let templates = FontTemplates::new(); - self.web_families.insert(family_name.clone(), templates); - } - - match src { - Source::Url(url_source) => { - // https://drafts.csswg.org/css-fonts/#font-fetching-requirements - let url = match url_source.url.url() { - Some(url) => url.clone(), - None => return, - }; - - let request = RequestInit { - url: url.clone(), - type_: RequestType::Font, - destination: Destination::Font, - // TODO: Add a proper 
origin - Can't import GlobalScope from gfx - // We can leave origin to be set by default - .. RequestInit::default() - }; - - let channel_to_self = self.channel_to_self.clone(); - let bytes = Mutex::new(Vec::new()); - let response_valid = Mutex::new(false); - debug!("Loading @font-face {} from {}", family_name, url); - fetch_async(request, &self.core_resource_thread, move |response| { - match response { - FetchResponseMsg::ProcessRequestBody | - FetchResponseMsg::ProcessRequestEOF => (), - FetchResponseMsg::ProcessResponse(meta_result) => { - trace!("@font-face {} metadata ok={:?}", family_name, meta_result.is_ok()); - *response_valid.lock().unwrap() = meta_result.is_ok(); - } - FetchResponseMsg::ProcessResponseChunk(new_bytes) => { - trace!("@font-face {} chunk={:?}", family_name, new_bytes); - if *response_valid.lock().unwrap() { - bytes.lock().unwrap().extend(new_bytes.into_iter()) - } - } - FetchResponseMsg::ProcessResponseEOF(response) => { - trace!("@font-face {} EOF={:?}", family_name, response); - if response.is_err() || !*response_valid.lock().unwrap() { - let msg = Command::AddWebFont(family_name.clone(), sources.clone(), sender.clone()); - channel_to_self.send(msg).unwrap(); - return; - } - let bytes = mem::replace(&mut *bytes.lock().unwrap(), vec![]); - trace!("@font-face {} data={:?}", family_name, bytes); - let bytes = match fontsan::process(&bytes) { - Ok(san) => san, - Err(_) => { - // FIXME(servo/fontsan#1): get an error message - debug!("Sanitiser rejected web font: \ - family={} url={:?}", family_name, url); - let msg = Command::AddWebFont(family_name.clone(), sources.clone(), sender.clone()); - channel_to_self.send(msg).unwrap(); - return; - }, - }; - let command = - Command::AddDownloadedWebFont(family_name.clone(), - url.clone(), - bytes, - sender.clone()); - channel_to_self.send(command).unwrap(); - } - } - }); - } - Source::Local(ref font) => { - let font_face_name = LowercaseString::new(&font.name); - let templates = &mut self.web_families.get_mut(&family_name).unwrap(); - let mut found = false; - for_each_variation(&font_face_name, |path| { - found = true; - templates.add_template(Atom::from(&*path), None); - }); - if found { - sender.send(()).unwrap(); - } else { - let msg = Command::AddWebFont(family_name, sources, sender); - self.channel_to_self.send(msg).unwrap(); - } - } - } - } - - fn refresh_local_families(&mut self) { - self.local_families.clear(); - for_each_available_family(|family_name| { - let family_name = LowercaseString::new(&family_name); - if !self.local_families.contains_key(&family_name) { - let templates = FontTemplates::new(); - self.local_families.insert(family_name, templates); - } - }); - } - - fn transform_family(&self, family: &FontFamily) -> LowercaseString { - match self.generic_fonts.get(family) { - None => LowercaseString::new(family.name()), - Some(mapped_family) => (*mapped_family).clone() - } - } - - fn find_font_in_local_family(&mut self, family_name: &LowercaseString, desc: &FontTemplateDescriptor) - -> Option> { - // TODO(Issue #188): look up localized font family names if canonical name not found - // look up canonical name - if self.local_families.contains_key(family_name) { - debug!("FontList: Found font family with name={}", &**family_name); - let s = self.local_families.get_mut(family_name).unwrap(); - - if s.templates.is_empty() { - for_each_variation(family_name, |path| { - s.add_template(Atom::from(&*path), None); - }); - } - - // TODO(Issue #192: handle generic font families, like 'serif' and 'sans-serif'. 
- // if such family exists, try to match style to a font - - s.find_font_for_style(desc, &self.font_context) - } else { - debug!("FontList: Couldn't find font family with name={}", &**family_name); - None - } - } - - fn find_font_in_web_family(&mut self, family: &FontFamily, desc: &FontTemplateDescriptor) - -> Option> { - let family_name = LowercaseString::new(family.name()); - - if self.web_families.contains_key(&family_name) { - let templates = self.web_families.get_mut(&family_name).unwrap(); - templates.find_font_for_style(desc, &self.font_context) - } else { - None - } - } - - fn get_font_template_info(&mut self, template: Arc) -> FontTemplateInfo { - let webrender_api = &self.webrender_api; - let webrender_fonts = &mut self.webrender_fonts; - - let font_key = *webrender_fonts.entry(template.identifier.clone()).or_insert_with(|| { - let font_key = webrender_api.generate_font_key(); - let mut updates = webrender_api::ResourceUpdates::new(); - match (template.bytes_if_in_memory(), template.native_font()) { - (Some(bytes), _) => updates.add_raw_font(font_key, bytes, 0), - (None, Some(native_font)) => updates.add_native_font(font_key, native_font), - (None, None) => updates.add_raw_font(font_key, template.bytes().clone(), 0), - } - webrender_api.update_resources(updates); - font_key - }); - - FontTemplateInfo { - font_template: template, - font_key: font_key, - } - } - - fn find_font_template(&mut self, family: &FontFamily, desc: &FontTemplateDescriptor) - -> Option { - let template = self.find_font_in_web_family(family, desc) - .or_else(|| { - let transformed_family = self.transform_family(family); - self.find_font_in_local_family(&transformed_family, desc) - }); - - template.map(|template| { - self.get_font_template_info(template) - }) - } - - fn last_resort_font_template(&mut self, desc: &FontTemplateDescriptor) - -> FontTemplateInfo { - let last_resort = last_resort_font_families(); - - for family in &last_resort { - let family = LowercaseString::new(family); - let maybe_font_in_family = self.find_font_in_local_family(&family, desc); - if let Some(family) = maybe_font_in_family { - return self.get_font_template_info(family) - } - } - - panic!("Unable to find any fonts that match (do you have fallback fonts installed?)"); - } -} - -/// The public interface to the font cache thread, used exclusively by -/// the per-thread/thread FontContext structures. -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct FontCacheThread { - chan: IpcSender, -} - -impl FontCacheThread { - pub fn new(core_resource_thread: CoreResourceThread, - webrender_api: webrender_api::RenderApi) -> FontCacheThread { - let (chan, port) = ipc::channel().unwrap(); - - let channel_to_self = chan.clone(); - thread::Builder::new().name("FontCacheThread".to_owned()).spawn(move || { - // TODO: Allow users to specify these. 
- let generic_fonts = populate_generic_fonts(); - - let mut cache = FontCache { - port: port, - channel_to_self, - generic_fonts, - local_families: HashMap::new(), - web_families: HashMap::new(), - font_context: FontContextHandle::new(), - core_resource_thread, - webrender_api, - webrender_fonts: HashMap::new(), - font_instances: HashMap::new(), - }; - - cache.refresh_local_families(); - cache.run(); - }).expect("Thread spawning failed"); - - FontCacheThread { - chan: chan, - } - } - - pub fn find_font_template(&self, family: FontFamily, desc: FontTemplateDescriptor) - -> Option { - let (response_chan, response_port) = - ipc::channel().expect("failed to create IPC channel"); - self.chan.send(Command::GetFontTemplate(family, desc, response_chan)) - .expect("failed to send message to font cache thread"); - - let reply = response_port.recv() - .expect("failed to receive response to font request"); - - match reply { - Reply::GetFontTemplateReply(data) => { - data - } - } - } - - pub fn last_resort_font_template(&self, desc: FontTemplateDescriptor) - -> FontTemplateInfo { - let (response_chan, response_port) = - ipc::channel().expect("failed to create IPC channel"); - self.chan.send(Command::GetLastResortFontTemplate(desc, response_chan)) - .expect("failed to send message to font cache thread"); - - let reply = response_port.recv() - .expect("failed to receive response to font request"); - - match reply { - Reply::GetFontTemplateReply(data) => { - data.unwrap() - } - } - } - - pub fn add_web_font(&self, family: FamilyName, sources: EffectiveSources, sender: IpcSender<()>) { - self.chan.send(Command::AddWebFont(LowercaseString::new(&family.name), sources, sender)).unwrap(); - } - - pub fn get_font_instance(&self, key: webrender_api::FontKey, size: Au) -> webrender_api::FontInstanceKey { - let (response_chan, response_port) = - ipc::channel().expect("failed to create IPC channel"); - self.chan.send(Command::GetFontInstance(key, size, response_chan)) - .expect("failed to send message to font cache thread"); - - let instance_key = response_port.recv() - .expect("failed to receive response to font request"); - - instance_key - } - - pub fn exit(&self) { - let (response_chan, response_port) = ipc::channel().unwrap(); - self.chan.send(Command::Exit(response_chan)).expect("Couldn't send FontCacheThread exit message"); - response_port.recv().expect("Couldn't receive FontCacheThread reply"); - } -} - - -#[derive(Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)] -pub struct LowercaseString { - inner: String, -} - -impl LowercaseString { - pub fn new(s: &str) -> LowercaseString { - LowercaseString { - inner: s.to_lowercase(), - } - } -} - -impl Deref for LowercaseString { - type Target = str; - - #[inline] - fn deref(&self) -> &str { - &*self.inner - } -} - -impl fmt::Display for LowercaseString { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.inner.fmt(f) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/font_context.rs b/collector/compile-benchmarks/style-servo/components/gfx/font_context.rs deleted file mode 100644 index b5d2298d1..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/font_context.rs +++ /dev/null @@ -1,265 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -use app_units::Au; -use fnv::FnvHasher; -use font::{Font, FontGroup, FontHandleMethods}; -use font_cache_thread::FontCacheThread; -use font_template::FontTemplateDescriptor; -use heapsize::HeapSizeOf; -use platform::font::FontHandle; -use platform::font_context::FontContextHandle; -use platform::font_template::FontTemplateData; -use servo_arc::Arc as ServoArc; -use smallvec::SmallVec; -use std::cell::RefCell; -use std::collections::HashMap; -use std::default::Default; -use std::hash::{BuildHasherDefault, Hash, Hasher}; -use std::rc::Rc; -use std::sync::Arc; -use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT}; -use style::computed_values::{font_style, font_variant_caps}; -use style::properties::style_structs; -use webrender_api; - -static SMALL_CAPS_SCALE_FACTOR: f32 = 0.8; // Matches FireFox (see gfxFont.h) - -#[derive(Debug)] -struct LayoutFontCacheEntry { - family: String, - font: Option>>, -} - -#[derive(Debug)] -struct FallbackFontCacheEntry { - font: Rc>, -} - -/// An epoch for the font context cache. The cache is flushed if the current epoch does not match -/// this one. -static FONT_CACHE_EPOCH: AtomicUsize = ATOMIC_USIZE_INIT; - -/// The FontContext represents the per-thread/thread state necessary for -/// working with fonts. It is the public API used by the layout and -/// paint code. It talks directly to the font cache thread where -/// required. -#[derive(Debug)] -pub struct FontContext { - platform_handle: FontContextHandle, - font_cache_thread: FontCacheThread, - - /// TODO: See bug https://github.com/servo/servo/issues/3300. - layout_font_cache: Vec, - fallback_font_cache: Vec, - - layout_font_group_cache: - HashMap, BuildHasherDefault>, - - epoch: usize, -} - -impl FontContext { - pub fn new(font_cache_thread: FontCacheThread) -> FontContext { - let handle = FontContextHandle::new(); - FontContext { - platform_handle: handle, - font_cache_thread: font_cache_thread, - layout_font_cache: vec!(), - fallback_font_cache: vec!(), - layout_font_group_cache: HashMap::with_hasher(Default::default()), - epoch: 0, - } - } - - /// Create a font for use in layout calculations. - fn create_layout_font(&self, - template: Arc, - descriptor: FontTemplateDescriptor, - pt_size: Au, - variant: font_variant_caps::T, - font_key: webrender_api::FontKey) -> Result { - // TODO: (Bug #3463): Currently we only support fake small-caps - // painting. We should also support true small-caps (where the - // font supports it) in the future. - let actual_pt_size = match variant { - font_variant_caps::T::small_caps => pt_size.scale_by(SMALL_CAPS_SCALE_FACTOR), - font_variant_caps::T::normal => pt_size, - }; - - let handle = FontHandle::new_from_template(&self.platform_handle, - template, - Some(actual_pt_size))?; - - let font_instance_key = self.font_cache_thread - .get_font_instance(font_key, actual_pt_size); - Ok(Font::new(handle, variant, descriptor, pt_size, actual_pt_size, font_instance_key)) - } - - fn expire_font_caches_if_necessary(&mut self) { - let current_epoch = FONT_CACHE_EPOCH.load(Ordering::SeqCst); - if current_epoch == self.epoch { - return - } - - self.layout_font_cache.clear(); - self.fallback_font_cache.clear(); - self.layout_font_group_cache.clear(); - self.epoch = current_epoch - } - - /// Create a group of fonts for use in layout calculations. May return - /// a cached font if this font instance has already been used by - /// this context. 
- pub fn layout_font_group_for_style(&mut self, style: ServoArc) - -> Rc { - self.expire_font_caches_if_necessary(); - - let layout_font_group_cache_key = LayoutFontGroupCacheKey { - pointer: style.clone(), - size: style.font_size.size(), - }; - if let Some(ref cached_font_group) = self.layout_font_group_cache.get( - &layout_font_group_cache_key) { - return (*cached_font_group).clone() - } - - // TODO: The font context holds a strong ref to the cached fonts - // so they will never be released. Find out a good time to drop them. - - let desc = FontTemplateDescriptor::new(style.font_weight, - style.font_stretch, - style.font_style == font_style::T::italic || - style.font_style == font_style::T::oblique); - - let mut fonts: SmallVec<[Rc>; 8]> = SmallVec::new(); - - for family in style.font_family.0.iter() { - // GWTODO: Check on real pages if this is faster as Vec() or HashMap(). - let mut cache_hit = false; - for cached_font_entry in &self.layout_font_cache { - if cached_font_entry.family == family.name() { - match cached_font_entry.font { - None => { - cache_hit = true; - break; - } - Some(ref cached_font_ref) => { - let cached_font = (*cached_font_ref).borrow(); - if cached_font.descriptor == desc && - cached_font.requested_pt_size == style.font_size.size() && - cached_font.variant == style.font_variant_caps { - fonts.push((*cached_font_ref).clone()); - cache_hit = true; - break; - } - } - } - } - } - - if !cache_hit { - let template_info = self.font_cache_thread.find_font_template(family.clone(), - desc.clone()); - match template_info { - Some(template_info) => { - let layout_font = self.create_layout_font(template_info.font_template, - desc.clone(), - style.font_size.size(), - style.font_variant_caps, - template_info.font_key); - let font = match layout_font { - Ok(layout_font) => { - let layout_font = Rc::new(RefCell::new(layout_font)); - fonts.push(layout_font.clone()); - - Some(layout_font) - } - Err(_) => None - }; - - self.layout_font_cache.push(LayoutFontCacheEntry { - family: family.name().to_owned(), - font: font - }); - } - None => { - self.layout_font_cache.push(LayoutFontCacheEntry { - family: family.name().to_owned(), - font: None, - }); - } - } - } - } - - // Add a last resort font as a fallback option. - let mut cache_hit = false; - for cached_font_entry in &self.fallback_font_cache { - let cached_font = cached_font_entry.font.borrow(); - if cached_font.descriptor == desc && - cached_font.requested_pt_size == style.font_size.size() && - cached_font.variant == style.font_variant_caps { - fonts.push(cached_font_entry.font.clone()); - cache_hit = true; - break; - } - } - - if !cache_hit { - let template_info = self.font_cache_thread.last_resort_font_template(desc.clone()); - let layout_font = self.create_layout_font(template_info.font_template, - desc.clone(), - style.font_size.size(), - style.font_variant_caps, - template_info.font_key); - match layout_font { - Ok(layout_font) => { - let layout_font = Rc::new(RefCell::new(layout_font)); - self.fallback_font_cache.push(FallbackFontCacheEntry { - font: layout_font.clone(), - }); - fonts.push(layout_font); - } - Err(_) => debug!("Failed to create fallback layout font!") - } - } - - let font_group = Rc::new(FontGroup::new(fonts)); - self.layout_font_group_cache.insert(layout_font_group_cache_key, font_group.clone()); - font_group - } -} - -impl HeapSizeOf for FontContext { - fn heap_size_of_children(&self) -> usize { - // FIXME(njn): Measure other fields eventually. 
- self.platform_handle.heap_size_of_children() - } -} - -#[derive(Debug)] -struct LayoutFontGroupCacheKey { - pointer: ServoArc, - size: Au, -} - -impl PartialEq for LayoutFontGroupCacheKey { - fn eq(&self, other: &LayoutFontGroupCacheKey) -> bool { - self.pointer == other.pointer && self.size == other.size - } -} - -impl Eq for LayoutFontGroupCacheKey {} - -impl Hash for LayoutFontGroupCacheKey { - fn hash(&self, hasher: &mut H) where H: Hasher { - self.pointer.hash.hash(hasher) - } -} - -#[inline] -pub fn invalidate_font_caches() { - FONT_CACHE_EPOCH.fetch_add(1, Ordering::SeqCst); -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/font_template.rs b/collector/compile-benchmarks/style-servo/components/gfx/font_template.rs deleted file mode 100644 index 2c88384a4..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/font_template.rs +++ /dev/null @@ -1,209 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use font::FontHandleMethods; -use platform::font::FontHandle; -use platform::font_context::FontContextHandle; -use platform::font_template::FontTemplateData; -use servo_atoms::Atom; -use std::fmt::{Debug, Error, Formatter}; -use std::io::Error as IoError; -use std::sync::{Arc, Weak}; -use std::u32; -use style::computed_values::{font_stretch, font_weight}; - -/// Describes how to select a font from a given family. This is very basic at the moment and needs -/// to be expanded or refactored when we support more of the font styling parameters. -/// -/// NB: If you change this, you will need to update `style::properties::compute_font_hash()`. -#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, Serialize)] -pub struct FontTemplateDescriptor { - pub weight: font_weight::T, - pub stretch: font_stretch::T, - pub italic: bool, -} - -impl FontTemplateDescriptor { - #[inline] - pub fn new(weight: font_weight::T, stretch: font_stretch::T, italic: bool) - -> FontTemplateDescriptor { - FontTemplateDescriptor { - weight: weight, - stretch: stretch, - italic: italic, - } - } - - /// Returns a score indicating how far apart visually the two font descriptors are. This is - /// used for fuzzy font selection. - /// - /// The smaller the score, the better the fonts match. 0 indicates an exact match. This must - /// be commutative (distance(A, B) == distance(B, A)). - #[inline] - fn distance_from(&self, other: &FontTemplateDescriptor) -> u32 { - if self.stretch != other.stretch || self.italic != other.italic { - // A value higher than all weights. - return 1000 - } - ((self.weight.0 as i16) - (other.weight.0 as i16)).abs() as u32 - } -} - -impl PartialEq for FontTemplateDescriptor { - fn eq(&self, other: &FontTemplateDescriptor) -> bool { - self.weight == other.weight && self.stretch == other.stretch && self.italic == other.italic - } -} - -/// This describes all the information needed to create -/// font instance handles. It contains a unique -/// FontTemplateData structure that is platform specific. -pub struct FontTemplate { - identifier: Atom, - descriptor: Option, - weak_ref: Option>, - // GWTODO: Add code path to unset the strong_ref for web fonts! 
- strong_ref: Option>, - is_valid: bool, -} - -impl Debug for FontTemplate { - fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { - self.identifier.fmt(f) - } -} - -/// Holds all of the template information for a font that -/// is common, regardless of the number of instances of -/// this font handle per thread. -impl FontTemplate { - pub fn new(identifier: Atom, maybe_bytes: Option>) -> Result { - let maybe_data = match maybe_bytes { - Some(_) => Some(FontTemplateData::new(identifier.clone(), maybe_bytes)?), - None => None, - }; - - let maybe_strong_ref = match maybe_data { - Some(data) => Some(Arc::new(data)), - None => None, - }; - - let maybe_weak_ref = match maybe_strong_ref { - Some(ref strong_ref) => Some(Arc::downgrade(strong_ref)), - None => None, - }; - - Ok(FontTemplate { - identifier: identifier, - descriptor: None, - weak_ref: maybe_weak_ref, - strong_ref: maybe_strong_ref, - is_valid: true, - }) - } - - pub fn identifier(&self) -> &Atom { - &self.identifier - } - - /// Get the data for creating a font if it matches a given descriptor. - pub fn data_for_descriptor(&mut self, - fctx: &FontContextHandle, - requested_desc: &FontTemplateDescriptor) - -> Option> { - // The font template data can be unloaded when nothing is referencing - // it (via the Weak reference to the Arc above). However, if we have - // already loaded a font, store the style information about it separately, - // so that we can do font matching against it again in the future - // without having to reload the font (unless it is an actual match). - match self.descriptor { - Some(actual_desc) if *requested_desc == actual_desc => self.data().ok(), - Some(_) => None, - None => { - if self.instantiate(fctx).is_err() { - return None - } - - if self.descriptor - .as_ref() - .expect("Instantiation succeeded but no descriptor?") == requested_desc { - self.data().ok() - } else { - None - } - } - } - } - - /// Returns the font data along with the distance between this font's descriptor and the given - /// descriptor, if the font can be loaded. - pub fn data_for_approximate_descriptor(&mut self, - font_context: &FontContextHandle, - requested_descriptor: &FontTemplateDescriptor) - -> Option<(Arc, u32)> { - match self.descriptor { - Some(actual_descriptor) => { - self.data().ok().map(|data| { - (data, actual_descriptor.distance_from(requested_descriptor)) - }) - } - None => { - if self.instantiate(font_context).is_ok() { - let distance = self.descriptor - .as_ref() - .expect("Instantiation successful but no descriptor?") - .distance_from(requested_descriptor); - self.data().ok().map(|data| (data, distance)) - } else { - None - } - } - } - } - - fn instantiate(&mut self, font_context: &FontContextHandle) -> Result<(), ()> { - if !self.is_valid { - return Err(()) - } - - let data = self.data().map_err(|_| ())?; - let handle: Result = FontHandleMethods::new_from_template(font_context, - data, - None); - self.is_valid = handle.is_ok(); - let handle = handle?; - self.descriptor = Some(FontTemplateDescriptor::new(handle.boldness(), - handle.stretchiness(), - handle.is_italic())); - Ok(()) - } - - /// Get the data for creating a font. - pub fn get(&mut self) -> Option> { - if self.is_valid { - self.data().ok() - } else { - None - } - } - - /// Get the font template data. If any strong references still - /// exist, it will return a clone, otherwise it will load the - /// font data and store a weak reference to it internally. 
- pub fn data(&mut self) -> Result, IoError> { - let maybe_data = match self.weak_ref { - Some(ref data) => data.upgrade(), - None => None, - }; - - if let Some(data) = maybe_data { - return Ok(data) - } - - assert!(self.strong_ref.is_none()); - let template_data = Arc::new(FontTemplateData::new(self.identifier.clone(), None)?); - self.weak_ref = Some(Arc::downgrade(&template_data)); - Ok(template_data) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/lib.rs b/collector/compile-benchmarks/style-servo/components/gfx/lib.rs deleted file mode 100644 index 97bda55b5..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/lib.rs +++ /dev/null @@ -1,85 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -// For SIMD -#![cfg_attr(any(target_os = "linux", target_os = "android"), feature(allocator_api))] -#![feature(box_syntax)] -#![feature(cfg_target_feature)] -#![feature(range_contains)] - -#![deny(unsafe_code)] - -extern crate app_units; -#[macro_use] -extern crate bitflags; - -// Mac OS-specific library dependencies -#[cfg(target_os = "macos")] extern crate byteorder; -#[cfg(target_os = "macos")] extern crate core_foundation; -#[cfg(target_os = "macos")] extern crate core_graphics; -#[cfg(target_os = "macos")] extern crate core_text; - -// Windows-specific library dependencies -#[cfg(target_os = "windows")] extern crate dwrote; -#[cfg(target_os = "windows")] extern crate truetype; - -extern crate euclid; -extern crate fnv; - -#[cfg(target_os = "linux")] -extern crate fontconfig; -extern crate fontsan; -#[cfg(any(target_os = "linux", target_os = "android"))] -extern crate freetype; -extern crate gfx_traits; - -// Eventually we would like the shaper to be pluggable, as many operating systems have their own -// shapers. For now, however, this is a hard dependency. -extern crate harfbuzz_sys as harfbuzz; - -extern crate heapsize; -#[macro_use] extern crate heapsize_derive; -extern crate ipc_channel; -#[macro_use] -extern crate lazy_static; -extern crate libc; -#[macro_use] -extern crate log; -extern crate msg; -extern crate net_traits; -extern crate ordered_float; -extern crate range; -#[macro_use] extern crate serde; -extern crate servo_arc; -extern crate servo_geometry; -extern crate servo_url; -#[macro_use] extern crate servo_atoms; -#[cfg(any(target_feature = "sse2", target_feature = "neon"))] -extern crate simd; -extern crate smallvec; -extern crate style; -extern crate style_traits; -extern crate time; -extern crate unicode_bidi; -extern crate unicode_script; -extern crate webrender_api; -extern crate xi_unicode; -#[cfg(target_os = "android")] -extern crate xml5ever; - -#[deny(unsafe_code)] -pub mod display_list; - -// Fonts -#[macro_use] pub mod font; -pub mod font_cache_thread; -pub mod font_context; -pub mod font_template; - -// Platform-specific implementations. 
-#[allow(unsafe_code)] -mod platform; - -// Text -pub mod text; diff --git a/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/android/font_list.rs b/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/android/font_list.rs deleted file mode 100644 index 2ff8be5a3..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/android/font_list.rs +++ /dev/null @@ -1,481 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use std::cell::RefCell; -use std::fs::File; -use std::io::{self, Read}; -use std::path::Path; -use xml5ever::Attribute; -use xml5ever::driver::parse_document; -use xml5ever::rcdom::*; -use xml5ever::rcdom::{Node, RcDom}; -use xml5ever::tendril::TendrilSink; - -lazy_static! { - static ref FONT_LIST: FontList = FontList::new(); -} - -// Android doesn't provide an API to query system fonts until Android O: -// https://developer.android.com/reference/android/text/FontConfig.html -// System font configuration files must be parsed until Android O version is set as the minimum target. -// Android uses XML files to handle font mapping configurations. -// On Android API 21+ font mappings are loaded from /etc/fonts.xml. -// Each entry consists of a family with various font names, or a font alias. -// Example: -// -// -// -// Roboto-Thin.ttf -// Roboto-ThinItalic.ttf -// Roboto-Light.ttf -// Roboto-LightItalic.ttf -// Roboto-Regular.ttf -// Roboto-Italic.ttf -// Roboto-Medium.ttf -// Roboto-MediumItalic.ttf -// Roboto-Black.ttf -// Roboto-BlackItalic.ttf -// Roboto-Bold.ttf -// Roboto-BoldItalic.ttf -// // - -// -// -// -// -// -// -// -// -// - -// -// RobotoCondensed-Light.ttf -// RobotoCondensed-LightItalic.ttf -// RobotoCondensed-Regular.ttf -// RobotoCondensed-Italic.ttf -// RobotoCondensed-Bold.ttf -// RobotoCondensed-BoldItalic.ttf -// -// -// -// On Android API 17-20 font mappings are loaded from /system/etc/system_fonts.xml -// Each entry consists of a family with a nameset and a fileset. -// Example: -// -// -// -// sans-serif -// arial -// helvetica -// tahoma -// verdana -// -// -// Roboto-Regular.ttf -// Roboto-Bold.ttf -// Roboto-Italic.ttf -// Roboto-BoldItalic.ttf -// -// // - -// -// -// sans-serif-light -// -// -// Roboto-Light.ttf -// Roboto-LightItalic.ttf -// -// // - -// -// -// sans-serif-thin -// -// -// Roboto-Thin.ttf -// Roboto-ThinItalic.ttf -// -// -// - -struct Font { - filename: String, - weight: Option, -} - -struct FontFamily { - name: String, - fonts: Vec, -} - -struct FontAlias { - from: String, - to: String, - weight: Option -} - -struct FontList { - families: Vec, - aliases: Vec -} - -impl FontList { - fn new() -> FontList { - // Possible paths containing the font mapping xml file. - let paths = [ - "/etc/fonts.xml", - "/system/etc/system_fonts.xml" - ]; - - // Try to load and parse paths until one of them success. - let mut result = None; - paths.iter().all(|path| { - result = Self::from_path(path); - !result.is_some() - }); - - match result { - Some(result) => result, - // If no xml mapping file is found fallback to some default - // fonts expected to be on all Android devices. - None => FontList { - families: Self::fallback_font_families(), - aliases: Vec::new(), - } - } - } - - // Creates a new FontList from a path to the font mapping xml file. 
- fn from_path(path: &str) -> Option { - let xml = match Self::load_file(path) { - Ok(xml) => xml, - _=> { return None; }, - }; - - let dom: RcDom = parse_document(RcDom::default(), Default::default()) - .one(xml); - let doc = &dom.document; - - // find familyset root node - let children = doc.children.borrow(); - let familyset = children.iter().find(|child| { - match child.data { - NodeData::Element { ref name, .. } => &*name.local == "familyset", - _ => false, - } - }); - - let familyset = match familyset { - Some(node) => node, - _ => { return None; } - }; - - // Parse familyset node - let mut families = Vec::new(); - let mut aliases = Vec::new(); - - for node in familyset.children.borrow().iter() { - match node.data { - NodeData::Element { ref name, ref attrs, .. } => { - if &*name.local == "family" { - Self::parse_family(&node, attrs, &mut families); - } else if &*name.local == "alias" { - // aliases come after the fonts they reference. --> - if !families.is_empty() { - Self::parse_alias(attrs, &mut aliases); - } - } - }, - _=> {} - } - } - - Some(FontList { - families: families, - aliases: aliases - }) - } - - // Fonts expected to exist in Android devices. - // Only used in the unlikely case where no font xml mapping files are found. - fn fallback_font_families() -> Vec { - let alternatives = [ - ("san-serif", "Roboto-Regular.ttf"), - ("Droid Sans", "DroidSans.ttf"), - ]; - - alternatives.iter().filter(|item| { - Path::new(&Self::font_absolute_path(item.1)).exists() - }).map(|item| { - FontFamily { - name: item.0.into(), - fonts: vec![Font { - filename: item.1.into(), - weight: None, - }] - } - }). collect() - } - - // All Android fonts are located in /system/fonts - fn font_absolute_path(filename: &str) -> String { - format!("/system/fonts/{}", filename) - } - - fn find_family(&self, name: &str) -> Option<&FontFamily>{ - self.families.iter().find(|f| f.name == name) - } - - fn find_alias(&self, name: &str) -> Option<&FontAlias>{ - self.aliases.iter().find(|f| f.from == name) - } - - - fn load_file(path: &str) -> Result { - let mut file = File::open(path)?; - let mut content = String::new(); - file.read_to_string(&mut content)?; - - Ok(content) - } - - // Parse family and font file names - // Example: - // - // Roboto-Thin.ttf - // Roboto-ThinItalic.ttf - // Roboto-Light.ttf - // Roboto-LightItalic.ttf - // Roboto-Regular.ttf - // - fn parse_family(familyset: &Node, attrs: &RefCell>, out:&mut Vec) { - // Fallback to old Android API v17 xml format if required - let using_api_17 = familyset.children.borrow().iter().any(|node| { - match node.data { - NodeData::Element { ref name, .. } => &*name.local == "nameset", - _=> false, - } - }); - if using_api_17 { - Self::parse_family_v17(familyset, out); - return; - } - - // Parse family name - let name = match Self::find_attrib("name", attrs) { - Some(name) => name, - _ => { return; }, - }; - - let mut fonts = Vec::new(); - // Parse font variants - for node in familyset.children.borrow().iter() { - match node.data { - NodeData::Element { ref name, ref attrs, .. 
} => { - if &*name.local == "font" { - FontList::parse_font(&node, attrs, &mut fonts); - } - }, - _=> {} - } - } - - out.push(FontFamily { - name: name, - fonts: fonts - }); - } - - // Parse family and font file names for Androi API < 21 - // Example: - // - // - // sans-serif - // arial - // helvetica - // tahoma - // verdana - // - // - // Roboto-Regular.ttf - // Roboto-Bold.ttf - // Roboto-Italic.ttf - // Roboto-BoldItalic.ttf - // - // - fn parse_family_v17(familyset: &Node, out:&mut Vec) { - let mut nameset = Vec::new(); - let mut fileset = Vec::new(); - for node in familyset.children.borrow().iter() { - match node.data { - NodeData::Element { ref name, .. } => { - if &*name.local == "nameset" { - Self::collect_contents_with_tag(node, "name", &mut nameset); - } else if &*name.local == "fileset" { - Self::collect_contents_with_tag(node, "file", &mut fileset); - } - }, - _=> {} - } - } - - // Create a families for each variation - for name in nameset { - let fonts: Vec = fileset.iter().map(|f| Font { - filename: f.clone(), - weight: None, - }).collect(); - - if !fonts.is_empty() { - out.push(FontFamily { - name: name, - fonts: fonts - }) - } - } - } - - // Example: - // Roboto-Thin.ttf - fn parse_font(node: &Node, attrs: &RefCell>, out:&mut Vec) { - // Parse font filename - let filename = match Self::text_content(node) { - Some(filename) => filename, - _ => { return; } - }; - - // Parse font weight - let weight = Self::find_attrib("weight", attrs).and_then(|w| w.parse().ok()); - - out.push(Font { - filename: filename, - weight: weight, - }) - } - - // Example: - // - // - // - // - // - // - // - // - fn parse_alias(attrs: &RefCell>, out:&mut Vec) { - // Parse alias name and referenced font - let from = match Self::find_attrib("name", attrs) { - Some(from) => from, - _ => { return; }, - }; - - // Parse referenced font - let to = match Self::find_attrib("to", attrs) { - Some(to) => to, - _ => { return; }, - }; - - // Parse optional weight filter - let weight = Self::find_attrib("weight", attrs).and_then(|w| w.parse().ok()); - - out.push(FontAlias { - from: from, - to: to, - weight: weight, - }) - } - - fn find_attrib(name: &str, attrs: &RefCell>) -> Option { - attrs.borrow().iter().find(|attr| &*attr.name.local == name).map(|s| String::from(&s.value)) - } - - fn text_content(node: &Node) -> Option { - node.children.borrow().get(0).and_then(|child| { - match child.data { - NodeData::Text { ref contents } => { - let mut result = String::new(); - result.push_str(&contents.borrow()); - Some(result) - }, - _ => None - } - }) - } - - fn collect_contents_with_tag(node: &Node, tag: &str, out:&mut Vec) { - for child in node.children.borrow().iter() { - match child.data { - NodeData::Element { ref name, .. 
} => { - if &*name.local == tag { - if let Some(content) = Self::text_content(child) { - out.push(content); - } - } - }, - _=> {} - } - } - } -} - -// Functions used by FontCacheThread -pub fn for_each_available_family(mut callback: F) where F: FnMut(String) { - for family in &FONT_LIST.families { - callback(family.name.clone()); - } - for alias in &FONT_LIST.aliases { - callback(alias.from.clone()); - } -} - -pub fn for_each_variation(family_name: &str, mut callback: F) - where F: FnMut(String) -{ - println!("Variatioooon {:?}", family_name); - if let Some(family) = FONT_LIST.find_family(family_name) { - for font in &family.fonts { - callback(FontList::font_absolute_path(&font.filename)); - } - return; - } - - if let Some(alias) = FONT_LIST.find_alias(family_name) { - if let Some(family) = FONT_LIST.find_family(&alias.to) { - for font in &family.fonts { - match (alias.weight, font.weight) { - (None, _) => callback(FontList::font_absolute_path(&font.filename)), - (Some(w1), Some(w2)) => { - if w1 == w2 { - callback(FontList::font_absolute_path(&font.filename)) - } - }, - _ => {} - } - } - } - } -} - -pub fn system_default_family(generic_name: &str) -> Option { - if let Some(family) = FONT_LIST.find_family(&generic_name) { - Some(family.name.clone()) - } else if let Some(alias) = FONT_LIST.find_alias(&generic_name) { - Some(alias.from.clone()) - } else { - // First font defined in the fonts.xml is the default on Android. - FONT_LIST.families.get(0).map(|family| family.name.clone()) - } -} - -pub fn last_resort_font_families() -> Vec { - vec!( - "sans-serif".to_owned(), - "Droid Sans".to_owned(), - "serif".to_owned(), - ) -} - -pub static SANS_SERIF_FONT_FAMILY: &'static str = "sans-serif"; diff --git a/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/font.rs b/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/font.rs deleted file mode 100644 index f77f77be4..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/font.rs +++ /dev/null @@ -1,328 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use app_units::Au; -use font::{FontHandleMethods, FontMetrics, FontTableMethods}; -use font::{FontTableTag, FractionalPixel, GPOS, GSUB, KERN}; -use freetype::freetype::{FT_Done_Face, FT_New_Memory_Face}; -use freetype::freetype::{FT_F26Dot6, FT_Face, FT_FaceRec}; -use freetype::freetype::{FT_Get_Char_Index, FT_Get_Postscript_Name}; -use freetype::freetype::{FT_Get_Kerning, FT_Get_Sfnt_Table, FT_Load_Sfnt_Table}; -use freetype::freetype::{FT_GlyphSlot, FT_Library, FT_Long, FT_ULong}; -use freetype::freetype::{FT_Int32, FT_Kerning_Mode, FT_STYLE_FLAG_BOLD, FT_STYLE_FLAG_ITALIC}; -use freetype::freetype::{FT_Load_Glyph, FT_Set_Char_Size}; -use freetype::freetype::{FT_SizeRec, FT_Size_Metrics, FT_UInt, FT_Vector}; -use freetype::freetype::FT_Sfnt_Tag; -use freetype::tt_os2::TT_OS2; -use platform::font_context::FontContextHandle; -use platform::font_template::FontTemplateData; -use std::{mem, ptr}; -use std::os::raw::{c_char, c_long}; -use std::sync::Arc; -use style::computed_values::{font_stretch, font_weight}; -use super::c_str_to_string; -use text::glyph::GlyphId; -use text::util::fixed_to_float; - -// This constant is not present in the freetype -// bindings due to bindgen not handling the way -// the macro is defined. 
-const FT_LOAD_TARGET_LIGHT: FT_Int32 = 1 << 16; - -// Default to slight hinting, which is what most -// Linux distros use by default, and is a better -// default than no hinting. -// TODO(gw): Make this configurable. -const GLYPH_LOAD_FLAGS: FT_Int32 = FT_LOAD_TARGET_LIGHT; - -fn fixed_to_float_ft(f: i32) -> f64 { - fixed_to_float(6, f) -} - -#[derive(Debug)] -pub struct FontTable { - buffer: Vec, -} - -impl FontTableMethods for FontTable { - fn buffer(&self) -> &[u8] { - &self.buffer - } -} - -#[derive(Debug)] -pub struct FontHandle { - // The font binary. This must stay valid for the lifetime of the font, - // if the font is created using FT_Memory_Face. - font_data: Arc, - face: FT_Face, - handle: FontContextHandle, - can_do_fast_shaping: bool, -} - -impl Drop for FontHandle { - fn drop(&mut self) { - assert!(!self.face.is_null()); - unsafe { - if !FT_Done_Face(self.face).succeeded() { - panic!("FT_Done_Face failed"); - } - } - } -} - -impl FontHandleMethods for FontHandle { - fn new_from_template(fctx: &FontContextHandle, - template: Arc, - pt_size: Option) - -> Result { - let ft_ctx: FT_Library = fctx.ctx.ctx; - if ft_ctx.is_null() { return Err(()); } - - return create_face_from_buffer(ft_ctx, &template.bytes, pt_size).map(|face| { - let mut handle = FontHandle { - face: face, - font_data: template.clone(), - handle: fctx.clone(), - can_do_fast_shaping: false, - }; - // TODO (#11310): Implement basic support for GPOS and GSUB. - handle.can_do_fast_shaping = handle.has_table(KERN) && - !handle.has_table(GPOS) && - !handle.has_table(GSUB); - handle - }); - - fn create_face_from_buffer(lib: FT_Library, buffer: &[u8], pt_size: Option) - -> Result { - unsafe { - let mut face: FT_Face = ptr::null_mut(); - let face_index = 0 as FT_Long; - let result = FT_New_Memory_Face(lib, buffer.as_ptr(), buffer.len() as FT_Long, - face_index, &mut face); - - if !result.succeeded() || face.is_null() { - return Err(()); - } - if let Some(s) = pt_size { - FontHandle::set_char_size(face, s).or(Err(()))? - } - Ok(face) - } - } - } - fn template(&self) -> Arc { - self.font_data.clone() - } - fn family_name(&self) -> String { - unsafe { - c_str_to_string((*self.face).family_name as *const c_char) - } - } - fn face_name(&self) -> Option { - unsafe { - let name = FT_Get_Postscript_Name(self.face) as *const c_char; - - if !name.is_null() { - Some(c_str_to_string(name)) - } else { - None - } - } - } - fn is_italic(&self) -> bool { - unsafe { (*self.face).style_flags & FT_STYLE_FLAG_ITALIC as c_long != 0 } - } - fn boldness(&self) -> font_weight::T { - let default_weight = font_weight::T::normal(); - if unsafe { (*self.face).style_flags & FT_STYLE_FLAG_BOLD as c_long == 0 } { - default_weight - } else { - unsafe { - let os2 = FT_Get_Sfnt_Table(self.face, FT_Sfnt_Tag::FT_SFNT_OS2) as *mut TT_OS2; - let valid = !os2.is_null() && (*os2).version != 0xffff; - if valid { - let weight =(*os2).usWeightClass as i32; - if weight < 10 { - font_weight::T::from_int(weight * 100).unwrap() - } else if weight >= 100 && weight < 1000 { - font_weight::T::from_int(weight / 100 * 100).unwrap() - } else { - default_weight - } - } else { - default_weight - } - } - } - } - fn stretchiness(&self) -> font_stretch::T { - // TODO(pcwalton): Implement this. 
- font_stretch::T::normal - } - - fn glyph_index(&self, codepoint: char) -> Option { - assert!(!self.face.is_null()); - unsafe { - let idx = FT_Get_Char_Index(self.face, codepoint as FT_ULong); - if idx != 0 as FT_UInt { - Some(idx as GlyphId) - } else { - debug!("Invalid codepoint: {}", codepoint); - None - } - } - } - - fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId) - -> FractionalPixel { - assert!(!self.face.is_null()); - let mut delta = FT_Vector { x: 0, y: 0 }; - unsafe { - FT_Get_Kerning(self.face, first_glyph, second_glyph, - FT_Kerning_Mode::FT_KERNING_DEFAULT as FT_UInt, - &mut delta); - } - fixed_to_float_ft(delta.x as i32) - } - - fn can_do_fast_shaping(&self) -> bool { - self.can_do_fast_shaping - } - - fn glyph_h_advance(&self, glyph: GlyphId) -> Option { - assert!(!self.face.is_null()); - unsafe { - let res = FT_Load_Glyph(self.face, - glyph as FT_UInt, - GLYPH_LOAD_FLAGS); - if res.succeeded() { - let void_glyph = (*self.face).glyph; - let slot: FT_GlyphSlot = mem::transmute(void_glyph); - assert!(!slot.is_null()); - let advance = (*slot).metrics.horiAdvance; - debug!("h_advance for {} is {}", glyph, advance); - let advance = advance as i32; - Some(fixed_to_float_ft(advance) as FractionalPixel) - } else { - debug!("Unable to load glyph {}. reason: {:?}", glyph, res); - None - } - } - } - - fn metrics(&self) -> FontMetrics { - /* TODO(Issue #76): complete me */ - let face = self.face_rec_mut(); - - let underline_size = self.font_units_to_au(face.underline_thickness as f64); - let underline_offset = self.font_units_to_au(face.underline_position as f64); - let em_size = self.font_units_to_au(face.units_per_EM as f64); - let ascent = self.font_units_to_au(face.ascender as f64); - let descent = self.font_units_to_au(face.descender as f64); - let max_advance = self.font_units_to_au(face.max_advance_width as f64); - - // 'leading' is supposed to be the vertical distance between two baselines, - // reflected by the height attribute in freetype. On OS X (w/ CTFont), - // leading represents the distance between the bottom of a line descent to - // the top of the next line's ascent or: (line_height - ascent - descent), - // see http://stackoverflow.com/a/5635981 for CTFont implementation. - // Convert using a formula similar to what CTFont returns for consistency. 
- let height = self.font_units_to_au(face.height as f64); - let leading = height - (ascent + descent); - - let mut strikeout_size = Au(0); - let mut strikeout_offset = Au(0); - let mut x_height = Au(0); - unsafe { - let os2 = FT_Get_Sfnt_Table(face, FT_Sfnt_Tag::FT_SFNT_OS2) as *mut TT_OS2; - let valid = !os2.is_null() && (*os2).version != 0xffff; - if valid { - strikeout_size = self.font_units_to_au((*os2).yStrikeoutSize as f64); - strikeout_offset = self.font_units_to_au((*os2).yStrikeoutPosition as f64); - x_height = self.font_units_to_au((*os2).sxHeight as f64); - } - } - - let average_advance = self.glyph_index('0') - .and_then(|idx| self.glyph_h_advance(idx)) - .map_or(max_advance, |advance| self.font_units_to_au(advance)); - - let metrics = FontMetrics { - underline_size: underline_size, - underline_offset: underline_offset, - strikeout_size: strikeout_size, - strikeout_offset: strikeout_offset, - leading: leading, - x_height: x_height, - em_size: em_size, - ascent: ascent, - descent: -descent, // linux font's seem to use the opposite sign from mac - max_advance: max_advance, - average_advance: average_advance, - line_gap: height, - }; - - debug!("Font metrics (@{}px): {:?}", em_size.to_f32_px(), metrics); - metrics - } - - fn table_for_tag(&self, tag: FontTableTag) -> Option { - let tag = tag as FT_ULong; - - unsafe { - // Get the length - let mut len = 0; - if !FT_Load_Sfnt_Table(self.face, tag, 0, ptr::null_mut(), &mut len).succeeded() { - return None - } - // Get the bytes - let mut buf = vec![0u8; len as usize]; - if !FT_Load_Sfnt_Table(self.face, tag, 0, buf.as_mut_ptr(), &mut len).succeeded() { - return None - } - Some(FontTable { buffer: buf }) - } - } -} - -impl<'a> FontHandle { - fn set_char_size(face: FT_Face, pt_size: Au) -> Result<(), ()>{ - let char_size = pt_size.to_f64_px() * 64.0 + 0.5; - - unsafe { - let result = FT_Set_Char_Size(face, char_size as FT_F26Dot6, 0, 0, 0); - if result.succeeded() { Ok(()) } else { Err(()) } - } - } - - fn has_table(&self, tag: FontTableTag) -> bool { - unsafe { - FT_Load_Sfnt_Table(self.face, tag as FT_ULong, 0, ptr::null_mut(), &mut 0).succeeded() - } - } - - fn face_rec_mut(&'a self) -> &'a mut FT_FaceRec { - unsafe { - &mut (*self.face) - } - } - - fn font_units_to_au(&self, value: f64) -> Au { - let face = self.face_rec_mut(); - - // face.size is a *c_void in the bindings, presumably to avoid - // recursive structural types - let size: &FT_SizeRec = unsafe { mem::transmute(&(*face.size)) }; - let metrics: &FT_Size_Metrics = &(*size).metrics; - - let em_size = face.units_per_EM as f64; - let x_scale = (metrics.x_ppem as f64) / em_size as f64; - - // If this isn't true then we're scaling one of the axes wrong - assert!(metrics.x_ppem == metrics.y_ppem); - - Au::from_f64_px(value * x_scale) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/font_context.rs b/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/font_context.rs deleted file mode 100644 index 5e19e0816..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/font_context.rs +++ /dev/null @@ -1,143 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -use freetype::freetype::FT_Add_Default_Modules; -use freetype::freetype::FT_Done_Library; -use freetype::freetype::FT_Library; -use freetype::freetype::FT_Memory; -use freetype::freetype::FT_MemoryRec_; -use freetype::freetype::FT_New_Library; -use heapsize::{HeapSizeOf, heap_size_of}; -use std::heap::{Heap, Alloc, Layout}; -use std::os::raw::{c_long, c_void}; -use std::ptr; -use std::rc::Rc; - -// We pass a |User| struct -- via an opaque |void*| -- to FreeType each time a new instance is -// created. FreeType passes it back to the ft_alloc/ft_realloc/ft_free callbacks. We use it to -// record the memory usage of each FreeType instance. -pub struct User { - size: usize, -} - -// FreeType doesn't require any particular alignment for allocations. -const FT_ALIGNMENT: usize = 1; - -extern fn ft_alloc(mem: FT_Memory, req_size: c_long) -> *mut c_void { - unsafe { - let layout = Layout::from_size_align(req_size as usize, FT_ALIGNMENT).unwrap(); - let ptr = Heap.alloc(layout).unwrap() as *mut c_void; - let actual_size = heap_size_of(ptr as *const _); - - let user = (*mem).user as *mut User; - (*user).size += actual_size; - - ptr - } -} - -extern fn ft_free(mem: FT_Memory, ptr: *mut c_void) { - unsafe { - let actual_size = heap_size_of(ptr as *const _); - - let user = (*mem).user as *mut User; - (*user).size -= actual_size; - - let layout = Layout::from_size_align(actual_size, FT_ALIGNMENT).unwrap(); - Heap.dealloc(ptr as *mut u8, layout); - } -} - -extern fn ft_realloc(mem: FT_Memory, _cur_size: c_long, new_req_size: c_long, - old_ptr: *mut c_void) -> *mut c_void { - unsafe { - let old_actual_size = heap_size_of(old_ptr as *const _); - let old_layout = Layout::from_size_align(old_actual_size, FT_ALIGNMENT).unwrap(); - let new_layout = Layout::from_size_align(new_req_size as usize, FT_ALIGNMENT).unwrap(); - let result = Heap.realloc(old_ptr as *mut u8, old_layout, new_layout); - let new_ptr = result.unwrap() as *mut c_void; - let new_actual_size = heap_size_of(new_ptr as *const _); - - let user = (*mem).user as *mut User; - (*user).size += new_actual_size; - (*user).size -= old_actual_size; - - new_ptr - } -} - -// A |*mut User| field in a struct triggers a "use of `#[derive]` with a raw pointer" warning from -// rustc. But using a typedef avoids this, so... -pub type UserPtr = *mut User; - -// WARNING: We need to be careful how we use this struct. See the comment about Rc<> in -// FontContextHandle. -#[derive(Clone, Debug)] -pub struct FreeTypeLibraryHandle { - pub ctx: FT_Library, - mem: FT_Memory, - user: UserPtr, -} - -impl Drop for FreeTypeLibraryHandle { - fn drop(&mut self) { - assert!(!self.ctx.is_null()); - unsafe { - FT_Done_Library(self.ctx); - Box::from_raw(self.mem); - Box::from_raw(self.user); - } - } -} - -impl HeapSizeOf for FreeTypeLibraryHandle { - fn heap_size_of_children(&self) -> usize { - unsafe { - (*self.user).size + - heap_size_of(self.ctx as *const _) + - heap_size_of(self.mem as *const _) + - heap_size_of(self.user as *const _) - } - } -} - -#[derive(Clone, Debug)] -pub struct FontContextHandle { - // WARNING: FreeTypeLibraryHandle contains raw pointers, is clonable, and also implements - // `Drop`. This field needs to be Rc<> to make sure that the `drop` function is only called - // once, otherwise we'll get crashes. Yuk. 
- pub ctx: Rc, -} - -impl HeapSizeOf for FontContextHandle { - fn heap_size_of_children(&self) -> usize { - self.ctx.heap_size_of_children() - } -} - -impl FontContextHandle { - pub fn new() -> FontContextHandle { - let user = Box::into_raw(box User { - size: 0, - }); - let mem = Box::into_raw(box FT_MemoryRec_ { - user: user as *mut c_void, - alloc: Some(ft_alloc), - free: Some(ft_free), - realloc: Some(ft_realloc), - }); - unsafe { - let mut ctx: FT_Library = ptr::null_mut(); - - let result = FT_New_Library(mem, &mut ctx); - if !result.succeeded() { panic!("Unable to initialize FreeType library"); } - - FT_Add_Default_Modules(ctx); - - FontContextHandle { - ctx: Rc::new(FreeTypeLibraryHandle { ctx: ctx, mem: mem, user: user }), - } - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/font_list.rs b/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/font_list.rs deleted file mode 100644 index 8f2898ae6..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/font_list.rs +++ /dev/null @@ -1,156 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use fontconfig::fontconfig::{FcChar8, FcResultMatch, FcSetSystem}; -use fontconfig::fontconfig::{FcConfigGetCurrent, FcConfigGetFonts, FcConfigSubstitute}; -use fontconfig::fontconfig::{FcDefaultSubstitute, FcFontMatch, FcNameParse, FcPatternGetString}; -use fontconfig::fontconfig::{FcFontSetDestroy, FcMatchPattern, FcPatternCreate, FcPatternDestroy}; -use fontconfig::fontconfig::{FcFontSetList, FcObjectSetCreate, FcObjectSetDestroy, FcPatternAddString}; -use fontconfig::fontconfig::{FcObjectSetAdd, FcPatternGetInteger}; -use libc; -use libc::{c_char, c_int}; -use std::borrow::ToOwned; -use std::ffi::CString; -use std::ptr; -use super::c_str_to_string; - -static FC_FAMILY: &'static [u8] = b"family\0"; -static FC_FILE: &'static [u8] = b"file\0"; -static FC_INDEX: &'static [u8] = b"index\0"; -static FC_FONTFORMAT: &'static [u8] = b"fontformat\0"; - -pub fn for_each_available_family(mut callback: F) where F: FnMut(String) { - unsafe { - let config = FcConfigGetCurrent(); - let font_set = FcConfigGetFonts(config, FcSetSystem); - for i in 0..((*font_set).nfont as isize) { - let font = (*font_set).fonts.offset(i); - let mut family: *mut FcChar8 = ptr::null_mut(); - let mut format: *mut FcChar8 = ptr::null_mut(); - let mut v: c_int = 0; - if FcPatternGetString(*font, FC_FONTFORMAT.as_ptr() as *mut c_char, v, &mut format) != FcResultMatch { - continue; - } - - // Skip bitmap fonts. They aren't supported by FreeType. 
- let fontformat = c_str_to_string(format as *const c_char); - if fontformat != "TrueType" && - fontformat != "CFF" && - fontformat != "Type 1" { - continue; - } - - while FcPatternGetString(*font, FC_FAMILY.as_ptr() as *mut c_char, v, &mut family) == FcResultMatch { - let family_name = c_str_to_string(family as *const c_char); - callback(family_name); - v += 1; - } - } - } -} - -pub fn for_each_variation(family_name: &str, mut callback: F) - where F: FnMut(String) -{ - debug!("getting variations for {}", family_name); - unsafe { - let config = FcConfigGetCurrent(); - let mut font_set = FcConfigGetFonts(config, FcSetSystem); - let font_set_array_ptr = &mut font_set; - let pattern = FcPatternCreate(); - assert!(!pattern.is_null()); - let family_name_c = CString::new(family_name).unwrap(); - let family_name = family_name_c.as_ptr(); - let ok = FcPatternAddString(pattern, FC_FAMILY.as_ptr() as *mut c_char, family_name as *mut FcChar8); - assert!(ok != 0); - - let object_set = FcObjectSetCreate(); - assert!(!object_set.is_null()); - - FcObjectSetAdd(object_set, FC_FILE.as_ptr() as *mut c_char); - FcObjectSetAdd(object_set, FC_INDEX.as_ptr() as *mut c_char); - - let matches = FcFontSetList(config, font_set_array_ptr, 1, pattern, object_set); - - debug!("found {} variations", (*matches).nfont); - - for i in 0..((*matches).nfont as isize) { - let font = (*matches).fonts.offset(i); - let mut file: *mut FcChar8 = ptr::null_mut(); - let result = FcPatternGetString(*font, FC_FILE.as_ptr() as *mut c_char, 0, &mut file); - let file = if result == FcResultMatch { - c_str_to_string(file as *const c_char) - } else { - panic!(); - }; - let mut index: libc::c_int = 0; - let result = FcPatternGetInteger(*font, FC_INDEX.as_ptr() as *mut c_char, 0, &mut index); - let index = if result == FcResultMatch { - index - } else { - panic!(); - }; - - debug!("variation file: {}", file); - debug!("variation index: {}", index); - - callback(file); - } - - FcFontSetDestroy(matches); - FcPatternDestroy(pattern); - FcObjectSetDestroy(object_set); - } -} - -pub fn system_default_family(generic_name: &str) -> Option { - let generic_name_c = CString::new(generic_name).unwrap(); - let generic_name_ptr = generic_name_c.as_ptr(); - - unsafe { - let pattern = FcNameParse(generic_name_ptr as *mut FcChar8); - - FcConfigSubstitute(ptr::null_mut(), pattern, FcMatchPattern); - FcDefaultSubstitute(pattern); - - let mut result = 0; - let family_match = FcFontMatch(ptr::null_mut(), pattern, &mut result); - - let family_name = if result == FcResultMatch { - let mut match_string: *mut FcChar8 = ptr::null_mut(); - FcPatternGetString(family_match, FC_FAMILY.as_ptr() as *mut c_char, 0, &mut match_string); - let result = c_str_to_string(match_string as *const c_char); - FcPatternDestroy(family_match); - Some(result) - } else { - None - }; - - FcPatternDestroy(pattern); - family_name - } -} - -#[cfg(target_os = "linux")] -pub fn last_resort_font_families() -> Vec { - vec!( - "Fira Sans".to_owned(), - "DejaVu Sans".to_owned(), - "Arial".to_owned() - ) -} - -#[cfg(target_os = "windows")] -pub fn last_resort_font_families() -> Vec { - vec!( - "Arial".to_owned() - ) -} - -#[cfg(target_os = "linux")] -pub static SANS_SERIF_FONT_FAMILY: &'static str = "DejaVu Sans"; - -#[cfg(target_os = "windows")] -pub static SANS_SERIF_FONT_FAMILY: &'static str = "Arial"; - diff --git a/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/font_template.rs 
b/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/font_template.rs deleted file mode 100644 index 658974bc3..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/platform/freetype/font_template.rs +++ /dev/null @@ -1,58 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use servo_atoms::Atom; -use std::fs::File; -use std::io::{Read, Error}; -use webrender_api::NativeFontHandle; - -/// Platform specific font representation for Linux. -/// The identifier is an absolute path, and the bytes -/// field is the loaded data that can be passed to -/// freetype and azure directly. -#[derive(Debug, Deserialize, Serialize)] -pub struct FontTemplateData { - pub bytes: Vec, - pub identifier: Atom, -} - -impl FontTemplateData { - pub fn new(identifier: Atom, font_data: Option>) -> Result { - let bytes = match font_data { - Some(bytes) => { - bytes - }, - None => { - // TODO: Handle file load failure! - let mut file = File::open(&*identifier)?; - let mut buffer = vec![]; - file.read_to_end(&mut buffer).unwrap(); - buffer - }, - }; - - Ok(FontTemplateData { - bytes: bytes, - identifier: identifier, - }) - } - - /// Returns a clone of the data in this font. This may be a hugely expensive - /// operation (depending on the platform) which performs synchronous disk I/O - /// and should never be done lightly. - pub fn bytes(&self) -> Vec { - self.bytes.clone() - } - - /// Returns a clone of the bytes in this font if they are in memory. This function never - /// performs disk I/O. - pub fn bytes_if_in_memory(&self) -> Option> { - Some(self.bytes()) - } - - /// Returns the native font that underlies this font template, if applicable. - pub fn native_font(&self) -> Option { - None - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/platform/macos/font.rs b/collector/compile-benchmarks/style-servo/components/gfx/platform/macos/font.rs deleted file mode 100644 index aaaad7332..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/platform/macos/font.rs +++ /dev/null @@ -1,324 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -/// Implementation of Quartz (CoreGraphics) fonts. - -use app_units::Au; -use byteorder::{BigEndian, ByteOrder}; -use core_foundation::base::CFIndex; -use core_foundation::data::CFData; -use core_foundation::string::UniChar; -use core_graphics::font::CGGlyph; -use core_graphics::geometry::CGRect; -use core_text::font::CTFont; -use core_text::font_descriptor::{SymbolicTraitAccessors, TraitAccessors}; -use core_text::font_descriptor::kCTFontDefaultOrientation; -use font::{FontHandleMethods, FontMetrics, FontTableMethods, FontTableTag, FractionalPixel}; -use font::{GPOS, GSUB, KERN}; -use platform::font_template::FontTemplateData; -use platform::macos::font_context::FontContextHandle; -use std::{fmt, ptr}; -use std::ops::Range; -use std::sync::Arc; -use style::computed_values::{font_stretch, font_weight}; -use text::glyph::GlyphId; - -const KERN_PAIR_LEN: usize = 6; - -pub struct FontTable { - data: CFData, -} - -// assumes 72 points per inch, and 96 px per inch -fn px_to_pt(px: f64) -> f64 { - px / 96. * 72. 
-} - -// assumes 72 points per inch, and 96 px per inch -fn pt_to_px(pt: f64) -> f64 { - pt / 72. * 96. -} - -fn au_from_pt(pt: f64) -> Au { - Au::from_f64_px(pt_to_px(pt)) -} - -impl FontTable { - pub fn wrap(data: CFData) -> FontTable { - FontTable { data: data } - } -} - -impl FontTableMethods for FontTable { - fn buffer(&self) -> &[u8] { - self.data.bytes() - } -} - -#[derive(Debug)] -pub struct FontHandle { - font_data: Arc, - ctfont: CTFont, - h_kern_subtable: Option, - can_do_fast_shaping: bool, -} - -impl FontHandle { - /// Cache all the data needed for basic horizontal kerning. This is used only as a fallback or - /// fast path (when the GPOS table is missing or unnecessary) so it needn't handle every case. - fn find_h_kern_subtable(&self) -> Option { - let font_table = match self.table_for_tag(KERN) { - Some(table) => table, - None => return None - }; - - let mut result = CachedKernTable { - font_table: font_table, - pair_data_range: 0..0, - px_per_font_unit: 0.0, - }; - - // Look for a subtable with horizontal kerning in format 0. - // https://www.microsoft.com/typography/otspec/kern.htm - const KERN_COVERAGE_HORIZONTAL_FORMAT_0: u16 = 1; - const SUBTABLE_HEADER_LEN: usize = 6; - const FORMAT_0_HEADER_LEN: usize = 8; - { - let table = result.font_table.buffer(); - let version = BigEndian::read_u16(table); - if version != 0 { - return None; - } - let num_subtables = BigEndian::read_u16(&table[2..]); - let mut start = 4; - for _ in 0..num_subtables { - // TODO: Check the subtable version number? - let len = BigEndian::read_u16(&table[start + 2..]) as usize; - let cov = BigEndian::read_u16(&table[start + 4..]); - let end = start + len; - if cov == KERN_COVERAGE_HORIZONTAL_FORMAT_0 { - // Found a matching subtable. - if result.pair_data_range.len() > 0 { - debug!("Found multiple horizontal kern tables. Disable fast path."); - return None; - } - // Read the subtable header. - let subtable_start = start + SUBTABLE_HEADER_LEN; - let n_pairs = BigEndian::read_u16(&table[subtable_start..]) as usize; - let pair_data_start = subtable_start + FORMAT_0_HEADER_LEN; - - result.pair_data_range = pair_data_start..end; - if result.pair_data_range.len() != n_pairs * KERN_PAIR_LEN { - debug!("Bad data in kern header. Disable fast path."); - return None; - } - - let pt_per_font_unit = self.ctfont.pt_size() as f64 / - self.ctfont.units_per_em() as f64; - result.px_per_font_unit = pt_to_px(pt_per_font_unit); - } - start = end; - } - } - if result.pair_data_range.len() > 0 { - Some(result) - } else { - None - } - } -} - -struct CachedKernTable { - font_table: FontTable, - pair_data_range: Range, - px_per_font_unit: f64, -} - -impl CachedKernTable { - /// Search for a glyph pair in the kern table and return the corresponding value. 
- fn binary_search(&self, first_glyph: GlyphId, second_glyph: GlyphId) -> Option { - let pairs = &self.font_table.buffer()[self.pair_data_range.clone()]; - - let query = first_glyph << 16 | second_glyph; - let (mut start, mut end) = (0, pairs.len() / KERN_PAIR_LEN); - while start < end { - let i = (start + end) / 2; - let key = BigEndian::read_u32(&pairs[i * KERN_PAIR_LEN..]); - if key > query { - end = i; - } else if key < query { - start = i + 1; - } else { - return Some(BigEndian::read_i16(&pairs[i * KERN_PAIR_LEN + 4..])); - } - } - None - } -} - -impl fmt::Debug for CachedKernTable { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - write!(f, "CachedKernTable") - } -} - - -impl FontHandleMethods for FontHandle { - fn new_from_template(_fctx: &FontContextHandle, - template: Arc, - pt_size: Option) - -> Result { - let size = match pt_size { - Some(s) => s.to_f64_px(), - None => 0.0 - }; - match template.ctfont(size) { - Some(ref ctfont) => { - let mut handle = FontHandle { - font_data: template.clone(), - ctfont: ctfont.clone_with_font_size(size), - h_kern_subtable: None, - can_do_fast_shaping: false, - }; - handle.h_kern_subtable = handle.find_h_kern_subtable(); - // TODO (#11310): Implement basic support for GPOS and GSUB. - handle.can_do_fast_shaping = handle.h_kern_subtable.is_some() && - handle.table_for_tag(GPOS).is_none() && - handle.table_for_tag(GSUB).is_none(); - Ok(handle) - } - None => { - Err(()) - } - } - } - - fn template(&self) -> Arc { - self.font_data.clone() - } - - fn family_name(&self) -> String { - self.ctfont.family_name() - } - - fn face_name(&self) -> Option { - Some(self.ctfont.face_name()) - } - - fn is_italic(&self) -> bool { - self.ctfont.symbolic_traits().is_italic() - } - - fn boldness(&self) -> font_weight::T { - let normalized = self.ctfont.all_traits().normalized_weight(); // [-1.0, 1.0] - let normalized = if normalized <= 0.0 { - 4.0 + normalized * 3.0 // [1.0, 4.0] - } else { - 4.0 + normalized * 5.0 // [4.0, 9.0] - }; // [1.0, 9.0], centered on 4.0 - font_weight::T::from_int(normalized.round() as i32 * 100).unwrap() - } - - fn stretchiness(&self) -> font_stretch::T { - let normalized = self.ctfont.all_traits().normalized_width(); // [-1.0, 1.0] - let normalized = (normalized + 1.0) / 2.0 * 9.0; // [0.0, 9.0] - match normalized { - v if v < 1.0 => font_stretch::T::ultra_condensed, - v if v < 2.0 => font_stretch::T::extra_condensed, - v if v < 3.0 => font_stretch::T::condensed, - v if v < 4.0 => font_stretch::T::semi_condensed, - v if v < 5.0 => font_stretch::T::normal, - v if v < 6.0 => font_stretch::T::semi_expanded, - v if v < 7.0 => font_stretch::T::expanded, - v if v < 8.0 => font_stretch::T::extra_expanded, - _ => font_stretch::T::ultra_expanded, - } - } - - fn glyph_index(&self, codepoint: char) -> Option { - let characters: [UniChar; 1] = [codepoint as UniChar]; - let mut glyphs: [CGGlyph; 1] = [0 as CGGlyph]; - let count: CFIndex = 1; - - let result = self.ctfont.get_glyphs_for_characters(&characters[0], - &mut glyphs[0], - count); - - if !result { - // No glyph for this character - return None; - } - - assert!(glyphs[0] != 0); // FIXME: error handling - return Some(glyphs[0] as GlyphId); - } - - fn glyph_h_kerning(&self, first_glyph: GlyphId, second_glyph: GlyphId) -> FractionalPixel { - if let Some(ref table) = self.h_kern_subtable { - if let Some(font_units) = table.binary_search(first_glyph, second_glyph) { - return font_units as f64 * table.px_per_font_unit; - } - } - 0.0 - } - - fn can_do_fast_shaping(&self) -> bool { 
- self.can_do_fast_shaping - } - - fn glyph_h_advance(&self, glyph: GlyphId) -> Option { - let glyphs = [glyph as CGGlyph]; - let advance = self.ctfont.get_advances_for_glyphs(kCTFontDefaultOrientation, - &glyphs[0], - ptr::null_mut(), - 1); - Some(advance as FractionalPixel) - } - - fn metrics(&self) -> FontMetrics { - let bounding_rect: CGRect = self.ctfont.bounding_box(); - let ascent = self.ctfont.ascent() as f64; - let descent = self.ctfont.descent() as f64; - let em_size = Au::from_f64_px(self.ctfont.pt_size() as f64); - let leading = self.ctfont.leading() as f64; - - let scale = px_to_pt(self.ctfont.pt_size() as f64) / (ascent + descent); - let line_gap = (ascent + descent + leading + 0.5).floor(); - - let max_advance_width = au_from_pt(bounding_rect.size.width as f64); - let average_advance = self.glyph_index('0') - .and_then(|idx| self.glyph_h_advance(idx)) - .map(Au::from_f64_px) - .unwrap_or(max_advance_width); - - let metrics = FontMetrics { - underline_size: au_from_pt(self.ctfont.underline_thickness() as f64), - // TODO(Issue #201): underline metrics are not reliable. Have to pull out of font table - // directly. - // - // see also: https://bugs.webkit.org/show_bug.cgi?id=16768 - // see also: https://bugreports.qt-project.org/browse/QTBUG-13364 - underline_offset: au_from_pt(self.ctfont.underline_position() as f64), - strikeout_size: Au(0), // FIXME(Issue #942) - strikeout_offset: Au(0), // FIXME(Issue #942) - leading: au_from_pt(leading), - x_height: au_from_pt((self.ctfont.x_height() as f64) * scale), - em_size: em_size, - ascent: au_from_pt(ascent * scale), - descent: au_from_pt(descent * scale), - max_advance: max_advance_width, - average_advance: average_advance, - line_gap: Au::from_f64_px(line_gap), - }; - debug!("Font metrics (@{} pt): {:?}", self.ctfont.pt_size() as f64, metrics); - metrics - } - - fn table_for_tag(&self, tag: FontTableTag) -> Option { - let result: Option = self.ctfont.get_font_table(tag); - result.and_then(|data| { - Some(FontTable::wrap(data)) - }) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/platform/macos/font_context.rs b/collector/compile-benchmarks/style-servo/components/gfx/platform/macos/font_context.rs deleted file mode 100644 index e728b62ae..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/platform/macos/font_context.rs +++ /dev/null @@ -1,23 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use heapsize::HeapSizeOf; - -#[derive(Clone, Debug)] -pub struct FontContextHandle { - ctx: () -} - -impl FontContextHandle { - // this is a placeholder until NSFontManager or whatever is bound in here. - pub fn new() -> FontContextHandle { - FontContextHandle { ctx: () } - } -} - -impl HeapSizeOf for FontContextHandle { - fn heap_size_of_children(&self) -> usize { - 0 - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/platform/macos/font_list.rs b/collector/compile-benchmarks/style-servo/components/gfx/platform/macos/font_list.rs deleted file mode 100644 index c0800f94c..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/platform/macos/font_list.rs +++ /dev/null @@ -1,47 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -use core_foundation::base::TCFType; -use core_foundation::string::{CFString, CFStringRef}; -use core_text; -use core_text::font_descriptor::{CTFontDescriptor, CTFontDescriptorRef}; -use std::borrow::ToOwned; -use std::mem; - -pub fn for_each_available_family(mut callback: F) where F: FnMut(String) { - let family_names = core_text::font_collection::get_family_names(); - for strref in family_names.iter() { - let family_name_ref: CFStringRef = unsafe { mem::transmute(strref) }; - let family_name_cf: CFString = unsafe { TCFType::wrap_under_get_rule(family_name_ref) }; - let family_name = family_name_cf.to_string(); - callback(family_name); - } -} - -pub fn for_each_variation(family_name: &str, mut callback: F) where F: FnMut(String) { - debug!("Looking for faces of family: {}", family_name); - - let family_collection = core_text::font_collection::create_for_family(family_name); - if let Some(family_collection) = family_collection { - let family_descriptors = family_collection.get_descriptors(); - for descref in family_descriptors.iter() { - let descref: CTFontDescriptorRef = unsafe { mem::transmute(descref) }; - let desc: CTFontDescriptor = unsafe { TCFType::wrap_under_get_rule(descref) }; - let postscript_name = desc.font_name(); - callback(postscript_name); - } - } -} - -pub fn system_default_family(_generic_name: &str) -> Option { - None -} - -pub fn last_resort_font_families() -> Vec { - vec!("Arial Unicode MS".to_owned(), "Arial".to_owned()) -} - -#[cfg(target_os = "macos")] -pub static SANS_SERIF_FONT_FAMILY: &'static str = "Helvetica"; - diff --git a/collector/compile-benchmarks/style-servo/components/gfx/platform/macos/font_template.rs b/collector/compile-benchmarks/style-servo/components/gfx/platform/macos/font_template.rs deleted file mode 100644 index d71dc8425..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/platform/macos/font_template.rs +++ /dev/null @@ -1,151 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use app_units::Au; -use core_graphics::data_provider::CGDataProvider; -use core_graphics::font::CGFont; -use core_text; -use core_text::font::CTFont; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; -use serde::de::{Error, Visitor}; -use servo_atoms::Atom; -use servo_url::ServoUrl; -use std::borrow::ToOwned; -use std::collections::HashMap; -use std::fmt; -use std::fs::File; -use std::io::{Read, Error as IoError}; -use std::ops::Deref; -use std::sync::Mutex; -use webrender_api::NativeFontHandle; - -/// Platform specific font representation for mac. -/// The identifier is a PostScript font name. The -/// CTFont object is cached here for use by the -/// paint functions that create CGFont references. -#[derive(Debug, Deserialize, Serialize)] -pub struct FontTemplateData { - /// The `CTFont` object, if present. This is cached here so that we don't have to keep creating - /// `CTFont` instances over and over. It can always be recreated from the `identifier` and/or - /// `font_data` fields. - /// - /// When sending a `FontTemplateData` instance across processes, this will be cleared out on - /// the other side, because `CTFont` instances cannot be sent across processes. This is - /// harmless, however, because it can always be recreated. 
- ctfont: CachedCTFont, - - pub identifier: Atom, - pub font_data: Option> -} - -unsafe impl Send for FontTemplateData {} -unsafe impl Sync for FontTemplateData {} - -impl FontTemplateData { - pub fn new(identifier: Atom, font_data: Option>) -> Result { - Ok(FontTemplateData { - ctfont: CachedCTFont(Mutex::new(HashMap::new())), - identifier: identifier.to_owned(), - font_data: font_data - }) - } - - /// Retrieves the Core Text font instance, instantiating it if necessary. - pub fn ctfont(&self, pt_size: f64) -> Option { - let mut ctfonts = self.ctfont.lock().unwrap(); - let pt_size_key = Au::from_f64_px(pt_size); - if !ctfonts.contains_key(&pt_size_key) { - // If you pass a zero font size to one of the Core Text APIs, it'll replace it with - // 12.0. We don't want that! (Issue #10492.) - let clamped_pt_size = pt_size.max(0.01); - let ctfont = match self.font_data { - Some(ref bytes) => { - let fontprov = CGDataProvider::from_buffer(bytes); - let cgfont_result = CGFont::from_data_provider(fontprov); - match cgfont_result { - Ok(cgfont) => { - Some(core_text::font::new_from_CGFont(&cgfont, clamped_pt_size)) - } - Err(_) => None - } - } - None => core_text::font::new_from_name(&*self.identifier, clamped_pt_size).ok(), - }; - if let Some(ctfont) = ctfont { - ctfonts.insert(pt_size_key, ctfont); - } - } - ctfonts.get(&pt_size_key).map(|ctfont| (*ctfont).clone()) - } - - /// Returns a clone of the data in this font. This may be a hugely expensive - /// operation (depending on the platform) which performs synchronous disk I/O - /// and should never be done lightly. - pub fn bytes(&self) -> Vec { - if let Some(font_data) = self.bytes_if_in_memory() { - return font_data; - } - - let path = ServoUrl::parse(&*self.ctfont(0.0) - .expect("No Core Text font available!") - .url() - .expect("No URL for Core Text font!") - .get_string() - .to_string()).expect("Couldn't parse Core Text font URL!") - .as_url().to_file_path() - .expect("Core Text font didn't name a path!"); - let mut bytes = Vec::new(); - File::open(path).expect("Couldn't open font file!").read_to_end(&mut bytes).unwrap(); - bytes - } - - /// Returns a clone of the bytes in this font if they are in memory. This function never - /// performs disk I/O. - pub fn bytes_if_in_memory(&self) -> Option> { - self.font_data.clone() - } - - /// Returns the native font that underlies this font template, if applicable. 
- pub fn native_font(&self) -> Option { - self.ctfont(0.0).map(|ctfont| NativeFontHandle(ctfont.copy_to_CGFont())) - } -} - -#[derive(Debug)] -pub struct CachedCTFont(Mutex>); - -impl Deref for CachedCTFont { - type Target = Mutex>; - fn deref(&self) -> &Mutex> { - &self.0 - } -} - -impl Serialize for CachedCTFont { - fn serialize(&self, serializer: S) -> Result where S: Serializer { - serializer.serialize_none() - } -} - -impl<'de> Deserialize<'de> for CachedCTFont { - fn deserialize(deserializer: D) -> Result - where D: Deserializer<'de> { - struct NoneOptionVisitor; - - impl<'de> Visitor<'de> for NoneOptionVisitor { - type Value = CachedCTFont; - - fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - write!(fmt, "none") - } - - #[inline] - fn visit_none(self) -> Result where E: Error { - Ok(CachedCTFont(Mutex::new(HashMap::new()))) - } - } - - deserializer.deserialize_option(NoneOptionVisitor) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/platform/mod.rs b/collector/compile-benchmarks/style-servo/components/gfx/platform/mod.rs deleted file mode 100644 index 897fbe9e8..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/platform/mod.rs +++ /dev/null @@ -1,59 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -#[cfg(any(target_os = "linux", target_os = "android"))] -pub use platform::freetype::{font, font_context}; - -#[cfg(any(target_os = "linux", target_os = "android"))] -pub use platform::freetype::{font_list, font_template}; - -#[cfg(target_os = "windows")] -pub use platform::windows::{font, font_context, font_list, font_template}; - -#[cfg(target_os = "macos")] -pub use platform::macos::{font, font_context, font_list, font_template}; - -#[cfg(any(target_os = "linux", target_os = "android"))] -mod freetype { - use libc::c_char; - use std::ffi::CStr; - use std::str; - - /// Creates a String from the given null-terminated buffer. - /// Panics if the buffer does not contain UTF-8. - unsafe fn c_str_to_string(s: *const c_char) -> String { - str::from_utf8(CStr::from_ptr(s).to_bytes()).unwrap().to_owned() - } - - pub mod font; - pub mod font_context; - - #[cfg(target_os = "linux")] - pub mod font_list; - #[cfg(target_os = "android")] - mod android { - pub mod font_list; - } - #[cfg(target_os = "android")] - pub use self::android::font_list; - - #[cfg(any(target_os = "linux", target_os = "android"))] - pub mod font_template; -} - -#[cfg(target_os = "macos")] -mod macos { - pub mod font; - pub mod font_context; - pub mod font_list; - pub mod font_template; -} - -#[cfg(target_os = "windows")] -mod windows { - pub mod font; - pub mod font_context; - pub mod font_list; - pub mod font_template; -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/platform/windows/font.rs b/collector/compile-benchmarks/style-servo/components/gfx/platform/windows/font.rs deleted file mode 100644 index 38a2e6e0d..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/platform/windows/font.rs +++ /dev/null @@ -1,374 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -// NOTE: https://www.chromium.org/directwrite-font-proxy has useful -// information for an approach that we'll likely need to take when the -// renderer moves to a sandboxed process. - -use app_units::Au; -use dwrote; -use dwrote::{Font, FontFace, FontFile}; -use dwrote::{FontWeight, FontStretch, FontStyle}; -use font::{FontHandleMethods, FontMetrics, FontTableMethods}; -use font::{FontTableTag, FractionalPixel}; -use platform::font_template::FontTemplateData; -use platform::windows::font_context::FontContextHandle; -use platform::windows::font_list::font_from_atom; -use std::sync::Arc; -use style::computed_values::{font_stretch, font_weight}; -use text::glyph::GlyphId; -use truetype; - -// 1em = 12pt = 16px, assuming 72 points per inch and 96 px per inch -fn pt_to_px(pt: f64) -> f64 { pt / 72. * 96. } -fn em_to_px(em: f64) -> f64 { em * 16. } -fn au_from_em(em: f64) -> Au { Au::from_f64_px(em_to_px(em)) } -fn au_from_pt(pt: f64) -> Au { Au::from_f64_px(pt_to_px(pt)) } - -pub struct FontTable { - data: Vec, -} - -impl FontTable { - pub fn wrap(data: &[u8]) -> FontTable { - FontTable { data: data.to_vec() } - } -} - -impl FontTableMethods for FontTable { - fn buffer(&self) -> &[u8] { - &self.data - } -} - -fn make_tag(tag_bytes: &[u8]) -> FontTableTag { - assert_eq!(tag_bytes.len(), 4); - unsafe { *(tag_bytes.as_ptr() as *const FontTableTag) } -} - -macro_rules! try_lossy(($result:expr) => ($result.map_err(|_| (()))?)); - -// Given a set of records, figure out the string indices for the family and face -// names. We want name_id 1 and 2, and we need to use platform_id == 1 and -// language_id == 0 to avoid limitations in the truetype crate. We *could* just -// do our own parsing here, and use the offset/length data and pull the values out -// ourselves. -fn get_family_face_indices(records: &[truetype::naming_table::Record]) -> Option<(usize, usize)> { - let mut family_name_index = None; - let mut face_name_index = None; - - for i in 0..records.len() { - // the truetype crate can only decode mac platform format names - if records[i].platform_id != 1 { - continue; - } - - if records[i].language_id != 0 { - continue; - } - - if records[i].name_id == 1 { - family_name_index = Some(i); - } else if records[i].name_id == 2 { - face_name_index = Some(i); - } - } - - if family_name_index.is_some() && face_name_index.is_some() { - Some((family_name_index.unwrap(), face_name_index.unwrap())) - } else { - None - } -} - -// We need the font (DWriteFont) in order to be able to query things like -// the family name, face name, weight, etc. On Windows 10, the -// DWriteFontFace3 interface provides this on the FontFace, but that's only -// available on Win10+. -// -// Instead, we do the parsing work using the truetype crate for raw fonts. -// We're just extracting basic info, so this is sufficient for now. 
- -#[derive(Debug)] -struct FontInfo { - family_name: String, - face_name: String, - weight: font_weight::T, - stretch: font_stretch::T, - style: FontStyle, -} - -impl FontInfo { - fn new_from_face(face: &FontFace) -> Result { - use std::cmp::{min, max}; - use std::io::Cursor; - use truetype::{NamingTable, Value, WindowsMetrics}; - - let name_table_bytes = face.get_font_table(make_tag(b"name")); - let os2_table_bytes = face.get_font_table(make_tag(b"OS/2")); - if name_table_bytes.is_none() || os2_table_bytes.is_none() { - return Err(()); - } - - let mut name_table_cursor = Cursor::new(name_table_bytes.as_ref().unwrap()); - let names = try_lossy!(NamingTable::read(&mut name_table_cursor)); - let (family, face) = match names { - NamingTable::Format0(ref table) => { - if let Some((family_index, face_index)) = get_family_face_indices(&table.records) { - let strings = table.strings().unwrap(); - let family = strings[family_index].clone(); - let face = strings[face_index].clone(); - ((family, face)) - } else { - return Err(()); - } - }, - NamingTable::Format1(ref table) => { - if let Some((family_index, face_index)) = get_family_face_indices(&table.records) { - let strings = table.strings().unwrap(); - let family = strings[family_index].clone(); - let face = strings[face_index].clone(); - ((family, face)) - } else { - return Err(()); - } - } - }; - - let mut os2_table_cursor = Cursor::new(os2_table_bytes.as_ref().unwrap()); - let metrics = try_lossy!(WindowsMetrics::read(&mut os2_table_cursor)); - let (weight_val, width_val, italic_bool) = match metrics { - WindowsMetrics::Version0(ref m) => { - (m.weight_class, m.width_class, m.selection_flags.0 & 1 == 1) - }, - WindowsMetrics::Version1(ref m) => { - (m.weight_class, m.width_class, m.selection_flags.0 & 1 == 1) - }, - WindowsMetrics::Version2(ref m) | - WindowsMetrics::Version3(ref m) | - WindowsMetrics::Version4(ref m) => { - (m.weight_class, m.width_class, m.selection_flags.0 & 1 == 1) - }, - WindowsMetrics::Version5(ref m) => { - (m.weight_class, m.width_class, m.selection_flags.0 & 1 == 1) - }, - }; - - let weight = font_weight::T:: - from_int(min(9, max(1, weight_val as i32 / 100)) * 100).unwrap(); - - let stretch = match min(9, max(1, width_val)) { - 1 => font_stretch::T::ultra_condensed, - 2 => font_stretch::T::extra_condensed, - 3 => font_stretch::T::condensed, - 4 => font_stretch::T::semi_condensed, - 5 => font_stretch::T::normal, - 6 => font_stretch::T::semi_expanded, - 7 => font_stretch::T::expanded, - 8 => font_stretch::T::extra_expanded, - 9 => font_stretch::T::ultra_expanded, - _ => return Err(()), - }; - - let style = if italic_bool { - FontStyle::Italic - } else { - FontStyle::Normal - }; - - Ok(FontInfo { - family_name: family, - face_name: face, - weight: weight, - stretch: stretch, - style: style, - }) - } - - fn new_from_font(font: &Font) -> Result { - let style = font.style(); - let weight = font_weight::T(match font.weight() { - FontWeight::Thin => 100, - FontWeight::ExtraLight => 200, - FontWeight::Light => 300, - // slightly grayer gray - FontWeight::SemiLight => 300, - FontWeight::Regular => 400, - FontWeight::Medium => 500, - FontWeight::SemiBold => 600, - FontWeight::Bold => 700, - FontWeight::ExtraBold => 800, - FontWeight::Black => 900, - // slightly blacker black - FontWeight::ExtraBlack => 900, - }); - let stretch = match font.stretch() { - FontStretch::Undefined => font_stretch::T::normal, - FontStretch::UltraCondensed => font_stretch::T::ultra_condensed, - FontStretch::ExtraCondensed => 
font_stretch::T::extra_condensed, - FontStretch::Condensed => font_stretch::T::condensed, - FontStretch::SemiCondensed => font_stretch::T::semi_condensed, - FontStretch::Normal => font_stretch::T::normal, - FontStretch::SemiExpanded => font_stretch::T::semi_expanded, - FontStretch::Expanded => font_stretch::T::expanded, - FontStretch::ExtraExpanded => font_stretch::T::extra_expanded, - FontStretch::UltraExpanded => font_stretch::T::ultra_expanded, - }; - - Ok(FontInfo { - family_name: font.family_name(), - face_name: font.face_name(), - style: style, - weight: weight, - stretch: stretch, - }) - } -} - -#[derive(Debug)] -pub struct FontHandle { - font_data: Arc, - face: FontFace, - info: FontInfo, - em_size: f32, - du_per_em: f32, - du_to_px: f32, - scaled_du_to_px: f32, -} - -impl FontHandle { -} - -impl FontHandleMethods for FontHandle { - fn new_from_template(_: &FontContextHandle, template: Arc, pt_size: Option) - -> Result - { - let (info, face) = if let Some(ref raw_font) = template.bytes { - let font_file = FontFile::new_from_data(&raw_font); - if font_file.is_none() { - // failed to load raw font - return Err(()); - } - - let face = font_file.unwrap().create_face(0, dwrote::DWRITE_FONT_SIMULATIONS_NONE); - let info = FontInfo::new_from_face(&face)?; - (info, face) - } else { - let font = font_from_atom(&template.identifier); - let face = font.create_font_face(); - let info = FontInfo::new_from_font(&font)?; - (info, face) - }; - - let pt_size = pt_size.unwrap_or(au_from_pt(12.)); - let du_per_em = face.metrics().designUnitsPerEm as f32; - - let em_size = pt_size.to_f32_px() / 16.; - let design_units_per_pixel = du_per_em / 16.; - - let design_units_to_pixels = 1. / design_units_per_pixel; - let scaled_design_units_to_pixels = em_size / design_units_per_pixel; - - Ok(FontHandle { - font_data: template.clone(), - face: face, - info: info, - em_size: em_size, - du_per_em: du_per_em, - du_to_px: design_units_to_pixels, - scaled_du_to_px: scaled_design_units_to_pixels, - }) - } - - fn template(&self) -> Arc { - self.font_data.clone() - } - - fn family_name(&self) -> String { - self.info.family_name.clone() - } - - fn face_name(&self) -> Option { - Some(self.info.face_name.clone()) - } - - fn is_italic(&self) -> bool { - match self.info.style { - FontStyle::Normal => false, - FontStyle::Oblique | FontStyle::Italic => true, - } - } - - fn boldness(&self) -> font_weight::T { - self.info.weight - } - - fn stretchiness(&self) -> font_stretch::T { - self.info.stretch - } - - fn glyph_index(&self, codepoint: char) -> Option { - let glyph = self.face.get_glyph_indices(&[codepoint as u32])[0]; - if glyph == 0 { - return None; - } - Some(glyph as GlyphId) - } - - fn glyph_h_advance(&self, glyph: GlyphId) -> Option { - if glyph == 0 { - return None; - } - - let gm = self.face.get_design_glyph_metrics(&[glyph as u16], false)[0]; - let f = (gm.advanceWidth as f32 * self.scaled_du_to_px) as FractionalPixel; - - Some(f) - } - - /// Can this font do basic horizontal LTR shaping without Harfbuzz? - fn can_do_fast_shaping(&self) -> bool { - // TODO copy CachedKernTable from the MacOS X implementation to - // somehwere global and use it here. We could also implement the - // IDirectWriteFontFace1 interface and use the glyph kerning pair - // methods there. 
- false - } - - fn glyph_h_kerning(&self, _: GlyphId, _: GlyphId) -> FractionalPixel { - 0.0 - } - - fn metrics(&self) -> FontMetrics { - let dm = self.face.metrics(); - - let au_from_du = |du| -> Au { Au::from_f32_px(du as f32 * self.du_to_px) }; - let au_from_du_s = |du| -> Au { Au:: from_f32_px(du as f32 * self.scaled_du_to_px) }; - - // anything that we calculate and don't just pull out of self.face.metrics - // is pulled out here for clarity - let leading = dm.ascent - dm.capHeight; - - let metrics = FontMetrics { - underline_size: au_from_du(dm.underlineThickness as i32), - underline_offset: au_from_du_s(dm.underlinePosition as i32), - strikeout_size: au_from_du(dm.strikethroughThickness as i32), - strikeout_offset: au_from_du_s(dm.strikethroughPosition as i32), - leading: au_from_du_s(leading as i32), - x_height: au_from_du_s(dm.xHeight as i32), - em_size: au_from_em(self.em_size as f64), - ascent: au_from_du_s(dm.ascent as i32), - descent: au_from_du_s(dm.descent as i32), - max_advance: au_from_pt(0.0), // FIXME - average_advance: au_from_pt(0.0), // FIXME - line_gap: au_from_du_s((dm.ascent + dm.descent + dm.lineGap as u16) as i32), - }; - debug!("Font metrics (@{} pt): {:?}", self.em_size * 12., metrics); - metrics - } - - fn table_for_tag(&self, tag: FontTableTag) -> Option { - self.face.get_font_table(tag).map(|bytes| FontTable { data: bytes }) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/platform/windows/font_context.rs b/collector/compile-benchmarks/style-servo/components/gfx/platform/windows/font_context.rs deleted file mode 100644 index 26670bb7d..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/platform/windows/font_context.rs +++ /dev/null @@ -1,21 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use heapsize::HeapSizeOf; - -#[derive(Clone, Debug)] -pub struct FontContextHandle; - -impl FontContextHandle { - // *shrug* - pub fn new() -> FontContextHandle { - FontContextHandle {} - } -} - -impl HeapSizeOf for FontContextHandle { - fn heap_size_of_children(&self) -> usize { - 0 - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/platform/windows/font_list.rs b/collector/compile-benchmarks/style-servo/components/gfx/platform/windows/font_list.rs deleted file mode 100644 index 27dfd5e73..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/platform/windows/font_list.rs +++ /dev/null @@ -1,71 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use dwrote::{Font, FontDescriptor, FontCollection}; -use servo_atoms::Atom; -use std::collections::HashMap; -use std::sync::Mutex; -use std::sync::atomic::{Ordering, AtomicUsize}; - -lazy_static! 
{ - static ref FONT_ATOM_COUNTER: AtomicUsize = AtomicUsize::new(1); - static ref FONT_ATOM_MAP: Mutex> = Mutex::new(HashMap::new()); -} - -pub static SANS_SERIF_FONT_FAMILY: &'static str = "Arial"; - -pub fn system_default_family(_: &str) -> Option { - Some("Verdana".to_owned()) -} - -pub fn last_resort_font_families() -> Vec { - vec!("Arial".to_owned()) -} - -pub fn for_each_available_family(mut callback: F) where F: FnMut(String) { - let system_fc = FontCollection::system(); - for family in system_fc.families_iter() { - callback(family.name()); - } -} - -// for_each_variation is supposed to return a string that can be -// atomized and then uniquely used to return back to this font. -// Some platforms use the full postscript name (MacOS X), or -// a font filename. -// -// For windows we're going to use just a basic integer value that -// we'll stringify, and then put them all in a HashMap with -// the actual FontDescriptor there. - -pub fn for_each_variation(family_name: &str, mut callback: F) where F: FnMut(String) { - let system_fc = FontCollection::system(); - if let Some(family) = system_fc.get_font_family_by_name(family_name) { - let count = family.get_font_count(); - for i in 0..count { - let font = family.get_font(i); - let index = FONT_ATOM_COUNTER.fetch_add(1, Ordering::Relaxed); - let index_str = format!("{}", index); - let atom = Atom::from(index_str.clone()); - - { - let descriptor = font.to_descriptor(); - let mut fonts = FONT_ATOM_MAP.lock().unwrap(); - fonts.insert(atom, descriptor); - } - - callback(index_str); - } - } -} - -pub fn descriptor_from_atom(ident: &Atom) -> FontDescriptor { - let fonts = FONT_ATOM_MAP.lock().unwrap(); - fonts.get(ident).unwrap().clone() -} - -pub fn font_from_atom(ident: &Atom) -> Font { - let fonts = FONT_ATOM_MAP.lock().unwrap(); - FontCollection::system().get_font_from_descriptor(fonts.get(ident).unwrap()).unwrap() -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/platform/windows/font_template.rs b/collector/compile-benchmarks/style-servo/components/gfx/platform/windows/font_template.rs deleted file mode 100644 index 12a08f1ff..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/platform/windows/font_template.rs +++ /dev/null @@ -1,49 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ - -use platform::windows::font_list::{descriptor_from_atom, font_from_atom}; -use servo_atoms::Atom; -use std::io; -use webrender_api::NativeFontHandle; - -#[derive(Debug, Deserialize, Serialize)] -pub struct FontTemplateData { - pub bytes: Option>, - pub identifier: Atom, -} - -impl FontTemplateData { - pub fn new(identifier: Atom, - font_data: Option>) -> Result { - Ok(FontTemplateData { - bytes: font_data, - identifier: identifier, - }) - } - - pub fn bytes(&self) -> Vec { - if self.bytes.is_some() { - self.bytes.as_ref().unwrap().clone() - } else { - let font = font_from_atom(&self.identifier); - let face = font.create_font_face(); - let files = face.get_files(); - assert!(files.len() > 0); - - files[0].get_font_file_bytes() - } - } - - pub fn bytes_if_in_memory(&self) -> Option> { - self.bytes.clone() - } - - pub fn native_font(&self) -> Option { - if self.bytes.is_none() { - Some(descriptor_from_atom(&self.identifier)) - } else { - None - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/text/glyph.rs b/collector/compile-benchmarks/style-servo/components/gfx/text/glyph.rs deleted file mode 100644 index c0e1be96d..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/text/glyph.rs +++ /dev/null @@ -1,762 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use app_units::Au; -use euclid::Point2D; -use range::{self, EachIndex, Range, RangeIndex}; -#[cfg(any(target_feature = "sse2", target_feature = "neon"))] -use simd::u32x4; -use std::{fmt, mem, u16}; -use std::cmp::{Ordering, PartialOrd}; -use std::vec::Vec; - -pub use gfx_traits::ByteIndex; - -/// GlyphEntry is a port of Gecko's CompressedGlyph scheme for storing glyph data compactly. -/// -/// In the common case (reasonable glyph advances, no offsets from the font em-box, and one glyph -/// per character), we pack glyph advance, glyph id, and some flags into a single u32. -/// -/// In the uncommon case (multiple glyphs per unicode character, large glyph index/advance, or -/// glyph offsets), we pack the glyph count into GlyphEntry, and store the other glyph information -/// in DetailedGlyphStore. 
-#[derive(Clone, Copy, Debug, Deserialize, PartialEq, Serialize)] -pub struct GlyphEntry { - value: u32, -} - -impl GlyphEntry { - fn new(value: u32) -> GlyphEntry { - GlyphEntry { - value: value, - } - } - - fn initial() -> GlyphEntry { - GlyphEntry::new(0) - } - - // Creates a GlyphEntry for the common case - fn simple(id: GlyphId, advance: Au) -> GlyphEntry { - assert!(is_simple_glyph_id(id)); - assert!(is_simple_advance(advance)); - - let id_mask = id as u32; - let Au(advance) = advance; - let advance_mask = (advance as u32) << GLYPH_ADVANCE_SHIFT; - - GlyphEntry::new(id_mask | advance_mask | FLAG_IS_SIMPLE_GLYPH) - } - - // Create a GlyphEntry for uncommon case; should be accompanied by - // initialization of the actual DetailedGlyph data in DetailedGlyphStore - fn complex(starts_cluster: bool, starts_ligature: bool, glyph_count: usize) -> GlyphEntry { - assert!(glyph_count <= u16::MAX as usize); - - debug!("creating complex glyph entry: starts_cluster={}, starts_ligature={}, \ - glyph_count={}", - starts_cluster, - starts_ligature, - glyph_count); - - GlyphEntry::new(glyph_count as u32) - } - - fn is_initial(&self) -> bool { - *self == GlyphEntry::initial() - } -} - -/// The id of a particular glyph within a font -pub type GlyphId = u32; - -// TODO: make this more type-safe. - -const FLAG_CHAR_IS_SPACE: u32 = 0x40000000; -#[cfg(any(target_feature = "sse2", target_feature = "neon"))] -const FLAG_CHAR_IS_SPACE_SHIFT: u32 = 30; -const FLAG_IS_SIMPLE_GLYPH: u32 = 0x80000000; - -// glyph advance; in Au's. -const GLYPH_ADVANCE_MASK: u32 = 0x3FFF0000; -const GLYPH_ADVANCE_SHIFT: u32 = 16; -const GLYPH_ID_MASK: u32 = 0x0000FFFF; - -// Non-simple glyphs (more than one glyph per char; missing glyph, -// newline, tab, large advance, or nonzero x/y offsets) may have one -// or more detailed glyphs associated with them. They are stored in a -// side array so that there is a 1:1 mapping of GlyphEntry to -// unicode char. - -// The number of detailed glyphs for this char. -const GLYPH_COUNT_MASK: u32 = 0x0000FFFF; - -fn is_simple_glyph_id(id: GlyphId) -> bool { - ((id as u32) & GLYPH_ID_MASK) == id -} - -fn is_simple_advance(advance: Au) -> bool { - advance >= Au(0) && { - let unsigned_au = advance.0 as u32; - (unsigned_au & (GLYPH_ADVANCE_MASK >> GLYPH_ADVANCE_SHIFT)) == unsigned_au - } -} - -pub type DetailedGlyphCount = u16; - -// Getters and setters for GlyphEntry. Setter methods are functional, -// because GlyphEntry is immutable and only a u32 in size. -impl GlyphEntry { - #[inline(always)] - fn advance(&self) -> Au { - Au::new(((self.value & GLYPH_ADVANCE_MASK) >> GLYPH_ADVANCE_SHIFT) as i32) - } - - #[inline] - fn id(&self) -> GlyphId { - self.value & GLYPH_ID_MASK - } - - /// True if original char was normal (U+0020) space. Other chars may - /// map to space glyph, but this does not account for them. - fn char_is_space(&self) -> bool { - self.has_flag(FLAG_CHAR_IS_SPACE) - } - - #[inline(always)] - fn set_char_is_space(&mut self) { - self.value |= FLAG_CHAR_IS_SPACE; - } - - fn glyph_count(&self) -> u16 { - assert!(!self.is_simple()); - (self.value & GLYPH_COUNT_MASK) as u16 - } - - #[inline(always)] - fn is_simple(&self) -> bool { - self.has_flag(FLAG_IS_SIMPLE_GLYPH) - } - - #[inline(always)] - fn has_flag(&self, flag: u32) -> bool { - (self.value & flag) != 0 - } -} - -// Stores data for a detailed glyph, in the case that several glyphs -// correspond to one character, or the glyph's data couldn't be packed. 
-#[derive(Clone, Copy, Debug, Deserialize, Serialize)] -struct DetailedGlyph { - id: GlyphId, - // glyph's advance, in the text's direction (LTR or RTL) - advance: Au, - // glyph's offset from the font's em-box (from top-left) - offset: Point2D, -} - -impl DetailedGlyph { - fn new(id: GlyphId, advance: Au, offset: Point2D) -> DetailedGlyph { - DetailedGlyph { - id: id, - advance: advance, - offset: offset, - } - } -} - -#[derive(Clone, Copy, Debug, Deserialize, Eq, PartialEq, Serialize)] -struct DetailedGlyphRecord { - // source string offset/GlyphEntry offset in the TextRun - entry_offset: ByteIndex, - // offset into the detailed glyphs buffer - detail_offset: usize, -} - -impl PartialOrd for DetailedGlyphRecord { - fn partial_cmp(&self, other: &DetailedGlyphRecord) -> Option { - self.entry_offset.partial_cmp(&other.entry_offset) - } -} - -impl Ord for DetailedGlyphRecord { - fn cmp(&self, other: &DetailedGlyphRecord) -> Ordering { - self.entry_offset.cmp(&other.entry_offset) - } -} - -// Manages the lookup table for detailed glyphs. Sorting is deferred -// until a lookup is actually performed; this matches the expected -// usage pattern of setting/appending all the detailed glyphs, and -// then querying without setting. -#[derive(Clone, Deserialize, Serialize)] -struct DetailedGlyphStore { - // TODO(pcwalton): Allocation of this buffer is expensive. Consider a small-vector - // optimization. - detail_buffer: Vec, - // TODO(pcwalton): Allocation of this buffer is expensive. Consider a small-vector - // optimization. - detail_lookup: Vec, - lookup_is_sorted: bool, -} - -impl<'a> DetailedGlyphStore { - fn new() -> DetailedGlyphStore { - DetailedGlyphStore { - detail_buffer: vec!(), // TODO: default size? - detail_lookup: vec!(), - lookup_is_sorted: false, - } - } - - fn add_detailed_glyphs_for_entry(&mut self, entry_offset: ByteIndex, glyphs: &[DetailedGlyph]) { - let entry = DetailedGlyphRecord { - entry_offset: entry_offset, - detail_offset: self.detail_buffer.len(), - }; - - debug!("Adding entry[off={:?}] for detailed glyphs: {:?}", entry_offset, glyphs); - - /* TODO: don't actually assert this until asserts are compiled - in/out based on severity, debug/release, etc. This assertion - would wreck the complexity of the lookup. - - See Rust Issue #3647, #2228, #3627 for related information. - - do self.detail_lookup.borrow |arr| { - assert !arr.contains(entry) - } - */ - - self.detail_lookup.push(entry); - self.detail_buffer.extend_from_slice(glyphs); - self.lookup_is_sorted = false; - } - - fn detailed_glyphs_for_entry(&'a self, entry_offset: ByteIndex, count: u16) - -> &'a [DetailedGlyph] { - debug!("Requesting detailed glyphs[n={}] for entry[off={:?}]", count, entry_offset); - - // FIXME: Is this right? --pcwalton - // TODO: should fix this somewhere else - if count == 0 { - return &self.detail_buffer[0..0]; - } - - assert!((count as usize) <= self.detail_buffer.len()); - assert!(self.lookup_is_sorted); - - let key = DetailedGlyphRecord { - entry_offset: entry_offset, - detail_offset: 0, // unused - }; - - let i = self.detail_lookup.binary_search(&key) - .expect("Invalid index not found in detailed glyph lookup table!"); - let main_detail_offset = self.detail_lookup[i].detail_offset; - assert!(main_detail_offset + (count as usize) <= self.detail_buffer.len()); - // return a slice into the buffer - &self.detail_buffer[main_detail_offset .. 
main_detail_offset + count as usize] - } - - fn detailed_glyph_with_index(&'a self, - entry_offset: ByteIndex, - detail_offset: u16) - -> &'a DetailedGlyph { - assert!((detail_offset as usize) <= self.detail_buffer.len()); - assert!(self.lookup_is_sorted); - - let key = DetailedGlyphRecord { - entry_offset: entry_offset, - detail_offset: 0, // unused - }; - - let i = self.detail_lookup.binary_search(&key) - .expect("Invalid index not found in detailed glyph lookup table!"); - let main_detail_offset = self.detail_lookup[i].detail_offset; - assert!(main_detail_offset + (detail_offset as usize) < self.detail_buffer.len()); - &self.detail_buffer[main_detail_offset + (detail_offset as usize)] - } - - fn ensure_sorted(&mut self) { - if self.lookup_is_sorted { - return; - } - - // Sorting a unique vector is surprisingly hard. The following - // code is a good argument for using DVecs, but they require - // immutable locations thus don't play well with freezing. - - // Thar be dragons here. You have been warned. (Tips accepted.) - let mut unsorted_records: Vec = vec!(); - mem::swap(&mut self.detail_lookup, &mut unsorted_records); - let mut mut_records: Vec = unsorted_records; - mut_records.sort_by(|a, b| { - if a < b { - Ordering::Less - } else { - Ordering::Greater - } - }); - let mut sorted_records = mut_records; - mem::swap(&mut self.detail_lookup, &mut sorted_records); - - self.lookup_is_sorted = true; - } -} - -// This struct is used by GlyphStore clients to provide new glyph data. -// It should be allocated on the stack and passed by reference to GlyphStore. -#[derive(Clone, Copy)] -pub struct GlyphData { - id: GlyphId, - advance: Au, - offset: Point2D, - cluster_start: bool, - ligature_start: bool, -} - -impl GlyphData { - /// Creates a new entry for one glyph. - pub fn new(id: GlyphId, - advance: Au, - offset: Option>, - cluster_start: bool, - ligature_start: bool) - -> GlyphData { - GlyphData { - id: id, - advance: advance, - offset: offset.unwrap_or(Point2D::zero()), - cluster_start: cluster_start, - ligature_start: ligature_start, - } - } -} - -// This enum is a proxy that's provided to GlyphStore clients when iterating -// through glyphs (either for a particular TextRun offset, or all glyphs). -// Rather than eagerly assembling and copying glyph data, it only retrieves -// values as they are needed from the GlyphStore, using provided offsets. 
-#[derive(Clone, Copy)] -pub enum GlyphInfo<'a> { - Simple(&'a GlyphStore, ByteIndex), - Detail(&'a GlyphStore, ByteIndex, u16), -} - -impl<'a> GlyphInfo<'a> { - pub fn id(self) -> GlyphId { - match self { - GlyphInfo::Simple(store, entry_i) => store.entry_buffer[entry_i.to_usize()].id(), - GlyphInfo::Detail(store, entry_i, detail_j) => { - store.detail_store.detailed_glyph_with_index(entry_i, detail_j).id - } - } - } - - #[inline(always)] - // FIXME: Resolution conflicts with IteratorUtil trait so adding trailing _ - pub fn advance(self) -> Au { - match self { - GlyphInfo::Simple(store, entry_i) => store.entry_buffer[entry_i.to_usize()].advance(), - GlyphInfo::Detail(store, entry_i, detail_j) => { - store.detail_store.detailed_glyph_with_index(entry_i, detail_j).advance - } - } - } - - #[inline] - pub fn offset(self) -> Option> { - match self { - GlyphInfo::Simple(_, _) => None, - GlyphInfo::Detail(store, entry_i, detail_j) => { - Some(store.detail_store.detailed_glyph_with_index(entry_i, detail_j).offset) - } - } - } - - pub fn char_is_space(self) -> bool { - let (store, entry_i) = match self { - GlyphInfo::Simple(store, entry_i) => (store, entry_i), - GlyphInfo::Detail(store, entry_i, _) => (store, entry_i), - }; - - store.char_is_space(entry_i) - } -} - -/// Stores the glyph data belonging to a text run. -/// -/// Simple glyphs are stored inline in the `entry_buffer`, detailed glyphs are -/// stored as pointers into the `detail_store`. -/// -/// ~~~ignore -/// +- GlyphStore --------------------------------+ -/// | +---+---+---+---+---+---+---+ | -/// | entry_buffer: | | s | | s | | s | s | | d = detailed -/// | +-|-+---+-|-+---+-|-+---+---+ | s = simple -/// | | | | | -/// | | +---+-------+ | -/// | | | | -/// | +-V-+-V-+ | -/// | detail_store: | d | d | | -/// | +---+---+ | -/// +---------------------------------------------+ -/// ~~~ -#[derive(Clone, Deserialize, Serialize)] -pub struct GlyphStore { - // TODO(pcwalton): Allocation of this buffer is expensive. Consider a small-vector - // optimization. - /// A buffer of glyphs within the text run, in the order in which they - /// appear in the input text. - /// Any changes will also need to be reflected in - /// transmute_entry_buffer_to_u32_buffer(). - entry_buffer: Vec, - /// A store of the detailed glyph data. Detailed glyphs contained in the - /// `entry_buffer` point to locations in this data structure. - detail_store: DetailedGlyphStore, - - /// A cache of the advance of the entire glyph store. - total_advance: Au, - /// A cache of the number of spaces in the entire glyph store. - total_spaces: i32, - - /// Used to check if fast path should be used in glyph iteration. - has_detailed_glyphs: bool, - is_whitespace: bool, - is_rtl: bool, -} - -impl<'a> GlyphStore { - /// Initializes the glyph store, but doesn't actually shape anything. - /// - /// Use the `add_*` methods to store glyph data. 
- pub fn new(length: usize, is_whitespace: bool, is_rtl: bool) -> GlyphStore { - assert!(length > 0); - - GlyphStore { - entry_buffer: vec![GlyphEntry::initial(); length], - detail_store: DetailedGlyphStore::new(), - total_advance: Au(0), - total_spaces: 0, - has_detailed_glyphs: false, - is_whitespace: is_whitespace, - is_rtl: is_rtl, - } - } - - #[inline] - pub fn len(&self) -> ByteIndex { - ByteIndex(self.entry_buffer.len() as isize) - } - - #[inline] - pub fn is_whitespace(&self) -> bool { - self.is_whitespace - } - - pub fn finalize_changes(&mut self) { - self.detail_store.ensure_sorted(); - self.cache_total_advance_and_spaces() - } - - #[inline(never)] - fn cache_total_advance_and_spaces(&mut self) { - let mut total_advance = Au(0); - let mut total_spaces = 0; - for glyph in self.iter_glyphs_for_byte_range(&Range::new(ByteIndex(0), self.len())) { - total_advance = total_advance + glyph.advance(); - if glyph.char_is_space() { - total_spaces += 1; - } - } - self.total_advance = total_advance; - self.total_spaces = total_spaces; - } - - /// Adds a single glyph. - pub fn add_glyph_for_byte_index(&mut self, - i: ByteIndex, - character: char, - data: &GlyphData) { - let glyph_is_compressible = is_simple_glyph_id(data.id) && - is_simple_advance(data.advance) && - data.offset == Point2D::zero() && - data.cluster_start; // others are stored in detail buffer - - debug_assert!(data.ligature_start); // can't compress ligature continuation glyphs. - debug_assert!(i < self.len()); - - let mut entry = if glyph_is_compressible { - GlyphEntry::simple(data.id, data.advance) - } else { - let glyph = &[DetailedGlyph::new(data.id, data.advance, data.offset)]; - self.has_detailed_glyphs = true; - self.detail_store.add_detailed_glyphs_for_entry(i, glyph); - GlyphEntry::complex(data.cluster_start, data.ligature_start, 1) - }; - - if character == ' ' { - entry.set_char_is_space() - } - - self.entry_buffer[i.to_usize()] = entry; - } - - pub fn add_glyphs_for_byte_index(&mut self, i: ByteIndex, data_for_glyphs: &[GlyphData]) { - assert!(i < self.len()); - assert!(data_for_glyphs.len() > 0); - - let glyph_count = data_for_glyphs.len(); - - let first_glyph_data = data_for_glyphs[0]; - let glyphs_vec: Vec = (0..glyph_count).map(|i| { - DetailedGlyph::new(data_for_glyphs[i].id, - data_for_glyphs[i].advance, - data_for_glyphs[i].offset) - }).collect(); - - self.has_detailed_glyphs = true; - self.detail_store.add_detailed_glyphs_for_entry(i, &glyphs_vec); - - let entry = GlyphEntry::complex(first_glyph_data.cluster_start, - first_glyph_data.ligature_start, - glyph_count); - - debug!("Adding multiple glyphs[idx={:?}, count={}]: {:?}", i, glyph_count, entry); - - self.entry_buffer[i.to_usize()] = entry; - } - - #[inline] - pub fn iter_glyphs_for_byte_range(&'a self, range: &Range) -> GlyphIterator<'a> { - if range.begin() >= self.len() { - panic!("iter_glyphs_for_range: range.begin beyond length!"); - } - if range.end() > self.len() { - panic!("iter_glyphs_for_range: range.end beyond length!"); - } - - GlyphIterator { - store: self, - byte_index: if self.is_rtl { range.end() } else { range.begin() - ByteIndex(1) }, - byte_range: *range, - glyph_range: None, - } - } - - // Scan the glyphs for a given range until we reach a given advance. Returns the index - // and advance of the glyph in the range at the given advance, if reached. Otherwise, returns the - // the number of glyphs and the advance for the given range. 
- #[inline] - pub fn range_index_of_advance(&self, range: &Range, advance: Au, extra_word_spacing: Au) -> (usize, Au) { - let mut index = 0; - let mut current_advance = Au(0); - for glyph in self.iter_glyphs_for_byte_range(range) { - if glyph.char_is_space() { - current_advance += glyph.advance() + extra_word_spacing - } else { - current_advance += glyph.advance() - } - if current_advance > advance { - break; - } - index += 1; - } - (index, current_advance) - } - - #[inline] - pub fn advance_for_byte_range(&self, range: &Range, extra_word_spacing: Au) -> Au { - if range.begin() == ByteIndex(0) && range.end() == self.len() { - self.total_advance + extra_word_spacing * self.total_spaces - } else if !self.has_detailed_glyphs { - self.advance_for_byte_range_simple_glyphs(range, extra_word_spacing) - } else { - self.advance_for_byte_range_slow_path(range, extra_word_spacing) - } - } - - #[inline] - pub fn advance_for_byte_range_slow_path(&self, range: &Range, extra_word_spacing: Au) -> Au { - self.iter_glyphs_for_byte_range(range) - .fold(Au(0), |advance, glyph| { - if glyph.char_is_space() { - advance + glyph.advance() + extra_word_spacing - } else { - advance + glyph.advance() - } - }) - } - - #[inline] - #[cfg(any(target_feature = "sse2", target_feature = "neon"))] - fn advance_for_byte_range_simple_glyphs(&self, range: &Range, extra_word_spacing: Au) -> Au { - let advance_mask = u32x4::splat(GLYPH_ADVANCE_MASK); - let space_flag_mask = u32x4::splat(FLAG_CHAR_IS_SPACE); - let mut simd_advance = u32x4::splat(0); - let mut simd_spaces = u32x4::splat(0); - let begin = range.begin().to_usize(); - let len = range.length().to_usize(); - let num_simd_iterations = len / 4; - let leftover_entries = range.end().to_usize() - (len - num_simd_iterations * 4); - let buf = self.transmute_entry_buffer_to_u32_buffer(); - - for i in 0..num_simd_iterations { - let v = u32x4::load(buf, begin + i * 4); - let advance = (v & advance_mask) >> GLYPH_ADVANCE_SHIFT; - let spaces = (v & space_flag_mask) >> FLAG_CHAR_IS_SPACE_SHIFT; - simd_advance = simd_advance + advance; - simd_spaces = simd_spaces + spaces; - } - - let advance = - (simd_advance.extract(0) + - simd_advance.extract(1) + - simd_advance.extract(2) + - simd_advance.extract(3)) as i32; - let spaces = - (simd_spaces.extract(0) + - simd_spaces.extract(1) + - simd_spaces.extract(2) + - simd_spaces.extract(3)) as i32; - let mut leftover_advance = Au(0); - let mut leftover_spaces = 0; - for i in leftover_entries..range.end().to_usize() { - leftover_advance = leftover_advance + self.entry_buffer[i].advance(); - if self.entry_buffer[i].char_is_space() { - leftover_spaces += 1; - } - } - Au::new(advance) + leftover_advance + extra_word_spacing * (spaces + leftover_spaces) - } - - /// When SIMD isn't available, fallback to the slow path. - #[inline] - #[cfg(not(any(target_feature = "sse2", target_feature = "neon")))] - fn advance_for_byte_range_simple_glyphs(&self, range: &Range, extra_word_spacing: Au) -> Au { - self.advance_for_byte_range_slow_path(range, extra_word_spacing) - } - - /// Used for SIMD. 
- #[inline] - #[cfg(any(target_feature = "sse2", target_feature = "neon"))] - #[allow(unsafe_code)] - fn transmute_entry_buffer_to_u32_buffer(&self) -> &[u32] { - unsafe { mem::transmute(self.entry_buffer.as_slice()) } - } - - pub fn char_is_space(&self, i: ByteIndex) -> bool { - assert!(i < self.len()); - self.entry_buffer[i.to_usize()].char_is_space() - } - - pub fn space_count_in_range(&self, range: &Range) -> u32 { - let mut spaces = 0; - for index in range.each_index() { - if self.char_is_space(index) { - spaces += 1 - } - } - spaces - } -} - -impl fmt::Debug for GlyphStore { - fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - write!(formatter, "GlyphStore:\n")?; - let mut detailed_buffer = self.detail_store.detail_buffer.iter(); - for entry in self.entry_buffer.iter() { - if entry.is_simple() { - write!(formatter, - " simple id={:?} advance={:?}\n", - entry.id(), - entry.advance())?; - continue - } - if entry.is_initial() { - continue - } - write!(formatter, " complex...")?; - if detailed_buffer.next().is_none() { - continue - } - write!(formatter, - " detailed id={:?} advance={:?}\n", - entry.id(), - entry.advance())?; - } - Ok(()) - } -} - -/// An iterator over the glyphs in a byte range in a `GlyphStore`. -pub struct GlyphIterator<'a> { - store: &'a GlyphStore, - byte_index: ByteIndex, - byte_range: Range, - glyph_range: Option>, -} - -impl<'a> GlyphIterator<'a> { - // Slow path when there is a glyph range. - #[inline(never)] - fn next_glyph_range(&mut self) -> Option> { - match self.glyph_range.as_mut().unwrap().next() { - Some(j) => { - Some(GlyphInfo::Detail(self.store, self.byte_index, j.get() as u16 /* ??? */)) - } - None => { - // No more glyphs for current character. Try to get another. - self.glyph_range = None; - self.next() - } - } - } - - // Slow path when there is a complex glyph. - #[inline(never)] - fn next_complex_glyph(&mut self, entry: &GlyphEntry, i: ByteIndex) -> Option> { - let glyphs = self.store.detail_store.detailed_glyphs_for_entry(i, entry.glyph_count()); - self.glyph_range = Some(range::each_index(ByteIndex(0), ByteIndex(glyphs.len() as isize))); - self.next() - } -} - -impl<'a> Iterator for GlyphIterator<'a> { - type Item = GlyphInfo<'a>; - - // I tried to start with something simpler and apply FlatMap, but the - // inability to store free variables in the FlatMap struct was problematic. - // - // This function consists of the fast path and is designed to be inlined into its caller. The - // slow paths, which should not be inlined, are `next_glyph_range()` and - // `next_complex_glyph()`. - #[inline(always)] - fn next(&mut self) -> Option> { - // Would use 'match' here but it borrows contents in a way that interferes with mutation. - if self.glyph_range.is_some() { - return self.next_glyph_range() - } - - // No glyph range. Look at next byte. - self.byte_index = self.byte_index + if self.store.is_rtl { - ByteIndex(-1) - } else { - ByteIndex(1) - }; - let i = self.byte_index; - if !self.byte_range.contains(i) { - return None - } - debug_assert!(i < self.store.len()); - let entry = self.store.entry_buffer[i.to_usize()]; - if entry.is_simple() { - Some(GlyphInfo::Simple(self.store, i)) - } else { - // Fall back to the slow path. 
- self.next_complex_glyph(&entry, i) - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/text/mod.rs b/collector/compile-benchmarks/style-servo/components/gfx/text/mod.rs deleted file mode 100644 index 5aae08764..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/text/mod.rs +++ /dev/null @@ -1,12 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -pub use text::shaping::Shaper; -pub use text::text_run::TextRun; - -pub mod glyph; -pub mod shaping; -pub mod text_run; -pub mod util; - diff --git a/collector/compile-benchmarks/style-servo/components/gfx/text/shaping/harfbuzz.rs b/collector/compile-benchmarks/style-servo/components/gfx/text/shaping/harfbuzz.rs deleted file mode 100644 index 1069e3460..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/text/shaping/harfbuzz.rs +++ /dev/null @@ -1,533 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -#![allow(unsafe_code)] - -use app_units::Au; -use euclid::Point2D; -use font::{DISABLE_KERNING_SHAPING_FLAG, Font, FontTableMethods, FontTableTag}; -use font::{IGNORE_LIGATURES_SHAPING_FLAG, KERN, RTL_FLAG, ShapingOptions}; -use harfbuzz::{HB_DIRECTION_LTR, HB_DIRECTION_RTL, HB_MEMORY_MODE_READONLY}; -use harfbuzz::{hb_blob_create, hb_face_create_for_tables}; -use harfbuzz::{hb_buffer_create, hb_font_destroy}; -use harfbuzz::{hb_buffer_get_glyph_infos, hb_shape}; -use harfbuzz::{hb_buffer_set_direction, hb_buffer_set_script}; -use harfbuzz::{hb_buffer_t, hb_codepoint_t, hb_font_funcs_t}; -use harfbuzz::{hb_face_t, hb_font_t}; -use harfbuzz::{hb_position_t, hb_tag_t}; -use harfbuzz::hb_blob_t; -use harfbuzz::hb_bool_t; -use harfbuzz::hb_buffer_add_utf8; -use harfbuzz::hb_buffer_destroy; -use harfbuzz::hb_buffer_get_glyph_positions; -use harfbuzz::hb_buffer_get_length; -use harfbuzz::hb_face_destroy; -use harfbuzz::hb_feature_t; -use harfbuzz::hb_font_create; -use harfbuzz::hb_font_funcs_create; -use harfbuzz::hb_font_funcs_set_glyph_func; -use harfbuzz::hb_font_funcs_set_glyph_h_advance_func; -use harfbuzz::hb_font_funcs_set_glyph_h_kerning_func; -use harfbuzz::hb_font_set_funcs; -use harfbuzz::hb_font_set_ppem; -use harfbuzz::hb_font_set_scale; -use harfbuzz::hb_glyph_info_t; -use harfbuzz::hb_glyph_position_t; -use libc::{c_char, c_int, c_uint, c_void}; -use platform::font::FontTable; -use std::{char, cmp, ptr}; -use text::glyph::{ByteIndex, GlyphData, GlyphId, GlyphStore}; -use text::shaping::ShaperMethods; -use text::util::{fixed_to_float, float_to_fixed, is_bidi_control}; - -const NO_GLYPH: i32 = -1; -const LIGA: u32 = ot_tag!('l', 'i', 'g', 'a'); - -pub struct ShapedGlyphData { - count: usize, - glyph_infos: *mut hb_glyph_info_t, - pos_infos: *mut hb_glyph_position_t, -} - -pub struct ShapedGlyphEntry { - codepoint: GlyphId, - advance: Au, - offset: Option>, -} - -impl ShapedGlyphData { - pub fn new(buffer: *mut hb_buffer_t) -> ShapedGlyphData { - unsafe { - let mut glyph_count = 0; - let glyph_infos = hb_buffer_get_glyph_infos(buffer, &mut glyph_count); - assert!(!glyph_infos.is_null()); - let mut pos_count = 0; - let pos_infos = hb_buffer_get_glyph_positions(buffer, &mut pos_count); - assert!(!pos_infos.is_null()); - assert!(glyph_count == 
pos_count); - - ShapedGlyphData { - count: glyph_count as usize, - glyph_infos: glyph_infos, - pos_infos: pos_infos, - } - } - } - - #[inline(always)] - fn byte_offset_of_glyph(&self, i: usize) -> u32 { - assert!(i < self.count); - - unsafe { - let glyph_info_i = self.glyph_infos.offset(i as isize); - (*glyph_info_i).cluster - } - } - - pub fn len(&self) -> usize { - self.count - } - - /// Returns shaped glyph data for one glyph, and updates the y-position of the pen. - pub fn entry_for_glyph(&self, i: usize, y_pos: &mut Au) -> ShapedGlyphEntry { - assert!(i < self.count); - - unsafe { - let glyph_info_i = self.glyph_infos.offset(i as isize); - let pos_info_i = self.pos_infos.offset(i as isize); - let x_offset = Shaper::fixed_to_float((*pos_info_i).x_offset); - let y_offset = Shaper::fixed_to_float((*pos_info_i).y_offset); - let x_advance = Shaper::fixed_to_float((*pos_info_i).x_advance); - let y_advance = Shaper::fixed_to_float((*pos_info_i).y_advance); - - let x_offset = Au::from_f64_px(x_offset); - let y_offset = Au::from_f64_px(y_offset); - let x_advance = Au::from_f64_px(x_advance); - let y_advance = Au::from_f64_px(y_advance); - - let offset = if x_offset == Au(0) && y_offset == Au(0) && y_advance == Au(0) { - None - } else { - // adjust the pen.. - if y_advance > Au(0) { - *y_pos = *y_pos - y_advance; - } - - Some(Point2D::new(x_offset, *y_pos - y_offset)) - }; - - ShapedGlyphEntry { - codepoint: (*glyph_info_i).codepoint as GlyphId, - advance: x_advance, - offset: offset, - } - } - } -} - -#[derive(Debug)] -pub struct Shaper { - hb_face: *mut hb_face_t, - hb_font: *mut hb_font_t, - font: *const Font, -} - -impl Drop for Shaper { - fn drop(&mut self) { - unsafe { - assert!(!self.hb_face.is_null()); - hb_face_destroy(self.hb_face); - - assert!(!self.hb_font.is_null()); - hb_font_destroy(self.hb_font); - } - } -} - -impl Shaper { - pub fn new(font: *const Font) -> Shaper { - unsafe { - let hb_face: *mut hb_face_t = - hb_face_create_for_tables(Some(font_table_func), - font as *const c_void as *mut c_void, - None); - let hb_font: *mut hb_font_t = hb_font_create(hb_face); - - // Set points-per-em. if zero, performs no hinting in that direction. - let pt_size = (*font).actual_pt_size.to_f64_px(); - hb_font_set_ppem(hb_font, pt_size as c_uint, pt_size as c_uint); - - // Set scaling. Note that this takes 16.16 fixed point. - hb_font_set_scale(hb_font, - Shaper::float_to_fixed(pt_size) as c_int, - Shaper::float_to_fixed(pt_size) as c_int); - - // configure static function callbacks. - hb_font_set_funcs(hb_font, HB_FONT_FUNCS.0, font as *mut Font as *mut c_void, None); - - Shaper { - hb_face: hb_face, - hb_font: hb_font, - font: font, - } - } - } - - fn float_to_fixed(f: f64) -> i32 { - float_to_fixed(16, f) - } - - fn fixed_to_float(i: hb_position_t) -> f64 { - fixed_to_float(16, i) - } -} - -impl ShaperMethods for Shaper { - /// Calculate the layout metrics associated with the given text when painted in a specific - /// font. 
- fn shape_text(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore) { - unsafe { - let hb_buffer: *mut hb_buffer_t = hb_buffer_create(); - hb_buffer_set_direction(hb_buffer, if options.flags.contains(RTL_FLAG) { - HB_DIRECTION_RTL - } else { - HB_DIRECTION_LTR - }); - - hb_buffer_set_script(hb_buffer, options.script.to_hb_script()); - - hb_buffer_add_utf8(hb_buffer, - text.as_ptr() as *const c_char, - text.len() as c_int, - 0, - text.len() as c_int); - - let mut features = Vec::new(); - if options.flags.contains(IGNORE_LIGATURES_SHAPING_FLAG) { - features.push(hb_feature_t { - tag: LIGA, - value: 0, - start: 0, - end: hb_buffer_get_length(hb_buffer), - }) - } - if options.flags.contains(DISABLE_KERNING_SHAPING_FLAG) { - features.push(hb_feature_t { - tag: KERN, - value: 0, - start: 0, - end: hb_buffer_get_length(hb_buffer), - }) - } - - hb_shape(self.hb_font, hb_buffer, features.as_mut_ptr(), features.len() as u32); - self.save_glyph_results(text, options, glyphs, hb_buffer); - hb_buffer_destroy(hb_buffer); - } - } -} - -impl Shaper { - fn save_glyph_results(&self, - text: &str, - options: &ShapingOptions, - glyphs: &mut GlyphStore, - buffer: *mut hb_buffer_t) { - let glyph_data = ShapedGlyphData::new(buffer); - let glyph_count = glyph_data.len(); - let byte_max = text.len(); - - debug!("Shaped text[byte count={}], got back {} glyph info records.", - byte_max, - glyph_count); - - // make map of what chars have glyphs - let mut byte_to_glyph = vec![NO_GLYPH; byte_max]; - - debug!("(glyph idx) -> (text byte offset)"); - for i in 0..glyph_data.len() { - let loc = glyph_data.byte_offset_of_glyph(i) as usize; - if loc < byte_max { - byte_to_glyph[loc] = i as i32; - } else { - debug!("ERROR: tried to set out of range byte_to_glyph: idx={}, glyph idx={}", - loc, - i); - } - debug!("{} -> {}", i, loc); - } - - debug!("text: {:?}", text); - debug!("(char idx): char->(glyph index):"); - for (i, ch) in text.char_indices() { - debug!("{}: {:?} --> {}", i, ch, byte_to_glyph[i]); - } - - let mut glyph_span = 0..0; - let mut byte_range = 0..0; - - let mut y_pos = Au(0); - - // main loop over each glyph. each iteration usually processes 1 glyph and 1+ chars. - // in cases with complex glyph-character associations, 2+ glyphs and 1+ chars can be - // processed. - while glyph_span.start < glyph_count { - debug!("Processing glyph at idx={}", glyph_span.start); - glyph_span.end = glyph_span.start; - byte_range.end = glyph_data.byte_offset_of_glyph(glyph_span.start) as usize; - - while byte_range.end < byte_max { - byte_range.end += 1; - // Extend the byte range to include any following byte without its own glyph. - while byte_range.end < byte_max && byte_to_glyph[byte_range.end] == NO_GLYPH { - byte_range.end += 1; - } - - // Extend the glyph range to include all glyphs covered by bytes processed so far. - let mut max_glyph_idx = glyph_span.end; - for glyph_idx in &byte_to_glyph[byte_range.clone()] { - if *glyph_idx != NO_GLYPH { - max_glyph_idx = cmp::max(*glyph_idx as usize + 1, max_glyph_idx); - } - } - if max_glyph_idx > glyph_span.end { - glyph_span.end = max_glyph_idx; - debug!("Extended glyph span to {:?}", glyph_span); - } - - // if there's just one glyph, then we don't need further checks. - if glyph_span.len() == 1 { break; } - - // if no glyphs were found yet, extend the char byte range more. - if glyph_span.len() == 0 { continue; } - - // If byte_range now includes all the byte offsets found in glyph_span, then we - // have found a contiguous "cluster" and can stop extending it. 
- let mut all_glyphs_are_within_cluster: bool = true; - for j in glyph_span.clone() { - let loc = glyph_data.byte_offset_of_glyph(j); - if !byte_range.contains(loc as usize) { - all_glyphs_are_within_cluster = false; - break - } - } - if all_glyphs_are_within_cluster { - break - } - - // Otherwise, the bytes we have seen so far correspond to a non-contiguous set of - // glyphs. Keep extending byte_range until we fill in all the holes in the glyph - // span or reach the end of the text. - } - - assert!(byte_range.len() > 0); - assert!(glyph_span.len() > 0); - - // Now byte_range is the ligature clump formed by the glyphs in glyph_span. - // We will save these glyphs to the glyph store at the index of the first byte. - let byte_idx = ByteIndex(byte_range.start as isize); - - if glyph_span.len() == 1 { - // Fast path: 1-to-1 mapping of byte offset to single glyph. - // - // TODO(Issue #214): cluster ranges need to be computed before - // shaping, and then consulted here. - // for now, just pretend that every character is a cluster start. - // (i.e., pretend there are no combining character sequences). - // 1-to-1 mapping of character to glyph also treated as ligature start. - // - // NB: When we acquire the ability to handle ligatures that cross word boundaries, - // we'll need to do something special to handle `word-spacing` properly. - let character = text[byte_range.clone()].chars().next().unwrap(); - if is_bidi_control(character) { - // Don't add any glyphs for bidi control chars - } else if character == '\t' { - // Treat tabs in pre-formatted text as a fixed number of spaces. - // - // TODO: Proper tab stops. - const TAB_COLS: i32 = 8; - let (space_glyph_id, space_advance) = glyph_space_advance(self.font); - let advance = Au::from_f64_px(space_advance) * TAB_COLS; - let data = GlyphData::new(space_glyph_id, - advance, - Default::default(), - true, - true); - glyphs.add_glyph_for_byte_index(byte_idx, character, &data); - } else { - let shape = glyph_data.entry_for_glyph(glyph_span.start, &mut y_pos); - let advance = self.advance_for_shaped_glyph(shape.advance, character, options); - let data = GlyphData::new(shape.codepoint, - advance, - shape.offset, - true, - true); - glyphs.add_glyph_for_byte_index(byte_idx, character, &data); - } - } else { - // collect all glyphs to be assigned to the first character. - let mut datas = vec!(); - - for glyph_i in glyph_span.clone() { - let shape = glyph_data.entry_for_glyph(glyph_i, &mut y_pos); - datas.push(GlyphData::new(shape.codepoint, - shape.advance, - shape.offset, - true, // treat as cluster start - glyph_i > glyph_span.start)); - // all but first are ligature continuations - } - // now add the detailed glyph entry. - glyphs.add_glyphs_for_byte_index(byte_idx, &datas); - } - - glyph_span.start = glyph_span.end; - byte_range.start = byte_range.end; - } - - // this must be called after adding all glyph data; it sorts the - // lookup table for finding detailed glyphs by associated char index. - glyphs.finalize_changes(); - } - - fn advance_for_shaped_glyph(&self, mut advance: Au, character: char, options: &ShapingOptions) - -> Au { - if let Some(letter_spacing) = options.letter_spacing { - advance = advance + letter_spacing; - }; - - // CSS 2.1 § 16.4 states that "word spacing affects each space (U+0020) and non-breaking - // space (U+00A0) left in the text after the white space processing rules have been - // applied. The effect of the property on other word-separator characters is undefined." 
- // We elect to only space the two required code points. - if character == ' ' || character == '\u{a0}' { - // https://drafts.csswg.org/css-text-3/#word-spacing-property - let (length, percent) = options.word_spacing; - advance = (advance + length) + Au::new((advance.0 as f32 * percent.into_inner()) as i32); - } - - advance - } -} - -/// Callbacks from Harfbuzz when font map and glyph advance lookup needed. -struct FontFuncs(*mut hb_font_funcs_t); - -unsafe impl Sync for FontFuncs {} - -lazy_static! { - static ref HB_FONT_FUNCS: FontFuncs = unsafe { - let hb_funcs = hb_font_funcs_create(); - hb_font_funcs_set_glyph_func(hb_funcs, Some(glyph_func), ptr::null_mut(), None); - hb_font_funcs_set_glyph_h_advance_func( - hb_funcs, Some(glyph_h_advance_func), ptr::null_mut(), None); - hb_font_funcs_set_glyph_h_kerning_func( - hb_funcs, Some(glyph_h_kerning_func), ptr::null_mut(), None); - - FontFuncs(hb_funcs) - }; -} - -extern fn glyph_func(_: *mut hb_font_t, - font_data: *mut c_void, - unicode: hb_codepoint_t, - _: hb_codepoint_t, - glyph: *mut hb_codepoint_t, - _: *mut c_void) - -> hb_bool_t { - let font: *const Font = font_data as *const Font; - assert!(!font.is_null()); - - unsafe { - match (*font).glyph_index(char::from_u32(unicode).unwrap()) { - Some(g) => { - *glyph = g as hb_codepoint_t; - true as hb_bool_t - } - None => false as hb_bool_t - } - } -} - -extern fn glyph_h_advance_func(_: *mut hb_font_t, - font_data: *mut c_void, - glyph: hb_codepoint_t, - _: *mut c_void) - -> hb_position_t { - let font: *mut Font = font_data as *mut Font; - assert!(!font.is_null()); - - unsafe { - let advance = (*font).glyph_h_advance(glyph as GlyphId); - Shaper::float_to_fixed(advance) - } -} - -fn glyph_space_advance(font: *const Font) -> (hb_codepoint_t, f64) { - let space_unicode = ' '; - let space_glyph: hb_codepoint_t; - match unsafe { (*font).glyph_index(space_unicode) } { - Some(g) => { - space_glyph = g as hb_codepoint_t; - } - None => panic!("No space info") - } - let space_advance = unsafe { (*font).glyph_h_advance(space_glyph as GlyphId) }; - (space_glyph, space_advance) -} - -extern fn glyph_h_kerning_func(_: *mut hb_font_t, - font_data: *mut c_void, - first_glyph: hb_codepoint_t, - second_glyph: hb_codepoint_t, - _: *mut c_void) - -> hb_position_t { - let font: *mut Font = font_data as *mut Font; - assert!(!font.is_null()); - - unsafe { - let advance = (*font).glyph_h_kerning(first_glyph as GlyphId, second_glyph as GlyphId); - Shaper::float_to_fixed(advance) - } -} - -// Callback to get a font table out of a font. -extern fn font_table_func(_: *mut hb_face_t, - tag: hb_tag_t, - user_data: *mut c_void) - -> *mut hb_blob_t { - unsafe { - // NB: These asserts have security implications. - let font = user_data as *const Font; - assert!(!font.is_null()); - - // TODO(Issue #197): reuse font table data, which will change the unsound trickery here. - match (*font).table_for_tag(tag as FontTableTag) { - None => ptr::null_mut(), - Some(font_table) => { - // `Box::into_raw` intentionally leaks the FontTable so we don't destroy the buffer - // while HarfBuzz is using it. When HarfBuzz is done with the buffer, it will pass - // this raw pointer back to `destroy_blob_func` which will deallocate the Box. - let font_table_ptr = Box::into_raw(box font_table); - - let buf = (*font_table_ptr).buffer(); - // HarfBuzz calls `destroy_blob_func` when the buffer is no longer needed. 
- let blob = hb_blob_create(buf.as_ptr() as *const c_char, - buf.len() as c_uint, - HB_MEMORY_MODE_READONLY, - font_table_ptr as *mut c_void, - Some(destroy_blob_func)); - - assert!(!blob.is_null()); - blob - } - } - } -} - -extern fn destroy_blob_func(font_table_ptr: *mut c_void) { - unsafe { - drop(Box::from_raw(font_table_ptr as *mut FontTable)); - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/text/shaping/mod.rs b/collector/compile-benchmarks/style-servo/components/gfx/text/shaping/mod.rs deleted file mode 100644 index 79e5452db..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/text/shaping/mod.rs +++ /dev/null @@ -1,20 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! Shaper encapsulates a specific shaper, such as Harfbuzz, -//! Uniscribe, Pango, or Coretext. -//! -//! Currently, only harfbuzz bindings are implemented. - -use font::ShapingOptions; -use text::glyph::GlyphStore; - -pub use text::shaping::harfbuzz::Shaper; - -pub mod harfbuzz; - -pub trait ShaperMethods { - fn shape_text(&self, text: &str, options: &ShapingOptions, glyphs: &mut GlyphStore); -} - diff --git a/collector/compile-benchmarks/style-servo/components/gfx/text/text_run.rs b/collector/compile-benchmarks/style-servo/components/gfx/text/text_run.rs deleted file mode 100644 index d34e4788c..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/text/text_run.rs +++ /dev/null @@ -1,382 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -use app_units::Au; -use font::{Font, FontHandleMethods, FontMetrics, IS_WHITESPACE_SHAPING_FLAG, KEEP_ALL_FLAG}; -use font::{RunMetrics, ShapingOptions}; -use platform::font_template::FontTemplateData; -use range::Range; -use std::cell::Cell; -use std::cmp::{Ordering, max}; -use std::slice::Iter; -use std::sync::Arc; -use style::str::char_is_whitespace; -use text::glyph::{ByteIndex, GlyphStore}; -use unicode_bidi as bidi; -use webrender_api; -use xi_unicode::LineBreakIterator; - -thread_local! { - static INDEX_OF_FIRST_GLYPH_RUN_CACHE: Cell> = - Cell::new(None) -} - -/// A single "paragraph" of text in one font size and style. -#[derive(Clone, Deserialize, Serialize)] -pub struct TextRun { - /// The UTF-8 string represented by this text run. - pub text: Arc, - pub font_template: Arc, - pub actual_pt_size: Au, - pub font_metrics: FontMetrics, - pub font_key: webrender_api::FontInstanceKey, - /// The glyph runs that make up this text run. - pub glyphs: Arc>, - pub bidi_level: bidi::Level, - pub extra_word_spacing: Au, -} - -impl Drop for TextRun { - fn drop(&mut self) { - // Invalidate the glyph run cache if it was our text run that got freed. - INDEX_OF_FIRST_GLYPH_RUN_CACHE.with(|index_of_first_glyph_run_cache| { - if let Some((text_run_ptr, _, _)) = index_of_first_glyph_run_cache.get() { - if text_run_ptr == (self as *const TextRun) { - index_of_first_glyph_run_cache.set(None); - } - } - }) - } -} - -/// A single series of glyphs within a text run. -#[derive(Clone, Deserialize, Serialize)] -pub struct GlyphRun { - /// The glyphs. - pub glyph_store: Arc, - /// The byte range of characters in the containing run. 
- pub range: Range, -} - -pub struct NaturalWordSliceIterator<'a> { - glyphs: &'a [GlyphRun], - index: usize, - range: Range, - reverse: bool, -} - -impl GlyphRun { - fn compare(&self, key: &ByteIndex) -> Ordering { - if *key < self.range.begin() { - Ordering::Greater - } else if *key >= self.range.end() { - Ordering::Less - } else { - Ordering::Equal - } - } -} - -/// A "slice" of a text run is a series of contiguous glyphs that all belong to the same glyph -/// store. Line breaking strategies yield these. -pub struct TextRunSlice<'a> { - /// The glyph store that the glyphs in this slice belong to. - pub glyphs: &'a GlyphStore, - /// The byte index that this slice begins at, relative to the start of the *text run*. - pub offset: ByteIndex, - /// The range that these glyphs encompass, relative to the start of the *glyph store*. - pub range: Range, -} - -impl<'a> TextRunSlice<'a> { - /// Returns the range that these glyphs encompass, relative to the start of the *text run*. - #[inline] - pub fn text_run_range(&self) -> Range { - let mut range = self.range; - range.shift_by(self.offset); - range - } -} - -impl<'a> Iterator for NaturalWordSliceIterator<'a> { - type Item = TextRunSlice<'a>; - - // inline(always) due to the inefficient rt failures messing up inline heuristics, I think. - #[inline(always)] - fn next(&mut self) -> Option> { - let slice_glyphs; - if self.reverse { - if self.index == 0 { - return None; - } - self.index -= 1; - slice_glyphs = &self.glyphs[self.index]; - } else { - if self.index >= self.glyphs.len() { - return None; - } - slice_glyphs = &self.glyphs[self.index]; - self.index += 1; - } - - let mut byte_range = self.range.intersect(&slice_glyphs.range); - let slice_range_begin = slice_glyphs.range.begin(); - byte_range.shift_by(-slice_range_begin); - - if !byte_range.is_empty() { - Some(TextRunSlice { - glyphs: &*slice_glyphs.glyph_store, - offset: slice_range_begin, - range: byte_range, - }) - } else { - None - } - } -} - -pub struct CharacterSliceIterator<'a> { - text: &'a str, - glyph_run: Option<&'a GlyphRun>, - glyph_run_iter: Iter<'a, GlyphRun>, - range: Range, -} - -impl<'a> Iterator for CharacterSliceIterator<'a> { - type Item = TextRunSlice<'a>; - - // inline(always) due to the inefficient rt failures messing up inline heuristics, I think. - #[inline(always)] - fn next(&mut self) -> Option> { - let glyph_run = match self.glyph_run { - None => return None, - Some(glyph_run) => glyph_run, - }; - - debug_assert!(!self.range.is_empty()); - let byte_start = self.range.begin(); - let byte_len = match self.text[byte_start.to_usize()..].chars().next() { - Some(ch) => ByteIndex(ch.len_utf8() as isize), - None => unreachable!() // XXX refactor? - }; - - self.range.adjust_by(byte_len, -byte_len); - if self.range.is_empty() { - // We're done. - self.glyph_run = None - } else if self.range.intersect(&glyph_run.range).is_empty() { - // Move on to the next glyph run. 
- self.glyph_run = self.glyph_run_iter.next(); - } - - let index_within_glyph_run = byte_start - glyph_run.range.begin(); - Some(TextRunSlice { - glyphs: &*glyph_run.glyph_store, - offset: glyph_run.range.begin(), - range: Range::new(index_within_glyph_run, byte_len), - }) - } -} - -impl<'a> TextRun { - pub fn new(font: &mut Font, text: String, options: &ShapingOptions, bidi_level: bidi::Level) -> TextRun { - let glyphs = TextRun::break_and_shape(font, &text, options); - TextRun { - text: Arc::new(text), - font_metrics: font.metrics.clone(), - font_template: font.handle.template(), - font_key: font.font_key, - actual_pt_size: font.actual_pt_size, - glyphs: Arc::new(glyphs), - bidi_level: bidi_level, - extra_word_spacing: Au(0), - } - } - - pub fn break_and_shape(font: &mut Font, text: &str, options: &ShapingOptions) - -> Vec { - let mut glyphs = vec!(); - let mut slice = 0..0; - - for (idx, _is_hard_break) in LineBreakIterator::new(text) { - // Extend the slice to the next UAX#14 line break opportunity. - slice.end = idx; - let word = &text[slice.clone()]; - - // Split off any trailing whitespace into a separate glyph run. - let mut whitespace = slice.end..slice.end; - if let Some((i, _)) = word.char_indices().rev() - .take_while(|&(_, c)| char_is_whitespace(c)).last() { - whitespace.start = slice.start + i; - slice.end = whitespace.start; - } else if idx != text.len() && options.flags.contains(KEEP_ALL_FLAG) { - // If there's no whitespace and word-break is set to - // keep-all, try increasing the slice. - continue; - } - if slice.len() > 0 { - glyphs.push(GlyphRun { - glyph_store: font.shape_text(&text[slice.clone()], options), - range: Range::new(ByteIndex(slice.start as isize), - ByteIndex(slice.len() as isize)), - }); - } - if whitespace.len() > 0 { - let mut options = options.clone(); - options.flags.insert(IS_WHITESPACE_SHAPING_FLAG); - glyphs.push(GlyphRun { - glyph_store: font.shape_text(&text[whitespace.clone()], &options), - range: Range::new(ByteIndex(whitespace.start as isize), - ByteIndex(whitespace.len() as isize)), - }); - } - slice.start = whitespace.end; - } - glyphs - } - - pub fn ascent(&self) -> Au { - self.font_metrics.ascent - } - - pub fn descent(&self) -> Au { - self.font_metrics.descent - } - - pub fn advance_for_range(&self, range: &Range) -> Au { - if range.is_empty() { - return Au(0) - } - - // TODO(Issue #199): alter advance direction for RTL - // TODO(Issue #98): using inter-char and inter-word spacing settings when measuring text - self.natural_word_slices_in_range(range) - .fold(Au(0), |advance, slice| { - advance + slice.glyphs.advance_for_byte_range(&slice.range, self.extra_word_spacing) - }) - } - - pub fn metrics_for_range(&self, range: &Range) -> RunMetrics { - RunMetrics::new(self.advance_for_range(range), - self.font_metrics.ascent, - self.font_metrics.descent) - } - - pub fn metrics_for_slice(&self, glyphs: &GlyphStore, slice_range: &Range) - -> RunMetrics { - RunMetrics::new(glyphs.advance_for_byte_range(slice_range, self.extra_word_spacing), - self.font_metrics.ascent, - self.font_metrics.descent) - } - - pub fn min_width_for_range(&self, range: &Range) -> Au { - debug!("iterating outer range {:?}", range); - self.natural_word_slices_in_range(range).fold(Au(0), |max_piece_width, slice| { - debug!("iterated on {:?}[{:?}]", slice.offset, slice.range); - max(max_piece_width, self.advance_for_range(&slice.range)) - }) - } - - pub fn minimum_splittable_inline_size(&self, range: &Range) -> Au { - match self.natural_word_slices_in_range(range).next() { - 
None => Au(0), - Some(slice) => self.advance_for_range(&slice.range), - } - } - - /// Returns the index of the first glyph run containing the given character index. - fn index_of_first_glyph_run_containing(&self, index: ByteIndex) -> Option { - let self_ptr = self as *const TextRun; - INDEX_OF_FIRST_GLYPH_RUN_CACHE.with(|index_of_first_glyph_run_cache| { - if let Some((last_text_run, last_index, last_result)) = - index_of_first_glyph_run_cache.get() { - if last_text_run == self_ptr && last_index == index { - return Some(last_result) - } - } - - if let Ok(result) = (&**self.glyphs).binary_search_by(|current| current.compare(&index)) { - index_of_first_glyph_run_cache.set(Some((self_ptr, index, result))); - Some(result) - } else { - None - } - }) - } - - /// Returns the index in the range of the first glyph advancing over given advance - pub fn range_index_of_advance(&self, range: &Range, advance: Au) -> usize { - // TODO(Issue #199): alter advance direction for RTL - // TODO(Issue #98): using inter-char and inter-word spacing settings when measuring text - let mut remaining = advance; - self.natural_word_slices_in_range(range) - .map(|slice| { - let (slice_index, slice_advance) = - slice.glyphs.range_index_of_advance(&slice.range, remaining, self.extra_word_spacing); - remaining -= slice_advance; - slice_index - }) - .sum() - } - - /// Returns an iterator that will iterate over all slices of glyphs that represent natural - /// words in the given range. - pub fn natural_word_slices_in_range(&'a self, range: &Range) - -> NaturalWordSliceIterator<'a> { - let index = match self.index_of_first_glyph_run_containing(range.begin()) { - None => self.glyphs.len(), - Some(index) => index, - }; - NaturalWordSliceIterator { - glyphs: &self.glyphs[..], - index: index, - range: *range, - reverse: false, - } - } - - /// Returns an iterator that over natural word slices in visual order (left to right or - /// right to left, depending on the bidirectional embedding level). - pub fn natural_word_slices_in_visual_order(&'a self, range: &Range) - -> NaturalWordSliceIterator<'a> { - // Iterate in reverse order if bidi level is RTL. - let reverse = self.bidi_level.is_rtl(); - - let index = if reverse { - match self.index_of_first_glyph_run_containing(range.end() - ByteIndex(1)) { - Some(i) => i + 1, // In reverse mode, index points one past the next element. - None => 0 - } - } else { - match self.index_of_first_glyph_run_containing(range.begin()) { - Some(i) => i, - None => self.glyphs.len() - } - }; - NaturalWordSliceIterator { - glyphs: &self.glyphs[..], - index: index, - range: *range, - reverse: reverse, - } - } - - /// Returns an iterator that will iterate over all slices of glyphs that represent individual - /// characters in the given range. 
- pub fn character_slices_in_range(&'a self, range: &Range) - -> CharacterSliceIterator<'a> { - let index = match self.index_of_first_glyph_run_containing(range.begin()) { - None => self.glyphs.len(), - Some(index) => index, - }; - let mut glyph_run_iter = self.glyphs[index..].iter(); - let first_glyph_run = glyph_run_iter.next(); - CharacterSliceIterator { - text: &self.text, - glyph_run: first_glyph_run, - glyph_run_iter: glyph_run_iter, - range: *range, - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx/text/util.rs b/collector/compile-benchmarks/style-servo/components/gfx/text/util.rs deleted file mode 100644 index 5be03cb2b..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx/text/util.rs +++ /dev/null @@ -1,116 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -#[derive(Clone, Copy, Debug, Eq, PartialEq)] -pub enum CompressionMode { - CompressNone, - CompressWhitespace, - CompressWhitespaceNewline, - DiscardNewline -} - -// ported from Gecko's nsTextFrameUtils::TransformText. -// -// High level TODOs: -// -// * Issue #113: consider incoming text state (arabic, etc) -// and propagate outgoing text state (dual of above) -// -// * Issue #114: record skipped and kept chars for mapping original to new text -// -// * Untracked: various edge cases for bidi, CJK, etc. -pub fn transform_text(text: &str, - mode: CompressionMode, - incoming_whitespace: bool, - output_text: &mut String) - -> bool { - let out_whitespace = match mode { - CompressionMode::CompressNone | CompressionMode::DiscardNewline => { - for ch in text.chars() { - if is_discardable_char(ch, mode) { - // TODO: record skipped char - } else { - // TODO: record kept char - if ch == '\t' { - // TODO: set "has tab" flag - } - output_text.push(ch); - } - } - false - }, - - CompressionMode::CompressWhitespace | CompressionMode::CompressWhitespaceNewline => { - let mut in_whitespace: bool = incoming_whitespace; - for ch in text.chars() { - // TODO: discard newlines between CJK chars - let mut next_in_whitespace: bool = is_in_whitespace(ch, mode); - - if !next_in_whitespace { - if is_always_discardable_char(ch) { - // revert whitespace setting, since this char was discarded - next_in_whitespace = in_whitespace; - // TODO: record skipped char - } else { - // TODO: record kept char - output_text.push(ch); - } - } else { /* next_in_whitespace; possibly add a space char */ - if in_whitespace { - // TODO: record skipped char - } else { - // TODO: record kept char - output_text.push(' '); - } - } - // save whitespace context for next char - in_whitespace = next_in_whitespace; - } /* /for str::each_char */ - in_whitespace - } - }; - - return out_whitespace; - - fn is_in_whitespace(ch: char, mode: CompressionMode) -> bool { - match (ch, mode) { - (' ', _) => true, - ('\t', _) => true, - ('\n', CompressionMode::CompressWhitespaceNewline) => true, - (_, _) => false - } - } - - fn is_discardable_char(ch: char, mode: CompressionMode) -> bool { - if is_always_discardable_char(ch) { - return true; - } - match mode { - CompressionMode::DiscardNewline | CompressionMode::CompressWhitespaceNewline => ch == '\n', - _ => false - } - } - - fn is_always_discardable_char(ch: char) -> bool { - // TODO: check for soft hyphens. 
- is_bidi_control(ch) - } -} - -pub fn float_to_fixed(before: usize, f: f64) -> i32 { - ((1i32 << before) as f64 * f) as i32 -} - -pub fn fixed_to_float(before: usize, f: i32) -> f64 { - f as f64 * 1.0f64 / ((1i32 << before) as f64) -} - -pub fn is_bidi_control(c: char) -> bool { - match c { - '\u{202A}'...'\u{202E}' => true, - '\u{2066}'...'\u{2069}' => true, - '\u{200E}' | '\u{200F}' | '\u{061C}' => true, - _ => false - } -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx_traits/Cargo.toml b/collector/compile-benchmarks/style-servo/components/gfx_traits/Cargo.toml deleted file mode 100644 index f9de4fc30..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx_traits/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "gfx_traits" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "gfx_traits" -path = "lib.rs" - -[dependencies] -heapsize = "0.4" -heapsize_derive = "0.1" -range = {path = "../range"} -serde = "1.0" diff --git a/collector/compile-benchmarks/style-servo/components/gfx_traits/lib.rs b/collector/compile-benchmarks/style-servo/components/gfx_traits/lib.rs deleted file mode 100644 index e6f6c124e..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx_traits/lib.rs +++ /dev/null @@ -1,108 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -#![crate_name = "gfx_traits"] -#![crate_type = "rlib"] - -#![deny(unsafe_code)] - -extern crate heapsize; -#[macro_use] extern crate heapsize_derive; -#[macro_use] extern crate range; -#[macro_use] extern crate serde; - -pub mod print_tree; - -use range::RangeIndex; -use std::sync::atomic::{ATOMIC_USIZE_INIT, AtomicUsize, Ordering}; - -/// A newtype struct for denoting the age of messages; prevents race conditions. -#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize)] -pub struct Epoch(pub u32); - -impl Epoch { - pub fn next(&mut self) { - self.0 += 1; - } -} - -/// A unique ID for every stacking context. -#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, HeapSizeOf, PartialEq, Serialize)] -pub struct StackingContextId( - /// The identifier for this StackingContext, derived from the Flow's memory address - /// and fragment type. As a space optimization, these are combined into a single word. - pub u64 -); - -impl StackingContextId { - /// Returns the stacking context ID for the outer document/layout root. - #[inline] - pub fn root() -> StackingContextId { - StackingContextId(0) - } - - /// Returns a new sacking context id with the given numeric id. - #[inline] - pub fn new(id: u64) -> StackingContextId { - StackingContextId(id) - } -} - -int_range_index! { - #[derive(Deserialize, Serialize)] - #[doc = "An index that refers to a byte offset in a text run. This could \ - point to the middle of a glyph."] - #[derive(HeapSizeOf)] - struct ByteIndex(isize) -} - -/// The type of fragment that a stacking context represents. -/// -/// This can only ever grow to maximum 4 entries. That's because we cram the value of this enum -/// into the lower 2 bits of the `StackingContextId`, which otherwise contains a 32-bit-aligned -/// heap address. -#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, HeapSizeOf, PartialEq, Serialize)] -pub enum FragmentType { - /// A StackingContext for the fragment body itself. 
- FragmentBody, - /// A StackingContext created to contain ::before pseudo-element content. - BeforePseudoContent, - /// A StackingContext created to contain ::after pseudo-element content. - AfterPseudoContent, -} - -/// The next ID that will be used for a special stacking context. -/// -/// A special stacking context is a stacking context that is one of (a) the outer stacking context -/// of an element with `overflow: scroll`; (b) generated content; (c) both (a) and (b). -static NEXT_SPECIAL_STACKING_CONTEXT_ID: AtomicUsize = ATOMIC_USIZE_INIT; - -/// If none of the bits outside this mask are set, the stacking context is a special stacking -/// context. -/// -/// Note that we assume that the top 16 bits of the address space are unused on the platform. -const SPECIAL_STACKING_CONTEXT_ID_MASK: usize = 0xffff; - -/// Returns a new stacking context ID for a special stacking context. -fn next_special_id() -> usize { - // We shift this left by 2 to make room for the fragment type ID. - ((NEXT_SPECIAL_STACKING_CONTEXT_ID.fetch_add(1, Ordering::SeqCst) + 1) << 2) & - SPECIAL_STACKING_CONTEXT_ID_MASK -} - -pub fn combine_id_with_fragment_type(id: usize, fragment_type: FragmentType) -> usize { - debug_assert_eq!(id & (fragment_type as usize), 0); - if fragment_type == FragmentType::FragmentBody { - id - } else { - next_special_id() | (fragment_type as usize) - } -} - -pub fn node_id_from_clip_id(id: usize) -> Option { - if (id & !SPECIAL_STACKING_CONTEXT_ID_MASK) != 0 { - return Some((id & !3) as usize); - } - None -} diff --git a/collector/compile-benchmarks/style-servo/components/gfx_traits/print_tree.rs b/collector/compile-benchmarks/style-servo/components/gfx_traits/print_tree.rs deleted file mode 100644 index a842193e0..000000000 --- a/collector/compile-benchmarks/style-servo/components/gfx_traits/print_tree.rs +++ /dev/null @@ -1,65 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -/// A struct that makes it easier to print out a pretty tree of data, which -/// can be visually scanned more easily. -pub struct PrintTree { - /// The current level of recursion. - level: u32, - - /// An item which is queued up, so that we can determine if we need - /// a mid-tree prefix or a branch ending prefix. - queued_item: Option, -} - -impl PrintTree { - pub fn new(title: String) -> PrintTree { - println!("\u{250c} {}", title); - PrintTree { - level: 1, - queued_item: None, - } - } - - /// Descend one level in the tree with the given title. - pub fn new_level(&mut self, title: String) { - self.flush_queued_item("\u{251C}\u{2500}"); - - self.print_level_prefix(); - println!("\u{251C}\u{2500} {}", title); - - self.level = self.level + 1; - } - - /// Ascend one level in the tree. - pub fn end_level(&mut self) { - self.flush_queued_item("\u{2514}\u{2500}"); - self.level = self.level - 1; - } - - /// Add an item to the current level in the tree. 
- pub fn add_item(&mut self, text: String) { - self.flush_queued_item("\u{251C}\u{2500}"); - self.queued_item = Some(text); - } - - fn print_level_prefix(&self) { - for _ in 0..self.level { - print!("\u{2502} "); - } - } - - fn flush_queued_item(&mut self, prefix: &str) { - if let Some(queued_item) = self.queued_item.take() { - self.print_level_prefix(); - println!("{} {}", prefix, queued_item); - } - } -} - -impl Drop for PrintTree { - fn drop(&mut self) { - self.flush_queued_item("\u{9492}\u{9472}"); - } -} diff --git a/collector/compile-benchmarks/style-servo/components/hashglobe/Cargo.toml b/collector/compile-benchmarks/style-servo/components/hashglobe/Cargo.toml deleted file mode 100644 index cffb48919..000000000 --- a/collector/compile-benchmarks/style-servo/components/hashglobe/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "hashglobe" -version = "0.1.0" -authors = ["The Rust Project Developers", "Manish Goregaokar "] -license = "MIT/Apache-2.0" -description = "Fork of std::HashMap with stable fallible allocation." -documentation = "https://docs.rs/hashglobe" -repository = "https://github.com/Manishearth/hashglobe" - -readme = "README.md" - -[dependencies] -libc = "0.2" -heapsize = "0.4" - -[dev-dependencies] -rand = "0.3" diff --git a/collector/compile-benchmarks/style-servo/components/hashglobe/LICENSE-APACHE b/collector/compile-benchmarks/style-servo/components/hashglobe/LICENSE-APACHE deleted file mode 100644 index 16fe87b06..000000000 --- a/collector/compile-benchmarks/style-servo/components/hashglobe/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/collector/compile-benchmarks/style-servo/components/hashglobe/LICENSE-MIT b/collector/compile-benchmarks/style-servo/components/hashglobe/LICENSE-MIT deleted file mode 100644 index 31aa79387..000000000 --- a/collector/compile-benchmarks/style-servo/components/hashglobe/LICENSE-MIT +++ /dev/null @@ -1,23 +0,0 @@ -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/collector/compile-benchmarks/style-servo/components/hashglobe/README.md b/collector/compile-benchmarks/style-servo/components/hashglobe/README.md deleted file mode 100644 index e2f1df4fa..000000000 --- a/collector/compile-benchmarks/style-servo/components/hashglobe/README.md +++ /dev/null @@ -1,17 +0,0 @@ -hashglobe -======== - - -This is a fork of Rust's `std::HashMap`. It works on stable out of the stdlib and has fallible APIs. - -We intend to diverge as little as possible from the original hashmap. - - -Dual licensed Apache/MIT, the same as the stdlib. - - -## Should I use this? - -No. - -Wait for https://github.com/rust-lang/rfcs/pull/2116 instead. diff --git a/collector/compile-benchmarks/style-servo/components/hashglobe/src/alloc.rs b/collector/compile-benchmarks/style-servo/components/hashglobe/src/alloc.rs deleted file mode 100644 index f7d505014..000000000 --- a/collector/compile-benchmarks/style-servo/components/hashglobe/src/alloc.rs +++ /dev/null @@ -1,166 +0,0 @@ -// FORK NOTE: Copied from liballoc_system, removed unnecessary APIs, -// APIs take size/align directly instead of Layout - - - - -// The minimum alignment guaranteed by the architecture. This value is used to -// add fast paths for low alignment values. In practice, the alignment is a -// constant at the call site and the branch will be optimized out. -#[cfg(all(any(target_arch = "x86", - target_arch = "arm", - target_arch = "mips", - target_arch = "powerpc", - target_arch = "powerpc64", - target_arch = "asmjs", - target_arch = "wasm32")))] -const MIN_ALIGN: usize = 8; -#[cfg(all(any(target_arch = "x86_64", - target_arch = "aarch64", - target_arch = "mips64", - target_arch = "s390x", - target_arch = "sparc64")))] -const MIN_ALIGN: usize = 16; - -pub use self::platform::{alloc, dealloc, realloc}; - -#[cfg(any(unix, target_os = "redox"))] -mod platform { - extern crate libc; - - use std::ptr; - - use super::MIN_ALIGN; - - #[inline] - pub unsafe fn alloc(size: usize, align: usize) -> *mut u8 { - let ptr = if align <= MIN_ALIGN { - libc::malloc(size) as *mut u8 - } else { - aligned_malloc(size, align) - }; - ptr - } - - #[inline] - pub unsafe fn dealloc(ptr: *mut u8, _align: usize) { - libc::free(ptr as *mut libc::c_void) - } - - #[inline] - pub unsafe fn realloc(ptr: *mut u8, new_size: usize) -> *mut u8 { - libc::realloc(ptr as *mut libc::c_void, new_size) as *mut u8 - } - - #[cfg(any(target_os = "android", target_os = "redox"))] - #[inline] - unsafe fn aligned_malloc(size: usize, align: usize) -> *mut u8 { - // On android we currently target API level 9 which unfortunately - // doesn't have the `posix_memalign` API used below. Instead we use - // `memalign`, but this unfortunately has the property on some systems - // where the memory returned cannot be deallocated by `free`! - // - // Upon closer inspection, however, this appears to work just fine with - // Android, so for this platform we should be fine to call `memalign` - // (which is present in API level 9). 
Some helpful references could - // possibly be chromium using memalign [1], attempts at documenting that - // memalign + free is ok [2] [3], or the current source of chromium - // which still uses memalign on android [4]. - // - // [1]: https://codereview.chromium.org/10796020/ - // [2]: https://code.google.com/p/android/issues/detail?id=35391 - // [3]: https://bugs.chromium.org/p/chromium/issues/detail?id=138579 - // [4]: https://chromium.googlesource.com/chromium/src/base/+/master/ - // /memory/aligned_memory.cc - libc::memalign(align, size) as *mut u8 - } - - #[cfg(not(any(target_os = "android", target_os = "redox")))] - #[inline] - unsafe fn aligned_malloc(size: usize, align: usize) -> *mut u8 { - let mut out = ptr::null_mut(); - let ret = libc::posix_memalign(&mut out, align, size); - if ret != 0 { - ptr::null_mut() - } else { - out as *mut u8 - } - } -} - -#[cfg(windows)] -#[allow(bad_style)] -mod platform { - - use super::MIN_ALIGN; - type LPVOID = *mut u8; - type HANDLE = LPVOID; - type SIZE_T = usize; - type DWORD = u32; - type BOOL = i32; - - - extern "system" { - fn GetProcessHeap() -> HANDLE; - fn HeapAlloc(hHeap: HANDLE, dwFlags: DWORD, dwBytes: SIZE_T) -> LPVOID; - fn HeapReAlloc(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID, dwBytes: SIZE_T) -> LPVOID; - fn HeapFree(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID) -> BOOL; - fn GetLastError() -> DWORD; - } - - #[repr(C)] - struct Header(*mut u8); - - unsafe fn get_header<'a>(ptr: *mut u8) -> &'a mut Header { - &mut *(ptr as *mut Header).offset(-1) - } - - unsafe fn align_ptr(ptr: *mut u8, align: usize) -> *mut u8 { - let aligned = ptr.offset((align - (ptr as usize & (align - 1))) as isize); - *get_header(aligned) = Header(ptr); - aligned - } - - #[inline] - unsafe fn allocate_with_flags(size: usize, align: usize, flags: DWORD) -> *mut u8 - { - if align <= MIN_ALIGN { - HeapAlloc(GetProcessHeap(), flags, size) - } else { - let size = size + align; - let ptr = HeapAlloc(GetProcessHeap(), flags, size); - if ptr.is_null() { - ptr - } else { - align_ptr(ptr, align) - } - } - } - - #[inline] - pub unsafe fn alloc(size: usize, align: usize) -> *mut u8 { - allocate_with_flags(size, align, 0) - } - - #[inline] - pub unsafe fn dealloc(ptr: *mut u8, align: usize) { - if align <= MIN_ALIGN { - let err = HeapFree(GetProcessHeap(), 0, ptr as LPVOID); - debug_assert!(err != 0, "Failed to free heap memory: {}", - GetLastError()); - } else { - let header = get_header(ptr); - let err = HeapFree(GetProcessHeap(), 0, header.0 as LPVOID); - debug_assert!(err != 0, "Failed to free heap memory: {}", - GetLastError()); - } - } - - #[inline] - pub unsafe fn realloc(ptr: *mut u8, new_size: usize) -> *mut u8 { - HeapReAlloc(GetProcessHeap(), - 0, - ptr as LPVOID, - new_size) as *mut u8 - } -} diff --git a/collector/compile-benchmarks/style-servo/components/hashglobe/src/diagnostic.rs b/collector/compile-benchmarks/style-servo/components/hashglobe/src/diagnostic.rs deleted file mode 100644 index 05a044b02..000000000 --- a/collector/compile-benchmarks/style-servo/components/hashglobe/src/diagnostic.rs +++ /dev/null @@ -1,219 +0,0 @@ -use hash_map::HashMap; -use std::borrow::Borrow; -use std::hash::{BuildHasher, Hash}; - -use FailedAllocationError; - -#[cfg(target_pointer_width = "32")] -const CANARY: usize = 0x42cafe99; -#[cfg(target_pointer_width = "64")] -const CANARY: usize = 0x42cafe9942cafe99; - -#[derive(Clone, Debug)] -enum JournalEntry { - Insert(usize), - GOIW(usize), - Remove(usize), - DidClear(usize), -} - -#[derive(Clone, Debug)] -pub struct 
DiagnosticHashMap - where K: Eq + Hash, - S: BuildHasher -{ - map: HashMap, - journal: Vec, - readonly: bool, -} - -impl DiagnosticHashMap - where K: Eq + Hash, - S: BuildHasher -{ - #[inline(always)] - pub fn inner(&self) -> &HashMap { - &self.map - } - - #[inline(never)] - pub fn begin_mutation(&mut self) { - self.map.verify(); - assert!(self.readonly); - self.readonly = false; - self.verify(); - } - - #[inline(never)] - pub fn end_mutation(&mut self) { - self.map.verify(); - assert!(!self.readonly); - self.readonly = true; - self.verify(); - } - - fn verify(&self) { - let mut position = 0; - let mut bad_canary: Option<(usize, *const usize)> = None; - for (_,v) in self.map.iter() { - let canary_ref = &v.0; - if *canary_ref == CANARY { - position += 1; - continue; - } - bad_canary = Some((*canary_ref, canary_ref)); - } - if let Some(c) = bad_canary { - self.report_corruption(c.0, c.1, position); - } - } - - #[inline(always)] - pub fn with_hasher(hash_builder: S) -> Self { - Self { - map: HashMap::::with_hasher(hash_builder), - journal: Vec::new(), - readonly: true, - } - } - - #[inline(always)] - pub fn len(&self) -> usize { - self.map.len() - } - - #[inline(always)] - pub fn is_empty(&self) -> bool { - self.map.is_empty() - } - - #[inline(always)] - pub fn contains_key(&self, k: &Q) -> bool - where K: Borrow, - Q: Hash + Eq - { - self.map.contains_key(k) - } - - #[inline(always)] - pub fn get(&self, k: &Q) -> Option<&V> - where K: Borrow, - Q: Hash + Eq - { - self.map.get(k).map(|v| &v.1) - } - - #[inline(always)] - pub fn try_get_or_insert_with V>( - &mut self, - key: K, - default: F - ) -> Result<&mut V, FailedAllocationError> { - assert!(!self.readonly); - self.journal.push(JournalEntry::GOIW(self.map.make_hash(&key).inspect())); - let entry = self.map.try_entry(key)?; - Ok(&mut entry.or_insert_with(|| (CANARY, default())).1) - } - - #[inline(always)] - pub fn try_insert(&mut self, k: K, v: V) -> Result, FailedAllocationError> { - assert!(!self.readonly); - self.journal.push(JournalEntry::Insert(self.map.make_hash(&k).inspect())); - let old = self.map.try_insert(k, (CANARY, v))?; - Ok(old.map(|x| x.1)) - } - - #[inline(always)] - pub fn remove(&mut self, k: &Q) -> Option - where K: Borrow, - Q: Hash + Eq - { - assert!(!self.readonly); - self.journal.push(JournalEntry::Remove(self.map.make_hash(k).inspect())); - self.map.remove(k).map(|x| x.1) - } - - #[inline(always)] - pub fn clear(&mut self) where K: 'static, V: 'static { - // We handle scoped mutations for the caller here, since callsites that - // invoke clear() don't benefit from the coalescing we do around insertion. 
- self.begin_mutation(); - self.journal.clear(); - self.journal.push(JournalEntry::DidClear(self.map.raw_capacity())); - self.map.clear(); - self.end_mutation(); - } - - #[inline(never)] - fn report_corruption( - &self, - canary: usize, - canary_addr: *const usize, - position: usize - ) { - use ::std::ffi::CString; - let key = b"HashMapJournal\0"; - let value = CString::new(format!("{:?}", self.journal)).unwrap(); - unsafe { - Gecko_AnnotateCrashReport( - key.as_ptr() as *const ::std::os::raw::c_char, - value.as_ptr(), - ); - } - panic!( - "HashMap Corruption (sz={}, cap={}, pairsz={}, cnry={:#x}, pos={}, base_addr={:?}, cnry_addr={:?}, jrnl_len={})", - self.map.len(), - self.map.raw_capacity(), - ::std::mem::size_of::<(K, (usize, V))>(), - canary, - position, - self.map.raw_buffer(), - canary_addr, - self.journal.len(), - ); - } -} - -impl PartialEq for DiagnosticHashMap - where K: Eq + Hash, - V: PartialEq, - S: BuildHasher -{ - fn eq(&self, other: &Self) -> bool { - self.map.eq(&other.map) - } -} - -impl Eq for DiagnosticHashMap - where K: Eq + Hash, - V: Eq, - S: BuildHasher -{ -} - -impl Default for DiagnosticHashMap - where K: Eq + Hash, - S: BuildHasher + Default -{ - fn default() -> Self { - Self { - map: HashMap::default(), - journal: Vec::new(), - readonly: true, - } - } -} - -impl Drop for DiagnosticHashMap - where K: Eq + Hash, - S: BuildHasher -{ - fn drop(&mut self) { - debug_assert!(self.readonly, "Dropped while mutating"); - } -} - -extern "C" { - pub fn Gecko_AnnotateCrashReport(key_str: *const ::std::os::raw::c_char, - value_str: *const ::std::os::raw::c_char); -} diff --git a/collector/compile-benchmarks/style-servo/components/hashglobe/src/fake.rs b/collector/compile-benchmarks/style-servo/components/hashglobe/src/fake.rs deleted file mode 100644 index f83032d5a..000000000 --- a/collector/compile-benchmarks/style-servo/components/hashglobe/src/fake.rs +++ /dev/null @@ -1,290 +0,0 @@ -// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! This module contains shims around the stdlib HashMap -//! that add fallible methods -//! -//! These methods are a lie. They are not actually fallible. This is just to make -//! it smooth to switch between hashmap impls in a codebase. 
- -use heapsize::HeapSizeOf; -use std::collections::HashMap as StdMap; -use std::collections::HashSet as StdSet; -use std::fmt; -use std::hash::{BuildHasher, Hash}; -use std::ops::{Deref, DerefMut}; - -pub use std::collections::hash_map::{Entry, RandomState, Iter as MapIter, IterMut as MapIterMut}; -pub use std::collections::hash_set::{Iter as SetIter, IntoIter as SetIntoIter}; - -#[derive(Clone)] -pub struct HashMap(StdMap); - - -use FailedAllocationError; - -impl Deref for HashMap { - type Target = StdMap; - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl DerefMut for HashMap { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -impl HashMap - where K: Eq + Hash, - S: BuildHasher -{ - #[inline] - pub fn try_with_hasher(hash_builder: S) -> Result, FailedAllocationError> { - Ok(HashMap(StdMap::with_hasher(hash_builder))) - } - - #[inline] - pub fn try_with_capacity_and_hasher(capacity: usize, - hash_builder: S) - -> Result, FailedAllocationError> { - Ok(HashMap(StdMap::with_capacity_and_hasher(capacity, hash_builder))) - } - - pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> HashMap { - HashMap(StdMap::with_capacity_and_hasher(capacity, hash_builder)) - } - - - #[inline] - pub fn try_reserve(&mut self, additional: usize) -> Result<(), FailedAllocationError> { - Ok(self.reserve(additional)) - } - - pub fn try_shrink_to_fit(&mut self) -> Result<(), FailedAllocationError> { - Ok(self.shrink_to_fit()) - } - - pub fn try_entry(&mut self, key: K) -> Result, FailedAllocationError> { - Ok(self.entry(key)) - } - - #[inline(always)] - pub fn try_get_or_insert_with V>( - &mut self, - key: K, - default: F - ) -> Result<&mut V, FailedAllocationError> { - Ok(self.entry(key).or_insert_with(default)) - } - - #[inline] - pub fn try_insert(&mut self, k: K, v: V) -> Result, FailedAllocationError> { - Ok(self.insert(k, v)) - } - - #[inline(always)] - pub fn begin_mutation(&mut self) {} - #[inline(always)] - pub fn end_mutation(&mut self) {} -} - -#[derive(Clone)] -pub struct HashSet(StdSet); - - -impl Deref for HashSet { - type Target = StdSet; - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl DerefMut for HashSet { - fn deref_mut(&mut self) -> &mut Self::Target { - &mut self.0 - } -} - -impl HashSet { - #[inline] - pub fn new() -> HashSet { - HashSet(StdSet::new()) - } - - #[inline] - pub fn with_capacity(capacity: usize) -> HashSet { - HashSet(StdSet::with_capacity(capacity)) - } -} - - -impl HashSet - where T: Eq + Hash, - S: BuildHasher -{ - #[inline] - pub fn with_hasher(hasher: S) -> HashSet { - HashSet(StdSet::with_hasher(hasher)) - } - - - #[inline] - pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> HashSet { - HashSet(StdSet::with_capacity_and_hasher(capacity, hasher)) - } - - #[inline] - pub fn try_reserve(&mut self, additional: usize) -> Result<(), FailedAllocationError> { - Ok(self.reserve(additional)) - } - - #[inline] - pub fn try_shrink_to_fit(&mut self) -> Result<(), FailedAllocationError> { - Ok(self.shrink_to_fit()) - } - - #[inline] - pub fn try_insert(&mut self, value: T) -> Result { - Ok(self.insert(value)) - } -} - -// Pass through trait impls -// We can't derive these since the bounds are not obvious to the derive macro - - -impl - HeapSizeOf for HashMap { - fn heap_size_of_children(&self) -> usize { - self.0.heap_size_of_children() - } -} - -impl Default for HashMap { - fn default() -> Self { - HashMap(Default::default()) - } -} - -impl fmt::Debug for HashMap - where K: Eq + Hash + fmt::Debug, - V: 
fmt::Debug, - S: BuildHasher { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -impl PartialEq for HashMap - where K: Eq + Hash, - V: PartialEq, - S: BuildHasher -{ - fn eq(&self, other: &HashMap) -> bool { - self.0.eq(&other.0) - } -} - -impl Eq for HashMap - where K: Eq + Hash, - V: Eq, - S: BuildHasher -{ -} - -impl<'a, K, V, S> IntoIterator for &'a HashMap - where K: Eq + Hash, - S: BuildHasher -{ - type Item = (&'a K, &'a V); - type IntoIter = MapIter<'a, K, V>; - - fn into_iter(self) -> MapIter<'a, K, V> { - self.0.iter() - } -} - -impl<'a, K, V, S> IntoIterator for &'a mut HashMap - where K: Eq + Hash, - S: BuildHasher -{ - type Item = (&'a K, &'a mut V); - type IntoIter = MapIterMut<'a, K, V>; - - fn into_iter(self) -> MapIterMut<'a, K, V> { - self.0.iter_mut() - } -} - - -impl HeapSizeOf for HashSet { - fn heap_size_of_children(&self) -> usize { - self.0.heap_size_of_children() - } -} - -impl Default for HashSet { - fn default() -> Self { - HashSet(Default::default()) - } -} - -impl fmt::Debug for HashSet - where T: Eq + Hash + fmt::Debug, - S: BuildHasher -{ - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} - -impl PartialEq for HashSet - where T: Eq + Hash, - S: BuildHasher -{ - fn eq(&self, other: &HashSet) -> bool { - self.0.eq(&other.0) - } -} - -impl Eq for HashSet - where T: Eq + Hash, - S: BuildHasher -{ -} - -impl<'a, T, S> IntoIterator for &'a HashSet - where T: Eq + Hash, - S: BuildHasher -{ - type Item = &'a T; - type IntoIter = SetIter<'a, T>; - - fn into_iter(self) -> SetIter<'a, T> { - self.0.iter() - } -} - -impl IntoIterator for HashSet - where T: Eq + Hash, - S: BuildHasher -{ - type Item = T; - type IntoIter = SetIntoIter; - - - fn into_iter(self) -> SetIntoIter { - self.0.into_iter() - } -} - - diff --git a/collector/compile-benchmarks/style-servo/components/hashglobe/src/hash_map.rs b/collector/compile-benchmarks/style-servo/components/hashglobe/src/hash_map.rs deleted file mode 100644 index e17f6fd6a..000000000 --- a/collector/compile-benchmarks/style-servo/components/hashglobe/src/hash_map.rs +++ /dev/null @@ -1,3073 +0,0 @@ -// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use self::Entry::*; -use self::VacantEntryState::*; - -use std::borrow::Borrow; -use std::cmp::max; -use std::fmt::{self, Debug}; -#[allow(deprecated)] -use std::hash::{Hash, BuildHasher}; -use std::iter::FromIterator; -use std::mem::{self, replace}; -use std::ops::{Deref, Index}; - -use super::table::{self, Bucket, EmptyBucket, FullBucket, FullBucketMut, RawTable, SafeHash}; -use super::table::BucketState::{Empty, Full}; - -use FailedAllocationError; - -const MIN_NONZERO_RAW_CAPACITY: usize = 32; // must be a power of two - -/// The default behavior of HashMap implements a maximum load factor of 90.9%. -#[derive(Clone)] -struct DefaultResizePolicy; - -impl DefaultResizePolicy { - fn new() -> DefaultResizePolicy { - DefaultResizePolicy - } - - /// A hash map's "capacity" is the number of elements it can hold without - /// being resized. Its "raw capacity" is the number of slots required to - /// provide that capacity, accounting for maximum loading. The raw capacity - /// is always zero or a power of two. 
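// Illustrative sketch (not part of the style-servo sources): the capacity rule
// described above, with concrete numbers. Room for `len` elements is padded by
// roughly 10% for the maximum load factor, rounded up to a power of two, and
// clamped to the 32-slot minimum; the real implementation additionally checks
// for overflow.
fn raw_capacity_for(len: usize) -> usize {
    if len == 0 {
        return 0;
    }
    let padded = len * 11 / 10;            // account for loading
    let pow2 = padded.next_power_of_two(); // table sizes are powers of two
    pow2.max(32)                           // minimum non-zero raw capacity
}

fn main() {
    assert_eq!(raw_capacity_for(0), 0);
    assert_eq!(raw_capacity_for(10), 32);     // 11 -> 16 -> clamped to 32
    assert_eq!(raw_capacity_for(100), 128);   // 110 -> 128
    assert_eq!(raw_capacity_for(1000), 2048); // 1100 -> 2048
}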
- #[inline] - fn raw_capacity(&self, len: usize) -> usize { - if len == 0 { - 0 - } else { - // 1. Account for loading: `raw_capacity >= len * 1.1`. - // 2. Ensure it is a power of two. - // 3. Ensure it is at least the minimum size. - let mut raw_cap = len * 11 / 10; - assert!(raw_cap >= len, "raw_cap overflow"); - raw_cap = raw_cap.checked_next_power_of_two().expect("raw_capacity overflow"); - raw_cap = max(MIN_NONZERO_RAW_CAPACITY, raw_cap); - raw_cap - } - } - - /// The capacity of the given raw capacity. - #[inline] - fn capacity(&self, raw_cap: usize) -> usize { - // This doesn't have to be checked for overflow since allocation size - // in bytes will overflow earlier than multiplication by 10. - // - // As per https://github.com/rust-lang/rust/pull/30991 this is updated - // to be: (raw_cap * den + den - 1) / num - (raw_cap * 10 + 10 - 1) / 11 - } -} - -// The main performance trick in this hashmap is called Robin Hood Hashing. -// It gains its excellent performance from one essential operation: -// -// If an insertion collides with an existing element, and that element's -// "probe distance" (how far away the element is from its ideal location) -// is higher than how far we've already probed, swap the elements. -// -// This massively lowers variance in probe distance, and allows us to get very -// high load factors with good performance. The 90% load factor I use is rather -// conservative. -// -// > Why a load factor of approximately 90%? -// -// In general, all the distances to initial buckets will converge on the mean. -// At a load factor of α, the odds of finding the target bucket after k -// probes is approximately 1-α^k. If we set this equal to 50% (since we converge -// on the mean) and set k=8 (64-byte cache line / 8-byte hash), α=0.92. I round -// this down to make the math easier on the CPU and avoid its FPU. -// Since on average we start the probing in the middle of a cache line, this -// strategy pulls in two cache lines of hashes on every lookup. I think that's -// pretty good, but if you want to trade off some space, it could go down to one -// cache line on average with an α of 0.84. -// -// > Wait, what? Where did you get 1-α^k from? -// -// On the first probe, your odds of a collision with an existing element is α. -// The odds of doing this twice in a row is approximately α^2. For three times, -// α^3, etc. Therefore, the odds of colliding k times is α^k. The odds of NOT -// colliding after k tries is 1-α^k. -// -// The paper from 1986 cited below mentions an implementation which keeps track -// of the distance-to-initial-bucket histogram. This approach is not suitable -// for modern architectures because it requires maintaining an internal data -// structure. This allows very good first guesses, but we are most concerned -// with guessing entire cache lines, not individual indexes. Furthermore, array -// accesses are no longer linear and in one direction, as we have now. There -// is also memory and cache pressure that this would entail that would be very -// difficult to properly see in a microbenchmark. -// -// ## Future Improvements (FIXME!) -// -// Allow the load factor to be changed dynamically and/or at initialization. -// -// Also, would it be possible for us to reuse storage when growing the -// underlying table? This is exactly the use case for 'realloc', and may -// be worth exploring. -// -// ## Future Optimizations (FIXME!) 
-// -// Another possible design choice that I made without any real reason is -// parameterizing the raw table over keys and values. Technically, all we need -// is the size and alignment of keys and values, and the code should be just as -// efficient (well, we might need one for power-of-two size and one for not...). -// This has the potential to reduce code bloat in rust executables, without -// really losing anything except 4 words (key size, key alignment, val size, -// val alignment) which can be passed in to every call of a `RawTable` function. -// This would definitely be an avenue worth exploring if people start complaining -// about the size of rust executables. -// -// Annotate exceedingly likely branches in `table::make_hash` -// and `search_hashed` to reduce instruction cache pressure -// and mispredictions once it becomes possible (blocked on issue #11092). -// -// Shrinking the table could simply reallocate in place after moving buckets -// to the first half. -// -// The growth algorithm (fragment of the Proof of Correctness) -// -------------------- -// -// The growth algorithm is basically a fast path of the naive reinsertion- -// during-resize algorithm. Other paths should never be taken. -// -// Consider growing a robin hood hashtable of capacity n. Normally, we do this -// by allocating a new table of capacity `2n`, and then individually reinsert -// each element in the old table into the new one. This guarantees that the -// new table is a valid robin hood hashtable with all the desired statistical -// properties. Remark that the order we reinsert the elements in should not -// matter. For simplicity and efficiency, we will consider only linear -// reinsertions, which consist of reinserting all elements in the old table -// into the new one by increasing order of index. However we will not be -// starting our reinsertions from index 0 in general. If we start from index -// i, for the purpose of reinsertion we will consider all elements with real -// index j < i to have virtual index n + j. -// -// Our hash generation scheme consists of generating a 64-bit hash and -// truncating the most significant bits. When moving to the new table, we -// simply introduce a new bit to the front of the hash. Therefore, if an -// elements has ideal index i in the old table, it can have one of two ideal -// locations in the new table. If the new bit is 0, then the new ideal index -// is i. If the new bit is 1, then the new ideal index is n + i. Intuitively, -// we are producing two independent tables of size n, and for each element we -// independently choose which table to insert it into with equal probability. -// However the rather than wrapping around themselves on overflowing their -// indexes, the first table overflows into the first, and the first into the -// second. Visually, our new table will look something like: -// -// [yy_xxx_xxxx_xxx|xx_yyy_yyyy_yyy] -// -// Where x's are elements inserted into the first table, y's are elements -// inserted into the second, and _'s are empty sections. We now define a few -// key concepts that we will use later. Note that this is a very abstract -// perspective of the table. A real resized table would be at least half -// empty. -// -// Theorem: A linear robin hood reinsertion from the first ideal element -// produces identical results to a linear naive reinsertion from the same -// element. 
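// Illustrative check (not part of the style-servo sources): a quick numeric
// look at the load-factor reasoning in the comments above. Modelling the odds
// of finding the target bucket within k probes as 1 - alpha^k, a load factor
// of about 0.92 gives roughly even odds within k = 8 probes (one 64-byte
// cache line of 8-byte hashes), which is why the map settles on ~90%.
fn main() {
    let alpha: f64 = 0.92; // load factor
    let k = 8;             // probes per cache line
    let hit_within_k = 1.0 - alpha.powi(k);
    println!("P(found within {} probes) ~= {:.3}", k, hit_within_k); // ~0.487
}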
-// -// FIXME(Gankro, pczarn): review the proof and put it all in a separate README.md -// -// Adaptive early resizing -// ---------------------- -// To protect against degenerate performance scenarios (including DOS attacks), -// the implementation includes an adaptive behavior that can resize the map -// early (before its capacity is exceeded) when suspiciously long probe sequences -// are encountered. -// -// With this algorithm in place it would be possible to turn a CPU attack into -// a memory attack due to the aggressive resizing. To prevent that the -// adaptive behavior only triggers when the map is at least half full. -// This reduces the effectiveness of the algorithm but also makes it completely safe. -// -// The previous safety measure also prevents degenerate interactions with -// really bad quality hash algorithms that can make normal inputs look like a -// DOS attack. -// -const DISPLACEMENT_THRESHOLD: usize = 128; -// -// The threshold of 128 is chosen to minimize the chance of exceeding it. -// In particular, we want that chance to be less than 10^-8 with a load of 90%. -// For displacement, the smallest constant that fits our needs is 90, -// so we round that up to 128. -// -// At a load factor of α, the odds of finding the target bucket after exactly n -// unsuccessful probes[1] are -// -// Pr_α{displacement = n} = -// (1 - α) / α * ∑_{k≥1} e^(-kα) * (kα)^(k+n) / (k + n)! * (1 - kα / (k + n + 1)) -// -// We use this formula to find the probability of triggering the adaptive behavior -// -// Pr_0.909{displacement > 128} = 1.601 * 10^-11 -// -// 1. Alfredo Viola (2005). Distributional analysis of Robin Hood linear probing -// hashing with buckets. - -/// A hash map implemented with linear probing and Robin Hood bucket stealing. -/// -/// By default, `HashMap` uses a hashing algorithm selected to provide -/// resistance against HashDoS attacks. The algorithm is randomly seeded, and a -/// reasonable best-effort is made to generate this seed from a high quality, -/// secure source of randomness provided by the host without blocking the -/// program. Because of this, the randomness of the seed depends on the output -/// quality of the system's random number generator when the seed is created. -/// In particular, seeds generated when the system's entropy pool is abnormally -/// low such as during system boot may be of a lower quality. -/// -/// The default hashing algorithm is currently SipHash 1-3, though this is -/// subject to change at any point in the future. While its performance is very -/// competitive for medium sized keys, other hashing algorithms will outperform -/// it for small keys such as integers as well as large keys such as long -/// strings, though those algorithms will typically *not* protect against -/// attacks such as HashDoS. -/// -/// The hashing algorithm can be replaced on a per-`HashMap` basis using the -/// [`default`], [`with_hasher`], and [`with_capacity_and_hasher`] methods. Many -/// alternative algorithms are available on crates.io, such as the [`fnv`] crate. -/// -/// It is required that the keys implement the [`Eq`] and [`Hash`] traits, although -/// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`. -/// If you implement these yourself, it is important that the following -/// property holds: -/// -/// ```text -/// k1 == k2 -> hash(k1) == hash(k2) -/// ``` -/// -/// In other words, if two keys are equal, their hashes must be equal. 
-/// -/// It is a logic error for a key to be modified in such a way that the key's -/// hash, as determined by the [`Hash`] trait, or its equality, as determined by -/// the [`Eq`] trait, changes while it is in the map. This is normally only -/// possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code. -/// -/// Relevant papers/articles: -/// -/// 1. Pedro Celis. ["Robin Hood Hashing"](https://cs.uwaterloo.ca/research/tr/1986/CS-86-14.pdf) -/// 2. Emmanuel Goossaert. ["Robin Hood -/// hashing"](http://codecapsule.com/2013/11/11/robin-hood-hashing/) -/// 3. Emmanuel Goossaert. ["Robin Hood hashing: backward shift -/// deletion"](http://codecapsule.com/2013/11/17/robin-hood-hashing-backward-shift-deletion/) -/// -/// # Examples -/// -/// ``` -/// use std::collections::HashMap; -/// -/// // type inference lets us omit an explicit type signature (which -/// // would be `HashMap<&str, &str>` in this example). -/// let mut book_reviews = HashMap::new(); -/// -/// // review some books. -/// book_reviews.insert("Adventures of Huckleberry Finn", "My favorite book."); -/// book_reviews.insert("Grimms' Fairy Tales", "Masterpiece."); -/// book_reviews.insert("Pride and Prejudice", "Very enjoyable."); -/// book_reviews.insert("The Adventures of Sherlock Holmes", "Eye lyked it alot."); -/// -/// // check for a specific one. -/// if !book_reviews.contains_key("Les Misérables") { -/// println!("We've got {} reviews, but Les Misérables ain't one.", -/// book_reviews.len()); -/// } -/// -/// // oops, this review has a lot of spelling mistakes, let's delete it. -/// book_reviews.remove("The Adventures of Sherlock Holmes"); -/// -/// // look up the values associated with some keys. -/// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"]; -/// for book in &to_find { -/// match book_reviews.get(book) { -/// Some(review) => println!("{}: {}", book, review), -/// None => println!("{} is unreviewed.", book) -/// } -/// } -/// -/// // iterate over everything. -/// for (book, review) in &book_reviews { -/// println!("{}: \"{}\"", book, review); -/// } -/// ``` -/// -/// `HashMap` also implements an [`Entry API`](#method.entry), which allows -/// for more complex methods of getting, setting, updating and removing keys and -/// their values: -/// -/// ``` -/// use std::collections::HashMap; -/// -/// // type inference lets us omit an explicit type signature (which -/// // would be `HashMap<&str, u8>` in this example). -/// let mut player_stats = HashMap::new(); -/// -/// fn random_stat_buff() -> u8 { -/// // could actually return some random value here - let's just return -/// // some fixed value for now -/// 42 -/// } -/// -/// // insert a key only if it doesn't already exist -/// player_stats.entry("health").or_insert(100); -/// -/// // insert a key using a function that provides a new value only if it -/// // doesn't already exist -/// player_stats.entry("defence").or_insert_with(random_stat_buff); -/// -/// // update a key, guarding against the key possibly not being set -/// let stat = player_stats.entry("attack").or_insert(100); -/// *stat += random_stat_buff(); -/// ``` -/// -/// The easiest way to use `HashMap` with a custom type as key is to derive [`Eq`] and [`Hash`]. -/// We must also derive [`PartialEq`]. 
-/// -/// [`Eq`]: ../../std/cmp/trait.Eq.html -/// [`Hash`]: ../../std/hash/trait.Hash.html -/// [`PartialEq`]: ../../std/cmp/trait.PartialEq.html -/// [`RefCell`]: ../../std/cell/struct.RefCell.html -/// [`Cell`]: ../../std/cell/struct.Cell.html -/// [`default`]: #method.default -/// [`with_hasher`]: #method.with_hasher -/// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher -/// [`fnv`]: https://crates.io/crates/fnv -/// -/// ``` -/// use std::collections::HashMap; -/// -/// #[derive(Hash, Eq, PartialEq, Debug)] -/// struct Viking { -/// name: String, -/// country: String, -/// } -/// -/// impl Viking { -/// /// Create a new Viking. -/// fn new(name: &str, country: &str) -> Viking { -/// Viking { name: name.to_string(), country: country.to_string() } -/// } -/// } -/// -/// // Use a HashMap to store the vikings' health points. -/// let mut vikings = HashMap::new(); -/// -/// vikings.insert(Viking::new("Einar", "Norway"), 25); -/// vikings.insert(Viking::new("Olaf", "Denmark"), 24); -/// vikings.insert(Viking::new("Harald", "Iceland"), 12); -/// -/// // Use derived implementation to print the status of the vikings. -/// for (viking, health) in &vikings { -/// println!("{:?} has {} hp", viking, health); -/// } -/// ``` -/// -/// A `HashMap` with fixed list of elements can be initialized from an array: -/// -/// ``` -/// use std::collections::HashMap; -/// -/// fn main() { -/// let timber_resources: HashMap<&str, i32> = -/// [("Norway", 100), -/// ("Denmark", 50), -/// ("Iceland", 10)] -/// .iter().cloned().collect(); -/// // use the values stored in map -/// } -/// ``` - -#[derive(Clone)] -pub struct HashMap { - // All hashes are keyed on these values, to prevent hash collision attacks. - hash_builder: S, - - table: RawTable, - - resize_policy: DefaultResizePolicy, -} - -/// Search for a pre-hashed key. -#[inline] -fn search_hashed(table: M, hash: SafeHash, mut is_match: F) -> InternalEntry - where M: Deref>, - F: FnMut(&K) -> bool -{ - // This is the only function where capacity can be zero. To avoid - // undefined behavior when Bucket::new gets the raw bucket in this - // case, immediately return the appropriate search result. - if table.capacity() == 0 { - return InternalEntry::TableIsEmpty; - } - - let size = table.size(); - let mut probe = Bucket::new(table, hash); - let mut displacement = 0; - - loop { - let full = match probe.peek() { - Empty(bucket) => { - // Found a hole! - return InternalEntry::Vacant { - hash, - elem: NoElem(bucket, displacement), - }; - } - Full(bucket) => bucket, - }; - - let probe_displacement = full.displacement(); - - if probe_displacement < displacement { - // Found a luckier bucket than me. - // We can finish the search early if we hit any bucket - // with a lower distance to initial bucket than we've probed. - return InternalEntry::Vacant { - hash, - elem: NeqElem(full, probe_displacement), - }; - } - - // If the hash doesn't match, it can't be this one.. - if hash == full.hash() { - // If the key doesn't match, it can't be this one.. 
- if is_match(full.read().0) { - return InternalEntry::Occupied { elem: full }; - } - } - displacement += 1; - probe = full.next(); - debug_assert!(displacement <= size); - } -} - -fn pop_internal(starting_bucket: FullBucketMut) - -> (K, V, &mut RawTable) -{ - let (empty, retkey, retval) = starting_bucket.take(); - let mut gap = match empty.gap_peek() { - Ok(b) => b, - Err(b) => return (retkey, retval, b.into_table()), - }; - - while gap.full().displacement() != 0 { - gap = match gap.shift() { - Ok(b) => b, - Err(b) => { - return (retkey, retval, b.into_table()); - }, - }; - } - - // Now we've done all our shifting. Return the value we grabbed earlier. - (retkey, retval, gap.into_table()) -} - -/// Perform robin hood bucket stealing at the given `bucket`. You must -/// also pass that bucket's displacement so we don't have to recalculate it. -/// -/// `hash`, `key`, and `val` are the elements to "robin hood" into the hashtable. -fn robin_hood<'a, K: 'a, V: 'a>(bucket: FullBucketMut<'a, K, V>, - mut displacement: usize, - mut hash: SafeHash, - mut key: K, - mut val: V) - -> FullBucketMut<'a, K, V> { - let size = bucket.table().size(); - let raw_capacity = bucket.table().capacity(); - // There can be at most `size - dib` buckets to displace, because - // in the worst case, there are `size` elements and we already are - // `displacement` buckets away from the initial one. - let idx_end = (bucket.index() + size - bucket.displacement()) % raw_capacity; - // Save the *starting point*. - let mut bucket = bucket.stash(); - - loop { - let (old_hash, old_key, old_val) = bucket.replace(hash, key, val); - hash = old_hash; - key = old_key; - val = old_val; - - loop { - displacement += 1; - let probe = bucket.next(); - debug_assert!(probe.index() != idx_end); - - let full_bucket = match probe.peek() { - Empty(bucket) => { - // Found a hole! - let bucket = bucket.put(hash, key, val); - // Now that it's stolen, just read the value's pointer - // right out of the table! Go back to the *starting point*. - // - // This use of `into_table` is misleading. It turns the - // bucket, which is a FullBucket on top of a - // FullBucketMut, into just one FullBucketMut. The "table" - // refers to the inner FullBucketMut in this context. - return bucket.into_table(); - } - Full(bucket) => bucket, - }; - - let probe_displacement = full_bucket.displacement(); - - bucket = full_bucket; - - // Robin hood! Steal the spot. - if probe_displacement < displacement { - displacement = probe_displacement; - break; - } - } - } -} - -impl HashMap - where K: Eq + Hash, - S: BuildHasher -{ - pub fn make_hash(&self, x: &X) -> SafeHash - where X: Hash - { - table::make_hash(&self.hash_builder, x) - } - - /// Search for a key, yielding the index if it's found in the hashtable. - /// If you already have the hash for the key lying around, use - /// search_hashed. - #[inline] - fn search<'a, Q: ?Sized>(&'a self, q: &Q) -> InternalEntry> - where K: Borrow, - Q: Eq + Hash - { - let hash = self.make_hash(q); - search_hashed(&self.table, hash, |k| q.eq(k.borrow())) - } - - #[inline] - fn search_mut<'a, Q: ?Sized>(&'a mut self, q: &Q) -> InternalEntry> - where K: Borrow, - Q: Eq + Hash - { - let hash = self.make_hash(q); - search_hashed(&mut self.table, hash, |k| q.eq(k.borrow())) - } - - // The caller should ensure that invariants by Robin Hood Hashing hold - // and that there's space in the underlying table. 
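// Illustrative sketch (not part of the style-servo sources): a simplified
// model of the lookup loop above, run against a table that already satisfies
// the Robin Hood invariant. Probe linearly from the key's ideal slot and stop
// early on a hole or on an entry that sits closer to its own ideal slot than
// we have probed, since the wanted key could never have been pushed past it.
// Slots hold (ideal_index, key); the table below is hand-built for the demo.
fn lookup(table: &[Option<(usize, &str)>], ideal: usize, key: &str) -> Option<usize> {
    let cap = table.len();
    for displacement in 0..cap {
        let idx = (ideal + displacement) % cap;
        match table[idx] {
            None => return None, // hole: the key cannot be further along
            Some((slot_ideal, k)) => {
                let slot_displacement = (idx + cap - slot_ideal) % cap;
                if slot_displacement < displacement {
                    return None; // met a "richer" entry first: key is absent
                }
                if k == key {
                    return Some(idx);
                }
            }
        }
    }
    None
}

fn main() {
    // "x" sits in its ideal slot 0; "a" and "b" both want slot 1,
    // and "b" was displaced one step to slot 2.
    let table = [Some((0, "x")), Some((1, "a")), Some((1, "b")), None];
    assert_eq!(lookup(&table, 1, "b"), Some(2));
    assert_eq!(lookup(&table, 1, "z"), None); // runs into the hole at slot 3
    assert_eq!(lookup(&table, 0, "q"), None); // slot 1 holds a richer entry, stop early
}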
- fn insert_hashed_ordered(&mut self, hash: SafeHash, k: K, v: V) { - let mut buckets = Bucket::new(&mut self.table, hash); - let start_index = buckets.index(); - - loop { - // We don't need to compare hashes for value swap. - // Not even DIBs for Robin Hood. - buckets = match buckets.peek() { - Empty(empty) => { - empty.put(hash, k, v); - return; - } - Full(b) => b.into_bucket(), - }; - buckets.next(); - debug_assert!(buckets.index() != start_index); - } - } -} - -impl HashMap - where K: Eq + Hash, - S: BuildHasher -{ - /// Creates an empty `HashMap` which will use the given hash builder to hash - /// keys. - /// - /// The created map has the default initial capacity. - /// - /// Warning: `hash_builder` is normally randomly generated, and - /// is designed to allow HashMaps to be resistant to attacks that - /// cause many collisions and very poor performance. Setting it - /// manually using this function can expose a DoS attack vector. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// use std::collections::hash_map::RandomState; - /// - /// let s = RandomState::new(); - /// let mut map = HashMap::with_hasher(s); - /// map.insert(1, 2); - /// ``` - #[inline] - pub fn try_with_hasher(hash_builder: S) -> Result, FailedAllocationError> { - Ok(HashMap { - hash_builder, - resize_policy: DefaultResizePolicy::new(), - table: RawTable::new(0)?, - }) - } - - #[inline] - pub fn with_hasher(hash_builder: S) -> HashMap { - Self::try_with_hasher(hash_builder).unwrap() - } - - /// Creates an empty `HashMap` with the specified capacity, using `hash_builder` - /// to hash the keys. - /// - /// The hash map will be able to hold at least `capacity` elements without - /// reallocating. If `capacity` is 0, the hash map will not allocate. - /// - /// Warning: `hash_builder` is normally randomly generated, and - /// is designed to allow HashMaps to be resistant to attacks that - /// cause many collisions and very poor performance. Setting it - /// manually using this function can expose a DoS attack vector. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// use std::collections::hash_map::RandomState; - /// - /// let s = RandomState::new(); - /// let mut map = HashMap::with_capacity_and_hasher(10, s); - /// map.insert(1, 2); - /// ``` - #[inline] - pub fn try_with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Result, FailedAllocationError> { - let resize_policy = DefaultResizePolicy::new(); - let raw_cap = resize_policy.raw_capacity(capacity); - Ok(HashMap { - hash_builder, - resize_policy, - table: RawTable::new(raw_cap)?, - }) - } - - pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> HashMap { - Self::try_with_capacity_and_hasher(capacity, hash_builder).unwrap() - } - - /// Returns a reference to the map's [`BuildHasher`]. - /// - /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html - pub fn hasher(&self) -> &S { - &self.hash_builder - } - - /// Returns the number of elements the map can hold without reallocating. - /// - /// This number is a lower bound; the `HashMap` might be able to hold - /// more, but is guaranteed to be able to hold at least this many. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// let map: HashMap = HashMap::with_capacity(100); - /// assert!(map.capacity() >= 100); - /// ``` - #[inline] - pub fn capacity(&self) -> usize { - self.resize_policy.capacity(self.raw_capacity()) - } - - /// Returns the hash map's raw capacity. 
- #[inline] - pub fn raw_capacity(&self) -> usize { - self.table.capacity() - } - - /// Returns a raw pointer to the table's buffer. - #[inline] - pub fn raw_buffer(&self) -> *const u8 { - assert!(self.len() != 0); - self.table.raw_buffer() - } - - /// Verify that the table metadata is internally consistent. - #[inline] - pub fn verify(&self) { - self.table.verify(); - } - - /// Reserves capacity for at least `additional` more elements to be inserted - /// in the `HashMap`. The collection may reserve more space to avoid - /// frequent reallocations. - /// - /// # Panics - /// - /// Panics if the new allocation size overflows [`usize`]. - /// - /// [`usize`]: ../../std/primitive.usize.html - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// let mut map: HashMap<&str, isize> = HashMap::new(); - /// map.reserve(10); - /// ``` - pub fn reserve(&mut self, additional: usize) { - self.try_reserve(additional).unwrap(); - } - - - #[inline] - pub fn try_reserve(&mut self, additional: usize) -> Result<(), FailedAllocationError> { - let remaining = self.capacity() - self.len(); // this can't overflow - if remaining < additional { - let min_cap = self.len().checked_add(additional).expect("reserve overflow"); - let raw_cap = self.resize_policy.raw_capacity(min_cap); - self.try_resize(raw_cap)?; - } else if self.table.tag() && remaining <= self.len() { - // Probe sequence is too long and table is half full, - // resize early to reduce probing length. - let new_capacity = self.table.capacity() * 2; - self.try_resize(new_capacity)?; - } - Ok(()) - } - - #[cold] - #[inline(never)] - fn try_resize(&mut self, new_raw_cap: usize) -> Result<(), FailedAllocationError> { - assert!(self.table.size() <= new_raw_cap); - assert!(new_raw_cap.is_power_of_two() || new_raw_cap == 0); - - let mut old_table = replace(&mut self.table, RawTable::new(new_raw_cap)?); - let old_size = old_table.size(); - - if old_table.size() == 0 { - return Ok(()); - } - - let mut bucket = Bucket::head_bucket(&mut old_table); - - // This is how the buckets might be laid out in memory: - // ($ marks an initialized bucket) - // ________________ - // |$$$_$$$$$$_$$$$$| - // - // But we've skipped the entire initial cluster of buckets - // and will continue iteration in this order: - // ________________ - // |$$$$$$_$$$$$ - // ^ wrap around once end is reached - // ________________ - // $$$_____________| - // ^ exit once table.size == 0 - loop { - bucket = match bucket.peek() { - Full(bucket) => { - let h = bucket.hash(); - let (b, k, v) = bucket.take(); - self.insert_hashed_ordered(h, k, v); - if b.table().size() == 0 { - break; - } - b.into_bucket() - } - Empty(b) => b.into_bucket(), - }; - bucket.next(); - } - - assert_eq!(self.table.size(), old_size); - Ok(()) - } - - /// Shrinks the capacity of the map as much as possible. It will drop - /// down as much as possible while maintaining the internal rules - /// and possibly leaving some space in accordance with the resize policy. 
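// Illustrative sketch (not part of the style-servo sources): the growth
// decision used by try_reserve above, as a standalone predicate. The table
// grows either because the requested headroom does not fit, or because the
// "long probe sequence" tag is set while the table is at least half full
// (the adaptive early resize described in the comments). Names are
// hypothetical and only the decision itself is modelled.
fn needs_resize(len: usize, capacity: usize, additional: usize, long_probe_tag: bool) -> bool {
    let remaining = capacity - len;
    remaining < additional || (long_probe_tag && remaining <= len)
}

fn main() {
    assert!(needs_resize(90, 100, 20, false)); // not enough headroom
    assert!(needs_resize(60, 100, 1, true));   // long probes and at least half full
    assert!(!needs_resize(10, 100, 1, true));  // long probes but mostly empty
}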
- /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map: HashMap = HashMap::with_capacity(100); - /// map.insert(1, 2); - /// map.insert(3, 4); - /// assert!(map.capacity() >= 100); - /// map.shrink_to_fit(); - /// assert!(map.capacity() >= 2); - /// ``` - pub fn shrink_to_fit(&mut self) { - self.try_shrink_to_fit().unwrap(); - } - - pub fn try_shrink_to_fit(&mut self) -> Result<(), FailedAllocationError> { - let new_raw_cap = self.resize_policy.raw_capacity(self.len()); - if self.raw_capacity() != new_raw_cap { - let old_table = replace(&mut self.table, RawTable::new(new_raw_cap)?); - let old_size = old_table.size(); - - // Shrink the table. Naive algorithm for resizing: - for (h, k, v) in old_table.into_iter() { - self.insert_hashed_nocheck(h, k, v); - } - - debug_assert_eq!(self.table.size(), old_size); - } - Ok(()) - } - - /// Insert a pre-hashed key-value pair, without first checking - /// that there's enough room in the buckets. Returns a reference to the - /// newly insert value. - /// - /// If the key already exists, the hashtable will be returned untouched - /// and a reference to the existing element will be returned. - fn insert_hashed_nocheck(&mut self, hash: SafeHash, k: K, v: V) -> Option { - let entry = search_hashed(&mut self.table, hash, |key| *key == k).into_entry(k); - match entry { - Some(Occupied(mut elem)) => Some(elem.insert(v)), - Some(Vacant(elem)) => { - elem.insert(v); - None - } - None => unreachable!(), - } - } - - /// An iterator visiting all keys in arbitrary order. - /// The iterator element type is `&'a K`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map = HashMap::new(); - /// map.insert("a", 1); - /// map.insert("b", 2); - /// map.insert("c", 3); - /// - /// for key in map.keys() { - /// println!("{}", key); - /// } - /// ``` - pub fn keys(&self) -> Keys { - Keys { inner: self.iter() } - } - - /// An iterator visiting all values in arbitrary order. - /// The iterator element type is `&'a V`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map = HashMap::new(); - /// map.insert("a", 1); - /// map.insert("b", 2); - /// map.insert("c", 3); - /// - /// for val in map.values() { - /// println!("{}", val); - /// } - /// ``` - pub fn values(&self) -> Values { - Values { inner: self.iter() } - } - - /// An iterator visiting all values mutably in arbitrary order. - /// The iterator element type is `&'a mut V`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map = HashMap::new(); - /// - /// map.insert("a", 1); - /// map.insert("b", 2); - /// map.insert("c", 3); - /// - /// for val in map.values_mut() { - /// *val = *val + 10; - /// } - /// - /// for val in map.values() { - /// println!("{}", val); - /// } - /// ``` - pub fn values_mut(&mut self) -> ValuesMut { - ValuesMut { inner: self.iter_mut() } - } - - /// An iterator visiting all key-value pairs in arbitrary order. - /// The iterator element type is `(&'a K, &'a V)`. 
- /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map = HashMap::new(); - /// map.insert("a", 1); - /// map.insert("b", 2); - /// map.insert("c", 3); - /// - /// for (key, val) in map.iter() { - /// println!("key: {} val: {}", key, val); - /// } - /// ``` - pub fn iter(&self) -> Iter { - Iter { inner: self.table.iter() } - } - - /// An iterator visiting all key-value pairs in arbitrary order, - /// with mutable references to the values. - /// The iterator element type is `(&'a K, &'a mut V)`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map = HashMap::new(); - /// map.insert("a", 1); - /// map.insert("b", 2); - /// map.insert("c", 3); - /// - /// // Update all values - /// for (_, val) in map.iter_mut() { - /// *val *= 2; - /// } - /// - /// for (key, val) in &map { - /// println!("key: {} val: {}", key, val); - /// } - /// ``` - pub fn iter_mut(&mut self) -> IterMut { - IterMut { inner: self.table.iter_mut() } - } - - /// Gets the given key's corresponding entry in the map for in-place manipulation. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut letters = HashMap::new(); - /// - /// for ch in "a short treatise on fungi".chars() { - /// let counter = letters.entry(ch).or_insert(0); - /// *counter += 1; - /// } - /// - /// assert_eq!(letters[&'s'], 2); - /// assert_eq!(letters[&'t'], 3); - /// assert_eq!(letters[&'u'], 1); - /// assert_eq!(letters.get(&'y'), None); - /// ``` - pub fn entry(&mut self, key: K) -> Entry { - self.try_entry(key).unwrap() - } - - #[inline(always)] - pub fn try_entry(&mut self, key: K) -> Result, FailedAllocationError> { - // Gotta resize now. - self.try_reserve(1)?; - let hash = self.make_hash(&key); - Ok(search_hashed(&mut self.table, hash, |q| q.eq(&key)) - .into_entry(key).expect("unreachable")) - } - - /// Returns the number of elements in the map. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut a = HashMap::new(); - /// assert_eq!(a.len(), 0); - /// a.insert(1, "a"); - /// assert_eq!(a.len(), 1); - /// ``` - pub fn len(&self) -> usize { - self.table.size() - } - - /// Returns true if the map contains no elements. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut a = HashMap::new(); - /// assert!(a.is_empty()); - /// a.insert(1, "a"); - /// assert!(!a.is_empty()); - /// ``` - #[inline] - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Clears the map, returning all key-value pairs as an iterator. Keeps the - /// allocated memory for reuse. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut a = HashMap::new(); - /// a.insert(1, "a"); - /// a.insert(2, "b"); - /// - /// for (k, v) in a.drain().take(1) { - /// assert!(k == 1 || k == 2); - /// assert!(v == "a" || v == "b"); - /// } - /// - /// assert!(a.is_empty()); - /// ``` - #[inline] - pub fn drain(&mut self) -> Drain where K: 'static, V: 'static { - Drain { inner: self.table.drain() } - } - - /// Clears the map, removing all key-value pairs. Keeps the allocated memory - /// for reuse. 
- /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut a = HashMap::new(); - /// a.insert(1, "a"); - /// a.clear(); - /// assert!(a.is_empty()); - /// ``` - #[inline] - pub fn clear(&mut self) where K: 'static, V: 'static { - self.drain(); - } - - /// Returns a reference to the value corresponding to the key. - /// - /// The key may be any borrowed form of the map's key type, but - /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for - /// the key type. - /// - /// [`Eq`]: ../../std/cmp/trait.Eq.html - /// [`Hash`]: ../../std/hash/trait.Hash.html - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map = HashMap::new(); - /// map.insert(1, "a"); - /// assert_eq!(map.get(&1), Some(&"a")); - /// assert_eq!(map.get(&2), None); - /// ``` - pub fn get(&self, k: &Q) -> Option<&V> - where K: Borrow, - Q: Hash + Eq - { - self.search(k).into_occupied_bucket().map(|bucket| bucket.into_refs().1) - } - - /// Returns true if the map contains a value for the specified key. - /// - /// The key may be any borrowed form of the map's key type, but - /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for - /// the key type. - /// - /// [`Eq`]: ../../std/cmp/trait.Eq.html - /// [`Hash`]: ../../std/hash/trait.Hash.html - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map = HashMap::new(); - /// map.insert(1, "a"); - /// assert_eq!(map.contains_key(&1), true); - /// assert_eq!(map.contains_key(&2), false); - /// ``` - pub fn contains_key(&self, k: &Q) -> bool - where K: Borrow, - Q: Hash + Eq - { - self.search(k).into_occupied_bucket().is_some() - } - - /// Returns a mutable reference to the value corresponding to the key. - /// - /// The key may be any borrowed form of the map's key type, but - /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for - /// the key type. - /// - /// [`Eq`]: ../../std/cmp/trait.Eq.html - /// [`Hash`]: ../../std/hash/trait.Hash.html - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map = HashMap::new(); - /// map.insert(1, "a"); - /// if let Some(x) = map.get_mut(&1) { - /// *x = "b"; - /// } - /// assert_eq!(map[&1], "b"); - /// ``` - pub fn get_mut(&mut self, k: &Q) -> Option<&mut V> - where K: Borrow, - Q: Hash + Eq - { - self.search_mut(k).into_occupied_bucket().map(|bucket| bucket.into_mut_refs().1) - } - - /// Inserts a key-value pair into the map. - /// - /// If the map did not have this key present, [`None`] is returned. - /// - /// If the map did have this key present, the value is updated, and the old - /// value is returned. The key is not updated, though; this matters for - /// types that can be `==` without being identical. See the [module-level - /// documentation] for more. 
- /// - /// [`None`]: ../../std/option/enum.Option.html#variant.None - /// [module-level documentation]: index.html#insert-and-complex-keys - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map = HashMap::new(); - /// assert_eq!(map.insert(37, "a"), None); - /// assert_eq!(map.is_empty(), false); - /// - /// map.insert(37, "b"); - /// assert_eq!(map.insert(37, "c"), Some("b")); - /// assert_eq!(map[&37], "c"); - /// ``` - pub fn insert(&mut self, k: K, v: V) -> Option { - self.try_insert(k, v).unwrap() - } - - #[inline] - pub fn try_insert(&mut self, k: K, v: V) -> Result, FailedAllocationError> { - let hash = self.make_hash(&k); - self.try_reserve(1)?; - Ok(self.insert_hashed_nocheck(hash, k, v)) - } - - /// Removes a key from the map, returning the value at the key if the key - /// was previously in the map. - /// - /// The key may be any borrowed form of the map's key type, but - /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for - /// the key type. - /// - /// [`Eq`]: ../../std/cmp/trait.Eq.html - /// [`Hash`]: ../../std/hash/trait.Hash.html - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map = HashMap::new(); - /// map.insert(1, "a"); - /// assert_eq!(map.remove(&1), Some("a")); - /// assert_eq!(map.remove(&1), None); - /// ``` - pub fn remove(&mut self, k: &Q) -> Option - where K: Borrow, - Q: Hash + Eq - { - if self.table.size() == 0 { - return None; - } - - self.search_mut(k).into_occupied_bucket().map(|bucket| pop_internal(bucket).1) - } - - /// Retains only the elements specified by the predicate. - /// - /// In other words, remove all pairs `(k, v)` such that `f(&k,&mut v)` returns `false`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map: HashMap = (0..8).map(|x|(x, x*10)).collect(); - /// map.retain(|&k, _| k % 2 == 0); - /// assert_eq!(map.len(), 4); - /// ``` - pub fn retain(&mut self, mut f: F) - where F: FnMut(&K, &mut V) -> bool - { - if self.table.size() == 0 { - return; - } - let mut elems_left = self.table.size(); - let mut bucket = Bucket::head_bucket(&mut self.table); - bucket.prev(); - let start_index = bucket.index(); - while elems_left != 0 { - bucket = match bucket.peek() { - Full(mut full) => { - elems_left -= 1; - let should_remove = { - let (k, v) = full.read_mut(); - !f(k, v) - }; - if should_remove { - let prev_raw = full.raw(); - let (_, _, t) = pop_internal(full); - Bucket::new_from(prev_raw, t) - } else { - full.into_bucket() - } - }, - Empty(b) => { - b.into_bucket() - } - }; - bucket.prev(); // reverse iteration - debug_assert!(elems_left == 0 || bucket.index() != start_index); - } - } -} - -impl PartialEq for HashMap - where K: Eq + Hash, - V: PartialEq, - S: BuildHasher -{ - fn eq(&self, other: &HashMap) -> bool { - if self.len() != other.len() { - return false; - } - - self.iter().all(|(key, value)| other.get(key).map_or(false, |v| *value == *v)) - } -} - -impl Eq for HashMap - where K: Eq + Hash, - V: Eq, - S: BuildHasher -{ -} - -impl Debug for HashMap - where K: Eq + Hash + Debug, - V: Debug, - S: BuildHasher -{ - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_map().entries(self.iter()).finish() - } -} - -impl Default for HashMap - where K: Eq + Hash, - S: BuildHasher + Default -{ - /// Creates an empty `HashMap`, with the `Default` value for the hasher. 
- fn default() -> HashMap { - HashMap::with_hasher(Default::default()) - } -} - -impl<'a, K, Q: ?Sized, V, S> Index<&'a Q> for HashMap - where K: Eq + Hash + Borrow, - Q: Eq + Hash, - S: BuildHasher -{ - type Output = V; - - #[inline] - fn index(&self, index: &Q) -> &V { - self.get(index).expect("no entry found for key") - } -} - -/// An iterator over the entries of a `HashMap`. -/// -/// This `struct` is created by the [`iter`] method on [`HashMap`]. See its -/// documentation for more. -/// -/// [`iter`]: struct.HashMap.html#method.iter -/// [`HashMap`]: struct.HashMap.html -pub struct Iter<'a, K: 'a, V: 'a> { - inner: table::Iter<'a, K, V>, -} - -// FIXME(#19839) Remove in favor of `#[derive(Clone)]` -impl<'a, K, V> Clone for Iter<'a, K, V> { - fn clone(&self) -> Iter<'a, K, V> { - Iter { inner: self.inner.clone() } - } -} - -impl<'a, K: Debug, V: Debug> fmt::Debug for Iter<'a, K, V> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_list() - .entries(self.clone()) - .finish() - } -} - -/// A mutable iterator over the entries of a `HashMap`. -/// -/// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its -/// documentation for more. -/// -/// [`iter_mut`]: struct.HashMap.html#method.iter_mut -/// [`HashMap`]: struct.HashMap.html -pub struct IterMut<'a, K: 'a, V: 'a> { - inner: table::IterMut<'a, K, V>, -} - -/// An owning iterator over the entries of a `HashMap`. -/// -/// This `struct` is created by the [`into_iter`] method on [`HashMap`][`HashMap`] -/// (provided by the `IntoIterator` trait). See its documentation for more. -/// -/// [`into_iter`]: struct.HashMap.html#method.into_iter -/// [`HashMap`]: struct.HashMap.html -pub struct IntoIter { - pub(super) inner: table::IntoIter, -} - -/// An iterator over the keys of a `HashMap`. -/// -/// This `struct` is created by the [`keys`] method on [`HashMap`]. See its -/// documentation for more. -/// -/// [`keys`]: struct.HashMap.html#method.keys -/// [`HashMap`]: struct.HashMap.html -pub struct Keys<'a, K: 'a, V: 'a> { - inner: Iter<'a, K, V>, -} - -// FIXME(#19839) Remove in favor of `#[derive(Clone)]` -impl<'a, K, V> Clone for Keys<'a, K, V> { - fn clone(&self) -> Keys<'a, K, V> { - Keys { inner: self.inner.clone() } - } -} - -impl<'a, K: Debug, V> fmt::Debug for Keys<'a, K, V> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_list() - .entries(self.clone()) - .finish() - } -} - -/// An iterator over the values of a `HashMap`. -/// -/// This `struct` is created by the [`values`] method on [`HashMap`]. See its -/// documentation for more. -/// -/// [`values`]: struct.HashMap.html#method.values -/// [`HashMap`]: struct.HashMap.html -pub struct Values<'a, K: 'a, V: 'a> { - inner: Iter<'a, K, V>, -} - -// FIXME(#19839) Remove in favor of `#[derive(Clone)]` -impl<'a, K, V> Clone for Values<'a, K, V> { - fn clone(&self) -> Values<'a, K, V> { - Values { inner: self.inner.clone() } - } -} - -impl<'a, K, V: Debug> fmt::Debug for Values<'a, K, V> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_list() - .entries(self.clone()) - .finish() - } -} - -/// A draining iterator over the entries of a `HashMap`. -/// -/// This `struct` is created by the [`drain`] method on [`HashMap`]. See its -/// documentation for more. -/// -/// [`drain`]: struct.HashMap.html#method.drain -/// [`HashMap`]: struct.HashMap.html -pub struct Drain<'a, K: 'static, V: 'static> { - pub(super) inner: table::Drain<'a, K, V>, -} - -/// A mutable iterator over the values of a `HashMap`. 
-/// -/// This `struct` is created by the [`values_mut`] method on [`HashMap`]. See its -/// documentation for more. -/// -/// [`values_mut`]: struct.HashMap.html#method.values_mut -/// [`HashMap`]: struct.HashMap.html -pub struct ValuesMut<'a, K: 'a, V: 'a> { - inner: IterMut<'a, K, V>, -} - -enum InternalEntry { - Occupied { elem: FullBucket }, - Vacant { - hash: SafeHash, - elem: VacantEntryState, - }, - TableIsEmpty, -} - -impl InternalEntry { - #[inline] - fn into_occupied_bucket(self) -> Option> { - match self { - InternalEntry::Occupied { elem } => Some(elem), - _ => None, - } - } -} - -impl<'a, K, V> InternalEntry> { - #[inline] - fn into_entry(self, key: K) -> Option> { - match self { - InternalEntry::Occupied { elem } => { - Some(Occupied(OccupiedEntry { - key: Some(key), - elem, - })) - } - InternalEntry::Vacant { hash, elem } => { - Some(Vacant(VacantEntry { - hash, - key, - elem, - })) - } - InternalEntry::TableIsEmpty => None, - } - } -} - -/// A view into a single entry in a map, which may either be vacant or occupied. -/// -/// This `enum` is constructed from the [`entry`] method on [`HashMap`]. -/// -/// [`HashMap`]: struct.HashMap.html -/// [`entry`]: struct.HashMap.html#method.entry -pub enum Entry<'a, K: 'a, V: 'a> { - /// An occupied entry. - Occupied( OccupiedEntry<'a, K, V>), - - /// A vacant entry. - Vacant( VacantEntry<'a, K, V>), -} - -impl<'a, K: 'a + Debug, V: 'a + Debug> Debug for Entry<'a, K, V> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Vacant(ref v) => { - f.debug_tuple("Entry") - .field(v) - .finish() - } - Occupied(ref o) => { - f.debug_tuple("Entry") - .field(o) - .finish() - } - } - } -} - -/// A view into an occupied entry in a `HashMap`. -/// It is part of the [`Entry`] enum. -/// -/// [`Entry`]: enum.Entry.html -pub struct OccupiedEntry<'a, K: 'a, V: 'a> { - key: Option, - elem: FullBucket>, -} - -impl<'a, K: 'a + Debug, V: 'a + Debug> Debug for OccupiedEntry<'a, K, V> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("OccupiedEntry") - .field("key", self.key()) - .field("value", self.get()) - .finish() - } -} - -/// A view into a vacant entry in a `HashMap`. -/// It is part of the [`Entry`] enum. -/// -/// [`Entry`]: enum.Entry.html -pub struct VacantEntry<'a, K: 'a, V: 'a> { - hash: SafeHash, - key: K, - elem: VacantEntryState>, -} - -impl<'a, K: 'a + Debug, V: 'a> Debug for VacantEntry<'a, K, V> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple("VacantEntry") - .field(self.key()) - .finish() - } -} - -/// Possible states of a VacantEntry. -enum VacantEntryState { - /// The index is occupied, but the key to insert has precedence, - /// and will kick the current one out on insertion. - NeqElem(FullBucket, usize), - /// The index is genuinely vacant. 
- NoElem(EmptyBucket, usize), -} - -impl<'a, K, V, S> IntoIterator for &'a HashMap - where K: Eq + Hash, - S: BuildHasher -{ - type Item = (&'a K, &'a V); - type IntoIter = Iter<'a, K, V>; - - fn into_iter(self) -> Iter<'a, K, V> { - self.iter() - } -} - -impl<'a, K, V, S> IntoIterator for &'a mut HashMap - where K: Eq + Hash, - S: BuildHasher -{ - type Item = (&'a K, &'a mut V); - type IntoIter = IterMut<'a, K, V>; - - fn into_iter(self) -> IterMut<'a, K, V> { - self.iter_mut() - } -} - -impl IntoIterator for HashMap - where K: Eq + Hash, - S: BuildHasher -{ - type Item = (K, V); - type IntoIter = IntoIter; - - /// Creates a consuming iterator, that is, one that moves each key-value - /// pair out of the map in arbitrary order. The map cannot be used after - /// calling this. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map = HashMap::new(); - /// map.insert("a", 1); - /// map.insert("b", 2); - /// map.insert("c", 3); - /// - /// // Not possible with .iter() - /// let vec: Vec<(&str, isize)> = map.into_iter().collect(); - /// ``` - fn into_iter(self) -> IntoIter { - IntoIter { inner: self.table.into_iter() } - } -} - -impl<'a, K, V> Iterator for Iter<'a, K, V> { - type Item = (&'a K, &'a V); - - #[inline] - fn next(&mut self) -> Option<(&'a K, &'a V)> { - self.inner.next() - } - #[inline] - fn size_hint(&self) -> (usize, Option) { - self.inner.size_hint() - } -} -impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> { - #[inline] - fn len(&self) -> usize { - self.inner.len() - } -} - - -impl<'a, K, V> Iterator for IterMut<'a, K, V> { - type Item = (&'a K, &'a mut V); - - #[inline] - fn next(&mut self) -> Option<(&'a K, &'a mut V)> { - self.inner.next() - } - #[inline] - fn size_hint(&self) -> (usize, Option) { - self.inner.size_hint() - } -} -impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> { - #[inline] - fn len(&self) -> usize { - self.inner.len() - } -} - -impl<'a, K, V> fmt::Debug for IterMut<'a, K, V> - where K: fmt::Debug, - V: fmt::Debug, -{ - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_list() - .entries(self.inner.iter()) - .finish() - } -} - -impl Iterator for IntoIter { - type Item = (K, V); - - #[inline] - fn next(&mut self) -> Option<(K, V)> { - self.inner.next().map(|(_, k, v)| (k, v)) - } - #[inline] - fn size_hint(&self) -> (usize, Option) { - self.inner.size_hint() - } -} -impl ExactSizeIterator for IntoIter { - #[inline] - fn len(&self) -> usize { - self.inner.len() - } -} - -impl fmt::Debug for IntoIter { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_list() - .entries(self.inner.iter()) - .finish() - } -} - -impl<'a, K, V> Iterator for Keys<'a, K, V> { - type Item = &'a K; - - #[inline] - fn next(&mut self) -> Option<(&'a K)> { - self.inner.next().map(|(k, _)| k) - } - #[inline] - fn size_hint(&self) -> (usize, Option) { - self.inner.size_hint() - } -} -impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> { - #[inline] - fn len(&self) -> usize { - self.inner.len() - } -} - -impl<'a, K, V> Iterator for Values<'a, K, V> { - type Item = &'a V; - - #[inline] - fn next(&mut self) -> Option<(&'a V)> { - self.inner.next().map(|(_, v)| v) - } - #[inline] - fn size_hint(&self) -> (usize, Option) { - self.inner.size_hint() - } -} -impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> { - #[inline] - fn len(&self) -> usize { - self.inner.len() - } -} -impl<'a, K, V> Iterator for ValuesMut<'a, K, V> { - type Item = &'a mut V; - - #[inline] - fn next(&mut self) -> Option<(&'a mut V)> { - 
self.inner.next().map(|(_, v)| v) - } - #[inline] - fn size_hint(&self) -> (usize, Option) { - self.inner.size_hint() - } -} -impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> { - #[inline] - fn len(&self) -> usize { - self.inner.len() - } -} - -impl<'a, K, V> fmt::Debug for ValuesMut<'a, K, V> - where K: fmt::Debug, - V: fmt::Debug, -{ - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_list() - .entries(self.inner.inner.iter()) - .finish() - } -} - -impl<'a, K, V> Iterator for Drain<'a, K, V> { - type Item = (K, V); - - #[inline] - fn next(&mut self) -> Option<(K, V)> { - self.inner.next().map(|(_, k, v)| (k, v)) - } - #[inline] - fn size_hint(&self) -> (usize, Option) { - self.inner.size_hint() - } -} -impl<'a, K, V> ExactSizeIterator for Drain<'a, K, V> { - #[inline] - fn len(&self) -> usize { - self.inner.len() - } -} - -impl<'a, K, V> fmt::Debug for Drain<'a, K, V> - where K: fmt::Debug, - V: fmt::Debug, -{ - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_list() - .entries(self.inner.iter()) - .finish() - } -} - -// FORK NOTE: Removed Placer impl - -impl<'a, K, V> Entry<'a, K, V> { - /// Ensures a value is in the entry by inserting the default if empty, and returns - /// a mutable reference to the value in the entry. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map: HashMap<&str, u32> = HashMap::new(); - /// map.entry("poneyland").or_insert(12); - /// - /// assert_eq!(map["poneyland"], 12); - /// - /// *map.entry("poneyland").or_insert(12) += 10; - /// assert_eq!(map["poneyland"], 22); - /// ``` - pub fn or_insert(self, default: V) -> &'a mut V { - match self { - Occupied(entry) => entry.into_mut(), - Vacant(entry) => entry.insert(default), - } - } - - /// Ensures a value is in the entry by inserting the result of the default function if empty, - /// and returns a mutable reference to the value in the entry. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map: HashMap<&str, String> = HashMap::new(); - /// let s = "hoho".to_string(); - /// - /// map.entry("poneyland").or_insert_with(|| s); - /// - /// assert_eq!(map["poneyland"], "hoho".to_string()); - /// ``` - pub fn or_insert_with V>(self, default: F) -> &'a mut V { - match self { - Occupied(entry) => entry.into_mut(), - Vacant(entry) => entry.insert(default()), - } - } - - /// Returns a reference to this entry's key. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map: HashMap<&str, u32> = HashMap::new(); - /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); - /// ``` - pub fn key(&self) -> &K { - match *self { - Occupied(ref entry) => entry.key(), - Vacant(ref entry) => entry.key(), - } - } -} - -impl<'a, K, V> OccupiedEntry<'a, K, V> { - /// Gets a reference to the key in the entry. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map: HashMap<&str, u32> = HashMap::new(); - /// map.entry("poneyland").or_insert(12); - /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); - /// ``` - pub fn key(&self) -> &K { - self.elem.read().0 - } - - /// Take the ownership of the key and value from the map. 
- /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// use std::collections::hash_map::Entry; - /// - /// let mut map: HashMap<&str, u32> = HashMap::new(); - /// map.entry("poneyland").or_insert(12); - /// - /// if let Entry::Occupied(o) = map.entry("poneyland") { - /// // We delete the entry from the map. - /// o.remove_entry(); - /// } - /// - /// assert_eq!(map.contains_key("poneyland"), false); - /// ``` - pub fn remove_entry(self) -> (K, V) { - let (k, v, _) = pop_internal(self.elem); - (k, v) - } - - /// Gets a reference to the value in the entry. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// use std::collections::hash_map::Entry; - /// - /// let mut map: HashMap<&str, u32> = HashMap::new(); - /// map.entry("poneyland").or_insert(12); - /// - /// if let Entry::Occupied(o) = map.entry("poneyland") { - /// assert_eq!(o.get(), &12); - /// } - /// ``` - pub fn get(&self) -> &V { - self.elem.read().1 - } - - /// Gets a mutable reference to the value in the entry. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// use std::collections::hash_map::Entry; - /// - /// let mut map: HashMap<&str, u32> = HashMap::new(); - /// map.entry("poneyland").or_insert(12); - /// - /// assert_eq!(map["poneyland"], 12); - /// if let Entry::Occupied(mut o) = map.entry("poneyland") { - /// *o.get_mut() += 10; - /// } - /// - /// assert_eq!(map["poneyland"], 22); - /// ``` - pub fn get_mut(&mut self) -> &mut V { - self.elem.read_mut().1 - } - - /// Converts the OccupiedEntry into a mutable reference to the value in the entry - /// with a lifetime bound to the map itself. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// use std::collections::hash_map::Entry; - /// - /// let mut map: HashMap<&str, u32> = HashMap::new(); - /// map.entry("poneyland").or_insert(12); - /// - /// assert_eq!(map["poneyland"], 12); - /// if let Entry::Occupied(o) = map.entry("poneyland") { - /// *o.into_mut() += 10; - /// } - /// - /// assert_eq!(map["poneyland"], 22); - /// ``` - pub fn into_mut(self) -> &'a mut V { - self.elem.into_mut_refs().1 - } - - /// Sets the value of the entry, and returns the entry's old value. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// use std::collections::hash_map::Entry; - /// - /// let mut map: HashMap<&str, u32> = HashMap::new(); - /// map.entry("poneyland").or_insert(12); - /// - /// if let Entry::Occupied(mut o) = map.entry("poneyland") { - /// assert_eq!(o.insert(15), 12); - /// } - /// - /// assert_eq!(map["poneyland"], 15); - /// ``` - pub fn insert(&mut self, mut value: V) -> V { - let old_value = self.get_mut(); - mem::swap(&mut value, old_value); - value - } - - /// Takes the value out of the entry, and returns it. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// use std::collections::hash_map::Entry; - /// - /// let mut map: HashMap<&str, u32> = HashMap::new(); - /// map.entry("poneyland").or_insert(12); - /// - /// if let Entry::Occupied(o) = map.entry("poneyland") { - /// assert_eq!(o.remove(), 12); - /// } - /// - /// assert_eq!(map.contains_key("poneyland"), false); - /// ``` - pub fn remove(self) -> V { - pop_internal(self.elem).1 - } - - /// Returns a key that was used for search. - /// - /// The key was retained for further use. 
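// A hedged usage sketch for the Entry API documented above, written against
// std::collections::HashMap; the fork documents the same behaviour for `entry`, `or_insert`,
// `insert` and `remove_entry`. Function name and data are illustrative only.
use std::collections::HashMap;
use std::collections::hash_map::Entry;

fn entry_api_demo() {
    let mut counts: HashMap<&str, u32> = HashMap::new();

    // or_insert: word counting without a separate contains_key/insert dance.
    for &word in ["a", "b", "a"].iter() {
        *counts.entry(word).or_insert(0) += 1;
    }
    assert_eq!(counts["a"], 2);

    // Matching on the entry exposes OccupiedEntry / VacantEntry directly.
    match counts.entry("b") {
        Entry::Occupied(mut o) => {
            assert_eq!(o.insert(10), 1); // returns the previous value
        }
        Entry::Vacant(v) => {
            v.insert(0);
        }
    }
    assert_eq!(counts["b"], 10);

    // remove_entry takes both key and value out of the map.
    if let Entry::Occupied(o) = counts.entry("a") {
        let (k, v) = o.remove_entry();
        assert_eq!((k, v), ("a", 2));
    }
    assert!(!counts.contains_key("a"));
}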
- fn take_key(&mut self) -> Option { - self.key.take() - } -} - -impl<'a, K: 'a, V: 'a> VacantEntry<'a, K, V> { - /// Gets a reference to the key that would be used when inserting a value - /// through the `VacantEntry`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// - /// let mut map: HashMap<&str, u32> = HashMap::new(); - /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); - /// ``` - pub fn key(&self) -> &K { - &self.key - } - - /// Take ownership of the key. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// use std::collections::hash_map::Entry; - /// - /// let mut map: HashMap<&str, u32> = HashMap::new(); - /// - /// if let Entry::Vacant(v) = map.entry("poneyland") { - /// v.into_key(); - /// } - /// ``` - pub fn into_key(self) -> K { - self.key - } - - /// Sets the value of the entry with the VacantEntry's key, - /// and returns a mutable reference to it. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashMap; - /// use std::collections::hash_map::Entry; - /// - /// let mut map: HashMap<&str, u32> = HashMap::new(); - /// - /// if let Entry::Vacant(o) = map.entry("poneyland") { - /// o.insert(37); - /// } - /// assert_eq!(map["poneyland"], 37); - /// ``` - pub fn insert(self, value: V) -> &'a mut V { - let b = match self.elem { - NeqElem(mut bucket, disp) => { - if disp >= DISPLACEMENT_THRESHOLD { - bucket.table_mut().set_tag(true); - } - robin_hood(bucket, disp, self.hash, self.key, value) - }, - NoElem(mut bucket, disp) => { - if disp >= DISPLACEMENT_THRESHOLD { - bucket.table_mut().set_tag(true); - } - bucket.put(self.hash, self.key, value) - }, - }; - b.into_mut_refs().1 - } -} - -impl FromIterator<(K, V)> for HashMap - where K: Eq + Hash, - S: BuildHasher + Default -{ - fn from_iter>(iter: T) -> HashMap { - let mut map = HashMap::with_hasher(Default::default()); - map.extend(iter); - map - } -} - -impl Extend<(K, V)> for HashMap - where K: Eq + Hash, - S: BuildHasher -{ - fn extend>(&mut self, iter: T) { - // Keys may be already present or show multiple times in the iterator. - // Reserve the entire hint lower bound if the map is empty. - // Otherwise reserve half the hint (rounded up), so the map - // will only resize twice in the worst case. 
- let iter = iter.into_iter(); - let reserve = if self.is_empty() { - iter.size_hint().0 - } else { - (iter.size_hint().0 + 1) / 2 - }; - self.reserve(reserve); - for (k, v) in iter { - self.insert(k, v); - } - } -} - -impl<'a, K, V, S> Extend<(&'a K, &'a V)> for HashMap - where K: Eq + Hash + Copy, - V: Copy, - S: BuildHasher -{ - fn extend>(&mut self, iter: T) { - self.extend(iter.into_iter().map(|(&key, &value)| (key, value))); - } -} - -// FORK NOTE: These can be reused -pub use std::collections::hash_map::{DefaultHasher, RandomState}; - - -impl super::Recover for HashMap - where K: Eq + Hash + Borrow, - S: BuildHasher, - Q: Eq + Hash -{ - type Key = K; - - fn get(&self, key: &Q) -> Option<&K> { - self.search(key).into_occupied_bucket().map(|bucket| bucket.into_refs().0) - } - - fn take(&mut self, key: &Q) -> Option { - if self.table.size() == 0 { - return None; - } - - self.search_mut(key).into_occupied_bucket().map(|bucket| pop_internal(bucket).0) - } - - fn replace(&mut self, key: K) -> Option { - self.reserve(1); - - match self.entry(key) { - Occupied(mut occupied) => { - let key = occupied.take_key().unwrap(); - Some(mem::replace(occupied.elem.read_mut().0, key)) - } - Vacant(vacant) => { - vacant.insert(()); - None - } - } - } -} - -#[allow(dead_code)] -fn assert_covariance() { - fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> { - v - } - fn map_val<'new>(v: HashMap) -> HashMap { - v - } - fn iter_key<'a, 'new>(v: Iter<'a, &'static str, u8>) -> Iter<'a, &'new str, u8> { - v - } - fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> { - v - } - fn into_iter_key<'new>(v: IntoIter<&'static str, u8>) -> IntoIter<&'new str, u8> { - v - } - fn into_iter_val<'new>(v: IntoIter) -> IntoIter { - v - } - fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> { - v - } - fn keys_val<'a, 'new>(v: Keys<'a, u8, &'static str>) -> Keys<'a, u8, &'new str> { - v - } - fn values_key<'a, 'new>(v: Values<'a, &'static str, u8>) -> Values<'a, &'new str, u8> { - v - } - fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> { - v - } - fn drain<'new>(d: Drain<'static, &'static str, &'static str>) - -> Drain<'new, &'new str, &'new str> { - d - } -} - -#[cfg(test)] -mod test_map { - extern crate rand; - use super::HashMap; - use super::Entry::{Occupied, Vacant}; - use super::RandomState; - use cell::RefCell; - use self::rand::{thread_rng, Rng}; - - #[test] - fn test_zero_capacities() { - type HM = HashMap; - - let m = HM::new(); - assert_eq!(m.capacity(), 0); - - let m = HM::default(); - assert_eq!(m.capacity(), 0); - - let m = HM::with_hasher(RandomState::new()); - assert_eq!(m.capacity(), 0); - - let m = HM::with_capacity(0); - assert_eq!(m.capacity(), 0); - - let m = HM::with_capacity_and_hasher(0, RandomState::new()); - assert_eq!(m.capacity(), 0); - - let mut m = HM::new(); - m.insert(1, 1); - m.insert(2, 2); - m.remove(&1); - m.remove(&2); - m.shrink_to_fit(); - assert_eq!(m.capacity(), 0); - - let mut m = HM::new(); - m.reserve(0); - assert_eq!(m.capacity(), 0); - } - - #[test] - fn test_create_capacity_zero() { - let mut m = HashMap::with_capacity(0); - - assert!(m.insert(1, 1).is_none()); - - assert!(m.contains_key(&1)); - assert!(!m.contains_key(&0)); - } - - #[test] - fn test_insert() { - let mut m = HashMap::new(); - assert_eq!(m.len(), 0); - assert!(m.insert(1, 2).is_none()); - assert_eq!(m.len(), 1); - assert!(m.insert(2, 4).is_none()); - assert_eq!(m.len(), 2); - 
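// A small, assumption-labelled sketch of the reserve heuristic used by `extend` above:
// reserve the whole size_hint lower bound when the map is empty, otherwise half of it
// rounded up, so the map resizes at most twice in the worst case. `extend_reserve_hint`
// is a hypothetical helper, not part of the deleted file.
fn extend_reserve_hint(current_len: usize, hint_lower: usize) -> usize {
    if current_len == 0 {
        hint_lower
    } else {
        (hint_lower + 1) / 2
    }
}

#[test]
fn extend_reserve_hint_examples() {
    assert_eq!(extend_reserve_hint(0, 1000), 1000); // empty map: reserve everything up front
    assert_eq!(extend_reserve_hint(10, 1000), 500); // non-empty: half now, grow once more if needed
    assert_eq!(extend_reserve_hint(10, 7), 4);      // rounding up keeps the bound conservative
}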
assert_eq!(*m.get(&1).unwrap(), 2); - assert_eq!(*m.get(&2).unwrap(), 4); - } - - #[test] - fn test_clone() { - let mut m = HashMap::new(); - assert_eq!(m.len(), 0); - assert!(m.insert(1, 2).is_none()); - assert_eq!(m.len(), 1); - assert!(m.insert(2, 4).is_none()); - assert_eq!(m.len(), 2); - let m2 = m.clone(); - assert_eq!(*m2.get(&1).unwrap(), 2); - assert_eq!(*m2.get(&2).unwrap(), 4); - assert_eq!(m2.len(), 2); - } - - thread_local! { static DROP_VECTOR: RefCell> = RefCell::new(Vec::new()) } - - #[derive(Hash, PartialEq, Eq)] - struct Dropable { - k: usize, - } - - impl Dropable { - fn new(k: usize) -> Dropable { - DROP_VECTOR.with(|slot| { - slot.borrow_mut()[k] += 1; - }); - - Dropable { k: k } - } - } - - impl Drop for Dropable { - fn drop(&mut self) { - DROP_VECTOR.with(|slot| { - slot.borrow_mut()[self.k] -= 1; - }); - } - } - - impl Clone for Dropable { - fn clone(&self) -> Dropable { - Dropable::new(self.k) - } - } - - #[test] - fn test_drops() { - DROP_VECTOR.with(|slot| { - *slot.borrow_mut() = vec![0; 200]; - }); - - { - let mut m = HashMap::new(); - - DROP_VECTOR.with(|v| { - for i in 0..200 { - assert_eq!(v.borrow()[i], 0); - } - }); - - for i in 0..100 { - let d1 = Dropable::new(i); - let d2 = Dropable::new(i + 100); - m.insert(d1, d2); - } - - DROP_VECTOR.with(|v| { - for i in 0..200 { - assert_eq!(v.borrow()[i], 1); - } - }); - - for i in 0..50 { - let k = Dropable::new(i); - let v = m.remove(&k); - - assert!(v.is_some()); - - DROP_VECTOR.with(|v| { - assert_eq!(v.borrow()[i], 1); - assert_eq!(v.borrow()[i+100], 1); - }); - } - - DROP_VECTOR.with(|v| { - for i in 0..50 { - assert_eq!(v.borrow()[i], 0); - assert_eq!(v.borrow()[i+100], 0); - } - - for i in 50..100 { - assert_eq!(v.borrow()[i], 1); - assert_eq!(v.borrow()[i+100], 1); - } - }); - } - - DROP_VECTOR.with(|v| { - for i in 0..200 { - assert_eq!(v.borrow()[i], 0); - } - }); - } - - #[test] - fn test_into_iter_drops() { - DROP_VECTOR.with(|v| { - *v.borrow_mut() = vec![0; 200]; - }); - - let hm = { - let mut hm = HashMap::new(); - - DROP_VECTOR.with(|v| { - for i in 0..200 { - assert_eq!(v.borrow()[i], 0); - } - }); - - for i in 0..100 { - let d1 = Dropable::new(i); - let d2 = Dropable::new(i + 100); - hm.insert(d1, d2); - } - - DROP_VECTOR.with(|v| { - for i in 0..200 { - assert_eq!(v.borrow()[i], 1); - } - }); - - hm - }; - - // By the way, ensure that cloning doesn't screw up the dropping. 
- drop(hm.clone()); - - { - let mut half = hm.into_iter().take(50); - - DROP_VECTOR.with(|v| { - for i in 0..200 { - assert_eq!(v.borrow()[i], 1); - } - }); - - for _ in half.by_ref() {} - - DROP_VECTOR.with(|v| { - let nk = (0..100) - .filter(|&i| v.borrow()[i] == 1) - .count(); - - let nv = (0..100) - .filter(|&i| v.borrow()[i + 100] == 1) - .count(); - - assert_eq!(nk, 50); - assert_eq!(nv, 50); - }); - }; - - DROP_VECTOR.with(|v| { - for i in 0..200 { - assert_eq!(v.borrow()[i], 0); - } - }); - } - - #[test] - fn test_empty_remove() { - let mut m: HashMap = HashMap::new(); - assert_eq!(m.remove(&0), None); - } - - #[test] - fn test_empty_entry() { - let mut m: HashMap = HashMap::new(); - match m.entry(0) { - Occupied(_) => panic!(), - Vacant(_) => {} - } - assert!(*m.entry(0).or_insert(true)); - assert_eq!(m.len(), 1); - } - - #[test] - fn test_empty_iter() { - let mut m: HashMap = HashMap::new(); - assert_eq!(m.drain().next(), None); - assert_eq!(m.keys().next(), None); - assert_eq!(m.values().next(), None); - assert_eq!(m.values_mut().next(), None); - assert_eq!(m.iter().next(), None); - assert_eq!(m.iter_mut().next(), None); - assert_eq!(m.len(), 0); - assert!(m.is_empty()); - assert_eq!(m.into_iter().next(), None); - } - - #[test] - fn test_lots_of_insertions() { - let mut m = HashMap::new(); - - // Try this a few times to make sure we never screw up the hashmap's - // internal state. - for _ in 0..10 { - assert!(m.is_empty()); - - for i in 1..1001 { - assert!(m.insert(i, i).is_none()); - - for j in 1..i + 1 { - let r = m.get(&j); - assert_eq!(r, Some(&j)); - } - - for j in i + 1..1001 { - let r = m.get(&j); - assert_eq!(r, None); - } - } - - for i in 1001..2001 { - assert!(!m.contains_key(&i)); - } - - // remove forwards - for i in 1..1001 { - assert!(m.remove(&i).is_some()); - - for j in 1..i + 1 { - assert!(!m.contains_key(&j)); - } - - for j in i + 1..1001 { - assert!(m.contains_key(&j)); - } - } - - for i in 1..1001 { - assert!(!m.contains_key(&i)); - } - - for i in 1..1001 { - assert!(m.insert(i, i).is_none()); - } - - // remove backwards - for i in (1..1001).rev() { - assert!(m.remove(&i).is_some()); - - for j in i..1001 { - assert!(!m.contains_key(&j)); - } - - for j in 1..i { - assert!(m.contains_key(&j)); - } - } - } - } - - #[test] - fn test_find_mut() { - let mut m = HashMap::new(); - assert!(m.insert(1, 12).is_none()); - assert!(m.insert(2, 8).is_none()); - assert!(m.insert(5, 14).is_none()); - let new = 100; - match m.get_mut(&5) { - None => panic!(), - Some(x) => *x = new, - } - assert_eq!(m.get(&5), Some(&new)); - } - - #[test] - fn test_insert_overwrite() { - let mut m = HashMap::new(); - assert!(m.insert(1, 2).is_none()); - assert_eq!(*m.get(&1).unwrap(), 2); - assert!(!m.insert(1, 3).is_none()); - assert_eq!(*m.get(&1).unwrap(), 3); - } - - #[test] - fn test_insert_conflicts() { - let mut m = HashMap::with_capacity(4); - assert!(m.insert(1, 2).is_none()); - assert!(m.insert(5, 3).is_none()); - assert!(m.insert(9, 4).is_none()); - assert_eq!(*m.get(&9).unwrap(), 4); - assert_eq!(*m.get(&5).unwrap(), 3); - assert_eq!(*m.get(&1).unwrap(), 2); - } - - #[test] - fn test_conflict_remove() { - let mut m = HashMap::with_capacity(4); - assert!(m.insert(1, 2).is_none()); - assert_eq!(*m.get(&1).unwrap(), 2); - assert!(m.insert(5, 3).is_none()); - assert_eq!(*m.get(&1).unwrap(), 2); - assert_eq!(*m.get(&5).unwrap(), 3); - assert!(m.insert(9, 4).is_none()); - assert_eq!(*m.get(&1).unwrap(), 2); - assert_eq!(*m.get(&5).unwrap(), 3); - assert_eq!(*m.get(&9).unwrap(), 4); - 
assert!(m.remove(&1).is_some()); - assert_eq!(*m.get(&9).unwrap(), 4); - assert_eq!(*m.get(&5).unwrap(), 3); - } - - #[test] - fn test_is_empty() { - let mut m = HashMap::with_capacity(4); - assert!(m.insert(1, 2).is_none()); - assert!(!m.is_empty()); - assert!(m.remove(&1).is_some()); - assert!(m.is_empty()); - } - - #[test] - fn test_pop() { - let mut m = HashMap::new(); - m.insert(1, 2); - assert_eq!(m.remove(&1), Some(2)); - assert_eq!(m.remove(&1), None); - } - - #[test] - fn test_iterate() { - let mut m = HashMap::with_capacity(4); - for i in 0..32 { - assert!(m.insert(i, i*2).is_none()); - } - assert_eq!(m.len(), 32); - - let mut observed: u32 = 0; - - for (k, v) in &m { - assert_eq!(*v, *k * 2); - observed |= 1 << *k; - } - assert_eq!(observed, 0xFFFF_FFFF); - } - - #[test] - fn test_keys() { - let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')]; - let map: HashMap<_, _> = vec.into_iter().collect(); - let keys: Vec<_> = map.keys().cloned().collect(); - assert_eq!(keys.len(), 3); - assert!(keys.contains(&1)); - assert!(keys.contains(&2)); - assert!(keys.contains(&3)); - } - - #[test] - fn test_values() { - let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')]; - let map: HashMap<_, _> = vec.into_iter().collect(); - let values: Vec<_> = map.values().cloned().collect(); - assert_eq!(values.len(), 3); - assert!(values.contains(&'a')); - assert!(values.contains(&'b')); - assert!(values.contains(&'c')); - } - - #[test] - fn test_values_mut() { - let vec = vec![(1, 1), (2, 2), (3, 3)]; - let mut map: HashMap<_, _> = vec.into_iter().collect(); - for value in map.values_mut() { - *value = (*value) * 2 - } - let values: Vec<_> = map.values().cloned().collect(); - assert_eq!(values.len(), 3); - assert!(values.contains(&2)); - assert!(values.contains(&4)); - assert!(values.contains(&6)); - } - - #[test] - fn test_find() { - let mut m = HashMap::new(); - assert!(m.get(&1).is_none()); - m.insert(1, 2); - match m.get(&1) { - None => panic!(), - Some(v) => assert_eq!(*v, 2), - } - } - - #[test] - fn test_eq() { - let mut m1 = HashMap::new(); - m1.insert(1, 2); - m1.insert(2, 3); - m1.insert(3, 4); - - let mut m2 = HashMap::new(); - m2.insert(1, 2); - m2.insert(2, 3); - - assert!(m1 != m2); - - m2.insert(3, 4); - - assert_eq!(m1, m2); - } - - #[test] - fn test_show() { - let mut map = HashMap::new(); - let empty: HashMap = HashMap::new(); - - map.insert(1, 2); - map.insert(3, 4); - - let map_str = format!("{:?}", map); - - assert!(map_str == "{1: 2, 3: 4}" || - map_str == "{3: 4, 1: 2}"); - assert_eq!(format!("{:?}", empty), "{}"); - } - - #[test] - fn test_expand() { - let mut m = HashMap::new(); - - assert_eq!(m.len(), 0); - assert!(m.is_empty()); - - let mut i = 0; - let old_raw_cap = m.raw_capacity(); - while old_raw_cap == m.raw_capacity() { - m.insert(i, i); - i += 1; - } - - assert_eq!(m.len(), i); - assert!(!m.is_empty()); - } - - #[test] - fn test_behavior_resize_policy() { - let mut m = HashMap::new(); - - assert_eq!(m.len(), 0); - assert_eq!(m.raw_capacity(), 0); - assert!(m.is_empty()); - - m.insert(0, 0); - m.remove(&0); - assert!(m.is_empty()); - let initial_raw_cap = m.raw_capacity(); - m.reserve(initial_raw_cap); - let raw_cap = m.raw_capacity(); - - assert_eq!(raw_cap, initial_raw_cap * 2); - - let mut i = 0; - for _ in 0..raw_cap * 3 / 4 { - m.insert(i, i); - i += 1; - } - // three quarters full - - assert_eq!(m.len(), i); - assert_eq!(m.raw_capacity(), raw_cap); - - for _ in 0..raw_cap / 4 { - m.insert(i, i); - i += 1; - } - // half full - - let new_raw_cap = m.raw_capacity(); - 
assert_eq!(new_raw_cap, raw_cap * 2); - - for _ in 0..raw_cap / 2 - 1 { - i -= 1; - m.remove(&i); - assert_eq!(m.raw_capacity(), new_raw_cap); - } - // A little more than one quarter full. - m.shrink_to_fit(); - assert_eq!(m.raw_capacity(), raw_cap); - // again, a little more than half full - for _ in 0..raw_cap / 2 - 1 { - i -= 1; - m.remove(&i); - } - m.shrink_to_fit(); - - assert_eq!(m.len(), i); - assert!(!m.is_empty()); - assert_eq!(m.raw_capacity(), initial_raw_cap); - } - - #[test] - fn test_reserve_shrink_to_fit() { - let mut m = HashMap::new(); - m.insert(0, 0); - m.remove(&0); - assert!(m.capacity() >= m.len()); - for i in 0..128 { - m.insert(i, i); - } - m.reserve(256); - - let usable_cap = m.capacity(); - for i in 128..(128 + 256) { - m.insert(i, i); - assert_eq!(m.capacity(), usable_cap); - } - - for i in 100..(128 + 256) { - assert_eq!(m.remove(&i), Some(i)); - } - m.shrink_to_fit(); - - assert_eq!(m.len(), 100); - assert!(!m.is_empty()); - assert!(m.capacity() >= m.len()); - - for i in 0..100 { - assert_eq!(m.remove(&i), Some(i)); - } - m.shrink_to_fit(); - m.insert(0, 0); - - assert_eq!(m.len(), 1); - assert!(m.capacity() >= m.len()); - assert_eq!(m.remove(&0), Some(0)); - } - - #[test] - fn test_from_iter() { - let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; - - let map: HashMap<_, _> = xs.iter().cloned().collect(); - - for &(k, v) in &xs { - assert_eq!(map.get(&k), Some(&v)); - } - } - - #[test] - fn test_size_hint() { - let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; - - let map: HashMap<_, _> = xs.iter().cloned().collect(); - - let mut iter = map.iter(); - - for _ in iter.by_ref().take(3) {} - - assert_eq!(iter.size_hint(), (3, Some(3))); - } - - #[test] - fn test_iter_len() { - let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; - - let map: HashMap<_, _> = xs.iter().cloned().collect(); - - let mut iter = map.iter(); - - for _ in iter.by_ref().take(3) {} - - assert_eq!(iter.len(), 3); - } - - #[test] - fn test_mut_size_hint() { - let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; - - let mut map: HashMap<_, _> = xs.iter().cloned().collect(); - - let mut iter = map.iter_mut(); - - for _ in iter.by_ref().take(3) {} - - assert_eq!(iter.size_hint(), (3, Some(3))); - } - - #[test] - fn test_iter_mut_len() { - let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; - - let mut map: HashMap<_, _> = xs.iter().cloned().collect(); - - let mut iter = map.iter_mut(); - - for _ in iter.by_ref().take(3) {} - - assert_eq!(iter.len(), 3); - } - - #[test] - fn test_index() { - let mut map = HashMap::new(); - - map.insert(1, 2); - map.insert(2, 1); - map.insert(3, 4); - - assert_eq!(map[&2], 1); - } - - #[test] - #[should_panic] - fn test_index_nonexistent() { - let mut map = HashMap::new(); - - map.insert(1, 2); - map.insert(2, 1); - map.insert(3, 4); - - map[&4]; - } - - #[test] - fn test_entry() { - let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)]; - - let mut map: HashMap<_, _> = xs.iter().cloned().collect(); - - // Existing key (insert) - match map.entry(1) { - Vacant(_) => unreachable!(), - Occupied(mut view) => { - assert_eq!(view.get(), &10); - assert_eq!(view.insert(100), 10); - } - } - assert_eq!(map.get(&1).unwrap(), &100); - assert_eq!(map.len(), 6); - - - // Existing key (update) - match map.entry(2) { - Vacant(_) => unreachable!(), - Occupied(mut view) => { - let v = view.get_mut(); - let new_v = (*v) * 10; - *v = new_v; - } - } - assert_eq!(map.get(&2).unwrap(), &200); - assert_eq!(map.len(), 6); - - // Existing key 
(take) - match map.entry(3) { - Vacant(_) => unreachable!(), - Occupied(view) => { - assert_eq!(view.remove(), 30); - } - } - assert_eq!(map.get(&3), None); - assert_eq!(map.len(), 5); - - - // Inexistent key (insert) - match map.entry(10) { - Occupied(_) => unreachable!(), - Vacant(view) => { - assert_eq!(*view.insert(1000), 1000); - } - } - assert_eq!(map.get(&10).unwrap(), &1000); - assert_eq!(map.len(), 6); - } - - #[test] - fn test_entry_take_doesnt_corrupt() { - #![allow(deprecated)] //rand - // Test for #19292 - fn check(m: &HashMap) { - for k in m.keys() { - assert!(m.contains_key(k), - "{} is in keys() but not in the map?", k); - } - } - - let mut m = HashMap::new(); - let mut rng = thread_rng(); - - // Populate the map with some items. - for _ in 0..50 { - let x = rng.gen_range(-10, 10); - m.insert(x, ()); - } - - for i in 0..1000 { - let x = rng.gen_range(-10, 10); - match m.entry(x) { - Vacant(_) => {} - Occupied(e) => { - println!("{}: remove {}", i, x); - e.remove(); - } - } - - check(&m); - } - } - - #[test] - fn test_extend_ref() { - let mut a = HashMap::new(); - a.insert(1, "one"); - let mut b = HashMap::new(); - b.insert(2, "two"); - b.insert(3, "three"); - - a.extend(&b); - - assert_eq!(a.len(), 3); - assert_eq!(a[&1], "one"); - assert_eq!(a[&2], "two"); - assert_eq!(a[&3], "three"); - } - - #[test] - fn test_capacity_not_less_than_len() { - let mut a = HashMap::new(); - let mut item = 0; - - for _ in 0..116 { - a.insert(item, 0); - item += 1; - } - - assert!(a.capacity() > a.len()); - - let free = a.capacity() - a.len(); - for _ in 0..free { - a.insert(item, 0); - item += 1; - } - - assert_eq!(a.len(), a.capacity()); - - // Insert at capacity should cause allocation. - a.insert(item, 0); - assert!(a.capacity() > a.len()); - } - - #[test] - fn test_occupied_entry_key() { - let mut a = HashMap::new(); - let key = "hello there"; - let value = "value goes here"; - assert!(a.is_empty()); - a.insert(key.clone(), value.clone()); - assert_eq!(a.len(), 1); - assert_eq!(a[key], value); - - match a.entry(key.clone()) { - Vacant(_) => panic!(), - Occupied(e) => assert_eq!(key, *e.key()), - } - assert_eq!(a.len(), 1); - assert_eq!(a[key], value); - } - - #[test] - fn test_vacant_entry_key() { - let mut a = HashMap::new(); - let key = "hello there"; - let value = "value goes here"; - - assert!(a.is_empty()); - match a.entry(key.clone()) { - Occupied(_) => panic!(), - Vacant(e) => { - assert_eq!(key, *e.key()); - e.insert(value.clone()); - } - } - assert_eq!(a.len(), 1); - assert_eq!(a[key], value); - } - - #[test] - fn test_retain() { - let mut map: HashMap = (0..100).map(|x|(x, x*10)).collect(); - - map.retain(|&k, _| k % 2 == 0); - assert_eq!(map.len(), 50); - assert_eq!(map[&2], 20); - assert_eq!(map[&4], 40); - assert_eq!(map[&6], 60); - } - - #[test] - fn test_adaptive() { - const TEST_LEN: usize = 5000; - // by cloning we get maps with the same hasher seed - let mut first = HashMap::new(); - let mut second = first.clone(); - first.extend((0..TEST_LEN).map(|i| (i, i))); - second.extend((TEST_LEN..TEST_LEN * 2).map(|i| (i, i))); - - for (&k, &v) in &second { - let prev_cap = first.capacity(); - let expect_grow = first.len() == prev_cap; - first.insert(k, v); - if !expect_grow && first.capacity() != prev_cap { - return; - } - } - panic!("Adaptive early resize failed"); - } -} diff --git a/collector/compile-benchmarks/style-servo/components/hashglobe/src/hash_set.rs b/collector/compile-benchmarks/style-servo/components/hashglobe/src/hash_set.rs deleted file mode 100644 index 
e21880453..000000000 --- a/collector/compile-benchmarks/style-servo/components/hashglobe/src/hash_set.rs +++ /dev/null @@ -1,1591 +0,0 @@ -// Copyright 2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use std::borrow::Borrow; -use std::fmt; -use std::hash::{Hash, BuildHasher}; -use std::iter::{Chain, FromIterator}; -use std::ops::{BitOr, BitAnd, BitXor, Sub}; - -use super::Recover; -use super::hash_map::{self, HashMap, Keys, RandomState}; - -// Future Optimization (FIXME!) -// ============================= -// -// Iteration over zero sized values is a noop. There is no need -// for `bucket.val` in the case of HashSet. I suppose we would need HKT -// to get rid of it properly. - -/// A hash set implemented as a `HashMap` where the value is `()`. -/// -/// As with the [`HashMap`] type, a `HashSet` requires that the elements -/// implement the [`Eq`] and [`Hash`] traits. This can frequently be achieved by -/// using `#[derive(PartialEq, Eq, Hash)]`. If you implement these yourself, -/// it is important that the following property holds: -/// -/// ```text -/// k1 == k2 -> hash(k1) == hash(k2) -/// ``` -/// -/// In other words, if two keys are equal, their hashes must be equal. -/// -/// -/// It is a logic error for an item to be modified in such a way that the -/// item's hash, as determined by the [`Hash`] trait, or its equality, as -/// determined by the [`Eq`] trait, changes while it is in the set. This is -/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or -/// unsafe code. -/// -/// # Examples -/// -/// ``` -/// use std::collections::HashSet; -/// // Type inference lets us omit an explicit type signature (which -/// // would be `HashSet<&str>` in this example). -/// let mut books = HashSet::new(); -/// -/// // Add some books. -/// books.insert("A Dance With Dragons"); -/// books.insert("To Kill a Mockingbird"); -/// books.insert("The Odyssey"); -/// books.insert("The Great Gatsby"); -/// -/// // Check for a specific one. -/// if !books.contains("The Winds of Winter") { -/// println!("We have {} books, but The Winds of Winter ain't one.", -/// books.len()); -/// } -/// -/// // Remove a book. -/// books.remove("The Odyssey"); -/// -/// // Iterate over everything. -/// for book in &books { -/// println!("{}", book); -/// } -/// ``` -/// -/// The easiest way to use `HashSet` with a custom type is to derive -/// [`Eq`] and [`Hash`]. We must also derive [`PartialEq`], this will in the -/// future be implied by [`Eq`]. -/// -/// ``` -/// use std::collections::HashSet; -/// #[derive(Hash, Eq, PartialEq, Debug)] -/// struct Viking<'a> { -/// name: &'a str, -/// power: usize, -/// } -/// -/// let mut vikings = HashSet::new(); -/// -/// vikings.insert(Viking { name: "Einar", power: 9 }); -/// vikings.insert(Viking { name: "Einar", power: 9 }); -/// vikings.insert(Viking { name: "Olaf", power: 4 }); -/// vikings.insert(Viking { name: "Harald", power: 8 }); -/// -/// // Use derived implementation to print the vikings. 
-/// for x in &vikings { -/// println!("{:?}", x); -/// } -/// ``` -/// -/// A `HashSet` with fixed list of elements can be initialized from an array: -/// -/// ``` -/// use std::collections::HashSet; -/// -/// fn main() { -/// let viking_names: HashSet<&str> = -/// [ "Einar", "Olaf", "Harald" ].iter().cloned().collect(); -/// // use the values stored in the set -/// } -/// ``` -/// -/// [`Cell`]: ../../std/cell/struct.Cell.html -/// [`Eq`]: ../../std/cmp/trait.Eq.html -/// [`Hash`]: ../../std/hash/trait.Hash.html -/// [`HashMap`]: struct.HashMap.html -/// [`PartialEq`]: ../../std/cmp/trait.PartialEq.html -/// [`RefCell`]: ../../std/cell/struct.RefCell.html -#[derive(Clone)] -pub struct HashSet { - map: HashMap, -} - -impl HashSet - where T: Eq + Hash, - S: BuildHasher -{ - /// Creates a new empty hash set which will use the given hasher to hash - /// keys. - /// - /// The hash set is also created with the default initial capacity. - /// - /// Warning: `hasher` is normally randomly generated, and - /// is designed to allow `HashSet`s to be resistant to attacks that - /// cause many collisions and very poor performance. Setting it - /// manually using this function can expose a DoS attack vector. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// use std::collections::hash_map::RandomState; - /// - /// let s = RandomState::new(); - /// let mut set = HashSet::with_hasher(s); - /// set.insert(2); - /// ``` - #[inline] - pub fn with_hasher(hasher: S) -> HashSet { - HashSet { map: HashMap::with_hasher(hasher) } - } - - /// Creates an empty `HashSet` with with the specified capacity, using - /// `hasher` to hash the keys. - /// - /// The hash set will be able to hold at least `capacity` elements without - /// reallocating. If `capacity` is 0, the hash set will not allocate. - /// - /// Warning: `hasher` is normally randomly generated, and - /// is designed to allow `HashSet`s to be resistant to attacks that - /// cause many collisions and very poor performance. Setting it - /// manually using this function can expose a DoS attack vector. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// use std::collections::hash_map::RandomState; - /// - /// let s = RandomState::new(); - /// let mut set = HashSet::with_capacity_and_hasher(10, s); - /// set.insert(1); - /// ``` - #[inline] - pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> HashSet { - HashSet { map: HashMap::with_capacity_and_hasher(capacity, hasher) } - } - - /// Returns a reference to the set's [`BuildHasher`]. - /// - /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// use std::collections::hash_map::RandomState; - /// - /// let hasher = RandomState::new(); - /// let set: HashSet = HashSet::with_hasher(hasher); - /// let hasher: &RandomState = set.hasher(); - /// ``` - pub fn hasher(&self) -> &S { - self.map.hasher() - } - - /// Returns the number of elements the set can hold without reallocating. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// let set: HashSet = HashSet::with_capacity(100); - /// assert!(set.capacity() >= 100); - /// ``` - #[inline] - pub fn capacity(&self) -> usize { - self.map.capacity() - } - - /// Reserves capacity for at least `additional` more elements to be inserted - /// in the `HashSet`. The collection may reserve more space to avoid - /// frequent reallocations. 
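// A minimal sketch of the pattern this file uses -- a set stored as a map from element to
// `()` -- written with std::collections::HashMap. `NaiveSet` is a hypothetical name for
// illustration only; the real HashSet above adds hashers, set operations and iterators on top.
use std::collections::HashMap;
use std::hash::Hash;

struct NaiveSet<T> {
    map: HashMap<T, ()>,
}

impl<T: Eq + Hash> NaiveSet<T> {
    fn new() -> Self {
        NaiveSet { map: HashMap::new() }
    }

    // Mirrors HashSet::insert: true when the value was not already present.
    fn insert(&mut self, value: T) -> bool {
        self.map.insert(value, ()).is_none()
    }

    fn contains(&self, value: &T) -> bool {
        self.map.contains_key(value)
    }

    fn len(&self) -> usize {
        self.map.len()
    }
}

#[test]
fn naive_set_behaves_like_a_set() {
    let mut s = NaiveSet::new();
    assert!(s.insert("a"));
    assert!(!s.insert("a"));
    assert!(s.contains(&"a"));
    assert_eq!(s.len(), 1);
}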
- /// - /// # Panics - /// - /// Panics if the new allocation size overflows `usize`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// let mut set: HashSet = HashSet::new(); - /// set.reserve(10); - /// assert!(set.capacity() >= 10); - /// ``` - pub fn reserve(&mut self, additional: usize) { - self.map.reserve(additional) - } - - /// Shrinks the capacity of the set as much as possible. It will drop - /// down as much as possible while maintaining the internal rules - /// and possibly leaving some space in accordance with the resize policy. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let mut set = HashSet::with_capacity(100); - /// set.insert(1); - /// set.insert(2); - /// assert!(set.capacity() >= 100); - /// set.shrink_to_fit(); - /// assert!(set.capacity() >= 2); - /// ``` - pub fn shrink_to_fit(&mut self) { - self.map.shrink_to_fit() - } - - /// An iterator visiting all elements in arbitrary order. - /// The iterator element type is `&'a T`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// let mut set = HashSet::new(); - /// set.insert("a"); - /// set.insert("b"); - /// - /// // Will print in an arbitrary order. - /// for x in set.iter() { - /// println!("{}", x); - /// } - /// ``` - pub fn iter(&self) -> Iter { - Iter { iter: self.map.keys() } - } - - /// Visits the values representing the difference, - /// i.e. the values that are in `self` but not in `other`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect(); - /// let b: HashSet<_> = [4, 2, 3, 4].iter().cloned().collect(); - /// - /// // Can be seen as `a - b`. - /// for x in a.difference(&b) { - /// println!("{}", x); // Print 1 - /// } - /// - /// let diff: HashSet<_> = a.difference(&b).collect(); - /// assert_eq!(diff, [1].iter().collect()); - /// - /// // Note that difference is not symmetric, - /// // and `b - a` means something else: - /// let diff: HashSet<_> = b.difference(&a).collect(); - /// assert_eq!(diff, [4].iter().collect()); - /// ``` - pub fn difference<'a>(&'a self, other: &'a HashSet) -> Difference<'a, T, S> { - Difference { - iter: self.iter(), - other, - } - } - - /// Visits the values representing the symmetric difference, - /// i.e. the values that are in `self` or in `other` but not in both. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect(); - /// let b: HashSet<_> = [4, 2, 3, 4].iter().cloned().collect(); - /// - /// // Print 1, 4 in arbitrary order. - /// for x in a.symmetric_difference(&b) { - /// println!("{}", x); - /// } - /// - /// let diff1: HashSet<_> = a.symmetric_difference(&b).collect(); - /// let diff2: HashSet<_> = b.symmetric_difference(&a).collect(); - /// - /// assert_eq!(diff1, diff2); - /// assert_eq!(diff1, [1, 4].iter().collect()); - /// ``` - pub fn symmetric_difference<'a>(&'a self, - other: &'a HashSet) - -> SymmetricDifference<'a, T, S> { - SymmetricDifference { iter: self.difference(other).chain(other.difference(self)) } - } - - /// Visits the values representing the intersection, - /// i.e. the values that are both in `self` and `other`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect(); - /// let b: HashSet<_> = [4, 2, 3, 4].iter().cloned().collect(); - /// - /// // Print 2, 3 in arbitrary order. 
- /// for x in a.intersection(&b) { - /// println!("{}", x); - /// } - /// - /// let intersection: HashSet<_> = a.intersection(&b).collect(); - /// assert_eq!(intersection, [2, 3].iter().collect()); - /// ``` - pub fn intersection<'a>(&'a self, other: &'a HashSet) -> Intersection<'a, T, S> { - Intersection { - iter: self.iter(), - other, - } - } - - /// Visits the values representing the union, - /// i.e. all the values in `self` or `other`, without duplicates. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect(); - /// let b: HashSet<_> = [4, 2, 3, 4].iter().cloned().collect(); - /// - /// // Print 1, 2, 3, 4 in arbitrary order. - /// for x in a.union(&b) { - /// println!("{}", x); - /// } - /// - /// let union: HashSet<_> = a.union(&b).collect(); - /// assert_eq!(union, [1, 2, 3, 4].iter().collect()); - /// ``` - pub fn union<'a>(&'a self, other: &'a HashSet) -> Union<'a, T, S> { - Union { iter: self.iter().chain(other.difference(self)) } - } - - /// Returns the number of elements in the set. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let mut v = HashSet::new(); - /// assert_eq!(v.len(), 0); - /// v.insert(1); - /// assert_eq!(v.len(), 1); - /// ``` - pub fn len(&self) -> usize { - self.map.len() - } - - /// Returns true if the set contains no elements. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let mut v = HashSet::new(); - /// assert!(v.is_empty()); - /// v.insert(1); - /// assert!(!v.is_empty()); - /// ``` - pub fn is_empty(&self) -> bool { - self.map.is_empty() - } - - /// Clears the set, returning all elements in an iterator. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let mut set: HashSet<_> = [1, 2, 3].iter().cloned().collect(); - /// assert!(!set.is_empty()); - /// - /// // print 1, 2, 3 in an arbitrary order - /// for i in set.drain() { - /// println!("{}", i); - /// } - /// - /// assert!(set.is_empty()); - /// ``` - #[inline] - pub fn drain(&mut self) -> Drain { - Drain { iter: self.map.drain() } - } - - /// Clears the set, removing all values. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let mut v = HashSet::new(); - /// v.insert(1); - /// v.clear(); - /// assert!(v.is_empty()); - /// ``` - pub fn clear(&mut self) where T: 'static { - self.map.clear() - } - - /// Returns `true` if the set contains a value. - /// - /// The value may be any borrowed form of the set's value type, but - /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for - /// the value type. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let set: HashSet<_> = [1, 2, 3].iter().cloned().collect(); - /// assert_eq!(set.contains(&1), true); - /// assert_eq!(set.contains(&4), false); - /// ``` - /// - /// [`Eq`]: ../../std/cmp/trait.Eq.html - /// [`Hash`]: ../../std/hash/trait.Hash.html - pub fn contains(&self, value: &Q) -> bool - where T: Borrow, - Q: Hash + Eq - { - self.map.contains_key(value) - } - - /// Returns a reference to the value in the set, if any, that is equal to the given value. - /// - /// The value may be any borrowed form of the set's value type, but - /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for - /// the value type. 
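// An illustrative sketch (using std::collections::HashSet, not this fork) of the lazy
// composition described above: symmetric_difference chains the two one-way differences, and
// union chains `self.iter()` with `other.difference(self)` so shared elements show up once.
use std::collections::HashSet;

fn lazy_set_ops_demo() {
    let a: HashSet<i32> = [1, 2, 3].iter().cloned().collect();
    let b: HashSet<i32> = [3, 4, 5].iter().cloned().collect();

    // symmetric difference == (a - b) chained with (b - a)
    let by_hand: HashSet<i32> = a.difference(&b).chain(b.difference(&a)).cloned().collect();
    let by_method: HashSet<i32> = a.symmetric_difference(&b).cloned().collect();
    assert_eq!(by_hand, by_method);

    // union == a's own elements chained with (b - a), so nothing is yielded twice
    let union_by_hand: HashSet<i32> = a.iter().chain(b.difference(&a)).cloned().collect();
    let union_by_method: HashSet<i32> = a.union(&b).cloned().collect();
    assert_eq!(union_by_hand, union_by_method);
}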
- /// - /// [`Eq`]: ../../std/cmp/trait.Eq.html - /// [`Hash`]: ../../std/hash/trait.Hash.html - pub fn get(&self, value: &Q) -> Option<&T> - where T: Borrow, - Q: Hash + Eq - { - Recover::get(&self.map, value) - } - - /// Returns `true` if `self` has no elements in common with `other`. - /// This is equivalent to checking for an empty intersection. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect(); - /// let mut b = HashSet::new(); - /// - /// assert_eq!(a.is_disjoint(&b), true); - /// b.insert(4); - /// assert_eq!(a.is_disjoint(&b), true); - /// b.insert(1); - /// assert_eq!(a.is_disjoint(&b), false); - /// ``` - pub fn is_disjoint(&self, other: &HashSet) -> bool { - self.iter().all(|v| !other.contains(v)) - } - - /// Returns `true` if the set is a subset of another, - /// i.e. `other` contains at least all the values in `self`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let sup: HashSet<_> = [1, 2, 3].iter().cloned().collect(); - /// let mut set = HashSet::new(); - /// - /// assert_eq!(set.is_subset(&sup), true); - /// set.insert(2); - /// assert_eq!(set.is_subset(&sup), true); - /// set.insert(4); - /// assert_eq!(set.is_subset(&sup), false); - /// ``` - pub fn is_subset(&self, other: &HashSet) -> bool { - self.iter().all(|v| other.contains(v)) - } - - /// Returns `true` if the set is a superset of another, - /// i.e. `self` contains at least all the values in `other`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let sub: HashSet<_> = [1, 2].iter().cloned().collect(); - /// let mut set = HashSet::new(); - /// - /// assert_eq!(set.is_superset(&sub), false); - /// - /// set.insert(0); - /// set.insert(1); - /// assert_eq!(set.is_superset(&sub), false); - /// - /// set.insert(2); - /// assert_eq!(set.is_superset(&sub), true); - /// ``` - #[inline] - pub fn is_superset(&self, other: &HashSet) -> bool { - other.is_subset(self) - } - - /// Adds a value to the set. - /// - /// If the set did not have this value present, `true` is returned. - /// - /// If the set did have this value present, `false` is returned. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let mut set = HashSet::new(); - /// - /// assert_eq!(set.insert(2), true); - /// assert_eq!(set.insert(2), false); - /// assert_eq!(set.len(), 1); - /// ``` - pub fn insert(&mut self, value: T) -> bool { - self.map.insert(value, ()).is_none() - } - - /// Adds a value to the set, replacing the existing value, if any, that is equal to the given - /// one. Returns the replaced value. - pub fn replace(&mut self, value: T) -> Option { - Recover::replace(&mut self.map, value) - } - - /// Removes a value from the set. Returns `true` if the value was - /// present in the set. - /// - /// The value may be any borrowed form of the set's value type, but - /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for - /// the value type. 
- /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let mut set = HashSet::new(); - /// - /// set.insert(2); - /// assert_eq!(set.remove(&2), true); - /// assert_eq!(set.remove(&2), false); - /// ``` - /// - /// [`Eq`]: ../../std/cmp/trait.Eq.html - /// [`Hash`]: ../../std/hash/trait.Hash.html - pub fn remove(&mut self, value: &Q) -> bool - where T: Borrow, - Q: Hash + Eq - { - self.map.remove(value).is_some() - } - - /// Removes and returns the value in the set, if any, that is equal to the given one. - /// - /// The value may be any borrowed form of the set's value type, but - /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for - /// the value type. - /// - /// [`Eq`]: ../../std/cmp/trait.Eq.html - /// [`Hash`]: ../../std/hash/trait.Hash.html - pub fn take(&mut self, value: &Q) -> Option - where T: Borrow, - Q: Hash + Eq - { - Recover::take(&mut self.map, value) - } - - /// Retains only the elements specified by the predicate. - /// - /// In other words, remove all elements `e` such that `f(&e)` returns `false`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let xs = [1,2,3,4,5,6]; - /// let mut set: HashSet = xs.iter().cloned().collect(); - /// set.retain(|&k| k % 2 == 0); - /// assert_eq!(set.len(), 3); - /// ``` - pub fn retain(&mut self, mut f: F) - where F: FnMut(&T) -> bool - { - self.map.retain(|k, _| f(k)); - } -} - -impl PartialEq for HashSet - where T: Eq + Hash, - S: BuildHasher -{ - fn eq(&self, other: &HashSet) -> bool { - if self.len() != other.len() { - return false; - } - - self.iter().all(|key| other.contains(key)) - } -} - -impl Eq for HashSet - where T: Eq + Hash, - S: BuildHasher -{ -} - -impl fmt::Debug for HashSet - where T: Eq + Hash + fmt::Debug, - S: BuildHasher -{ - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_set().entries(self.iter()).finish() - } -} - -impl FromIterator for HashSet - where T: Eq + Hash, - S: BuildHasher + Default -{ - fn from_iter>(iter: I) -> HashSet { - let mut set = HashSet::with_hasher(Default::default()); - set.extend(iter); - set - } -} - -impl Extend for HashSet - where T: Eq + Hash, - S: BuildHasher -{ - fn extend>(&mut self, iter: I) { - self.map.extend(iter.into_iter().map(|k| (k, ()))); - } -} - -impl<'a, T, S> Extend<&'a T> for HashSet - where T: 'a + Eq + Hash + Copy, - S: BuildHasher -{ - fn extend>(&mut self, iter: I) { - self.extend(iter.into_iter().cloned()); - } -} - -impl Default for HashSet - where T: Eq + Hash, - S: BuildHasher + Default -{ - /// Creates an empty `HashSet` with the `Default` value for the hasher. - fn default() -> HashSet { - HashSet { map: HashMap::default() } - } -} - -impl<'a, 'b, T, S> BitOr<&'b HashSet> for &'a HashSet - where T: Eq + Hash + Clone, - S: BuildHasher + Default -{ - type Output = HashSet; - - /// Returns the union of `self` and `rhs` as a new `HashSet`. 
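// A hedged sketch of the get / replace / take trio documented above (none of which carries an
// inline example), using std::collections::HashSet. `Tagged` is a hypothetical type whose
// equality and hash ignore the second field, so values that compare equal can differ in data.
use std::collections::HashSet;
use std::hash::{Hash, Hasher};

struct Tagged(&'static str, i32);

impl PartialEq for Tagged {
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}
impl Eq for Tagged {}
impl Hash for Tagged {
    fn hash<H: Hasher>(&self, h: &mut H) {
        self.0.hash(h);
    }
}

fn get_replace_take_demo() {
    let mut s = HashSet::new();
    s.insert(Tagged("a", 1));

    // get returns a reference to the stored value that compares equal to the probe.
    assert_eq!(s.get(&Tagged("a", 99)).map(|t| t.1), Some(1));

    // replace swaps in the new (equal) value and returns the old one.
    assert_eq!(s.replace(Tagged("a", 2)).map(|t| t.1), Some(1));

    // take removes and returns the stored value.
    assert_eq!(s.take(&Tagged("a", 0)).map(|t| t.1), Some(2));
    assert!(s.is_empty());
}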
- /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect(); - /// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect(); - /// - /// let set = &a | &b; - /// - /// let mut i = 0; - /// let expected = [1, 2, 3, 4, 5]; - /// for x in &set { - /// assert!(expected.contains(x)); - /// i += 1; - /// } - /// assert_eq!(i, expected.len()); - /// ``` - fn bitor(self, rhs: &HashSet) -> HashSet { - self.union(rhs).cloned().collect() - } -} - -impl<'a, 'b, T, S> BitAnd<&'b HashSet> for &'a HashSet - where T: Eq + Hash + Clone, - S: BuildHasher + Default -{ - type Output = HashSet; - - /// Returns the intersection of `self` and `rhs` as a new `HashSet`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect(); - /// let b: HashSet<_> = vec![2, 3, 4].into_iter().collect(); - /// - /// let set = &a & &b; - /// - /// let mut i = 0; - /// let expected = [2, 3]; - /// for x in &set { - /// assert!(expected.contains(x)); - /// i += 1; - /// } - /// assert_eq!(i, expected.len()); - /// ``` - fn bitand(self, rhs: &HashSet) -> HashSet { - self.intersection(rhs).cloned().collect() - } -} - -impl<'a, 'b, T, S> BitXor<&'b HashSet> for &'a HashSet - where T: Eq + Hash + Clone, - S: BuildHasher + Default -{ - type Output = HashSet; - - /// Returns the symmetric difference of `self` and `rhs` as a new `HashSet`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect(); - /// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect(); - /// - /// let set = &a ^ &b; - /// - /// let mut i = 0; - /// let expected = [1, 2, 4, 5]; - /// for x in &set { - /// assert!(expected.contains(x)); - /// i += 1; - /// } - /// assert_eq!(i, expected.len()); - /// ``` - fn bitxor(self, rhs: &HashSet) -> HashSet { - self.symmetric_difference(rhs).cloned().collect() - } -} - -impl<'a, 'b, T, S> Sub<&'b HashSet> for &'a HashSet - where T: Eq + Hash + Clone, - S: BuildHasher + Default -{ - type Output = HashSet; - - /// Returns the difference of `self` and `rhs` as a new `HashSet`. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// - /// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect(); - /// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect(); - /// - /// let set = &a - &b; - /// - /// let mut i = 0; - /// let expected = [1, 2]; - /// for x in &set { - /// assert!(expected.contains(x)); - /// i += 1; - /// } - /// assert_eq!(i, expected.len()); - /// ``` - fn sub(self, rhs: &HashSet) -> HashSet { - self.difference(rhs).cloned().collect() - } -} - -/// An iterator over the items of a `HashSet`. -/// -/// This `struct` is created by the [`iter`] method on [`HashSet`]. -/// See its documentation for more. -/// -/// [`HashSet`]: struct.HashSet.html -/// [`iter`]: struct.HashSet.html#method.iter -pub struct Iter<'a, K: 'a> { - iter: Keys<'a, K, ()>, -} - -/// An owning iterator over the items of a `HashSet`. -/// -/// This `struct` is created by the [`into_iter`] method on [`HashSet`][`HashSet`] -/// (provided by the `IntoIterator` trait). See its documentation for more. -/// -/// [`HashSet`]: struct.HashSet.html -/// [`into_iter`]: struct.HashSet.html#method.into_iter -pub struct IntoIter { - iter: hash_map::IntoIter, -} - -/// A draining iterator over the items of a `HashSet`. 
-/// -/// This `struct` is created by the [`drain`] method on [`HashSet`]. -/// See its documentation for more. -/// -/// [`HashSet`]: struct.HashSet.html -/// [`drain`]: struct.HashSet.html#method.drain -pub struct Drain<'a, K: 'static> { - iter: hash_map::Drain<'a, K, ()>, -} - -/// A lazy iterator producing elements in the intersection of `HashSet`s. -/// -/// This `struct` is created by the [`intersection`] method on [`HashSet`]. -/// See its documentation for more. -/// -/// [`HashSet`]: struct.HashSet.html -/// [`intersection`]: struct.HashSet.html#method.intersection -pub struct Intersection<'a, T: 'a, S: 'a> { - // iterator of the first set - iter: Iter<'a, T>, - // the second set - other: &'a HashSet, -} - -/// A lazy iterator producing elements in the difference of `HashSet`s. -/// -/// This `struct` is created by the [`difference`] method on [`HashSet`]. -/// See its documentation for more. -/// -/// [`HashSet`]: struct.HashSet.html -/// [`difference`]: struct.HashSet.html#method.difference -pub struct Difference<'a, T: 'a, S: 'a> { - // iterator of the first set - iter: Iter<'a, T>, - // the second set - other: &'a HashSet, -} - -/// A lazy iterator producing elements in the symmetric difference of `HashSet`s. -/// -/// This `struct` is created by the [`symmetric_difference`] method on -/// [`HashSet`]. See its documentation for more. -/// -/// [`HashSet`]: struct.HashSet.html -/// [`symmetric_difference`]: struct.HashSet.html#method.symmetric_difference -pub struct SymmetricDifference<'a, T: 'a, S: 'a> { - iter: Chain, Difference<'a, T, S>>, -} - -/// A lazy iterator producing elements in the union of `HashSet`s. -/// -/// This `struct` is created by the [`union`] method on [`HashSet`]. -/// See its documentation for more. -/// -/// [`HashSet`]: struct.HashSet.html -/// [`union`]: struct.HashSet.html#method.union -pub struct Union<'a, T: 'a, S: 'a> { - iter: Chain, Difference<'a, T, S>>, -} - -impl<'a, T, S> IntoIterator for &'a HashSet - where T: Eq + Hash, - S: BuildHasher -{ - type Item = &'a T; - type IntoIter = Iter<'a, T>; - - fn into_iter(self) -> Iter<'a, T> { - self.iter() - } -} - -impl IntoIterator for HashSet - where T: Eq + Hash, - S: BuildHasher -{ - type Item = T; - type IntoIter = IntoIter; - - /// Creates a consuming iterator, that is, one that moves each value out - /// of the set in arbitrary order. The set cannot be used after calling - /// this. - /// - /// # Examples - /// - /// ``` - /// use std::collections::HashSet; - /// let mut set = HashSet::new(); - /// set.insert("a".to_string()); - /// set.insert("b".to_string()); - /// - /// // Not possible to collect to a Vec with a regular `.iter()`. - /// let v: Vec = set.into_iter().collect(); - /// - /// // Will print in an arbitrary order. 
- /// for x in &v { - /// println!("{}", x); - /// } - /// ``` - fn into_iter(self) -> IntoIter { - IntoIter { iter: self.map.into_iter() } - } -} - -impl<'a, K> Clone for Iter<'a, K> { - fn clone(&self) -> Iter<'a, K> { - Iter { iter: self.iter.clone() } - } -} -impl<'a, K> Iterator for Iter<'a, K> { - type Item = &'a K; - - fn next(&mut self) -> Option<&'a K> { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} -impl<'a, K> ExactSizeIterator for Iter<'a, K> { - fn len(&self) -> usize { - self.iter.len() - } -} - -impl<'a, K: fmt::Debug> fmt::Debug for Iter<'a, K> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_list().entries(self.clone()).finish() - } -} - -impl Iterator for IntoIter { - type Item = K; - - fn next(&mut self) -> Option { - self.iter.next().map(|(k, _)| k) - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} -impl ExactSizeIterator for IntoIter { - fn len(&self) -> usize { - self.iter.len() - } -} - -impl fmt::Debug for IntoIter { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let entries_iter = self.iter - .inner - .iter() - .map(|(k, _)| k); - f.debug_list().entries(entries_iter).finish() - } -} - -impl<'a, K> Iterator for Drain<'a, K> { - type Item = K; - - fn next(&mut self) -> Option { - self.iter.next().map(|(k, _)| k) - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} -impl<'a, K> ExactSizeIterator for Drain<'a, K> { - fn len(&self) -> usize { - self.iter.len() - } -} - -impl<'a, K: fmt::Debug> fmt::Debug for Drain<'a, K> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let entries_iter = self.iter - .inner - .iter() - .map(|(k, _)| k); - f.debug_list().entries(entries_iter).finish() - } -} - -impl<'a, T, S> Clone for Intersection<'a, T, S> { - fn clone(&self) -> Intersection<'a, T, S> { - Intersection { iter: self.iter.clone(), ..*self } - } -} - -impl<'a, T, S> Iterator for Intersection<'a, T, S> - where T: Eq + Hash, - S: BuildHasher -{ - type Item = &'a T; - - fn next(&mut self) -> Option<&'a T> { - loop { - match self.iter.next() { - None => return None, - Some(elt) => { - if self.other.contains(elt) { - return Some(elt); - } - } - } - } - } - - fn size_hint(&self) -> (usize, Option) { - let (_, upper) = self.iter.size_hint(); - (0, upper) - } -} - -impl<'a, T, S> fmt::Debug for Intersection<'a, T, S> - where T: fmt::Debug + Eq + Hash, - S: BuildHasher -{ - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_list().entries(self.clone()).finish() - } -} - -impl<'a, T, S> Clone for Difference<'a, T, S> { - fn clone(&self) -> Difference<'a, T, S> { - Difference { iter: self.iter.clone(), ..*self } - } -} - -impl<'a, T, S> Iterator for Difference<'a, T, S> - where T: Eq + Hash, - S: BuildHasher -{ - type Item = &'a T; - - fn next(&mut self) -> Option<&'a T> { - loop { - match self.iter.next() { - None => return None, - Some(elt) => { - if !self.other.contains(elt) { - return Some(elt); - } - } - } - } - } - - fn size_hint(&self) -> (usize, Option) { - let (_, upper) = self.iter.size_hint(); - (0, upper) - } -} - -impl<'a, T, S> fmt::Debug for Difference<'a, T, S> - where T: fmt::Debug + Eq + Hash, - S: BuildHasher -{ - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_list().entries(self.clone()).finish() - } -} - -impl<'a, T, S> Clone for SymmetricDifference<'a, T, S> { - fn clone(&self) -> SymmetricDifference<'a, T, S> { - SymmetricDifference { iter: self.iter.clone() } - } -} - -impl<'a, T, S> 
Iterator for SymmetricDifference<'a, T, S> - where T: Eq + Hash, - S: BuildHasher -{ - type Item = &'a T; - - fn next(&mut self) -> Option<&'a T> { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -impl<'a, T, S> fmt::Debug for SymmetricDifference<'a, T, S> - where T: fmt::Debug + Eq + Hash, - S: BuildHasher -{ - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_list().entries(self.clone()).finish() - } -} - -impl<'a, T, S> Clone for Union<'a, T, S> { - fn clone(&self) -> Union<'a, T, S> { - Union { iter: self.iter.clone() } - } -} - -impl<'a, T, S> fmt::Debug for Union<'a, T, S> - where T: fmt::Debug + Eq + Hash, - S: BuildHasher -{ - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_list().entries(self.clone()).finish() - } -} - -impl<'a, T, S> Iterator for Union<'a, T, S> - where T: Eq + Hash, - S: BuildHasher -{ - type Item = &'a T; - - fn next(&mut self) -> Option<&'a T> { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -#[allow(dead_code)] -fn assert_covariance() { - fn set<'new>(v: HashSet<&'static str>) -> HashSet<&'new str> { - v - } - fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> { - v - } - fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> { - v - } - fn difference<'a, 'new>(v: Difference<'a, &'static str, RandomState>) - -> Difference<'a, &'new str, RandomState> { - v - } - fn symmetric_difference<'a, 'new>(v: SymmetricDifference<'a, &'static str, RandomState>) - -> SymmetricDifference<'a, &'new str, RandomState> { - v - } - fn intersection<'a, 'new>(v: Intersection<'a, &'static str, RandomState>) - -> Intersection<'a, &'new str, RandomState> { - v - } - fn union<'a, 'new>(v: Union<'a, &'static str, RandomState>) - -> Union<'a, &'new str, RandomState> { - v - } - fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { - d - } -} - -#[cfg(test)] -mod test_set { - use super::HashSet; - use super::hash_map::RandomState; - - #[test] - fn test_zero_capacities() { - type HS = HashSet; - - let s = HS::new(); - assert_eq!(s.capacity(), 0); - - let s = HS::default(); - assert_eq!(s.capacity(), 0); - - let s = HS::with_hasher(RandomState::new()); - assert_eq!(s.capacity(), 0); - - let s = HS::with_capacity(0); - assert_eq!(s.capacity(), 0); - - let s = HS::with_capacity_and_hasher(0, RandomState::new()); - assert_eq!(s.capacity(), 0); - - let mut s = HS::new(); - s.insert(1); - s.insert(2); - s.remove(&1); - s.remove(&2); - s.shrink_to_fit(); - assert_eq!(s.capacity(), 0); - - let mut s = HS::new(); - s.reserve(0); - assert_eq!(s.capacity(), 0); - } - - #[test] - fn test_disjoint() { - let mut xs = HashSet::new(); - let mut ys = HashSet::new(); - assert!(xs.is_disjoint(&ys)); - assert!(ys.is_disjoint(&xs)); - assert!(xs.insert(5)); - assert!(ys.insert(11)); - assert!(xs.is_disjoint(&ys)); - assert!(ys.is_disjoint(&xs)); - assert!(xs.insert(7)); - assert!(xs.insert(19)); - assert!(xs.insert(4)); - assert!(ys.insert(2)); - assert!(ys.insert(-11)); - assert!(xs.is_disjoint(&ys)); - assert!(ys.is_disjoint(&xs)); - assert!(ys.insert(7)); - assert!(!xs.is_disjoint(&ys)); - assert!(!ys.is_disjoint(&xs)); - } - - #[test] - fn test_subset_and_superset() { - let mut a = HashSet::new(); - assert!(a.insert(0)); - assert!(a.insert(5)); - assert!(a.insert(11)); - assert!(a.insert(7)); - - let mut b = HashSet::new(); - assert!(b.insert(0)); - assert!(b.insert(7)); - assert!(b.insert(19)); - assert!(b.insert(250)); 
- assert!(b.insert(11)); - assert!(b.insert(200)); - - assert!(!a.is_subset(&b)); - assert!(!a.is_superset(&b)); - assert!(!b.is_subset(&a)); - assert!(!b.is_superset(&a)); - - assert!(b.insert(5)); - - assert!(a.is_subset(&b)); - assert!(!a.is_superset(&b)); - assert!(!b.is_subset(&a)); - assert!(b.is_superset(&a)); - } - - #[test] - fn test_iterate() { - let mut a = HashSet::new(); - for i in 0..32 { - assert!(a.insert(i)); - } - let mut observed: u32 = 0; - for k in &a { - observed |= 1 << *k; - } - assert_eq!(observed, 0xFFFF_FFFF); - } - - #[test] - fn test_intersection() { - let mut a = HashSet::new(); - let mut b = HashSet::new(); - - assert!(a.insert(11)); - assert!(a.insert(1)); - assert!(a.insert(3)); - assert!(a.insert(77)); - assert!(a.insert(103)); - assert!(a.insert(5)); - assert!(a.insert(-5)); - - assert!(b.insert(2)); - assert!(b.insert(11)); - assert!(b.insert(77)); - assert!(b.insert(-9)); - assert!(b.insert(-42)); - assert!(b.insert(5)); - assert!(b.insert(3)); - - let mut i = 0; - let expected = [3, 5, 11, 77]; - for x in a.intersection(&b) { - assert!(expected.contains(x)); - i += 1 - } - assert_eq!(i, expected.len()); - } - - #[test] - fn test_difference() { - let mut a = HashSet::new(); - let mut b = HashSet::new(); - - assert!(a.insert(1)); - assert!(a.insert(3)); - assert!(a.insert(5)); - assert!(a.insert(9)); - assert!(a.insert(11)); - - assert!(b.insert(3)); - assert!(b.insert(9)); - - let mut i = 0; - let expected = [1, 5, 11]; - for x in a.difference(&b) { - assert!(expected.contains(x)); - i += 1 - } - assert_eq!(i, expected.len()); - } - - #[test] - fn test_symmetric_difference() { - let mut a = HashSet::new(); - let mut b = HashSet::new(); - - assert!(a.insert(1)); - assert!(a.insert(3)); - assert!(a.insert(5)); - assert!(a.insert(9)); - assert!(a.insert(11)); - - assert!(b.insert(-2)); - assert!(b.insert(3)); - assert!(b.insert(9)); - assert!(b.insert(14)); - assert!(b.insert(22)); - - let mut i = 0; - let expected = [-2, 1, 5, 11, 14, 22]; - for x in a.symmetric_difference(&b) { - assert!(expected.contains(x)); - i += 1 - } - assert_eq!(i, expected.len()); - } - - #[test] - fn test_union() { - let mut a = HashSet::new(); - let mut b = HashSet::new(); - - assert!(a.insert(1)); - assert!(a.insert(3)); - assert!(a.insert(5)); - assert!(a.insert(9)); - assert!(a.insert(11)); - assert!(a.insert(16)); - assert!(a.insert(19)); - assert!(a.insert(24)); - - assert!(b.insert(-2)); - assert!(b.insert(1)); - assert!(b.insert(5)); - assert!(b.insert(9)); - assert!(b.insert(13)); - assert!(b.insert(19)); - - let mut i = 0; - let expected = [-2, 1, 3, 5, 9, 11, 13, 16, 19, 24]; - for x in a.union(&b) { - assert!(expected.contains(x)); - i += 1 - } - assert_eq!(i, expected.len()); - } - - #[test] - fn test_from_iter() { - let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9]; - - let set: HashSet<_> = xs.iter().cloned().collect(); - - for x in &xs { - assert!(set.contains(x)); - } - } - - #[test] - fn test_move_iter() { - let hs = { - let mut hs = HashSet::new(); - - hs.insert('a'); - hs.insert('b'); - - hs - }; - - let v = hs.into_iter().collect::>(); - assert!(v == ['a', 'b'] || v == ['b', 'a']); - } - - #[test] - fn test_eq() { - // These constants once happened to expose a bug in insert(). - // I'm keeping them around to prevent a regression. 
- let mut s1 = HashSet::new(); - - s1.insert(1); - s1.insert(2); - s1.insert(3); - - let mut s2 = HashSet::new(); - - s2.insert(1); - s2.insert(2); - - assert!(s1 != s2); - - s2.insert(3); - - assert_eq!(s1, s2); - } - - #[test] - fn test_show() { - let mut set = HashSet::new(); - let empty = HashSet::::new(); - - set.insert(1); - set.insert(2); - - let set_str = format!("{:?}", set); - - assert!(set_str == "{1, 2}" || set_str == "{2, 1}"); - assert_eq!(format!("{:?}", empty), "{}"); - } - - #[test] - fn test_trivial_drain() { - let mut s = HashSet::::new(); - for _ in s.drain() {} - assert!(s.is_empty()); - drop(s); - - let mut s = HashSet::::new(); - drop(s.drain()); - assert!(s.is_empty()); - } - - #[test] - fn test_drain() { - let mut s: HashSet<_> = (1..100).collect(); - - // try this a bunch of times to make sure we don't screw up internal state. - for _ in 0..20 { - assert_eq!(s.len(), 99); - - { - let mut last_i = 0; - let mut d = s.drain(); - for (i, x) in d.by_ref().take(50).enumerate() { - last_i = i; - assert!(x != 0); - } - assert_eq!(last_i, 49); - } - - for _ in &s { - panic!("s should be empty!"); - } - - // reset to try again. - s.extend(1..100); - } - } - - #[test] - fn test_replace() { - use hash; - - #[derive(Debug)] - struct Foo(&'static str, i32); - - impl PartialEq for Foo { - fn eq(&self, other: &Self) -> bool { - self.0 == other.0 - } - } - - impl Eq for Foo {} - - impl hash::Hash for Foo { - fn hash(&self, h: &mut H) { - self.0.hash(h); - } - } - - let mut s = HashSet::new(); - assert_eq!(s.replace(Foo("a", 1)), None); - assert_eq!(s.len(), 1); - assert_eq!(s.replace(Foo("a", 2)), Some(Foo("a", 1))); - assert_eq!(s.len(), 1); - - let mut it = s.iter(); - assert_eq!(it.next(), Some(&Foo("a", 2))); - assert_eq!(it.next(), None); - } - - #[test] - fn test_extend_ref() { - let mut a = HashSet::new(); - a.insert(1); - - a.extend(&[2, 3, 4]); - - assert_eq!(a.len(), 4); - assert!(a.contains(&1)); - assert!(a.contains(&2)); - assert!(a.contains(&3)); - assert!(a.contains(&4)); - - let mut b = HashSet::new(); - b.insert(5); - b.insert(6); - - a.extend(&b); - - assert_eq!(a.len(), 6); - assert!(a.contains(&1)); - assert!(a.contains(&2)); - assert!(a.contains(&3)); - assert!(a.contains(&4)); - assert!(a.contains(&5)); - assert!(a.contains(&6)); - } - - #[test] - fn test_retain() { - let xs = [1, 2, 3, 4, 5, 6]; - let mut set: HashSet = xs.iter().cloned().collect(); - set.retain(|&k| k % 2 == 0); - assert_eq!(set.len(), 3); - assert!(set.contains(&2)); - assert!(set.contains(&4)); - assert!(set.contains(&6)); - } -} diff --git a/collector/compile-benchmarks/style-servo/components/hashglobe/src/lib.rs b/collector/compile-benchmarks/style-servo/components/hashglobe/src/lib.rs deleted file mode 100644 index 480f3a540..000000000 --- a/collector/compile-benchmarks/style-servo/components/hashglobe/src/lib.rs +++ /dev/null @@ -1,54 +0,0 @@ -// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
- -extern crate heapsize; - -pub mod alloc; -pub mod diagnostic; -pub mod hash_map; -pub mod hash_set; -mod shim; -mod table; - -pub mod fake; - -use std::{error, fmt}; - -trait Recover { - type Key; - - fn get(&self, key: &Q) -> Option<&Self::Key>; - fn take(&mut self, key: &Q) -> Option; - fn replace(&mut self, key: Self::Key) -> Option; -} - -#[derive(Debug)] -pub struct FailedAllocationError { - reason: &'static str, -} - -impl FailedAllocationError { - #[inline] - pub fn new(reason: &'static str) -> Self { - Self { reason } - } -} - -impl error::Error for FailedAllocationError { - fn description(&self) -> &str { - self.reason - } -} - -impl fmt::Display for FailedAllocationError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.reason.fmt(f) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/hashglobe/src/shim.rs b/collector/compile-benchmarks/style-servo/components/hashglobe/src/shim.rs deleted file mode 100644 index 08fbf32b7..000000000 --- a/collector/compile-benchmarks/style-servo/components/hashglobe/src/shim.rs +++ /dev/null @@ -1,60 +0,0 @@ -use std::marker::PhantomData; - -pub struct NonZeroPtr(&'static T); - -impl NonZeroPtr { - pub unsafe fn new_unchecked(ptr: *mut T) -> Self { - NonZeroPtr(&*ptr) - } - pub fn as_ptr(&self) -> *mut T { - self.0 as *const T as *mut T - } -} - -pub struct Unique { - ptr: NonZeroPtr, - _marker: PhantomData, -} - -impl Unique { - pub unsafe fn new_unchecked(ptr: *mut T) -> Self { - Unique { - ptr: NonZeroPtr::new_unchecked(ptr), - _marker: PhantomData, - } - } - pub fn as_ptr(&self) -> *mut T { - self.ptr.as_ptr() - } -} - -unsafe impl Send for Unique { } - -unsafe impl Sync for Unique { } - -pub struct Shared { - ptr: NonZeroPtr, - _marker: PhantomData, - // force it to be !Send/!Sync - _marker2: PhantomData<*const u8>, -} - -impl Shared { - pub unsafe fn new_unchecked(ptr: *mut T) -> Self { - Shared { - ptr: NonZeroPtr::new_unchecked(ptr), - _marker: PhantomData, - _marker2: PhantomData, - } - } - - pub unsafe fn as_mut(&self) -> &mut T { - &mut *self.ptr.as_ptr() - } -} - -impl<'a, T> From<&'a mut T> for Shared { - fn from(reference: &'a mut T) -> Self { - unsafe { Shared::new_unchecked(reference) } - } -} \ No newline at end of file diff --git a/collector/compile-benchmarks/style-servo/components/hashglobe/src/table.rs b/collector/compile-benchmarks/style-servo/components/hashglobe/src/table.rs deleted file mode 100644 index adaf52ee5..000000000 --- a/collector/compile-benchmarks/style-servo/components/hashglobe/src/table.rs +++ /dev/null @@ -1,1282 +0,0 @@ -// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use alloc::{alloc, dealloc}; -use std::cmp; -use std::hash::{BuildHasher, Hash, Hasher}; -use std::marker; -use std::mem::{self, align_of, size_of}; -use std::ops::{Deref, DerefMut}; -use std::ptr; -use shim::{Unique, Shared}; - -use self::BucketState::*; -use FailedAllocationError; - -/// Integer type used for stored hash values. -/// -/// No more than bit_width(usize) bits are needed to select a bucket. -/// -/// The most significant bit is ours to use for tagging `SafeHash`. 
-/// -/// (Even if we could have usize::MAX bytes allocated for buckets, -/// each bucket stores at least a `HashUint`, so there can be no more than -/// usize::MAX / size_of(usize) buckets.) -type HashUint = usize; - -const EMPTY_BUCKET: HashUint = 0; -const EMPTY: usize = 1; - -/// Special `Unique` that uses the lower bit of the pointer -/// to expose a boolean tag. -/// Note: when the pointer is initialized to EMPTY `.ptr()` will return -/// null and the tag functions shouldn't be used. -struct TaggedHashUintPtr(Unique); - -impl TaggedHashUintPtr { - #[inline] - unsafe fn new(ptr: *mut HashUint) -> Self { - assert!(ptr as usize & 1 == 0 || ptr as usize == EMPTY as usize); - TaggedHashUintPtr(Unique::new_unchecked(ptr)) - } - - #[inline] - fn set_tag(&mut self, value: bool) { - let mut usize_ptr = self.0.as_ptr() as usize; - unsafe { - if value { - usize_ptr |= 1; - } else { - usize_ptr &= !1; - } - self.0 = Unique::new_unchecked(usize_ptr as *mut HashUint) - } - } - - #[inline] - fn tag(&self) -> bool { - (self.0.as_ptr() as usize) & 1 == 1 - } - - #[inline] - fn ptr(&self) -> *mut HashUint { - (self.0.as_ptr() as usize & !1) as *mut HashUint - } -} - -/// The raw hashtable, providing safe-ish access to the unzipped and highly -/// optimized arrays of hashes, and key-value pairs. -/// -/// This design is a lot faster than the naive -/// `Vec>`, because we don't pay for the overhead of an -/// option on every element, and we get a generally more cache-aware design. -/// -/// Essential invariants of this structure: -/// -/// - if t.hashes[i] == EMPTY_BUCKET, then `Bucket::at_index(&t, i).raw` -/// points to 'undefined' contents. Don't read from it. This invariant is -/// enforced outside this module with the `EmptyBucket`, `FullBucket`, -/// and `SafeHash` types. -/// -/// - An `EmptyBucket` is only constructed at an index with -/// a hash of EMPTY_BUCKET. -/// -/// - A `FullBucket` is only constructed at an index with a -/// non-EMPTY_BUCKET hash. -/// -/// - A `SafeHash` is only constructed for non-`EMPTY_BUCKET` hash. We get -/// around hashes of zero by changing them to 0x8000_0000_0000_0000, -/// which will likely map to the same bucket, while not being confused -/// with "empty". -/// -/// - Both "arrays represented by pointers" are the same length: -/// `capacity`. This is set at creation and never changes. The arrays -/// are unzipped and are more cache aware (scanning through 8 hashes -/// brings in at most 2 cache lines, since they're all right beside each -/// other). This layout may waste space in padding such as in a map from -/// u64 to u8, but is a more cache conscious layout as the key-value pairs -/// are only very shortly probed and the desired value will be in the same -/// or next cache line. -/// -/// You can kind of think of this module/data structure as a safe wrapper -/// around just the "table" part of the hashtable. It enforces some -/// invariants at the type level and employs some performance trickery, -/// but in general is just a tricked out `Vec>`. -/// -/// The hashtable also exposes a special boolean tag. The tag defaults to false -/// when the RawTable is created and is accessible with the `tag` and `set_tag` -/// functions. -pub struct RawTable { - capacity_mask: usize, - size: usize, - hashes: TaggedHashUintPtr, - bytes_allocated: usize, - - // Because K/V do not appear directly in any of the types in the struct, - // inform rustc that in fact instances of K and V are reachable from here. 
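// [Editorial aside — illustrative sketch, not part of the deleted style-servo
// sources.] The TaggedHashUintPtr above stores a boolean flag in the low bit
// of a pointer whose alignment guarantees that bit is otherwise always zero.
// The bit manipulation in isolation, on plain integers standing in for the
// pointer value:
fn set_tag(ptr_bits: usize, value: bool) -> usize {
    if value { ptr_bits | 1 } else { ptr_bits & !1 }
}
fn tag(ptr_bits: usize) -> bool {
    ptr_bits & 1 == 1
}
fn untagged(ptr_bits: usize) -> usize {
    ptr_bits & !1
}

#[test]
fn low_bit_tagging_round_trips() {
    let p = 0x1000usize; // any even (aligned) address
    assert!(tag(set_tag(p, true)));
    assert_eq!(untagged(set_tag(p, true)), p);
}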
- marker: marker::PhantomData<(K, V)>, -} - -unsafe impl Send for RawTable {} -unsafe impl Sync for RawTable {} - -// An unsafe view of a RawTable bucket -// Valid indexes are within [0..table_capacity) -pub struct RawBucket { - hash_start: *mut HashUint, - // We use *const to ensure covariance with respect to K and V - pair_start: *const (K, V), - idx: usize, - _marker: marker::PhantomData<(K, V)>, -} - -impl Copy for RawBucket {} -impl Clone for RawBucket { - fn clone(&self) -> RawBucket { - *self - } -} - -pub struct Bucket { - raw: RawBucket, - table: M, -} - -impl Copy for Bucket {} -impl Clone for Bucket { - fn clone(&self) -> Bucket { - *self - } -} - -pub struct EmptyBucket { - raw: RawBucket, - table: M, -} - -pub struct FullBucket { - raw: RawBucket, - table: M, -} - -pub type FullBucketMut<'table, K, V> = FullBucket>; - -pub enum BucketState { - Empty(EmptyBucket), - Full(FullBucket), -} - -// A GapThenFull encapsulates the state of two consecutive buckets at once. -// The first bucket, called the gap, is known to be empty. -// The second bucket is full. -pub struct GapThenFull { - gap: EmptyBucket, - full: FullBucket, -} - -/// A hash that is not zero, since we use a hash of zero to represent empty -/// buckets. -#[derive(PartialEq, Copy, Clone, Debug)] -pub struct SafeHash { - hash: HashUint, -} - -impl SafeHash { - /// Peek at the hash value, which is guaranteed to be non-zero. - #[inline(always)] - pub fn inspect(&self) -> HashUint { - self.hash - } - - #[inline(always)] - pub fn new(hash: u64) -> Self { - // We need to avoid 0 in order to prevent collisions with - // EMPTY_HASH. We can maintain our precious uniform distribution - // of initial indexes by unconditionally setting the MSB, - // effectively reducing the hashes by one bit. - // - // Truncate hash to fit in `HashUint`. - let hash_bits = size_of::() * 8; - SafeHash { hash: (1 << (hash_bits - 1)) | (hash as HashUint) } - } -} - -/// We need to remove hashes of 0. That's reserved for empty buckets. -/// This function wraps up `hash_keyed` to be the only way outside this -/// module to generate a SafeHash. -pub fn make_hash(hash_state: &S, t: &T) -> SafeHash - where T: Hash, - S: BuildHasher -{ - let mut state = hash_state.build_hasher(); - t.hash(&mut state); - SafeHash::new(state.finish()) -} - -// `replace` casts a `*HashUint` to a `*SafeHash`. Since we statically -// ensure that a `FullBucket` points to an index with a non-zero hash, -// and a `SafeHash` is just a `HashUint` with a different name, this is -// safe. -// -// This test ensures that a `SafeHash` really IS the same size as a -// `HashUint`. If you need to change the size of `SafeHash` (and -// consequently made this test fail), `replace` needs to be -// modified to no longer assume this. -#[test] -fn can_alias_safehash_as_hash() { - assert_eq!(size_of::(), size_of::()) -} - -// RawBucket methods are unsafe as it's possible to -// make a RawBucket point to invalid memory using safe code. 
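// [Editorial aside — illustrative sketch, not part of the deleted style-servo
// sources.] SafeHash::new above reserves hash value 0 for EMPTY_BUCKET by
// unconditionally setting the most significant bit of every real hash, giving
// up one bit of entropy in exchange for a value that can never be confused
// with "empty". Assuming a 64-bit HashUint for illustration:
fn to_safe_hash(hash: u64) -> u64 {
    const MSB: u64 = 1 << 63;
    MSB | hash
}

#[test]
fn safe_hash_is_never_zero() {
    assert_ne!(to_safe_hash(0), 0);
    assert_eq!(to_safe_hash(7) & (1 << 63), 1 << 63);
}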
-impl RawBucket { - unsafe fn hash(&self) -> *mut HashUint { - self.hash_start.offset(self.idx as isize) - } - unsafe fn pair(&self) -> *mut (K, V) { - self.pair_start.offset(self.idx as isize) as *mut (K, V) - } - unsafe fn hash_pair(&self) -> (*mut HashUint, *mut (K, V)) { - (self.hash(), self.pair()) - } - - fn assert_bounds(&self, bytes_allocated: usize, size: Option) { - let base = self.hash_start as *mut u8; - let (h, p) = unsafe { self.hash_pair() }; - assert!((h as *mut u8) < (p as *mut u8), "HashMap Corruption - hash offset not below pair offset"); - let end = unsafe { p.offset(1) } as *mut u8; - assert!(end > base, "HashMap Corruption - end={:?}, base={:?}, idx={}, alloc={}, size={:?}", end, base, self.idx, bytes_allocated, size); - assert!( - end <= unsafe { base.offset(bytes_allocated as isize) }, - "HashMap Corruption - end={:?}, base={:?}, idx={}, alloc={}, size={:?}", - end, - base, - self.idx, - bytes_allocated, - size, - ); - } -} - -// Buckets hold references to the table. -impl FullBucket { - /// Borrow a reference to the table. - pub fn table(&self) -> &M { - &self.table - } - /// Borrow a mutable reference to the table. - pub fn table_mut(&mut self) -> &mut M { - &mut self.table - } - /// Move out the reference to the table. - pub fn into_table(self) -> M { - self.table - } - /// Get the raw index. - pub fn index(&self) -> usize { - self.raw.idx - } - /// Get the raw bucket. - pub fn raw(&self) -> RawBucket { - self.raw - } -} - -impl EmptyBucket { - /// Borrow a reference to the table. - pub fn table(&self) -> &M { - &self.table - } - /// Borrow a mutable reference to the table. - pub fn table_mut(&mut self) -> &mut M { - &mut self.table - } -} - -impl Bucket { - /// Get the raw index. - pub fn index(&self) -> usize { - self.raw.idx - } - /// get the table. - pub fn into_table(self) -> M { - self.table - } -} - -impl Deref for FullBucket - where M: Deref> -{ - type Target = RawTable; - fn deref(&self) -> &RawTable { - &self.table - } -} - -/// `Put` is implemented for types which provide access to a table and cannot be invalidated -/// by filling a bucket. A similar implementation for `Take` is possible. -pub trait Put { - unsafe fn borrow_table_mut(&mut self) -> &mut RawTable; -} - - -impl<'t, K, V> Put for &'t mut RawTable { - unsafe fn borrow_table_mut(&mut self) -> &mut RawTable { - *self - } -} - -impl Put for Bucket - where M: Put -{ - unsafe fn borrow_table_mut(&mut self) -> &mut RawTable { - self.table.borrow_table_mut() - } -} - -impl Put for FullBucket - where M: Put -{ - unsafe fn borrow_table_mut(&mut self) -> &mut RawTable { - self.table.borrow_table_mut() - } -} - -impl>> Bucket { - pub fn new(table: M, hash: SafeHash) -> Bucket { - Bucket::at_index(table, hash.inspect() as usize) - } - - pub fn new_from(r: RawBucket, t: M) - -> Bucket - { - Bucket { - raw: r, - table: t, - } - } - - pub fn at_index(table: M, ib_index: usize) -> Bucket { - // if capacity is 0, then the RawBucket will be populated with bogus pointers. - // This is an uncommon case though, so avoid it in release builds. - assert!(table.capacity() > 0, "HashMap Corruption - Table should have capacity at this point"); - let ib_index = ib_index & table.capacity_mask; - Bucket { - raw: table.raw_bucket_at(ib_index), - table, - } - } - - pub fn first(table: M) -> Bucket { - Bucket { - raw: table.raw_bucket_at(0), - table, - } - } - - // "So a few of the first shall be last: for many be called, - // but few chosen." 
- // - // We'll most likely encounter a few buckets at the beginning that - // have their initial buckets near the end of the table. They were - // placed at the beginning as the probe wrapped around the table - // during insertion. We must skip forward to a bucket that won't - // get reinserted too early and won't unfairly steal others spot. - // This eliminates the need for robin hood. - pub fn head_bucket(table: M) -> Bucket { - let mut bucket = Bucket::first(table); - - loop { - bucket = match bucket.peek() { - Full(full) => { - if full.displacement() == 0 { - // This bucket occupies its ideal spot. - // It indicates the start of another "cluster". - bucket = full.into_bucket(); - break; - } - // Leaving this bucket in the last cluster for later. - full.into_bucket() - } - Empty(b) => { - // Encountered a hole between clusters. - b.into_bucket() - } - }; - bucket.next(); - } - bucket - } - - /// Reads a bucket at a given index, returning an enum indicating whether - /// it's initialized or not. You need to match on this enum to get - /// the appropriate types to call most of the other functions in - /// this module. - pub fn peek(self) -> BucketState { - match unsafe { *self.raw.hash() } { - EMPTY_BUCKET => { - Empty(EmptyBucket { - raw: self.raw, - table: self.table, - }) - } - _ => { - Full(FullBucket { - raw: self.raw, - table: self.table, - }) - } - } - } - - /// Modifies the bucket in place to make it point to the next slot. - pub fn next(&mut self) { - self.raw.idx = self.raw.idx.wrapping_add(1) & self.table.capacity_mask; - self.raw.assert_bounds(self.table.bytes_allocated, None); - } - - /// Modifies the bucket in place to make it point to the previous slot. - pub fn prev(&mut self) { - self.raw.idx = self.raw.idx.wrapping_sub(1) & self.table.capacity_mask; - self.raw.assert_bounds(self.table.bytes_allocated, None); - } -} - -impl>> EmptyBucket { - #[inline] - pub fn next(self) -> Bucket { - let mut bucket = self.into_bucket(); - bucket.next(); - bucket - } - - #[inline] - pub fn into_bucket(self) -> Bucket { - Bucket { - raw: self.raw, - table: self.table, - } - } - - pub fn gap_peek(self) -> Result, Bucket> { - let gap = EmptyBucket { - raw: self.raw, - table: (), - }; - - match self.next().peek() { - Full(bucket) => { - Ok(GapThenFull { - gap, - full: bucket, - }) - } - Empty(e) => Err(e.into_bucket()), - } - } -} - -impl EmptyBucket - where M: Put -{ - /// Puts given key and value pair, along with the key's hash, - /// into this bucket in the hashtable. Note how `self` is 'moved' into - /// this function, because this slot will no longer be empty when - /// we return! A `FullBucket` is returned for later use, pointing to - /// the newly-filled slot in the hashtable. - /// - /// Use `make_hash` to construct a `SafeHash` to pass to this function. - pub fn put(mut self, hash: SafeHash, key: K, value: V) -> FullBucket { - unsafe { - *self.raw.hash() = hash.inspect(); - ptr::write(self.raw.pair(), (key, value)); - - self.table.borrow_table_mut().size += 1; - } - - FullBucket { - raw: self.raw, - table: self.table, - } - } -} - -impl>> FullBucket { - #[inline] - pub fn next(self) -> Bucket { - let mut bucket = self.into_bucket(); - bucket.next(); - bucket - } - - #[inline] - pub fn into_bucket(self) -> Bucket { - Bucket { - raw: self.raw, - table: self.table, - } - } - - /// Duplicates the current position. This can be useful for operations - /// on two or more buckets. 
- pub fn stash(self) -> FullBucket { - FullBucket { - raw: self.raw, - table: self, - } - } - - /// Get the distance between this bucket and the 'ideal' location - /// as determined by the key's hash stored in it. - /// - /// In the cited blog posts above, this is called the "distance to - /// initial bucket", or DIB. Also known as "probe count". - pub fn displacement(&self) -> usize { - // Calculates the distance one has to travel when going from - // `hash mod capacity` onwards to `idx mod capacity`, wrapping around - // if the destination is not reached before the end of the table. - (self.raw.idx.wrapping_sub(self.hash().inspect() as usize)) & self.table.capacity_mask - } - - #[inline] - pub fn hash(&self) -> SafeHash { - unsafe { SafeHash { hash: *self.raw.hash() } } - } - - /// Gets references to the key and value at a given index. - pub fn read(&self) -> (&K, &V) { - unsafe { - let pair_ptr = self.raw.pair(); - (&(*pair_ptr).0, &(*pair_ptr).1) - } - } -} - -// We take a mutable reference to the table instead of accepting anything that -// implements `DerefMut` to prevent fn `take` from being called on `stash`ed -// buckets. -impl<'t, K, V> FullBucket> { - /// Removes this bucket's key and value from the hashtable. - /// - /// This works similarly to `put`, building an `EmptyBucket` out of the - /// taken bucket. - pub fn take(self) -> (EmptyBucket>, K, V) { - self.table.size = self.table.size.checked_sub(1).unwrap(); - - unsafe { - *self.raw.hash() = EMPTY_BUCKET; - let (k, v) = ptr::read(self.raw.pair()); - (EmptyBucket { - raw: self.raw, - table: self.table, - }, - k, - v) - } - } -} - -// This use of `Put` is misleading and restrictive, but safe and sufficient for our use cases -// where `M` is a full bucket or table reference type with mutable access to the table. -impl FullBucket - where M: Put -{ - pub fn replace(&mut self, h: SafeHash, k: K, v: V) -> (SafeHash, K, V) { - unsafe { - let old_hash = ptr::replace(self.raw.hash() as *mut SafeHash, h); - let (old_key, old_val) = ptr::replace(self.raw.pair(), (k, v)); - - (old_hash, old_key, old_val) - } - } -} - -impl FullBucket - where M: Deref> + DerefMut -{ - /// Gets mutable references to the key and value at a given index. - pub fn read_mut(&mut self) -> (&mut K, &mut V) { - unsafe { - let pair_ptr = self.raw.pair(); - (&mut (*pair_ptr).0, &mut (*pair_ptr).1) - } - } -} - -impl<'t, K, V, M> FullBucket - where M: Deref> + 't -{ - /// Exchange a bucket state for immutable references into the table. - /// Because the underlying reference to the table is also consumed, - /// no further changes to the structure of the table are possible; - /// in exchange for this, the returned references have a longer lifetime - /// than the references returned by `read()`. - pub fn into_refs(self) -> (&'t K, &'t V) { - unsafe { - let pair_ptr = self.raw.pair(); - (&(*pair_ptr).0, &(*pair_ptr).1) - } - } -} - -impl<'t, K, V, M> FullBucket - where M: Deref> + DerefMut + 't -{ - /// This works similarly to `into_refs`, exchanging a bucket state - /// for mutable references into the table. 
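// [Editorial aside — illustrative sketch, not part of the deleted style-servo
// sources.] The probe-count arithmetic used by displacement() above, in
// isolation: with a power-of-two capacity, masking by `capacity - 1` both
// wraps indices and turns the wrapping subtraction into the distance from the
// entry's ideal bucket.
fn displacement(idx: usize, ideal: usize, capacity_mask: usize) -> usize {
    idx.wrapping_sub(ideal) & capacity_mask
}

#[test]
fn displacement_wraps_around_the_table() {
    // Capacity 8 (mask 7): an entry whose ideal slot is 6 but that was probed
    // into slot 1 travelled 3 buckets: 6 -> 7 -> 0 -> 1.
    assert_eq!(displacement(1, 6, 7), 3);
}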
- pub fn into_mut_refs(self) -> (&'t mut K, &'t mut V) { - unsafe { - let pair_ptr = self.raw.pair(); - (&mut (*pair_ptr).0, &mut (*pair_ptr).1) - } - } -} - -impl GapThenFull - where M: Deref> -{ - #[inline] - pub fn full(&self) -> &FullBucket { - &self.full - } - - pub fn into_table(self) -> M { - self.full.into_table() - } - - pub fn shift(mut self) -> Result, Bucket> { - unsafe { - let (gap_hash, gap_pair) = self.gap.raw.hash_pair(); - let (full_hash, full_pair) = self.full.raw.hash_pair(); - *gap_hash = mem::replace(&mut *full_hash, EMPTY_BUCKET); - ptr::copy_nonoverlapping(full_pair, gap_pair, 1); - } - - let FullBucket { raw: prev_raw, .. } = self.full; - - match self.full.next().peek() { - Full(bucket) => { - self.gap.raw = prev_raw; - - self.full = bucket; - - Ok(self) - } - Empty(b) => Err(b.into_bucket()), - } - } -} - - -/// Rounds up to a multiple of a power of two. Returns the closest multiple -/// of `target_alignment` that is higher or equal to `unrounded`. -/// -/// # Panics -/// -/// Panics if `target_alignment` is not a power of two. -#[inline] -fn round_up_to_next(unrounded: usize, target_alignment: usize) -> usize { - assert!(target_alignment.is_power_of_two(), "HashMap Corruption - alignment not power of two"); - (unrounded + target_alignment - 1) & !(target_alignment - 1) -} - -#[test] -fn test_rounding() { - assert_eq!(round_up_to_next(0, 4), 0); - assert_eq!(round_up_to_next(1, 4), 4); - assert_eq!(round_up_to_next(2, 4), 4); - assert_eq!(round_up_to_next(3, 4), 4); - assert_eq!(round_up_to_next(4, 4), 4); - assert_eq!(round_up_to_next(5, 4), 8); -} - -// Returns a tuple of (pairs_offset, end_of_pairs_offset), -// from the start of a mallocated array. -#[inline] -fn calculate_offsets(hashes_size: usize, - pairs_size: usize, - pairs_align: usize) - -> (usize, usize, bool) { - let pairs_offset = round_up_to_next(hashes_size, pairs_align); - let (end_of_pairs, oflo) = pairs_offset.overflowing_add(pairs_size); - - (pairs_offset, end_of_pairs, oflo) -} - -// Returns a tuple of (minimum required malloc alignment, hash_offset, -// array_size), from the start of a mallocated array. -fn calculate_allocation(hash_size: usize, - hash_align: usize, - pairs_size: usize, - pairs_align: usize) - -> (usize, usize, usize, bool) { - let hash_offset = 0; - let (_, end_of_pairs, oflo) = calculate_offsets(hash_size, pairs_size, pairs_align); - - let align = cmp::max(hash_align, pairs_align); - - (align, hash_offset, end_of_pairs, oflo) -} - -#[test] -fn test_offset_calculation() { - assert_eq!(calculate_allocation(128, 8, 16, 8), (8, 0, 144, false)); - assert_eq!(calculate_allocation(3, 1, 2, 1), (1, 0, 5, false)); - assert_eq!(calculate_allocation(6, 2, 12, 4), (4, 0, 20, false)); - assert_eq!(calculate_offsets(128, 15, 4), (128, 143, false)); - assert_eq!(calculate_offsets(3, 2, 4), (4, 6, false)); - assert_eq!(calculate_offsets(6, 12, 4), (8, 20, false)); -} - -impl RawTable { - unsafe fn new_uninitialized(capacity: usize) -> RawTable { - extern crate libc; - if let Ok(table) = Self::try_new_uninitialized(capacity) { - table - } else { - libc::abort(); - } - } - - /// Does not initialize the buckets. The caller should ensure they, - /// at the very least, set every hash to EMPTY_BUCKET. 
- unsafe fn try_new_uninitialized(capacity: usize) -> Result, FailedAllocationError> { - if capacity == 0 { - return Ok(RawTable { - size: 0, - capacity_mask: capacity.wrapping_sub(1), - hashes: TaggedHashUintPtr::new(EMPTY as *mut HashUint), - bytes_allocated: 0, - marker: marker::PhantomData, - }); - } - assert!(capacity.is_power_of_two(), "HashMap Corruption - capacity not power of two"); - - // No need for `checked_mul` before a more restrictive check performed - // later in this method. - let hashes_size = capacity.wrapping_mul(size_of::()); - let pairs_size = capacity.wrapping_mul(size_of::<(K, V)>()); - - // Allocating hashmaps is a little tricky. We need to allocate two - // arrays, but since we know their sizes and alignments up front, - // we just allocate a single array, and then have the subarrays - // point into it. - // - // This is great in theory, but in practice getting the alignment - // right is a little subtle. Therefore, calculating offsets has been - // factored out into a different function. - let (alignment, hash_offset, size, oflo) = calculate_allocation(hashes_size, - align_of::(), - pairs_size, - align_of::<(K, V)>()); - - if oflo { - return Err(FailedAllocationError { reason: "capacity overflow when allocating RawTable" }); - } - - // One check for overflow that covers calculation and rounding of size. - let size_of_bucket = size_of::().checked_add(size_of::<(K, V)>()).unwrap(); - - let cap_bytes = capacity.checked_mul(size_of_bucket); - - if let Some(cap_bytes) = cap_bytes { - if size < cap_bytes { - return Err(FailedAllocationError { reason: "capacity overflow when allocating RawTable" }); - } - } else { - - return Err(FailedAllocationError { reason: "capacity overflow when allocating RawTable" }); - } - - - - // FORK NOTE: Uses alloc shim instead of Heap.alloc - let buffer: *mut u8 = alloc(size, alignment); - - if buffer.is_null() { - - return Err(FailedAllocationError { reason: "out of memory when allocating RawTable" }); - } - - // FORK NOTE: poison the entire buffer rather than leaving it uninitialized. - ptr::write_bytes(buffer, 0xe7, size); - - let hashes = buffer.offset(hash_offset as isize) as *mut HashUint; - assert!(hashes as *mut u8 == buffer, "HashMap Corruption - Nonzero hash_offset"); - - Ok(RawTable { - capacity_mask: capacity.wrapping_sub(1), - size: 0, - hashes: TaggedHashUintPtr::new(hashes), - bytes_allocated: size, - marker: marker::PhantomData, - }) - } - - fn raw_bucket_at(&self, index: usize) -> RawBucket { - self.verify(); - let hashes_size = self.capacity() * size_of::(); - let pairs_size = self.capacity() * size_of::<(K, V)>(); - - let (pairs_offset, _, oflo) = - calculate_offsets(hashes_size, pairs_size, align_of::<(K, V)>()); - assert!(!oflo, "HashMap Corruption - capacity overflow"); - assert!(pairs_offset as isize > 0, "HashMap Corruption - pairs offset={}", pairs_offset); - assert!(index as isize >= 0, "HashMap Corruption - index={}", index); - assert!(index < self.capacity(), "HashMap Corruption - index={}", index); - - let buffer = self.hashes.ptr() as *mut u8; - let bucket = unsafe { - RawBucket { - hash_start: buffer as *mut HashUint, - pair_start: buffer.offset(pairs_offset as isize) as *const (K, V), - idx: index, - _marker: marker::PhantomData, - } - }; - - bucket.assert_bounds(self.bytes_allocated, Some(self.size)); - bucket - } - - /// Returns a raw pointer to the table's buffer. 
- #[inline] - pub fn raw_buffer(&self) -> *const u8 { - self.hashes.ptr() as *const u8 - } - - /// Verify that the table metadata is internally consistent. - #[inline] - pub fn verify(&self) { - assert!( - self.capacity() == 0 || self.capacity().is_power_of_two(), - "HashMap Corruption: mask={}, sz={}, alloc={}", self.capacity_mask, self.size, self.bytes_allocated, - ); - assert_eq!( - self.capacity() * (size_of::() + size_of::<(K, V)>()), - self.bytes_allocated, - "HashMap Corruption: mask={}, sz={}, alloc={}", self.capacity_mask, self.size, self.bytes_allocated, - ); - } - - /// Creates a new raw table from a given capacity. All buckets are - /// initially empty. - pub fn new(capacity: usize) -> Result, FailedAllocationError> { - unsafe { - let ret = RawTable::try_new_uninitialized(capacity)?; - ptr::write_bytes(ret.hashes.ptr(), 0, capacity); - Ok(ret) - } - } - - /// The hashtable's capacity, similar to a vector's. - pub fn capacity(&self) -> usize { - self.capacity_mask.wrapping_add(1) - } - - /// The number of elements ever `put` in the hashtable, minus the number - /// of elements ever `take`n. - pub fn size(&self) -> usize { - self.size - } - - fn raw_buckets(&self) -> RawBuckets { - RawBuckets { - raw: if self.capacity() == 0 { None } else { Some(self.raw_bucket_at(0)) }, - elems_left: self.size, - bytes_allocated: self.bytes_allocated, - marker: marker::PhantomData, - } - } - - pub fn iter(&self) -> Iter { - Iter { - iter: self.raw_buckets(), - } - } - - pub fn iter_mut(&mut self) -> IterMut { - IterMut { - iter: self.raw_buckets(), - _marker: marker::PhantomData, - } - } - - pub fn into_iter(self) -> IntoIter { - let RawBuckets { raw, elems_left, bytes_allocated, .. } = self.raw_buckets(); - // Replace the marker regardless of lifetime bounds on parameters. - IntoIter { - iter: RawBuckets { - raw, - elems_left, - bytes_allocated, - marker: marker::PhantomData, - }, - table: self, - } - } - - pub fn drain(&mut self) -> Drain { - let RawBuckets { raw, elems_left, bytes_allocated, .. } = self.raw_buckets(); - // Replace the marker regardless of lifetime bounds on parameters. - Drain { - iter: RawBuckets { - raw, - elems_left, - bytes_allocated, - marker: marker::PhantomData, - }, - table: Shared::from(self), - marker: marker::PhantomData, - } - } - - /// Drops buckets in reverse order. It leaves the table in an inconsistent - /// state and should only be used for dropping the table's remaining - /// entries. It's used in the implementation of Drop. - unsafe fn rev_drop_buckets(&mut self) { - let mut elems_left = self.size; - if elems_left == 0 { - return; - } - let mut raw = self.raw_bucket_at(self.capacity() - 1); - loop { - if *raw.hash() != EMPTY_BUCKET { - ptr::drop_in_place(raw.pair()); - elems_left = elems_left.checked_sub(1).unwrap(); - if elems_left == 0 { - return; - } - } - raw.idx = raw.idx.checked_sub(1).unwrap(); - raw.assert_bounds(self.bytes_allocated, Some(self.size)); - } - } - - /// Set the table tag - pub fn set_tag(&mut self, value: bool) { - self.hashes.set_tag(value) - } - - /// Get the table tag - pub fn tag(&self) -> bool { - self.hashes.tag() - } -} - -/// A raw iterator. The basis for some other iterators in this module. Although -/// this interface is safe, it's not used outside this module. -struct RawBuckets<'a, K, V> { - // We use an Option here to avoid ever constructing a RawBucket for - // invalid memory. 
- raw: Option>, - elems_left: usize, - bytes_allocated: usize, - - // Strictly speaking, this should be &'a (K,V), but that would - // require that K:'a, and we often use RawBuckets<'static...> for - // move iterations, so that messes up a lot of other things. So - // just use `&'a (K,V)` as this is not a publicly exposed type - // anyway. - marker: marker::PhantomData<&'a ()>, -} - -// FIXME(#19839) Remove in favor of `#[derive(Clone)]` -impl<'a, K, V> Clone for RawBuckets<'a, K, V> { - fn clone(&self) -> RawBuckets<'a, K, V> { - RawBuckets { - raw: self.raw, - elems_left: self.elems_left, - bytes_allocated: self.bytes_allocated, - marker: marker::PhantomData, - } - } -} - - -impl<'a, K, V> Iterator for RawBuckets<'a, K, V> { - type Item = RawBucket; - - fn next(&mut self) -> Option> { - if self.elems_left == 0 { - return None; - } - - loop { - unsafe { - let item = self.raw.unwrap(); - if *item.hash() != EMPTY_BUCKET { - self.elems_left = self.elems_left.checked_sub(1).unwrap(); - if self.elems_left != 0 { - self.raw.as_mut().unwrap().idx += 1; - self.raw.as_ref().unwrap().assert_bounds(self.bytes_allocated, None); - } - return Some(item); - } - self.raw.as_mut().unwrap().idx += 1; - self.raw.as_ref().unwrap().assert_bounds(self.bytes_allocated, None); - } - } - } - - fn size_hint(&self) -> (usize, Option) { - (self.elems_left, Some(self.elems_left)) - } -} - -impl<'a, K, V> ExactSizeIterator for RawBuckets<'a, K, V> { - fn len(&self) -> usize { - self.elems_left - } -} - -/// Iterator over shared references to entries in a table. -pub struct Iter<'a, K: 'a, V: 'a> { - iter: RawBuckets<'a, K, V>, -} - -unsafe impl<'a, K: Sync, V: Sync> Sync for Iter<'a, K, V> {} -unsafe impl<'a, K: Sync, V: Sync> Send for Iter<'a, K, V> {} - -// FIXME(#19839) Remove in favor of `#[derive(Clone)]` -impl<'a, K, V> Clone for Iter<'a, K, V> { - fn clone(&self) -> Iter<'a, K, V> { - Iter { - iter: self.iter.clone(), - } - } -} - -/// Iterator over mutable references to entries in a table. -pub struct IterMut<'a, K: 'a, V: 'a> { - iter: RawBuckets<'a, K, V>, - // To ensure invariance with respect to V - _marker: marker::PhantomData<&'a mut V>, -} - -unsafe impl<'a, K: Sync, V: Sync> Sync for IterMut<'a, K, V> {} -// Both K: Sync and K: Send are correct for IterMut's Send impl, -// but Send is the more useful bound -unsafe impl<'a, K: Send, V: Send> Send for IterMut<'a, K, V> {} - -impl<'a, K: 'a, V: 'a> IterMut<'a, K, V> { - pub fn iter(&self) -> Iter { - Iter { - iter: self.iter.clone(), - } - } -} - -/// Iterator over the entries in a table, consuming the table. -pub struct IntoIter { - table: RawTable, - iter: RawBuckets<'static, K, V>, -} - -unsafe impl Sync for IntoIter {} -unsafe impl Send for IntoIter {} - -impl IntoIter { - pub fn iter(&self) -> Iter { - Iter { - iter: self.iter.clone(), - } - } -} - -/// Iterator over the entries in a table, clearing the table. 
-pub struct Drain<'a, K: 'static, V: 'static> { - table: Shared>, - iter: RawBuckets<'static, K, V>, - marker: marker::PhantomData<&'a RawTable>, -} - -unsafe impl<'a, K: Sync, V: Sync> Sync for Drain<'a, K, V> {} -unsafe impl<'a, K: Send, V: Send> Send for Drain<'a, K, V> {} - -impl<'a, K, V> Drain<'a, K, V> { - pub fn iter(&self) -> Iter { - Iter { - iter: self.iter.clone(), - } - } -} - -impl<'a, K, V> Iterator for Iter<'a, K, V> { - type Item = (&'a K, &'a V); - - fn next(&mut self) -> Option<(&'a K, &'a V)> { - self.iter.next().map(|raw| unsafe { - let pair_ptr = raw.pair(); - (&(*pair_ptr).0, &(*pair_ptr).1) - }) - } - - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> { - fn len(&self) -> usize { - self.iter.len() - } -} - -impl<'a, K, V> Iterator for IterMut<'a, K, V> { - type Item = (&'a K, &'a mut V); - - fn next(&mut self) -> Option<(&'a K, &'a mut V)> { - self.iter.next().map(|raw| unsafe { - let pair_ptr = raw.pair(); - (&(*pair_ptr).0, &mut (*pair_ptr).1) - }) - } - - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> { - fn len(&self) -> usize { - self.iter.len() - } -} - -impl Iterator for IntoIter { - type Item = (SafeHash, K, V); - - fn next(&mut self) -> Option<(SafeHash, K, V)> { - self.iter.next().map(|raw| { - self.table.size = self.table.size.checked_sub(1).unwrap(); - unsafe { - let (k, v) = ptr::read(raw.pair()); - (SafeHash { hash: *raw.hash() }, k, v) - } - }) - } - - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -impl ExactSizeIterator for IntoIter { - fn len(&self) -> usize { - self.iter().len() - } -} - -impl<'a, K, V> Iterator for Drain<'a, K, V> { - type Item = (SafeHash, K, V); - - #[inline] - fn next(&mut self) -> Option<(SafeHash, K, V)> { - self.iter.next().map(|raw| { - unsafe { - self.table.as_mut().size = self.table.as_mut().size.checked_sub(1).unwrap(); - let (k, v) = ptr::read(raw.pair()); - (SafeHash { hash: ptr::replace(&mut *raw.hash(), EMPTY_BUCKET) }, k, v) - } - }) - } - - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -impl<'a, K, V> ExactSizeIterator for Drain<'a, K, V> { - fn len(&self) -> usize { - self.iter.len() - } -} - -impl<'a, K: 'static, V: 'static> Drop for Drain<'a, K, V> { - fn drop(&mut self) { - for _ in self {} - } -} - -impl Clone for RawTable { - fn clone(&self) -> RawTable { - unsafe { - let cap = self.capacity(); - let mut new_ht = RawTable::new_uninitialized(cap); - if cap == 0 { - return new_ht; - } - - let mut new_buckets = new_ht.raw_bucket_at(0); - let mut buckets = self.raw_bucket_at(0); - loop { - *new_buckets.hash() = *buckets.hash(); - if *new_buckets.hash() != EMPTY_BUCKET { - let pair_ptr = buckets.pair(); - let kv = ((*pair_ptr).0.clone(), (*pair_ptr).1.clone()); - ptr::write(new_buckets.pair(), kv); - } - - if buckets.idx == cap - 1 { - break; - } - - buckets.idx += 1; - buckets.assert_bounds(self.bytes_allocated, None); - new_buckets.idx += 1; - new_buckets.assert_bounds(new_ht.bytes_allocated, None); - } - - new_ht.size = self.size(); - - new_ht - } - } -} - -// FORK NOTE: There may be lifetime errors that do not occur on std::HashMap -// since we removed the may_dangle (which allows more things to compile but has stricter guarantees). -// Generally we should be fine as long as no borrowed data is stuck into the map. 
-impl Drop for RawTable { - fn drop(&mut self) { - if self.capacity() == 0 { - return; - } - - // This is done in reverse because we've likely partially taken - // some elements out with `.into_iter()` from the front. - // Check if the size is 0, so we don't do a useless scan when - // dropping empty tables such as on resize. - // Also avoid double drop of elements that have been already moved out. - unsafe { - // FORK NOTE: Can't needs_drop on stable - // if needs_drop::<(K, V)>() { - // avoid linear runtime for types that don't need drop - self.rev_drop_buckets(); - // } - } - - let hashes_size = self.capacity() * size_of::(); - let pairs_size = self.capacity() * size_of::<(K, V)>(); - let (align, _, _, oflo) = calculate_allocation(hashes_size, - align_of::(), - pairs_size, - align_of::<(K, V)>()); - - assert!(!oflo, "HashMap Corruption - should be impossible"); - - unsafe { - dealloc(self.hashes.ptr() as *mut u8, align); - // Remember how everything was allocated out of one buffer - // during initialization? We only need one call to free here. - } - } -} diff --git a/collector/compile-benchmarks/style-servo/components/jstraceable_derive/Cargo.toml b/collector/compile-benchmarks/style-servo/components/jstraceable_derive/Cargo.toml deleted file mode 100644 index eb01baa08..000000000 --- a/collector/compile-benchmarks/style-servo/components/jstraceable_derive/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "jstraceable_derive" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -path = "lib.rs" -proc-macro = true - -[dependencies] -quote = "0.3.15" -syn = "0.11" -synstructure = "0.5" diff --git a/collector/compile-benchmarks/style-servo/components/jstraceable_derive/lib.rs b/collector/compile-benchmarks/style-servo/components/jstraceable_derive/lib.rs deleted file mode 100644 index 66b172d6a..000000000 --- a/collector/compile-benchmarks/style-servo/components/jstraceable_derive/lib.rs +++ /dev/null @@ -1,55 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -extern crate proc_macro; -#[macro_use] extern crate quote; -extern crate syn; -extern crate synstructure; - -#[proc_macro_derive(JSTraceable)] -pub fn expand_token_stream(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - expand_string(&input.to_string()).parse().unwrap() -} - -fn expand_string(input: &str) -> String { - let mut type_ = syn::parse_macro_input(input).unwrap(); - - let style = synstructure::BindStyle::Ref.into(); - let match_body = synstructure::each_field(&mut type_, &style, |binding| { - Some(quote! { #binding.trace(tracer); }) - }); - - let name = &type_.ident; - let (impl_generics, ty_generics, where_clause) = type_.generics.split_for_impl(); - let mut where_clause = where_clause.clone(); - for param in &type_.generics.ty_params { - where_clause.predicates.push(syn::WherePredicate::BoundPredicate(syn::WhereBoundPredicate { - bound_lifetimes: Vec::new(), - bounded_ty: syn::Ty::Path(None, param.ident.clone().into()), - bounds: vec![syn::TyParamBound::Trait( - syn::PolyTraitRef { - bound_lifetimes: Vec::new(), - trait_ref: syn::parse_path("::dom::bindings::trace::JSTraceable").unwrap(), - }, - syn::TraitBoundModifier::None - )], - })) - } - - let tokens = quote! 
{ - #[allow(unsafe_code)] - unsafe impl #impl_generics ::dom::bindings::trace::JSTraceable for #name #ty_generics #where_clause { - #[inline] - #[allow(unused_variables, unused_imports)] - unsafe fn trace(&self, tracer: *mut ::js::jsapi::JSTracer) { - use ::dom::bindings::trace::JSTraceable; - match *self { - #match_body - } - } - } - }; - - tokens.to_string() -} diff --git a/collector/compile-benchmarks/style-servo/components/layout/Cargo.toml b/collector/compile-benchmarks/style-servo/components/layout/Cargo.toml deleted file mode 100644 index 273a1446a..000000000 --- a/collector/compile-benchmarks/style-servo/components/layout/Cargo.toml +++ /dev/null @@ -1,48 +0,0 @@ -[package] -name = "layout" -version = "0.0.1" -authors = ["The Servo Project Developers"] -license = "MPL-2.0" -publish = false - -[lib] -name = "layout" -path = "lib.rs" - -[dependencies] -app_units = "0.5" -atomic_refcell = "0.1" -bitflags = "0.8" -canvas_traits = {path = "../canvas_traits"} -euclid = "0.15" -fnv = "1.0" -gfx = {path = "../gfx"} -gfx_traits = {path = "../gfx_traits"} -heapsize = "0.4" -html5ever = "0.20.0" -ipc-channel = "0.8" -libc = "0.2" -log = "0.3.5" -msg = {path = "../msg"} -net_traits = {path = "../net_traits"} -ordered-float = "0.4" -parking_lot = "0.4" -profile_traits = {path = "../profile_traits"} -range = {path = "../range"} -rayon = "0.8" -script_layout_interface = {path = "../script_layout_interface"} -script_traits = {path = "../script_traits"} -selectors = { path = "../selectors" } -serde = "1.0" -servo_arc = {path = "../servo_arc"} -servo_atoms = {path = "../atoms"} -servo_geometry = {path = "../geometry"} -serde_json = "1.0" -servo_config = {path = "../config"} -servo_url = {path = "../url"} -smallvec = "0.4" -style = {path = "../style"} -style_traits = {path = "../style_traits"} -unicode-bidi = {version = "0.3", features = ["with_serde"]} -unicode-script = {version = "0.1", features = ["harfbuzz"]} -webrender_api = {git = "https://github.com/servo/webrender", features = ["ipc"]} diff --git a/collector/compile-benchmarks/style-servo/components/layout/animation.rs b/collector/compile-benchmarks/style-servo/components/layout/animation.rs deleted file mode 100644 index 19f621f7f..000000000 --- a/collector/compile-benchmarks/style-servo/components/layout/animation.rs +++ /dev/null @@ -1,178 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! CSS transitions and animations. - -use context::LayoutContext; -use flow::{self, Flow}; -use fnv::FnvHashMap; -use gfx::display_list::OpaqueNode; -use ipc_channel::ipc::IpcSender; -use msg::constellation_msg::PipelineId; -use opaque_node::OpaqueNodeMethods; -use script_traits::{AnimationState, ConstellationControlMsg, LayoutMsg as ConstellationMsg}; -use script_traits::UntrustedNodeAddress; -use std::sync::mpsc::Receiver; -use style::animation::{Animation, update_style_for_animation}; -use style::font_metrics::ServoMetricsProvider; -use style::selector_parser::RestyleDamage; -use style::timer::Timer; - -/// Processes any new animations that were discovered after style recalculation. -/// Also expire any old animations that have completed, inserting them into -/// `expired_animations`. 
-pub fn update_animation_state(constellation_chan: &IpcSender, - script_chan: &IpcSender, - running_animations: &mut FnvHashMap>, - expired_animations: &mut FnvHashMap>, - mut newly_transitioning_nodes: Option<&mut Vec>, - new_animations_receiver: &Receiver, - pipeline_id: PipelineId, - timer: &Timer) { - let mut new_running_animations = vec![]; - while let Ok(animation) = new_animations_receiver.try_recv() { - let mut should_push = true; - if let Animation::Keyframes(ref node, ref name, ref state) = animation { - // If the animation was already present in the list for the - // node, just update its state, else push the new animation to - // run. - if let Some(ref mut animations) = running_animations.get_mut(node) { - // TODO: This being linear is probably not optimal. - for anim in animations.iter_mut() { - if let Animation::Keyframes(_, ref anim_name, ref mut anim_state) = *anim { - if *name == *anim_name { - debug!("update_animation_state: Found other animation {}", name); - anim_state.update_from_other(&state, timer); - should_push = false; - break; - } - } - } - } - } - - if should_push { - new_running_animations.push(animation); - } - } - - if running_animations.is_empty() && new_running_animations.is_empty() { - // Nothing to do. Return early so we don't flood the compositor with - // `ChangeRunningAnimationsState` messages. - return - } - - let now = timer.seconds(); - // Expire old running animations. - // - // TODO: Do not expunge Keyframes animations, since we need that state if - // the animation gets re-triggered. Probably worth splitting in two - // different maps, or at least using a linked list? - let mut keys_to_remove = vec![]; - for (key, running_animations) in running_animations.iter_mut() { - let mut animations_still_running = vec![]; - for mut running_animation in running_animations.drain(..) { - let still_running = !running_animation.is_expired() && match running_animation { - Animation::Transition(_, started_at, ref frame, _expired) => { - now < started_at + frame.duration - } - Animation::Keyframes(_, _, ref mut state) => { - // This animation is still running, or we need to keep - // iterating. - now < state.started_at + state.duration || state.tick() - } - }; - - if still_running { - animations_still_running.push(running_animation); - continue - } - - if let Animation::Transition(node, _, ref frame, _) = running_animation { - script_chan.send(ConstellationControlMsg::TransitionEnd(node.to_untrusted_node_address(), - frame.property_animation - .property_name().into(), - frame.duration)) - .unwrap(); - } - - expired_animations.entry(*key) - .or_insert_with(Vec::new) - .push(running_animation); - } - - if animations_still_running.is_empty() { - keys_to_remove.push(*key); - } else { - *running_animations = animations_still_running - } - } - - for key in keys_to_remove { - running_animations.remove(&key).unwrap(); - } - - // Add new running animations. 
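// [Editorial aside — illustrative sketch, not part of the deleted style-servo
// sources.] The transition-expiry test used above, reduced to its essence: a
// transition is still live while the shared timer has not yet reached its
// start time plus duration (both in seconds).
fn transition_still_running(now: f64, started_at: f64, duration: f64) -> bool {
    now < started_at + duration
}

#[test]
fn transition_expires_after_its_duration() {
    assert!(transition_still_running(1.0, 0.5, 0.75));
    assert!(!transition_still_running(2.0, 0.5, 0.75));
}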
- for new_running_animation in new_running_animations { - if new_running_animation.is_transition() { - match newly_transitioning_nodes { - Some(ref mut nodes) => { - nodes.push(new_running_animation.node().to_untrusted_node_address()); - } - None => { - warn!("New transition encountered from compositor-initiated layout."); - } - } - } - - running_animations.entry(*new_running_animation.node()) - .or_insert_with(Vec::new) - .push(new_running_animation) - } - - let animation_state = if running_animations.is_empty() { - AnimationState::NoAnimationsPresent - } else { - AnimationState::AnimationsPresent - }; - - constellation_chan.send(ConstellationMsg::ChangeRunningAnimationsState(pipeline_id, - animation_state)) - .unwrap(); -} - -/// Recalculates style for a set of animations. This does *not* run with the DOM -/// lock held. -// NB: This is specific for SelectorImpl, since the layout context and the -// flows are SelectorImpl specific too. If that goes away at some point, -// this should be made generic. -pub fn recalc_style_for_animations(context: &LayoutContext, - flow: &mut Flow, - animations: &FnvHashMap>) { - let mut damage = RestyleDamage::empty(); - flow.mutate_fragments(&mut |fragment| { - if let Some(ref animations) = animations.get(&fragment.node) { - for animation in animations.iter() { - let old_style = fragment.style.clone(); - update_style_for_animation(&context.style_context, - animation, - &mut fragment.style, - &ServoMetricsProvider); - let difference = - RestyleDamage::compute_style_difference( - &old_style, - &fragment.style, - ); - damage |= difference.damage; - } - } - }); - - let base = flow::mut_base(flow); - base.restyle_damage.insert(damage); - for kid in base.children.iter_mut() { - recalc_style_for_animations(context, kid, animations) - } -} diff --git a/collector/compile-benchmarks/style-servo/components/layout/block.rs b/collector/compile-benchmarks/style-servo/components/layout/block.rs deleted file mode 100644 index 86e540b11..000000000 --- a/collector/compile-benchmarks/style-servo/components/layout/block.rs +++ /dev/null @@ -1,3111 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! Layout for CSS block-level elements. -//! -//! As a terminology note, the term *absolute positioning* here refers to elements with position -//! `absolute` or `fixed`. The term *positioned element* refers to elements with position -//! `relative`, `absolute`, and `fixed`. The term *containing block* (occasionally abbreviated as -//! *CB*) is the containing block for the current flow, which differs from the static containing -//! block if the flow is absolutely-positioned. -//! -//! "CSS 2.1" or "CSS 2.2" refers to the editor's draft of the W3C "Cascading Style Sheets Level 2 -//! Revision 2 (CSS 2.2) Specification" available here: -//! -//! http://dev.w3.org/csswg/css2/ -//! -//! "INTRINSIC" refers to L. David Baron's "More Precise Definitions of Inline Layout and Table -//! Layout" available here: -//! -//! http://dbaron.org/css/intrinsic/ -//! -//! "CSS-SIZING" refers to the W3C "CSS Intrinsic & Extrinsic Sizing Module Level 3" document -//! available here: -//! -//! 
http://dev.w3.org/csswg/css-sizing/ - -#![deny(unsafe_code)] - -use app_units::{Au, MAX_AU}; -use context::LayoutContext; -use display_list_builder::{BlockFlowDisplayListBuilding, BorderPaintingMode}; -use display_list_builder::{DisplayListBuildState, StackingContextCollectionFlags}; -use display_list_builder::StackingContextCollectionState; -use euclid::{Point2D, Rect, SideOffsets2D, Size2D}; -use floats::{ClearType, FloatKind, Floats, PlacementInfo}; -use flow::{self, BaseFlow, EarlyAbsolutePositionInfo, Flow, FlowClass, ForceNonfloatedFlag}; -use flow::{BLOCK_POSITION_IS_STATIC, CLEARS_LEFT, CLEARS_RIGHT}; -use flow::{CONTAINS_TEXT_OR_REPLACED_FRAGMENTS, INLINE_POSITION_IS_STATIC}; -use flow::{IS_ABSOLUTELY_POSITIONED, FragmentationContext, MARGINS_CANNOT_COLLAPSE}; -use flow::{ImmutableFlowUtils, LateAbsolutePositionInfo, OpaqueFlow}; -use flow_list::FlowList; -use fragment::{CoordinateSystem, Fragment, FragmentBorderBoxIterator, Overflow}; -use fragment::{IS_INLINE_FLEX_ITEM, IS_BLOCK_FLEX_ITEM}; -use gfx_traits::print_tree::PrintTree; -use incremental::RelayoutMode; -use layout_debug; -use model::{AdjoiningMargins, CollapsibleMargins, IntrinsicISizes, MarginCollapseInfo, MaybeAuto}; -use sequential; -use serde::{Serialize, Serializer}; -use servo_geometry::max_rect; -use std::cmp::{max, min}; -use std::fmt; -use std::sync::Arc; -use style::computed_values::{box_sizing, display, float, overflow_x}; -use style::computed_values::{position, text_align}; -use style::context::SharedStyleContext; -use style::logical_geometry::{LogicalMargin, LogicalPoint, LogicalRect, LogicalSize, WritingMode}; -use style::properties::ComputedValues; -use style::servo::restyle_damage::{BUBBLE_ISIZES, REFLOW, REFLOW_OUT_OF_FLOW}; -use style::values::computed::{LengthOrPercentageOrNone, LengthOrPercentage}; -use style::values::computed::LengthOrPercentageOrAuto; -use traversal::PreorderFlowTraversal; - -/// Information specific to floated blocks. -#[derive(Clone, Serialize)] -pub struct FloatedBlockInfo { - /// The amount of inline size that is available for the float. - pub containing_inline_size: Au, - - /// The float ceiling, relative to `BaseFlow::position::cur_b` (i.e. the top part of the border - /// box). - pub float_ceiling: Au, - - /// Left or right? - pub float_kind: FloatKind, -} - -impl FloatedBlockInfo { - pub fn new(float_kind: FloatKind) -> FloatedBlockInfo { - FloatedBlockInfo { - containing_inline_size: Au(0), - float_ceiling: Au(0), - float_kind: float_kind, - } - } -} - -/// The solutions for the block-size-and-margins constraint equation. -#[derive(Clone, Copy)] -struct BSizeConstraintSolution { - block_start: Au, - block_size: Au, - margin_block_start: Au, - margin_block_end: Au -} - -impl BSizeConstraintSolution { - fn new(block_start: Au, - block_size: Au, - margin_block_start: Au, - margin_block_end: Au) - -> BSizeConstraintSolution { - BSizeConstraintSolution { - block_start: block_start, - block_size: block_size, - margin_block_start: margin_block_start, - margin_block_end: margin_block_end, - } - } - - /// Solve the vertical constraint equation for absolute non-replaced elements. - /// - /// CSS Section 10.6.4 - /// Constraint equation: - /// block-start + block-end + block-size + margin-block-start + margin-block-end - /// = absolute containing block block-size - (vertical padding and border) - /// [aka available_block-size] - /// - /// Return the solution for the equation. 
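// [Editorial aside — illustrative sketch, not part of the deleted style-servo
// sources.] The CSS 2.1 section 10.6.4 constraint stated above is a single
// equation, so whenever exactly one input is `auto` it can be solved for
// directly. A sketch for the case where only `block-end` is auto, using plain
// integers standing in for app units (Au):
fn solve_block_end(available_block_size: i32,
                   block_start: i32,
                   block_size: i32,
                   margin_block_start: i32,
                   margin_block_end: i32) -> i32 {
    // block-start + block-end + block-size + margin-block-start + margin-block-end
    //   = available block-size
    available_block_size
        - (block_start + block_size + margin_block_start + margin_block_end)
}

#[test]
fn single_auto_value_is_determined_by_the_equation() {
    assert_eq!(solve_block_end(100, 10, 60, 5, 5), 20);
}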
- fn solve_vertical_constraints_abs_nonreplaced(block_size: MaybeAuto, - block_start_margin: MaybeAuto, - block_end_margin: MaybeAuto, - block_start: MaybeAuto, - block_end: MaybeAuto, - content_block_size: Au, - available_block_size: Au) - -> BSizeConstraintSolution { - let (block_start, block_size, margin_block_start, margin_block_end) = - match (block_start, block_end, block_size) { - (MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Auto) => { - let margin_block_start = block_start_margin.specified_or_zero(); - let margin_block_end = block_end_margin.specified_or_zero(); - // Now it is the same situation as block-start Specified and block-end - // and block-size Auto. - let block_size = content_block_size; - // Use a dummy value for `block_start`, since it has the static position. - (Au(0), block_size, margin_block_start, margin_block_end) - } - (MaybeAuto::Specified(block_start), - MaybeAuto::Specified(block_end), - MaybeAuto::Specified(block_size)) => { - match (block_start_margin, block_end_margin) { - (MaybeAuto::Auto, MaybeAuto::Auto) => { - let total_margin_val = - available_block_size - block_start - block_end - block_size; - (block_start, - block_size, - total_margin_val.scale_by(0.5), - total_margin_val.scale_by(0.5)) - } - (MaybeAuto::Specified(margin_block_start), MaybeAuto::Auto) => { - let sum = block_start + block_end + block_size + margin_block_start; - (block_start, - block_size, - margin_block_start, - available_block_size - sum) - } - (MaybeAuto::Auto, MaybeAuto::Specified(margin_block_end)) => { - let sum = block_start + block_end + block_size + margin_block_end; - (block_start, block_size, available_block_size - sum, margin_block_end) - } - (MaybeAuto::Specified(margin_block_start), - MaybeAuto::Specified(margin_block_end)) => { - // Values are over-constrained. Ignore value for 'block-end'. - (block_start, block_size, margin_block_start, margin_block_end) - } - } - } - - // For the rest of the cases, auto values for margin are set to 0 - - // If only one is Auto, solve for it - (MaybeAuto::Auto, - MaybeAuto::Specified(block_end), - MaybeAuto::Specified(block_size)) => { - let margin_block_start = block_start_margin.specified_or_zero(); - let margin_block_end = block_end_margin.specified_or_zero(); - let sum = block_end + block_size + margin_block_start + margin_block_end; - (available_block_size - sum, block_size, margin_block_start, margin_block_end) - } - (MaybeAuto::Specified(block_start), - MaybeAuto::Auto, - MaybeAuto::Specified(block_size)) => { - let margin_block_start = block_start_margin.specified_or_zero(); - let margin_block_end = block_end_margin.specified_or_zero(); - (block_start, block_size, margin_block_start, margin_block_end) - } - (MaybeAuto::Specified(block_start), - MaybeAuto::Specified(block_end), - MaybeAuto::Auto) => { - let margin_block_start = block_start_margin.specified_or_zero(); - let margin_block_end = block_end_margin.specified_or_zero(); - let sum = block_start + block_end + margin_block_start + margin_block_end; - (block_start, available_block_size - sum, margin_block_start, margin_block_end) - } - - // If block-size is auto, then block-size is content block-size. Solve for the - // non-auto value. 
- (MaybeAuto::Specified(block_start), MaybeAuto::Auto, MaybeAuto::Auto) => { - let margin_block_start = block_start_margin.specified_or_zero(); - let margin_block_end = block_end_margin.specified_or_zero(); - let block_size = content_block_size; - (block_start, block_size, margin_block_start, margin_block_end) - } - (MaybeAuto::Auto, MaybeAuto::Specified(block_end), MaybeAuto::Auto) => { - let margin_block_start = block_start_margin.specified_or_zero(); - let margin_block_end = block_end_margin.specified_or_zero(); - let block_size = content_block_size; - let sum = block_end + block_size + margin_block_start + margin_block_end; - (available_block_size - sum, block_size, margin_block_start, margin_block_end) - } - - (MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Specified(block_size)) => { - let margin_block_start = block_start_margin.specified_or_zero(); - let margin_block_end = block_end_margin.specified_or_zero(); - // Use a dummy value for `block_start`, since it has the static position. - (Au(0), block_size, margin_block_start, margin_block_end) - } - }; - - BSizeConstraintSolution::new(block_start, block_size, margin_block_start, margin_block_end) - } - - /// Solve the vertical constraint equation for absolute replaced elements. - /// - /// Assumption: The used value for block-size has already been calculated. - /// - /// CSS Section 10.6.5 - /// Constraint equation: - /// block-start + block-end + block-size + margin-block-start + margin-block-end - /// = absolute containing block block-size - (vertical padding and border) - /// [aka available block-size] - /// - /// Return the solution for the equation. - fn solve_vertical_constraints_abs_replaced(block_size: Au, - block_start_margin: MaybeAuto, - block_end_margin: MaybeAuto, - block_start: MaybeAuto, - block_end: MaybeAuto, - _: Au, - available_block_size: Au) - -> BSizeConstraintSolution { - let (block_start, block_size, margin_block_start, margin_block_end) = - match (block_start, block_end) { - (MaybeAuto::Auto, MaybeAuto::Auto) => { - let margin_block_start = block_start_margin.specified_or_zero(); - let margin_block_end = block_end_margin.specified_or_zero(); - // Use a dummy value for `block_start`, since it has the static position. - (Au(0), block_size, margin_block_start, margin_block_end) - } - (MaybeAuto::Specified(block_start), MaybeAuto::Specified(block_end)) => { - match (block_start_margin, block_end_margin) { - (MaybeAuto::Auto, MaybeAuto::Auto) => { - let total_margin_val = available_block_size - block_start - block_end - - block_size; - (block_start, - block_size, - total_margin_val.scale_by(0.5), - total_margin_val.scale_by(0.5)) - } - (MaybeAuto::Specified(margin_block_start), MaybeAuto::Auto) => { - let sum = block_start + block_end + block_size + margin_block_start; - (block_start, - block_size, - margin_block_start, - available_block_size - sum) - } - (MaybeAuto::Auto, MaybeAuto::Specified(margin_block_end)) => { - let sum = block_start + block_end + block_size + margin_block_end; - (block_start, block_size, available_block_size - sum, margin_block_end) - } - (MaybeAuto::Specified(margin_block_start), - MaybeAuto::Specified(margin_block_end)) => { - // Values are over-constrained. Ignore value for 'block-end'. 
- (block_start, block_size, margin_block_start, margin_block_end) - } - } - } - - // If only one is Auto, solve for it - (MaybeAuto::Auto, MaybeAuto::Specified(block_end)) => { - let margin_block_start = block_start_margin.specified_or_zero(); - let margin_block_end = block_end_margin.specified_or_zero(); - let sum = block_end + block_size + margin_block_start + margin_block_end; - (available_block_size - sum, block_size, margin_block_start, margin_block_end) - } - (MaybeAuto::Specified(block_start), MaybeAuto::Auto) => { - let margin_block_start = block_start_margin.specified_or_zero(); - let margin_block_end = block_end_margin.specified_or_zero(); - (block_start, block_size, margin_block_start, margin_block_end) - } - }; - BSizeConstraintSolution::new(block_start, block_size, margin_block_start, margin_block_end) - } -} - -/// Performs block-size calculations potentially multiple times, taking -/// (assuming an horizontal writing mode) `height`, `min-height`, and `max-height` -/// into account. After each call to `next()`, the caller must call `.try()` with the -/// current calculated value of `height`. -/// -/// See CSS 2.1 § 10.7. -pub struct CandidateBSizeIterator { - block_size: MaybeAuto, - max_block_size: Option, - min_block_size: Au, - pub candidate_value: Au, - status: CandidateBSizeIteratorStatus, -} - -impl CandidateBSizeIterator { - /// Creates a new candidate block-size iterator. `block_container_block-size` is `None` if the block-size - /// of the block container has not been determined yet. It will always be `Some` in the case of - /// absolutely-positioned containing blocks. - pub fn new(fragment: &Fragment, block_container_block_size: Option) - -> CandidateBSizeIterator { - // Per CSS 2.1 § 10.7, (assuming an horizontal writing mode,) - // percentages in `min-height` and `max-height` refer to the height of - // the containing block. - // If that is not determined yet by the time we need to resolve - // `min-height` and `max-height`, percentage values are ignored. 
- - let block_size = match (fragment.style.content_block_size(), block_container_block_size) { - (LengthOrPercentageOrAuto::Percentage(percent), Some(block_container_block_size)) => { - MaybeAuto::Specified(block_container_block_size.scale_by(percent.0)) - } - (LengthOrPercentageOrAuto::Calc(calc), _) => { - MaybeAuto::from_option(calc.to_used_value(block_container_block_size)) - } - (LengthOrPercentageOrAuto::Percentage(_), None) | - (LengthOrPercentageOrAuto::Auto, _) => MaybeAuto::Auto, - (LengthOrPercentageOrAuto::Length(length), _) => MaybeAuto::Specified(Au::from(length)), - }; - let max_block_size = match (fragment.style.max_block_size(), block_container_block_size) { - (LengthOrPercentageOrNone::Percentage(percent), Some(block_container_block_size)) => { - Some(block_container_block_size.scale_by(percent.0)) - } - (LengthOrPercentageOrNone::Calc(calc), _) => { - calc.to_used_value(block_container_block_size) - } - (LengthOrPercentageOrNone::Percentage(_), None) | - (LengthOrPercentageOrNone::None, _) => None, - (LengthOrPercentageOrNone::Length(length), _) => Some(Au::from(length)), - }; - let min_block_size = match (fragment.style.min_block_size(), block_container_block_size) { - (LengthOrPercentage::Percentage(percent), Some(block_container_block_size)) => { - block_container_block_size.scale_by(percent.0) - } - (LengthOrPercentage::Calc(calc), _) => { - calc.to_used_value(block_container_block_size).unwrap_or(Au(0)) - } - (LengthOrPercentage::Percentage(_), None) => Au(0), - (LengthOrPercentage::Length(length), _) => Au::from(length), - }; - - // If the style includes `box-sizing: border-box`, subtract the border and padding. - let adjustment_for_box_sizing = match fragment.style.get_position().box_sizing { - box_sizing::T::border_box => fragment.border_padding.block_start_end(), - box_sizing::T::content_box => Au(0), - }; - - return CandidateBSizeIterator { - block_size: block_size.map(|size| adjust(size, adjustment_for_box_sizing)), - max_block_size: max_block_size.map(|size| adjust(size, adjustment_for_box_sizing)), - min_block_size: adjust(min_block_size, adjustment_for_box_sizing), - candidate_value: Au(0), - status: CandidateBSizeIteratorStatus::Initial, - }; - - fn adjust(size: Au, delta: Au) -> Au { - max(size - delta, Au(0)) - } - } -} - -impl Iterator for CandidateBSizeIterator { - type Item = MaybeAuto; - fn next(&mut self) -> Option { - self.status = match self.status { - CandidateBSizeIteratorStatus::Initial => CandidateBSizeIteratorStatus::Trying, - CandidateBSizeIteratorStatus::Trying => { - match self.max_block_size { - Some(max_block_size) if self.candidate_value > max_block_size => { - CandidateBSizeIteratorStatus::TryingMax - } - _ if self.candidate_value < self.min_block_size => { - CandidateBSizeIteratorStatus::TryingMin - } - _ => CandidateBSizeIteratorStatus::Found, - } - } - CandidateBSizeIteratorStatus::TryingMax => { - if self.candidate_value < self.min_block_size { - CandidateBSizeIteratorStatus::TryingMin - } else { - CandidateBSizeIteratorStatus::Found - } - } - CandidateBSizeIteratorStatus::TryingMin | CandidateBSizeIteratorStatus::Found => { - CandidateBSizeIteratorStatus::Found - } - }; - - match self.status { - CandidateBSizeIteratorStatus::Trying => Some(self.block_size), - CandidateBSizeIteratorStatus::TryingMax => { - Some(MaybeAuto::Specified(self.max_block_size.unwrap())) - } - CandidateBSizeIteratorStatus::TryingMin => { - Some(MaybeAuto::Specified(self.min_block_size)) - } - CandidateBSizeIteratorStatus::Found => None, - 
CandidateBSizeIteratorStatus::Initial => panic!(), - } - } -} - -enum CandidateBSizeIteratorStatus { - Initial, - Trying, - TryingMax, - TryingMin, - Found, -} - -// A helper function used in block-size calculation. -fn translate_including_floats(cur_b: &mut Au, delta: Au, floats: &mut Floats) { - *cur_b = *cur_b + delta; - let writing_mode = floats.writing_mode; - floats.translate(LogicalSize::new(writing_mode, Au(0), -delta)); -} - -/// The real assign-block-sizes traversal for flows with position 'absolute'. -/// -/// This is a traversal of an Absolute Flow tree. -/// - Relatively positioned flows and the Root flow start new Absolute flow trees. -/// - The kids of a flow in this tree will be the flows for which it is the -/// absolute Containing Block. -/// - Thus, leaf nodes and inner non-root nodes are all Absolute Flows. -/// -/// A Flow tree can have several Absolute Flow trees (depending on the number -/// of relatively positioned flows it has). -/// -/// Note that flows with position 'fixed' just form a flat list as they all -/// have the Root flow as their CB. -pub struct AbsoluteAssignBSizesTraversal<'a>(pub &'a SharedStyleContext<'a>); - -impl<'a> PreorderFlowTraversal for AbsoluteAssignBSizesTraversal<'a> { - #[inline] - fn process(&self, flow: &mut Flow) { - if !flow.is_block_like() { - return - } - - // This flow might not be an absolutely positioned flow if it is the root of the tree. - let block = flow.as_mut_block(); - if !block.base.flags.contains(IS_ABSOLUTELY_POSITIONED) { - return; - } - - if !block.base.restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW) { - return - } - - block.calculate_absolute_block_size_and_margins(self.0); - } -} - -pub enum BlockType { - Replaced, - NonReplaced, - AbsoluteReplaced, - AbsoluteNonReplaced, - FloatReplaced, - FloatNonReplaced, - InlineBlockReplaced, - InlineBlockNonReplaced, - InlineFlexItem, -} - -#[derive(Clone, PartialEq)] -pub enum MarginsMayCollapseFlag { - MarginsMayCollapse, - MarginsMayNotCollapse, -} - -#[derive(PartialEq)] -pub enum FormattingContextType { - None, - Block, - Other, -} - -// A block formatting context. -#[derive(Serialize)] -pub struct BlockFlow { - /// Data common to all flows. - pub base: BaseFlow, - - /// The associated fragment. - pub fragment: Fragment, - - /// Additional floating flow members. - pub float: Option>, - - /// Various flags. - flags: BlockFlowFlags, -} - -bitflags! { - flags BlockFlowFlags: u8 { - #[doc = "If this is set, then this block flow is the root flow."] - const IS_ROOT = 0b0000_0001, - #[doc = "If this is set, then this block flow has overflow and it will scroll."] - const HAS_SCROLLING_OVERFLOW = 0b0000_0010, - } -} - -impl Serialize for BlockFlowFlags { - fn serialize(&self, serializer: S) -> Result { - self.bits().serialize(serializer) - } -} - -impl BlockFlow { - pub fn from_fragment(fragment: Fragment) -> BlockFlow { - BlockFlow::from_fragment_and_float_kind(fragment, None) - } - - pub fn from_fragment_and_float_kind(fragment: Fragment, float_kind: Option) - -> BlockFlow { - let writing_mode = fragment.style().writing_mode; - BlockFlow { - base: BaseFlow::new(Some(fragment.style()), writing_mode, match float_kind { - Some(_) => ForceNonfloatedFlag::FloatIfNecessary, - None => ForceNonfloatedFlag::ForceNonfloated, - }), - fragment: fragment, - float: float_kind.map(|kind| box FloatedBlockInfo::new(kind)), - flags: BlockFlowFlags::empty(), - } - } - - /// Return the type of this block. 
- /// - /// This determines the algorithm used to calculate inline-size, block-size, and the - /// relevant margins for this Block. - pub fn block_type(&self) -> BlockType { - if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) { - if self.fragment.is_replaced() { - BlockType::AbsoluteReplaced - } else { - BlockType::AbsoluteNonReplaced - } - } else if self.is_inline_flex_item() { - BlockType::InlineFlexItem - } else if self.base.flags.is_float() { - if self.fragment.is_replaced() { - BlockType::FloatReplaced - } else { - BlockType::FloatNonReplaced - } - } else if self.is_inline_block_or_inline_flex() { - if self.fragment.is_replaced() { - BlockType::InlineBlockReplaced - } else { - BlockType::InlineBlockNonReplaced - } - } else { - if self.fragment.is_replaced() { - BlockType::Replaced - } else { - BlockType::NonReplaced - } - } - } - - /// Compute the actual inline size and position for this block. - pub fn compute_used_inline_size(&mut self, - shared_context: &SharedStyleContext, - containing_block_inline_size: Au) { - let block_type = self.block_type(); - match block_type { - BlockType::AbsoluteReplaced => { - let inline_size_computer = AbsoluteReplaced; - inline_size_computer.compute_used_inline_size(self, - shared_context, - containing_block_inline_size); - } - BlockType::AbsoluteNonReplaced => { - let inline_size_computer = AbsoluteNonReplaced; - inline_size_computer.compute_used_inline_size(self, - shared_context, - containing_block_inline_size); - } - BlockType::FloatReplaced => { - let inline_size_computer = FloatReplaced; - inline_size_computer.compute_used_inline_size(self, - shared_context, - containing_block_inline_size); - } - BlockType::FloatNonReplaced => { - let inline_size_computer = FloatNonReplaced; - inline_size_computer.compute_used_inline_size(self, - shared_context, - containing_block_inline_size); - } - BlockType::InlineBlockReplaced => { - let inline_size_computer = InlineBlockReplaced; - inline_size_computer.compute_used_inline_size(self, - shared_context, - containing_block_inline_size); - } - BlockType::InlineBlockNonReplaced => { - let inline_size_computer = InlineBlockNonReplaced; - inline_size_computer.compute_used_inline_size(self, - shared_context, - containing_block_inline_size); - } - BlockType::Replaced => { - let inline_size_computer = BlockReplaced; - inline_size_computer.compute_used_inline_size(self, - shared_context, - containing_block_inline_size); - } - BlockType::NonReplaced => { - let inline_size_computer = BlockNonReplaced; - inline_size_computer.compute_used_inline_size(self, - shared_context, - containing_block_inline_size); - } - BlockType::InlineFlexItem => { - let inline_size_computer = InlineFlexItem; - inline_size_computer.compute_used_inline_size(self, - shared_context, - containing_block_inline_size); - } - } - } - - /// Return this flow's fragment. - pub fn fragment(&mut self) -> &mut Fragment { - &mut self.fragment - } - - pub fn stacking_relative_border_box(&self, coor: CoordinateSystem) -> Rect { - return self.fragment.stacking_relative_border_box( - &self.base.stacking_relative_position, - &self.base.early_absolute_position_info.relative_containing_block_size, - self.base.early_absolute_position_info.relative_containing_block_mode, - coor); - } - - /// Return the size of the containing block for the given immediate absolute descendant of this - /// flow. - /// - /// Right now, this only gets the containing block size for absolutely positioned elements. 
- /// Note: We assume this is called in a top-down traversal, so it is ok to reference the CB. - #[inline] - pub fn containing_block_size(&self, viewport_size: &Size2D, descendant: OpaqueFlow) - -> LogicalSize { - debug_assert!(self.base.flags.contains(IS_ABSOLUTELY_POSITIONED)); - if self.is_fixed() || self.is_root() { - // Initial containing block is the CB for the root - LogicalSize::from_physical(self.base.writing_mode, *viewport_size) - } else { - self.base.absolute_cb.generated_containing_block_size(descendant) - } - } - - /// Return shrink-to-fit inline-size. - /// - /// This is where we use the preferred inline-sizes and minimum inline-sizes - /// calculated in the bubble-inline-sizes traversal. - pub fn get_shrink_to_fit_inline_size(&self, available_inline_size: Au) -> Au { - let content_intrinsic_inline_sizes = self.content_intrinsic_inline_sizes(); - min(content_intrinsic_inline_sizes.preferred_inline_size, - max(content_intrinsic_inline_sizes.minimum_inline_size, available_inline_size)) - } - - /// If this is the root flow, shifts all kids down and adjusts our size to account for - /// root flow margins, which should never be collapsed according to CSS § 8.3.1. - /// - /// TODO(#2017, pcwalton): This is somewhat inefficient (traverses kids twice); can we do - /// better? - fn adjust_fragments_for_collapsed_margins_if_root(&mut self, - shared_context: &SharedStyleContext) { - if !self.is_root() { - return - } - - let (block_start_margin_value, block_end_margin_value) = - match self.base.collapsible_margins { - CollapsibleMargins::CollapseThrough(_) => { - panic!("Margins unexpectedly collapsed through root flow.") - } - CollapsibleMargins::Collapse(block_start_margin, block_end_margin) => { - (block_start_margin.collapse(), block_end_margin.collapse()) - } - CollapsibleMargins::None(block_start, block_end) => (block_start, block_end), - }; - - // Shift all kids down (or up, if margins are negative) if necessary. - if block_start_margin_value != Au(0) { - for kid in self.base.child_iter_mut() { - let kid_base = flow::mut_base(kid); - kid_base.position.start.b = kid_base.position.start.b + block_start_margin_value - } - } - - // FIXME(#2003, pcwalton): The max is taken here so that you can scroll the page, but this - // is not correct behavior according to CSS 2.1 § 10.5. Instead I think we should treat the - // root element as having `overflow: scroll` and use the layers-based scrolling - // infrastructure to make it scrollable. - let viewport_size = - LogicalSize::from_physical(self.fragment.style.writing_mode, - shared_context.viewport_size()); - let block_size = max(viewport_size.block, - self.fragment.border_box.size.block + block_start_margin_value + - block_end_margin_value); - - self.base.position.size.block = block_size; - self.fragment.border_box.size.block = block_size; - } - - // FIXME: Record enough info to deal with fragmented decorations. - // See https://drafts.csswg.org/css-break/#break-decoration - // For borders, this might be `enum FragmentPosition { First, Middle, Last }` - fn clone_with_children(&self, new_children: FlowList) -> BlockFlow { - BlockFlow { - base: self.base.clone_with_children(new_children), - fragment: self.fragment.clone(), - float: self.float.clone(), - ..*self - } - } - - /// Writes in the size of the relative containing block for children. (This information - /// is also needed to handle RTL.) 
- fn propagate_early_absolute_position_info_to_children(&mut self) { - for kid in self.base.child_iter_mut() { - flow::mut_base(kid).early_absolute_position_info = EarlyAbsolutePositionInfo { - relative_containing_block_size: self.fragment.content_box().size, - relative_containing_block_mode: self.fragment.style().writing_mode, - } - } - } - - /// Assign block-size for current flow. - /// - /// * Collapse margins for flow's children and set in-flow child flows' block offsets now that - /// we know their block-sizes. - /// * Calculate and set the block-size of the current flow. - /// * Calculate block-size, vertical margins, and block offset for the flow's box using CSS § - /// 10.6.7. - /// - /// For absolute flows, we store the calculated content block-size for the flow. We defer the - /// calculation of the other values until a later traversal. - /// - /// When `fragmentation_context` is given (not `None`), this should fit as much of the content - /// as possible within the available block size. - /// If there is more content (that doesn’t fit), this flow is *fragmented* - /// with the extra content moved to another fragment (a flow like this one) which is returned. - /// See `Flow::fragment`. - /// - /// The return value is always `None` when `fragmentation_context` is `None`. - /// - /// `inline(always)` because this is only ever called by in-order or non-in-order top-level - /// methods. - #[inline(always)] - pub fn assign_block_size_block_base(&mut self, - layout_context: &LayoutContext, - mut fragmentation_context: Option, - margins_may_collapse: MarginsMayCollapseFlag) - -> Option> { - let _scope = layout_debug_scope!("assign_block_size_block_base {:x}", - self.base.debug_id()); - - let mut break_at = None; - let content_box = self.fragment.content_box(); - if self.base.restyle_damage.contains(REFLOW) { - // Our current border-box position. - let mut cur_b = Au(0); - - // Absolute positioning establishes a block formatting context. Don't propagate floats - // in or out. (But do propagate them between kids.) - if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) || - margins_may_collapse != MarginsMayCollapseFlag::MarginsMayCollapse { - self.base.floats = Floats::new(self.fragment.style.writing_mode); - } - - let mut margin_collapse_info = MarginCollapseInfo::new(); - let writing_mode = self.base.floats.writing_mode; - self.base.floats.translate(LogicalSize::new( - writing_mode, -self.fragment.inline_start_offset(), Au(0))); - - // The sum of our block-start border and block-start padding. - let block_start_offset = self.fragment.border_padding.block_start; - translate_including_floats(&mut cur_b, block_start_offset, &mut self.base.floats); - - let can_collapse_block_start_margin_with_kids = - margins_may_collapse == MarginsMayCollapseFlag::MarginsMayCollapse && - !self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) && - self.fragment.border_padding.block_start == Au(0); - margin_collapse_info.initialize_block_start_margin( - &self.fragment, - can_collapse_block_start_margin_with_kids); - - // At this point, `cur_b` is at the content edge of our box. Now iterate over children. 
- let mut floats = self.base.floats.clone(); - let thread_id = self.base.thread_id; - let (mut had_floated_children, mut had_children_with_clearance) = (false, false); - for (child_index, kid) in self.base.child_iter_mut().enumerate() { - if flow::base(kid).flags.contains(IS_ABSOLUTELY_POSITIONED) { - // Assume that the *hypothetical box* for an absolute flow starts immediately - // after the margin-end border edge of the previous flow. - if flow::base(kid).flags.contains(BLOCK_POSITION_IS_STATIC) { - let previous_bottom_margin = margin_collapse_info.current_float_ceiling(); - - flow::mut_base(kid).position.start.b = cur_b + - flow::base(kid).collapsible_margins - .block_start_margin_for_noncollapsible_context() + - previous_bottom_margin - } - kid.place_float_if_applicable(); - if !flow::base(kid).flags.is_float() { - kid.assign_block_size_for_inorder_child_if_necessary(layout_context, - thread_id, - content_box); - } - - // Skip the collapsing and float processing for absolute flow kids and continue - // with the next flow. - continue - } - - let previous_b = cur_b; - if let Some(ctx) = fragmentation_context { - let child_ctx = FragmentationContext { - available_block_size: ctx.available_block_size - cur_b, - this_fragment_is_empty: ctx.this_fragment_is_empty, - }; - if let Some(remaining) = kid.fragment(layout_context, Some(child_ctx)) { - break_at = Some((child_index + 1, Some(remaining))); - } - } - - // Assign block-size now for the child if it might have floats in and we couldn't - // before. - flow::mut_base(kid).floats = floats.clone(); - if flow::base(kid).flags.is_float() { - had_floated_children = true; - flow::mut_base(kid).position.start.b = cur_b; - { - let kid_block = kid.as_mut_block(); - let float_ceiling = margin_collapse_info.current_float_ceiling(); - kid_block.float.as_mut().unwrap().float_ceiling = float_ceiling - } - kid.place_float_if_applicable(); - - let kid_base = flow::mut_base(kid); - floats = kid_base.floats.clone(); - continue - } - - // If we have clearance, assume there are no floats in. - // - // FIXME(#2008, pcwalton): This could be wrong if we have `clear: left` or `clear: - // right` and there are still floats to impact, of course. But this gets - // complicated with margin collapse. Possibly the right thing to do is to lay out - // the block again in this rare case. (Note that WebKit can lay blocks out twice; - // this may be related, although I haven't looked into it closely.) - if flow::base(kid).flags.clears_floats() { - flow::mut_base(kid).floats = Floats::new(self.fragment.style.writing_mode) - } - - // Lay the child out if this was an in-order traversal. - let need_to_process_child_floats = - kid.assign_block_size_for_inorder_child_if_necessary(layout_context, - thread_id, - content_box); - - if !had_children_with_clearance && - floats.is_present() && - (flow::base(kid).flags.contains(CLEARS_LEFT) || - flow::base(kid).flags.contains(CLEARS_RIGHT)) { - had_children_with_clearance = true - } - - // Handle any (possibly collapsed) top margin. - let delta = margin_collapse_info.advance_block_start_margin( - &flow::base(kid).collapsible_margins, - !had_children_with_clearance); - translate_including_floats(&mut cur_b, delta, &mut floats); - - // Collapse-through margins should be placed at the top edge, - // so we'll handle the delta after the bottom margin is processed - if let CollapsibleMargins::CollapseThrough(_) = flow::base(kid).collapsible_margins { - cur_b = cur_b - delta; - } - - // Clear past the floats that came in, if necessary. 
- let clearance = match (flow::base(kid).flags.contains(CLEARS_LEFT), - flow::base(kid).flags.contains(CLEARS_RIGHT)) { - (false, false) => Au(0), - (true, false) => floats.clearance(ClearType::Left), - (false, true) => floats.clearance(ClearType::Right), - (true, true) => floats.clearance(ClearType::Both), - }; - translate_including_floats(&mut cur_b, clearance, &mut floats); - - // At this point, `cur_b` is at the border edge of the child. - flow::mut_base(kid).position.start.b = cur_b; - - // Now pull out the child's outgoing floats. We didn't do this immediately after - // the `assign_block_size_for_inorder_child_if_necessary` call because clearance on - // a block operates on the floats that come *in*, not the floats that go *out*. - if need_to_process_child_floats { - floats = flow::mut_base(kid).floats.clone() - } - - // Move past the child's border box. Do not use the `translate_including_floats` - // function here because the child has already translated floats past its border - // box. - let kid_base = flow::mut_base(kid); - cur_b = cur_b + kid_base.position.size.block; - - // Handle any (possibly collapsed) block-end margin. - let delta = - margin_collapse_info.advance_block_end_margin(&kid_base.collapsible_margins); - translate_including_floats(&mut cur_b, delta, &mut floats); - - // Collapse-through margin should be placed at the top edge of the flow. - let collapse_delta = match kid_base.collapsible_margins { - CollapsibleMargins::CollapseThrough(_) => { - let delta = margin_collapse_info.current_float_ceiling(); - cur_b = cur_b + delta; - kid_base.position.start.b = kid_base.position.start.b + delta; - delta - } - _ => Au(0) - }; - - if break_at.is_some() { - break - } - - if let Some(ref mut ctx) = fragmentation_context { - if cur_b > ctx.available_block_size && !ctx.this_fragment_is_empty { - break_at = Some((child_index, None)); - cur_b = previous_b; - break - } - ctx.this_fragment_is_empty = false - } - - // For consecutive collapse-through flows, their top margin should be calculated - // from the same baseline. - cur_b = cur_b - collapse_delta; - } - - // Add in our block-end margin and compute our collapsible margins. - let can_collapse_block_end_margin_with_kids = - margins_may_collapse == MarginsMayCollapseFlag::MarginsMayCollapse && - !self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) && - self.fragment.border_padding.block_end == Au(0); - let (collapsible_margins, delta) = - margin_collapse_info.finish_and_compute_collapsible_margins( - &self.fragment, - self.base.block_container_explicit_block_size, - can_collapse_block_end_margin_with_kids, - !had_floated_children); - self.base.collapsible_margins = collapsible_margins; - translate_including_floats(&mut cur_b, delta, &mut floats); - - let mut block_size = cur_b - block_start_offset; - let is_root = self.is_root(); - - if is_root || self.formatting_context_type() != FormattingContextType::None || - self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) { - // The content block-size includes all the floats per CSS 2.1 § 10.6.7. The easiest - // way to handle this is to just treat it as clearance. - block_size = block_size + floats.clearance(ClearType::Both); - } - - if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) { - // FIXME(#2003, pcwalton): The max is taken here so that you can scroll the page, - // but this is not correct behavior according to CSS 2.1 § 10.5. 
Instead I think we - // should treat the root element as having `overflow: scroll` and use the layers- - // based scrolling infrastructure to make it scrollable. - if is_root { - let viewport_size = - LogicalSize::from_physical(self.fragment.style.writing_mode, - layout_context.shared_context().viewport_size()); - block_size = max(viewport_size.block, block_size) - } - - // Store the content block-size for use in calculating the absolute flow's - // dimensions later. - // - // FIXME(pcwalton): This looks not idempotent. Is it? - self.fragment.border_box.size.block = block_size; - } - - - if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) { - self.propagate_early_absolute_position_info_to_children(); - return None - } - - // Compute any explicitly-specified block size. - // Can't use `for` because we assign to `candidate_block_size_iterator.candidate_value`. - let mut candidate_block_size_iterator = CandidateBSizeIterator::new( - &self.fragment, - self.base.block_container_explicit_block_size); - while let Some(candidate_block_size) = candidate_block_size_iterator.next() { - candidate_block_size_iterator.candidate_value = - match candidate_block_size { - MaybeAuto::Auto => block_size, - MaybeAuto::Specified(value) => value - } - } - - // Adjust `cur_b` as necessary to account for the explicitly-specified block-size. - block_size = candidate_block_size_iterator.candidate_value; - let delta = block_size - (cur_b - block_start_offset); - translate_including_floats(&mut cur_b, delta, &mut floats); - - // Take border and padding into account. - let block_end_offset = self.fragment.border_padding.block_end; - translate_including_floats(&mut cur_b, block_end_offset, &mut floats); - - // Now that `cur_b` is at the block-end of the border box, compute the final border box - // position. - self.fragment.border_box.size.block = cur_b; - self.fragment.border_box.start.b = Au(0); - self.base.position.size.block = cur_b; - - self.propagate_early_absolute_position_info_to_children(); - - // Translate the current set of floats back into the parent coordinate system in the - // inline direction, and store them in the flow so that flows that come later in the - // document can access them. - floats.translate(LogicalSize::new(writing_mode, - self.fragment.inline_start_offset(), - Au(0))); - self.base.floats = floats.clone(); - self.adjust_fragments_for_collapsed_margins_if_root(layout_context.shared_context()); - } else { - // We don't need to reflow, but we still need to perform in-order traversals if - // necessary. - let thread_id = self.base.thread_id; - for kid in self.base.child_iter_mut() { - kid.assign_block_size_for_inorder_child_if_necessary(layout_context, - thread_id, - content_box); - } - } - - if (&*self as &Flow).contains_roots_of_absolute_flow_tree() { - // Assign block-sizes for all flows in this absolute flow tree. - // This is preorder because the block-size of an absolute flow may depend on - // the block-size of its containing block, which may also be an absolute flow. - let assign_abs_b_sizes = AbsoluteAssignBSizesTraversal(layout_context.shared_context()); - assign_abs_b_sizes.traverse_absolute_flows(&mut *self); - } - - // Don't remove the dirty bits yet if we're absolutely-positioned, since our final size - // has not been calculated yet. (See `calculate_absolute_block_size_and_margins` for that.) - // Also don't remove the dirty bits if we're a block formatting context since our inline - // size has not yet been computed. (See `assign_inline_position_for_formatting_context()`.) 
- if (self.base.flags.is_float() || - self.formatting_context_type() == FormattingContextType::None) && - !self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) { - self.base.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW); - self.fragment.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW); - } - - break_at.and_then(|(i, child_remaining)| { - if i == self.base.children.len() && child_remaining.is_none() { - None - } else { - let mut children = self.base.children.split_off(i); - if let Some(child) = child_remaining { - children.push_front_arc(child); - } - Some(Arc::new(self.clone_with_children(children)) as Arc) - } - }) - } - - /// Add placement information about current float flow for use by the parent. - /// - /// Also, use information given by parent about other floats to find out our relative position. - /// - /// This does not give any information about any float descendants because they do not affect - /// elements outside of the subtree rooted at this float. - /// - /// This function is called on a kid flow by a parent. Therefore, `assign_block_size_float` was - /// already called on this kid flow by the traversal function. So, the values used are - /// well-defined. - pub fn place_float(&mut self) { - let block_size = self.fragment.border_box.size.block; - let clearance = match self.fragment.clear() { - None => Au(0), - Some(clear) => self.base.floats.clearance(clear), - }; - - let float_info: FloatedBlockInfo = (**self.float.as_ref().unwrap()).clone(); - - // Our `position` field accounts for positive margins, but not negative margins. (See - // calculation of `extra_inline_size_from_margin` below.) Negative margins must be taken - // into account for float placement, however. So we add them in here. - let inline_size_for_float_placement = self.base.position.size.inline + - min(Au(0), self.fragment.margin.inline_start_end()); - - let info = PlacementInfo { - size: LogicalSize::new( - self.fragment.style.writing_mode, - inline_size_for_float_placement, - block_size + self.fragment.margin.block_start_end()) - .convert(self.fragment.style.writing_mode, self.base.floats.writing_mode), - ceiling: clearance + float_info.float_ceiling, - max_inline_size: float_info.containing_inline_size, - kind: float_info.float_kind, - }; - - // Place the float and return the `Floats` back to the parent flow. - // After, grab the position and use that to set our position. - self.base.floats.add_float(&info); - - // FIXME (mbrubeck) Get the correct container size for self.base.floats; - let container_size = Size2D::new(self.base.block_container_inline_size, Au(0)); - - // Move in from the margin edge, as per CSS 2.1 § 9.5, floats may not overlap anything on - // their margin edges. 
- let float_offset = self.base.floats.last_float_pos().unwrap() - .convert(self.base.floats.writing_mode, - self.base.writing_mode, - container_size) - .start; - let margin_offset = LogicalPoint::new(self.base.writing_mode, - Au(0), - self.fragment.margin.block_start); - - let mut origin = LogicalPoint::new(self.base.writing_mode, - self.base.position.start.i, - self.base.position.start.b); - origin = origin.add_point(&float_offset).add_point(&margin_offset); - self.base.position = LogicalRect::from_point_size(self.base.writing_mode, - origin, - self.base.position.size); - } - - pub fn explicit_block_containing_size(&self, shared_context: &SharedStyleContext) -> Option { - if self.is_root() || self.is_fixed() { - let viewport_size = LogicalSize::from_physical(self.fragment.style.writing_mode, - shared_context.viewport_size()); - Some(viewport_size.block) - } else if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) && - self.base.block_container_explicit_block_size.is_none() { - self.base.absolute_cb.explicit_block_containing_size(shared_context) - } else { - self.base.block_container_explicit_block_size - } - } - - pub fn explicit_block_size(&self, containing_block_size: Option) -> Option { - let content_block_size = self.fragment.style().content_block_size(); - - match (content_block_size, containing_block_size) { - (LengthOrPercentageOrAuto::Calc(calc), _) => { - calc.to_used_value(containing_block_size) - } - (LengthOrPercentageOrAuto::Length(length), _) => Some(Au::from(length)), - (LengthOrPercentageOrAuto::Percentage(percent), Some(container_size)) => { - Some(container_size.scale_by(percent.0)) - } - (LengthOrPercentageOrAuto::Percentage(_), None) | - (LengthOrPercentageOrAuto::Auto, None) => { - None - } - (LengthOrPercentageOrAuto::Auto, Some(container_size)) => { - let (block_start, block_end) = { - let position = self.fragment.style().logical_position(); - (MaybeAuto::from_style(position.block_start, container_size), - MaybeAuto::from_style(position.block_end, container_size)) - }; - - match (block_start, block_end) { - (MaybeAuto::Specified(block_start), MaybeAuto::Specified(block_end)) => { - let available_block_size = container_size - self.fragment.border_padding.block_start_end(); - - // Non-auto margin-block-start and margin-block-end values have already been - // calculated during assign-inline-size. 
- let margin = self.fragment.style().logical_margin(); - let margin_block_start = match margin.block_start { - LengthOrPercentageOrAuto::Auto => MaybeAuto::Auto, - _ => MaybeAuto::Specified(self.fragment.margin.block_start) - }; - let margin_block_end = match margin.block_end { - LengthOrPercentageOrAuto::Auto => MaybeAuto::Auto, - _ => MaybeAuto::Specified(self.fragment.margin.block_end) - }; - - let margin_block_start = margin_block_start.specified_or_zero(); - let margin_block_end = margin_block_end.specified_or_zero(); - let sum = block_start + block_end + margin_block_start + margin_block_end; - Some(available_block_size - sum) - } - - (_, _) => { - None - } - } - } - } - } - - fn calculate_absolute_block_size_and_margins(&mut self, shared_context: &SharedStyleContext) { - let opaque_self = OpaqueFlow::from_flow(self); - let containing_block_block_size = - self.containing_block_size(&shared_context.viewport_size(), opaque_self).block; - - // This is the stored content block-size value from assign-block-size - let content_block_size = self.fragment.border_box.size.block; - - let mut solution = None; - { - // Non-auto margin-block-start and margin-block-end values have already been - // calculated during assign-inline-size. - let margin = self.fragment.style().logical_margin(); - let margin_block_start = match margin.block_start { - LengthOrPercentageOrAuto::Auto => MaybeAuto::Auto, - _ => MaybeAuto::Specified(self.fragment.margin.block_start) - }; - let margin_block_end = match margin.block_end { - LengthOrPercentageOrAuto::Auto => MaybeAuto::Auto, - _ => MaybeAuto::Specified(self.fragment.margin.block_end) - }; - - let block_start; - let block_end; - { - let position = self.fragment.style().logical_position(); - block_start = MaybeAuto::from_style(position.block_start, - containing_block_block_size); - block_end = MaybeAuto::from_style(position.block_end, containing_block_block_size); - } - - let available_block_size = containing_block_block_size - - self.fragment.border_padding.block_start_end(); - if self.fragment.is_replaced() { - // Calculate used value of block-size just like we do for inline replaced elements. - // TODO: Pass in the containing block block-size when Fragment's - // assign-block-size can handle it correctly. - self.fragment.assign_replaced_block_size_if_necessary(); - // TODO: Right now, this content block-size value includes the - // margin because of erroneous block-size calculation in fragment. - // Check this when that has been fixed. - let block_size_used_val = self.fragment.border_box.size.block; - solution = Some(BSizeConstraintSolution::solve_vertical_constraints_abs_replaced( - block_size_used_val, - margin_block_start, - margin_block_end, - block_start, - block_end, - content_block_size, - available_block_size)) - } else { - let mut candidate_block_size_iterator = - CandidateBSizeIterator::new(&self.fragment, Some(containing_block_block_size)); - - // Can't use `for` because we assign to - // `candidate_block_size_iterator.candidate_value`. 
- while let Some(block_size_used_val) = candidate_block_size_iterator.next() { - solution = Some( - BSizeConstraintSolution::solve_vertical_constraints_abs_nonreplaced( - block_size_used_val, - margin_block_start, - margin_block_end, - block_start, - block_end, - content_block_size, - available_block_size)); - - candidate_block_size_iterator.candidate_value = - solution.unwrap().block_size; - } - } - } - - let solution = solution.unwrap(); - self.fragment.margin.block_start = solution.margin_block_start; - self.fragment.margin.block_end = solution.margin_block_end; - self.fragment.border_box.start.b = Au(0); - - if !self.base.flags.contains(BLOCK_POSITION_IS_STATIC) { - self.base.position.start.b = solution.block_start + self.fragment.margin.block_start - } - - let block_size = solution.block_size + self.fragment.border_padding.block_start_end(); - self.fragment.border_box.size.block = block_size; - self.base.position.size.block = block_size; - - self.base.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW); - self.fragment.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW); - } - - /// Compute inline size based using the `block_container_inline_size` set by the parent flow. - /// - /// This is run in the `AssignISizes` traversal. - fn propagate_and_compute_used_inline_size(&mut self, shared_context: &SharedStyleContext) { - let containing_block_inline_size = self.base.block_container_inline_size; - self.compute_used_inline_size(shared_context, containing_block_inline_size); - if self.base.flags.is_float() { - self.float.as_mut().unwrap().containing_inline_size = containing_block_inline_size - } - } - - /// Assigns the computed inline-start content edge and inline-size to all the children of this - /// block flow. The given `callback`, if supplied, will be called once per child; it is - /// currently used to push down column sizes for tables. - /// - /// `#[inline(always)]` because this is called only from block or table inline-size assignment - /// and the code for block layout is significantly simpler. - #[inline(always)] - pub fn propagate_assigned_inline_size_to_children(&mut self, - shared_context: &SharedStyleContext, - inline_start_content_edge: Au, - inline_end_content_edge: Au, - content_inline_size: Au, - mut callback: F) - where F: FnMut(&mut Flow, - usize, - Au, - WritingMode, - &mut Au, - &mut Au) { - let flags = self.base.flags.clone(); - - let opaque_self = OpaqueFlow::from_flow(self); - - // Calculate non-auto block size to pass to children. - let box_border = match self.fragment.style().get_position().box_sizing { - box_sizing::T::border_box => self.fragment.border_padding.block_start_end(), - box_sizing::T::content_box => Au(0), - }; - let parent_container_size = self.explicit_block_containing_size(shared_context); - // https://drafts.csswg.org/css-ui-3/#box-sizing - let mut explicit_content_size = self - .explicit_block_size(parent_container_size) - .map(|x| if x < box_border { Au(0) } else { x - box_border }); - if self.is_root() { explicit_content_size = max(parent_container_size, explicit_content_size); } - // Calculate containing block inline size. 
- let containing_block_size = if flags.contains(IS_ABSOLUTELY_POSITIONED) { - self.containing_block_size(&shared_context.viewport_size(), opaque_self).inline - } else { - content_inline_size - }; - // FIXME (mbrubeck): Get correct mode for absolute containing block - let containing_block_mode = self.base.writing_mode; - - let mut inline_start_margin_edge = inline_start_content_edge; - let mut inline_end_margin_edge = inline_end_content_edge; - - let mut iterator = self.base.child_iter_mut().enumerate().peekable(); - while let Some((i, kid)) = iterator.next() { - flow::mut_base(kid).block_container_explicit_block_size = explicit_content_size; - - // The inline-start margin edge of the child flow is at our inline-start content edge, - // and its inline-size is our content inline-size. - let kid_mode = flow::base(kid).writing_mode; - { - // Don't assign positions to children unless they're going to be reflowed. - // Otherwise, the position we assign might be incorrect and never fixed up. (Issue - // #13704.) - // - // For instance, floats have their true inline position calculated in - // `assign_block_size()`, which won't do anything unless `REFLOW` is set. So, if a - // float child does not have `REFLOW` set, we must be careful to avoid touching its - // inline position, as no logic will run afterward to set its true value. - let kid_base = flow::mut_base(kid); - let reflow_damage = if kid_base.flags.contains(IS_ABSOLUTELY_POSITIONED) { - REFLOW_OUT_OF_FLOW - } else { - REFLOW - }; - if kid_base.flags.contains(INLINE_POSITION_IS_STATIC) && - kid_base.restyle_damage.contains(reflow_damage) { - kid_base.position.start.i = - if kid_mode.is_bidi_ltr() == containing_block_mode.is_bidi_ltr() { - inline_start_content_edge - } else { - // The kid's inline 'start' is at the parent's 'end' - inline_end_content_edge - }; - } - kid_base.block_container_inline_size = content_inline_size; - kid_base.block_container_writing_mode = containing_block_mode; - } - - // Call the callback to propagate extra inline size information down to the child. This - // is currently used for tables. - callback(kid, - i, - content_inline_size, - containing_block_mode, - &mut inline_start_margin_edge, - &mut inline_end_margin_edge); - - // Per CSS 2.1 § 16.3.1, text alignment propagates to all children in flow. - // - // TODO(#2265, pcwalton): Do this in the cascade instead. - let containing_block_text_align = self.fragment.style().get_inheritedtext().text_align; - flow::mut_base(kid).flags.set_text_align(containing_block_text_align); - - // Handle `text-indent` on behalf of any inline children that we have. This is - // necessary because any percentages are relative to the containing block, which only - // we know. - if kid.is_inline_flow() { - kid.as_mut_inline().first_line_indentation = - self.fragment.style().get_inheritedtext().text_indent - .to_used_value(containing_block_size); - } - } - } - - /// Determines the type of formatting context this is. See the definition of - /// `FormattingContextType`. 
- pub fn formatting_context_type(&self) -> FormattingContextType { - if self.is_inline_flex_item() || self.is_block_flex_item() { - return FormattingContextType::Other - } - let style = self.fragment.style(); - if style.get_box().float != float::T::none { - return FormattingContextType::Other - } - match style.get_box().display { - display::T::table_cell | - display::T::table_caption | - display::T::table_row_group | - display::T::table | - display::T::inline_block | - display::T::flex => { - FormattingContextType::Other - } - _ if style.get_box().overflow_x != overflow_x::T::visible || - style.get_box().overflow_y != overflow_x::T::visible || - style.is_multicol() => { - FormattingContextType::Block - } - _ => FormattingContextType::None, - } - } - - /// Per CSS 2.1 § 9.5, block formatting contexts' inline widths and positions are affected by - /// the presence of floats. This is the part of the assign-heights traversal that computes - /// the final inline position and width for such flows. - /// - /// Note that this is part of the assign-block-sizes traversal, not the assign-inline-sizes - /// traversal as one might expect. That is because, in general, float placement cannot occur - /// until heights are assigned. To work around this unfortunate circular dependency, by the - /// time we get here we have already estimated the width of the block formatting context based - /// on the floats we could see at the time of inline-size assignment. The job of this function, - /// therefore, is not only to assign the final size but also to perform the layout again for - /// this block formatting context if our speculation was wrong. - fn assign_inline_position_for_formatting_context(&mut self, - layout_context: &LayoutContext, - content_box: LogicalRect) { - debug_assert!(self.formatting_context_type() != FormattingContextType::None); - - if !self.base.restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW) { - return - } - - // We do this first to avoid recomputing our inline size when we propagate it. - self.base.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW); - self.fragment.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW); - - // The code below would completely wreck the layout if run on a flex item, however: - // * Flex items are always the children of flex containers. - // * Flex containers only contain flex items. - // * Floats cannot intrude into flex containers. - // * Floats cannot escape flex items. - // * Flex items cannot also be floats. - // Therefore, a flex item cannot be impacted by a float. - // See also: https://www.w3.org/TR/css-flexbox-1/#flex-containers - if !self.base.might_have_floats_in() { - return - } - - // If you remove the might_have_floats_in conditional, this will go off. - debug_assert!(!self.is_inline_flex_item()); - - // Compute the available space for us, based on the actual floats. - let rect = self.base.floats.available_rect(Au(0), - self.fragment.border_box.size.block, - content_box.size.inline); - let available_inline_size = if let Some(rect) = rect { - // Offset our position by whatever displacement is needed to not impact the floats. - // Also, account for margins sliding behind floats. - let inline_offset = if self.fragment.margin.inline_start < rect.start.i { - // Do not do anything for negative margins; those are handled separately. - rect.start.i - max(Au(0), self.fragment.margin.inline_start) - } else { - Au(0) - }; - self.base.position.start.i = content_box.start.i + inline_offset; - // Handle the end margin sliding behind the float. 
- let end = content_box.size.inline - rect.start.i - rect.size.inline; - let inline_end_offset = if self.fragment.margin.inline_end < end { - end - max(Au(0), self.fragment.margin.inline_end) - } else { - Au(0) - }; - content_box.size.inline - inline_offset - inline_end_offset - } else { - content_box.size.inline - } - self.fragment.margin.inline_start_end(); - let max_inline_size = - self.fragment.style().max_inline_size() - .to_used_value(self.base.block_container_inline_size) - .unwrap_or(MAX_AU); - let min_inline_size = - self.fragment.style().min_inline_size().to_used_value(self.base.block_container_inline_size); - let specified_inline_size = self.fragment.style().content_inline_size(); - let container_size = self.base.block_container_inline_size; - let inline_size = - if let MaybeAuto::Specified(size) = MaybeAuto::from_style(specified_inline_size, - container_size) { - match self.fragment.style().get_position().box_sizing { - box_sizing::T::border_box => size, - box_sizing::T::content_box => - size + self.fragment.border_padding.inline_start_end(), - } - } else { - max(min_inline_size, min(available_inline_size, max_inline_size)) - }; - self.base.position.size.inline = inline_size + self.fragment.margin.inline_start_end(); - - // If float speculation failed, fixup our layout, and re-layout all the children. - if self.fragment.margin_box_inline_size() != self.base.position.size.inline { - debug!("assign_inline_position_for_formatting_context: float speculation failed"); - // Fix-up our own layout. - // We can't just traverse_flow_tree_preorder ourself, because that would re-run - // float speculation, instead of acting on the actual results. - self.fragment.border_box.size.inline = inline_size; - // Assign final-final inline sizes on all our children. - self.assign_inline_sizes(layout_context); - // Re-run layout on our children. - for child in flow::mut_base(self).children.iter_mut() { - sequential::reflow(child, layout_context, RelayoutMode::Force); - } - // Assign our final-final block size. - self.assign_block_size(layout_context); - } - - debug_assert_eq!(self.fragment.margin_box_inline_size(), self.base.position.size.inline); - } - - fn is_inline_block_or_inline_flex(&self) -> bool { - self.fragment.style().get_box().display == display::T::inline_block || - self.fragment.style().get_box().display == display::T::inline_flex - } - - /// Computes the content portion (only) of the intrinsic inline sizes of this flow. This is - /// used for calculating shrink-to-fit width. Assumes that intrinsic sizes have already been - /// computed for this flow. - fn content_intrinsic_inline_sizes(&self) -> IntrinsicISizes { - let (border_padding, margin) = self.fragment.surrounding_intrinsic_inline_size(); - IntrinsicISizes { - minimum_inline_size: self.base.intrinsic_inline_sizes.minimum_inline_size - - border_padding - margin, - preferred_inline_size: self.base.intrinsic_inline_sizes.preferred_inline_size - - border_padding - margin, - } - } - - /// Computes intrinsic inline sizes for a block. - pub fn bubble_inline_sizes_for_block(&mut self, consult_children: bool) { - let _scope = layout_debug_scope!("block::bubble_inline_sizes {:x}", self.base.debug_id()); - - let mut flags = self.base.flags; - if self.definitely_has_zero_block_size() { - // This is kind of a hack for Acid2. But it's a harmless one, because (a) this behavior - // is unspecified; (b) it matches the behavior one would intuitively expect, since - // floats don't flow around blocks that take up no space in the block direction. 
- flags.remove(CONTAINS_TEXT_OR_REPLACED_FRAGMENTS); - } else if self.fragment.is_text_or_replaced() { - flags.insert(CONTAINS_TEXT_OR_REPLACED_FRAGMENTS); - } else { - flags.remove(CONTAINS_TEXT_OR_REPLACED_FRAGMENTS); - for kid in self.base.children.iter() { - if flow::base(kid).flags.contains(CONTAINS_TEXT_OR_REPLACED_FRAGMENTS) { - flags.insert(CONTAINS_TEXT_OR_REPLACED_FRAGMENTS); - break - } - } - } - - // Find the maximum inline-size from children. - // - // See: https://lists.w3.org/Archives/Public/www-style/2014Nov/0085.html - // - // FIXME(pcwalton): This doesn't exactly follow that algorithm at the moment. - // FIXME(pcwalton): This should consider all float descendants, not just children. - let mut computation = self.fragment.compute_intrinsic_inline_sizes(); - let (mut left_float_width, mut right_float_width) = (Au(0), Au(0)); - let (mut left_float_width_accumulator, mut right_float_width_accumulator) = (Au(0), Au(0)); - let mut preferred_inline_size_of_children_without_text_or_replaced_fragments = Au(0); - for kid in self.base.child_iter_mut() { - if flow::base(kid).flags.contains(IS_ABSOLUTELY_POSITIONED) || !consult_children { - continue - } - - let child_base = flow::mut_base(kid); - let float_kind = child_base.flags.float_kind(); - computation.content_intrinsic_sizes.minimum_inline_size = - max(computation.content_intrinsic_sizes.minimum_inline_size, - child_base.intrinsic_inline_sizes.minimum_inline_size); - - if child_base.flags.contains(CLEARS_LEFT) { - left_float_width = max(left_float_width, left_float_width_accumulator); - left_float_width_accumulator = Au(0) - } - if child_base.flags.contains(CLEARS_RIGHT) { - right_float_width = max(right_float_width, right_float_width_accumulator); - right_float_width_accumulator = Au(0) - } - - match (float_kind, child_base.flags.contains(CONTAINS_TEXT_OR_REPLACED_FRAGMENTS)) { - (float::T::none, true) => { - computation.content_intrinsic_sizes.preferred_inline_size = - max(computation.content_intrinsic_sizes.preferred_inline_size, - child_base.intrinsic_inline_sizes.preferred_inline_size); - } - (float::T::none, false) => { - preferred_inline_size_of_children_without_text_or_replaced_fragments = max( - preferred_inline_size_of_children_without_text_or_replaced_fragments, - child_base.intrinsic_inline_sizes.preferred_inline_size) - } - (float::T::left, _) => { - left_float_width_accumulator = left_float_width_accumulator + - child_base.intrinsic_inline_sizes.preferred_inline_size; - } - (float::T::right, _) => { - right_float_width_accumulator = right_float_width_accumulator + - child_base.intrinsic_inline_sizes.preferred_inline_size; - } - } - } - - left_float_width = max(left_float_width, left_float_width_accumulator); - right_float_width = max(right_float_width, right_float_width_accumulator); - - computation.content_intrinsic_sizes.preferred_inline_size = - computation.content_intrinsic_sizes.preferred_inline_size + left_float_width + - right_float_width; - computation.content_intrinsic_sizes.preferred_inline_size = - max(computation.content_intrinsic_sizes.preferred_inline_size, - preferred_inline_size_of_children_without_text_or_replaced_fragments); - - self.base.intrinsic_inline_sizes = computation.finish(); - self.base.flags = flags - } - - pub fn block_stacking_context_type(&self) -> BlockStackingContextType { - if self.fragment.establishes_stacking_context() { - return BlockStackingContextType::StackingContext - } - - if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) || - self.fragment.style.get_box().position != 
position::T::static_ || - self.base.flags.is_float() { - BlockStackingContextType::PseudoStackingContext - } else { - BlockStackingContextType::NonstackingContext - } - } - - pub fn overflow_style_may_require_clip_scroll_node(&self) -> bool { - match (self.fragment.style().get_box().overflow_x, - self.fragment.style().get_box().overflow_y) { - (overflow_x::T::auto, _) | (overflow_x::T::scroll, _) | (overflow_x::T::hidden, _) | - (_, overflow_x::T::auto) | (_, overflow_x::T::scroll) | (_, overflow_x::T::hidden) => - true, - (_, _) => false, - } - } - - pub fn compute_inline_sizes(&mut self, shared_context: &SharedStyleContext) { - if !self.base.restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW) { - return - } - - debug!("assign_inline_sizes({}): assigning inline_size for flow", - if self.base.flags.is_float() { - "float" - } else { - "block" - }); - - self.base.floats = Floats::new(self.base.writing_mode); - - self.initialize_container_size_for_root(shared_context); - - // Our inline-size was set to the inline-size of the containing block by the flow's parent. - // Now compute the real value. - self.propagate_and_compute_used_inline_size(shared_context); - - self.guess_inline_size_for_block_formatting_context_if_necessary() - } - - /// If this is the root flow, initialize values that would normally be set by the parent. - /// - /// Should be called during `assign_inline_sizes` for flows that may be the root. - pub fn initialize_container_size_for_root(&mut self, shared_context: &SharedStyleContext) { - if self.is_root() { - debug!("Setting root position"); - self.base.position.start = LogicalPoint::zero(self.base.writing_mode); - self.base.block_container_inline_size = LogicalSize::from_physical( - self.base.writing_mode, shared_context.viewport_size()).inline; - self.base.block_container_writing_mode = self.base.writing_mode; - } - } - - fn guess_inline_size_for_block_formatting_context_if_necessary(&mut self) { - // We don't need to guess anything unless this is a block formatting context. - if self.formatting_context_type() != FormattingContextType::Block { - return - } - - // If `max-width` is set, then don't perform this speculation. We guess that the - // page set `max-width` in order to avoid hitting floats. The search box on Google - // SERPs falls into this category. - if self.fragment.style.max_inline_size() != LengthOrPercentageOrNone::None { - return - } - - // At this point, we know we can't precisely compute the inline-size of this block now, - // because floats might affect it. Speculate that its inline-size is equal to the - // inline-size computed above minus the inline-size of the previous left and/or right - // floats. 
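The speculation described above is carried out just below by subtracting the speculated left and right float widths, less whatever part of them a non-negative margin already covers. A small sketch of that arithmetic, with `i32` in place of `Au` and hypothetical helper names (not the deleted code):

```rust
// Illustrative sketch (not style-servo code): speculative inline-size guess
// for a block formatting context, mirroring the subtraction performed below.
fn speculated_float_size(speculated: i32, margin: i32) -> i32 {
    // Only non-negative margins absorb float width, and only the excess of the
    // speculated float placement over that margin narrows the block.
    if margin >= 0 && speculated > margin {
        speculated - margin
    } else {
        0
    }
}

fn guess_bfc_inline_size(computed: i32, left: i32, right: i32,
                         margin_start: i32, margin_end: i32) -> i32 {
    computed
        - speculated_float_size(left, margin_start)
        - speculated_float_size(right, margin_end)
}

fn main() {
    // 600px computed inline size, a 100px left float, 20px start margin:
    // the guess is 600 - (100 - 20) = 520.
    assert_eq!(guess_bfc_inline_size(600, 100, 0, 20, 0), 520);
    // A negative margin never widens the guess.
    assert_eq!(guess_bfc_inline_size(600, 100, 0, -20, 0), 600);
}
```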
- let speculated_left_float_size = if self.fragment.margin.inline_start >= Au(0) && - self.base.speculated_float_placement_in.left > self.fragment.margin.inline_start { - self.base.speculated_float_placement_in.left - self.fragment.margin.inline_start - } else { - Au(0) - }; - let speculated_right_float_size = if self.fragment.margin.inline_end >= Au(0) && - self.base.speculated_float_placement_in.right > self.fragment.margin.inline_end { - self.base.speculated_float_placement_in.right - self.fragment.margin.inline_end - } else { - Au(0) - }; - self.fragment.border_box.size.inline = self.fragment.border_box.size.inline - - speculated_left_float_size - speculated_right_float_size - } - - fn definitely_has_zero_block_size(&self) -> bool { - if !self.fragment.style.content_block_size().is_definitely_zero() { - return false - } - let border_width = self.fragment.border_width(); - if border_width.block_start != Au(0) || border_width.block_end != Au(0) { - return false - } - let padding = self.fragment.style.logical_padding(); - padding.block_start.is_definitely_zero() && padding.block_end.is_definitely_zero() - } - - pub fn is_inline_flex_item(&self) -> bool { - self.fragment.flags.contains(IS_INLINE_FLEX_ITEM) - } - - pub fn is_block_flex_item(&self) -> bool { - self.fragment.flags.contains(IS_BLOCK_FLEX_ITEM) - } - - pub fn mark_scrolling_overflow(&mut self, has_scrolling_overflow: bool) { - if has_scrolling_overflow { - self.flags.insert(HAS_SCROLLING_OVERFLOW); - } else { - self.flags.remove(HAS_SCROLLING_OVERFLOW); - } - } - - pub fn has_scrolling_overflow(&mut self) -> bool { - self.flags.contains(HAS_SCROLLING_OVERFLOW) - } - - // Return offset from original position because of `position: sticky`. - pub fn sticky_position(&self) -> SideOffsets2D { - let containing_block_size = &self.base.early_absolute_position_info - .relative_containing_block_size; - let writing_mode = self.base.early_absolute_position_info.relative_containing_block_mode; - let offsets = self.fragment.style().logical_position(); - let as_margins = LogicalMargin::new(writing_mode, - MaybeAuto::from_style(offsets.block_start, containing_block_size.inline), - MaybeAuto::from_style(offsets.inline_end, containing_block_size.inline), - MaybeAuto::from_style(offsets.block_end, containing_block_size.inline), - MaybeAuto::from_style(offsets.inline_start, containing_block_size.inline)); - as_margins.to_physical(writing_mode) - } - -} - -impl Flow for BlockFlow { - fn class(&self) -> FlowClass { - FlowClass::Block - } - - fn as_mut_block(&mut self) -> &mut BlockFlow { - self - } - - fn as_block(&self) -> &BlockFlow { - self - } - - /// Pass 1 of reflow: computes minimum and preferred inline-sizes. - /// - /// Recursively (bottom-up) determine the flow's minimum and preferred inline-sizes. When - /// called on this flow, all child flows have had their minimum and preferred inline-sizes set. - /// This function must decide minimum/preferred inline-sizes based on its children's - /// inline-sizes and the dimensions of any fragments it is responsible for flowing. - fn bubble_inline_sizes(&mut self) { - // If this block has a fixed width, just use that for the minimum and preferred width, - // rather than bubbling up children inline width. 
- let consult_children = match self.fragment.style().get_position().width { - LengthOrPercentageOrAuto::Length(_) => false, - _ => true, - }; - self.bubble_inline_sizes_for_block(consult_children); - self.fragment.restyle_damage.remove(BUBBLE_ISIZES); - } - - /// Recursively (top-down) determines the actual inline-size of child contexts and fragments. - /// When called on this context, the context has had its inline-size set by the parent context. - /// - /// Dual fragments consume some inline-size first, and the remainder is assigned to all child - /// (block) contexts. - fn assign_inline_sizes(&mut self, layout_context: &LayoutContext) { - let _scope = layout_debug_scope!("block::assign_inline_sizes {:x}", self.base.debug_id()); - - let shared_context = layout_context.shared_context(); - self.compute_inline_sizes(shared_context); - - // Move in from the inline-start border edge. - let inline_start_content_edge = self.fragment.border_box.start.i + - self.fragment.border_padding.inline_start; - - let padding_and_borders = self.fragment.border_padding.inline_start_end(); - - // Distance from the inline-end margin edge to the inline-end content edge. - let inline_end_content_edge = - self.fragment.margin.inline_end + - self.fragment.border_padding.inline_end; - - let content_inline_size = self.fragment.border_box.size.inline - padding_and_borders; - - self.propagate_assigned_inline_size_to_children(shared_context, - inline_start_content_edge, - inline_end_content_edge, - content_inline_size, - |_, _, _, _, _, _| {}); - } - - fn place_float_if_applicable<'a>(&mut self) { - if self.base.flags.is_float() { - self.place_float(); - } - } - - fn assign_block_size_for_inorder_child_if_necessary(&mut self, - layout_context: &LayoutContext, - parent_thread_id: u8, - content_box: LogicalRect) - -> bool { - if self.base.flags.is_float() { - return false - } - - let is_formatting_context = self.formatting_context_type() != FormattingContextType::None; - if !self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) && is_formatting_context { - self.assign_inline_position_for_formatting_context(layout_context, content_box); - } - - if (self as &Flow).floats_might_flow_through() { - self.base.thread_id = parent_thread_id; - if self.base.restyle_damage.intersects(REFLOW_OUT_OF_FLOW | REFLOW) { - self.assign_block_size(layout_context); - // Don't remove the restyle damage; `assign_block_size` decides whether that is - // appropriate (which in the case of e.g. absolutely-positioned flows, it is not). - } - return true - } - - if is_formatting_context { - // If this is a formatting context and definitely did not have floats in, then we must - // translate the floats past us. - let writing_mode = self.base.floats.writing_mode; - let delta = self.base.position.size.block; - self.base.floats.translate(LogicalSize::new(writing_mode, Au(0), -delta)); - return true - } - - false - } - - fn assign_block_size(&mut self, ctx: &LayoutContext) { - let remaining = Flow::fragment(self, ctx, None); - debug_assert!(remaining.is_none()); - } - - fn fragment(&mut self, layout_context: &LayoutContext, - fragmentation_context: Option) - -> Option> { - if self.fragment.is_replaced() { - let _scope = layout_debug_scope!("assign_replaced_block_size_if_necessary {:x}", - self.base.debug_id()); - - // Assign block-size for fragment if it is an image fragment. 
- self.fragment.assign_replaced_block_size_if_necessary(); - if !self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) { - self.base.position.size.block = self.fragment.border_box.size.block; - let mut block_start = AdjoiningMargins::from_margin(self.fragment.margin.block_start); - let block_end = AdjoiningMargins::from_margin(self.fragment.margin.block_end); - if self.fragment.border_box.size.block == Au(0) { - block_start.union(block_end); - self.base.collapsible_margins = CollapsibleMargins::CollapseThrough(block_start); - } else { - self.base.collapsible_margins = CollapsibleMargins::Collapse(block_start, block_end); - } - self.base.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW); - self.fragment.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW); - } - None - } else if self.is_root() || - self.formatting_context_type() != FormattingContextType::None || - self.base.flags.contains(MARGINS_CANNOT_COLLAPSE) { - // Root element margins should never be collapsed according to CSS § 8.3.1. - debug!("assign_block_size: assigning block_size for root flow {:?}", - flow::base(self).debug_id()); - self.assign_block_size_block_base( - layout_context, - fragmentation_context, - MarginsMayCollapseFlag::MarginsMayNotCollapse) - } else { - debug!("assign_block_size: assigning block_size for block {:?}", - flow::base(self).debug_id()); - self.assign_block_size_block_base( - layout_context, - fragmentation_context, - MarginsMayCollapseFlag::MarginsMayCollapse) - } - } - - fn compute_stacking_relative_position(&mut self, _layout_context: &LayoutContext) { - // FIXME (mbrubeck): Get the real container size, taking the container writing mode into - // account. Must handle vertical writing modes. - let container_size = Size2D::new(self.base.block_container_inline_size, Au(0)); - - if self.is_root() { - self.base.clip = max_rect(); - } - - if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) { - let position_start = self.base.position.start.to_physical(self.base.writing_mode, - container_size); - - // Compute our position relative to the nearest ancestor stacking context. This will be - // passed down later as part of containing block details for absolute descendants. - let absolute_stacking_relative_position = if self.is_fixed() { - // The viewport is initially at (0, 0). - position_start - } else { - // Absolute position of the containing block + position of absolute - // flow w.r.t. the containing block. - self.base - .late_absolute_position_info - .stacking_relative_position_of_absolute_containing_block + position_start.to_vector() - }; - - if !self.base.writing_mode.is_vertical() { - if !self.base.flags.contains(INLINE_POSITION_IS_STATIC) { - self.base.stacking_relative_position.x = absolute_stacking_relative_position.x - } - if !self.base.flags.contains(BLOCK_POSITION_IS_STATIC) { - self.base.stacking_relative_position.y = absolute_stacking_relative_position.y - } - } else { - if !self.base.flags.contains(INLINE_POSITION_IS_STATIC) { - self.base.stacking_relative_position.y = absolute_stacking_relative_position.y - } - if !self.base.flags.contains(BLOCK_POSITION_IS_STATIC) { - self.base.stacking_relative_position.x = absolute_stacking_relative_position.x - } - } - } - - // For relatively-positioned descendants, the containing block formed by a block is just - // the content box. The containing block for absolutely-positioned descendants, on the - // other hand, is established in other circumstances (see `is_absolute_containing_block'). 
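For an absolutely positioned flow, the code above derives the stacking-relative position by adding the flow's own position to the stacking-relative position of its absolute containing block, except for `position: fixed`, which is already viewport-relative. A simplified sketch of that addition, with a plain `(x, y)` point standing in for the real geometry types (illustrative only):

```rust
// Illustrative sketch (not style-servo code): stacking-relative position of an
// absolutely positioned flow. `fixed` corresponds to `position: fixed`.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Point { x: i32, y: i32 }

fn absolute_stacking_relative_position(
    position_in_containing_block: Point,
    containing_block_stacking_position: Point,
    fixed: bool,
) -> Point {
    if fixed {
        // The viewport starts at (0, 0), so a fixed flow's own position is
        // already stacking-relative.
        position_in_containing_block
    } else {
        // Absolute position of the containing block plus the flow's position
        // with respect to that containing block.
        Point {
            x: containing_block_stacking_position.x + position_in_containing_block.x,
            y: containing_block_stacking_position.y + position_in_containing_block.y,
        }
    }
}

fn main() {
    let in_cb = Point { x: 10, y: 20 };
    let cb = Point { x: 100, y: 300 };
    assert_eq!(absolute_stacking_relative_position(in_cb, cb, false),
               Point { x: 110, y: 320 });
    assert_eq!(absolute_stacking_relative_position(in_cb, cb, true), in_cb);
}
```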
- let relative_offset = - self.fragment.relative_position(&self.base - .early_absolute_position_info - .relative_containing_block_size); - if self.is_absolute_containing_block() { - let border_box_origin = (self.fragment.border_box - - self.fragment.style.logical_border_width()).start; - self.base - .late_absolute_position_info - .stacking_relative_position_of_absolute_containing_block = - self.base.stacking_relative_position.to_point() + - (border_box_origin + relative_offset).to_physical(self.base.writing_mode, - container_size).to_vector() - } - - // Compute absolute position info for children. - let stacking_relative_position_of_absolute_containing_block_for_children = - if self.fragment.establishes_stacking_context() { - let logical_border_width = self.fragment.style().logical_border_width(); - let position = LogicalPoint::new(self.base.writing_mode, - logical_border_width.inline_start, - logical_border_width.block_start); - let position = position.to_physical(self.base.writing_mode, container_size); - - // Some blocks establish a stacking context, but not a containing block for - // absolutely positioned elements. An example of this might be a block that has - // `position: static` and `opacity` set. In these cases, absolutely-positioned - // children will not be positioned relative to us but will instead be positioned - // relative to our containing block. - if self.is_absolute_containing_block() { - position - } else { - position - self.base.stacking_relative_position - } - } else { - self.base - .late_absolute_position_info - .stacking_relative_position_of_absolute_containing_block - }; - let late_absolute_position_info_for_children = LateAbsolutePositionInfo { - stacking_relative_position_of_absolute_containing_block: - stacking_relative_position_of_absolute_containing_block_for_children, - }; - let container_size_for_children = - self.base.position.size.to_physical(self.base.writing_mode); - - // Compute the origin and clipping rectangle for children. - let relative_offset = relative_offset.to_physical(self.base.writing_mode).to_vector(); - let is_stacking_context = self.fragment.establishes_stacking_context(); - let origin_for_children = if is_stacking_context { - // We establish a stacking context, so the position of our children is vertically - // correct, but has to be adjusted to accommodate horizontal margins. (Note the - // calculation involving `position` below and recall that inline-direction flow - // positions are relative to the edges of the margin box.) - // - // FIXME(pcwalton): Is this vertical-writing-direction-safe? - let margin = self.fragment.margin.to_physical(self.base.writing_mode); - Point2D::new(-margin.left, Au(0)) - } else { - self.base.stacking_relative_position.to_point() + relative_offset - }; - - // Process children. - for kid in self.base.child_iter_mut() { - if flow::base(kid).flags.contains(INLINE_POSITION_IS_STATIC) || - flow::base(kid).flags.contains(BLOCK_POSITION_IS_STATIC) { - let kid_base = flow::mut_base(kid); - let physical_position = kid_base.position.to_physical(kid_base.writing_mode, - container_size_for_children); - - // Set the inline and block positions as necessary. 
- if !kid_base.writing_mode.is_vertical() { - if kid_base.flags.contains(INLINE_POSITION_IS_STATIC) { - kid_base.stacking_relative_position.x = origin_for_children.x + - physical_position.origin.x - } - if kid_base.flags.contains(BLOCK_POSITION_IS_STATIC) { - kid_base.stacking_relative_position.y = origin_for_children.y + - physical_position.origin.y - } - } else { - if kid_base.flags.contains(INLINE_POSITION_IS_STATIC) { - kid_base.stacking_relative_position.y = origin_for_children.y + - physical_position.origin.y - } - if kid_base.flags.contains(BLOCK_POSITION_IS_STATIC) { - kid_base.stacking_relative_position.x = origin_for_children.x + - physical_position.origin.x - } - } - } - - flow::mut_base(kid).late_absolute_position_info = - late_absolute_position_info_for_children; - } - } - - fn mark_as_root(&mut self) { - self.flags.insert(IS_ROOT) - } - - fn is_root(&self) -> bool { - self.flags.contains(IS_ROOT) - } - - /// The 'position' property of this flow. - fn positioning(&self) -> position::T { - self.fragment.style.get_box().position - } - - /// Return the dimensions of the containing block generated by this flow for absolutely- - /// positioned descendants. For block flows, this is the padding box. - fn generated_containing_block_size(&self, _: OpaqueFlow) -> LogicalSize { - (self.fragment.border_box - self.fragment.style().logical_border_width()).size - } - - /// Returns true if this flow contains fragments that are roots of an absolute flow tree. - fn contains_roots_of_absolute_flow_tree(&self) -> bool { - self.contains_relatively_positioned_fragments() || self.is_root() || - self.fragment.has_filter_transform_or_perspective() - } - - /// Returns true if this is an absolute containing block. - fn is_absolute_containing_block(&self) -> bool { - self.contains_positioned_fragments() || self.fragment.has_filter_transform_or_perspective() - } - - fn update_late_computed_inline_position_if_necessary(&mut self, inline_position: Au) { - if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) && - self.fragment.style().logical_position().inline_start == - LengthOrPercentageOrAuto::Auto && - self.fragment.style().logical_position().inline_end == - LengthOrPercentageOrAuto::Auto { - self.base.position.start.i = inline_position - } - } - - fn update_late_computed_block_position_if_necessary(&mut self, block_position: Au) { - if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) && - self.fragment.style().logical_position().block_start == - LengthOrPercentageOrAuto::Auto && - self.fragment.style().logical_position().block_end == - LengthOrPercentageOrAuto::Auto { - self.base.position.start.b = block_position - } - } - - fn collect_stacking_contexts(&mut self, state: &mut StackingContextCollectionState) { - self.collect_stacking_contexts_for_block(state, StackingContextCollectionFlags::empty()); - } - - fn build_display_list(&mut self, state: &mut DisplayListBuildState) { - self.build_display_list_for_block(state, BorderPaintingMode::Separate); - } - - fn repair_style(&mut self, new_style: &::ServoArc) { - self.fragment.repair_style(new_style) - } - - fn compute_overflow(&self) -> Overflow { - let flow_size = self.base.position.size.to_physical(self.base.writing_mode); - let overflow = self.fragment.compute_overflow(&flow_size, - &self.base - .early_absolute_position_info - .relative_containing_block_size); - overflow - } - - fn iterate_through_fragment_border_boxes(&self, - iterator: &mut FragmentBorderBoxIterator, - level: i32, - stacking_context_position: &Point2D) { - if 
!iterator.should_process(&self.fragment) { - return - } - - iterator.process(&self.fragment, - level, - &self.fragment - .stacking_relative_border_box(&self.base.stacking_relative_position, - &self.base - .early_absolute_position_info - .relative_containing_block_size, - self.base - .early_absolute_position_info - .relative_containing_block_mode, - CoordinateSystem::Own) - .translate(&stacking_context_position.to_vector())); - } - - fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) { - (*mutator)(&mut self.fragment) - } - - fn print_extra_flow_children(&self, print_tree: &mut PrintTree) { - print_tree.add_item(format!("↑↑ Fragment for block: {:?}", self.fragment)); - } -} - -impl fmt::Debug for BlockFlow { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, - "{:?}({:x}) {:?}", - self.class(), - self.base.debug_id(), - self.base) - } -} - -/// The inputs for the inline-sizes-and-margins constraint equation. -#[derive(Clone, Copy, Debug)] -pub struct ISizeConstraintInput { - pub computed_inline_size: MaybeAuto, - pub inline_start_margin: MaybeAuto, - pub inline_end_margin: MaybeAuto, - pub inline_start: MaybeAuto, - pub inline_end: MaybeAuto, - pub text_align: text_align::T, - pub available_inline_size: Au, -} - -impl ISizeConstraintInput { - pub fn new(computed_inline_size: MaybeAuto, - inline_start_margin: MaybeAuto, - inline_end_margin: MaybeAuto, - inline_start: MaybeAuto, - inline_end: MaybeAuto, - text_align: text_align::T, - available_inline_size: Au) - -> ISizeConstraintInput { - ISizeConstraintInput { - computed_inline_size: computed_inline_size, - inline_start_margin: inline_start_margin, - inline_end_margin: inline_end_margin, - inline_start: inline_start, - inline_end: inline_end, - text_align: text_align, - available_inline_size: available_inline_size, - } - } -} - -/// The solutions for the inline-size-and-margins constraint equation. -#[derive(Clone, Copy, Debug)] -pub struct ISizeConstraintSolution { - pub inline_start: Au, - pub inline_size: Au, - pub margin_inline_start: Au, - pub margin_inline_end: Au -} - -impl ISizeConstraintSolution { - pub fn new(inline_size: Au, margin_inline_start: Au, margin_inline_end: Au) - -> ISizeConstraintSolution { - ISizeConstraintSolution { - inline_start: Au(0), - inline_size: inline_size, - margin_inline_start: margin_inline_start, - margin_inline_end: margin_inline_end, - } - } - - fn for_absolute_flow(inline_start: Au, - inline_size: Au, - margin_inline_start: Au, - margin_inline_end: Au) - -> ISizeConstraintSolution { - ISizeConstraintSolution { - inline_start: inline_start, - inline_size: inline_size, - margin_inline_start: margin_inline_start, - margin_inline_end: margin_inline_end, - } - } -} - -// Trait to encapsulate the ISize and Margin calculation. -// -// CSS Section 10.3 -pub trait ISizeAndMarginsComputer { - /// Instructs the fragment to compute its border and padding. - fn compute_border_and_padding(&self, block: &mut BlockFlow, containing_block_inline_size: Au) { - block.fragment.compute_border_and_padding(containing_block_inline_size); - } - - /// Compute the inputs for the ISize constraint equation. - /// - /// This is called only once to compute the initial inputs. For calculations involving - /// minimum and maximum inline-size, we don't need to recompute these. 
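`compute_used_inline_size`, defined further down, applies `min-inline-size` and `max-inline-size` by re-running the constraint solver with the clamped value substituted for the computed inline size, which is why these inputs only need to be computed once. A rough sketch of that re-solve loop, with a stand-in solver, hypothetical names, and `i32` in place of `Au` (not the deleted code):

```rust
// Illustrative sketch (not style-servo code): min/max-inline-size are honoured
// by re-running the solver with the clamped value, not by recomputing inputs.
#[derive(Clone, Copy)]
struct Input {
    computed_inline_size: Option<i32>, // None models `auto`
    available_inline_size: i32,
}

fn solve(input: Input) -> i32 {
    // Stand-in solver: a specified size wins, `auto` fills the available space.
    input.computed_inline_size.unwrap_or(input.available_inline_size)
}

fn used_inline_size(mut input: Input, min: i32, max: Option<i32>) -> i32 {
    let mut size = solve(input);
    // Tentative size above max-inline-size: re-solve with max as the computed value.
    if let Some(max) = max {
        if size > max {
            input.computed_inline_size = Some(max);
            size = solve(input);
        }
    }
    // Result below min-inline-size: re-solve with min as the computed value.
    if size < min {
        input.computed_inline_size = Some(min);
        size = solve(input);
    }
    size
}

fn main() {
    let auto = Input { computed_inline_size: None, available_inline_size: 800 };
    // `auto` width clamped by max-inline-size: 600.
    assert_eq!(used_inline_size(auto, 0, Some(600)), 600);
    // min-inline-size wins over a small specified width.
    let narrow = Input { computed_inline_size: Some(50), available_inline_size: 800 };
    assert_eq!(used_inline_size(narrow, 120, None), 120);
}
```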
- fn compute_inline_size_constraint_inputs(&self, - block: &mut BlockFlow, - parent_flow_inline_size: Au, - shared_context: &SharedStyleContext) - -> ISizeConstraintInput { - let containing_block_inline_size = - self.containing_block_inline_size(block, parent_flow_inline_size, shared_context); - - block.fragment.compute_block_direction_margins(containing_block_inline_size); - block.fragment.compute_inline_direction_margins(containing_block_inline_size); - self.compute_border_and_padding(block, containing_block_inline_size); - - let mut computed_inline_size = self.initial_computed_inline_size(block, - parent_flow_inline_size, - shared_context); - let style = block.fragment.style(); - match (computed_inline_size, style.get_position().box_sizing) { - (MaybeAuto::Specified(size), box_sizing::T::border_box) => { - computed_inline_size = - MaybeAuto::Specified(size - block.fragment.border_padding.inline_start_end()) - } - (MaybeAuto::Auto, box_sizing::T::border_box) | - (_, box_sizing::T::content_box) => {} - } - - let margin = style.logical_margin(); - let position = style.logical_position(); - - let available_inline_size = containing_block_inline_size - - block.fragment.border_padding.inline_start_end(); - ISizeConstraintInput::new(computed_inline_size, - MaybeAuto::from_style(margin.inline_start, - containing_block_inline_size), - MaybeAuto::from_style(margin.inline_end, - containing_block_inline_size), - MaybeAuto::from_style(position.inline_start, - containing_block_inline_size), - MaybeAuto::from_style(position.inline_end, - containing_block_inline_size), - style.get_inheritedtext().text_align, - available_inline_size) - } - - /// Set the used values for inline-size and margins from the relevant constraint equation. - /// This is called only once. - /// - /// Set: - /// * Used values for content inline-size, inline-start margin, and inline-end margin for this - /// flow's box; - /// * Inline-start coordinate of this flow's box; - /// * Inline-start coordinate of the flow with respect to its containing block (if this is an - /// absolute flow). - fn set_inline_size_constraint_solutions(&self, - block: &mut BlockFlow, - solution: ISizeConstraintSolution) { - let inline_size; - let extra_inline_size_from_margin; - { - let block_mode = block.base.writing_mode; - - // FIXME (mbrubeck): Get correct containing block for positioned blocks? - let container_mode = block.base.block_container_writing_mode; - let container_size = block.base.block_container_inline_size; - - let fragment = block.fragment(); - fragment.margin.inline_start = solution.margin_inline_start; - fragment.margin.inline_end = solution.margin_inline_end; - - // The associated fragment has the border box of this flow. - inline_size = solution.inline_size + fragment.border_padding.inline_start_end(); - fragment.border_box.size.inline = inline_size; - - // Start border edge. - // FIXME (mbrubeck): Handle vertical writing modes. - fragment.border_box.start.i = - if container_mode.is_bidi_ltr() == block_mode.is_bidi_ltr() { - fragment.margin.inline_start - } else { - // The parent's "start" direction is the child's "end" direction. - container_size - inline_size - fragment.margin.inline_end - }; - - // To calculate the total size of this block, we also need to account for any - // additional size contribution from positive margins. Negative margins means the block - // isn't made larger at all by the margin. 
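The `box-sizing` adjustment above converts a `border-box` specified size into the content-box value the rest of the solver works with, by subtracting the inline border and padding up front. A compact sketch of that conversion, with `Option<i32>` modelling `auto` and plain integers in place of `Au` (illustrative only):

```rust
// Illustrative sketch (not style-servo code): with `box-sizing: border-box`
// the specified inline size includes border and padding, so the content-box
// value used by the solver subtracts them up front.
fn content_box_inline_size(
    specified: Option<i32>,
    border_box_sizing: bool,
    border_padding_inline: i32,
) -> Option<i32> {
    match (specified, border_box_sizing) {
        // border-box: strip border + padding from the specified value.
        (Some(size), true) => Some(size - border_padding_inline),
        // `auto`, or content-box sizing: use the value as-is.
        _ => specified,
    }
}

fn main() {
    // width: 300px; box-sizing: border-box; 20px of border+padding per side.
    assert_eq!(content_box_inline_size(Some(300), true, 40), Some(260));
    // content-box leaves the specified value untouched.
    assert_eq!(content_box_inline_size(Some(300), false, 40), Some(300));
    // `auto` is unaffected either way.
    assert_eq!(content_box_inline_size(None, true, 40), None);
}
```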
- extra_inline_size_from_margin = max(Au(0), fragment.margin.inline_start) + - max(Au(0), fragment.margin.inline_end); - } - - // We also resize the block itself, to ensure that overflow is not calculated - // as the inline-size of our parent. We might be smaller and we might be larger if we - // overflow. - flow::mut_base(block).position.size.inline = inline_size + extra_inline_size_from_margin; - } - - /// Set the inline coordinate of the given flow if it is absolutely positioned. - fn set_inline_position_of_flow_if_necessary(&self, - _: &mut BlockFlow, - _: ISizeConstraintSolution) {} - - /// Solve the inline-size and margins constraints for this block flow. - fn solve_inline_size_constraints(&self, - block: &mut BlockFlow, - input: &ISizeConstraintInput) - -> ISizeConstraintSolution; - - fn initial_computed_inline_size(&self, - block: &mut BlockFlow, - parent_flow_inline_size: Au, - shared_context: &SharedStyleContext) - -> MaybeAuto { - MaybeAuto::from_style(block.fragment().style().content_inline_size(), - self.containing_block_inline_size(block, - parent_flow_inline_size, - shared_context)) - } - - fn containing_block_inline_size(&self, - _: &mut BlockFlow, - parent_flow_inline_size: Au, - _: &SharedStyleContext) - -> Au { - parent_flow_inline_size - } - - /// Compute the used value of inline-size, taking care of min-inline-size and max-inline-size. - /// - /// CSS Section 10.4: Minimum and Maximum inline-sizes - fn compute_used_inline_size(&self, - block: &mut BlockFlow, - shared_context: &SharedStyleContext, - parent_flow_inline_size: Au) { - let mut input = self.compute_inline_size_constraint_inputs(block, - parent_flow_inline_size, - shared_context); - - let containing_block_inline_size = - self.containing_block_inline_size(block, parent_flow_inline_size, shared_context); - - let mut solution = self.solve_inline_size_constraints(block, &input); - - // If the tentative used inline-size is greater than 'max-inline-size', inline-size should - // be recalculated, but this time using the computed value of 'max-inline-size' as the - // computed value for 'inline-size'. - match block.fragment().style().max_inline_size().to_used_value(containing_block_inline_size) { - Some(max_inline_size) if max_inline_size < solution.inline_size => { - input.computed_inline_size = MaybeAuto::Specified(max_inline_size); - solution = self.solve_inline_size_constraints(block, &input); - } - _ => {} - } - - // If the resulting inline-size is smaller than 'min-inline-size', inline-size should be - // recalculated, but this time using the value of 'min-inline-size' as the computed value - // for 'inline-size'. - let computed_min_inline_size = - block.fragment().style().min_inline_size().to_used_value(containing_block_inline_size); - if computed_min_inline_size > solution.inline_size { - input.computed_inline_size = MaybeAuto::Specified(computed_min_inline_size); - solution = self.solve_inline_size_constraints(block, &input); - } - - self.set_inline_size_constraint_solutions(block, solution); - self.set_inline_position_of_flow_if_necessary(block, solution); - } - - /// Computes inline-start and inline-end margins and inline-size. - /// - /// This is used by both replaced and non-replaced Blocks. - /// - /// CSS 2.1 Section 10.3.3. 
- /// Constraint Equation: margin-inline-start + margin-inline-end + inline-size = - /// available_inline-size - /// where available_inline-size = CB inline-size - (horizontal border + padding) - fn solve_block_inline_size_constraints(&self, - block: &mut BlockFlow, - input: &ISizeConstraintInput) - -> ISizeConstraintSolution { - let (computed_inline_size, inline_start_margin, inline_end_margin, available_inline_size) = - (input.computed_inline_size, - input.inline_start_margin, - input.inline_end_margin, - input.available_inline_size); - - // Check for direction of parent flow (NOT Containing Block) - let block_mode = block.base.writing_mode; - let container_mode = block.base.block_container_writing_mode; - let block_align = block.base.flags.text_align(); - - // FIXME (mbrubeck): Handle vertical writing modes. - let parent_has_same_direction = container_mode.is_bidi_ltr() == block_mode.is_bidi_ltr(); - - // If inline-size is not 'auto', and inline-size + margins > available_inline-size, all - // 'auto' margins are treated as 0. - let (inline_start_margin, inline_end_margin) = match computed_inline_size { - MaybeAuto::Auto => (inline_start_margin, inline_end_margin), - MaybeAuto::Specified(inline_size) => { - let inline_start = inline_start_margin.specified_or_zero(); - let inline_end = inline_end_margin.specified_or_zero(); - - if (inline_start + inline_end + inline_size) > available_inline_size { - (MaybeAuto::Specified(inline_start), MaybeAuto::Specified(inline_end)) - } else { - (inline_start_margin, inline_end_margin) - } - } - }; - - // Invariant: inline-start_margin + inline-size + inline-end_margin == - // available_inline-size - let (inline_start_margin, inline_size, inline_end_margin) = - match (inline_start_margin, computed_inline_size, inline_end_margin) { - // If all have a computed value other than 'auto', the system is over-constrained. - (MaybeAuto::Specified(margin_start), - MaybeAuto::Specified(inline_size), - MaybeAuto::Specified(margin_end)) => { - // servo_left, servo_right, and servo_center are used to implement - // the "align descendants" rule in HTML5 § 14.2. - if block_align == text_align::T::servo_center { - // Ignore any existing margins, and make the inline-start and - // inline-end margins equal. 
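The match above and the arms that follow cover the CSS 2.1 § 10.3.3 cases: two `auto` margins centre the box, a single `auto` value is solved for, and an `auto` inline size absorbs the remaining space. A worked sketch of those common cases under the constraint margin-start + inline-size + margin-end = available (LTR assumed, `Option<i32>` modelling `auto`; a simplification, not the deleted code):

```rust
// Illustrative sketch (not style-servo code): common CSS 2.1 § 10.3.3 cases,
// returning (margin_start, inline_size, margin_end).
fn solve_block(
    available: i32,
    margin_start: Option<i32>,
    inline_size: Option<i32>,
    margin_end: Option<i32>,
) -> (i32, i32, i32) {
    match (margin_start, inline_size, margin_end) {
        // Over-constrained: in an LTR parent the end margin is the one ignored.
        (Some(ms), Some(sz), Some(_)) => (ms, sz, available - ms - sz),
        // Both margins auto with a definite size: share the leftover equally.
        (None, Some(sz), None) => {
            let m = (available - sz) / 2;
            (m, sz, m)
        }
        // Exactly one auto margin: solve for it.
        (None, Some(sz), Some(me)) => (available - sz - me, sz, me),
        (Some(ms), Some(sz), None) => (ms, sz, available - ms - sz),
        // `auto` inline size absorbs the remaining space; auto margins become 0.
        (ms, None, me) => {
            let (ms, me) = (ms.unwrap_or(0), me.unwrap_or(0));
            (ms, available - ms - me, me)
        }
    }
}

fn main() {
    // width: 400px; margin: 0 auto; containing block 600px wide: centred.
    assert_eq!(solve_block(600, None, Some(400), None), (100, 400, 100));
    // Everything specified (over-constrained): the end margin gives way.
    assert_eq!(solve_block(600, Some(10), Some(400), Some(50)), (10, 400, 190));
    // width: auto fills whatever the margins leave over.
    assert_eq!(solve_block(600, Some(20), None, Some(30)), (20, 550, 30));
}
```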
- let margin = (available_inline_size - inline_size).scale_by(0.5); - (margin, inline_size, margin) - } else { - let ignore_end_margin = match block_align { - text_align::T::servo_left => block_mode.is_bidi_ltr(), - text_align::T::servo_right => !block_mode.is_bidi_ltr(), - _ => parent_has_same_direction, - }; - if ignore_end_margin { - (margin_start, inline_size, available_inline_size - - (margin_start + inline_size)) - } else { - (available_inline_size - (margin_end + inline_size), - inline_size, - margin_end) - } - } - } - // If exactly one value is 'auto', solve for it - (MaybeAuto::Auto, - MaybeAuto::Specified(inline_size), - MaybeAuto::Specified(margin_end)) => - (available_inline_size - (inline_size + margin_end), inline_size, margin_end), - (MaybeAuto::Specified(margin_start), - MaybeAuto::Auto, - MaybeAuto::Specified(margin_end)) => { - (margin_start, - available_inline_size - (margin_start + margin_end), - margin_end) - } - (MaybeAuto::Specified(margin_start), - MaybeAuto::Specified(inline_size), - MaybeAuto::Auto) => { - (margin_start, - inline_size, - available_inline_size - (margin_start + inline_size)) - } - - // If inline-size is set to 'auto', any other 'auto' value becomes '0', - // and inline-size is solved for - (MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Specified(margin_end)) => { - (Au(0), available_inline_size - margin_end, margin_end) - } - (MaybeAuto::Specified(margin_start), MaybeAuto::Auto, MaybeAuto::Auto) => { - (margin_start, available_inline_size - margin_start, Au(0)) - } - (MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Auto) => { - (Au(0), available_inline_size, Au(0)) - } - - // If inline-start and inline-end margins are auto, they become equal - (MaybeAuto::Auto, MaybeAuto::Specified(inline_size), MaybeAuto::Auto) => { - let margin = (available_inline_size - inline_size).scale_by(0.5); - (margin, inline_size, margin) - } - }; - - ISizeConstraintSolution::new(inline_size, inline_start_margin, inline_end_margin) - } -} - -/// The different types of Blocks. -/// -/// They mainly differ in the way inline-size and block-sizes and margins are calculated -/// for them. -pub struct AbsoluteNonReplaced; -pub struct AbsoluteReplaced; -pub struct BlockNonReplaced; -pub struct BlockReplaced; -pub struct FloatNonReplaced; -pub struct FloatReplaced; -pub struct InlineBlockNonReplaced; -pub struct InlineBlockReplaced; -pub struct InlineFlexItem; - -impl ISizeAndMarginsComputer for AbsoluteNonReplaced { - /// Solve the horizontal constraint equation for absolute non-replaced elements. - /// - /// CSS Section 10.3.7 - /// Constraint equation: - /// inline-start + inline-end + inline-size + margin-inline-start + margin-inline-end - /// = absolute containing block inline-size - (horizontal padding and border) - /// [aka available inline-size] - /// - /// Return the solution for the equation. - fn solve_inline_size_constraints(&self, - block: &mut BlockFlow, - input: &ISizeConstraintInput) - -> ISizeConstraintSolution { - let &ISizeConstraintInput { - computed_inline_size, - inline_start_margin, - inline_end_margin, - inline_start, - inline_end, - available_inline_size, - .. - } = input; - - // Check for direction of parent flow (NOT Containing Block) - let block_mode = block.base.writing_mode; - let container_mode = block.base.block_container_writing_mode; - - // FIXME (mbrubeck): Handle vertical writing modes. 
- let parent_has_same_direction = container_mode.is_bidi_ltr() == block_mode.is_bidi_ltr(); - - let (inline_start, inline_size, margin_inline_start, margin_inline_end) = - match (inline_start, inline_end, computed_inline_size) { - (MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Auto) => { - let margin_start = inline_start_margin.specified_or_zero(); - let margin_end = inline_end_margin.specified_or_zero(); - // Now it is the same situation as inline-start Specified and inline-end - // and inline-size Auto. - - // Set inline-end to zero to calculate inline-size. - let inline_size = - block.get_shrink_to_fit_inline_size(available_inline_size - - (margin_start + margin_end)); - (Au(0), inline_size, margin_start, margin_end) - } - (MaybeAuto::Specified(inline_start), - MaybeAuto::Specified(inline_end), - MaybeAuto::Specified(inline_size)) => { - match (inline_start_margin, inline_end_margin) { - (MaybeAuto::Auto, MaybeAuto::Auto) => { - let total_margin_val = - available_inline_size - inline_start - inline_end - inline_size; - if total_margin_val < Au(0) { - if parent_has_same_direction { - // margin-inline-start becomes 0 - (inline_start, inline_size, Au(0), total_margin_val) - } else { - // margin-inline-end becomes 0, because it's toward the parent's - // inline-start edge. - (inline_start, inline_size, total_margin_val, Au(0)) - } - } else { - // Equal margins - (inline_start, - inline_size, - total_margin_val.scale_by(0.5), - total_margin_val.scale_by(0.5)) - } - } - (MaybeAuto::Specified(margin_start), MaybeAuto::Auto) => { - let sum = inline_start + inline_end + inline_size + margin_start; - (inline_start, inline_size, margin_start, available_inline_size - sum) - } - (MaybeAuto::Auto, MaybeAuto::Specified(margin_end)) => { - let sum = inline_start + inline_end + inline_size + margin_end; - (inline_start, inline_size, available_inline_size - sum, margin_end) - } - (MaybeAuto::Specified(margin_start), MaybeAuto::Specified(margin_end)) => { - // Values are over-constrained. - let sum = inline_start + inline_size + margin_start + margin_end; - if parent_has_same_direction { - // Ignore value for 'inline-end' - (inline_start, inline_size, margin_start, margin_end) - } else { - // Ignore value for 'inline-start' - (available_inline_size - sum, - inline_size, - margin_start, - margin_end) - } - } - } - } - // For the rest of the cases, auto values for margin are set to 0 - - // If only one is Auto, solve for it - (MaybeAuto::Auto, - MaybeAuto::Specified(inline_end), - MaybeAuto::Specified(inline_size)) => { - let margin_start = inline_start_margin.specified_or_zero(); - let margin_end = inline_end_margin.specified_or_zero(); - let sum = inline_end + inline_size + margin_start + margin_end; - (available_inline_size - sum, inline_size, margin_start, margin_end) - } - (MaybeAuto::Specified(inline_start), - MaybeAuto::Auto, - MaybeAuto::Specified(inline_size)) => { - let margin_start = inline_start_margin.specified_or_zero(); - let margin_end = inline_end_margin.specified_or_zero(); - (inline_start, inline_size, margin_start, margin_end) - } - (MaybeAuto::Specified(inline_start), - MaybeAuto::Specified(inline_end), - MaybeAuto::Auto) => { - let margin_start = inline_start_margin.specified_or_zero(); - let margin_end = inline_end_margin.specified_or_zero(); - let sum = inline_start + inline_end + margin_start + margin_end; - (inline_start, available_inline_size - sum, margin_start, margin_end) - } - - // If inline-size is auto, then inline-size is shrink-to-fit. Solve for the - // non-auto value. 
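When the inline size is `auto` here, the deleted code asks `get_shrink_to_fit_inline_size` for a shrink-to-fit width; that helper's body is not shown here, but CSS 2.1 § 10.3.5, which these impls cite, defines the value as min(max(preferred minimum width, available width), preferred width). A one-line sketch of that formula with `i32` in place of `Au` (illustrative only):

```rust
// Illustrative sketch (not style-servo code): shrink-to-fit width per
// CSS 2.1 § 10.3.5: min(max(preferred minimum, available), preferred).
fn shrink_to_fit(preferred_minimum: i32, preferred: i32, available: i32) -> i32 {
    preferred_minimum.max(available).min(preferred)
}

fn main() {
    // Plenty of space: use the preferred (no-wrapping) width.
    assert_eq!(shrink_to_fit(120, 480, 800), 480);
    // Constrained: wrap down to the available width...
    assert_eq!(shrink_to_fit(120, 480, 300), 300);
    // ...but never below the minimum (the widest unbreakable piece).
    assert_eq!(shrink_to_fit(120, 480, 60), 120);
}
```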
- (MaybeAuto::Specified(inline_start), MaybeAuto::Auto, MaybeAuto::Auto) => { - let margin_start = inline_start_margin.specified_or_zero(); - let margin_end = inline_end_margin.specified_or_zero(); - // Set inline-end to zero to calculate inline-size - let inline_size = - block.get_shrink_to_fit_inline_size(available_inline_size - - (margin_start + margin_end)); - (inline_start, inline_size, margin_start, margin_end) - } - (MaybeAuto::Auto, MaybeAuto::Specified(inline_end), MaybeAuto::Auto) => { - let margin_start = inline_start_margin.specified_or_zero(); - let margin_end = inline_end_margin.specified_or_zero(); - // Set inline-start to zero to calculate inline-size - let inline_size = - block.get_shrink_to_fit_inline_size(available_inline_size - - (margin_start + margin_end)); - let sum = inline_end + inline_size + margin_start + margin_end; - (available_inline_size - sum, inline_size, margin_start, margin_end) - } - - (MaybeAuto::Auto, MaybeAuto::Auto, MaybeAuto::Specified(inline_size)) => { - let margin_start = inline_start_margin.specified_or_zero(); - let margin_end = inline_end_margin.specified_or_zero(); - // Setting 'inline-start' to static position because direction is 'ltr'. - // TODO: Handle 'rtl' when it is implemented. - (Au(0), inline_size, margin_start, margin_end) - } - }; - ISizeConstraintSolution::for_absolute_flow(inline_start, - inline_size, - margin_inline_start, - margin_inline_end) - } - - fn containing_block_inline_size(&self, - block: &mut BlockFlow, - _: Au, - shared_context: &SharedStyleContext) - -> Au { - let opaque_block = OpaqueFlow::from_flow(block); - block.containing_block_size(&shared_context.viewport_size(), opaque_block).inline - } - - fn set_inline_position_of_flow_if_necessary(&self, - block: &mut BlockFlow, - solution: ISizeConstraintSolution) { - // Set the inline position of the absolute flow wrt to its containing block. - if !block.base.flags.contains(INLINE_POSITION_IS_STATIC) { - block.base.position.start.i = solution.inline_start; - } - } -} - -impl ISizeAndMarginsComputer for AbsoluteReplaced { - /// Solve the horizontal constraint equation for absolute replaced elements. - /// - /// CSS Section 10.3.8 - /// Constraint equation: - /// inline-start + inline-end + inline-size + margin-inline-start + margin-inline-end - /// = absolute containing block inline-size - (horizontal padding and border) - /// [aka available_inline-size] - /// - /// Return the solution for the equation. - fn solve_inline_size_constraints(&self, _: &mut BlockFlow, input: &ISizeConstraintInput) - -> ISizeConstraintSolution { - let &ISizeConstraintInput { - computed_inline_size, - inline_start_margin, - inline_end_margin, - inline_start, - inline_end, - available_inline_size, - .. - } = input; - // TODO: Check for direction of static-position Containing Block (aka - // parent flow, _not_ the actual Containing Block) when right-to-left - // is implemented - // Assume direction is 'ltr' for now - // TODO: Handle all the cases for 'rtl' direction. 
- - let inline_size = match computed_inline_size { - MaybeAuto::Specified(w) => w, - _ => panic!("{} {}", - "The used value for inline_size for absolute replaced flow", - "should have already been calculated by now.") - }; - - let (inline_start, inline_size, margin_inline_start, margin_inline_end) = - match (inline_start, inline_end) { - (MaybeAuto::Auto, MaybeAuto::Auto) => { - let margin_start = inline_start_margin.specified_or_zero(); - let margin_end = inline_end_margin.specified_or_zero(); - (Au(0), inline_size, margin_start, margin_end) - } - // If only one is Auto, solve for it - (MaybeAuto::Auto, MaybeAuto::Specified(inline_end)) => { - let margin_start = inline_start_margin.specified_or_zero(); - let margin_end = inline_end_margin.specified_or_zero(); - let sum = inline_end + inline_size + margin_start + margin_end; - (available_inline_size - sum, inline_size, margin_start, margin_end) - } - (MaybeAuto::Specified(inline_start), MaybeAuto::Auto) => { - let margin_start = inline_start_margin.specified_or_zero(); - let margin_end = inline_end_margin.specified_or_zero(); - (inline_start, inline_size, margin_start, margin_end) - } - (MaybeAuto::Specified(inline_start), MaybeAuto::Specified(inline_end)) => { - match (inline_start_margin, inline_end_margin) { - (MaybeAuto::Auto, MaybeAuto::Auto) => { - let total_margin_val = available_inline_size - inline_start - - inline_end - inline_size; - if total_margin_val < Au(0) { - // margin-inline-start becomes 0 because direction is 'ltr'. - (inline_start, inline_size, Au(0), total_margin_val) - } else { - // Equal margins - (inline_start, - inline_size, - total_margin_val.scale_by(0.5), - total_margin_val.scale_by(0.5)) - } - } - (MaybeAuto::Specified(margin_start), MaybeAuto::Auto) => { - let sum = inline_start + inline_end + inline_size + margin_start; - (inline_start, inline_size, margin_start, available_inline_size - sum) - } - (MaybeAuto::Auto, MaybeAuto::Specified(margin_end)) => { - let sum = inline_start + inline_end + inline_size + margin_end; - (inline_start, inline_size, available_inline_size - sum, margin_end) - } - (MaybeAuto::Specified(margin_start), MaybeAuto::Specified(margin_end)) => { - // Values are over-constrained. - // Ignore value for 'inline-end' cos direction is 'ltr'. - (inline_start, inline_size, margin_start, margin_end) - } - } - } - }; - ISizeConstraintSolution::for_absolute_flow(inline_start, - inline_size, - margin_inline_start, - margin_inline_end) - } - - /// Calculate used value of inline-size just like we do for inline replaced elements. - fn initial_computed_inline_size(&self, - block: &mut BlockFlow, - _: Au, - shared_context: &SharedStyleContext) - -> MaybeAuto { - let opaque_block = OpaqueFlow::from_flow(block); - let containing_block_inline_size = - block.containing_block_size(&shared_context.viewport_size(), opaque_block).inline; - let container_block_size = block.explicit_block_containing_size(shared_context); - let fragment = block.fragment(); - fragment.assign_replaced_inline_size_if_necessary(containing_block_inline_size, container_block_size); - // For replaced absolute flow, the rest of the constraint solving will - // take inline-size to be specified as the value computed here. 
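Both the non-replaced and replaced absolute paths above distribute whatever space is left by `inline-start`, `inline-end`, and the inline size into the two `auto` margins: a negative remainder is pushed onto the margin at the containing block's end side, otherwise it is split evenly. A small sketch of that split, with `i32` in place of `Au` (illustrative only, not the deleted code):

```rust
// Illustrative sketch (not style-servo code): distributing the leftover space
// of an absolutely positioned box into two `auto` margins, returning
// (margin_inline_start, margin_inline_end).
fn split_auto_margins(leftover: i32, parent_has_same_direction: bool) -> (i32, i32) {
    if leftover < 0 {
        // Over-constrained: the margin toward the containing block's end side
        // absorbs the negative remainder.
        if parent_has_same_direction {
            (0, leftover)
        } else {
            (leftover, 0)
        }
    } else {
        // Otherwise the two auto margins share the leftover equally.
        (leftover / 2, leftover / 2)
    }
}

fn main() {
    assert_eq!(split_auto_margins(80, true), (40, 40));
    assert_eq!(split_auto_margins(-30, true), (0, -30));
    assert_eq!(split_auto_margins(-30, false), (-30, 0));
}
```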
- MaybeAuto::Specified(fragment.content_box().size.inline) - } - - fn containing_block_inline_size(&self, - block: &mut BlockFlow, - _: Au, - shared_context: &SharedStyleContext) - -> Au { - let opaque_block = OpaqueFlow::from_flow(block); - block.containing_block_size(&shared_context.viewport_size(), opaque_block).inline - } - - fn set_inline_position_of_flow_if_necessary(&self, - block: &mut BlockFlow, - solution: ISizeConstraintSolution) { - // Set the x-coordinate of the absolute flow wrt to its containing block. - block.base.position.start.i = solution.inline_start; - } -} - -impl ISizeAndMarginsComputer for BlockNonReplaced { - /// Compute inline-start and inline-end margins and inline-size. - fn solve_inline_size_constraints(&self, - block: &mut BlockFlow, - input: &ISizeConstraintInput) - -> ISizeConstraintSolution { - self.solve_block_inline_size_constraints(block, input) - } -} - -impl ISizeAndMarginsComputer for BlockReplaced { - /// Compute inline-start and inline-end margins and inline-size. - /// - /// ISize has already been calculated. We now calculate the margins just - /// like for non-replaced blocks. - fn solve_inline_size_constraints(&self, - block: &mut BlockFlow, - input: &ISizeConstraintInput) - -> ISizeConstraintSolution { - match input.computed_inline_size { - MaybeAuto::Specified(_) => {}, - MaybeAuto::Auto => { - panic!("BlockReplaced: inline_size should have been computed by now") - } - }; - self.solve_block_inline_size_constraints(block, input) - } - - /// Calculate used value of inline-size just like we do for inline replaced elements. - fn initial_computed_inline_size(&self, - block: &mut BlockFlow, - parent_flow_inline_size: Au, - shared_context: &SharedStyleContext) - -> MaybeAuto { - let container_block_size = block.explicit_block_containing_size(shared_context); - let fragment = block.fragment(); - fragment.assign_replaced_inline_size_if_necessary(parent_flow_inline_size, container_block_size); - // For replaced block flow, the rest of the constraint solving will - // take inline-size to be specified as the value computed here. - MaybeAuto::Specified(fragment.content_box().size.inline) - } - -} - -impl ISizeAndMarginsComputer for FloatNonReplaced { - /// CSS Section 10.3.5 - /// - /// If inline-size is computed as 'auto', the used value is the 'shrink-to-fit' inline-size. - fn solve_inline_size_constraints(&self, - block: &mut BlockFlow, - input: &ISizeConstraintInput) - -> ISizeConstraintSolution { - let (computed_inline_size, inline_start_margin, inline_end_margin, available_inline_size) = - (input.computed_inline_size, - input.inline_start_margin, - input.inline_end_margin, - input.available_inline_size); - let margin_inline_start = inline_start_margin.specified_or_zero(); - let margin_inline_end = inline_end_margin.specified_or_zero(); - let available_inline_size_float = available_inline_size - margin_inline_start - - margin_inline_end; - let shrink_to_fit = block.get_shrink_to_fit_inline_size(available_inline_size_float); - let inline_size = computed_inline_size.specified_or_default(shrink_to_fit); - debug!("assign_inline_sizes_float -- inline_size: {:?}", inline_size); - ISizeConstraintSolution::new(inline_size, margin_inline_start, margin_inline_end) - } -} - -impl ISizeAndMarginsComputer for FloatReplaced { - /// CSS Section 10.3.5 - /// - /// If inline-size is computed as 'auto', the used value is the 'shrink-to-fit' inline-size. 
- fn solve_inline_size_constraints(&self, _: &mut BlockFlow, input: &ISizeConstraintInput) - -> ISizeConstraintSolution { - let (computed_inline_size, inline_start_margin, inline_end_margin) = - (input.computed_inline_size, input.inline_start_margin, input.inline_end_margin); - let margin_inline_start = inline_start_margin.specified_or_zero(); - let margin_inline_end = inline_end_margin.specified_or_zero(); - let inline_size = match computed_inline_size { - MaybeAuto::Specified(w) => w, - MaybeAuto::Auto => panic!("FloatReplaced: inline_size should have been computed by now") - }; - debug!("assign_inline_sizes_float -- inline_size: {:?}", inline_size); - ISizeConstraintSolution::new(inline_size, margin_inline_start, margin_inline_end) - } - - /// Calculate used value of inline-size just like we do for inline replaced elements. - fn initial_computed_inline_size(&self, - block: &mut BlockFlow, - parent_flow_inline_size: Au, - shared_context: &SharedStyleContext) - -> MaybeAuto { - let container_block_size = block.explicit_block_containing_size(shared_context); - let fragment = block.fragment(); - fragment.assign_replaced_inline_size_if_necessary(parent_flow_inline_size, container_block_size); - // For replaced block flow, the rest of the constraint solving will - // take inline-size to be specified as the value computed here. - MaybeAuto::Specified(fragment.content_box().size.inline) - } -} - -impl ISizeAndMarginsComputer for InlineBlockNonReplaced { - /// Compute inline-start and inline-end margins and inline-size. - fn solve_inline_size_constraints(&self, - block: &mut BlockFlow, - input: &ISizeConstraintInput) - -> ISizeConstraintSolution { - let (computed_inline_size, - inline_start_margin, - inline_end_margin, - available_inline_size) = - (input.computed_inline_size, - input.inline_start_margin, - input.inline_end_margin, - input.available_inline_size); - - // For inline-blocks, `auto` margins compute to 0. - let inline_start_margin = inline_start_margin.specified_or_zero(); - let inline_end_margin = inline_end_margin.specified_or_zero(); - - // If inline-size is set to 'auto', and this is an inline block, use the - // shrink to fit algorithm (see CSS 2.1 § 10.3.9) - let inline_size = match computed_inline_size { - MaybeAuto::Auto => { - block.get_shrink_to_fit_inline_size(available_inline_size - (inline_start_margin + - inline_end_margin)) - } - MaybeAuto::Specified(inline_size) => inline_size, - }; - - ISizeConstraintSolution::new(inline_size, inline_start_margin, inline_end_margin) - } -} - -impl ISizeAndMarginsComputer for InlineBlockReplaced { - /// Compute inline-start and inline-end margins and inline-size. - /// - /// ISize has already been calculated. We now calculate the margins just - /// like for non-replaced blocks. - fn solve_inline_size_constraints(&self, - block: &mut BlockFlow, - input: &ISizeConstraintInput) - -> ISizeConstraintSolution { - debug_assert!(match input.computed_inline_size { - MaybeAuto::Specified(_) => true, - MaybeAuto::Auto => false, - }); - - let (computed_inline_size, - inline_start_margin, - inline_end_margin, - available_inline_size) = - (input.computed_inline_size, - input.inline_start_margin, - input.inline_end_margin, - input.available_inline_size); - - // For inline-blocks, `auto` margins compute to 0. 
- let inline_start_margin = inline_start_margin.specified_or_zero(); - let inline_end_margin = inline_end_margin.specified_or_zero(); - - // If inline-size is set to 'auto', and this is an inline block, use the - // shrink to fit algorithm (see CSS 2.1 § 10.3.9) - let inline_size = match computed_inline_size { - MaybeAuto::Auto => { - block.get_shrink_to_fit_inline_size(available_inline_size - (inline_start_margin + - inline_end_margin)) - } - MaybeAuto::Specified(inline_size) => inline_size, - }; - - ISizeConstraintSolution::new(inline_size, inline_start_margin, inline_end_margin) - } - - /// Calculate used value of inline-size just like we do for inline replaced elements. - fn initial_computed_inline_size(&self, - block: &mut BlockFlow, - parent_flow_inline_size: Au, - shared_context: &SharedStyleContext) - -> MaybeAuto { - let container_block_size = block.explicit_block_containing_size(shared_context); - let fragment = block.fragment(); - fragment.assign_replaced_inline_size_if_necessary(parent_flow_inline_size, container_block_size); - // For replaced block flow, the rest of the constraint solving will - // take inline-size to be specified as the value computed here. - MaybeAuto::Specified(fragment.content_box().size.inline) - } -} - -impl ISizeAndMarginsComputer for InlineFlexItem { - // Replace the default method directly to prevent recalculating and setting margins again - // which has already been set by its parent. - fn compute_used_inline_size(&self, - block: &mut BlockFlow, - shared_context: &SharedStyleContext, - parent_flow_inline_size: Au) { - let container_block_size = block.explicit_block_containing_size(shared_context); - block.fragment.assign_replaced_inline_size_if_necessary(parent_flow_inline_size, - container_block_size); - } - - // The used inline size and margins are set by parent flex flow, do nothing here. - fn solve_inline_size_constraints(&self, - block: &mut BlockFlow, - _: &ISizeConstraintInput) - -> ISizeConstraintSolution { - let fragment = block.fragment(); - ISizeConstraintSolution::new(fragment.border_box.size.inline, - fragment.margin.inline_start, - fragment.margin.inline_end) - } -} - -/// A stacking context, a pseudo-stacking context, or a non-stacking context. -#[derive(Clone, Copy, PartialEq)] -pub enum BlockStackingContextType { - NonstackingContext, - PseudoStackingContext, - StackingContext, -} diff --git a/collector/compile-benchmarks/style-servo/components/layout/construct.rs b/collector/compile-benchmarks/style-servo/components/layout/construct.rs deleted file mode 100644 index fa6840292..000000000 --- a/collector/compile-benchmarks/style-servo/components/layout/construct.rs +++ /dev/null @@ -1,2071 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - -//! Creates flows and fragments from a DOM tree via a bottom-up, incremental traversal of the DOM. -//! -//! Each step of the traversal considers the node and existing flow, if there is one. If a node is -//! not dirty and an existing flow exists, then the traversal reuses that flow. Otherwise, it -//! proceeds to construct either a flow or a `ConstructionItem`. A construction item is a piece of -//! intermediate data that goes with a DOM node and hasn't found its "home" yet-maybe it's a box, -//! maybe it's an absolute or fixed position thing that hasn't found its containing block yet. -//! 
Construction items bubble up the tree from children to parents until they find their homes. - -#![deny(unsafe_code)] - -use ServoArc; -use block::BlockFlow; -use context::{LayoutContext, with_thread_local_font_context}; -use data::{HAS_NEWLY_CONSTRUCTED_FLOW, LayoutData}; -use flex::FlexFlow; -use floats::FloatKind; -use flow::{self, AbsoluteDescendants, Flow, FlowClass, ImmutableFlowUtils}; -use flow::{CAN_BE_FRAGMENTED, IS_ABSOLUTELY_POSITIONED, MARGINS_CANNOT_COLLAPSE}; -use flow::{MutableFlowUtils, MutableOwnedFlowUtils}; -use flow_ref::FlowRef; -use fragment::{CanvasFragmentInfo, ImageFragmentInfo, InlineAbsoluteFragmentInfo, SvgFragmentInfo}; -use fragment::{Fragment, GeneratedContentInfo, IframeFragmentInfo}; -use fragment::{IS_INLINE_FLEX_ITEM, IS_BLOCK_FLEX_ITEM}; -use fragment::{InlineAbsoluteHypotheticalFragmentInfo, TableColumnFragmentInfo}; -use fragment::{InlineBlockFragmentInfo, SpecificFragmentInfo, UnscannedTextFragmentInfo}; -use fragment::WhitespaceStrippingResult; -use gfx::display_list::OpaqueNode; -use inline::{FIRST_FRAGMENT_OF_ELEMENT, InlineFlow}; -use inline::{InlineFragmentNodeInfo, LAST_FRAGMENT_OF_ELEMENT}; -use linked_list::prepend_from; -use list_item::{ListItemFlow, ListStyleTypeContent}; -use multicol::{MulticolColumnFlow, MulticolFlow}; -use parallel; -use script_layout_interface::{LayoutElementType, LayoutNodeType, is_image_data}; -use script_layout_interface::wrapper_traits::{PseudoElementType, ThreadSafeLayoutElement, ThreadSafeLayoutNode}; -use servo_config::opts; -use servo_url::ServoUrl; -use std::borrow::ToOwned; -use std::collections::LinkedList; -use std::marker::PhantomData; -use std::mem; -use std::sync::Arc; -use std::sync::atomic::Ordering; -use style::computed_values::{caption_side, display, empty_cells, float, list_style_position}; -use style::computed_values::content::ContentItem; -use style::computed_values::position; -use style::context::SharedStyleContext; -use style::logical_geometry::Direction; -use style::properties::ComputedValues; -use style::properties::longhands::list_style_image; -use style::selector_parser::{PseudoElement, RestyleDamage}; -use style::servo::restyle_damage::{BUBBLE_ISIZES, RECONSTRUCT_FLOW}; -use style::values::Either; -use table::TableFlow; -use table_caption::TableCaptionFlow; -use table_cell::TableCellFlow; -use table_colgroup::TableColGroupFlow; -use table_row::TableRowFlow; -use table_rowgroup::TableRowGroupFlow; -use table_wrapper::TableWrapperFlow; -use text::TextRunScanner; -use traversal::PostorderNodeMutTraversal; -use wrapper::{LayoutNodeLayoutData, TextContent, ThreadSafeLayoutNodeHelpers}; - -/// The results of flow construction for a DOM node. -#[derive(Clone)] -pub enum ConstructionResult { - /// This node contributes nothing at all (`display: none`). Alternately, this is what newly - /// created nodes have their `ConstructionResult` set to. - None, - - /// This node contributed a flow at the proper position in the tree. - /// Nothing more needs to be done for this node. It has bubbled up fixed - /// and absolute descendant flows that have a containing block above it. - Flow(FlowRef, AbsoluteDescendants), - - /// This node contributed some object or objects that will be needed to construct a proper flow - /// later up the tree, but these objects have not yet found their home. - ConstructionItem(ConstructionItem), -} - -impl ConstructionResult { - pub fn get(&mut self) -> ConstructionResult { - // FIXME(pcwalton): Stop doing this with inline fragments. Cloning fragments is very - // inefficient! 
- (*self).clone() - } - - pub fn debug_id(&self) -> usize { - match *self { - ConstructionResult::None => 0, - ConstructionResult::ConstructionItem(_) => 0, - ConstructionResult::Flow(ref flow_ref, _) => flow::base(&**flow_ref).debug_id(), - } - } -} - -/// Represents the output of flow construction for a DOM node that has not yet resulted in a -/// complete flow. Construction items bubble up the tree until they find a `Flow` to be attached -/// to. -#[derive(Clone)] -pub enum ConstructionItem { - /// Inline fragments and associated {ib} splits that have not yet found flows. - InlineFragments(InlineFragmentsConstructionResult), - /// Potentially ignorable whitespace. - Whitespace(OpaqueNode, PseudoElementType<()>, ServoArc, RestyleDamage), - /// TableColumn Fragment - TableColumnFragment(Fragment), -} - -/// Represents inline fragments and {ib} splits that are bubbling up from an inline. -#[derive(Clone)] -pub struct InlineFragmentsConstructionResult { - /// Any {ib} splits that we're bubbling up. - pub splits: LinkedList, - - /// Any fragments that succeed the {ib} splits. - pub fragments: IntermediateInlineFragments, -} - -/// Represents an {ib} split that has not yet found the containing block that it belongs to. This -/// is somewhat tricky. An example may be helpful. For this DOM fragment: -/// -/// ```html -/// -/// A -///
<div>B</div>
-/// C -/// </span>
-/// ``` -/// -/// The resulting `ConstructionItem` for the outer `span` will be: -/// -/// ```ignore -/// ConstructionItem::InlineFragments( -/// InlineFragmentsConstructionResult{ -/// splits: linked_list![ -/// InlineBlockSplit{ -/// predecessors: IntermediateInlineFragments{ -/// fragments: linked_list![A], -/// absolute_descendents: AbsoluteDescendents{ -/// descendant_links: vec![] -/// } -/// }, -/// flow: B -/// } -/// ], -/// fragments: linked_list![C], -/// } -/// ) -/// ``` -#[derive(Clone)] -pub struct InlineBlockSplit { - /// The inline fragments that precede the flow. - pub predecessors: IntermediateInlineFragments, - - /// The flow that caused this {ib} split. - pub flow: FlowRef, -} - -impl InlineBlockSplit { - /// Flushes the given accumulator to the new split and makes a new accumulator to hold any - /// subsequent fragments. - fn new(fragment_accumulator: &mut InlineFragmentsAccumulator, - node: &ConcreteThreadSafeLayoutNode, - style_context: &SharedStyleContext, - flow: FlowRef) - -> InlineBlockSplit { - fragment_accumulator.enclosing_node.as_mut().expect( - "enclosing_node is None; Are {ib} splits being generated outside of an inline node?" - ).flags.remove(LAST_FRAGMENT_OF_ELEMENT); - - let split = InlineBlockSplit { - predecessors: mem::replace( - fragment_accumulator, - InlineFragmentsAccumulator::from_inline_node( - node, style_context)).to_intermediate_inline_fragments(style_context), - flow: flow, - }; - - fragment_accumulator.enclosing_node.as_mut().unwrap().flags.remove(FIRST_FRAGMENT_OF_ELEMENT); - - split - } -} - -/// Holds inline fragments and absolute descendants. -#[derive(Clone)] -pub struct IntermediateInlineFragments { - /// The list of fragments. - pub fragments: LinkedList, - - /// The list of absolute descendants of those inline fragments. - pub absolute_descendants: AbsoluteDescendants, -} - -impl IntermediateInlineFragments { - fn new() -> IntermediateInlineFragments { - IntermediateInlineFragments { - fragments: LinkedList::new(), - absolute_descendants: AbsoluteDescendants::new(), - } - } - - fn is_empty(&self) -> bool { - self.fragments.is_empty() && self.absolute_descendants.is_empty() - } - - fn push_all(&mut self, mut other: IntermediateInlineFragments) { - self.fragments.append(&mut other.fragments); - self.absolute_descendants.push_descendants(other.absolute_descendants); - } -} - -/// Holds inline fragments that we're gathering for children of an inline node. -struct InlineFragmentsAccumulator { - /// The list of fragments. - fragments: IntermediateInlineFragments, - - /// Information about the inline box directly enclosing the fragments being gathered, if any. - /// - /// `inline::InlineFragmentNodeInfo` also stores flags indicating whether a fragment is the - /// first and/or last of the corresponding inline box. This `InlineFragmentsAccumulator` may - /// represent only one side of an {ib} split, so we store these flags as if it represented only - /// one fragment. `to_intermediate_inline_fragments` later splits this hypothetical fragment - /// into pieces, leaving the `FIRST_FRAGMENT_OF_ELEMENT` and `LAST_FRAGMENT_OF_ELEMENT` flags, - /// if present, on the first and last fragments of the output. - enclosing_node: Option, - - /// Restyle damage to use for fragments created in this node. - restyle_damage: RestyleDamage, - - /// Bidi control characters to insert before and after these fragments. 
- bidi_control_chars: Option<(&'static str, &'static str)>, -} - -impl InlineFragmentsAccumulator { - fn new() -> InlineFragmentsAccumulator { - InlineFragmentsAccumulator { - fragments: IntermediateInlineFragments::new(), - enclosing_node: None, - bidi_control_chars: None, - restyle_damage: RestyleDamage::empty(), - } - } - - fn from_inline_node(node: &N, style_context: &SharedStyleContext) -> InlineFragmentsAccumulator - where N: ThreadSafeLayoutNode { - InlineFragmentsAccumulator { - fragments: IntermediateInlineFragments::new(), - enclosing_node: Some(InlineFragmentNodeInfo { - address: node.opaque(), - pseudo: node.get_pseudo_element_type().strip(), - style: node.style(style_context), - selected_style: node.selected_style(), - flags: FIRST_FRAGMENT_OF_ELEMENT | LAST_FRAGMENT_OF_ELEMENT, - }), - bidi_control_chars: None, - restyle_damage: node.restyle_damage(), - } - } - - fn push(&mut self, fragment: Fragment) { - self.fragments.fragments.push_back(fragment) - } - - fn push_all(&mut self, mut fragments: IntermediateInlineFragments) { - self.fragments.fragments.append(&mut fragments.fragments); - self.fragments.absolute_descendants.push_descendants(fragments.absolute_descendants); - } - - fn to_intermediate_inline_fragments(self, context: &SharedStyleContext) - -> IntermediateInlineFragments { - let InlineFragmentsAccumulator { - mut fragments, - enclosing_node, - bidi_control_chars, - restyle_damage, - } = self; - if let Some(mut enclosing_node) = enclosing_node { - let fragment_count = fragments.fragments.len(); - for (index, fragment) in fragments.fragments.iter_mut().enumerate() { - let mut enclosing_node = enclosing_node.clone(); - if index != 0 { - enclosing_node.flags.remove(FIRST_FRAGMENT_OF_ELEMENT) - } - if index != fragment_count - 1 { - enclosing_node.flags.remove(LAST_FRAGMENT_OF_ELEMENT) - } - fragment.add_inline_context_style(enclosing_node); - } - - // Control characters are later discarded in transform_text, so they don't affect the - // is_first/is_last styles above. - enclosing_node.flags.remove(FIRST_FRAGMENT_OF_ELEMENT | LAST_FRAGMENT_OF_ELEMENT); - - if let Some((start, end)) = bidi_control_chars { - fragments.fragments.push_front( - control_chars_to_fragment(&enclosing_node, context, start, restyle_damage)); - fragments.fragments.push_back( - control_chars_to_fragment(&enclosing_node, context, end, restyle_damage)); - } - } - fragments - } -} - -/// An object that knows how to create flows. -pub struct FlowConstructor<'a, N: ThreadSafeLayoutNode> { - /// The layout context. - pub layout_context: &'a LayoutContext<'a>, - /// Satisfy the compiler about the unused parameters, which we use to improve the ergonomics of - /// the ensuing impl {} by removing the need to parameterize all the methods individually. - phantom2: PhantomData, -} - -impl<'a, ConcreteThreadSafeLayoutNode: ThreadSafeLayoutNode> - FlowConstructor<'a, ConcreteThreadSafeLayoutNode> { - /// Creates a new flow constructor. - pub fn new(layout_context: &'a LayoutContext<'a>) -> Self { - FlowConstructor { - layout_context: layout_context, - phantom2: PhantomData, - } - } - - #[inline] - fn style_context(&self) -> &SharedStyleContext { - self.layout_context.shared_context() - } - - #[inline] - fn set_flow_construction_result(&self, - node: &ConcreteThreadSafeLayoutNode, - result: ConstructionResult) { - node.set_flow_construction_result(result); - } - - /// Builds the fragment for the given block or subclass thereof. 
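In the removed `to_intermediate_inline_fragments` above, the accumulator treats everything it gathered as one hypothetical fragment of the enclosing inline box, then leaves the first-fragment flag only on the first piece and the last-fragment flag only on the last. A dependency-free sketch of that flag distribution, with plain bools standing in for the `FIRST_FRAGMENT_OF_ELEMENT`/`LAST_FRAGMENT_OF_ELEMENT` bitflags and all names illustrative:

```rust
// Stand-in for the per-fragment flags carried by `InlineFragmentNodeInfo`;
// the real code uses bitflags, this sketch uses two bools.
#[derive(Clone, Copy, Debug, PartialEq)]
struct NodeFlags {
    first_fragment_of_element: bool,
    last_fragment_of_element: bool,
}

// Copy the enclosing node's flags onto every fragment, but keep "first" only
// on fragment 0 and "last" only on the final fragment.
fn distribute_flags(enclosing: NodeFlags, fragment_count: usize) -> Vec<NodeFlags> {
    (0..fragment_count)
        .map(|index| NodeFlags {
            first_fragment_of_element: enclosing.first_fragment_of_element && index == 0,
            last_fragment_of_element: enclosing.last_fragment_of_element
                && index + 1 == fragment_count,
        })
        .collect()
}

fn main() {
    let both = NodeFlags { first_fragment_of_element: true, last_fragment_of_element: true };
    let flags = distribute_flags(both, 3);
    assert!(flags[0].first_fragment_of_element && !flags[0].last_fragment_of_element);
    assert!(!flags[1].first_fragment_of_element && !flags[1].last_fragment_of_element);
    assert!(!flags[2].first_fragment_of_element && flags[2].last_fragment_of_element);
}
```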
- fn build_fragment_for_block(&self, node: &ConcreteThreadSafeLayoutNode) -> Fragment { - let specific_fragment_info = match node.type_id() { - Some(LayoutNodeType::Element(LayoutElementType::HTMLIFrameElement)) => { - SpecificFragmentInfo::Iframe(IframeFragmentInfo::new(node)) - } - Some(LayoutNodeType::Element(LayoutElementType::HTMLImageElement)) => { - let image_info = box ImageFragmentInfo::new(node.image_url(), - node, - &self.layout_context); - SpecificFragmentInfo::Image(image_info) - } - Some(LayoutNodeType::Element(LayoutElementType::HTMLObjectElement)) => { - let image_info = box ImageFragmentInfo::new(node.object_data(), - node, - &self.layout_context); - SpecificFragmentInfo::Image(image_info) - } - Some(LayoutNodeType::Element(LayoutElementType::HTMLTableElement)) => { - SpecificFragmentInfo::TableWrapper - } - Some(LayoutNodeType::Element(LayoutElementType::HTMLTableColElement)) => { - SpecificFragmentInfo::TableColumn(TableColumnFragmentInfo::new(node)) - } - Some(LayoutNodeType::Element(LayoutElementType::HTMLTableCellElement)) => { - SpecificFragmentInfo::TableCell - } - Some(LayoutNodeType::Element(LayoutElementType::HTMLTableRowElement)) | - Some(LayoutNodeType::Element(LayoutElementType::HTMLTableSectionElement)) => { - SpecificFragmentInfo::TableRow - } - Some(LayoutNodeType::Element(LayoutElementType::HTMLCanvasElement)) => { - let data = node.canvas_data().unwrap(); - SpecificFragmentInfo::Canvas(box CanvasFragmentInfo::new(data)) - } - Some(LayoutNodeType::Element(LayoutElementType::SVGSVGElement)) => { - let data = node.svg_data().unwrap(); - SpecificFragmentInfo::Svg(box SvgFragmentInfo::new(data)) - } - _ => { - // This includes pseudo-elements. - SpecificFragmentInfo::Generic - } - }; - - Fragment::new(node, specific_fragment_info, self.layout_context) - } - - /// Creates an inline flow from a set of inline fragments, then adds it as a child of the given - /// flow or pushes it onto the given flow list. - /// - /// `#[inline(always)]` because this is performance critical and LLVM will not inline it - /// otherwise. - #[inline(always)] - fn flush_inline_fragments_to_flow(&mut self, - fragment_accumulator: InlineFragmentsAccumulator, - flow: &mut FlowRef, - absolute_descendants: &mut AbsoluteDescendants, - legalizer: &mut Legalizer, - node: &ConcreteThreadSafeLayoutNode) { - let mut fragments = fragment_accumulator.to_intermediate_inline_fragments(self.style_context()); - if fragments.is_empty() { - return - }; - - strip_ignorable_whitespace_from_start(&mut fragments.fragments); - strip_ignorable_whitespace_from_end(&mut fragments.fragments); - if fragments.fragments.is_empty() { - absolute_descendants.push_descendants(fragments.absolute_descendants); - return - } - - // Build a list of all the inline-block fragments before fragments is moved. - let mut inline_block_flows = vec!(); - for fragment in &fragments.fragments { - match fragment.specific { - SpecificFragmentInfo::InlineBlock(ref info) => { - inline_block_flows.push(info.flow_ref.clone()) - } - SpecificFragmentInfo::InlineAbsoluteHypothetical(ref info) => { - inline_block_flows.push(info.flow_ref.clone()) - } - SpecificFragmentInfo::InlineAbsolute(ref info) => { - inline_block_flows.push(info.flow_ref.clone()) - } - _ => {} - } - } - - // We must scan for runs before computing minimum ascent and descent because scanning - // for runs might collapse so much whitespace away that only hypothetical fragments - // remain. In that case the inline flow will compute its ascent and descent to be zero. 
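The removed `flush_inline_fragments_to_flow` strips ignorable whitespace from both ends of the gathered fragment list before building an inline flow. A simplified, self-contained sketch of that trimming; in the real code whether whitespace is ignorable also depends on the fragment's `white-space` style, which this sketch ignores:

```rust
use std::collections::LinkedList;

// Much-simplified fragment: only its text matters for this illustration.
#[derive(Debug, PartialEq)]
struct Fragment {
    text: String,
}

impl Fragment {
    fn is_ignorable_whitespace(&self) -> bool {
        self.text.chars().all(char::is_whitespace)
    }
}

// Drop whitespace-only fragments from the front of the list, in the spirit of
// the removed `strip_ignorable_whitespace_from_start`.
fn strip_ignorable_whitespace_from_start(list: &mut LinkedList<Fragment>) {
    while list.front().map_or(false, Fragment::is_ignorable_whitespace) {
        list.pop_front();
    }
}

// ...and from the back, in the spirit of `strip_ignorable_whitespace_from_end`.
fn strip_ignorable_whitespace_from_end(list: &mut LinkedList<Fragment>) {
    while list.back().map_or(false, Fragment::is_ignorable_whitespace) {
        list.pop_back();
    }
}

fn main() {
    let mut fragments: LinkedList<Fragment> = [" ", "text", " "]
        .iter()
        .map(|s| Fragment { text: s.to_string() })
        .collect();
    strip_ignorable_whitespace_from_start(&mut fragments);
    strip_ignorable_whitespace_from_end(&mut fragments);
    assert_eq!(fragments.len(), 1);
}
```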
- let scanned_fragments = - with_thread_local_font_context(self.layout_context, |font_context| { - TextRunScanner::new().scan_for_runs(font_context, - mem::replace(&mut fragments.fragments, LinkedList::new())) - }); - let mut inline_flow_ref = - FlowRef::new(Arc::new(InlineFlow::from_fragments(scanned_fragments, - node.style(self.style_context()).writing_mode))); - - // Add all the inline-block fragments as children of the inline flow. - for inline_block_flow in &inline_block_flows { - inline_flow_ref.add_new_child(inline_block_flow.clone()); - } - - // Set up absolute descendants as necessary. - // - // The inline flow itself may need to become the containing block for absolute descendants - // in order to handle cases like: - // - //
- //         <span style="position: relative"> - //             <span style="position: absolute; ..."></span> - //         </span> - //     </div>
- // - // See the comment above `flow::AbsoluteDescendantInfo` for more information. - inline_flow_ref.take_applicable_absolute_descendants(&mut fragments.absolute_descendants); - absolute_descendants.push_descendants(fragments.absolute_descendants); - - { - // FIXME(#6503): Use Arc::get_mut().unwrap() here. - let inline_flow = FlowRef::deref_mut(&mut inline_flow_ref).as_mut_inline(); - inline_flow.minimum_line_metrics = - with_thread_local_font_context(self.layout_context, |font_context| { - inline_flow.minimum_line_metrics(font_context, &node.style(self.style_context())) - }); - } - - inline_flow_ref.finish(); - legalizer.add_child(self.style_context(), flow, inline_flow_ref) - } - - fn build_block_flow_using_construction_result_of_child( - &mut self, - flow: &mut FlowRef, - node: &ConcreteThreadSafeLayoutNode, - kid: ConcreteThreadSafeLayoutNode, - inline_fragment_accumulator: &mut InlineFragmentsAccumulator, - abs_descendants: &mut AbsoluteDescendants, - legalizer: &mut Legalizer) { - match kid.get_construction_result() { - ConstructionResult::None => {} - ConstructionResult::Flow(kid_flow, kid_abs_descendants) => { - // If kid_flow is TableCaptionFlow, kid_flow should be added under - // TableWrapperFlow. - if flow.is_table() && kid_flow.is_table_caption() { - let construction_result = - ConstructionResult::Flow(kid_flow, AbsoluteDescendants::new()); - self.set_flow_construction_result(&kid, construction_result) - } else { - if !flow::base(&*kid_flow).flags.contains(IS_ABSOLUTELY_POSITIONED) { - // Flush any inline fragments that we were gathering up. This allows us to - // handle {ib} splits. - let old_inline_fragment_accumulator = - mem::replace(inline_fragment_accumulator, - InlineFragmentsAccumulator::new()); - self.flush_inline_fragments_to_flow(old_inline_fragment_accumulator, - flow, - abs_descendants, - legalizer, - node); - } - legalizer.add_child(self.style_context(), flow, kid_flow) - } - abs_descendants.push_descendants(kid_abs_descendants); - } - ConstructionResult::ConstructionItem(ConstructionItem::InlineFragments( - InlineFragmentsConstructionResult { - splits, - fragments: successor_fragments, - })) => { - // Add any {ib} splits. - for split in splits { - // Pull apart the {ib} split object and push its predecessor fragments - // onto the list. - let InlineBlockSplit { - predecessors, - flow: kid_flow - } = split; - inline_fragment_accumulator.push_all(predecessors); - - // Flush any inline fragments that we were gathering up. - debug!("flushing {} inline box(es) to flow A", - inline_fragment_accumulator.fragments.fragments.len()); - let old_inline_fragment_accumulator = - mem::replace(inline_fragment_accumulator, - InlineFragmentsAccumulator::new()); - let absolute_descendants = - &mut inline_fragment_accumulator.fragments.absolute_descendants; - self.flush_inline_fragments_to_flow(old_inline_fragment_accumulator, - flow, - absolute_descendants, - legalizer, - node); - - // Push the flow generated by the {ib} split onto our list of flows. - legalizer.add_child(self.style_context(), flow, kid_flow) - } - - // Add the fragments to the list we're maintaining. - inline_fragment_accumulator.push_all(successor_fragments); - } - ConstructionResult::ConstructionItem(ConstructionItem::Whitespace( - whitespace_node, - whitespace_pseudo, - whitespace_style, - whitespace_damage)) => { - // Add whitespace results. They will be stripped out later on when - // between block elements, and retained when between inline elements. 
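The removed `build_block_flow_using_construction_result_of_child` is a three-way dispatch on what each child contributed: nothing, a finished flow, or inline content still looking for its home, with any gathered inline run flushed before a block child is attached. A drastically simplified, self-contained model of that dispatch; every type here is an illustrative stand-in, not Servo's:

```rust
// Toy construction result: strings stand in for real flows and fragments.
enum ConstructionResult {
    // `display: none` (or a freshly created node): contributes nothing.
    None,
    // A finished child flow, ready to be attached to the parent block.
    Flow(String),
    // Inline content that has not yet found its containing block.
    InlineItem(String),
}

#[derive(Debug, Default)]
struct BlockBuilder {
    child_flows: Vec<String>,
    pending_inline: Vec<String>,
}

impl BlockBuilder {
    fn consume(&mut self, result: ConstructionResult) {
        match result {
            ConstructionResult::None => {}
            ConstructionResult::Flow(flow) => {
                // A block child interrupts the inline run we were gathering,
                // so flush that run into an anonymous inline flow first.
                self.flush_inline_run();
                self.child_flows.push(flow);
            }
            ConstructionResult::InlineItem(fragment) => self.pending_inline.push(fragment),
        }
    }

    fn flush_inline_run(&mut self) {
        if !self.pending_inline.is_empty() {
            let run = self.pending_inline.drain(..).collect::<Vec<_>>().join("+");
            self.child_flows.push(format!("inline({run})"));
        }
    }
}

fn main() {
    let mut builder = BlockBuilder::default();
    for result in [
        ConstructionResult::InlineItem("A".into()),
        ConstructionResult::Flow("div B".into()),
        ConstructionResult::InlineItem("C".into()),
        ConstructionResult::None,
    ] {
        builder.consume(result);
    }
    builder.flush_inline_run();
    // The block child splits the inline content, mirroring an {ib} split.
    assert_eq!(builder.child_flows, ["inline(A)", "div B", "inline(C)"]);
}
```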
- let fragment_info = SpecificFragmentInfo::UnscannedText( - box UnscannedTextFragmentInfo::new(" ".to_owned(), None)); - let fragment = Fragment::from_opaque_node_and_style(whitespace_node, - whitespace_pseudo, - whitespace_style, - node.selected_style(), - whitespace_damage, - fragment_info); - inline_fragment_accumulator.fragments.fragments.push_back(fragment); - } - ConstructionResult::ConstructionItem(ConstructionItem::TableColumnFragment(_)) => { - // TODO: Implement anonymous table objects for missing parents - // CSS 2.1 § 17.2.1, step 3-2 - } - } - } - - /// Constructs a block flow, beginning with the given `initial_fragments` if present and then - /// appending the construction results of children to the child list of the block flow. {ib} - /// splits and absolutely-positioned descendants are handled correctly. - fn build_flow_for_block_starting_with_fragments( - &mut self, - mut flow: FlowRef, - node: &ConcreteThreadSafeLayoutNode, - initial_fragments: IntermediateInlineFragments) - -> ConstructionResult { - // Gather up fragments for the inline flows we might need to create. - let mut inline_fragment_accumulator = InlineFragmentsAccumulator::new(); - - inline_fragment_accumulator.fragments.push_all(initial_fragments); - - // List of absolute descendants, in tree order. - let mut abs_descendants = AbsoluteDescendants::new(); - let mut legalizer = Legalizer::new(); - if !node.is_replaced_content() { - for kid in node.children() { - if kid.get_pseudo_element_type() != PseudoElementType::Normal { - self.process(&kid); - } - - self.build_block_flow_using_construction_result_of_child( - &mut flow, - node, - kid, - &mut inline_fragment_accumulator, - &mut abs_descendants, - &mut legalizer); - } - } - - // Perform a final flush of any inline fragments that we were gathering up to handle {ib} - // splits, after stripping ignorable whitespace. - self.flush_inline_fragments_to_flow(inline_fragment_accumulator, - &mut flow, - &mut abs_descendants, - &mut legalizer, - node); - - // The flow is done. - legalizer.finish(&mut flow); - flow.finish(); - - // Set up the absolute descendants. - if flow.is_absolute_containing_block() { - // This is the containing block for all the absolute descendants. - flow.set_absolute_descendants(abs_descendants); - - abs_descendants = AbsoluteDescendants::new(); - if flow::base(&*flow).flags.contains(IS_ABSOLUTELY_POSITIONED) { - // This is now the only absolute flow in the subtree which hasn't yet - // reached its CB. - abs_descendants.push(flow.clone()); - } - } - ConstructionResult::Flow(flow, abs_descendants) - } - - /// Constructs a flow for the given block node and its children. This method creates an - /// initial fragment as appropriate and then dispatches to - /// `build_flow_for_block_starting_with_fragments`. Currently the following kinds of flows get - /// initial content: - /// - /// * Generated content gets the initial content specified by the `content` attribute of the - /// CSS. - /// * `` and `
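The removed `build_flow_for_block_starting_with_fragments` ends by deciding whether the new block claims the absolutely positioned descendants collected from its subtree or lets them keep bubbling upward; a flow that is itself absolutely positioned claims its descendants and then continues to bubble on its own. A toy model of that bookkeeping, with every name an illustrative stand-in for Servo's flow machinery:

```rust
// Toy flow: just enough state to show the absolute-descendant bookkeeping.
#[derive(Debug, Default)]
struct Flow {
    name: &'static str,
    is_absolutely_positioned: bool,
    establishes_containing_block: bool,
    claimed_absolute_descendants: Vec<&'static str>,
}

// Returns the absolute descendants that keep bubbling up past `flow`.
fn finish_flow(flow: &mut Flow, mut bubbled_up: Vec<&'static str>) -> Vec<&'static str> {
    if flow.establishes_containing_block {
        // This flow is the containing block for everything gathered so far.
        flow.claimed_absolute_descendants = std::mem::take(&mut bubbled_up);
        if flow.is_absolutely_positioned {
            // The flow itself still needs a containing block further up.
            bubbled_up.push(flow.name);
        }
    }
    bubbled_up
}

fn main() {
    let mut relative_parent = Flow {
        name: "relatively positioned div",
        establishes_containing_block: true,
        ..Flow::default()
    };
    // An absolutely positioned child bubbled up from below is claimed by the
    // relatively positioned parent, so nothing escapes further upward.
    let escaped = finish_flow(&mut relative_parent, vec!["absolute span"]);
    assert!(escaped.is_empty());
    assert_eq!(relative_parent.claimed_absolute_descendants, ["absolute span"]);
}
```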