788 changes: 20 additions & 768 deletions Cargo.lock

Large diffs are not rendered by default.

11 changes: 3 additions & 8 deletions Cargo.toml
@@ -33,15 +33,10 @@ harfrust = { version = "0.3.2", default-features = false }
hashbrown = { version = "0.16.1", default-features = false, features = ["default-hasher"] }
icu_codepointtrie_builder = { version = "0.5.1", default-features = false, features = ["wasm"] }
icu_collections = { version = "2.1.1", default-features = false }
icu_locale = { version = "2.1.1", default-features = false }
icu_locale_core = { version = "2.1.1", default-features = false }
icu_normalizer = { version = "~2.1.1", default-features = false }
icu_properties = { version = "~2.1.2", default-features = false }
icu_provider = { version = "2.1.1", default-features = false }
icu_provider_adapters = { version = "2.1.1", default-features = false }
icu_provider_export = { version = "2.1.1", default-features = false }
icu_provider_source = { version = "2.1.1", default-features = false }
icu_segmenter = { version = "~2.1.1", default-features = false }
icu_normalizer = { version = "2.1.1", default-features = false }
icu_properties = { version = "2.1.2", default-features = false }
icu_segmenter = { version = "2.1.2", default-features = false }
linebender_resource_handle = { version = "0.1.1", default-features = false }
parley = { version = "0.7.0", default-features = false, path = "parley" }
parley_data = { path = "parley_data", default-features = false }
12 changes: 4 additions & 8 deletions parley/Cargo.toml
@@ -27,20 +27,16 @@ accesskit = ["dep:accesskit"]
[dependencies]
skrifa = { workspace = true }
linebender_resource_handle = { workspace = true }
fontique = { workspace = true }
fontique = { workspace = true, features = ["icu_properties"] }
core_maths = { version = "0.1.1", optional = true }
parley_data = { workspace = true, features = ["baked"] }
accesskit = { workspace = true, optional = true }
hashbrown = { workspace = true }
harfrust = { workspace = true }
icu_collections = { workspace = true }
icu_locale_core = { workspace = true }
icu_normalizer = { workspace = true }
icu_properties = { workspace = true }
icu_provider = { workspace = true }
icu_segmenter = { workspace = true, features = ["auto"] }
# Used in ICU4X baked data sources
zerovec = { workspace = true }
icu_normalizer = { workspace = true, features = ["compiled_data"] }
icu_properties = { workspace = true, features = ["compiled_data"] }
icu_segmenter = { workspace = true, features = ["compiled_data"] }

[dev-dependencies]
parley_dev = { workspace = true }
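
Editorial note, not part of the diff: the `compiled_data` features enabled above make the ICU4X `new()` constructors use Unicode data compiled directly into the crates, which is why the explicit `icu_provider` / `zerovec` plumbing can be dropped. A minimal standalone sketch of what that buys, using only constructors that appear later in this diff:

use icu_normalizer::properties::{CanonicalComposition, CanonicalDecomposition};

fn main() {
    // With `compiled_data` enabled, `new()` takes no data-provider argument;
    // the required normalization data ships inside the crate itself.
    let decomposition = CanonicalDecomposition::new();
    let composition = CanonicalComposition::new();
    // Both are ready to use immediately, e.g. in `decomposed()` / `composed()`
    // in the cluster.rs changes below.
    let _ = (&decomposition, &composition);
}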
39 changes: 15 additions & 24 deletions parley/src/analysis/cluster.rs
@@ -2,9 +2,9 @@
// SPDX-License-Identifier: Apache-2.0 OR MIT

use alloc::vec::Vec;
use icu_normalizer::properties::Decomposed;

use crate::analysis::AnalysisDataSources;
use icu_normalizer::properties::{CanonicalComposition, CanonicalDecomposition, Decomposed};
use parley_data::Properties;

/// The maximum number of characters in a single cluster.
const MAX_CLUSTER_SIZE: usize = 32;
Expand Down Expand Up @@ -116,12 +116,12 @@ impl CharCluster {
}

#[inline(always)]
fn contributes_to_shaping(ch: char, analysis_data_sources: &AnalysisDataSources) -> bool {
let props = analysis_data_sources.composite.properties(ch as u32);
fn contributes_to_shaping(ch: char) -> bool {
let props = Properties::get(ch);
crate::analysis::contributes_to_shaping(props.general_category(), props.script())
}

fn decomposed(&mut self, analysis_data_sources: &AnalysisDataSources) -> Option<&[Char]> {
fn decomposed(&mut self) -> Option<&[Char]> {
match self.decomp.state {
FormState::Invalid => None,
FormState::None => {
@@ -132,22 +132,19 @@ impl CharCluster {
return None;
}

let decomposer = analysis_data_sources.decomposing_normalizer();
let decomp = decomposer.decompose(self.chars[0].ch);
let decomp = CanonicalDecomposition::new().decompose(self.chars[0].ch);
match decomp {
Decomposed::Default | Decomposed::Singleton(_) => {
return None;
}
Decomposed::Expansion(a, b) => {
let mut copy = self.chars[0];
copy.ch = a;
copy.contributes_to_shaping =
Self::contributes_to_shaping(a, analysis_data_sources);
copy.contributes_to_shaping = Self::contributes_to_shaping(a);
self.decomp.chars[0] = copy;

copy.ch = b;
copy.contributes_to_shaping =
Self::contributes_to_shaping(b, analysis_data_sources);
copy.contributes_to_shaping = Self::contributes_to_shaping(b);
self.decomp.chars[1] = copy;

self.decomp.len = 2;
@@ -162,7 +159,7 @@ impl CharCluster {
}
}

fn composed(&mut self, analysis_data_sources: &AnalysisDataSources) -> Option<&[Char]> {
fn composed(&mut self) -> Option<&[Char]> {
match self.comp.state {
FormState::Invalid => None,
FormState::None => {
@@ -173,15 +170,13 @@ impl CharCluster {
return None;
}

let composer = analysis_data_sources.composing_normalizer();
let comp = composer.compose(self.chars[0].ch, self.chars[1].ch);
let comp = CanonicalComposition::new().compose(self.chars[0].ch, self.chars[1].ch);
match comp {
None => {}
Some(ch) => {
let mut copy = self.chars[0];
copy.ch = ch;
copy.contributes_to_shaping =
Self::contributes_to_shaping(ch, analysis_data_sources);
copy.contributes_to_shaping = Self::contributes_to_shaping(ch);
self.comp.chars[0] = copy;
self.comp.len = 1;
}
@@ -195,19 +190,15 @@ impl CharCluster {
}
}

pub(crate) fn map(
&mut self,
f: impl Fn(char) -> GlyphId,
analysis_data_sources: &AnalysisDataSources,
) -> Status {
pub(crate) fn map(&mut self, f: impl Fn(char) -> GlyphId) -> Status {
let len = self.len();
if len == 0 {
return Status::Complete;
}
let mut glyph_ids = [0_u16; MAX_CLUSTER_SIZE];
let prev_ratio = self.best_ratio;
let mut ratio;
if self.force_normalize && self.composed(analysis_data_sources).is_some() {
if self.force_normalize && self.composed().is_some() {
ratio = self.comp.map(&f, &mut glyph_ids, self.best_ratio);
if ratio > self.best_ratio {
self.best_ratio = ratio;
@@ -229,7 +220,7 @@ impl CharCluster {
return Status::Complete;
}
}
if self.decomposed(analysis_data_sources).is_some() {
if self.decomposed().is_some() {
ratio = self.decomp.map(&f, &mut glyph_ids, self.best_ratio);
if ratio > self.best_ratio {
self.best_ratio = ratio;
@@ -238,7 +229,7 @@ impl CharCluster {
return Status::Complete;
}
}
if !self.force_normalize && self.composed(analysis_data_sources).is_some() {
if !self.force_normalize && self.composed().is_some() {
ratio = self.comp.map(&f, &mut glyph_ids, self.best_ratio);
if ratio > self.best_ratio {
self.best_ratio = ratio;
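
Editorial note, not part of the PR: the `decomposed()` / `composed()` changes above rely on icu_normalizer's canonical decompose/compose round trip. A minimal standalone sketch with an illustrative character ('é'), assuming the `compiled_data` feature from parley/Cargo.toml:

use icu_normalizer::properties::{CanonicalComposition, CanonicalDecomposition, Decomposed};

fn main() {
    // 'é' (U+00E9) canonically decomposes into 'e' followed by U+0301
    // (combining acute accent), i.e. the `Expansion` case handled above.
    match CanonicalDecomposition::new().decompose('\u{00E9}') {
        Decomposed::Expansion(a, b) => {
            assert_eq!((a, b), ('e', '\u{0301}'));
            // Composing the pair back yields the precomposed character again,
            // which is the success path `composed()` takes above.
            assert_eq!(CanonicalComposition::new().compose(a, b), Some('\u{00E9}'));
        }
        // Characters that decompose to themselves (`Default`) or to a single
        // other character (`Singleton`) hit the early `return None` path in
        // `decomposed()`.
        _ => {}
    }
}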