Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions components/icu/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,7 @@ unstable = [
"icu_calendar/unstable",
"icu_datetime/unstable",
"icu_plurals/unstable",
"icu_segmenter/unstable",
"icu_time/unstable",
"dep:icu_experimental",
"dep:icu_pattern",
Expand Down
1 change: 1 addition & 0 deletions components/segmenter/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ default = ["compiled_data", "auto"]
serde = ["dep:serde", "potential_utf/serde", "zerovec/serde", "icu_collections/serde", "icu_provider/serde"]
datagen = ["serde", "dep:databake", "potential_utf/databake", "zerovec/databake", "icu_collections/databake", "icu_provider/export"]
lstm = ["dep:core_maths"]
unstable = []
auto = ["lstm"] # Enables [try_]new_auto constructors
compiled_data = ["dep:icu_segmenter_data", "dep:icu_locale", "icu_locale?/compiled_data", "icu_provider/baked"]

Expand Down
43 changes: 34 additions & 9 deletions components/segmenter/examples/experimental_segmenter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,24 +14,48 @@ mod cnn;
use adaboost::Predictor;
use cnn::{CnnSegmenter, RawCnnData};
use icu_segmenter::{options::WordBreakOptions, WordSegmenter, WordSegmenterBorrowed};
use std::time::SystemTime;
use std::time::Instant;

const REPETITIONS: usize = 1000;

fn main_adaboost(args: &[String]) {
/// Runs the AdaBoost radical-feature segmenter demo: times `REPETITIONS`
/// prediction passes over `args[0]`, then prints the segmentation with `|`
/// between predicted words, followed by the elapsed time.
fn main_radaboost(args: &[String]) {
    let segmenter = Predictor::for_test();
    let s = &args[0];
    // Instant is the monotonic clock for measuring elapsed time; unlike
    // SystemTime, `elapsed()` is infallible (no clock-went-backwards case).
    let start_time = Instant::now();
    for _ in 0..REPETITIONS {
        segmenter.predict(s);
    }
    let elapsed = start_time.elapsed();
    println!("Output:");
    let mut prev = 0;
    // Breakpoints are byte offsets into `s`; the first one is 0, so the very
    // first iteration prints an empty segment followed by '|'.
    for breakpoint in segmenter.predict_breakpoints(s) {
        print!("{}|", &s[prev..breakpoint]);
        prev = breakpoint;
    }
    if prev < s.len() {
        print!("{}", &s[prev..]);
    }
    println!();
    println!("{} repetitions done in: {:?}", REPETITIONS, elapsed);
}

/// Thai variant of the AdaBoost demo: benchmarks `REPETITIONS` runs of
/// `predict_thai` over `args[0]`, then prints the '|'-separated segmentation
/// and the elapsed wall-clock time.
fn main_thadaboost(args: &[String]) {
    let predictor = Predictor::for_test_thai();
    let text = &args[0];

    // Benchmark phase: repeat the prediction and measure total elapsed time.
    let timer = Instant::now();
    (0..REPETITIONS).for_each(|_| {
        predictor.predict_thai(text);
    });
    let elapsed = timer.elapsed();

    // Output phase: render each segment followed by '|', then any tail text
    // after the final breakpoint.
    println!("Output:");
    let mut segment_start = 0;
    for boundary in predictor.predict_thai_breakpoints(text) {
        print!("{}|", &text[segment_start..boundary]);
        segment_start = boundary;
    }
    if segment_start < text.len() {
        print!("{}", &text[segment_start..]);
    }
    println!();
    println!("{} repetitions done in: {:?}", REPETITIONS, elapsed);
}
Expand All @@ -55,11 +79,11 @@ fn main_cnn(args: &[String]) {
.unwrap();
let segmenter = CnnSegmenter::new(&cnndata);
let s = &args[0];
let start_time = SystemTime::now();
let start_time = Instant::now();
for _ in 0..REPETITIONS {
segmenter.segment_str(s);
}
let elapsed = start_time.elapsed().unwrap();
let elapsed = start_time.elapsed();
println!("Output:");
let mut prev = 0;
for breakpoint in segmenter.segment_str(s).to_breakpoints() {
Expand All @@ -82,11 +106,11 @@ fn main_lstm(mut args: &[String]) {
}

fn run_word_segmenter(segmenter: WordSegmenterBorrowed, s: &str) {
let start_time = SystemTime::now();
let start_time = Instant::now();
for _ in 0..REPETITIONS {
segmenter.segment_str(s).count(); // consume the iterator
}
let elapsed = start_time.elapsed().unwrap();
let elapsed = start_time.elapsed();
println!("Output:");
let mut prev = 0;
for breakpoint in segmenter.segment_str(s) {
Expand All @@ -104,7 +128,8 @@ fn main() {
return;
}
match args[1].as_str() {
"adaboost" => main_adaboost(&args[2..]),
"radaboost" => main_radaboost(&args[2..]),
"thadaboost" => main_thadaboost(&args[2..]),
"dict" | "dictionary" => main_dict(&args[2..]),
"cnn" => main_cnn(&args[2..]),
"lstm" => main_lstm(&args[2..]),
Expand Down
6 changes: 6 additions & 0 deletions components/segmenter/src/provider/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@

mod lstm;
pub use lstm::*;
#[cfg(feature = "unstable")]
pub mod radical;

use crate::options::WordType;
use icu_collections::codepointtrie::CodePointTrie;
Expand Down Expand Up @@ -51,6 +53,8 @@ const _: () = {
impl_segmenter_break_line_v1!(Baked);
#[cfg(feature = "lstm")]
impl_segmenter_lstm_auto_v1!(Baked);
#[cfg(feature = "unstable")]
impl_segmenter_unihan_radical_v1!(Baked);
impl_segmenter_break_word_v1!(Baked);
impl_segmenter_break_word_override_v1!(Baked);
impl_segmenter_break_sentence_override_v1!(Baked);
Expand Down Expand Up @@ -135,6 +139,8 @@ pub const MARKERS: &[DataMarkerInfo] = &[
SegmenterDictionaryAutoV1::INFO,
SegmenterDictionaryExtendedV1::INFO,
SegmenterLstmAutoV1::INFO,
#[cfg(feature = "unstable")]
radical::SegmenterUnihanRadicalV1::INFO,
];

/// Pre-processed Unicode data in the form of tables to be used for rule-based breaking.
Expand Down
32 changes: 32 additions & 0 deletions components/segmenter/src/provider/radical.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
// This file is part of ICU4X. For terms of use, please see the file
// called LICENSE at the top level of the ICU4X source tree
// (online at: https://github.com/unicode-org/icu4x/blob/main/LICENSE ).

//! Data provider struct definitions for radicals.

use icu_collections::codepointtrie::CodePointTrie;
use icu_provider::prelude::*;

/// Data for Unihan radicals.
///
/// Maps each code point to a `u8` radical identifier through a
/// [`CodePointTrie`]. Code points without a radical mapping presumably
/// resolve to the trie's default value (0) — TODO(review): confirm against
/// the datagen source.
#[derive(Debug, PartialEq, Clone, yoke::Yokeable, zerofrom::ZeroFrom)]
#[cfg_attr(feature = "datagen", derive(serde::Serialize, databake::Bake))]
#[cfg_attr(feature = "datagen", databake(path = icu_segmenter::provider::radical))]
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
pub struct UnihanRadicalsData<'data> {
    /// Trie mapping code points to their IRG source radical ID (u8).
    #[cfg_attr(feature = "serde", serde(borrow))]
    pub trie: CodePointTrie<'data, u8>,
}

// Generates the standard data-struct boilerplate (and, under `datagen`,
// the serialization support) for `UnihanRadicalsData`.
icu_provider::data_struct!(
    UnihanRadicalsData<'_>,
    #[cfg(feature = "datagen")]
);

// Declares the data marker used to request this payload from a provider;
// `is_singleton` means the data is locale-independent (one payload total).
icu_provider::data_marker!(
    /// Marker for the singleton trie mapping code points to their Unihan IRG source radical IDs.
    SegmenterUnihanRadicalV1,
    "segmenter/unihan/radical/v1",
    UnihanRadicalsData<'static>,
    is_singleton = true
);
64 changes: 27 additions & 37 deletions components/segmenter/tests/adaboost/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,59 +4,37 @@

#![allow(dead_code)]

use icu_segmenter::provider::{radical::UnihanRadicalsData, Baked};
use std::collections::HashMap;

static MODEL_FOR_TEST: &str = include_str!("model.json");
static MODEL_FOR_TEST_THAI: &str = include_str!("model_thai.json");

static CODEPOINTS: &[u16] = &[
20008, 20022, 20031, 20057, 20101, 20108, 20128, 20154, 20799, 20837, 20843, 20866, 20886,
20907, 20960, 20981, 20992, 21147, 21241, 21269, 21274, 21304, 21313, 21340, 21353, 21378,
21430, 21448, 21475, 22231, 22303, 22763, 22786, 22794, 22805, 22823, 22899, 23376, 23424,
23544, 23567, 23586, 23608, 23662, 23665, 24027, 24037, 24049, 24062, 24178, 24186, 24191,
24308, 24318, 24331, 24339, 24400, 24417, 24435, 24515, 25096, 25142, 25163, 25903, 25908,
25991, 26007, 26020, 26041, 26080, 26085, 26352, 26376, 26408, 27424, 27490, 27513, 27571,
27595, 27604, 27611, 27663, 27668, 27700, 28779, 29226, 29238, 29243, 29247, 29255, 29273,
29275, 29356, 29572, 29577, 29916, 29926, 29976, 29983, 29992, 30000, 30091, 30098, 30326,
30333, 30382, 30399, 30446, 30683, 30690, 30707, 31034, 31160, 31166, 31348, 31435, 31481,
31859, 31992, 32566, 32593, 32650, 32701, 32769, 32780, 32786, 32819, 32895, 32905, 33251,
33258, 33267, 33276, 33292, 33307, 33311, 33390, 33394, 33400, 34381, 34411, 34880, 34892,
34915, 35198, 35211, 35282, 35328, 35895, 35910, 35925, 35960, 35997, 36196, 36208, 36275,
36523, 36554, 36763, 36784, 36789, 37009, 37193, 37318, 37324, 37329, 38263, 38272, 38428,
38582, 38585, 38632, 38737, 38750, 38754, 38761, 38859, 38893, 38899, 38913, 39080, 39131,
39135, 39318, 39321, 39340, 39592, 39640, 39647, 39717, 39727, 39730, 39740, 39770, 40165,
40565, 40575, 40613, 40635, 40643, 40653, 40657, 40697, 40701, 40718, 40723, 40736, 40763,
40778, 40786, 40845, 40860, 40864,
];

pub(crate) fn get_radical(ch: char) -> u8 {
let id = ch as u32;

if !(19968..=40869).contains(&id) {
return 0;
}

let idx = CODEPOINTS.partition_point(|&b| (b as u32) <= id);
(idx as u8) + 1
/// Looks up the Unihan IRG source radical ID for `ch` in the provided data.
/// Code points without a mapping presumably yield 0 (the trie's default) —
/// callers below treat 0 as "no radical"; TODO(review): confirm trie default.
pub(crate) fn get_radical(radicals: &UnihanRadicalsData<'_>, ch: char) -> u8 {
    radicals.trie.get(ch)
}

pub(crate) struct Predictor {
/// AdaBoost-style boundary predictor backed by a JSON feature-weight model
/// and a borrowed Unihan radical lookup table.
pub(crate) struct Predictor<'a> {
    /// Feature-class name (e.g. "RSRID", "LSRID") → feature-key → weight.
    pub(crate) model: HashMap<String, HashMap<String, i16>>,
    /// Radical data used for the radical-ID features during prediction.
    radicals: &'a UnihanRadicalsData<'a>,
}

impl Predictor {
pub(crate) fn from_json(json: &str) -> Self {
impl<'a> Predictor<'a> {
/// Builds a predictor from a JSON model string and a radical lookup table.
///
/// NOTE(review): a parse failure is silently swallowed and yields an empty
/// model rather than an error — acceptable for test code, but worth knowing
/// when a model file is malformed.
pub(crate) fn from_json(json: &str, radicals: &'a UnihanRadicalsData<'a>) -> Self {
    let model: HashMap<String, HashMap<String, i16>> =
        serde_json::from_str(json).unwrap_or_default();
    Self { model, radicals }
}

/// Predictor wired to the bundled test model and the baked radical data.
pub(crate) fn for_test() -> Self {
    Self::from_json(MODEL_FOR_TEST, Baked::SINGLETON_SEGMENTER_UNIHAN_RADICAL_V1)
}

/// Predictor wired to the bundled Thai test model and the baked radical data.
pub(crate) fn for_test_thai() -> Self {
    Self::from_json(
        MODEL_FOR_TEST_THAI,
        Baked::SINGLETON_SEGMENTER_UNIHAN_RADICAL_V1,
    )
}

pub(crate) fn predict(&self, sentence: &str) -> Vec<i16> {
Expand All @@ -73,15 +51,15 @@ impl Predictor {

let mut score: i16 = 4;

let rad4 = get_radical(c);
let rad4 = get_radical(self.radicals, c);
if rad4 != 0 {
if let Some(map) = self.model.get("RSRID") {
let key = format!("{}:{}", c_prev, rad4);
score += map.get(&key).copied().unwrap_or(0);
}
}

let rad3 = get_radical(c_prev);
let rad3 = get_radical(self.radicals, c_prev);
if rad3 != 0 {
if let Some(map) = self.model.get("LSRID") {
let key = format!("{}:{}", rad3, c);
Expand Down Expand Up @@ -247,6 +225,18 @@ impl Predictor {
}
breakpoints
}

/// Converts per-character Thai prediction scores into byte-offset
/// breakpoints. The list always starts with 0; a positive score after a
/// character adds a breakpoint at the byte position following it.
pub(crate) fn predict_thai_breakpoints(&self, sentence: &str) -> Vec<usize> {
    let scores = self.predict_thai(sentence);
    let mut breaks = vec![0];
    let mut byte_pos = 0;
    // Walk characters and scores in lockstep, tracking the UTF-8 byte offset.
    for (ch, &score) in sentence.chars().zip(scores.iter()) {
        byte_pos += ch.len_utf8();
        if score > 0 {
            breaks.push(byte_pos);
        }
    }
    breaks
}
}

#[cfg(test)]
Expand Down
4 changes: 2 additions & 2 deletions components/segmenter/tests/adaboost/python_test_output.txt
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
2404
-1604
2824
440
320
-525
5881
4892
Expand All @@ -46,4 +46,4 @@
4605
4324
-2139
5215
5215
2 changes: 2 additions & 0 deletions provider/data/segmenter/data/mod.rs

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions provider/data/segmenter/fingerprints.csv
Original file line number Diff line number Diff line change
Expand Up @@ -24,3 +24,4 @@ segmenter/lstm/auto/v1, und/Burmese_codepoints_exclusive_model4_heavy, 91365B, 9
segmenter/lstm/auto/v1, und/Khmer_codepoints_exclusive_model4_heavy, 74665B, 74368B, cc6fe9f66fed196d
segmenter/lstm/auto/v1, und/Lao_codepoints_exclusive_model4_heavy, 72160B, 71863B, 3f52a4025c7d618f
segmenter/lstm/auto/v1, und/Thai_codepoints_exclusive_model4_heavy, 72327B, 72030B, 4486b38238d7c651
segmenter/unihan/radical/v1, <singleton>, 13080B, 13020B, d875e702fbedc249
2 changes: 2 additions & 0 deletions provider/data/segmenter/stubdata/mod.rs

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions provider/registry/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -341,6 +341,7 @@ macro_rules! registry(
icu::experimental::transliterate::provider::TransliteratorRulesV1: TransliteratorRulesV1,
icu::experimental::units::provider::UnitsInfoV1: UnitsInfoV1,
icu::plurals::provider::PluralsRangesV1: PluralsRangesV1,
icu::segmenter::provider::radical::SegmenterUnihanRadicalV1: SegmenterUnihanRadicalV1,
);
}
);
Expand Down
Loading
Loading