diff --git a/Cargo.toml b/Cargo.toml
index edc9c9b..ece5f62 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -70,6 +70,7 @@ features = ["rstar"]
 
 [dev-dependencies]
 serde_json.workspace = true
+proptest = "1.6"
 
 [package.metadata.docs.rs]
 cargo-args = ["-Zunstable-options", "-Zrustdoc-scrape-examples"]
diff --git a/crates/planar-incr-embed/Cargo.toml b/crates/planar-incr-embed/Cargo.toml
index 12aaaef..0707f23 100644
--- a/crates/planar-incr-embed/Cargo.toml
+++ b/crates/planar-incr-embed/Cargo.toml
@@ -24,10 +24,8 @@ optional = true
 
 [dev-dependencies]
 ron = "0.8"
+serde.workspace = true
 
 [dev-dependencies.insta]
 version = "1.42"
 features = ["json"]
-
-[dev-dependencies.serde]
-workspace = true
diff --git a/proptest-regressions/math/cyclic_search.txt b/proptest-regressions/math/cyclic_search.txt
new file mode 100644
index 0000000..4e48f83
--- /dev/null
+++ b/proptest-regressions/math/cyclic_search.txt
@@ -0,0 +1,11 @@
+# SPDX-FileCopyrightText: 2025 Topola contributors
+#
+# SPDX-License-Identifier: MIT
+
+# Seeds for failure cases proptest has generated in the past. It is
+# automatically read and these particular cases re-run before any
+# novel cases are generated.
+#
+# It is recommended to check this file in to source control so that
+# everyone who runs the test benefits from these saved cases.
+cc 10a8063ed53caab61327919fad368bd440c684021e1022082d6bf2098e19aea9 # shrinks to len = 4, offset = 0, amount_true = 238
diff --git a/src/math/cyclic_search.rs b/src/math/cyclic_search.rs
new file mode 100644
index 0000000..d444121
--- /dev/null
+++ b/src/math/cyclic_search.rs
@@ -0,0 +1,299 @@
+// SPDX-FileCopyrightText: 2025 Topola contributors
+//
+// SPDX-License-Identifier: MIT
+
+use core::{mem::take, ops::Range};
+
+/// Generate a breadth-first visit of the blocks of `bounds` at the given level
+fn breadth4level(bounds: Range<usize>, level: u8, mut callback: impl FnMut(Range<usize>) -> bool) {
+    // level is the exponent of 2
+    let block_length = (bounds.end - bounds.start) >> level;
+    let blocks_count = (bounds.end - bounds.start) / block_length;
+    if blocks_count == 0 {
+        return;
+    }
+    for i in 0..(blocks_count - 1) {
+        let block_start = bounds.start + i * block_length;
+        debug_assert!(block_start + block_length < bounds.end);
+        if !callback(block_start..(block_start + block_length)) {
+            return;
+        }
+    }
+    let block_start = bounds.start + (blocks_count - 1) * block_length;
+    debug_assert!(block_start + block_length <= bounds.end);
+    callback(block_start..bounds.end);
+}
+
+#[derive(Clone, Copy)]
+enum TriState<T> {
+    Nothing,
+    Got(T),
+    Fixed(T),
+}
+
+impl<T> Default for TriState<T> {
+    fn default() -> Self {
+        TriState::Nothing
+    }
+}
+
+impl<T> TriState<T> {
+    fn update(&mut self, value: T) -> bool {
+        match self {
+            TriState::Fixed(_) => false,
+            _ => {
+                *self = TriState::Got(value);
+                true
+            }
+        }
+    }
+
+    fn fix(&mut self) {
+        *self = match take(self) {
+            TriState::Got(x) => TriState::Fixed(x),
+            x => x,
+        };
+    }
+
+    fn is_fixed(&self) -> bool {
+        matches!(self, TriState::Fixed(_))
+    }
+}
+
+impl<T> From<TriState<T>> for Option<T> {
+    fn from(x: TriState<T>) -> Self {
+        match x {
+            TriState::Nothing => None,
+            TriState::Got(x) | TriState::Fixed(x) => Some(x),
+        }
+    }
+}
+
+struct Discover {
+    pos_false: TriState<usize>,
+    pos_true: TriState<usize>,
+}
+
+impl Discover {
+    fn new() -> Self {
+        Self {
+            pos_false: TriState::Nothing,
+            pos_true: TriState::Nothing,
+        }
+    }
+
+    fn update(&mut self, pos: usize, value: bool) {
+        match value {
+            false => {
+                self.pos_false.update(pos);
+                self.pos_true.fix();
+            }
+            true => {
+                self.pos_true.update(pos);
+                self.pos_false.fix();
+            }
+        }
+    }
+
+    fn is_finished_minimal(&self) -> bool {
+        self.pos_false.is_fixed() || self.pos_true.is_fixed()
+    }
+
+    fn is_finished(&self) -> bool {
+        self.pos_false.is_fixed() && self.pos_true.is_fixed()
+    }
+
+    fn results(&self) -> (Option<usize>, Option<usize>) {
+        (self.pos_false.into(), self.pos_true.into())
+    }
+}
+
+/// A brute-force implementation of [`cyclic_breadth_partition_search`].
+fn cbps_brute_force<EF>(
+    bounds: core::ops::Range<usize>,
+    eval: &EF,
+) -> (Option<usize>, Option<usize>)
+where
+    EF: Fn(usize) -> bool,
+{
+    let mut discover = Discover::new();
+
+    for i in bounds {
+        discover.update(i, eval(i));
+        if discover.is_finished() {
+            break;
+        }
+    }
+
+    discover.results()
+}
+
+/// Search for the largest index inside the bounds which still evaluates to `expected_value`
+fn exponential_search<EF, T>(
+    eval: &EF,
+    expected_value: T,
+    mut bounds: core::ops::Range<usize>,
+) -> Option<usize>
+where
+    EF: Fn(usize) -> T,
+    T: Eq + core::fmt::Debug,
+{
+    assert!(bounds.start <= bounds.end);
+    if bounds.is_empty() || eval(bounds.start) != expected_value {
+        return None;
+    }
+
+    let mut largest_checked = bounds.start;
+    while (bounds.start + 1) < bounds.end {
+        let len = bounds.end - bounds.start;
+        for level in 0..64u8 {
+            let mut index = 1 << level;
+            if index >= len {
+                break;
+            }
+            index += bounds.start;
+            if eval(index) != expected_value {
+                bounds.end = index;
+                break;
+            }
+            largest_checked = index;
+        }
+
+        bounds.start = largest_checked;
+        // this implies that `bounds.start` doesn't have to get checked again
+    }
+
+    debug_assert_eq!(eval(largest_checked), expected_value);
+    Some(largest_checked)
+}
+
+/// Perform a breadth-first search on an induced binary tree on the list,
+/// searching for the bounds of the partition induced by `eval`,
+/// returning the last item indices in the `false` and `true` blocks
+pub fn cyclic_breadth_partition_search<EF>(
+    bounds: Range<usize>,
+    eval: EF,
+) -> (Option<usize>, Option<usize>)
+where
+    EF: Fn(usize) -> bool,
+{
+    if bounds.is_empty() {
+        return (None, None);
+    }
+
+    // discover gaps (true is a gap for false and vice versa)
+    let mut discover = Discover::new();
+
+    for i in 0..((bounds.end - bounds.start).ilog2() as u8) {
+        breadth4level(bounds.clone(), i, |bounds| {
+            let middle = bounds.start + (bounds.end - bounds.start) / 2;
+            discover.update(middle, eval(middle));
+            !discover.is_finished_minimal()
+        });
+        if discover.is_finished_minimal() {
+            break;
+        }
+    }
+
+    // brute force on failure
+    if !discover.is_finished() {
+        return cbps_brute_force(bounds, &eval);
+    }
+
+    let (pos_false, pos_true) = discover.results();
+    let (mut pos_false, mut pos_true) = (pos_false.unwrap(), pos_true.unwrap());
+
+    // discover bounds
+    debug_assert_ne!(pos_false, pos_true);
+
+    // whichever block is at the beginning has
+    // its end somewhere strictly before the other block
+    // either:
+    // - the later block continues at the beginning
+    //   format: L...!L...L...
+    // - or the later block doesn't continue at the beginning
+    //   format: !L...L...
+    let val_start = eval(bounds.start);
+
+    {
+        let (pos_start, pos_next) = match val_start {
+            false => (&mut pos_false, &mut pos_true),
+            true => (&mut pos_true, &mut pos_false),
+        };
+
+        *pos_start = exponential_search(&eval, val_start, bounds.start..*pos_next).unwrap();
+        *pos_next = exponential_search(&eval, !val_start, *pos_start + 1..bounds.end).unwrap();
+    }
+
+    (Some(pos_false), Some(pos_true))
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{cbps_brute_force, cyclic_breadth_partition_search as cbps};
+    use proptest::prelude::*;
+
+    fn cbps_assert_eq<T, PF>(list: &[T], partition: PF) -> (Option<usize>, Option<usize>)
+    where
+        PF: Fn(&T) -> bool,
+        T: Eq + core::fmt::Debug,
+    {
+        let eval = &|i: usize| partition(&list[i]);
+        let res_expected = cbps_brute_force(0..list.len(), eval);
+        assert_eq!(cbps(0..list.len(), eval), res_expected);
+        res_expected
+    }
+
+    #[test]
+    fn cbps_bpw3_simple00() {
+        let list = &[false, false, false, true, true];
+        assert_eq!(cbps_assert_eq(list, |i| *i), (Some(2), Some(4)));
+    }
+
+    #[test]
+    fn cbps_bpw3_cont_false() {
+        let list = &[false, false, false];
+        assert_eq!(cbps_assert_eq(list, |i| *i), (Some(2), None));
+    }
+
+    #[test]
+    fn cbps_bpw3_cont_true() {
+        let list = &[true, true];
+        assert_eq!(cbps_assert_eq(list, |i| *i), (None, Some(1)));
+    }
+
+    #[test]
+    fn cbps_bpw3_simple01() {
+        let list = &[true, false, false, false, true, true];
+        assert_eq!(cbps_assert_eq(list, |i| *i), (Some(3), Some(0)));
+    }
+
+    #[test]
+    fn cbps_bpw3_1exception() {
+        let list = &[true, false, true, true, true, true];
+        assert_eq!(cbps_assert_eq(list, |i| *i), (Some(1), Some(0)));
+    }
+
+    proptest! {
+        #![proptest_config(ProptestConfig {
+            cases: 4096, .. ProptestConfig::default()
+        })]
+
+        #[test]
+        fn cbps_arbitrary(len in 1..4096usize, offset in 0..4096usize, amount_true in 0..4096usize) {
+            let offset = offset % len;
+            let amount_true = amount_true % len;
+
+            let mut list = vec![false; len];
+            for i in offset..(offset + amount_true) {
+                let i = i % len;
+                list[i] = true;
+            }
+            let eval = &|i: usize| list[i];
+            prop_assert_eq!(
+                cbps(0..list.len(), eval),
+                cbps_brute_force(0..list.len(), eval)
+            );
+        }
+    }
+}
diff --git a/src/math/mod.rs b/src/math/mod.rs
index cffa601..e8f31c9 100644
--- a/src/math/mod.rs
+++ b/src/math/mod.rs
@@ -6,6 +6,12 @@ use geo::algorithm::line_measures::{Distance, Euclidean};
 use geo::{geometry::Point, point, Line};
 pub use specctra_core::math::{Circle, PointWithRotation};
 
+mod cyclic_search;
+pub use cyclic_search::*;
+
+mod polygon_tangents;
+pub use polygon_tangents::*;
+
 mod tangents;
 pub use tangents::*;
 
diff --git a/src/math/polygon_tangents.rs b/src/math/polygon_tangents.rs
new file mode 100644
index 0000000..4356b12
--- /dev/null
+++ b/src/math/polygon_tangents.rs
@@ -0,0 +1,114 @@
+// SPDX-FileCopyrightText: 2025 Topola contributors
+//
+// SPDX-License-Identifier: MIT
+
+use super::{between_vectors, cyclic_breadth_partition_search};
+use geo::Point;
+
+#[derive(Clone, Debug, thiserror::Error, PartialEq)]
+pub enum PolyTangentException<I> {
+    #[error("trying to target empty polygon")]
+    EmptyTargetPolygon { origin: Point },
+
+    #[error("invalid polygon tangent arguments")]
+    InvalidData {
+        poly_ext: Box<[(Point, I)]>,
+        origin: Point,
+    },
+}
+
+/// Calculates the tangents to the polygon exterior `poly_ext` (oriented clockwise iff `poly_ext_is_cw`)
+/// going through point `origin`.
+pub fn poly_ext_tangent_points<I: Copy>(
+    poly_ext: &[(Point, I)],
+    poly_ext_is_cw: bool,
+    origin: Point,
+) -> Result<(I, I), PolyTangentException<I>> {
+    if poly_ext.is_empty() {
+        return Err(PolyTangentException::EmptyTargetPolygon { origin });
+    }
+
+    let (pos_false, pos_true) = cyclic_breadth_partition_search(0..poly_ext.len(), &|i: usize| {
+        let prev = &poly_ext[(poly_ext.len() + i - 1) % poly_ext.len()];
+        let cur = &poly_ext[i];
+        let next = &poly_ext[(i + 1) % poly_ext.len()];
+
+        // local coordinate system with origin at `cur.0`.
+        between_vectors(cur.0 - origin, cur.0 - prev.0, cur.0 - next.0)
+    });
+
+    let (mut pos_false, mut pos_true) =
+        if let (Some(pos_false), Some(pos_true)) = (pos_false, pos_true) {
+            (pos_false, pos_true)
+        } else {
+            return Err(PolyTangentException::InvalidData {
+                poly_ext: poly_ext.to_vec().into_boxed_slice(),
+                origin,
+            });
+        };
+
+    // * `pos_false` points to the maximum
+    // * `pos_true` points to the minimum
+
+    // NOTE: although pos_{false,true} are vertex indices, they are actually
+    // referring to the "critical" segment(s) (pos_false, pos_false + 1) (and resp. for pos_true),
+    // because that is where the `between_vectors` result flips.
+    // These critical segments are independent of CW/CCW.
+
+    // if `poly_ext` is oriented CCW, then
+    // * `pos_false` will be one too early, and
+    // * `pos_true` will be correct.
+
+    // if `poly_ext` is oriented CW, then
+    // * `pos_false` will be correct, and
+    // * `pos_true` will be one too early.
+
+    // TODO: can we (without too much overhead) determine if `poly_ext` is CW or CCW?
+
+    if poly_ext_is_cw {
+        pos_true += 1;
+        pos_true %= poly_ext.len();
+    } else {
+        pos_false += 1;
+        pos_false %= poly_ext.len();
+    }
+
+    Ok((poly_ext[pos_true].1, poly_ext[pos_false].1))
+}
+
+#[cfg(test)]
+mod tests {
+    use super::poly_ext_tangent_points as petp;
+    use crate::drawing::dot::FixedDotIndex;
+    use geo::point;
+
+    #[test]
+    fn petp00() {
+        let poly_ext = &[
+            (point! { x: 0.0, y: 0.0 }, FixedDotIndex::new(0.into())),
+            (point! { x: 1.0, y: 0.0 }, FixedDotIndex::new(1.into())),
+            (point! { x: 1.0, y: 1.0 }, FixedDotIndex::new(2.into())),
+            (point! { x: 0.0, y: 1.0 }, FixedDotIndex::new(3.into())),
+        ];
+        let origin = point! { x: 0.5, y: -1.0 };
+        assert_eq!(
+            petp(poly_ext, false, origin),
+            Ok((FixedDotIndex::new(1.into()), FixedDotIndex::new(0.into())))
+        );
+    }
+
+    #[test]
+    fn petp00cw() {
+        let poly_ext = &[
+            (point! { x: 0.0, y: 0.0 }, FixedDotIndex::new(0.into())),
+            (point! { x: 0.0, y: 1.0 }, FixedDotIndex::new(3.into())),
+            (point! { x: 1.0, y: 1.0 }, FixedDotIndex::new(2.into())),
+            (point! { x: 1.0, y: 0.0 }, FixedDotIndex::new(1.into())),
+        ];
+        let origin = point! { x: 0.5, y: -1.0 };
+        assert_eq!(
+            petp(poly_ext, true, origin),
+            Ok((FixedDotIndex::new(1.into()), FixedDotIndex::new(0.into())))
+        );
+    }
+}
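
For review context, a minimal usage sketch of the new partition search (not part of the patch). The `topola::math` import path is an assumption based on the `pub use cyclic_search::*;` re-export added to `src/math/mod.rs`; the expected result mirrors the `cbps_bpw3_simple01` test above.

    // Hypothetical standalone example; the `topola::math` path is assumed
    // from the re-exports introduced in this patch.
    use topola::math::cyclic_breadth_partition_search;

    fn main() {
        // A cyclically partitioned list: a single run of `true` values that
        // wraps around the end of the slice, the rest `false`.
        let list = [true, false, false, false, true, true];

        // The search returns the last index of the `false` run (3) and the
        // last index of the `true` run (0, because the run wraps around).
        let result = cyclic_breadth_partition_search(0..list.len(), |i| list[i]);
        assert_eq!(result, (Some(3), Some(0)));
    }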