refactor(specctra/structure): get rid of code duplication in Vec<Point>

Alain Emilia Anna Zscheile 2024-12-02 22:58:37 +01:00 committed by mikolaj
parent da72118255
commit 0a862a7e5f
1 changed file with 7 additions and 17 deletions

@@ -395,19 +395,9 @@ pub struct Point {
 // Custom impl for the case described above
 impl<R: std::io::BufRead> ReadDsn<R> for Vec<Point> {
     fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
-        let mut array = Vec::<Point>::new();
-        loop {
-            let input = tokenizer.consume_token()?;
-            if let ListToken::Leaf { value: ref x } = input.token {
-                let x = x
-                    .parse::<f64>()
-                    .map_err(|_| ParseError::Expected("f64").add_context(input.context))?;
-                let y = tokenizer.read_value::<f64>()?;
-                array.push(Point { x, y });
-            } else {
-                tokenizer.return_token(input);
-                break;
-            }
-        }
+        let mut array = Vec::new();
+        while let Some(x) = tokenizer.read_value::<Option<Point>>()? {
+            array.push(x);
+        }
         Ok(array)
     }
@@ -416,16 +406,16 @@ impl<R: std::io::BufRead> ReadDsn<R> for Vec<Point> {
 impl<R: std::io::BufRead> ReadDsn<R> for Option<Point> {
     fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
         let input = tokenizer.consume_token()?;
-        if let ListToken::Leaf { value: ref x } = input.token {
+        Ok(if let ListToken::Leaf { value: ref x } = input.token {
             let x = x
                 .parse::<f64>()
                 .map_err(|_| ParseError::Expected("f64").add_context(input.context))?;
             let y = tokenizer.read_value::<f64>()?;
-            Ok(Some(Point { x, y }))
+            Some(Point { x, y })
         } else {
             tokenizer.return_token(input);
-            Ok(None)
-        }
+            None
+        })
     }
 }
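
For context, here is the shape of the refactor in isolation. This is a minimal, self-contained sketch, not the project's real API: `Tok`, `MockTokenizer`, and the plain `String` error type are hypothetical stand-ins for `ListToken`, `ListTokenizer<R>`, and `ParseErrorContext`. The point of the commit is that one function (here `read_optional_point`, mirroring the `Option<Point>` impl) owns the leaf-parsing logic, and the `Vec<Point>` reader collapses into a `while let` loop over it.

// Minimal sketch of the deduplication pattern. NOT the project's real API:
// `Tok`, `MockTokenizer`, and the `String` error type are hypothetical
// stand-ins for `ListToken`, `ListTokenizer<R>`, and `ParseErrorContext`.

#[derive(Debug, PartialEq)]
struct Point {
    x: f64,
    y: f64,
}

#[derive(Clone)]
enum Tok {
    Leaf(String), // a bare value, e.g. one coordinate
    Close,        // models any non-leaf token that ends the list
}

struct MockTokenizer {
    tokens: Vec<Tok>,
    pos: usize,
}

impl MockTokenizer {
    fn consume(&mut self) -> Tok {
        let tok = self.tokens[self.pos].clone();
        self.pos += 1;
        tok
    }

    // Mirrors ListTokenizer::return_token: put the last token back.
    fn unconsume(&mut self) {
        self.pos -= 1;
    }
}

// The single place that knows how to parse one point or detect end-of-list;
// corresponds to the `ReadDsn for Option<Point>` impl in the second hunk.
fn read_optional_point(t: &mut MockTokenizer) -> Result<Option<Point>, String> {
    Ok(match t.consume() {
        Tok::Leaf(xs) => {
            let x = xs.parse::<f64>().map_err(|_| "expected f64".to_string())?;
            let y = match t.consume() {
                Tok::Leaf(ys) => ys.parse::<f64>().map_err(|_| "expected f64".to_string())?,
                Tok::Close => return Err("expected f64".to_string()),
            };
            Some(Point { x, y })
        }
        Tok::Close => {
            t.unconsume(); // leave the closing token for the caller
            None
        }
    })
}

// The deduplicated Vec<Point> reader: a `while let` loop that delegates to
// the optional reader instead of repeating its parsing logic.
fn read_points(t: &mut MockTokenizer) -> Result<Vec<Point>, String> {
    let mut array = Vec::new();
    while let Some(p) = read_optional_point(t)? {
        array.push(p);
    }
    Ok(array)
}

fn main() {
    let mut t = MockTokenizer {
        tokens: vec![
            Tok::Leaf("1.0".into()),
            Tok::Leaf("2.0".into()),
            Tok::Leaf("3.5".into()),
            Tok::Leaf("4.5".into()),
            Tok::Close,
        ],
        pos: 0,
    };
    let pts = read_points(&mut t).unwrap();
    assert_eq!(pts, vec![Point { x: 1.0, y: 2.0 }, Point { x: 3.5, y: 4.5 }]);
}

Note the `Ok(...)` hoisted around the whole `match` (as in the second hunk of the diff): with the wrapper outside both arms, the body yields a bare `Option`, which is exactly what the `while let` in the list reader consumes.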