From 479646d503164e6b22858bd5d0b0c027db355caa Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tomasz=20Cicho=C5=84?=
Date: Sat, 28 Sep 2024 10:04:47 +0200
Subject: [PATCH] specctra: propagate line/column info to returned error type

---
 macros/specctra_derive/src/read.rs |   2 +-
 src/specctra/design.rs             |   2 +-
 src/specctra/read.rs               | 198 ++++++++++++++++++++---
 src/specctra/structure.rs          |  23 ++--
 4 files changed, 151 insertions(+), 74 deletions(-)

diff --git a/macros/specctra_derive/src/read.rs b/macros/specctra_derive/src/read.rs
index cd44a81..b38da66 100644
--- a/macros/specctra_derive/src/read.rs
+++ b/macros/specctra_derive/src/read.rs
@@ -14,7 +14,7 @@ pub fn impl_read(input: &DeriveInput) -> TokenStream {
     quote! {
         impl<R: std::io::BufRead> ReadDsn<R> for #name {
             fn read_dsn(tokenizer: &mut ListTokenizer<R>)
-                -> Result<Self, ParseError>
+                -> Result<Self, ParseErrorContext>
             {
                 #body
             }
diff --git a/src/specctra/design.rs b/src/specctra/design.rs
index e7fbec8..ff57941 100644
--- a/src/specctra/design.rs
+++ b/src/specctra/design.rs
@@ -33,7 +33,7 @@ pub enum LoadingError {
 
     /// File parsing errors containing information about unexpected end of file,
     /// or any other parsing issues with provided DSN file
     #[error(transparent)]
-    Parse(#[from] read::ParseError),
+    Parse(#[from] read::ParseErrorContext),
 }
 
diff --git a/src/specctra/read.rs b/src/specctra/read.rs
index 6e18217..d782af8 100644
--- a/src/specctra/read.rs
+++ b/src/specctra/read.rs
@@ -15,12 +15,58 @@ pub enum ParseError {
     ExpectedStartOfList(&'static str),
 }
 
+impl ParseError {
+    pub fn add_context(self, context: (usize, usize)) -> ParseErrorContext {
+        ParseErrorContext {
+            error: self,
+            context,
+        }
+    }
+}
+
+#[derive(Error, Debug)]
+#[error("line {}, column {}: {error}", .context.0, .context.1)]
+pub struct ParseErrorContext {
+    error: ParseError,
+    context: (usize, usize),
+}
+
+pub struct InputToken {
+    pub token: ListToken,
+    context: (usize, usize),
+}
+
+impl InputToken {
+    pub fn new(token: ListToken, context: (usize, usize)) -> Self {
+        Self {
+            token,
+            context,
+        }
+    }
+
+    pub fn expect_start(self, name: &'static str) -> Result<(), ParseErrorContext> {
+        self.token.expect_start(name).map_err(|err| err.add_context(self.context))
+    }
+
+    pub fn expect_any_start(self) -> Result<String, ParseErrorContext> {
+        self.token.expect_any_start().map_err(|err| err.add_context(self.context))
+    }
+
+    pub fn expect_leaf(self) -> Result<String, ParseErrorContext> {
+        self.token.expect_leaf().map_err(|err| err.add_context(self.context))
+    }
+
+    pub fn expect_end(self) -> Result<(), ParseErrorContext> {
+        self.token.expect_end().map_err(|err| err.add_context(self.context))
+    }
+}
+
 pub trait ReadDsn<R: std::io::BufRead>: Sized {
-    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError>;
+    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext>;
 }
 
 // custom impl feeding the read values back into the tokenizer
 impl<R: std::io::BufRead> ReadDsn<R> for Parser {
-    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> {
+    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
         Ok(Self {
             string_quote: tokenizer
                 .read_optional("string_quote")?
@@ -35,86 +81,86 @@ impl<R: std::io::BufRead> ReadDsn<R> for Parser {
 }
 
 impl<R: std::io::BufRead> ReadDsn<R> for String {
-    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> {
+    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
         Ok(tokenizer.consume_token()?.expect_leaf()?)
     }
 }
 
 impl<R: std::io::BufRead> ReadDsn<R> for char {
-    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> {
+    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
         let string = tokenizer.consume_token()?.expect_leaf()?;
         if string.chars().count() == 1 {
             Ok(string.chars().next().unwrap())
         } else {
-            Err(ParseError::Expected("a single character"))
+            Err(tokenizer.add_context(ParseError::Expected("a single character")))
         }
     }
 }
 
 impl<R: std::io::BufRead> ReadDsn<R> for bool {
-    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> {
+    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
         match tokenizer.consume_token()?.expect_leaf()?.as_str() {
             "on" => Ok(true),
             "off" => Ok(false),
-            _ => Err(ParseError::Expected("boolean")),
+            _ => Err(tokenizer.add_context(ParseError::Expected("boolean"))),
         }
     }
 }
 
 impl<R: std::io::BufRead> ReadDsn<R> for i32 {
-    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> {
+    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
         Ok(tokenizer
             .consume_token()?
             .expect_leaf()?
             .parse()
-            .map_err(|_| ParseError::Expected("i32"))?)
+            .map_err(|_| tokenizer.add_context(ParseError::Expected("i32")))?)
     }
 }
 
 impl<R: std::io::BufRead> ReadDsn<R> for u32 {
-    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> {
+    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
         Ok(tokenizer
             .consume_token()?
             .expect_leaf()?
             .parse()
-            .map_err(|_| ParseError::Expected("u32"))?)
+            .map_err(|_| tokenizer.add_context(ParseError::Expected("u32")))?)
     }
 }
 
 impl<R: std::io::BufRead> ReadDsn<R> for usize {
-    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> {
+    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
         Ok(tokenizer
             .consume_token()?
             .expect_leaf()?
             .parse()
-            .map_err(|_| ParseError::Expected("usize"))?)
+            .map_err(|_| tokenizer.add_context(ParseError::Expected("usize")))?)
     }
 }
 
 impl<R: std::io::BufRead> ReadDsn<R> for f32 {
-    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> {
+    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
         Ok(tokenizer
             .consume_token()?
             .expect_leaf()?
             .parse()
-            .map_err(|_| ParseError::Expected("f32"))?)
+            .map_err(|_| tokenizer.add_context(ParseError::Expected("f32")))?)
    }
 }
 
 impl<R: std::io::BufRead> ReadDsn<R> for f64 {
-    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> {
+    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
         Ok(tokenizer
             .consume_token()?
             .expect_leaf()?
             .parse()
-            .map_err(|_| ParseError::Expected("f64"))?)
+            .map_err(|_| tokenizer.add_context(ParseError::Expected("f64")))?)
     }
 }
 
 pub struct ListTokenizer<R: std::io::BufRead> {
     reader: R,
     peeked_char: Option<char>,
-    cached_token: Option<ListToken>,
+    cached_token: Option<InputToken>,
     space_in_quoted: bool,
     quote_char: Option<char>,
     line: usize,
@@ -134,12 +180,33 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
         }
     }
 
-    fn next_char(&mut self) -> Result<char, ParseError> {
+    pub fn context(&self) -> (usize, usize) {
+        (self.line, self.column)
+    }
+
+    fn add_context(&self, error: ParseError) -> ParseErrorContext {
+        ParseErrorContext {
+            error,
+            context: (self.line, self.column),
+        }
+    }
+
+    fn map_context<T>(&self, result: Result<T, ParseError>)
+        -> Result<T, ParseErrorContext>
+    {
+        result.map_err(|err| self.add_context(err))
+    }
+
+    fn next_char(&mut self) -> Result<char, ParseErrorContext> {
         let return_chr = if let Some(chr) = self.peeked_char {
             self.peeked_char = None;
             chr
         } else {
-            self.reader.chars().next().ok_or(ParseError::Eof)??
+            self.reader
+                .chars()
+                .next()
+                .ok_or(self.add_context(ParseError::Eof))?
+                .map_err(|err| self.add_context(err.into()))?
         };
 
         if return_chr == '\n' {
@@ -152,17 +219,21 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
         Ok(return_chr)
     }
 
-    fn peek_char(&mut self) -> Result<char, ParseError> {
+    fn peek_char(&mut self) -> Result<char, ParseErrorContext> {
         if let Some(chr) = self.peeked_char {
             Ok(chr)
         } else {
-            let chr = self.reader.chars().next().ok_or(ParseError::Eof)??;
+            let chr = self.reader
+                .chars()
+                .next()
+                .ok_or(self.add_context(ParseError::Eof))?
+                .map_err(|err| self.add_context(err.into()))?;
             self.peeked_char = Some(chr);
             Ok(chr)
         }
     }
 
-    fn skip_whitespace(&mut self) -> Result<(), ParseError> {
+    fn skip_whitespace(&mut self) -> Result<(), ParseErrorContext> {
         loop {
             let chr = self.peek_char()?;
             if chr == ' ' || chr == '\r' || chr == '\n' {
@@ -173,7 +244,7 @@
         }
     }
 
-    fn read_string(&mut self) -> Result<String, ParseError> {
+    fn read_string(&mut self) -> Result<String, ParseErrorContext> {
         if let Some(chr) = self.quote_char {
             if chr == self.peek_char()? {
                 return self.read_quoted();
@@ -182,7 +253,7 @@
         self.read_unquoted()
     }
 
-    fn read_unquoted(&mut self) -> Result<String, ParseError> {
+    fn read_unquoted(&mut self) -> Result<String, ParseErrorContext> {
         let mut string = String::new();
 
         loop {
@@ -194,13 +265,13 @@
         }
 
         if string.is_empty() {
-            Err(ParseError::Expected("string (unquoted)"))
+            Err(self.add_context(ParseError::Expected("string (unquoted)")))
         } else {
             Ok(string)
         }
     }
 
-    fn read_quoted(&mut self) -> Result<String, ParseError> {
+    fn read_quoted(&mut self) -> Result<String, ParseErrorContext> {
         let mut string = String::new();
 
         if self.next_char().unwrap() != self.quote_char.unwrap() {
@@ -225,7 +296,7 @@
 
     // the following two methods effectively allow 1 token of lookahead
     // returns next token, either a cached one returned earlier or a newly read one
-    pub fn consume_token(&mut self) -> Result<ListToken, ParseError> {
+    pub fn consume_token(&mut self) -> Result<InputToken, ParseErrorContext> {
         // move out of cache if not empty, otherwise consume input
         // always leaves cache empty
         if let Some(token) = self.cached_token.take() {
@@ -237,34 +308,39 @@
     }
 
     // puts a token back into cache, to be consumed by something else
-    pub fn return_token(&mut self, token: ListToken) {
+    pub fn return_token(&mut self, token: InputToken) {
         self.cached_token = Some(token);
     }
 
-    fn read_token(&mut self) -> Result<ListToken, ParseError> {
+    fn read_token(&mut self) -> Result<InputToken, ParseErrorContext> {
         self.skip_whitespace()?;
+        let context = self.context();
+
         let chr = self.peek_char()?;
-        Ok(if chr == '(' {
-            self.next_char().unwrap();
-            self.skip_whitespace()?;
-            ListToken::Start {
-                name: self.read_string()?,
-            }
-        } else if chr == ')' {
-            self.next_char().unwrap();
-            ListToken::End
-        } else {
-            ListToken::Leaf {
-                value: self.read_string()?,
-            }
-        })
+        Ok(InputToken::new(
+            if chr == '(' {
+                self.next_char().unwrap();
+                self.skip_whitespace()?;
+                ListToken::Start {
+                    name: self.read_string()?,
+                }
+            } else if chr == ')' {
+                self.next_char().unwrap();
+                ListToken::End
+            } else {
+                ListToken::Leaf {
+                    value: self.read_string()?,
+                }
+            },
+            context,
+        ))
     }
 
-    pub fn read_value<T: ReadDsn<R>>(&mut self) -> Result<T, ParseError> {
+    pub fn read_value<T: ReadDsn<R>>(&mut self) -> Result<T, ParseErrorContext> {
         T::read_dsn(self)
     }
 
-    pub fn read_named<T: ReadDsn<R>>(&mut self, name: &'static str) -> Result<T, ParseError> {
+    pub fn read_named<T: ReadDsn<R>>(&mut self, name: &'static str) -> Result<T, ParseErrorContext> {
         self.consume_token()?.expect_start(name)?;
         let value = self.read_value::<T>()?;
         self.consume_token()?.expect_end()?;
@@ -274,35 +350,35 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
     pub fn read_optional<T: ReadDsn<R>>(
         &mut self,
         name: &'static str,
-    ) -> Result<Option<T>, ParseError> {
-        let token = self.consume_token()?;
+    ) -> Result<Option<T>, ParseErrorContext> {
+        let input = self.consume_token()?;
         if let ListToken::Start {
             name: ref actual_name,
-        } = token
+        } = input.token
         {
             if actual_name == name {
                 let value = self.read_value::<T>()?;
                 self.consume_token()?.expect_end()?;
                 Ok(Some(value))
             } else {
-                self.return_token(token);
+                self.return_token(input);
                 Ok(None)
             }
         } else {
-            self.return_token(token);
+            self.return_token(input);
             Ok(None)
         }
     }
 
-    pub fn read_array<T: ReadDsn<R>>(&mut self) -> Result<Vec<T>, ParseError> {
+    pub fn read_array<T: ReadDsn<R>>(&mut self) -> Result<Vec<T>, ParseErrorContext> {
         let mut array = Vec::<T>::new();
         loop {
-            let token = self.consume_token()?;
-            if let ListToken::Leaf { .. } = token {
-                self.return_token(token);
+            let input = self.consume_token()?;
+            if let ListToken::Leaf { .. } = input.token {
+                self.return_token(input);
                 array.push(self.read_value::<T>()?);
             } else {
-                self.return_token(token);
+                self.return_token(input);
                 break;
             }
         }
@@ -312,24 +388,24 @@
     pub fn read_named_array<T: ReadDsn<R>>(
         &mut self,
         name: &'static str,
-    ) -> Result<Vec<T>, ParseError> {
+    ) -> Result<Vec<T>, ParseErrorContext> {
         let mut array = Vec::<T>::new();
         loop {
-            let token = self.consume_token()?;
+            let input = self.consume_token()?;
             if let ListToken::Start {
                 name: ref actual_name,
-            } = token
+            } = input.token
             {
                 if actual_name == name {
                     let value = self.read_value::<T>()?;
                     self.consume_token()?.expect_end()?;
                     array.push(value);
                 } else {
-                    self.return_token(token);
+                    self.return_token(input);
                     break;
                 }
             } else {
-                self.return_token(token);
+                self.return_token(input);
                 break;
             }
         }
diff --git a/src/specctra/structure.rs b/src/specctra/structure.rs
index e7825db..2868453 100644
--- a/src/specctra/structure.rs
+++ b/src/specctra/structure.rs
@@ -1,6 +1,6 @@
 use super::common::ListToken;
 use super::read::ReadDsn;
-use super::read::{ListTokenizer, ParseError};
+use super::read::{ListTokenizer, ParseError, ParseErrorContext};
 use super::write::ListWriter;
 use super::write::WriteSes;
 use specctra_derive::ReadDsn;
@@ -237,14 +237,15 @@ pub enum Shape {
 }
 
 impl<R: std::io::BufRead> ReadDsn<R> for Shape {
-    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> {
+    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
+        let ctx = tokenizer.context();
         let name = tokenizer.consume_token()?.expect_any_start()?;
         let value = match name.as_str() {
             "circle" => Ok(Shape::Circle(tokenizer.read_value()?)),
             "rect" => Ok(Shape::Rect(tokenizer.read_value()?)),
             "path" => Ok(Shape::Path(tokenizer.read_value()?)),
             "polygon" => Ok(Shape::Polygon(tokenizer.read_value()?)),
-            _ => Err(ParseError::Expected("a different keyword")),
+            _ => Err(ParseError::Expected("a different keyword").add_context(ctx)),
         };
         tokenizer.consume_token()?.expect_end()?;
         value
@@ -345,16 +346,16 @@ pub struct Point {
 
 // Custom impl for the case described above
 impl<R: std::io::BufRead> ReadDsn<R> for Vec<Point> {
-    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> {
+    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
         let mut array = Vec::<Point>::new();
         loop {
-            let token = tokenizer.consume_token()?;
-            if let ListToken::Leaf { value: ref x } = token {
+            let input = tokenizer.consume_token()?;
+            if let ListToken::Leaf { value: ref x } = input.token {
                 let x = x.parse::<f64>().unwrap();
                 let y = tokenizer.read_value::<f64>()?;
                 array.push(Point { x, y });
             } else {
-                tokenizer.return_token(token);
+                tokenizer.return_token(input);
                 break;
             }
         }
@@ -363,14 +364,14 @@
 }
 
 impl<R: std::io::BufRead> ReadDsn<R> for Option<Point> {
-    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> {
-        let token = tokenizer.consume_token()?;
-        if let ListToken::Leaf { value: ref x } = token {
+    fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
+        let input = tokenizer.consume_token()?;
+        if let ListToken::Leaf { value: ref x } = input.token {
             let x = x.parse::<f64>().unwrap();
             let y = tokenizer.read_value::<f64>()?;
             Ok(Some(Point { x, y }))
         } else {
-            tokenizer.return_token(token);
+            tokenizer.return_token(input);
             Ok(None)
         }
     }
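
For reference, a minimal self-contained sketch of the Display output the new ParseErrorContext type produces. It reuses the thiserror attribute from the patch verbatim and assumes thiserror as a dependency; the Expected variant's message and the main driver are illustrative assumptions, not code from the repository.

use thiserror::Error;

// Trimmed stand-ins for the types added in src/specctra/read.rs.
#[derive(Error, Debug)]
pub enum ParseError {
    // Assumed message text; the real variant's #[error] attribute is not shown in the diff.
    #[error("expected {0}")]
    Expected(&'static str),
}

// Same formatting attribute as in the patch: position first, then the inner error.
#[derive(Error, Debug)]
#[error("line {}, column {}: {error}", .context.0, .context.1)]
pub struct ParseErrorContext {
    error: ParseError,
    // (line, column), as captured by ListTokenizer::context() in the patch.
    context: (usize, usize),
}

fn main() {
    // Hypothetical failure at line 7, column 23.
    let err = ParseErrorContext {
        error: ParseError::Expected("boolean"),
        context: (7, 23),
    };
    assert_eq!(err.to_string(), "line 7, column 23: expected boolean");
    println!("{err}");
}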