specctra: propagate line/column info to returned error type

This commit is contained in:
Tomasz Cichoń 2024-09-28 10:04:47 +02:00
parent b7027ae603
commit 479646d503
4 changed files with 151 additions and 74 deletions

View File

@@ -14,7 +14,7 @@ pub fn impl_read(input: &DeriveInput) -> TokenStream {
quote! { quote! {
impl<R: std::io::BufRead> ReadDsn<R> for #name { impl<R: std::io::BufRead> ReadDsn<R> for #name {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) fn read_dsn(tokenizer: &mut ListTokenizer<R>)
-> Result<Self, ParseError> -> Result<Self, ParseErrorContext>
{ {
#body #body
} }

View File

@@ -33,7 +33,7 @@ pub enum LoadingError {
/// File parsing errors containing information about unexpected end of file, /// File parsing errors containing information about unexpected end of file,
/// or any other parsing issues with provided DSN file /// or any other parsing issues with provided DSN file
#[error(transparent)] #[error(transparent)]
Parse(#[from] read::ParseError), Parse(#[from] read::ParseErrorContext),
} }

View File

@@ -15,12 +15,58 @@ pub enum ParseError {
ExpectedStartOfList(&'static str), ExpectedStartOfList(&'static str),
} }
impl ParseError {
pub fn add_context(self, context: (usize, usize)) -> ParseErrorContext {
ParseErrorContext {
error: self,
context,
}
}
}
#[derive(Error, Debug)]
#[error("line {}, column {}: {error}", .context.0, .context.1)]
pub struct ParseErrorContext {
error: ParseError,
context: (usize, usize),
}
pub struct InputToken {
pub token: ListToken,
context: (usize, usize),
}
impl InputToken {
pub fn new(token: ListToken, context: (usize, usize)) -> Self {
Self {
token,
context,
}
}
pub fn expect_start(self, name: &'static str) -> Result<(), ParseErrorContext> {
self.token.expect_start(name).map_err(|err| err.add_context(self.context))
}
pub fn expect_any_start(self) -> Result<String, ParseErrorContext> {
self.token.expect_any_start().map_err(|err| err.add_context(self.context))
}
pub fn expect_leaf(self) -> Result<String, ParseErrorContext> {
self.token.expect_leaf().map_err(|err| err.add_context(self.context))
}
pub fn expect_end(self) -> Result<(), ParseErrorContext> {
self.token.expect_end().map_err(|err| err.add_context(self.context))
}
}
pub trait ReadDsn<R: std::io::BufRead>: Sized { pub trait ReadDsn<R: std::io::BufRead>: Sized {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError>; fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext>;
} }
// custom impl feeding the read values back into the tokenizer // custom impl feeding the read values back into the tokenizer
impl<R: std::io::BufRead> ReadDsn<R> for Parser { impl<R: std::io::BufRead> ReadDsn<R> for Parser {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> { fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
Ok(Self { Ok(Self {
string_quote: tokenizer string_quote: tokenizer
.read_optional("string_quote")? .read_optional("string_quote")?
@@ -35,86 +81,86 @@ impl<R: std::io::BufRead> ReadDsn<R> for Parser {
} }
impl<R: std::io::BufRead> ReadDsn<R> for String { impl<R: std::io::BufRead> ReadDsn<R> for String {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> { fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
Ok(tokenizer.consume_token()?.expect_leaf()?) Ok(tokenizer.consume_token()?.expect_leaf()?)
} }
} }
impl<R: std::io::BufRead> ReadDsn<R> for char { impl<R: std::io::BufRead> ReadDsn<R> for char {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> { fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
let string = tokenizer.consume_token()?.expect_leaf()?; let string = tokenizer.consume_token()?.expect_leaf()?;
if string.chars().count() == 1 { if string.chars().count() == 1 {
Ok(string.chars().next().unwrap()) Ok(string.chars().next().unwrap())
} else { } else {
Err(ParseError::Expected("a single character")) Err(tokenizer.add_context(ParseError::Expected("a single character")))
} }
} }
} }
impl<R: std::io::BufRead> ReadDsn<R> for bool { impl<R: std::io::BufRead> ReadDsn<R> for bool {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> { fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
match tokenizer.consume_token()?.expect_leaf()?.as_str() { match tokenizer.consume_token()?.expect_leaf()?.as_str() {
"on" => Ok(true), "on" => Ok(true),
"off" => Ok(false), "off" => Ok(false),
_ => Err(ParseError::Expected("boolean")), _ => Err(tokenizer.add_context(ParseError::Expected("boolean"))),
} }
} }
} }
impl<R: std::io::BufRead> ReadDsn<R> for i32 { impl<R: std::io::BufRead> ReadDsn<R> for i32 {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> { fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
Ok(tokenizer Ok(tokenizer
.consume_token()? .consume_token()?
.expect_leaf()? .expect_leaf()?
.parse() .parse()
.map_err(|_| ParseError::Expected("i32"))?) .map_err(|_| tokenizer.add_context(ParseError::Expected("i32")))?)
} }
} }
impl<R: std::io::BufRead> ReadDsn<R> for u32 { impl<R: std::io::BufRead> ReadDsn<R> for u32 {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> { fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
Ok(tokenizer Ok(tokenizer
.consume_token()? .consume_token()?
.expect_leaf()? .expect_leaf()?
.parse() .parse()
.map_err(|_| ParseError::Expected("u32"))?) .map_err(|_| tokenizer.add_context(ParseError::Expected("u32")))?)
} }
} }
impl<R: std::io::BufRead> ReadDsn<R> for usize { impl<R: std::io::BufRead> ReadDsn<R> for usize {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> { fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
Ok(tokenizer Ok(tokenizer
.consume_token()? .consume_token()?
.expect_leaf()? .expect_leaf()?
.parse() .parse()
.map_err(|_| ParseError::Expected("usize"))?) .map_err(|_| tokenizer.add_context(ParseError::Expected("usize")))?)
} }
} }
impl<R: std::io::BufRead> ReadDsn<R> for f32 { impl<R: std::io::BufRead> ReadDsn<R> for f32 {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> { fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
Ok(tokenizer Ok(tokenizer
.consume_token()? .consume_token()?
.expect_leaf()? .expect_leaf()?
.parse() .parse()
.map_err(|_| ParseError::Expected("f32"))?) .map_err(|_| tokenizer.add_context(ParseError::Expected("f32")))?)
} }
} }
impl<R: std::io::BufRead> ReadDsn<R> for f64 { impl<R: std::io::BufRead> ReadDsn<R> for f64 {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> { fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
Ok(tokenizer Ok(tokenizer
.consume_token()? .consume_token()?
.expect_leaf()? .expect_leaf()?
.parse() .parse()
.map_err(|_| ParseError::Expected("f64"))?) .map_err(|_| tokenizer.add_context(ParseError::Expected("f64")))?)
} }
} }
pub struct ListTokenizer<R: std::io::BufRead> { pub struct ListTokenizer<R: std::io::BufRead> {
reader: R, reader: R,
peeked_char: Option<char>, peeked_char: Option<char>,
cached_token: Option<ListToken>, cached_token: Option<InputToken>,
space_in_quoted: bool, space_in_quoted: bool,
quote_char: Option<char>, quote_char: Option<char>,
line: usize, line: usize,
@@ -134,12 +180,33 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
} }
} }
fn next_char(&mut self) -> Result<char, ParseError> { pub fn context(&self) -> (usize, usize) {
(self.line, self.column)
}
fn add_context(&self, error: ParseError) -> ParseErrorContext {
ParseErrorContext {
error,
context: (self.line, self.column),
}
}
fn map_context<T>(&self, result: Result<T, ParseError>)
-> Result<T, ParseErrorContext>
{
result.map_err(|err| self.add_context(err))
}
fn next_char(&mut self) -> Result<char, ParseErrorContext> {
let return_chr = if let Some(chr) = self.peeked_char { let return_chr = if let Some(chr) = self.peeked_char {
self.peeked_char = None; self.peeked_char = None;
chr chr
} else { } else {
self.reader.chars().next().ok_or(ParseError::Eof)?? self.reader
.chars()
.next()
.ok_or(self.add_context(ParseError::Eof))?
.map_err(|err| self.add_context(err.into()))?
}; };
if return_chr == '\n' { if return_chr == '\n' {
@@ -152,17 +219,21 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
Ok(return_chr) Ok(return_chr)
} }
fn peek_char(&mut self) -> Result<char, ParseError> { fn peek_char(&mut self) -> Result<char, ParseErrorContext> {
if let Some(chr) = self.peeked_char { if let Some(chr) = self.peeked_char {
Ok(chr) Ok(chr)
} else { } else {
let chr = self.reader.chars().next().ok_or(ParseError::Eof)??; let chr = self.reader
.chars()
.next()
.ok_or(self.add_context(ParseError::Eof))?
.map_err(|err| self.add_context(err.into()))?;
self.peeked_char = Some(chr); self.peeked_char = Some(chr);
Ok(chr) Ok(chr)
} }
} }
fn skip_whitespace(&mut self) -> Result<(), ParseError> { fn skip_whitespace(&mut self) -> Result<(), ParseErrorContext> {
loop { loop {
let chr = self.peek_char()?; let chr = self.peek_char()?;
if chr == ' ' || chr == '\r' || chr == '\n' { if chr == ' ' || chr == '\r' || chr == '\n' {
@@ -173,7 +244,7 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
} }
} }
fn read_string(&mut self) -> Result<String, ParseError> { fn read_string(&mut self) -> Result<String, ParseErrorContext> {
if let Some(chr) = self.quote_char { if let Some(chr) = self.quote_char {
if chr == self.peek_char()? { if chr == self.peek_char()? {
return self.read_quoted(); return self.read_quoted();
@@ -182,7 +253,7 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
self.read_unquoted() self.read_unquoted()
} }
fn read_unquoted(&mut self) -> Result<String, ParseError> { fn read_unquoted(&mut self) -> Result<String, ParseErrorContext> {
let mut string = String::new(); let mut string = String::new();
loop { loop {
@@ -194,13 +265,13 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
} }
if string.is_empty() { if string.is_empty() {
Err(ParseError::Expected("string (unquoted)")) Err(self.add_context(ParseError::Expected("string (unquoted)")))
} else { } else {
Ok(string) Ok(string)
} }
} }
fn read_quoted(&mut self) -> Result<String, ParseError> { fn read_quoted(&mut self) -> Result<String, ParseErrorContext> {
let mut string = String::new(); let mut string = String::new();
if self.next_char().unwrap() != self.quote_char.unwrap() { if self.next_char().unwrap() != self.quote_char.unwrap() {
@@ -225,7 +296,7 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
// the following two methods effectively allow 1 token of lookahead // the following two methods effectively allow 1 token of lookahead
// returns next token, either a cached one returned earlier or a newly read one // returns next token, either a cached one returned earlier or a newly read one
pub fn consume_token(&mut self) -> Result<ListToken, ParseError> { pub fn consume_token(&mut self) -> Result<InputToken, ParseErrorContext> {
// move out of cache if not empty, otherwise consume input // move out of cache if not empty, otherwise consume input
// always leaves cache empty // always leaves cache empty
if let Some(token) = self.cached_token.take() { if let Some(token) = self.cached_token.take() {
@@ -237,34 +308,39 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
} }
// puts a token back into cache, to be consumed by something else // puts a token back into cache, to be consumed by something else
pub fn return_token(&mut self, token: ListToken) { pub fn return_token(&mut self, token: InputToken) {
self.cached_token = Some(token); self.cached_token = Some(token);
} }
fn read_token(&mut self) -> Result<ListToken, ParseError> { fn read_token(&mut self) -> Result<InputToken, ParseErrorContext> {
self.skip_whitespace()?; self.skip_whitespace()?;
let context = self.context();
let chr = self.peek_char()?; let chr = self.peek_char()?;
Ok(if chr == '(' { Ok(InputToken::new(
self.next_char().unwrap(); if chr == '(' {
self.skip_whitespace()?; self.next_char().unwrap();
ListToken::Start { self.skip_whitespace()?;
name: self.read_string()?, ListToken::Start {
} name: self.read_string()?,
} else if chr == ')' { }
self.next_char().unwrap(); } else if chr == ')' {
ListToken::End self.next_char().unwrap();
} else { ListToken::End
ListToken::Leaf { } else {
value: self.read_string()?, ListToken::Leaf {
} value: self.read_string()?,
}) }
},
context,
))
} }
pub fn read_value<T: ReadDsn<R>>(&mut self) -> Result<T, ParseError> { pub fn read_value<T: ReadDsn<R>>(&mut self) -> Result<T, ParseErrorContext> {
T::read_dsn(self) T::read_dsn(self)
} }
pub fn read_named<T: ReadDsn<R>>(&mut self, name: &'static str) -> Result<T, ParseError> { pub fn read_named<T: ReadDsn<R>>(&mut self, name: &'static str) -> Result<T, ParseErrorContext> {
self.consume_token()?.expect_start(name)?; self.consume_token()?.expect_start(name)?;
let value = self.read_value::<T>()?; let value = self.read_value::<T>()?;
self.consume_token()?.expect_end()?; self.consume_token()?.expect_end()?;
@@ -274,35 +350,35 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
pub fn read_optional<T: ReadDsn<R>>( pub fn read_optional<T: ReadDsn<R>>(
&mut self, &mut self,
name: &'static str, name: &'static str,
) -> Result<Option<T>, ParseError> { ) -> Result<Option<T>, ParseErrorContext> {
let token = self.consume_token()?; let input = self.consume_token()?;
if let ListToken::Start { if let ListToken::Start {
name: ref actual_name, name: ref actual_name,
} = token } = input.token
{ {
if actual_name == name { if actual_name == name {
let value = self.read_value::<T>()?; let value = self.read_value::<T>()?;
self.consume_token()?.expect_end()?; self.consume_token()?.expect_end()?;
Ok(Some(value)) Ok(Some(value))
} else { } else {
self.return_token(token); self.return_token(input);
Ok(None) Ok(None)
} }
} else { } else {
self.return_token(token); self.return_token(input);
Ok(None) Ok(None)
} }
} }
pub fn read_array<T: ReadDsn<R>>(&mut self) -> Result<Vec<T>, ParseError> { pub fn read_array<T: ReadDsn<R>>(&mut self) -> Result<Vec<T>, ParseErrorContext> {
let mut array = Vec::<T>::new(); let mut array = Vec::<T>::new();
loop { loop {
let token = self.consume_token()?; let input = self.consume_token()?;
if let ListToken::Leaf { .. } = token { if let ListToken::Leaf { .. } = input.token {
self.return_token(token); self.return_token(input);
array.push(self.read_value::<T>()?); array.push(self.read_value::<T>()?);
} else { } else {
self.return_token(token); self.return_token(input);
break; break;
} }
} }
@@ -312,24 +388,24 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
pub fn read_named_array<T: ReadDsn<R>>( pub fn read_named_array<T: ReadDsn<R>>(
&mut self, &mut self,
name: &'static str, name: &'static str,
) -> Result<Vec<T>, ParseError> { ) -> Result<Vec<T>, ParseErrorContext> {
let mut array = Vec::<T>::new(); let mut array = Vec::<T>::new();
loop { loop {
let token = self.consume_token()?; let input = self.consume_token()?;
if let ListToken::Start { if let ListToken::Start {
name: ref actual_name, name: ref actual_name,
} = token } = input.token
{ {
if actual_name == name { if actual_name == name {
let value = self.read_value::<T>()?; let value = self.read_value::<T>()?;
self.consume_token()?.expect_end()?; self.consume_token()?.expect_end()?;
array.push(value); array.push(value);
} else { } else {
self.return_token(token); self.return_token(input);
break; break;
} }
} else { } else {
self.return_token(token); self.return_token(input);
break; break;
} }
} }

View File

@@ -1,6 +1,6 @@
use super::common::ListToken; use super::common::ListToken;
use super::read::ReadDsn; use super::read::ReadDsn;
use super::read::{ListTokenizer, ParseError}; use super::read::{ListTokenizer, ParseError, ParseErrorContext};
use super::write::ListWriter; use super::write::ListWriter;
use super::write::WriteSes; use super::write::WriteSes;
use specctra_derive::ReadDsn; use specctra_derive::ReadDsn;
@@ -237,14 +237,15 @@ pub enum Shape {
} }
impl<R: std::io::BufRead> ReadDsn<R> for Shape { impl<R: std::io::BufRead> ReadDsn<R> for Shape {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> { fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
let ctx = tokenizer.context();
let name = tokenizer.consume_token()?.expect_any_start()?; let name = tokenizer.consume_token()?.expect_any_start()?;
let value = match name.as_str() { let value = match name.as_str() {
"circle" => Ok(Shape::Circle(tokenizer.read_value()?)), "circle" => Ok(Shape::Circle(tokenizer.read_value()?)),
"rect" => Ok(Shape::Rect(tokenizer.read_value()?)), "rect" => Ok(Shape::Rect(tokenizer.read_value()?)),
"path" => Ok(Shape::Path(tokenizer.read_value()?)), "path" => Ok(Shape::Path(tokenizer.read_value()?)),
"polygon" => Ok(Shape::Polygon(tokenizer.read_value()?)), "polygon" => Ok(Shape::Polygon(tokenizer.read_value()?)),
_ => Err(ParseError::Expected("a different keyword")), _ => Err(ParseError::Expected("a different keyword").add_context(ctx)),
}; };
tokenizer.consume_token()?.expect_end()?; tokenizer.consume_token()?.expect_end()?;
value value
@@ -345,16 +346,16 @@ pub struct Point {
// Custom impl for the case described above // Custom impl for the case described above
impl<R: std::io::BufRead> ReadDsn<R> for Vec<Point> { impl<R: std::io::BufRead> ReadDsn<R> for Vec<Point> {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> { fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
let mut array = Vec::<Point>::new(); let mut array = Vec::<Point>::new();
loop { loop {
let token = tokenizer.consume_token()?; let input = tokenizer.consume_token()?;
if let ListToken::Leaf { value: ref x } = token { if let ListToken::Leaf { value: ref x } = input.token {
let x = x.parse::<f64>().unwrap(); let x = x.parse::<f64>().unwrap();
let y = tokenizer.read_value::<f64>()?; let y = tokenizer.read_value::<f64>()?;
array.push(Point { x, y }); array.push(Point { x, y });
} else { } else {
tokenizer.return_token(token); tokenizer.return_token(input);
break; break;
} }
} }
@@ -363,14 +364,14 @@ impl<R: std::io::BufRead> ReadDsn<R> for Vec<Point> {
} }
impl<R: std::io::BufRead> ReadDsn<R> for Option<Point> { impl<R: std::io::BufRead> ReadDsn<R> for Option<Point> {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseError> { fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
let token = tokenizer.consume_token()?; let input = tokenizer.consume_token()?;
if let ListToken::Leaf { value: ref x } = token { if let ListToken::Leaf { value: ref x } = input.token {
let x = x.parse::<f64>().unwrap(); let x = x.parse::<f64>().unwrap();
let y = tokenizer.read_value::<f64>()?; let y = tokenizer.read_value::<f64>()?;
Ok(Some(Point { x, y })) Ok(Some(Point { x, y }))
} else { } else {
tokenizer.return_token(token); tokenizer.return_token(input);
Ok(None) Ok(None)
} }
} }