refactor(specctra-core/read): accept multiple possible names anywhere

Previously, some methods handled items/lists with multiple allowed names,
and some only allowed a single name. As the overhead of always handling
multiple names isn't that large, do that everywhere and make the overall
API more streamlined in the process.
This commit is contained in:
Alain Emilia Anna Zscheile 2024-12-06 16:13:19 +01:00 committed by mikolaj
parent e90449b75e
commit 5e54453433
4 changed files with 36 additions and 49 deletions

View File

@@ -7,15 +7,22 @@ pub enum ListToken {
} }
impl ListToken { impl ListToken {
pub fn expect_start(self, name: &'static str) -> Result<(), ParseError> { pub fn is_start_of(&self, valid_names: &[&'static str]) -> bool {
if let Self::Start { name: actual_name } = self { if let Self::Start { name: actual_name } = self {
if name.eq_ignore_ascii_case(&actual_name) { valid_names
Ok(()) .iter()
} else { .any(|i| i.eq_ignore_ascii_case(actual_name))
Err(ParseError::ExpectedStartOfList(name))
}
} else { } else {
Err(ParseError::ExpectedStartOfList(name)) false
}
}
pub fn expect_start(self, valid_names: &[&'static str]) -> Result<(), ParseError> {
assert!(!valid_names.is_empty());
if self.is_start_of(valid_names) {
Ok(())
} else {
Err(ParseError::ExpectedStartOfList(valid_names[0]))
} }
} }

View File

@@ -13,9 +13,9 @@ impl InputToken {
Self { token, context } Self { token, context }
} }
pub fn expect_start(self, name: &'static str) -> Result<(), ParseErrorContext> { pub fn expect_start(self, valid_names: &[&'static str]) -> Result<(), ParseErrorContext> {
self.token self.token
.expect_start(name) .expect_start(valid_names)
.map_err(|err| err.add_context(self.context)) .map_err(|err| err.add_context(self.context))
} }
@@ -295,9 +295,9 @@ impl&lt;R: std::io::BufRead&gt; ListTokenizer&lt;R&gt; {
pub fn read_named<T: ReadDsn<R>>( pub fn read_named<T: ReadDsn<R>>(
&mut self, &mut self,
name: &'static str, valid_names: &[&'static str],
) -> Result<T, ParseErrorContext> { ) -> Result<T, ParseErrorContext> {
self.consume_token()?.expect_start(name)?; self.consume_token()?.expect_start(valid_names)?;
let value = self.read_value::<T>()?; let value = self.read_value::<T>()?;
self.consume_token()?.expect_end()?; self.consume_token()?.expect_end()?;
Ok(value) Ok(value)
@@ -308,27 +308,14 @@ impl&lt;R: std::io::BufRead&gt; ListTokenizer&lt;R&gt; {
valid_names: &[&'static str], valid_names: &[&'static str],
) -> Result<Option<T>, ParseErrorContext> { ) -> Result<Option<T>, ParseErrorContext> {
let input = self.consume_token()?; let input = self.consume_token()?;
Ok( Ok(if input.token.is_start_of(valid_names) {
if let ListToken::Start { let value = self.read_value::<T>()?;
name: ref actual_name, self.consume_token()?.expect_end()?;
} = input.token Some(value)
{ } else {
if valid_names self.return_token(input);
.iter() None
.any(|i| i.eq_ignore_ascii_case(actual_name)) })
{
let value = self.read_value::<T>()?;
self.consume_token()?.expect_end()?;
Some(value)
} else {
self.return_token(input);
None
}
} else {
self.return_token(input);
None
},
)
} }
pub fn read_array<T: ReadDsn<R>>(&mut self) -> Result<Vec<T>, ParseErrorContext> { pub fn read_array<T: ReadDsn<R>>(&mut self) -> Result<Vec<T>, ParseErrorContext> {
@@ -347,13 +334,6 @@ impl&lt;R: std::io::BufRead&gt; ListTokenizer&lt;R&gt; {
} }
pub fn read_named_array<T: ReadDsn<R>>( pub fn read_named_array<T: ReadDsn<R>>(
&mut self,
name: &'static str,
) -> Result<Vec<T>, ParseErrorContext> {
self.read_array_with_alias(&[name])
}
pub fn read_array_with_alias<T: ReadDsn<R>>(
&mut self, &mut self,
valid_names: &[&'static str], valid_names: &[&'static str],
) -> Result<Vec<T>, ParseErrorContext> { ) -> Result<Vec<T>, ParseErrorContext> {

View File

@@ -103,18 +103,18 @@ pub struct Structure {
impl<R: std::io::BufRead> ReadDsn<R> for Structure { impl<R: std::io::BufRead> ReadDsn<R> for Structure {
fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> { fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
let mut value = Self { let mut value = Self {
layers: tokenizer.read_named_array("layer")?, layers: tokenizer.read_named_array(&["layer"])?,
boundary: tokenizer.read_named("boundary")?, boundary: tokenizer.read_named(&["boundary"])?,
planes: tokenizer.read_named_array("plane")?, planes: tokenizer.read_named_array(&["plane"])?,
keepouts: tokenizer.read_named_array("keepout")?, keepouts: tokenizer.read_named_array(&["keepout"])?,
via: tokenizer.read_named("via")?, via: tokenizer.read_named(&["via"])?,
grids: tokenizer.read_named_array("grid")?, grids: tokenizer.read_named_array(&["grid"])?,
rules: tokenizer.read_named_array("rule")?, rules: tokenizer.read_named_array(&["rule"])?,
}; };
value value
.layers .layers
.append(&mut tokenizer.read_named_array("layer")?); .append(&mut tokenizer.read_named_array(&["layer"])?);
Ok(value) Ok(value)
} }

View File

@@ -61,7 +61,7 @@ fn impl_field(field: &amp;Field) -&gt; TokenStream {
} }
FieldType::NamedVec(valid_aliases) => { FieldType::NamedVec(valid_aliases) => {
quote! { quote! {
#name: tokenizer.read_array_with_alias(&[#(#valid_aliases),*])?, #name: tokenizer.read_named_array(&[#(#valid_aliases),*])?,
} }
} }
FieldType::NotSpecified => { FieldType::NotSpecified => {
@@ -79,7 +79,7 @@ fn impl_field(field: &amp;Field) -&gt; TokenStream {
} }
quote! { quote! {
#name: tokenizer.read_named(stringify!(#name_str))?, #name: tokenizer.read_named(&[stringify!(#name_str)])?,
} }
} }
} }