refactor(specctra-core/read): accept multiple possible names anywhere

Previously, some methods handled items/lists with multiple allowed names
while others accepted only a single name. Since the overhead of always
handling multiple names is small, accept a slice of valid names
everywhere, which also streamlines the overall API.
Alain Emilia Anna Zscheile, 2024-12-06 16:13:19 +01:00 (committed by mikolaj)
parent e90449b75e
commit 5e54453433
4 changed files with 36 additions and 49 deletions
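
For callers the change is mechanical: every lookup now passes a slice of
accepted names, and a single name becomes a one-element slice. A before/after
sketch of a call site (types elided; the names are taken from the Structure
reader changed below):

    // before: one name per call, aliases needed a separate method
    let boundary = tokenizer.read_named("boundary")?;
    let layers = tokenizer.read_array_with_alias(&["layer"])?;

    // after: every reader takes a slice of accepted names
    let boundary = tokenizer.read_named(&["boundary"])?;
    let layers = tokenizer.read_named_array(&["layer"])?;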

@@ -7,15 +7,22 @@ pub enum ListToken {
 }
 impl ListToken {
-    pub fn expect_start(self, name: &'static str) -> Result<(), ParseError> {
+    pub fn is_start_of(&self, valid_names: &[&'static str]) -> bool {
         if let Self::Start { name: actual_name } = self {
-            if name.eq_ignore_ascii_case(&actual_name) {
-                Ok(())
-            } else {
-                Err(ParseError::ExpectedStartOfList(name))
-            }
+            valid_names
+                .iter()
+                .any(|i| i.eq_ignore_ascii_case(actual_name))
         } else {
-            Err(ParseError::ExpectedStartOfList(name))
+            false
         }
     }
+
+    pub fn expect_start(self, valid_names: &[&'static str]) -> Result<(), ParseError> {
+        assert!(!valid_names.is_empty());
+        if self.is_start_of(valid_names) {
+            Ok(())
+        } else {
+            Err(ParseError::ExpectedStartOfList(valid_names[0]))
+        }
+    }
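
The matching semantics of the two new methods, as a self-contained sketch (a
mini reimplementation for illustration, not the crate's actual types):
matching is ASCII-case-insensitive, a non-Start token never matches, and on
failure expect_start reports the first accepted name, which is why the slice
must be non-empty.

    enum ListToken {
        Start { name: String },
        End,
    }

    impl ListToken {
        fn is_start_of(&self, valid_names: &[&'static str]) -> bool {
            if let Self::Start { name } = self {
                valid_names.iter().any(|i| i.eq_ignore_ascii_case(name))
            } else {
                false
            }
        }
    }

    fn main() {
        let token = ListToken::Start { name: "PCB".to_string() };
        assert!(token.is_start_of(&["pcb"]));           // ASCII-case-insensitive
        assert!(!token.is_start_of(&["structure"]));    // name not in the slice
        assert!(!ListToken::End.is_start_of(&["pcb"])); // not a Start token
    }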

@@ -13,9 +13,9 @@ impl InputToken {
         Self { token, context }
     }

-    pub fn expect_start(self, name: &'static str) -> Result<(), ParseErrorContext> {
+    pub fn expect_start(self, valid_names: &[&'static str]) -> Result<(), ParseErrorContext> {
         self.token
-            .expect_start(name)
+            .expect_start(valid_names)
             .map_err(|err| err.add_context(self.context))
     }
@@ -295,9 +295,9 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
     pub fn read_named<T: ReadDsn<R>>(
         &mut self,
-        name: &'static str,
+        valid_names: &[&'static str],
     ) -> Result<T, ParseErrorContext> {
-        self.consume_token()?.expect_start(name)?;
+        self.consume_token()?.expect_start(valid_names)?;
         let value = self.read_value::<T>()?;
         self.consume_token()?.expect_end()?;
         Ok(value)
@@ -308,27 +308,14 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
         valid_names: &[&'static str],
     ) -> Result<Option<T>, ParseErrorContext> {
         let input = self.consume_token()?;
-        Ok(
-            if let ListToken::Start {
-                name: ref actual_name,
-            } = input.token
-            {
-                if valid_names
-                    .iter()
-                    .any(|i| i.eq_ignore_ascii_case(actual_name))
-                {
-                    let value = self.read_value::<T>()?;
-                    self.consume_token()?.expect_end()?;
-                    Some(value)
-                } else {
-                    self.return_token(input);
-                    None
-                }
-            } else {
-                self.return_token(input);
-                None
-            },
-        )
+        Ok(if input.token.is_start_of(valid_names) {
+            let value = self.read_value::<T>()?;
+            self.consume_token()?.expect_end()?;
+            Some(value)
+        } else {
+            self.return_token(input);
+            None
+        })
     }

     pub fn read_array<T: ReadDsn<R>>(&mut self) -> Result<Vec<T>, ParseErrorContext> {
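
The simplification above leans on one-token lookahead: read_optional consumes
a token, and if it does not open one of the expected lists, hands it back via
return_token. A minimal sketch of that pushback mechanism (the String token
type and struct layout are stand-ins, not the crate's internals):

    struct Tokenizer {
        input: Vec<String>,       // remaining tokens, next one at the end
        returned: Option<String>, // one-slot pushback buffer
    }

    impl Tokenizer {
        fn consume_token(&mut self) -> Option<String> {
            // A previously returned token is handed out before new input.
            self.returned.take().or_else(|| self.input.pop())
        }

        fn return_token(&mut self, token: String) {
            // Only a single token of lookahead is ever needed.
            debug_assert!(self.returned.is_none());
            self.returned = Some(token);
        }
    }

    fn main() {
        let mut t = Tokenizer { input: vec!["(".into()], returned: None };
        let tok = t.consume_token().unwrap();
        t.return_token(tok); // not what we wanted: push it back
        assert_eq!(t.consume_token().as_deref(), Some("("));
    }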
@@ -347,13 +334,6 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
     }

-    pub fn read_named_array<T: ReadDsn<R>>(
-        &mut self,
-        name: &'static str,
-    ) -> Result<Vec<T>, ParseErrorContext> {
-        self.read_array_with_alias(&[name])
-    }
-
-    pub fn read_array_with_alias<T: ReadDsn<R>>(
+    pub fn read_named_array<T: ReadDsn<R>>(
         &mut self,
         valid_names: &[&'static str],
     ) -> Result<Vec<T>, ParseErrorContext> {

@@ -103,18 +103,18 @@ pub struct Structure {
 impl<R: std::io::BufRead> ReadDsn<R> for Structure {
     fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
         let mut value = Self {
-            layers: tokenizer.read_named_array("layer")?,
-            boundary: tokenizer.read_named("boundary")?,
-            planes: tokenizer.read_named_array("plane")?,
-            keepouts: tokenizer.read_named_array("keepout")?,
-            via: tokenizer.read_named("via")?,
-            grids: tokenizer.read_named_array("grid")?,
-            rules: tokenizer.read_named_array("rule")?,
+            layers: tokenizer.read_named_array(&["layer"])?,
+            boundary: tokenizer.read_named(&["boundary"])?,
+            planes: tokenizer.read_named_array(&["plane"])?,
+            keepouts: tokenizer.read_named_array(&["keepout"])?,
+            via: tokenizer.read_named(&["via"])?,
+            grids: tokenizer.read_named_array(&["grid"])?,
+            rules: tokenizer.read_named_array(&["rule"])?,
         };
         value
             .layers
-            .append(&mut tokenizer.read_named_array("layer")?);
+            .append(&mut tokenizer.read_named_array(&["layer"])?);
         Ok(value)
     }
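
For context on why layers are read twice: in a Specctra DSN structure list,
(layer ...) entries may also appear after the other entries, so any layer
lists that follow the rules are appended to those read up front. Roughly like
this illustrative input (invented, not from a real board file):

    (structure
      (layer F.Cu (type signal))
      (layer B.Cu (type signal))
      (boundary (path pcb 0 ...))
      (via "Via[0-1]_600:400_um")
      (rule (width 250))
      (layer In1.Cu (type signal)))  ; trailing layer lists land in the append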

@@ -61,7 +61,7 @@ fn impl_field(field: &Field) -> TokenStream {
         }
         FieldType::NamedVec(valid_aliases) => {
            quote! {
-                #name: tokenizer.read_array_with_alias(&[#(#valid_aliases),*])?,
+                #name: tokenizer.read_named_array(&[#(#valid_aliases),*])?,
            }
        }
        FieldType::NotSpecified => {
@@ -79,7 +79,7 @@ fn impl_field(field: &Field) -> TokenStream {
            }

            quote! {
-                #name: tokenizer.read_named(stringify!(#name_str))?,
+                #name: tokenizer.read_named(&[stringify!(#name_str)])?,
            }
        }
    }
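
Both macro arms now build the same kind of name slice; condensed into two
hypothetical free functions (using syn/quote, a sketch of what impl_field
emits for each case, not its literal source):

    use proc_macro2::TokenStream;
    use quote::quote;
    use syn::Ident;

    // FieldType::NamedVec: accepted names come from the field's attribute.
    fn expand_named_vec(name: &Ident, valid_aliases: &[String]) -> TokenStream {
        quote! {
            #name: tokenizer.read_named_array(&[#(#valid_aliases),*])?,
        }
    }

    // Fallback: the field's own name, wrapped in a one-element slice.
    fn expand_named(name: &Ident, name_str: &str) -> TokenStream {
        quote! {
            #name: tokenizer.read_named(&[#name_str])?,
        }
    }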