refactor(specctra-core/read): align read_optional to be like read_array_with_alias

Authored by Alain Emilia Anna Zscheile on 2024-12-06 15:53:11 +01:00; committed by mikolaj
parent 8fb9bfc0e6
commit 5f4496ffce
3 changed files with 33 additions and 48 deletions
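
In short, read_optional now takes a slice of accepted keyword names, matched case-insensitively, instead of a single &'static str, mirroring read_array_with_alias; call sites wrap the old name in a one-element slice. A minimal standalone sketch of the matching rule the new signature enables (the name_matches helper below is illustrative, not part of the crate):

    // Illustrative helper (not from the crate): accept any of several keyword
    // spellings, ignoring ASCII case, as the refactored read_optional does.
    fn name_matches(valid_names: &[&str], actual_name: &str) -> bool {
        valid_names.iter().any(|n| n.eq_ignore_ascii_case(actual_name))
    }

    fn main() {
        assert!(name_matches(&["string_quote"], "STRING_QUOTE"));
        assert!(!name_matches(&["host_cad"], "host_version"));
    }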


@@ -190,15 +190,11 @@ impl AccessRules for SpecctraMesadata
     }
 
     fn largest_clearance(&self, _maybe_net: Option<usize>) -> f64 {
-        let mut largest: f64 = 0.0;
-
-        for rule in self.class_rules.values() {
-            if rule.clearance > largest {
-                largest = rule.clearance;
-            }
-        }
-
-        largest
+        self.class_rules
+            .values()
+            .map(|rule| rule.clearance)
+            .reduce(f64::max)
+            .unwrap_or(0.0)
     }
 }
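
The largest_clearance rewrite replaces the manual running-maximum loop with an iterator fold: reduce(f64::max) returns None on an empty iterator, and unwrap_or(0.0) preserves the old default. A tiny self-contained sketch of the same pattern on plain data (names are illustrative):

    // Fold to the maximum, falling back to 0.0 when there are no values,
    // mirroring the refactored largest_clearance.
    fn largest(values: &[f64]) -> f64 {
        values.iter().copied().reduce(f64::max).unwrap_or(0.0)
    }

    fn main() {
        assert_eq!(largest(&[0.2, 1.5, 0.7]), 1.5);
        assert_eq!(largest(&[]), 0.0);
    }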


@@ -46,13 +46,13 @@ impl<R: std::io::BufRead> ReadDsn<R> for Parser {
     fn read_dsn(tokenizer: &mut ListTokenizer<R>) -> Result<Self, ParseErrorContext> {
         Ok(Self {
             string_quote: tokenizer
-                .read_optional("string_quote")?
+                .read_optional(&["string_quote"])?
                 .inspect(|v| tokenizer.quote_char = Some(*v)),
             space_in_quoted_tokens: tokenizer
-                .read_optional("space_in_quoted_tokens")?
+                .read_optional(&["space_in_quoted_tokens"])?
                 .inspect(|v| tokenizer.space_in_quoted = *v),
-            host_cad: tokenizer.read_optional("host_cad")?,
-            host_version: tokenizer.read_optional("host_version")?,
+            host_cad: tokenizer.read_optional(&["host_cad"])?,
+            host_version: tokenizer.read_optional(&["host_version"])?,
         })
     }
 }
@@ -332,25 +332,30 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
     pub fn read_optional<T: ReadDsn<R>>(
         &mut self,
-        name: &'static str,
+        valid_names: &[&'static str],
     ) -> Result<Option<T>, ParseErrorContext> {
         let input = self.consume_token()?;
-        if let ListToken::Start {
-            name: ref actual_name,
-        } = input.token
-        {
-            if actual_name == name {
-                let value = self.read_value::<T>()?;
-                self.consume_token()?.expect_end()?;
-                Ok(Some(value))
-            } else {
-                self.return_token(input);
-                Ok(None)
-            }
-        } else {
-            self.return_token(input);
-            Ok(None)
-        }
+        Ok(
+            if let ListToken::Start {
+                name: ref actual_name,
+            } = input.token
+            {
+                if valid_names
+                    .iter()
+                    .any(|i| i.eq_ignore_ascii_case(actual_name))
+                {
+                    let value = self.read_value::<T>()?;
+                    self.consume_token()?.expect_end()?;
+                    Some(value)
+                } else {
+                    self.return_token(input);
+                    None
+                }
+            } else {
+                self.return_token(input);
+                None
+            },
+        )
     }
 
     pub fn read_array<T: ReadDsn<R>>(&mut self) -> Result<Vec<T>, ParseErrorContext> {
@@ -379,25 +384,9 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
         &mut self,
         valid_names: &[&'static str],
     ) -> Result<Vec<T>, ParseErrorContext> {
-        let mut array = Vec::<T>::new();
-        loop {
-            let input = self.consume_token()?;
-            if let ListToken::Start {
-                name: ref actual_name,
-            } = input.token
-            {
-                if valid_names.contains(&actual_name.to_ascii_lowercase().as_ref()) {
-                    let value = self.read_value::<T>()?;
-                    self.consume_token()?.expect_end()?;
-                    array.push(value);
-                } else {
-                    self.return_token(input);
-                    break;
-                }
-            } else {
-                self.return_token(input);
-                break;
-            }
+        let mut array = Vec::new();
+        while let Some(value) = self.read_optional::<T>(valid_names)? {
+            array.push(value);
         }
         Ok(array)
     }
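
With read_optional accepting the same alias slice, read_array_with_alias collapses to a while-let loop that keeps pulling optional values until a non-matching token is pushed back. A hedged, self-contained sketch of that collection shape, with a closure standing in for the tokenizer (the next_optional and collect_all names are illustrative, not crate APIs):

    // Keep reading optional values until the reader reports None, collecting
    // everything that matched; errors propagate immediately via `?`.
    fn collect_all<T, E>(
        mut next_optional: impl FnMut() -> Result<Option<T>, E>,
    ) -> Result<Vec<T>, E> {
        let mut array = Vec::new();
        while let Some(value) = next_optional()? {
            array.push(value);
        }
        Ok(array)
    }

    fn main() {
        // Simulated token stream: two matching values, then a non-match.
        let mut items = vec![Some(1), Some(2), None].into_iter();
        let result: Result<Vec<i32>, ()> = collect_all(|| Ok(items.next().flatten()));
        assert_eq!(result.unwrap(), vec![1, 2]);
    }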


@@ -72,7 +72,7 @@ fn impl_field(field: &Field) -> TokenStream {
        if ident == "Option" {
            return quote! {
                #name: tokenizer
-                    .read_optional(stringify!(#name_str))?,
+                    .read_optional(&[stringify!(#name_str)])?,
            };
        }
    }
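
For the derive macro, the only change is wrapping the generated name in a one-element slice, so an Option field now reads via read_optional(&[stringify!(...)]). A tiny standalone check of what that slice literal evaluates to, assuming #name_str expands to a field identifier such as host_cad (hypothetical example, not the macro's actual output):

    // stringify!(ident) inside a one-element slice yields &["ident"], which is
    // exactly the shape the updated read_optional signature expects.
    fn main() {
        let valid_names: &[&'static str] = &[stringify!(host_cad)];
        assert_eq!(valid_names.len(), 1);
        assert_eq!(valid_names[0], "host_cad");
    }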