mirror of https://codeberg.org/topola/topola.git
specctra, specctra_derive: rewrite logic inspecting macro attributes
This allows parsing more complicated attribute syntax, which is needed here to specify multiple names for a `Vec` field.
parent 9159312ea5
commit 336cea11e8
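The "multiple names" mentioned above are given as extra string literals inside `#[vec(...)]`, which the new code pulls apart with `syn::punctuated::Punctuated`. A minimal standalone sketch of that parsing step, assuming syn 2 (the `vec_names` helper below is illustrative, not code from this commit):

```rust
// Illustrative helper, not project code: extracts the comma-separated
// string literals from an attribute such as #[vec("clearance", "clear")].
use syn::{parse_quote, punctuated::Punctuated, Attribute, LitStr, Token};

fn vec_names(attr: &Attribute) -> Vec<String> {
    attr.parse_args_with(Punctuated::<LitStr, Token![,]>::parse_terminated)
        .expect("#[vec(...)] must contain a list of string literals")
        .iter()
        .map(LitStr::value)
        .collect()
}

fn main() {
    let attr: Attribute = parse_quote!(#[vec("clearance", "clear")]);
    assert_eq!(vec_names(&attr), ["clearance", "clear"]);
}
```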
specctra_derive, shared attribute handling used by both derives:

```diff
@@ -1,5 +1,6 @@
 use proc_macro::TokenStream;
-use syn::{Attribute, DeriveInput, LitStr};
+use syn::{Attribute, DeriveInput, LitStr, Meta, Token};
+use syn::punctuated::Punctuated;
 
 mod read;
 mod write;
@@ -16,16 +17,37 @@ pub fn derive_write(input: TokenStream) -> TokenStream {
     write::impl_write(&input).into()
 }
 
-fn attr_present(attrs: &Vec<Attribute>, name: &str) -> bool {
-    attrs
-        .iter()
-        .find(|attr| attr.path().is_ident(name))
-        .is_some()
+enum FieldType {
+    Anonymous,
+    AnonymousVec,
+    NamedVec(Vec<LitStr>),
+    NotSpecified,
 }
 
-fn attr_content(attrs: &Vec<Attribute>, name: &str) -> Option<String> {
-    attrs
-        .iter()
-        .find(|attr| attr.path().is_ident(name))
-        .and_then(|attr| Some(attr.parse_args::<LitStr>().expect("string literal").value()))
+fn parse_attributes(attrs: &Vec<Attribute>) -> FieldType {
+    for attr in attrs {
+        match &attr.meta {
+            Meta::Path(path) => {
+                if path.is_ident("anon") {
+                    return FieldType::Anonymous;
+                } else if path.is_ident("anon_vec") {
+                    return FieldType::AnonymousVec;
+                }
+            },
+            Meta::List(list) if list.path.is_ident("vec") => {
+                return FieldType::NamedVec(list
+                    .parse_args_with(
+                        Punctuated::<LitStr, Token![,]>::parse_terminated
+                    )
+                    .expect("#[vec(...)] must contain a list of string literals")
+                    .iter()
+                    .cloned()
+                    .collect()
+                );
+            },
+            _ => (),
+        }
+    }
+
+    FieldType::NotSpecified
 }
```
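`parse_attributes` distinguishes the two attribute spellings by their `syn::Meta` shape: bare markers like `#[anon]` and `#[anon_vec]` arrive as `Meta::Path`, the parenthesised `#[vec(...)]` list arrives as `Meta::List`, and anything else falls through to `FieldType::NotSpecified`. A small sketch of that classification, assuming syn 2 (not project code):

```rust
// Sketch only: how syn 2 represents the two attribute styles that
// parse_attributes tells apart.
use syn::{parse_quote, Attribute, Meta};

fn main() {
    let marker: Attribute = parse_quote!(#[anon]);
    let listed: Attribute = parse_quote!(#[vec("clearance", "clear")]);

    // A bare name parses as Meta::Path ...
    assert!(matches!(&marker.meta, Meta::Path(p) if p.is_ident("anon")));
    // ... while a parenthesised argument list parses as Meta::List.
    assert!(matches!(&listed.meta, Meta::List(l) if l.path.is_ident("vec")));
}
```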
Read side of the derive (`impl_read`):

```diff
@@ -4,8 +4,8 @@ use syn::ext::IdentExt;
 use syn::Type::Path;
 use syn::{Data, DeriveInput, Field, Fields};
 
-use crate::attr_content;
-use crate::attr_present;
+use crate::parse_attributes;
+use crate::FieldType;
 
 pub fn impl_read(input: &DeriveInput) -> TokenStream {
     let name = &input.ident;
@@ -46,34 +46,41 @@ fn impl_body(data: &Data) -> TokenStream {
 fn impl_field(field: &Field) -> TokenStream {
     let name = &field.ident;
     let name_str = name.as_ref().expect("field name").unraw();
+    let field_type = parse_attributes(&field.attrs);
 
-    if attr_present(&field.attrs, "anon") {
-        quote! {
-            #name: tokenizer.read_value()?,
-        }
-    } else if let Some(dsn_name) = attr_content(&field.attrs, "vec") {
-        quote! {
-            #name: tokenizer.read_named_array(#dsn_name)?,
-        }
-    } else if attr_present(&field.attrs, "anon_vec") {
-        quote! {
-            #name: tokenizer.read_array()?,
-        }
-    } else {
-        if let Path(type_path) = &field.ty {
-            let segments = &type_path.path.segments;
-            if segments.len() == 1 {
-                let ident = &segments.first().unwrap().ident;
-                if ident == "Option" {
-                    return quote! {
-                        #name: tokenizer.read_optional(stringify!(#name_str))?,
-                    };
-                }
-            }
-        }
-
-        quote! {
-            #name: tokenizer.read_named(stringify!(#name_str))?,
-        }
-    }
+    match field_type {
+        FieldType::Anonymous => {
+            quote! {
+                #name: tokenizer.read_value()?,
+            }
+        },
+        FieldType::AnonymousVec => {
+            quote! {
+                #name: tokenizer.read_array()?,
+            }
+        },
+        FieldType::NamedVec(valid_aliases) => {
+            quote! {
+                #name: tokenizer.read_array_with_alias(&[#(#valid_aliases),*])?,
+            }
+        },
+        FieldType::NotSpecified => {
+            if let Path(type_path) = &field.ty {
+                let segments = &type_path.path.segments;
+                if segments.len() == 1 {
+                    let ident = &segments.first().unwrap().ident;
+                    if ident == "Option" {
+                        return quote! {
+                            #name: tokenizer
+                                .read_optional(stringify!(#name_str))?,
+                        };
+                    }
+                }
+            }
+
+            quote! {
+                #name: tokenizer.read_named(stringify!(#name_str))?,
+            }
+        }
+    }
 }
```
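In the `NamedVec` arm, the `#(#valid_aliases),*` repetition splices every collected literal into a slice expression, so the generated initializer hands the whole alias list to `read_array_with_alias`. A rough standalone sketch of that expansion, assuming the quote/syn/proc-macro2 stack the derive already builds on (the `clearances` and `tokenizer` names mirror the generated code, but the program itself is illustrative):

```rust
// Sketch only: shows what the #(#valid_aliases),* repetition expands to
// inside the generated field initializer.
use proc_macro2::Span;
use quote::quote;
use syn::LitStr;

fn main() {
    let valid_aliases = vec![
        LitStr::new("clearance", Span::call_site()),
        LitStr::new("clear", Span::call_site()),
    ];
    let tokens = quote! {
        clearances: tokenizer.read_array_with_alias(&[#(#valid_aliases),*])?,
    };
    // The alias list ends up as a plain slice of string literals.
    let rendered = tokens.to_string();
    assert!(rendered.contains("read_array_with_alias"));
    assert!(rendered.contains("\"clearance\"") && rendered.contains("\"clear\""));
}
```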
Write side of the derive (`impl_write`):

```diff
@@ -4,8 +4,8 @@ use syn::ext::IdentExt;
 use syn::Type::Path;
 use syn::{Data, DeriveInput, Field, Fields};
 
-use crate::attr_content;
-use crate::attr_present;
+use crate::parse_attributes;
+use crate::FieldType;
 
 pub fn impl_write(input: &DeriveInput) -> TokenStream {
     let name = &input.ident;
@@ -44,34 +44,42 @@ fn impl_body(data: &Data) -> TokenStream {
 fn impl_field(field: &Field) -> TokenStream {
     let name = &field.ident;
     let name_str = name.as_ref().expect("field name").unraw();
+    let field_type = parse_attributes(&field.attrs);
 
-    if attr_present(&field.attrs, "anon") {
-        quote! {
-            writer.write_value(&self.#name)?;
-        }
-    } else if let Some(dsn_name) = attr_content(&field.attrs, "vec") {
-        quote! {
-            writer.write_named_array(#dsn_name, &self.#name)?;
-        }
-    } else if attr_present(&field.attrs, "anon_vec") {
-        quote! {
-            writer.write_array(&self.#name)?;
-        }
-    } else {
-        if let Path(type_path) = &field.ty {
-            let segments = &type_path.path.segments;
-            if segments.len() == 1 {
-                let ident = &segments.first().unwrap().ident;
-                if ident == "Option" {
-                    return quote! {
-                        writer.write_optional(stringify!(#name_str), &self.#name)?;
-                    };
-                }
-            }
-        }
-
-        quote! {
-            writer.write_named(stringify!(#name_str), &self.#name)?;
-        }
-    }
+    match field_type {
+        FieldType::Anonymous => {
+            quote! {
+                writer.write_value(&self.#name)?;
+            }
+        },
+        FieldType::AnonymousVec => {
+            quote! {
+                writer.write_array(&self.#name)?;
+            }
+        },
+        FieldType::NamedVec(valid_aliases) => {
+            let canonical_name = &valid_aliases[0];
+            quote! {
+                writer.write_named_array(#canonical_name, &self.#name)?;
+            }
+        },
+        FieldType::NotSpecified => {
+            if let Path(type_path) = &field.ty {
+                let segments = &type_path.path.segments;
+                if segments.len() == 1 {
+                    let ident = &segments.first().unwrap().ident;
+                    if ident == "Option" {
+                        return quote! {
+                            writer
+                                .write_optional(stringify!(#name_str), &self.#name)?;
+                        };
+                    }
+                }
+            }
+
+            quote! {
+                writer.write_named(stringify!(#name_str), &self.#name)?;
+            }
+        }
+    }
 }
```
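One asymmetry worth noting: the read derive accepts any of the listed names (via `read_array_with_alias`), while the write derive binds `canonical_name` to `valid_aliases[0]`, so the generated writer always emits the first name given in `#[vec(...)]`. In effect, a file read under an alias is written back with the canonical keyword.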
specctra tokenizer (`ListTokenizer`):

```diff
@@ -387,6 +387,13 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
     pub fn read_named_array<T: ReadDsn<R>>(
         &mut self,
         name: &'static str,
+    ) -> Result<Vec<T>, ParseErrorContext> {
+        self.read_array_with_alias(&[name])
+    }
+
+    pub fn read_array_with_alias<T: ReadDsn<R>>(
+        &mut self,
+        valid_names: &[&'static str],
     ) -> Result<Vec<T>, ParseErrorContext> {
         let mut array = Vec::<T>::new();
         loop {
@@ -395,7 +402,7 @@ impl<R: std::io::BufRead> ListTokenizer<R> {
                 name: ref actual_name,
             } = input.token
             {
-                if actual_name == name {
+                if valid_names.contains(&actual_name.to_ascii_lowercase().as_ref()) {
                     let value = self.read_value::<T>()?;
                     self.consume_token()?.expect_end()?;
                     array.push(value);
```
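Compared to the old exact comparison against a single `name`, the new check both consults the whole alias list and lowercases the incoming keyword first, so matching is case-insensitive. A standalone sketch of just that test (the project performs it inline in the tokenizer loop; this helper is illustrative):

```rust
// Sketch only: the alias test the tokenizer now applies to each named
// list encountered while reading an array.
fn matches_alias(actual_name: &str, valid_names: &[&str]) -> bool {
    let lowered = actual_name.to_ascii_lowercase();
    valid_names.contains(&lowered.as_str())
}

fn main() {
    assert!(matches_alias("clearance", &["clearance", "clear"]));
    assert!(matches_alias("CLEAR", &["clearance", "clear"]));
    assert!(!matches_alias("width", &["clearance", "clear"]));
}
```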
Structure types (`StructureRule`):

```diff
@@ -159,7 +159,7 @@ pub struct Grid {
 #[derive(ReadDsn, WriteSes, Debug)]
 pub struct StructureRule {
     pub width: Option<f32>,
-    #[vec("clearance")]
+    #[vec("clearance", "clear")]
     pub clearances: Vec<Clearance>,
 }
 
```
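This field is the motivating case from the commit message: `clearances` can now be filled from rule entries written with either keyword. As a rough illustration (the values are made up, and it is an assumption that the abbreviated `clear` keyword is what appears in the DSN/SES input this aims to accept), both clearance entries in a rule list like the one below would be collected into `clearances`, and `WriteSes` would emit them back under the canonical `clearance` name:

```
(rule
  (width 0.25)
  (clearance 0.2)
  (clear 0.2)
)
```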