mirror of https://git.sr.ht/~stygianentity/bincode
Fixes for #427 (#428)

* Made bincode_derive handle empty doc-comment lines correctly
* Made bincode_derive properly support const generics
* Added support for enums with variants that have fixed values

Parent: 7174f6422d
Commit: b4c46a789a
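
For context, a minimal sketch of the kind of enum this change enables (it mirrors the CStyleEnum test added later in this commit): explicit discriminants become the encoded variant index, and variants without a value continue counting from the previous one.

    #[derive(bincode::Encode, bincode::Decode, PartialEq, Debug)]
    enum CStyleEnum {
        A = 1,
        B = 2,
        C,     // encoded as 3
        D = 5,
        E,     // encoded as 6
    }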
@ -1,4 +1,4 @@
-use crate::generate::{FnSelfArg, Generator};
+use crate::generate::{FnSelfArg, Generator, StreamBuilder};
 use crate::parse::{EnumVariant, Fields};
 use crate::prelude::*;
 use crate::Result;
@ -10,9 +10,15 @@ pub struct DeriveEnum {
 }

 impl DeriveEnum {
-    pub fn generate_encode(self, generator: &mut Generator) -> Result<()> {
-        let DeriveEnum { variants } = self;
+    fn iter_fields(&self) -> EnumVariantIterator {
+        EnumVariantIterator {
+            idx: 0,
+            last_val: None,
+            variants: &self.variants,
+        }
+    }

+    pub fn generate_encode(self, generator: &mut Generator) -> Result<()> {
         generator
             .impl_for("bincode::enc::Encode")
             .unwrap()
@ -25,7 +31,7 @@ impl DeriveEnum {
             fn_body.ident_str("match");
             fn_body.ident_str("self");
             fn_body.group(Delimiter::Brace, |match_body| {
-                for (variant_index, variant) in variants.into_iter().enumerate() {
+                for (variant_index, variant) in self.iter_fields() {
                     // Self::Variant
                     match_body.ident_str("Self");
                     match_body.puncts("::");
@ -62,11 +68,16 @@ impl DeriveEnum {
                     // }
                     match_body.group(Delimiter::Brace, |body| {
                         // variant index
-                        body.push_parsed(format!(
-                            "<u32 as bincode::enc::Encode>::encode(&{}, &mut encoder)?;",
-                            variant_index
-                        ))
-                        .unwrap();
+                        body.push_parsed("<u32 as bincode::enc::Encode>::encode")
+                            .unwrap();
+                        body.group(Delimiter::Parenthesis, |args| {
+                            args.punct('&');
+                            args.group(Delimiter::Parenthesis, |num| num.extend(variant_index));
+                            args.punct(',');
+                            args.push_parsed("&mut encoder").unwrap();
+                        });
+                        body.punct('?');
+                        body.punct(';');
                         // If we have any fields, encode them all one by one
                         for field_name in variant.fields.names() {
                             body.push_parsed(format!(
@ -85,8 +96,73 @@ impl DeriveEnum {
         Ok(())
     }

+    /// Build the catch-all case for an int-to-enum decode implementation
+    fn invalid_variant_case(&self, enum_name: &str, result: &mut StreamBuilder) {
+        // we'll be generating:
+        // variant => Err(
+        //     bincode::error::DecodeError::UnexpectedVariant {
+        //         found: variant,
+        //         type_name: <enum_name>
+        //         allowed: ...,
+        //     }
+        // )
+        //
+        // Where allowed is either:
+        // - bincode::error::AllowedEnumVariants::Range { min: 0, max: <max> }
+        //   if we have no fixed value variants
+        // - bincode::error::AllowedEnumVariants::Allowed(&[<variant1>, <variant2>, ...])
+        //   if we have fixed value variants
+        result.ident_str("variant");
+        result.puncts("=>");
+        result.ident_str("Err");
+        result.group(Delimiter::Parenthesis, |err_inner| {
+            err_inner
+                .push_parsed("bincode::error::DecodeError::UnexpectedVariant")
+                .unwrap();
+            err_inner.group(Delimiter::Brace, |variant_inner| {
+                variant_inner.ident_str("found");
+                variant_inner.punct(':');
+                variant_inner.ident_str("variant");
+                variant_inner.punct(',');
+
+                variant_inner.ident_str("type_name");
+                variant_inner.punct(':');
+                variant_inner.lit_str(enum_name);
+                variant_inner.punct(',');
+
+                variant_inner.ident_str("allowed");
+                variant_inner.punct(':');
+
+                if self.variants.iter().any(|i| i.has_fixed_value()) {
+                    // we have fixed values, implement AllowedEnumVariants::Allowed
+                    variant_inner
+                        .push_parsed("bincode::error::AllowedEnumVariants::Allowed")
+                        .unwrap();
+                    variant_inner.group(Delimiter::Parenthesis, |allowed_inner| {
+                        allowed_inner.punct('&');
+                        allowed_inner.group(Delimiter::Bracket, |allowed_slice| {
+                            for (idx, (ident, _)) in self.iter_fields().enumerate() {
+                                if idx != 0 {
+                                    allowed_slice.punct(',');
+                                }
+                                allowed_slice.extend(ident);
+                            }
+                        });
+                    });
+                } else {
+                    // no fixed values, implement a range
+                    variant_inner
+                        .push_parsed(format!(
+                            "bincode::error::AllowedEnumVariants::Range {{ min: 0, max: {} }}",
+                            self.variants.len() - 1
+                        ))
+                        .unwrap();
+                }
+            })
+        });
+    }
+
     pub fn generate_decode(self, generator: &mut Generator) -> Result<()> {
-        let DeriveEnum { variants } = self;
         let enum_name = generator.target_name().to_string();

         if generator.has_lifetimes() {
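Illustration only (not part of the diff): for an enum with fixed values, the fallback arm that invalid_variant_case generates ends up looking roughly like the following, using the CStyleEnum type from the tests added later in this commit.

    variant => Err(bincode::error::DecodeError::UnexpectedVariant {
        found: variant,
        type_name: "CStyleEnum",
        allowed: bincode::error::AllowedEnumVariants::Allowed(&[1, 2, 3, 5, 6]),
    }),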
@ -103,9 +179,14 @@ impl DeriveEnum {
                 .push_parsed("let variant_index = <u32 as bincode::de::Decode>::decode(&mut decoder)?;").unwrap();
             fn_builder.push_parsed("match variant_index").unwrap();
             fn_builder.group(Delimiter::Brace, |variant_case| {
-                for (idx, variant) in variants.iter().enumerate() {
+                for (mut variant_index, variant) in self.iter_fields() {
                     // idx => Ok(..)
-                    variant_case.lit_u32(idx as u32);
+                    if variant_index.len() > 1 {
+                        variant_case.push_parsed("x if x == ").unwrap();
+                        variant_case.extend(variant_index);
+                    } else {
+                        variant_case.push(variant_index.remove(0));
+                    }
                     variant_case.puncts("=>");
                     variant_case.ident_str("Ok");
                     variant_case.group(Delimiter::Parenthesis, |variant_case_body| {
@ -133,11 +214,7 @@ impl DeriveEnum {
                 }

                 // invalid idx
-                variant_case.push_parsed(format!(
-                    "variant => return Err(bincode::error::DecodeError::UnexpectedVariant {{ min: 0, max: {}, found: variant, type_name: {:?} }})",
-                    variants.len() - 1,
-                    enum_name.to_string()
-                )).unwrap();
+                self.invalid_variant_case(&enum_name, variant_case);
             });
         }).unwrap();
     } else {
@ -153,9 +230,14 @@ impl DeriveEnum {
                 .push_parsed("let variant_index = <u32 as bincode::de::Decode>::decode(&mut decoder)?;").unwrap();
             fn_builder.push_parsed("match variant_index").unwrap();
             fn_builder.group(Delimiter::Brace, |variant_case| {
-                for (idx, variant) in variants.iter().enumerate() {
+                for (mut variant_index, variant) in self.iter_fields() {
                     // idx => Ok(..)
-                    variant_case.lit_u32(idx as u32);
+                    if variant_index.len() > 1 {
+                        variant_case.push_parsed("x if x == ").unwrap();
+                        variant_case.extend(variant_index);
+                    } else {
+                        variant_case.push(variant_index.remove(0));
+                    }
                     variant_case.puncts("=>");
                     variant_case.ident_str("Ok");
                     variant_case.group(Delimiter::Parenthesis, |variant_case_body| {
@ -183,11 +265,7 @@ impl DeriveEnum {
                 }

                 // invalid idx
-                variant_case.push_parsed(format!(
-                    "variant => return Err(bincode::error::DecodeError::UnexpectedVariant {{ min: 0, max: {}, found: variant, type_name: {:?} }})",
-                    variants.len() - 1,
-                    enum_name.to_string()
-                )).unwrap();
+                self.invalid_variant_case(&enum_name, variant_case);
             });
         }).unwrap();
     }
@ -195,3 +273,36 @@ impl DeriveEnum {
         Ok(())
     }
 }
+
+struct EnumVariantIterator<'a> {
+    variants: &'a [EnumVariant],
+    idx: usize,
+    last_val: Option<(Literal, u32)>,
+}
+
+impl<'a> Iterator for EnumVariantIterator<'a> {
+    type Item = (Vec<TokenTree>, &'a EnumVariant);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let idx = self.idx;
+        let variant = self.variants.get(self.idx)?;
+        self.idx += 1;
+
+        let tokens = if let Fields::Integer(lit) = &variant.fields {
+            let tree = TokenTree::Literal(lit.clone());
+            self.last_val = Some((lit.clone(), 0));
+            vec![tree]
+        } else if let Some((lit, add)) = self.last_val.as_mut() {
+            *add += 1;
+            vec![
+                TokenTree::Literal(lit.clone()),
+                TokenTree::Punct(Punct::new('+', Spacing::Alone)),
+                TokenTree::Literal(Literal::u32_suffixed(*add)),
+            ]
+        } else {
+            vec![TokenTree::Literal(Literal::u32_suffixed(idx as u32))]
+        };

+        Some((tokens, variant))
+    }
+}
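Illustrative note (not part of the diff): for a hypothetical enum such as

    enum Foo {
        A = 1,
        B,
        C = 5,
        D,
    }

EnumVariantIterator yields the variant-index tokens `1`, `1 + 1u32`, `5`, `5 + 1u32`; a variant without an explicit value continues counting from the most recent fixed value, matching Rust's own discriminant rules.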
@ -4,10 +4,19 @@ use std::fmt;
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub enum Error {
|
pub enum Error {
|
||||||
UnknownDataType(Span),
|
UnknownDataType(Span),
|
||||||
InvalidRustSyntax(Span),
|
InvalidRustSyntax { span: Span, expected: String },
|
||||||
ExpectedIdent(Span),
|
ExpectedIdent(Span),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl Error {
|
||||||
|
pub fn wrong_token<T>(token: Option<&TokenTree>, expected: &'static str) -> Result<T, Self> {
|
||||||
|
Err(Self::InvalidRustSyntax {
|
||||||
|
span: token.map(|t| t.span()).unwrap_or_else(Span::call_site),
|
||||||
|
expected: format!("{}, got {:?}", expected, token),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// helper functions for the unit tests
|
// helper functions for the unit tests
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
impl Error {
|
impl Error {
|
||||||
|
|
@ -16,7 +25,7 @@ impl Error {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_invalid_rust_syntax(&self) -> bool {
|
pub fn is_invalid_rust_syntax(&self) -> bool {
|
||||||
matches!(self, Error::InvalidRustSyntax(_))
|
matches!(self, Error::InvalidRustSyntax { .. })
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -26,7 +35,9 @@ impl fmt::Display for Error {
|
||||||
Self::UnknownDataType(_) => {
|
Self::UnknownDataType(_) => {
|
||||||
write!(fmt, "Unknown data type, only enum and struct are supported")
|
write!(fmt, "Unknown data type, only enum and struct are supported")
|
||||||
}
|
}
|
||||||
Self::InvalidRustSyntax(_) => write!(fmt, "Invalid rust syntax"),
|
Self::InvalidRustSyntax { expected, .. } => {
|
||||||
|
write!(fmt, "Invalid rust syntax, expected {}", expected)
|
||||||
|
}
|
||||||
Self::ExpectedIdent(_) => write!(fmt, "Expected ident"),
|
Self::ExpectedIdent(_) => write!(fmt, "Expected ident"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -37,7 +48,7 @@ impl Error {
|
||||||
let maybe_span = match &self {
|
let maybe_span = match &self {
|
||||||
Error::UnknownDataType(span)
|
Error::UnknownDataType(span)
|
||||||
| Error::ExpectedIdent(span)
|
| Error::ExpectedIdent(span)
|
||||||
| Error::InvalidRustSyntax(span) => Some(*span),
|
| Error::InvalidRustSyntax { span, .. } => Some(*span),
|
||||||
};
|
};
|
||||||
self.throw_with_span(maybe_span.unwrap_or_else(Span::call_site))
|
self.throw_with_span(maybe_span.unwrap_or_else(Span::call_site))
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -125,12 +125,6 @@ impl StreamBuilder {
|
||||||
.extend([TokenTree::Literal(Literal::string(str.as_ref()))]);
|
.extend([TokenTree::Literal(Literal::string(str.as_ref()))]);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Add an `u32` value to the stream.
|
|
||||||
pub fn lit_u32(&mut self, val: u32) {
|
|
||||||
self.stream
|
|
||||||
.extend([TokenTree::Literal(Literal::u32_unsuffixed(val))]);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Add an `usize` value to the stream.
|
/// Add an `usize` value to the stream.
|
||||||
pub fn lit_usize(&mut self, val: usize) {
|
pub fn lit_usize(&mut self, val: usize) {
|
||||||
self.stream
|
self.stream
|
||||||
|
|
|
||||||
|
|
@ -31,7 +31,7 @@ pub fn derive_encode(input: proc_macro::TokenStream) -> proc_macro::TokenStream
|
||||||
fn derive_encode_inner(input: TokenStream) -> Result<TokenStream> {
|
fn derive_encode_inner(input: TokenStream) -> Result<TokenStream> {
|
||||||
let source = &mut input.into_iter().peekable();
|
let source = &mut input.into_iter().peekable();
|
||||||
|
|
||||||
let _attributes = parse::Attributes::try_take(source)?;
|
let _attributes = parse::Attribute::try_take(source)?;
|
||||||
let _visibility = parse::Visibility::try_take(source)?;
|
let _visibility = parse::Visibility::try_take(source)?;
|
||||||
let (datatype, name) = parse::DataType::take(source)?;
|
let (datatype, name) = parse::DataType::take(source)?;
|
||||||
let generics = parse::Generics::try_take(source)?;
|
let generics = parse::Generics::try_take(source)?;
|
||||||
|
|
@ -72,7 +72,7 @@ pub fn derive_decode(input: proc_macro::TokenStream) -> proc_macro::TokenStream
|
||||||
fn derive_decode_inner(input: TokenStream) -> Result<TokenStream> {
|
fn derive_decode_inner(input: TokenStream) -> Result<TokenStream> {
|
||||||
let source = &mut input.into_iter().peekable();
|
let source = &mut input.into_iter().peekable();
|
||||||
|
|
||||||
let _attributes = parse::Attributes::try_take(source)?;
|
let _attributes = parse::Attribute::try_take(source)?;
|
||||||
let _visibility = parse::Visibility::try_take(source)?;
|
let _visibility = parse::Visibility::try_take(source)?;
|
||||||
let (datatype, name) = parse::DataType::take(source)?;
|
let (datatype, name) = parse::DataType::take(source)?;
|
||||||
let generics = parse::Generics::try_take(source)?;
|
let generics = parse::Generics::try_take(source)?;
|
||||||
|
|
|
||||||
|
|
@ -1,38 +1,48 @@
|
||||||
use super::assume_group;
|
use super::{assume_group, assume_punct};
|
||||||
use crate::parse::consume_punct_if;
|
use crate::parse::consume_punct_if;
|
||||||
use crate::prelude::{Delimiter, Group, Punct, TokenTree};
|
use crate::prelude::{Delimiter, Group, Punct, TokenTree};
|
||||||
use crate::{Error, Result};
|
use crate::{Error, Result};
|
||||||
use std::iter::Peekable;
|
use std::iter::Peekable;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Attributes {
|
pub struct Attribute {
|
||||||
// we don't use these fields yet
|
// we don't use these fields yet
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
punct: Punct,
|
punct: Punct,
|
||||||
#[allow(dead_code)]
|
#[allow(dead_code)]
|
||||||
tokens: Group,
|
tokens: Option<Group>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Attributes {
|
impl Attribute {
|
||||||
pub fn try_take(input: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Result<Option<Self>> {
|
pub fn try_take(input: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Result<Vec<Self>> {
|
||||||
if let Some(punct) = consume_punct_if(input, '#') {
|
let mut result = Vec::new();
|
||||||
// found attributes, next token should be a [] group
|
|
||||||
if let Some(TokenTree::Group(g)) = input.peek() {
|
while let Some(punct) = consume_punct_if(input, '#') {
|
||||||
if g.delimiter() != Delimiter::Bracket {
|
match input.peek() {
|
||||||
return Err(Error::InvalidRustSyntax(g.span()));
|
Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Bracket => {
|
||||||
}
|
result.push(Attribute {
|
||||||
return Ok(Some(Attributes {
|
|
||||||
punct,
|
punct,
|
||||||
tokens: assume_group(input.next()),
|
tokens: Some(assume_group(input.next())),
|
||||||
}));
|
});
|
||||||
}
|
}
|
||||||
// expected [] group, found something else
|
Some(TokenTree::Group(g)) => {
|
||||||
return Err(Error::InvalidRustSyntax(match input.peek() {
|
return Err(Error::InvalidRustSyntax {
|
||||||
Some(next_token) => next_token.span(),
|
span: g.span(),
|
||||||
None => punct.span(),
|
expected: format!("[] bracket, got {:?}", g.delimiter()),
|
||||||
}));
|
});
|
||||||
}
|
}
|
||||||
Ok(None)
|
Some(TokenTree::Punct(p)) if p.as_char() == '#' => {
|
||||||
|
// sometimes with empty lines of doc comments, we get two #'s in a row
|
||||||
|
// add an empty attributes and continue to the next loop
|
||||||
|
result.push(Attribute {
|
||||||
|
punct: assume_punct(input.next(), '#'),
|
||||||
|
tokens: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
token => return Error::wrong_token(token, "[] group or next # attribute"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(result)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -41,14 +51,14 @@ fn test_attributes_try_take() {
|
||||||
use crate::token_stream;
|
use crate::token_stream;
|
||||||
|
|
||||||
let stream = &mut token_stream("struct Foo;");
|
let stream = &mut token_stream("struct Foo;");
|
||||||
assert!(Attributes::try_take(stream).unwrap().is_none());
|
assert!(Attribute::try_take(stream).unwrap().is_empty());
|
||||||
match stream.next().unwrap() {
|
match stream.next().unwrap() {
|
||||||
TokenTree::Ident(i) => assert_eq!(i, "struct"),
|
TokenTree::Ident(i) => assert_eq!(i, "struct"),
|
||||||
x => panic!("Expected ident, found {:?}", x),
|
x => panic!("Expected ident, found {:?}", x),
|
||||||
}
|
}
|
||||||
|
|
||||||
let stream = &mut token_stream("#[cfg(test)] struct Foo;");
|
let stream = &mut token_stream("#[cfg(test)] struct Foo;");
|
||||||
assert!(Attributes::try_take(stream).unwrap().is_some());
|
assert!(!Attribute::try_take(stream).unwrap().is_empty());
|
||||||
match stream.next().unwrap() {
|
match stream.next().unwrap() {
|
||||||
TokenTree::Ident(i) => assert_eq!(i, "struct"),
|
TokenTree::Ident(i) => assert_eq!(i, "struct"),
|
||||||
x => panic!("Expected ident, found {:?}", x),
|
x => panic!("Expected ident, found {:?}", x),
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,8 @@
|
||||||
use super::{assume_group, assume_ident, read_tokens_until_punct, Attributes, Visibility};
|
use super::{
|
||||||
|
assume_group, assume_ident, assume_punct, read_tokens_until_punct, Attribute, Visibility,
|
||||||
|
};
|
||||||
use crate::parse::consume_punct_if;
|
use crate::parse::consume_punct_if;
|
||||||
use crate::prelude::{Delimiter, Ident, Span, TokenTree};
|
use crate::prelude::{Delimiter, Ident, Literal, Span, TokenTree};
|
||||||
use crate::{Error, Result};
|
use crate::{Error, Result};
|
||||||
use std::iter::Peekable;
|
use std::iter::Peekable;
|
||||||
|
|
||||||
|
|
@ -18,19 +20,19 @@ impl StructBody {
|
||||||
fields: Fields::Unit,
|
fields: Fields::Unit,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
Some(t) => {
|
token => return Error::wrong_token(token, "group or punct"),
|
||||||
return Err(Error::InvalidRustSyntax(t.span()));
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
return Err(Error::InvalidRustSyntax(Span::call_site()));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
let group = assume_group(input.next());
|
let group = assume_group(input.next());
|
||||||
let mut stream = group.stream().into_iter().peekable();
|
let mut stream = group.stream().into_iter().peekable();
|
||||||
let fields = match group.delimiter() {
|
let fields = match group.delimiter() {
|
||||||
Delimiter::Brace => Fields::Struct(UnnamedField::parse_with_name(&mut stream)?),
|
Delimiter::Brace => Fields::Struct(UnnamedField::parse_with_name(&mut stream)?),
|
||||||
Delimiter::Parenthesis => Fields::Tuple(UnnamedField::parse(&mut stream)?),
|
Delimiter::Parenthesis => Fields::Tuple(UnnamedField::parse(&mut stream)?),
|
||||||
_ => return Err(Error::InvalidRustSyntax(group.span())),
|
found => {
|
||||||
|
return Err(Error::InvalidRustSyntax {
|
||||||
|
span: group.span(),
|
||||||
|
expected: format!("brace or parenthesis, found {:?}", found),
|
||||||
|
})
|
||||||
|
}
|
||||||
};
|
};
|
||||||
Ok(StructBody { fields })
|
Ok(StructBody { fields })
|
||||||
}
|
}
|
||||||
|
|
@ -124,37 +126,57 @@ impl EnumBody {
|
||||||
variants: Vec::new(),
|
variants: Vec::new(),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
Some(t) => {
|
token => return Error::wrong_token(token, "group or ;"),
|
||||||
return Err(Error::InvalidRustSyntax(t.span()));
|
|
||||||
}
|
|
||||||
_ => {
|
|
||||||
return Err(Error::InvalidRustSyntax(Span::call_site()));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
let group = assume_group(input.next());
|
let group = assume_group(input.next());
|
||||||
let mut variants = Vec::new();
|
let mut variants = Vec::new();
|
||||||
let stream = &mut group.stream().into_iter().peekable();
|
let stream = &mut group.stream().into_iter().peekable();
|
||||||
while stream.peek().is_some() {
|
while stream.peek().is_some() {
|
||||||
let attributes = Attributes::try_take(stream)?;
|
let attributes = Attribute::try_take(stream)?;
|
||||||
let ident = match stream.peek() {
|
let ident = match stream.peek() {
|
||||||
Some(TokenTree::Ident(_)) => assume_ident(stream.next()),
|
Some(TokenTree::Ident(_)) => assume_ident(stream.next()),
|
||||||
Some(x) => return Err(Error::InvalidRustSyntax(x.span())),
|
token => return Error::wrong_token(token, "ident"),
|
||||||
None => return Err(Error::InvalidRustSyntax(Span::call_site())),
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut fields = Fields::Unit;
|
let mut fields = Fields::Unit;
|
||||||
|
|
||||||
if let Some(TokenTree::Group(_)) = stream.peek() {
|
match stream.peek() {
|
||||||
|
Some(TokenTree::Group(_)) => {
|
||||||
let group = assume_group(stream.next());
|
let group = assume_group(stream.next());
|
||||||
let stream = &mut group.stream().into_iter().peekable();
|
let stream = &mut group.stream().into_iter().peekable();
|
||||||
match group.delimiter() {
|
match group.delimiter() {
|
||||||
Delimiter::Brace => {
|
Delimiter::Brace => {
|
||||||
fields = Fields::Struct(UnnamedField::parse_with_name(stream)?)
|
fields = Fields::Struct(UnnamedField::parse_with_name(stream)?)
|
||||||
}
|
}
|
||||||
Delimiter::Parenthesis => fields = Fields::Tuple(UnnamedField::parse(stream)?),
|
Delimiter::Parenthesis => {
|
||||||
_ => return Err(Error::InvalidRustSyntax(group.span())),
|
fields = Fields::Tuple(UnnamedField::parse(stream)?)
|
||||||
|
}
|
||||||
|
delim => {
|
||||||
|
return Err(Error::InvalidRustSyntax {
|
||||||
|
span: group.span(),
|
||||||
|
expected: format!("Brace or parenthesis, found {:?}", delim),
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
Some(TokenTree::Punct(p)) if p.as_char() == '=' => {
|
||||||
|
assume_punct(stream.next(), '=');
|
||||||
|
match stream.next() {
|
||||||
|
Some(TokenTree::Literal(lit)) => {
|
||||||
|
fields = Fields::Integer(lit);
|
||||||
|
}
|
||||||
|
token => return Error::wrong_token(token.as_ref(), "literal"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Some(TokenTree::Punct(p)) if p.as_char() == ',' => {
|
||||||
|
// next field
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
// group done
|
||||||
|
}
|
||||||
|
token => return Error::wrong_token(token, "group, comma or ="),
|
||||||
|
}
|
||||||
|
|
||||||
consume_punct_if(stream, ',');
|
consume_punct_if(stream, ',');
|
||||||
|
|
||||||
variants.push(EnumVariant {
|
variants.push(EnumVariant {
|
||||||
|
|
@ -209,7 +231,13 @@ fn test_enum_body_take() {
|
||||||
pub struct EnumVariant {
|
pub struct EnumVariant {
|
||||||
pub name: Ident,
|
pub name: Ident,
|
||||||
pub fields: Fields,
|
pub fields: Fields,
|
||||||
pub attributes: Option<Attributes>,
|
pub attributes: Vec<Attribute>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl EnumVariant {
|
||||||
|
pub fn has_fixed_value(&self) -> bool {
|
||||||
|
matches!(&self.fields, Fields::Integer(_))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
|
|
@ -223,6 +251,14 @@ pub enum Fields {
|
||||||
/// ```
|
/// ```
|
||||||
Unit,
|
Unit,
|
||||||
|
|
||||||
|
/// Variant with an integer value.
|
||||||
|
/// ```rs
|
||||||
|
/// enum Foo {
|
||||||
|
/// Baz = 5,
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
Integer(Literal),
|
||||||
|
|
||||||
/// Tuple-like variant
|
/// Tuple-like variant
|
||||||
/// ```rs
|
/// ```rs
|
||||||
/// enum Foo {
|
/// enum Foo {
|
||||||
|
|
@ -258,7 +294,7 @@ impl Fields {
|
||||||
.iter()
|
.iter()
|
||||||
.map(|(ident, _)| IdentOrIndex::Ident(ident))
|
.map(|(ident, _)| IdentOrIndex::Ident(ident))
|
||||||
.collect(),
|
.collect(),
|
||||||
Self::Unit => Vec::new(),
|
Self::Unit | Self::Integer(_) => Vec::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -266,7 +302,7 @@ impl Fields {
|
||||||
match self {
|
match self {
|
||||||
Self::Tuple(_) => Some(Delimiter::Parenthesis),
|
Self::Tuple(_) => Some(Delimiter::Parenthesis),
|
||||||
Self::Struct(_) => Some(Delimiter::Brace),
|
Self::Struct(_) => Some(Delimiter::Brace),
|
||||||
Self::Unit => None,
|
Self::Unit | Self::Integer(_) => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -282,6 +318,7 @@ impl Fields {
|
||||||
Self::Tuple(fields) => fields.len(),
|
Self::Tuple(fields) => fields.len(),
|
||||||
Self::Struct(fields) => fields.len(),
|
Self::Struct(fields) => fields.len(),
|
||||||
Self::Unit => 0,
|
Self::Unit => 0,
|
||||||
|
Self::Integer(_) => 0,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -290,6 +327,7 @@ impl Fields {
|
||||||
Self::Tuple(fields) => fields.get(index).map(|f| (None, f)),
|
Self::Tuple(fields) => fields.get(index).map(|f| (None, f)),
|
||||||
Self::Struct(fields) => fields.get(index).map(|(ident, field)| (Some(ident), field)),
|
Self::Struct(fields) => fields.get(index).map(|(ident, field)| (Some(ident), field)),
|
||||||
Self::Unit => None,
|
Self::Unit => None,
|
||||||
|
Self::Integer(_) => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -298,7 +336,7 @@ impl Fields {
|
||||||
pub struct UnnamedField {
|
pub struct UnnamedField {
|
||||||
pub vis: Visibility,
|
pub vis: Visibility,
|
||||||
pub r#type: Vec<TokenTree>,
|
pub r#type: Vec<TokenTree>,
|
||||||
pub attributes: Option<Attributes>,
|
pub attributes: Vec<Attribute>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl UnnamedField {
|
impl UnnamedField {
|
||||||
|
|
@ -307,20 +345,24 @@ impl UnnamedField {
|
||||||
) -> Result<Vec<(Ident, Self)>> {
|
) -> Result<Vec<(Ident, Self)>> {
|
||||||
let mut result = Vec::new();
|
let mut result = Vec::new();
|
||||||
loop {
|
loop {
|
||||||
let attributes = Attributes::try_take(input)?;
|
let attributes = Attribute::try_take(input)?;
|
||||||
let vis = Visibility::try_take(input)?;
|
let vis = Visibility::try_take(input)?;
|
||||||
|
|
||||||
let ident = match input.peek() {
|
let ident = match input.peek() {
|
||||||
Some(TokenTree::Ident(_)) => assume_ident(input.next()),
|
Some(TokenTree::Ident(_)) => assume_ident(input.next()),
|
||||||
Some(x) => return Err(Error::InvalidRustSyntax(x.span())),
|
Some(x) => {
|
||||||
|
return Err(Error::InvalidRustSyntax {
|
||||||
|
span: x.span(),
|
||||||
|
expected: format!("ident or end of group, got {:?}", x),
|
||||||
|
})
|
||||||
|
}
|
||||||
None => break,
|
None => break,
|
||||||
};
|
};
|
||||||
match input.peek() {
|
match input.peek() {
|
||||||
Some(TokenTree::Punct(p)) if p.as_char() == ':' => {
|
Some(TokenTree::Punct(p)) if p.as_char() == ':' => {
|
||||||
input.next();
|
input.next();
|
||||||
}
|
}
|
||||||
Some(x) => return Err(Error::InvalidRustSyntax(x.span())),
|
token => return Error::wrong_token(token, ":"),
|
||||||
None => return Err(Error::InvalidRustSyntax(Span::call_site())),
|
|
||||||
}
|
}
|
||||||
let r#type = read_tokens_until_punct(input, &[','])?;
|
let r#type = read_tokens_until_punct(input, &[','])?;
|
||||||
consume_punct_if(input, ',');
|
consume_punct_if(input, ',');
|
||||||
|
|
@ -339,7 +381,7 @@ impl UnnamedField {
|
||||||
pub fn parse(input: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Result<Vec<Self>> {
|
pub fn parse(input: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Result<Vec<Self>> {
|
||||||
let mut result = Vec::new();
|
let mut result = Vec::new();
|
||||||
while input.peek().is_some() {
|
while input.peek().is_some() {
|
||||||
let attributes = Attributes::try_take(input)?;
|
let attributes = Attribute::try_take(input)?;
|
||||||
let vis = Visibility::try_take(input)?;
|
let vis = Visibility::try_take(input)?;
|
||||||
|
|
||||||
let r#type = read_tokens_until_punct(input, &[','])?;
|
let r#type = read_tokens_until_punct(input, &[','])?;
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
use crate::prelude::{Ident, Span, TokenTree};
|
use crate::prelude::{Ident, TokenTree};
|
||||||
use crate::{Error, Result};
|
use crate::{Error, Result};
|
||||||
use std::iter::Peekable;
|
use std::iter::Peekable;
|
||||||
|
|
||||||
|
|
@ -10,24 +10,19 @@ pub enum DataType {
|
||||||
|
|
||||||
impl DataType {
|
impl DataType {
|
||||||
pub fn take(input: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Result<(Self, Ident)> {
|
pub fn take(input: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Result<(Self, Ident)> {
|
||||||
if let Some(TokenTree::Ident(ident)) = input.peek() {
|
if let Some(TokenTree::Ident(_)) = input.peek() {
|
||||||
|
let ident = super::assume_ident(input.next());
|
||||||
let result = match ident.to_string().as_str() {
|
let result = match ident.to_string().as_str() {
|
||||||
"struct" => DataType::Struct,
|
"struct" => DataType::Struct,
|
||||||
"enum" => DataType::Enum,
|
"enum" => DataType::Enum,
|
||||||
_ => return Err(Error::UnknownDataType(ident.span())),
|
_ => return Err(Error::UnknownDataType(ident.span())),
|
||||||
};
|
};
|
||||||
let ident = super::assume_ident(input.next());
|
|
||||||
return match input.next() {
|
return match input.next() {
|
||||||
Some(TokenTree::Ident(ident)) => Ok((result, ident)),
|
Some(TokenTree::Ident(ident)) => Ok((result, ident)),
|
||||||
Some(t) => Err(Error::InvalidRustSyntax(t.span())),
|
token => Error::wrong_token(token.as_ref(), "ident"),
|
||||||
None => Err(Error::InvalidRustSyntax(ident.span())),
|
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
let span = input
|
Error::wrong_token(input.peek(), "ident")
|
||||||
.peek()
|
|
||||||
.map(|t| t.span())
|
|
||||||
.unwrap_or_else(Span::call_site);
|
|
||||||
Err(Error::InvalidRustSyntax(span))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -7,7 +7,7 @@ use std::iter::Peekable;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Generics {
|
pub struct Generics {
|
||||||
lifetimes_and_generics: Vec<LifetimeOrGeneric>,
|
generics: Vec<Generic>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Generics {
|
impl Generics {
|
||||||
|
|
@ -17,30 +17,31 @@ impl Generics {
|
||||||
if punct.as_char() == '<' {
|
if punct.as_char() == '<' {
|
||||||
let punct = super::assume_punct(input.next(), '<');
|
let punct = super::assume_punct(input.next(), '<');
|
||||||
let mut result = Generics {
|
let mut result = Generics {
|
||||||
lifetimes_and_generics: Vec::new(),
|
generics: Vec::new(),
|
||||||
};
|
};
|
||||||
loop {
|
loop {
|
||||||
match input.peek() {
|
match input.peek() {
|
||||||
Some(TokenTree::Punct(punct)) if punct.as_char() == '\'' => {
|
Some(TokenTree::Punct(punct)) if punct.as_char() == '\'' => {
|
||||||
result
|
result.generics.push(Lifetime::take(input)?.into());
|
||||||
.lifetimes_and_generics
|
|
||||||
.push(Lifetime::take(input)?.into());
|
|
||||||
super::consume_punct_if(input, ',');
|
super::consume_punct_if(input, ',');
|
||||||
}
|
}
|
||||||
Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {
|
Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {
|
||||||
assume_punct(input.next(), '>');
|
assume_punct(input.next(), '>');
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
Some(TokenTree::Ident(ident)) if ident_eq(ident, "const") => {
|
||||||
|
result.generics.push(ConstGeneric::take(input)?.into());
|
||||||
|
super::consume_punct_if(input, ',');
|
||||||
|
}
|
||||||
Some(TokenTree::Ident(_)) => {
|
Some(TokenTree::Ident(_)) => {
|
||||||
result
|
result.generics.push(SimpleGeneric::take(input)?.into());
|
||||||
.lifetimes_and_generics
|
|
||||||
.push(Generic::take(input)?.into());
|
|
||||||
super::consume_punct_if(input, ',');
|
super::consume_punct_if(input, ',');
|
||||||
}
|
}
|
||||||
x => {
|
x => {
|
||||||
return Err(Error::InvalidRustSyntax(
|
return Err(Error::InvalidRustSyntax {
|
||||||
x.map(|x| x.span()).unwrap_or_else(|| punct.span()),
|
span: x.map(|x| x.span()).unwrap_or_else(|| punct.span()),
|
||||||
));
|
expected: format!("', > or an ident, got {:?}", x),
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -51,30 +52,19 @@ impl Generics {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn has_lifetime(&self) -> bool {
|
pub fn has_lifetime(&self) -> bool {
|
||||||
self.lifetimes_and_generics
|
self.generics.iter().any(|lt| lt.is_lifetime())
|
||||||
.iter()
|
|
||||||
.any(|lt| lt.is_lifetime())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn impl_generics(&self) -> StreamBuilder {
|
pub fn impl_generics(&self) -> StreamBuilder {
|
||||||
let mut result = StreamBuilder::new();
|
let mut result = StreamBuilder::new();
|
||||||
result.punct('<');
|
result.punct('<');
|
||||||
|
|
||||||
for (idx, generic) in self.lifetimes_and_generics.iter().enumerate() {
|
for (idx, generic) in self.generics.iter().enumerate() {
|
||||||
if idx > 0 {
|
if idx > 0 {
|
||||||
result.punct(',');
|
result.punct(',');
|
||||||
}
|
}
|
||||||
|
|
||||||
if generic.is_lifetime() {
|
generic.append_to_result_with_constraints(&mut result);
|
||||||
result.lifetime(generic.ident());
|
|
||||||
} else {
|
|
||||||
result.ident(generic.ident());
|
|
||||||
}
|
|
||||||
|
|
||||||
if generic.has_constraints() {
|
|
||||||
result.punct(':');
|
|
||||||
result.extend(generic.constraints());
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
result.punct('>');
|
result.punct('>');
|
||||||
|
|
@ -91,7 +81,7 @@ impl Generics {
|
||||||
|
|
||||||
if self.has_lifetime() {
|
if self.has_lifetime() {
|
||||||
for (idx, lt) in self
|
for (idx, lt) in self
|
||||||
.lifetimes_and_generics
|
.generics
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(|lt| lt.as_lifetime())
|
.filter_map(|lt| lt.as_lifetime())
|
||||||
.enumerate()
|
.enumerate()
|
||||||
|
|
@ -101,19 +91,9 @@ impl Generics {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for generic in &self.lifetimes_and_generics {
|
for generic in &self.generics {
|
||||||
result.punct(',');
|
result.punct(',');
|
||||||
|
generic.append_to_result_with_constraints(&mut result);
|
||||||
if generic.is_lifetime() {
|
|
||||||
result.lifetime(generic.ident());
|
|
||||||
} else {
|
|
||||||
result.ident(generic.ident());
|
|
||||||
}
|
|
||||||
|
|
||||||
if generic.has_constraints() {
|
|
||||||
result.punct(':');
|
|
||||||
result.extend(generic.constraints());
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
result.punct('>');
|
result.punct('>');
|
||||||
|
|
@ -125,7 +105,7 @@ impl Generics {
|
||||||
let mut result = StreamBuilder::new();
|
let mut result = StreamBuilder::new();
|
||||||
result.punct('<');
|
result.punct('<');
|
||||||
|
|
||||||
for (idx, generic) in self.lifetimes_and_generics.iter().enumerate() {
|
for (idx, generic) in self.generics.iter().enumerate() {
|
||||||
if idx > 0 {
|
if idx > 0 {
|
||||||
result.punct(',');
|
result.punct(',');
|
||||||
}
|
}
|
||||||
|
|
@ -142,27 +122,29 @@ impl Generics {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
enum LifetimeOrGeneric {
|
enum Generic {
|
||||||
Lifetime(Lifetime),
|
Lifetime(Lifetime),
|
||||||
Generic(Generic),
|
Generic(SimpleGeneric),
|
||||||
|
Const(ConstGeneric),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl LifetimeOrGeneric {
|
impl Generic {
|
||||||
fn is_lifetime(&self) -> bool {
|
fn is_lifetime(&self) -> bool {
|
||||||
matches!(self, LifetimeOrGeneric::Lifetime(_))
|
matches!(self, Generic::Lifetime(_))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn ident(&self) -> Ident {
|
fn ident(&self) -> Ident {
|
||||||
match self {
|
match self {
|
||||||
Self::Lifetime(lt) => lt.ident.clone(),
|
Self::Lifetime(lt) => lt.ident.clone(),
|
||||||
Self::Generic(gen) => gen.ident.clone(),
|
Self::Generic(gen) => gen.ident.clone(),
|
||||||
|
Self::Const(gen) => gen.ident.clone(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn as_lifetime(&self) -> Option<&Lifetime> {
|
fn as_lifetime(&self) -> Option<&Lifetime> {
|
||||||
match self {
|
match self {
|
||||||
Self::Lifetime(lt) => Some(lt),
|
Self::Lifetime(lt) => Some(lt),
|
||||||
Self::Generic(_) => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -170,6 +152,7 @@ impl LifetimeOrGeneric {
|
||||||
match self {
|
match self {
|
||||||
Self::Lifetime(lt) => !lt.constraint.is_empty(),
|
Self::Lifetime(lt) => !lt.constraint.is_empty(),
|
||||||
Self::Generic(gen) => !gen.constraints.is_empty(),
|
Self::Generic(gen) => !gen.constraints.is_empty(),
|
||||||
|
Self::Const(_) => true, // const generics always have a constraint
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -177,22 +160,46 @@ impl LifetimeOrGeneric {
|
||||||
match self {
|
match self {
|
||||||
Self::Lifetime(lt) => lt.constraint.clone(),
|
Self::Lifetime(lt) => lt.constraint.clone(),
|
||||||
Self::Generic(gen) => gen.constraints.clone(),
|
Self::Generic(gen) => gen.constraints.clone(),
|
||||||
|
Self::Const(gen) => gen.constraints.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn append_to_result_with_constraints(&self, builder: &mut StreamBuilder) {
|
||||||
|
match self {
|
||||||
|
Self::Lifetime(lt) => builder.lifetime(lt.ident.clone()),
|
||||||
|
Self::Generic(gen) => {
|
||||||
|
builder.ident(gen.ident.clone());
|
||||||
|
}
|
||||||
|
Self::Const(gen) => {
|
||||||
|
builder.ident(gen.const_token.clone());
|
||||||
|
builder.ident(gen.ident.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if self.has_constraints() {
|
||||||
|
builder.punct(':');
|
||||||
|
builder.extend(self.constraints());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<Lifetime> for LifetimeOrGeneric {
|
impl From<Lifetime> for Generic {
|
||||||
fn from(lt: Lifetime) -> Self {
|
fn from(lt: Lifetime) -> Self {
|
||||||
Self::Lifetime(lt)
|
Self::Lifetime(lt)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<Generic> for LifetimeOrGeneric {
|
impl From<SimpleGeneric> for Generic {
|
||||||
fn from(gen: Generic) -> Self {
|
fn from(gen: SimpleGeneric) -> Self {
|
||||||
Self::Generic(gen)
|
Self::Generic(gen)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl From<ConstGeneric> for Generic {
|
||||||
|
fn from(gen: ConstGeneric) -> Self {
|
||||||
|
Self::Const(gen)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_generics_try_take() {
|
fn test_generics_try_take() {
|
||||||
use crate::token_stream;
|
use crate::token_stream;
|
||||||
|
|
@ -210,18 +217,18 @@ fn test_generics_try_take() {
|
||||||
assert_eq!(data_type, super::DataType::Struct);
|
assert_eq!(data_type, super::DataType::Struct);
|
||||||
assert_eq!(ident, "Foo");
|
assert_eq!(ident, "Foo");
|
||||||
let generics = Generics::try_take(stream).unwrap().unwrap();
|
let generics = Generics::try_take(stream).unwrap().unwrap();
|
||||||
assert_eq!(generics.lifetimes_and_generics.len(), 2);
|
assert_eq!(generics.generics.len(), 2);
|
||||||
assert_eq!(generics.lifetimes_and_generics[0].ident(), "a");
|
assert_eq!(generics.generics[0].ident(), "a");
|
||||||
assert_eq!(generics.lifetimes_and_generics[1].ident(), "T");
|
assert_eq!(generics.generics[1].ident(), "T");
|
||||||
|
|
||||||
let stream = &mut token_stream("struct Foo<A, B>()");
|
let stream = &mut token_stream("struct Foo<A, B>()");
|
||||||
let (data_type, ident) = super::DataType::take(stream).unwrap();
|
let (data_type, ident) = super::DataType::take(stream).unwrap();
|
||||||
assert_eq!(data_type, super::DataType::Struct);
|
assert_eq!(data_type, super::DataType::Struct);
|
||||||
assert_eq!(ident, "Foo");
|
assert_eq!(ident, "Foo");
|
||||||
let generics = Generics::try_take(stream).unwrap().unwrap();
|
let generics = Generics::try_take(stream).unwrap().unwrap();
|
||||||
assert_eq!(generics.lifetimes_and_generics.len(), 2);
|
assert_eq!(generics.generics.len(), 2);
|
||||||
assert_eq!(generics.lifetimes_and_generics[0].ident(), "A");
|
assert_eq!(generics.generics[0].ident(), "A");
|
||||||
assert_eq!(generics.lifetimes_and_generics[1].ident(), "B");
|
assert_eq!(generics.generics[1].ident(), "B");
|
||||||
|
|
||||||
let stream = &mut token_stream("struct Foo<'a, T: Display>()");
|
let stream = &mut token_stream("struct Foo<'a, T: Display>()");
|
||||||
let (data_type, ident) = super::DataType::take(stream).unwrap();
|
let (data_type, ident) = super::DataType::take(stream).unwrap();
|
||||||
|
|
@ -229,18 +236,18 @@ fn test_generics_try_take() {
|
||||||
assert_eq!(ident, "Foo");
|
assert_eq!(ident, "Foo");
|
||||||
let generics = Generics::try_take(stream).unwrap().unwrap();
|
let generics = Generics::try_take(stream).unwrap().unwrap();
|
||||||
dbg!(&generics);
|
dbg!(&generics);
|
||||||
assert_eq!(generics.lifetimes_and_generics.len(), 2);
|
assert_eq!(generics.generics.len(), 2);
|
||||||
assert_eq!(generics.lifetimes_and_generics[0].ident(), "a");
|
assert_eq!(generics.generics[0].ident(), "a");
|
||||||
assert_eq!(generics.lifetimes_and_generics[1].ident(), "T");
|
assert_eq!(generics.generics[1].ident(), "T");
|
||||||
|
|
||||||
let stream = &mut token_stream("struct Foo<'a, T: for<'a> Bar<'a> + 'static>()");
|
let stream = &mut token_stream("struct Foo<'a, T: for<'a> Bar<'a> + 'static>()");
|
||||||
let (data_type, ident) = super::DataType::take(stream).unwrap();
|
let (data_type, ident) = super::DataType::take(stream).unwrap();
|
||||||
assert_eq!(data_type, super::DataType::Struct);
|
assert_eq!(data_type, super::DataType::Struct);
|
||||||
assert_eq!(ident, "Foo");
|
assert_eq!(ident, "Foo");
|
||||||
dbg!(&generics);
|
dbg!(&generics);
|
||||||
assert_eq!(generics.lifetimes_and_generics.len(), 2);
|
assert_eq!(generics.generics.len(), 2);
|
||||||
assert_eq!(generics.lifetimes_and_generics[0].ident(), "a");
|
assert_eq!(generics.generics[0].ident(), "a");
|
||||||
assert_eq!(generics.lifetimes_and_generics[1].ident(), "T");
|
assert_eq!(generics.generics[1].ident(), "T");
|
||||||
|
|
||||||
let stream = &mut token_stream(
|
let stream = &mut token_stream(
|
||||||
"struct Baz<T: for<'a> Bar<'a, for<'b> Bar<'b, for<'c> Bar<'c, u32>>>> {}",
|
"struct Baz<T: for<'a> Bar<'a, for<'b> Bar<'b, for<'c> Bar<'c, u32>>>> {}",
|
||||||
|
|
@ -250,8 +257,8 @@ fn test_generics_try_take() {
|
||||||
assert_eq!(ident, "Baz");
|
assert_eq!(ident, "Baz");
|
||||||
let generics = Generics::try_take(stream).unwrap().unwrap();
|
let generics = Generics::try_take(stream).unwrap().unwrap();
|
||||||
dbg!(&generics);
|
dbg!(&generics);
|
||||||
assert_eq!(generics.lifetimes_and_generics.len(), 1);
|
assert_eq!(generics.generics.len(), 1);
|
||||||
assert_eq!(generics.lifetimes_and_generics[0].ident(), "T");
|
assert_eq!(generics.generics[0].ident(), "T");
|
||||||
|
|
||||||
let stream = &mut token_stream("struct Baz<()> {}");
|
let stream = &mut token_stream("struct Baz<()> {}");
|
||||||
let (data_type, ident) = super::DataType::take(stream).unwrap();
|
let (data_type, ident) = super::DataType::take(stream).unwrap();
|
||||||
|
|
@ -267,9 +274,9 @@ fn test_generics_try_take() {
|
||||||
assert_eq!(ident, "Bar");
|
assert_eq!(ident, "Bar");
|
||||||
let generics = Generics::try_take(stream).unwrap().unwrap();
|
let generics = Generics::try_take(stream).unwrap().unwrap();
|
||||||
dbg!(&generics);
|
dbg!(&generics);
|
||||||
assert_eq!(generics.lifetimes_and_generics.len(), 2);
|
assert_eq!(generics.generics.len(), 2);
|
||||||
assert_eq!(generics.lifetimes_and_generics[0].ident(), "A");
|
assert_eq!(generics.generics[0].ident(), "A");
|
||||||
assert_eq!(generics.lifetimes_and_generics[1].ident(), "B");
|
assert_eq!(generics.generics[1].ident(), "B");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
|
|
@ -325,12 +332,12 @@ fn test_lifetime_take() {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Generic {
|
pub struct SimpleGeneric {
|
||||||
ident: Ident,
|
ident: Ident,
|
||||||
constraints: Vec<TokenTree>,
|
constraints: Vec<TokenTree>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Generic {
|
impl SimpleGeneric {
|
||||||
pub fn take(input: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Result<Self> {
|
pub fn take(input: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Result<Self> {
|
||||||
let ident = super::assume_ident(input.next());
|
let ident = super::assume_ident(input.next());
|
||||||
let mut constraints = Vec::new();
|
let mut constraints = Vec::new();
|
||||||
|
|
@ -340,7 +347,33 @@ impl Generic {
|
||||||
constraints = super::read_tokens_until_punct(input, &['>', ','])?;
|
constraints = super::read_tokens_until_punct(input, &['>', ','])?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Ok(Generic { ident, constraints })
|
Ok(Self { ident, constraints })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct ConstGeneric {
|
||||||
|
const_token: Ident,
|
||||||
|
ident: Ident,
|
||||||
|
constraints: Vec<TokenTree>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ConstGeneric {
|
||||||
|
pub fn take(input: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Result<Self> {
|
||||||
|
let const_token = super::assume_ident(input.next());
|
||||||
|
let ident = super::assume_ident(input.next());
|
||||||
|
let mut constraints = Vec::new();
|
||||||
|
if let Some(TokenTree::Punct(punct)) = input.peek() {
|
||||||
|
if punct.as_char() == ':' {
|
||||||
|
super::assume_punct(input.next(), ':');
|
||||||
|
constraints = super::read_tokens_until_punct(input, &['>', ','])?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(Self {
|
||||||
|
const_token,
|
||||||
|
ident,
|
||||||
|
constraints,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -410,8 +443,8 @@ fn test_generic_constraints_try_take() {
|
||||||
assert_eq!(data_type, DataType::Struct);
|
assert_eq!(data_type, DataType::Struct);
|
||||||
assert_eq!(ident, "Test");
|
assert_eq!(ident, "Test");
|
||||||
let constraints = Generics::try_take(stream).unwrap().unwrap();
|
let constraints = Generics::try_take(stream).unwrap().unwrap();
|
||||||
assert_eq!(constraints.lifetimes_and_generics.len(), 1);
|
assert_eq!(constraints.generics.len(), 1);
|
||||||
assert_eq!(constraints.lifetimes_and_generics[0].ident(), "T");
|
assert_eq!(constraints.generics[0].ident(), "T");
|
||||||
let body = StructBody::take(stream).unwrap();
|
let body = StructBody::take(stream).unwrap();
|
||||||
assert_eq!(body.fields.len(), 0);
|
assert_eq!(body.fields.len(), 0);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -8,10 +8,10 @@ mod data_type;
|
||||||
mod generics;
|
mod generics;
|
||||||
mod visibility;
|
mod visibility;
|
||||||
|
|
||||||
pub use self::attributes::Attributes;
|
pub use self::attributes::Attribute;
|
||||||
pub use self::body::{EnumBody, EnumVariant, Fields, StructBody, UnnamedField};
|
pub use self::body::{EnumBody, EnumVariant, Fields, StructBody, UnnamedField};
|
||||||
pub use self::data_type::DataType;
|
pub use self::data_type::DataType;
|
||||||
pub use self::generics::{Generic, GenericConstraints, Generics, Lifetime};
|
pub use self::generics::{GenericConstraints, Generics, Lifetime, SimpleGeneric};
|
||||||
pub use self::visibility::Visibility;
|
pub use self::visibility::Visibility;
|
||||||
|
|
||||||
pub(self) fn assume_group(t: Option<TokenTree>) -> Group {
|
pub(self) fn assume_group(t: Option<TokenTree>) -> Group {
|
||||||
|
|
@ -103,7 +103,14 @@ pub(self) fn read_tokens_until_punct(
|
||||||
if expected_puncts.contains(&punct.as_char()) {
|
if expected_puncts.contains(&punct.as_char()) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
return Err(Error::InvalidRustSyntax(punct.span()));
|
return Err(Error::InvalidRustSyntax {
|
||||||
|
span: punct.span(),
|
||||||
|
expected: format!(
|
||||||
|
"one of {:?}, got '{}'",
|
||||||
|
expected_puncts,
|
||||||
|
punct.as_char()
|
||||||
|
),
|
||||||
|
});
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
let expected = OPEN_BRACKETS[index];
|
let expected = OPEN_BRACKETS[index];
|
||||||
|
|
|
||||||
|
|
@ -445,8 +445,7 @@ where
|
||||||
}
|
}
|
||||||
x => Err(DecodeError::UnexpectedVariant {
|
x => Err(DecodeError::UnexpectedVariant {
|
||||||
found: x as u32,
|
found: x as u32,
|
||||||
max: 1,
|
allowed: crate::error::AllowedEnumVariants::Range { max: 1, min: 0 },
|
||||||
min: 0,
|
|
||||||
type_name: core::any::type_name::<Option<T>>(),
|
type_name: core::any::type_name::<Option<T>>(),
|
||||||
}),
|
}),
|
||||||
}
|
}
|
||||||
|
|
@ -471,8 +470,7 @@ where
|
||||||
}
|
}
|
||||||
x => Err(DecodeError::UnexpectedVariant {
|
x => Err(DecodeError::UnexpectedVariant {
|
||||||
found: x as u32,
|
found: x as u32,
|
||||||
max: 1,
|
allowed: crate::error::AllowedEnumVariants::Range { max: 1, min: 0 },
|
||||||
min: 0,
|
|
||||||
type_name: core::any::type_name::<Result<T, U>>(),
|
type_name: core::any::type_name::<Result<T, U>>(),
|
||||||
}),
|
}),
|
||||||
}
|
}
|
||||||
|
|
@ -539,8 +537,7 @@ where
|
||||||
1 => Ok(Bound::Included(T::decode(decoder)?)),
|
1 => Ok(Bound::Included(T::decode(decoder)?)),
|
||||||
2 => Ok(Bound::Excluded(T::decode(decoder)?)),
|
2 => Ok(Bound::Excluded(T::decode(decoder)?)),
|
||||||
x => Err(DecodeError::UnexpectedVariant {
|
x => Err(DecodeError::UnexpectedVariant {
|
||||||
min: 0,
|
allowed: crate::error::AllowedEnumVariants::Range { max: 2, min: 0 },
|
||||||
max: 2,
|
|
||||||
found: x,
|
found: x,
|
||||||
type_name: core::any::type_name::<Bound<T>>(),
|
type_name: core::any::type_name::<Bound<T>>(),
|
||||||
}),
|
}),
|
||||||
|
|
|
||||||
src/error.rs
|
|
@ -50,7 +50,7 @@ pub enum EncodeError {
|
||||||
|
|
||||||
/// Errors that can be encounted by decoding a type
|
/// Errors that can be encounted by decoding a type
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
#[derive(Debug)]
|
#[derive(Debug, PartialEq)]
|
||||||
pub enum DecodeError {
|
pub enum DecodeError {
|
||||||
/// The reader reached its end but more bytes were expected.
|
/// The reader reached its end but more bytes were expected.
|
||||||
UnexpectedEnd,
|
UnexpectedEnd,
|
||||||
|
|
@ -74,11 +74,8 @@ pub enum DecodeError {
|
||||||
/// The type name that was being decoded.
|
/// The type name that was being decoded.
|
||||||
type_name: &'static str,
|
type_name: &'static str,
|
||||||
|
|
||||||
/// The min index of the enum. Usually this is `0`.
|
/// The variants that are allowed
|
||||||
min: u32,
|
allowed: AllowedEnumVariants,
|
||||||
|
|
||||||
/// the max index of the enum.
|
|
||||||
max: u32,
|
|
||||||
|
|
||||||
/// The index of the enum that the decoder encountered
|
/// The index of the enum that the decoder encountered
|
||||||
found: u32,
|
found: u32,
|
||||||
|
|
@ -126,9 +123,20 @@ impl DecodeError {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Indicates which enum variants are allowed
|
||||||
|
#[non_exhaustive]
|
||||||
|
#[derive(Debug, PartialEq)]
|
||||||
|
pub enum AllowedEnumVariants {
|
||||||
|
/// All values between `min` and `max` (inclusive) are allowed
|
||||||
|
#[allow(missing_docs)]
|
||||||
|
Range { min: u32, max: u32 },
|
||||||
|
/// Each one of these values is allowed
|
||||||
|
Allowed(&'static [u32]),
|
||||||
|
}
|
||||||
|
|
||||||
/// Integer types. Used by [DecodeError]. These types have no purpose other than being shown in errors.
|
/// Integer types. Used by [DecodeError]. These types have no purpose other than being shown in errors.
|
||||||
#[non_exhaustive]
|
#[non_exhaustive]
|
||||||
#[derive(Debug)]
|
#[derive(Debug, PartialEq, Eq)]
|
||||||
#[allow(missing_docs)]
|
#[allow(missing_docs)]
|
||||||
pub enum IntegerType {
|
pub enum IntegerType {
|
||||||
U8,
|
U8,
|
||||||
|
|
|
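A minimal sketch (assuming std is available; names are the ones introduced above) of how calling code might report the new `allowed` field on DecodeError::UnexpectedVariant; both enums are #[non_exhaustive], so the match keeps a catch-all arm.

    use bincode::error::{AllowedEnumVariants, DecodeError};

    fn report(err: &DecodeError) {
        if let DecodeError::UnexpectedVariant { type_name, found, allowed } = err {
            match allowed {
                AllowedEnumVariants::Range { min, max } => {
                    println!("{}: found variant {}, expected {}..={}", type_name, found, min, max)
                }
                AllowedEnumVariants::Allowed(list) => {
                    println!("{}: found variant {}, expected one of {:?}", type_name, found, list)
                }
                // AllowedEnumVariants is #[non_exhaustive]
                _ => println!("{}: found unexpected variant {}", type_name, found),
            }
        }
    }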
||||||
|
|
@ -252,8 +252,7 @@ impl Decode for IpAddr {
|
||||||
0 => Ok(IpAddr::V4(Ipv4Addr::decode(decoder)?)),
|
0 => Ok(IpAddr::V4(Ipv4Addr::decode(decoder)?)),
|
||||||
1 => Ok(IpAddr::V6(Ipv6Addr::decode(decoder)?)),
|
1 => Ok(IpAddr::V6(Ipv6Addr::decode(decoder)?)),
|
||||||
found => Err(DecodeError::UnexpectedVariant {
|
found => Err(DecodeError::UnexpectedVariant {
|
||||||
min: 0,
|
allowed: crate::error::AllowedEnumVariants::Range { min: 0, max: 1 },
|
||||||
max: 1,
|
|
||||||
found,
|
found,
|
||||||
type_name: core::any::type_name::<IpAddr>(),
|
type_name: core::any::type_name::<IpAddr>(),
|
||||||
}),
|
}),
|
||||||
|
|
@ -306,8 +305,7 @@ impl Decode for SocketAddr {
|
||||||
0 => Ok(SocketAddr::V4(SocketAddrV4::decode(decoder)?)),
|
0 => Ok(SocketAddr::V4(SocketAddrV4::decode(decoder)?)),
|
||||||
1 => Ok(SocketAddr::V6(SocketAddrV6::decode(decoder)?)),
|
1 => Ok(SocketAddr::V6(SocketAddrV6::decode(decoder)?)),
|
||||||
found => Err(DecodeError::UnexpectedVariant {
|
found => Err(DecodeError::UnexpectedVariant {
|
||||||
min: 0,
|
allowed: crate::error::AllowedEnumVariants::Range { min: 0, max: 1 },
|
||||||
max: 1,
|
|
||||||
found,
|
found,
|
||||||
type_name: core::any::type_name::<SocketAddr>(),
|
type_name: core::any::type_name::<SocketAddr>(),
|
||||||
}),
|
}),
|
||||||
|
|
|
||||||
|
|
@ -144,3 +144,50 @@ fn test_decode_enum_tuple_variant() {
|
||||||
bincode::decode_from_slice(&mut slice, Configuration::standard()).unwrap();
|
bincode::decode_from_slice(&mut slice, Configuration::standard()).unwrap();
|
||||||
assert_eq!(result, start);
|
assert_eq!(result, start);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(bincode::Decode, bincode::Encode, PartialEq, Eq, Debug)]
|
||||||
|
enum CStyleEnum {
|
||||||
|
A = 1,
|
||||||
|
B = 2,
|
||||||
|
C,
|
||||||
|
D = 5,
|
||||||
|
E,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_c_style_enum() {
|
||||||
|
fn ser(e: CStyleEnum) -> u8 {
|
||||||
|
let mut slice = [0u8; 10];
|
||||||
|
let bytes_written =
|
||||||
|
bincode::encode_into_slice(e, &mut slice, Configuration::standard()).unwrap();
|
||||||
|
assert_eq!(bytes_written, 1);
|
||||||
|
slice[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
assert_eq!(ser(CStyleEnum::A), 1);
|
||||||
|
assert_eq!(ser(CStyleEnum::B), 2);
|
||||||
|
assert_eq!(ser(CStyleEnum::C), 3);
|
||||||
|
assert_eq!(ser(CStyleEnum::D), 5);
|
||||||
|
assert_eq!(ser(CStyleEnum::E), 6);
|
||||||
|
|
||||||
|
fn de(num: u8) -> Result<CStyleEnum, bincode::error::DecodeError> {
|
||||||
|
bincode::decode_from_slice(&[num], Configuration::standard())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expected_err(idx: u32) -> Result<CStyleEnum, bincode::error::DecodeError> {
|
||||||
|
Err(bincode::error::DecodeError::UnexpectedVariant {
|
||||||
|
type_name: "CStyleEnum",
|
||||||
|
allowed: bincode::error::AllowedEnumVariants::Allowed(&[1, 2, 3, 5, 6]),
|
||||||
|
found: idx,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
assert_eq!(de(0), expected_err(0));
|
||||||
|
assert_eq!(de(1).unwrap(), CStyleEnum::A);
|
||||||
|
assert_eq!(de(2).unwrap(), CStyleEnum::B);
|
||||||
|
assert_eq!(de(3).unwrap(), CStyleEnum::C);
|
||||||
|
assert_eq!(de(4), expected_err(4));
|
||||||
|
assert_eq!(de(5).unwrap(), CStyleEnum::D);
|
||||||
|
assert_eq!(de(6).unwrap(), CStyleEnum::E);
|
||||||
|
assert_eq!(de(7), expected_err(7));
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,4 @@
|
||||||
|
#![no_std]
|
||||||
|
|
||||||
|
#[path = "issues/issue_427.rs"]
|
||||||
|
mod issue_427;
|
||||||
|
|
@ -0,0 +1,69 @@
|
||||||
|
#![cfg(feature = "derive")]
|
||||||
|
|
||||||
|
/// HID-IO Packet Buffer Struct
|
||||||
|
///
|
||||||
|
/// # Remarks
|
||||||
|
/// Used to store HID-IO data chunks. Will be chunked into individual packets on transmission.
|
||||||
|
#[repr(C)]
|
||||||
|
#[derive(PartialEq, Clone, Debug, bincode::Encode)]
|
||||||
|
pub struct HidIoPacketBuffer<const H: usize> {
|
||||||
|
/// Type of packet (Continued is automatically set if needed)
|
||||||
|
pub ptype: u32,
|
||||||
|
/// Packet Id
|
||||||
|
pub id: u32,
|
||||||
|
/// Packet length for serialization (in bytes)
|
||||||
|
pub max_len: u32,
|
||||||
|
/// Payload data, chunking is done automatically by serializer
|
||||||
|
pub data: [u8; H],
|
||||||
|
/// Set False if buffer is not complete, True if it is
|
||||||
|
pub done: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[repr(u32)]
|
||||||
|
#[derive(PartialEq, Clone, Copy, Debug, bincode::Encode)]
|
||||||
|
#[allow(dead_code)]
|
||||||
|
/// Requests for to perform a specific action
|
||||||
|
pub enum HidIoCommandId {
|
||||||
|
SupportedIds = 0x00,
|
||||||
|
GetInfo = 0x01,
|
||||||
|
TestPacket = 0x02,
|
||||||
|
ResetHidIo = 0x03,
|
||||||
|
Reserved = 0x04, // ... 0x0F
|
||||||
|
|
||||||
|
GetProperties = 0x10,
|
||||||
|
KeyState = 0x11,
|
||||||
|
KeyboardLayout = 0x12,
|
||||||
|
KeyLayout = 0x13,
|
||||||
|
KeyShapes = 0x14,
|
||||||
|
LedLayout = 0x15,
|
||||||
|
FlashMode = 0x16,
|
||||||
|
UnicodeText = 0x17,
|
||||||
|
UnicodeState = 0x18,
|
||||||
|
HostMacro = 0x19,
|
||||||
|
SleepMode = 0x1A,
|
||||||
|
|
||||||
|
KllState = 0x20,
|
||||||
|
PixelSetting = 0x21,
|
||||||
|
PixelSet1c8b = 0x22,
|
||||||
|
PixelSet3c8b = 0x23,
|
||||||
|
PixelSet1c16b = 0x24,
|
||||||
|
PixelSet3c16b = 0x25,
|
||||||
|
|
||||||
|
OpenUrl = 0x30,
|
||||||
|
TerminalCmd = 0x31,
|
||||||
|
GetInputLayout = 0x32,
|
||||||
|
SetInputLayout = 0x33,
|
||||||
|
TerminalOut = 0x34,
|
||||||
|
|
||||||
|
HidKeyboard = 0x40,
|
||||||
|
HidKeyboardLed = 0x41,
|
||||||
|
HidMouse = 0x42,
|
||||||
|
HidJoystick = 0x43,
|
||||||
|
HidSystemCtrl = 0x44,
|
||||||
|
HidConsumerCtrl = 0x45,
|
||||||
|
|
||||||
|
ManufacturingTest = 0x50,
|
||||||
|
ManufacturingResult = 0x51,
|
||||||
|
|
||||||
|
Unused = 0xFFFF,
|
||||||
|
}
|
||||||