paste inclusion

This commit is contained in:
Matteo Bovetti 2025-03-09 17:43:39 +01:00
parent 6f5b81d2a0
commit 2fd31217bd
6 changed files with 199 additions and 6 deletions

9
Cargo.lock generated
View File

@ -69,7 +69,6 @@ dependencies = [
"actix-utils",
"futures-core",
"futures-util",
"paste",
"pin-project-lite",
]
@ -231,7 +230,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e4e8200b9a4a5801a769d50eeabc05670fec7e959a8cb7a63a93e4e519942ae"
dependencies = [
"aws-lc-sys",
"paste",
"paste 1.0.15",
"zeroize",
]
@ -246,7 +245,7 @@ dependencies = [
"cmake",
"dunce",
"fs_extra",
"paste",
"paste 1.0.15",
]
[[package]]
@ -1335,6 +1334,10 @@ dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "paste"
version = "0.1.0"
[[package]]
name = "paste"
version = "1.0.15"

View File

@ -12,6 +12,7 @@ members = [
"bytestring",
"local-channel",
"local-waker",
"paste",
]
[workspace.package]

View File

@ -12,7 +12,6 @@ rust-version.workspace = true
[dependencies]
futures-core = { version = "0.3.17", default-features = false }
paste = "1"
pin-project-lite = "0.2"
[dev-dependencies]

View File

@ -3,8 +3,6 @@
use alloc::{boxed::Box, rc::Rc};
use core::{future::Future, pin::Pin};
use paste::paste;
use crate::{Service, ServiceFactory};
/// A boxed future with no send bound or lifetime parameters.

14
paste/Cargo.toml Normal file
View File

@ -0,0 +1,14 @@
[package]
name = "paste"
version = "0.1.0"
license.workspace = true
edition.workspace = true
rust-version.workspace = true
[dependencies]
[lints]
workspace = true
[lib]
proc-macro = true

178
paste/src/lib.rs Normal file
View File

@ -0,0 +1,178 @@
//! A minimal implementation of the paste crate, allowing identifier concatenation in macros.
use proc_macro::{
Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream, TokenTree
};
use std::{iter, str::FromStr};
use std::panic;
/// Expands `[< ... >]` paste groups anywhere in the input token stream,
/// concatenating the enclosed identifier/literal segments into a single token.
///
/// Tokens outside a paste group pass through unchanged; other groups are
/// processed recursively so paste operations work at any nesting depth. If a
/// paste operation is malformed, the original group is emitted unchanged.
#[proc_macro]
pub fn paste(input: TokenStream) -> TokenStream {
    let mut expanded = TokenStream::new();
    // Plain iteration: the previous `.peekable()` adapter was never peeked.
    for token in input {
        match token {
            TokenTree::Group(group) => {
                let delimiter = group.delimiter();
                let content = group.stream();
                let span = group.span();
                if delimiter == Delimiter::Bracket && is_paste_operation(&content) {
                    // Process [< ... >] paste operation
                    if let Ok(pasted) = process_paste_operation(content, span) {
                        expanded.extend(pasted);
                    } else {
                        // On error, return the original token
                        expanded.extend(iter::once(TokenTree::Group(group)));
                    }
                } else {
                    // Handle nested groups recursively
                    let nested = paste(content);
                    let mut new_group = Group::new(delimiter, nested);
                    new_group.set_span(span);
                    expanded.extend(iter::once(TokenTree::Group(new_group)));
                }
            }
            // Pass through all other tokens unchanged
            _ => expanded.extend(iter::once(token)),
        }
    }
    expanded
}
// Returns true when `input` has the shape of a paste operation: a leading '<'
// punct, at least one content token, and a closing '>' somewhere after it.
fn is_paste_operation(input: &TokenStream) -> bool {
    let mut iter = input.clone().into_iter();
    // The stream must open with a '<' punct.
    match iter.next() {
        Some(TokenTree::Punct(p)) if p.as_char() == '<' => {}
        _ => return false,
    }
    // Count content tokens until the first '>' is seen.
    let mut content_tokens = 0usize;
    for tt in iter {
        if matches!(&tt, TokenTree::Punct(p) if p.as_char() == '>') {
            return content_tokens > 0;
        }
        content_tokens += 1;
    }
    // Never found a closing '>'.
    false
}
// Process the content inside [< ... >]: strip the angle brackets, concatenate
// the identifier/literal segments between them, and emit a single token that
// carries `span`.
//
// Returns Err(()) on any malformed input (missing '<', a ':' with no preceding
// segment, an unknown modifier, or an unexpected token); the caller then falls
// back to emitting the original group unchanged.
fn process_paste_operation(input: TokenStream, span: Span) -> Result<TokenStream, ()> {
    let mut tokens = input.into_iter();
    // Skip opening '<'
    if let Some(TokenTree::Punct(punct)) = tokens.next() {
        if punct.as_char() != '<' {
            return Err(());
        }
    } else {
        return Err(());
    }
    // Collect and process segments. NOTE: the ':' arm below pulls the *next*
    // token out of `tokens` mid-loop, so this stays a manual `while let` over
    // the iterator rather than a `for` loop.
    let mut segments = Vec::new();
    while let Some(token) = tokens.next() {
        match &token {
            // Closing '>' ends the paste operation; any tokens after it are
            // silently ignored.
            TokenTree::Punct(punct) if punct.as_char() == '>' => break,
            TokenTree::Ident(ident) => segments.push(ident.to_string()),
            TokenTree::Literal(lit) => {
                // For string literals, paste the contents without the quotes.
                // NOTE(review): raw strings (r"...") keep their full spelling
                // here — confirm that is acceptable for this minimal version.
                let lit_str = lit.to_string();
                if lit_str.starts_with('"') && lit_str.ends_with('"') && lit_str.len() >= 2 {
                    segments.push(lit_str[1..lit_str.len() - 1].to_owned());
                } else {
                    segments.push(lit_str);
                }
            },
            TokenTree::Punct(punct) if punct.as_char() == '_' => segments.push("_".to_owned()),
            TokenTree::Punct(punct) if punct.as_char() == ':' => {
                // A modifier must apply to a preceding segment.
                if segments.is_empty() {
                    return Err(());
                }
                // Handle modifiers like :lower, :upper, etc. — the modifier
                // name is consumed from the same iterator and applied to the
                // most recent segment.
                if let Some(TokenTree::Ident(ident)) = tokens.next() {
                    let modifier = ident.to_string();
                    let last = segments.pop().unwrap();
                    let result = match modifier.as_str() {
                        "lower" => last.to_lowercase(),
                        "upper" => last.to_uppercase(),
                        "snake" => to_snake_case(&last),
                        "camel" => to_camel_case(&last),
                        _ => return Err(()),
                    };
                    segments.push(result);
                } else {
                    return Err(());
                }
            },
            _ => return Err(()),
        }
    }
    // Create identifier from the concatenated segments
    let pasted = segments.join("");
    // Convert to a valid Rust identifier. `Ident::new` panics when `pasted` is
    // not a valid identifier (e.g. it starts with a digit), so the panic is
    // caught and converted into a fallback path.
    let ident = match panic::catch_unwind(|| Ident::new(&pasted, span)) {
        Ok(ident) => TokenTree::Ident(ident),
        Err(_) => {
            // If it starts with a number, try to create a literal
            if pasted.starts_with(|c: char| c.is_ascii_digit()) {
                match TokenStream::from_str(&pasted) {
                    Ok(ts) => {
                        if let Some(token) = ts.into_iter().next() {
                            return Ok(iter::once(token).collect());
                        }
                    }
                    Err(_) => {}
                }
            }
            return Err(());
        }
    };
    Ok(iter::once(ident).collect())
}
/// Converts a CamelCase string to snake_case by inserting an underscore before
/// each uppercase letter (except at the very start or immediately after an
/// existing underscore) and lowercasing every character.
fn to_snake_case(input: &str) -> String {
    let seed = (String::with_capacity(input.len() + 4), '_');
    let (result, _) = input.chars().fold(seed, |(mut acc, prev), ch| {
        // A word boundary is an uppercase letter not already preceded by '_'.
        if ch.is_uppercase() && prev != '_' {
            acc.push('_');
        }
        acc.push(ch.to_lowercase().next().unwrap_or(ch));
        (acc, ch)
    });
    result
}
/// Converts a `snake_case` string to UpperCamelCase, e.g. `foo_bar` -> `FooBar`.
///
/// Underscores are dropped; the character following each underscore — and the
/// very first character — is uppercased. All other characters pass through
/// unchanged.
fn to_camel_case(input: &str) -> String {
    let mut result = String::with_capacity(input.len());
    // Start `true` so the first character is capitalized too; starting `false`
    // produced `fooBar` instead of the CamelCase (`FooBar`) this function
    // promises, mirroring the paste crate's `:camel` modifier.
    let mut capitalize_next = true;
    for ch in input.chars() {
        if ch == '_' {
            capitalize_next = true;
        } else if capitalize_next {
            result.push(ch.to_uppercase().next().unwrap_or(ch));
            capitalize_next = false;
        } else {
            result.push(ch);
        }
    }
    result
}