
Commit da7df7f (1 parent: 6f150e2)

rust: macros: add seq! macro

Signed-off-by: Fabien Parent <[email protected]>

2 files changed: +152 −0

rust/macros/lib.rs (20 additions, 0 deletions)

@@ -10,6 +10,7 @@ mod module;
 mod paste;
 mod pin_data;
 mod pinned_drop;
+mod seq;
 mod vtable;
 mod zeroable;
@@ -387,6 +388,25 @@ pub fn paste(input: TokenStream) -> TokenStream {
     tokens.into_iter().collect()
 }

+/// Repeats a fragment of code, providing a numerical index for the current repetition.
+///
+/// # Examples
+///
+/// ```rust,ignore
+/// seq!(i in 0..10 {
+///     func$i() {
+///     }
+/// });
+///
+/// seq!(i in 8..=15 {
+///     bit$i() {
+///     }
+/// });
+/// ```
+#[proc_macro]
+pub fn seq(input: TokenStream) -> TokenStream {
+    seq::expand(input)
+}
+
 /// Derives the [`Zeroable`] trait for the given struct.
 ///
 /// This can only be used for structs where every field implements the [`Zeroable`] trait.
rust/macros/seq.rs (new file, 132 additions, 0 deletions)

@@ -0,0 +1,132 @@
use proc_macro::{
    Delimiter, TokenStream,
    TokenTree::{self, Group, Ident, Literal},
};
use std::ops::Range;

/// Walks `tokens`, replacing every `$<var>` with the literal `i` and recursing
/// into delimited groups so nested occurrences are substituted as well.
fn process_group(var: &str, i: isize, tokens: impl Iterator<Item = TokenTree>) -> Vec<TokenTree> {
    let mut tt = Vec::<TokenTree>::new();

    let mut tokens = tokens.peekable();

    while let Some(token) = tokens.next() {
        match token {
            Group(ref group) => {
                let group_tokens = process_group(var, i, group.stream().into_iter());
                let stream = FromIterator::from_iter(group_tokens.into_iter());
                let new_group = proc_macro::Group::new(group.delimiter(), stream);
                tt.push(TokenTree::Group(new_group));
            }
            TokenTree::Punct(ref punct) => {
                if punct.to_string() == "$" {
                    if let Some(TokenTree::Ident(ident)) = tokens.peek() {
                        if ident.to_string() == var {
                            // `$<var>` becomes the index of the current repetition.
                            tt.push(TokenTree::Literal(proc_macro::Literal::isize_unsuffixed(i)));
                            tokens.next();
                            continue;
                        }
                    }
                }

                tt.push(token);
            }
            _ => tt.push(token),
        }
    }

    tt
}

/// Parses `<ident> in <start>..[=]<end> { <body> }` and emits the body once per
/// value in the range, with `$<ident>` replaced by that value.
pub(crate) fn expand(input: TokenStream) -> TokenStream {
    let mut tokens = input.into_iter().peekable();

    let var = if let Some(Ident(i)) = tokens.next() {
        i.to_string()
    } else {
        panic!("seq! first token should be an identifier");
    };

    let token = tokens.next().expect("seq!: missing token, expecting 'in'");
    assert!(matches!(token, TokenTree::Ident(x) if x.to_string() == "in"));

    let token = tokens
        .next()
        .expect("seq!: missing token, expecting integer");
    let token = if let Group(group) = token {
        group
            .stream()
            .into_iter()
            .next()
            .expect("seq!: missing token, expecting integer")
    } else {
        token
    };

    let start = if let Literal(lit) = token {
        lit.to_string()
            .parse::<isize>()
            .expect("Failed to convert literal to isize")
    } else {
        panic!("seq!: unexpected token '{token}'");
    };

    let token = tokens.next().expect("seq!: missing token, expecting '.'");
    assert!(matches!(token, TokenTree::Punct(x) if x == '.'));
    let token = tokens.next().expect("seq!: missing token, expecting '.'");
    assert!(matches!(token, TokenTree::Punct(x) if x == '.'));

    // An optional '=' after the ".." makes the range inclusive.
    let is_inclusive_range = if let Some(TokenTree::Punct(p)) = tokens.peek() {
        if p.as_char() == '=' {
            tokens.next();
            true
        } else {
            false
        }
    } else {
        false
    };

    let token = tokens
        .next()
        .expect("seq!: missing token, expecting integer");
    let token = if let Group(group) = token {
        group
            .stream()
            .into_iter()
            .next()
            .expect("seq!: missing token, expecting integer")
    } else {
        token
    };

    let end = if let Literal(lit) = token {
        lit.to_string()
            .parse::<isize>()
            .expect("Failed to convert literal to isize")
    } else {
        panic!("seq!: unexpected token '{token}'");
    };
    let range = Range {
        start,
        end: end + if is_inclusive_range { 1 } else { 0 },
    };

    // The repeated fragment must be a brace-delimited group.
    let tokens = if let Some(Group(group)) = tokens.next() {
        if group.delimiter() != Delimiter::Brace {
            panic!("seq! expected brace");
        }

        group.stream().into_iter()
    } else {
        panic!("seq! missing opening brace");
    };

    let tokens: Vec<TokenTree> = tokens.collect();
    let mut tt = Vec::<TokenTree>::new();

    // Emit one substituted copy of the body per value in the range.
    for i in range {
        tt.extend_from_slice(&process_group(&var, i, tokens.clone().into_iter()));
    }

    FromIterator::from_iter(tt)
}
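To experiment with the substitution pass outside a proc-macro crate, the same logic can be sketched against the proc_macro2 crate (an assumption on my part; the kernel code above uses the compiler-provided proc_macro, which is only available inside a proc-macro crate, and this sketch needs proc-macro2 as a dependency):

    // Standalone sketch of the `$var` substitution done by process_group() above,
    // written against proc_macro2 so it can run as an ordinary program.
    use proc_macro2::{Group, Literal, TokenStream, TokenTree};

    fn substitute(var: &str, i: isize, tokens: TokenStream) -> TokenStream {
        let mut out = Vec::new();
        let mut iter = tokens.into_iter().peekable();
        while let Some(token) = iter.next() {
            match token {
                // Recurse into (), [] and {} groups, keeping the delimiter.
                TokenTree::Group(ref g) => {
                    let inner = substitute(var, i, g.stream());
                    out.push(TokenTree::Group(Group::new(g.delimiter(), inner)));
                }
                // `$` followed by the loop variable becomes an unsuffixed literal.
                TokenTree::Punct(ref p) if p.as_char() == '$' => {
                    if matches!(iter.peek(), Some(TokenTree::Ident(id)) if *id == var) {
                        iter.next();
                        out.push(TokenTree::Literal(Literal::isize_unsuffixed(i)));
                        continue;
                    }
                    out.push(token);
                }
                other => out.push(other),
            }
        }
        out.into_iter().collect()
    }

    fn main() {
        // The body a `seq!(i in 0..3 { ... })` call would repeat.
        let body: TokenStream = "do_step($i, [$i + 1]);".parse().unwrap();
        for i in 0..3 {
            println!("{}", substitute("i", i, body.clone()));
        }
        // Prints (modulo token spacing):
        //   do_step(0, [0 + 1]);
        //   do_step(1, [1 + 1]);
        //   do_step(2, [2 + 1]);
    }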
