Skip to content

Commit 91e777e

Browse files
committed
f better doc/split skip_legacy_fields
1 parent 9439a7b commit 91e777e

File tree

1 file changed

+82
-49
lines changed

1 file changed

+82
-49
lines changed

lightning-macros/src/lib.rs

+82-49
Original file line numberDiff line numberDiff line change
@@ -96,15 +96,89 @@ fn expect_punct(token: &TokenTree, expected: char) {
9696
}
9797
}
9898

99-
/// Scans a match statement for fields which should be skipped
99+
/// Converts a single [`TokenTree`] into a one-token [`proc_macro::TokenStream`].
fn token_to_stream(token: TokenTree) -> proc_macro::TokenStream {
	token.into()
}
102+
103+
/// Processes a list of fields in a variant definition (see the docs for [`skip_legacy_fields`])
104+
fn process_fields(group: Group) -> proc_macro::TokenStream {
105+
let mut computed_fields = proc_macro::TokenStream::new();
106+
if group.delimiter() == Delimiter::Brace {
107+
let mut fields_stream = group.stream().into_iter().peekable();
108+
109+
let mut new_fields = proc_macro::TokenStream::new();
110+
loop {
111+
// The field list should end with .., at which point we break
112+
let next_tok = fields_stream.peek();
113+
if let Some(TokenTree::Punct(_)) = next_tok {
114+
let dot1 = fields_stream.next().unwrap();
115+
expect_punct(&dot1, '.');
116+
let dot2 = fields_stream.next().expect("Missing second trailing .");
117+
expect_punct(&dot2, '.');
118+
let trailing_dots = [dot1, dot2];
119+
new_fields.extend(trailing_dots.into_iter().map(token_to_stream));
120+
assert!(fields_stream.peek().is_none());
121+
break;
122+
}
123+
124+
// Fields should take the form `ref field_name: ty_info` where `ty_info`
125+
// may be a single ident or may be a group. We skip the field if `ty_info`
126+
// is a group where the first token is the ident `legacy`.
127+
let ref_ident = fields_stream.next().unwrap();
128+
expect_ident(&ref_ident, Some("ref"));
129+
let field_name_ident = fields_stream.next().unwrap();
130+
let co = fields_stream.next().unwrap();
131+
expect_punct(&co, ':');
132+
let ty_info = fields_stream.next().unwrap();
133+
let com = fields_stream.next().unwrap();
134+
expect_punct(&com, ',');
135+
136+
if let TokenTree::Group(group) = ty_info {
137+
let first_group_tok = group.stream().into_iter().next().unwrap();
138+
if let TokenTree::Ident(ident) = first_group_tok {
139+
if ident.to_string() == "legacy" {
140+
continue;
141+
}
142+
}
143+
}
144+
145+
let field = [ref_ident, field_name_ident, com];
146+
new_fields.extend(field.into_iter().map(token_to_stream));
147+
}
148+
let fields_group = Group::new(Delimiter::Brace, new_fields);
149+
computed_fields.extend(token_to_stream(TokenTree::Group(fields_group)));
150+
} else {
151+
computed_fields.extend(token_to_stream(TokenTree::Group(group)));
152+
}
153+
computed_fields
154+
}
155+
156+
/// Scans a match statement for legacy fields which should be skipped.
157+
///
158+
/// This is used internally in LDK's TLV serialization logic and is not expected to be used by
159+
/// other crates.
100160
///
101161
/// Wraps a `match self {..}` statement and scans the fields in the match patterns (in the form
102162
/// `ref $field_name: $field_ty`) for types marked `legacy`, skipping those fields.
163+
///
164+
/// Specifically, it expects input like the following, simply dropping `field3` and the
165+
/// `: $field_ty` after each field name.
166+
/// ```ignore
167+
/// match self {
168+
/// Enum::Variant {
169+
/// ref field1: option,
170+
/// ref field2: (option, explicit_type: u64),
171+
/// ref field3: (legacy, u64, {}, {}), // will be skipped
172+
/// ..
173+
/// } => expression
174+
/// }
175+
/// ```
103176
#[proc_macro]
104177
pub fn skip_legacy_fields(expr: TokenStream) -> TokenStream {
105178
let mut stream = expr.clone().into_iter();
106179
let mut res = TokenStream::new();
107180

181+
// First expect `match self` followed by a `{}` group...
108182
let match_ident = stream.next().unwrap();
109183
expect_ident(&match_ident, Some("match"));
110184
res.extend(proc_macro::TokenStream::from(match_ident));
@@ -113,14 +187,14 @@ pub fn skip_legacy_fields(expr: TokenStream) -> TokenStream {
113187
expect_ident(&self_ident, Some("self"));
114188
res.extend(proc_macro::TokenStream::from(self_ident));
115189

116-
let token_to_stream = |tok| proc_macro::TokenStream::from(tok);
117-
118190
let arms = stream.next().unwrap();
119191
if let TokenTree::Group(group) = arms {
120192
let mut new_arms = TokenStream::new();
121193

122194
let mut arm_stream = group.stream().into_iter().peekable();
123195
while arm_stream.peek().is_some() {
196+
// Each arm should contain Enum::Variant { fields } => init
197+
// We explicitly check the :s, =, and >, as well as an optional trailing ,
124198
let enum_ident = arm_stream.next().unwrap();
125199
let co1 = arm_stream.next().unwrap();
126200
expect_punct(&co1, ':');
@@ -140,52 +214,11 @@ pub fn skip_legacy_fields(expr: TokenStream) -> TokenStream {
140214
arm_stream.next();
141215
}
142216

143-
let mut computed_fields = proc_macro::TokenStream::new();
144-
if let TokenTree::Group(group) = fields {
145-
if group.delimiter() == Delimiter::Brace {
146-
let mut fields_stream = group.stream().into_iter().peekable();
147-
148-
let mut new_fields = proc_macro::TokenStream::new();
149-
loop {
150-
let next_tok = fields_stream.peek();
151-
if let Some(TokenTree::Punct(_)) = next_tok {
152-
let dot1 = fields_stream.next().unwrap();
153-
expect_punct(&dot1, '.');
154-
let dot2 = fields_stream.next().expect("Missing second trailing .");
155-
expect_punct(&dot2, '.');
156-
let trailing_dots = [dot1, dot2];
157-
new_fields.extend(trailing_dots.into_iter().map(token_to_stream));
158-
assert!(fields_stream.peek().is_none());
159-
break;
160-
}
161-
162-
let ref_ident = fields_stream.next().unwrap();
163-
expect_ident(&ref_ident, Some("ref"));
164-
let field_name_ident = fields_stream.next().unwrap();
165-
let co = fields_stream.next().unwrap();
166-
expect_punct(&co, ':');
167-
let ty_info = fields_stream.next().unwrap();
168-
let com = fields_stream.next().unwrap();
169-
expect_punct(&com, ',');
170-
171-
if let TokenTree::Group(group) = ty_info {
172-
let first_group_tok = group.stream().into_iter().next().unwrap();
173-
if let TokenTree::Ident(ident) = first_group_tok {
174-
if ident.to_string() == "legacy" {
175-
continue;
176-
}
177-
}
178-
}
179-
180-
let field = [ref_ident, field_name_ident, com];
181-
new_fields.extend(field.into_iter().map(token_to_stream));
182-
}
183-
let fields_group = Group::new(Delimiter::Brace, new_fields);
184-
computed_fields.extend(token_to_stream(TokenTree::Group(fields_group)));
185-
} else {
186-
computed_fields.extend(token_to_stream(TokenTree::Group(group)));
187-
}
188-
}
217+
let computed_fields = if let TokenTree::Group(group) = fields {
218+
process_fields(group)
219+
} else {
220+
panic!("Expected a group for the fields in a match arm");
221+
};
189222

190223
let arm_pfx = [enum_ident, co1, co2, variant_ident];
191224
new_arms.extend(arm_pfx.into_iter().map(token_to_stream));

0 commit comments

Comments
 (0)