refactor: add chunks method to TokenStream to obviate rustdoc clones
calebcartwright committed May 13, 2023
1 parent 2c41369 commit 00c3f75
Showing 2 changed files with 5 additions and 2 deletions.
compiler/rustc_ast/src/tokenstream.rs (4 additions, 0 deletions)
@@ -551,6 +551,10 @@ impl TokenStream {
             vec_mut.extend(stream_iter);
         }
     }
+
+    pub fn chunks(&self, chunk_size: usize) -> core::slice::Chunks<'_, TokenTree> {
+        self.0.chunks(chunk_size)
+    }
 }
 
 /// By-reference iterator over a [`TokenStream`], that produces `&TokenTree`
src/librustdoc/clean/utils.rs (1 addition, 2 deletions)
@@ -594,9 +594,8 @@ pub(super) fn display_macro_source(
     def_id: DefId,
     vis: ty::Visibility<DefId>,
 ) -> String {
-    let tts: Vec<_> = def.body.tokens.clone().into_trees().collect();
     // Extract the spans of all matchers. They represent the "interface" of the macro.
-    let matchers = tts.chunks(4).map(|arm| &arm[0]);
+    let matchers = def.body.tokens.chunks(4).map(|arm| &arm[0]);
 
     if def.macro_rules {
         format!("macro_rules! {} {{\n{}}}", name, render_macro_arms(cx.tcx, matchers, ";"))