Remove Option from TokenStream #65261

Merged · 3 commits · Oct 15, 2019
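This PR replaces every use of `TokenStream::empty()` (and two `TokenStream::new(vec![])` calls in the parser) with `TokenStream::default()`. The underlying change, visible most clearly in the `noop_visit_tts` diff below, is that the stream's token-tree vector is no longer wrapped in an `Option`: an empty stream is now simply a stream whose vector is empty. A minimal before/after sketch with simplified stand-in types (the `Lrc` alias and element types here are assumptions inferred from the diffs, not the exact rustc definitions):

use std::sync::Arc;

// Stand-ins for rustc-internal types, for illustration only.
type Lrc<T> = Arc<T>;
struct TokenTree;
type TreeAndJoint = (TokenTree, /* is_joint */ bool);

// Before: the empty stream was `None`, and callers wrote `TokenStream::empty()`.
struct OldTokenStream(Option<Lrc<Vec<TreeAndJoint>>>);

impl OldTokenStream {
    fn empty() -> Self {
        OldTokenStream(None)
    }
}

// After: the `Option` is gone; the empty stream is just an empty vector, so
// a derived `Default` replaces the hand-written `empty()` constructor.
#[derive(Default)]
struct NewTokenStream(Lrc<Vec<TreeAndJoint>>);

fn main() {
    assert!(OldTokenStream::empty().0.is_none());
    assert!(NewTokenStream::default().0.is_empty());
}
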
2 changes: 1 addition & 1 deletion src/libsyntax/attr/mod.rs
@@ -551,7 +551,7 @@ impl MetaItem {
 impl MetaItemKind {
     pub fn tokens(&self, span: Span) -> TokenStream {
         match *self {
-            MetaItemKind::Word => TokenStream::empty(),
+            MetaItemKind::Word => TokenStream::default(),
             MetaItemKind::NameValue(ref lit) => {
                 let mut vec = vec![TokenTree::token(token::Eq, span).into()];
                 lit.tokens().append_to_tree_and_joint_vec(&mut vec);

4 changes: 2 additions & 2 deletions src/libsyntax/ext/expand.rs
@@ -671,12 +671,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 }
             }
             Some(TokenTree::Token(..)) => {}
-            None => return TokenStream::empty(),
+            None => return TokenStream::default(),
         }
         self.cx.span_err(span, "custom attribute invocations must be \
             of the form `#[foo]` or `#[foo(..)]`, the macro name must only be \
             followed by a delimiter token");
-        TokenStream::empty()
+        TokenStream::default()
     }

     fn gate_proc_macro_attr_item(&self, span: Span, item: &Annotatable) {

2 changes: 1 addition & 1 deletion src/libsyntax/ext/mbe/transcribe.rs
@@ -95,7 +95,7 @@ pub(super) fn transcribe(
 ) -> TokenStream {
     // Nothing for us to transcribe...
     if src.is_empty() {
-        return TokenStream::empty();
+        return TokenStream::default();
     }

     // We descend into the RHS (`src`), expanding things as we go. This stack contains the things

16 changes: 8 additions & 8 deletions src/libsyntax/ext/placeholders.rs
@@ -15,7 +15,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
     fn mac_placeholder() -> ast::Mac {
         ast::Mac {
             path: ast::Path { span: DUMMY_SP, segments: Vec::new() },
-            tts: TokenStream::empty().into(),
+            tts: TokenStream::default().into(),
             delim: ast::MacDelimiter::Brace,
             span: DUMMY_SP,
             prior_type_ascription: None,
@@ -32,12 +32,12 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
         attrs: ThinVec::new(),
         kind: ast::ExprKind::Mac(mac_placeholder()),
     });
-    let ty = P(ast::Ty {
+    let ty = || P(ast::Ty {
         id,
         kind: ast::TyKind::Mac(mac_placeholder()),
         span,
     });
-    let pat = P(ast::Pat {
+    let pat = || P(ast::Pat {
         id,
         kind: ast::PatKind::Mac(mac_placeholder()),
         span,
@@ -83,7 +83,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
                 body: expr_placeholder(),
                 guard: None,
                 id,
-                pat,
+                pat: pat(),
                 span,
                 is_placeholder: true,
             }
@@ -105,7 +105,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
                 id,
                 ident,
                 is_shorthand: false,
-                pat,
+                pat: pat(),
                 span,
                 is_placeholder: true,
             }
@@ -124,9 +124,9 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
             ast::Param {
                 attrs: Default::default(),
                 id,
-                pat,
+                pat: pat(),
                 span,
-                ty,
+                ty: ty(),
                 is_placeholder: true,
             }
         ]),
@@ -136,7 +136,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
                 id,
                 ident: None,
                 span,
-                ty,
+                ty: ty(),
                 vis,
                 is_placeholder: true,
             }

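A related tweak in placeholders.rs: `ty` and `pat` change from eagerly built values to closures, so a fresh node is only constructed in the arms that actually use it (via `ty()` / `pat()`), presumably to avoid building placeholder nodes that a given fragment kind never needs. A standalone sketch of the pattern (the type names below are simplified stand-ins, not the rustc definitions):

#![allow(dead_code)]

// Simplified stand-ins for the AST nodes built in `placeholder`.
struct Pat;
struct Arm { pat: Pat }
struct FieldPat { pat: Pat }

fn main() {
    // Before: `let pat = P(ast::Pat { .. });` built the node up front on
    // every call, whether or not the chosen arm used it.
    // After: `pat` is a closure, and only the arm that runs calls `pat()`
    // to build the node it needs.
    let pat = || Pat;
    let _arm = Arm { pat: pat() };
    let _field_pat = FieldPat { pat: pat() };
}
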
2 changes: 1 addition & 1 deletion src/libsyntax/ext/proc_macro_server.rs
@@ -393,7 +393,7 @@ impl server::Types for Rustc<'_> {

 impl server::TokenStream for Rustc<'_> {
     fn new(&mut self) -> Self::TokenStream {
-        TokenStream::empty()
+        TokenStream::default()
     }
     fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
         stream.is_empty()

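For context, these server methods back the public proc-macro `TokenStream` type, whose `new()` constructor produces an empty stream. A small illustration using the proc-macro2 crate, which mirrors that API (this example assumes proc-macro2 as a dependency; it is not part of the PR):

use std::str::FromStr;
use proc_macro2::TokenStream;

fn main() {
    // `TokenStream::new()` is the empty stream, matching the server's `new` above.
    let empty = TokenStream::new();
    assert!(empty.is_empty());

    // Parsing some tokens gives a non-empty stream for contrast.
    let tokens = TokenStream::from_str("a + b").expect("valid token stream");
    assert!(!tokens.is_empty());
}
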
6 changes: 2 additions & 4 deletions src/libsyntax/mut_visit.rs
@@ -610,10 +610,8 @@ pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
 }

 pub fn noop_visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T) {
-    visit_opt(tts, |tts| {
-        let tts = Lrc::make_mut(tts);
-        visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
-    })
+    let tts = Lrc::make_mut(tts);
+    visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree));
 }

 // Applies ident visitor if it's an ident; applies other visits to interpolated nodes.

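The `noop_visit_tts` change above is where the new representation shows most clearly: with the `Option` gone, the visitor no longer needs `visit_opt` to skip the `None` (empty) case; it just mutates the vector, and an empty stream visits nothing. A runnable approximation with stand-in types (the shapes of `TokenTree`, `Lrc`, and the visitor callback are assumptions, not the rustc API):

use std::sync::Arc;

type Lrc<T> = Arc<T>;

#[derive(Clone)]
struct TokenTree { text: String }

struct TokenStream(Lrc<Vec<(TokenTree, bool)>>);

// Clone the shared vector only if it is shared, then visit every tree in
// place; an empty stream is just an empty vector and visits nothing.
fn noop_visit_tts(TokenStream(tts): &mut TokenStream, visit_tt: &mut impl FnMut(&mut TokenTree)) {
    let tts = Lrc::make_mut(tts);
    for (tree, _is_joint) in tts.iter_mut() {
        visit_tt(tree);
    }
}

fn main() {
    let mut stream = TokenStream(Lrc::new(vec![(TokenTree { text: "ident".into() }, false)]));
    noop_visit_tts(&mut stream, &mut |tree| tree.text.make_ascii_uppercase());
    assert_eq!(stream.0[0].0.text, "IDENT");
}
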
2 changes: 1 addition & 1 deletion src/libsyntax/parse/attr.rs
@@ -203,7 +203,7 @@ impl<'a> Parser<'a> {
             };
             TokenStream::from_streams(smallvec![eq.into(), tokens])
         } else {
-            TokenStream::empty()
+            TokenStream::default()
         };
         ast::AttrItem { path, tokens }
     })

4 changes: 2 additions & 2 deletions src/libsyntax/parse/parser.rs
@@ -1548,7 +1548,7 @@ impl<'a> Parser<'a> {
             // This can happen due to a bad interaction of two unrelated recovery mechanisms with
             // mismatched delimiters *and* recovery lookahead on the likely typo `pub ident(`
             // (#62881).
-            return Ok((ret?, TokenStream::new(vec![])));
+            return Ok((ret?, TokenStream::default()));
         } else {
             &mut self.token_cursor.stack[prev].last_token
         };
@@ -1563,7 +1563,7 @@ impl<'a> Parser<'a> {
                 // This can happen due to a bad interaction of two unrelated recovery mechanisms
                 // with mismatched delimiters *and* recovery lookahead on the likely typo
                 // `pub ident(` (#62895, different but similar to the case above).
-                return Ok((ret?, TokenStream::new(vec![])));
+                return Ok((ret?, TokenStream::default()));
             }
         };