Rollup merge of #80784 - petrochenkov:nontspan, r=Aaron1011
rustc_parse: Better spans for synthesized token streams

I think using the nonterminal's own span when synthesizing its tokens is a better approximation than using `DUMMY_SP`, or than using the attribute span as #79472 did in `expand.rs`.

r? `@Aaron1011`
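
For illustration, here is a minimal, self-contained sketch of the pattern this change moves to (this is not rustc code; `Span`, `Nonterminal`, and `fake_token_stream` below are simplified stand-ins): instead of threading a separate `Span` argument into token-stream synthesis, the span is taken from the nonterminal itself via a public `span()` accessor.

```rust
// Simplified stand-ins for the real rustc types; for illustration only.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span {
    lo: u32,
    hi: u32,
}

#[allow(dead_code)]
enum Nonterminal {
    Item { span: Span },
    Block { span: Span },
}

impl Nonterminal {
    // Analogous to making `Nonterminal::span` public in `rustc_ast::token`.
    fn span(&self) -> Span {
        match self {
            Nonterminal::Item { span } | Nonterminal::Block { span } => *span,
        }
    }
}

// Before this change, callers passed a span explicitly (often `DUMMY_SP`):
//     fake_token_stream(sess, nt, span)
// After it, the span comes from the nonterminal itself:
fn fake_token_stream(nt: &Nonterminal) -> Vec<(String, Span)> {
    // The real function pretty-prints the nonterminal and reparses it;
    // here we just attach the nonterminal's own span to a dummy token.
    vec![("synthesized_token".to_string(), nt.span())]
}

fn main() {
    let nt = Nonterminal::Item { span: Span { lo: 10, hi: 20 } };
    let tokens = fake_token_stream(&nt);
    assert_eq!(tokens[0].1, Span { lo: 10, hi: 20 });
    println!("{:?}", tokens);
}
```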
JohnTitor authored Jan 7, 2021
2 parents 695f18e + 0dab076 commit 695f878
Showing 9 changed files with 151 additions and 167 deletions.
compiler/rustc_ast/src/token.rs (1 addition, 1 deletion)

```diff
@@ -771,7 +771,7 @@ impl fmt::Display for NonterminalKind {
 }
 
 impl Nonterminal {
-    fn span(&self) -> Span {
+    pub fn span(&self) -> Span {
         match self {
            NtItem(item) => item.span,
            NtBlock(block) => block.span,
```
compiler/rustc_ast_lowering/src/lib.rs (2 additions, 8 deletions)

```diff
@@ -206,8 +206,7 @@ pub trait ResolverAstLowering {
     ) -> LocalDefId;
 }
 
-type NtToTokenstream =
-    fn(&Nonterminal, &ParseSess, Span, CanSynthesizeMissingTokens) -> TokenStream;
+type NtToTokenstream = fn(&Nonterminal, &ParseSess, CanSynthesizeMissingTokens) -> TokenStream;
 
 /// Context of `impl Trait` in code, which determines whether it is allowed in an HIR subtree,
 /// and if so, what meaning it has.
@@ -417,12 +416,7 @@ impl<'a> TokenStreamLowering<'a> {
     fn lower_token(&mut self, token: Token) -> TokenStream {
         match token.kind {
             token::Interpolated(nt) => {
-                let tts = (self.nt_to_tokenstream)(
-                    &nt,
-                    self.parse_sess,
-                    token.span,
-                    self.synthesize_tokens,
-                );
+                let tts = (self.nt_to_tokenstream)(&nt, self.parse_sess, self.synthesize_tokens);
                 TokenTree::Delimited(
                     DelimSpan::from_single(token.span),
                     DelimToken::NoDelim,
```
compiler/rustc_expand/src/base.rs (1 addition, 1 deletion)

```diff
@@ -141,7 +141,7 @@ impl Annotatable {
     }
 
     crate fn into_tokens(self, sess: &ParseSess) -> TokenStream {
-        nt_to_tokenstream(&self.into_nonterminal(), sess, DUMMY_SP, CanSynthesizeMissingTokens::No)
+        nt_to_tokenstream(&self.into_nonterminal(), sess, CanSynthesizeMissingTokens::No)
     }
 
     pub fn expect_item(self) -> P<ast::Item> {
```
compiler/rustc_expand/src/expand.rs (0 additions, 1 deletion)

```diff
@@ -743,7 +743,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 AttrStyle::Inner => rustc_parse::fake_token_stream(
                     &self.cx.sess.parse_sess,
                     &item.into_nonterminal(),
-                    span,
                 ),
             };
             let attr_item = attr.unwrap_normal_item();
```
compiler/rustc_expand/src/proc_macro.rs (1 addition, 6 deletions)

```diff
@@ -94,12 +94,7 @@ impl MultiItemModifier for ProcMacroDerive {
         let input = if item.pretty_printing_compatibility_hack() {
             TokenTree::token(token::Interpolated(Lrc::new(item)), DUMMY_SP).into()
         } else {
-            nt_to_tokenstream(
-                &item,
-                &ecx.sess.parse_sess,
-                DUMMY_SP,
-                CanSynthesizeMissingTokens::Yes,
-            )
+            nt_to_tokenstream(&item, &ecx.sess.parse_sess, CanSynthesizeMissingTokens::Yes)
         };
 
         let server = proc_macro_server::Rustc::new(ecx);
```
compiler/rustc_expand/src/proc_macro_server.rs (1 addition, 1 deletion)

```diff
@@ -179,7 +179,7 @@ impl FromInternal<(TreeAndSpacing, &'_ ParseSess, &'_ mut Vec<Self>)>
         {
             TokenTree::Ident(Ident::new(sess, name.name, is_raw, name.span))
         } else {
-            let stream = nt_to_tokenstream(&nt, sess, span, CanSynthesizeMissingTokens::No);
+            let stream = nt_to_tokenstream(&nt, sess, CanSynthesizeMissingTokens::No);
             TokenTree::Group(Group {
                 delimiter: Delimiter::None,
                 stream,
```
compiler/rustc_parse/src/lib.rs (8 additions, 12 deletions)

```diff
@@ -236,7 +236,6 @@ pub fn parse_in<'a, T>(
 pub fn nt_to_tokenstream(
     nt: &Nonterminal,
     sess: &ParseSess,
-    span: Span,
     synthesize_tokens: CanSynthesizeMissingTokens,
 ) -> TokenStream {
     // A `Nonterminal` is often a parsed AST item. At this point we now
@@ -256,11 +255,9 @@ pub fn nt_to_tokenstream(
         |tokens: Option<&LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
 
     let tokens = match *nt {
-        Nonterminal::NtItem(ref item) => {
-            prepend_attrs(sess, &item.attrs, nt, span, item.tokens.as_ref())
-        }
+        Nonterminal::NtItem(ref item) => prepend_attrs(sess, &item.attrs, nt, item.tokens.as_ref()),
         Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()),
-        Nonterminal::NtStmt(ref stmt) => prepend_attrs(sess, stmt.attrs(), nt, span, stmt.tokens()),
+        Nonterminal::NtStmt(ref stmt) => prepend_attrs(sess, stmt.attrs(), nt, stmt.tokens()),
         Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()),
         Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.as_ref()),
         Nonterminal::NtIdent(ident, is_raw) => {
@@ -277,31 +274,30 @@
             if expr.tokens.is_none() {
                 debug!("missing tokens for expr {:?}", expr);
             }
-            prepend_attrs(sess, &expr.attrs, nt, span, expr.tokens.as_ref())
+            prepend_attrs(sess, &expr.attrs, nt, expr.tokens.as_ref())
         }
     };
 
     if let Some(tokens) = tokens {
         return tokens;
     } else if matches!(synthesize_tokens, CanSynthesizeMissingTokens::Yes) {
-        return fake_token_stream(sess, nt, span);
+        return fake_token_stream(sess, nt);
     } else {
         let pretty = rustc_ast_pretty::pprust::nonterminal_to_string_no_extra_parens(&nt);
-        panic!("Missing tokens at {:?} for nt {:?}", span, pretty);
+        panic!("Missing tokens for nt {:?}", pretty);
     }
 }
 
-pub fn fake_token_stream(sess: &ParseSess, nt: &Nonterminal, span: Span) -> TokenStream {
+pub fn fake_token_stream(sess: &ParseSess, nt: &Nonterminal) -> TokenStream {
     let source = pprust::nonterminal_to_string(nt);
     let filename = FileName::macro_expansion_source_code(&source);
-    parse_stream_from_source_str(filename, source, sess, Some(span))
+    parse_stream_from_source_str(filename, source, sess, Some(nt.span()))
 }
 
 fn prepend_attrs(
     sess: &ParseSess,
     attrs: &[ast::Attribute],
     nt: &Nonterminal,
-    span: Span,
     tokens: Option<&tokenstream::LazyTokenStream>,
 ) -> Option<tokenstream::TokenStream> {
     if attrs.is_empty() {
@@ -312,7 +308,7 @@ fn prepend_attrs(
         // FIXME: Correctly handle tokens for inner attributes.
         // For now, we fall back to reparsing the original AST node
         if attr.style == ast::AttrStyle::Inner {
-            return Some(fake_token_stream(sess, nt, span));
+            return Some(fake_token_stream(sess, nt));
        }
         builder.push(attr.tokens());
     }
```
(Diffs for the remaining 2 of the 9 changed files did not load and are not shown above.)
