
Commit 7ae0833

Move doc comment desugaring into the parser.

1 parent e2b3fec · commit 7ae0833

File tree: 5 files changed, +16 −33 lines
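For context, the desugaring this commit is about is the usual Rust equivalence between doc comments and `doc` attributes: the lexer's `DocComment` token for `/// text` stands for the attribute `#[doc = " text"]`. A minimal, self-contained sketch of that equivalence (ordinary user code, not part of this commit; the function names are illustrative):

// These two items carry the same documentation attribute.
/// Adds one.
pub fn with_doc_comment(x: u32) -> u32 { x + 1 }

#[doc = " Adds one."]
pub fn with_doc_attribute(x: u32) -> u32 { x + 1 }

fn main() {}

Before this commit, reading doc comments "like any other attribute" (per the doc comment removed from transcribe.rs below) was the TtReader's job, gated by its `desugar_doc_comments` flag; after it, the flag and the desugaring live on the Parser itself.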

src/libsyntax/ext/tt/macro_parser.rs (+1 −1)

@@ -279,7 +279,7 @@ pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
 }

 pub fn parse(sess: &ParseSess, rdr: TtReader, ms: &[TokenTree]) -> NamedParseResult {
-    let mut parser = Parser::new(sess, Box::new(rdr));
+    let mut parser = Parser::new_with_doc_flag(sess, Box::new(rdr), true);
     let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), None, parser.span.lo));

     loop {

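Macro matching is the one consumer that still wants the desugaring, which is why `parse` above opts in with `Parser::new_with_doc_flag(..., true)`: a matcher written for attributes is expected to accept doc comments as well. A small user-level sketch of the behaviour this preserves (the `count_attrs!` macro and the item names are illustrative, not from the commit):

// A matcher written for attributes also accepts doc comments, because the
// macro parser sees `/// text` desugared to `#[doc = " text"]`.
macro_rules! count_attrs {
    ($(#[$attr:meta])* $item:item) => {
        [$(stringify!($attr)),*].len()
    };
}

fn main() {
    let n = count_attrs! {
        /// First line of documentation.
        /// Second line of documentation.
        #[allow(dead_code)]
        fn documented() {}
    };
    assert_eq!(n, 3); // two desugared doc comments + one ordinary attribute
}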
src/libsyntax/ext/tt/transcribe.rs (+1 −27)

@@ -12,9 +12,7 @@ use self::LockstepIterSize::*;
 use ast::Ident;
 use errors::{Handler, DiagnosticBuilder};
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
-use parse::token::{DocComment, MatchNt, SubstNt};
-use parse::token::{Token, NtIdent};
-use parse::token;
+use parse::token::{self, MatchNt, SubstNt, Token, NtIdent};
 use parse::lexer::TokenAndSpan;
 use syntax_pos::{Span, DUMMY_SP};
 use tokenstream::{self, TokenTree};
@@ -48,7 +46,6 @@ pub struct TtReader<'a> {
     pub cur_span: Span,
     pub next_tok: Option<TokenAndSpan>,
     /// Transform doc comments. Only useful in macro invocations
-    pub desugar_doc_comments: bool,
     pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
 }

@@ -59,20 +56,6 @@ pub fn new_tt_reader(sp_diag: &Handler,
                      interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                      src: Vec<tokenstream::TokenTree>)
                      -> TtReader {
-    new_tt_reader_with_doc_flag(sp_diag, interp, src, false)
-}
-
-/// The extra `desugar_doc_comments` flag enables reading doc comments
-/// like any other attribute which consists of `meta` and surrounding #[ ] tokens.
-///
-/// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
-/// (and should) be None.
-pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
-                                   interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
-                                   src: Vec<tokenstream::TokenTree>,
-                                   desugar_doc_comments: bool)
-                                   -> TtReader {
     let mut r = TtReader {
         sp_diag: sp_diag,
         stack: SmallVector::one(TtFrame {
@@ -91,7 +74,6 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
         },
         repeat_idx: Vec::new(),
         repeat_len: Vec::new(),
-        desugar_doc_comments: desugar_doc_comments,
        /* dummy values, never read: */
        cur_tok: token::Eof,
        cur_span: DUMMY_SP,
@@ -312,14 +294,6 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                });
                // if this could be 0-length, we'd need to potentially recur here
            }
-           TokenTree::Token(sp, DocComment(name)) if r.desugar_doc_comments => {
-               r.stack.push(TtFrame {
-                   forest: TokenTree::Token(sp, DocComment(name)),
-                   idx: 0,
-                   dotdotdoted: false,
-                   sep: None
-               });
-           }
            TokenTree::Token(sp, tok) => {
                r.cur_span = sp;
                r.cur_tok = tok;

src/libsyntax/parse/lexer/mod.rs (+1 −1)

@@ -22,7 +22,7 @@ use std::char;
 use std::mem::replace;
 use std::rc::Rc;

-pub use ext::tt::transcribe::{TtReader, new_tt_reader, new_tt_reader_with_doc_flag};
+pub use ext::tt::transcribe::{TtReader, new_tt_reader};

 pub mod comments;
 mod unicode_chars;

src/libsyntax/parse/parser.rs (+11 −0)

@@ -211,6 +211,7 @@ pub struct Parser<'a> {
     pub root_module_name: Option<String>,
     pub expected_tokens: Vec<TokenType>,
     pub tts: Vec<(TokenTree, usize)>,
+    pub desugar_doc_comments: bool,
 }

 #[derive(PartialEq, Eq, Clone)]
@@ -275,6 +276,11 @@ impl From<P<Expr>> for LhsExpr {

 impl<'a> Parser<'a> {
     pub fn new(sess: &'a ParseSess, rdr: Box<Reader+'a>) -> Self {
+        Parser::new_with_doc_flag(sess, rdr, false)
+    }
+
+    pub fn new_with_doc_flag(sess: &'a ParseSess, rdr: Box<Reader+'a>, desugar_doc_comments: bool)
+                             -> Self {
         let mut parser = Parser {
             reader: rdr,
             sess: sess,
@@ -294,6 +300,7 @@ impl<'a> Parser<'a> {
             root_module_name: None,
             expected_tokens: Vec::new(),
             tts: Vec::new(),
+            desugar_doc_comments: desugar_doc_comments,
         };

         let tok = parser.next_tok();
@@ -326,6 +333,10 @@ impl<'a> Parser<'a> {
         loop {
             let nt = match tok.tok {
                 token::Interpolated(ref nt) => nt.clone(),
+                token::DocComment(name) if self.desugar_doc_comments => {
+                    self.tts.push((TokenTree::Token(tok.sp, token::DocComment(name)), 0));
+                    continue 'outer
+                }
                 _ => return tok,
             };
             match *nt {

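The new arm queues the `DocComment` token on `self.tts` instead of returning it; the intent, going by the commit title and the documentation removed from transcribe.rs, is that when `desugar_doc_comments` is set the parser then consumes the comment in its attribute form rather than verbatim. As a purely illustrative, standalone sketch of the token shape involved (hypothetical helper, not rustc code):

// Hypothetical illustration (not the rustc implementation): the
// attribute-shaped token sequence a doc comment stands for once desugared.
// `/// hello` behaves like `#[doc = " hello"]`.
fn desugared_tokens(doc_comment: &str) -> Vec<String> {
    // Everything after the leading slashes, including the leading space,
    // becomes the string literal of the `doc` attribute.
    let text = doc_comment.trim_start_matches('/');
    vec![
        "#".to_string(),
        "[".to_string(),
        "doc".to_string(),
        "=".to_string(),
        format!("{:?}", text), // e.g. "\" hello\""
        "]".to_string(),
    ]
}

fn main() {
    assert_eq!(
        desugared_tokens("/// hello"),
        ["#", "[", "doc", "=", "\" hello\"", "]"]
    );
}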
src/libsyntax/tokenstream.rs (+2 −4)

@@ -214,11 +214,9 @@ impl TokenTree {
                  mtch: &[TokenTree],
                  tts: &[TokenTree])
                  -> macro_parser::NamedParseResult {
+        let diag = &cx.parse_sess().span_diagnostic;
         // `None` is because we're not interpolating
-        let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
-                                                         None,
-                                                         tts.iter().cloned().collect(),
-                                                         true);
+        let arg_rdr = lexer::new_tt_reader(diag, None, tts.iter().cloned().collect());
         macro_parser::parse(cx.parse_sess(), arg_rdr, mtch)
     }
