diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 5f275b7003873..3b7159cf07d23 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -678,8 +678,8 @@ impl EmitterWriter { // | | something about `foo` // | something about `fn foo()` annotations_position.sort_by(|a, b| { - // Decreasing order - a.1.len().cmp(&b.1.len()).reverse() + // Decreasing order. When `a` and `b` are the same length, prefer `Primary`. + (a.1.len(), !a.1.is_primary).cmp(&(b.1.len(), !b.1.is_primary)).reverse() }); // Write the underlines. diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index 87a32b5a53e7a..0b8d5a93c9543 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -465,7 +465,7 @@ impl cstore::CStore { let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body); let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION); - let body = source_file_to_stream(&sess.parse_sess, source_file, None); + let body = source_file_to_stream(&sess.parse_sess, source_file, None).0; // Mark the attrs as used let attrs = data.get_item_attrs(id.index, sess); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index aa47d5bf669bc..764e60ad1509a 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -66,6 +66,7 @@ pub struct StringReader<'a> { /// The raw source span which *does not* take `override_span` into account span_src_raw: Span, open_braces: Vec<(token::DelimToken, Span)>, + crate unmatched_braces: Vec<(token::DelimToken, Span)>, /// The type and spans for all braces /// /// Used only for error recovery when arriving to EOF with mismatched braces. 
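The lexer-side change above gives `StringReader` a second list, `unmatched_braces`: openers that are still sitting in `open_braces` when EOF is reached, or that get popped while recovering from a wrong closing delimiter, are now remembered rather than discarded, so the parser can later suggest where their closers belong. As a minimal, self-contained sketch of that bookkeeping, working over plain characters rather than the compiler's `token::DelimToken`/`Span` types (the `Delim` enum and `unmatched_delims` function below are invented for illustration, not part of the patch):

#[derive(Clone, Copy, Debug, PartialEq)]
enum Delim { Paren, Bracket, Brace }

/// Report the byte offsets of open delimiters that never found their closer.
fn unmatched_delims(src: &str) -> Vec<(Delim, usize)> {
    let mut open: Vec<(Delim, usize)> = Vec::new();      // plays the role of `open_braces`
    let mut unmatched: Vec<(Delim, usize)> = Vec::new(); // plays the role of `unmatched_braces`
    for (pos, ch) in src.char_indices() {
        match ch {
            '(' => open.push((Delim::Paren, pos)),
            '[' => open.push((Delim::Bracket, pos)),
            '{' => open.push((Delim::Brace, pos)),
            ')' | ']' | '}' => {
                let want = match ch {
                    ')' => Delim::Paren,
                    ']' => Delim::Bracket,
                    _ => Delim::Brace,
                };
                match open.pop() {
                    Some((d, _)) if d == want => {}          // properly closed
                    Some(opener) => unmatched.push(opener),  // wrong closer: remember the opener
                    None => {}                               // stray closer, nothing to record
                }
            }
            _ => {}
        }
    }
    // Anything still open at end of input is unmatched (the `token::Eof` arm).
    unmatched.extend(open);
    unmatched
}

fn main() {
    // Both `(`s in `foo(bar(` are still open at EOF and get reported.
    println!("{:?}", unmatched_delims("foo(bar("));
}

The real lexer stores `Span`s rather than byte offsets so the diagnostics can label the exact opener in the source.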
@@ -221,6 +222,7 @@ impl<'a> StringReader<'a> { span_src_raw: syntax_pos::DUMMY_SP, open_braces: Vec::new(), matching_delim_spans: Vec::new(), + unmatched_braces: Vec::new(), override_span, last_unclosed_found_span: None, } diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 8047ab0146514..ee6caad15cfc3 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -49,8 +49,9 @@ impl<'a> StringReader<'a> { token::Eof => { let msg = "this file contains an un-closed delimiter"; let mut err = self.sess.span_diagnostic.struct_span_err(self.span, msg); - for &(_, sp) in &self.open_braces { + for &(tok, sp) in &self.open_braces { err.span_label(sp, "un-closed delimiter"); + self.unmatched_braces.push((tok, sp)); } if let Some((delim, _)) = self.open_braces.last() { @@ -134,7 +135,7 @@ impl<'a> StringReader<'a> { } err.emit(); } - self.open_braces.pop().unwrap(); + self.unmatched_braces.push(self.open_braces.pop().unwrap()); // If the incorrect delimiter matches an earlier opening // delimiter, then don't consume it (it can be used to diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 5c6d5816a472b..eedfb5445016f 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -168,10 +168,13 @@ crate fn parse_stmt_from_source_str(name: FileName, source: String, sess: &Parse new_parser_from_source_str(sess, name, source).parse_stmt() } -pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &ParseSess, - override_span: Option) - -> TokenStream { - source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span) +pub fn parse_stream_from_source_str( + name: FileName, + source: String, + sess: &ParseSess, + override_span: Option, +) -> TokenStream { + source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span).0 } // Create a new parser from a source string @@ -191,11 +194,13 @@ pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> /// Given a session, a crate config, a path, and a span, add /// the file at the given path to the source_map, and return a parser. /// On an error, use the given span as the source of the problem. 
-crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, - path: &Path, - directory_ownership: DirectoryOwnership, - module_name: Option, - sp: Span) -> Parser<'a> { +crate fn new_sub_parser_from_file<'a>( + sess: &'a ParseSess, + path: &Path, + directory_ownership: DirectoryOwnership, + module_name: Option, + sp: Span, +) -> Parser<'a> { let mut p = source_file_to_parser(sess, file_to_source_file(sess, path, Some(sp))); p.directory.ownership = directory_ownership; p.root_module_name = module_name; @@ -203,14 +208,14 @@ crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, } /// Given a source_file and config, return a parser -fn source_file_to_parser(sess: & ParseSess, source_file: Lrc) -> Parser { +fn source_file_to_parser(sess: &ParseSess, source_file: Lrc) -> Parser { let end_pos = source_file.end_pos; - let mut parser = stream_to_parser(sess, source_file_to_stream(sess, source_file, None)); - + let (tts, open_braces) = source_file_to_stream(sess, source_file, None); + let mut parser = stream_to_parser(sess, tts); + parser.open_braces = open_braces; if parser.token == token::Eof && parser.span.is_dummy() { parser.span = Span::new(end_pos, end_pos, parser.span.ctxt()); } - parser } @@ -240,12 +245,15 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option) } /// Given a source_file, produce a sequence of token-trees -pub fn source_file_to_stream(sess: &ParseSess, - source_file: Lrc, - override_span: Option) -> TokenStream { +pub fn source_file_to_stream( + sess: &ParseSess, + source_file: Lrc, + override_span: Option, +) -> (TokenStream, Vec<(token::DelimToken, Span)>) { let mut srdr = lexer::StringReader::new(sess, source_file, override_span); srdr.real_token(); - panictry!(srdr.parse_all_token_trees()) + let tt = panictry!(srdr.parse_all_token_trees()); + (tt, srdr.unmatched_braces) } /// Given stream and the `ParseSess`, produce a parser diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index f57fca2cfcf60..09502399ac835 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -250,6 +250,8 @@ pub struct Parser<'a> { desugar_doc_comments: bool, /// Whether we should configure out of line modules as we parse. pub cfg_mods: bool, + /// Unmatched open delimiters, used for parse recovery when multiple tokens could be valid. + crate open_braces: Vec<(token::DelimToken, Span)>, } @@ -569,6 +571,7 @@ impl<'a> Parser<'a> { }, desugar_doc_comments, cfg_mods: true, + open_braces: Vec::new(), }; let tok = parser.next_tok(); @@ -635,13 +638,64 @@ impl<'a> Parser<'a> { } } - /// Expect and consume the token t. Signal an error if - /// the next token is not t. - pub fn expect(&mut self, t: &token::Token) -> PResult<'a, ()> { + fn recover_closing_delimiter( + &mut self, + tokens: &[token::Token], + mut err: DiagnosticBuilder<'a>, + ) -> PResult<'a, ()> { + let mut pos = None; + let mut tokens: Vec = tokens.to_vec(); + tokens.extend(self.expected_tokens.iter().filter_map(|t| match t { + TokenType::Token(t) => Some((*t).clone()), + _ => None, + })); + // we want to use the last closing delim that would apply + for (i, (delim, span)) in self.open_braces.iter().enumerate().rev() { + if tokens.contains(&token::CloseDelim(*delim)) && self.span > *span { + pos = Some(i); + break; + } + } + match pos { + Some(pos) => { + // Recover and assume that the detected unclosed delimiter was meant for + // this location. Emit the diagnostic and act as if the delimiter was + // present for the parser's sake. 
+ + // Don't attempt to recover from this unclosed delimiter more than once. + let (delim, open_sp) = self.open_braces.remove(pos); + let delim = TokenType::Token(token::CloseDelim(delim)); + + // We want to suggest the inclusion of the closing delimiter where it makes + // the most sense, which is immediately after the last token: + // + // {foo(bar {}} + // - ^ + // | | + // | expected one of `)`, <...> + // | help: ...missing `)` might belong here + // you might have meant to close this... + err.span_label(open_sp, "if you meant to close this..."); + err.span_suggestion_short_with_applicability( + self.sess.source_map().next_point(self.prev_span), + &format!("...the missing {} may belong here", delim.to_string()), + delim.to_string(), + Applicability::MaybeIncorrect, + ); + err.emit(); + self.expected_tokens.clear(); // reduce errors + Ok(()) + } + _ => Err(err), + } + } + + /// Expect and consume the token `t`. Signal an error if the next token is not `t`. + pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> { if self.expected_tokens.is_empty() { if self.token == *t { self.bump(); - Ok(()) + Ok(false) } else { let token_str = pprust::token_to_string(t); let this_token_str = self.this_token_to_string(); @@ -668,7 +722,7 @@ impl<'a> Parser<'a> { err.span_label(self.span, "unexpected token"); } } - Err(err) + self.recover_closing_delimiter(&[t.clone()], err).map(|_| true) } } else { self.expect_one_of(slice::from_ref(t), &[]) @@ -678,9 +732,11 @@ impl<'a> Parser<'a> { /// Expect next token to be edible or inedible token. If edible, /// then consume it; if inedible, then return without consuming /// anything. Signal a fatal error if next token is unexpected. - fn expect_one_of(&mut self, - edible: &[token::Token], - inedible: &[token::Token]) -> PResult<'a, ()>{ + fn expect_one_of( + &mut self, + edible: &[token::Token], + inedible: &[token::Token], + ) -> PResult<'a, bool /* recovered */>{ fn tokens_to_string(tokens: &[TokenType]) -> String { let mut i = tokens.iter(); // This might be a sign we need a connect method on Iterator. @@ -700,10 +756,10 @@ impl<'a> Parser<'a> { } if edible.contains(&self.token) { self.bump(); - Ok(()) + Ok(false) } else if inedible.contains(&self.token) { // leave it in the input - Ok(()) + Ok(false) } else { let mut expected = edible.iter() .map(|x| TokenType::Token(x.clone())) @@ -761,7 +817,7 @@ impl<'a> Parser<'a> { err.span_label(self.span, "unexpected token"); } } - Err(err) + self.recover_closing_delimiter(edible, err).map(|_| true) } } @@ -1060,26 +1116,30 @@ impl<'a> Parser<'a> { /// Parse a sequence, including the closing delimiter. The function /// f must consume tokens until reaching the next separator or /// closing bracket. - pub fn parse_seq_to_end(&mut self, + pub fn parse_seq_to_end( + &mut self, ket: &token::Token, sep: SeqSep, - f: F) - -> PResult<'a, Vec> where - F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, + f: F, + ) -> PResult<'a, Vec> + where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, { - let val = self.parse_seq_to_before_end(ket, sep, f)?; - self.bump(); + let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; + if !recovered { + self.bump(); + } Ok(val) } /// Parse a sequence, not including the closing delimiter. The function /// f must consume tokens until reaching the next separator or /// closing bracket. 
- pub fn parse_seq_to_before_end(&mut self, - ket: &token::Token, - sep: SeqSep, - f: F) - -> PResult<'a, Vec> + pub fn parse_seq_to_before_end( + &mut self, + ket: &token::Token, + sep: SeqSep, + f: F, + ) -> PResult<'a, (Vec, bool /* recovered */)> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T> { self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) @@ -1091,10 +1151,11 @@ impl<'a> Parser<'a> { sep: SeqSep, expect: TokenExpectType, mut f: F, - ) -> PResult<'a, Vec> + ) -> PResult<'a, (Vec, bool /* recovered */)> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T> { let mut first: bool = true; + let mut recovered = false; let mut v = vec![]; while !kets.iter().any(|k| { match expect { @@ -1110,25 +1171,38 @@ impl<'a> Parser<'a> { if first { first = false; } else { - if let Err(mut e) = self.expect(t) { - // Attempt to keep parsing if it was a similar separator - if let Some(ref tokens) = t.similar_tokens() { - if tokens.contains(&self.token) { - self.bump(); - } + match self.expect(t) { + Ok(true) => { + recovered = true; + break; } - e.emit(); - // Attempt to keep parsing if it was an omitted separator - match f(self) { - Ok(t) => { - v.push(t); - continue; - }, - Err(mut e) => { - e.cancel(); - break; + Err(mut e) => { + // Attempt to keep parsing if it was a similar separator + if let Some(ref tokens) = t.similar_tokens() { + if tokens.contains(&self.token) { + self.bump(); + } + } + e.emit(); + // Attempt to keep parsing if it was an omitted separator + match f(self) { + Ok(t) => { + v.push(t); + continue; + }, + Err(mut err) => { + err.cancel(); + let kets: Vec = kets.iter() + .map(|t| (*t).clone()) + .collect(); + if let Ok(()) = self.recover_closing_delimiter(&kets[..], e) { + recovered = true; + } + break; + } } } + _ => {} } } } @@ -1141,27 +1215,36 @@ impl<'a> Parser<'a> { break; } - let t = f(self)?; + let t = match f(self) { + Ok(t) => t, + Err(e) => { + let kets: Vec = kets.iter().map(|t| (*t).clone()).collect(); + return self.recover_closing_delimiter(&kets[..], e).map(|_| (v, true)); + } + }; v.push(t); } - Ok(v) + Ok((v, recovered)) } /// Parse a sequence, including the closing delimiter. The function /// f must consume tokens until reaching the next separator or /// closing bracket. - fn parse_unspanned_seq(&mut self, - bra: &token::Token, - ket: &token::Token, - sep: SeqSep, - f: F) - -> PResult<'a, Vec> where + fn parse_unspanned_seq( + &mut self, + bra: &token::Token, + ket: &token::Token, + sep: SeqSep, + f: F, + ) -> PResult<'a, Vec> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, { self.expect(bra)?; - let result = self.parse_seq_to_before_end(ket, sep, f)?; - self.eat(ket); + let (result, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; + if !recovered { + self.eat(ket); + } Ok(result) } @@ -1189,6 +1272,7 @@ impl<'a> Parser<'a> { self.span = next.sp; self.token = next.tok; self.expected_tokens.clear(); + self.expected_tokens.clear(); // check after each token self.process_potential_macro_variable(); } @@ -2071,12 +2155,14 @@ impl<'a> Parser<'a> { } else { // `(T, U) -> R` self.bump(); // `(` - let inputs = self.parse_seq_to_before_tokens( + let (inputs, recovered) = self.parse_seq_to_before_tokens( &[&token::CloseDelim(token::Paren)], SeqSep::trailing_allowed(token::Comma), TokenExpectType::Expect, |p| p.parse_ty())?; - self.bump(); // `)` + if !recovered { + self.bump(); // `)` + } let span = lo.to(self.prev_span); let output = if self.eat(&token::RArrow) { Some(self.parse_ty_common(false, false)?) 
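The signature changes in this hunk all thread one extra bit of information back to the caller: a `recovered` flag meaning "the closing delimiter was not actually present, recovery only pretended it was". Callers such as `parse_seq_to_end` and the `(T, U) -> R` path then skip the usual `self.bump()` / `self.eat(ket)` so they do not consume a token that never existed. A toy model of that calling convention, with a made-up `Cursor` type standing in for the real `Parser`:

struct Cursor<'a> { toks: &'a [char], pos: usize }

impl<'a> Cursor<'a> {
    fn peek(&self) -> Option<char> { self.toks.get(self.pos).copied() }
    fn bump(&mut self) { self.pos += 1; }

    /// Collect items up to, but not including, `close`. The bool reports whether
    /// we had to recover because `close` was missing from the input.
    fn seq_to_before(&mut self, close: char) -> (Vec<char>, bool) {
        let mut items = Vec::new();
        loop {
            match self.peek() {
                Some(c) if c == close => return (items, false), // real closer ahead
                Some(c) => { items.push(c); self.bump(); }
                None => return (items, true),                   // recovered: closer never appeared
            }
        }
    }
}

fn main() {
    // Think of this as the argument list of `foo(a, b` with the `)` missing.
    let toks: Vec<char> = "ab".chars().collect();
    let mut cur = Cursor { toks: &toks, pos: 0 };
    let (items, recovered) = cur.seq_to_before(')');
    if !recovered {
        cur.bump(); // only consume the `)` if it is really there
    }
    println!("parsed {:?}, recovered = {}", items, recovered);
}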
@@ -2500,6 +2586,7 @@ impl<'a> Parser<'a> { self.bump(); let mut fields = Vec::new(); let mut base = None; + let mut recovered = false; attrs.extend(self.parse_inner_attributes()?); @@ -2551,7 +2638,7 @@ impl<'a> Parser<'a> { match self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]) { - Ok(()) => {} + Ok(r) => recovered = r, Err(mut e) => { e.emit(); self.recover_stmt(); @@ -2561,7 +2648,9 @@ impl<'a> Parser<'a> { } let span = lo.to(self.span); - self.expect(&token::CloseDelim(token::Brace))?; + if !recovered { + self.expect(&token::CloseDelim(token::Brace))?; + } return Ok(self.mk_expr(span, ExprKind::Struct(pth, fields, base), attrs)); } @@ -5438,15 +5527,18 @@ impl<'a> Parser<'a> { // Parse the rest of the function parameter list. let sep = SeqSep::trailing_allowed(token::Comma); - let fn_inputs = if let Some(self_arg) = self_arg { + let (fn_inputs, recovered) = if let Some(self_arg) = self_arg { if self.check(&token::CloseDelim(token::Paren)) { - vec![self_arg] + (vec![self_arg], false) } else if self.eat(&token::Comma) { let mut fn_inputs = vec![self_arg]; - fn_inputs.append(&mut self.parse_seq_to_before_end( - &token::CloseDelim(token::Paren), sep, parse_arg_fn)? - ); - fn_inputs + let (mut inputs, recovered) = self.parse_seq_to_before_end( + &token::CloseDelim(token::Paren), + sep, + parse_arg_fn, + )?; + fn_inputs.append(&mut inputs); + (fn_inputs, recovered) } else { return self.unexpected(); } @@ -5454,8 +5546,10 @@ impl<'a> Parser<'a> { self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)? }; - // Parse closing paren and return type. - self.expect(&token::CloseDelim(token::Paren))?; + if !recovered { + // Parse closing paren and return type. + self.expect(&token::CloseDelim(token::Paren))?; + } Ok(P(FnDecl { inputs: fn_inputs, output: self.parse_ret_ty(true)?, @@ -5470,7 +5564,7 @@ impl<'a> Parser<'a> { Vec::new() } else { self.expect(&token::BinOp(token::Or))?; - let args = self.parse_seq_to_before_tokens( + let (args, _) = self.parse_seq_to_before_tokens( &[&token::BinOp(token::Or), &token::OrOr], SeqSep::trailing_allowed(token::Comma), TokenExpectType::NoExpect, @@ -7394,7 +7488,7 @@ impl<'a> Parser<'a> { // eat a matched-delimiter token tree: let (delim, tts) = self.expect_delimited_token_tree()?; if delim != MacDelimiter::Brace { - self.expect(&token::Semi)? 
+ self.expect(&token::Semi)?; } Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts, delim }))) diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index 374154e63333d..bdc3891d17c32 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -22,7 +22,7 @@ use std::path::PathBuf; pub fn string_to_stream(source_str: String) -> TokenStream { let ps = ParseSess::new(FilePathMapping::empty()); source_file_to_stream(&ps, ps.source_map() - .new_source_file(PathBuf::from("bogofile").into(), source_str), None) + .new_source_file(PathBuf::from("bogofile").into(), source_str), None).0 } /// Map string to parser (via tts) diff --git a/src/test/ui/augmented-assignments.nll.stderr b/src/test/ui/augmented-assignments.nll.stderr index 57a86227f764d..65fba1f321650 100644 --- a/src/test/ui/augmented-assignments.nll.stderr +++ b/src/test/ui/augmented-assignments.nll.stderr @@ -9,7 +9,7 @@ LL | x //~ error: use of moved value: `x` LL | | //~^ value used here after move LL | | += LL | | x; //~ value moved here - | | - + | | ^ | | | | |_____move out of `x` occurs here | borrow later used here diff --git a/src/test/ui/parser-recovery-3.rs b/src/test/ui/parser-recovery-3.rs new file mode 100644 index 0000000000000..abba340a4daa9 --- /dev/null +++ b/src/test/ui/parser-recovery-3.rs @@ -0,0 +1,24 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct A; + +impl A { + fn banana(&mut self) { + fn peach(this: &Self, foo: usize { + //~^ ERROR expected one of + //~| ERROR can't use type parameters from outer function + } + } + //~^ ERROR incorrect close delimiter + //~| ERROR expected expression +} + +fn main() {} diff --git a/src/test/ui/parser-recovery-3.stderr b/src/test/ui/parser-recovery-3.stderr new file mode 100644 index 0000000000000..320c37f6e261e --- /dev/null +++ b/src/test/ui/parser-recovery-3.stderr @@ -0,0 +1,41 @@ +error: incorrect close delimiter: `}` + --> $DIR/parser-recovery-3.rs:19:5 + | +LL | fn banana(&mut self) { + | - close delimiter possibly meant for this +LL | fn peach(this: &Self, foo: usize { + | - un-closed delimiter +... +LL | } + | ^ incorrect close delimiter + +error: expected one of `!`, `(`, `)`, `+`, `,`, `::`, or `<`, found `{` + --> $DIR/parser-recovery-3.rs:15:42 + | +LL | fn peach(this: &Self, foo: usize { + | - -^ expected one of 7 possible tokens here + | | | + | | help: ...the missing `)` may belong here + | if you meant to close this... + +error: expected expression, found `)` + --> $DIR/parser-recovery-3.rs:19:5 + | +LL | } + | ^ expected expression + +error[E0401]: can't use type parameters from outer function + --> $DIR/parser-recovery-3.rs:15:25 + | +LL | impl A { + | ---- `Self` type implicitly declared here, by this `impl` +LL | fn banana(&mut self) { +LL | fn peach(this: &Self, foo: usize { + | ^^^^ + | | + | use of type variable from outer function + | use a type here instead + +error: aborting due to 4 previous errors + +For more information about this error, try `rustc --explain E0401`. 
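`parser-recovery-3` exercises the heart of `recover_closing_delimiter` from earlier in the patch: walk the still-open delimiters from innermost to outermost and take the first one whose closing token is among the tokens the parser currently expects and whose opening span lies before the current position. A rough standalone model of that search (simplified and with invented positions; the real method works on `token::Token` and `Span`, removes the chosen entry from `open_braces`, and emits the "...may belong here" suggestion):

#[derive(Clone, Copy, Debug, PartialEq)]
enum Delim { Paren, Brace }

/// `open` lists still-unclosed delimiters with their opening positions, outermost
/// first. Returns the index of the opener the missing closer most likely belongs to.
fn candidate(open: &[(Delim, usize)], expected_closers: &[Delim], cur_pos: usize) -> Option<usize> {
    // Prefer the most recently opened delimiter, like the `.rev()` loop in the patch.
    for (i, (delim, open_pos)) in open.iter().enumerate().rev() {
        if expected_closers.contains(delim) && cur_pos > *open_pos {
            return Some(i);
        }
    }
    None
}

fn main() {
    // `fn peach(this: &Self, foo: usize {` leaves banana's `{` and peach's `(` open;
    // at the stray `{` the parser expects `)` (among other tokens), so the `(` wins.
    let open = [(Delim::Brace, 10), (Delim::Paren, 42)];
    assert_eq!(candidate(&open, &[Delim::Paren], 75), Some(1));
}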
diff --git a/src/test/ui/parser-recovery-4.rs b/src/test/ui/parser-recovery-4.rs new file mode 100644 index 0000000000000..00215e71cd949 --- /dev/null +++ b/src/test/ui/parser-recovery-4.rs @@ -0,0 +1,21 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn foo(_: usize) {} +fn bar() {} + +fn main() { + let x = 1; + foo(x + bar() + //~^ ERROR expected one of + //~| ERROR expected one of +} +//~^ ERROR incorrect close delimiter diff --git a/src/test/ui/parser-recovery-4.stderr b/src/test/ui/parser-recovery-4.stderr new file mode 100644 index 0000000000000..ecf61915bfbfb --- /dev/null +++ b/src/test/ui/parser-recovery-4.stderr @@ -0,0 +1,34 @@ +error: incorrect close delimiter: `}` + --> $DIR/parser-recovery-4.rs:20:1 + | +LL | fn main() { + | - close delimiter possibly meant for this +LL | let x = 1; +LL | foo(x + | - un-closed delimiter +... +LL | } + | ^ incorrect close delimiter + +error: expected one of `!`, `)`, `,`, `.`, `::`, `?`, `{`, or an operator, found `bar` + --> $DIR/parser-recovery-4.rs:17:5 + | +LL | foo(x + | - - + | | | + | | expected one of 8 possible tokens here + | | help: ...the missing `)` may belong here + | if you meant to close this... +LL | bar() + | ^^^ unexpected token + +error: expected one of `.`, `;`, `?`, `}`, or an operator, found `bar` + --> $DIR/parser-recovery-4.rs:17:5 + | +LL | foo(x + | - expected one of `.`, `;`, `?`, `}`, or an operator here +LL | bar() + | ^^^ unexpected token + +error: aborting due to 3 previous errors + diff --git a/src/test/ui/resolve/token-error-correct-3.rs b/src/test/ui/resolve/token-error-correct-3.rs index 8881b965f9480..aad1c3119a327 100644 --- a/src/test/ui/resolve/token-error-correct-3.rs +++ b/src/test/ui/resolve/token-error-correct-3.rs @@ -20,21 +20,21 @@ pub mod raw { pub fn ensure_dir_exists, F: FnOnce(&Path)>(path: P, callback: F) -> io::Result { - if !is_directory(path.as_ref()) { //~ ERROR: cannot find function `is_directory` - callback(path.as_ref(); //~ ERROR expected one of - fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mismatched types - //~^ expected (), found enum `std::result::Result` - //~| expected type `()` - //~| found type `std::result::Result` - //~| expected one of - } else { //~ ERROR: incorrect close delimiter: `}` - //~^ ERROR: expected one of - //~| unexpected token + if !is_directory(path.as_ref()) { + callback(path.as_ref(); + //~^ ERROR expected one of + fs::create_dir_all(path.as_ref()).map(|()| true) + } else { + //~^ ERROR incorrect close delimiter: `}` + //~| ERROR expected one of + //~^^^^ ERROR mismatched types Ok(false); } panic!(); } + + fn is_directory>(_: P) -> bool { true } } fn main() {} diff --git a/src/test/ui/resolve/token-error-correct-3.stderr b/src/test/ui/resolve/token-error-correct-3.stderr index b87a59d219655..c3f74a0675f79 100644 --- a/src/test/ui/resolve/token-error-correct-3.stderr +++ b/src/test/ui/resolve/token-error-correct-3.stderr @@ -1,39 +1,36 @@ error: incorrect close delimiter: `}` - --> $DIR/token-error-correct-3.rs:30:9 + --> $DIR/token-error-correct-3.rs:27:9 | -LL | if !is_directory(path.as_ref()) { //~ ERROR: cannot find function `is_directory` +LL | if !is_directory(path.as_ref()) { | - close delimiter possibly meant for this -LL | 
callback(path.as_ref(); //~ ERROR expected one of +LL | callback(path.as_ref(); | - un-closed delimiter ... -LL | } else { //~ ERROR: incorrect close delimiter: `}` +LL | } else { | ^ incorrect close delimiter error: expected one of `)`, `,`, `.`, `?`, or an operator, found `;` --> $DIR/token-error-correct-3.rs:24:35 | -LL | callback(path.as_ref(); //~ ERROR expected one of - | ^ expected one of `)`, `,`, `.`, `?`, or an operator here +LL | callback(path.as_ref(); + | - ^ + | | | + | | expected one of `)`, `,`, `.`, `?`, or an operator here + | | help: ...the missing `)` may belong here + | if you meant to close this... error: expected one of `.`, `;`, `?`, `}`, or an operator, found `)` - --> $DIR/token-error-correct-3.rs:30:9 + --> $DIR/token-error-correct-3.rs:27:9 | -LL | fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mismatched types +LL | fs::create_dir_all(path.as_ref()).map(|()| true) | - expected one of `.`, `;`, `?`, `}`, or an operator here -... -LL | } else { //~ ERROR: incorrect close delimiter: `}` +LL | } else { | ^ unexpected token -error[E0425]: cannot find function `is_directory` in this scope - --> $DIR/token-error-correct-3.rs:23:13 - | -LL | if !is_directory(path.as_ref()) { //~ ERROR: cannot find function `is_directory` - | ^^^^^^^^^^^^ not found in this scope - error[E0308]: mismatched types - --> $DIR/token-error-correct-3.rs:25:13 + --> $DIR/token-error-correct-3.rs:26:13 | -LL | fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mismatched types +LL | fs::create_dir_all(path.as_ref()).map(|()| true) | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- help: try adding a semicolon: `;` | | | expected (), found enum `std::result::Result` @@ -41,7 +38,6 @@ LL | fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mis = note: expected type `()` found type `std::result::Result` -error: aborting due to 5 previous errors +error: aborting due to 4 previous errors -Some errors occurred: E0308, E0425. -For more information about an error, try `rustc --explain E0308`. +For more information about this error, try `rustc --explain E0308`. diff --git a/src/test/ui/resolve/token-error-correct.rs b/src/test/ui/resolve/token-error-correct.rs index 39c664e270c45..7057b454ac5d7 100644 --- a/src/test/ui/resolve/token-error-correct.rs +++ b/src/test/ui/resolve/token-error-correct.rs @@ -12,6 +12,10 @@ fn main() { foo(bar(; - //~^ ERROR: expected expression, found `;` + //~^ ERROR expected expression, found `;` + //~| ERROR expected one of + //~| ERROR cannot find function `foo` in this scope + //~| ERROR cannot find function `bar` in this scope } -//~^ ERROR: incorrect close delimiter: `}` +//~^ ERROR incorrect close delimiter: `}` +//~| ERROR expected expression diff --git a/src/test/ui/resolve/token-error-correct.stderr b/src/test/ui/resolve/token-error-correct.stderr index b69098407323a..b1f9735a5acf2 100644 --- a/src/test/ui/resolve/token-error-correct.stderr +++ b/src/test/ui/resolve/token-error-correct.stderr @@ -1,11 +1,11 @@ error: incorrect close delimiter: `}` - --> $DIR/token-error-correct.rs:16:1 + --> $DIR/token-error-correct.rs:19:1 | LL | fn main() { | - close delimiter possibly meant for this LL | foo(bar(; | - un-closed delimiter -LL | //~^ ERROR: expected expression, found `;` +... 
LL | } | ^ incorrect close delimiter @@ -13,7 +13,40 @@ error: expected expression, found `;` --> $DIR/token-error-correct.rs:14:13 | LL | foo(bar(; - | ^ expected expression + | - ^ + | | | + | | expected expression + | | help: ...the missing `)` may belong here + | if you meant to close this... -error: aborting due to 2 previous errors +error: expected one of `)`, `,`, `.`, `?`, or an operator, found `;` + --> $DIR/token-error-correct.rs:14:13 + | +LL | foo(bar(; + | -^ + | || + | |expected one of `)`, `,`, `.`, `?`, or an operator here + | |help: ...the missing `)` may belong here + | if you meant to close this... + +error: expected expression, found `)` + --> $DIR/token-error-correct.rs:19:1 + | +LL | } + | ^ expected expression + +error[E0425]: cannot find function `foo` in this scope + --> $DIR/token-error-correct.rs:14:5 + | +LL | foo(bar(; + | ^^^ not found in this scope + +error[E0425]: cannot find function `bar` in this scope + --> $DIR/token-error-correct.rs:14:9 + | +LL | foo(bar(; + | ^^^ not found in this scope + +error: aborting due to 6 previous errors +For more information about this error, try `rustc --explain E0425`. diff --git a/src/test/ui/token/issue-10636-2.rs b/src/test/ui/token/issue-10636-2.rs index 711803754408f..1aba26131c143 100644 --- a/src/test/ui/token/issue-10636-2.rs +++ b/src/test/ui/token/issue-10636-2.rs @@ -13,9 +13,10 @@ pub fn trace_option(option: Option) { option.map(|some| 42; - //~^ ERROR: expected one of + //~^ ERROR expected one of -} //~ ERROR: incorrect close delimiter -//~^ ERROR: expected expression, found `)` +} +//~^ ERROR: incorrect close delimiter +//~| ERROR expected expression fn main() {} diff --git a/src/test/ui/token/issue-10636-2.stderr b/src/test/ui/token/issue-10636-2.stderr index 9800b0c5e3f05..156a8db0a0543 100644 --- a/src/test/ui/token/issue-10636-2.stderr +++ b/src/test/ui/token/issue-10636-2.stderr @@ -6,19 +6,23 @@ LL | pub fn trace_option(option: Option) { LL | option.map(|some| 42; | - un-closed delimiter ... -LL | } //~ ERROR: incorrect close delimiter +LL | } | ^ incorrect close delimiter error: expected one of `)`, `,`, `.`, `?`, or an operator, found `;` --> $DIR/issue-10636-2.rs:15:25 | LL | option.map(|some| 42; - | ^ expected one of `)`, `,`, `.`, `?`, or an operator here + | - ^ + | | | + | | expected one of `)`, `,`, `.`, `?`, or an operator here + | | help: ...the missing `)` may belong here + | if you meant to close this... error: expected expression, found `)` --> $DIR/issue-10636-2.rs:18:1 | -LL | } //~ ERROR: incorrect close delimiter +LL | } | ^ expected expression error: aborting due to 3 previous errors
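Taken together, the updated expectations show the user-facing shape of the change: instead of a bare "expected one of ... here", the unclosed opener is labelled "if you meant to close this..." and the spot just after the last token gets the "...the missing `)` may belong here" suggestion. For instance, source along the lines of the `token-error-correct.rs` test above, which is deliberately ill-formed and exists only to trigger these diagnostics:

fn main() {
    foo(bar(;
    // `bar(` is never closed. With this patch the error on `;` carries:
    //   - "if you meant to close this..." pointing at the `(` of `bar(`
    //   - "help: ...the missing `)` may belong here" just before the `;`
    // plus the now-reported E0425s for the undefined `foo` and `bar`.
}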