5 changes: 2 additions & 3 deletions Cargo.lock
@@ -4072,7 +4072,6 @@ name = "rustc_log"
version = "0.0.0"
dependencies = [
"tracing",
"tracing-core",
"tracing-subscriber",
"tracing-tree",
]
@@ -5541,9 +5540,9 @@ dependencies = [

[[package]]
name = "tracing-core"
version = "0.1.30"
version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a"
checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
dependencies = [
"once_cell",
"valuable",
4 changes: 2 additions & 2 deletions compiler/rustc_log/Cargo.toml
@@ -5,8 +5,8 @@ edition = "2024"

[dependencies]
# tidy-alphabetical-start
tracing = "0.1.28"
tracing-core = "=0.1.30" # FIXME(Nilstrieb) tracing has a deadlock: https://github.com/tokio-rs/tracing/issues/2635
# tracing > 0.1.37 has a huge binary size / instruction count regression
tracing = "=0.1.37"
tracing-subscriber = { version = "0.3.3", default-features = false, features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"] }
tracing-tree = "0.3.1"
# tidy-alphabetical-end
2 changes: 1 addition & 1 deletion compiler/rustc_log/src/lib.rs
@@ -38,7 +38,7 @@ use std::fmt::{self, Display};
use std::io::{self, IsTerminal};

use tracing::dispatcher::SetGlobalDefaultError;
use tracing_core::{Event, Subscriber};
use tracing::{Event, Subscriber};
use tracing_subscriber::filter::{Directive, EnvFilter, LevelFilter};
use tracing_subscriber::fmt::FmtContext;
use tracing_subscriber::fmt::format::{self, FormatEvent, FormatFields};
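The import swap above relies on `tracing` re-exporting `Event` and `Subscriber` from `tracing-core`, which is what allows dropping the explicit `tracing-core` dependency. A minimal compile-time sketch of that assumption (not part of the PR; it presumes both crates are available as dependencies):

```rust
// Sketch: these calls only type-check if both paths resolve to the same items,
// i.e. if `tracing` re-exports the `tracing-core` types.
use std::marker::PhantomData;

fn same_type<T: ?Sized>(_: PhantomData<T>, _: PhantomData<T>) {}

fn main() {
    same_type(
        PhantomData::<tracing::Event<'static>>,
        PhantomData::<tracing_core::Event<'static>>,
    );
    same_type(
        PhantomData::<dyn tracing::Subscriber>,
        PhantomData::<dyn tracing_core::Subscriber>,
    );
}
```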
35 changes: 30 additions & 5 deletions compiler/rustc_parse/src/lexer/mod.rs
@@ -44,19 +44,44 @@ pub(crate) struct UnmatchedDelim {
pub candidate_span: Option<Span>,
}

/// Which tokens should be stripped before lexing the source.
pub(crate) enum StripTokens {
/// Strip both shebang and frontmatter.
ShebangAndFrontmatter,
/// Strip the shebang but not frontmatter.
///
/// That means that char sequences looking like frontmatter are simply
/// interpreted as regular Rust lexemes.
Shebang,
/// Strip nothing.
///
/// In other words, char sequences looking like a shebang or frontmatter
/// are simply interpreted as regular Rust lexemes.
Nothing,
}

pub(crate) fn lex_token_trees<'psess, 'src>(
psess: &'psess ParseSess,
mut src: &'src str,
mut start_pos: BytePos,
override_span: Option<Span>,
frontmatter_allowed: FrontmatterAllowed,
strip_tokens: StripTokens,
) -> Result<TokenStream, Vec<Diag<'psess>>> {
// Skip `#!`, if present.
if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
src = &src[shebang_len..];
start_pos = start_pos + BytePos::from_usize(shebang_len);
match strip_tokens {
StripTokens::Shebang | StripTokens::ShebangAndFrontmatter => {
if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
src = &src[shebang_len..];
start_pos = start_pos + BytePos::from_usize(shebang_len);
}
}
StripTokens::Nothing => {}
}

let frontmatter_allowed = match strip_tokens {
StripTokens::ShebangAndFrontmatter => FrontmatterAllowed::Yes,
StripTokens::Shebang | StripTokens::Nothing => FrontmatterAllowed::No,
};

let cursor = Cursor::new(src, frontmatter_allowed);
let mut lexer = Lexer {
psess,
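To make the semantics of the three variants concrete, here is a small standalone sketch (illustrative only; the `strip` helper and its simplified frontmatter handling are not rustc APIs) of what each mode leaves for the lexer when the input starts with both a shebang and a frontmatter block:

```rust
// Illustrative toy model of the stripping behaviour, not rustc's lexer.
#[derive(Clone, Copy)]
enum StripTokens {
    ShebangAndFrontmatter,
    Shebang,
    Nothing,
}

/// Toy helper: returns `src` with the requested prefixes removed.
fn strip(src: &str, mode: StripTokens) -> &str {
    fn strip_shebang(s: &str) -> &str {
        // Drop a leading `#!...` line, if any.
        if s.starts_with("#!") {
            s.find('\n').map(|i| &s[i + 1..]).unwrap_or("")
        } else {
            s
        }
    }
    fn strip_frontmatter(s: &str) -> &str {
        // Drop a leading `---` ... `---` block, if any (simplified).
        s.strip_prefix("---\n")
            .and_then(|rest| rest.split_once("---\n"))
            .map(|(_, rest)| rest)
            .unwrap_or(s)
    }
    match mode {
        // Shebang/frontmatter-looking text reaches the lexer as ordinary tokens.
        StripTokens::Nothing => src,
        // Only a leading `#!...` line is dropped; frontmatter is lexed as regular tokens.
        StripTokens::Shebang => strip_shebang(src),
        // Both prefixes are handled (in rustc, the frontmatter half is done by the lexer
        // itself via `FrontmatterAllowed::Yes`).
        StripTokens::ShebangAndFrontmatter => strip_frontmatter(strip_shebang(src)),
    }
}

fn main() {
    let src = "#!/usr/bin/env cargo\n---\nedition = \"2024\"\n---\nfn main() {}\n";
    assert_eq!(strip(src, StripTokens::ShebangAndFrontmatter), "fn main() {}\n");
    assert_eq!(
        strip(src, StripTokens::Shebang),
        "---\nedition = \"2024\"\n---\nfn main() {}\n"
    );
    assert_eq!(strip(src, StripTokens::Nothing), src);
}
```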
29 changes: 12 additions & 17 deletions compiler/rustc_parse/src/lib.rs
@@ -21,7 +21,6 @@ use rustc_ast::tokenstream::{DelimSpan, TokenStream};
use rustc_ast::{AttrItem, Attribute, MetaItemInner, token};
use rustc_ast_pretty::pprust;
use rustc_errors::{Diag, EmissionGuarantee, FatalError, PResult, pluralize};
use rustc_lexer::FrontmatterAllowed;
use rustc_session::parse::ParseSess;
use rustc_span::source_map::SourceMap;
use rustc_span::{FileName, SourceFile, Span};
@@ -34,6 +33,8 @@ pub mod parser;
use parser::Parser;
use rustc_ast::token::Delimiter;

use crate::lexer::StripTokens;

pub mod lexer;

mod errors;
@@ -62,10 +63,10 @@ pub fn new_parser_from_source_str(
source: String,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
let source_file = psess.source_map().new_source_file(name, source);
new_parser_from_source_file(psess, source_file, FrontmatterAllowed::Yes)
new_parser_from_source_file(psess, source_file, StripTokens::ShebangAndFrontmatter)
}

/// Creates a new parser from a simple (no frontmatter) source string.
/// Creates a new parser from a simple (no shebang, no frontmatter) source string.
///
/// On failure, the errors must be consumed via `unwrap_or_emit_fatal`, `emit`, `cancel`,
/// etc., otherwise a panic will occur when they are dropped.
@@ -75,7 +76,7 @@ pub fn new_parser_from_simple_source_str(
source: String,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
let source_file = psess.source_map().new_source_file(name, source);
new_parser_from_source_file(psess, source_file, FrontmatterAllowed::No)
new_parser_from_source_file(psess, source_file, StripTokens::Nothing)
}

/// Creates a new parser from a filename. On failure, the errors must be consumed via
@@ -109,7 +110,7 @@ pub fn new_parser_from_file<'a>(
}
err.emit();
});
new_parser_from_source_file(psess, source_file, FrontmatterAllowed::Yes)
new_parser_from_source_file(psess, source_file, StripTokens::ShebangAndFrontmatter)
}

pub fn utf8_error<E: EmissionGuarantee>(
@@ -160,10 +161,10 @@ pub fn utf8_error<E: EmissionGuarantee>(
fn new_parser_from_source_file(
psess: &ParseSess,
source_file: Arc<SourceFile>,
frontmatter_allowed: FrontmatterAllowed,
strip_tokens: StripTokens,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
let end_pos = source_file.end_position();
let stream = source_file_to_stream(psess, source_file, None, frontmatter_allowed)?;
let stream = source_file_to_stream(psess, source_file, None, strip_tokens)?;
let mut parser = Parser::new(psess, stream, None);
if parser.token == token::Eof {
parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
@@ -179,8 +180,8 @@ pub fn source_str_to_stream(
) -> Result<TokenStream, Vec<Diag<'_>>> {
let source_file = psess.source_map().new_source_file(name, source);
// used mainly for `proc_macro` and the likes, not for our parsing purposes, so don't parse
// frontmatters as frontmatters.
source_file_to_stream(psess, source_file, override_span, FrontmatterAllowed::No)
// frontmatters as frontmatters, but for compatibility reasons still strip the shebang
source_file_to_stream(psess, source_file, override_span, StripTokens::Shebang)
}

/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
@@ -189,7 +190,7 @@ fn source_file_to_stream<'psess>(
psess: &'psess ParseSess,
source_file: Arc<SourceFile>,
override_span: Option<Span>,
frontmatter_allowed: FrontmatterAllowed,
strip_tokens: StripTokens,
) -> Result<TokenStream, Vec<Diag<'psess>>> {
let src = source_file.src.as_ref().unwrap_or_else(|| {
psess.dcx().bug(format!(
@@ -198,13 +199,7 @@ fn source_file_to_stream<'psess>(
));
});

lexer::lex_token_trees(
psess,
src.as_str(),
source_file.start_pos,
override_span,
frontmatter_allowed,
)
lexer::lex_token_trees(psess, src.as_str(), source_file.start_pos, override_span, strip_tokens)
}

/// Runs the given subparser `f` on the tokens of the given `attr`'s item.
@@ -32,8 +32,8 @@ use rustc_middle::ty::print::{
};
use rustc_middle::ty::{
self, AdtKind, GenericArgs, InferTy, IsSuggestable, Ty, TyCtxt, TypeFoldable, TypeFolder,
TypeSuperFoldable, TypeVisitableExt, TypeckResults, Upcast, suggest_arbitrary_trait_bound,
suggest_constraining_type_param,
TypeSuperFoldable, TypeSuperVisitable, TypeVisitableExt, TypeVisitor, TypeckResults, Upcast,
suggest_arbitrary_trait_bound, suggest_constraining_type_param,
};
use rustc_middle::{bug, span_bug};
use rustc_span::def_id::LocalDefId;
@@ -263,6 +263,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
_ => (false, None),
};

let mut finder = ParamFinder { .. };
finder.visit_binder(&trait_pred);

// FIXME: Add check for trait bound that is already present, particularly `?Sized` so we
// don't suggest `T: Sized + ?Sized`.
loop {
@@ -411,6 +414,26 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
}
}

hir::Node::TraitItem(hir::TraitItem {
generics,
kind: hir::TraitItemKind::Fn(..),
..
})
| hir::Node::ImplItem(hir::ImplItem {
generics,
trait_item_def_id: None,
kind: hir::ImplItemKind::Fn(..),
..
}) if finder.can_suggest_bound(generics) => {
// Missing generic type parameter bound.
suggest_arbitrary_trait_bound(
self.tcx,
generics,
err,
trait_pred,
associated_ty,
);
}
hir::Node::Item(hir::Item {
kind:
hir::ItemKind::Struct(_, generics, _)
@@ -423,7 +446,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
| hir::ItemKind::Const(_, generics, _, _)
| hir::ItemKind::TraitAlias(_, generics, _),
..
}) if !param_ty => {
}) if finder.can_suggest_bound(generics) => {
// Missing generic type parameter bound.
if suggest_arbitrary_trait_bound(
self.tcx,
@@ -5068,8 +5091,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
// Suggesting `T: ?Sized` is only valid in an ADT if `T` is only used in a
// borrow. `struct S<'a, T: ?Sized>(&'a T);` is valid, `struct S<T: ?Sized>(T);`
// is not. Look for invalid "bare" parameter uses, and suggest using indirection.
let mut visitor =
FindTypeParam { param: param.name.ident().name, invalid_spans: vec![], nested: false };
let mut visitor = FindTypeParam { param: param.name.ident().name, .. };
visitor.visit_item(item);
if visitor.invalid_spans.is_empty() {
return false;
@@ -5228,7 +5250,7 @@ fn hint_missing_borrow<'tcx>(
/// Used to suggest replacing associated types with an explicit type in `where` clauses.
#[derive(Debug)]
pub struct SelfVisitor<'v> {
pub paths: Vec<&'v hir::Ty<'v>>,
pub paths: Vec<&'v hir::Ty<'v>> = Vec::new(),
pub name: Option<Symbol>,
}

@@ -5599,7 +5621,7 @@ fn point_at_assoc_type_restriction<G: EmissionGuarantee>(
);
// Search for the associated type `Self::{name}`, get
// its type and suggest replacing the bound with it.
let mut visitor = SelfVisitor { paths: vec![], name: Some(name) };
let mut visitor = SelfVisitor { name: Some(name), .. };
visitor.visit_trait_ref(trait_ref);
for path in visitor.paths {
err.span_suggestion_verbose(
@@ -5610,7 +5632,7 @@ fn point_at_assoc_type_restriction<G: EmissionGuarantee>(
);
}
} else {
let mut visitor = SelfVisitor { paths: vec![], name: None };
let mut visitor = SelfVisitor { name: None, .. };
visitor.visit_trait_ref(trait_ref);
let span: MultiSpan =
visitor.paths.iter().map(|p| p.span).collect::<Vec<Span>>().into();
@@ -5640,8 +5662,8 @@ fn get_deref_type_and_refs(mut ty: Ty<'_>) -> (Ty<'_>, Vec<hir::Mutability>) {
/// `param: ?Sized` would be a valid constraint.
struct FindTypeParam {
param: rustc_span::Symbol,
invalid_spans: Vec<Span>,
nested: bool,
invalid_spans: Vec<Span> = Vec::new(),
nested: bool = false,
}

impl<'v> Visitor<'v> for FindTypeParam {
@@ -5679,3 +5701,38 @@ impl<'v> Visitor<'v> for FindTypeParam {
}
}
}

/// Look for type parameters in predicates. We use this to identify whether a bound is suitable to
/// suggest on a given item.
struct ParamFinder {
params: Vec<Symbol> = Vec::new(),
}

impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for ParamFinder {
fn visit_ty(&mut self, t: Ty<'tcx>) -> Self::Result {
match t.kind() {
ty::Param(p) => self.params.push(p.name),
_ => {}
}
t.super_visit_with(self)
}
}

impl ParamFinder {
/// Whether the evaluated bound can be suggested on the current item, i.e. whether the type
/// parameters it references are present in the item's `hir::Generics`.
fn can_suggest_bound(&self, generics: &hir::Generics<'_>) -> bool {
if self.params.is_empty() {
// There are no references to type parameters at all, so suggesting the bound
// would be reasonable.
return true;
}
generics.params.iter().any(|p| match p.name {
hir::ParamName::Plain(p_name) => {
// All of the parameters in the bound can be referenced in the current item.
self.params.iter().any(|p| *p == p_name.name || *p == kw::SelfUpper)
}
_ => true,
})
}
}
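As an illustration of the scenario the new `TraitItem`/`ImplItem` arms target (an assumed example, not a test from this PR): a generic declared directly on a method can now receive an arbitrary-trait-bound suggestion, since `ParamFinder::can_suggest_bound` finds the referenced parameter among the method's own generics.

```rust
// Illustrative only. Without the `T: std::fmt::Display` bound, the `format!` call is an
// E0277 error, and the new code paths aim to suggest adding a bound like this on the
// trait method itself rather than bailing out because the item is a trait fn.
trait Describe {
    fn describe<T: std::fmt::Display>(&self, value: T) -> String {
        format!("value: {value}")
    }
}

fn main() {
    struct Plain;
    impl Describe for Plain {}
    println!("{}", Plain.describe(42));
}
```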
1 change: 1 addition & 0 deletions compiler/rustc_trait_selection/src/lib.rs
@@ -19,6 +19,7 @@
#![feature(assert_matches)]
#![feature(associated_type_defaults)]
#![feature(box_patterns)]
#![feature(default_field_values)]
#![feature(if_let_guard)]
#![feature(iter_intersperse)]
#![feature(iterator_try_reduce)]
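The newly enabled `default_field_values` nightly feature is what permits the `field: Type = expr` declarations and the `Struct { some_field, .. }` constructions used in the visitor changes above. A minimal standalone sketch of the feature (hypothetical names, nightly only, not PR code):

```rust
#![feature(default_field_values)]

struct FindParam {
    name: &'static str,
    // Fields with `= expr` defaults may be omitted when the struct is built.
    hits: Vec<usize> = Vec::new(),
    nested: bool = false,
}

fn main() {
    // `..` with no trailing expression fills in the declared defaults.
    let finder = FindParam { name: "T", .. };
    assert!(finder.hits.is_empty());
    assert!(!finder.nested);
}
```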
3 changes: 3 additions & 0 deletions tests/run-make/multiline-args-value/cfg-shebang.stderr
@@ -0,0 +1,3 @@
error: invalid `--cfg` argument: `#!/usr/bin/shebang
key` (expected `key` or `key="value"`)

6 changes: 6 additions & 0 deletions tests/run-make/multiline-args-value/check-cfg-shebang.stderr
@@ -0,0 +1,6 @@
error: invalid `--check-cfg` argument: `#!/usr/bin/shebang
cfg(key)`
|
= note: expected `cfg(name, values("value1", "value2", ... "valueN"))`
= note: visit <https://doc.rust-lang.org/nightly/rustc/check-cfg.html> for more details
