diff --git a/compiler/rustc_codegen_cranelift/Cargo.lock b/compiler/rustc_codegen_cranelift/Cargo.lock index 532049c858d4f..402fbb16f97ee 100644 --- a/compiler/rustc_codegen_cranelift/Cargo.lock +++ b/compiler/rustc_codegen_cranelift/Cargo.lock @@ -15,9 +15,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.56" +version = "1.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4361135be9122e0870de935d7c439aef945b9f9ddd4199a553b5270b49c82a27" +checksum = "c794e162a5eff65c72ef524dfe393eb923c354e350bb78b9c7383df13f3bc142" [[package]] name = "ar" diff --git a/compiler/rustc_driver/src/lib.rs b/compiler/rustc_driver/src/lib.rs index 53ae913f94f12..94639bf8e1ee4 100644 --- a/compiler/rustc_driver/src/lib.rs +++ b/compiler/rustc_driver/src/lib.rs @@ -1148,6 +1148,17 @@ static DEFAULT_HOOK: LazyLock<Box<dyn Fn(&panic::PanicInfo<'_>) + Sync + Send + 'static>> = LazyLock::new(|| { let hook = panic::take_hook(); panic::set_hook(Box::new(|info| { + // If the error was caused by a broken pipe then this is not a bug. + // Write the error and return immediately. See #98700. + #[cfg(windows)] + if let Some(msg) = info.payload().downcast_ref::<String>() { + if msg.starts_with("failed printing to stdout: ") && msg.ends_with("(os error 232)") + { + early_error_no_abort(ErrorOutputType::default(), &msg); + return; + } + }; + // Invoke the default handler, which prints the actual panic message and optionally a backtrace (*DEFAULT_HOOK)(info); diff --git a/compiler/rustc_lint/src/unused.rs b/compiler/rustc_lint/src/unused.rs index b6cf182916cb9..4e7ba1c6ce4fa 100644 --- a/compiler/rustc_lint/src/unused.rs +++ b/compiler/rustc_lint/src/unused.rs @@ -11,7 +11,7 @@ use rustc_middle::ty::adjustment; use rustc_middle::ty::{self, Ty}; use rustc_span::symbol::Symbol; use rustc_span::symbol::{kw, sym}; -use rustc_span::{BytePos, Span, DUMMY_SP}; +use rustc_span::{BytePos, Span}; declare_lint!
{ /// The `unused_must_use` lint detects unused result of a type flagged as @@ -504,23 +504,23 @@ trait UnusedDelimLint { ast::ExprKind::Block(ref block, None) if block.stmts.len() > 0 => { let start = block.stmts[0].span; let end = block.stmts[block.stmts.len() - 1].span; - if value.span.from_expansion() || start.from_expansion() || end.from_expansion() { - ( - value.span.with_hi(value.span.lo() + BytePos(1)), - value.span.with_lo(value.span.hi() - BytePos(1)), - ) + if let Some(start) = start.find_ancestor_inside(value.span) + && let Some(end) = end.find_ancestor_inside(value.span) + { + Some(( + value.span.with_hi(start.lo()), + value.span.with_lo(end.hi()), + )) } else { - (value.span.with_hi(start.lo()), value.span.with_lo(end.hi())) + None } } ast::ExprKind::Paren(ref expr) => { - if value.span.from_expansion() || expr.span.from_expansion() { - ( - value.span.with_hi(value.span.lo() + BytePos(1)), - value.span.with_lo(value.span.hi() - BytePos(1)), - ) + let expr_span = expr.span.find_ancestor_inside(value.span); + if let Some(expr_span) = expr_span { + Some((value.span.with_hi(expr_span.lo()), value.span.with_lo(expr_span.hi()))) } else { - (value.span.with_hi(expr.span.lo()), value.span.with_lo(expr.span.hi())) + None } } _ => return, @@ -529,36 +529,38 @@ trait UnusedDelimLint { left_pos.map_or(false, |s| s >= value.span.lo()), right_pos.map_or(false, |s| s <= value.span.hi()), ); - self.emit_unused_delims(cx, spans, ctx.into(), keep_space); + self.emit_unused_delims(cx, value.span, spans, ctx.into(), keep_space); } fn emit_unused_delims( &self, cx: &EarlyContext<'_>, - spans: (Span, Span), + value_span: Span, + spans: Option<(Span, Span)>, msg: &str, keep_space: (bool, bool), ) { - // FIXME(flip1995): Quick and dirty fix for #70814. This should be fixed in rustdoc - // properly. - if spans.0 == DUMMY_SP || spans.1 == DUMMY_SP { - return; - } - - cx.struct_span_lint(self.lint(), MultiSpan::from(vec![spans.0, spans.1]), |lint| { - let replacement = vec![ - (spans.0, if keep_space.0 { " ".into() } else { "".into() }), - (spans.1, if keep_space.1 { " ".into() } else { "".into() }), - ]; - lint.build(fluent::lint::unused_delim) - .set_arg("delim", Self::DELIM_STR) - .set_arg("item", msg) - .multipart_suggestion( + let primary_span = if let Some((lo, hi)) = spans { + MultiSpan::from(vec![lo, hi]) + } else { + MultiSpan::from(value_span) + }; + cx.struct_span_lint(self.lint(), primary_span, |lint| { + let mut db = lint.build(fluent::lint::unused_delim); + db.set_arg("delim", Self::DELIM_STR); + db.set_arg("item", msg); + if let Some((lo, hi)) = spans { + let replacement = vec![ + (lo, if keep_space.0 { " ".into() } else { "".into() }), + (hi, if keep_space.1 { " ".into() } else { "".into() }), + ]; + db.multipart_suggestion( fluent::lint::suggestion, replacement, Applicability::MachineApplicable, - ) - .emit(); + ); + } + db.emit(); }); } @@ -766,15 +768,12 @@ impl UnusedParens { // Otherwise proceed with linting. 
_ => {} } - let spans = if value.span.from_expansion() || inner.span.from_expansion() { - ( - value.span.with_hi(value.span.lo() + BytePos(1)), - value.span.with_lo(value.span.hi() - BytePos(1)), - ) + let spans = if let Some(inner) = inner.span.find_ancestor_inside(value.span) { + Some((value.span.with_hi(inner.lo()), value.span.with_lo(inner.hi()))) } else { - (value.span.with_hi(inner.span.lo()), value.span.with_lo(inner.span.hi())) + None }; - self.emit_unused_delims(cx, spans, "pattern", (false, false)); + self.emit_unused_delims(cx, value.span, spans, "pattern", (false, false)); } } } @@ -879,15 +878,12 @@ impl EarlyLintPass for UnusedParens { ); } _ => { - let spans = if ty.span.from_expansion() || r.span.from_expansion() { - ( - ty.span.with_hi(ty.span.lo() + BytePos(1)), - ty.span.with_lo(ty.span.hi() - BytePos(1)), - ) + let spans = if let Some(r) = r.span.find_ancestor_inside(ty.span) { + Some((ty.span.with_hi(r.lo()), ty.span.with_lo(r.hi()))) } else { - (ty.span.with_hi(r.span.lo()), ty.span.with_lo(r.span.hi())) + None }; - self.emit_unused_delims(cx, spans, "type", (false, false)); + self.emit_unused_delims(cx, ty.span, spans, "type", (false, false)); } } } diff --git a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs index d6dd0f017941a..f2045ac19cac4 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs @@ -168,7 +168,12 @@ impl<'a, 'tcx> ConstToPat<'a, 'tcx> { // once indirect_structural_match is a full fledged error, this // level of indirection can be eliminated - let inlined_const_as_pat = self.recur(cv, mir_structural_match_violation).unwrap(); + let inlined_const_as_pat = + self.recur(cv, mir_structural_match_violation).unwrap_or_else(|_| Pat { + span: self.span, + ty: cv.ty(), + kind: Box::new(PatKind::Constant { value: cv }), + }); if self.include_lint_checks && !self.saw_const_match_error.get() { // If we were able to successfully convert the const to some pat, diff --git a/compiler/rustc_typeck/src/check/coercion.rs b/compiler/rustc_typeck/src/check/coercion.rs index 540a8c3a83d95..49dc5532abdc9 100644 --- a/compiler/rustc_typeck/src/check/coercion.rs +++ b/compiler/rustc_typeck/src/check/coercion.rs @@ -1479,6 +1479,10 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> { } } Err(coercion_error) => { + // Mark that we've failed to coerce the types here to suppress + // any superfluous errors we might encounter while trying to + // emit or provide suggestions on how to fix the initial error. 
+ fcx.set_tainted_by_errors(); let (expected, found) = if label_expression_as_expected { // In the case where this is a "forced unit", like // `break`, we want to call the `()` "expected" diff --git a/compiler/rustc_typeck/src/check/expr.rs b/compiler/rustc_typeck/src/check/expr.rs index fb78632069e44..a685add7f56a6 100644 --- a/compiler/rustc_typeck/src/check/expr.rs +++ b/compiler/rustc_typeck/src/check/expr.rs @@ -2526,15 +2526,18 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ); // try to add a suggestion in case the field is a nested field of a field of the Adt - if let Some((fields, substs)) = self.get_field_candidates(span, expr_t) { - for candidate_field in fields.iter() { + let mod_id = self.tcx.parent_module(id).to_def_id(); + if let Some((fields, substs)) = + self.get_field_candidates_considering_privacy(span, expr_t, mod_id) + { + for candidate_field in fields { if let Some(mut field_path) = self.check_for_nested_field_satisfying( span, &|candidate_field, _| candidate_field.ident(self.tcx()) == field, candidate_field, substs, vec![], - self.tcx.parent_module(id).to_def_id(), + mod_id, ) { // field_path includes `field` that we're looking for, so pop it. field_path.pop(); @@ -2558,22 +2561,33 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { err } - pub(crate) fn get_field_candidates( + pub(crate) fn get_field_candidates_considering_privacy( &self, span: Span, - base_t: Ty<'tcx>, - ) -> Option<(&[ty::FieldDef], SubstsRef<'tcx>)> { - debug!("get_field_candidates(span: {:?}, base_t: {:?}", span, base_t); + base_ty: Ty<'tcx>, + mod_id: DefId, + ) -> Option<(impl Iterator<Item = &'tcx ty::FieldDef> + 'tcx, SubstsRef<'tcx>)> { + debug!("get_field_candidates(span: {:?}, base_t: {:?}", span, base_ty); - for (base_t, _) in self.autoderef(span, base_t) { + for (base_t, _) in self.autoderef(span, base_ty) { match base_t.kind() { ty::Adt(base_def, substs) if !base_def.is_enum() => { + let tcx = self.tcx; let fields = &base_def.non_enum_variant().fields; - // For compile-time reasons put a limit on number of fields we search - if fields.len() > 100 { - return None; + // Some structs, e.g. ones that impl `Deref`, have all private fields + // because you're expected to deref them to access the _real_ fields. + // This, for example, will help us suggest accessing a field through a `Box`.
+ if fields.iter().all(|field| !field.vis.is_accessible_from(mod_id, tcx)) { + continue; } - return Some((fields, substs)); + return Some(( + fields + .iter() + .filter(move |field| field.vis.is_accessible_from(mod_id, tcx)) + // For compile-time reasons put a limit on number of fields we search + .take(100), + substs, + )); } _ => {} } @@ -2590,7 +2604,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { candidate_field: &ty::FieldDef, subst: SubstsRef<'tcx>, mut field_path: Vec<Ident>, - id: DefId, + mod_id: DefId, ) -> Option<Vec<Ident>> { debug!( "check_for_nested_field_satisfying(span: {:?}, candidate_field: {:?}, field_path: {:?}", @@ -2602,24 +2616,24 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // up to a depth of three None } else { - // recursively search fields of `candidate_field` if it's a ty::Adt field_path.push(candidate_field.ident(self.tcx).normalize_to_macros_2_0()); let field_ty = candidate_field.ty(self.tcx, subst); - if let Some((nested_fields, subst)) = self.get_field_candidates(span, field_ty) { - for field in nested_fields.iter() { - if field.vis.is_accessible_from(id, self.tcx) { - if matches(candidate_field, field_ty) { - return Some(field_path); - } else if let Some(field_path) = self.check_for_nested_field_satisfying( - span, - matches, - field, - subst, - field_path.clone(), - id, - ) { - return Some(field_path); - } + if matches(candidate_field, field_ty) { + return Some(field_path); + } else if let Some((nested_fields, subst)) = + self.get_field_candidates_considering_privacy(span, field_ty, mod_id) + { + // recursively search fields of `candidate_field` if it's a ty::Adt + for field in nested_fields { + if let Some(field_path) = self.check_for_nested_field_satisfying( + span, + matches, + field, + subst, + field_path.clone(), + mod_id, + ) { + return Some(field_path); + } } } } diff --git a/compiler/rustc_typeck/src/check/fn_ctxt/mod.rs b/compiler/rustc_typeck/src/check/fn_ctxt/mod.rs index 05bcc710e1625..e008d50aa514a 100644 --- a/compiler/rustc_typeck/src/check/fn_ctxt/mod.rs +++ b/compiler/rustc_typeck/src/check/fn_ctxt/mod.rs @@ -26,6 +26,17 @@ use rustc_trait_selection::traits::{ObligationCause, ObligationCauseCode}; use std::cell::{Cell, RefCell}; use std::ops::Deref; +/// The `FnCtxt` stores type-checking context needed to type-check bodies of +/// functions, closures, and `const`s, including performing type inference +/// with [`InferCtxt`]. +/// +/// This is in contrast to [`ItemCtxt`], which is used to type-check item *signatures* +/// and thus does not perform type inference. +/// +/// See [`ItemCtxt`]'s docs for more. +/// +/// [`ItemCtxt`]: crate::collect::ItemCtxt +/// [`InferCtxt`]: infer::InferCtxt pub struct FnCtxt<'a, 'tcx> { pub(super) body_id: hir::HirId, diff --git a/compiler/rustc_typeck/src/check/method/suggest.rs b/compiler/rustc_typeck/src/check/method/suggest.rs index c92b93cbc22d2..f73d0fbb27716 100644 --- a/compiler/rustc_typeck/src/check/method/suggest.rs +++ b/compiler/rustc_typeck/src/check/method/suggest.rs @@ -1000,7 +1000,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { label_span_not_found(&mut err); } - self.check_for_field_method(&mut err, source, span, actual, item_name); + // Don't suggest (for example) `expr.field.method()` if `expr.method()` + // doesn't exist due to unsatisfied predicates.
+ if unsatisfied_predicates.is_empty() { + self.check_for_field_method(&mut err, source, span, actual, item_name); + } self.check_for_unwrap_self(&mut err, source, span, actual, item_name); @@ -1334,10 +1338,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { item_name: Ident, ) { if let SelfSource::MethodCall(expr) = source - && let Some((fields, substs)) = self.get_field_candidates(span, actual) + && let mod_id = self.tcx.parent_module(expr.hir_id).to_def_id() + && let Some((fields, substs)) = self.get_field_candidates_considering_privacy(span, actual, mod_id) { let call_expr = self.tcx.hir().expect_expr(self.tcx.hir().get_parent_node(expr.hir_id)); - for candidate_field in fields.iter() { + for candidate_field in fields { if let Some(field_path) = self.check_for_nested_field_satisfying( span, &|_, field_ty| { @@ -1353,7 +1358,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { candidate_field, substs, vec![], - self.tcx.parent_module(expr.hir_id).to_def_id(), + mod_id, ) { let field_path_str = field_path .iter() diff --git a/compiler/rustc_typeck/src/collect.rs b/compiler/rustc_typeck/src/collect.rs index 99996e80c9ce9..e7c5ecc60ec78 100644 --- a/compiler/rustc_typeck/src/collect.rs +++ b/compiler/rustc_typeck/src/collect.rs @@ -94,7 +94,27 @@ pub fn provide(providers: &mut Providers) { /////////////////////////////////////////////////////////////////////////// /// Context specific to some particular item. This is what implements -/// `AstConv`. It has information about the predicates that are defined +/// [`AstConv`]. +/// +/// # `ItemCtxt` vs `FnCtxt` +/// +/// `ItemCtxt` is primarily used to type-check item signatures and lower them +/// from HIR to their [`ty::Ty`] representation, which is exposed using [`AstConv`]. +/// It's also used for the bodies of items like structs where the body (the fields) +/// are just signatures. +/// +/// This is in contrast to [`FnCtxt`], which is used to type-check bodies of +/// functions, closures, and `const`s -- anywhere that expressions and statements show up. +/// +/// An important thing to note is that `ItemCtxt` does no inference -- it has no [`InferCtxt`] -- +/// while `FnCtxt` does do inference. +/// +/// [`FnCtxt`]: crate::check::FnCtxt +/// [`InferCtxt`]: rustc_infer::infer::InferCtxt +/// +/// # Trait predicates +/// +/// `ItemCtxt` has information about the predicates that are defined /// on the trait. Unfortunately, this predicate information is /// available in various different forms at various points in the /// process. So we can't just store a pointer to e.g., the AST or the diff --git a/src/doc/rustc/src/linker-plugin-lto.md b/src/doc/rustc/src/linker-plugin-lto.md index 9c644dd404dc4..b1854b22a7cd6 100644 --- a/src/doc/rustc/src/linker-plugin-lto.md +++ b/src/doc/rustc/src/linker-plugin-lto.md @@ -30,7 +30,7 @@ Using `rustc` directly: # Compile the Rust staticlib rustc --crate-type=staticlib -Clinker-plugin-lto -Copt-level=2 ./lib.rs # Compile the C code with `-flto=thin` -clang -c -O2 -flto=thin -o main.o ./main.c +clang -c -O2 -flto=thin -o cmain.o ./cmain.c # Link everything, making sure that we use an appropriate linker clang -flto=thin -fuse-ld=lld -L . -l"name-of-your-rust-lib" -o main -O2 ./cmain.o ``` @@ -41,7 +41,7 @@ Using `cargo`: # Compile the Rust staticlib RUSTFLAGS="-Clinker-plugin-lto" cargo build --release # Compile the C code with `-flto=thin` -clang -c -O2 -flto=thin -o main.o ./main.c +clang -c -O2 -flto=thin -o cmain.o ./cmain.c # Link everything, making sure that we use an appropriate linker clang -flto=thin -fuse-ld=lld -L . 
-l"name-of-your-rust-lib" -o main -O2 ./cmain.o ``` diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index 58d0aedb02399..4e9456ba7cb71 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -16,8 +16,8 @@ use rustc_span::hygiene::MacroKind; use rustc_span::symbol::{kw, sym, Symbol}; use crate::clean::{ - self, clean_fn_decl_from_did_and_sig, clean_middle_field, clean_middle_ty, - clean_trait_ref_with_bindings, clean_ty, clean_ty_generics, clean_variant_def, + self, clean_fn_decl_from_did_and_sig, clean_generics, clean_impl_item, clean_middle_field, + clean_middle_ty, clean_trait_ref_with_bindings, clean_ty, clean_ty_generics, clean_variant_def, clean_visibility, utils, Attributes, AttributesExt, Clean, ImplKind, ItemId, Type, Visibility, }; use crate::core::DocContext; @@ -426,9 +426,9 @@ pub(crate) fn build_impl( true } }) - .map(|item| item.clean(cx)) + .map(|item| clean_impl_item(item, cx)) .collect::>(), - impl_.generics.clean(cx), + clean_generics(impl_.generics, cx), ), None => ( tcx.associated_items(did) diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 78b93725537be..402e4f29860a2 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -48,75 +48,68 @@ pub(crate) trait Clean<'tcx, T> { fn clean(&self, cx: &mut DocContext<'tcx>) -> T; } -impl<'tcx> Clean<'tcx, Item> for DocModule<'tcx> { - fn clean(&self, cx: &mut DocContext<'tcx>) -> Item { - let mut items: Vec = vec![]; - let mut inserted = FxHashSet::default(); - items.extend(self.foreigns.iter().map(|(item, renamed)| { - let item = clean_maybe_renamed_foreign_item(cx, item, *renamed); +pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext<'tcx>) -> Item { + let mut items: Vec = vec![]; + let mut inserted = FxHashSet::default(); + items.extend(doc.foreigns.iter().map(|(item, renamed)| { + let item = clean_maybe_renamed_foreign_item(cx, item, *renamed); + if let Some(name) = item.name { + inserted.insert((item.type_(), name)); + } + item + })); + items.extend(doc.mods.iter().map(|x| { + inserted.insert((ItemType::Module, x.name)); + clean_doc_module(x, cx) + })); + + // Split up imports from all other items. + // + // This covers the case where somebody does an import which should pull in an item, + // but there's already an item with the same namespace and same name. Rust gives + // priority to the not-imported one, so we should, too. + items.extend(doc.items.iter().flat_map(|(item, renamed)| { + // First, lower everything other than imports. + if matches!(item.kind, hir::ItemKind::Use(_, hir::UseKind::Glob)) { + return Vec::new(); + } + let v = clean_maybe_renamed_item(cx, item, *renamed); + for item in &v { if let Some(name) = item.name { inserted.insert((item.type_(), name)); } - item - })); - items.extend(self.mods.iter().map(|x| { - inserted.insert((ItemType::Module, x.name)); - x.clean(cx) - })); - - // Split up imports from all other items. - // - // This covers the case where somebody does an import which should pull in an item, - // but there's already an item with the same namespace and same name. Rust gives - // priority to the not-imported one, so we should, too. - items.extend(self.items.iter().flat_map(|(item, renamed)| { - // First, lower everything other than imports. 
- if matches!(item.kind, hir::ItemKind::Use(_, hir::UseKind::Glob)) { - return Vec::new(); - } - let v = clean_maybe_renamed_item(cx, item, *renamed); - for item in &v { - if let Some(name) = item.name { - inserted.insert((item.type_(), name)); - } - } - v - })); - items.extend(self.items.iter().flat_map(|(item, renamed)| { - // Now we actually lower the imports, skipping everything else. - if let hir::ItemKind::Use(path, hir::UseKind::Glob) = item.kind { - let name = renamed.unwrap_or_else(|| cx.tcx.hir().name(item.hir_id())); - clean_use_statement(item, name, path, hir::UseKind::Glob, cx, &mut inserted) - } else { - // skip everything else - Vec::new() - } - })); - - // determine if we should display the inner contents or - // the outer `mod` item for the source code. - - let span = Span::new({ - let where_outer = self.where_outer(cx.tcx); - let sm = cx.sess().source_map(); - let outer = sm.lookup_char_pos(where_outer.lo()); - let inner = sm.lookup_char_pos(self.where_inner.lo()); - if outer.file.start_pos == inner.file.start_pos { - // mod foo { ... } - where_outer - } else { - // mod foo; (and a separate SourceFile for the contents) - self.where_inner - } - }); + } + v + })); + items.extend(doc.items.iter().flat_map(|(item, renamed)| { + // Now we actually lower the imports, skipping everything else. + if let hir::ItemKind::Use(path, hir::UseKind::Glob) = item.kind { + let name = renamed.unwrap_or_else(|| cx.tcx.hir().name(item.hir_id())); + clean_use_statement(item, name, path, hir::UseKind::Glob, cx, &mut inserted) + } else { + // skip everything else + Vec::new() + } + })); + + // determine if we should display the inner contents or + // the outer `mod` item for the source code. + + let span = Span::new({ + let where_outer = doc.where_outer(cx.tcx); + let sm = cx.sess().source_map(); + let outer = sm.lookup_char_pos(where_outer.lo()); + let inner = sm.lookup_char_pos(doc.where_inner.lo()); + if outer.file.start_pos == inner.file.start_pos { + // mod foo { ... } + where_outer + } else { + // mod foo; (and a separate SourceFile for the contents) + doc.where_inner + } + }); - Item::from_hir_id_and_parts( - self.id, - Some(self.name), - ModuleItem(Module { items, span }), - cx, - ) - } + Item::from_hir_id_and_parts(doc.id, Some(doc.name), ModuleItem(Module { items, span }), cx) } fn clean_generic_bound<'tcx>( @@ -568,69 +561,68 @@ fn is_elided_lifetime(param: &hir::GenericParam<'_>) -> bool { matches!(param.kind, hir::GenericParamKind::Lifetime { kind: hir::LifetimeParamKind::Elided }) } -impl<'tcx> Clean<'tcx, Generics> for hir::Generics<'tcx> { - fn clean(&self, cx: &mut DocContext<'tcx>) -> Generics { - let impl_trait_params = self - .params - .iter() - .filter(|param| is_impl_trait(param)) - .map(|param| { - let param = clean_generic_param(cx, Some(self), param); - match param.kind { - GenericParamDefKind::Lifetime { .. } => unreachable!(), - GenericParamDefKind::Type { did, ref bounds, .. } => { - cx.impl_trait_bounds.insert(did.into(), bounds.clone()); - } - GenericParamDefKind::Const { .. } => unreachable!(), +pub(crate) fn clean_generics<'tcx>( + gens: &hir::Generics<'tcx>, + cx: &mut DocContext<'tcx>, +) -> Generics { + let impl_trait_params = gens + .params + .iter() + .filter(|param| is_impl_trait(param)) + .map(|param| { + let param = clean_generic_param(cx, Some(gens), param); + match param.kind { + GenericParamDefKind::Lifetime { .. } => unreachable!(), + GenericParamDefKind::Type { did, ref bounds, .. 
} => { + cx.impl_trait_bounds.insert(did.into(), bounds.clone()); } - param - }) - .collect::>(); + GenericParamDefKind::Const { .. } => unreachable!(), + } + param + }) + .collect::>(); - let mut params = Vec::with_capacity(self.params.len()); - for p in self.params.iter().filter(|p| !is_impl_trait(p) && !is_elided_lifetime(p)) { - let p = clean_generic_param(cx, Some(self), p); - params.push(p); - } - params.extend(impl_trait_params); + let mut params = Vec::with_capacity(gens.params.len()); + for p in gens.params.iter().filter(|p| !is_impl_trait(p) && !is_elided_lifetime(p)) { + let p = clean_generic_param(cx, Some(gens), p); + params.push(p); + } + params.extend(impl_trait_params); - let mut generics = Generics { - params, - where_predicates: self - .predicates - .iter() - .filter_map(|x| clean_where_predicate(x, cx)) - .collect(), - }; + let mut generics = Generics { + params, + where_predicates: gens + .predicates + .iter() + .filter_map(|x| clean_where_predicate(x, cx)) + .collect(), + }; - // Some duplicates are generated for ?Sized bounds between type params and where - // predicates. The point in here is to move the bounds definitions from type params - // to where predicates when such cases occur. - for where_pred in &mut generics.where_predicates { - match *where_pred { - WherePredicate::BoundPredicate { - ty: Generic(ref name), ref mut bounds, .. - } => { - if bounds.is_empty() { - for param in &mut generics.params { - match param.kind { - GenericParamDefKind::Lifetime { .. } => {} - GenericParamDefKind::Type { bounds: ref mut ty_bounds, .. } => { - if ¶m.name == name { - mem::swap(bounds, ty_bounds); - break; - } + // Some duplicates are generated for ?Sized bounds between type params and where + // predicates. The point in here is to move the bounds definitions from type params + // to where predicates when such cases occur. + for where_pred in &mut generics.where_predicates { + match *where_pred { + WherePredicate::BoundPredicate { ty: Generic(ref name), ref mut bounds, .. } => { + if bounds.is_empty() { + for param in &mut generics.params { + match param.kind { + GenericParamDefKind::Lifetime { .. } => {} + GenericParamDefKind::Type { bounds: ref mut ty_bounds, .. } => { + if ¶m.name == name { + mem::swap(bounds, ty_bounds); + break; } - GenericParamDefKind::Const { .. } => {} } + GenericParamDefKind::Const { .. 
} => {} } } } - _ => continue, } + _ => continue, } - generics } + generics } fn clean_ty_generics<'tcx>( @@ -896,7 +888,7 @@ fn clean_function<'tcx>( ) -> Box { let (generics, decl) = enter_impl_trait(cx, |cx| { // NOTE: generics must be cleaned before args - let generics = generics.clean(cx); + let generics = clean_generics(generics, cx); let args = clean_args_from_types_and_body_id(cx, sig.decl.inputs, body_id); let decl = clean_fn_decl_with_args(cx, sig.decl, args); (generics, decl) @@ -1025,7 +1017,7 @@ fn clean_trait_item<'tcx>(trait_item: &hir::TraitItem<'tcx>, cx: &mut DocContext hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Required(names)) => { let (generics, decl) = enter_impl_trait(cx, |cx| { // NOTE: generics must be cleaned before args - let generics = trait_item.generics.clean(cx); + let generics = clean_generics(trait_item.generics, cx); let args = clean_args_from_types_and_names(cx, sig.decl.inputs, names); let decl = clean_fn_decl_with_args(cx, sig.decl, args); (generics, decl) @@ -1033,7 +1025,7 @@ fn clean_trait_item<'tcx>(trait_item: &hir::TraitItem<'tcx>, cx: &mut DocContext TyMethodItem(Box::new(Function { decl, generics })) } hir::TraitItemKind::Type(bounds, Some(default)) => { - let generics = enter_impl_trait(cx, |cx| trait_item.generics.clean(cx)); + let generics = enter_impl_trait(cx, |cx| clean_generics(trait_item.generics, cx)); let bounds = bounds.iter().filter_map(|x| clean_generic_bound(x, cx)).collect(); let item_type = clean_middle_ty(hir_ty_to_ty(cx.tcx, default), cx, None); AssocTypeItem( @@ -1046,7 +1038,7 @@ fn clean_trait_item<'tcx>(trait_item: &hir::TraitItem<'tcx>, cx: &mut DocContext ) } hir::TraitItemKind::Type(bounds, None) => { - let generics = enter_impl_trait(cx, |cx| trait_item.generics.clean(cx)); + let generics = enter_impl_trait(cx, |cx| clean_generics(trait_item.generics, cx)); let bounds = bounds.iter().filter_map(|x| clean_generic_bound(x, cx)).collect(); TyAssocTypeItem(Box::new(generics), bounds) } @@ -1058,45 +1050,46 @@ fn clean_trait_item<'tcx>(trait_item: &hir::TraitItem<'tcx>, cx: &mut DocContext }) } -impl<'tcx> Clean<'tcx, Item> for hir::ImplItem<'tcx> { - fn clean(&self, cx: &mut DocContext<'tcx>) -> Item { - let local_did = self.def_id.to_def_id(); - cx.with_param_env(local_did, |cx| { - let inner = match self.kind { - hir::ImplItemKind::Const(ty, expr) => { - let default = ConstantKind::Local { def_id: local_did, body: expr }; - AssocConstItem(clean_ty(ty, cx), default) - } - hir::ImplItemKind::Fn(ref sig, body) => { - let m = clean_function(cx, sig, self.generics, body); - let defaultness = cx.tcx.impl_defaultness(self.def_id); - MethodItem(m, Some(defaultness)) - } - hir::ImplItemKind::TyAlias(hir_ty) => { - let type_ = clean_ty(hir_ty, cx); - let generics = self.generics.clean(cx); - let item_type = clean_middle_ty(hir_ty_to_ty(cx.tcx, hir_ty), cx, None); - AssocTypeItem( - Box::new(Typedef { type_, generics, item_type: Some(item_type) }), - Vec::new(), - ) - } - }; +pub(crate) fn clean_impl_item<'tcx>( + impl_: &hir::ImplItem<'tcx>, + cx: &mut DocContext<'tcx>, +) -> Item { + let local_did = impl_.def_id.to_def_id(); + cx.with_param_env(local_did, |cx| { + let inner = match impl_.kind { + hir::ImplItemKind::Const(ty, expr) => { + let default = ConstantKind::Local { def_id: local_did, body: expr }; + AssocConstItem(clean_ty(ty, cx), default) + } + hir::ImplItemKind::Fn(ref sig, body) => { + let m = clean_function(cx, sig, impl_.generics, body); + let defaultness = cx.tcx.impl_defaultness(impl_.def_id); + MethodItem(m, 
Some(defaultness)) + } + hir::ImplItemKind::TyAlias(hir_ty) => { + let type_ = clean_ty(hir_ty, cx); + let generics = clean_generics(impl_.generics, cx); + let item_type = clean_middle_ty(hir_ty_to_ty(cx.tcx, hir_ty), cx, None); + AssocTypeItem( + Box::new(Typedef { type_, generics, item_type: Some(item_type) }), + Vec::new(), + ) + } + }; - let mut what_rustc_thinks = - Item::from_def_id_and_parts(local_did, Some(self.ident.name), inner, cx); + let mut what_rustc_thinks = + Item::from_def_id_and_parts(local_did, Some(impl_.ident.name), inner, cx); - let impl_ref = cx.tcx.impl_trait_ref(cx.tcx.local_parent(self.def_id)); + let impl_ref = cx.tcx.impl_trait_ref(cx.tcx.local_parent(impl_.def_id)); - // Trait impl items always inherit the impl's visibility -- - // we don't want to show `pub`. - if impl_ref.is_some() { - what_rustc_thinks.visibility = Inherited; - } + // Trait impl items always inherit the impl's visibility -- + // we don't want to show `pub`. + if impl_ref.is_some() { + what_rustc_thinks.visibility = Inherited; + } - what_rustc_thinks - }) - } + what_rustc_thinks + }) } impl<'tcx> Clean<'tcx, Item> for ty::AssocItem { @@ -1905,32 +1898,32 @@ fn clean_maybe_renamed_item<'tcx>( }), ItemKind::OpaqueTy(ref ty) => OpaqueTyItem(OpaqueTy { bounds: ty.bounds.iter().filter_map(|x| clean_generic_bound(x, cx)).collect(), - generics: ty.generics.clean(cx), + generics: clean_generics(ty.generics, cx), }), ItemKind::TyAlias(hir_ty, generics) => { let rustdoc_ty = clean_ty(hir_ty, cx); let ty = clean_middle_ty(hir_ty_to_ty(cx.tcx, hir_ty), cx, None); TypedefItem(Box::new(Typedef { type_: rustdoc_ty, - generics: generics.clean(cx), + generics: clean_generics(generics, cx), item_type: Some(ty), })) } ItemKind::Enum(ref def, generics) => EnumItem(Enum { variants: def.variants.iter().map(|v| v.clean(cx)).collect(), - generics: generics.clean(cx), + generics: clean_generics(generics, cx), }), ItemKind::TraitAlias(generics, bounds) => TraitAliasItem(TraitAlias { - generics: generics.clean(cx), + generics: clean_generics(generics, cx), bounds: bounds.iter().filter_map(|x| clean_generic_bound(x, cx)).collect(), }), ItemKind::Union(ref variant_data, generics) => UnionItem(Union { - generics: generics.clean(cx), + generics: clean_generics(generics, cx), fields: variant_data.fields().iter().map(|x| clean_field(x, cx)).collect(), }), ItemKind::Struct(ref variant_data, generics) => StructItem(Struct { struct_type: CtorKind::from_hir(variant_data), - generics: generics.clean(cx), + generics: clean_generics(generics, cx), fields: variant_data.fields().iter().map(|x| clean_field(x, cx)).collect(), }), ItemKind::Impl(impl_) => return clean_impl(impl_, item.hir_id(), cx), @@ -1953,7 +1946,7 @@ fn clean_maybe_renamed_item<'tcx>( TraitItem(Trait { def_id, items, - generics: generics.clean(cx), + generics: clean_generics(generics, cx), bounds: bounds.iter().filter_map(|x| clean_generic_bound(x, cx)).collect(), }) } @@ -1988,8 +1981,11 @@ fn clean_impl<'tcx>( let tcx = cx.tcx; let mut ret = Vec::new(); let trait_ = impl_.of_trait.as_ref().map(|t| clean_trait_ref(t, cx)); - let items = - impl_.items.iter().map(|ii| tcx.hir().impl_item(ii.id).clean(cx)).collect::>(); + let items = impl_ + .items + .iter() + .map(|ii| clean_impl_item(tcx.hir().impl_item(ii.id), cx)) + .collect::>(); let def_id = tcx.hir().local_def_id(hir_id); // If this impl block is an implementation of the Deref trait, then we @@ -2006,7 +2002,7 @@ fn clean_impl<'tcx>( let mut make_item = |trait_: Option, for_: Type, items: Vec| { let kind = 
ImplItem(Box::new(Impl { unsafety: impl_.unsafety, - generics: impl_.generics.clean(cx), + generics: clean_generics(impl_.generics, cx), trait_, for_, items, @@ -2203,7 +2199,7 @@ fn clean_maybe_renamed_foreign_item<'tcx>( hir::ForeignItemKind::Fn(decl, names, generics) => { let (generics, decl) = enter_impl_trait(cx, |cx| { // NOTE: generics must be cleaned before args - let generics = generics.clean(cx); + let generics = clean_generics(generics, cx); let args = clean_args_from_types_and_names(cx, decl.inputs, names); let decl = clean_fn_decl_with_args(cx, decl, args); (generics, decl) diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs index 43e71e90a6f5b..718cbbd2b8374 100644 --- a/src/librustdoc/clean/utils.rs +++ b/src/librustdoc/clean/utils.rs @@ -2,9 +2,9 @@ use crate::clean::auto_trait::AutoTraitFinder; use crate::clean::blanket_impl::BlanketImplFinder; use crate::clean::render_macro_matchers::render_macro_matcher; use crate::clean::{ - clean_middle_const, clean_middle_region, clean_middle_ty, inline, Clean, Crate, ExternalCrate, - Generic, GenericArg, GenericArgs, ImportSource, Item, ItemKind, Lifetime, Path, PathSegment, - Primitive, PrimitiveType, Type, TypeBinding, Visibility, + clean_doc_module, clean_middle_const, clean_middle_region, clean_middle_ty, inline, Crate, + ExternalCrate, Generic, GenericArg, GenericArgs, ImportSource, Item, ItemKind, Lifetime, Path, + PathSegment, Primitive, PrimitiveType, Type, TypeBinding, Visibility, }; use crate::core::DocContext; use crate::formats::item_type::ItemType; @@ -37,7 +37,7 @@ pub(crate) fn krate(cx: &mut DocContext<'_>) -> Crate { // Clean the crate, translating the entire librustc_ast AST to one that is // understood by rustdoc. - let mut module = module.clean(cx); + let mut module = clean_doc_module(&module, cx); match *module.kind { ItemKind::ModuleItem(ref module) => { diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js index 2e05c4be2f3e9..5dec610b30c7b 100644 --- a/src/librustdoc/html/static/js/main.js +++ b/src/librustdoc/html/static/js/main.js @@ -348,8 +348,7 @@ function loadCss(cssFileName) { function onHashChange(ev) { // If we're in mobile mode, we should hide the sidebar in any case. - const sidebar = document.getElementsByClassName("sidebar")[0]; - removeClass(sidebar, "shown"); + hideSidebar(); handleHashes(ev); } @@ -734,11 +733,50 @@ function loadCss(cssFileName) { }); }()); + let oldSidebarScrollPosition = null; + + function showSidebar() { + if (window.innerWidth < window.RUSTDOC_MOBILE_BREAKPOINT) { + // This is to keep the scroll position on mobile. + oldSidebarScrollPosition = window.scrollY; + document.body.style.width = `${document.body.offsetWidth}px`; + document.body.style.position = "fixed"; + document.body.style.top = `-${oldSidebarScrollPosition}px`; + document.querySelector(".mobile-topbar").style.top = `${oldSidebarScrollPosition}px`; + document.querySelector(".mobile-topbar").style.position = "relative"; + } else { + oldSidebarScrollPosition = null; + } + const sidebar = document.getElementsByClassName("sidebar")[0]; + addClass(sidebar, "shown"); + } + function hideSidebar() { + if (oldSidebarScrollPosition !== null) { + // This is to keep the scroll position on mobile. 
+ document.body.style.width = ""; + document.body.style.position = ""; + document.body.style.top = ""; + document.querySelector(".mobile-topbar").style.top = ""; + document.querySelector(".mobile-topbar").style.position = ""; + // The scroll position is lost when resetting the style, hence why we store it in + // `oldSidebarScrollPosition`. + window.scrollTo(0, oldSidebarScrollPosition); + oldSidebarScrollPosition = null; + } const sidebar = document.getElementsByClassName("sidebar")[0]; removeClass(sidebar, "shown"); } + window.addEventListener("resize", () => { + if (window.innerWidth >= window.RUSTDOC_MOBILE_BREAKPOINT && + oldSidebarScrollPosition !== null) { + // If the user opens the sidebar in "mobile" mode, and then grows the browser window, + // we need to switch away from mobile mode and make the main content area scrollable. + hideSidebar(); + } + }); + function handleClick(id, f) { const elem = document.getElementById(id); if (elem) { @@ -781,9 +819,9 @@ function loadCss(cssFileName) { sidebar_menu_toggle.addEventListener("click", () => { const sidebar = document.getElementsByClassName("sidebar")[0]; if (!hasClass(sidebar, "shown")) { - addClass(sidebar, "shown"); + showSidebar(); } else { - removeClass(sidebar, "shown"); + hideSidebar(); } }); } diff --git a/src/librustdoc/html/static/js/source-script.js b/src/librustdoc/html/static/js/source-script.js index c45d614293a85..06d15d9e5ffea 100644 --- a/src/librustdoc/html/static/js/source-script.js +++ b/src/librustdoc/html/static/js/source-script.js @@ -10,7 +10,7 @@ (function() { const rootPath = document.getElementById("rustdoc-vars").attributes["data-root-path"].value; -let oldScrollPosition = 0; +let oldScrollPosition = null; const NAME_OFFSET = 0; const DIRS_OFFSET = 1; @@ -75,18 +75,21 @@ function toggleSidebar() { oldScrollPosition = window.scrollY; document.body.style.position = "fixed"; document.body.style.top = `-${oldScrollPosition}px`; + } else { + oldScrollPosition = null; } addClass(document.documentElement, "source-sidebar-expanded"); child.innerText = "<"; updateLocalStorage("source-sidebar-show", "true"); } else { - if (window.innerWidth < window.RUSTDOC_MOBILE_BREAKPOINT) { + if (window.innerWidth < window.RUSTDOC_MOBILE_BREAKPOINT && oldScrollPosition !== null) { // This is to keep the scroll position on mobile. document.body.style.position = ""; document.body.style.top = ""; // The scroll position is lost when resetting the style, hence why we store it in - // `oldScroll`. + // `oldScrollPosition`. window.scrollTo(0, oldScrollPosition); + oldScrollPosition = null; } removeClass(document.documentElement, "source-sidebar-expanded"); child.innerText = ">"; @@ -94,6 +97,17 @@ function toggleSidebar() { } } +window.addEventListener("resize", () => { + if (window.innerWidth >= window.RUSTDOC_MOBILE_BREAKPOINT && oldScrollPosition !== null) { + // If the user opens the sidebar in "mobile" mode, and then grows the browser window, + // we need to switch away from mobile mode and make the main content area scrollable. 
+ document.body.style.position = ""; + document.body.style.top = ""; + window.scrollTo(0, oldScrollPosition); + oldScrollPosition = null; + } +}); + function createSidebarToggle() { const sidebarToggle = document.createElement("div"); sidebarToggle.id = "sidebar-toggle"; diff --git a/src/librustdoc/json/conversions.rs b/src/librustdoc/json/conversions.rs index 8caba8cb9029c..4c21fd553289b 100644 --- a/src/librustdoc/json/conversions.rs +++ b/src/librustdoc/json/conversions.rs @@ -685,24 +685,18 @@ impl FromWithTcx for Variant { impl FromWithTcx for Import { fn from_tcx(import: clean::Import, tcx: TyCtxt<'_>) -> Self { use clean::ImportKind::*; - match import.kind { - Simple(s) => Import { - source: import.source.path.whole_name(), - name: s.to_string(), - id: import.source.did.map(ItemId::from).map(|i| from_item_id(i, tcx)), - glob: false, - }, - Glob => Import { - source: import.source.path.whole_name(), - name: import - .source - .path - .last_opt() - .unwrap_or_else(|| Symbol::intern("*")) - .to_string(), - id: import.source.did.map(ItemId::from).map(|i| from_item_id(i, tcx)), - glob: true, - }, + let (name, glob) = match import.kind { + Simple(s) => (s.to_string(), false), + Glob => ( + import.source.path.last_opt().unwrap_or_else(|| Symbol::intern("*")).to_string(), + true, + ), + }; + Import { + source: import.source.path.whole_name(), + name, + id: import.source.did.map(ItemId::from).map(|i| from_item_id(i, tcx)), + glob, } } } diff --git a/src/librustdoc/passes/strip_hidden.rs b/src/librustdoc/passes/strip_hidden.rs index 533e2ce46ddd8..9914edf3036e4 100644 --- a/src/librustdoc/passes/strip_hidden.rs +++ b/src/librustdoc/passes/strip_hidden.rs @@ -17,6 +17,7 @@ pub(crate) const STRIP_HIDDEN: Pass = Pass { /// Strip items marked `#[doc(hidden)]` pub(crate) fn strip_hidden(krate: clean::Crate, cx: &mut DocContext<'_>) -> clean::Crate { let mut retained = ItemIdSet::default(); + let is_json_output = cx.output_format.is_json() && !cx.show_coverage; // strip all #[doc(hidden)] items let krate = { @@ -25,7 +26,12 @@ pub(crate) fn strip_hidden(krate: clean::Crate, cx: &mut DocContext<'_>) -> clea }; // strip all impls referencing stripped items - let mut stripper = ImplStripper { retained: &retained, cache: &cx.cache }; + let mut stripper = ImplStripper { + retained: &retained, + cache: &cx.cache, + is_json_output, + document_private: cx.render_options.document_private, + }; stripper.fold_crate(krate) } diff --git a/src/librustdoc/passes/strip_private.rs b/src/librustdoc/passes/strip_private.rs index 9ba841a31cf95..f3aa3c7ce2459 100644 --- a/src/librustdoc/passes/strip_private.rs +++ b/src/librustdoc/passes/strip_private.rs @@ -17,6 +17,7 @@ pub(crate) const STRIP_PRIVATE: Pass = Pass { pub(crate) fn strip_private(mut krate: clean::Crate, cx: &mut DocContext<'_>) -> clean::Crate { // This stripper collects all *retained* nodes. 
let mut retained = ItemIdSet::default(); + let is_json_output = cx.output_format.is_json() && !cx.show_coverage; // strip all private items { @@ -24,12 +25,17 @@ pub(crate) fn strip_private(mut krate: clean::Crate, cx: &mut DocContext<'_>) -> retained: &mut retained, access_levels: &cx.cache.access_levels, update_retained: true, - is_json_output: cx.output_format.is_json() && !cx.show_coverage, + is_json_output, }; krate = ImportStripper.fold_crate(stripper.fold_crate(krate)); } // strip all impls referencing private items - let mut stripper = ImplStripper { retained: &retained, cache: &cx.cache }; + let mut stripper = ImplStripper { + retained: &retained, + cache: &cx.cache, + is_json_output, + document_private: cx.render_options.document_private, + }; stripper.fold_crate(krate) } diff --git a/src/librustdoc/passes/stripper.rs b/src/librustdoc/passes/stripper.rs index 0d419042a10a4..3f069e8393f7e 100644 --- a/src/librustdoc/passes/stripper.rs +++ b/src/librustdoc/passes/stripper.rs @@ -14,17 +14,19 @@ pub(crate) struct Stripper<'a> { pub(crate) is_json_output: bool, } -impl<'a> Stripper<'a> { - // We need to handle this differently for the JSON output because some non exported items could - // be used in public API. And so, we need these items as well. `is_exported` only checks if they - // are in the public API, which is not enough. - #[inline] - fn is_item_reachable(&self, item_id: ItemId) -> bool { - if self.is_json_output { - self.access_levels.is_reachable(item_id.expect_def_id()) - } else { - self.access_levels.is_exported(item_id.expect_def_id()) - } +// We need to handle this differently for the JSON output because some non exported items could +// be used in public API. And so, we need these items as well. `is_exported` only checks if they +// are in the public API, which is not enough. +#[inline] +fn is_item_reachable( + is_json_output: bool, + access_levels: &AccessLevels, + item_id: ItemId, +) -> bool { + if is_json_output { + access_levels.is_reachable(item_id.expect_def_id()) + } else { + access_levels.is_exported(item_id.expect_def_id()) } } @@ -61,7 +63,9 @@ impl<'a> DocFolder for Stripper<'a> { | clean::MacroItem(..) | clean::ForeignTypeItem => { let item_id = i.item_id; - if item_id.is_local() && !self.is_item_reachable(item_id) { + if item_id.is_local() + && !is_item_reachable(self.is_json_output, self.access_levels, item_id) + { debug!("Stripper: stripping {:?} {:?}", i.type_(), i.name); return None; } @@ -133,6 +137,8 @@ impl<'a> DocFolder for Stripper<'a> { pub(crate) struct ImplStripper<'a> { pub(crate) retained: &'a ItemIdSet, pub(crate) cache: &'a Cache, + pub(crate) is_json_output: bool, + pub(crate) document_private: bool, } impl<'a> DocFolder for ImplStripper<'a> { @@ -140,8 +146,27 @@ impl<'a> DocFolder for ImplStripper<'a> { if let clean::ImplItem(ref imp) = *i.kind { // Impl blocks can be skipped if they are: empty; not a trait impl; and have no // documentation. - if imp.trait_.is_none() && imp.items.is_empty() && i.doc_value().is_none() { - return None; + // + // There is one special case: if the impl block contains only private items. + if imp.trait_.is_none() { + // If the only items present are private ones and we're not rendering private items, + // we don't document it. 
+ if !imp.items.is_empty() + && !self.document_private + && imp.items.iter().all(|i| { + let item_id = i.item_id; + item_id.is_local() + && !is_item_reachable( + self.is_json_output, + &self.cache.access_levels, + item_id, + ) + }) + { + return None; + } else if imp.items.is_empty() && i.doc_value().is_none() { + return None; + } } if let Some(did) = imp.for_.def_id(self.cache) { if did.is_local() && !imp.for_.is_assoc_ty() && !self.retained.contains(&did.into()) diff --git a/src/rustdoc-json-types/lib.rs b/src/rustdoc-json-types/lib.rs index bd4ea98441d66..ecdecadd2efb5 100644 --- a/src/rustdoc-json-types/lib.rs +++ b/src/rustdoc-json-types/lib.rs @@ -591,8 +591,11 @@ pub struct Import { /// May be different from the last segment of `source` when renaming imports: /// `use source as name;` pub name: String, - /// The ID of the item being imported. - pub id: Option<Id>, // FIXME is this actually ever None? + /// The ID of the item being imported. Will be `None` in case of re-exports of primitives: + /// ```rust + /// pub use i32 as my_i32; + /// ``` + pub id: Option<Id>, /// Whether this import uses a glob: `use source::*;` pub glob: bool, } diff --git a/src/test/mir-opt/README.md b/src/test/mir-opt/README.md index a0550466cf07b..0721d9f7019bc 100644 --- a/src/test/mir-opt/README.md +++ b/src/test/mir-opt/README.md @@ -14,6 +14,18 @@ presence of pointers in constants or other bit width dependent things. In that case you can add `// EMIT_MIR_FOR_EACH_BIT_WIDTH` to your test, causing separate files to be generated for 32bit and 64bit systems. +## Unit testing + +If you are only testing the behavior of a particular mir-opt pass on some specific input (as is +usually the case), you should add + +``` +// unit-test: PassName +``` + +to the top of the file. This makes sure that other passes don't run, which means you'll get the input +you expected and your test won't break when other code changes. + ## Emit a diff of the mir for a specific optimization This is what you want most often when you want to see how an optimization changes the MIR. diff --git a/src/test/rustdoc-gui/sidebar-mobile-scroll.goml b/src/test/rustdoc-gui/sidebar-mobile-scroll.goml new file mode 100644 index 0000000000000..b3bcea253388f --- /dev/null +++ b/src/test/rustdoc-gui/sidebar-mobile-scroll.goml @@ -0,0 +1,31 @@ +// This test ensures that the mobile sidebar preserves scroll position. +goto: file://|DOC_PATH|/test_docs/struct.Foo.html +// Switching to "mobile view" by reducing the width to 600px. +size: (600, 600) +assert-css: (".sidebar", {"display": "block", "left": "-1000px"}) + +// Scroll down. +scroll-to: "//h2[@id='blanket-implementations']" +assert-window-property: {"pageYOffset": "702"} + +// Open the sidebar menu. +click: ".sidebar-menu-toggle" +wait-for-css: (".sidebar", {"left": "0px"}) + +// We are no longer "scrolled". It's important that the user can't +// scroll the body at all, but these test scripts are run only in Chrome, +// and we need to use a more complicated solution to this problem because +// of Mobile Safari... +assert-window-property: {"pageYOffset": "0"} + +// Close the sidebar menu. Make sure the scroll position gets restored. +click: ".sidebar-menu-toggle" +wait-for-css: (".sidebar", {"left": "-1000px"}) +assert-window-property: {"pageYOffset": "702"} + +// Now test that scrollability returns when the browser window is just resized.
+click: ".sidebar-menu-toggle" +wait-for-css: (".sidebar", {"left": "0px"}) +assert-window-property: {"pageYOffset": "0"} +size: (900, 900) +assert-window-property: {"pageYOffset": "702"} diff --git a/src/test/rustdoc-gui/sidebar-source-code-display.goml b/src/test/rustdoc-gui/sidebar-source-code-display.goml index fa322574fdec7..e4662a10ed5da 100644 --- a/src/test/rustdoc-gui/sidebar-source-code-display.goml +++ b/src/test/rustdoc-gui/sidebar-source-code-display.goml @@ -233,6 +233,17 @@ wait-for-css: (".sidebar", {"width": "0px"}) // The "scrollTop" property should be the same. assert-window-property: {"pageYOffset": "2519"} +// We now check that the scroll position is restored if the window is resized. +size: (500, 700) +click: "#sidebar-toggle" +wait-for-css: ("#source-sidebar", {"visibility": "visible"}) +assert-window-property: {"pageYOffset": "0"} +size: (900, 900) +assert-window-property: {"pageYOffset": "2519"} +size: (500, 700) +click: "#sidebar-toggle" +wait-for-css: ("#source-sidebar", {"visibility": "hidden"}) + // We now check that opening the sidebar and clicking a link will close it. // The behavior here on mobile is different than the behavior on desktop, // but common sense dictates that if you have a list of files that fills the entire screen, and diff --git a/src/test/rustdoc-json/primitive.rs b/src/test/rustdoc-json/primitive.rs index b84c2f7c6ac39..878a1a4a79ce7 100644 --- a/src/test/rustdoc-json/primitive.rs +++ b/src/test/rustdoc-json/primitive.rs @@ -12,3 +12,9 @@ mod usize {} // @has - "$.index[*][?(@.name=='checked_add')]" // @!is - "$.index[*][?(@.name=='checked_add')]" $local_crate_id // @!has - "$.index[*][?(@.name=='is_ascii_uppercase')]" + +// @is - "$.index[*][?(@.kind=='import' && @.inner.name=='my_i32')].inner.id" null +pub use i32 as my_i32; + +// @is - "$.index[*][?(@.kind=='import' && @.inner.name=='u32')].inner.id" null +pub use u32; diff --git a/src/test/rustdoc/empty-impl-block-private-with-doc.rs b/src/test/rustdoc/empty-impl-block-private-with-doc.rs new file mode 100644 index 0000000000000..4397199616302 --- /dev/null +++ b/src/test/rustdoc/empty-impl-block-private-with-doc.rs @@ -0,0 +1,44 @@ +// compile-flags: --document-private-items + +#![feature(inherent_associated_types)] +#![allow(incomplete_features)] +#![crate_name = "foo"] + +// @has 'foo/struct.Foo.html' +pub struct Foo; + +// There are 3 impl blocks with public item and one that should not be displayed +// by default because it only contains private items (but not in this case because +// we used `--document-private-items`). +// @count - '//*[@class="impl has-srclink"]' 'impl Foo' 4 + +// Impl block only containing private items should not be displayed unless the +// `--document-private-items` flag is used. +/// Private +impl Foo { + const BAR: u32 = 0; + type FOO = i32; + fn hello() {} +} + +// But if any element of the impl block is public, it should be displayed. 
+/// Not private +impl Foo { + pub const BAR: u32 = 0; + type FOO = i32; + fn hello() {} +} + +/// Not private +impl Foo { + const BAR: u32 = 0; + pub type FOO = i32; + fn hello() {} +} + +/// Not private +impl Foo { + const BAR: u32 = 0; + type FOO = i32; + pub fn hello() {} +} diff --git a/src/test/rustdoc/empty-impl-block-private.rs b/src/test/rustdoc/empty-impl-block-private.rs new file mode 100644 index 0000000000000..5caf020658c5f --- /dev/null +++ b/src/test/rustdoc/empty-impl-block-private.rs @@ -0,0 +1,40 @@ +#![feature(inherent_associated_types)] +#![allow(incomplete_features)] +#![crate_name = "foo"] + +// @has 'foo/struct.Foo.html' +pub struct Foo; + +// There are 3 impl blocks with public item and one that should not be displayed +// because it only contains private items. +// @count - '//*[@class="impl has-srclink"]' 'impl Foo' 3 + +// Impl block only containing private items should not be displayed. +/// Private +impl Foo { + const BAR: u32 = 0; + type FOO = i32; + fn hello() {} +} + +// But if any element of the impl block is public, it should be displayed. +/// Not private +impl Foo { + pub const BAR: u32 = 0; + type FOO = i32; + fn hello() {} +} + +/// Not private +impl Foo { + const BAR: u32 = 0; + pub type FOO = i32; + fn hello() {} +} + +/// Not private +impl Foo { + const BAR: u32 = 0; + type FOO = i32; + pub fn hello() {} +} diff --git a/src/test/ui/consts/const_in_pattern/incomplete-slice.rs b/src/test/ui/consts/const_in_pattern/incomplete-slice.rs new file mode 100644 index 0000000000000..e1ccda71d40ea --- /dev/null +++ b/src/test/ui/consts/const_in_pattern/incomplete-slice.rs @@ -0,0 +1,15 @@ +#[derive(PartialEq)] +enum E { + A, +} + +const E_SL: &[E] = &[E::A]; + +fn main() { + match &[][..] { + //~^ ERROR non-exhaustive patterns: `&_` not covered [E0004] + E_SL => {} + //~^ WARN to use a constant of type `E` in a pattern, `E` must be annotated with `#[derive(PartialEq, Eq)]` + //~| WARN this was previously accepted by the compiler but is being phased out + } +} diff --git a/src/test/ui/consts/const_in_pattern/incomplete-slice.stderr b/src/test/ui/consts/const_in_pattern/incomplete-slice.stderr new file mode 100644 index 0000000000000..0ff7083713843 --- /dev/null +++ b/src/test/ui/consts/const_in_pattern/incomplete-slice.stderr @@ -0,0 +1,26 @@ +warning: to use a constant of type `E` in a pattern, `E` must be annotated with `#[derive(PartialEq, Eq)]` + --> $DIR/incomplete-slice.rs:11:9 + | +LL | E_SL => {} + | ^^^^ + | + = note: `#[warn(indirect_structural_match)]` on by default + = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! + = note: for more information, see issue #62411 + +error[E0004]: non-exhaustive patterns: `&_` not covered + --> $DIR/incomplete-slice.rs:9:11 + | +LL | match &[][..] { + | ^^^^^^^ pattern `&_` not covered + | + = note: the matched value is of type `&[E]` +help: ensure that all possible cases are being handled by adding a match arm with a wildcard pattern or an explicit pattern as shown + | +LL ~ E_SL => {} +LL + &_ => todo!() + | + +error: aborting due to previous error; 1 warning emitted + +For more information about this error, try `rustc --explain E0004`. 
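The warning in the new `incomplete-slice` test above points at the usual fix for `indirect_structural_match`: derive both `PartialEq` and `Eq` on the type used in the constant. Below is a minimal stand-alone sketch of the compiling variant (an editorial illustration, not part of the patch, assuming the same shape as the test case):

```rust
// Deriving both `PartialEq` and `Eq` makes `E` structurally matchable, so the
// constant slice pattern is accepted without the `indirect_structural_match`
// warning. A wildcard arm is still needed because one constant does not
// exhaustively cover `&[E]`.
#[derive(PartialEq, Eq)]
enum E {
    A,
}

const E_SL: &[E] = &[E::A];

fn main() {
    match &[E::A][..] {
        E_SL => println!("matched the constant slice"),
        _ => println!("some other slice"),
    }
}
```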
diff --git a/src/test/ui/copy-a-resource.stderr b/src/test/ui/copy-a-resource.stderr index 128087f1e3755..b92449c6e0aff 100644 --- a/src/test/ui/copy-a-resource.stderr +++ b/src/test/ui/copy-a-resource.stderr @@ -10,6 +10,10 @@ LL | let _y = x.clone(); = help: items from traits can only be used if the trait is implemented and in scope = note: the following trait defines an item `clone`, perhaps you need to implement it: candidate #1: `Clone` +help: one of the expressions' fields has a method of the same name + | +LL | let _y = x.i.clone(); + | ++ error: aborting due to previous error diff --git a/src/test/ui/hrtb/issue-30786.stderr b/src/test/ui/hrtb/issue-30786.stderr index bc7b5e914e127..ffe3d7b81f51e 100644 --- a/src/test/ui/hrtb/issue-30786.stderr +++ b/src/test/ui/hrtb/issue-30786.stderr @@ -18,10 +18,6 @@ note: the following trait bounds were not satisfied: | LL | impl StreamExt for T where for<'a> &'a mut T: Stream {} | --------- - ^^^^^^ unsatisfied trait bound introduced here -help: one of the expressions' fields has a method of the same name - | -LL | let filter = map.stream.filterx(|x: &_| true); - | +++++++ error[E0599]: the method `countx` exists for struct `Filter fn(&'r u64) -> &'r u64 {identity::}>, [closure@$DIR/issue-30786.rs:129:30: 129:37]>`, but its trait bounds were not satisfied --> $DIR/issue-30786.rs:130:24 @@ -43,10 +39,6 @@ note: the following trait bounds were not satisfied: | LL | impl StreamExt for T where for<'a> &'a mut T: Stream {} | --------- - ^^^^^^ unsatisfied trait bound introduced here -help: one of the expressions' fields has a method of the same name - | -LL | let count = filter.stream.countx(); - | +++++++ error: aborting due to 2 previous errors diff --git a/src/test/ui/issues/issue-2823.stderr b/src/test/ui/issues/issue-2823.stderr index b5a2b2f55a6d4..208b340d06410 100644 --- a/src/test/ui/issues/issue-2823.stderr +++ b/src/test/ui/issues/issue-2823.stderr @@ -10,6 +10,10 @@ LL | let _d = c.clone(); = help: items from traits can only be used if the trait is implemented and in scope = note: the following trait defines an item `clone`, perhaps you need to implement it: candidate #1: `Clone` +help: one of the expressions' fields has a method of the same name + | +LL | let _d = c.x.clone(); + | ++ error: aborting due to previous error diff --git a/src/test/ui/lint/unused_parens_multibyte_recovery.rs b/src/test/ui/lint/unused_parens_multibyte_recovery.rs new file mode 100644 index 0000000000000..8fcfae22a3d33 --- /dev/null +++ b/src/test/ui/lint/unused_parens_multibyte_recovery.rs @@ -0,0 +1,11 @@ +// ignore-tidy-trailing-newlines +// +// error-pattern: this file contains an unclosed delimiter +// error-pattern: this file contains an unclosed delimiter +// error-pattern: this file contains an unclosed delimiter +// error-pattern: format argument must be a string literal +// +// Verify that unused parens lint does not try to create a span +// which points in the middle of a multibyte character. 
+ +fn f(){(print!(á \ No newline at end of file diff --git a/src/test/ui/lint/unused_parens_multibyte_recovery.stderr b/src/test/ui/lint/unused_parens_multibyte_recovery.stderr new file mode 100644 index 0000000000000..a0302b17e255d --- /dev/null +++ b/src/test/ui/lint/unused_parens_multibyte_recovery.stderr @@ -0,0 +1,43 @@ +error: this file contains an unclosed delimiter + --> $DIR/unused_parens_multibyte_recovery.rs:11:17 + | +LL | fn f(){(print!(á + | -- - ^ + | || | + | || unclosed delimiter + | |unclosed delimiter + | unclosed delimiter + +error: this file contains an unclosed delimiter + --> $DIR/unused_parens_multibyte_recovery.rs:11:17 + | +LL | fn f(){(print!(á + | -- - ^ + | || | + | || unclosed delimiter + | |unclosed delimiter + | unclosed delimiter + +error: this file contains an unclosed delimiter + --> $DIR/unused_parens_multibyte_recovery.rs:11:17 + | +LL | fn f(){(print!(á + | -- - ^ + | || | + | || unclosed delimiter + | |unclosed delimiter + | unclosed delimiter + +error: format argument must be a string literal + --> $DIR/unused_parens_multibyte_recovery.rs:11:16 + | +LL | fn f(){(print!(á + | ^ + | +help: you might be missing a string literal to format with + | +LL | fn f(){(print!("{}", á + | +++++ + +error: aborting due to 4 previous errors + diff --git a/src/test/ui/noncopyable-class.stderr b/src/test/ui/noncopyable-class.stderr index 0c696163a26c5..15e22e946da8a 100644 --- a/src/test/ui/noncopyable-class.stderr +++ b/src/test/ui/noncopyable-class.stderr @@ -10,6 +10,14 @@ LL | let _y = x.clone(); = help: items from traits can only be used if the trait is implemented and in scope = note: the following trait defines an item `clone`, perhaps you need to implement it: candidate #1: `Clone` +help: one of the expressions' fields has a method of the same name + | +LL | let _y = x.i.clone(); + | ++ +help: one of the expressions' fields has a method of the same name + | +LL | let _y = x.j.x.clone(); + | ++++ error: aborting due to previous error diff --git a/src/test/ui/suggestions/field-access-considering-privacy.rs b/src/test/ui/suggestions/field-access-considering-privacy.rs new file mode 100644 index 0000000000000..3de06b21420b4 --- /dev/null +++ b/src/test/ui/suggestions/field-access-considering-privacy.rs @@ -0,0 +1,35 @@ +use a::TyCtxt; + +mod a { + use std::ops::Deref; + pub struct TyCtxt<'tcx> { + gcx: &'tcx GlobalCtxt<'tcx>, + } + + impl<'tcx> Deref for TyCtxt<'tcx> { + type Target = &'tcx GlobalCtxt<'tcx>; + + fn deref(&self) -> &Self::Target { + &self.gcx + } + } + + pub struct GlobalCtxt<'tcx> { + pub sess: &'tcx Session, + _t: &'tcx (), + } + + pub struct Session { + pub opts: (), + } +} + +mod b { + fn foo<'tcx>(tcx: crate::TyCtxt<'tcx>) { + tcx.opts; + //~^ ERROR no field `opts` on type `TyCtxt<'tcx>` + //~| HELP one of the expressions' fields has a field of the same name + } +} + +fn main() {} diff --git a/src/test/ui/suggestions/field-access-considering-privacy.stderr b/src/test/ui/suggestions/field-access-considering-privacy.stderr new file mode 100644 index 0000000000000..cbf6f3d100258 --- /dev/null +++ b/src/test/ui/suggestions/field-access-considering-privacy.stderr @@ -0,0 +1,14 @@ +error[E0609]: no field `opts` on type `TyCtxt<'tcx>` + --> $DIR/field-access-considering-privacy.rs:29:13 + | +LL | tcx.opts; + | ^^^^ unknown field + | +help: one of the expressions' fields has a field of the same name + | +LL | tcx.sess.opts; + | +++++ + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0609`. 
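For readers skimming the new `field-access-considering-privacy` test above, here is a minimal, self-contained sketch of the `Deref` pattern the privacy-aware field search is meant to handle (illustrative names only, not taken from the patch): the wrapper's own fields are all private, so a useful suggestion has to go through the public fields of the `Deref` target.

```rust
use std::ops::Deref;

// The wrapper's only field is private, mirroring `TyCtxt` in the test above.
pub struct Wrapper {
    inner: Inner,
}

// The `Deref` target exposes the fields users are actually expected to reach.
pub struct Inner {
    pub value: u32,
}

impl Deref for Wrapper {
    type Target = Inner;

    fn deref(&self) -> &Inner {
        &self.inner
    }
}

fn main() {
    let w = Wrapper { inner: Inner { value: 7 } };
    // Field access autoderefs through `Deref`, so `w.value` resolves to
    // `(*w).value`; the suggestion logic should only propose paths built from
    // fields that are visible from the calling module.
    println!("{}", w.value);
}
```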
diff --git a/src/test/ui/typeck/issue-100246.rs b/src/test/ui/typeck/issue-100246.rs new file mode 100644 index 0000000000000..8f0b34bab0c87 --- /dev/null +++ b/src/test/ui/typeck/issue-100246.rs @@ -0,0 +1,30 @@ +#![recursion_limit = "5"] // To reduce noise + +//expect incompatible type error when ambiguous traits are in scope +//and not an overflow error on the span in the main function. + +struct Ratio<T>(T); + +pub trait Pow { + fn pow(self) -> Self; +} + +impl<'a, T> Pow for &'a Ratio<T> +where + &'a T: Pow, +{ + fn pow(self) -> Self { + self + } +} + +fn downcast<'a, W: ?Sized>() -> std::io::Result<&'a W> { + todo!() +} + +struct Other; + +fn main() -> std::io::Result<()> { + let other: Other = downcast()?;//~ERROR 28:24: 28:35: `?` operator has incompatible types + Ok(()) +} diff --git a/src/test/ui/typeck/issue-100246.stderr b/src/test/ui/typeck/issue-100246.stderr new file mode 100644 index 0000000000000..8b77de94e89f8 --- /dev/null +++ b/src/test/ui/typeck/issue-100246.stderr @@ -0,0 +1,13 @@ +error[E0308]: `?` operator has incompatible types + --> $DIR/issue-100246.rs:28:24 + | +LL | let other: Other = downcast()?; + | ^^^^^^^^^^^ expected struct `Other`, found reference + | + = note: `?` operator cannot convert from `&_` to `Other` + = note: expected struct `Other` + found reference `&_` + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0308`.