diff --git a/.mailmap b/.mailmap index fa0728bd79461..f476926832158 100644 --- a/.mailmap +++ b/.mailmap @@ -29,7 +29,6 @@ Ariel Ben-Yehuda arielb1 Austin Seipp Aydin Kim aydin.kim Barosl Lee Barosl LEE -Bastian Kauschke Ben Alpert Ben Sago Ben S Ben Sago Ben S @@ -161,6 +160,7 @@ Kyle J Strand Kyle J Strand Kyle J Strand Laurențiu Nicola +lcnr Lee Jeffery Lee Jeffery Lee Wondong Lennart Kudling diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2a4c42ea0a456..b600074c19770 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,14 +2,14 @@ Thank you for your interest in contributing to Rust! -To get started, read the [Getting Started] guide in the [rustc-dev-guide]. +To get started, read the [Contributing to Rust] chapter of the [rustc-dev-guide]. ## Bug reports Did a compiler error message tell you to come here? If you want to create an ICE report, refer to [this section][contributing-bug-reports] and [open an issue][issue template]. -[Getting Started]: https://rustc-dev-guide.rust-lang.org/getting-started.html +[Contributing to Rust]: https://rustc-dev-guide.rust-lang.org/contributing.html#contributing-to-rust [rustc-dev-guide]: https://rustc-dev-guide.rust-lang.org/ [contributing-bug-reports]: https://rustc-dev-guide.rust-lang.org/contributing.html#bug-reports [issue template]: https://github.com/rust-lang/rust/issues/new/choose diff --git a/Cargo.lock b/Cargo.lock index 5ca804cb739eb..98e7e34faf76f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -310,7 +310,7 @@ dependencies = [ "crypto-hash", "curl", "curl-sys", - "env_logger 0.7.1", + "env_logger 0.8.1", "filetime", "flate2", "fwdansi", @@ -418,6 +418,17 @@ dependencies = [ "serde_json", ] +[[package]] +name = "cargo_metadata" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5a5f7b42f606b7f23674f6f4d877628350682bc40687d3fae65679a58d55345" +dependencies = [ + "semver 0.11.0", + "serde", + "serde_json", +] + [[package]] name = "cargotest2" version = "0.1.0" @@ -530,15 +541,14 @@ dependencies = [ name = "clippy" version = "0.0.212" dependencies = [ - "cargo_metadata 0.11.1", + "cargo_metadata 0.12.0", "clippy-mini-macro-test", "clippy_lints", "compiletest_rs", "derive-new", - "lazy_static", "rustc-workspace-hack", "rustc_tools_util 0.2.0", - "semver 0.10.0", + "semver 0.11.0", "serde", "tempfile", "tester", @@ -552,14 +562,14 @@ version = "0.2.0" name = "clippy_lints" version = "0.0.212" dependencies = [ - "cargo_metadata 0.11.1", + "cargo_metadata 0.12.0", "if_chain", "itertools 0.9.0", "pulldown-cmark 0.8.0", "quine-mc_cluskey", "quote", "regex-syntax", - "semver 0.10.0", + "semver 0.11.0", "serde", "smallvec 1.4.2", "syn", @@ -626,9 +636,9 @@ dependencies = [ [[package]] name = "compiler_builtins" -version = "0.1.35" +version = "0.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3fcd8aba10d17504c87ef12d4f62ef404c6a4703d16682a9eb5543e6cf24455" +checksum = "7cd0782e0a7da7598164153173e5a5d4d9b1da094473c98dce0ff91406112369" dependencies = [ "cc", "rustc-std-workspace-core", @@ -1035,6 +1045,19 @@ dependencies = [ "termcolor", ] +[[package]] +name = "env_logger" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54532e3223c5af90a6a757c90b5c5521564b07e5e7a958681bcd2afad421cdcd" +dependencies = [ + "atty", + "humantime 2.0.1", + "log", + "regex", + "termcolor", +] + [[package]] name = "error_index_generator" version = "0.0.0" @@ -1353,9 +1376,9 @@ dependencies = [ [[package]] name = "hermit-abi" 
-version = "0.1.15" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3deed196b6e7f9e44a2ae8d94225d80302d81208b1bb673fd21fe634645c85a9" +checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8" dependencies = [ "compiler_builtins", "libc", @@ -1744,6 +1767,10 @@ dependencies = [ [[package]] name = "linkchecker" version = "0.1.0" +dependencies = [ + "once_cell", + "regex", +] [[package]] name = "linked-hash-map" @@ -1940,6 +1967,17 @@ dependencies = [ "rustc-hash", ] +[[package]] +name = "measureme" +version = "9.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22bf8d885d073610aee20e7fa205c4341ed32a761dbde96da5fd96301a8d3e82" +dependencies = [ + "parking_lot 0.11.0", + "rustc-hash", + "smallvec 1.4.2", +] + [[package]] name = "memchr" version = "2.3.3" @@ -3072,7 +3110,7 @@ dependencies = [ "indexmap", "jobserver", "libc", - "measureme", + "measureme 0.7.1", "parking_lot 0.11.0", "rustc-ap-rustc_graphviz", "rustc-ap-rustc_index", @@ -3272,9 +3310,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.16" +version = "0.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783" +checksum = "6e3bad0ee36814ca07d7968269dd4b7ec89ec2da10c4bb613928d3077083c232" dependencies = [ "compiler_builtins", "rustc-std-workspace-core", @@ -3474,7 +3512,7 @@ version = "0.0.0" dependencies = [ "bitflags", "libc", - "measureme", + "measureme 9.0.0", "rustc-demangle", "rustc_ast", "rustc_attr", @@ -3540,7 +3578,7 @@ dependencies = [ "indexmap", "jobserver", "libc", - "measureme", + "measureme 9.0.0", "parking_lot 0.11.0", "rustc-hash", "rustc-rayon", @@ -3845,7 +3883,7 @@ version = "0.0.0" dependencies = [ "bitflags", "chalk-ir", - "measureme", + "measureme 9.0.0", "polonius-engine", "rustc-rayon-core", "rustc_apfloat", @@ -4356,7 +4394,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" dependencies = [ - "semver-parser", + "semver-parser 0.7.0", "serde", ] @@ -4366,7 +4404,17 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "394cec28fa623e00903caf7ba4fa6fb9a0e260280bb8cdbbba029611108a0190" dependencies = [ - "semver-parser", + "semver-parser 0.7.0", + "serde", +] + +[[package]] +name = "semver" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" +dependencies = [ + "semver-parser 0.10.1", "serde", ] @@ -4376,6 +4424,15 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" +[[package]] +name = "semver-parser" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ef146c2ad5e5f4b037cd6ce2ebb775401729b19a82040c1beac9d36c7d1428" +dependencies = [ + "pest", +] + [[package]] name = "serde" version = "1.0.115" @@ -4407,9 +4464,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.57" +version = "1.0.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "164eacbdb13512ec2745fb09d51fd5b22b0d65ed294a1dcf7285a360c80a675c" +checksum = "dcac07dbffa1c65e7f816ab9eba78eb142c6d44410f4eeba1e26e4f5dfa56b95" dependencies = [ "itoa", 
"ryu", diff --git a/README.md b/README.md index d445bbdf6e842..07c0960466400 100644 --- a/README.md +++ b/README.md @@ -9,8 +9,7 @@ standard library, and documentation. **Note: this README is for _users_ rather than _contributors_. If you wish to _contribute_ to the compiler, you should read the -[Getting Started][gettingstarted] of the rustc-dev-guide instead of this -section.** +[Getting Started][gettingstarted] section of the rustc-dev-guide instead.** ## Quick Start diff --git a/compiler/rustc_arena/src/lib.rs b/compiler/rustc_arena/src/lib.rs index c051c607ff217..1a85a46ed74f0 100644 --- a/compiler/rustc_arena/src/lib.rs +++ b/compiler/rustc_arena/src/lib.rs @@ -217,16 +217,18 @@ impl TypedArena { let mut chunks = self.chunks.borrow_mut(); let mut new_cap; if let Some(last_chunk) = chunks.last_mut() { - let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize; - last_chunk.entries = used_bytes / mem::size_of::(); + // If a type is `!needs_drop`, we don't need to keep track of how many elements + // the chunk stores - the field will be ignored anyway. + if mem::needs_drop::() { + let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize; + last_chunk.entries = used_bytes / mem::size_of::(); + } // If the previous chunk's len is less than HUGE_PAGE // bytes, then this chunk will be least double the previous // chunk's size. - new_cap = last_chunk.storage.len(); - if new_cap < HUGE_PAGE / elem_size { - new_cap = new_cap.checked_mul(2).unwrap(); - } + new_cap = last_chunk.storage.len().min(HUGE_PAGE / elem_size / 2); + new_cap = new_cap * 2; } else { new_cap = PAGE / elem_size; } @@ -343,10 +345,8 @@ impl DroplessArena { // If the previous chunk's len is less than HUGE_PAGE // bytes, then this chunk will be least double the previous // chunk's size. - new_cap = last_chunk.storage.len(); - if new_cap < HUGE_PAGE { - new_cap = new_cap.checked_mul(2).unwrap(); - } + new_cap = last_chunk.storage.len().min(HUGE_PAGE / 2); + new_cap = new_cap * 2; } else { new_cap = PAGE; } diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index 8f156aea2ff5b..7224b482ed780 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -24,7 +24,7 @@ pub use UnsafeSource::*; use crate::ptr::P; use crate::token::{self, CommentKind, DelimToken}; -use crate::tokenstream::{DelimSpan, TokenStream, TokenTree}; +use crate::tokenstream::{DelimSpan, LazyTokenStream, TokenStream, TokenTree}; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::stack::ensure_sufficient_stack; @@ -97,7 +97,7 @@ pub struct Path { /// The segments in the path: the things separated by `::`. /// Global paths begin with `kw::PathRoot`. pub segments: Vec, - pub tokens: Option, + pub tokens: Option, } impl PartialEq for Path { @@ -535,7 +535,7 @@ pub struct Block { /// Distinguishes between `unsafe { ... }` and `{ ... }`. pub rules: BlockCheckMode, pub span: Span, - pub tokens: Option, + pub tokens: Option, } /// A match pattern. @@ -546,7 +546,7 @@ pub struct Pat { pub id: NodeId, pub kind: PatKind, pub span: Span, - pub tokens: Option, + pub tokens: Option, } impl Pat { @@ -892,7 +892,7 @@ pub struct Stmt { pub id: NodeId, pub kind: StmtKind, pub span: Span, - pub tokens: Option, + pub tokens: Option, } impl Stmt { @@ -1040,7 +1040,7 @@ pub struct Expr { pub kind: ExprKind, pub span: Span, pub attrs: AttrVec, - pub tokens: Option, + pub tokens: Option, } // `Expr` is used a lot. Make sure it doesn't unintentionally get bigger. 
@@ -1152,6 +1152,7 @@ impl Expr { match self.kind { ExprKind::Box(_) => ExprPrecedence::Box, ExprKind::Array(_) => ExprPrecedence::Array, + ExprKind::ConstBlock(_) => ExprPrecedence::ConstBlock, ExprKind::Call(..) => ExprPrecedence::Call, ExprKind::MethodCall(..) => ExprPrecedence::MethodCall, ExprKind::Tup(_) => ExprPrecedence::Tup, @@ -1207,6 +1208,8 @@ pub enum ExprKind { Box(P), /// An array (`[a, b, c, d]`) Array(Vec>), + /// Allow anonymous constants from an inline `const` block + ConstBlock(AnonConst), /// A function call /// /// The first field resolves to the function itself, @@ -1832,7 +1835,7 @@ pub struct Ty { pub id: NodeId, pub kind: TyKind, pub span: Span, - pub tokens: Option, + pub tokens: Option, } impl Clone for Ty { @@ -2405,7 +2408,7 @@ impl rustc_serialize::Decodable for AttrId { pub struct AttrItem { pub path: Path, pub args: MacArgs, - pub tokens: Option, + pub tokens: Option, } /// A list of attributes. @@ -2420,6 +2423,7 @@ pub struct Attribute { /// or the construct this attribute is contained within (inner). pub style: AttrStyle, pub span: Span, + pub tokens: Option, } #[derive(Clone, Encodable, Decodable, Debug)] @@ -2479,7 +2483,7 @@ pub enum CrateSugar { pub struct Visibility { pub kind: VisibilityKind, pub span: Span, - pub tokens: Option, + pub tokens: Option, } #[derive(Clone, Encodable, Decodable, Debug)] @@ -2566,7 +2570,7 @@ pub struct Item { /// /// Note that the tokens here do not include the outer attributes, but will /// include inner attributes. - pub tokens: Option, + pub tokens: Option, } impl Item { diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs index 8351be222f6bd..70ad43ecad219 100644 --- a/compiler/rustc_ast/src/attr/mod.rs +++ b/compiler/rustc_ast/src/attr/mod.rs @@ -325,7 +325,7 @@ pub fn mk_attr(style: AttrStyle, path: Path, args: MacArgs, span: Span) -> Attri } pub fn mk_attr_from_item(style: AttrStyle, item: AttrItem, span: Span) -> Attribute { - Attribute { kind: AttrKind::Normal(item), id: mk_attr_id(), style, span } + Attribute { kind: AttrKind::Normal(item), id: mk_attr_id(), style, span, tokens: None } } /// Returns an inner attribute with the given value and span. @@ -344,7 +344,13 @@ pub fn mk_doc_comment( data: Symbol, span: Span, ) -> Attribute { - Attribute { kind: AttrKind::DocComment(comment_kind, data), id: mk_attr_id(), style, span } + Attribute { + kind: AttrKind::DocComment(comment_kind, data), + id: mk_attr_id(), + style, + span, + tokens: None, + } } pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool { @@ -623,7 +629,8 @@ impl HasAttrs for StmtKind { match *self { StmtKind::Local(ref local) => local.attrs(), StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => expr.attrs(), - StmtKind::Empty | StmtKind::Item(..) => &[], + StmtKind::Item(ref item) => item.attrs(), + StmtKind::Empty => &[], StmtKind::MacCall(ref mac) => mac.attrs.attrs(), } } @@ -632,7 +639,8 @@ impl HasAttrs for StmtKind { match self { StmtKind::Local(local) => local.visit_attrs(f), StmtKind::Expr(expr) | StmtKind::Semi(expr) => expr.visit_attrs(f), - StmtKind::Empty | StmtKind::Item(..) 
=> {} + StmtKind::Item(item) => item.visit_attrs(f), + StmtKind::Empty => {} StmtKind::MacCall(mac) => { mac.attrs.visit_attrs(f); } diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs index 425ef83b57af5..166d36ce42433 100644 --- a/compiler/rustc_ast/src/mut_visit.rs +++ b/compiler/rustc_ast/src/mut_visit.rs @@ -577,7 +577,7 @@ pub fn noop_visit_local(local: &mut P, vis: &mut T) { } pub fn noop_visit_attribute(attr: &mut Attribute, vis: &mut T) { - let Attribute { kind, id: _, style: _, span } = attr; + let Attribute { kind, id: _, style: _, span, tokens: _ } = attr; match kind { AttrKind::Normal(AttrItem { path, args, tokens: _ }) => { vis.visit_path(path); @@ -1106,6 +1106,9 @@ pub fn noop_visit_expr( match kind { ExprKind::Box(expr) => vis.visit_expr(expr), ExprKind::Array(exprs) => visit_exprs(exprs, vis), + ExprKind::ConstBlock(anon_const) => { + vis.visit_anon_const(anon_const); + } ExprKind::Repeat(expr, count) => { vis.visit_expr(expr); vis.visit_anon_const(count); diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index ad9c7391939a9..d991027cb4573 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -153,6 +153,7 @@ pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool { kw::Do, kw::Box, kw::Break, + kw::Const, kw::Continue, kw::False, kw::For, diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index 8acb6b2f37589..cb419b6362ce7 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -16,8 +16,9 @@ use crate::token::{self, DelimToken, Token, TokenKind}; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; -use rustc_data_structures::sync::Lrc; +use rustc_data_structures::sync::{self, Lrc}; use rustc_macros::HashStable_Generic; +use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use rustc_span::{Span, DUMMY_SP}; use smallvec::{smallvec, SmallVec}; @@ -119,13 +120,84 @@ where } } +// A cloneable callback which produces a `TokenStream`. Each clone +// of this should produce the same `TokenStream` +pub trait CreateTokenStream: sync::Send + sync::Sync + FnOnce() -> TokenStream { + // Workaround for the fact that `Clone` is not object-safe + fn clone_it(&self) -> Box; +} + +impl TokenStream> CreateTokenStream + for F +{ + fn clone_it(&self) -> Box { + Box::new(self.clone()) + } +} + +impl Clone for Box { + fn clone(&self) -> Self { + let val: &(dyn CreateTokenStream) = &**self; + val.clone_it() + } +} + +/// A lazy version of `TokenStream`, which may defer creation +/// of an actual `TokenStream` until it is needed. +pub type LazyTokenStream = Lrc; + +#[derive(Clone)] +pub enum LazyTokenStreamInner { + Lazy(Box), + Ready(TokenStream), +} + +impl std::fmt::Debug for LazyTokenStreamInner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + LazyTokenStreamInner::Lazy(..) => f.debug_struct("LazyTokenStream::Lazy").finish(), + LazyTokenStreamInner::Ready(..) => f.debug_struct("LazyTokenStream::Ready").finish(), + } + } +} + +impl LazyTokenStreamInner { + pub fn into_token_stream(&self) -> TokenStream { + match self { + // Note that we do not cache this. 
If this ever becomes a performance + // problem, we should investigate wrapping `LazyTokenStreamInner` + // in a lock + LazyTokenStreamInner::Lazy(cb) => (cb.clone())(), + LazyTokenStreamInner::Ready(stream) => stream.clone(), + } + } +} + +impl Encodable for LazyTokenStreamInner { + fn encode(&self, _s: &mut S) -> Result<(), S::Error> { + panic!("Attempted to encode LazyTokenStream"); + } +} + +impl Decodable for LazyTokenStreamInner { + fn decode(_d: &mut D) -> Result { + panic!("Attempted to decode LazyTokenStream"); + } +} + +impl HashStable for LazyTokenStreamInner { + fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) { + panic!("Attempted to compute stable hash for LazyTokenStream"); + } +} + /// A `TokenStream` is an abstract sequence of tokens, organized into `TokenTree`s. /// /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s /// instead of a representation of the abstract syntax tree. /// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat. #[derive(Clone, Debug, Default, Encodable, Decodable)] -pub struct TokenStream(pub Lrc>); +pub struct TokenStream(pub(crate) Lrc>); pub type TreeAndSpacing = (TokenTree, Spacing); @@ -286,12 +358,12 @@ impl TokenStream { t1.next().is_none() && t2.next().is_none() } - pub fn map_enumerated TokenTree>(self, mut f: F) -> TokenStream { + pub fn map_enumerated TokenTree>(self, mut f: F) -> TokenStream { TokenStream(Lrc::new( self.0 .iter() .enumerate() - .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint)) + .map(|(i, (tree, is_joint))| (f(i, tree), *is_joint)) .collect(), )) } @@ -394,8 +466,8 @@ impl Cursor { self.index = index; } - pub fn look_ahead(&self, n: usize) -> Option { - self.stream.0[self.index..].get(n).map(|(tree, _)| tree.clone()) + pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> { + self.stream.0[self.index..].get(n).map(|(tree, _)| tree) } } diff --git a/compiler/rustc_ast/src/util/lev_distance.rs b/compiler/rustc_ast/src/util/lev_distance.rs index 754b1f13381cb..21c2c925bc479 100644 --- a/compiler/rustc_ast/src/util/lev_distance.rs +++ b/compiler/rustc_ast/src/util/lev_distance.rs @@ -54,7 +54,7 @@ where T: Iterator, { let lookup = &lookup.as_str(); - let max_dist = dist.map_or_else(|| cmp::max(lookup.len(), 3) / 3, |d| d); + let max_dist = dist.unwrap_or_else(|| cmp::max(lookup.len(), 3) / 3); let name_vec: Vec<&Symbol> = iter_names.collect(); let (case_insensitive_match, levenshtein_match) = name_vec diff --git a/compiler/rustc_ast/src/util/parser.rs b/compiler/rustc_ast/src/util/parser.rs index be5516ef4718b..078dd4bd6e602 100644 --- a/compiler/rustc_ast/src/util/parser.rs +++ b/compiler/rustc_ast/src/util/parser.rs @@ -282,6 +282,7 @@ pub enum ExprPrecedence { ForLoop, Loop, Match, + ConstBlock, Block, TryBlock, Struct, @@ -346,6 +347,7 @@ impl ExprPrecedence { ExprPrecedence::ForLoop | ExprPrecedence::Loop | ExprPrecedence::Match | + ExprPrecedence::ConstBlock | ExprPrecedence::Block | ExprPrecedence::TryBlock | ExprPrecedence::Async | diff --git a/compiler/rustc_ast/src/visit.rs b/compiler/rustc_ast/src/visit.rs index 86fd87f6c42c5..507b49616ea0f 100644 --- a/compiler/rustc_ast/src/visit.rs +++ b/compiler/rustc_ast/src/visit.rs @@ -200,11 +200,7 @@ pub trait Visitor<'ast>: Sized { walk_generic_args(self, path_span, generic_args) } fn visit_generic_arg(&mut self, generic_arg: &'ast GenericArg) { - match generic_arg { - GenericArg::Lifetime(lt) => self.visit_lifetime(lt), - GenericArg::Type(ty) => self.visit_ty(ty), - 
GenericArg::Const(ct) => self.visit_anon_const(ct), - } + walk_generic_arg(self, generic_arg) } fn visit_assoc_ty_constraint(&mut self, constraint: &'ast AssocTyConstraint) { walk_assoc_ty_constraint(self, constraint) @@ -486,6 +482,17 @@ where } } +pub fn walk_generic_arg<'a, V>(visitor: &mut V, generic_arg: &'a GenericArg) +where + V: Visitor<'a>, +{ + match generic_arg { + GenericArg::Lifetime(lt) => visitor.visit_lifetime(lt), + GenericArg::Type(ty) => visitor.visit_ty(ty), + GenericArg::Const(ct) => visitor.visit_anon_const(ct), + } +} + pub fn walk_assoc_ty_constraint<'a, V: Visitor<'a>>( visitor: &mut V, constraint: &'a AssocTyConstraint, @@ -717,6 +724,7 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) { ExprKind::Array(ref subexpressions) => { walk_list!(visitor, visit_expr, subexpressions); } + ExprKind::ConstBlock(ref anon_const) => visitor.visit_anon_const(anon_const), ExprKind::Repeat(ref element, ref count) => { visitor.visit_expr(element); visitor.visit_anon_const(count) diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs index 2d2caa7a808d7..a6ac056b93b5e 100644 --- a/compiler/rustc_ast_lowering/src/expr.rs +++ b/compiler/rustc_ast_lowering/src/expr.rs @@ -30,6 +30,10 @@ impl<'hir> LoweringContext<'_, 'hir> { let kind = match e.kind { ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)), ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)), + ExprKind::ConstBlock(ref anon_const) => { + let anon_const = self.lower_anon_const(anon_const); + hir::ExprKind::ConstBlock(anon_const) + } ExprKind::Repeat(ref expr, ref count) => { let expr = self.lower_expr(expr); let count = self.lower_anon_const(count); @@ -206,9 +210,9 @@ impl<'hir> LoweringContext<'_, 'hir> { ex.span = e.span; } // Merge attributes into the inner expression. - let mut attrs = e.attrs.clone(); + let mut attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect(); attrs.extend::>(ex.attrs.into()); - ex.attrs = attrs; + ex.attrs = attrs.into(); return ex; } @@ -1186,52 +1190,47 @@ impl<'hir> LoweringContext<'_, 'hir> { input| { match used_regs.entry(r) { Entry::Occupied(o) => { - if !skip { - skip = true; - - let idx2 = *o.get(); - let op2 = &operands[idx2]; - let op_sp2 = asm.operands[idx2].1; - let reg2 = match op2.reg() { - Some(asm::InlineAsmRegOrRegClass::Reg(r)) => r, - _ => unreachable!(), - }; - - let msg = format!( - "register `{}` conflicts with register `{}`", - reg.name(), - reg2.name() - ); - let mut err = sess.struct_span_err(op_sp, &msg); - err.span_label( - op_sp, - &format!("register `{}`", reg.name()), - ); - err.span_label( - op_sp2, - &format!("register `{}`", reg2.name()), - ); - - match (op, op2) { - ( - hir::InlineAsmOperand::In { .. }, - hir::InlineAsmOperand::Out { late, .. }, - ) - | ( - hir::InlineAsmOperand::Out { late, .. }, - hir::InlineAsmOperand::In { .. 
}, - ) => { - assert!(!*late); - let out_op_sp = if input { op_sp2 } else { op_sp }; - let msg = "use `lateout` instead of \ - `out` to avoid conflict"; - err.span_help(out_op_sp, msg); - } - _ => {} + if skip { + return; + } + skip = true; + + let idx2 = *o.get(); + let op2 = &operands[idx2]; + let op_sp2 = asm.operands[idx2].1; + let reg2 = match op2.reg() { + Some(asm::InlineAsmRegOrRegClass::Reg(r)) => r, + _ => unreachable!(), + }; + + let msg = format!( + "register `{}` conflicts with register `{}`", + reg.name(), + reg2.name() + ); + let mut err = sess.struct_span_err(op_sp, &msg); + err.span_label(op_sp, &format!("register `{}`", reg.name())); + err.span_label(op_sp2, &format!("register `{}`", reg2.name())); + + match (op, op2) { + ( + hir::InlineAsmOperand::In { .. }, + hir::InlineAsmOperand::Out { late, .. }, + ) + | ( + hir::InlineAsmOperand::Out { late, .. }, + hir::InlineAsmOperand::In { .. }, + ) => { + assert!(!*late); + let out_op_sp = if input { op_sp2 } else { op_sp }; + let msg = "use `lateout` instead of \ + `out` to avoid conflict"; + err.span_help(out_op_sp, msg); } - - err.emit(); + _ => {} } + + err.emit(); } Entry::Vacant(v) => { v.insert(idx); @@ -1472,13 +1471,15 @@ impl<'hir> LoweringContext<'_, 'hir> { hir::MatchSource::ForLoopDesugar, )); + let attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect(); + // This is effectively `{ let _result = ...; _result }`. // The construct was introduced in #21984 and is necessary to make sure that // temporaries in the `head` expression are dropped and do not leak to the // surrounding scope of the `match` since the `match` is not a terminating scope. // // Also, add the attributes to the outer returned expr node. - self.expr_drop_temps_mut(desugared_span, match_expr, e.attrs.clone()) + self.expr_drop_temps_mut(desugared_span, match_expr, attrs.into()) } /// Desugar `ExprKind::Try` from: `?` into: diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs index a28d022c66139..a3f046986c014 100644 --- a/compiler/rustc_ast_lowering/src/lib.rs +++ b/compiler/rustc_ast_lowering/src/lib.rs @@ -538,6 +538,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { } self.visit_fn_ret_ty(&f.decl.output) } + TyKind::ImplTrait(def_node_id, _) => { + self.lctx.allocate_hir_id_counter(def_node_id); + self.with_hir_id_owner(Some(def_node_id), |this| { + visit::walk_ty(this, t); + }); + } _ => visit::walk_ty(self, t), } } @@ -972,7 +978,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { AttrKind::DocComment(comment_kind, data) => AttrKind::DocComment(comment_kind, data), }; - Attribute { kind, id: attr.id, style: attr.style, span: attr.span } + // Tokens aren't needed after macro expansion and parsing + Attribute { kind, id: attr.id, style: attr.style, span: attr.span, tokens: None } } fn lower_mac_args(&mut self, args: &MacArgs) -> MacArgs { @@ -1346,10 +1353,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { // Add a definition for the in-band `Param`. let def_id = self.resolver.local_def_id(def_node_id); - let hir_bounds = self.lower_param_bounds( - bounds, - ImplTraitContext::Universal(in_band_ty_params), - ); + self.allocate_hir_id_counter(def_node_id); + + let hir_bounds = self.with_hir_id_owner(def_node_id, |this| { + this.lower_param_bounds( + bounds, + ImplTraitContext::Universal(in_band_ty_params), + ) + }); // Set the name to `impl Bound1 + Bound2`. 
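The inline-asm hunk above flattens the register-conflict reporting into an early `return` once a conflict has been emitted; the detection itself hinges on the `HashMap` entry API, as in this self-contained sketch (toy operand strings, not the real rustc operand types):

```rust
use std::collections::hash_map::Entry;
use std::collections::HashMap;

/// The first operand to claim a register is remembered by index; any later
/// operand that lands on an occupied entry produces a conflict message.
fn register_conflicts(operands: &[(&str, &str)]) -> Vec<String> {
    let mut used_regs: HashMap<&str, usize> = HashMap::new();
    let mut errors = Vec::new();
    for idx in 0..operands.len() {
        let (reg, desc) = operands[idx];
        match used_regs.entry(reg) {
            Entry::Occupied(o) => {
                let (_, first) = operands[*o.get()];
                errors.push(format!(
                    "register `{}` in `{}` conflicts with `{}`",
                    reg, desc, first
                ));
            }
            Entry::Vacant(v) => {
                v.insert(idx);
            }
        }
    }
    errors
}

fn main() {
    let ops = [("eax", "in(reg) x"), ("ebx", "out(reg) y"), ("eax", "out(reg) z")];
    let errs = register_conflicts(&ops);
    assert_eq!(errs.len(), 1);
    println!("{}", errs[0]);
}
```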
let ident = Ident::from_str_and_span(&pprust::ty_to_string(t), span); in_band_ty_params.push(hir::GenericParam { @@ -1713,7 +1724,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { pat: self.lower_pat(&l.pat), init, span: l.span, - attrs: l.attrs.clone(), + attrs: l.attrs.iter().map(|a| self.lower_attr(a)).collect::>().into(), source: hir::LocalSource::Normal, }, ids, @@ -2200,7 +2211,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { .attrs .iter() .filter(|attr| self.sess.check_name(attr, sym::rustc_synthetic)) - .map(|_| hir::SyntheticTyParamKind::ImplTrait) + .map(|_| hir::SyntheticTyParamKind::FromAttr) .next(), }; diff --git a/compiler/rustc_ast_lowering/src/pat.rs b/compiler/rustc_ast_lowering/src/pat.rs index cb7b7c0eb6116..a1cbcde1f4291 100644 --- a/compiler/rustc_ast_lowering/src/pat.rs +++ b/compiler/rustc_ast_lowering/src/pat.rs @@ -10,82 +10,90 @@ use rustc_span::symbol::Ident; use rustc_span::{source_map::Spanned, Span}; impl<'a, 'hir> LoweringContext<'a, 'hir> { - crate fn lower_pat(&mut self, p: &Pat) -> &'hir hir::Pat<'hir> { + crate fn lower_pat(&mut self, mut pattern: &Pat) -> &'hir hir::Pat<'hir> { ensure_sufficient_stack(|| { - let node = match p.kind { - PatKind::Wild => hir::PatKind::Wild, - PatKind::Ident(ref binding_mode, ident, ref sub) => { - let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(&*s)); - let node = self.lower_pat_ident(p, binding_mode, ident, lower_sub); - node - } - PatKind::Lit(ref e) => hir::PatKind::Lit(self.lower_expr(e)), - PatKind::TupleStruct(ref path, ref pats) => { - let qpath = self.lower_qpath( - p.id, - &None, - path, - ParamMode::Optional, - ImplTraitContext::disallowed(), - ); - let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple struct"); - hir::PatKind::TupleStruct(qpath, pats, ddpos) - } - PatKind::Or(ref pats) => hir::PatKind::Or( - self.arena.alloc_from_iter(pats.iter().map(|x| self.lower_pat(x))), - ), - PatKind::Path(ref qself, ref path) => { - let qpath = self.lower_qpath( - p.id, - qself, - path, - ParamMode::Optional, - ImplTraitContext::disallowed(), - ); - hir::PatKind::Path(qpath) - } - PatKind::Struct(ref path, ref fields, etc) => { - let qpath = self.lower_qpath( - p.id, - &None, - path, - ParamMode::Optional, - ImplTraitContext::disallowed(), - ); + // loop here to avoid recursion + let node = loop { + match pattern.kind { + PatKind::Wild => break hir::PatKind::Wild, + PatKind::Ident(ref binding_mode, ident, ref sub) => { + let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(&*s)); + break self.lower_pat_ident(pattern, binding_mode, ident, lower_sub); + } + PatKind::Lit(ref e) => break hir::PatKind::Lit(self.lower_expr(e)), + PatKind::TupleStruct(ref path, ref pats) => { + let qpath = self.lower_qpath( + pattern.id, + &None, + path, + ParamMode::Optional, + ImplTraitContext::disallowed(), + ); + let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple struct"); + break hir::PatKind::TupleStruct(qpath, pats, ddpos); + } + PatKind::Or(ref pats) => { + break hir::PatKind::Or( + self.arena.alloc_from_iter(pats.iter().map(|x| self.lower_pat(x))), + ); + } + PatKind::Path(ref qself, ref path) => { + let qpath = self.lower_qpath( + pattern.id, + qself, + path, + ParamMode::Optional, + ImplTraitContext::disallowed(), + ); + break hir::PatKind::Path(qpath); + } + PatKind::Struct(ref path, ref fields, etc) => { + let qpath = self.lower_qpath( + pattern.id, + &None, + path, + ParamMode::Optional, + ImplTraitContext::disallowed(), + ); - let fs = self.arena.alloc_from_iter(fields.iter().map(|f| 
hir::FieldPat { - hir_id: self.next_id(), - ident: f.ident, - pat: self.lower_pat(&f.pat), - is_shorthand: f.is_shorthand, - span: f.span, - })); - hir::PatKind::Struct(qpath, fs, etc) - } - PatKind::Tuple(ref pats) => { - let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple"); - hir::PatKind::Tuple(pats, ddpos) - } - PatKind::Box(ref inner) => hir::PatKind::Box(self.lower_pat(inner)), - PatKind::Ref(ref inner, mutbl) => hir::PatKind::Ref(self.lower_pat(inner), mutbl), - PatKind::Range(ref e1, ref e2, Spanned { node: ref end, .. }) => { - hir::PatKind::Range( - e1.as_deref().map(|e| self.lower_expr(e)), - e2.as_deref().map(|e| self.lower_expr(e)), - self.lower_range_end(end, e2.is_some()), - ) - } - PatKind::Slice(ref pats) => self.lower_pat_slice(pats), - PatKind::Rest => { - // If we reach here the `..` pattern is not semantically allowed. - self.ban_illegal_rest_pat(p.span) + let fs = self.arena.alloc_from_iter(fields.iter().map(|f| hir::FieldPat { + hir_id: self.next_id(), + ident: f.ident, + pat: self.lower_pat(&f.pat), + is_shorthand: f.is_shorthand, + span: f.span, + })); + break hir::PatKind::Struct(qpath, fs, etc); + } + PatKind::Tuple(ref pats) => { + let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple"); + break hir::PatKind::Tuple(pats, ddpos); + } + PatKind::Box(ref inner) => { + break hir::PatKind::Box(self.lower_pat(inner)); + } + PatKind::Ref(ref inner, mutbl) => { + break hir::PatKind::Ref(self.lower_pat(inner), mutbl); + } + PatKind::Range(ref e1, ref e2, Spanned { node: ref end, .. }) => { + break hir::PatKind::Range( + e1.as_deref().map(|e| self.lower_expr(e)), + e2.as_deref().map(|e| self.lower_expr(e)), + self.lower_range_end(end, e2.is_some()), + ); + } + PatKind::Slice(ref pats) => break self.lower_pat_slice(pats), + PatKind::Rest => { + // If we reach here the `..` pattern is not semantically allowed. + break self.ban_illegal_rest_pat(pattern.span); + } + // return inner to be processed in next loop + PatKind::Paren(ref inner) => pattern = inner, + PatKind::MacCall(_) => panic!("{:?} shouldn't exist here", pattern.span), } - // FIXME: consider not using recursion to lower this. - PatKind::Paren(ref inner) => return self.lower_pat(inner), - PatKind::MacCall(_) => panic!("{:?} shouldn't exist here", p.span), }; - self.pat_with_node_id_of(p, node) + self.pat_with_node_id_of(pattern, node) }) } diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs index d9791fe743cde..b1c8e0ee72730 100644 --- a/compiler/rustc_ast_passes/src/ast_validation.rs +++ b/compiler/rustc_ast_passes/src/ast_validation.rs @@ -287,7 +287,7 @@ impl<'a> AstValidator<'a> { // ``` fn check_expr_within_pat(&self, expr: &Expr, allow_paths: bool) { match expr.kind { - ExprKind::Lit(..) | ExprKind::Err => {} + ExprKind::Lit(..) | ExprKind::ConstBlock(..) | ExprKind::Err => {} ExprKind::Path(..) 
if allow_paths => {} ExprKind::Unary(UnOp::Neg, ref inner) if matches!(inner.kind, ExprKind::Lit(_)) => {} _ => self.err_handler().span_err( diff --git a/compiler/rustc_ast_passes/src/feature_gate.rs b/compiler/rustc_ast_passes/src/feature_gate.rs index 00d3db73766ac..f20084497671f 100644 --- a/compiler/rustc_ast_passes/src/feature_gate.rs +++ b/compiler/rustc_ast_passes/src/feature_gate.rs @@ -629,6 +629,7 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session) { gate_all!(const_trait_bound_opt_out, "`?const` on trait bounds is experimental"); gate_all!(const_trait_impl, "const trait impls are experimental"); gate_all!(half_open_range_patterns, "half-open range patterns are unstable"); + gate_all!(inline_const, "inline-const is experimental"); // All uses of `gate_all!` below this point were added in #65742, // and subsequently disabled (with the non-early gating readded). diff --git a/compiler/rustc_ast_pretty/src/pp.rs b/compiler/rustc_ast_pretty/src/pp.rs index 95f969d7691e0..56e769ba6b710 100644 --- a/compiler/rustc_ast_pretty/src/pp.rs +++ b/compiler/rustc_ast_pretty/src/pp.rs @@ -390,7 +390,7 @@ impl Printer { self.scan_stack.pop_front().unwrap() } - fn scan_top(&mut self) -> usize { + fn scan_top(&self) -> usize { *self.scan_stack.front().unwrap() } @@ -484,7 +484,7 @@ impl Printer { self.pending_indentation += amount; } - fn get_top(&mut self) -> PrintStackElem { + fn get_top(&self) -> PrintStackElem { *self.print_stack.last().unwrap_or({ &PrintStackElem { offset: 0, pbreak: PrintStackBreak::Broken(Breaks::Inconsistent) } }) diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index 9aa066370bb5b..029a6cb664dc3 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -63,7 +63,7 @@ impl<'a> Comments<'a> { } pub fn trailing_comment( - &mut self, + &self, span: rustc_span::Span, next_pos: Option, ) -> Option { @@ -1714,6 +1714,14 @@ impl<'a> State<'a> { self.end(); } + fn print_expr_anon_const(&mut self, expr: &ast::AnonConst, attrs: &[ast::Attribute]) { + self.ibox(INDENT_UNIT); + self.s.word("const"); + self.print_inner_attributes_inline(attrs); + self.print_expr(&expr.value); + self.end(); + } + fn print_expr_repeat( &mut self, element: &ast::Expr, @@ -1890,6 +1898,9 @@ impl<'a> State<'a> { ast::ExprKind::Array(ref exprs) => { self.print_expr_vec(&exprs[..], attrs); } + ast::ExprKind::ConstBlock(ref anon_const) => { + self.print_expr_anon_const(anon_const, attrs); + } ast::ExprKind::Repeat(ref element, ref count) => { self.print_expr_repeat(element, count, attrs); } diff --git a/compiler/rustc_attr/src/builtin.rs b/compiler/rustc_attr/src/builtin.rs index 9c309345000bb..218a9b229e0df 100644 --- a/compiler/rustc_attr/src/builtin.rs +++ b/compiler/rustc_attr/src/builtin.rs @@ -1013,13 +1013,28 @@ pub fn allow_internal_unstable<'a>( sess: &'a Session, attrs: &'a [Attribute], ) -> Option + 'a> { - let attrs = sess.filter_by_name(attrs, sym::allow_internal_unstable); + allow_unstable(sess, attrs, sym::allow_internal_unstable) +} + +pub fn rustc_allow_const_fn_unstable<'a>( + sess: &'a Session, + attrs: &'a [Attribute], +) -> Option + 'a> { + allow_unstable(sess, attrs, sym::rustc_allow_const_fn_unstable) +} + +fn allow_unstable<'a>( + sess: &'a Session, + attrs: &'a [Attribute], + symbol: Symbol, +) -> Option + 'a> { + let attrs = sess.filter_by_name(attrs, symbol); let list = attrs .filter_map(move |attr| { attr.meta_item_list().or_else(|| { sess.diagnostic().span_err( 
attr.span, - "`allow_internal_unstable` expects a list of feature names", + &format!("`{}` expects a list of feature names", symbol.to_ident_string()), ); None }) @@ -1029,8 +1044,10 @@ pub fn allow_internal_unstable<'a>( Some(list.into_iter().filter_map(move |it| { let name = it.ident().map(|ident| ident.name); if name.is_none() { - sess.diagnostic() - .span_err(it.span(), "`allow_internal_unstable` expects feature names"); + sess.diagnostic().span_err( + it.span(), + &format!("`{}` expects feature names", symbol.to_ident_string()), + ); } name })) diff --git a/compiler/rustc_builtin_macros/src/cmdline_attrs.rs b/compiler/rustc_builtin_macros/src/cmdline_attrs.rs index 5ed8b69d92ab7..747e48ece704c 100644 --- a/compiler/rustc_builtin_macros/src/cmdline_attrs.rs +++ b/compiler/rustc_builtin_macros/src/cmdline_attrs.rs @@ -15,7 +15,7 @@ pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) - ); let start_span = parser.token.span; - let AttrItem { path, args, tokens: _ } = match parser.parse_attr_item() { + let AttrItem { path, args, tokens: _ } = match parser.parse_attr_item(false) { Ok(ai) => ai, Err(mut err) => { err.emit(); diff --git a/compiler/rustc_codegen_llvm/Cargo.toml b/compiler/rustc_codegen_llvm/Cargo.toml index 04792b334d553..f9373640dce5e 100644 --- a/compiler/rustc_codegen_llvm/Cargo.toml +++ b/compiler/rustc_codegen_llvm/Cargo.toml @@ -11,11 +11,11 @@ doctest = false [dependencies] bitflags = "1.0" libc = "0.2" -measureme = "0.7.1" +measureme = "9.0.0" snap = "1" tracing = "0.1" rustc_middle = { path = "../rustc_middle" } -rustc-demangle = "0.1" +rustc-demangle = "0.1.18" rustc_attr = { path = "../rustc_attr" } rustc_codegen_ssa = { path = "../rustc_codegen_ssa" } rustc_data_structures = { path = "../rustc_data_structures" } diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs index 174620ea2fa15..491191d058cf0 100644 --- a/compiler/rustc_codegen_llvm/src/builder.rs +++ b/compiler/rustc_codegen_llvm/src/builder.rs @@ -139,7 +139,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { unsafe { llvm::LLVMGetInsertBlock(self.llbuilder) } } - fn set_span(&self, _span: Span) {} + fn set_span(&mut self, _span: Span) {} fn position_at_end(&mut self, llbb: &'ll BasicBlock) { unsafe { diff --git a/compiler/rustc_codegen_llvm/src/context.rs b/compiler/rustc_codegen_llvm/src/context.rs index 150cedde7e833..56ff580b43b59 100644 --- a/compiler/rustc_codegen_llvm/src/context.rs +++ b/compiler/rustc_codegen_llvm/src/context.rs @@ -324,8 +324,8 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> { } #[inline] - pub fn coverage_context(&'a self) -> &'a coverageinfo::CrateCoverageContext<'tcx> { - self.coverage_cx.as_ref().unwrap() + pub fn coverage_context(&'a self) -> Option<&'a coverageinfo::CrateCoverageContext<'tcx>> { + self.coverage_cx.as_ref() } } diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs index 0098555a3736b..c1163a871cf1f 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs @@ -26,7 +26,10 @@ use tracing::debug; /// undocumented details in Clang's implementation (that may or may not be important) were also /// replicated for Rust's Coverage Map. 
pub fn finalize<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) { - let function_coverage_map = cx.coverage_context().take_function_coverage_map(); + let function_coverage_map = match cx.coverage_context() { + Some(ctx) => ctx.take_function_coverage_map(), + None => return, + }; if function_coverage_map.is_empty() { // This module has no functions with coverage instrumentation return; diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs index 2bd37bf9c4f9c..7fdbe1a55128a 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs @@ -64,17 +64,22 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> { function_source_hash: u64, id: CounterValueReference, region: CodeRegion, - ) { - debug!( - "adding counter to coverage_regions: instance={:?}, function_source_hash={}, id={:?}, \ - at {:?}", - instance, function_source_hash, id, region, - ); - let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut(); - coverage_regions - .entry(instance) - .or_insert_with(|| FunctionCoverage::new(self.tcx, instance)) - .add_counter(function_source_hash, id, region); + ) -> bool { + if let Some(coverage_context) = self.coverage_context() { + debug!( + "adding counter to coverage_regions: instance={:?}, function_source_hash={}, id={:?}, \ + at {:?}", + instance, function_source_hash, id, region, + ); + let mut coverage_regions = coverage_context.function_coverage_map.borrow_mut(); + coverage_regions + .entry(instance) + .or_insert_with(|| FunctionCoverage::new(self.tcx, instance)) + .add_counter(function_source_hash, id, region); + true + } else { + false + } } fn add_counter_expression_region( @@ -85,29 +90,39 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> { op: Op, rhs: ExpressionOperandId, region: CodeRegion, - ) { - debug!( - "adding counter expression to coverage_regions: instance={:?}, id={:?}, {:?} {:?} {:?}, \ - at {:?}", - instance, id, lhs, op, rhs, region, - ); - let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut(); - coverage_regions - .entry(instance) - .or_insert_with(|| FunctionCoverage::new(self.tcx, instance)) - .add_counter_expression(id, lhs, op, rhs, region); + ) -> bool { + if let Some(coverage_context) = self.coverage_context() { + debug!( + "adding counter expression to coverage_regions: instance={:?}, id={:?}, {:?} {:?} {:?}, \ + at {:?}", + instance, id, lhs, op, rhs, region, + ); + let mut coverage_regions = coverage_context.function_coverage_map.borrow_mut(); + coverage_regions + .entry(instance) + .or_insert_with(|| FunctionCoverage::new(self.tcx, instance)) + .add_counter_expression(id, lhs, op, rhs, region); + true + } else { + false + } } - fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: CodeRegion) { - debug!( - "adding unreachable code to coverage_regions: instance={:?}, at {:?}", - instance, region, - ); - let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut(); - coverage_regions - .entry(instance) - .or_insert_with(|| FunctionCoverage::new(self.tcx, instance)) - .add_unreachable_region(region); + fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: CodeRegion) -> bool { + if let Some(coverage_context) = self.coverage_context() { + debug!( + "adding unreachable code to coverage_regions: instance={:?}, at {:?}", + instance, region, + ); + let mut coverage_regions = 
coverage_context.function_coverage_map.borrow_mut(); + coverage_regions + .entry(instance) + .or_insert_with(|| FunctionCoverage::new(self.tcx, instance)) + .add_unreachable_region(region); + true + } else { + false + } } } diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs index 8379fe472251f..e9900e8bc108a 100644 --- a/compiler/rustc_codegen_llvm/src/intrinsic.rs +++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs @@ -334,8 +334,8 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> { self.call(expect, &[cond, self.const_bool(expected)], None) } - fn sideeffect(&mut self) { - if self.tcx.sess.opts.debugging_opts.insert_sideeffect { + fn sideeffect(&mut self, unconditional: bool) { + if unconditional || self.tcx.sess.opts.debugging_opts.insert_sideeffect { let fnname = self.get_intrinsic(&("llvm.sideeffect")); self.call(fnname, &[], None); } @@ -390,7 +390,7 @@ fn codegen_msvc_try( ) { let llfn = get_rust_try_fn(bx, &mut |mut bx| { bx.set_personality_fn(bx.eh_personality()); - bx.sideeffect(); + bx.sideeffect(false); let mut normal = bx.build_sibling_block("normal"); let mut catchswitch = bx.build_sibling_block("catchswitch"); @@ -553,7 +553,7 @@ fn codegen_gnu_try( // call %catch_func(%data, %ptr) // ret 1 - bx.sideeffect(); + bx.sideeffect(false); let mut then = bx.build_sibling_block("then"); let mut catch = bx.build_sibling_block("catch"); @@ -615,7 +615,7 @@ fn codegen_emcc_try( // call %catch_func(%data, %catch_data) // ret 1 - bx.sideeffect(); + bx.sideeffect(false); let mut then = bx.build_sibling_block("then"); let mut catch = bx.build_sibling_block("catch"); @@ -673,17 +673,9 @@ fn codegen_emcc_try( fn gen_fn<'ll, 'tcx>( cx: &CodegenCx<'ll, 'tcx>, name: &str, - inputs: Vec>, - output: Ty<'tcx>, + rust_fn_sig: ty::PolyFnSig<'tcx>, codegen: &mut dyn FnMut(Builder<'_, 'll, 'tcx>), ) -> &'ll Value { - let rust_fn_sig = ty::Binder::bind(cx.tcx.mk_fn_sig( - inputs.into_iter(), - output, - false, - hir::Unsafety::Unsafe, - Abi::Rust, - )); let fn_abi = FnAbi::of_fn_ptr(cx, rust_fn_sig, &[]); let llfn = cx.declare_fn(name, &fn_abi); cx.set_frame_pointer_elimination(llfn); @@ -710,22 +702,31 @@ fn get_rust_try_fn<'ll, 'tcx>( // Define the type up front for the signature of the rust_try function. 
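The `sideeffect` change above threads an `unconditional` flag through the builder: `-Zinsert-sideeffect` still gates the ordinary call sites, while a few places (such as the self-`Goto` case later in this patch) force emission. A toy sketch of that gating, with stand-in types in place of the real codegen builder:

```rust
struct Builder {
    insert_sideeffect_flag: bool, // models -Zinsert-sideeffect
    emitted: Vec<&'static str>,
}

impl Builder {
    fn sideeffect(&mut self, unconditional: bool) {
        // Emit when explicitly forced, or when the debug flag asks for it.
        if unconditional || self.insert_sideeffect_flag {
            self.emitted.push("llvm.sideeffect");
        }
    }
}

fn main() {
    let mut bx = Builder { insert_sideeffect_flag: false, emitted: vec![] };
    bx.sideeffect(false); // ordinary call site: skipped, flag is off
    bx.sideeffect(true);  // forced, e.g. an unconditional branch to the same block
    assert_eq!(bx.emitted, vec!["llvm.sideeffect"]);
}
```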
let tcx = cx.tcx; let i8p = tcx.mk_mut_ptr(tcx.types.i8); - let try_fn_ty = tcx.mk_fn_ptr(ty::Binder::bind(tcx.mk_fn_sig( + // `unsafe fn(*mut i8) -> ()` + let try_fn_ty = tcx.mk_fn_ptr(ty::Binder::dummy(tcx.mk_fn_sig( iter::once(i8p), tcx.mk_unit(), false, hir::Unsafety::Unsafe, Abi::Rust, ))); - let catch_fn_ty = tcx.mk_fn_ptr(ty::Binder::bind(tcx.mk_fn_sig( + // `unsafe fn(*mut i8, *mut i8) -> ()` + let catch_fn_ty = tcx.mk_fn_ptr(ty::Binder::dummy(tcx.mk_fn_sig( [i8p, i8p].iter().cloned(), tcx.mk_unit(), false, hir::Unsafety::Unsafe, Abi::Rust, ))); - let output = tcx.types.i32; - let rust_try = gen_fn(cx, "__rust_try", vec![try_fn_ty, i8p, catch_fn_ty], output, codegen); + // `unsafe fn(unsafe fn(*mut i8) -> (), *mut i8, unsafe fn(*mut i8, *mut i8) -> ()) -> i32` + let rust_fn_sig = ty::Binder::dummy(cx.tcx.mk_fn_sig( + vec![try_fn_ty, i8p, catch_fn_ty].into_iter(), + tcx.types.i32, + false, + hir::Unsafety::Unsafe, + Abi::Rust, + )); + let rust_try = gen_fn(cx, "__rust_try", rust_fn_sig, codegen); cx.rust_try_fn.set(Some(rust_try)); rust_try } diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index a051ae25a9c17..bec0a84cac0a1 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -163,7 +163,7 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> { target <= self.bb && target.start_location().is_predecessor_of(self.bb.start_location(), mir) }) { - bx.sideeffect(); + bx.sideeffect(false); } } } @@ -964,7 +964,23 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } mir::TerminatorKind::Goto { target } => { - helper.maybe_sideeffect(self.mir, &mut bx, &[target]); + if bb == target { + // This is an unconditional branch back to this same basic + // block. That means we have something like a `loop {}` + // statement. Currently LLVM miscompiles this because it + // assumes forward progress. We want to prevent this in all + // cases, but that has a fairly high cost to compile times + // currently. Instead, try to handle this specific case + // which comes up commonly in practice (e.g., in embedded + // code). + // + // The `true` here means we insert side effects regardless + // of -Zinsert-sideeffect being passed on unconditional + // branching to the same basic block. 
+ bx.sideeffect(true); + } else { + helper.maybe_sideeffect(self.mir, &mut bx, &[target]); + } helper.funclet_br(self, &mut bx, target); } diff --git a/compiler/rustc_codegen_ssa/src/mir/coverageinfo.rs b/compiler/rustc_codegen_ssa/src/mir/coverageinfo.rs index a2ad27b925c34..4811adea9ec06 100644 --- a/compiler/rustc_codegen_ssa/src/mir/coverageinfo.rs +++ b/compiler/rustc_codegen_ssa/src/mir/coverageinfo.rs @@ -10,19 +10,19 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let Coverage { kind, code_region } = coverage; match kind { CoverageKind::Counter { function_source_hash, id } => { - bx.add_counter_region(self.instance, function_source_hash, id, code_region); + if bx.add_counter_region(self.instance, function_source_hash, id, code_region) { + let coverageinfo = bx.tcx().coverageinfo(self.instance.def_id()); - let coverageinfo = bx.tcx().coverageinfo(self.instance.def_id()); - - let fn_name = bx.create_pgo_func_name_var(self.instance); - let hash = bx.const_u64(function_source_hash); - let num_counters = bx.const_u32(coverageinfo.num_counters); - let id = bx.const_u32(u32::from(id)); - debug!( - "codegen intrinsic instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, index={:?})", - fn_name, hash, num_counters, id, - ); - bx.instrprof_increment(fn_name, hash, num_counters, id); + let fn_name = bx.create_pgo_func_name_var(self.instance); + let hash = bx.const_u64(function_source_hash); + let num_counters = bx.const_u32(coverageinfo.num_counters); + let id = bx.const_u32(u32::from(id)); + debug!( + "codegen intrinsic instrprof.increment(fn_name={:?}, hash={:?}, num_counters={:?}, index={:?})", + fn_name, hash, num_counters, id, + ); + bx.instrprof_increment(fn_name, hash, num_counters, id); + } } CoverageKind::Expression { id, lhs, op, rhs } => { bx.add_counter_expression_region(self.instance, id, lhs, op, rhs, code_region); diff --git a/compiler/rustc_codegen_ssa/src/mir/mod.rs b/compiler/rustc_codegen_ssa/src/mir/mod.rs index 64d456fb7aa67..bff263567bf6b 100644 --- a/compiler/rustc_codegen_ssa/src/mir/mod.rs +++ b/compiler/rustc_codegen_ssa/src/mir/mod.rs @@ -153,7 +153,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( bx.set_personality_fn(cx.eh_personality()); } - bx.sideeffect(); + bx.sideeffect(false); let cleanup_kinds = analyze::cleanup_kinds(&mir); // Allocate a `Block` for every basic block, except diff --git a/compiler/rustc_codegen_ssa/src/traits/builder.rs b/compiler/rustc_codegen_ssa/src/traits/builder.rs index 0b8289a8dd92c..d5bd2780388e6 100644 --- a/compiler/rustc_codegen_ssa/src/traits/builder.rs +++ b/compiler/rustc_codegen_ssa/src/traits/builder.rs @@ -45,7 +45,7 @@ pub trait BuilderMethods<'a, 'tcx>: fn build_sibling_block(&self, name: &str) -> Self; fn cx(&self) -> &Self::CodegenCx; fn llbb(&self) -> Self::BasicBlock; - fn set_span(&self, span: Span); + fn set_span(&mut self, span: Span); fn position_at_end(&mut self, llbb: Self::BasicBlock); fn ret_void(&mut self); diff --git a/compiler/rustc_codegen_ssa/src/traits/coverageinfo.rs b/compiler/rustc_codegen_ssa/src/traits/coverageinfo.rs index b74e4e459016f..3b1654f3ad4fc 100644 --- a/compiler/rustc_codegen_ssa/src/traits/coverageinfo.rs +++ b/compiler/rustc_codegen_ssa/src/traits/coverageinfo.rs @@ -9,14 +9,18 @@ pub trait CoverageInfoMethods: BackendTypes { pub trait CoverageInfoBuilderMethods<'tcx>: BackendTypes { fn create_pgo_func_name_var(&self, instance: Instance<'tcx>) -> Self::Value; + /// Returns true if the counter was added to the coverage map; false if 
`-Z instrument-coverage` + /// is not enabled (a coverage map is not being generated). fn add_counter_region( &mut self, instance: Instance<'tcx>, function_source_hash: u64, id: CounterValueReference, region: CodeRegion, - ); + ) -> bool; + /// Returns true if the expression was added to the coverage map; false if + /// `-Z instrument-coverage` is not enabled (a coverage map is not being generated). fn add_counter_expression_region( &mut self, instance: Instance<'tcx>, @@ -25,7 +29,9 @@ pub trait CoverageInfoBuilderMethods<'tcx>: BackendTypes { op: Op, rhs: ExpressionOperandId, region: CodeRegion, - ); + ) -> bool; - fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: CodeRegion); + /// Returns true if the region was added to the coverage map; false if `-Z instrument-coverage` + /// is not enabled (a coverage map is not being generated). + fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: CodeRegion) -> bool; } diff --git a/compiler/rustc_codegen_ssa/src/traits/intrinsic.rs b/compiler/rustc_codegen_ssa/src/traits/intrinsic.rs index ccd294d92b2f4..ac3c99f9c908d 100644 --- a/compiler/rustc_codegen_ssa/src/traits/intrinsic.rs +++ b/compiler/rustc_codegen_ssa/src/traits/intrinsic.rs @@ -20,7 +20,9 @@ pub trait IntrinsicCallMethods<'tcx>: BackendTypes { fn abort(&mut self); fn assume(&mut self, val: Self::Value); fn expect(&mut self, cond: Self::Value, expected: bool) -> Self::Value; - fn sideeffect(&mut self); + /// Normally, sideeffect is only emitted if -Zinsert-sideeffect is passed; + /// in some cases though we want to emit it regardless. + fn sideeffect(&mut self, unconditional: bool); /// Trait method used to inject `va_start` on the "spoofed" `VaListImpl` in /// Rust defined C-variadic functions. fn va_start(&mut self, val: Self::Value) -> Self::Value; diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml index caaf7c0c3c250..23e689fcae796 100644 --- a/compiler/rustc_data_structures/Cargo.toml +++ b/compiler/rustc_data_structures/Cargo.toml @@ -25,7 +25,7 @@ rustc-hash = "1.1.0" smallvec = { version = "1.0", features = ["union", "may_dangle"] } rustc_index = { path = "../rustc_index", package = "rustc_index" } bitflags = "1.2.1" -measureme = "0.7.1" +measureme = "9.0.0" libc = "0.2" stacker = "0.1.12" tempfile = "3.0.5" diff --git a/compiler/rustc_data_structures/src/graph/iterate/mod.rs b/compiler/rustc_data_structures/src/graph/iterate/mod.rs index bc3d1ce53bac5..5f42d46e28575 100644 --- a/compiler/rustc_data_structures/src/graph/iterate/mod.rs +++ b/compiler/rustc_data_structures/src/graph/iterate/mod.rs @@ -1,6 +1,7 @@ use super::{DirectedGraph, WithNumNodes, WithStartNode, WithSuccessors}; use rustc_index::bit_set::BitSet; use rustc_index::vec::IndexVec; +use std::ops::ControlFlow; #[cfg(test)] mod tests; @@ -86,10 +87,6 @@ where } } -/// Allows searches to terminate early with a value. -// FIXME (#75744): remove the alias once the generics are in a better order and `C=()`. -pub type ControlFlow = std::ops::ControlFlow<(), T>; - /// The status of a node in the depth-first search. 
/// /// See the documentation of `TriColorDepthFirstSearch` to see how a node's status is updated diff --git a/compiler/rustc_data_structures/src/lib.rs b/compiler/rustc_data_structures/src/lib.rs index 9958e5dd5e0ee..7669b78834c3f 100644 --- a/compiler/rustc_data_structures/src/lib.rs +++ b/compiler/rustc_data_structures/src/lib.rs @@ -28,6 +28,7 @@ #![feature(const_panic)] #![feature(min_const_generics)] #![feature(once_cell)] +#![feature(maybe_uninit_uninit_array)] #![allow(rustc::default_hash_types)] #[macro_use] diff --git a/compiler/rustc_data_structures/src/obligation_forest/mod.rs b/compiler/rustc_data_structures/src/obligation_forest/mod.rs index c0193e9fa0c4e..a5b2df1da5d6d 100644 --- a/compiler/rustc_data_structures/src/obligation_forest/mod.rs +++ b/compiler/rustc_data_structures/src/obligation_forest/mod.rs @@ -149,8 +149,8 @@ pub struct ObligationForest { /// comments in `process_obligation` for details. active_cache: FxHashMap, - /// A vector reused in compress(), to avoid allocating new vectors. - node_rewrites: Vec, + /// A vector reused in compress() and find_cycles_from_node(), to avoid allocating new vectors. + reused_node_vec: Vec, obligation_tree_id_generator: ObligationTreeIdGenerator, @@ -251,12 +251,22 @@ enum NodeState { Error, } +/// This trait allows us to have two different Outcome types: +/// - the normal one that does as little as possible +/// - one for tests that does some additional work and checking +pub trait OutcomeTrait { + type Error; + type Obligation; + + fn new() -> Self; + fn mark_not_stalled(&mut self); + fn is_stalled(&self) -> bool; + fn record_completed(&mut self, outcome: &Self::Obligation); + fn record_error(&mut self, error: Self::Error); +} + #[derive(Debug)] pub struct Outcome { - /// Obligations that were completely evaluated, including all - /// (transitive) subobligations. Only computed if requested. - pub completed: Option>, - /// Backtrace of obligations that were found to be in error. pub errors: Vec>, @@ -269,12 +279,29 @@ pub struct Outcome { pub stalled: bool, } -/// Should `process_obligations` compute the `Outcome::completed` field of its -/// result? -#[derive(PartialEq)] -pub enum DoCompleted { - No, - Yes, +impl OutcomeTrait for Outcome { + type Error = Error; + type Obligation = O; + + fn new() -> Self { + Self { stalled: true, errors: vec![] } + } + + fn mark_not_stalled(&mut self) { + self.stalled = false; + } + + fn is_stalled(&self) -> bool { + self.stalled + } + + fn record_completed(&mut self, _outcome: &Self::Obligation) { + // do nothing + } + + fn record_error(&mut self, error: Self::Error) { + self.errors.push(error) + } } #[derive(Debug, PartialEq, Eq)] @@ -289,7 +316,7 @@ impl ObligationForest { nodes: vec![], done_cache: Default::default(), active_cache: Default::default(), - node_rewrites: vec![], + reused_node_vec: vec![], obligation_tree_id_generator: (0..).map(ObligationTreeId), error_cache: Default::default(), } @@ -363,8 +390,7 @@ impl ObligationForest { .map(|(index, _node)| Error { error: error.clone(), backtrace: self.error_at(index) }) .collect(); - let successful_obligations = self.compress(DoCompleted::Yes); - assert!(successful_obligations.unwrap().is_empty()); + self.compress(|_| assert!(false)); errors } @@ -392,16 +418,12 @@ impl ObligationForest { /// be called in a loop until `outcome.stalled` is false. /// /// This _cannot_ be unrolled (presently, at least). - pub fn process_obligations
<P>
( - &mut self, - processor: &mut P, - do_completed: DoCompleted, - ) -> Outcome + pub fn process_obligations(&mut self, processor: &mut P) -> OUT where P: ObligationProcessor, + OUT: OutcomeTrait>, { - let mut errors = vec![]; - let mut stalled = true; + let mut outcome = OUT::new(); // Note that the loop body can append new nodes, and those new nodes // will then be processed by subsequent iterations of the loop. @@ -429,7 +451,7 @@ impl ObligationForest { } ProcessResult::Changed(children) => { // We are not (yet) stalled. - stalled = false; + outcome.mark_not_stalled(); node.state.set(NodeState::Success); for child in children { @@ -442,28 +464,22 @@ impl ObligationForest { } } ProcessResult::Error(err) => { - stalled = false; - errors.push(Error { error: err, backtrace: self.error_at(index) }); + outcome.mark_not_stalled(); + outcome.record_error(Error { error: err, backtrace: self.error_at(index) }); } } index += 1; } - if stalled { - // There's no need to perform marking, cycle processing and compression when nothing - // changed. - return Outcome { - completed: if do_completed == DoCompleted::Yes { Some(vec![]) } else { None }, - errors, - stalled, - }; + // There's no need to perform marking, cycle processing and compression when nothing + // changed. + if !outcome.is_stalled() { + self.mark_successes(); + self.process_cycles(processor); + self.compress(|obl| outcome.record_completed(obl)); } - self.mark_successes(); - self.process_cycles(processor); - let completed = self.compress(do_completed); - - Outcome { completed, errors, stalled } + outcome } /// Returns a vector of obligations for `p` and all of its @@ -526,7 +542,6 @@ impl ObligationForest { let node = &self.nodes[index]; let state = node.state.get(); if state == NodeState::Success { - node.state.set(NodeState::Waiting); // This call site is cold. self.uninlined_mark_dependents_as_waiting(node); } else { @@ -538,17 +553,18 @@ impl ObligationForest { // This never-inlined function is for the cold call site. #[inline(never)] fn uninlined_mark_dependents_as_waiting(&self, node: &Node) { + // Mark node Waiting in the cold uninlined code instead of the hot inlined + node.state.set(NodeState::Waiting); self.inlined_mark_dependents_as_waiting(node) } /// Report cycles between all `Success` nodes, and convert all `Success` /// nodes to `Done`. This must be called after `mark_successes`. - fn process_cycles
<P>
(&self, processor: &mut P) + fn process_cycles
<P>
(&mut self, processor: &mut P) where P: ObligationProcessor, { - let mut stack = vec![]; - + let mut stack = std::mem::take(&mut self.reused_node_vec); for (index, node) in self.nodes.iter().enumerate() { // For some benchmarks this state test is extremely hot. It's a win // to handle the no-op cases immediately to avoid the cost of the @@ -559,6 +575,7 @@ impl ObligationForest { } debug_assert!(stack.is_empty()); + self.reused_node_vec = stack; } fn find_cycles_from_node
<P>
(&self, stack: &mut Vec, processor: &mut P, index: usize) @@ -591,13 +608,12 @@ impl ObligationForest { /// indices and hence invalidates any outstanding indices. `process_cycles` /// must be run beforehand to remove any cycles on `Success` nodes. #[inline(never)] - fn compress(&mut self, do_completed: DoCompleted) -> Option> { + fn compress(&mut self, mut outcome_cb: impl FnMut(&O)) { let orig_nodes_len = self.nodes.len(); - let mut node_rewrites: Vec<_> = std::mem::take(&mut self.node_rewrites); + let mut node_rewrites: Vec<_> = std::mem::take(&mut self.reused_node_vec); debug_assert!(node_rewrites.is_empty()); node_rewrites.extend(0..orig_nodes_len); let mut dead_nodes = 0; - let mut removed_done_obligations: Vec = vec![]; // Move removable nodes to the end, preserving the order of the // remaining nodes. @@ -627,10 +643,8 @@ impl ObligationForest { } else { self.done_cache.insert(node.obligation.as_cache_key().clone()); } - if do_completed == DoCompleted::Yes { - // Extract the success stories. - removed_done_obligations.push(node.obligation.clone()); - } + // Extract the success stories. + outcome_cb(&node.obligation); node_rewrites[index] = orig_nodes_len; dead_nodes += 1; } @@ -654,9 +668,7 @@ impl ObligationForest { } node_rewrites.truncate(0); - self.node_rewrites = node_rewrites; - - if do_completed == DoCompleted::Yes { Some(removed_done_obligations) } else { None } + self.reused_node_vec = node_rewrites; } fn apply_rewrites(&mut self, node_rewrites: &[usize]) { diff --git a/compiler/rustc_data_structures/src/obligation_forest/tests.rs b/compiler/rustc_data_structures/src/obligation_forest/tests.rs index 01652465eea2c..371c62c063fa7 100644 --- a/compiler/rustc_data_structures/src/obligation_forest/tests.rs +++ b/compiler/rustc_data_structures/src/obligation_forest/tests.rs @@ -17,6 +17,40 @@ struct ClosureObligationProcessor { marker: PhantomData<(O, E)>, } +struct TestOutcome { + pub completed: Vec, + pub errors: Vec>, + pub stalled: bool, +} + +impl OutcomeTrait for TestOutcome +where + O: Clone, +{ + type Error = Error; + type Obligation = O; + + fn new() -> Self { + Self { errors: vec![], stalled: false, completed: vec![] } + } + + fn mark_not_stalled(&mut self) { + self.stalled = false; + } + + fn is_stalled(&self) -> bool { + self.stalled + } + + fn record_completed(&mut self, outcome: &Self::Obligation) { + self.completed.push(outcome.clone()) + } + + fn record_error(&mut self, error: Self::Error) { + self.errors.push(error) + } +} + #[allow(non_snake_case)] fn C(of: OF, bf: BF) -> ClosureObligationProcessor where @@ -65,20 +99,17 @@ fn push_pop() { // A |-> A.1 // |-> A.2 // |-> A.3 - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A" => ProcessResult::Changed(vec!["A.1", "A.2", "A.3"]), - "B" => ProcessResult::Error("B is for broken"), - "C" => ProcessResult::Changed(vec![]), - "A.1" | "A.2" | "A.3" => ProcessResult::Unchanged, - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert_eq!(ok.unwrap(), vec!["C"]); + let TestOutcome { completed: ok, errors: err, .. 
} = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A" => ProcessResult::Changed(vec!["A.1", "A.2", "A.3"]), + "B" => ProcessResult::Error("B is for broken"), + "C" => ProcessResult::Changed(vec![]), + "A.1" | "A.2" | "A.3" => ProcessResult::Unchanged, + _ => unreachable!(), + }, + |_| {}, + )); + assert_eq!(ok, vec!["C"]); assert_eq!(err, vec![Error { error: "B is for broken", backtrace: vec!["B"] }]); // second round: two delays, one success, creating an uneven set of subtasks: @@ -88,60 +119,51 @@ fn push_pop() { // D |-> D.1 // |-> D.2 forest.register_obligation("D"); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A.1" => ProcessResult::Unchanged, - "A.2" => ProcessResult::Unchanged, - "A.3" => ProcessResult::Changed(vec!["A.3.i"]), - "D" => ProcessResult::Changed(vec!["D.1", "D.2"]), - "A.3.i" | "D.1" | "D.2" => ProcessResult::Unchanged, - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert_eq!(ok.unwrap(), Vec::<&'static str>::new()); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A.1" => ProcessResult::Unchanged, + "A.2" => ProcessResult::Unchanged, + "A.3" => ProcessResult::Changed(vec!["A.3.i"]), + "D" => ProcessResult::Changed(vec!["D.1", "D.2"]), + "A.3.i" | "D.1" | "D.2" => ProcessResult::Unchanged, + _ => unreachable!(), + }, + |_| {}, + )); + assert_eq!(ok, Vec::<&'static str>::new()); assert_eq!(err, Vec::new()); // third round: ok in A.1 but trigger an error in A.2. Check that it // propagates to A, but not D.1 or D.2. // D |-> D.1 |-> D.1.i // |-> D.2 |-> D.2.i - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A.1" => ProcessResult::Changed(vec![]), - "A.2" => ProcessResult::Error("A is for apple"), - "A.3.i" => ProcessResult::Changed(vec![]), - "D.1" => ProcessResult::Changed(vec!["D.1.i"]), - "D.2" => ProcessResult::Changed(vec!["D.2.i"]), - "D.1.i" | "D.2.i" => ProcessResult::Unchanged, - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - let mut ok = ok.unwrap(); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A.1" => ProcessResult::Changed(vec![]), + "A.2" => ProcessResult::Error("A is for apple"), + "A.3.i" => ProcessResult::Changed(vec![]), + "D.1" => ProcessResult::Changed(vec!["D.1.i"]), + "D.2" => ProcessResult::Changed(vec!["D.2.i"]), + "D.1.i" | "D.2.i" => ProcessResult::Unchanged, + _ => unreachable!(), + }, + |_| {}, + )); + let mut ok = ok; ok.sort(); assert_eq!(ok, vec!["A.1", "A.3", "A.3.i"]); assert_eq!(err, vec![Error { error: "A is for apple", backtrace: vec!["A.2", "A"] }]); // fourth round: error in D.1.i - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "D.1.i" => ProcessResult::Error("D is for dumb"), - "D.2.i" => ProcessResult::Changed(vec![]), - _ => panic!("unexpected obligation {:?}", obligation), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - let mut ok = ok.unwrap(); + let TestOutcome { completed: ok, errors: err, .. 
} = forest.process_obligations(&mut C( + |obligation| match *obligation { + "D.1.i" => ProcessResult::Error("D is for dumb"), + "D.2.i" => ProcessResult::Changed(vec![]), + _ => panic!("unexpected obligation {:?}", obligation), + }, + |_| {}, + )); + let mut ok = ok; ok.sort(); assert_eq!(ok, vec!["D.2", "D.2.i"]); assert_eq!(err, vec![Error { error: "D is for dumb", backtrace: vec!["D.1.i", "D.1", "D"] }]); @@ -160,72 +182,60 @@ fn success_in_grandchildren() { let mut forest = ObligationForest::new(); forest.register_obligation("A"); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A" => ProcessResult::Changed(vec!["A.1", "A.2", "A.3"]), - "A.1" => ProcessResult::Changed(vec![]), - "A.2" => ProcessResult::Changed(vec!["A.2.i", "A.2.ii"]), - "A.3" => ProcessResult::Changed(vec![]), - "A.2.i" | "A.2.ii" => ProcessResult::Unchanged, - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - let mut ok = ok.unwrap(); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A" => ProcessResult::Changed(vec!["A.1", "A.2", "A.3"]), + "A.1" => ProcessResult::Changed(vec![]), + "A.2" => ProcessResult::Changed(vec!["A.2.i", "A.2.ii"]), + "A.3" => ProcessResult::Changed(vec![]), + "A.2.i" | "A.2.ii" => ProcessResult::Unchanged, + _ => unreachable!(), + }, + |_| {}, + )); + let mut ok = ok; ok.sort(); assert_eq!(ok, vec!["A.1", "A.3"]); assert!(err.is_empty()); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A.2.i" => ProcessResult::Unchanged, - "A.2.ii" => ProcessResult::Changed(vec![]), - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert_eq!(ok.unwrap(), vec!["A.2.ii"]); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A.2.i" => ProcessResult::Unchanged, + "A.2.ii" => ProcessResult::Changed(vec![]), + _ => unreachable!(), + }, + |_| {}, + )); + assert_eq!(ok, vec!["A.2.ii"]); assert!(err.is_empty()); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A.2.i" => ProcessResult::Changed(vec!["A.2.i.a"]), - "A.2.i.a" => ProcessResult::Unchanged, - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert!(ok.unwrap().is_empty()); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A.2.i" => ProcessResult::Changed(vec!["A.2.i.a"]), + "A.2.i.a" => ProcessResult::Unchanged, + _ => unreachable!(), + }, + |_| {}, + )); + assert!(ok.is_empty()); assert!(err.is_empty()); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A.2.i.a" => ProcessResult::Changed(vec![]), - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - let mut ok = ok.unwrap(); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A.2.i.a" => ProcessResult::Changed(vec![]), + _ => unreachable!(), + }, + |_| {}, + )); + let mut ok = ok; ok.sort(); assert_eq!(ok, vec!["A", "A.2", "A.2.i", "A.2.i.a"]); assert!(err.is_empty()); - let Outcome { completed: ok, errors: err, .. 
} = - forest.process_obligations(&mut C(|_| unreachable!(), |_| {}), DoCompleted::Yes); + let TestOutcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|_| unreachable!(), |_| {})); - assert!(ok.unwrap().is_empty()); + assert!(ok.is_empty()); assert!(err.is_empty()); } @@ -235,18 +245,15 @@ fn to_errors_no_throw() { // yields to correct errors (and does not panic, in particular). let mut forest = ObligationForest::new(); forest.register_obligation("A"); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A" => ProcessResult::Changed(vec!["A.1", "A.2", "A.3"]), - "A.1" | "A.2" | "A.3" => ProcessResult::Unchanged, - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert_eq!(ok.unwrap().len(), 0); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A" => ProcessResult::Changed(vec!["A.1", "A.2", "A.3"]), + "A.1" | "A.2" | "A.3" => ProcessResult::Unchanged, + _ => unreachable!(), + }, + |_| {}, + )); + assert_eq!(ok.len(), 0); assert_eq!(err.len(), 0); let errors = forest.to_errors(()); assert_eq!(errors[0].backtrace, vec!["A.1", "A"]); @@ -260,51 +267,42 @@ fn diamond() { // check that diamond dependencies are handled correctly let mut forest = ObligationForest::new(); forest.register_obligation("A"); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A" => ProcessResult::Changed(vec!["A.1", "A.2"]), - "A.1" | "A.2" => ProcessResult::Unchanged, - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert_eq!(ok.unwrap().len(), 0); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A" => ProcessResult::Changed(vec!["A.1", "A.2"]), + "A.1" | "A.2" => ProcessResult::Unchanged, + _ => unreachable!(), + }, + |_| {}, + )); + assert_eq!(ok.len(), 0); assert_eq!(err.len(), 0); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A.1" => ProcessResult::Changed(vec!["D"]), - "A.2" => ProcessResult::Changed(vec!["D"]), - "D" => ProcessResult::Unchanged, - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert_eq!(ok.unwrap().len(), 0); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A.1" => ProcessResult::Changed(vec!["D"]), + "A.2" => ProcessResult::Changed(vec!["D"]), + "D" => ProcessResult::Unchanged, + _ => unreachable!(), + }, + |_| {}, + )); + assert_eq!(ok.len(), 0); assert_eq!(err.len(), 0); let mut d_count = 0; - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "D" => { - d_count += 1; - ProcessResult::Changed(vec![]) - } - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); + let TestOutcome { completed: ok, errors: err, .. 
} = forest.process_obligations(&mut C( + |obligation| match *obligation { + "D" => { + d_count += 1; + ProcessResult::Changed(vec![]) + } + _ => unreachable!(), + }, + |_| {}, + )); assert_eq!(d_count, 1); - let mut ok = ok.unwrap(); + let mut ok = ok; ok.sort(); assert_eq!(ok, vec!["A", "A.1", "A.2", "D"]); assert_eq!(err.len(), 0); @@ -313,51 +311,42 @@ fn diamond() { assert_eq!(errors.len(), 0); forest.register_obligation("A'"); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A'" => ProcessResult::Changed(vec!["A'.1", "A'.2"]), - "A'.1" | "A'.2" => ProcessResult::Unchanged, - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert_eq!(ok.unwrap().len(), 0); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A'" => ProcessResult::Changed(vec!["A'.1", "A'.2"]), + "A'.1" | "A'.2" => ProcessResult::Unchanged, + _ => unreachable!(), + }, + |_| {}, + )); + assert_eq!(ok.len(), 0); assert_eq!(err.len(), 0); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A'.1" => ProcessResult::Changed(vec!["D'", "A'"]), - "A'.2" => ProcessResult::Changed(vec!["D'"]), - "D'" | "A'" => ProcessResult::Unchanged, - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert_eq!(ok.unwrap().len(), 0); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A'.1" => ProcessResult::Changed(vec!["D'", "A'"]), + "A'.2" => ProcessResult::Changed(vec!["D'"]), + "D'" | "A'" => ProcessResult::Unchanged, + _ => unreachable!(), + }, + |_| {}, + )); + assert_eq!(ok.len(), 0); assert_eq!(err.len(), 0); let mut d_count = 0; - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "D'" => { - d_count += 1; - ProcessResult::Error("operation failed") - } - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "D'" => { + d_count += 1; + ProcessResult::Error("operation failed") + } + _ => unreachable!(), + }, + |_| {}, + )); assert_eq!(d_count, 1); - assert_eq!(ok.unwrap().len(), 0); + assert_eq!(ok.len(), 0); assert_eq!( err, vec![super::Error { error: "operation failed", backtrace: vec!["D'", "A'.1", "A'"] }] @@ -375,35 +364,27 @@ fn done_dependency() { forest.register_obligation("B: Sized"); forest.register_obligation("C: Sized"); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A: Sized" | "B: Sized" | "C: Sized" => ProcessResult::Changed(vec![]), - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - let mut ok = ok.unwrap(); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A: Sized" | "B: Sized" | "C: Sized" => ProcessResult::Changed(vec![]), + _ => unreachable!(), + }, + |_| {}, + )); + let mut ok = ok; ok.sort(); assert_eq!(ok, vec!["A: Sized", "B: Sized", "C: Sized"]); assert_eq!(err.len(), 0); forest.register_obligation("(A,B,C): Sized"); - let Outcome { completed: ok, errors: err, .. 
} = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "(A,B,C): Sized" => { - ProcessResult::Changed(vec!["A: Sized", "B: Sized", "C: Sized"]) - } - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert_eq!(ok.unwrap(), vec!["(A,B,C): Sized"]); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "(A,B,C): Sized" => ProcessResult::Changed(vec!["A: Sized", "B: Sized", "C: Sized"]), + _ => unreachable!(), + }, + |_| {}, + )); + assert_eq!(ok, vec!["(A,B,C): Sized"]); assert_eq!(err.len(), 0); } @@ -416,64 +397,52 @@ fn orphan() { forest.register_obligation("C1"); forest.register_obligation("C2"); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A" => ProcessResult::Changed(vec!["D", "E"]), - "B" => ProcessResult::Unchanged, - "C1" => ProcessResult::Changed(vec![]), - "C2" => ProcessResult::Changed(vec![]), - "D" | "E" => ProcessResult::Unchanged, - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - let mut ok = ok.unwrap(); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A" => ProcessResult::Changed(vec!["D", "E"]), + "B" => ProcessResult::Unchanged, + "C1" => ProcessResult::Changed(vec![]), + "C2" => ProcessResult::Changed(vec![]), + "D" | "E" => ProcessResult::Unchanged, + _ => unreachable!(), + }, + |_| {}, + )); + let mut ok = ok; ok.sort(); assert_eq!(ok, vec!["C1", "C2"]); assert_eq!(err.len(), 0); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "D" | "E" => ProcessResult::Unchanged, - "B" => ProcessResult::Changed(vec!["D"]), - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert_eq!(ok.unwrap().len(), 0); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "D" | "E" => ProcessResult::Unchanged, + "B" => ProcessResult::Changed(vec!["D"]), + _ => unreachable!(), + }, + |_| {}, + )); + assert_eq!(ok.len(), 0); assert_eq!(err.len(), 0); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "D" => ProcessResult::Unchanged, - "E" => ProcessResult::Error("E is for error"), - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert_eq!(ok.unwrap().len(), 0); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "D" => ProcessResult::Unchanged, + "E" => ProcessResult::Error("E is for error"), + _ => unreachable!(), + }, + |_| {}, + )); + assert_eq!(ok.len(), 0); assert_eq!(err, vec![super::Error { error: "E is for error", backtrace: vec!["E", "A"] }]); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "D" => ProcessResult::Error("D is dead"), - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert_eq!(ok.unwrap().len(), 0); + let TestOutcome { completed: ok, errors: err, .. 
} = forest.process_obligations(&mut C( + |obligation| match *obligation { + "D" => ProcessResult::Error("D is dead"), + _ => unreachable!(), + }, + |_| {}, + )); + assert_eq!(ok.len(), 0); assert_eq!(err, vec![super::Error { error: "D is dead", backtrace: vec!["D"] }]); let errors = forest.to_errors(()); @@ -487,35 +456,29 @@ fn simultaneous_register_and_error() { forest.register_obligation("A"); forest.register_obligation("B"); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A" => ProcessResult::Error("An error"), - "B" => ProcessResult::Changed(vec!["A"]), - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert_eq!(ok.unwrap().len(), 0); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A" => ProcessResult::Error("An error"), + "B" => ProcessResult::Changed(vec!["A"]), + _ => unreachable!(), + }, + |_| {}, + )); + assert_eq!(ok.len(), 0); assert_eq!(err, vec![super::Error { error: "An error", backtrace: vec!["A"] }]); let mut forest = ObligationForest::new(); forest.register_obligation("B"); forest.register_obligation("A"); - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations( - &mut C( - |obligation| match *obligation { - "A" => ProcessResult::Error("An error"), - "B" => ProcessResult::Changed(vec!["A"]), - _ => unreachable!(), - }, - |_| {}, - ), - DoCompleted::Yes, - ); - assert_eq!(ok.unwrap().len(), 0); + let TestOutcome { completed: ok, errors: err, .. } = forest.process_obligations(&mut C( + |obligation| match *obligation { + "A" => ProcessResult::Error("An error"), + "B" => ProcessResult::Changed(vec!["A"]), + _ => unreachable!(), + }, + |_| {}, + )); + assert_eq!(ok.len(), 0); assert_eq!(err, vec![super::Error { error: "An error", backtrace: vec!["A"] }]); } diff --git a/compiler/rustc_data_structures/src/profiling.rs b/compiler/rustc_data_structures/src/profiling.rs index 363879cbb1d19..e598d7a683d5a 100644 --- a/compiler/rustc_data_structures/src/profiling.rs +++ b/compiler/rustc_data_structures/src/profiling.rs @@ -94,34 +94,9 @@ use std::process; use std::sync::Arc; use std::time::{Duration, Instant}; -use measureme::{EventId, EventIdBuilder, SerializableString, StringId}; +use measureme::{EventId, EventIdBuilder, Profiler, SerializableString, StringId}; use parking_lot::RwLock; -cfg_if! { - if #[cfg(any(windows, target_os = "wasi"))] { - /// FileSerializationSink is faster on Windows - type SerializationSink = measureme::FileSerializationSink; - } else if #[cfg(target_arch = "wasm32")] { - type SerializationSink = measureme::ByteVecSink; - } else { - /// MmapSerializatioSink is faster on macOS and Linux - type SerializationSink = measureme::MmapSerializationSink; - } -} - -type Profiler = measureme::Profiler; - -#[derive(Clone, Copy, Debug, PartialEq, Eq, Ord, PartialOrd)] -pub enum ProfileCategory { - Parsing, - Expansion, - TypeChecking, - BorrowChecking, - Codegen, - Linking, - Other, -} - bitflags::bitflags! 
{ struct EventFilter: u32 { const GENERIC_ACTIVITIES = 1 << 0; @@ -400,7 +375,7 @@ impl SelfProfiler { output_directory: &Path, crate_name: Option<&str>, event_filters: &Option>, - ) -> Result> { + ) -> Result> { fs::create_dir_all(output_directory)?; let crate_name = crate_name.unwrap_or("unknown-crate"); @@ -511,13 +486,13 @@ impl SelfProfiler { self.event_filter_mask.contains(EventFilter::QUERY_KEYS) } - pub fn event_id_builder(&self) -> EventIdBuilder<'_, SerializationSink> { + pub fn event_id_builder(&self) -> EventIdBuilder<'_> { EventIdBuilder::new(&self.profiler) } } #[must_use] -pub struct TimingGuard<'a>(Option>); +pub struct TimingGuard<'a>(Option>); impl<'a> TimingGuard<'a> { #[inline] diff --git a/compiler/rustc_data_structures/src/sip128.rs b/compiler/rustc_data_structures/src/sip128.rs index 2c4eff618c685..53062b9c20da8 100644 --- a/compiler/rustc_data_structures/src/sip128.rs +++ b/compiler/rustc_data_structures/src/sip128.rs @@ -1,21 +1,53 @@ //! This is a copy of `core::hash::sip` adapted to providing 128 bit hashes. -use std::cmp; use std::hash::Hasher; -use std::mem; +use std::mem::{self, MaybeUninit}; use std::ptr; #[cfg(test)] mod tests; +// The SipHash algorithm operates on 8-byte chunks. +const ELEM_SIZE: usize = mem::size_of::(); + +// Size of the buffer in number of elements, not including the spill. +// +// The selection of this size was guided by rustc-perf benchmark comparisons of +// different buffer sizes. It should be periodically reevaluated as the compiler +// implementation and input characteristics change. +// +// Using the same-sized buffer for everything we hash is a performance versus +// complexity tradeoff. The ideal buffer size, and whether buffering should even +// be used, depends on what is being hashed. It may be worth it to size the +// buffer appropriately (perhaps by making SipHasher128 generic over the buffer +// size) or disable buffering depending on what is being hashed. But at this +// time, we use the same buffer size for everything. +const BUFFER_CAPACITY: usize = 8; + +// Size of the buffer in bytes, not including the spill. +const BUFFER_SIZE: usize = BUFFER_CAPACITY * ELEM_SIZE; + +// Size of the buffer in number of elements, including the spill. +const BUFFER_WITH_SPILL_CAPACITY: usize = BUFFER_CAPACITY + 1; + +// Size of the buffer in bytes, including the spill. +const BUFFER_WITH_SPILL_SIZE: usize = BUFFER_WITH_SPILL_CAPACITY * ELEM_SIZE; + +// Index of the spill element in the buffer. +const BUFFER_SPILL_INDEX: usize = BUFFER_WITH_SPILL_CAPACITY - 1; + #[derive(Debug, Clone)] +#[repr(C)] pub struct SipHasher128 { - k0: u64, - k1: u64, - length: usize, // how many bytes we've processed - state: State, // hash State - tail: u64, // unprocessed bytes le - ntail: usize, // how many bytes in tail are valid + // The access pattern during hashing consists of accesses to `nbuf` and + // `buf` until the buffer is full, followed by accesses to `state` and + // `processed`, and then repetition of that pattern until hashing is done. + // This is the basis for the ordering of fields below. However, in practice + // the cache miss-rate for data access is extremely low regardless of order. + nbuf: usize, // how many bytes in buf are valid + buf: [MaybeUninit; BUFFER_WITH_SPILL_CAPACITY], // unprocessed bytes le + state: State, // hash State + processed: usize, // how many bytes we've processed } #[derive(Debug, Clone, Copy)] @@ -51,178 +83,328 @@ macro_rules! 
compress { }}; } -/// Loads an integer of the desired type from a byte stream, in LE order. Uses -/// `copy_nonoverlapping` to let the compiler generate the most efficient way -/// to load it from a possibly unaligned address. -/// -/// Unsafe because: unchecked indexing at i..i+size_of(int_ty) -macro_rules! load_int_le { - ($buf:expr, $i:expr, $int_ty:ident) => {{ - debug_assert!($i + mem::size_of::<$int_ty>() <= $buf.len()); - let mut data = 0 as $int_ty; - ptr::copy_nonoverlapping( - $buf.get_unchecked($i), - &mut data as *mut _ as *mut u8, - mem::size_of::<$int_ty>(), - ); - data.to_le() - }}; -} - -/// Loads a u64 using up to 7 bytes of a byte slice. It looks clumsy but the -/// `copy_nonoverlapping` calls that occur (via `load_int_le!`) all have fixed -/// sizes and avoid calling `memcpy`, which is good for speed. -/// -/// Unsafe because: unchecked indexing at start..start+len +// Copies up to 8 bytes from source to destination. This performs better than +// `ptr::copy_nonoverlapping` on microbenchmarks and may perform better on real +// workloads since all of the copies have fixed sizes and avoid calling memcpy. +// +// This is specifically designed for copies of up to 8 bytes, because that's the +// maximum of number bytes needed to fill an 8-byte-sized element on which +// SipHash operates. Note that for variable-sized copies which are known to be +// less than 8 bytes, this function will perform more work than necessary unless +// the compiler is able to optimize the extra work away. #[inline] -unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 { - debug_assert!(len < 8); - let mut i = 0; // current byte index (from LSB) in the output u64 - let mut out = 0; - if i + 3 < len { - out = load_int_le!(buf, start + i, u32) as u64; +unsafe fn copy_nonoverlapping_small(src: *const u8, dst: *mut u8, count: usize) { + debug_assert!(count <= 8); + + if count == 8 { + ptr::copy_nonoverlapping(src, dst, 8); + return; + } + + let mut i = 0; + if i + 3 < count { + ptr::copy_nonoverlapping(src.add(i), dst.add(i), 4); i += 4; } - if i + 1 < len { - out |= (load_int_le!(buf, start + i, u16) as u64) << (i * 8); + + if i + 1 < count { + ptr::copy_nonoverlapping(src.add(i), dst.add(i), 2); i += 2 } - if i < len { - out |= (*buf.get_unchecked(start + i) as u64) << (i * 8); + + if i < count { + *dst.add(i) = *src.add(i); i += 1; } - debug_assert_eq!(i, len); - out + + debug_assert_eq!(i, count); } +// # Implementation +// +// This implementation uses buffering to reduce the hashing cost for inputs +// consisting of many small integers. Buffering simplifies the integration of +// integer input--the integer write function typically just appends to the +// buffer with a statically sized write, updates metadata, and returns. +// +// Buffering also prevents alternating between writes that do and do not trigger +// the hashing process. Only when the entire buffer is full do we transition +// into hashing. This allows us to keep the hash state in registers for longer, +// instead of loading and storing it before and after processing each element. +// +// When a write fills the buffer, a buffer processing function is invoked to +// hash all of the buffered input. 
The buffer processing functions are marked +// `#[inline(never)]` so that they aren't inlined into the append functions, +// which ensures the more frequently called append functions remain inlineable +// and don't include register pushing/popping that would only be made necessary +// by inclusion of the complex buffer processing path which uses those +// registers. +// +// The buffer includes a "spill"--an extra element at the end--which simplifies +// the integer write buffer processing path. The value that fills the buffer can +// be written with a statically sized write that may spill over into the spill. +// After the buffer is processed, the part of the value that spilled over can be +// written from the spill to the beginning of the buffer with another statically +// sized write. This write may copy more bytes than actually spilled over, but +// we maintain the metadata such that any extra copied bytes will be ignored by +// subsequent processing. Due to the static sizes, this scheme performs better +// than copying the exact number of bytes needed into the end and beginning of +// the buffer. +// +// The buffer is uninitialized, which improves performance, but may preclude +// efficient implementation of alternative approaches. The improvement is not so +// large that an alternative approach should be disregarded because it cannot be +// efficiently implemented with an uninitialized buffer. On the other hand, an +// uninitialized buffer may become more important should a larger one be used. +// +// # Platform Dependence +// +// The SipHash algorithm operates on byte sequences. It parses the input stream +// as 8-byte little-endian integers. Therefore, given the same byte sequence, it +// produces the same result on big- and little-endian hardware. +// +// However, the Hasher trait has methods which operate on multi-byte integers. +// How they are converted into byte sequences can be endian-dependent (by using +// native byte order) or independent (by consistently using either LE or BE byte +// order). It can also be `isize` and `usize` size dependent (by using the +// native size), or independent (by converting to a common size), supposing the +// values can be represented in 32 bits. +// +// In order to make `SipHasher128` consistent with `SipHasher` in libstd, we +// choose to do the integer to byte sequence conversion in the platform- +// dependent way. Clients can achieve platform-independent hashing by widening +// `isize` and `usize` integers to 64 bits on 32-bit systems and byte-swapping +// integers on big-endian systems before passing them to the writing functions. +// This causes the input byte sequence to look identical on big- and little- +// endian systems (supposing `isize` and `usize` values can be represented in 32 +// bits), which ensures platform-independent results. impl SipHasher128 { #[inline] pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher128 { - let mut state = SipHasher128 { - k0: key0, - k1: key1, - length: 0, - state: State { v0: 0, v1: 0, v2: 0, v3: 0 }, - tail: 0, - ntail: 0, + let mut hasher = SipHasher128 { + nbuf: 0, + buf: MaybeUninit::uninit_array(), + state: State { + v0: key0 ^ 0x736f6d6570736575, + // The XOR with 0xee is only done on 128-bit algorithm version. + v1: key1 ^ (0x646f72616e646f6d ^ 0xee), + v2: key0 ^ 0x6c7967656e657261, + v3: key1 ^ 0x7465646279746573, + }, + processed: 0, }; - state.reset(); - state + + unsafe { + // Initialize spill because we read from it in `short_write_process_buffer`. 
+ *hasher.buf.get_unchecked_mut(BUFFER_SPILL_INDEX) = MaybeUninit::zeroed(); + } + + hasher } + // A specialized write function for values with size <= 8. #[inline] - fn reset(&mut self) { - self.length = 0; - self.state.v0 = self.k0 ^ 0x736f6d6570736575; - self.state.v1 = self.k1 ^ 0x646f72616e646f6d; - self.state.v2 = self.k0 ^ 0x6c7967656e657261; - self.state.v3 = self.k1 ^ 0x7465646279746573; - self.ntail = 0; - - // This is only done in the 128 bit version: - self.state.v1 ^= 0xee; + fn short_write(&mut self, x: T) { + let size = mem::size_of::(); + let nbuf = self.nbuf; + debug_assert!(size <= 8); + debug_assert!(nbuf < BUFFER_SIZE); + debug_assert!(nbuf + size < BUFFER_WITH_SPILL_SIZE); + + if nbuf + size < BUFFER_SIZE { + unsafe { + // The memcpy call is optimized away because the size is known. + let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); + ptr::copy_nonoverlapping(&x as *const _ as *const u8, dst, size); + } + + self.nbuf = nbuf + size; + + return; + } + + unsafe { self.short_write_process_buffer(x) } } - // A specialized write function for values with size <= 8. - // - // The input must be zero-extended to 64-bits by the caller. This extension - // isn't hashed, but the implementation requires it for correctness. + // A specialized write function for values with size <= 8 that should only + // be called when the write would cause the buffer to fill. // - // This function, given the same integer size and value, has the same effect - // on both little- and big-endian hardware. It operates on values without - // depending on their sequence in memory, so is independent of endianness. - // - // However, we want SipHasher128 to be platform-dependent, in order to be - // consistent with the platform-dependent SipHasher in libstd. In other - // words, we want: - // - // - little-endian: `write_u32(0xDDCCBBAA)` == `write([0xAA, 0xBB, 0xCC, 0xDD])` - // - big-endian: `write_u32(0xDDCCBBAA)` == `write([0xDD, 0xCC, 0xBB, 0xAA])` - // - // Therefore, in order to produce endian-dependent results, SipHasher128's - // `write_xxx` Hasher trait methods byte-swap `x` prior to zero-extending. - // - // If clients of SipHasher128 itself want platform-independent results, they - // *also* must byte-swap integer inputs before invoking the `write_xxx` - // methods on big-endian hardware (that is, two byte-swaps must occur--one - // in the client, and one in SipHasher128). Additionally, they must extend - // `usize` and `isize` types to 64 bits on 32-bit systems. - #[inline] - fn short_write(&mut self, _x: T, x: u64) { + // SAFETY: the write of `x` into `self.buf` starting at byte offset + // `self.nbuf` must cause `self.buf` to become fully initialized (and not + // overflow) if it wasn't already. + #[inline(never)] + unsafe fn short_write_process_buffer(&mut self, x: T) { let size = mem::size_of::(); - self.length += size; - - // The original number must be zero-extended, not sign-extended. - debug_assert!(if size < 8 { x >> (8 * size) == 0 } else { true }); - - // The number of bytes needed to fill `self.tail`. - let needed = 8 - self.ntail; - - // SipHash parses the input stream as 8-byte little-endian integers. - // Inputs are put into `self.tail` until 8 bytes of data have been - // collected, and then that word is processed. - // - // For example, imagine that `self.tail` is 0x0000_00EE_DDCC_BBAA, - // `self.ntail` is 5 (because 5 bytes have been put into `self.tail`), - // and `needed` is therefore 3. 
- // - // - Scenario 1, `self.write_u8(0xFF)`: we have already zero-extended - // the input to 0x0000_0000_0000_00FF. We now left-shift it five - // bytes, giving 0x0000_FF00_0000_0000. We then bitwise-OR that value - // into `self.tail`, resulting in 0x0000_FFEE_DDCC_BBAA. - // (Zero-extension of the original input is critical in this scenario - // because we don't want the high two bytes of `self.tail` to be - // touched by the bitwise-OR.) `self.tail` is not yet full, so we - // return early, after updating `self.ntail` to 6. - // - // - Scenario 2, `self.write_u32(0xIIHH_GGFF)`: we have already - // zero-extended the input to 0x0000_0000_IIHH_GGFF. We now - // left-shift it five bytes, giving 0xHHGG_FF00_0000_0000. We then - // bitwise-OR that value into `self.tail`, resulting in - // 0xHHGG_FFEE_DDCC_BBAA. `self.tail` is now full, and we can use it - // to update `self.state`. (As mentioned above, this assumes a - // little-endian machine; on a big-endian machine we would have - // byte-swapped 0xIIHH_GGFF in the caller, giving 0xFFGG_HHII, and we - // would then end up bitwise-ORing 0xGGHH_II00_0000_0000 into - // `self.tail`). - // - self.tail |= x << (8 * self.ntail); - if size < needed { - self.ntail += size; + let nbuf = self.nbuf; + debug_assert!(size <= 8); + debug_assert!(nbuf < BUFFER_SIZE); + debug_assert!(nbuf + size >= BUFFER_SIZE); + debug_assert!(nbuf + size < BUFFER_WITH_SPILL_SIZE); + + // Copy first part of input into end of buffer, possibly into spill + // element. The memcpy call is optimized away because the size is known. + let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); + ptr::copy_nonoverlapping(&x as *const _ as *const u8, dst, size); + + // Process buffer. + for i in 0..BUFFER_CAPACITY { + let elem = self.buf.get_unchecked(i).assume_init().to_le(); + self.state.v3 ^= elem; + Sip24Rounds::c_rounds(&mut self.state); + self.state.v0 ^= elem; + } + + // Copy remaining input into start of buffer by copying size - 1 + // elements from spill (at most size - 1 bytes could have overflowed + // into the spill). The memcpy call is optimized away because the size + // is known. And the whole copy is optimized away for size == 1. + let src = self.buf.get_unchecked(BUFFER_SPILL_INDEX) as *const _ as *const u8; + ptr::copy_nonoverlapping(src, self.buf.as_mut_ptr() as *mut u8, size - 1); + + // This function should only be called when the write fills the buffer. + // Therefore, when size == 1, the new `self.nbuf` must be zero. The size + // is statically known, so the branch is optimized away. + self.nbuf = if size == 1 { 0 } else { nbuf + size - BUFFER_SIZE }; + self.processed += BUFFER_SIZE; + } + + // A write function for byte slices. + #[inline] + fn slice_write(&mut self, msg: &[u8]) { + let length = msg.len(); + let nbuf = self.nbuf; + debug_assert!(nbuf < BUFFER_SIZE); + + if nbuf + length < BUFFER_SIZE { + unsafe { + let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); + + if length <= 8 { + copy_nonoverlapping_small(msg.as_ptr(), dst, length); + } else { + // This memcpy is *not* optimized away. + ptr::copy_nonoverlapping(msg.as_ptr(), dst, length); + } + } + + self.nbuf = nbuf + length; + return; } - // `self.tail` is full, process it. - self.state.v3 ^= self.tail; - Sip24Rounds::c_rounds(&mut self.state); - self.state.v0 ^= self.tail; - - // Continuing scenario 2: we have one byte left over from the input. We - // set `self.ntail` to 1 and `self.tail` to `0x0000_0000_IIHH_GGFF >> - // 8*3`, which is 0x0000_0000_0000_00II. 
(Or on a big-endian machine - // the prior byte-swapping would leave us with 0x0000_0000_0000_00FF.) - // - // The `if` is needed to avoid shifting by 64 bits, which Rust - // complains about. - self.ntail = size - needed; - self.tail = if needed < 8 { x >> (8 * needed) } else { 0 }; + unsafe { self.slice_write_process_buffer(msg) } + } + + // A write function for byte slices that should only be called when the + // write would cause the buffer to fill. + // + // SAFETY: `self.buf` must be initialized up to the byte offset `self.nbuf`, + // and `msg` must contain enough bytes to initialize the rest of the element + // containing the byte offset `self.nbuf`. + #[inline(never)] + unsafe fn slice_write_process_buffer(&mut self, msg: &[u8]) { + let length = msg.len(); + let nbuf = self.nbuf; + debug_assert!(nbuf < BUFFER_SIZE); + debug_assert!(nbuf + length >= BUFFER_SIZE); + + // Always copy first part of input into current element of buffer. + // This function should only be called when the write fills the buffer, + // so we know that there is enough input to fill the current element. + let valid_in_elem = nbuf % ELEM_SIZE; + let needed_in_elem = ELEM_SIZE - valid_in_elem; + + let src = msg.as_ptr(); + let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); + copy_nonoverlapping_small(src, dst, needed_in_elem); + + // Process buffer. + + // Using `nbuf / ELEM_SIZE + 1` rather than `(nbuf + needed_in_elem) / + // ELEM_SIZE` to show the compiler that this loop's upper bound is > 0. + // We know that is true, because last step ensured we have a full + // element in the buffer. + let last = nbuf / ELEM_SIZE + 1; + + for i in 0..last { + let elem = self.buf.get_unchecked(i).assume_init().to_le(); + self.state.v3 ^= elem; + Sip24Rounds::c_rounds(&mut self.state); + self.state.v0 ^= elem; + } + + // Process the remaining element-sized chunks of input. + let mut processed = needed_in_elem; + let input_left = length - processed; + let elems_left = input_left / ELEM_SIZE; + let extra_bytes_left = input_left % ELEM_SIZE; + + for _ in 0..elems_left { + let elem = (msg.as_ptr().add(processed) as *const u64).read_unaligned().to_le(); + self.state.v3 ^= elem; + Sip24Rounds::c_rounds(&mut self.state); + self.state.v0 ^= elem; + processed += ELEM_SIZE; + } + + // Copy remaining input into start of buffer. + let src = msg.as_ptr().add(processed); + let dst = self.buf.as_mut_ptr() as *mut u8; + copy_nonoverlapping_small(src, dst, extra_bytes_left); + + self.nbuf = extra_bytes_left; + self.processed += nbuf + processed; } #[inline] pub fn finish128(mut self) -> (u64, u64) { - let b: u64 = ((self.length as u64 & 0xff) << 56) | self.tail; + debug_assert!(self.nbuf < BUFFER_SIZE); - self.state.v3 ^= b; - Sip24Rounds::c_rounds(&mut self.state); - self.state.v0 ^= b; + // Process full elements in buffer. + let last = self.nbuf / ELEM_SIZE; - self.state.v2 ^= 0xee; - Sip24Rounds::d_rounds(&mut self.state); - let _0 = self.state.v0 ^ self.state.v1 ^ self.state.v2 ^ self.state.v3; + // Since we're consuming self, avoid updating members for a potential + // performance gain. + let mut state = self.state; + + for i in 0..last { + let elem = unsafe { self.buf.get_unchecked(i).assume_init().to_le() }; + state.v3 ^= elem; + Sip24Rounds::c_rounds(&mut state); + state.v0 ^= elem; + } + + // Get remaining partial element. + let elem = if self.nbuf % ELEM_SIZE != 0 { + unsafe { + // Ensure element is initialized by writing zero bytes. At most + // `ELEM_SIZE - 1` are required given the above check. 
It's safe + // to write this many because we have the spill and we maintain + // `self.nbuf` such that this write will start before the spill. + let dst = (self.buf.as_mut_ptr() as *mut u8).add(self.nbuf); + ptr::write_bytes(dst, 0, ELEM_SIZE - 1); + self.buf.get_unchecked(last).assume_init().to_le() + } + } else { + 0 + }; + + // Finalize the hash. + let length = self.processed + self.nbuf; + let b: u64 = ((length as u64 & 0xff) << 56) | elem; + + state.v3 ^= b; + Sip24Rounds::c_rounds(&mut state); + state.v0 ^= b; + + state.v2 ^= 0xee; + Sip24Rounds::d_rounds(&mut state); + let _0 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; + + state.v1 ^= 0xdd; + Sip24Rounds::d_rounds(&mut state); + let _1 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; - self.state.v1 ^= 0xdd; - Sip24Rounds::d_rounds(&mut self.state); - let _1 = self.state.v0 ^ self.state.v1 ^ self.state.v2 ^ self.state.v3; (_0, _1) } } @@ -230,92 +412,57 @@ impl SipHasher128 { impl Hasher for SipHasher128 { #[inline] fn write_u8(&mut self, i: u8) { - self.short_write(i, i as u64); + self.short_write(i); } #[inline] fn write_u16(&mut self, i: u16) { - self.short_write(i, i.to_le() as u64); + self.short_write(i); } #[inline] fn write_u32(&mut self, i: u32) { - self.short_write(i, i.to_le() as u64); + self.short_write(i); } #[inline] fn write_u64(&mut self, i: u64) { - self.short_write(i, i.to_le() as u64); + self.short_write(i); } #[inline] fn write_usize(&mut self, i: usize) { - self.short_write(i, i.to_le() as u64); + self.short_write(i); } #[inline] fn write_i8(&mut self, i: i8) { - self.short_write(i, i as u8 as u64); + self.short_write(i as u8); } #[inline] fn write_i16(&mut self, i: i16) { - self.short_write(i, (i as u16).to_le() as u64); + self.short_write(i as u16); } #[inline] fn write_i32(&mut self, i: i32) { - self.short_write(i, (i as u32).to_le() as u64); + self.short_write(i as u32); } #[inline] fn write_i64(&mut self, i: i64) { - self.short_write(i, (i as u64).to_le() as u64); + self.short_write(i as u64); } #[inline] fn write_isize(&mut self, i: isize) { - self.short_write(i, (i as usize).to_le() as u64); + self.short_write(i as usize); } #[inline] fn write(&mut self, msg: &[u8]) { - let length = msg.len(); - self.length += length; - - let mut needed = 0; - - if self.ntail != 0 { - needed = 8 - self.ntail; - self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << (8 * self.ntail); - if length < needed { - self.ntail += length; - return; - } else { - self.state.v3 ^= self.tail; - Sip24Rounds::c_rounds(&mut self.state); - self.state.v0 ^= self.tail; - self.ntail = 0; - } - } - - // Buffered tail is now flushed, process new input. - let len = length - needed; - let left = len & 0x7; - - let mut i = needed; - while i < len - left { - let mi = unsafe { load_int_le!(msg, i, u64) }; - - self.state.v3 ^= mi; - Sip24Rounds::c_rounds(&mut self.state); - self.state.v0 ^= mi; - - i += 8; - } - - self.tail = unsafe { u8to64_le(msg, i, left) }; - self.ntail = left; + self.slice_write(msg); } fn finish(&self) -> u64 { diff --git a/compiler/rustc_data_structures/src/sip128/tests.rs b/compiler/rustc_data_structures/src/sip128/tests.rs index 2e2274a7b775e..5fe967c4158fe 100644 --- a/compiler/rustc_data_structures/src/sip128/tests.rs +++ b/compiler/rustc_data_structures/src/sip128/tests.rs @@ -450,3 +450,48 @@ fn test_short_write_works() { assert_eq!(h1_hash, h2_hash); } + +macro_rules! 
test_fill_buffer { + ($type:ty, $write_method:ident) => {{ + // Test filling and overfilling the buffer from all possible offsets + // for a given integer type and its corresponding write method. + const SIZE: usize = std::mem::size_of::<$type>(); + let input = [42; BUFFER_SIZE]; + let x = 0x01234567_89ABCDEF_76543210_FEDCBA98_u128 as $type; + let x_bytes = &x.to_ne_bytes(); + + for i in 1..=SIZE { + let s = &input[..BUFFER_SIZE - i]; + + let mut h1 = SipHasher128::new_with_keys(7, 13); + h1.write(s); + h1.$write_method(x); + + let mut h2 = SipHasher128::new_with_keys(7, 13); + h2.write(s); + h2.write(x_bytes); + + let h1_hash = h1.finish128(); + let h2_hash = h2.finish128(); + + assert_eq!(h1_hash, h2_hash); + } + }}; +} + +#[test] +fn test_fill_buffer() { + test_fill_buffer!(u8, write_u8); + test_fill_buffer!(u16, write_u16); + test_fill_buffer!(u32, write_u32); + test_fill_buffer!(u64, write_u64); + test_fill_buffer!(u128, write_u128); + test_fill_buffer!(usize, write_usize); + + test_fill_buffer!(i8, write_i8); + test_fill_buffer!(i16, write_i16); + test_fill_buffer!(i32, write_i32); + test_fill_buffer!(i64, write_i64); + test_fill_buffer!(i128, write_i128); + test_fill_buffer!(isize, write_isize); +} diff --git a/compiler/rustc_error_codes/src/error_codes/E0660.md b/compiler/rustc_error_codes/src/error_codes/E0660.md index fccd1b96f60db..26d35f2620cb2 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0660.md +++ b/compiler/rustc_error_codes/src/error_codes/E0660.md @@ -9,4 +9,4 @@ llvm_asm!("nop" "nop"); Considering that this would be a long explanation, we instead recommend you take a look at the [`llvm_asm`] chapter of the Unstable book: -[llvm_asm]: https://doc.rust-lang.org/stable/unstable-book/library-features/llvm-asm.html +[`llvm_asm`]: https://doc.rust-lang.org/stable/unstable-book/library-features/llvm-asm.html diff --git a/compiler/rustc_error_codes/src/error_codes/E0661.md b/compiler/rustc_error_codes/src/error_codes/E0661.md index f1debee7a18f1..0b8ba7fbbedac 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0661.md +++ b/compiler/rustc_error_codes/src/error_codes/E0661.md @@ -10,4 +10,4 @@ llvm_asm!("nop" : "r"(a)); Considering that this would be a long explanation, we instead recommend you take a look at the [`llvm_asm`] chapter of the Unstable book: -[llvm_asm]: https://doc.rust-lang.org/stable/unstable-book/library-features/llvm-asm.html +[`llvm_asm`]: https://doc.rust-lang.org/stable/unstable-book/library-features/llvm-asm.html diff --git a/compiler/rustc_error_codes/src/error_codes/E0662.md b/compiler/rustc_error_codes/src/error_codes/E0662.md index d4765f078b0e6..8c1bab8d0410d 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0662.md +++ b/compiler/rustc_error_codes/src/error_codes/E0662.md @@ -13,4 +13,4 @@ llvm_asm!("xor %eax, %eax" Considering that this would be a long explanation, we instead recommend you take a look at the [`llvm_asm`] chapter of the Unstable book: -[llvm_asm]: https://doc.rust-lang.org/stable/unstable-book/library-features/llvm-asm.html +[`llvm_asm`]: https://doc.rust-lang.org/stable/unstable-book/library-features/llvm-asm.html diff --git a/compiler/rustc_error_codes/src/error_codes/E0663.md b/compiler/rustc_error_codes/src/error_codes/E0663.md index d5a85b275db63..53ffd3373a51c 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0663.md +++ b/compiler/rustc_error_codes/src/error_codes/E0663.md @@ -13,4 +13,4 @@ llvm_asm!("xor %eax, %eax" Considering that this would be a long explanation, we instead recommend you take 
a look at the [`llvm_asm`] chapter of the Unstable book: -[llvm_asm]: https://doc.rust-lang.org/stable/unstable-book/library-features/llvm-asm.html +[`llvm_asm`]: https://doc.rust-lang.org/stable/unstable-book/library-features/llvm-asm.html diff --git a/compiler/rustc_error_codes/src/error_codes/E0664.md b/compiler/rustc_error_codes/src/error_codes/E0664.md index ce9c9491df3d7..f8e72cd330a31 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0664.md +++ b/compiler/rustc_error_codes/src/error_codes/E0664.md @@ -13,4 +13,4 @@ llvm_asm!("mov $$0x200, %eax" Considering that this would be a long explanation, we instead recommend you take a look at the [`llvm_asm`] chapter of the Unstable book: -[llvm_asm]: https://doc.rust-lang.org/stable/unstable-book/library-features/llvm-asm.html +[`llvm_asm`]: https://doc.rust-lang.org/stable/unstable-book/library-features/llvm-asm.html diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index dd087ab91509b..3551b92967c47 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -3,10 +3,13 @@ use rustc_ast::attr::HasAttrs; use rustc_ast::mut_visit::*; use rustc_ast::ptr::P; +use rustc_ast::token::{DelimToken, Token, TokenKind}; +use rustc_ast::tokenstream::{DelimSpan, LazyTokenStreamInner, Spacing, TokenStream, TokenTree}; use rustc_ast::{self as ast, AttrItem, Attribute, MetaItem}; use rustc_attr as attr; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::map_in_place::MapInPlace; +use rustc_data_structures::sync::Lrc; use rustc_errors::{error_code, struct_span_err, Applicability, Handler}; use rustc_feature::{Feature, Features, State as FeatureState}; use rustc_feature::{ @@ -289,7 +292,37 @@ impl<'a> StripUnconfigured<'a> { expanded_attrs .into_iter() .flat_map(|(item, span)| { - let attr = attr::mk_attr_from_item(attr.style, item, span); + let orig_tokens = + attr.tokens.as_ref().unwrap_or_else(|| panic!("Missing tokens for {:?}", attr)); + + // We are taking an attribute of the form `#[cfg_attr(pred, attr)]` + // and producing an attribute of the form `#[attr]`. We + // have captured tokens for `attr` itself, but we need to + // synthesize tokens for the wrapper `#` and `[]`, which + // we do below. + + // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token + // for `attr` when we expand it to `#[attr]` + let pound_token = orig_tokens.into_token_stream().trees().next().unwrap(); + if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) { + panic!("Bad tokens for attribute {:?}", attr); + } + // We don't really have a good span to use for the syntheized `[]` + // in `#[attr]`, so just use the span of the `#` token. 
+ let bracket_group = TokenTree::Delimited( + DelimSpan::from_single(pound_token.span()), + DelimToken::Bracket, + item.tokens + .clone() + .unwrap_or_else(|| panic!("Missing tokens for {:?}", item)) + .into_token_stream(), + ); + + let mut attr = attr::mk_attr_from_item(attr.style, item, span); + attr.tokens = Some(Lrc::new(LazyTokenStreamInner::Ready(TokenStream::new(vec![ + (pound_token, Spacing::Alone), + (bracket_group, Spacing::Alone), + ])))); self.process_cfg_attr(attr) }) .collect() diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index 0b43225a242f7..3e5762ab992f4 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -1357,7 +1357,8 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { // we'll expand attributes on expressions separately if !stmt.is_expr() { let (attr, derives, after_derive) = if stmt.is_item() { - self.classify_item(&mut stmt) + // FIXME: Handle custom attributes on statements (#15701) + (None, vec![], false) } else { // ignore derives on non-item statements so it falls through // to the unused-attributes lint @@ -1785,6 +1786,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { span: at.span, id: at.id, style: at.style, + tokens: None, }; } else { noop_visit_attribute(at, self) diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index f0e6fe39a3c7f..791d2686cb5eb 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -288,7 +288,8 @@ fn generic_extension<'cx>( // Replace all the tokens for the corresponding positions in the macro, to maintain // proper positions in error reporting, while maintaining the macro_backtrace. if rhs_spans.len() == tts.len() { - tts = tts.map_enumerated(|i, mut tt| { + tts = tts.map_enumerated(|i, tt| { + let mut tt = tt.clone(); let mut sp = rhs_spans[i]; sp = sp.with_ctxt(tt.span().ctxt()); tt.set_span(sp); diff --git a/compiler/rustc_feature/src/active.rs b/compiler/rustc_feature/src/active.rs index 7fbd070a609b7..d111ce7abb4c0 100644 --- a/compiler/rustc_feature/src/active.rs +++ b/compiler/rustc_feature/src/active.rs @@ -210,6 +210,11 @@ declare_features! ( /// it is not on path for eventual stabilization). (active, no_niche, "1.42.0", None, None), + /// Allows using `#[rustc_allow_const_fn_unstable]`. + /// This is an attribute on `const fn` for the same + /// purpose as `#[allow_internal_unstable]`. + (active, rustc_allow_const_fn_unstable, "1.49.0", Some(69399), None), + // no-tracking-issue-end // ------------------------------------------------------------------------- @@ -598,6 +603,9 @@ declare_features! 
( /// Allows `#[instruction_set(_)]` attribute (active, isa_attribute, "1.48.0", Some(74727), None), + /// Allow anonymous constants from an inline `const` block + (active, inline_const, "1.49.0", Some(76001), None), + // ------------------------------------------------------------------------- // feature-group-end: actual feature gates // ------------------------------------------------------------------------- @@ -618,6 +626,8 @@ pub const INCOMPLETE_FEATURES: &[Symbol] = &[ sym::const_trait_bound_opt_out, sym::lazy_normalization_consts, sym::specialization, + sym::inline_const, + sym::repr128, ]; /// Some features are not allowed to be used together at the same time, if diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 527a49b05389a..f73363cbccc25 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -379,6 +379,10 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ allow_internal_unstable, AssumedUsed, template!(Word, List: "feat1, feat2, ..."), "allow_internal_unstable side-steps feature gating and stability checks", ), + gated!( + rustc_allow_const_fn_unstable, AssumedUsed, template!(Word, List: "feat1, feat2, ..."), + "rustc_allow_const_fn_unstable side-steps feature gating and stability checks" + ), gated!( allow_internal_unsafe, Normal, template!(Word), "allow_internal_unsafe side-steps the unsafe_code lint", @@ -598,7 +602,7 @@ pub fn is_builtin_attr_name(name: Symbol) -> bool { BUILTIN_ATTRIBUTE_MAP.get(&name).is_some() } -pub static BUILTIN_ATTRIBUTE_MAP: SyncLazy> = +pub static BUILTIN_ATTRIBUTE_MAP: SyncLazy> = SyncLazy::new(|| { let mut map = FxHashMap::default(); for attr in BUILTIN_ATTRIBUTES.iter() { diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index 52727e3a61949..fc15bb324c1d8 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -486,6 +486,8 @@ impl Generics<'hir> { #[derive(HashStable_Generic)] pub enum SyntheticTyParamKind { ImplTrait, + // Created by the `#[rustc_synthetic]` attribute. + FromAttr, } /// A where-clause in a definition. @@ -1099,11 +1101,11 @@ pub enum StmtKind<'hir> { Semi(&'hir Expr<'hir>), } -impl StmtKind<'hir> { - pub fn attrs(&self) -> &'hir [Attribute] { +impl<'hir> StmtKind<'hir> { + pub fn attrs(&self, get_item: impl FnOnce(ItemId) -> &'hir Item<'hir>) -> &'hir [Attribute] { match *self { StmtKind::Local(ref l) => &l.attrs, - StmtKind::Item(_) => &[], + StmtKind::Item(ref item_id) => &get_item(*item_id).attrs, StmtKind::Expr(ref e) | StmtKind::Semi(ref e) => &e.attrs, } } @@ -1361,6 +1363,7 @@ impl Expr<'_> { pub fn precedence(&self) -> ExprPrecedence { match self.kind { ExprKind::Box(_) => ExprPrecedence::Box, + ExprKind::ConstBlock(_) => ExprPrecedence::ConstBlock, ExprKind::Array(_) => ExprPrecedence::Array, ExprKind::Call(..) => ExprPrecedence::Call, ExprKind::MethodCall(..) => ExprPrecedence::MethodCall, @@ -1446,6 +1449,7 @@ impl Expr<'_> { | ExprKind::LlvmInlineAsm(..) | ExprKind::AssignOp(..) | ExprKind::Lit(_) + | ExprKind::ConstBlock(..) | ExprKind::Unary(..) | ExprKind::Box(..) | ExprKind::AddrOf(..) @@ -1501,6 +1505,8 @@ pub fn is_range_literal(expr: &Expr<'_>) -> bool { pub enum ExprKind<'hir> { /// A `box x` expression. Box(&'hir Expr<'hir>), + /// Allow anonymous constants from an inline `const` block + ConstBlock(AnonConst), /// An array (e.g., `[a, b, c, d]`). Array(&'hir [Expr<'hir>]), /// A function call. 
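For reference, the surface syntax behind the new `inline_const` gate and the `ExprKind::ConstBlock` variant; a minimal sketch that assumes a nightly toolchain with the gate enabled:

    // On the toolchains this patch targets, the gate below is required; the
    // block syntax itself is what `ExprKind::ConstBlock` represents in HIR.
    #![feature(inline_const)]

    fn main() {
        let x = const { 7 * 6 }; // an anonymous constant evaluated at compile time
        assert_eq!(x, 42);
    }
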
@@ -2377,15 +2383,6 @@ impl VisibilityKind<'_> { VisibilityKind::Crate(..) | VisibilityKind::Restricted { .. } => true, } } - - pub fn descr(&self) -> &'static str { - match *self { - VisibilityKind::Public => "public", - VisibilityKind::Inherited => "private", - VisibilityKind::Crate(..) => "crate-visible", - VisibilityKind::Restricted { .. } => "restricted", - } - } } #[derive(Debug, HashStable_Generic)] diff --git a/compiler/rustc_hir/src/intravisit.rs b/compiler/rustc_hir/src/intravisit.rs index 820d664c07d92..35615af0fc7df 100644 --- a/compiler/rustc_hir/src/intravisit.rs +++ b/compiler/rustc_hir/src/intravisit.rs @@ -1068,6 +1068,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr<'v>) ExprKind::Array(subexpressions) => { walk_list!(visitor, visit_expr, subexpressions); } + ExprKind::ConstBlock(ref anon_const) => visitor.visit_anon_const(anon_const), ExprKind::Repeat(ref element, ref count) => { visitor.visit_expr(element); visitor.visit_anon_const(count) diff --git a/compiler/rustc_hir/src/lang_items.rs b/compiler/rustc_hir/src/lang_items.rs index 5e4c03bec83dc..3e4eb9eafd7f9 100644 --- a/compiler/rustc_hir/src/lang_items.rs +++ b/compiler/rustc_hir/src/lang_items.rs @@ -263,6 +263,7 @@ language_item_table! { // is required to define it somewhere. Additionally, there are restrictions on crates that use // a weak lang item, but do not have it defined. Panic, sym::panic, panic_fn, Target::Fn; + PanicStr, sym::panic_str, panic_str, Target::Fn; PanicBoundsCheck, sym::panic_bounds_check, panic_bounds_check_fn, Target::Fn; PanicInfo, sym::panic_info, panic_info, Target::Struct; PanicLocation, sym::panic_location, panic_location, Target::Struct; diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs index 57a38adc16913..1cd4ddad5783a 100644 --- a/compiler/rustc_hir_pretty/src/lib.rs +++ b/compiler/rustc_hir_pretty/src/lib.rs @@ -1135,6 +1135,13 @@ impl<'a> State<'a> { self.end() } + fn print_expr_anon_const(&mut self, anon_const: &hir::AnonConst) { + self.ibox(INDENT_UNIT); + self.s.word_space("const"); + self.print_anon_const(anon_const); + self.end() + } + fn print_expr_repeat(&mut self, element: &hir::Expr<'_>, count: &hir::AnonConst) { self.ibox(INDENT_UNIT); self.s.word("["); @@ -1287,6 +1294,9 @@ impl<'a> State<'a> { hir::ExprKind::Array(ref exprs) => { self.print_expr_vec(exprs); } + hir::ExprKind::ConstBlock(ref anon_const) => { + self.print_expr_anon_const(anon_const); + } hir::ExprKind::Repeat(ref element, ref count) => { self.print_expr_repeat(&element, count); } diff --git a/compiler/rustc_infer/src/infer/error_reporting/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/mod.rs index 795c5a64d26b7..3a0ec6327c186 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/mod.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/mod.rs @@ -619,6 +619,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { scrut_hir_id, opt_suggest_box_span, arm_span, + scrut_span, .. }) => match source { hir::MatchSource::IfLetDesugar { .. } => { @@ -664,18 +665,29 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { Some(ty::error::ExpectedFound { expected, .. 
}) => expected, _ => last_ty, }); - let msg = "`match` arms have incompatible types"; - err.span_label(cause.span, msg); + let source_map = self.tcx.sess.source_map(); + let mut any_multiline_arm = source_map.is_multiline(arm_span); if prior_arms.len() <= 4 { for sp in prior_arms { + any_multiline_arm |= source_map.is_multiline(*sp); err.span_label(*sp, format!("this is found to be of type `{}`", t)); } } else if let Some(sp) = prior_arms.last() { + any_multiline_arm |= source_map.is_multiline(*sp); err.span_label( *sp, format!("this and all prior arms are found to be of type `{}`", t), ); } + let outer_error_span = if any_multiline_arm { + // Cover just `match` and the scrutinee expression, not + // the entire match body, to reduce diagram noise. + cause.span.shrink_to_lo().to(scrut_span) + } else { + cause.span + }; + let msg = "`match` arms have incompatible types"; + err.span_label(outer_error_span, msg); if let Some(sp) = semi_span { err.span_suggestion_short( sp, diff --git a/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs b/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs index 2f3089f1a92c1..21023a06bb2d9 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs @@ -91,17 +91,6 @@ impl<'a, 'tcx> Visitor<'tcx> for FindHirNodeVisitor<'a, 'tcx> { if let (None, Some(ty)) = (self.found_local_pattern, self.node_ty_contains_target(local.hir_id)) { - // FIXME: There's a trade-off here - we can either check that our target span - // is contained in `local.span` or not. If we choose to check containment - // we can avoid some spurious suggestions (see #72690), but we lose - // the ability to report on things like: - // - // ``` - // let x = vec![]; - // ``` - // - // because the target span will be in the macro expansion of `vec![]`. - // At present we choose not to check containment. 
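The removed comment refers to inference failures whose span lies inside a macro expansion; a minimal standalone illustration of that case (the annotation is what the diagnostic would otherwise ask for):

    fn main() {
        // Without the `Vec<i32>` annotation, this line alone produces
        // "type annotations needed", and the span of interest sits inside the
        // `vec![]` expansion rather than in the user-written pattern.
        let x: Vec<i32> = vec![];
        assert!(x.is_empty());
    }
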
self.found_local_pattern = Some(&*local.pat); self.found_node_ty = Some(ty); } @@ -113,10 +102,8 @@ impl<'a, 'tcx> Visitor<'tcx> for FindHirNodeVisitor<'a, 'tcx> { if let (None, Some(ty)) = (self.found_arg_pattern, self.node_ty_contains_target(param.hir_id)) { - if self.target_span.contains(param.pat.span) { - self.found_arg_pattern = Some(&*param.pat); - self.found_node_ty = Some(ty); - } + self.found_arg_pattern = Some(&*param.pat); + self.found_node_ty = Some(ty); } } intravisit::walk_body(self, body); diff --git a/compiler/rustc_infer/src/infer/free_regions.rs b/compiler/rustc_infer/src/infer/free_regions.rs index ffe5fb172bea3..32f73237dd410 100644 --- a/compiler/rustc_infer/src/infer/free_regions.rs +++ b/compiler/rustc_infer/src/infer/free_regions.rs @@ -157,7 +157,7 @@ impl<'tcx> FreeRegionRelations<'tcx> for FreeRegionMap<'tcx> { impl<'a, 'tcx> Lift<'tcx> for FreeRegionMap<'a> { type Lifted = FreeRegionMap<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option> { - self.relation.maybe_map(|&fr| tcx.lift(&fr)).map(|relation| FreeRegionMap { relation }) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option> { + self.relation.maybe_map(|&fr| tcx.lift(fr)).map(|relation| FreeRegionMap { relation }) } } diff --git a/compiler/rustc_infer/src/infer/nll_relate/mod.rs b/compiler/rustc_infer/src/infer/nll_relate/mod.rs index 2fb9f638e3608..abdd6edea9024 100644 --- a/compiler/rustc_infer/src/infer/nll_relate/mod.rs +++ b/compiler/rustc_infer/src/infer/nll_relate/mod.rs @@ -636,7 +636,7 @@ where if let (Some(a), Some(b)) = (a.no_bound_vars(), b.no_bound_vars()) { // Fast path for the common case. self.relate(a, b)?; - return Ok(ty::Binder::bind(a)); + return Ok(ty::Binder::dummy(a)); } if self.ambient_covariance() { diff --git a/compiler/rustc_infer/src/traits/error_reporting/mod.rs b/compiler/rustc_infer/src/traits/error_reporting/mod.rs index f873358ff9fdd..835f75ec8ef06 100644 --- a/compiler/rustc_infer/src/traits/error_reporting/mod.rs +++ b/compiler/rustc_infer/src/traits/error_reporting/mod.rs @@ -2,12 +2,12 @@ use super::ObjectSafetyViolation; use crate::infer::InferCtxt; use rustc_data_structures::fx::FxHashSet; -use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder}; +use rustc_errors::{struct_span_err, DiagnosticBuilder}; use rustc_hir as hir; use rustc_hir::def_id::DefId; use rustc_middle::ty::TyCtxt; use rustc_span::symbol::Symbol; -use rustc_span::Span; +use rustc_span::{MultiSpan, Span}; use std::fmt; impl<'a, 'tcx> InferCtxt<'a, 'tcx> { @@ -54,10 +54,11 @@ pub fn report_object_safety_error( "the trait `{}` cannot be made into an object", trait_str ); - err.span_label(span, format!("the trait `{}` cannot be made into an object", trait_str)); + err.span_label(span, format!("`{}` cannot be made into an object", trait_str)); let mut reported_violations = FxHashSet::default(); - let mut had_span_label = false; + let mut multi_span = vec![]; + let mut messages = vec![]; for violation in violations { if let ObjectSafetyViolation::SizedSelf(sp) = &violation { if !sp.is_empty() { @@ -71,31 +72,37 @@ pub fn report_object_safety_error( let msg = if trait_span.is_none() || spans.is_empty() { format!("the trait cannot be made into an object because {}", violation.error_msg()) } else { - had_span_label = true; format!("...because {}", violation.error_msg()) }; if spans.is_empty() { err.note(&msg); } else { for span in spans { - err.span_label(span, &msg); + multi_span.push(span); + messages.push(msg.clone()); } } - match (trait_span, violation.solution()) { - (Some(_), 
Some((note, None))) => { - err.help(¬e); - } - (Some(_), Some((note, Some((sugg, span))))) => { - err.span_suggestion(span, ¬e, sugg, Applicability::MachineApplicable); - } + if trait_span.is_some() { // Only provide the help if its a local trait, otherwise it's not actionable. - _ => {} + violation.solution(&mut err); } } } - if let (Some(trait_span), true) = (trait_span, had_span_label) { - err.span_label(trait_span, "this trait cannot be made into an object..."); + let has_multi_span = !multi_span.is_empty(); + let mut note_span = MultiSpan::from_spans(multi_span.clone()); + if let (Some(trait_span), true) = (trait_span, has_multi_span) { + note_span + .push_span_label(trait_span, "this trait cannot be made into an object...".to_string()); } + for (span, msg) in multi_span.into_iter().zip(messages.into_iter()) { + note_span.push_span_label(span, msg); + } + err.span_note( + note_span, + "for a trait to be \"object safe\" it needs to allow building a vtable to allow the call \ + to be resolvable dynamically; for more information visit \ + ", + ); if tcx.sess.trait_methods_not_found.borrow().contains(&span) { // Avoid emitting error caused by non-existing method (#58734) diff --git a/compiler/rustc_infer/src/traits/util.rs b/compiler/rustc_infer/src/traits/util.rs index 1b7269706a758..f6ef98407887e 100644 --- a/compiler/rustc_infer/src/traits/util.rs +++ b/compiler/rustc_infer/src/traits/util.rs @@ -126,14 +126,15 @@ impl Elaborator<'tcx> { fn elaborate(&mut self, obligation: &PredicateObligation<'tcx>) { let tcx = self.visited.tcx; - match obligation.predicate.skip_binders() { + let bound_predicate = obligation.predicate.bound_atom(); + match bound_predicate.skip_binder() { ty::PredicateAtom::Trait(data, _) => { // Get predicates declared on the trait. let predicates = tcx.super_predicates_of(data.def_id()); let obligations = predicates.predicates.iter().map(|&(pred, _)| { predicate_obligation( - pred.subst_supertrait(tcx, &ty::Binder::bind(data.trait_ref)), + pred.subst_supertrait(tcx, &bound_predicate.rebind(data.trait_ref)), obligation.param_env, obligation.cause.clone(), ) diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index d9c24cc399482..bbb47a6e8071e 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -2,8 +2,9 @@ use crate::interface::{Compiler, Result}; use crate::proc_macro_decls; use crate::util; -use rustc_ast::mut_visit::MutVisitor; -use rustc_ast::{self as ast, visit}; +use rustc_ast::mut_visit::{self, MutVisitor}; +use rustc_ast::ptr::P; +use rustc_ast::{self as ast, token, visit}; use rustc_codegen_ssa::back::link::emit_metadata; use rustc_codegen_ssa::traits::CodegenBackend; use rustc_data_structures::sync::{par_iter, Lrc, OnceCell, ParallelIterator, WorkerLocal}; @@ -36,6 +37,7 @@ use rustc_span::symbol::Symbol; use rustc_span::{FileName, RealFileName}; use rustc_trait_selection::traits; use rustc_typeck as typeck; +use smallvec::SmallVec; use tracing::{info, warn}; use rustc_serialize::json; @@ -50,6 +52,64 @@ use std::path::PathBuf; use std::rc::Rc; use std::{env, fs, iter, mem}; +/// Remove alls `LazyTokenStreams` from an AST struct +/// Normally, this is done during AST lowering. However, +/// printing the AST JSON requires us to serialize +/// the entire AST, and we don't want to serialize +/// a `LazyTokenStream`. 
+struct TokenStripper; +impl mut_visit::MutVisitor for TokenStripper { + fn flat_map_item(&mut self, mut i: P) -> SmallVec<[P; 1]> { + i.tokens = None; + mut_visit::noop_flat_map_item(i, self) + } + fn visit_block(&mut self, b: &mut P) { + b.tokens = None; + mut_visit::noop_visit_block(b, self); + } + fn flat_map_stmt(&mut self, mut stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> { + stmt.tokens = None; + mut_visit::noop_flat_map_stmt(stmt, self) + } + fn visit_pat(&mut self, p: &mut P) { + p.tokens = None; + mut_visit::noop_visit_pat(p, self); + } + fn visit_ty(&mut self, ty: &mut P) { + ty.tokens = None; + mut_visit::noop_visit_ty(ty, self); + } + fn visit_attribute(&mut self, attr: &mut ast::Attribute) { + attr.tokens = None; + if let ast::AttrKind::Normal(ast::AttrItem { tokens, .. }) = &mut attr.kind { + *tokens = None; + } + mut_visit::noop_visit_attribute(attr, self); + } + + fn visit_interpolated(&mut self, nt: &mut token::Nonterminal) { + if let token::Nonterminal::NtMeta(meta) = nt { + meta.tokens = None; + } + // Handles all of the other cases + mut_visit::noop_visit_interpolated(nt, self); + } + + fn visit_path(&mut self, p: &mut ast::Path) { + p.tokens = None; + mut_visit::noop_visit_path(p, self); + } + fn visit_vis(&mut self, vis: &mut ast::Visibility) { + vis.tokens = None; + mut_visit::noop_visit_vis(vis, self); + } + fn visit_expr(&mut self, e: &mut P) { + e.tokens = None; + mut_visit::noop_visit_expr(e, self); + } + fn visit_mac(&mut self, _mac: &mut ast::MacCall) {} +} + pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> { let krate = sess.time("parse_crate", || match input { Input::File(file) => parse_crate_from_file(file, &sess.parse_sess), @@ -59,6 +119,10 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> { })?; if sess.opts.debugging_opts.ast_json_noexpand { + // Set any `token` fields to `None` before + // we display the AST. + let mut krate = krate.clone(); + TokenStripper.visit_crate(&mut krate); println!("{}", json::as_json(&krate)); } @@ -379,6 +443,10 @@ fn configure_and_expand_inner<'a>( } if sess.opts.debugging_opts.ast_json { + // Set any `token` fields to `None` before + // we display the AST. + let mut krate = krate.clone(); + TokenStripper.visit_crate(&mut krate); println!("{}", json::as_json(&krate)); } diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index abd899e8db4d3..e5f66611d0f9b 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -968,7 +968,7 @@ fn warn_if_doc(cx: &EarlyContext<'_>, node_span: Span, node_kind: &str, attrs: & while let Some(attr) = attrs.next() { if attr.is_doc_comment() { sugared_span = - Some(sugared_span.map_or_else(|| attr.span, |span| span.with_hi(attr.span.hi()))); + Some(sugared_span.map_or(attr.span, |span| span.with_hi(attr.span.hi()))); } if attrs.peek().map(|next_attr| next_attr.is_doc_comment()).unwrap_or_default() { @@ -994,7 +994,8 @@ impl EarlyLintPass for UnusedDocComment { fn check_stmt(&mut self, cx: &EarlyContext<'_>, stmt: &ast::Stmt) { let kind = match stmt.kind { ast::StmtKind::Local(..) => "statements", - ast::StmtKind::Item(..) => "inner items", + // Disabled pending discussion in #78306 + ast::StmtKind::Item(..) => return, // expressions will be reported by `check_expr`. 
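A small sketch of what this lint arm still reports (doc comments on `let` statements) versus what it now skips (statement-level items); the code below is purely illustrative:

    fn main() {
        /// A doc comment on a `let` statement is not used by rustdoc; the lint
        /// still reports these as unused doc comments on statements.
        let answer = 41;

        /// Doc comments on statement-level items such as this nested function
        /// are no longer reported by this arm (pending the linked discussion).
        fn helper() -> i32 { 1 }

        assert_eq!(answer + helper(), 42);
    }
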
ast::StmtKind::Empty | ast::StmtKind::Semi(_) @@ -2288,12 +2289,20 @@ impl EarlyLintPass for IncompleteFeatures { n, n, )); } + if HAS_MIN_FEATURES.contains(&name) { + builder.help(&format!( + "consider using `min_{}` instead, which is more stable and complete", + name, + )); + } builder.emit(); }) }); } } +const HAS_MIN_FEATURES: &[Symbol] = &[sym::const_generics, sym::specialization]; + declare_lint! { /// The `invalid_value` lint detects creating a value that is not valid, /// such as a NULL reference. diff --git a/compiler/rustc_lint/src/early.rs b/compiler/rustc_lint/src/early.rs index 998676d44d23c..9aeeb6277924e 100644 --- a/compiler/rustc_lint/src/early.rs +++ b/compiler/rustc_lint/src/early.rs @@ -18,6 +18,7 @@ use crate::context::{EarlyContext, LintContext, LintStore}; use crate::passes::{EarlyLintPass, EarlyLintPassObject}; use rustc_ast as ast; use rustc_ast::visit as ast_visit; +use rustc_attr::HasAttrs; use rustc_session::lint::{BufferedEarlyLint, LintBuffer, LintPass}; use rustc_session::Session; use rustc_span::symbol::Ident; @@ -119,8 +120,22 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T> } fn visit_stmt(&mut self, s: &'a ast::Stmt) { - run_early_pass!(self, check_stmt, s); - self.check_id(s.id); + // Add the statement's lint attributes to our + // current state when checking the statement itself. + // This allows us to handle attributes like + // `#[allow(unused_doc_comments)]`, which apply to + // sibling attributes on the same target + // + // Note that statements get their attributes from + // the AST struct that they wrap (e.g. an item) + self.with_lint_attrs(s.id, s.attrs(), |cx| { + run_early_pass!(cx, check_stmt, s); + cx.check_id(s.id); + }); + // The visitor for the AST struct wrapped + // by the statement (e.g. 
`Item`) will call + // `with_lint_attrs`, so do this walk + // outside of the above `with_lint_attrs` call ast_visit::walk_stmt(self, s); } @@ -195,6 +210,11 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T> run_early_pass!(self, check_expr_post, e); } + fn visit_generic_arg(&mut self, arg: &'a ast::GenericArg) { + run_early_pass!(self, check_generic_arg, arg); + ast_visit::walk_generic_arg(self, arg); + } + fn visit_generic_param(&mut self, param: &'a ast::GenericParam) { run_early_pass!(self, check_generic_param, param); ast_visit::walk_generic_param(self, param); diff --git a/compiler/rustc_lint/src/late.rs b/compiler/rustc_lint/src/late.rs index a6c04fb0b4c84..015e109871182 100644 --- a/compiler/rustc_lint/src/late.rs +++ b/compiler/rustc_lint/src/late.rs @@ -174,12 +174,13 @@ impl<'tcx, T: LateLintPass<'tcx>> hir_visit::Visitor<'tcx> for LateContextAndPas } fn visit_stmt(&mut self, s: &'tcx hir::Stmt<'tcx>) { - // statement attributes are actually just attributes on one of - // - item - // - local - // - expression - // so we keep track of lint levels there - lint_callback!(self, check_stmt, s); + let get_item = |id: hir::ItemId| self.context.tcx.hir().item(id.id); + let attrs = &s.kind.attrs(get_item); + // See `EarlyContextAndPass::visit_stmt` for an explanation + // of why we call `walk_stmt` outside of `with_lint_attrs` + self.with_lint_attrs(s.hir_id, attrs, |cx| { + lint_callback!(cx, check_stmt, s); + }); hir_visit::walk_stmt(self, s); } diff --git a/compiler/rustc_lint/src/levels.rs b/compiler/rustc_lint/src/levels.rs index 222333a578b7d..f36f598ade2de 100644 --- a/compiler/rustc_lint/src/levels.rs +++ b/compiler/rustc_lint/src/levels.rs @@ -562,6 +562,13 @@ impl<'tcx> intravisit::Visitor<'tcx> for LintLevelMapBuilder<'_, 'tcx> { }) } + fn visit_stmt(&mut self, e: &'tcx hir::Stmt<'tcx>) { + // We will call `with_lint_attrs` when we walk + // the `StmtKind`. The outer statement itself doesn't + // define the lint levels. + intravisit::walk_stmt(self, e); + } + fn visit_expr(&mut self, e: &'tcx hir::Expr<'tcx>) { self.with_lint_attrs(e.hir_id, &e.attrs, |builder| { intravisit::walk_expr(builder, e); diff --git a/compiler/rustc_lint/src/passes.rs b/compiler/rustc_lint/src/passes.rs index 159286c13a4ce..828f283d2a95a 100644 --- a/compiler/rustc_lint/src/passes.rs +++ b/compiler/rustc_lint/src/passes.rs @@ -33,6 +33,7 @@ macro_rules! late_lint_methods { fn check_expr(a: &$hir hir::Expr<$hir>); fn check_expr_post(a: &$hir hir::Expr<$hir>); fn check_ty(a: &$hir hir::Ty<$hir>); + fn check_generic_arg(a: &$hir hir::GenericArg<$hir>); fn check_generic_param(a: &$hir hir::GenericParam<$hir>); fn check_generics(a: &$hir hir::Generics<$hir>); fn check_where_predicate(a: &$hir hir::WherePredicate<$hir>); @@ -176,6 +177,7 @@ macro_rules! early_lint_methods { fn check_expr(a: &ast::Expr); fn check_expr_post(a: &ast::Expr); fn check_ty(a: &ast::Ty); + fn check_generic_arg(a: &ast::GenericArg); fn check_generic_param(a: &ast::GenericParam); fn check_generics(a: &ast::Generics); fn check_where_predicate(a: &ast::WherePredicate); diff --git a/compiler/rustc_lint/src/types.rs b/compiler/rustc_lint/src/types.rs index af14f28ff9f46..b502bd7f7a1bd 100644 --- a/compiler/rustc_lint/src/types.rs +++ b/compiler/rustc_lint/src/types.rs @@ -145,9 +145,9 @@ fn lint_overflowing_range_endpoint<'tcx>( // We need to preserve the literal's suffix, // as it may determine typing information. 
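The suffix is preserved so that the lint's suggested rewrite stays well-typed; a minimal sketch of the surface behaviour (the literal values are illustrative):

    fn main() {
        // `for _ in 0u8..256` is rejected with "range endpoint is out of range
        // for `u8`", and the suggested fix keeps the literal suffix: `0u8..=255`.
        let mut count = 0u32;
        for _ in 0u8..=255 {
            count += 1;
        }
        assert_eq!(count, 256);
    }
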
let suffix = match lit.node { - LitKind::Int(_, LitIntType::Signed(s)) => s.name_str().to_string(), - LitKind::Int(_, LitIntType::Unsigned(s)) => s.name_str().to_string(), - LitKind::Int(_, LitIntType::Unsuffixed) => "".to_string(), + LitKind::Int(_, LitIntType::Signed(s)) => s.name_str(), + LitKind::Int(_, LitIntType::Unsigned(s)) => s.name_str(), + LitKind::Int(_, LitIntType::Unsuffixed) => "", _ => bug!(), }; let suggestion = format!("{}..={}{}", start, lit_val - 1, suffix); @@ -170,24 +170,25 @@ fn lint_overflowing_range_endpoint<'tcx>( // warnings are consistent between 32- and 64-bit platforms. fn int_ty_range(int_ty: ast::IntTy) -> (i128, i128) { match int_ty { - ast::IntTy::Isize => (i64::MIN as i128, i64::MAX as i128), - ast::IntTy::I8 => (i8::MIN as i64 as i128, i8::MAX as i128), - ast::IntTy::I16 => (i16::MIN as i64 as i128, i16::MAX as i128), - ast::IntTy::I32 => (i32::MIN as i64 as i128, i32::MAX as i128), - ast::IntTy::I64 => (i64::MIN as i128, i64::MAX as i128), - ast::IntTy::I128 => (i128::MIN as i128, i128::MAX), + ast::IntTy::Isize => (i64::MIN.into(), i64::MAX.into()), + ast::IntTy::I8 => (i8::MIN.into(), i8::MAX.into()), + ast::IntTy::I16 => (i16::MIN.into(), i16::MAX.into()), + ast::IntTy::I32 => (i32::MIN.into(), i32::MAX.into()), + ast::IntTy::I64 => (i64::MIN.into(), i64::MAX.into()), + ast::IntTy::I128 => (i128::MIN, i128::MAX), } } fn uint_ty_range(uint_ty: ast::UintTy) -> (u128, u128) { - match uint_ty { - ast::UintTy::Usize => (u64::MIN as u128, u64::MAX as u128), - ast::UintTy::U8 => (u8::MIN as u128, u8::MAX as u128), - ast::UintTy::U16 => (u16::MIN as u128, u16::MAX as u128), - ast::UintTy::U32 => (u32::MIN as u128, u32::MAX as u128), - ast::UintTy::U64 => (u64::MIN as u128, u64::MAX as u128), - ast::UintTy::U128 => (u128::MIN, u128::MAX), - } + let max = match uint_ty { + ast::UintTy::Usize => u64::MAX.into(), + ast::UintTy::U8 => u8::MAX.into(), + ast::UintTy::U16 => u16::MAX.into(), + ast::UintTy::U32 => u32::MAX.into(), + ast::UintTy::U64 => u64::MAX.into(), + ast::UintTy::U128 => u128::MAX, + }; + (0, max) } fn get_bin_hex_repr(cx: &LateContext<'_>, lit: &hir::Lit) -> Option { diff --git a/compiler/rustc_lint/src/unused.rs b/compiler/rustc_lint/src/unused.rs index 3abd9a6325d6e..2409069031d93 100644 --- a/compiler/rustc_lint/src/unused.rs +++ b/compiler/rustc_lint/src/unused.rs @@ -751,13 +751,20 @@ impl UnusedDelimLint for UnusedParens { if !Self::is_expr_delims_necessary(inner, followed_by_block) && value.attrs.is_empty() && !value.span.from_expansion() + && (ctx != UnusedDelimsCtx::LetScrutineeExpr + || match inner.kind { + ast::ExprKind::Binary( + rustc_span::source_map::Spanned { node, .. }, + _, + _, + ) if node.lazy() => false, + _ => true, + }) { self.emit_unused_delims_expr(cx, value, ctx, left_pos, right_pos) } } ast::ExprKind::Let(_, ref expr) => { - // FIXME(#60336): Properly handle `let true = (false && true)` - // actually needing the parenthesis. 
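The removed FIXME is resolved by the `LetScrutineeExpr` special case above; a minimal sketch of the kind of code it keeps from being flagged:

    fn main() {
        // After this change the parentheses below are no longer reported as
        // redundant: in `let`-scrutinee position, parentheses around a lazy
        // boolean operator are kept rather than suggested for removal.
        if let true = (false && true) {
            unreachable!();
        } else {
            println!("scrutinee evaluated to false");
        }
    }
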
self.check_unused_delims_expr( cx, expr, @@ -839,10 +846,6 @@ impl EarlyLintPass for UnusedParens { } } - fn check_anon_const(&mut self, cx: &EarlyContext<'_>, c: &ast::AnonConst) { - self.check_unused_delims_expr(cx, &c.value, UnusedDelimsCtx::AnonConst, false, None, None); - } - fn check_stmt(&mut self, cx: &EarlyContext<'_>, s: &ast::Stmt) { if let StmtKind::Local(ref local) = s.kind { self.check_unused_parens_pat(cx, &local.pat, false, false); @@ -965,13 +968,6 @@ impl UnusedDelimLint for UnusedBraces { if !Self::is_expr_delims_necessary(expr, followed_by_block) && (ctx != UnusedDelimsCtx::AnonConst || matches!(expr.kind, ast::ExprKind::Lit(_))) - // array length expressions are checked during `check_anon_const` and `check_ty`, - // once as `ArrayLenExpr` and once as `AnonConst`. - // - // As we do not want to lint this twice, we do not emit an error for - // `ArrayLenExpr` if `AnonConst` would do the same. - && (ctx != UnusedDelimsCtx::ArrayLenExpr - || !matches!(expr.kind, ast::ExprKind::Lit(_))) && !cx.sess().source_map().is_multiline(value.span) && value.attrs.is_empty() && !value.span.from_expansion() @@ -999,21 +995,54 @@ impl UnusedDelimLint for UnusedBraces { } impl EarlyLintPass for UnusedBraces { + fn check_stmt(&mut self, cx: &EarlyContext<'_>, s: &ast::Stmt) { + ::check_stmt(self, cx, s) + } + fn check_expr(&mut self, cx: &EarlyContext<'_>, e: &ast::Expr) { - ::check_expr(self, cx, e) + ::check_expr(self, cx, e); + + if let ExprKind::Repeat(_, ref anon_const) = e.kind { + self.check_unused_delims_expr( + cx, + &anon_const.value, + UnusedDelimsCtx::AnonConst, + false, + None, + None, + ); + } } - fn check_anon_const(&mut self, cx: &EarlyContext<'_>, c: &ast::AnonConst) { - self.check_unused_delims_expr(cx, &c.value, UnusedDelimsCtx::AnonConst, false, None, None); + fn check_generic_arg(&mut self, cx: &EarlyContext<'_>, arg: &ast::GenericArg) { + if let ast::GenericArg::Const(ct) = arg { + self.check_unused_delims_expr( + cx, + &ct.value, + UnusedDelimsCtx::AnonConst, + false, + None, + None, + ); + } } - fn check_stmt(&mut self, cx: &EarlyContext<'_>, s: &ast::Stmt) { - ::check_stmt(self, cx, s) + fn check_variant(&mut self, cx: &EarlyContext<'_>, v: &ast::Variant) { + if let Some(anon_const) = &v.disr_expr { + self.check_unused_delims_expr( + cx, + &anon_const.value, + UnusedDelimsCtx::AnonConst, + false, + None, + None, + ); + } } fn check_ty(&mut self, cx: &EarlyContext<'_>, ty: &ast::Ty) { - if let &ast::TyKind::Paren(ref r) = &ty.kind { - if let ast::TyKind::Array(_, ref len) = r.kind { + match ty.kind { + ast::TyKind::Array(_, ref len) => { self.check_unused_delims_expr( cx, &len.value, @@ -1023,6 +1052,19 @@ impl EarlyLintPass for UnusedBraces { None, ); } + + ast::TyKind::Typeof(ref anon_const) => { + self.check_unused_delims_expr( + cx, + &anon_const.value, + UnusedDelimsCtx::AnonConst, + false, + None, + None, + ); + } + + _ => {} } } diff --git a/compiler/rustc_macros/src/lift.rs b/compiler/rustc_macros/src/lift.rs index 4bf4ce00a4d38..ad7ac74041749 100644 --- a/compiler/rustc_macros/src/lift.rs +++ b/compiler/rustc_macros/src/lift.rs @@ -3,6 +3,7 @@ use syn::{self, parse_quote}; pub fn lift_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream { s.add_bounds(synstructure::AddBounds::Generics); + s.bind_with(|_| synstructure::BindStyle::Move); let tcx: syn::Lifetime = parse_quote!('tcx); let newtcx: syn::GenericParam = parse_quote!('__lifted); @@ -43,8 +44,8 @@ pub fn lift_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStre quote! 
{ type Lifted = #lifted; - fn lift_to_tcx(&self, __tcx: ::rustc_middle::ty::TyCtxt<'__lifted>) -> Option<#lifted> { - Some(match *self { #body }) + fn lift_to_tcx(self, __tcx: ::rustc_middle::ty::TyCtxt<'__lifted>) -> Option<#lifted> { + Some(match self { #body }) } }, ) diff --git a/compiler/rustc_macros/src/query.rs b/compiler/rustc_macros/src/query.rs index e7c054653accb..fd85919636949 100644 --- a/compiler/rustc_macros/src/query.rs +++ b/compiler/rustc_macros/src/query.rs @@ -190,7 +190,11 @@ impl Parse for List { } /// A named group containing queries. +/// +/// For now, the name is not used any more, but the capability remains interesting for future +/// developments of the query system. struct Group { + #[allow(unused)] name: Ident, queries: List, } @@ -417,12 +421,9 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream { let mut query_stream = quote! {}; let mut query_description_stream = quote! {}; let mut dep_node_def_stream = quote! {}; - let mut dep_node_force_stream = quote! {}; - let mut try_load_from_on_disk_cache_stream = quote! {}; let mut cached_queries = quote! {}; for group in groups.0 { - let mut group_stream = quote! {}; for mut query in group.queries.0 { let modifiers = process_modifiers(&mut query); let name = &query.name; @@ -437,22 +438,6 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream { cached_queries.extend(quote! { #name, }); - - try_load_from_on_disk_cache_stream.extend(quote! { - ::rustc_middle::dep_graph::DepKind::#name => { - if <#arg as DepNodeParams>>::can_reconstruct_query_key() { - debug_assert!($tcx.dep_graph - .node_color($dep_node) - .map(|c| c.is_green()) - .unwrap_or(false)); - - let key = <#arg as DepNodeParams>>::recover($tcx, $dep_node).unwrap(); - if queries::#name::cache_on_disk($tcx, &key, None) { - let _ = $tcx.#name(key); - } - } - } - }); } let mut attributes = Vec::new(); @@ -485,9 +470,9 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream { let attribute_stream = quote! {#(#attributes),*}; let doc_comments = query.doc_comments.iter(); // Add the query to the group - group_stream.extend(quote! { + query_stream.extend(quote! { #(#doc_comments)* - [#attribute_stream] fn #name: #name(#arg) #result, + [#attribute_stream] fn #name(#arg) #result, }); // Create a dep node for the query @@ -495,37 +480,10 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream { [#attribute_stream] #name(#arg), }); - // Add a match arm to force the query given the dep node - dep_node_force_stream.extend(quote! { - ::rustc_middle::dep_graph::DepKind::#name => { - if <#arg as DepNodeParams>>::can_reconstruct_query_key() { - if let Some(key) = <#arg as DepNodeParams>>::recover($tcx, $dep_node) { - force_query::, _>( - $tcx, - key, - DUMMY_SP, - *$dep_node - ); - return true; - } - } - } - }); - add_query_description_impl(&query, modifiers, &mut query_description_stream); } - let name = &group.name; - query_stream.extend(quote! { - #name { #group_stream }, - }); } - dep_node_force_stream.extend(quote! { - ::rustc_middle::dep_graph::DepKind::Null => { - bug!("Cannot force dep node: {:?}", $dep_node) - } - }); - TokenStream::from(quote! { macro_rules! rustc_query_append { ([$($macro:tt)*][$($other:tt)*]) => { @@ -546,15 +504,6 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream { ); } } - macro_rules! rustc_dep_node_force { - ([$dep_node:expr, $tcx:expr] $($other:tt)*) => { - match $dep_node.kind { - $($other)* - - #dep_node_force_stream - } - } - } macro_rules! 
rustc_cached_queries { ($($macro:tt)*) => { $($macro)*(#cached_queries); @@ -562,14 +511,5 @@ pub fn rustc_queries(input: TokenStream) -> TokenStream { } #query_description_stream - - macro_rules! rustc_dep_node_try_load_from_on_disk_cache { - ($dep_node:expr, $tcx:expr) => { - match $dep_node.kind { - #try_load_from_on_disk_cache_stream - _ => (), - } - } - } }) } diff --git a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs index 05b8dad3097e4..68faf9c7a629d 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs @@ -8,6 +8,7 @@ use rustc_ast as ast; use rustc_ast::expand::allocator::AllocatorKind; use rustc_data_structures::svh::Svh; use rustc_hir as hir; +use rustc_hir::def::DefKind; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, CRATE_DEF_INDEX, LOCAL_CRATE}; use rustc_hir::definitions::{DefKey, DefPath, DefPathHash}; use rustc_middle::hir::exports::Export; @@ -487,6 +488,10 @@ impl CrateStore for CStore { self.get_crate_data(def.krate).def_key(def.index) } + fn def_kind(&self, def: DefId) -> DefKind { + self.get_crate_data(def.krate).def_kind(def.index) + } + fn def_path(&self, def: DefId) -> DefPath { self.get_crate_data(def.krate).def_path(def.index) } diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index f0911928e81c9..7e33a479228ba 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -28,7 +28,6 @@ use rustc_middle::ty::{self, SymbolName, Ty, TyCtxt}; use rustc_serialize::{opaque, Encodable, Encoder}; use rustc_session::config::CrateType; use rustc_span::hygiene::{ExpnDataEncodeMode, HygieneEncodeContext}; -use rustc_span::source_map::Spanned; use rustc_span::symbol::{sym, Ident, Symbol}; use rustc_span::{self, ExternalSource, FileName, SourceFile, Span, SyntaxContext}; use rustc_target::abi::VariantIdx; @@ -436,8 +435,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { fn encode_info_for_items(&mut self) { let krate = self.tcx.hir().krate(); - let vis = Spanned { span: rustc_span::DUMMY_SP, node: hir::VisibilityKind::Public }; - self.encode_info_for_mod(hir::CRATE_HIR_ID, &krate.item.module, &krate.item.attrs, &vis); + self.encode_info_for_mod(hir::CRATE_HIR_ID, &krate.item.module, &krate.item.attrs); // Proc-macro crates only export proc-macro items, which are looked // up using `proc_macro_data` @@ -739,12 +737,8 @@ impl EncodeContext<'a, 'tcx> { is_non_exhaustive: variant.is_field_list_non_exhaustive(), }; - let enum_id = tcx.hir().local_def_id_to_hir_id(def.did.expect_local()); - let enum_vis = &tcx.hir().expect_item(enum_id).vis; - record!(self.tables.kind[def_id] <- EntryKind::Variant(self.lazy(data))); - record!(self.tables.visibility[def_id] <- - ty::Visibility::from_hir(enum_vis, enum_id, self.tcx)); + record!(self.tables.visibility[def_id] <- self.tcx.visibility(def_id)); record!(self.tables.span[def_id] <- self.tcx.def_span(def_id)); record!(self.tables.attributes[def_id] <- &self.tcx.get_attrs(def_id)[..]); record!(self.tables.expn_that_defined[def_id] <- self.tcx.expansion_that_defined(def_id)); @@ -785,17 +779,8 @@ impl EncodeContext<'a, 'tcx> { is_non_exhaustive: variant.is_field_list_non_exhaustive(), }; - // Variant constructors have the same visibility as the parent enums, unless marked as - // non-exhaustive, in which case they are lowered to `pub(crate)`. 
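The removed comment describes the surface rule for `#[non_exhaustive]` variants, which the `visibility` query now supplies instead of recomputing here; a minimal sketch (the `Message` enum is illustrative only):

    pub enum Message {
        Ping,
        #[non_exhaustive]
        Data(u32),
    }

    fn main() {
        // Inside the defining crate the `Data` constructor is usable as normal;
        // downstream crates see it as crate-visible, which is the lowering the
        // removed comment describes.
        let m = Message::Data(7);
        match m {
            Message::Data(n) => assert_eq!(n, 7),
            Message::Ping => {}
        }
    }
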
- let enum_id = tcx.hir().local_def_id_to_hir_id(def.did.expect_local()); - let enum_vis = &tcx.hir().expect_item(enum_id).vis; - let mut ctor_vis = ty::Visibility::from_hir(enum_vis, enum_id, tcx); - if variant.is_field_list_non_exhaustive() && ctor_vis == ty::Visibility::Public { - ctor_vis = ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)); - } - record!(self.tables.kind[def_id] <- EntryKind::Variant(self.lazy(data))); - record!(self.tables.visibility[def_id] <- ctor_vis); + record!(self.tables.visibility[def_id] <- self.tcx.visibility(def_id)); record!(self.tables.span[def_id] <- self.tcx.def_span(def_id)); self.encode_stability(def_id); self.encode_deprecation(def_id); @@ -811,13 +796,7 @@ impl EncodeContext<'a, 'tcx> { self.encode_promoted_mir(def_id.expect_local()); } - fn encode_info_for_mod( - &mut self, - id: hir::HirId, - md: &hir::Mod<'_>, - attrs: &[ast::Attribute], - vis: &hir::Visibility<'_>, - ) { + fn encode_info_for_mod(&mut self, id: hir::HirId, md: &hir::Mod<'_>, attrs: &[ast::Attribute]) { let tcx = self.tcx; let local_def_id = tcx.hir().local_def_id(id); let def_id = local_def_id.to_def_id(); @@ -850,7 +829,7 @@ impl EncodeContext<'a, 'tcx> { }; record!(self.tables.kind[def_id] <- EntryKind::Mod(self.lazy(data))); - record!(self.tables.visibility[def_id] <- ty::Visibility::from_hir(vis, id, self.tcx)); + record!(self.tables.visibility[def_id] <- self.tcx.visibility(def_id)); record!(self.tables.span[def_id] <- self.tcx.def_span(def_id)); record!(self.tables.attributes[def_id] <- attrs); if self.is_proc_macro { @@ -881,7 +860,7 @@ impl EncodeContext<'a, 'tcx> { let variant_data = tcx.hir().expect_variant_data(variant_id); record!(self.tables.kind[def_id] <- EntryKind::Field); - record!(self.tables.visibility[def_id] <- field.vis); + record!(self.tables.visibility[def_id] <- self.tcx.visibility(def_id)); record!(self.tables.span[def_id] <- self.tcx.def_span(def_id)); record!(self.tables.attributes[def_id] <- variant_data.fields()[field_index].attrs); record!(self.tables.expn_that_defined[def_id] <- self.tcx.expansion_that_defined(def_id)); @@ -906,25 +885,8 @@ impl EncodeContext<'a, 'tcx> { is_non_exhaustive: variant.is_field_list_non_exhaustive(), }; - let struct_id = tcx.hir().local_def_id_to_hir_id(adt_def.did.expect_local()); - let struct_vis = &tcx.hir().expect_item(struct_id).vis; - let mut ctor_vis = ty::Visibility::from_hir(struct_vis, struct_id, tcx); - for field in &variant.fields { - if ctor_vis.is_at_least(field.vis, tcx) { - ctor_vis = field.vis; - } - } - - // If the structure is marked as non_exhaustive then lower the visibility - // to within the crate. 
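Likewise for struct constructors, whose visibility is capped by their least visible field; a minimal sketch of that rule (the `Meters` type is illustrative only):

    mod inner {
        // A tuple-struct constructor is only as visible as its least visible
        // field: with a private field, `Meters(..)` can only be called inside
        // this module, even though the type itself is `pub`.
        pub struct Meters(u32);

        pub fn make(m: u32) -> Meters {
            Meters(m)
        }

        pub fn value(m: &Meters) -> u32 {
            m.0
        }
    }

    fn main() {
        // `inner::Meters(5)` would be rejected here: the constructor's
        // visibility is capped by the private field.
        let m = inner::make(5);
        assert_eq!(inner::value(&m), 5);
    }
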
- if adt_def.non_enum_variant().is_field_list_non_exhaustive() - && ctor_vis == ty::Visibility::Public - { - ctor_vis = ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)); - } - record!(self.tables.kind[def_id] <- EntryKind::Struct(self.lazy(data), adt_def.repr)); - record!(self.tables.visibility[def_id] <- ctor_vis); + record!(self.tables.visibility[def_id] <- self.tcx.visibility(def_id)); record!(self.tables.span[def_id] <- self.tcx.def_span(def_id)); record!(self.tables.expn_that_defined[def_id] <- self.tcx.expansion_that_defined(def_id)); self.encode_stability(def_id); @@ -1030,7 +992,7 @@ impl EncodeContext<'a, 'tcx> { EntryKind::AssocType(container) } }); - record!(self.tables.visibility[def_id] <- trait_item.vis); + record!(self.tables.visibility[def_id] <- self.tcx.visibility(def_id)); record!(self.tables.span[def_id] <- ast_item.span); record!(self.tables.attributes[def_id] <- ast_item.attrs); self.encode_ident_span(def_id, ast_item.ident); @@ -1112,7 +1074,7 @@ impl EncodeContext<'a, 'tcx> { } ty::AssocKind::Type => EntryKind::AssocType(container) }); - record!(self.tables.visibility[def_id] <- impl_item.vis); + record!(self.tables.visibility[def_id] <- self.tcx.visibility(def_id)); record!(self.tables.span[def_id] <- ast_item.span); record!(self.tables.attributes[def_id] <- ast_item.attrs); self.encode_ident_span(def_id, impl_item.ident); @@ -1261,7 +1223,7 @@ impl EncodeContext<'a, 'tcx> { EntryKind::Fn(self.lazy(data)) } hir::ItemKind::Mod(ref m) => { - return self.encode_info_for_mod(item.hir_id, m, &item.attrs, &item.vis); + return self.encode_info_for_mod(item.hir_id, m, &item.attrs); } hir::ItemKind::ForeignMod(_) => EntryKind::ForeignMod, hir::ItemKind::GlobalAsm(..) => EntryKind::GlobalAsm, @@ -1352,8 +1314,7 @@ impl EncodeContext<'a, 'tcx> { hir::ItemKind::ExternCrate(_) | hir::ItemKind::Use(..) 
=> bug!("cannot encode info for item {:?}", item), }); - record!(self.tables.visibility[def_id] <- - ty::Visibility::from_hir(&item.vis, item.hir_id, tcx)); + record!(self.tables.visibility[def_id] <- self.tcx.visibility(def_id)); record!(self.tables.span[def_id] <- self.tcx.def_span(def_id)); record!(self.tables.attributes[def_id] <- item.attrs); record!(self.tables.expn_that_defined[def_id] <- self.tcx.expansion_that_defined(def_id)); @@ -1470,7 +1431,7 @@ impl EncodeContext<'a, 'tcx> { fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef<'_>) { let def_id = self.tcx.hir().local_def_id(macro_def.hir_id).to_def_id(); record!(self.tables.kind[def_id] <- EntryKind::MacroDef(self.lazy(macro_def.ast.clone()))); - record!(self.tables.visibility[def_id] <- ty::Visibility::Public); + record!(self.tables.visibility[def_id] <- self.tcx.visibility(def_id)); record!(self.tables.span[def_id] <- macro_def.span); record!(self.tables.attributes[def_id] <- macro_def.attrs); self.encode_ident_span(def_id, macro_def.ident); @@ -1480,7 +1441,6 @@ impl EncodeContext<'a, 'tcx> { fn encode_info_for_generic_param(&mut self, def_id: DefId, kind: EntryKind, encode_type: bool) { record!(self.tables.kind[def_id] <- kind); - record!(self.tables.visibility[def_id] <- ty::Visibility::Public); record!(self.tables.span[def_id] <- self.tcx.def_span(def_id)); if encode_type { self.encode_item_type(def_id); @@ -1505,7 +1465,6 @@ impl EncodeContext<'a, 'tcx> { _ => bug!("closure that is neither generator nor closure"), }); - record!(self.tables.visibility[def_id.to_def_id()] <- ty::Visibility::Public); record!(self.tables.span[def_id.to_def_id()] <- self.tcx.def_span(def_id)); record!(self.tables.attributes[def_id.to_def_id()] <- &self.tcx.get_attrs(def_id.to_def_id())[..]); self.encode_item_type(def_id.to_def_id()); @@ -1525,7 +1484,6 @@ impl EncodeContext<'a, 'tcx> { let qualifs = self.tcx.mir_const_qualif(def_id); record!(self.tables.kind[def_id.to_def_id()] <- EntryKind::AnonConst(qualifs, const_data)); - record!(self.tables.visibility[def_id.to_def_id()] <- ty::Visibility::Public); record!(self.tables.span[def_id.to_def_id()] <- self.tcx.def_span(def_id)); self.encode_item_type(def_id.to_def_id()); self.encode_generics(def_id.to_def_id()); @@ -1762,8 +1720,7 @@ impl EncodeContext<'a, 'tcx> { hir::ForeignItemKind::Static(_, hir::Mutability::Not) => EntryKind::ForeignImmStatic, hir::ForeignItemKind::Type => EntryKind::ForeignType, }); - record!(self.tables.visibility[def_id] <- - ty::Visibility::from_hir(&nitem.vis, nitem.hir_id, self.tcx)); + record!(self.tables.visibility[def_id] <- self.tcx.visibility(def_id)); record!(self.tables.span[def_id] <- nitem.span); record!(self.tables.attributes[def_id] <- nitem.attrs); self.encode_ident_span(def_id, nitem.ident); diff --git a/compiler/rustc_middle/Cargo.toml b/compiler/rustc_middle/Cargo.toml index bdbc9ab52bf63..66532ea02f368 100644 --- a/compiler/rustc_middle/Cargo.toml +++ b/compiler/rustc_middle/Cargo.toml @@ -28,5 +28,5 @@ rustc_ast = { path = "../rustc_ast" } rustc_span = { path = "../rustc_span" } chalk-ir = "0.32.0" smallvec = { version = "1.0", features = ["union", "may_dangle"] } -measureme = "0.7.1" +measureme = "9.0.0" rustc_session = { path = "../rustc_session" } diff --git a/compiler/rustc_middle/src/hir/map/collector.rs b/compiler/rustc_middle/src/hir/map/collector.rs index d6869ab88751a..516c9b6752b97 100644 --- a/compiler/rustc_middle/src/hir/map/collector.rs +++ b/compiler/rustc_middle/src/hir/map/collector.rs @@ -360,8 +360,26 @@ impl<'a, 
'hir> Visitor<'hir> for NodeCollector<'a, 'hir> { } fn visit_generic_param(&mut self, param: &'hir GenericParam<'hir>) { - self.insert(param.span, param.hir_id, Node::GenericParam(param)); - intravisit::walk_generic_param(self, param); + if let hir::GenericParamKind::Type { + synthetic: Some(hir::SyntheticTyParamKind::ImplTrait), + .. + } = param.kind + { + debug_assert_eq!( + param.hir_id.owner, + self.definitions.opt_hir_id_to_local_def_id(param.hir_id).unwrap() + ); + self.with_dep_node_owner(param.hir_id.owner, param, |this, hash| { + this.insert_with_hash(param.span, param.hir_id, Node::GenericParam(param), hash); + + this.with_parent(param.hir_id, |this| { + intravisit::walk_generic_param(this, param); + }); + }); + } else { + self.insert(param.span, param.hir_id, Node::GenericParam(param)); + intravisit::walk_generic_param(self, param); + } } fn visit_trait_item(&mut self, ti: &'hir TraitItem<'hir>) { diff --git a/compiler/rustc_middle/src/hir/map/mod.rs b/compiler/rustc_middle/src/hir/map/mod.rs index 57f03c2a5cf54..106fa8c78fa28 100644 --- a/compiler/rustc_middle/src/hir/map/mod.rs +++ b/compiler/rustc_middle/src/hir/map/mod.rs @@ -816,7 +816,7 @@ impl<'hir> Map<'hir> { Some(Node::Variant(ref v)) => Some(&v.attrs[..]), Some(Node::Field(ref f)) => Some(&f.attrs[..]), Some(Node::Expr(ref e)) => Some(&*e.attrs), - Some(Node::Stmt(ref s)) => Some(s.kind.attrs()), + Some(Node::Stmt(ref s)) => Some(s.kind.attrs(|id| self.item(id.id))), Some(Node::Arm(ref a)) => Some(&*a.attrs), Some(Node::GenericParam(param)) => Some(¶m.attrs[..]), // Unit/tuple structs/variants take the attributes straight from diff --git a/compiler/rustc_middle/src/hir/place.rs b/compiler/rustc_middle/src/hir/place.rs index bcb56fae1709d..5da4be4e98279 100644 --- a/compiler/rustc_middle/src/hir/place.rs +++ b/compiler/rustc_middle/src/hir/place.rs @@ -103,7 +103,7 @@ impl<'tcx> Place<'tcx> { /// Returns the type of this `Place` after all projections have been applied. 
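The `map_or_else`-to-`map_or` cleanups in this patch follow the same pattern; a minimal standalone sketch, with plain values standing in for the projection and base types:

    fn main() {
        // `map_or` takes its default by value, which reads better when the
        // default is already at hand; `map_or_else` only pays off when
        // computing the default is expensive.
        let projection_tys: Vec<u32> = vec![];
        let base_ty = 7u32;
        let ty = projection_tys.last().map_or(base_ty, |t| *t);
        assert_eq!(ty, base_ty);
    }
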
pub fn ty(&self) -> Ty<'tcx> { - self.projections.last().map_or_else(|| self.base_ty, |proj| proj.ty) + self.projections.last().map_or(self.base_ty, |proj| proj.ty) } /// Returns the type of this `Place` immediately before `projection_index`th projection diff --git a/compiler/rustc_middle/src/ich/impls_syntax.rs b/compiler/rustc_middle/src/ich/impls_syntax.rs index 7aba4fc64a929..cab2ca2919f9f 100644 --- a/compiler/rustc_middle/src/ich/impls_syntax.rs +++ b/compiler/rustc_middle/src/ich/impls_syntax.rs @@ -40,11 +40,12 @@ impl<'ctx> rustc_ast::HashStableContext for StableHashingContext<'ctx> { debug_assert!(!attr.ident().map_or(false, |ident| self.is_ignored_attr(ident.name))); debug_assert!(!attr.is_doc_comment()); - let ast::Attribute { kind, id: _, style, span } = attr; + let ast::Attribute { kind, id: _, style, span, tokens } = attr; if let ast::AttrKind::Normal(item) = kind { item.hash_stable(self, hasher); style.hash_stable(self, hasher); span.hash_stable(self, hasher); + tokens.as_ref().expect_none("Tokens should have been removed during lowering!"); } else { unreachable!(); } diff --git a/compiler/rustc_middle/src/infer/unify_key.rs b/compiler/rustc_middle/src/infer/unify_key.rs index 4d884dde39387..16e9aafb25a54 100644 --- a/compiler/rustc_middle/src/infer/unify_key.rs +++ b/compiler/rustc_middle/src/infer/unify_key.rs @@ -175,19 +175,15 @@ impl<'tcx> UnifyKey for ty::ConstVid<'tcx> { impl<'tcx> UnifyValue for ConstVarValue<'tcx> { type Error = (&'tcx ty::Const<'tcx>, &'tcx ty::Const<'tcx>); - fn unify_values(value1: &Self, value2: &Self) -> Result { - let (val, span) = match (value1.val, value2.val) { + fn unify_values(&value1: &Self, &value2: &Self) -> Result { + Ok(match (value1.val, value2.val) { (ConstVariableValue::Known { .. }, ConstVariableValue::Known { .. }) => { bug!("equating two const variables, both of which have known values") } // If one side is known, prefer that one. - (ConstVariableValue::Known { .. }, ConstVariableValue::Unknown { .. }) => { - (value1.val, value1.origin.span) - } - (ConstVariableValue::Unknown { .. }, ConstVariableValue::Known { .. }) => { - (value2.val, value2.origin.span) - } + (ConstVariableValue::Known { .. }, ConstVariableValue::Unknown { .. }) => value1, + (ConstVariableValue::Unknown { .. }, ConstVariableValue::Known { .. }) => value2, // If both sides are *unknown*, it hardly matters, does it? ( @@ -200,16 +196,11 @@ impl<'tcx> UnifyValue for ConstVarValue<'tcx> { // universe is the minimum of the two universes, because that is // the one which contains the fewest names in scope. let universe = cmp::min(universe1, universe2); - (ConstVariableValue::Unknown { universe }, value1.origin.span) + ConstVarValue { + val: ConstVariableValue::Unknown { universe }, + origin: value1.origin, + } } - }; - - Ok(ConstVarValue { - origin: ConstVariableOrigin { - kind: ConstVariableOriginKind::ConstInference, - span: span, - }, - val, }) } } diff --git a/compiler/rustc_middle/src/macros.rs b/compiler/rustc_middle/src/macros.rs index a5482b7bdcfeb..6ff0a94ebf3ce 100644 --- a/compiler/rustc_middle/src/macros.rs +++ b/compiler/rustc_middle/src/macros.rs @@ -29,8 +29,8 @@ macro_rules! 
CloneLiftImpls { $( impl<$tcx> $crate::ty::Lift<$tcx> for $ty { type Lifted = Self; - fn lift_to_tcx(&self, _: $crate::ty::TyCtxt<$tcx>) -> Option { - Some(Clone::clone(self)) + fn lift_to_tcx(self, _: $crate::ty::TyCtxt<$tcx>) -> Option { + Some(self) } } )+ diff --git a/compiler/rustc_middle/src/middle/cstore.rs b/compiler/rustc_middle/src/middle/cstore.rs index f3d7c8506ab6f..ae9e4d364d3cb 100644 --- a/compiler/rustc_middle/src/middle/cstore.rs +++ b/compiler/rustc_middle/src/middle/cstore.rs @@ -8,6 +8,7 @@ use rustc_ast as ast; use rustc_ast::expand::allocator::AllocatorKind; use rustc_data_structures::svh::Svh; use rustc_data_structures::sync::{self, MetadataRef}; +use rustc_hir::def::DefKind; use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use rustc_hir::definitions::{DefKey, DefPath, DefPathHash}; use rustc_macros::HashStable; @@ -185,6 +186,7 @@ pub trait CrateStore { // resolve fn def_key(&self, def: DefId) -> DefKey; + fn def_kind(&self, def: DefId) -> DefKind; fn def_path(&self, def: DefId) -> DefPath; fn def_path_hash(&self, def: DefId) -> DefPathHash; fn all_def_path_hashes_and_def_ids(&self, cnum: CrateNum) -> Vec<(DefPathHash, DefId)>; diff --git a/compiler/rustc_middle/src/middle/region.rs b/compiler/rustc_middle/src/middle/region.rs index 38cb3c1701f92..d060549ca8137 100644 --- a/compiler/rustc_middle/src/middle/region.rs +++ b/compiler/rustc_middle/src/middle/region.rs @@ -292,20 +292,20 @@ pub struct ScopeTree { /// /// Then: /// - /// 1. From the ordering guarantee of HIR visitors (see - /// `rustc_hir::intravisit`), `D` does not dominate `U`. + /// 1. From the ordering guarantee of HIR visitors (see + /// `rustc_hir::intravisit`), `D` does not dominate `U`. /// - /// 2. Therefore, `D` is *potentially* storage-dead at `U` (because - /// we might visit `U` without ever getting to `D`). + /// 2. Therefore, `D` is *potentially* storage-dead at `U` (because + /// we might visit `U` without ever getting to `D`). /// - /// 3. However, we guarantee that at each HIR point, each - /// binding/temporary is always either always storage-live - /// or always storage-dead. This is what is being guaranteed - /// by `terminating_scopes` including all blocks where the - /// count of executions is not guaranteed. + /// 3. However, we guarantee that at each HIR point, each + /// binding/temporary is always either always storage-live + /// or always storage-dead. This is what is being guaranteed + /// by `terminating_scopes` including all blocks where the + /// count of executions is not guaranteed. /// - /// 4. By `2.` and `3.`, `D` is *statically* storage-dead at `U`, - /// QED. + /// 4. By `2.` and `3.`, `D` is *statically* storage-dead at `U`, + /// QED. /// /// This property ought to not on (3) in an essential way -- it /// is probably still correct even if we have "unrestricted" terminating diff --git a/compiler/rustc_middle/src/middle/stability.rs b/compiler/rustc_middle/src/middle/stability.rs index 7e2415fd544d4..1a206b245d37b 100644 --- a/compiler/rustc_middle/src/middle/stability.rs +++ b/compiler/rustc_middle/src/middle/stability.rs @@ -256,24 +256,12 @@ pub enum EvalResult { } // See issue #38412. -fn skip_stability_check_due_to_privacy(tcx: TyCtxt<'_>, mut def_id: DefId) -> bool { - // Check if `def_id` is a trait method. 
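A standalone analogue of the simplified control flow in `skip_stability_check_due_to_privacy`; the names and the two-variant visibility below are illustrative, not compiler APIs:

    enum Visibility {
        Public,
        Restricted,
    }

    fn skip_check_due_to_privacy(is_ty_param: bool, vis: Visibility) -> bool {
        if is_ty_param {
            // Type parameters carry no visibility; treat them as public here.
            return false;
        }
        match vis {
            // `pub` items must still be checked for stability.
            Visibility::Public => false,
            Visibility::Restricted => true,
        }
    }

    fn main() {
        assert!(!skip_check_due_to_privacy(true, Visibility::Restricted));
        assert!(skip_check_due_to_privacy(false, Visibility::Restricted));
        assert!(!skip_check_due_to_privacy(false, Visibility::Public));
    }
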
- match tcx.def_kind(def_id) { - DefKind::AssocFn | DefKind::AssocTy | DefKind::AssocConst => { - if let ty::TraitContainer(trait_def_id) = tcx.associated_item(def_id).container { - // Trait methods do not declare visibility (even - // for visibility info in cstore). Use containing - // trait instead, so methods of `pub` traits are - // themselves considered `pub`. - def_id = trait_def_id; - } - } - _ => {} +fn skip_stability_check_due_to_privacy(tcx: TyCtxt<'_>, def_id: DefId) -> bool { + if tcx.def_kind(def_id) == DefKind::TyParam { + // Have no visibility, considered public for the purpose of this check. + return false; } - - let visibility = tcx.visibility(def_id); - - match visibility { + match tcx.visibility(def_id) { // Must check stability for `pub` items. ty::Visibility::Public => false, diff --git a/compiler/rustc_middle/src/mir/coverage/mod.rs b/compiler/rustc_middle/src/mir/coverage.rs similarity index 100% rename from compiler/rustc_middle/src/mir/coverage/mod.rs rename to compiler/rustc_middle/src/mir/coverage.rs diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs index 16472c787572c..05bcf2ba0ce49 100644 --- a/compiler/rustc_middle/src/mir/mod.rs +++ b/compiler/rustc_middle/src/mir/mod.rs @@ -146,7 +146,7 @@ impl<'tcx> MirSource<'tcx> { /// The lowered representation of a single function. #[derive(Clone, TyEncodable, TyDecodable, Debug, HashStable, TypeFoldable)] pub struct Body<'tcx> { - /// A list of basic blocks. References to basic block use a newtyped index type `BasicBlock` + /// A list of basic blocks. References to basic block use a newtyped index type [`BasicBlock`] /// that indexes into this vector. basic_blocks: IndexVec>, @@ -821,9 +821,6 @@ pub struct LocalDecl<'tcx> { /// flag drop flags to avoid triggering this check as they are introduced /// after typeck. /// - /// Unsafety checking will also ignore dereferences of these locals, - /// so they can be used for raw pointers only used in a desugaring. - /// /// This should be sound because the drop flags are fully algebraic, and /// therefore don't affect the OIBIT or outlives properties of the /// generator. @@ -1010,13 +1007,13 @@ impl<'tcx> LocalDecl<'tcx> { } /// Returns `Some` if this is a reference to a static item that is used to - /// access that static + /// access that static. pub fn is_ref_to_static(&self) -> bool { matches!(self.local_info, Some(box LocalInfo::StaticRef { .. })) } - /// Returns `Some` if this is a reference to a static item that is used to - /// access that static + /// Returns `Some` if this is a reference to a thread-local static item that is used to + /// access that static. pub fn is_ref_to_thread_local(&self) -> bool { match self.local_info { Some(box LocalInfo::StaticRef { is_thread_local, .. }) => is_thread_local, @@ -1106,6 +1103,9 @@ rustc_index::newtype_index! { /// are edges that go from a multi-successor node to a multi-predecessor node. This pass is /// needed because some analyses require that there are no critical edges in the CFG. /// + /// Note that this type is just an index into [`Body.basic_blocks`](Body::basic_blocks); + /// the actual data that a basic block holds is in [`BasicBlockData`]. + /// /// Read more about basic blocks in the [rustc-dev-guide][guide-mir]. 
/// /// [CFG]: https://rustc-dev-guide.rust-lang.org/appendix/background.html#cfg @@ -2210,7 +2210,7 @@ impl<'tcx> Debug for Rvalue<'tcx> { let name = ty::tls::with(|tcx| { let mut name = String::new(); - let substs = tcx.lift(&substs).expect("could not lift for printing"); + let substs = tcx.lift(substs).expect("could not lift for printing"); FmtPrinter::new(tcx, &mut name, Namespace::ValueNS) .print_def_path(variant_def.def_id, substs)?; Ok(name) @@ -2233,7 +2233,7 @@ impl<'tcx> Debug for Rvalue<'tcx> { if let Some(def_id) = def_id.as_local() { let hir_id = tcx.hir().local_def_id_to_hir_id(def_id); let name = if tcx.sess.opts.debugging_opts.span_free_formats { - let substs = tcx.lift(&substs).unwrap(); + let substs = tcx.lift(substs).unwrap(); format!( "[closure@{}]", tcx.def_path_str_with_substs(def_id.to_def_id(), substs), @@ -2527,7 +2527,7 @@ fn pretty_print_const( ) -> fmt::Result { use crate::ty::print::PrettyPrinter; ty::tls::with(|tcx| { - let literal = tcx.lift(&c).unwrap(); + let literal = tcx.lift(c).unwrap(); let mut cx = FmtPrinter::new(tcx, fmt, Namespace::ValueNS); cx.print_alloc_ids = true; cx.pretty_print_const(literal, print_types)?; diff --git a/compiler/rustc_middle/src/mir/tcx.rs b/compiler/rustc_middle/src/mir/tcx.rs index b9e4f6fb12eb1..f0bfdae261c64 100644 --- a/compiler/rustc_middle/src/mir/tcx.rs +++ b/compiler/rustc_middle/src/mir/tcx.rs @@ -152,10 +152,14 @@ impl<'tcx> Rvalue<'tcx> { tcx.mk_ty(ty::Array(operand.ty(local_decls, tcx), count)) } Rvalue::ThreadLocalRef(did) => { + let static_ty = tcx.type_of(did); if tcx.is_mutable_static(did) { - tcx.mk_mut_ptr(tcx.type_of(did)) + tcx.mk_mut_ptr(static_ty) + } else if tcx.is_foreign_item(did) { + tcx.mk_imm_ptr(static_ty) } else { - tcx.mk_imm_ref(tcx.lifetimes.re_static, tcx.type_of(did)) + // FIXME: These things don't *really* have 'static lifetime. + tcx.mk_imm_ref(tcx.lifetimes.re_static, static_ty) } } Rvalue::Ref(reg, bk, ref place) => { diff --git a/compiler/rustc_middle/src/mir/terminator/mod.rs b/compiler/rustc_middle/src/mir/terminator.rs similarity index 99% rename from compiler/rustc_middle/src/mir/terminator/mod.rs rename to compiler/rustc_middle/src/mir/terminator.rs index e1071454e6501..c9493c679879e 100644 --- a/compiler/rustc_middle/src/mir/terminator/mod.rs +++ b/compiler/rustc_middle/src/mir/terminator.rs @@ -535,7 +535,7 @@ impl<'tcx> TerminatorKind<'tcx> { Goto { .. } => vec!["".into()], SwitchInt { ref targets, switch_ty, .. } => ty::tls::with(|tcx| { let param_env = ty::ParamEnv::empty(); - let switch_ty = tcx.lift(&switch_ty).unwrap(); + let switch_ty = tcx.lift(switch_ty).unwrap(); let size = tcx.layout_of(param_env.and(switch_ty)).unwrap().size; targets .values diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index 1acb44f6d22f3..89faa97d50fca 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -1267,6 +1267,7 @@ rustc_queries! 
{ TypeChecking { query visibility(def_id: DefId) -> ty::Visibility { + eval_always desc { |tcx| "computing visibility of `{}`", tcx.def_path_str(def_id) } } } diff --git a/compiler/rustc_middle/src/traits/mod.rs b/compiler/rustc_middle/src/traits/mod.rs index 1dd6d590d908f..bbc46b8d60835 100644 --- a/compiler/rustc_middle/src/traits/mod.rs +++ b/compiler/rustc_middle/src/traits/mod.rs @@ -13,6 +13,7 @@ use crate::mir::interpret::ErrorHandled; use crate::ty::subst::SubstsRef; use crate::ty::{self, AdtKind, Ty, TyCtxt}; +use rustc_errors::{Applicability, DiagnosticBuilder}; use rustc_hir as hir; use rustc_hir::def_id::DefId; use rustc_span::symbol::Symbol; @@ -342,6 +343,7 @@ static_assert_size!(ObligationCauseCode<'_>, 32); #[derive(Clone, Debug, PartialEq, Eq, Hash, Lift)] pub struct MatchExpressionArmCause<'tcx> { pub arm_span: Span, + pub scrut_span: Span, pub semi_span: Option, pub source: hir::MatchSource, pub prior_arms: Vec, @@ -646,13 +648,13 @@ impl ObjectSafetyViolation { ObjectSafetyViolation::SizedSelf(_) => "it requires `Self: Sized`".into(), ObjectSafetyViolation::SupertraitSelf(ref spans) => { if spans.iter().any(|sp| *sp != DUMMY_SP) { - "it uses `Self` as a type parameter in this".into() + "it uses `Self` as a type parameter".into() } else { "it cannot use `Self` as a type parameter in a supertrait or `where`-clause" .into() } } - ObjectSafetyViolation::Method(name, MethodViolationCode::StaticMethod(_), _) => { + ObjectSafetyViolation::Method(name, MethodViolationCode::StaticMethod(_, _, _), _) => { format!("associated function `{}` has no `self` parameter", name).into() } ObjectSafetyViolation::Method( @@ -686,32 +688,65 @@ impl ObjectSafetyViolation { } } - pub fn solution(&self) -> Option<(String, Option<(String, Span)>)> { - Some(match *self { - ObjectSafetyViolation::SizedSelf(_) | ObjectSafetyViolation::SupertraitSelf(_) => { - return None; + pub fn solution(&self, err: &mut DiagnosticBuilder<'_>) { + match *self { + ObjectSafetyViolation::SizedSelf(_) | ObjectSafetyViolation::SupertraitSelf(_) => {} + ObjectSafetyViolation::Method( + name, + MethodViolationCode::StaticMethod(sugg, self_span, has_args), + _, + ) => { + err.span_suggestion( + self_span, + &format!( + "consider turning `{}` into a method by giving it a `&self` argument", + name + ), + format!("&self{}", if has_args { ", " } else { "" }), + Applicability::MaybeIncorrect, + ); + match sugg { + Some((sugg, span)) => { + err.span_suggestion( + span, + &format!( + "alternatively, consider constraining `{}` so it does not apply to \ + trait objects", + name + ), + sugg.to_string(), + Applicability::MaybeIncorrect, + ); + } + None => { + err.help(&format!( + "consider turning `{}` into a method by giving it a `&self` \ + argument or constraining it so it does not apply to trait objects", + name + )); + } + } } - ObjectSafetyViolation::Method(name, MethodViolationCode::StaticMethod(sugg), _) => ( - format!( - "consider turning `{}` into a method by giving it a `&self` argument or \ - constraining it so it does not apply to trait objects", - name - ), - sugg.map(|(sugg, sp)| (sugg.to_string(), sp)), - ), ObjectSafetyViolation::Method( name, MethodViolationCode::UndispatchableReceiver, span, - ) => ( - format!("consider changing method `{}`'s `self` parameter to be `&self`", name), - Some(("&Self".to_string(), span)), - ), + ) => { + err.span_suggestion( + span, + &format!( + "consider changing method `{}`'s `self` parameter to be `&self`", + name + ), + "&Self".to_string(), + Applicability::MachineApplicable, + 
); + } ObjectSafetyViolation::AssocConst(name, _) | ObjectSafetyViolation::Method(name, ..) => { - (format!("consider moving `{}` to another trait", name), None) + err.help(&format!("consider moving `{}` to another trait", name)); } - }) + } } pub fn spans(&self) -> SmallVec<[Span; 1]> { @@ -735,7 +770,7 @@ impl ObjectSafetyViolation { #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable)] pub enum MethodViolationCode { /// e.g., `fn foo()` - StaticMethod(Option<(&'static str, Span)>), + StaticMethod(Option<(&'static str, Span)>, Span, bool /* has args */), /// e.g., `fn foo(&self, x: Self)` ReferencesSelfInput(usize), diff --git a/compiler/rustc_middle/src/traits/select.rs b/compiler/rustc_middle/src/traits/select.rs index 358ead507b4d0..c570ad3273d4e 100644 --- a/compiler/rustc_middle/src/traits/select.rs +++ b/compiler/rustc_middle/src/traits/select.rs @@ -127,7 +127,10 @@ pub enum SelectionCandidate<'tcx> { TraitAliasCandidate(DefId), - ObjectCandidate, + /// Matching `dyn Trait` with a supertrait of `Trait`. The index is the + /// position in the iterator returned by + /// `rustc_infer::traits::util::supertraits`. + ObjectCandidate(usize), BuiltinObjectCandidate, diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index e24ba6d7a1e3d..f6ea6743a0e04 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -22,7 +22,7 @@ use crate::ty::{ ExistentialPredicate, FloatVar, FloatVid, GenericParamDefKind, InferConst, InferTy, IntVar, IntVid, List, ParamConst, ParamTy, PolyFnSig, Predicate, PredicateInner, PredicateKind, ProjectionTy, Region, RegionKind, ReprOptions, TraitObjectVisitor, Ty, TyKind, TyS, TyVar, - TyVid, TypeAndMut, + TyVid, TypeAndMut, Visibility, }; use rustc_ast as ast; use rustc_ast::expand::allocator::AllocatorKind; @@ -134,7 +134,7 @@ impl<'tcx> CtxtInterners<'tcx> { fn intern_predicate(&self, kind: PredicateKind<'tcx>) -> &'tcx PredicateInner<'tcx> { self.predicate .intern(kind, |kind| { - let flags = super::flags::FlagComputation::for_predicate(&kind); + let flags = super::flags::FlagComputation::for_predicate(kind); let predicate_struct = PredicateInner { kind, @@ -911,6 +911,9 @@ pub struct GlobalCtxt<'tcx> { /// Common consts, pre-interned for your convenience. pub consts: CommonConsts<'tcx>, + /// Visibilities produced by resolver. + pub visibilities: FxHashMap, + /// Resolutions of `extern crate` items produced by resolver. extern_crate_map: FxHashMap, @@ -1057,7 +1060,7 @@ impl<'tcx> TyCtxt<'tcx> { ) } - pub fn lift>(self, value: &T) -> Option { + pub fn lift>(self, value: T) -> Option { value.lift_to_tcx(self) } @@ -1124,6 +1127,7 @@ impl<'tcx> TyCtxt<'tcx> { types: common_types, lifetimes: common_lifetimes, consts: common_consts, + visibilities: resolutions.visibilities, extern_crate_map: resolutions.extern_crate_map, trait_map, export_map: resolutions.export_map, @@ -1565,16 +1569,16 @@ impl<'tcx> TyCtxt<'tcx> { /// e.g., `()` or `u8`, was interned in a different context. pub trait Lift<'tcx>: fmt::Debug { type Lifted: fmt::Debug + 'tcx; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option; + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option; } macro_rules! 
nop_lift { ($set:ident; $ty:ty => $lifted:ty) => { impl<'a, 'tcx> Lift<'tcx> for $ty { type Lifted = $lifted; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - if tcx.interners.$set.contains_pointer_to(&Interned(*self)) { - Some(unsafe { mem::transmute(*self) }) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + if tcx.interners.$set.contains_pointer_to(&Interned(self)) { + Some(unsafe { mem::transmute(self) }) } else { None } @@ -1587,12 +1591,12 @@ macro_rules! nop_list_lift { ($set:ident; $ty:ty => $lifted:ty) => { impl<'a, 'tcx> Lift<'tcx> for &'a List<$ty> { type Lifted = &'tcx List<$lifted>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { if self.is_empty() { return Some(List::empty()); } - if tcx.interners.$set.contains_pointer_to(&Interned(*self)) { - Some(unsafe { mem::transmute(*self) }) + if tcx.interners.$set.contains_pointer_to(&Interned(self)) { + Some(unsafe { mem::transmute(self) }) } else { None } @@ -2032,13 +2036,13 @@ direct_interners! { macro_rules! slice_interners { ($($field:ident: $method:ident($ty:ty)),+ $(,)?) => ( - $(impl<'tcx> TyCtxt<'tcx> { - pub fn $method(self, v: &[$ty]) -> &'tcx List<$ty> { + impl<'tcx> TyCtxt<'tcx> { + $(pub fn $method(self, v: &[$ty]) -> &'tcx List<$ty> { self.interners.$field.intern_ref(v, || { Interned(List::from_arena(&*self.arena, v)) }).0 - } - })+ + })+ + } ); } diff --git a/compiler/rustc_middle/src/ty/error.rs b/compiler/rustc_middle/src/ty/error.rs index 82d698b37ab1d..235f8749cf917 100644 --- a/compiler/rustc_middle/src/ty/error.rs +++ b/compiler/rustc_middle/src/ty/error.rs @@ -229,7 +229,7 @@ impl<'tcx> ty::TyS<'tcx> { ty::Adt(def, _) => format!("{} `{}`", def.descr(), tcx.def_path_str(def.did)).into(), ty::Foreign(def_id) => format!("extern type `{}`", tcx.def_path_str(def_id)).into(), ty::Array(t, n) => { - let n = tcx.lift(&n).unwrap(); + let n = tcx.lift(n).unwrap(); match n.try_eval_usize(tcx, ty::ParamEnv::empty()) { _ if t.is_simple_ty() => format!("array `{}`", self).into(), Some(n) => format!("array of {} element{}", n, pluralize!(n)).into(), diff --git a/compiler/rustc_middle/src/ty/flags.rs b/compiler/rustc_middle/src/ty/flags.rs index c9a4022330a7a..8b97a87f214b8 100644 --- a/compiler/rustc_middle/src/ty/flags.rs +++ b/compiler/rustc_middle/src/ty/flags.rs @@ -22,7 +22,7 @@ impl FlagComputation { result } - pub fn for_predicate(kind: &ty::PredicateKind<'_>) -> FlagComputation { + pub fn for_predicate(kind: ty::PredicateKind<'_>) -> FlagComputation { let mut result = FlagComputation::new(); result.add_predicate_kind(kind); result @@ -53,7 +53,14 @@ impl FlagComputation { /// Adds the flags/depth from a set of types that appear within the current type, but within a /// region binder. 
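The `Lift` changes in this file (and the `nop_lift!`/`nop_list_lift!` macros just above) switch `lift_to_tcx` from `&self` to by-value `self`, so call sites become `tcx.lift(x)` rather than `tcx.lift(&x)`. A minimal, self-contained analogue of that signature change, using toy `Ctx`/`Handle` types in place of `TyCtxt` and interned values:

```rust
// Interned compiler values are small `Copy` handles, so taking `self` by
// value removes a layer of references and lets the blanket impls compose
// with `?`. `Ctx` and `Handle` are placeholders, not rustc types.
#[derive(Copy, Clone, Debug, PartialEq)]
struct Handle(u32);

struct Ctx {
    known: Vec<u32>,
}

trait Lift: Sized {
    type Lifted;
    fn lift_to_ctx(self, ctx: &Ctx) -> Option<Self::Lifted>;
}

impl Lift for Handle {
    type Lifted = Handle;
    fn lift_to_ctx(self, ctx: &Ctx) -> Option<Handle> {
        // Succeed only if the value belongs to this context, loosely
        // mirroring the `contains_pointer_to` check in the macro above.
        ctx.known.contains(&self.0).then(|| self)
    }
}

// Tuples lift component-wise; by-value `self` makes `?` read naturally,
// as in the `structural_impls` changes further down.
impl<A: Lift, B: Lift> Lift for (A, B) {
    type Lifted = (A::Lifted, B::Lifted);
    fn lift_to_ctx(self, ctx: &Ctx) -> Option<Self::Lifted> {
        Some((self.0.lift_to_ctx(ctx)?, self.1.lift_to_ctx(ctx)?))
    }
}

fn main() {
    let ctx = Ctx { known: vec![1, 2] };
    assert_eq!((Handle(1), Handle(2)).lift_to_ctx(&ctx), Some((Handle(1), Handle(2))));
    assert_eq!(Handle(9).lift_to_ctx(&ctx), None);
}
```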
- fn add_bound_computation(&mut self, computation: FlagComputation) { + fn bound_computation(&mut self, value: ty::Binder, f: F) + where + F: FnOnce(&mut Self, T), + { + let mut computation = FlagComputation::new(); + + f(&mut computation, value.skip_binder()); + self.add_flags(computation.flags); // The types that contributed to `computation` occurred within @@ -101,9 +108,7 @@ impl FlagComputation { } &ty::GeneratorWitness(ts) => { - let mut computation = FlagComputation::new(); - computation.add_tys(ts.skip_binder()); - self.add_bound_computation(computation); + self.bound_computation(ts, |flags, ts| flags.add_tys(ts)); } &ty::Closure(_, substs) => { @@ -154,20 +159,21 @@ impl FlagComputation { self.add_substs(substs); } - &ty::Dynamic(ref obj, r) => { - let mut computation = FlagComputation::new(); - for predicate in obj.skip_binder().iter() { - match predicate { - ty::ExistentialPredicate::Trait(tr) => computation.add_substs(tr.substs), - ty::ExistentialPredicate::Projection(p) => { - let mut proj_computation = FlagComputation::new(); - proj_computation.add_existential_projection(&p); - self.add_bound_computation(proj_computation); + &ty::Dynamic(obj, r) => { + self.bound_computation(obj, |computation, obj| { + for predicate in obj.iter() { + match predicate { + ty::ExistentialPredicate::Trait(tr) => { + computation.add_substs(tr.substs) + } + ty::ExistentialPredicate::Projection(p) => { + computation.add_existential_projection(&p); + } + ty::ExistentialPredicate::AutoTrait(_) => {} } - ty::ExistentialPredicate::AutoTrait(_) => {} } - } - self.add_bound_computation(computation); + }); + self.add_region(r); } @@ -195,22 +201,21 @@ impl FlagComputation { self.add_substs(substs); } - &ty::FnPtr(f) => { - self.add_fn_sig(f); - } + &ty::FnPtr(fn_sig) => self.bound_computation(fn_sig, |computation, fn_sig| { + computation.add_tys(fn_sig.inputs()); + computation.add_ty(fn_sig.output()); + }), } } - fn add_predicate_kind(&mut self, kind: &ty::PredicateKind<'_>) { + fn add_predicate_kind(&mut self, kind: ty::PredicateKind<'_>) { match kind { ty::PredicateKind::ForAll(binder) => { - let mut computation = FlagComputation::new(); - - computation.add_predicate_atom(binder.skip_binder()); - - self.add_bound_computation(computation); + self.bound_computation(binder, |computation, atom| { + computation.add_predicate_atom(atom) + }); } - &ty::PredicateKind::Atom(atom) => self.add_predicate_atom(atom), + ty::PredicateKind::Atom(atom) => self.add_predicate_atom(atom), } } @@ -266,15 +271,6 @@ impl FlagComputation { } } - fn add_fn_sig(&mut self, fn_sig: ty::PolyFnSig<'_>) { - let mut computation = FlagComputation::new(); - - computation.add_tys(fn_sig.skip_binder().inputs()); - computation.add_ty(fn_sig.skip_binder().output()); - - self.add_bound_computation(computation); - } - fn add_region(&mut self, r: ty::Region<'_>) { self.add_flags(r.type_flags()); if let ty::ReLateBound(debruijn, _) = *r { diff --git a/compiler/rustc_middle/src/ty/fold.rs b/compiler/rustc_middle/src/ty/fold.rs index 5524d91a6d533..0e5e22dcaae9f 100644 --- a/compiler/rustc_middle/src/ty/fold.rs +++ b/compiler/rustc_middle/src/ty/fold.rs @@ -30,8 +30,6 @@ //! //! These methods return true to indicate that the visitor has found what it is //! looking for, and does not need to visit anything else. 
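The `FlagComputation` hunk above replaces the ad-hoc "build a nested accumulator, then `add_bound_computation`" pattern with a closure-taking `bound_computation` helper, so binder handling lives in one place. A hedged sketch of that refactoring shape, with invented flag/depth fields rather than the real `TypeFlags` bookkeeping:

```rust
// The helper owns the "enter a binder, run the closure against a fresh
// accumulator, fold the result back" dance; call sites only say what to
// add for the bound value. Types and the depth adjustment are illustrative.
#[derive(Default, Debug)]
struct Flags {
    bits: u32,
    depth: u32, // stand-in for outer-exclusive-binder bookkeeping
}

struct Binder<T>(T);

impl Flags {
    fn add_bits(&mut self, bits: u32) {
        self.bits |= bits;
    }

    fn bound_computation<T, F>(&mut self, value: Binder<T>, f: F)
    where
        F: FnOnce(&mut Self, T),
    {
        let mut inner = Flags::default();
        f(&mut inner, value.0);
        // Merge flags and account for leaving one binder level, once, here.
        self.bits |= inner.bits;
        self.depth = self.depth.max(inner.depth.saturating_sub(1));
    }
}

fn main() {
    let mut outer = Flags::default();
    outer.bound_computation(Binder(0b0101_u32), |flags, bits| flags.add_bits(bits));
    println!("{:?}", outer);
}
```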
- -use crate::ty::structural_impls::PredicateVisitor; use crate::ty::{self, flags::FlagComputation, Binder, Ty, TyCtxt, TypeFlags}; use rustc_hir as hir; use rustc_hir::def_id::DefId; @@ -211,6 +209,10 @@ pub trait TypeVisitor<'tcx>: Sized { fn visit_const(&mut self, c: &'tcx ty::Const<'tcx>) -> bool { c.super_visit_with(self) } + + fn visit_predicate(&mut self, p: ty::Predicate<'tcx>) -> bool { + p.super_visit_with(self) + } } /////////////////////////////////////////////////////////////////////////// @@ -868,9 +870,7 @@ impl<'tcx> TypeVisitor<'tcx> for HasEscapingVarsVisitor { _ => ct.super_visit_with(self), } } -} -impl<'tcx> PredicateVisitor<'tcx> for HasEscapingVarsVisitor { fn visit_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> bool { predicate.inner.outer_exclusive_binder > self.outer_index } @@ -903,9 +903,7 @@ impl<'tcx> TypeVisitor<'tcx> for HasTypeFlagsVisitor { debug!("HasTypeFlagsVisitor: c={:?} c.flags={:?} self.flags={:?}", c, flags, self.flags); flags.intersects(self.flags) } -} -impl<'tcx> PredicateVisitor<'tcx> for HasTypeFlagsVisitor { fn visit_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> bool { debug!( "HasTypeFlagsVisitor: predicate={:?} predicate.flags={:?} self.flags={:?}", @@ -914,6 +912,7 @@ impl<'tcx> PredicateVisitor<'tcx> for HasTypeFlagsVisitor { predicate.inner.flags.intersects(self.flags) } } + /// Collects all the late-bound regions at the innermost binding level /// into a hash set. struct LateBoundRegionsCollector { diff --git a/compiler/rustc_middle/src/ty/instance.rs b/compiler/rustc_middle/src/ty/instance.rs index 4a2c97b4a400f..8b3fb87507061 100644 --- a/compiler/rustc_middle/src/ty/instance.rs +++ b/compiler/rustc_middle/src/ty/instance.rs @@ -258,7 +258,7 @@ impl<'tcx> InstanceDef<'tcx> { impl<'tcx> fmt::Display for Instance<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { ty::tls::with(|tcx| { - let substs = tcx.lift(&self.substs).expect("could not lift for printing"); + let substs = tcx.lift(self.substs).expect("could not lift for printing"); FmtPrinter::new(tcx, &mut *f, Namespace::ValueNS) .print_def_path(self.def_id(), substs)?; Ok(()) diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index 275888b0ce247..845fa8a47ae2c 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -125,6 +125,7 @@ mod sty; pub struct ResolverOutputs { pub definitions: rustc_hir::definitions::Definitions, pub cstore: Box, + pub visibilities: FxHashMap, pub extern_crate_map: FxHashMap, pub maybe_unused_trait_imports: FxHashSet, pub maybe_unused_extern_crates: Vec<(LocalDefId, Span)>, @@ -1056,9 +1057,21 @@ impl<'tcx> Predicate<'tcx> { } } + /// Converts this to a `Binder>`. If the value was an + /// `Atom`, then it is not allowed to contain escaping bound vars. + pub fn bound_atom(self) -> Binder> { + match self.kind() { + &PredicateKind::ForAll(binder) => binder, + &PredicateKind::Atom(atom) => { + debug_assert!(!atom.has_escaping_bound_vars()); + Binder::dummy(atom) + } + } + } + /// Allows using a `Binder>` even if the given predicate previously /// contained unbound variables by shifting these variables outwards. 
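`Predicate::bound_atom`, added in the `ty/mod.rs` hunk above, normalizes the `ForAll`/`Atom` split into a single `Binder` that callers can treat uniformly. A self-contained toy version of that shape (the real method also debug-asserts that a bare `Atom` has no escaping bound vars):

```rust
// Toy stand-ins for `Binder`, `PredicateAtom`, and `PredicateKind`.
#[derive(Clone, Debug)]
struct Binder<T>(T);

impl<T> Binder<T> {
    fn dummy(value: T) -> Self {
        Binder(value)
    }
    fn skip_binder(self) -> T {
        self.0
    }
}

#[derive(Clone, Debug)]
enum Atom {
    Trait(&'static str),
}

#[derive(Clone, Debug)]
enum Predicate {
    ForAll(Binder<Atom>),
    Atom(Atom),
}

impl Predicate {
    // Both representations come back as a `Binder`, so downstream code can
    // uniformly `skip_binder` (or rebind) without matching on the kind.
    fn bound_atom(self) -> Binder<Atom> {
        match self {
            Predicate::ForAll(binder) => binder,
            Predicate::Atom(atom) => Binder::dummy(atom),
        }
    }
}

fn main() {
    let p = Predicate::Atom(Atom::Trait("Sized"));
    println!("{:?}", p.bound_atom().skip_binder());
}
```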
- pub fn bound_atom(self, tcx: TyCtxt<'tcx>) -> Binder> { + pub fn bound_atom_with_opt_escaping(self, tcx: TyCtxt<'tcx>) -> Binder> { match self.kind() { &PredicateKind::ForAll(binder) => binder, &PredicateKind::Atom(atom) => Binder::wrap_nonbinding(tcx, atom), @@ -2436,8 +2449,10 @@ impl<'tcx> AdtDef { self.variants.iter().flat_map(|v| v.fields.iter()) } + /// Whether the ADT lacks fields. Note that this includes uninhabited enums, + /// e.g., `enum Void {}` is considered payload free as well. pub fn is_payloadfree(&self) -> bool { - !self.variants.is_empty() && self.variants.iter().all(|v| v.fields.is_empty()) + self.variants.iter().all(|v| v.fields.is_empty()) } /// Return a `VariantDef` given a variant id. diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 08f88eed66f80..de24322e07de2 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -618,10 +618,9 @@ pub trait PrettyPrinter<'tcx>: // may contain unbound variables. We therefore do this manually. // // FIXME(lcnr): Find out why exactly this is the case :) - if let ty::PredicateAtom::Trait(pred, _) = - predicate.bound_atom(self.tcx()).skip_binder() - { - let trait_ref = ty::Binder::bind(pred.trait_ref); + let bound_predicate = predicate.bound_atom_with_opt_escaping(self.tcx()); + if let ty::PredicateAtom::Trait(pred, _) = bound_predicate.skip_binder() { + let trait_ref = bound_predicate.rebind(pred.trait_ref); // Don't print +Sized, but rather +?Sized if absent. if Some(trait_ref.def_id()) == self.tcx().lang_items().sized_trait() { is_sized = true; @@ -1849,7 +1848,7 @@ macro_rules! forward_display_to_print { $(impl fmt::Display for $ty { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { ty::tls::with(|tcx| { - tcx.lift(self) + tcx.lift(*self) .expect("could not lift for printing") .print(FmtPrinter::new(tcx, f, Namespace::TypeNS))?; Ok(()) diff --git a/compiler/rustc_middle/src/ty/query/mod.rs b/compiler/rustc_middle/src/ty/query/mod.rs index d3a7412ef14e7..7ba4d5a14dffb 100644 --- a/compiler/rustc_middle/src/ty/query/mod.rs +++ b/compiler/rustc_middle/src/ty/query/mod.rs @@ -34,7 +34,6 @@ use crate::ty::util::AlwaysRequiresDrop; use crate::ty::{self, AdtSizedConstraint, CrateInherentImpls, ParamEnvAnd, Ty, TyCtxt}; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap}; -use rustc_data_structures::profiling::ProfileCategory::*; use rustc_data_structures::stable_hasher::StableVec; use rustc_data_structures::svh::Svh; use rustc_data_structures::sync::Lrc; @@ -169,26 +168,71 @@ pub fn force_from_dep_node<'tcx>(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> bool return false; } - rustc_dep_node_force!([dep_node, tcx] - // These are inputs that are expected to be pre-allocated and that - // should therefore always be red or green already. - DepKind::CrateMetadata | + macro_rules! force_from_dep_node { + ($($(#[$attr:meta])* [$($modifiers:tt)*] $name:ident($K:ty),)*) => { + match dep_node.kind { + // These are inputs that are expected to be pre-allocated and that + // should therefore always be red or green already. + DepKind::CrateMetadata | - // These are anonymous nodes. - DepKind::TraitSelect | + // These are anonymous nodes. + DepKind::TraitSelect | - // We don't have enough information to reconstruct the query key of - // these. 
- DepKind::CompileCodegenUnit => { - bug!("force_from_dep_node: encountered {:?}", dep_node) + // We don't have enough information to reconstruct the query key of + // these. + DepKind::CompileCodegenUnit | + + // Forcing this makes no sense. + DepKind::Null => { + bug!("force_from_dep_node: encountered {:?}", dep_node) + } + + $(DepKind::$name => { + debug_assert!(<$K as DepNodeParams>>::can_reconstruct_query_key()); + + if let Some(key) = <$K as DepNodeParams>>::recover(tcx, dep_node) { + force_query::, _>( + tcx, + key, + DUMMY_SP, + *dep_node + ); + return true; + } + })* + } } - ); + } + + rustc_dep_node_append! { [force_from_dep_node!][] } false } pub(crate) fn try_load_from_on_disk_cache<'tcx>(tcx: TyCtxt<'tcx>, dep_node: &DepNode) { - rustc_dep_node_try_load_from_on_disk_cache!(dep_node, tcx) + macro_rules! try_load_from_on_disk_cache { + ($($name:ident,)*) => { + match dep_node.kind { + $(DepKind::$name => { + if as DepNodeParams>>::can_reconstruct_query_key() { + debug_assert!(tcx.dep_graph + .node_color(dep_node) + .map(|c| c.is_green()) + .unwrap_or(false)); + + let key = as DepNodeParams>>::recover(tcx, dep_node).unwrap(); + if queries::$name::cache_on_disk(tcx, &key, None) { + let _ = tcx.$name(key); + } + } + })* + + _ => (), + } + } + } + + rustc_cached_queries!(try_load_from_on_disk_cache!); } mod sealed { diff --git a/compiler/rustc_middle/src/ty/query/plumbing.rs b/compiler/rustc_middle/src/ty/query/plumbing.rs index 27bf22dac75cb..d038695283c10 100644 --- a/compiler/rustc_middle/src/ty/query/plumbing.rs +++ b/compiler/rustc_middle/src/ty/query/plumbing.rs @@ -40,7 +40,8 @@ impl QueryContext for TyCtxt<'tcx> { fn try_collect_active_jobs( &self, - ) -> Option, QueryJobInfo>> { + ) -> Option, QueryJobInfo>> + { self.queries.try_collect_active_jobs() } @@ -241,25 +242,15 @@ macro_rules! hash_result { }; } -macro_rules! define_queries { - (<$tcx:tt> $($category:tt { - $($(#[$attr:meta])* [$($modifiers:tt)*] fn $name:ident: $node:ident($($K:tt)*) -> $V:ty,)* - },)*) => { - define_queries_inner! { <$tcx> - $($( $(#[$attr])* category<$category> [$($modifiers)*] fn $name: $node($($K)*) -> $V,)*)* - } - } -} - macro_rules! query_helper_param_ty { (DefId) => { impl IntoQueryParam }; ($K:ty) => { $K }; } -macro_rules! define_queries_inner { +macro_rules! define_queries { (<$tcx:tt> - $($(#[$attr:meta])* category<$category:tt> - [$($modifiers:tt)*] fn $name:ident: $node:ident($($K:tt)*) -> $V:ty,)*) => { + $($(#[$attr:meta])* + [$($modifiers:tt)*] fn $name:ident($($K:tt)*) -> $V:ty,)*) => { use std::mem; use crate::{ @@ -267,7 +258,6 @@ macro_rules! define_queries_inner { rustc_data_structures::stable_hasher::StableHasher, ich::StableHashingContext }; - use rustc_data_structures::profiling::ProfileCategory; define_queries_struct! { tcx: $tcx, @@ -353,7 +343,7 @@ macro_rules! define_queries_inner { $(pub type $name<$tcx> = $V;)* } - $(impl<$tcx> QueryConfig> for queries::$name<$tcx> { + $(impl<$tcx> QueryConfig for queries::$name<$tcx> { type Key = $($K)*; type Value = $V; type Stored = < @@ -361,18 +351,17 @@ macro_rules! 
define_queries_inner { as QueryStorage >::Stored; const NAME: &'static str = stringify!($name); - const CATEGORY: ProfileCategory = $category; } impl<$tcx> QueryAccessors> for queries::$name<$tcx> { const ANON: bool = is_anon!([$($modifiers)*]); const EVAL_ALWAYS: bool = is_eval_always!([$($modifiers)*]); - const DEP_KIND: dep_graph::DepKind = dep_graph::DepKind::$node; + const DEP_KIND: dep_graph::DepKind = dep_graph::DepKind::$name; type Cache = query_storage!([$($modifiers)*][$($K)*, $V]); #[inline(always)] - fn query_state<'a>(tcx: TyCtxt<$tcx>) -> &'a QueryState, Self::Cache> { + fn query_state<'a>(tcx: TyCtxt<$tcx>) -> &'a QueryState as QueryContext>::Query, Self::Cache> { &tcx.queries.$name } @@ -454,7 +443,7 @@ macro_rules! define_queries_inner { #[inline(always)] #[must_use] pub fn $name(self, key: query_helper_param_ty!($($K)*)) - -> as QueryConfig>>::Stored + -> as QueryConfig>::Stored { self.at(DUMMY_SP).$name(key.into_query_param()) })* @@ -493,7 +482,7 @@ macro_rules! define_queries_inner { $($(#[$attr])* #[inline(always)] pub fn $name(self, key: query_helper_param_ty!($($K)*)) - -> as QueryConfig>>::Stored + -> as QueryConfig>::Stored { get_query::, _>(self.tcx, self.span, key.into_query_param()) })* @@ -527,7 +516,8 @@ macro_rules! define_queries_struct { fallback_extern_providers: Box, $($(#[$attr])* $name: QueryState< - TyCtxt<$tcx>, + crate::dep_graph::DepKind, + as QueryContext>::Query, as QueryAccessors>>::Cache, >,)* } @@ -548,7 +538,7 @@ macro_rules! define_queries_struct { pub(crate) fn try_collect_active_jobs( &self - ) -> Option, QueryJobInfo>>> { + ) -> Option, QueryJobInfo as QueryContext>::Query>>> { let mut jobs = FxHashMap::default(); $( diff --git a/compiler/rustc_middle/src/ty/query/profiling_support.rs b/compiler/rustc_middle/src/ty/query/profiling_support.rs index 4e8db3194bdff..cbcecb8849188 100644 --- a/compiler/rustc_middle/src/ty/query/profiling_support.rs +++ b/compiler/rustc_middle/src/ty/query/profiling_support.rs @@ -5,8 +5,7 @@ use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::profiling::SelfProfiler; use rustc_hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use rustc_hir::definitions::DefPathData; -use rustc_query_system::query::QueryCache; -use rustc_query_system::query::QueryState; +use rustc_query_system::query::{QueryCache, QueryContext, QueryState}; use std::fmt::Debug; use std::io::Write; @@ -231,7 +230,7 @@ where pub(super) fn alloc_self_profile_query_strings_for_query_cache<'tcx, C>( tcx: TyCtxt<'tcx>, query_name: &'static str, - query_state: &QueryState, C>, + query_state: &QueryState as QueryContext>::Query, C>, string_cache: &mut QueryKeyStringCache, ) where C: QueryCache, diff --git a/compiler/rustc_middle/src/ty/query/stats.rs b/compiler/rustc_middle/src/ty/query/stats.rs index b496bf839ab9e..e0b44ce23c912 100644 --- a/compiler/rustc_middle/src/ty/query/stats.rs +++ b/compiler/rustc_middle/src/ty/query/stats.rs @@ -1,11 +1,10 @@ use crate::ty::query::queries; use crate::ty::TyCtxt; use rustc_hir::def_id::{DefId, LOCAL_CRATE}; -use rustc_query_system::query::QueryCache; -use rustc_query_system::query::QueryState; -use rustc_query_system::query::{QueryAccessors, QueryContext}; +use rustc_query_system::query::{QueryAccessors, QueryCache, QueryContext, QueryState}; use std::any::type_name; +use std::hash::Hash; use std::mem; #[cfg(debug_assertions)] use std::sync::atomic::Ordering; @@ -38,10 +37,12 @@ struct QueryStats { local_def_id_keys: Option, } -fn stats( - name: &'static str, 
- map: &QueryState, -) -> QueryStats { +fn stats(name: &'static str, map: &QueryState) -> QueryStats +where + D: Copy + Clone + Eq + Hash, + Q: Clone, + C: QueryCache, +{ let mut stats = QueryStats { name, #[cfg(debug_assertions)] @@ -119,21 +120,22 @@ pub fn print_stats(tcx: TyCtxt<'_>) { } macro_rules! print_stats { - (<$tcx:tt> $($category:tt { - $($(#[$attr:meta])* [$($modifiers:tt)*] fn $name:ident: $node:ident($K:ty) -> $V:ty,)* - },)*) => { + (<$tcx:tt> + $($(#[$attr:meta])* [$($modifiers:tt)*] fn $name:ident($K:ty) -> $V:ty,)* + ) => { fn query_stats(tcx: TyCtxt<'_>) -> Vec { let mut queries = Vec::new(); - $($( + $( queries.push(stats::< - TyCtxt<'_>, + crate::dep_graph::DepKind, + as QueryContext>::Query, as QueryAccessors>>::Cache, >( stringify!($name), &tcx.queries.$name, )); - )*)* + )* queries } diff --git a/compiler/rustc_middle/src/ty/structural_impls.rs b/compiler/rustc_middle/src/ty/structural_impls.rs index 597ceac9386a0..53521d0e9f332 100644 --- a/compiler/rustc_middle/src/ty/structural_impls.rs +++ b/compiler/rustc_middle/src/ty/structural_impls.rs @@ -332,24 +332,23 @@ CloneTypeFoldableAndLiftImpls! { // FIXME(eddyb) replace all the uses of `Option::map` with `?`. impl<'tcx, A: Lift<'tcx>, B: Lift<'tcx>> Lift<'tcx> for (A, B) { type Lifted = (A::Lifted, B::Lifted); - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&self.0).and_then(|a| tcx.lift(&self.1).map(|b| (a, b))) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + Some((tcx.lift(self.0)?, tcx.lift(self.1)?)) } } impl<'tcx, A: Lift<'tcx>, B: Lift<'tcx>, C: Lift<'tcx>> Lift<'tcx> for (A, B, C) { type Lifted = (A::Lifted, B::Lifted, C::Lifted); - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&self.0) - .and_then(|a| tcx.lift(&self.1).and_then(|b| tcx.lift(&self.2).map(|c| (a, b, c)))) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + Some((tcx.lift(self.0)?, tcx.lift(self.1)?, tcx.lift(self.2)?)) } } impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Option { type Lifted = Option; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - match *self { - Some(ref x) => tcx.lift(x).map(Some), + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + match self { + Some(x) => tcx.lift(x).map(Some), None => Some(None), } } @@ -357,89 +356,72 @@ impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Option { impl<'tcx, T: Lift<'tcx>, E: Lift<'tcx>> Lift<'tcx> for Result { type Lifted = Result; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - match *self { - Ok(ref x) => tcx.lift(x).map(Ok), - Err(ref e) => tcx.lift(e).map(Err), + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + match self { + Ok(x) => tcx.lift(x).map(Ok), + Err(e) => tcx.lift(e).map(Err), } } } impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Box { type Lifted = Box; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&**self).map(Box::new) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + tcx.lift(*self).map(Box::new) } } -impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Rc { +impl<'tcx, T: Lift<'tcx> + Clone> Lift<'tcx> for Rc { type Lifted = Rc; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&**self).map(Rc::new) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + tcx.lift(self.as_ref().clone()).map(Rc::new) } } -impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Arc { +impl<'tcx, T: Lift<'tcx> + Clone> Lift<'tcx> for Arc { type Lifted = Arc; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&**self).map(Arc::new) - } -} - -impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for [T] { - type Lifted = 
Vec; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - // type annotation needed to inform `projection_must_outlive` - let mut result: Vec<>::Lifted> = Vec::with_capacity(self.len()); - for x in self { - if let Some(value) = tcx.lift(x) { - result.push(value); - } else { - return None; - } - } - Some(result) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + tcx.lift(self.as_ref().clone()).map(Arc::new) } } - impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Vec { type Lifted = Vec; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&self[..]) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + self.into_iter().map(|v| tcx.lift(v)).collect() } } impl<'tcx, I: Idx, T: Lift<'tcx>> Lift<'tcx> for IndexVec { type Lifted = IndexVec; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - self.iter().map(|e| tcx.lift(e)).collect() + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + self.into_iter().map(|e| tcx.lift(e)).collect() } } impl<'a, 'tcx> Lift<'tcx> for ty::TraitRef<'a> { type Lifted = ty::TraitRef<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&self.substs).map(|substs| ty::TraitRef { def_id: self.def_id, substs }) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + tcx.lift(self.substs).map(|substs| ty::TraitRef { def_id: self.def_id, substs }) } } impl<'a, 'tcx> Lift<'tcx> for ty::ExistentialTraitRef<'a> { type Lifted = ty::ExistentialTraitRef<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&self.substs).map(|substs| ty::ExistentialTraitRef { def_id: self.def_id, substs }) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + tcx.lift(self.substs).map(|substs| ty::ExistentialTraitRef { def_id: self.def_id, substs }) } } impl<'a, 'tcx> Lift<'tcx> for ty::ExistentialPredicate<'a> { type Lifted = ty::ExistentialPredicate<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { match self { ty::ExistentialPredicate::Trait(x) => tcx.lift(x).map(ty::ExistentialPredicate::Trait), ty::ExistentialPredicate::Projection(x) => { tcx.lift(x).map(ty::ExistentialPredicate::Projection) } ty::ExistentialPredicate::AutoTrait(def_id) => { - Some(ty::ExistentialPredicate::AutoTrait(*def_id)) + Some(ty::ExistentialPredicate::AutoTrait(def_id)) } } } @@ -447,15 +429,15 @@ impl<'a, 'tcx> Lift<'tcx> for ty::ExistentialPredicate<'a> { impl<'a, 'tcx> Lift<'tcx> for ty::TraitPredicate<'a> { type Lifted = ty::TraitPredicate<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option> { - tcx.lift(&self.trait_ref).map(|trait_ref| ty::TraitPredicate { trait_ref }) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option> { + tcx.lift(self.trait_ref).map(|trait_ref| ty::TraitPredicate { trait_ref }) } } impl<'a, 'tcx> Lift<'tcx> for ty::SubtypePredicate<'a> { type Lifted = ty::SubtypePredicate<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option> { - tcx.lift(&(self.a, self.b)).map(|(a, b)| ty::SubtypePredicate { + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option> { + tcx.lift((self.a, self.b)).map(|(a, b)| ty::SubtypePredicate { a_is_expected: self.a_is_expected, a, b, @@ -465,33 +447,33 @@ impl<'a, 'tcx> Lift<'tcx> for ty::SubtypePredicate<'a> { impl<'tcx, A: Copy + Lift<'tcx>, B: Copy + Lift<'tcx>> Lift<'tcx> for ty::OutlivesPredicate { type Lifted = ty::OutlivesPredicate; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&(self.0, self.1)).map(|(a, b)| ty::OutlivesPredicate(a, b)) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + 
tcx.lift((self.0, self.1)).map(|(a, b)| ty::OutlivesPredicate(a, b)) } } impl<'a, 'tcx> Lift<'tcx> for ty::ProjectionTy<'a> { type Lifted = ty::ProjectionTy<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option> { - tcx.lift(&self.substs) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option> { + tcx.lift(self.substs) .map(|substs| ty::ProjectionTy { item_def_id: self.item_def_id, substs }) } } impl<'a, 'tcx> Lift<'tcx> for ty::ProjectionPredicate<'a> { type Lifted = ty::ProjectionPredicate<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option> { - tcx.lift(&(self.projection_ty, self.ty)) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option> { + tcx.lift((self.projection_ty, self.ty)) .map(|(projection_ty, ty)| ty::ProjectionPredicate { projection_ty, ty }) } } impl<'a, 'tcx> Lift<'tcx> for ty::ExistentialProjection<'a> { type Lifted = ty::ExistentialProjection<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&self.substs).map(|substs| ty::ExistentialProjection { + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + tcx.lift(self.substs).map(|substs| ty::ExistentialProjection { substs, - ty: tcx.lift(&self.ty).expect("type must lift when substs do"), + ty: tcx.lift(self.ty).expect("type must lift when substs do"), item_def_id: self.item_def_id, }) } @@ -499,7 +481,7 @@ impl<'a, 'tcx> Lift<'tcx> for ty::ExistentialProjection<'a> { impl<'a, 'tcx> Lift<'tcx> for ty::PredicateKind<'a> { type Lifted = ty::PredicateKind<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { match self { ty::PredicateKind::ForAll(binder) => tcx.lift(binder).map(ty::PredicateKind::ForAll), ty::PredicateKind::Atom(atom) => tcx.lift(atom).map(ty::PredicateKind::Atom), @@ -509,24 +491,24 @@ impl<'a, 'tcx> Lift<'tcx> for ty::PredicateKind<'a> { impl<'a, 'tcx> Lift<'tcx> for ty::PredicateAtom<'a> { type Lifted = ty::PredicateAtom<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - match *self { - ty::PredicateAtom::Trait(ref data, constness) => { + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + match self { + ty::PredicateAtom::Trait(data, constness) => { tcx.lift(data).map(|data| ty::PredicateAtom::Trait(data, constness)) } - ty::PredicateAtom::Subtype(ref data) => tcx.lift(data).map(ty::PredicateAtom::Subtype), - ty::PredicateAtom::RegionOutlives(ref data) => { + ty::PredicateAtom::Subtype(data) => tcx.lift(data).map(ty::PredicateAtom::Subtype), + ty::PredicateAtom::RegionOutlives(data) => { tcx.lift(data).map(ty::PredicateAtom::RegionOutlives) } - ty::PredicateAtom::TypeOutlives(ref data) => { + ty::PredicateAtom::TypeOutlives(data) => { tcx.lift(data).map(ty::PredicateAtom::TypeOutlives) } - ty::PredicateAtom::Projection(ref data) => { + ty::PredicateAtom::Projection(data) => { tcx.lift(data).map(ty::PredicateAtom::Projection) } - ty::PredicateAtom::WellFormed(ty) => tcx.lift(&ty).map(ty::PredicateAtom::WellFormed), + ty::PredicateAtom::WellFormed(ty) => tcx.lift(ty).map(ty::PredicateAtom::WellFormed), ty::PredicateAtom::ClosureKind(closure_def_id, closure_substs, kind) => { - tcx.lift(&closure_substs).map(|closure_substs| { + tcx.lift(closure_substs).map(|closure_substs| { ty::PredicateAtom::ClosureKind(closure_def_id, closure_substs, kind) }) } @@ -534,13 +516,13 @@ impl<'a, 'tcx> Lift<'tcx> for ty::PredicateAtom<'a> { Some(ty::PredicateAtom::ObjectSafe(trait_def_id)) } ty::PredicateAtom::ConstEvaluatable(def_id, substs) => { - tcx.lift(&substs).map(|substs| 
ty::PredicateAtom::ConstEvaluatable(def_id, substs)) + tcx.lift(substs).map(|substs| ty::PredicateAtom::ConstEvaluatable(def_id, substs)) } ty::PredicateAtom::ConstEquate(c1, c2) => { - tcx.lift(&(c1, c2)).map(|(c1, c2)| ty::PredicateAtom::ConstEquate(c1, c2)) + tcx.lift((c1, c2)).map(|(c1, c2)| ty::PredicateAtom::ConstEquate(c1, c2)) } ty::PredicateAtom::TypeWellFormedFromEnv(ty) => { - tcx.lift(&ty).map(ty::PredicateAtom::TypeWellFormedFromEnv) + tcx.lift(ty).map(ty::PredicateAtom::TypeWellFormedFromEnv) } } } @@ -548,61 +530,62 @@ impl<'a, 'tcx> Lift<'tcx> for ty::PredicateAtom<'a> { impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::Binder { type Lifted = ty::Binder; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(self.as_ref().skip_binder()).map(ty::Binder::bind) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + self.map_bound(|v| tcx.lift(v)).transpose() } } impl<'a, 'tcx> Lift<'tcx> for ty::ParamEnv<'a> { type Lifted = ty::ParamEnv<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&self.caller_bounds()) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + tcx.lift(self.caller_bounds()) .map(|caller_bounds| ty::ParamEnv::new(caller_bounds, self.reveal())) } } impl<'a, 'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::ParamEnvAnd<'a, T> { type Lifted = ty::ParamEnvAnd<'tcx, T::Lifted>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&self.param_env).and_then(|param_env| { - tcx.lift(&self.value).map(|value| ty::ParamEnvAnd { param_env, value }) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + tcx.lift(self.param_env).and_then(|param_env| { + tcx.lift(self.value).map(|value| ty::ParamEnvAnd { param_env, value }) }) } } impl<'a, 'tcx> Lift<'tcx> for ty::ClosureSubsts<'a> { type Lifted = ty::ClosureSubsts<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&self.substs).map(|substs| ty::ClosureSubsts { substs }) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + tcx.lift(self.substs).map(|substs| ty::ClosureSubsts { substs }) } } impl<'a, 'tcx> Lift<'tcx> for ty::GeneratorSubsts<'a> { type Lifted = ty::GeneratorSubsts<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&self.substs).map(|substs| ty::GeneratorSubsts { substs }) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + tcx.lift(self.substs).map(|substs| ty::GeneratorSubsts { substs }) } } impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::Adjustment<'a> { type Lifted = ty::adjustment::Adjustment<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&self.kind).and_then(|kind| { - tcx.lift(&self.target).map(|target| ty::adjustment::Adjustment { kind, target }) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + let ty::adjustment::Adjustment { kind, target } = self; + tcx.lift(kind).and_then(|kind| { + tcx.lift(target).map(|target| ty::adjustment::Adjustment { kind, target }) }) } } impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::Adjust<'a> { type Lifted = ty::adjustment::Adjust<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - match *self { + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + match self { ty::adjustment::Adjust::NeverToAny => Some(ty::adjustment::Adjust::NeverToAny), ty::adjustment::Adjust::Pointer(ptr) => Some(ty::adjustment::Adjust::Pointer(ptr)), - ty::adjustment::Adjust::Deref(ref overloaded) => { + ty::adjustment::Adjust::Deref(overloaded) => { tcx.lift(overloaded).map(ty::adjustment::Adjust::Deref) } - ty::adjustment::Adjust::Borrow(ref autoref) => { + 
ty::adjustment::Adjust::Borrow(autoref) => { tcx.lift(autoref).map(ty::adjustment::Adjust::Borrow) } } @@ -611,8 +594,8 @@ impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::Adjust<'a> { impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::OverloadedDeref<'a> { type Lifted = ty::adjustment::OverloadedDeref<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&self.region).map(|region| ty::adjustment::OverloadedDeref { + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + tcx.lift(self.region).map(|region| ty::adjustment::OverloadedDeref { region, mutbl: self.mutbl, span: self.span, @@ -622,10 +605,10 @@ impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::OverloadedDeref<'a> { impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::AutoBorrow<'a> { type Lifted = ty::adjustment::AutoBorrow<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - match *self { + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + match self { ty::adjustment::AutoBorrow::Ref(r, m) => { - tcx.lift(&r).map(|r| ty::adjustment::AutoBorrow::Ref(r, m)) + tcx.lift(r).map(|r| ty::adjustment::AutoBorrow::Ref(r, m)) } ty::adjustment::AutoBorrow::RawPtr(m) => Some(ty::adjustment::AutoBorrow::RawPtr(m)), } @@ -634,16 +617,16 @@ impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::AutoBorrow<'a> { impl<'a, 'tcx> Lift<'tcx> for ty::GenSig<'a> { type Lifted = ty::GenSig<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&(self.resume_ty, self.yield_ty, self.return_ty)) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + tcx.lift((self.resume_ty, self.yield_ty, self.return_ty)) .map(|(resume_ty, yield_ty, return_ty)| ty::GenSig { resume_ty, yield_ty, return_ty }) } } impl<'a, 'tcx> Lift<'tcx> for ty::FnSig<'a> { type Lifted = ty::FnSig<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&self.inputs_and_output).map(|x| ty::FnSig { + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + tcx.lift(self.inputs_and_output).map(|x| ty::FnSig { inputs_and_output: x, c_variadic: self.c_variadic, unsafety: self.unsafety, @@ -654,19 +637,20 @@ impl<'a, 'tcx> Lift<'tcx> for ty::FnSig<'a> { impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::error::ExpectedFound { type Lifted = ty::error::ExpectedFound; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - tcx.lift(&self.expected).and_then(|expected| { - tcx.lift(&self.found).map(|found| ty::error::ExpectedFound { expected, found }) + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + let ty::error::ExpectedFound { expected, found } = self; + tcx.lift(expected).and_then(|expected| { + tcx.lift(found).map(|found| ty::error::ExpectedFound { expected, found }) }) } } impl<'a, 'tcx> Lift<'tcx> for ty::error::TypeError<'a> { type Lifted = ty::error::TypeError<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { use crate::ty::error::TypeError::*; - Some(match *self { + Some(match self { Mismatch => Mismatch, UnsafetyMismatch(x) => UnsafetyMismatch(x), AbiMismatch(x) => AbiMismatch(x), @@ -675,51 +659,51 @@ impl<'a, 'tcx> Lift<'tcx> for ty::error::TypeError<'a> { FixedArraySize(x) => FixedArraySize(x), ArgCount => ArgCount, RegionsDoesNotOutlive(a, b) => { - return tcx.lift(&(a, b)).map(|(a, b)| RegionsDoesNotOutlive(a, b)); + return tcx.lift((a, b)).map(|(a, b)| RegionsDoesNotOutlive(a, b)); } RegionsInsufficientlyPolymorphic(a, b) => { - return tcx.lift(&b).map(|b| RegionsInsufficientlyPolymorphic(a, b)); + return tcx.lift(b).map(|b| RegionsInsufficientlyPolymorphic(a, b)); } 
RegionsOverlyPolymorphic(a, b) => { - return tcx.lift(&b).map(|b| RegionsOverlyPolymorphic(a, b)); + return tcx.lift(b).map(|b| RegionsOverlyPolymorphic(a, b)); } RegionsPlaceholderMismatch => RegionsPlaceholderMismatch, IntMismatch(x) => IntMismatch(x), FloatMismatch(x) => FloatMismatch(x), Traits(x) => Traits(x), VariadicMismatch(x) => VariadicMismatch(x), - CyclicTy(t) => return tcx.lift(&t).map(|t| CyclicTy(t)), - CyclicConst(ct) => return tcx.lift(&ct).map(|ct| CyclicConst(ct)), + CyclicTy(t) => return tcx.lift(t).map(|t| CyclicTy(t)), + CyclicConst(ct) => return tcx.lift(ct).map(|ct| CyclicConst(ct)), ProjectionMismatched(x) => ProjectionMismatched(x), - Sorts(ref x) => return tcx.lift(x).map(Sorts), - ExistentialMismatch(ref x) => return tcx.lift(x).map(ExistentialMismatch), - ConstMismatch(ref x) => return tcx.lift(x).map(ConstMismatch), + Sorts(x) => return tcx.lift(x).map(Sorts), + ExistentialMismatch(x) => return tcx.lift(x).map(ExistentialMismatch), + ConstMismatch(x) => return tcx.lift(x).map(ConstMismatch), IntrinsicCast => IntrinsicCast, - TargetFeatureCast(ref x) => TargetFeatureCast(*x), - ObjectUnsafeCoercion(ref x) => return tcx.lift(x).map(ObjectUnsafeCoercion), + TargetFeatureCast(x) => TargetFeatureCast(x), + ObjectUnsafeCoercion(x) => return tcx.lift(x).map(ObjectUnsafeCoercion), }) } } impl<'a, 'tcx> Lift<'tcx> for ty::InstanceDef<'a> { type Lifted = ty::InstanceDef<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { - match *self { + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { + match self { ty::InstanceDef::Item(def_id) => Some(ty::InstanceDef::Item(def_id)), ty::InstanceDef::VtableShim(def_id) => Some(ty::InstanceDef::VtableShim(def_id)), ty::InstanceDef::ReifyShim(def_id) => Some(ty::InstanceDef::ReifyShim(def_id)), ty::InstanceDef::Intrinsic(def_id) => Some(ty::InstanceDef::Intrinsic(def_id)), - ty::InstanceDef::FnPtrShim(def_id, ref ty) => { + ty::InstanceDef::FnPtrShim(def_id, ty) => { Some(ty::InstanceDef::FnPtrShim(def_id, tcx.lift(ty)?)) } ty::InstanceDef::Virtual(def_id, n) => Some(ty::InstanceDef::Virtual(def_id, n)), ty::InstanceDef::ClosureOnceShim { call_once } => { Some(ty::InstanceDef::ClosureOnceShim { call_once }) } - ty::InstanceDef::DropGlue(def_id, ref ty) => { + ty::InstanceDef::DropGlue(def_id, ty) => { Some(ty::InstanceDef::DropGlue(def_id, tcx.lift(ty)?)) } - ty::InstanceDef::CloneShim(def_id, ref ty) => { + ty::InstanceDef::CloneShim(def_id, ty) => { Some(ty::InstanceDef::CloneShim(def_id, tcx.lift(ty)?)) } } @@ -1056,16 +1040,6 @@ impl<'tcx> TypeFoldable<'tcx> for ty::Predicate<'tcx> { } } -pub(super) trait PredicateVisitor<'tcx>: TypeVisitor<'tcx> { - fn visit_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> bool; -} - -impl> PredicateVisitor<'tcx> for T { - default fn visit_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> bool { - predicate.super_visit_with(self) - } -} - impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::List> { fn super_fold_with>(&self, folder: &mut F) -> Self { fold_list(*self, folder, |tcx, v| tcx.intern_predicates(v)) diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index 870cc4eee0505..0fd48d0928257 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -210,6 +210,18 @@ impl TyKind<'tcx> { _ => false, } } + + /// Get the article ("a" or "an") to use with this type. 
+ pub fn article(&self) -> &'static str { + match self { + Int(_) | Float(_) | Array(_, _) => "an", + Adt(def, _) if def.is_enum() => "an", + // This should never happen, but ICEing and causing the user's code + // to not compile felt too harsh. + Error(_) => "a", + _ => "a", + } + } } // `TyKind` is used a lot. Make sure it doesn't unintentionally get bigger. @@ -703,14 +715,16 @@ impl<'tcx> Binder> { use crate::ty::ToPredicate; match self.skip_binder() { ExistentialPredicate::Trait(tr) => { - Binder(tr).with_self_ty(tcx, self_ty).without_const().to_predicate(tcx) + self.rebind(tr).with_self_ty(tcx, self_ty).without_const().to_predicate(tcx) } ExistentialPredicate::Projection(p) => { - Binder(p.with_self_ty(tcx, self_ty)).to_predicate(tcx) + self.rebind(p.with_self_ty(tcx, self_ty)).to_predicate(tcx) } ExistentialPredicate::AutoTrait(did) => { - let trait_ref = - Binder(ty::TraitRef { def_id: did, substs: tcx.mk_substs_trait(self_ty, &[]) }); + let trait_ref = self.rebind(ty::TraitRef { + def_id: did, + substs: tcx.mk_substs_trait(self_ty, &[]), + }); trait_ref.without_const().to_predicate(tcx) } } @@ -775,7 +789,7 @@ impl<'tcx> List> { impl<'tcx> Binder<&'tcx List>> { pub fn principal(&self) -> Option>> { - self.skip_binder().principal().map(Binder::bind) + self.map_bound(|b| b.principal()).transpose() } pub fn principal_def_id(&self) -> Option { @@ -858,8 +872,7 @@ impl<'tcx> PolyTraitRef<'tcx> { } pub fn to_poly_trait_predicate(&self) -> ty::PolyTraitPredicate<'tcx> { - // Note that we preserve binding levels - Binder(ty::TraitPredicate { trait_ref: self.skip_binder() }) + self.map_bound(|trait_ref| ty::TraitPredicate { trait_ref }) } } @@ -1001,6 +1014,19 @@ impl Binder { Binder(f(self.0)) } + /// Wraps a `value` in a binder, using the same bound variables as the + /// current `Binder`. This should not be used if the new value *changes* + /// the bound variables. Note: the (old or new) value itself does not + /// necessarily need to *name* all the bound variables. + /// + /// This currently doesn't do anything different than `bind`, because we + /// don't actually track bound vars. However, semantically, it is different + /// because bound vars aren't allowed to change here, whereas they are + /// in `bind`. This may be (debug) asserted in the future. + pub fn rebind(&self, value: U) -> Binder { + Binder(value) + } + /// Unwraps and returns the value within, but only if it contains /// no bound vars at all. 
(In other words, if this binder -- /// and indeed any enclosing binder -- doesn't bind anything at diff --git a/compiler/rustc_middle/src/ty/subst.rs b/compiler/rustc_middle/src/ty/subst.rs index 7d96adb7c8b4e..f04144c218e76 100644 --- a/compiler/rustc_middle/src/ty/subst.rs +++ b/compiler/rustc_middle/src/ty/subst.rs @@ -141,11 +141,11 @@ impl<'tcx> GenericArg<'tcx> { impl<'a, 'tcx> Lift<'tcx> for GenericArg<'a> { type Lifted = GenericArg<'tcx>; - fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option { + fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { match self.unpack() { - GenericArgKind::Lifetime(lt) => tcx.lift(<).map(|lt| lt.into()), - GenericArgKind::Type(ty) => tcx.lift(&ty).map(|ty| ty.into()), - GenericArgKind::Const(ct) => tcx.lift(&ct).map(|ct| ct.into()), + GenericArgKind::Lifetime(lt) => tcx.lift(lt).map(|lt| lt.into()), + GenericArgKind::Type(ty) => tcx.lift(ty).map(|ty| ty.into()), + GenericArgKind::Const(ct) => tcx.lift(ct).map(|ct| ct.into()), } } } diff --git a/compiler/rustc_middle/src/ty/util.rs b/compiler/rustc_middle/src/ty/util.rs index b0f0f0ba57fad..4a20e1c32f99e 100644 --- a/compiler/rustc_middle/src/ty/util.rs +++ b/compiler/rustc_middle/src/ty/util.rs @@ -529,8 +529,12 @@ impl<'tcx> TyCtxt<'tcx> { // Make sure that any constants in the static's type are evaluated. let static_ty = self.normalize_erasing_regions(ty::ParamEnv::empty(), self.type_of(def_id)); + // Make sure that accesses to unsafe statics end up using raw pointers. + // For thread-locals, this needs to be kept in sync with `Rvalue::ty`. if self.is_mutable_static(def_id) { self.mk_mut_ptr(static_ty) + } else if self.is_foreign_item(def_id) { + self.mk_imm_ptr(static_ty) } else { self.mk_imm_ref(self.lifetimes.re_erased, static_ty) } diff --git a/compiler/rustc_mir/src/borrow_check/mod.rs b/compiler/rustc_mir/src/borrow_check/mod.rs index 4b7af271baef0..de54c5582e049 100644 --- a/compiler/rustc_mir/src/borrow_check/mod.rs +++ b/compiler/rustc_mir/src/borrow_check/mod.rs @@ -674,29 +674,16 @@ impl<'cx, 'tcx> dataflow::ResultsVisitor<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'tc TerminatorKind::SwitchInt { ref discr, switch_ty: _, targets: _ } => { self.consume_operand(loc, (discr, span), flow_state); } - TerminatorKind::Drop { place: ref drop_place, target: _, unwind: _ } => { - let tcx = self.infcx.tcx; - - // Compute the type with accurate region information. - let drop_place_ty = drop_place.ty(self.body, self.infcx.tcx); - - // Erase the regions. - let drop_place_ty = self.infcx.tcx.erase_regions(&drop_place_ty).ty; - - // "Lift" into the tcx -- once regions are erased, this type should be in the - // global arenas; this "lift" operation basically just asserts that is true, but - // that is useful later. 
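The `ty/util.rs` change above (kept in sync with the earlier `Rvalue::ThreadLocalRef` arm in `mir/tcx.rs`) picks between three access types for a static: `*mut T` for mutable statics, `*const T` for foreign (`extern`) statics, and an immutable reference otherwise. A minimal sketch of that three-way rule, with plain booleans standing in for the `TyCtxt` queries:

```rust
// Simplified decision table; the real code builds actual `Ty<'tcx>` values.
enum AccessTy {
    MutPtr,   // `*mut T`: mutable statics are accessed through raw pointers
    ConstPtr, // `*const T`: foreign statics too, so unsafety checking sees
              // a raw-pointer deref rather than a safe reference
    Ref,      // `&T` with an erased/'static region otherwise
}

fn static_access_ty(is_mutable: bool, is_foreign: bool) -> AccessTy {
    if is_mutable {
        AccessTy::MutPtr
    } else if is_foreign {
        AccessTy::ConstPtr
    } else {
        AccessTy::Ref
    }
}

fn main() {
    assert!(matches!(static_access_ty(true, false), AccessTy::MutPtr));
    assert!(matches!(static_access_ty(false, true), AccessTy::ConstPtr));
    assert!(matches!(static_access_ty(false, false), AccessTy::Ref));
    println!("rules check out");
}
```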
- tcx.lift(&drop_place_ty).unwrap(); - + TerminatorKind::Drop { place, target: _, unwind: _ } => { debug!( "visit_terminator_drop \ - loc: {:?} term: {:?} drop_place: {:?} drop_place_ty: {:?} span: {:?}", - loc, term, drop_place, drop_place_ty, span + loc: {:?} term: {:?} place: {:?} span: {:?}", + loc, term, place, span ); self.access_place( loc, - (*drop_place, span), + (place, span), (AccessDepth::Drop, Write(WriteKind::StorageDeadOrDrop)), LocalMutationIsAllowed::Yes, flow_state, diff --git a/compiler/rustc_mir/src/borrow_check/type_check/input_output.rs b/compiler/rustc_mir/src/borrow_check/type_check/input_output.rs index 3c8cbeeca3836..444f9fe8d0a68 100644 --- a/compiler/rustc_mir/src/borrow_check/type_check/input_output.rs +++ b/compiler/rustc_mir/src/borrow_check/type_check/input_output.rs @@ -73,7 +73,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { ); // Equate expected input tys with those in the MIR. - for (&normalized_input_ty, argument_index) in normalized_input_tys.iter().zip(0..) { + for (argument_index, &normalized_input_ty) in normalized_input_tys.iter().enumerate() { // In MIR, argument N is stored in local N+1. let local = Local::new(argument_index + 1); @@ -87,8 +87,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } if let Some(user_provided_sig) = user_provided_sig { - for (&user_provided_input_ty, argument_index) in - user_provided_sig.inputs().iter().zip(0..) + for (argument_index, &user_provided_input_ty) in + user_provided_sig.inputs().iter().enumerate() { // In MIR, closures begin an implicit `self`, so // argument N is stored in local N+2. diff --git a/compiler/rustc_mir/src/const_eval/machine.rs b/compiler/rustc_mir/src/const_eval/machine.rs index 73ca7e0d471ca..7e2cae094811a 100644 --- a/compiler/rustc_mir/src/const_eval/machine.rs +++ b/compiler/rustc_mir/src/const_eval/machine.rs @@ -70,9 +70,10 @@ impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> { ) -> InterpResult<'tcx> { let def_id = instance.def_id(); if Some(def_id) == self.tcx.lang_items().panic_fn() + || Some(def_id) == self.tcx.lang_items().panic_str() || Some(def_id) == self.tcx.lang_items().begin_panic_fn() { - // &'static str + // &str assert!(args.len() == 1); let msg_place = self.deref_operand(args[0])?; diff --git a/compiler/rustc_mir/src/interpret/cast.rs b/compiler/rustc_mir/src/interpret/cast.rs index 0e16b0caefafa..affeae546b2aa 100644 --- a/compiler/rustc_mir/src/interpret/cast.rs +++ b/compiler/rustc_mir/src/interpret/cast.rs @@ -139,9 +139,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { // # First handle non-scalar source values. - // Handle cast from a univariant (ZST) enum. + // Handle cast from a ZST enum (0 or 1 variants). match src.layout.variants { Variants::Single { index } => { + if src.layout.abi.is_uninhabited() { + // This is dead code, because an uninhabited enum is UB to + // instantiate. 
+ throw_ub!(Unreachable); + } if let Some(discr) = src.layout.ty.discriminant_for_variant(*self.tcx, index) { assert!(src.layout.is_zst()); let discr_layout = self.layout_of(discr.ty)?; diff --git a/compiler/rustc_mir/src/interpret/intern.rs b/compiler/rustc_mir/src/interpret/intern.rs index dd5e9c9977437..945791eddc8f1 100644 --- a/compiler/rustc_mir/src/interpret/intern.rs +++ b/compiler/rustc_mir/src/interpret/intern.rs @@ -187,6 +187,12 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir return walked; } } + + // ZSTs do not need validation unless they're uninhabited + if mplace.layout.is_zst() && !mplace.layout.abi.is_uninhabited() { + return Ok(()); + } + self.walk_aggregate(mplace, fields) } diff --git a/compiler/rustc_mir/src/interpret/operand.rs b/compiler/rustc_mir/src/interpret/operand.rs index d8f27ec95458b..3c68b1c8355d5 100644 --- a/compiler/rustc_mir/src/interpret/operand.rs +++ b/compiler/rustc_mir/src/interpret/operand.rs @@ -117,7 +117,7 @@ impl std::fmt::Display for ImmTy<'tcx, Tag> { ty::tls::with(|tcx| { match self.imm { Immediate::Scalar(s) => { - if let Some(ty) = tcx.lift(&self.layout.ty) { + if let Some(ty) = tcx.lift(self.layout.ty) { let cx = FmtPrinter::new(tcx, f, Namespace::ValueNS); p(cx, s, ty)?; return Ok(()); diff --git a/compiler/rustc_mir/src/transform/check_consts/mod.rs b/compiler/rustc_mir/src/transform/check_consts/mod.rs index 33815ceba620b..ba7bea4ac54e1 100644 --- a/compiler/rustc_mir/src/transform/check_consts/mod.rs +++ b/compiler/rustc_mir/src/transform/check_consts/mod.rs @@ -74,12 +74,18 @@ impl ConstCx<'mir, 'tcx> { /// Returns `true` if this `DefId` points to one of the official `panic` lang items. pub fn is_lang_panic_fn(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool { - Some(def_id) == tcx.lang_items().panic_fn() || Some(def_id) == tcx.lang_items().begin_panic_fn() + Some(def_id) == tcx.lang_items().panic_fn() + || Some(def_id) == tcx.lang_items().panic_str() + || Some(def_id) == tcx.lang_items().begin_panic_fn() } -pub fn allow_internal_unstable(tcx: TyCtxt<'tcx>, def_id: DefId, feature_gate: Symbol) -> bool { +pub fn rustc_allow_const_fn_unstable( + tcx: TyCtxt<'tcx>, + def_id: DefId, + feature_gate: Symbol, +) -> bool { let attrs = tcx.get_attrs(def_id); - attr::allow_internal_unstable(&tcx.sess, attrs) + attr::rustc_allow_const_fn_unstable(&tcx.sess, attrs) .map_or(false, |mut features| features.any(|name| name == feature_gate)) } diff --git a/compiler/rustc_mir/src/transform/check_consts/ops.rs b/compiler/rustc_mir/src/transform/check_consts/ops.rs index 63b20c7c0273d..bd51136b8db58 100644 --- a/compiler/rustc_mir/src/transform/check_consts/ops.rs +++ b/compiler/rustc_mir/src/transform/check_consts/ops.rs @@ -224,7 +224,8 @@ impl NonConstOp for CellBorrow { } #[derive(Debug)] -pub struct MutBorrow; +pub struct MutBorrow(pub hir::BorrowKind); + impl NonConstOp for MutBorrow { fn status_in_item(&self, ccx: &ConstCx<'_, '_>) -> Status { // Forbid everywhere except in const fn with a feature gate @@ -236,22 +237,28 @@ impl NonConstOp for MutBorrow { } fn build_error(&self, ccx: &ConstCx<'_, 'tcx>, span: Span) -> DiagnosticBuilder<'tcx> { + let raw = match self.0 { + hir::BorrowKind::Raw => "raw ", + hir::BorrowKind::Ref => "", + }; + let mut err = if ccx.const_kind() == hir::ConstContext::ConstFn { feature_err( &ccx.tcx.sess.parse_sess, sym::const_mut_refs, span, - &format!("mutable references are not allowed in {}s", ccx.const_kind()), + &format!("{}mutable references are not allowed in {}s", raw, 
ccx.const_kind()), ) } else { let mut err = struct_span_err!( ccx.tcx.sess, span, E0764, - "mutable references are not allowed in {}s", + "{}mutable references are not allowed in {}s", + raw, ccx.const_kind(), ); - err.span_label(span, format!("`&mut` is only allowed in `const fn`")); + err.span_label(span, format!("`&{}mut` is only allowed in `const fn`", raw)); err }; if ccx.tcx.sess.teach(&err.get_code().unwrap()) { @@ -270,29 +277,6 @@ impl NonConstOp for MutBorrow { } } -// FIXME(ecstaticmorse): Unify this with `MutBorrow`. It has basically the same issues. -#[derive(Debug)] -pub struct MutAddressOf; -impl NonConstOp for MutAddressOf { - fn status_in_item(&self, ccx: &ConstCx<'_, '_>) -> Status { - // Forbid everywhere except in const fn with a feature gate - if ccx.const_kind() == hir::ConstContext::ConstFn { - Status::Unstable(sym::const_mut_refs) - } else { - Status::Forbidden - } - } - - fn build_error(&self, ccx: &ConstCx<'_, 'tcx>, span: Span) -> DiagnosticBuilder<'tcx> { - feature_err( - &ccx.tcx.sess.parse_sess, - sym::const_mut_refs, - span, - &format!("`&raw mut` is not allowed in {}s", ccx.const_kind()), - ) - } -} - #[derive(Debug)] pub struct MutDeref; impl NonConstOp for MutDeref { diff --git a/compiler/rustc_mir/src/transform/check_consts/validation.rs b/compiler/rustc_mir/src/transform/check_consts/validation.rs index 587b5b381288a..4139b544998fa 100644 --- a/compiler/rustc_mir/src/transform/check_consts/validation.rs +++ b/compiler/rustc_mir/src/transform/check_consts/validation.rs @@ -292,7 +292,11 @@ impl Validator<'mir, 'tcx> { Status::Unstable(gate) if self.tcx.features().enabled(gate) => { let unstable_in_stable = self.ccx.is_const_stable_const_fn() - && !super::allow_internal_unstable(self.tcx, self.def_id().to_def_id(), gate); + && !super::rustc_allow_const_fn_unstable( + self.tcx, + self.def_id().to_def_id(), + gate, + ); if unstable_in_stable { emit_unstable_in_stable_error(self.ccx, span, gate); } @@ -525,14 +529,16 @@ impl Visitor<'tcx> for Validator<'mir, 'tcx> { if !is_allowed { if let BorrowKind::Mut { .. } = kind { - self.check_op(ops::MutBorrow); + self.check_op(ops::MutBorrow(hir::BorrowKind::Ref)); } else { self.check_op(ops::CellBorrow); } } } - Rvalue::AddressOf(Mutability::Mut, _) => self.check_op(ops::MutAddressOf), + Rvalue::AddressOf(Mutability::Mut, _) => { + self.check_op(ops::MutBorrow(hir::BorrowKind::Raw)) + } Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Shallow, ref place) | Rvalue::AddressOf(Mutability::Not, ref place) => { @@ -805,7 +811,7 @@ impl Visitor<'tcx> for Validator<'mir, 'tcx> { } // Calling an unstable function *always* requires that the corresponding gate - // be enabled, even if the function has `#[allow_internal_unstable(the_gate)]`. + // be enabled, even if the function has `#[rustc_allow_const_fn_unstable(the_gate)]`. if !tcx.features().declared_lib_features.iter().any(|&(sym, _)| sym == gate) { self.check_op(ops::FnCallUnstable(callee, Some(gate))); return; @@ -819,7 +825,7 @@ impl Visitor<'tcx> for Validator<'mir, 'tcx> { // Otherwise, we are something const-stable calling a const-unstable fn. 
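Aside (editor's sketch, not from the patch): at a use site the renamed attribute looks roughly like the following. It is only meaningful inside a staged-API crate such as `core`, and the gate name here is just an example.

```rust
#[stable(feature = "example_api", since = "1.0.0")]
#[rustc_const_stable(feature = "example_api", since = "1.0.0")]
// Opt this const-stable function in to one unstable const feature gate.
#[rustc_allow_const_fn_unstable(const_fn_transmute)]
pub const fn u32_from_bytes(bytes: [u8; 4]) -> u32 {
    // Uses an unstable const feature internally; the attribute above is what
    // keeps this from being rejected as "unstable in stable".
    unsafe { core::mem::transmute(bytes) }
}
```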
- if super::allow_internal_unstable(tcx, caller, gate) { + if super::rustc_allow_const_fn_unstable(tcx, caller, gate) { return; } @@ -965,8 +971,8 @@ fn emit_unstable_in_stable_error(ccx: &ConstCx<'_, '_>, span: Span, gate: Symbol ) .span_suggestion( attr_span, - "otherwise `#[allow_internal_unstable]` can be used to bypass stability checks", - format!("#[allow_internal_unstable({})]\n", gate), + "otherwise `#[rustc_allow_const_fn_unstable]` can be used to bypass stability checks", + format!("#[rustc_allow_const_fn_unstable({})]\n", gate), Applicability::MaybeIncorrect, ) .emit(); diff --git a/compiler/rustc_mir/src/transform/check_unsafety.rs b/compiler/rustc_mir/src/transform/check_unsafety.rs index 7309a4129e468..3d68b862df2d0 100644 --- a/compiler/rustc_mir/src/transform/check_unsafety.rs +++ b/compiler/rustc_mir/src/transform/check_unsafety.rs @@ -204,6 +204,9 @@ impl<'a, 'tcx> Visitor<'tcx> for UnsafetyChecker<'a, 'tcx> { if let [] = proj_base { let decl = &self.body.local_decls[place.local]; if decl.internal { + // If the projection root is an artifical local that we introduced when + // desugaring `static`, give a more specific error message + // (avoid the general "raw pointer" clause below, that would only be confusing). if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info { if self.tcx.is_mutable_static(def_id) { self.require_unsafe( diff --git a/compiler/rustc_mir/src/transform/copy_prop.rs b/compiler/rustc_mir/src/transform/copy_prop.rs deleted file mode 100644 index 4f44bb7b20476..0000000000000 --- a/compiler/rustc_mir/src/transform/copy_prop.rs +++ /dev/null @@ -1,384 +0,0 @@ -//! Trivial copy propagation pass. -//! -//! This uses def-use analysis to remove values that have exactly one def and one use, which must -//! be an assignment. -//! -//! To give an example, we look for patterns that look like: -//! -//! DEST = SRC -//! ... -//! USE(DEST) -//! -//! where `DEST` and `SRC` are both locals of some form. We replace that with: -//! -//! NOP -//! ... -//! USE(SRC) -//! -//! The assignment `DEST = SRC` must be (a) the only mutation of `DEST` and (b) the only -//! (non-mutating) use of `SRC`. These restrictions are conservative and may be relaxed in the -//! future. - -use crate::transform::MirPass; -use crate::util::def_use::DefUseAnalysis; -use rustc_middle::mir::visit::MutVisitor; -use rustc_middle::mir::{ - Body, Constant, Local, LocalKind, Location, Operand, Place, Rvalue, StatementKind, -}; -use rustc_middle::ty::TyCtxt; - -pub struct CopyPropagation; - -impl<'tcx> MirPass<'tcx> for CopyPropagation { - fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - let opts = &tcx.sess.opts.debugging_opts; - // We only run when the MIR optimization level is > 1. - // This avoids a slow pass, and messing up debug info. - // FIXME(76740): This optimization is buggy and can cause unsoundness. - if opts.mir_opt_level <= 1 || !opts.unsound_mir_opts { - return; - } - - let mut def_use_analysis = DefUseAnalysis::new(body); - loop { - def_use_analysis.analyze(body); - - if eliminate_self_assignments(body, &def_use_analysis) { - def_use_analysis.analyze(body); - } - - let mut changed = false; - for dest_local in body.local_decls.indices() { - debug!("considering destination local: {:?}", dest_local); - - let action; - let location; - { - // The destination must have exactly one def. 
- let dest_use_info = def_use_analysis.local_info(dest_local); - let dest_def_count = dest_use_info.def_count_not_including_drop(); - if dest_def_count == 0 { - debug!(" Can't copy-propagate local: dest {:?} undefined", dest_local); - continue; - } - if dest_def_count > 1 { - debug!( - " Can't copy-propagate local: dest {:?} defined {} times", - dest_local, - dest_use_info.def_count() - ); - continue; - } - if dest_use_info.use_count() == 0 { - debug!(" Can't copy-propagate local: dest {:?} unused", dest_local); - continue; - } - // Conservatively gives up if the dest is an argument, - // because there may be uses of the original argument value. - // Also gives up on the return place, as we cannot propagate into its implicit - // use by `return`. - if matches!( - body.local_kind(dest_local), - LocalKind::Arg | LocalKind::ReturnPointer - ) { - debug!(" Can't copy-propagate local: dest {:?} (argument)", dest_local); - continue; - } - let dest_place_def = dest_use_info.defs_not_including_drop().next().unwrap(); - location = dest_place_def.location; - - let basic_block = &body[location.block]; - let statement_index = location.statement_index; - let statement = match basic_block.statements.get(statement_index) { - Some(statement) => statement, - None => { - debug!(" Can't copy-propagate local: used in terminator"); - continue; - } - }; - - // That use of the source must be an assignment. - match &statement.kind { - StatementKind::Assign(box (place, Rvalue::Use(operand))) => { - if let Some(local) = place.as_local() { - if local == dest_local { - let maybe_action = match operand { - Operand::Copy(src_place) | Operand::Move(src_place) => { - Action::local_copy(&body, &def_use_analysis, *src_place) - } - Operand::Constant(ref src_constant) => { - Action::constant(src_constant) - } - }; - match maybe_action { - Some(this_action) => action = this_action, - None => continue, - } - } else { - debug!( - " Can't copy-propagate local: source use is not an \ - assignment" - ); - continue; - } - } else { - debug!( - " Can't copy-propagate local: source use is not an \ - assignment" - ); - continue; - } - } - _ => { - debug!( - " Can't copy-propagate local: source use is not an \ - assignment" - ); - continue; - } - } - } - - changed = - action.perform(body, &def_use_analysis, dest_local, location, tcx) || changed; - // FIXME(pcwalton): Update the use-def chains to delete the instructions instead of - // regenerating the chains. 
- break; - } - if !changed { - break; - } - } - } -} - -fn eliminate_self_assignments(body: &mut Body<'_>, def_use_analysis: &DefUseAnalysis) -> bool { - let mut changed = false; - - for dest_local in body.local_decls.indices() { - let dest_use_info = def_use_analysis.local_info(dest_local); - - for def in dest_use_info.defs_not_including_drop() { - let location = def.location; - if let Some(stmt) = body[location.block].statements.get(location.statement_index) { - match &stmt.kind { - StatementKind::Assign(box ( - place, - Rvalue::Use(Operand::Copy(src_place) | Operand::Move(src_place)), - )) => { - if let (Some(local), Some(src_local)) = - (place.as_local(), src_place.as_local()) - { - if local == dest_local && dest_local == src_local { - } else { - continue; - } - } else { - continue; - } - } - _ => { - continue; - } - } - } else { - continue; - } - debug!("deleting a self-assignment for {:?}", dest_local); - body.make_statement_nop(location); - changed = true; - } - } - - changed -} - -enum Action<'tcx> { - PropagateLocalCopy(Local), - PropagateConstant(Constant<'tcx>), -} - -impl<'tcx> Action<'tcx> { - fn local_copy( - body: &Body<'tcx>, - def_use_analysis: &DefUseAnalysis, - src_place: Place<'tcx>, - ) -> Option> { - // The source must be a local. - let src_local = if let Some(local) = src_place.as_local() { - local - } else { - debug!(" Can't copy-propagate local: source is not a local"); - return None; - }; - - // We're trying to copy propagate a local. - // There must be exactly one use of the source used in a statement (not in a terminator). - let src_use_info = def_use_analysis.local_info(src_local); - let src_use_count = src_use_info.use_count(); - if src_use_count == 0 { - debug!(" Can't copy-propagate local: no uses"); - return None; - } - if src_use_count != 1 { - debug!(" Can't copy-propagate local: {} uses", src_use_info.use_count()); - return None; - } - - // Verify that the source doesn't change in between. This is done conservatively for now, - // by ensuring that the source has exactly one mutation. The goal is to prevent things - // like: - // - // DEST = SRC; - // SRC = X; - // USE(DEST); - // - // From being misoptimized into: - // - // SRC = X; - // USE(SRC); - let src_def_count = src_use_info.def_count_not_including_drop(); - // allow function arguments to be propagated - let is_arg = body.local_kind(src_local) == LocalKind::Arg; - if (is_arg && src_def_count != 0) || (!is_arg && src_def_count != 1) { - debug!( - " Can't copy-propagate local: {} defs of src{}", - src_def_count, - if is_arg { " (argument)" } else { "" }, - ); - return None; - } - - Some(Action::PropagateLocalCopy(src_local)) - } - - fn constant(src_constant: &Constant<'tcx>) -> Option> { - Some(Action::PropagateConstant(*src_constant)) - } - - fn perform( - self, - body: &mut Body<'tcx>, - def_use_analysis: &DefUseAnalysis, - dest_local: Local, - location: Location, - tcx: TyCtxt<'tcx>, - ) -> bool { - match self { - Action::PropagateLocalCopy(src_local) => { - // Eliminate the destination and the assignment. - // - // First, remove all markers. - // - // FIXME(pcwalton): Don't do this. Merge live ranges instead. 
- debug!(" Replacing all uses of {:?} with {:?} (local)", dest_local, src_local); - for place_use in &def_use_analysis.local_info(dest_local).defs_and_uses { - if place_use.context.is_storage_marker() { - body.make_statement_nop(place_use.location) - } - } - for place_use in &def_use_analysis.local_info(src_local).defs_and_uses { - if place_use.context.is_storage_marker() { - body.make_statement_nop(place_use.location) - } - } - - // Replace all uses of the destination local with the source local. - def_use_analysis.replace_all_defs_and_uses_with(dest_local, body, src_local, tcx); - - // Finally, zap the now-useless assignment instruction. - debug!(" Deleting assignment"); - body.make_statement_nop(location); - - true - } - Action::PropagateConstant(src_constant) => { - // First, remove all markers. - // - // FIXME(pcwalton): Don't do this. Merge live ranges instead. - debug!( - " Replacing all uses of {:?} with {:?} (constant)", - dest_local, src_constant - ); - let dest_local_info = def_use_analysis.local_info(dest_local); - for place_use in &dest_local_info.defs_and_uses { - if place_use.context.is_storage_marker() { - body.make_statement_nop(place_use.location) - } - } - - // Replace all uses of the destination local with the constant. - let mut visitor = ConstantPropagationVisitor::new(dest_local, src_constant, tcx); - for dest_place_use in &dest_local_info.defs_and_uses { - visitor.visit_location(body, dest_place_use.location) - } - - // Zap the assignment instruction if we eliminated all the uses. We won't have been - // able to do that if the destination was used in a projection, because projections - // must have places on their LHS. - let use_count = dest_local_info.use_count(); - if visitor.uses_replaced == use_count { - debug!( - " {} of {} use(s) replaced; deleting assignment", - visitor.uses_replaced, use_count - ); - body.make_statement_nop(location); - true - } else if visitor.uses_replaced == 0 { - debug!(" No uses replaced; not deleting assignment"); - false - } else { - debug!( - " {} of {} use(s) replaced; not deleting assignment", - visitor.uses_replaced, use_count - ); - true - } - } - } - } -} - -struct ConstantPropagationVisitor<'tcx> { - dest_local: Local, - constant: Constant<'tcx>, - tcx: TyCtxt<'tcx>, - uses_replaced: usize, -} - -impl<'tcx> ConstantPropagationVisitor<'tcx> { - fn new( - dest_local: Local, - constant: Constant<'tcx>, - tcx: TyCtxt<'tcx>, - ) -> ConstantPropagationVisitor<'tcx> { - ConstantPropagationVisitor { dest_local, constant, tcx, uses_replaced: 0 } - } -} - -impl<'tcx> MutVisitor<'tcx> for ConstantPropagationVisitor<'tcx> { - fn tcx(&self) -> TyCtxt<'tcx> { - self.tcx - } - - fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) { - self.super_operand(operand, location); - - match operand { - Operand::Copy(place) | Operand::Move(place) => { - if let Some(local) = place.as_local() { - if local == self.dest_local { - } else { - return; - } - } else { - return; - } - } - _ => return, - } - - *operand = Operand::Constant(box self.constant); - self.uses_replaced += 1 - } -} diff --git a/compiler/rustc_mir/src/transform/inline.rs b/compiler/rustc_mir/src/transform/inline.rs index 34aaefdcbeab6..796ad6c9c2978 100644 --- a/compiler/rustc_mir/src/transform/inline.rs +++ b/compiler/rustc_mir/src/transform/inline.rs @@ -201,9 +201,13 @@ impl Inliner<'tcx> { let terminator = bb_data.terminator(); if let TerminatorKind::Call { func: ref op, .. 
} = terminator.kind { if let ty::FnDef(callee_def_id, substs) = *op.ty(caller_body, self.tcx).kind() { - let instance = Instance::resolve(self.tcx, self.param_env, callee_def_id, substs) - .ok() - .flatten()?; + // To resolve an instance its substs have to be fully normalized, so + // we do this here. + let normalized_substs = self.tcx.normalize_erasing_regions(self.param_env, substs); + let instance = + Instance::resolve(self.tcx, self.param_env, callee_def_id, normalized_substs) + .ok() + .flatten()?; if let InstanceDef::Virtual(..) = instance.def { return None; diff --git a/compiler/rustc_mir/src/transform/instcombine.rs b/compiler/rustc_mir/src/transform/instcombine.rs index 1a8e281d417df..c0b3d5ff18b5c 100644 --- a/compiler/rustc_mir/src/transform/instcombine.rs +++ b/compiler/rustc_mir/src/transform/instcombine.rs @@ -119,6 +119,11 @@ impl OptimizationFinder<'b, 'tcx> { } fn find_deref_of_address(&mut self, rvalue: &Rvalue<'tcx>, location: Location) -> Option<()> { + // FIXME(#78192): This optimization can result in unsoundness. + if !self.tcx.sess.opts.debugging_opts.unsound_mir_opts { + return None; + } + // Look for the sequence // // _2 = &_1; diff --git a/compiler/rustc_mir/src/transform/instrument_coverage.rs b/compiler/rustc_mir/src/transform/instrument_coverage.rs index babe10a0f14da..6824c73ab60a0 100644 --- a/compiler/rustc_mir/src/transform/instrument_coverage.rs +++ b/compiler/rustc_mir/src/transform/instrument_coverage.rs @@ -22,9 +22,7 @@ use rustc_middle::ty::query::Providers; use rustc_middle::ty::TyCtxt; use rustc_span::def_id::DefId; use rustc_span::source_map::original_sp; -use rustc_span::{ - BytePos, CharPos, FileName, Pos, RealFileName, SourceFile, Span, Symbol, SyntaxContext, -}; +use rustc_span::{BytePos, CharPos, Pos, SourceFile, Span, Symbol, SyntaxContext}; use std::cmp::Ordering; @@ -549,13 +547,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { let mir_body = &self.mir_body; let body_span = self.body_span(); let source_file = source_map.lookup_source_file(body_span.lo()); - let file_name = match &source_file.name { - FileName::Real(RealFileName::Named(path)) => Symbol::intern(&path.to_string_lossy()), - _ => bug!( - "source_file.name should be a RealFileName, but it was: {:?}", - source_file.name - ), - }; + let file_name = Symbol::intern(&source_file.name.to_string()); debug!("instrumenting {:?}, span: {}", def_id, source_map.span_to_string(body_span)); diff --git a/compiler/rustc_mir/src/transform/match_branches.rs b/compiler/rustc_mir/src/transform/match_branches.rs index 8b2d6b09aa830..06690dcbf6eb7 100644 --- a/compiler/rustc_mir/src/transform/match_branches.rs +++ b/compiler/rustc_mir/src/transform/match_branches.rs @@ -38,12 +38,16 @@ pub struct MatchBranchSimplification; impl<'tcx> MirPass<'tcx> for MatchBranchSimplification { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + if tcx.sess.opts.debugging_opts.mir_opt_level <= 1 { + return; + } + let param_env = tcx.param_env(body.source.def_id()); - let bbs = body.basic_blocks_mut(); + let (bbs, local_decls) = body.basic_blocks_and_local_decls_mut(); 'outer: for bb_idx in bbs.indices() { let (discr, val, switch_ty, first, second) = match bbs[bb_idx].terminator().kind { TerminatorKind::SwitchInt { - discr: Operand::Copy(ref place) | Operand::Move(ref place), + discr: ref discr @ (Operand::Copy(_) | Operand::Move(_)), switch_ty, ref targets, .. 
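Aside (editor's illustration): the `match_branches.rs` change that begins above and continues below now bails out below `-Z mir-opt-level=2` and copies the discriminant into a fresh temporary before building the `Eq`/`Ne` assignment, so the merged statements never re-read a place the arms may have moved or overwritten. The source-level shape the pass targets looks like this (my example, not from the patch):

```rust
// Both arms only assign a constant bool to the same place, so the whole
// `SwitchInt` can collapse into a single comparison in MIR, roughly:
//   StorageLive(_tmp);  _tmp = copy of the discriminant;
//   _0 = Eq(_tmp, const true);
//   StorageDead(_tmp);
fn is_zero(x: u32) -> bool {
    match x == 0 {
        true => true,
        false => false,
    }
}
```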
@@ -52,7 +56,7 @@ impl<'tcx> MirPass<'tcx> for MatchBranchSimplification { if target == targets.otherwise() { continue; } - (place, value, switch_ty, target, targets.otherwise()) + (discr, value, switch_ty, target, targets.otherwise()) } // Only optimize switch int statements _ => continue, @@ -92,6 +96,10 @@ impl<'tcx> MirPass<'tcx> for MatchBranchSimplification { // Take ownership of items now that we know we can optimize. let discr = discr.clone(); + // Introduce a temporary for the discriminant value. + let source_info = bbs[bb_idx].terminator().source_info; + let discr_local = local_decls.push(LocalDecl::new(switch_ty, source_info.span)); + // We already checked that first and second are different blocks, // and bb_idx has a different terminator from both of them. let (from, first, second) = bbs.pick3_mut(bb_idx, first, second); @@ -120,7 +128,11 @@ impl<'tcx> MirPass<'tcx> for MatchBranchSimplification { rustc_span::DUMMY_SP, ); let op = if f_b { BinOp::Eq } else { BinOp::Ne }; - let rhs = Rvalue::BinaryOp(op, Operand::Copy(discr.clone()), const_cmp); + let rhs = Rvalue::BinaryOp( + op, + Operand::Copy(Place::from(discr_local)), + const_cmp, + ); Statement { source_info: f.source_info, kind: StatementKind::Assign(box (*lhs, rhs)), @@ -131,7 +143,16 @@ impl<'tcx> MirPass<'tcx> for MatchBranchSimplification { _ => unreachable!(), } }); + + from.statements + .push(Statement { source_info, kind: StatementKind::StorageLive(discr_local) }); + from.statements.push(Statement { + source_info, + kind: StatementKind::Assign(box (Place::from(discr_local), Rvalue::Use(discr))), + }); from.statements.extend(new_stmts); + from.statements + .push(Statement { source_info, kind: StatementKind::StorageDead(discr_local) }); from.terminator_mut().kind = first.terminator().kind.clone(); } } diff --git a/compiler/rustc_mir/src/transform/mod.rs b/compiler/rustc_mir/src/transform/mod.rs index ffb84950fc92c..20b8c90a9dcad 100644 --- a/compiler/rustc_mir/src/transform/mod.rs +++ b/compiler/rustc_mir/src/transform/mod.rs @@ -22,7 +22,6 @@ pub mod check_packed_ref; pub mod check_unsafety; pub mod cleanup_post_borrowck; pub mod const_prop; -pub mod copy_prop; pub mod deaggregator; pub mod dest_prop; pub mod dump_mir; @@ -401,8 +400,7 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { &simplify_try::SimplifyArmIdentity, &simplify_try::SimplifyBranchSame, &dest_prop::DestinationPropagation, - ©_prop::CopyPropagation, - &simplify_branches::SimplifyBranches::new("after-copy-prop"), + &simplify_branches::SimplifyBranches::new("final"), &remove_noop_landing_pads::RemoveNoopLandingPads, &simplify::SimplifyCfg::new("final"), &nrvo::RenameReturnPlace, diff --git a/compiler/rustc_mir/src/transform/promote_consts.rs b/compiler/rustc_mir/src/transform/promote_consts.rs index 7373abcc820ab..292380d7fec2d 100644 --- a/compiler/rustc_mir/src/transform/promote_consts.rs +++ b/compiler/rustc_mir/src/transform/promote_consts.rs @@ -301,17 +301,6 @@ impl std::ops::Deref for Validator<'a, 'tcx> { struct Unpromotable; impl<'tcx> Validator<'_, 'tcx> { - /// Determines if this code could be executed at runtime and thus is subject to codegen. - /// That means even unused constants need to be evaluated. - /// - /// `const_kind` should not be used in this file other than through this method! 
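Aside (editor's sketch, not from the patch): the `promote_consts.rs` change below replaces `maybe_runtime` with an explicit rule: during explicit promotion and inside `const`/`static` initializers every eligible `const fn` call may be promoted, while in runtime code only `#[rustc_promotable]` callees are. The user-visible difference, with made-up names:

```rust
const fn id(x: u32) -> u32 { x }

// Inside a static initializer the call is promoted, so the borrow is 'static.
static FIVE: &u32 = &id(5);

fn at_runtime() -> &'static u32 {
    // Here `id` is not `#[rustc_promotable]`, so `&id(5)` would only borrow a
    // temporary and fail to live long enough; a literal is still promotable.
    &5
}
```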
- fn maybe_runtime(&self) -> bool { - match self.const_kind { - None | Some(hir::ConstContext::ConstFn) => true, - Some(hir::ConstContext::Static(_) | hir::ConstContext::Const) => false, - } - } - fn validate_candidate(&self, candidate: Candidate) -> Result<(), Unpromotable> { match candidate { Candidate::Ref(loc) => { @@ -562,14 +551,12 @@ impl<'tcx> Validator<'_, 'tcx> { } ProjectionElem::Field(..) => { - if self.maybe_runtime() { - let base_ty = - Place::ty_from(place.local, proj_base, self.body, self.tcx).ty; - if let Some(def) = base_ty.ty_adt_def() { - // No promotion of union field accesses. - if def.is_union() { - return Err(Unpromotable); - } + let base_ty = + Place::ty_from(place.local, proj_base, self.body, self.tcx).ty; + if let Some(def) = base_ty.ty_adt_def() { + // No promotion of union field accesses. + if def.is_union() { + return Err(Unpromotable); } } } @@ -751,7 +738,14 @@ impl<'tcx> Validator<'_, 'tcx> { ) -> Result<(), Unpromotable> { let fn_ty = callee.ty(self.body, self.tcx); - if !self.explicit && self.maybe_runtime() { + // When doing explicit promotion and inside const/static items, we promote all (eligible) function calls. + // Everywhere else, we require `#[rustc_promotable]` on the callee. + let promote_all_const_fn = self.explicit + || matches!( + self.const_kind, + Some(hir::ConstContext::Static(_) | hir::ConstContext::Const) + ); + if !promote_all_const_fn { if let ty::FnDef(def_id, _) = *fn_ty.kind() { // Never promote runtime `const fn` calls of // functions without `#[rustc_promotable]`. diff --git a/compiler/rustc_mir/src/transform/simplify_branches.rs b/compiler/rustc_mir/src/transform/simplify_branches.rs index 5f63c03993d3a..a9a45e61a38cb 100644 --- a/compiler/rustc_mir/src/transform/simplify_branches.rs +++ b/compiler/rustc_mir/src/transform/simplify_branches.rs @@ -49,9 +49,10 @@ impl<'tcx> MirPass<'tcx> for SimplifyBranches { } TerminatorKind::Assert { target, cond: Operand::Constant(ref c), expected, .. - } if (c.literal.try_eval_bool(tcx, param_env) == Some(true)) == expected => { - TerminatorKind::Goto { target } - } + } => match c.literal.try_eval_bool(tcx, param_env) { + Some(v) if v == expected => TerminatorKind::Goto { target }, + _ => continue, + }, TerminatorKind::FalseEdge { real_target, .. 
} => { TerminatorKind::Goto { target: real_target } } diff --git a/compiler/rustc_mir/src/transform/validate.rs b/compiler/rustc_mir/src/transform/validate.rs index beffffa727e7e..7b22d643ab646 100644 --- a/compiler/rustc_mir/src/transform/validate.rs +++ b/compiler/rustc_mir/src/transform/validate.rs @@ -5,13 +5,17 @@ use crate::dataflow::{Analysis, ResultsCursor}; use crate::util::storage::AlwaysLiveLocals; use super::MirPass; -use rustc_middle::mir::visit::{PlaceContext, Visitor}; +use rustc_middle::mir::{ + interpret::Scalar, + visit::{PlaceContext, Visitor}, +}; use rustc_middle::mir::{ AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPhase, Operand, Rvalue, SourceScope, Statement, StatementKind, Terminator, TerminatorKind, VarDebugInfo, }; use rustc_middle::ty::relate::{Relate, RelateResult, TypeRelation}; use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt}; +use rustc_target::abi::Size; #[derive(Copy, Clone, Debug)] enum EdgeKind { @@ -346,7 +350,25 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> { ), ); } - for (_, target) in targets.iter() { + + let target_width = self.tcx.sess.target.pointer_width; + + let size = Size::from_bits(match switch_ty.kind() { + ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(), + ty::Int(int) => int.normalize(target_width).bit_width().unwrap(), + ty::Char => 32, + ty::Bool => 1, + other => bug!("unhandled type: {:?}", other), + }); + + for (value, target) in targets.iter() { + if Scalar::<()>::try_from_uint(value, size).is_none() { + self.fail( + location, + format!("the value {:#x} is not a proper {:?}", value, switch_ty), + ) + } + self.check_edge(location, target, EdgeKind::Normal); } self.check_edge(location, targets.otherwise(), EdgeKind::Normal); diff --git a/compiler/rustc_mir/src/util/def_use.rs b/compiler/rustc_mir/src/util/def_use.rs deleted file mode 100644 index b4448ead8eb81..0000000000000 --- a/compiler/rustc_mir/src/util/def_use.rs +++ /dev/null @@ -1,158 +0,0 @@ -//! Def-use analysis. - -use rustc_index::vec::IndexVec; -use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor}; -use rustc_middle::mir::{Body, Local, Location, VarDebugInfo}; -use rustc_middle::ty::TyCtxt; -use std::mem; - -pub struct DefUseAnalysis { - info: IndexVec, -} - -#[derive(Clone)] -pub struct Info { - // FIXME(eddyb) use smallvec where possible. - pub defs_and_uses: Vec, - var_debug_info_indices: Vec, -} - -#[derive(Clone)] -pub struct Use { - pub context: PlaceContext, - pub location: Location, -} - -impl DefUseAnalysis { - pub fn new(body: &Body<'_>) -> DefUseAnalysis { - DefUseAnalysis { info: IndexVec::from_elem_n(Info::new(), body.local_decls.len()) } - } - - pub fn analyze(&mut self, body: &Body<'_>) { - self.clear(); - - let mut finder = DefUseFinder { - info: mem::take(&mut self.info), - var_debug_info_index: 0, - in_var_debug_info: false, - }; - finder.visit_body(&body); - self.info = finder.info - } - - fn clear(&mut self) { - for info in &mut self.info { - info.clear(); - } - } - - pub fn local_info(&self, local: Local) -> &Info { - &self.info[local] - } - - fn mutate_defs_and_uses( - &self, - local: Local, - body: &mut Body<'tcx>, - new_local: Local, - tcx: TyCtxt<'tcx>, - ) { - let mut visitor = MutateUseVisitor::new(local, new_local, tcx); - let info = &self.info[local]; - for place_use in &info.defs_and_uses { - visitor.visit_location(body, place_use.location) - } - // Update debuginfo as well, alongside defs/uses. 
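Aside (editor's sketch): the `validate.rs` hunk above rejects `SwitchInt` values that do not fit the bit width of `switch_ty`. The check is essentially the following standalone predicate (names are mine, not rustc's):

```rust
// A switch value is well-formed iff it survives truncation to the type's width.
fn fits_in_bits(value: u128, bits: u32) -> bool {
    if bits >= 128 {
        return true;
    }
    value < (1u128 << bits)
}

fn main() {
    assert!(fits_in_bits(1, 1)); // fine for `bool`
    assert!(!fits_in_bits(0x100, 8)); // 256 is not a proper `u8`
    assert!(fits_in_bits('\u{10FFFF}' as u128, 32)); // `char` switches on 32 bits
}
```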
- for &i in &info.var_debug_info_indices { - visitor.visit_var_debug_info(&mut body.var_debug_info[i]); - } - } - - // FIXME(pcwalton): this should update the def-use chains. - pub fn replace_all_defs_and_uses_with( - &self, - local: Local, - body: &mut Body<'tcx>, - new_local: Local, - tcx: TyCtxt<'tcx>, - ) { - self.mutate_defs_and_uses(local, body, new_local, tcx) - } -} - -struct DefUseFinder { - info: IndexVec, - var_debug_info_index: usize, - in_var_debug_info: bool, -} - -impl Visitor<'_> for DefUseFinder { - fn visit_local(&mut self, &local: &Local, context: PlaceContext, location: Location) { - let info = &mut self.info[local]; - if self.in_var_debug_info { - info.var_debug_info_indices.push(self.var_debug_info_index); - } else { - info.defs_and_uses.push(Use { context, location }); - } - } - fn visit_var_debug_info(&mut self, var_debug_info: &VarDebugInfo<'tcx>) { - assert!(!self.in_var_debug_info); - self.in_var_debug_info = true; - self.super_var_debug_info(var_debug_info); - self.in_var_debug_info = false; - self.var_debug_info_index += 1; - } -} - -impl Info { - fn new() -> Info { - Info { defs_and_uses: vec![], var_debug_info_indices: vec![] } - } - - fn clear(&mut self) { - self.defs_and_uses.clear(); - self.var_debug_info_indices.clear(); - } - - pub fn def_count(&self) -> usize { - self.defs_and_uses.iter().filter(|place_use| place_use.context.is_mutating_use()).count() - } - - pub fn def_count_not_including_drop(&self) -> usize { - self.defs_not_including_drop().count() - } - - pub fn defs_not_including_drop(&self) -> impl Iterator { - self.defs_and_uses - .iter() - .filter(|place_use| place_use.context.is_mutating_use() && !place_use.context.is_drop()) - } - - pub fn use_count(&self) -> usize { - self.defs_and_uses.iter().filter(|place_use| place_use.context.is_nonmutating_use()).count() - } -} - -struct MutateUseVisitor<'tcx> { - query: Local, - new_local: Local, - tcx: TyCtxt<'tcx>, -} - -impl MutateUseVisitor<'tcx> { - fn new(query: Local, new_local: Local, tcx: TyCtxt<'tcx>) -> MutateUseVisitor<'tcx> { - MutateUseVisitor { query, new_local, tcx } - } -} - -impl MutVisitor<'tcx> for MutateUseVisitor<'tcx> { - fn tcx(&self) -> TyCtxt<'tcx> { - self.tcx - } - - fn visit_local(&mut self, local: &mut Local, _context: PlaceContext, _location: Location) { - if *local == self.query { - *local = self.new_local; - } - } -} diff --git a/compiler/rustc_mir/src/util/mod.rs b/compiler/rustc_mir/src/util/mod.rs index 699f3bcf0146f..7da2f4ffe0889 100644 --- a/compiler/rustc_mir/src/util/mod.rs +++ b/compiler/rustc_mir/src/util/mod.rs @@ -1,6 +1,5 @@ pub mod aggregate; pub mod borrowck_errors; -pub mod def_use; pub mod elaborate_drops; pub mod patch; pub mod storage; diff --git a/compiler/rustc_mir_build/src/build/expr/as_constant.rs b/compiler/rustc_mir_build/src/build/expr/as_constant.rs index 244a70f83b03e..3a36ad590c506 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_constant.rs +++ b/compiler/rustc_mir_build/src/build/expr/as_constant.rs @@ -33,6 +33,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { Constant { span, user_ty, literal } } ExprKind::StaticRef { literal, .. 
} => Constant { span, user_ty: None, literal }, + ExprKind::ConstBlock { value } => Constant { span, user_ty: None, literal: value }, _ => span_bug!(span, "expression is not a valid constant {:?}", kind), } } diff --git a/compiler/rustc_mir_build/src/build/expr/as_place.rs b/compiler/rustc_mir_build/src/build/expr/as_place.rs index 39dbb6dd3ff92..443025c4f8458 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_place.rs +++ b/compiler/rustc_mir_build/src/build/expr/as_place.rs @@ -254,6 +254,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { | ExprKind::Continue { .. } | ExprKind::Return { .. } | ExprKind::Literal { .. } + | ExprKind::ConstBlock { .. } | ExprKind::StaticRef { .. } | ExprKind::InlineAsm { .. } | ExprKind::LlvmInlineAsm { .. } diff --git a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs index 9c5fddc6b77c0..4033cc6cf46c4 100644 --- a/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs +++ b/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs @@ -234,6 +234,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } ExprKind::Yield { .. } | ExprKind::Literal { .. } + | ExprKind::ConstBlock { .. } | ExprKind::StaticRef { .. } | ExprKind::Block { .. } | ExprKind::Match { .. } diff --git a/compiler/rustc_mir_build/src/build/expr/category.rs b/compiler/rustc_mir_build/src/build/expr/category.rs index 9cabd186d8460..ac5cf187aa01b 100644 --- a/compiler/rustc_mir_build/src/build/expr/category.rs +++ b/compiler/rustc_mir_build/src/build/expr/category.rs @@ -68,7 +68,9 @@ impl Category { | ExprKind::ThreadLocalRef(_) | ExprKind::LlvmInlineAsm { .. } => Some(Category::Rvalue(RvalueFunc::AsRvalue)), - ExprKind::Literal { .. } | ExprKind::StaticRef { .. } => Some(Category::Constant), + ExprKind::ConstBlock { .. } | ExprKind::Literal { .. } | ExprKind::StaticRef { .. } => { + Some(Category::Constant) + } ExprKind::Loop { .. } | ExprKind::Block { .. } diff --git a/compiler/rustc_mir_build/src/build/expr/into.rs b/compiler/rustc_mir_build/src/build/expr/into.rs index a12c22fb850e4..a268b0b46ff51 100644 --- a/compiler/rustc_mir_build/src/build/expr/into.rs +++ b/compiler/rustc_mir_build/src/build/expr/into.rs @@ -454,6 +454,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { | ExprKind::Array { .. } | ExprKind::Tuple { .. } | ExprKind::Closure { .. } + | ExprKind::ConstBlock { .. } | ExprKind::Literal { .. } | ExprKind::ThreadLocalRef(_) | ExprKind::StaticRef { .. } => { diff --git a/compiler/rustc_mir_build/src/build/mod.rs b/compiler/rustc_mir_build/src/build/mod.rs index 220e3b11a6adb..899fc647493a0 100644 --- a/compiler/rustc_mir_build/src/build/mod.rs +++ b/compiler/rustc_mir_build/src/build/mod.rs @@ -351,9 +351,6 @@ struct Builder<'a, 'tcx> { unit_temp: Option>, var_debug_info: Vec>, - - /// Cached block with the `UNREACHABLE` terminator. 
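Aside (editor's illustration): the new `ExprKind::ConstBlock` arms above lower inline `const { ... }` blocks (unstable `inline_const` at the time of this patch) the same way as literals and named constants, that is, as `Category::Constant`. Surface syntax sketch:

```rust
#![feature(inline_const)]

fn mask() -> [u8; 4] {
    // Evaluated at compile time; becomes a constant operand in MIR just like a
    // literal or a `const` item would.
    const { [0xFF, 0x00, 0xFF, 0x00] }
}
```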
- cached_unreachable_block: Option, } impl<'a, 'tcx> Builder<'a, 'tcx> { @@ -634,10 +631,6 @@ where builder.cfg.terminate(return_block, source_info, TerminatorKind::Return); let should_abort = should_abort_on_panic(tcx, fn_def_id, abi); builder.build_drop_trees(should_abort); - // Attribute any unreachable codepaths to the function's closing brace - if let Some(unreachable_block) = builder.cached_unreachable_block { - builder.cfg.terminate(unreachable_block, source_info, TerminatorKind::Unreachable); - } return_block.unit() })); @@ -676,12 +669,6 @@ fn construct_const<'a, 'tcx>( builder.build_drop_trees(false); - // Constants may be match expressions in which case an unreachable block may - // be created, so terminate it properly. - if let Some(unreachable_block) = builder.cached_unreachable_block { - builder.cfg.terminate(unreachable_block, source_info, TerminatorKind::Unreachable); - } - builder.finish() } @@ -757,7 +744,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { var_indices: Default::default(), unit_temp: None, var_debug_info: vec![], - cached_unreachable_block: None, }; assert_eq!(builder.cfg.start_new_block(), START_BLOCK); diff --git a/compiler/rustc_mir_build/src/lints.rs b/compiler/rustc_mir_build/src/lints.rs index bdef02a011bac..576b537c01766 100644 --- a/compiler/rustc_mir_build/src/lints.rs +++ b/compiler/rustc_mir_build/src/lints.rs @@ -1,5 +1,5 @@ use rustc_data_structures::graph::iterate::{ - ControlFlow, NodeStatus, TriColorDepthFirstSearch, TriColorVisitor, + NodeStatus, TriColorDepthFirstSearch, TriColorVisitor, }; use rustc_hir::intravisit::FnKind; use rustc_middle::hir::map::blocks::FnLikeNode; @@ -8,6 +8,7 @@ use rustc_middle::ty::subst::{GenericArg, InternalSubsts}; use rustc_middle::ty::{self, AssocItem, AssocItemContainer, Instance, TyCtxt}; use rustc_session::lint::builtin::UNCONDITIONAL_RECURSION; use rustc_span::Span; +use std::ops::ControlFlow; crate fn check<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) { let def_id = body.source.def_id().expect_local(); @@ -71,12 +72,14 @@ impl<'mir, 'tcx> Search<'mir, 'tcx> { let func_ty = func.ty(body, tcx); if let ty::FnDef(callee, substs) = *func_ty.kind() { - let (callee, call_substs) = - if let Ok(Some(instance)) = Instance::resolve(tcx, param_env, callee, substs) { - (instance.def_id(), instance.substs) - } else { - (callee, substs) - }; + let normalized_substs = tcx.normalize_erasing_regions(param_env, substs); + let (callee, call_substs) = if let Ok(Some(instance)) = + Instance::resolve(tcx, param_env, callee, normalized_substs) + { + (instance.def_id(), instance.substs) + } else { + (callee, normalized_substs) + }; // FIXME(#57965): Make this work across function boundaries diff --git a/compiler/rustc_mir_build/src/thir/cx/expr.rs b/compiler/rustc_mir_build/src/thir/cx/expr.rs index 13e69474cfb96..731bd954246ce 100644 --- a/compiler/rustc_mir_build/src/thir/cx/expr.rs +++ b/compiler/rustc_mir_build/src/thir/cx/expr.rs @@ -511,6 +511,12 @@ fn make_mirror_unadjusted<'a, 'tcx>( inputs: asm.inputs_exprs.to_ref(), }, + hir::ExprKind::ConstBlock(ref anon_const) => { + let anon_const_def_id = cx.tcx.hir().local_def_id(anon_const.hir_id); + let value = ty::Const::from_anon_const(cx.tcx, anon_const_def_id); + + ExprKind::ConstBlock { value } + } // Now comes the rote stuff: hir::ExprKind::Repeat(ref v, ref count) => { let count_def_id = cx.tcx.hir().local_def_id(count.hir_id); diff --git a/compiler/rustc_mir_build/src/thir/mod.rs b/compiler/rustc_mir_build/src/thir/mod.rs index 4d57fd5c64f8d..f2a2ef0d8f2bc 100644 --- 
a/compiler/rustc_mir_build/src/thir/mod.rs +++ b/compiler/rustc_mir_build/src/thir/mod.rs @@ -232,6 +232,9 @@ crate enum ExprKind<'tcx> { Return { value: Option>, }, + ConstBlock { + value: &'tcx Const<'tcx>, + }, Repeat { value: ExprRef<'tcx>, count: &'tcx Const<'tcx>, diff --git a/compiler/rustc_mir_build/src/thir/pattern/_match.rs b/compiler/rustc_mir_build/src/thir/pattern/_match.rs index 04de9a7a58dda..0449e14983150 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/_match.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/_match.rs @@ -78,20 +78,26 @@ //! new pattern `p`. //! //! For example, say we have the following: +//! //! ``` -//! // x: (Option, Result<()>) -//! match x { -//! (Some(true), _) => {} -//! (None, Err(())) => {} -//! (None, Err(_)) => {} -//! } +//! // x: (Option, Result<()>) +//! match x { +//! (Some(true), _) => {} +//! (None, Err(())) => {} +//! (None, Err(_)) => {} +//! } //! ``` +//! //! Here, the matrix `P` starts as: +//! +//! ``` //! [ //! [(Some(true), _)], //! [(None, Err(()))], //! [(None, Err(_))], //! ] +//! ``` +//! //! We can tell it's not exhaustive, because `U(P, _)` is true (we're not covering //! `[(Some(false), _)]`, for instance). In addition, row 3 is not useful, because //! all the values it covers are already covered by row 2. @@ -178,10 +184,14 @@ //! This special case is handled in `is_useful_specialized`. //! //! For example, if `P` is: +//! +//! ``` //! [ -//! [Some(true), _], -//! [None, 0], +//! [Some(true), _], +//! [None, 0], //! ] +//! ``` +//! //! and `p` is [Some(false), 0], then we don't care about row 2 since we know `p` only //! matches values that row 2 doesn't. For row 1 however, we need to dig into the //! arguments of `Some` to know whether some new value is covered. So we compute @@ -198,10 +208,14 @@ //! `U(P, p) := U(D(P), D(p))` //! //! For example, if `P` is: +//! +//! ``` //! [ //! [_, true, _], //! [None, false, 1], //! ] +//! ``` +//! //! and `p` is [_, false, _], the `Some` constructor doesn't appear in `P`. So if we //! only had row 2, we'd know that `p` is useful. However row 1 starts with a //! wildcard, so we need to check whether `U([[true, _]], [false, 1])`. @@ -215,10 +229,14 @@ //! `U(P, p) := ∃(k ϵ constructors) U(S(k, P), S(k, p))` //! //! For example, if `P` is: +//! +//! ``` //! [ //! [Some(true), _], //! [None, false], //! ] +//! ``` +//! //! and `p` is [_, false], both `None` and `Some` constructors appear in the first //! components of `P`. We will therefore try popping both constructors in turn: we //! 
compute `U([[true, _]], [_, false])` for the `Some` constructor, and `U([[false]], @@ -284,10 +302,9 @@ use super::{FieldPat, Pat, PatKind, PatRange}; use rustc_arena::TypedArena; use rustc_attr::{SignedInt, UnsignedInt}; -use rustc_errors::ErrorReported; use rustc_hir::def_id::DefId; use rustc_hir::{HirId, RangeEnd}; -use rustc_middle::mir::interpret::{truncate, AllocId, ConstValue, Pointer, Scalar}; +use rustc_middle::mir::interpret::{truncate, ConstValue}; use rustc_middle::mir::Field; use rustc_middle::ty::layout::IntegerExt; use rustc_middle::ty::{self, Const, Ty, TyCtxt}; @@ -296,108 +313,21 @@ use rustc_span::{Span, DUMMY_SP}; use rustc_target::abi::{Integer, Size, VariantIdx}; use smallvec::{smallvec, SmallVec}; -use std::borrow::Cow; use std::cmp::{self, max, min, Ordering}; -use std::convert::TryInto; use std::fmt; use std::iter::{FromIterator, IntoIterator}; use std::ops::RangeInclusive; -crate fn expand_pattern<'a, 'tcx>(cx: &MatchCheckCtxt<'a, 'tcx>, pat: Pat<'tcx>) -> Pat<'tcx> { - LiteralExpander { tcx: cx.tcx, param_env: cx.param_env }.fold_pattern(&pat) -} - -struct LiteralExpander<'tcx> { - tcx: TyCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, +crate fn expand_pattern<'tcx>(pat: Pat<'tcx>) -> Pat<'tcx> { + LiteralExpander.fold_pattern(&pat) } -impl<'tcx> LiteralExpander<'tcx> { - /// Derefs `val` and potentially unsizes the value if `crty` is an array and `rty` a slice. - /// - /// `crty` and `rty` can differ because you can use array constants in the presence of slice - /// patterns. So the pattern may end up being a slice, but the constant is an array. We convert - /// the array to a slice in that case. - fn fold_const_value_deref( - &mut self, - val: ConstValue<'tcx>, - // the pattern's pointee type - rty: Ty<'tcx>, - // the constant's pointee type - crty: Ty<'tcx>, - ) -> ConstValue<'tcx> { - debug!("fold_const_value_deref {:?} {:?} {:?}", val, rty, crty); - match (val, &crty.kind(), &rty.kind()) { - // the easy case, deref a reference - (ConstValue::Scalar(p), x, y) if x == y => { - match p { - Scalar::Ptr(p) => { - let alloc = self.tcx.global_alloc(p.alloc_id).unwrap_memory(); - ConstValue::ByRef { alloc, offset: p.offset } - } - Scalar::Raw { .. } => { - let layout = self.tcx.layout_of(self.param_env.and(rty)).unwrap(); - if layout.is_zst() { - // Deref of a reference to a ZST is a nop. - ConstValue::Scalar(Scalar::zst()) - } else { - // FIXME(oli-obk): this is reachable for `const FOO: &&&u32 = &&&42;` - bug!("cannot deref {:#?}, {} -> {}", val, crty, rty); - } - } - } - } - // unsize array to slice if pattern is array but match value or other patterns are slice - (ConstValue::Scalar(Scalar::Ptr(p)), ty::Array(t, n), ty::Slice(u)) => { - assert_eq!(t, u); - ConstValue::Slice { - data: self.tcx.global_alloc(p.alloc_id).unwrap_memory(), - start: p.offset.bytes().try_into().unwrap(), - end: n.eval_usize(self.tcx, ty::ParamEnv::empty()).try_into().unwrap(), - } - } - // fat pointers stay the same - (ConstValue::Slice { .. 
}, _, _) - | (_, ty::Slice(_), ty::Slice(_)) - | (_, ty::Str, ty::Str) => val, - // FIXME(oli-obk): this is reachable for `const FOO: &&&u32 = &&&42;` being used - _ => bug!("cannot deref {:#?}, {} -> {}", val, crty, rty), - } - } -} +struct LiteralExpander; -impl<'tcx> PatternFolder<'tcx> for LiteralExpander<'tcx> { +impl<'tcx> PatternFolder<'tcx> for LiteralExpander { fn fold_pattern(&mut self, pat: &Pat<'tcx>) -> Pat<'tcx> { debug!("fold_pattern {:?} {:?} {:?}", pat, pat.ty.kind(), pat.kind); match (pat.ty.kind(), &*pat.kind) { - (&ty::Ref(_, rty, _), &PatKind::Constant { value: Const { val, ty: const_ty } }) - if const_ty.is_ref() => - { - let crty = - if let ty::Ref(_, crty, _) = const_ty.kind() { crty } else { unreachable!() }; - if let ty::ConstKind::Value(val) = val { - Pat { - ty: pat.ty, - span: pat.span, - kind: box PatKind::Deref { - subpattern: Pat { - ty: rty, - span: pat.span, - kind: box PatKind::Constant { - value: Const::from_value( - self.tcx, - self.fold_const_value_deref(*val, rty, crty), - rty, - ), - }, - }, - }, - } - } else { - bug!("cannot deref {:#?}, {} -> {}", val, crty, rty) - } - } - (_, &PatKind::Binding { subpattern: Some(ref s), .. }) => s.fold_with(self), (_, &PatKind::AscribeUserType { subpattern: ref s, .. }) => s.fold_with(self), _ => pat.super_fold_with(self), @@ -482,9 +412,15 @@ impl<'p, 'tcx> PatStack<'p, 'tcx> { cx: &mut MatchCheckCtxt<'p, 'tcx>, constructor: &Constructor<'tcx>, ctor_wild_subpatterns: &Fields<'p, 'tcx>, + is_my_head_ctor: bool, ) -> Option> { - let new_fields = - specialize_one_pattern(cx, self.head(), constructor, ctor_wild_subpatterns)?; + let new_fields = specialize_one_pattern( + cx, + self.head(), + constructor, + ctor_wild_subpatterns, + is_my_head_ctor, + )?; Some(new_fields.push_on_patstack(&self.0[1..])) } } @@ -662,6 +598,7 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> { cx, constructor, ctor_wild_subpatterns, + false, ) }) .collect() @@ -687,7 +624,9 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> { SpecializationCache::Incompatible => self .patterns .iter() - .filter_map(|r| r.specialize_constructor(cx, constructor, ctor_wild_subpatterns)) + .filter_map(|r| { + r.specialize_constructor(cx, constructor, ctor_wild_subpatterns, false) + }) .collect(), } } @@ -707,6 +646,7 @@ impl<'p, 'tcx> Matrix<'p, 'tcx> { /// +++++++++++++++++++++++++++++ /// + _ + [_, _, tail @ ..] + /// +++++++++++++++++++++++++++++ +/// ``` impl<'p, 'tcx> fmt::Debug for Matrix<'p, 'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "\n")?; @@ -908,42 +848,30 @@ enum Constructor<'tcx> { Single, /// Enum variants. Variant(DefId), - /// Literal values. - ConstantValue(&'tcx ty::Const<'tcx>), /// Ranges of integer literal values (`2`, `2..=5` or `2..5`). IntRange(IntRange<'tcx>), /// Ranges of floating-point literal values (`2.0..=5.2`). FloatRange(&'tcx ty::Const<'tcx>, &'tcx ty::Const<'tcx>, RangeEnd), + /// String literals. Strings are not quite the same as `&[u8]` so we treat them separately. + Str(&'tcx ty::Const<'tcx>), /// Array and slice patterns. Slice(Slice), + /// Constants that must not be matched structurally. They are treated as black + /// boxes for the purposes of exhaustiveness: we must not inspect them, and they + /// don't count towards making a match exhaustive. + Opaque, /// Fake extra constructor for enums that aren't allowed to be matched exhaustively. 
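Aside (editor's illustration): with the constructors above, string literals are now tracked by the dedicated `Str` constructor instead of the old `ConstantValue`, and constants that cannot be matched structurally become `Opaque`. For string patterns the user-visible behaviour is unchanged:

```rust
fn classify(s: &str) -> u32 {
    match s {
        "small" => 1, // each literal is a distinct `Str` constructor
        "large" => 2,
        _ => 0, // still required: string patterns are never exhaustive
    }
}
```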
NonExhaustive, } impl<'tcx> Constructor<'tcx> { - fn is_slice(&self) -> bool { - match self { - Slice(_) => true, - _ => false, - } - } - - fn variant_index_for_adt<'a>( - &self, - cx: &MatchCheckCtxt<'a, 'tcx>, - adt: &'tcx ty::AdtDef, - ) -> VariantIdx { + fn variant_index_for_adt(&self, adt: &'tcx ty::AdtDef) -> VariantIdx { match *self { Variant(id) => adt.variant_index_with_id(id), Single => { assert!(!adt.is_enum()); VariantIdx::new(0) } - ConstantValue(c) => cx - .tcx - .destructure_const(cx.param_env.and(c)) - .variant - .expect("destructed const of adt without variant id"), _ => bug!("bad constructor {:?} for adt {:?}", self, adt), } } @@ -957,7 +885,7 @@ impl<'tcx> Constructor<'tcx> { match self { // Those constructors can only match themselves. - Single | Variant(_) | ConstantValue(..) | FloatRange(..) => { + Single | Variant(_) | Str(..) | FloatRange(..) => { if other_ctors.iter().any(|c| c == self) { vec![] } else { vec![self.clone()] } } &Slice(slice) => { @@ -965,8 +893,6 @@ impl<'tcx> Constructor<'tcx> { .iter() .filter_map(|c: &Constructor<'_>| match c { Slice(slice) => Some(*slice), - // FIXME(oli-obk): implement `deref` for `ConstValue` - ConstantValue(..) => None, _ => bug!("bad slice pattern constructor {:?}", c), }) .map(Slice::value_kind); @@ -1030,6 +956,7 @@ impl<'tcx> Constructor<'tcx> { } // This constructor is never covered by anything else NonExhaustive => vec![NonExhaustive], + Opaque => bug!("unexpected opaque ctor {:?} found in all_ctors", self), } } @@ -1069,7 +996,7 @@ impl<'tcx> Constructor<'tcx> { PatKind::Variant { adt_def: adt, substs, - variant_index: self.variant_index_for_adt(cx, adt), + variant_index: self.variant_index_for_adt(adt), subpatterns, } } else { @@ -1108,10 +1035,11 @@ impl<'tcx> Constructor<'tcx> { PatKind::Slice { prefix, slice: Some(wild), suffix } } }, - &ConstantValue(value) => PatKind::Constant { value }, + &Str(value) => PatKind::Constant { value }, &FloatRange(lo, hi, end) => PatKind::Range(PatRange { lo, hi, end }), IntRange(range) => return range.to_pat(cx.tcx), NonExhaustive => PatKind::Wild, + Opaque => bug!("we should not try to apply an opaque constructor {:?}", self), }; Pat { ty, span: DUMMY_SP, kind: Box::new(pat) } @@ -1186,12 +1114,6 @@ impl<'p, 'tcx> Fields<'p, 'tcx> { Fields::Slice(std::slice::from_ref(pat)) } - /// Construct a new `Fields` from the given patterns. You must be sure those patterns can't - /// contain fields that need to be filtered out. When in doubt, prefer `replace_fields`. - fn from_slice_unfiltered(pats: &'p [Pat<'tcx>]) -> Self { - Fields::Slice(pats) - } - /// Convenience; internal use. fn wildcards_from_tys( cx: &MatchCheckCtxt<'p, 'tcx>, @@ -1221,7 +1143,7 @@ impl<'p, 'tcx> Fields<'p, 'tcx> { // Use T as the sub pattern type of Box. Fields::from_single_pattern(wildcard_from_ty(substs.type_at(0))) } else { - let variant = &adt.variants[constructor.variant_index_for_adt(cx, adt)]; + let variant = &adt.variants[constructor.variant_index_for_adt(adt)]; // Whether we must not match the fields of this variant exhaustively. let is_non_exhaustive = variant.is_field_list_non_exhaustive() && !adt.did.is_local(); @@ -1269,7 +1191,7 @@ impl<'p, 'tcx> Fields<'p, 'tcx> { } _ => bug!("bad slice pattern {:?} {:?}", constructor, ty), }, - ConstantValue(..) | FloatRange(..) | IntRange(..) | NonExhaustive => Fields::empty(), + Str(..) | FloatRange(..) | IntRange(..) 
| NonExhaustive | Opaque => Fields::empty(), }; debug!("Fields::wildcards({:?}, {:?}) = {:#?}", constructor, ty, ret); ret @@ -1496,6 +1418,7 @@ struct PatCtxt<'tcx> { /// multiple patterns. /// /// For example, if we are constructing a witness for the match against +/// /// ``` /// struct Pair(Option<(u32, u32)>, bool); /// @@ -1581,9 +1504,7 @@ fn all_constructors<'a, 'tcx>( ) }; match *pcx.ty.kind() { - ty::Bool => { - [true, false].iter().map(|&b| ConstantValue(ty::Const::from_bool(cx.tcx, b))).collect() - } + ty::Bool => vec![make_range(0, 1)], ty::Array(ref sub_ty, len) if len.try_eval_usize(cx.tcx, cx.param_env).is_some() => { let len = len.eval_usize(cx.tcx, cx.param_env); if len != 0 && cx.is_uninhabited(sub_ty) { @@ -1619,12 +1540,14 @@ fn all_constructors<'a, 'tcx>( // actually match against them all themselves. So we always return only the fictitious // constructor. // E.g., in an example like: + // // ``` // let err: io::ErrorKind = ...; // match err { // io::ErrorKind::NotFound => {}, // } // ``` + // // we don't want to show every possible IO error, but instead have only `_` as the // witness. let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(pcx.ty); @@ -1696,7 +1619,7 @@ impl<'tcx> IntRange<'tcx> { #[inline] fn is_integral(ty: Ty<'_>) -> bool { match ty.kind() { - ty::Char | ty::Int(_) | ty::Uint(_) => true, + ty::Char | ty::Int(_) | ty::Uint(_) | ty::Bool => true, _ => false, } } @@ -1718,6 +1641,7 @@ impl<'tcx> IntRange<'tcx> { #[inline] fn integral_size_and_signed_bias(tcx: TyCtxt<'tcx>, ty: Ty<'_>) -> Option<(Size, u128)> { match *ty.kind() { + ty::Bool => Some((Size::from_bytes(1), 0)), ty::Char => Some((Size::from_bytes(4), 0)), ty::Int(ity) => { let size = Integer::from_attr(&tcx, SignedInt(ity)).size(); @@ -2017,6 +1941,7 @@ crate fn is_useful<'p, 'tcx>( let mut unreachable_branches = Vec::new(); // Subpatterns that are unreachable from all branches. E.g. in the following case, the last // `true` is unreachable only from one branch, so it is overall reachable. + // // ``` // match (true, true) { // (true, true) => {} @@ -2161,10 +2086,12 @@ crate fn is_useful<'p, 'tcx>( // to do this and instead report a single `_` witness: // if the user didn't actually specify a constructor // in this arm, e.g., in + // // ``` // let x: (Direction, Direction, bool) = ...; // let (_, _, false) = x; // ``` + // // we don't want to show all 16 possible witnesses // `(, , true)` - we are // satisfied with `(_, _, true)`. In this case, @@ -2206,7 +2133,7 @@ fn is_useful_specialized<'p, 'tcx>( // We cache the result of `Fields::wildcards` because it is used a lot. let ctor_wild_subpatterns = Fields::wildcards(cx, &ctor, ty); let matrix = matrix.specialize_constructor(cx, &ctor, &ctor_wild_subpatterns); - v.specialize_constructor(cx, &ctor, &ctor_wild_subpatterns) + v.specialize_constructor(cx, &ctor, &ctor_wild_subpatterns, true) .map(|v| is_useful(cx, &matrix, &v, witness_preference, hir_id, is_under_guard, false)) .map(|u| u.apply_constructor(cx, &ctor, ty, &ctor_wild_subpatterns)) .unwrap_or(NotUseful) @@ -2231,18 +2158,13 @@ fn pat_constructor<'tcx>( if let Some(int_range) = IntRange::from_const(tcx, param_env, value, pat.span) { Some(IntRange(int_range)) } else { - match (value.val, &value.ty.kind()) { - (_, ty::Array(_, n)) => { - let len = n.eval_usize(tcx, param_env); - Some(Slice(Slice { array_len: Some(len), kind: FixedLen(len) })) - } - (ty::ConstKind::Value(ConstValue::Slice { start, end, .. 
}), ty::Slice(_)) => { - let len = (end - start) as u64; - Some(Slice(Slice { array_len: None, kind: FixedLen(len) })) - } - // FIXME(oli-obk): implement `deref` for `ConstValue` - // (ty::ConstKind::Value(ConstValue::ByRef { .. }), ty::Slice(_)) => { ... } - _ => Some(ConstantValue(value)), + match value.ty.kind() { + ty::Float(_) => Some(FloatRange(value, value, RangeEnd::Included)), + ty::Ref(_, t, _) if t.is_str() => Some(Str(value)), + // All constants that can be structurally matched have already been expanded + // into the corresponding `Pat`s by `const_to_pat`. Constants that remain are + // opaque. + _ => Some(Opaque), } } } @@ -2278,75 +2200,6 @@ fn pat_constructor<'tcx>( } } -// checks whether a constant is equal to a user-written slice pattern. Only supports byte slices, -// meaning all other types will compare unequal and thus equal patterns often do not cause the -// second pattern to lint about unreachable match arms. -fn slice_pat_covered_by_const<'tcx>( - tcx: TyCtxt<'tcx>, - _span: Span, - const_val: &'tcx ty::Const<'tcx>, - prefix: &[Pat<'tcx>], - slice: &Option>, - suffix: &[Pat<'tcx>], - param_env: ty::ParamEnv<'tcx>, -) -> Result { - let const_val_val = if let ty::ConstKind::Value(val) = const_val.val { - val - } else { - bug!( - "slice_pat_covered_by_const: {:#?}, {:#?}, {:#?}, {:#?}", - const_val, - prefix, - slice, - suffix, - ) - }; - - let data: &[u8] = match (const_val_val, &const_val.ty.kind()) { - (ConstValue::ByRef { offset, alloc, .. }, ty::Array(t, n)) => { - assert_eq!(*t, tcx.types.u8); - let n = n.eval_usize(tcx, param_env); - let ptr = Pointer::new(AllocId(0), offset); - alloc.get_bytes(&tcx, ptr, Size::from_bytes(n)).unwrap() - } - (ConstValue::Slice { data, start, end }, ty::Slice(t)) => { - assert_eq!(*t, tcx.types.u8); - let ptr = Pointer::new(AllocId(0), Size::from_bytes(start)); - data.get_bytes(&tcx, ptr, Size::from_bytes(end - start)).unwrap() - } - // FIXME(oli-obk): create a way to extract fat pointers from ByRef - (_, ty::Slice(_)) => return Ok(false), - _ => bug!( - "slice_pat_covered_by_const: {:#?}, {:#?}, {:#?}, {:#?}", - const_val, - prefix, - slice, - suffix, - ), - }; - - let pat_len = prefix.len() + suffix.len(); - if data.len() < pat_len || (slice.is_none() && data.len() > pat_len) { - return Ok(false); - } - - for (ch, pat) in data[..prefix.len()] - .iter() - .zip(prefix) - .chain(data[data.len() - suffix.len()..].iter().zip(suffix)) - { - if let box PatKind::Constant { value } = pat.kind { - let b = value.eval_bits(tcx, param_env, pat.ty); - assert_eq!(b as u8 as u128, b); - if b as u8 != *ch { - return Ok(false); - } - } - } - - Ok(true) -} - /// For exhaustive integer matching, some constructors are grouped within other constructors /// (namely integer typed values are grouped within ranges). 
However, when specialising these /// constructors, we want to be specialising for the underlying constructors (the integers), not @@ -2644,35 +2497,6 @@ fn lint_overlapping_patterns<'tcx>( } } -fn constructor_covered_by_range<'tcx>( - tcx: TyCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, - ctor: &Constructor<'tcx>, - pat: &Pat<'tcx>, -) -> Option<()> { - if let Single = ctor { - return Some(()); - } - - let (pat_from, pat_to, pat_end, ty) = match *pat.kind { - PatKind::Constant { value } => (value, value, RangeEnd::Included, value.ty), - PatKind::Range(PatRange { lo, hi, end }) => (lo, hi, end, lo.ty), - _ => bug!("`constructor_covered_by_range` called with {:?}", pat), - }; - let (ctor_from, ctor_to, ctor_end) = match *ctor { - ConstantValue(value) => (value, value, RangeEnd::Included), - FloatRange(from, to, ctor_end) => (from, to, ctor_end), - _ => bug!("`constructor_covered_by_range` called with {:?}", ctor), - }; - trace!("constructor_covered_by_range {:#?}, {:#?}, {:#?}, {}", ctor, pat_from, pat_to, ty); - - let to = compare_const_vals(tcx, ctor_to, pat_to, param_env, ty)?; - let from = compare_const_vals(tcx, ctor_from, pat_from, param_env, ty)?; - let intersects = (from == Ordering::Greater || from == Ordering::Equal) - && (to == Ordering::Less || (pat_end == ctor_end && to == Ordering::Equal)); - if intersects { Some(()) } else { None } -} - /// This is the main specialization step. It expands the pattern /// into `arity` patterns based on the constructor. For most patterns, the step is trivial, /// for instance tuple patterns are flattened and box patterns expand into their inner pattern. @@ -2689,15 +2513,43 @@ fn specialize_one_pattern<'p, 'tcx>( pat: &'p Pat<'tcx>, constructor: &Constructor<'tcx>, ctor_wild_subpatterns: &Fields<'p, 'tcx>, + is_its_own_ctor: bool, // Whether `ctor` is known to be derived from `pat` ) -> Option> { if let NonExhaustive = constructor { - // Only a wildcard pattern can match the special extra constructor + // Only a wildcard pattern can match the special extra constructor. if !pat.is_wildcard() { return None; } return Some(Fields::empty()); } + if let Opaque = constructor { + // Only a wildcard pattern can match an opaque constant, unless we're specializing the + // value against its own constructor. That happens when we call + // `v.specialize_constructor(ctor)` with `ctor` obtained from `pat_constructor(v.head())`. + // For example, in the following match, when we are dealing with the third branch, we will + // specialize with an `Opaque` ctor. We want to ignore the second branch because opaque + // constants should not be inspected, but we don't want to ignore the current (third) + // branch, as that would cause us to always conclude that such a branch is unreachable. + // ```rust + // #[derive(PartialEq)] + // struct Foo(i32); + // impl Eq for Foo {} + // const FOO: Foo = Foo(42); + // + // match (Foo(0), true) { + // (_, true) => {} + // (FOO, true) => {} + // (FOO, false) => {} + // } + // ``` + if is_its_own_ctor || pat.is_wildcard() { + return Some(Fields::empty()); + } else { + return None; + } + } + let result = match *pat.kind { PatKind::AscribeUserType { .. 
} => bug!(), // Handled by `expand_pattern` @@ -2717,93 +2569,52 @@ fn specialize_one_pattern<'p, 'tcx>( PatKind::Deref { ref subpattern } => Some(Fields::from_single_pattern(subpattern)), - PatKind::Constant { value } if constructor.is_slice() => { - // We extract an `Option` for the pointer because slices of zero - // elements don't necessarily point to memory, they are usually - // just integers. The only time they should be pointing to memory - // is when they are subslices of nonzero slices. - let (alloc, offset, n, ty) = match value.ty.kind() { - ty::Array(t, n) => { - let n = n.eval_usize(cx.tcx, cx.param_env); - // Shortcut for `n == 0` where no matter what `alloc` and `offset` we produce, - // the result would be exactly what we early return here. - if n == 0 { - if ctor_wild_subpatterns.len() as u64 != n { - return None; - } - return Some(Fields::empty()); - } - match value.val { - ty::ConstKind::Value(ConstValue::ByRef { offset, alloc, .. }) => { - (Cow::Borrowed(alloc), offset, n, t) - } - _ => span_bug!(pat.span, "array pattern is {:?}", value,), + PatKind::Constant { .. } | PatKind::Range { .. } => { + match constructor { + IntRange(ctor) => { + let pat = IntRange::from_pat(cx.tcx, cx.param_env, pat)?; + ctor.intersection(cx.tcx, &pat)?; + // Constructor splitting should ensure that all intersections we encounter + // are actually inclusions. + assert!(ctor.is_subrange(&pat)); + } + FloatRange(ctor_from, ctor_to, ctor_end) => { + let (pat_from, pat_to, pat_end, ty) = match *pat.kind { + PatKind::Constant { value } => (value, value, RangeEnd::Included, value.ty), + PatKind::Range(PatRange { lo, hi, end }) => (lo, hi, end, lo.ty), + _ => unreachable!(), // This is ensured by the branch we're in + }; + let to = compare_const_vals(cx.tcx, ctor_to, pat_to, cx.param_env, ty)?; + let from = compare_const_vals(cx.tcx, ctor_from, pat_from, cx.param_env, ty)?; + let intersects = (from == Ordering::Greater || from == Ordering::Equal) + && (to == Ordering::Less + || (pat_end == *ctor_end && to == Ordering::Equal)); + if !intersects { + return None; } } - ty::Slice(t) => { - match value.val { - ty::ConstKind::Value(ConstValue::Slice { data, start, end }) => { - let offset = Size::from_bytes(start); - let n = (end - start) as u64; - (Cow::Borrowed(data), offset, n, t) - } - ty::ConstKind::Value(ConstValue::ByRef { .. }) => { - // FIXME(oli-obk): implement `deref` for `ConstValue` - return None; - } + Str(ctor_value) => { + let pat_value = match *pat.kind { + PatKind::Constant { value } => value, _ => span_bug!( pat.span, - "slice pattern constant must be scalar pair but is {:?}", - value, + "unexpected range pattern {:?} for constant value ctor", + pat ), + }; + + // FIXME: there's probably a more direct way of comparing for equality + if compare_const_vals(cx.tcx, ctor_value, pat_value, cx.param_env, pat.ty)? + != Ordering::Equal + { + return None; } } - _ => span_bug!( - pat.span, - "unexpected const-val {:?} with ctor {:?}", - value, - constructor, - ), - }; - if ctor_wild_subpatterns.len() as u64 != n { - return None; - } - - // Convert a constant slice/array pattern to a list of patterns. 
- let layout = cx.tcx.layout_of(cx.param_env.and(ty)).ok()?; - let ptr = Pointer::new(AllocId(0), offset); - let pats = cx.pattern_arena.alloc_from_iter((0..n).filter_map(|i| { - let ptr = ptr.offset(layout.size * i, &cx.tcx).ok()?; - let scalar = alloc.read_scalar(&cx.tcx, ptr, layout.size).ok()?; - let scalar = scalar.check_init().ok()?; - let value = ty::Const::from_scalar(cx.tcx, scalar, ty); - let pattern = Pat { ty, span: pat.span, kind: box PatKind::Constant { value } }; - Some(pattern) - })); - // Ensure none of the dereferences failed. - if pats.len() as u64 != n { - return None; - } - Some(Fields::from_slice_unfiltered(pats)) - } - - PatKind::Constant { .. } | PatKind::Range { .. } => { - // If the constructor is a: - // - Single value: add a row if the pattern contains the constructor. - // - Range: add a row if the constructor intersects the pattern. - if let IntRange(ctor) = constructor { - let pat = IntRange::from_pat(cx.tcx, cx.param_env, pat)?; - ctor.intersection(cx.tcx, &pat)?; - // Constructor splitting should ensure that all intersections we encounter - // are actually inclusions. - assert!(ctor.is_subrange(&pat)); - } else { - // Fallback for non-ranges and ranges that involve - // floating-point numbers, which are not conveniently handled - // by `IntRange`. For these cases, the constructor may not be a - // range so intersection actually devolves into being covered - // by the pattern. - constructor_covered_by_range(cx.tcx, cx.param_env, constructor, pat)?; + _ => { + // If we reach here, we must be trying to inspect an opaque constant. Thus we skip + // the row. + return None; + } } Some(Fields::empty()) } @@ -2826,21 +2637,6 @@ fn specialize_one_pattern<'p, 'tcx>( let suffix = suffix.iter().enumerate().map(|(i, p)| (arity - suffix.len() + i, p)); Some(ctor_wild_subpatterns.replace_fields_indexed(prefix.chain(suffix))) } - ConstantValue(cv) => { - match slice_pat_covered_by_const( - cx.tcx, - pat.span, - cv, - prefix, - slice, - suffix, - cx.param_env, - ) { - Ok(true) => Some(Fields::empty()), - Ok(false) => None, - Err(ErrorReported) => None, - } - } _ => span_bug!(pat.span, "unexpected ctor {:?} for slice pat", constructor), }, diff --git a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs index 69de7c7e2ee0b..30b700a1d4f63 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs @@ -137,7 +137,7 @@ impl<'tcx> MatchVisitor<'_, 'tcx> { patcx.include_lint_checks(); let pattern = patcx.lower_pattern(pat); let pattern_ty = pattern.ty; - let pattern: &_ = cx.pattern_arena.alloc(expand_pattern(cx, pattern)); + let pattern: &_ = cx.pattern_arena.alloc(expand_pattern(pattern)); if !patcx.errors.is_empty() { *have_errors = true; patcx.report_inlining_errors(pat.span); diff --git a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs index a203b3a142863..6370f8c375b2a 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs @@ -387,14 +387,16 @@ impl<'a, 'tcx> ConstToPat<'a, 'tcx> { // `&str` and `&[u8]` are represented as `ConstValue::Slice`, let's keep using this // optimization for now. 
ty::Str => PatKind::Constant { value: cv }, - ty::Slice(elem_ty) if elem_ty == tcx.types.u8 => PatKind::Constant { value: cv }, // `b"foo"` produces a `&[u8; 3]`, but you can't use constants of array type when // matching against references, you can only use byte string literals. - // FIXME: clean this up, likely by permitting array patterns when matching on slices - ty::Array(elem_ty, _) if elem_ty == tcx.types.u8 => PatKind::Constant { value: cv }, + // The typechecker has a special case for byte string literals, by treating them + // as slices. This means we turn `&[T; N]` constants into slice patterns, which + // has no negative effects on pattern matching, even if we're actually matching on + // arrays. + ty::Array(..) | // Cannot merge this with the catch all branch below, because the `const_deref` - // changes the type from slice to array, and slice patterns behave differently from - // array patterns. + // changes the type from slice to array, we need to keep the original type in the + // pattern. ty::Slice(..) => { let old = self.behind_reference.replace(true); let array = tcx.deref_const(self.param_env.and(cv)); diff --git a/compiler/rustc_mir_build/src/thir/pattern/mod.rs b/compiler/rustc_mir_build/src/thir/pattern/mod.rs index d46e9a98825a7..25e187243416d 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/mod.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/mod.rs @@ -158,6 +158,13 @@ crate enum PatKind<'tcx> { subpattern: Pat<'tcx>, }, + /// One of the following: + /// * `&str`, which will be handled as a string pattern and thus exhaustiveness + /// checking will detect if you use the same string twice in different patterns. + /// * integer, bool, char or float, which will be handled by exhaustivenes to cover exactly + /// its own value, similar to `&str`, but these values are much simpler. + /// * Opaque constants, that must not be matched structurally. So anything that does not derive + /// `PartialEq` and `Eq`. 
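As a quick illustration of the `&str` case described in the doc comment above (a sketch for review purposes, not part of the patch): identical string constants now share a single `Str` constructor, so a duplicated string arm is flagged as unreachable by exhaustiveness checking.

```rust
// Sketch only: shows the behaviour the `&str` bullet above describes.
const GREETING: &str = "hi";

fn classify(s: &str) -> u8 {
    match s {
        GREETING => 0,
        "hi" => 1, // unreachable: same string value as `GREETING`
        _ => 2,
    }
}

fn main() {
    assert_eq!(classify("hi"), 0);
    assert_eq!(classify("bye"), 2);
}
```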
Constant { value: &'tcx ty::Const<'tcx>, }, @@ -856,6 +863,11 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { *self.lower_path(qpath, expr.hir_id, expr.span).kind } else { let (lit, neg) = match expr.kind { + hir::ExprKind::ConstBlock(ref anon_const) => { + let anon_const_def_id = self.tcx.hir().local_def_id(anon_const.hir_id); + let value = ty::Const::from_anon_const(self.tcx, anon_const_def_id); + return *self.const_to_pat(value, expr.hir_id, expr.span, false).kind; + } hir::ExprKind::Lit(ref lit) => (lit, false), hir::ExprKind::Unary(hir::UnOp::UnNeg, ref expr) => { let lit = match expr.kind { diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index 25deb46e14751..ba416be6b38d3 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -8,7 +8,7 @@ use rustc_ast as ast; use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind}; -use rustc_ast::tokenstream::{self, TokenStream, TokenTree}; +use rustc_ast::tokenstream::{self, LazyTokenStream, TokenStream, TokenTree}; use rustc_ast_pretty::pprust; use rustc_data_structures::sync::Lrc; use rustc_errors::{Diagnostic, FatalError, Level, PResult}; @@ -22,7 +22,7 @@ use std::str; use tracing::{debug, info}; -pub const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments"); +pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments"); #[macro_use] pub mod parser; @@ -248,35 +248,36 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke // As a result, some AST nodes are annotated with the token stream they // came from. Here we attempt to extract these lossless token streams // before we fall back to the stringification. + + let convert_tokens = |tokens: Option| tokens.map(|t| t.into_token_stream()); + let tokens = match *nt { - Nonterminal::NtItem(ref item) => { - prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) - } - Nonterminal::NtBlock(ref block) => block.tokens.clone(), + Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()), + Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.clone()), Nonterminal::NtStmt(ref stmt) => { // FIXME: We currently only collect tokens for `:stmt` // matchers in `macro_rules!` macros. 
When we start collecting // tokens for attributes on statements, we will need to prepend // attributes here - stmt.tokens.clone() + convert_tokens(stmt.tokens.clone()) } - Nonterminal::NtPat(ref pat) => pat.tokens.clone(), - Nonterminal::NtTy(ref ty) => ty.tokens.clone(), + Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.clone()), + Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.clone()), Nonterminal::NtIdent(ident, is_raw) => { Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into()) } Nonterminal::NtLifetime(ident) => { Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into()) } - Nonterminal::NtMeta(ref attr) => attr.tokens.clone(), - Nonterminal::NtPath(ref path) => path.tokens.clone(), - Nonterminal::NtVis(ref vis) => vis.tokens.clone(), + Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.clone()), + Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.clone()), + Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.clone()), Nonterminal::NtTT(ref tt) => Some(tt.clone().into()), Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => { if expr.tokens.is_none() { debug!("missing tokens for expr {:?}", expr); } - prepend_attrs(sess, &expr.attrs, expr.tokens.as_ref(), span) + prepend_attrs(&expr.attrs, expr.tokens.as_ref()) } }; @@ -600,12 +601,10 @@ fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool { } fn prepend_attrs( - sess: &ParseSess, attrs: &[ast::Attribute], - tokens: Option<&tokenstream::TokenStream>, - span: rustc_span::Span, + tokens: Option<&tokenstream::LazyTokenStream>, ) -> Option { - let tokens = tokens?; + let tokens = tokens?.clone().into_token_stream(); if attrs.is_empty() { return Some(tokens.clone()); } @@ -616,47 +615,12 @@ fn prepend_attrs( ast::AttrStyle::Outer, "inner attributes should prevent cached tokens from existing" ); - - let source = pprust::attribute_to_string(attr); - let macro_filename = FileName::macro_expansion_source_code(&source); - - let item = match attr.kind { - ast::AttrKind::Normal(ref item) => item, - ast::AttrKind::DocComment(..) => { - let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span)); - builder.push(stream); - continue; - } - }; - - // synthesize # [ $path $tokens ] manually here - let mut brackets = tokenstream::TokenStreamBuilder::new(); - - // For simple paths, push the identifier directly - if item.path.segments.len() == 1 && item.path.segments[0].args.is_none() { - let ident = item.path.segments[0].ident; - let token = token::Ident(ident.name, ident.as_str().starts_with("r#")); - brackets.push(tokenstream::TokenTree::token(token, ident.span)); - - // ... and for more complicated paths, fall back to a reparse hack that - // should eventually be removed. - } else { - let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span)); - brackets.push(stream); - } - - brackets.push(item.args.outer_tokens()); - - // The span we list here for `#` and for `[ ... ]` are both wrong in - // that it encompasses more than each token, but it hopefully is "good - // enough" for now at least. 
- builder.push(tokenstream::TokenTree::token(token::Pound, attr.span)); - let delim_span = tokenstream::DelimSpan::from_single(attr.span); - builder.push(tokenstream::TokenTree::Delimited( - delim_span, - token::DelimToken::Bracket, - brackets.build(), - )); + builder.push( + attr.tokens + .clone() + .unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr)) + .into_token_stream(), + ); } builder.push(tokens.clone()); Some(builder.build()) diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs index 98f94098bfc15..053b7e0b75fe4 100644 --- a/compiler/rustc_parse/src/parser/attr.rs +++ b/compiler/rustc_parse/src/parser/attr.rs @@ -4,7 +4,7 @@ use rustc_ast::attr; use rustc_ast::token::{self, Nonterminal}; use rustc_ast_pretty::pprust; use rustc_errors::{error_code, PResult}; -use rustc_span::Span; +use rustc_span::{sym, Span}; use tracing::debug; @@ -30,41 +30,53 @@ impl<'a> Parser<'a> { let mut just_parsed_doc_comment = false; loop { debug!("parse_outer_attributes: self.token={:?}", self.token); - if self.check(&token::Pound) { - let inner_error_reason = if just_parsed_doc_comment { - "an inner attribute is not permitted following an outer doc comment" - } else if !attrs.is_empty() { - "an inner attribute is not permitted following an outer attribute" - } else { - DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG - }; - let inner_parse_policy = InnerAttrPolicy::Forbidden { - reason: inner_error_reason, - saw_doc_comment: just_parsed_doc_comment, - prev_attr_sp: attrs.last().map(|a| a.span), - }; - let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?; - attrs.push(attr); - just_parsed_doc_comment = false; + let (attr, tokens) = if self.check(&token::Pound) { + self.collect_tokens(|this| { + let inner_error_reason = if just_parsed_doc_comment { + "an inner attribute is not permitted following an outer doc comment" + } else if !attrs.is_empty() { + "an inner attribute is not permitted following an outer attribute" + } else { + DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG + }; + let inner_parse_policy = InnerAttrPolicy::Forbidden { + reason: inner_error_reason, + saw_doc_comment: just_parsed_doc_comment, + prev_attr_sp: attrs.last().map(|a| a.span), + }; + let attr = this.parse_attribute_with_inner_parse_policy(inner_parse_policy)?; + just_parsed_doc_comment = false; + Ok(Some(attr)) + })? } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind { - let attr = attr::mk_doc_comment(comment_kind, attr_style, data, self.token.span); - if attr.style != ast::AttrStyle::Outer { - self.sess - .span_diagnostic - .struct_span_err_with_code( - self.token.span, - "expected outer doc comment", - error_code!(E0753), - ) - .note( - "inner doc comments like this (starting with \ - `//!` or `/*!`) can only appear before items", - ) - .emit(); - } + self.collect_tokens(|this| { + let attr = + attr::mk_doc_comment(comment_kind, attr_style, data, this.token.span); + if attr.style != ast::AttrStyle::Outer { + this.sess + .span_diagnostic + .struct_span_err_with_code( + this.token.span, + "expected outer doc comment", + error_code!(E0753), + ) + .note( + "inner doc comments like this (starting with \ + `//!` or `/*!`) can only appear before items", + ) + .emit(); + } + this.bump(); + just_parsed_doc_comment = true; + Ok(Some(attr)) + })? 
+ } else { + (None, None) + }; + + if let Some(mut attr) = attr { + attr.tokens = tokens; attrs.push(attr); - self.bump(); - just_parsed_doc_comment = true; } else { break; } @@ -99,7 +111,7 @@ impl<'a> Parser<'a> { if self.eat(&token::Not) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer }; self.expect(&token::OpenDelim(token::Bracket))?; - let item = self.parse_attr_item()?; + let item = self.parse_attr_item(false)?; self.expect(&token::CloseDelim(token::Bracket))?; let attr_sp = lo.to(self.prev_token.span); @@ -148,7 +160,7 @@ impl<'a> Parser<'a> { /// PATH /// PATH `=` UNSUFFIXED_LIT /// The delimiters or `=` are still put into the resulting token stream. - pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> { + pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> { let item = match self.token.kind { token::Interpolated(ref nt) => match **nt { Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()), @@ -160,9 +172,18 @@ impl<'a> Parser<'a> { self.bump(); item } else { - let path = self.parse_path(PathStyle::Mod)?; - let args = self.parse_attr_args()?; - ast::AttrItem { path, args, tokens: None } + let do_parse = |this: &mut Self| { + let path = this.parse_path(PathStyle::Mod)?; + let args = this.parse_attr_args()?; + Ok(ast::AttrItem { path, args, tokens: None }) + }; + if capture_tokens { + let (mut item, tokens) = self.collect_tokens(do_parse)?; + item.tokens = tokens; + item + } else { + do_parse(self)? + } }) } @@ -175,19 +196,31 @@ impl<'a> Parser<'a> { let mut attrs: Vec = vec![]; loop { // Only try to parse if it is an inner attribute (has `!`). - if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) { - let attr = self.parse_attribute(true)?; - assert_eq!(attr.style, ast::AttrStyle::Inner); - attrs.push(attr); - } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind { - // We need to get the position of this token before we bump. - let attr = attr::mk_doc_comment(comment_kind, attr_style, data, self.token.span); - if attr.style == ast::AttrStyle::Inner { - attrs.push(attr); - self.bump(); + let (attr, tokens) = + if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) { + self.collect_tokens(|this| { + let attr = this.parse_attribute(true)?; + assert_eq!(attr.style, ast::AttrStyle::Inner); + Ok(Some(attr)) + })? + } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind { + self.collect_tokens(|this| { + // We need to get the position of this token before we bump. + let attr = + attr::mk_doc_comment(comment_kind, attr_style, data, this.token.span); + if attr.style == ast::AttrStyle::Inner { + this.bump(); + Ok(Some(attr)) + } else { + Ok(None) + } + })? 
} else { - break; - } + (None, None) + }; + if let Some(mut attr) = attr { + attr.tokens = tokens; + attrs.push(attr); } else { break; } @@ -220,7 +253,7 @@ impl<'a> Parser<'a> { let mut expanded_attrs = Vec::with_capacity(1); while self.token.kind != token::Eof { let lo = self.token.span; - let item = self.parse_attr_item()?; + let item = self.parse_attr_item(true)?; expanded_attrs.push((item, lo.to(self.prev_token.span))); if !self.eat(&token::Comma) { break; @@ -302,3 +335,16 @@ impl<'a> Parser<'a> { Err(self.struct_span_err(self.token.span, &msg)) } } + +pub fn maybe_needs_tokens(attrs: &[ast::Attribute]) -> bool { + attrs.iter().any(|attr| { + if let Some(ident) = attr.ident() { + ident.name == sym::derive + // This might apply a custom attribute/derive + || ident.name == sym::cfg_attr + || !rustc_feature::is_builtin_attr_name(ident.name) + } else { + true + } + }) +} diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 17cbaf65420f6..c44e00f861de6 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -6,6 +6,7 @@ use crate::maybe_recover_from_interpolated_ty_qpath; use rustc_ast::ptr::P; use rustc_ast::token::{self, Token, TokenKind}; +use rustc_ast::tokenstream::Spacing; use rustc_ast::util::classify; use rustc_ast::util::literal::LitError; use rustc_ast::util::parser::{prec_let_scrutinee_needs_par, AssocOp, Fixity}; @@ -18,7 +19,6 @@ use rustc_span::source_map::{self, Span, Spanned}; use rustc_span::symbol::{kw, sym, Ident, Symbol}; use rustc_span::{BytePos, Pos}; use std::mem; -use tracing::debug; /// Possibly accepts an `token::Interpolated` expression (a pre-parsed expression /// dropped into the token stream, which happens while parsing the result of @@ -459,7 +459,7 @@ impl<'a> Parser<'a> { /// Parses a prefix-unary-operator expr. fn parse_prefix_expr(&mut self, attrs: Option) -> PResult<'a, P> { let attrs = self.parse_or_use_outer_attributes(attrs)?; - self.maybe_collect_tokens(!attrs.is_empty(), |this| { + self.maybe_collect_tokens(super::attr::maybe_needs_tokens(&attrs), |this| { let lo = this.token.span; // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr() let (hi, ex) = match this.token.uninterpolate().kind { @@ -884,7 +884,7 @@ impl<'a> Parser<'a> { assert!(suffix.is_none()); let symbol = Symbol::intern(&i); self.token = Token::new(token::Ident(symbol, false), ident_span); - let next_token = Token::new(token::Dot, dot_span); + let next_token = (Token::new(token::Dot, dot_span), self.token_spacing); self.parse_tuple_field_access_expr(lo, base, symbol, None, Some(next_token)) } // 1.2 | 1.2e3 @@ -902,12 +902,14 @@ impl<'a> Parser<'a> { }; let symbol1 = Symbol::intern(&i1); self.token = Token::new(token::Ident(symbol1, false), ident1_span); - let next_token1 = Token::new(token::Dot, dot_span); + // This needs to be `Spacing::Alone` to prevent regressions. 
+ // See issue #76399 and PR #76285 for more details + let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone); let base1 = self.parse_tuple_field_access_expr(lo, base, symbol1, None, Some(next_token1)); let symbol2 = Symbol::intern(&i2); let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span); - self.bump_with(next_token2); // `.` + self.bump_with((next_token2, self.token_spacing)); // `.` self.parse_tuple_field_access_expr(lo, base1, symbol2, suffix, None) } // 1e+ | 1e- (recovered) @@ -930,7 +932,7 @@ impl<'a> Parser<'a> { base: P, field: Symbol, suffix: Option, - next_token: Option, + next_token: Option<(Token, Spacing)>, ) -> P { match next_token { Some(next_token) => self.bump_with(next_token), @@ -1060,6 +1062,8 @@ impl<'a> Parser<'a> { }) } else if self.eat_keyword(kw::Unsafe) { self.parse_block_expr(None, lo, BlockCheckMode::Unsafe(ast::UserProvided), attrs) + } else if self.check_inline_const(0) { + self.parse_const_block(lo.to(self.token.span)) } else if self.is_do_catch_block() { self.recover_do_catch(attrs) } else if self.is_try_block() { @@ -1107,13 +1111,12 @@ impl<'a> Parser<'a> { fn maybe_collect_tokens( &mut self, - has_outer_attrs: bool, + needs_tokens: bool, f: impl FnOnce(&mut Self) -> PResult<'a, P>, ) -> PResult<'a, P> { - if has_outer_attrs { + if needs_tokens { let (mut expr, tokens) = self.collect_tokens(f)?; - debug!("maybe_collect_tokens: Collected tokens for {:?} (tokens {:?}", expr, tokens); - expr.tokens = Some(tokens); + expr.tokens = tokens; Ok(expr) } else { f(self) diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index 48341f71d33ea..e57a2e42b5dde 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -116,15 +116,16 @@ impl<'a> Parser<'a> { Some(item.into_inner()) }); + let needs_tokens = super::attr::maybe_needs_tokens(&attrs); + let mut unclosed_delims = vec![]; - let has_attrs = !attrs.is_empty(); let parse_item = |this: &mut Self| { let item = this.parse_item_common_(attrs, mac_allowed, attrs_allowed, req_name); unclosed_delims.append(&mut this.unclosed_delims); item }; - let (mut item, tokens) = if has_attrs { + let (mut item, tokens) = if needs_tokens { let (item, tokens) = self.collect_tokens(parse_item)?; (item, Some(tokens)) } else { @@ -150,7 +151,7 @@ impl<'a> Parser<'a> { if let Some(tokens) = tokens { if let Some(item) = &mut item { if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) { - item.tokens = Some(tokens); + item.tokens = tokens; } } } diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index c10946812213b..175dd3fa53a6e 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -16,12 +16,15 @@ pub use path::PathStyle; use rustc_ast::ptr::P; use rustc_ast::token::{self, DelimToken, Token, TokenKind}; -use rustc_ast::tokenstream::{self, DelimSpan, TokenStream, TokenTree, TreeAndSpacing}; +use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, LazyTokenStreamInner, Spacing}; +use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_ast::DUMMY_NODE_ID; -use rustc_ast::{self as ast, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe}; -use rustc_ast::{Async, MacArgs, MacDelimiter, Mutability, StrLit, Visibility, VisibilityKind}; +use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe}; +use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, 
StrLit}; +use rustc_ast::{Visibility, VisibilityKind}; use rustc_ast_pretty::pprust; -use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, FatalError, PResult}; +use rustc_errors::PResult; +use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, FatalError}; use rustc_session::parse::ParseSess; use rustc_span::source_map::{Span, DUMMY_SP}; use rustc_span::symbol::{kw, sym, Ident, Symbol}; @@ -84,10 +87,14 @@ pub struct Parser<'a> { pub sess: &'a ParseSess, /// The current token. pub token: Token, + /// The spacing for the current token + pub token_spacing: Spacing, /// The previous token. pub prev_token: Token, restrictions: Restrictions, expected_tokens: Vec, + // Important: This must only be advanced from `next_tok` + // to ensure that `token_cursor.num_next_calls` is updated properly token_cursor: TokenCursor, desugar_doc_comments: bool, /// This field is used to keep track of how many left angle brackets we have seen. This is @@ -119,8 +126,10 @@ impl<'a> Drop for Parser<'a> { struct TokenCursor { frame: TokenCursorFrame, stack: Vec, - cur_token: Option, - collecting: Option, + desugar_doc_comments: bool, + // Counts the number of calls to `next` or `next_desugared`, + // depending on whether `desugar_doc_comments` is set. + num_next_calls: usize, } #[derive(Clone)] @@ -132,40 +141,22 @@ struct TokenCursorFrame { close_delim: bool, } -/// Used to track additional state needed by `collect_tokens` -#[derive(Clone, Debug)] -struct Collecting { - /// Holds the current tokens captured during the most - /// recent call to `collect_tokens` - buf: Vec, - /// The depth of the `TokenCursor` stack at the time - /// collection was started. When we encounter a `TokenTree::Delimited`, - /// we want to record the `TokenTree::Delimited` itself, - /// but *not* any of the inner tokens while we are inside - /// the new frame (this would cause us to record duplicate tokens). - /// - /// This `depth` fields tracks stack depth we are recording tokens. - /// Only tokens encountered at this depth will be recorded. See - /// `TokenCursor::next` for more details. - depth: usize, -} - impl TokenCursorFrame { - fn new(span: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self { + fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self { TokenCursorFrame { delim, span, open_delim: delim == token::NoDelim, - tree_cursor: tts.clone().into_trees(), + tree_cursor: tts.into_trees(), close_delim: delim == token::NoDelim, } } } impl TokenCursor { - fn next(&mut self) -> Token { + fn next(&mut self) -> (Token, Spacing) { loop { - let tree = if !self.frame.open_delim { + let (tree, spacing) = if !self.frame.open_delim { self.frame.open_delim = true; TokenTree::open_tt(self.frame.span, self.frame.delim).into() } else if let Some(tree) = self.frame.tree_cursor.next_with_spacing() { @@ -177,40 +168,24 @@ impl TokenCursor { self.frame = frame; continue; } else { - return Token::new(token::Eof, DUMMY_SP); + (TokenTree::Token(Token::new(token::Eof, DUMMY_SP)), Spacing::Alone) }; - // Don't set an open delimiter as our current token - we want - // to leave it as the full `TokenTree::Delimited` from the previous - // iteration of this loop - if !matches!(tree.0, TokenTree::Token(Token { kind: TokenKind::OpenDelim(_), .. 
})) { - self.cur_token = Some(tree.clone()); - } - - if let Some(collecting) = &mut self.collecting { - if collecting.depth == self.stack.len() { - debug!( - "TokenCursor::next(): collected {:?} at depth {:?}", - tree, - self.stack.len() - ); - collecting.buf.push(tree.clone()) + match tree { + TokenTree::Token(token) => { + return (token, spacing); } - } - - match tree.0 { - TokenTree::Token(token) => return token, TokenTree::Delimited(sp, delim, tts) => { - let frame = TokenCursorFrame::new(sp, delim, &tts); + let frame = TokenCursorFrame::new(sp, delim, tts); self.stack.push(mem::replace(&mut self.frame, frame)); } } } } - fn next_desugared(&mut self) -> Token { + fn next_desugared(&mut self) -> (Token, Spacing) { let (data, attr_style, sp) = match self.next() { - Token { kind: token::DocComment(_, attr_style, data), span } => { + (Token { kind: token::DocComment(_, attr_style, data), span }, _) => { (data, attr_style, span) } tok => return tok, @@ -248,7 +223,7 @@ impl TokenCursor { TokenCursorFrame::new( delim_span, token::NoDelim, - &if attr_style == AttrStyle::Inner { + if attr_style == AttrStyle::Inner { [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body] .iter() .cloned() @@ -350,14 +325,15 @@ impl<'a> Parser<'a> { let mut parser = Parser { sess, token: Token::dummy(), + token_spacing: Spacing::Alone, prev_token: Token::dummy(), restrictions: Restrictions::empty(), expected_tokens: Vec::new(), token_cursor: TokenCursor { - frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, &tokens), + frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens), stack: Vec::new(), - cur_token: None, - collecting: None, + num_next_calls: 0, + desugar_doc_comments, }, desugar_doc_comments, unmatched_angle_bracket_count: 0, @@ -374,17 +350,18 @@ impl<'a> Parser<'a> { parser } - fn next_tok(&mut self, fallback_span: Span) -> Token { - let mut next = if self.desugar_doc_comments { + fn next_tok(&mut self, fallback_span: Span) -> (Token, Spacing) { + let (mut next, spacing) = if self.desugar_doc_comments { self.token_cursor.next_desugared() } else { self.token_cursor.next() }; + self.token_cursor.num_next_calls += 1; if next.span.is_dummy() { // Tweak the location for better diagnostics, but keep syntactic context intact. next.span = fallback_span.with_ctxt(next.span.ctxt()); } - next + (next, spacing) } pub fn unexpected(&mut self) -> PResult<'a, T> { @@ -545,6 +522,15 @@ impl<'a> Parser<'a> { self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const) } + fn check_inline_const(&self, dist: usize) -> bool { + self.is_keyword_ahead(dist, &[kw::Const]) + && self.look_ahead(dist + 1, |t| match t.kind { + token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)), + token::OpenDelim(DelimToken::Brace) => true, + _ => false, + }) + } + /// Checks to see if the next token is either `+` or `+=`. /// Otherwise returns `false`. fn check_plus(&mut self) -> bool { @@ -567,7 +553,9 @@ impl<'a> Parser<'a> { let first_span = self.sess.source_map().start_point(self.token.span); let second_span = self.token.span.with_lo(first_span.hi()); self.token = Token::new(first, first_span); - self.bump_with(Token::new(second, second_span)); + // Use the spacing of the glued token as the spacing + // of the unglued second token. + self.bump_with((Token::new(second, second_span), self.token_spacing)); true } _ => { @@ -799,7 +787,7 @@ impl<'a> Parser<'a> { } /// Advance the parser by one token using provided token as the next one. 
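The `check_inline_const` lookahead added in this hunk recognizes `const` immediately followed by a block (or an interpolated block). A minimal sketch of the surface syntax this enables is below; when this change landed the expression form was gated under `#![feature(inline_const)]`, and it was stabilized later, so the snippet is written against the stabilized form.

```rust
// Sketch, assuming the stabilized inline-const expression syntax.
fn main() {
    // `const` immediately followed by a brace-delimited block parses as an
    // inline const expression rather than the start of a `const` item.
    let x = const { 40 + 2 };
    assert_eq!(x, 42);
}
```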
- fn bump_with(&mut self, next_token: Token) { + fn bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) { // Bumping after EOF is a bad sign, usually an infinite loop. if self.prev_token.kind == TokenKind::Eof { let msg = "attempted to bump the parser past EOF (may be stuck in a loop)"; @@ -808,6 +796,7 @@ impl<'a> Parser<'a> { // Update the current and previous tokens. self.prev_token = mem::replace(&mut self.token, next_token); + self.token_spacing = next_spacing; // Diagnostics. self.expected_tokens.clear(); @@ -827,15 +816,15 @@ impl<'a> Parser<'a> { } let frame = &self.token_cursor.frame; - looker(&match frame.tree_cursor.look_ahead(dist - 1) { + match frame.tree_cursor.look_ahead(dist - 1) { Some(tree) => match tree { - TokenTree::Token(token) => token, + TokenTree::Token(token) => looker(token), TokenTree::Delimited(dspan, delim, _) => { - Token::new(token::OpenDelim(delim), dspan.open) + looker(&Token::new(token::OpenDelim(*delim), dspan.open)) } }, - None => Token::new(token::CloseDelim(frame.delim), frame.span.close), - }) + None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)), + } } /// Returns whether any of the given keywords are `dist` tokens ahead of the current one. @@ -864,13 +853,28 @@ impl<'a> Parser<'a> { /// Parses constness: `const` or nothing. fn parse_constness(&mut self) -> Const { - if self.eat_keyword(kw::Const) { + // Avoid const blocks to be parsed as const items + if self.look_ahead(1, |t| t != &token::OpenDelim(DelimToken::Brace)) + && self.eat_keyword(kw::Const) + { Const::Yes(self.prev_token.uninterpolated_span()) } else { Const::No } } + /// Parses inline const expressions. + fn parse_const_block(&mut self, span: Span) -> PResult<'a, P> { + self.sess.gated_spans.gate(sym::inline_const, span); + self.eat_keyword(kw::Const); + let blk = self.parse_block()?; + let anon_const = AnonConst { + id: DUMMY_NODE_ID, + value: self.mk_expr(blk.span, ExprKind::Block(blk, None), AttrVec::new()), + }; + Ok(self.mk_expr(span, ExprKind::ConstBlock(anon_const), AttrVec::new())) + } + /// Parses mutability (`mut` or nothing). fn parse_mutability(&mut self) -> Mutability { if self.eat_keyword(kw::Mut) { Mutability::Mut } else { Mutability::Not } @@ -963,13 +967,27 @@ impl<'a> Parser<'a> { pub(crate) fn parse_token_tree(&mut self) -> TokenTree { match self.token.kind { token::OpenDelim(..) => { - let frame = mem::replace( - &mut self.token_cursor.frame, - self.token_cursor.stack.pop().unwrap(), - ); - self.token = Token::new(TokenKind::CloseDelim(frame.delim), frame.span.close); + let depth = self.token_cursor.stack.len(); + + // We keep advancing the token cursor until we hit + // the matching `CloseDelim` token. + while !(depth == self.token_cursor.stack.len() + && matches!(self.token.kind, token::CloseDelim(_))) + { + // Advance one token at a time, so `TokenCursor::next()` + // can capture these tokens if necessary. + self.bump(); + } + // We are still inside the frame corresponding + // to the delimited stream we captured, so grab + // the tokens from this frame. 
+ let frame = &self.token_cursor.frame; + let stream = frame.tree_cursor.stream.clone(); + let span = frame.span; + let delim = frame.delim; + // Consume close delimiter self.bump(); - TokenTree::Delimited(frame.span, frame.delim, frame.tree_cursor.stream) + TokenTree::Delimited(span, delim, stream) } token::CloseDelim(_) | token::Eof => unreachable!(), _ => { @@ -1160,8 +1178,9 @@ impl<'a> Parser<'a> { /// Records all tokens consumed by the provided callback, /// including the current token. These tokens are collected - /// into a `TokenStream`, and returned along with the result - /// of the callback. + /// into a `LazyTokenStream`, and returned along with the result + /// of the callback. The returned `LazyTokenStream` will be `None` + /// if not tokens were captured. /// /// Note: If your callback consumes an opening delimiter /// (including the case where you call `collect_tokens` @@ -1177,79 +1196,50 @@ impl<'a> Parser<'a> { pub fn collect_tokens( &mut self, f: impl FnOnce(&mut Self) -> PResult<'a, R>, - ) -> PResult<'a, (R, TokenStream)> { - // Record all tokens we parse when parsing this item. - let tokens: Vec = self.token_cursor.cur_token.clone().into_iter().collect(); - debug!("collect_tokens: starting with {:?}", tokens); - - // We need special handling for the case where `collect_tokens` is called - // on an opening delimeter (e.g. '('). At this point, we have already pushed - // a new frame - however, we want to record the original `TokenTree::Delimited`, - // for consistency with the case where we start recording one token earlier. - // See `TokenCursor::next` to see how `cur_token` is set up. - let prev_depth = - if matches!(self.token_cursor.cur_token, Some((TokenTree::Delimited(..), _))) { - if self.token_cursor.stack.is_empty() { - // There is nothing below us in the stack that - // the function could consume, so the only thing it can legally - // capture is the entire contents of the current frame. - return Ok((f(self)?, TokenStream::new(tokens))); - } - // We have already recorded the full `TokenTree::Delimited` when we created - // our `tokens` vector at the start of this function. We are now inside - // a new frame corresponding to the `TokenTree::Delimited` we already recoreded. - // We don't want to record any of the tokens inside this frame, since they - // will be duplicates of the tokens nested inside the `TokenTree::Delimited`. - // Therefore, we set our recording depth to the *previous* frame. This allows - // us to record a sequence like: `(foo).bar()`: the `(foo)` will be recored - // as our initial `cur_token`, while the `.bar()` will be recored after we - // pop the `(foo)` frame. - self.token_cursor.stack.len() - 1 - } else { - self.token_cursor.stack.len() - }; - let prev_collecting = - self.token_cursor.collecting.replace(Collecting { buf: tokens, depth: prev_depth }); + ) -> PResult<'a, (R, Option)> { + let start_token = (self.token.clone(), self.token_spacing); + let mut cursor_snapshot = self.token_cursor.clone(); - let ret = f(self); + let ret = f(self)?; - let mut collected_tokens = if let Some(collecting) = self.token_cursor.collecting.take() { - collecting.buf - } else { - let msg = "our vector went away?"; - debug!("collect_tokens: {}", msg); - self.sess.span_diagnostic.delay_span_bug(self.token.span, &msg); - // This can happen due to a bad interaction of two unrelated recovery mechanisms - // with mismatched delimiters *and* recovery lookahead on the likely typo - // `pub ident(` (#62895, different but similar to the case above). 
- return Ok((ret?, TokenStream::default())); - }; + let new_calls = self.token_cursor.num_next_calls; + let num_calls = new_calls - cursor_snapshot.num_next_calls; + let desugar_doc_comments = self.desugar_doc_comments; - debug!("collect_tokens: got raw tokens {:?}", collected_tokens); - - // If we're not at EOF our current token wasn't actually consumed by - // `f`, but it'll still be in our list that we pulled out. In that case - // put it back. - let extra_token = if self.token != token::Eof { collected_tokens.pop() } else { None }; - - if let Some(mut collecting) = prev_collecting { - // If we were previously collecting at the same depth, - // then the previous call to `collect_tokens` needs to see - // the tokens we just recorded. - // - // If we were previously recording at an lower `depth`, - // then the previous `collect_tokens` call already recorded - // this entire frame in the form of a `TokenTree::Delimited`, - // so there is nothing else for us to do. - if collecting.depth == prev_depth { - collecting.buf.extend(collected_tokens.iter().cloned()); - collecting.buf.extend(extra_token); - debug!("collect_tokens: updating previous buf to {:?}", collecting); - } - self.token_cursor.collecting = Some(collecting) + // We didn't capture any tokens + if num_calls == 0 { + return Ok((ret, None)); } - Ok((ret?, TokenStream::new(collected_tokens))) + // Produces a `TokenStream` on-demand. Using `cursor_snapshot` + // and `num_calls`, we can reconstruct the `TokenStream` seen + // by the callback. This allows us to avoid producing a `TokenStream` + // if it is never needed - for example, a captured `macro_rules!` + // argument that is never passed to a proc macro. + // + // This also makes `Parser` very cheap to clone, since + // there is no intermediate collection buffer to clone. + let lazy_cb = move || { + // The token produced by the final call to `next` or `next_desugared` + // was not actually consumed by the callback. The combination + // of chaining the initial token and using `take` produces the desired + // result - we produce an empty `TokenStream` if no calls were made, + // and omit the final token otherwise. + let tokens = std::iter::once(start_token) + .chain((0..num_calls).map(|_| { + if desugar_doc_comments { + cursor_snapshot.next_desugared() + } else { + cursor_snapshot.next() + } + })) + .take(num_calls); + + make_token_stream(tokens) + }; + let stream = LazyTokenStream::new(LazyTokenStreamInner::Lazy(Box::new(lazy_cb))); + + Ok((ret, Some(stream))) } /// `::{` or `::*` @@ -1298,3 +1288,41 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec, sess: &Pa } } } + +/// Converts a flattened iterator of tokens (including open and close delimiter tokens) +/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair +/// of open and close delims. 
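The rewritten `collect_tokens` above no longer buffers tokens eagerly: it clones the cheap `TokenCursor`, records how many times `next`/`next_desugared` was called, and replays the snapshot only if the resulting `LazyTokenStream` is actually forced. A self-contained sketch of that snapshot-and-replay idea follows; the `Cursor` type is a stand-in for illustration, not the real `TokenCursor`.

```rust
// Simplified sketch of snapshot + call-count + lazy replay.
#[derive(Clone)]
struct Cursor {
    tokens: Vec<&'static str>,
    pos: usize,
}

impl Cursor {
    fn next_tok(&mut self) -> Option<&'static str> {
        let tok = self.tokens.get(self.pos).copied();
        self.pos += 1;
        tok
    }
}

fn main() {
    let mut cursor = Cursor { tokens: vec!["#", "[", "derive", "]", "fn"], pos: 0 };

    let snapshot = cursor.clone(); // cheap: no token buffer is materialized
    let before = cursor.pos;
    while let Some(tok) = cursor.next_tok() {
        if tok == "]" {
            break; // stand-in for the parser callback consuming some tokens
        }
    }
    let num_calls = cursor.pos - before; // how many tokens the callback consumed

    // Only rebuild the captured sequence if someone actually asks for it.
    let rebuild = move || {
        let mut replay = snapshot.clone();
        (0..num_calls).filter_map(|_| replay.next_tok()).collect::<Vec<_>>()
    };
    assert_eq!(rebuild(), ["#", "[", "derive", "]"]);
}
```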
+fn make_token_stream(tokens: impl Iterator) -> TokenStream { + #[derive(Debug)] + struct FrameData { + open: Span, + inner: Vec<(TokenTree, Spacing)>, + } + let mut stack = vec![FrameData { open: DUMMY_SP, inner: vec![] }]; + for (token, spacing) in tokens { + match token { + Token { kind: TokenKind::OpenDelim(_), span } => { + stack.push(FrameData { open: span, inner: vec![] }); + } + Token { kind: TokenKind::CloseDelim(delim), span } => { + let frame_data = stack.pop().expect("Token stack was empty!"); + let dspan = DelimSpan::from_pair(frame_data.open, span); + let stream = TokenStream::new(frame_data.inner); + let delimited = TokenTree::Delimited(dspan, delim, stream); + stack + .last_mut() + .unwrap_or_else(|| panic!("Bottom token frame is missing for tokens!")) + .inner + .push((delimited, Spacing::Alone)); + } + token => stack + .last_mut() + .expect("Bottom token frame is missing!") + .inner + .push((TokenTree::Token(token), spacing)), + } + } + let final_buf = stack.pop().expect("Missing final buf!"); + assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack); + TokenStream::new(final_buf.inner) +} diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index 15660fd574c13..98fb1c8292510 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -103,7 +103,7 @@ impl<'a> Parser<'a> { // If we captured tokens during parsing (due to outer attributes), // use those. if item.tokens.is_none() { - item.tokens = Some(tokens); + item.tokens = tokens; } token::NtItem(item) } @@ -115,7 +115,7 @@ impl<'a> Parser<'a> { let (mut block, tokens) = self.collect_tokens(|this| this.parse_block())?; // We have have eaten an NtBlock, which could already have tokens if block.tokens.is_none() { - block.tokens = Some(tokens); + block.tokens = tokens; } token::NtBlock(block) } @@ -124,7 +124,7 @@ impl<'a> Parser<'a> { match stmt { Some(mut s) => { if s.tokens.is_none() { - s.tokens = Some(tokens); + s.tokens = tokens; } token::NtStmt(s) } @@ -137,7 +137,7 @@ impl<'a> Parser<'a> { let (mut pat, tokens) = self.collect_tokens(|this| this.parse_pat(None))?; // We have have eaten an NtPat, which could already have tokens if pat.tokens.is_none() { - pat.tokens = Some(tokens); + pat.tokens = tokens; } token::NtPat(pat) } @@ -146,7 +146,7 @@ impl<'a> Parser<'a> { // If we captured tokens during parsing (due to outer attributes), // use those. 
if expr.tokens.is_none() { - expr.tokens = Some(tokens); + expr.tokens = tokens; } token::NtExpr(expr) } @@ -155,7 +155,7 @@ impl<'a> Parser<'a> { self.collect_tokens(|this| this.parse_literal_maybe_minus())?; // We have have eaten a nonterminal, which could already have tokens if lit.tokens.is_none() { - lit.tokens = Some(tokens); + lit.tokens = tokens; } token::NtLiteral(lit) } @@ -163,7 +163,7 @@ impl<'a> Parser<'a> { let (mut ty, tokens) = self.collect_tokens(|this| this.parse_ty())?; // We have an eaten an NtTy, which could already have tokens if ty.tokens.is_none() { - ty.tokens = Some(tokens); + ty.tokens = tokens; } token::NtTy(ty) } @@ -183,15 +183,15 @@ impl<'a> Parser<'a> { self.collect_tokens(|this| this.parse_path(PathStyle::Type))?; // We have have eaten an NtPath, which could already have tokens if path.tokens.is_none() { - path.tokens = Some(tokens); + path.tokens = tokens; } token::NtPath(path) } NonterminalKind::Meta => { - let (mut attr, tokens) = self.collect_tokens(|this| this.parse_attr_item())?; + let (mut attr, tokens) = self.collect_tokens(|this| this.parse_attr_item(false))?; // We may have eaten a nonterminal, which could already have tokens if attr.tokens.is_none() { - attr.tokens = Some(tokens); + attr.tokens = tokens; } token::NtMeta(P(attr)) } @@ -201,7 +201,7 @@ impl<'a> Parser<'a> { self.collect_tokens(|this| this.parse_visibility(FollowedByType::Yes))?; // We may have etan an `NtVis`, which could already have tokens if vis.tokens.is_none() { - vis.tokens = Some(tokens); + vis.tokens = tokens; } token::NtVis(vis) } diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index 5aced9dc37c7e..27fe75a23b6a8 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -313,6 +313,15 @@ impl<'a> Parser<'a> { let pat = self.parse_pat_with_range_pat(false, None)?; self.sess.gated_spans.gate(sym::box_patterns, lo.to(self.prev_token.span)); PatKind::Box(pat) + } else if self.check_inline_const(0) { + // Parse `const pat` + let const_expr = self.parse_const_block(lo.to(self.token.span))?; + + if let Some(re) = self.parse_range_end() { + self.parse_pat_range_begin_with(const_expr, re)? + } else { + PatKind::Lit(const_expr) + } } else if self.can_be_ident_pat() { // Parse `ident @ pat` // This can give false positives and parse nullary enums, @@ -714,16 +723,19 @@ impl<'a> Parser<'a> { /// Is the token `dist` away from the current suitable as the start of a range patterns end? fn is_pat_range_end_start(&self, dist: usize) -> bool { - self.look_ahead(dist, |t| { - t.is_path_start() // e.g. `MY_CONST`; + self.check_inline_const(dist) + || self.look_ahead(dist, |t| { + t.is_path_start() // e.g. `MY_CONST`; || t.kind == token::Dot // e.g. `.5` for recovery; || t.can_begin_literal_maybe_minus() // e.g. `42`. 
|| t.is_whole_expr() - }) + }) } fn parse_pat_range_end(&mut self) -> PResult<'a, P> { - if self.check_path() { + if self.check_inline_const(0) { + self.parse_const_block(self.token.span) + } else if self.check_path() { let lo = self.token.span; let (qself, path) = if self.eat_lt() { // Parse a qualified path diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs index 1acaa4c6eff5d..7679582f8811e 100644 --- a/compiler/rustc_passes/src/check_attr.rs +++ b/compiler/rustc_passes/src/check_attr.rs @@ -85,6 +85,10 @@ impl CheckAttrVisitor<'tcx> { self.check_export_name(&attr, span, target) } else if self.tcx.sess.check_name(attr, sym::rustc_args_required_const) { self.check_rustc_args_required_const(&attr, span, target, item) + } else if self.tcx.sess.check_name(attr, sym::allow_internal_unstable) { + self.check_allow_internal_unstable(&attr, span, target, &attrs) + } else if self.tcx.sess.check_name(attr, sym::rustc_allow_const_fn_unstable) { + self.check_rustc_allow_const_fn_unstable(hir_id, &attr, span, target) } else { // lint-only checks if self.tcx.sess.check_name(attr, sym::cold) { @@ -104,7 +108,7 @@ impl CheckAttrVisitor<'tcx> { return; } - if matches!(target, Target::Fn | Target::Method(_) | Target::ForeignFn) { + if matches!(target, Target::Closure | Target::Fn | Target::Method(_) | Target::ForeignFn) { self.tcx.ensure().codegen_fn_attrs(self.tcx.hir().local_def_id(hir_id)); } @@ -195,7 +199,7 @@ impl CheckAttrVisitor<'tcx> { /// Checks if the `#[non_exhaustive]` attribute on an `item` is valid. Returns `true` if valid. fn check_non_exhaustive(&self, attr: &Attribute, span: &Span, target: Target) -> bool { match target { - Target::Struct | Target::Enum => true, + Target::Struct | Target::Enum | Target::Variant => true, _ => { struct_span_err!( self.tcx.sess, @@ -587,6 +591,9 @@ impl CheckAttrVisitor<'tcx> { for hint in &hints { let (article, allowed_targets) = match hint.name_or_empty() { + _ if !matches!(target, Target::Struct | Target::Enum | Target::Union) => { + ("a", "struct, enum, or union") + } name @ sym::C | name @ sym::align => { is_c |= name == sym::C; match target { @@ -652,12 +659,16 @@ impl CheckAttrVisitor<'tcx> { } _ => continue, }; - self.emit_repr_error( + + struct_span_err!( + self.tcx.sess, hint.span(), - *span, - &format!("attribute should be applied to {}", allowed_targets), - &format!("not {} {}", article, allowed_targets), + E0517, + "{}", + &format!("attribute should be applied to {} {}", article, allowed_targets) ) + .span_label(*span, &format!("not {} {}", article, allowed_targets)) + .emit(); } // Just point at all repr hints if there are any incompatibilities. 
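For reference, the relaxed target check above (`Target::Struct | Target::Enum | Target::Variant`) now accepts `#[non_exhaustive]` on individual enum variants as well as on structs and enums. A minimal sketch of code this permits:

```rust
// Sketch: `#[non_exhaustive]` on an enum and on one of its variants.
#[non_exhaustive]
pub enum ParseError {
    #[non_exhaustive]
    Syntax { line: u32 },
    UnexpectedEof,
}

fn main() {
    let _ = ParseError::UnexpectedEof;
}
```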
@@ -703,64 +714,63 @@ impl CheckAttrVisitor<'tcx> { } } - fn emit_repr_error( - &self, - hint_span: Span, - label_span: Span, - hint_message: &str, - label_message: &str, - ) { - struct_span_err!(self.tcx.sess, hint_span, E0517, "{}", hint_message) - .span_label(label_span, label_message) - .emit(); - } - - fn check_stmt_attributes(&self, stmt: &hir::Stmt<'_>) { - // When checking statements ignore expressions, they will be checked later - if let hir::StmtKind::Local(ref l) = stmt.kind { - self.check_attributes(l.hir_id, &l.attrs, &stmt.span, Target::Statement, None); - for attr in l.attrs.iter() { - if self.tcx.sess.check_name(attr, sym::repr) { - self.emit_repr_error( - attr.span, - stmt.span, - "attribute should not be applied to a statement", - "not a struct, enum, or union", - ); - } + fn check_used(&self, attrs: &'hir [Attribute], target: Target) { + for attr in attrs { + if self.tcx.sess.check_name(attr, sym::used) && target != Target::Static { + self.tcx + .sess + .span_err(attr.span, "attribute must be applied to a `static` variable"); } } } - fn check_expr_attributes(&self, expr: &hir::Expr<'_>) { - let target = match expr.kind { - hir::ExprKind::Closure(..) => Target::Closure, - _ => Target::Expression, - }; - self.check_attributes(expr.hir_id, &expr.attrs, &expr.span, target, None); - for attr in expr.attrs.iter() { - if self.tcx.sess.check_name(attr, sym::repr) { - self.emit_repr_error( - attr.span, - expr.span, - "attribute should not be applied to an expression", - "not defining a struct, enum, or union", - ); + /// Outputs an error for `#[allow_internal_unstable]` which can only be applied to macros. + /// (Allows proc_macro functions) + fn check_allow_internal_unstable( + &self, + attr: &Attribute, + span: &Span, + target: Target, + attrs: &[Attribute], + ) -> bool { + debug!("Checking target: {:?}", target); + if target == Target::Fn { + for attr in attrs { + if self.tcx.sess.is_proc_macro_attr(attr) { + debug!("Is proc macro attr"); + return true; + } } + debug!("Is not proc macro attr"); } - if target == Target::Closure { - self.tcx.ensure().codegen_fn_attrs(self.tcx.hir().local_def_id(expr.hir_id)); - } + self.tcx + .sess + .struct_span_err(attr.span, "attribute should be applied to a macro") + .span_label(*span, "not a macro") + .emit(); + false } - fn check_used(&self, attrs: &'hir [Attribute], target: Target) { - for attr in attrs { - if self.tcx.sess.check_name(attr, sym::used) && target != Target::Static { - self.tcx - .sess - .span_err(attr.span, "attribute must be applied to a `static` variable"); + /// Outputs an error for `#[allow_internal_unstable]` which can only be applied to macros. + /// (Allows proc_macro functions) + fn check_rustc_allow_const_fn_unstable( + &self, + hir_id: HirId, + attr: &Attribute, + span: &Span, + target: Target, + ) -> bool { + if let Target::Fn | Target::Method(_) = target { + if self.tcx.is_const_fn_raw(self.tcx.hir().local_def_id(hir_id)) { + return true; } } + self.tcx + .sess + .struct_span_err(attr.span, "attribute should be applied to `const fn`") + .span_label(*span, "not a `const fn`") + .emit(); + false } } @@ -808,14 +818,32 @@ impl Visitor<'tcx> for CheckAttrVisitor<'tcx> { } fn visit_stmt(&mut self, stmt: &'tcx hir::Stmt<'tcx>) { - self.check_stmt_attributes(stmt); + // When checking statements ignore expressions, they will be checked later. 
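The relocated `check_used` above keeps enforcing that `#[used]` may only be applied to `static` items; a trivial sketch of the accepted form (illustrative name only):

```rust
// Sketch: `#[used]` on a `static` passes the check; other targets are rejected.
#[used]
static KEEP_IN_BINARY: u32 = 0x6b72_616d;

fn main() {}
```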
+ if let hir::StmtKind::Local(ref l) = stmt.kind { + self.check_attributes(l.hir_id, &l.attrs, &stmt.span, Target::Statement, None); + } intravisit::walk_stmt(self, stmt) } fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) { - self.check_expr_attributes(expr); + let target = match expr.kind { + hir::ExprKind::Closure(..) => Target::Closure, + _ => Target::Expression, + }; + + self.check_attributes(expr.hir_id, &expr.attrs, &expr.span, target, None); intravisit::walk_expr(self, expr) } + + fn visit_variant( + &mut self, + variant: &'tcx hir::Variant<'tcx>, + generics: &'tcx hir::Generics<'tcx>, + item_id: HirId, + ) { + self.check_attributes(variant.id, variant.attrs, &variant.span, Target::Variant, None); + intravisit::walk_variant(self, variant, generics, item_id) + } } fn is_c_like_enum(item: &Item<'_>) -> bool { diff --git a/compiler/rustc_passes/src/check_const.rs b/compiler/rustc_passes/src/check_const.rs index dd0bcbf208d7c..b24c62b971a46 100644 --- a/compiler/rustc_passes/src/check_const.rs +++ b/compiler/rustc_passes/src/check_const.rs @@ -87,7 +87,7 @@ impl<'tcx> CheckConstVisitor<'tcx> { let is_feature_allowed = |feature_gate| { // All features require that the corresponding gate be enabled, - // even if the function has `#[allow_internal_unstable(the_gate)]`. + // even if the function has `#[rustc_allow_const_fn_unstable(the_gate)]`. if !tcx.features().enabled(feature_gate) { return false; } @@ -105,8 +105,8 @@ impl<'tcx> CheckConstVisitor<'tcx> { } // However, we cannot allow stable `const fn`s to use unstable features without an explicit - // opt-in via `allow_internal_unstable`. - attr::allow_internal_unstable(&tcx.sess, &tcx.get_attrs(def_id)) + // opt-in via `rustc_allow_const_fn_unstable`. + attr::rustc_allow_const_fn_unstable(&tcx.sess, &tcx.get_attrs(def_id)) .map_or(false, |mut features| features.any(|name| name == feature_gate)) }; diff --git a/compiler/rustc_passes/src/dead.rs b/compiler/rustc_passes/src/dead.rs index 98ded4189cf19..f567dd83bc13d 100644 --- a/compiler/rustc_passes/src/dead.rs +++ b/compiler/rustc_passes/src/dead.rs @@ -458,8 +458,8 @@ fn create_and_seed_worklist<'tcx>( .map .iter() .filter_map( - |(&id, level)| { - if level >= &privacy::AccessLevel::Reachable { Some(id) } else { None } + |(&id, &level)| { + if level >= privacy::AccessLevel::Reachable { Some(id) } else { None } }, ) .chain( @@ -547,7 +547,7 @@ impl DeadVisitor<'tcx> { let def_id = self.tcx.hir().local_def_id(id); let inherent_impls = self.tcx.inherent_impls(def_id); for &impl_did in inherent_impls.iter() { - for &item_did in &self.tcx.associated_item_def_ids(impl_did)[..] { + for item_did in self.tcx.associated_item_def_ids(impl_did) { if let Some(did) = item_did.as_local() { let item_hir_id = self.tcx.hir().local_def_id_to_hir_id(did); if self.live_symbols.contains(&item_hir_id) { diff --git a/compiler/rustc_passes/src/hir_id_validator.rs b/compiler/rustc_passes/src/hir_id_validator.rs index 24695f5cdfa04..6d1a5fcc10b0f 100644 --- a/compiler/rustc_passes/src/hir_id_validator.rs +++ b/compiler/rustc_passes/src/hir_id_validator.rs @@ -163,4 +163,17 @@ impl<'a, 'hir> intravisit::Visitor<'hir> for HirIdValidator<'a, 'hir> { // we are currently in. So for those it's correct that they have a // different owner. } + + fn visit_generic_param(&mut self, param: &'hir hir::GenericParam<'hir>) { + if let hir::GenericParamKind::Type { + synthetic: Some(hir::SyntheticTyParamKind::ImplTrait), + .. 
+ } = param.kind + { + // Synthetic impl trait parameters are owned by the node of the desugared type. + // This means it is correct for them to have a different owner. + } else { + intravisit::walk_generic_param(self, param); + } + } } diff --git a/compiler/rustc_passes/src/liveness.rs b/compiler/rustc_passes/src/liveness.rs index e8b97d7dc7d50..7288015e17029 100644 --- a/compiler/rustc_passes/src/liveness.rs +++ b/compiler/rustc_passes/src/liveness.rs @@ -432,6 +432,7 @@ impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> { | hir::ExprKind::Break(..) | hir::ExprKind::Continue(_) | hir::ExprKind::Lit(_) + | hir::ExprKind::ConstBlock(..) | hir::ExprKind::Ret(..) | hir::ExprKind::Block(..) | hir::ExprKind::Assign(..) @@ -1173,7 +1174,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } } hir::InlineAsmOperand::InOut { expr, .. } => { - succ = self.write_place(expr, succ, ACC_READ | ACC_WRITE); + succ = self.write_place(expr, succ, ACC_READ | ACC_WRITE | ACC_USE); } hir::InlineAsmOperand::SplitInOut { out_expr, .. } => { if let Some(expr) = out_expr { @@ -1232,6 +1233,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } hir::ExprKind::Lit(..) + | hir::ExprKind::ConstBlock(..) | hir::ExprKind::Err | hir::ExprKind::Path(hir::QPath::TypeRelative(..)) | hir::ExprKind::Path(hir::QPath::LangItem(..)) => succ, @@ -1478,6 +1480,7 @@ fn check_expr<'tcx>(this: &mut Liveness<'_, 'tcx>, expr: &'tcx Expr<'tcx>) { | hir::ExprKind::Break(..) | hir::ExprKind::Continue(..) | hir::ExprKind::Lit(_) + | hir::ExprKind::ConstBlock(..) | hir::ExprKind::Block(..) | hir::ExprKind::AddrOf(..) | hir::ExprKind::Struct(..) diff --git a/compiler/rustc_passes/src/stability.rs b/compiler/rustc_passes/src/stability.rs index 1378b0d57053e..c9497f2a5b2b0 100644 --- a/compiler/rustc_passes/src/stability.rs +++ b/compiler/rustc_passes/src/stability.rs @@ -13,15 +13,13 @@ use rustc_hir::{Generics, HirId, Item, StructField, TraitRef, Ty, TyKind, Varian use rustc_middle::hir::map::Map; use rustc_middle::middle::privacy::AccessLevels; use rustc_middle::middle::stability::{DeprecationEntry, Index}; -use rustc_middle::ty::query::Providers; -use rustc_middle::ty::TyCtxt; +use rustc_middle::ty::{self, query::Providers, TyCtxt}; use rustc_session::lint; use rustc_session::lint::builtin::INEFFECTIVE_UNSTABLE_TRAIT_IMPL; use rustc_session::parse::feature_err; use rustc_session::Session; use rustc_span::symbol::{sym, Symbol}; -use rustc_span::Span; -use rustc_trait_selection::traits::misc::can_type_implement_copy; +use rustc_span::{Span, DUMMY_SP}; use std::cmp::Ordering; use std::mem::replace; @@ -711,27 +709,35 @@ impl Visitor<'tcx> for Checker<'tcx> { // so semi-randomly perform it here in stability.rs hir::ItemKind::Union(..) if !self.tcx.features().untagged_unions => { let def_id = self.tcx.hir().local_def_id(item.hir_id); - let adt_def = self.tcx.adt_def(def_id); let ty = self.tcx.type_of(def_id); + let (adt_def, substs) = match ty.kind() { + ty::Adt(adt_def, substs) => (adt_def, substs), + _ => bug!(), + }; - if adt_def.has_dtor(self.tcx) { - feature_err( - &self.tcx.sess.parse_sess, - sym::untagged_unions, - item.span, - "unions with `Drop` implementations are unstable", - ) - .emit(); - } else { - let param_env = self.tcx.param_env(def_id); - if can_type_implement_copy(self.tcx, param_env, ty).is_err() { - feature_err( - &self.tcx.sess.parse_sess, - sym::untagged_unions, - item.span, - "unions with non-`Copy` fields are unstable", - ) - .emit(); + // Non-`Copy` fields are unstable, except for `ManuallyDrop`. 
+                let param_env = self.tcx.param_env(def_id);
+                for field in &adt_def.non_enum_variant().fields {
+                    let field_ty = field.ty(self.tcx, substs);
+                    if !field_ty.ty_adt_def().map_or(false, |adt_def| adt_def.is_manually_drop())
+                        && !field_ty.is_copy_modulo_regions(self.tcx.at(DUMMY_SP), param_env)
+                    {
+                        if field_ty.needs_drop(self.tcx, param_env) {
+                            // Avoid duplicate error: This will error later anyway because fields
+                            // that need drop are not allowed.
+                            self.tcx.sess.delay_span_bug(
+                                item.span,
+                                "union should have been rejected due to potentially dropping field",
+                            );
+                        } else {
+                            feature_err(
+                                &self.tcx.sess.parse_sess,
+                                sym::untagged_unions,
+                                self.tcx.def_span(field.did),
+                                "unions with non-`Copy` fields other than `ManuallyDrop` are unstable",
+                            )
+                            .emit();
+                        }
                     }
                 }
             }
diff --git a/compiler/rustc_privacy/src/lib.rs b/compiler/rustc_privacy/src/lib.rs
index 8f93bce6e9923..851e0dfbe0d45 100644
--- a/compiler/rustc_privacy/src/lib.rs
+++ b/compiler/rustc_privacy/src/lib.rs
@@ -1,6 +1,7 @@
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![feature(in_band_lifetimes)]
 #![feature(nll)]
+#![feature(or_patterns)]
 #![recursion_limit = "256"]

 use rustc_attr as attr;
@@ -14,13 +15,14 @@ use rustc_hir::{AssocItemKind, HirIdSet, Node, PatKind};
 use rustc_middle::bug;
 use rustc_middle::hir::map::Map;
 use rustc_middle::middle::privacy::{AccessLevel, AccessLevels};
+use rustc_middle::span_bug;
 use rustc_middle::ty::fold::TypeVisitor;
 use rustc_middle::ty::query::Providers;
 use rustc_middle::ty::subst::InternalSubsts;
 use rustc_middle::ty::{self, GenericParamDefKind, TraitRef, Ty, TyCtxt, TypeFoldable};
 use rustc_session::lint;
 use rustc_span::hygiene::Transparency;
-use rustc_span::symbol::{kw, sym, Ident};
+use rustc_span::symbol::{kw, Ident};
 use rustc_span::Span;

 use std::marker::PhantomData;
@@ -233,125 +235,6 @@ where
     }
 }

-fn def_id_visibility<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    def_id: DefId,
-) -> (ty::Visibility, Span, &'static str) {
-    match def_id.as_local().map(|def_id| tcx.hir().local_def_id_to_hir_id(def_id)) {
-        Some(hir_id) => {
-            let vis = match tcx.hir().get(hir_id) {
-                Node::Item(item) => &item.vis,
-                Node::ForeignItem(foreign_item) => &foreign_item.vis,
-                Node::MacroDef(macro_def) => {
-                    if tcx.sess.contains_name(&macro_def.attrs, sym::macro_export) {
-                        return (ty::Visibility::Public, macro_def.span, "public");
-                    } else {
-                        &macro_def.vis
-                    }
-                }
-                Node::TraitItem(..) | Node::Variant(..) => {
-                    return def_id_visibility(tcx, tcx.hir().get_parent_did(hir_id).to_def_id());
-                }
-                Node::ImplItem(impl_item) => {
-                    match tcx.hir().get(tcx.hir().get_parent_item(hir_id)) {
-                        Node::Item(item) => match &item.kind {
-                            hir::ItemKind::Impl { of_trait: None, .. } => &impl_item.vis,
-                            hir::ItemKind::Impl { of_trait: Some(trait_ref), .. } => {
-                                return def_id_visibility(tcx, trait_ref.path.res.def_id());
-                            }
-                            kind => bug!("unexpected item kind: {:?}", kind),
-                        },
-                        node => bug!("unexpected node kind: {:?}", node),
-                    }
-                }
-                Node::Ctor(vdata) => {
-                    let parent_hir_id = tcx.hir().get_parent_node(hir_id);
-                    match tcx.hir().get(parent_hir_id) {
-                        Node::Variant(..)
=> { - let parent_did = tcx.hir().local_def_id(parent_hir_id); - let (mut ctor_vis, mut span, mut descr) = - def_id_visibility(tcx, parent_did.to_def_id()); - - let adt_def = tcx.adt_def(tcx.hir().get_parent_did(hir_id).to_def_id()); - let ctor_did = tcx.hir().local_def_id(vdata.ctor_hir_id().unwrap()); - let variant = adt_def.variant_with_ctor_id(ctor_did.to_def_id()); - - if variant.is_field_list_non_exhaustive() - && ctor_vis == ty::Visibility::Public - { - ctor_vis = - ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)); - let attrs = tcx.get_attrs(variant.def_id); - span = tcx - .sess - .find_by_name(&attrs, sym::non_exhaustive) - .unwrap() - .span; - descr = "crate-visible"; - } - - return (ctor_vis, span, descr); - } - Node::Item(..) => { - let item = match tcx.hir().get(parent_hir_id) { - Node::Item(item) => item, - node => bug!("unexpected node kind: {:?}", node), - }; - let (mut ctor_vis, mut span, mut descr) = ( - ty::Visibility::from_hir(&item.vis, parent_hir_id, tcx), - item.vis.span, - item.vis.node.descr(), - ); - for field in vdata.fields() { - let field_vis = ty::Visibility::from_hir(&field.vis, hir_id, tcx); - if ctor_vis.is_at_least(field_vis, tcx) { - ctor_vis = field_vis; - span = field.vis.span; - descr = field.vis.node.descr(); - } - } - - // If the structure is marked as non_exhaustive then lower the - // visibility to within the crate. - if ctor_vis == ty::Visibility::Public { - let adt_def = - tcx.adt_def(tcx.hir().get_parent_did(hir_id).to_def_id()); - if adt_def.non_enum_variant().is_field_list_non_exhaustive() { - ctor_vis = - ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)); - span = tcx - .sess - .find_by_name(&item.attrs, sym::non_exhaustive) - .unwrap() - .span; - descr = "crate-visible"; - } - } - - return (ctor_vis, span, descr); - } - node => bug!("unexpected node kind: {:?}", node), - } - } - Node::Expr(expr) => { - return ( - ty::Visibility::Restricted(tcx.parent_module(expr.hir_id).to_def_id()), - expr.span, - "private", - ); - } - node => bug!("unexpected node kind: {:?}", node), - }; - (ty::Visibility::from_hir(vis, hir_id, tcx), vis.span, vis.node.descr()) - } - None => { - let vis = tcx.visibility(def_id); - let descr = if vis == ty::Visibility::Public { "public" } else { "private" }; - (vis, tcx.def_span(def_id), descr) - } - } -} - fn min(vis1: ty::Visibility, vis2: ty::Visibility, tcx: TyCtxt<'_>) -> ty::Visibility { if vis1.is_at_least(vis2, tcx) { vis2 } else { vis1 } } @@ -424,7 +307,7 @@ trait VisibilityLike: Sized { impl VisibilityLike for ty::Visibility { const MAX: Self = ty::Visibility::Public; fn new_min(find: &FindMin<'_, '_, Self>, def_id: DefId) -> Self { - min(def_id_visibility(find.tcx, def_id).0, find.min, find.tcx) + min(find.tcx.visibility(def_id), find.min, find.tcx) } } impl VisibilityLike for Option { @@ -534,17 +417,16 @@ impl EmbargoVisitor<'tcx> { let hir_id = item_id.id; let item_def_id = self.tcx.hir().local_def_id(hir_id); let def_kind = self.tcx.def_kind(item_def_id); - let item = self.tcx.hir().expect_item(hir_id); - let vis = ty::Visibility::from_hir(&item.vis, hir_id, self.tcx); + let vis = self.tcx.visibility(item_def_id); self.update_macro_reachable_def(hir_id, def_kind, vis, defining_mod); } if let Some(exports) = self.tcx.module_exports(module_def_id) { for export in exports { if export.vis.is_accessible_from(defining_mod, self.tcx) { if let Res::Def(def_kind, def_id) = export.res { - let vis = def_id_visibility(self.tcx, def_id).0; if let Some(def_id) = def_id.as_local() { let hir_id = 
self.tcx.hir().local_def_id_to_hir_id(def_id); + let vis = self.tcx.visibility(def_id.to_def_id()); self.update_macro_reachable_def(hir_id, def_kind, vis, defining_mod); } } @@ -596,7 +478,7 @@ impl EmbargoVisitor<'tcx> { { for field in struct_def.fields() { let field_vis = - ty::Visibility::from_hir(&field.vis, field.hir_id, self.tcx); + self.tcx.visibility(self.tcx.hir().local_def_id(field.hir_id)); if field_vis.is_accessible_from(module, self.tcx) { self.reach(field.hir_id, level).ty(); } @@ -1015,11 +897,10 @@ impl DefIdVisitor<'tcx> for ReachEverythingInTheInterfaceVisitor<'_, 'tcx> { } fn visit_def_id(&mut self, def_id: DefId, _kind: &str, _descr: &dyn fmt::Display) -> bool { if let Some(def_id) = def_id.as_local() { - let hir_id = self.ev.tcx.hir().local_def_id_to_hir_id(def_id); - if let ((ty::Visibility::Public, ..), _) - | (_, Some(AccessLevel::ReachableFromImplTrait)) = - (def_id_visibility(self.tcx(), def_id.to_def_id()), self.access_level) + if let (ty::Visibility::Public, _) | (_, Some(AccessLevel::ReachableFromImplTrait)) = + (self.tcx().visibility(def_id.to_def_id()), self.access_level) { + let hir_id = self.ev.tcx.hir().local_def_id_to_hir_id(def_id); self.ev.update(hir_id, self.access_level); } } @@ -1184,9 +1065,7 @@ impl<'tcx> TypePrivacyVisitor<'tcx> { } fn item_is_accessible(&self, did: DefId) -> bool { - def_id_visibility(self.tcx, did) - .0 - .is_accessible_from(self.current_item.to_def_id(), self.tcx) + self.tcx.visibility(did).is_accessible_from(self.current_item.to_def_id(), self.tcx) } // Take node-id of an expression or pattern and check its type for privacy. @@ -1840,8 +1719,21 @@ impl SearchInterfaceForPrivateItemsVisitor<'tcx> { None => return false, }; - let (vis, vis_span, vis_descr) = def_id_visibility(self.tcx, def_id); + let vis = self.tcx.visibility(def_id); if !vis.is_at_least(self.required_visibility, self.tcx) { + let vis_descr = match vis { + ty::Visibility::Public => "public", + ty::Visibility::Invisible => "private", + ty::Visibility::Restricted(vis_def_id) => { + if vis_def_id == self.tcx.parent_module(hir_id).to_def_id() { + "private" + } else if vis_def_id.is_top_level_module() { + "crate-private" + } else { + "restricted" + } + } + }; let make_msg = || format!("{} {} `{}` in public interface", vis_descr, kind, descr); if self.has_pub_restricted || self.has_old_errors || self.in_assoc_ty { let mut err = if kind == "trait" { @@ -1849,6 +1741,8 @@ impl SearchInterfaceForPrivateItemsVisitor<'tcx> { } else { struct_span_err!(self.tcx.sess, self.span, E0446, "{}", make_msg()) }; + let vis_span = + self.tcx.sess.source_map().guess_head_span(self.tcx.def_span(def_id)); err.span_label(self.span, format!("can't leak {} {}", vis_descr, kind)); err.span_label(vis_span, format!("`{}` declared as {}", descr, vis_descr)); err.emit(); @@ -1965,7 +1859,7 @@ impl<'a, 'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) { let tcx = self.tcx; - let item_visibility = ty::Visibility::from_hir(&item.vis, item.hir_id, tcx); + let item_visibility = tcx.visibility(tcx.hir().local_def_id(item.hir_id).to_def_id()); match item.kind { // Crates are always public. @@ -2019,7 +1913,7 @@ impl<'a, 'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> // Subitems of foreign modules have their own publicity. 
hir::ItemKind::ForeignMod(ref foreign_mod) => { for foreign_item in foreign_mod.items { - let vis = ty::Visibility::from_hir(&foreign_item.vis, item.hir_id, tcx); + let vis = tcx.visibility(tcx.hir().local_def_id(foreign_item.hir_id)); self.check(foreign_item.hir_id, vis).generics().predicates().ty(); } } @@ -2028,7 +1922,7 @@ impl<'a, 'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> self.check(item.hir_id, item_visibility).generics().predicates(); for field in struct_def.fields() { - let field_visibility = ty::Visibility::from_hir(&field.vis, item.hir_id, tcx); + let field_visibility = tcx.visibility(tcx.hir().local_def_id(field.hir_id)); self.check(field.hir_id, min(item_visibility, field_visibility, tcx)).ty(); } } @@ -2040,10 +1934,9 @@ impl<'a, 'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> let impl_vis = ty::Visibility::of_impl(item.hir_id, tcx, &Default::default()); self.check(item.hir_id, impl_vis).generics().predicates(); for impl_item_ref in items { - let impl_item = tcx.hir().impl_item(impl_item_ref.id); let impl_item_vis = if of_trait.is_none() { min( - ty::Visibility::from_hir(&impl_item.vis, item.hir_id, tcx), + tcx.visibility(tcx.hir().local_def_id(impl_item_ref.id.hir_id)), impl_vis, tcx, ) @@ -2064,6 +1957,7 @@ impl<'a, 'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> pub fn provide(providers: &mut Providers) { *providers = Providers { + visibility, privacy_access_levels, check_private_in_public, check_mod_privacy, @@ -2071,6 +1965,55 @@ pub fn provide(providers: &mut Providers) { }; } +fn visibility(tcx: TyCtxt<'_>, def_id: DefId) -> ty::Visibility { + let def_id = def_id.expect_local(); + match tcx.visibilities.get(&def_id) { + Some(vis) => *vis, + None => { + let hir_id = tcx.hir().local_def_id_to_hir_id(def_id); + match tcx.hir().get(hir_id) { + // Unique types created for closures participate in type privacy checking. + // They have visibilities inherited from the module they are defined in. + Node::Expr(hir::Expr { kind: hir::ExprKind::Closure(..), .. }) => { + ty::Visibility::Restricted(tcx.parent_module(hir_id).to_def_id()) + } + // - AST lowering may clone `use` items and the clones don't + // get their entries in the resolver's visibility table. + // - AST lowering also creates opaque type items with inherited visibilies. + // Visibility on them should have no effect, but to avoid the visibility + // query failing on some items, we provide it for opaque types as well. + Node::Item(hir::Item { + vis, + kind: hir::ItemKind::Use(..) | hir::ItemKind::OpaqueTy(..), + .. + }) => ty::Visibility::from_hir(vis, hir_id, tcx), + // Visibilities of trait impl items are inherited from their traits + // and are not filled in resolve. + Node::ImplItem(impl_item) => { + match tcx.hir().get(tcx.hir().get_parent_item(hir_id)) { + Node::Item(hir::Item { + kind: hir::ItemKind::Impl { of_trait: Some(tr), .. }, + .. + }) => tr.path.res.opt_def_id().map_or_else( + || { + tcx.sess.delay_span_bug(tr.path.span, "trait without a def-id"); + ty::Visibility::Public + }, + |def_id| tcx.visibility(def_id), + ), + _ => span_bug!(impl_item.span, "the parent is not a trait impl"), + } + } + _ => span_bug!( + tcx.def_span(def_id), + "visibility table unexpectedly missing a def-id: {:?}", + def_id, + ), + } + } + } +} + fn check_mod_privacy(tcx: TyCtxt<'_>, module_def_id: LocalDefId) { // Check privacy of names not checked in previous compilation stages. 
let mut visitor = NamePrivacyVisitor { tcx, maybe_typeck_results: None, current_item: None }; diff --git a/compiler/rustc_query_system/src/query/caches.rs b/compiler/rustc_query_system/src/query/caches.rs index 1839e1af45eef..7bc6ae1d1c6c3 100644 --- a/compiler/rustc_query_system/src/query/caches.rs +++ b/compiler/rustc_query_system/src/query/caches.rs @@ -1,12 +1,12 @@ use crate::dep_graph::DepNodeIndex; use crate::query::plumbing::{QueryLookup, QueryState}; -use crate::query::QueryContext; use rustc_arena::TypedArena; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sharded::Sharded; use rustc_data_structures::sync::WorkerLocal; use std::default::Default; +use std::fmt::Debug; use std::hash::Hash; use std::marker::PhantomData; @@ -24,16 +24,16 @@ pub trait QueryStorage: Default { } pub trait QueryCache: QueryStorage { - type Key: Hash; + type Key: Hash + Eq + Clone + Debug; type Sharded: Default; /// Checks if the query is already computed and in the cache. /// It returns the shard index and a lock guard to the shard, /// which will be used if the query is not in the cache and we need /// to compute it. - fn lookup( + fn lookup( &self, - state: &QueryState, + state: &QueryState, key: Self::Key, // `on_hit` can be called while holding a lock to the query state shard. on_hit: OnHit, @@ -41,7 +41,7 @@ pub trait QueryCache: QueryStorage { ) -> R where OnHit: FnOnce(&Self::Stored, DepNodeIndex) -> R, - OnMiss: FnOnce(Self::Key, QueryLookup<'_, CTX, Self::Key, Self::Sharded>) -> R; + OnMiss: FnOnce(Self::Key, QueryLookup<'_, D, Q, Self::Key, Self::Sharded>) -> R; fn complete( &self, @@ -86,21 +86,25 @@ impl QueryStorage for DefaultCache { } } -impl QueryCache for DefaultCache { +impl QueryCache for DefaultCache +where + K: Eq + Hash + Clone + Debug, + V: Clone, +{ type Key = K; type Sharded = FxHashMap; #[inline(always)] - fn lookup( + fn lookup( &self, - state: &QueryState, + state: &QueryState, key: K, on_hit: OnHit, on_miss: OnMiss, ) -> R where OnHit: FnOnce(&V, DepNodeIndex) -> R, - OnMiss: FnOnce(K, QueryLookup<'_, CTX, K, Self::Sharded>) -> R, + OnMiss: FnOnce(K, QueryLookup<'_, D, Q, K, Self::Sharded>) -> R, { let mut lookup = state.get_lookup(&key); let lock = &mut *lookup.lock; @@ -164,21 +168,24 @@ impl<'tcx, K: Eq + Hash, V: 'tcx> QueryStorage for ArenaCache<'tcx, K, V> { } } -impl<'tcx, K: Eq + Hash, V: 'tcx> QueryCache for ArenaCache<'tcx, K, V> { +impl<'tcx, K, V: 'tcx> QueryCache for ArenaCache<'tcx, K, V> +where + K: Eq + Hash + Clone + Debug, +{ type Key = K; type Sharded = FxHashMap; #[inline(always)] - fn lookup( + fn lookup( &self, - state: &QueryState, + state: &QueryState, key: K, on_hit: OnHit, on_miss: OnMiss, ) -> R where OnHit: FnOnce(&&'tcx V, DepNodeIndex) -> R, - OnMiss: FnOnce(K, QueryLookup<'_, CTX, K, Self::Sharded>) -> R, + OnMiss: FnOnce(K, QueryLookup<'_, D, Q, K, Self::Sharded>) -> R, { let mut lookup = state.get_lookup(&key); let lock = &mut *lookup.lock; diff --git a/compiler/rustc_query_system/src/query/config.rs b/compiler/rustc_query_system/src/query/config.rs index 549056570f9bc..0f0684b354791 100644 --- a/compiler/rustc_query_system/src/query/config.rs +++ b/compiler/rustc_query_system/src/query/config.rs @@ -5,18 +5,14 @@ use crate::dep_graph::SerializedDepNodeIndex; use crate::query::caches::QueryCache; use crate::query::plumbing::CycleError; use crate::query::{QueryContext, QueryState}; -use rustc_data_structures::profiling::ProfileCategory; use rustc_data_structures::fingerprint::Fingerprint; use std::borrow::Cow; use 
std::fmt::Debug; use std::hash::Hash; -// The parameter `CTX` is required in librustc_middle: -// implementations may need to access the `'tcx` lifetime in `CTX = TyCtxt<'tcx>`. -pub trait QueryConfig { +pub trait QueryConfig { const NAME: &'static str; - const CATEGORY: ProfileCategory; type Key: Eq + Hash + Clone + Debug; type Value; @@ -70,7 +66,7 @@ impl QueryVtable { } } -pub trait QueryAccessors: QueryConfig { +pub trait QueryAccessors: QueryConfig { const ANON: bool; const EVAL_ALWAYS: bool; const DEP_KIND: CTX::DepKind; @@ -78,7 +74,7 @@ pub trait QueryAccessors: QueryConfig { type Cache: QueryCache; // Don't use this method to access query results, instead use the methods on TyCtxt - fn query_state<'a>(tcx: CTX) -> &'a QueryState; + fn query_state<'a>(tcx: CTX) -> &'a QueryState; fn to_dep_node(tcx: CTX, key: &Self::Key) -> DepNode where diff --git a/compiler/rustc_query_system/src/query/job.rs b/compiler/rustc_query_system/src/query/job.rs index 190312bb33001..c1d3210b61768 100644 --- a/compiler/rustc_query_system/src/query/job.rs +++ b/compiler/rustc_query_system/src/query/job.rs @@ -1,16 +1,16 @@ -use crate::dep_graph::{DepContext, DepKind}; use crate::query::plumbing::CycleError; -use crate::query::QueryContext; use rustc_data_structures::fx::FxHashMap; use rustc_span::Span; use std::convert::TryFrom; +use std::hash::Hash; use std::marker::PhantomData; use std::num::NonZeroU32; #[cfg(parallel_compiler)] use { + super::QueryContext, parking_lot::{Condvar, Mutex}, rustc_data_structures::fx::FxHashSet, rustc_data_structures::stable_hasher::{HashStable, StableHasher}, @@ -31,7 +31,7 @@ pub struct QueryInfo { pub query: Q, } -type QueryMap = FxHashMap::DepKind>, QueryJobInfo>; +pub(crate) type QueryMap = FxHashMap, QueryJobInfo>; /// A value uniquely identifiying an active query job within a shard in the query cache. #[derive(Copy, Clone, Eq, PartialEq, Hash)] @@ -39,71 +39,75 @@ pub struct QueryShardJobId(pub NonZeroU32); /// A value uniquely identifiying an active query job. #[derive(Copy, Clone, Eq, PartialEq, Hash)] -pub struct QueryJobId { +pub struct QueryJobId { /// Which job within a shard is this pub job: QueryShardJobId, /// In which shard is this job pub shard: u16, - /// What kind of query this job is - pub kind: K, + /// What kind of query this job is. + pub kind: D, } -impl QueryJobId { - pub fn new(job: QueryShardJobId, shard: usize, kind: K) -> Self { +impl QueryJobId +where + D: Copy + Clone + Eq + Hash, +{ + pub fn new(job: QueryShardJobId, shard: usize, kind: D) -> Self { QueryJobId { job, shard: u16::try_from(shard).unwrap(), kind } } - fn query>(self, map: &QueryMap) -> CTX::Query { + fn query(self, map: &QueryMap) -> Q { map.get(&self).unwrap().info.query.clone() } #[cfg(parallel_compiler)] - fn span>(self, map: &QueryMap) -> Span { + fn span(self, map: &QueryMap) -> Span { map.get(&self).unwrap().job.span } #[cfg(parallel_compiler)] - fn parent>(self, map: &QueryMap) -> Option> { + fn parent(self, map: &QueryMap) -> Option> { map.get(&self).unwrap().job.parent } #[cfg(parallel_compiler)] - fn latch<'a, CTX: QueryContext>( - self, - map: &'a QueryMap, - ) -> Option<&'a QueryLatch> { + fn latch<'a, Q: Clone>(self, map: &'a QueryMap) -> Option<&'a QueryLatch> { map.get(&self).unwrap().job.latch.as_ref() } } -pub struct QueryJobInfo { - pub info: QueryInfo, - pub job: QueryJob, +pub struct QueryJobInfo { + pub info: QueryInfo, + pub job: QueryJob, } /// Represents an active query job. 
#[derive(Clone)] -pub struct QueryJob { +pub struct QueryJob { pub id: QueryShardJobId, /// The span corresponding to the reason for which this query was required. pub span: Span, /// The parent query job which created this job and is implicitly waiting on it. - pub parent: Option>, + pub parent: Option>, /// The latch that is used to wait on this job. #[cfg(parallel_compiler)] - latch: Option>, + latch: Option>, - dummy: PhantomData>, + dummy: PhantomData>, } -impl QueryJob { +impl QueryJob +where + D: Copy + Clone + Eq + Hash, + Q: Clone, +{ /// Creates a new query job. - pub fn new(id: QueryShardJobId, span: Span, parent: Option>) -> Self { + pub fn new(id: QueryShardJobId, span: Span, parent: Option>) -> Self { QueryJob { id, span, @@ -115,7 +119,7 @@ impl QueryJob { } #[cfg(parallel_compiler)] - pub(super) fn latch(&mut self, _id: QueryJobId) -> QueryLatch { + pub(super) fn latch(&mut self, _id: QueryJobId) -> QueryLatch { if self.latch.is_none() { self.latch = Some(QueryLatch::new()); } @@ -123,7 +127,7 @@ impl QueryJob { } #[cfg(not(parallel_compiler))] - pub(super) fn latch(&mut self, id: QueryJobId) -> QueryLatch { + pub(super) fn latch(&mut self, id: QueryJobId) -> QueryLatch { QueryLatch { id, dummy: PhantomData } } @@ -143,19 +147,26 @@ impl QueryJob { #[cfg(not(parallel_compiler))] #[derive(Clone)] -pub(super) struct QueryLatch { - id: QueryJobId, - dummy: PhantomData, +pub(super) struct QueryLatch { + id: QueryJobId, + dummy: PhantomData, } #[cfg(not(parallel_compiler))] -impl QueryLatch { - pub(super) fn find_cycle_in_stack(&self, tcx: CTX, span: Span) -> CycleError { - let query_map = tcx.try_collect_active_jobs().unwrap(); - - // Get the current executing query (waiter) and find the waitee amongst its parents - let mut current_job = tcx.current_query_job(); +impl QueryLatch +where + D: Copy + Clone + Eq + Hash, + Q: Clone, +{ + pub(super) fn find_cycle_in_stack( + &self, + query_map: QueryMap, + current_job: &Option>, + span: Span, + ) -> CycleError { + // Find the waitee amongst `current_job` parents let mut cycle = Vec::new(); + let mut current_job = Option::clone(current_job); while let Some(job) = current_job { let info = query_map.get(&job).unwrap(); @@ -186,15 +197,15 @@ impl QueryLatch { } #[cfg(parallel_compiler)] -struct QueryWaiter { - query: Option>, +struct QueryWaiter { + query: Option>, condvar: Condvar, span: Span, - cycle: Lock>>, + cycle: Lock>>, } #[cfg(parallel_compiler)] -impl QueryWaiter { +impl QueryWaiter { fn notify(&self, registry: &rayon_core::Registry) { rayon_core::mark_unblocked(registry); self.condvar.notify_one(); @@ -202,19 +213,19 @@ impl QueryWaiter { } #[cfg(parallel_compiler)] -struct QueryLatchInfo { +struct QueryLatchInfo { complete: bool, - waiters: Vec>>, + waiters: Vec>>, } #[cfg(parallel_compiler)] #[derive(Clone)] -pub(super) struct QueryLatch { - info: Lrc>>, +pub(super) struct QueryLatch { + info: Lrc>>, } #[cfg(parallel_compiler)] -impl QueryLatch { +impl QueryLatch { fn new() -> Self { QueryLatch { info: Lrc::new(Mutex::new(QueryLatchInfo { complete: false, waiters: Vec::new() })), @@ -223,10 +234,13 @@ impl QueryLatch { } #[cfg(parallel_compiler)] -impl QueryLatch { +impl QueryLatch { /// Awaits for the query job to complete. 
- pub(super) fn wait_on(&self, tcx: CTX, span: Span) -> Result<(), CycleError> { - let query = tcx.current_query_job(); + pub(super) fn wait_on( + &self, + query: Option>, + span: Span, + ) -> Result<(), CycleError> { let waiter = Lrc::new(QueryWaiter { query, span, cycle: Lock::new(None), condvar: Condvar::new() }); self.wait_on_inner(&waiter); @@ -239,12 +253,9 @@ impl QueryLatch { Some(cycle) => Err(cycle), } } -} -#[cfg(parallel_compiler)] -impl QueryLatch { /// Awaits the caller on this latch by blocking the current thread. - fn wait_on_inner(&self, waiter: &Lrc>) { + fn wait_on_inner(&self, waiter: &Lrc>) { let mut info = self.info.lock(); if !info.complete { // We push the waiter on to the `waiters` list. It can be accessed inside @@ -278,7 +289,7 @@ impl QueryLatch { /// Removes a single waiter from the list of waiters. /// This is used to break query cycles. - fn extract_waiter(&self, waiter: usize) -> Lrc> { + fn extract_waiter(&self, waiter: usize) -> Lrc> { let mut info = self.info.lock(); debug_assert!(!info.complete); // Remove the waiter from the list of waiters @@ -288,7 +299,7 @@ impl QueryLatch { /// A resumable waiter of a query. The usize is the index into waiters in the query's latch #[cfg(parallel_compiler)] -type Waiter = (QueryJobId, usize); +type Waiter = (QueryJobId, usize); /// Visits all the non-resumable and resumable waiters of a query. /// Only waiters in a query are visited. @@ -300,13 +311,15 @@ type Waiter = (QueryJobId, usize); /// required information to resume the waiter. /// If all `visit` calls returns None, this function also returns None. #[cfg(parallel_compiler)] -fn visit_waiters( - query_map: &QueryMap, - query: QueryJobId, +fn visit_waiters( + query_map: &QueryMap, + query: QueryJobId, mut visit: F, -) -> Option>> +) -> Option>> where - F: FnMut(Span, QueryJobId) -> Option>>, + D: Copy + Clone + Eq + Hash, + Q: Clone, + F: FnMut(Span, QueryJobId) -> Option>>, { // Visit the parent query which is a non-resumable waiter since it's on the same stack if let Some(parent) = query.parent(query_map) { @@ -335,13 +348,17 @@ where /// If a cycle is detected, this initial value is replaced with the span causing /// the cycle. #[cfg(parallel_compiler)] -fn cycle_check( - query_map: &QueryMap, - query: QueryJobId, +fn cycle_check( + query_map: &QueryMap, + query: QueryJobId, span: Span, - stack: &mut Vec<(Span, QueryJobId)>, - visited: &mut FxHashSet>, -) -> Option>> { + stack: &mut Vec<(Span, QueryJobId)>, + visited: &mut FxHashSet>, +) -> Option>> +where + D: Copy + Clone + Eq + Hash, + Q: Clone, +{ if !visited.insert(query) { return if let Some(p) = stack.iter().position(|q| q.1 == query) { // We detected a query cycle, fix up the initial span and return Some @@ -376,11 +393,15 @@ fn cycle_check( /// from `query` without going through any of the queries in `visited`. /// This is achieved with a depth first search. 
#[cfg(parallel_compiler)] -fn connected_to_root( - query_map: &QueryMap, - query: QueryJobId, - visited: &mut FxHashSet>, -) -> bool { +fn connected_to_root( + query_map: &QueryMap, + query: QueryJobId, + visited: &mut FxHashSet>, +) -> bool +where + D: Copy + Clone + Eq + Hash, + Q: Clone, +{ // We already visited this or we're deliberately ignoring it if !visited.insert(query) { return false; @@ -399,7 +420,12 @@ fn connected_to_root( // Deterministically pick an query from a list #[cfg(parallel_compiler)] -fn pick_query<'a, CTX, T, F>(query_map: &QueryMap, tcx: CTX, queries: &'a [T], f: F) -> &'a T +fn pick_query<'a, CTX, T, F>( + query_map: &QueryMap, + tcx: CTX, + queries: &'a [T], + f: F, +) -> &'a T where CTX: QueryContext, F: Fn(&T) -> (Span, QueryJobId), @@ -429,9 +455,9 @@ where /// the function returns false. #[cfg(parallel_compiler)] fn remove_cycle( - query_map: &QueryMap, + query_map: &QueryMap, jobs: &mut Vec>, - wakelist: &mut Vec>>, + wakelist: &mut Vec>>, tcx: CTX, ) -> bool { let mut visited = FxHashSet::default(); diff --git a/compiler/rustc_query_system/src/query/mod.rs b/compiler/rustc_query_system/src/query/mod.rs index 49097725bc9b9..da45565dbe6bd 100644 --- a/compiler/rustc_query_system/src/query/mod.rs +++ b/compiler/rustc_query_system/src/query/mod.rs @@ -15,8 +15,8 @@ mod config; pub use self::config::{QueryAccessors, QueryConfig, QueryDescription}; use crate::dep_graph::{DepContext, DepGraph}; +use crate::query::job::QueryMap; -use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::stable_hasher::HashStable; use rustc_data_structures::sync::Lock; use rustc_data_structures::thin_vec::ThinVec; @@ -38,9 +38,7 @@ pub trait QueryContext: DepContext { /// Get the query information from the TLS context. fn current_query_job(&self) -> Option>; - fn try_collect_active_jobs( - &self, - ) -> Option, QueryJobInfo>>; + fn try_collect_active_jobs(&self) -> Option>; /// Executes a job by changing the `ImplicitCtxt` to point to the /// new query job while it executes. It returns the diagnostics diff --git a/compiler/rustc_query_system/src/query/plumbing.rs b/compiler/rustc_query_system/src/query/plumbing.rs index ae042cc808126..50f443716f44b 100644 --- a/compiler/rustc_query_system/src/query/plumbing.rs +++ b/compiler/rustc_query_system/src/query/plumbing.rs @@ -7,7 +7,7 @@ use crate::dep_graph::{DepNodeIndex, SerializedDepNodeIndex}; use crate::query::caches::QueryCache; use crate::query::config::{QueryDescription, QueryVtable, QueryVtableExt}; use crate::query::job::{QueryInfo, QueryJob, QueryJobId, QueryJobInfo, QueryShardJobId}; -use crate::query::QueryContext; +use crate::query::{QueryContext, QueryMap}; #[cfg(not(parallel_compiler))] use rustc_data_structures::cold_path; @@ -20,8 +20,6 @@ use rustc_errors::{Diagnostic, FatalError}; use rustc_span::source_map::DUMMY_SP; use rustc_span::Span; use std::collections::hash_map::Entry; -use std::convert::TryFrom; -use std::fmt::Debug; use std::hash::{Hash, Hasher}; use std::mem; use std::num::NonZeroU32; @@ -29,33 +27,33 @@ use std::ptr; #[cfg(debug_assertions)] use std::sync::atomic::{AtomicUsize, Ordering}; -pub(super) struct QueryStateShard { +pub(super) struct QueryStateShard { pub(super) cache: C, - active: FxHashMap>, + active: FxHashMap>, /// Used to generate unique ids for active jobs. 
jobs: u32, } -impl Default for QueryStateShard { - fn default() -> QueryStateShard { +impl Default for QueryStateShard { + fn default() -> QueryStateShard { QueryStateShard { cache: Default::default(), active: Default::default(), jobs: 0 } } } -pub struct QueryState { +pub struct QueryState { cache: C, - shards: Sharded>, + shards: Sharded>, #[cfg(debug_assertions)] pub cache_hits: AtomicUsize, } -impl QueryState { +impl QueryState { #[inline] pub(super) fn get_lookup<'tcx>( &'tcx self, key: &C::Key, - ) -> QueryLookup<'tcx, CTX, C::Key, C::Sharded> { + ) -> QueryLookup<'tcx, D, Q, C::Key, C::Sharded> { // We compute the key's hash once and then use it for both the // shard lookup and the hashmap lookup. This relies on the fact // that both of them use `FxHasher`. @@ -70,16 +68,21 @@ impl QueryState { } /// Indicates the state of a query for a given key in a query map. -enum QueryResult { +enum QueryResult { /// An already executing query. The query job can be used to await for its completion. - Started(QueryJob), + Started(QueryJob), /// The query panicked. Queries trying to wait on this will raise a fatal error which will /// silently panic. Poisoned, } -impl QueryState { +impl QueryState +where + D: Copy + Clone + Eq + Hash, + Q: Clone, + C: QueryCache, +{ #[inline(always)] pub fn iter_results( &self, @@ -98,13 +101,10 @@ impl QueryState { pub fn try_collect_active_jobs( &self, - kind: CTX::DepKind, - make_query: fn(C::Key) -> CTX::Query, - jobs: &mut FxHashMap, QueryJobInfo>, - ) -> Option<()> - where - C::Key: Clone, - { + kind: D, + make_query: fn(C::Key) -> Q, + jobs: &mut QueryMap, + ) -> Option<()> { // We use try_lock_shards here since we are called from the // deadlock handler, and this shouldn't be locked. let shards = self.shards.try_lock_shards()?; @@ -112,8 +112,7 @@ impl QueryState { jobs.extend(shards.flat_map(|(shard_id, shard)| { shard.active.iter().filter_map(move |(k, v)| { if let QueryResult::Started(ref job) = *v { - let id = - QueryJobId { job: job.id, shard: u16::try_from(shard_id).unwrap(), kind }; + let id = QueryJobId::new(job.id, shard_id, kind); let info = QueryInfo { span: job.span, query: make_query(k.clone()) }; Some((id, QueryJobInfo { info, job: job.clone() })) } else { @@ -126,8 +125,8 @@ impl QueryState { } } -impl Default for QueryState { - fn default() -> QueryState { +impl Default for QueryState { + fn default() -> QueryState { QueryState { cache: C::default(), shards: Default::default(), @@ -138,28 +137,30 @@ impl Default for QueryState { } /// Values used when checking a query cache which can be reused on a cache-miss to execute the query. -pub struct QueryLookup<'tcx, CTX: QueryContext, K, C> { +pub struct QueryLookup<'tcx, D, Q, K, C> { pub(super) key_hash: u64, shard: usize, - pub(super) lock: LockGuard<'tcx, QueryStateShard>, + pub(super) lock: LockGuard<'tcx, QueryStateShard>, } /// A type representing the responsibility to execute the job in the `job` field. /// This will poison the relevant query if dropped. 
-struct JobOwner<'tcx, CTX: QueryContext, C> +struct JobOwner<'tcx, D, Q, C> where + D: Copy + Clone + Eq + Hash, + Q: Clone, C: QueryCache, - C::Key: Eq + Hash + Clone + Debug, { - state: &'tcx QueryState, + state: &'tcx QueryState, key: C::Key, - id: QueryJobId, + id: QueryJobId, } -impl<'tcx, CTX: QueryContext, C> JobOwner<'tcx, CTX, C> +impl<'tcx, D, Q, C> JobOwner<'tcx, D, Q, C> where + D: Copy + Clone + Eq + Hash, + Q: Clone, C: QueryCache, - C::Key: Eq + Hash + Clone + Debug, { /// Either gets a `JobOwner` corresponding the query, allowing us to /// start executing the query, or returns with the result of the query. @@ -170,14 +171,14 @@ where /// This function is inlined because that results in a noticeable speed-up /// for some compile-time benchmarks. #[inline(always)] - fn try_start<'a, 'b>( + fn try_start<'a, 'b, CTX>( tcx: CTX, - state: &'b QueryState, + state: &'b QueryState, span: Span, key: &C::Key, - mut lookup: QueryLookup<'a, CTX, C::Key, C::Sharded>, + mut lookup: QueryLookup<'a, CTX::DepKind, CTX::Query, C::Key, C::Sharded>, query: &QueryVtable, - ) -> TryGetJob<'b, CTX, C> + ) -> TryGetJob<'b, CTX::DepKind, CTX::Query, C> where CTX: QueryContext, { @@ -229,7 +230,12 @@ where // so we just return the error. #[cfg(not(parallel_compiler))] return TryGetJob::Cycle(cold_path(|| { - let value = query.handle_cycle_error(tcx, latch.find_cycle_in_stack(tcx, span)); + let error: CycleError = latch.find_cycle_in_stack( + tcx.try_collect_active_jobs().unwrap(), + &tcx.current_query_job(), + span, + ); + let value = query.handle_cycle_error(tcx, error); state.cache.store_nocache(value) })); @@ -237,7 +243,7 @@ where // thread. #[cfg(parallel_compiler)] { - let result = latch.wait_on(tcx, span); + let result = latch.wait_on(tcx.current_query_job(), span); if let Err(cycle) = result { let value = query.handle_cycle_error(tcx, cycle); @@ -297,9 +303,11 @@ where (result, diagnostics.into_inner()) } -impl<'tcx, CTX: QueryContext, C: QueryCache> Drop for JobOwner<'tcx, CTX, C> +impl<'tcx, D, Q, C> Drop for JobOwner<'tcx, D, Q, C> where - C::Key: Eq + Hash + Clone + Debug, + D: Copy + Clone + Eq + Hash, + Q: Clone, + C: QueryCache, { #[inline(never)] #[cold] @@ -330,12 +338,14 @@ pub struct CycleError { } /// The result of `try_start`. -enum TryGetJob<'tcx, CTX: QueryContext, C: QueryCache> +enum TryGetJob<'tcx, D, Q, C> where - C::Key: Eq + Hash + Clone + Debug, + D: Copy + Clone + Eq + Hash, + Q: Clone, + C: QueryCache, { /// The query is not yet started. Contains a guard to the cache eventually used to start it. - NotYetStarted(JobOwner<'tcx, CTX, C>), + NotYetStarted(JobOwner<'tcx, D, Q, C>), /// The query was already completed. 
/// Returns the result of the query and its dep-node index @@ -354,7 +364,7 @@ where #[inline(always)] fn try_get_cached( tcx: CTX, - state: &QueryState, + state: &QueryState, key: C::Key, // `on_hit` can be called while holding a lock to the query cache on_hit: OnHit, @@ -364,7 +374,7 @@ where C: QueryCache, CTX: QueryContext, OnHit: FnOnce(&C::Stored, DepNodeIndex) -> R, - OnMiss: FnOnce(C::Key, QueryLookup<'_, CTX, C::Key, C::Sharded>) -> R, + OnMiss: FnOnce(C::Key, QueryLookup<'_, CTX::DepKind, CTX::Query, C::Key, C::Sharded>) -> R, { state.cache.lookup( state, @@ -386,19 +396,20 @@ where #[inline(always)] fn try_execute_query( tcx: CTX, - state: &QueryState, + state: &QueryState, span: Span, key: C::Key, - lookup: QueryLookup<'_, CTX, C::Key, C::Sharded>, + lookup: QueryLookup<'_, CTX::DepKind, CTX::Query, C::Key, C::Sharded>, query: &QueryVtable, ) -> C::Stored where C: QueryCache, - C::Key: Eq + Clone + Debug + crate::dep_graph::DepNodeParams, - C::Stored: Clone, + C::Key: crate::dep_graph::DepNodeParams, CTX: QueryContext, { - let job = match JobOwner::try_start(tcx, state, span, &key, lookup, query) { + let job = match JobOwner::<'_, CTX::DepKind, CTX::Query, C>::try_start( + tcx, state, span, &key, lookup, query, + ) { TryGetJob::NotYetStarted(job) => job, TryGetJob::Cycle(result) => return result, #[cfg(parallel_compiler)] @@ -559,14 +570,12 @@ fn incremental_verify_ich( fn force_query_with_job( tcx: CTX, key: C::Key, - job: JobOwner<'_, CTX, C>, + job: JobOwner<'_, CTX::DepKind, CTX::Query, C>, dep_node: DepNode, query: &QueryVtable, ) -> (C::Stored, DepNodeIndex) where C: QueryCache, - C::Key: Eq + Clone + Debug, - C::Stored: Clone, CTX: QueryContext, { // If the following assertion triggers, it can have two reasons: @@ -617,7 +626,7 @@ where #[inline(never)] fn get_query_impl( tcx: CTX, - state: &QueryState, + state: &QueryState, span: Span, key: C::Key, query: &QueryVtable, @@ -625,8 +634,7 @@ fn get_query_impl( where CTX: QueryContext, C: QueryCache, - C::Key: Eq + Clone + crate::dep_graph::DepNodeParams, - C::Stored: Clone, + C::Key: crate::dep_graph::DepNodeParams, { try_get_cached( tcx, @@ -650,12 +658,12 @@ where #[inline(never)] fn ensure_query_impl( tcx: CTX, - state: &QueryState, + state: &QueryState, key: C::Key, query: &QueryVtable, ) where C: QueryCache, - C::Key: Eq + Clone + crate::dep_graph::DepNodeParams, + C::Key: crate::dep_graph::DepNodeParams, CTX: QueryContext, { if query.eval_always { @@ -687,14 +695,14 @@ fn ensure_query_impl( #[inline(never)] fn force_query_impl( tcx: CTX, - state: &QueryState, + state: &QueryState, key: C::Key, span: Span, dep_node: DepNode, query: &QueryVtable, ) where C: QueryCache, - C::Key: Eq + Clone + crate::dep_graph::DepNodeParams, + C::Key: crate::dep_graph::DepNodeParams, CTX: QueryContext, { // We may be concurrently trying both execute and force a query. 
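The plumbing changes above consistently drop the single `CTX: QueryContext` parameter from the job, latch, and state types in favor of the two components they actually use: the dep-kind `D` and the query description `Q`. A minimal sketch of that decoupling pattern under hypothetical names (`Ctx`, `StateBefore`, `StateAfter` are not rustc types):

    #![allow(dead_code)]
    use std::collections::HashMap;
    use std::hash::Hash;

    trait Ctx {
        type DepKind: Copy + Eq + Hash;
        type Query: Clone;
    }

    // Before: the state names the whole context trait, so everything that
    // touches it must also know about `Ctx`.
    struct StateBefore<C: Ctx> {
        active: HashMap<C::DepKind, C::Query>,
    }

    // After: only the component parameters are required, mirroring how
    // `QueryState`, `QueryJob`, and `QueryJobId` now take `D` and `Q` directly.
    struct StateAfter<D: Copy + Eq + Hash, Q: Clone> {
        active: HashMap<D, Q>,
    }

    fn main() {
        let state: StateAfter<u32, String> = StateAfter { active: HashMap::new() };
        assert!(state.active.is_empty());
    }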
@@ -708,7 +716,9 @@ fn force_query_impl( // Cache hit, do nothing }, |key, lookup| { - let job = match JobOwner::try_start(tcx, state, span, &key, lookup, query) { + let job = match JobOwner::<'_, CTX::DepKind, CTX::Query, C>::try_start( + tcx, state, span, &key, lookup, query, + ) { TryGetJob::NotYetStarted(job) => job, TryGetJob::Cycle(_) => return, #[cfg(parallel_compiler)] diff --git a/compiler/rustc_resolve/src/build_reduced_graph.rs b/compiler/rustc_resolve/src/build_reduced_graph.rs index a48d002b2a35b..feeea726f4c1b 100644 --- a/compiler/rustc_resolve/src/build_reduced_graph.rs +++ b/compiler/rustc_resolve/src/build_reduced_graph.rs @@ -95,6 +95,27 @@ impl<'a> Resolver<'a> { } } + /// Walks up the tree of definitions starting at `def_id`, + /// stopping at the first `DefKind::Mod` encountered + fn nearest_mod_parent(&mut self, def_id: DefId) -> Module<'a> { + let def_key = self.cstore().def_key(def_id); + + let mut parent_id = DefId { + krate: def_id.krate, + index: def_key.parent.expect("failed to get parent for module"), + }; + // The immediate parent may not be a module + // (e.g. `const _: () = { #[path = "foo.rs"] mod foo; };`) + // Walk up the tree until we hit a module or the crate root. + while parent_id.index != CRATE_DEF_INDEX + && self.cstore().def_kind(parent_id) != DefKind::Mod + { + let parent_def_key = self.cstore().def_key(parent_id); + parent_id.index = parent_def_key.parent.expect("failed to get parent for module"); + } + self.get_module(parent_id) + } + crate fn get_module(&mut self, def_id: DefId) -> Module<'a> { // If this is a local module, it will be in `module_map`, no need to recalculate it. if let Some(def_id) = def_id.as_local() { @@ -116,11 +137,8 @@ impl<'a> Resolver<'a> { .data .get_opt_name() .expect("given a DefId that wasn't a module"); - // This unwrap is safe since we know this isn't the root - let parent = Some(self.get_module(DefId { - index: def_key.parent.expect("failed to get parent for module"), - ..def_id - })); + + let parent = Some(self.nearest_mod_parent(def_id)); (name, parent) }; @@ -145,8 +163,24 @@ impl<'a> Resolver<'a> { if let Some(id) = def_id.as_local() { self.local_macro_def_scopes[&id] } else { - let module_def_id = ty::DefIdTree::parent(&*self, def_id).unwrap(); - self.get_module(module_def_id) + // This is not entirely correct - a `macro_rules!` macro may occur + // inside a 'block' module: + // + // ```rust + // const _: () = { + // #[macro_export] + // macro_rules! my_macro { + // () => {}; + // } + // ` + // We don't record this information for external crates, so + // the module we compute here will be the closest 'mod' item + // (not necesssarily the actual parent of the `macro_rules!` + // macro). `macro_rules!` macros can't use def-site hygiene, + // so this hopefully won't be a problem. + // + // See https://github.com/rust-lang/rust/pull/77984#issuecomment-712445508 + self.nearest_mod_parent(def_id) } } @@ -613,12 +647,21 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { /// Constructs the reduced graph for one item. fn build_reduced_graph_for_item(&mut self, item: &'b Item) { + if matches!(item.kind, ItemKind::Mod(..)) && item.ident.name == kw::Invalid { + // Fake crate root item from expand. 
+ return; + } + let parent_scope = &self.parent_scope; let parent = parent_scope.module; let expansion = parent_scope.expansion; let ident = item.ident; let sp = item.span; let vis = self.resolve_visibility(&item.vis); + let local_def_id = self.r.local_def_id(item.id); + let def_id = local_def_id.to_def_id(); + + self.r.visibilities.insert(local_def_id, vis); match item.kind { ItemKind::Use(ref use_tree) => { @@ -651,10 +694,12 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { } else if orig_name == Some(kw::SelfLower) { self.r.graph_root } else { - let def_id = self.r.local_def_id(item.id); - let crate_id = - self.r.crate_loader.process_extern_crate(item, &self.r.definitions, def_id); - self.r.extern_crate_map.insert(def_id, crate_id); + let crate_id = self.r.crate_loader.process_extern_crate( + item, + &self.r.definitions, + local_def_id, + ); + self.r.extern_crate_map.insert(local_def_id, crate_id); self.r.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX }) }; @@ -705,25 +750,16 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { self.r.define(parent, ident, TypeNS, imported_binding); } - ItemKind::Mod(..) if ident.name == kw::Invalid => {} // Crate root - ItemKind::Mod(..) => { - let def_id = self.r.local_def_id(item.id); - let module_kind = ModuleKind::Def(DefKind::Mod, def_id.to_def_id(), ident.name); + let module_kind = ModuleKind::Def(DefKind::Mod, def_id, ident.name); let module = self.r.arenas.alloc_module(ModuleData { no_implicit_prelude: parent.no_implicit_prelude || { self.r.session.contains_name(&item.attrs, sym::no_implicit_prelude) }, - ..ModuleData::new( - Some(parent), - module_kind, - def_id.to_def_id(), - expansion, - item.span, - ) + ..ModuleData::new(Some(parent), module_kind, def_id, expansion, item.span) }); self.r.define(parent, ident, TypeNS, (module, vis, sp, expansion)); - self.r.module_map.insert(def_id, module); + self.r.module_map.insert(local_def_id, module); // Descend into the module. self.parent_scope.module = module; @@ -731,15 +767,15 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { // These items live in the value namespace. ItemKind::Static(..) => { - let res = Res::Def(DefKind::Static, self.r.local_def_id(item.id).to_def_id()); + let res = Res::Def(DefKind::Static, def_id); self.r.define(parent, ident, ValueNS, (res, vis, sp, expansion)); } ItemKind::Const(..) => { - let res = Res::Def(DefKind::Const, self.r.local_def_id(item.id).to_def_id()); + let res = Res::Def(DefKind::Const, def_id); self.r.define(parent, ident, ValueNS, (res, vis, sp, expansion)); } ItemKind::Fn(..) => { - let res = Res::Def(DefKind::Fn, self.r.local_def_id(item.id).to_def_id()); + let res = Res::Def(DefKind::Fn, def_id); self.r.define(parent, ident, ValueNS, (res, vis, sp, expansion)); // Functions introducing procedural macros reserve a slot @@ -749,13 +785,11 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { // These items live in the type namespace. ItemKind::TyAlias(..) => { - let res = Res::Def(DefKind::TyAlias, self.r.local_def_id(item.id).to_def_id()); + let res = Res::Def(DefKind::TyAlias, def_id); self.r.define(parent, ident, TypeNS, (res, vis, sp, expansion)); } ItemKind::Enum(_, _) => { - let def_id = self.r.local_def_id(item.id).to_def_id(); - self.r.variant_vis.insert(def_id, vis); let module_kind = ModuleKind::Def(DefKind::Enum, def_id, ident.name); let module = self.r.new_module( parent, @@ -769,14 +803,13 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { } ItemKind::TraitAlias(..) 
=> { - let res = Res::Def(DefKind::TraitAlias, self.r.local_def_id(item.id).to_def_id()); + let res = Res::Def(DefKind::TraitAlias, def_id); self.r.define(parent, ident, TypeNS, (res, vis, sp, expansion)); } // These items live in both the type and value namespaces. ItemKind::Struct(ref vdata, _) => { // Define a name in the type namespace. - let def_id = self.r.local_def_id(item.id).to_def_id(); let res = Res::Def(DefKind::Struct, def_id); self.r.define(parent, ident, TypeNS, (res, vis, sp, expansion)); @@ -810,17 +843,19 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { } ret_fields.push(field_vis); } + let ctor_def_id = self.r.local_def_id(ctor_node_id); let ctor_res = Res::Def( DefKind::Ctor(CtorOf::Struct, CtorKind::from_ast(vdata)), - self.r.local_def_id(ctor_node_id).to_def_id(), + ctor_def_id.to_def_id(), ); self.r.define(parent, ident, ValueNS, (ctor_res, ctor_vis, sp, expansion)); + self.r.visibilities.insert(ctor_def_id, ctor_vis); + self.r.struct_constructors.insert(def_id, (ctor_res, ctor_vis, ret_fields)); } } ItemKind::Union(ref vdata, _) => { - let def_id = self.r.local_def_id(item.id).to_def_id(); let res = Res::Def(DefKind::Union, def_id); self.r.define(parent, ident, TypeNS, (res, vis, sp, expansion)); @@ -829,8 +864,6 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { } ItemKind::Trait(..) => { - let def_id = self.r.local_def_id(item.id).to_def_id(); - // Add all the items within to a new module. let module_kind = ModuleKind::Def(DefKind::Trait, def_id, ident.name); let module = self.r.new_module( @@ -845,6 +878,9 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { } // These items do not add names to modules. + ItemKind::Impl { of_trait: Some(..), .. } => { + self.r.trait_impl_items.insert(local_def_id); + } ItemKind::Impl { .. } | ItemKind::ForeignMod(..) | ItemKind::GlobalAsm(..) => {} ItemKind::MacroDef(..) | ItemKind::MacCall(_) => unreachable!(), @@ -853,22 +889,20 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { /// Constructs the reduced graph for one foreign item. fn build_reduced_graph_for_foreign_item(&mut self, item: &ForeignItem) { - let (res, ns) = match item.kind { - ForeignItemKind::Fn(..) => { - (Res::Def(DefKind::Fn, self.r.local_def_id(item.id).to_def_id()), ValueNS) - } - ForeignItemKind::Static(..) => { - (Res::Def(DefKind::Static, self.r.local_def_id(item.id).to_def_id()), ValueNS) - } - ForeignItemKind::TyAlias(..) => { - (Res::Def(DefKind::ForeignTy, self.r.local_def_id(item.id).to_def_id()), TypeNS) - } + let local_def_id = self.r.local_def_id(item.id); + let def_id = local_def_id.to_def_id(); + let (def_kind, ns) = match item.kind { + ForeignItemKind::Fn(..) => (DefKind::Fn, ValueNS), + ForeignItemKind::Static(..) => (DefKind::Static, ValueNS), + ForeignItemKind::TyAlias(..) 
=> (DefKind::ForeignTy, TypeNS), ForeignItemKind::MacCall(_) => unreachable!(), }; let parent = self.parent_scope.module; let expansion = self.parent_scope.expansion; let vis = self.resolve_visibility(&item.vis); + let res = Res::Def(def_kind, def_id); self.r.define(parent, item.ident, ns, (res, vis, item.span, expansion)); + self.r.visibilities.insert(local_def_id, vis); } fn build_reduced_graph_for_block(&mut self, block: &Block) { @@ -1205,6 +1239,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { self.r.check_reserved_macro_name(ident, res); self.insert_unused_macro(ident, def_id, item.id, span); } + self.r.visibilities.insert(def_id, vis); MacroRulesScope::Binding(self.r.arenas.alloc_macro_rules_binding(MacroRulesBinding { parent_macro_rules_scope: parent_scope.macro_rules, binding, @@ -1224,6 +1259,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { self.insert_unused_macro(ident, def_id, item.id, span); } self.r.define(module, ident, MacroNS, (res, vis, span, expansion)); + self.r.visibilities.insert(def_id, vis); self.parent_scope.macro_rules } } @@ -1297,36 +1333,64 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> { } fn visit_assoc_item(&mut self, item: &'b AssocItem, ctxt: AssocCtxt) { - let parent = self.parent_scope.module; - if let AssocItemKind::MacCall(_) = item.kind { self.visit_invoc(item.id); return; } - if let AssocCtxt::Impl = ctxt { - self.resolve_visibility(&item.vis); - visit::walk_assoc_item(self, item, ctxt); - return; - } + let local_def_id = self.r.local_def_id(item.id); + let def_id = local_def_id.to_def_id(); + let vis = match ctxt { + AssocCtxt::Trait => { + let (def_kind, ns) = match item.kind { + AssocItemKind::Const(..) => (DefKind::AssocConst, ValueNS), + AssocItemKind::Fn(_, ref sig, _, _) => { + if sig.decl.has_self() { + self.r.has_self.insert(def_id); + } + (DefKind::AssocFn, ValueNS) + } + AssocItemKind::TyAlias(..) => (DefKind::AssocTy, TypeNS), + AssocItemKind::MacCall(_) => bug!(), // handled above + }; - // Add the item to the trait info. - let item_def_id = self.r.local_def_id(item.id).to_def_id(); - let (res, ns) = match item.kind { - AssocItemKind::Const(..) => (Res::Def(DefKind::AssocConst, item_def_id), ValueNS), - AssocItemKind::Fn(_, ref sig, _, _) => { - if sig.decl.has_self() { - self.r.has_self.insert(item_def_id); + let parent = self.parent_scope.module; + let expansion = self.parent_scope.expansion; + let res = Res::Def(def_kind, def_id); + // Trait item visibility is inherited from its trait when not specified explicitly. + let vis = match &item.vis.kind { + ast::VisibilityKind::Inherited => { + self.r.visibilities[&parent.def_id().unwrap().expect_local()] + } + _ => self.resolve_visibility(&item.vis), + }; + // FIXME: For historical reasons the binding visibility is set to public, + // use actual visibility here instead, using enum variants as an example. + let vis_hack = ty::Visibility::Public; + self.r.define(parent, item.ident, ns, (res, vis_hack, item.span, expansion)); + Some(vis) + } + AssocCtxt::Impl => { + // Trait impl item visibility is inherited from its trait when not specified + // explicitly. In that case we cannot determine it here in early resolve, + // so we leave a hole in the visibility table to be filled later. + // Inherent impl item visibility is never inherited from other items. 
+ if matches!(item.vis.kind, ast::VisibilityKind::Inherited) + && self + .r + .trait_impl_items + .contains(&ty::DefIdTree::parent(&*self.r, def_id).unwrap().expect_local()) + { + None + } else { + Some(self.resolve_visibility(&item.vis)) } - (Res::Def(DefKind::AssocFn, item_def_id), ValueNS) } - AssocItemKind::TyAlias(..) => (Res::Def(DefKind::AssocTy, item_def_id), TypeNS), - AssocItemKind::MacCall(_) => bug!(), // handled above }; - let vis = ty::Visibility::Public; - let expansion = self.parent_scope.expansion; - self.r.define(parent, item.ident, ns, (res, vis, item.span, expansion)); + if let Some(vis) = vis { + self.r.visibilities.insert(local_def_id, vis); + } visit::walk_assoc_item(self, item, ctxt); } @@ -1394,7 +1458,8 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> { if sf.is_placeholder { self.visit_invoc(sf.id); } else { - self.resolve_visibility(&sf.vis); + let vis = self.resolve_visibility(&sf.vis); + self.r.visibilities.insert(self.r.local_def_id(sf.id), vis); visit::walk_struct_field(self, sf); } } @@ -1408,22 +1473,30 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> { } let parent = self.parent_scope.module; - let vis = self.r.variant_vis[&parent.def_id().expect("enum without def-id")]; + let vis = match variant.vis.kind { + // Variant visibility is inherited from its enum when not specified explicitly. + ast::VisibilityKind::Inherited => { + self.r.visibilities[&parent.def_id().unwrap().expect_local()] + } + _ => self.resolve_visibility(&variant.vis), + }; let expn_id = self.parent_scope.expansion; let ident = variant.ident; // Define a name in the type namespace. - let def_id = self.r.local_def_id(variant.id).to_def_id(); - let res = Res::Def(DefKind::Variant, def_id); + let def_id = self.r.local_def_id(variant.id); + let res = Res::Def(DefKind::Variant, def_id.to_def_id()); self.r.define(parent, ident, TypeNS, (res, vis, variant.span, expn_id)); + self.r.visibilities.insert(def_id, vis); - // If the variant is marked as non_exhaustive then lower the visibility to within the - // crate. - let mut ctor_vis = vis; - let has_non_exhaustive = self.r.session.contains_name(&variant.attrs, sym::non_exhaustive); - if has_non_exhaustive && vis == ty::Visibility::Public { - ctor_vis = ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)); - } + // If the variant is marked as non_exhaustive then lower the visibility to within the crate. + let ctor_vis = if vis == ty::Visibility::Public + && self.r.session.contains_name(&variant.attrs, sym::non_exhaustive) + { + ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)) + } else { + vis + }; // Define a constructor name in the value namespace. // Braced variants, unlike structs, generate unusable names in @@ -1431,12 +1504,15 @@ impl<'a, 'b> Visitor<'b> for BuildReducedGraphVisitor<'a, 'b> { // It's ok to use the variant's id as a ctor id since an // error will be reported on any use of such resolution anyway. 
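The comments above spell out the inheritance rule: an associated item with no written visibility takes the visibility already recorded for its parent trait, while an explicit visibility is resolved as usual (and trait-impl items with inherited visibility are deferred entirely). A toy sketch of that lookup, again with hypothetical stand-in types:

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct LocalDefId(u32);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Visibility {
    Public,
    Crate,
}

// Toy stand-in for ast::VisibilityKind.
enum VisibilityKind {
    Inherited,
    Explicit(Visibility),
}

// If the item writes no visibility, fall back to the parent's already
// recorded visibility; otherwise use the explicit one.
fn assoc_item_vis(
    visibilities: &HashMap<LocalDefId, Visibility>,
    parent_trait: LocalDefId,
    written: &VisibilityKind,
) -> Visibility {
    match written {
        VisibilityKind::Inherited => visibilities[&parent_trait],
        VisibilityKind::Explicit(vis) => *vis,
    }
}

fn main() {
    let mut visibilities = HashMap::new();
    let trait_id = LocalDefId(10);
    visibilities.insert(trait_id, Visibility::Crate);

    let inherited = assoc_item_vis(&visibilities, trait_id, &VisibilityKind::Inherited);
    assert_eq!(inherited, Visibility::Crate);

    let explicit =
        assoc_item_vis(&visibilities, trait_id, &VisibilityKind::Explicit(Visibility::Public));
    assert_eq!(explicit, Visibility::Public);
}
```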
let ctor_node_id = variant.data.ctor_id().unwrap_or(variant.id); - let ctor_def_id = self.r.local_def_id(ctor_node_id).to_def_id(); + let ctor_def_id = self.r.local_def_id(ctor_node_id); let ctor_kind = CtorKind::from_ast(&variant.data); - let ctor_res = Res::Def(DefKind::Ctor(CtorOf::Variant, ctor_kind), ctor_def_id); + let ctor_res = Res::Def(DefKind::Ctor(CtorOf::Variant, ctor_kind), ctor_def_id.to_def_id()); self.r.define(parent, ident, ValueNS, (ctor_res, ctor_vis, variant.span, expn_id)); + if ctor_def_id != def_id { + self.r.visibilities.insert(ctor_def_id, ctor_vis); + } // Record field names for error reporting. - self.insert_field_names_local(ctor_def_id, &variant.data); + self.insert_field_names_local(ctor_def_id.to_def_id(), &variant.data); visit::walk_variant(self, variant); } diff --git a/compiler/rustc_resolve/src/def_collector.rs b/compiler/rustc_resolve/src/def_collector.rs index 5d5088de31b97..a4de4d500f5e9 100644 --- a/compiler/rustc_resolve/src/def_collector.rs +++ b/compiler/rustc_resolve/src/def_collector.rs @@ -76,6 +76,7 @@ impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> { let def_data = match &i.kind { ItemKind::Impl { .. } => DefPathData::Impl, ItemKind::Mod(..) if i.ident.name == kw::Invalid => { + // Fake crate root item from expand. return visit::walk_item(self, i); } ItemKind::Mod(..) @@ -239,13 +240,13 @@ impl<'a, 'b> visit::Visitor<'a> for DefCollector<'a, 'b> { fn visit_ty(&mut self, ty: &'a Ty) { match ty.kind { - TyKind::MacCall(..) => return self.visit_macro_invoc(ty.id), + TyKind::MacCall(..) => self.visit_macro_invoc(ty.id), TyKind::ImplTrait(node_id, _) => { - self.create_def(node_id, DefPathData::ImplTrait, ty.span); + let parent_def = self.create_def(node_id, DefPathData::ImplTrait, ty.span); + self.with_parent(parent_def, |this| visit::walk_ty(this, ty)); } - _ => {} + _ => visit::walk_ty(self, ty), } - visit::walk_ty(self, ty); } fn visit_stmt(&mut self, stmt: &'a Stmt) { diff --git a/compiler/rustc_resolve/src/diagnostics.rs b/compiler/rustc_resolve/src/diagnostics.rs index 774a147c114ec..33ab09a8f4280 100644 --- a/compiler/rustc_resolve/src/diagnostics.rs +++ b/compiler/rustc_resolve/src/diagnostics.rs @@ -922,6 +922,17 @@ impl<'a> Resolver<'a> { ); self.add_typo_suggestion(err, suggestion, ident.span); + let import_suggestions = self.lookup_import_candidates( + ident, + Namespace::MacroNS, + parent_scope, + |res| match res { + Res::Def(DefKind::Macro(MacroKind::Bang), _) => true, + _ => false, + }, + ); + show_candidates(err, None, &import_suggestions, false, true); + if macro_kind == MacroKind::Derive && (ident.name == sym::Send || ident.name == sym::Sync) { let msg = format!("unsafe traits like `{}` should be implemented explicitly", ident); err.span_note(ident.span, &msg); diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs index 219517b4ab2e2..7517ab66170a2 100644 --- a/compiler/rustc_resolve/src/late.rs +++ b/compiler/rustc_resolve/src/late.rs @@ -369,7 +369,7 @@ struct DiagnosticMetadata<'ast> { /// param. currently_processing_generics: bool, - /// The current enclosing function (used for better errors). + /// The current enclosing (non-closure) function (used for better errors). current_function: Option<(FnKind<'ast>, Span)>, /// A list of labels as of yet unused. Labels will be removed from this map when @@ -515,8 +515,10 @@ impl<'a: 'ast, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> { FnKind::Fn(FnCtxt::Assoc(_), ..) => NormalRibKind, FnKind::Closure(..) 
=> ClosureOrAsyncRibKind, }; - let previous_value = - replace(&mut self.diagnostic_metadata.current_function, Some((fn_kind, sp))); + let previous_value = self.diagnostic_metadata.current_function; + if matches!(fn_kind, FnKind::Fn(..)) { + self.diagnostic_metadata.current_function = Some((fn_kind, sp)); + } debug!("(resolving function) entering function"); let declaration = fn_kind.decl(); diff --git a/compiler/rustc_resolve/src/late/diagnostics.rs b/compiler/rustc_resolve/src/late/diagnostics.rs index bee05e7738280..c24b383f3b811 100644 --- a/compiler/rustc_resolve/src/late/diagnostics.rs +++ b/compiler/rustc_resolve/src/late/diagnostics.rs @@ -1330,58 +1330,17 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> { let suggest_only_tuple_variants = matches!(source, PathSource::TupleStruct(..)) || source.is_call(); - let mut suggestable_variants = if suggest_only_tuple_variants { + if suggest_only_tuple_variants { // Suggest only tuple variants regardless of whether they have fields and do not // suggest path with added parenthesis. - variants + let mut suggestable_variants = variants .iter() .filter(|(.., kind)| *kind == CtorKind::Fn) .map(|(variant, ..)| path_names_to_string(variant)) - .collect::>() - } else { - variants - .iter() - .filter(|(_, def_id, kind)| { - // Suggest only variants that have no fields (these can definitely - // be constructed). - let has_fields = - self.r.field_names.get(&def_id).map(|f| f.is_empty()).unwrap_or(false); - match kind { - CtorKind::Const => true, - CtorKind::Fn | CtorKind::Fictive if has_fields => true, - _ => false, - } - }) - .map(|(variant, _, kind)| (path_names_to_string(variant), kind)) - .map(|(variant_str, kind)| { - // Add constructor syntax where appropriate. - match kind { - CtorKind::Const => variant_str, - CtorKind::Fn => format!("({}())", variant_str), - CtorKind::Fictive => format!("({} {{}})", variant_str), - } - }) - .collect::>() - }; - - let non_suggestable_variant_count = variants.len() - suggestable_variants.len(); + .collect::>(); - if !suggestable_variants.is_empty() { - let msg = if non_suggestable_variant_count == 0 && suggestable_variants.len() == 1 { - "try using the enum's variant" - } else { - "try using one of the enum's variants" - }; + let non_suggestable_variant_count = variants.len() - suggestable_variants.len(); - err.span_suggestions( - span, - msg, - suggestable_variants.drain(..), - Applicability::MaybeIncorrect, - ); - } - - if suggest_only_tuple_variants { let source_msg = if source.is_call() { "to construct" } else if matches!(source, PathSource::TupleStruct(..)) { @@ -1390,6 +1349,21 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> { unreachable!() }; + if !suggestable_variants.is_empty() { + let msg = if non_suggestable_variant_count == 0 && suggestable_variants.len() == 1 { + format!("try {} the enum's variant", source_msg) + } else { + format!("try {} one of the enum's variants", source_msg) + }; + + err.span_suggestions( + span, + &msg, + suggestable_variants.drain(..), + Applicability::MaybeIncorrect, + ); + } + // If the enum has no tuple variants.. 
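The `late/diagnostics.rs` rewrite in this region (continuing in the hunk below) renders one suggestion string per enum variant, choosing the shape from the variant's constructor kind. The following standalone sketch shows just that formatting step with a toy `CtorKind`:

```rust
#[derive(Clone, Copy)]
enum CtorKind {
    Const,   // unit variant
    Fn,      // tuple variant
    Fictive, // braced variant
}

// Render one suggestion per variant: unit variants as-is, tuple variants
// with call parentheses, braced variants with an empty body.
fn render_suggestions(variants: &[(&str, CtorKind)]) -> Vec<String> {
    variants
        .iter()
        .map(|&(path, kind)| match kind {
            CtorKind::Const => path.to_string(),
            CtorKind::Fn => format!("({}())", path),
            CtorKind::Fictive => format!("({} {{}})", path),
        })
        .collect()
}

fn main() {
    let out = render_suggestions(&[
        ("E::Unit", CtorKind::Const),
        ("E::Tuple", CtorKind::Fn),
        ("E::Struct", CtorKind::Fictive),
    ]);
    assert_eq!(out, ["E::Unit", "(E::Tuple())", "(E::Struct {})"]);
}
```

The new code additionally renders field-bearing variants with `/* fields */` placeholders and marks those suggestions as `Applicability::HasPlaceholders`, as the hunk below shows.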
if non_suggestable_variant_count == variants.len() { err.help(&format!("the enum has no tuple variants {}", source_msg)); @@ -1408,24 +1382,76 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> { )); } } else { - let made_suggestion = non_suggestable_variant_count != variants.len(); - if made_suggestion { - if non_suggestable_variant_count == 1 { - err.help( - "you might have meant to use the enum's other variant that has fields", - ); - } else if non_suggestable_variant_count >= 1 { - err.help( - "you might have meant to use one of the enum's other variants that \ - have fields", - ); - } - } else { - if non_suggestable_variant_count == 1 { - err.help("you might have meant to use the enum's variant"); - } else if non_suggestable_variant_count >= 1 { - err.help("you might have meant to use one of the enum's variants"); + let needs_placeholder = |def_id: DefId, kind: CtorKind| { + let has_no_fields = + self.r.field_names.get(&def_id).map(|f| f.is_empty()).unwrap_or(false); + match kind { + CtorKind::Const => false, + CtorKind::Fn | CtorKind::Fictive if has_no_fields => false, + _ => true, } + }; + + let mut suggestable_variants = variants + .iter() + .filter(|(_, def_id, kind)| !needs_placeholder(*def_id, *kind)) + .map(|(variant, _, kind)| (path_names_to_string(variant), kind)) + .map(|(variant, kind)| match kind { + CtorKind::Const => variant, + CtorKind::Fn => format!("({}())", variant), + CtorKind::Fictive => format!("({} {{}})", variant), + }) + .collect::>(); + + if !suggestable_variants.is_empty() { + let msg = if suggestable_variants.len() == 1 { + "you might have meant to use the following enum variant" + } else { + "you might have meant to use one of the following enum variants" + }; + + err.span_suggestions( + span, + msg, + suggestable_variants.drain(..), + Applicability::MaybeIncorrect, + ); + } + + let mut suggestable_variants_with_placeholders = variants + .iter() + .filter(|(_, def_id, kind)| needs_placeholder(*def_id, *kind)) + .map(|(variant, _, kind)| (path_names_to_string(variant), kind)) + .filter_map(|(variant, kind)| match kind { + CtorKind::Fn => Some(format!("({}(/* fields */))", variant)), + CtorKind::Fictive => Some(format!("({} {{ /* fields */ }})", variant)), + _ => None, + }) + .collect::>(); + + if !suggestable_variants_with_placeholders.is_empty() { + let msg = match ( + suggestable_variants.is_empty(), + suggestable_variants_with_placeholders.len(), + ) { + (true, 1) => "the following enum variant is available", + (true, _) => "the following enum variants are available", + (false, 1) => "alternatively, the following enum variant is available", + (false, _) => "alternatively, the following enum variants are also available", + }; + + err.span_suggestions( + span, + msg, + suggestable_variants_with_placeholders.drain(..), + Applicability::HasPlaceholders, + ); + } + }; + + if def_id.is_local() { + if let Some(span) = self.def_span(def_id) { + err.span_note(span, "the enum is defined here"); } } } diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs index 6677a5ffe2867..b12e516fa3ed5 100644 --- a/compiler/rustc_resolve/src/lib.rs +++ b/compiler/rustc_resolve/src/lib.rs @@ -19,7 +19,7 @@ pub use rustc_hir::def::{Namespace, PerNS}; use Determinacy::*; -use rustc_arena::TypedArena; +use rustc_arena::{DroplessArena, TypedArena}; use rustc_ast::node_id::NodeMap; use rustc_ast::unwrap_or; use rustc_ast::visit::{self, Visitor}; @@ -944,7 +944,8 @@ pub struct Resolver<'a> { /// Maps glob imports to the names of items actually 
imported. glob_map: FxHashMap>, - + /// Visibilities in "lowered" form, for all entities that have them. + visibilities: FxHashMap, used_imports: FxHashSet<(NodeId, Namespace)>, maybe_unused_trait_imports: FxHashSet, maybe_unused_extern_crates: Vec<(LocalDefId, Span)>, @@ -1008,10 +1009,6 @@ pub struct Resolver<'a> { /// Features enabled for this crate. active_features: FxHashSet, - /// Stores enum visibilities to properly build a reduced graph - /// when visiting the correspondent variants. - variant_vis: DefIdMap, - lint_buffer: LintBuffer, next_node_id: NodeId, @@ -1028,6 +1025,9 @@ pub struct Resolver<'a> { invocation_parents: FxHashMap, next_disambiguator: FxHashMap<(LocalDefId, DefPathData), u32>, + /// Some way to know that we are in a *trait* impl in `visit_assoc_item`. + /// FIXME: Replace with a more general AST map (together with some other fields). + trait_impl_items: FxHashSet, } /// Nothing really interesting here; it just provides memory for the rest of the crate. @@ -1035,12 +1035,10 @@ pub struct Resolver<'a> { pub struct ResolverArenas<'a> { modules: TypedArena>, local_modules: RefCell>>, - name_bindings: TypedArena>, imports: TypedArena>, name_resolutions: TypedArena>>, - macro_rules_bindings: TypedArena>, ast_paths: TypedArena, - pattern_spans: TypedArena, + dropless: DroplessArena, } impl<'a> ResolverArenas<'a> { @@ -1055,7 +1053,7 @@ impl<'a> ResolverArenas<'a> { self.local_modules.borrow() } fn alloc_name_binding(&'a self, name_binding: NameBinding<'a>) -> &'a NameBinding<'a> { - self.name_bindings.alloc(name_binding) + self.dropless.alloc(name_binding) } fn alloc_import(&'a self, import: Import<'a>) -> &'a Import<'_> { self.imports.alloc(import) @@ -1067,13 +1065,13 @@ impl<'a> ResolverArenas<'a> { &'a self, binding: MacroRulesBinding<'a>, ) -> &'a MacroRulesBinding<'a> { - self.macro_rules_bindings.alloc(binding) + self.dropless.alloc(binding) } fn alloc_ast_paths(&'a self, paths: &[ast::Path]) -> &'a [ast::Path] { self.ast_paths.alloc_from_iter(paths.iter().cloned()) } fn alloc_pattern_spans(&'a self, spans: impl Iterator) -> &'a [Span] { - self.pattern_spans.alloc_from_iter(spans) + self.dropless.alloc_from_iter(spans) } } @@ -1195,7 +1193,8 @@ impl<'a> Resolver<'a> { metadata_loader: &'a MetadataLoaderDyn, arenas: &'a ResolverArenas<'a>, ) -> Resolver<'a> { - let root_def_id = DefId::local(CRATE_DEF_INDEX); + let root_local_def_id = LocalDefId { local_def_index: CRATE_DEF_INDEX }; + let root_def_id = root_local_def_id.to_def_id(); let root_module_kind = ModuleKind::Def(DefKind::Mod, root_def_id, kw::Invalid); let graph_root = arenas.alloc_module(ModuleData { no_implicit_prelude: session.contains_name(&krate.attrs, sym::no_implicit_prelude), @@ -1213,11 +1212,14 @@ impl<'a> Resolver<'a> { ) }); let mut module_map = FxHashMap::default(); - module_map.insert(LocalDefId { local_def_index: CRATE_DEF_INDEX }, graph_root); + module_map.insert(root_local_def_id, graph_root); let definitions = Definitions::new(crate_name, session.local_crate_disambiguator()); let root = definitions.get_root_def(); + let mut visibilities = FxHashMap::default(); + visibilities.insert(root_local_def_id, ty::Visibility::Public); + let mut def_id_to_span = IndexVec::default(); assert_eq!(def_id_to_span.push(rustc_span::DUMMY_SP), root); let mut def_id_to_node_id = IndexVec::default(); @@ -1240,9 +1242,6 @@ impl<'a> Resolver<'a> { extern_prelude.insert(Ident::with_dummy_span(sym::core), Default::default()); if !session.contains_name(&krate.attrs, sym::no_std) { 
extern_prelude.insert(Ident::with_dummy_span(sym::std), Default::default()); - if session.rust_2018() { - extern_prelude.insert(Ident::with_dummy_span(sym::meta), Default::default()); - } } } @@ -1293,7 +1292,7 @@ impl<'a> Resolver<'a> { ast_transform_scopes: FxHashMap::default(), glob_map: Default::default(), - + visibilities, used_imports: FxHashSet::default(), maybe_unused_trait_imports: Default::default(), maybe_unused_extern_crates: Vec::new(), @@ -1342,7 +1341,6 @@ impl<'a> Resolver<'a> { .map(|(feat, ..)| *feat) .chain(features.declared_lang_features.iter().map(|(feat, ..)| *feat)) .collect(), - variant_vis: Default::default(), lint_buffer: LintBuffer::default(), next_node_id: NodeId::from_u32(1), def_id_to_span, @@ -1351,6 +1349,7 @@ impl<'a> Resolver<'a> { placeholder_field_indices: Default::default(), invocation_parents, next_disambiguator: Default::default(), + trait_impl_items: Default::default(), } } @@ -1374,6 +1373,7 @@ impl<'a> Resolver<'a> { pub fn into_outputs(self) -> ResolverOutputs { let definitions = self.definitions; + let visibilities = self.visibilities; let extern_crate_map = self.extern_crate_map; let export_map = self.export_map; let maybe_unused_trait_imports = self.maybe_unused_trait_imports; @@ -1382,6 +1382,7 @@ impl<'a> Resolver<'a> { ResolverOutputs { definitions: definitions, cstore: Box::new(self.crate_loader.into_cstore()), + visibilities, extern_crate_map, export_map, glob_map, @@ -1399,6 +1400,7 @@ impl<'a> Resolver<'a> { ResolverOutputs { definitions: self.definitions.clone(), cstore: Box::new(self.cstore().clone()), + visibilities: self.visibilities.clone(), extern_crate_map: self.extern_crate_map.clone(), export_map: self.export_map.clone(), glob_map: self.glob_map.clone(), diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs index bea7138964764..b5b281b93bcae 100644 --- a/compiler/rustc_resolve/src/macros.rs +++ b/compiler/rustc_resolve/src/macros.rs @@ -19,7 +19,7 @@ use rustc_feature::is_builtin_attr_name; use rustc_hir::def::{self, DefKind, NonMacroAttrKind}; use rustc_hir::def_id; use rustc_middle::middle::stability; -use rustc_middle::{span_bug, ty}; +use rustc_middle::ty; use rustc_session::lint::builtin::UNUSED_MACROS; use rustc_session::Session; use rustc_span::edition::Edition; @@ -885,11 +885,11 @@ impl<'a> Resolver<'a> { initial_res: Option, res: Res| { if let Some(initial_res) = initial_res { - if res != initial_res && res != Res::Err && this.ambiguity_errors.is_empty() { + if res != initial_res { // Make sure compilation does not succeed if preferred macro resolution // has changed after the macro had been expanded. In theory all such - // situations should be reported as ambiguity errors, so this is a bug. - span_bug!(span, "inconsistent resolution for a macro"); + // situations should be reported as errors, so this is a bug. + this.session.delay_span_bug(span, "inconsistent resolution for a macro"); } } else { // It's possible that the macro was unresolved (indeterminate) and silently diff --git a/compiler/rustc_save_analysis/src/dump_visitor.rs b/compiler/rustc_save_analysis/src/dump_visitor.rs index ce484858cbb66..dbb5e3cc9f066 100644 --- a/compiler/rustc_save_analysis/src/dump_visitor.rs +++ b/compiler/rustc_save_analysis/src/dump_visitor.rs @@ -320,6 +320,15 @@ impl<'tcx> DumpVisitor<'tcx> { for param in generics.params { match param.kind { hir::GenericParamKind::Lifetime { .. } => {} + hir::GenericParamKind::Type { + synthetic: Some(hir::SyntheticTyParamKind::ImplTrait), + .. 
+ } => { + return self + .nest_typeck_results(self.tcx.hir().local_def_id(param.hir_id), |this| { + this.visit_generics(generics) + }); + } hir::GenericParamKind::Type { .. } => { let param_ss = param.name.ident().span; let name = escape(self.span.snippet(param_ss)); @@ -351,7 +360,8 @@ impl<'tcx> DumpVisitor<'tcx> { hir::GenericParamKind::Const { .. } => {} } } - self.visit_generics(generics); + + self.visit_generics(generics) } fn process_fn( diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 28fef65da070a..bae1e4f314c01 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -593,6 +593,7 @@ symbols! { infer_static_outlives_requirements, inlateout, inline, + inline_const, inout, instruction_set, intel, @@ -776,6 +777,7 @@ symbols! { panic_info, panic_location, panic_runtime, + panic_str, panic_unwind, param_attrs, parent_trait, @@ -892,6 +894,7 @@ symbols! { rustc, rustc_allocator, rustc_allocator_nounwind, + rustc_allow_const_fn_unstable, rustc_args_required_const, rustc_attrs, rustc_builtin_macro, diff --git a/compiler/rustc_symbol_mangling/Cargo.toml b/compiler/rustc_symbol_mangling/Cargo.toml index c0dacd24c38e6..3df5f16131922 100644 --- a/compiler/rustc_symbol_mangling/Cargo.toml +++ b/compiler/rustc_symbol_mangling/Cargo.toml @@ -10,7 +10,7 @@ doctest = false [dependencies] tracing = "0.1" punycode = "0.4.0" -rustc-demangle = "0.1.16" +rustc-demangle = "0.1.18" rustc_ast = { path = "../rustc_ast" } rustc_span = { path = "../rustc_span" } diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs index 16d0b86903ea8..7833385cbc996 100644 --- a/compiler/rustc_symbol_mangling/src/v0.rs +++ b/compiler/rustc_symbol_mangling/src/v0.rs @@ -4,6 +4,7 @@ use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_hir as hir; use rustc_hir::def_id::{CrateNum, DefId}; use rustc_hir::definitions::{DefPathData, DisambiguatedDefPathData}; +use rustc_middle::mir::interpret::sign_extend; use rustc_middle::ty::print::{Print, Printer}; use rustc_middle::ty::subst::{GenericArg, GenericArgKind, Subst}; use rustc_middle::ty::{self, Instance, Ty, TyCtxt, TypeFoldable}; @@ -527,17 +528,31 @@ impl Printer<'tcx> for SymbolMangler<'tcx> { } let start = self.out.len(); - match ct.ty.kind() { - ty::Uint(_) => {} - ty::Bool => {} + let mut neg = false; + let val = match ct.ty.kind() { + ty::Uint(_) | ty::Bool | ty::Char => { + ct.try_eval_bits(self.tcx, ty::ParamEnv::reveal_all(), ct.ty) + } + ty::Int(_) => { + let param_env = ty::ParamEnv::reveal_all(); + ct.try_eval_bits(self.tcx, param_env, ct.ty).and_then(|b| { + let sz = self.tcx.layout_of(param_env.and(ct.ty)).ok()?.size; + let val = sign_extend(b, sz) as i128; + if val < 0 { + neg = true; + } + Some(val.wrapping_abs() as u128) + }) + } _ => { bug!("symbol_names: unsupported constant of type `{}` ({:?})", ct.ty, ct); } - } - self = ct.ty.print(self)?; + }; - if let Some(bits) = ct.try_eval_bits(self.tcx, ty::ParamEnv::reveal_all(), ct.ty) { - let _ = write!(self.out, "{:x}_", bits); + if let Some(bits) = val { + // We only print the type if the const can be evaluated. 
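The `v0` mangling hunk above evaluates a signed const generic to raw bits, sign-extends the value, and mangles negative constants with an `n` prefix in front of the hex magnitude; the type is only printed once evaluation has succeeded. A self-contained sketch of the sign handling, as a toy function over raw bits rather than the real mangler:

```rust
// Toy version of the sign handling: `bits` is the raw evaluated constant,
// `bit_width` its type's size in bits. Negative values get an `n` prefix
// before the hex magnitude, mirroring the hunk above.
fn mangle_signed_const(bits: u128, bit_width: u32) -> String {
    // Sign-extend from `bit_width` to 128 bits.
    let shift = 128 - bit_width;
    let val = ((bits << shift) as i128) >> shift;
    if val < 0 {
        format!("n{:x}_", val.wrapping_abs() as u128)
    } else {
        format!("{:x}_", val as u128)
    }
}

fn main() {
    // -1 stored as an 8-bit constant is 0xff.
    assert_eq!(mangle_signed_const(0xff, 8), "n1_");
    assert_eq!(mangle_signed_const(0x2a, 8), "2a_");
}
```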
+ self = ct.ty.print(self)?; + let _ = write!(self.out, "{}{:x}_", if neg { "n" } else { "" }, bits); } else { // NOTE(eddyb) despite having the path, we need to // encode a placeholder, as the path could refer diff --git a/compiler/rustc_target/src/spec/apple_sdk_base.rs b/compiler/rustc_target/src/spec/apple_sdk_base.rs index e34277d5af04c..1b17c2c278f9a 100644 --- a/compiler/rustc_target/src/spec/apple_sdk_base.rs +++ b/compiler/rustc_target/src/spec/apple_sdk_base.rs @@ -34,6 +34,7 @@ fn link_env_remove(arch: Arch) -> Vec { pub fn opts(arch: Arch) -> TargetOptions { TargetOptions { cpu: target_cpu(arch), + dynamic_linking: false, executables: true, link_env_remove: link_env_remove(arch), has_elf_tls: false, diff --git a/compiler/rustc_trait_selection/src/traits/auto_trait.rs b/compiler/rustc_trait_selection/src/traits/auto_trait.rs index e40067202e112..93a0073588ec7 100644 --- a/compiler/rustc_trait_selection/src/traits/auto_trait.rs +++ b/compiler/rustc_trait_selection/src/traits/auto_trait.rs @@ -642,7 +642,8 @@ impl AutoTraitFinder<'tcx> { // We check this by calling is_of_param on the relevant types // from the various possible predicates - match predicate.skip_binders() { + let bound_predicate = predicate.bound_atom(); + match bound_predicate.skip_binder() { ty::PredicateAtom::Trait(p, _) => { if self.is_param_no_infer(p.trait_ref.substs) && !only_projections @@ -650,10 +651,10 @@ impl AutoTraitFinder<'tcx> { { self.add_user_pred(computed_preds, predicate); } - predicates.push_back(ty::Binder::bind(p)); + predicates.push_back(bound_predicate.rebind(p)); } ty::PredicateAtom::Projection(p) => { - let p = ty::Binder::bind(p); + let p = bound_predicate.rebind(p); debug!( "evaluate_nested_obligations: examining projection predicate {:?}", predicate @@ -783,13 +784,13 @@ impl AutoTraitFinder<'tcx> { } } ty::PredicateAtom::RegionOutlives(binder) => { - let binder = ty::Binder::bind(binder); + let binder = bound_predicate.rebind(binder); if select.infcx().region_outlives_predicate(&dummy_cause, binder).is_err() { return false; } } ty::PredicateAtom::TypeOutlives(binder) => { - let binder = ty::Binder::bind(binder); + let binder = bound_predicate.rebind(binder); match ( binder.no_bound_vars(), binder.map_bound_ref(|pred| pred.0).no_bound_vars(), diff --git a/compiler/rustc_trait_selection/src/traits/codegen/mod.rs b/compiler/rustc_trait_selection/src/traits/codegen.rs similarity index 91% rename from compiler/rustc_trait_selection/src/traits/codegen/mod.rs rename to compiler/rustc_trait_selection/src/traits/codegen.rs index dd7ea55cc1043..3cb6ec8626186 100644 --- a/compiler/rustc_trait_selection/src/traits/codegen/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/codegen.rs @@ -17,14 +17,17 @@ use rustc_middle::ty::{self, TyCtxt}; /// (necessarily) resolve all nested obligations on the impl. Note /// that type check should guarantee to us that all nested /// obligations *could be* resolved if we wanted to. +/// /// Assumes that this is run after the entire crate has been successfully type-checked. +/// This also expects that `trait_ref` is fully normalized. pub fn codegen_fulfill_obligation<'tcx>( - ty: TyCtxt<'tcx>, + tcx: TyCtxt<'tcx>, (param_env, trait_ref): (ty::ParamEnv<'tcx>, ty::PolyTraitRef<'tcx>), ) -> Result, ErrorReported> { // Remove any references to regions; this helps improve caching. - let trait_ref = ty.erase_regions(&trait_ref); - + let trait_ref = tcx.erase_regions(&trait_ref); + // We expect the input to be fully normalized. 
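Several hunks in this patch (the `auto_trait` change above, plus the error-reporting, fulfillment, projection, and selection changes further down) replace `ty::Binder::bind(x)` after `skip_binders()` with `bound_atom()` followed by `rebind(x)`, so the rebuilt value keeps the binder of the predicate it came from instead of getting a fresh one. The toy model below illustrates the pattern with a hypothetical `Binder` type, not rustc's:

```rust
// A toy binder that remembers how many bound variables it introduces.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Binder<T> {
    bound_vars: usize,
    value: T,
}

impl<T> Binder<T> {
    fn skip_binder(self) -> T {
        self.value
    }
    // Wrap a new value under the *same* binder, rather than a fresh one.
    fn rebind<U>(&self, value: U) -> Binder<U> {
        Binder { bound_vars: self.bound_vars, value }
    }
}

fn main() {
    let pred = Binder { bound_vars: 2, value: ("Trait", "T") };
    // Peel, transform, and rebind under the original binder.
    let trait_ref = pred.rebind(pred.skip_binder().0);
    assert_eq!(trait_ref, Binder { bound_vars: 2, value: "Trait" });
}
```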
+ debug_assert_eq!(trait_ref, tcx.normalize_erasing_regions(param_env, trait_ref)); debug!( "codegen_fulfill_obligation(trait_ref={:?}, def_id={:?})", (param_env, trait_ref), @@ -33,7 +36,7 @@ pub fn codegen_fulfill_obligation<'tcx>( // Do the initial selection for the obligation. This yields the // shallow result we are looking for -- that is, what specific impl. - ty.infer_ctxt().enter(|infcx| { + tcx.infer_ctxt().enter(|infcx| { let mut selcx = SelectionContext::new(&infcx); let obligation_cause = ObligationCause::dummy(); @@ -118,7 +121,10 @@ where // contains unbound type parameters. It could be a slight // optimization to stop iterating early. if let Err(errors) = fulfill_cx.select_all_or_error(infcx) { - bug!("Encountered errors `{:?}` resolving bounds after type-checking", errors); + infcx.tcx.sess.delay_span_bug( + rustc_span::DUMMY_SP, + &format!("Encountered errors `{:?}` resolving bounds after type-checking", errors), + ); } let result = infcx.resolve_vars_if_possible(result); diff --git a/compiler/rustc_trait_selection/src/traits/const_evaluatable.rs b/compiler/rustc_trait_selection/src/traits/const_evaluatable.rs index 1e1eb16faf407..c79b2624f8cb0 100644 --- a/compiler/rustc_trait_selection/src/traits/const_evaluatable.rs +++ b/compiler/rustc_trait_selection/src/traits/const_evaluatable.rs @@ -223,11 +223,23 @@ impl AbstractConst<'tcx> { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +struct WorkNode<'tcx> { + node: Node<'tcx>, + span: Span, + used: bool, +} + struct AbstractConstBuilder<'a, 'tcx> { tcx: TyCtxt<'tcx>, body: &'a mir::Body<'tcx>, /// The current WIP node tree. - nodes: IndexVec>, + /// + /// We require all nodes to be used in the final abstract const, + /// so we store this here. Note that we also consider nodes as used + /// if they are mentioned in an assert, so some used nodes are never + /// actually reachable by walking the [`AbstractConst`]. + nodes: IndexVec>, locals: IndexVec, /// We only allow field accesses if they access /// the result of a checked operation. @@ -274,6 +286,27 @@ impl<'a, 'tcx> AbstractConstBuilder<'a, 'tcx> { Ok(Some(builder)) } + fn add_node(&mut self, node: Node<'tcx>, span: Span) -> NodeId { + // Mark used nodes. + match node { + Node::Leaf(_) => (), + Node::Binop(_, lhs, rhs) => { + self.nodes[lhs].used = true; + self.nodes[rhs].used = true; + } + Node::UnaryOp(_, input) => { + self.nodes[input].used = true; + } + Node::FunctionCall(func, nodes) => { + self.nodes[func].used = true; + nodes.iter().for_each(|&n| self.nodes[n].used = true); + } + } + + // Nodes start as unused. 
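The `const_evaluatable` changes above thread a `used` flag through every work-in-progress node: `add_node` marks the operands of a newly pushed node as used, new nodes start out unused, and a later hunk rejects any node that is still unused at the end as dead code. A miniature version of that bookkeeping over a plain `Vec` arena (toy types, no MIR):

```rust
// Toy arena: (node, used) pairs. `add_node` marks the operands of the new
// node as used; the node itself starts unused. `finish` marks the root as
// used and rejects anything still unused as dead code.
#[derive(Clone, Copy)]
enum Node {
    Leaf(i64),
    Binop(usize, usize), // operand indices
}

struct Builder {
    nodes: Vec<(Node, bool)>,
}

impl Builder {
    fn add_node(&mut self, node: Node) -> usize {
        if let Node::Binop(lhs, rhs) = node {
            self.nodes[lhs].1 = true;
            self.nodes[rhs].1 = true;
        }
        self.nodes.push((node, false));
        self.nodes.len() - 1
    }

    fn finish(mut self, root: usize) -> Result<Vec<Node>, &'static str> {
        self.nodes[root].1 = true;
        if self.nodes.iter().any(|&(_, used)| !used) {
            return Err("dead code");
        }
        Ok(self.nodes.into_iter().map(|(node, _)| node).collect())
    }
}

fn main() {
    let mut builder = Builder { nodes: Vec::new() };
    let a = builder.add_node(Node::Leaf(1));
    let b = builder.add_node(Node::Leaf(2));
    let root = builder.add_node(Node::Binop(a, b));
    assert!(builder.finish(root).is_ok());
}
```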
+ self.nodes.push(WorkNode { node, span, used: false }) + } + fn place_to_local( &mut self, span: Span, @@ -311,7 +344,7 @@ impl<'a, 'tcx> AbstractConstBuilder<'a, 'tcx> { let local = self.place_to_local(span, p)?; Ok(self.locals[local]) } - mir::Operand::Constant(ct) => Ok(self.nodes.push(Node::Leaf(ct.literal))), + mir::Operand::Constant(ct) => Ok(self.add_node(Node::Leaf(ct.literal), span)), } } @@ -336,19 +369,19 @@ impl<'a, 'tcx> AbstractConstBuilder<'a, 'tcx> { fn build_statement(&mut self, stmt: &mir::Statement<'tcx>) -> Result<(), ErrorReported> { debug!("AbstractConstBuilder: stmt={:?}", stmt); + let span = stmt.source_info.span; match stmt.kind { StatementKind::Assign(box (ref place, ref rvalue)) => { - let local = self.place_to_local(stmt.source_info.span, place)?; + let local = self.place_to_local(span, place)?; match *rvalue { Rvalue::Use(ref operand) => { - self.locals[local] = - self.operand_to_node(stmt.source_info.span, operand)?; + self.locals[local] = self.operand_to_node(span, operand)?; Ok(()) } Rvalue::BinaryOp(op, ref lhs, ref rhs) if Self::check_binop(op) => { - let lhs = self.operand_to_node(stmt.source_info.span, lhs)?; - let rhs = self.operand_to_node(stmt.source_info.span, rhs)?; - self.locals[local] = self.nodes.push(Node::Binop(op, lhs, rhs)); + let lhs = self.operand_to_node(span, lhs)?; + let rhs = self.operand_to_node(span, rhs)?; + self.locals[local] = self.add_node(Node::Binop(op, lhs, rhs), span); if op.is_checkable() { bug!("unexpected unchecked checkable binary operation"); } else { @@ -356,18 +389,18 @@ impl<'a, 'tcx> AbstractConstBuilder<'a, 'tcx> { } } Rvalue::CheckedBinaryOp(op, ref lhs, ref rhs) if Self::check_binop(op) => { - let lhs = self.operand_to_node(stmt.source_info.span, lhs)?; - let rhs = self.operand_to_node(stmt.source_info.span, rhs)?; - self.locals[local] = self.nodes.push(Node::Binop(op, lhs, rhs)); + let lhs = self.operand_to_node(span, lhs)?; + let rhs = self.operand_to_node(span, rhs)?; + self.locals[local] = self.add_node(Node::Binop(op, lhs, rhs), span); self.checked_op_locals.insert(local); Ok(()) } Rvalue::UnaryOp(op, ref operand) if Self::check_unop(op) => { - let operand = self.operand_to_node(stmt.source_info.span, operand)?; - self.locals[local] = self.nodes.push(Node::UnaryOp(op, operand)); + let operand = self.operand_to_node(span, operand)?; + self.locals[local] = self.add_node(Node::UnaryOp(op, operand), span); Ok(()) } - _ => self.error(Some(stmt.source_info.span), "unsupported rvalue")?, + _ => self.error(Some(span), "unsupported rvalue")?, } } // These are not actually relevant for us here, so we can ignore them. @@ -415,13 +448,9 @@ impl<'a, 'tcx> AbstractConstBuilder<'a, 'tcx> { .map(|arg| self.operand_to_node(terminator.source_info.span, arg)) .collect::, _>>()?, ); - self.locals[local] = self.nodes.push(Node::FunctionCall(func, args)); + self.locals[local] = self.add_node(Node::FunctionCall(func, args), fn_span); Ok(Some(target)) } - // We only allow asserts for checked operations. - // - // These asserts seem to all have the form `!_local.0` so - // we only allow exactly that. TerminatorKind::Assert { ref cond, expected: false, target, .. 
} => { let p = match cond { mir::Operand::Copy(p) | mir::Operand::Move(p) => p, @@ -430,7 +459,15 @@ impl<'a, 'tcx> AbstractConstBuilder<'a, 'tcx> { const ONE_FIELD: mir::Field = mir::Field::from_usize(1); debug!("proj: {:?}", p.projection); - if let &[mir::ProjectionElem::Field(ONE_FIELD, _)] = p.projection.as_ref() { + if let Some(p) = p.as_local() { + debug_assert!(!self.checked_op_locals.contains(p)); + // Mark locals directly used in asserts as used. + // + // This is needed because division does not use `CheckedBinop` but instead + // adds an explicit assert for `divisor != 0`. + self.nodes[self.locals[p]].used = true; + return Ok(Some(target)); + } else if let &[mir::ProjectionElem::Field(ONE_FIELD, _)] = p.projection.as_ref() { // Only allow asserts checking the result of a checked operation. if self.checked_op_locals.contains(p.local) { return Ok(Some(target)); @@ -457,7 +494,13 @@ impl<'a, 'tcx> AbstractConstBuilder<'a, 'tcx> { if let Some(next) = self.build_terminator(block.terminator())? { block = &self.body.basic_blocks()[next]; } else { - return Ok(self.tcx.arena.alloc_from_iter(self.nodes)); + assert_eq!(self.locals[mir::RETURN_PLACE], self.nodes.last().unwrap()); + self.nodes[self.locals[mir::RETURN_PLACE]].used = true; + if let Some(&unused) = self.nodes.iter().find(|n| !n.used) { + self.error(Some(unused.span), "dead code")?; + } + + return Ok(self.tcx.arena.alloc_from_iter(self.nodes.into_iter().map(|n| n.node))); } } } diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs index 05e3ed3435113..f8bd3ab96e254 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs @@ -255,9 +255,10 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { return; } - match obligation.predicate.skip_binders() { + let bound_predicate = obligation.predicate.bound_atom(); + match bound_predicate.skip_binder() { ty::PredicateAtom::Trait(trait_predicate, _) => { - let trait_predicate = ty::Binder::bind(trait_predicate); + let trait_predicate = bound_predicate.rebind(trait_predicate); let trait_predicate = self.resolve_vars_if_possible(&trait_predicate); if self.tcx.sess.has_errors() && trait_predicate.references_error() { @@ -531,7 +532,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> { } ty::PredicateAtom::RegionOutlives(predicate) => { - let predicate = ty::Binder::bind(predicate); + let predicate = bound_predicate.rebind(predicate); let predicate = self.resolve_vars_if_possible(&predicate); let err = self .region_outlives_predicate(&obligation.cause, predicate) @@ -1078,9 +1079,10 @@ impl<'a, 'tcx> InferCtxtPrivExt<'tcx> for InferCtxt<'a, 'tcx> { } // FIXME: It should be possible to deal with `ForAll` in a cleaner way. - let (cond, error) = match (cond.skip_binders(), error.skip_binders()) { + let bound_error = error.bound_atom(); + let (cond, error) = match (cond.skip_binders(), bound_error.skip_binder()) { (ty::PredicateAtom::Trait(..), ty::PredicateAtom::Trait(error, _)) => { - (cond, ty::Binder::bind(error)) + (cond, bound_error.rebind(error)) } _ => { // FIXME: make this work in other cases too. 
@@ -1089,9 +1091,10 @@ impl<'a, 'tcx> InferCtxtPrivExt<'tcx> for InferCtxt<'a, 'tcx> { }; for obligation in super::elaborate_predicates(self.tcx, std::iter::once(cond)) { - if let ty::PredicateAtom::Trait(implication, _) = obligation.predicate.skip_binders() { + let bound_predicate = obligation.predicate.bound_atom(); + if let ty::PredicateAtom::Trait(implication, _) = bound_predicate.skip_binder() { let error = error.to_poly_trait_ref(); - let implication = ty::Binder::bind(implication.trait_ref); + let implication = bound_predicate.rebind(implication.trait_ref); // FIXME: I'm just not taking associated types at all here. // Eventually I'll need to implement param-env-aware // `Γ₁ ⊦ φ₁ => Γ₂ ⊦ φ₂` logic. @@ -1169,12 +1172,13 @@ impl<'a, 'tcx> InferCtxtPrivExt<'tcx> for InferCtxt<'a, 'tcx> { // // this can fail if the problem was higher-ranked, in which // cause I have no idea for a good error message. - if let ty::PredicateAtom::Projection(data) = predicate.skip_binders() { + let bound_predicate = predicate.bound_atom(); + if let ty::PredicateAtom::Projection(data) = bound_predicate.skip_binder() { let mut selcx = SelectionContext::new(self); let (data, _) = self.replace_bound_vars_with_fresh_vars( obligation.cause.span, infer::LateBoundRegionConversionTime::HigherRankedType, - &ty::Binder::bind(data), + &bound_predicate.rebind(data), ); let mut obligations = vec![]; let normalized_ty = super::normalize_projection_type( @@ -1455,11 +1459,11 @@ impl<'a, 'tcx> InferCtxtPrivExt<'tcx> for InferCtxt<'a, 'tcx> { return; } - let mut err = match predicate.skip_binders() { + let bound_predicate = predicate.bound_atom(); + let mut err = match bound_predicate.skip_binder() { ty::PredicateAtom::Trait(data, _) => { - let trait_ref = ty::Binder::bind(data.trait_ref); - let self_ty = trait_ref.skip_binder().self_ty(); - debug!("self_ty {:?} {:?} trait_ref {:?}", self_ty, self_ty.kind(), trait_ref); + let trait_ref = bound_predicate.rebind(data.trait_ref); + debug!("trait_ref {:?}", trait_ref); if predicate.references_error() { return; @@ -1474,6 +1478,17 @@ impl<'a, 'tcx> InferCtxtPrivExt<'tcx> for InferCtxt<'a, 'tcx> { // known, since we don't dispatch based on region // relationships. + // Pick the first substitution that still contains inference variables as the one + // we're going to emit an error for. If there are none (see above), fall back to + // the substitution for `Self`. + let subst = { + let substs = data.trait_ref.substs; + substs + .iter() + .find(|s| s.has_infer_types_or_consts()) + .unwrap_or_else(|| substs[0]) + }; + // This is kind of a hack: it frequently happens that some earlier // error prevents types from being fully inferred, and then we get // a bunch of uninteresting errors saying something like " InferCtxtPrivExt<'tcx> for InferCtxt<'a, 'tcx> { // check upstream for type errors and don't add the obligations to // begin with in those cases. 
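The ambiguity-reporting hunk above picks which generic argument to blame: the first substitution that still contains inference variables, falling back to the substitution for `Self` when none does. That selection is easy to isolate; here is a sketch over a toy argument type:

```rust
// Toy stand-in for a generic argument; `has_infer` models
// `has_infer_types_or_consts()` from the hunk above.
#[derive(Clone, Copy)]
struct GenericArg {
    name: &'static str,
    has_infer: bool,
}

// Prefer the first argument that still has inference variables,
// otherwise fall back to `substs[0]`, i.e. the `Self` type.
fn pick_subst(substs: &[GenericArg]) -> GenericArg {
    *substs.iter().find(|arg| arg.has_infer).unwrap_or(&substs[0])
}

fn main() {
    let substs = [
        GenericArg { name: "Self", has_infer: false },
        GenericArg { name: "T", has_infer: true },
    ];
    assert_eq!(pick_subst(&substs).name, "T");

    let no_infer = [GenericArg { name: "Self", has_infer: false }];
    assert_eq!(pick_subst(&no_infer).name, "Self");
}
```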
if self.tcx.lang_items().sized_trait() == Some(trait_ref.def_id()) { - self.emit_inference_failure_err( - body_id, - span, - self_ty.into(), - ErrorCode::E0282, - ) - .emit(); + self.emit_inference_failure_err(body_id, span, subst, ErrorCode::E0282).emit(); return; } - let mut err = self.emit_inference_failure_err( - body_id, - span, - self_ty.into(), - ErrorCode::E0283, - ); + let mut err = + self.emit_inference_failure_err(body_id, span, subst, ErrorCode::E0283); err.note(&format!("cannot satisfy `{}`", predicate)); if let ObligationCauseCode::ItemObligation(def_id) = obligation.cause.code { self.suggest_fully_qualified_path(&mut err, def_id, span, trait_ref.def_id()); @@ -1582,7 +1587,7 @@ impl<'a, 'tcx> InferCtxtPrivExt<'tcx> for InferCtxt<'a, 'tcx> { self.emit_inference_failure_err(body_id, span, a.into(), ErrorCode::E0282) } ty::PredicateAtom::Projection(data) => { - let trait_ref = ty::Binder::bind(data).to_poly_trait_ref(self.tcx); + let trait_ref = bound_predicate.rebind(data).to_poly_trait_ref(self.tcx); let self_ty = trait_ref.skip_binder().self_ty(); let ty = data.ty; if predicate.references_error() { diff --git a/compiler/rustc_trait_selection/src/traits/fulfill.rs b/compiler/rustc_trait_selection/src/traits/fulfill.rs index 97dd180b27b04..9a8b5534dfe83 100644 --- a/compiler/rustc_trait_selection/src/traits/fulfill.rs +++ b/compiler/rustc_trait_selection/src/traits/fulfill.rs @@ -1,6 +1,6 @@ use crate::infer::{InferCtxt, TyOrConstInferVar}; use rustc_data_structures::obligation_forest::ProcessResult; -use rustc_data_structures::obligation_forest::{DoCompleted, Error, ForestObligation}; +use rustc_data_structures::obligation_forest::{Error, ForestObligation, Outcome}; use rustc_data_structures::obligation_forest::{ObligationForest, ObligationProcessor}; use rustc_errors::ErrorReported; use rustc_infer::traits::{TraitEngine, TraitEngineExt as _, TraitObligation}; @@ -129,13 +129,11 @@ impl<'a, 'tcx> FulfillmentContext<'tcx> { debug!("select: starting another iteration"); // Process pending obligations. - let outcome = self.predicates.process_obligations( - &mut FulfillProcessor { + let outcome: Outcome<_, _> = + self.predicates.process_obligations(&mut FulfillProcessor { selcx, register_region_obligations: self.register_region_obligations, - }, - DoCompleted::No, - ); + }); debug!("select: outcome={:#?}", outcome); // FIXME: if we kept the original cache key, we could mark projection @@ -353,7 +351,7 @@ impl<'a, 'b, 'tcx> FulfillProcessor<'a, 'b, 'tcx> { // This means we need to pass it the bound version of our // predicate. 
ty::PredicateAtom::Trait(trait_ref, _constness) => { - let trait_obligation = obligation.with(Binder::bind(trait_ref)); + let trait_obligation = obligation.with(binder.rebind(trait_ref)); self.process_trait_obligation( obligation, @@ -362,7 +360,7 @@ impl<'a, 'b, 'tcx> FulfillProcessor<'a, 'b, 'tcx> { ) } ty::PredicateAtom::Projection(data) => { - let project_obligation = obligation.with(Binder::bind(data)); + let project_obligation = obligation.with(binder.rebind(data)); self.process_projection_obligation( project_obligation, diff --git a/compiler/rustc_trait_selection/src/traits/object_safety.rs b/compiler/rustc_trait_selection/src/traits/object_safety.rs index 0e43f1655ddb2..d1647e686a84f 100644 --- a/compiler/rustc_trait_selection/src/traits/object_safety.rs +++ b/compiler/rustc_trait_selection/src/traits/object_safety.rs @@ -13,7 +13,7 @@ use super::elaborate_predicates; use crate::infer::TyCtxtInferExt; use crate::traits::query::evaluate_obligation::InferCtxtExt; use crate::traits::{self, Obligation, ObligationCause}; -use rustc_errors::{Applicability, FatalError}; +use rustc_errors::FatalError; use rustc_hir as hir; use rustc_hir::def_id::DefId; use rustc_middle::ty::subst::{GenericArg, InternalSubsts, Subst}; @@ -21,7 +21,7 @@ use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable, TypeVisitor, WithConstnes use rustc_middle::ty::{Predicate, ToPredicate}; use rustc_session::lint::builtin::WHERE_CLAUSES_OBJECT_SAFETY; use rustc_span::symbol::Symbol; -use rustc_span::Span; +use rustc_span::{MultiSpan, Span}; use smallvec::SmallVec; use std::array; @@ -100,49 +100,7 @@ fn object_safety_violations_for_trait( span, ) = violation { - // Using `CRATE_NODE_ID` is wrong, but it's hard to get a more precise id. - // It's also hard to get a use site span, so we use the method definition span. - tcx.struct_span_lint_hir( - WHERE_CLAUSES_OBJECT_SAFETY, - hir::CRATE_HIR_ID, - *span, - |lint| { - let mut err = lint.build(&format!( - "the trait `{}` cannot be made into an object", - tcx.def_path_str(trait_def_id) - )); - let node = tcx.hir().get_if_local(trait_def_id); - let msg = if let Some(hir::Node::Item(item)) = node { - err.span_label( - item.ident.span, - "this trait cannot be made into an object...", - ); - format!("...because {}", violation.error_msg()) - } else { - format!( - "the trait cannot be made into an object because {}", - violation.error_msg() - ) - }; - err.span_label(*span, &msg); - match (node, violation.solution()) { - (Some(_), Some((note, None))) => { - err.help(¬e); - } - (Some(_), Some((note, Some((sugg, span))))) => { - err.span_suggestion( - span, - ¬e, - sugg, - Applicability::MachineApplicable, - ); - } - // Only provide the help if its a local trait, otherwise it's not actionable. - _ => {} - } - err.emit(); - }, - ); + lint_object_unsafe_trait(tcx, *span, trait_def_id, violation); false } else { true @@ -180,6 +138,51 @@ fn object_safety_violations_for_trait( violations } +/// Lint object-unsafe trait. +fn lint_object_unsafe_trait( + tcx: TyCtxt<'_>, + span: Span, + trait_def_id: DefId, + violation: &ObjectSafetyViolation, +) { + // Using `CRATE_NODE_ID` is wrong, but it's hard to get a more precise id. + // It's also hard to get a use site span, so we use the method definition span. 
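The lint helper that follows attaches its explanation as one multi-span note: a label on the trait name ("this trait cannot be made into an object...") and a label on the offending span ("...because" plus the violation's message). The sketch below assembles such a labelled multi-span with plain structs standing in for `rustc_errors::MultiSpan`; the violation message string is a made-up placeholder.

```rust
// Toy spans and multi-span: `Span` is just the source text it would cover.
struct Span(&'static str);

struct MultiSpan {
    primary: Span,
    labels: Vec<(Span, String)>,
}

impl MultiSpan {
    fn from_span(primary: Span) -> Self {
        MultiSpan { primary, labels: Vec::new() }
    }
    fn push_span_label(&mut self, span: Span, label: String) {
        self.labels.push((span, label));
    }
}

fn main() {
    let violation_msg = "the method has no `self` parameter"; // placeholder text
    let mut spans = MultiSpan::from_span(Span("fn new()"));
    spans.push_span_label(
        Span("trait Foo"),
        "this trait cannot be made into an object...".to_string(),
    );
    spans.push_span_label(Span("fn new()"), format!("...because {}", violation_msg));
    assert_eq!(spans.primary.0, "fn new()");
    assert_eq!(spans.labels.len(), 2);
}
```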
+ tcx.struct_span_lint_hir(WHERE_CLAUSES_OBJECT_SAFETY, hir::CRATE_HIR_ID, span, |lint| { + let mut err = lint.build(&format!( + "the trait `{}` cannot be made into an object", + tcx.def_path_str(trait_def_id) + )); + let node = tcx.hir().get_if_local(trait_def_id); + let mut spans = MultiSpan::from_span(span); + if let Some(hir::Node::Item(item)) = node { + spans.push_span_label( + item.ident.span, + "this trait cannot be made into an object...".into(), + ); + spans.push_span_label(span, format!("...because {}", violation.error_msg())); + } else { + spans.push_span_label( + span, + format!( + "the trait cannot be made into an object because {}", + violation.error_msg() + ), + ); + }; + err.span_note( + spans, + "for a trait to be \"object safe\" it needs to allow building a vtable to allow the \ + call to be resolvable dynamically; for more information visit \ + ", + ); + if node.is_some() { + // Only provide the help if its a local trait, otherwise it's not + violation.solution(&mut err); + } + err.emit(); + }); +} + fn sized_trait_bound_spans<'tcx>( tcx: TyCtxt<'tcx>, bounds: hir::GenericBounds<'tcx>, @@ -385,6 +388,8 @@ fn virtual_call_violation_for_method<'tcx>( trait_def_id: DefId, method: &ty::AssocItem, ) -> Option { + let sig = tcx.fn_sig(method.def_id); + // The method's first parameter must be named `self` if !method.fn_has_self_parameter { // We'll attempt to provide a structured suggestion for `Self: Sized`. @@ -395,11 +400,21 @@ fn virtual_call_violation_for_method<'tcx>( [.., pred] => (", Self: Sized", pred.span().shrink_to_hi()), }, ); - return Some(MethodViolationCode::StaticMethod(sugg)); + // Get the span pointing at where the `self` receiver should be. + let sm = tcx.sess.source_map(); + let self_span = method.ident.span.to(tcx + .hir() + .span_if_local(method.def_id) + .unwrap_or_else(|| sm.next_point(method.ident.span)) + .shrink_to_hi()); + let self_span = sm.span_through_char(self_span, '(').shrink_to_hi(); + return Some(MethodViolationCode::StaticMethod( + sugg, + self_span, + !sig.inputs().skip_binder().is_empty(), + )); } - let sig = tcx.fn_sig(method.def_id); - for (i, input_ty) in sig.skip_binder().inputs()[1..].iter().enumerate() { if contains_illegal_self_type_reference(tcx, trait_def_id, input_ty) { return Some(MethodViolationCode::ReferencesSelfInput(i)); diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs index 8dbf7ec51c6db..827b1d35f1c22 100644 --- a/compiler/rustc_trait_selection/src/traits/project.rs +++ b/compiler/rustc_trait_selection/src/traits/project.rs @@ -623,7 +623,8 @@ fn prune_cache_value_obligations<'a, 'tcx>( .obligations .iter() .filter(|obligation| { - match obligation.predicate.skip_binders() { + let bound_predicate = obligation.predicate.bound_atom(); + match bound_predicate.skip_binder() { // We found a `T: Foo` predicate, let's check // if `U` references any unresolved type // variables. In principle, we only care if this @@ -634,7 +635,7 @@ fn prune_cache_value_obligations<'a, 'tcx>( // but we have `T: Foo` and `?1: Bar`). 
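For the `StaticMethod` violation above, the new code also computes the span where a `self` receiver would go, just past the opening parenthesis of the method signature, so the structured suggestion can insert `&self` (or `&self, ` when the method already has parameters). A string-based sketch of that idea, with real spans and source maps omitted:

```rust
// Toy model: find the offset just past '(' in the method signature, which
// is where the `&self` receiver would be inserted by the suggestion.
fn self_insertion_offset(method_sig: &str) -> Option<usize> {
    method_sig.find('(').map(|i| i + 1)
}

fn main() {
    let sig = "fn new(x: u32) -> Self";
    let offset = self_insertion_offset(sig).unwrap();
    let mut suggested = sig.to_string();
    // The method already has parameters, so suggest "&self, ".
    suggested.insert_str(offset, "&self, ");
    assert_eq!(suggested, "fn new(&self, x: u32) -> Self");
}
```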
ty::PredicateAtom::Projection(data) => { - infcx.unresolved_type_vars(&ty::Binder::bind(data.ty)).is_some() + infcx.unresolved_type_vars(&bound_predicate.rebind(data.ty)).is_some() } // We are only interested in `T: Foo` predicates, whre @@ -907,8 +908,9 @@ fn assemble_candidates_from_predicates<'cx, 'tcx>( let infcx = selcx.infcx(); for predicate in env_predicates { debug!(?predicate); + let bound_predicate = predicate.bound_atom(); if let ty::PredicateAtom::Projection(data) = predicate.skip_binders() { - let data = ty::Binder::bind(data); + let data = bound_predicate.rebind(data); let same_def_id = data.projection_def_id() == obligation.predicate.item_def_id; let is_match = same_def_id diff --git a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs index fdf1641c98676..b0bfb4ad17371 100644 --- a/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs +++ b/compiler/rustc_trait_selection/src/traits/select/candidate_assembly.rs @@ -642,24 +642,30 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { debug!(?poly_trait_ref, "assemble_candidates_from_object_ty"); + let poly_trait_predicate = self.infcx().resolve_vars_if_possible(&obligation.predicate); + let placeholder_trait_predicate = + self.infcx().replace_bound_vars_with_placeholders(&poly_trait_predicate); + // Count only those upcast versions that match the trait-ref // we are looking for. Specifically, do not only check for the // correct trait, but also the correct type parameters. // For example, we may be trying to upcast `Foo` to `Bar`, // but `Foo` is declared as `trait Foo: Bar`. - let upcast_trait_refs = util::supertraits(self.tcx(), poly_trait_ref) - .filter(|upcast_trait_ref| { - self.infcx - .probe(|_| self.match_poly_trait_ref(obligation, *upcast_trait_ref).is_ok()) + let candidate_supertraits = util::supertraits(self.tcx(), poly_trait_ref) + .enumerate() + .filter(|&(_, upcast_trait_ref)| { + self.infcx.probe(|_| { + self.match_normalize_trait_ref( + obligation, + upcast_trait_ref, + placeholder_trait_predicate.trait_ref, + ) + .is_ok() + }) }) - .count(); + .map(|(idx, _)| ObjectCandidate(idx)); - if upcast_trait_refs > 1 { - // Can be upcast in many ways; need more type information. 
- candidates.ambiguous = true; - } else if upcast_trait_refs == 1 { - candidates.vec.push(ObjectCandidate); - } + candidates.vec.extend(candidate_supertraits); }) } diff --git a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs index 37d619d594215..872b8e85f563f 100644 --- a/compiler/rustc_trait_selection/src/traits/select/confirmation.rs +++ b/compiler/rustc_trait_selection/src/traits/select/confirmation.rs @@ -69,10 +69,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } ProjectionCandidate(idx) => { - let obligations = self.confirm_projection_candidate(obligation, idx); + let obligations = self.confirm_projection_candidate(obligation, idx)?; Ok(ImplSource::Param(obligations)) } + ObjectCandidate(idx) => { + let data = self.confirm_object_candidate(obligation, idx)?; + Ok(ImplSource::Object(data)) + } + ClosureCandidate => { let vtable_closure = self.confirm_closure_candidate(obligation)?; Ok(ImplSource::Closure(vtable_closure)) @@ -97,11 +102,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { Ok(ImplSource::TraitAlias(data)) } - ObjectCandidate => { - let data = self.confirm_object_candidate(obligation); - Ok(ImplSource::Object(data)) - } - BuiltinObjectCandidate => { // This indicates something like `Trait + Send: Send`. In this case, we know that // this holds because that's what the object type is telling us, and there's really @@ -120,7 +120,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { &mut self, obligation: &TraitObligation<'tcx>, idx: usize, - ) -> Vec> { + ) -> Result>, SelectionError<'tcx>> { self.infcx.commit_unconditionally(|_| { let tcx = self.tcx(); @@ -148,19 +148,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { &mut obligations, ); - obligations.extend( + obligations.extend(self.infcx.commit_if_ok(|_| { self.infcx .at(&obligation.cause, obligation.param_env) .sup(placeholder_trait_predicate.trait_ref.to_poly_trait_ref(), candidate) .map(|InferOk { obligations, .. }| obligations) - .unwrap_or_else(|_| { - bug!( - "Projection bound `{:?}` was applicable to `{:?}` but now is not", - candidate, - obligation - ); - }), - ); + .map_err(|_| Unimplemented) + })?); if let ty::Projection(..) = placeholder_self_ty.kind() { for predicate in tcx.predicates_of(def_id).instantiate_own(tcx, substs).predicates { @@ -181,7 +175,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } } - obligations + Ok(obligations) }) } @@ -371,9 +365,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { fn confirm_object_candidate( &mut self, obligation: &TraitObligation<'tcx>, - ) -> ImplSourceObjectData<'tcx, PredicateObligation<'tcx>> { - debug!(?obligation, "confirm_object_candidate"); + index: usize, + ) -> Result>, SelectionError<'tcx>> { let tcx = self.tcx(); + debug!(?obligation, ?index, "confirm_object_candidate"); let trait_predicate = self.infcx.replace_bound_vars_with_placeholders(&obligation.predicate); @@ -399,43 +394,39 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { }) .with_self_ty(self.tcx(), self_ty); - let mut upcast_trait_ref = None; let mut nested = vec![]; - let vtable_base; - { - // We want to find the first supertrait in the list of - // supertraits that we can unify with, and do that - // unification. We know that there is exactly one in the list - // where we can unify, because otherwise select would have - // reported an ambiguity. (When we do find a match, also - // record it for later.) 
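With object candidates now carrying the index of the matching supertrait, the confirmation code in the next hunk derives the vtable offset by summing the own-method counts of the supertraits that precede that index. A minimal sketch of the computation over toy data:

```rust
// `own_entry_counts[i]` is how many vtable entries supertrait `i`
// contributes of its own; the candidate matched supertrait `index`.
fn vtable_base(own_entry_counts: &[usize], index: usize) -> usize {
    own_entry_counts.iter().take(index).sum()
}

fn main() {
    let counts = [2, 1, 3];
    // The matching supertrait is at position 2, so its methods start after
    // the 2 + 1 = 3 entries contributed by the earlier supertraits.
    assert_eq!(vtable_base(&counts, 2), 3);
}
```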
- let nonmatching = util::supertraits(tcx, ty::Binder::dummy(object_trait_ref)) - .take_while(|&t| { - match self.infcx.commit_if_ok(|_| { - self.infcx - .at(&obligation.cause, obligation.param_env) - .sup(obligation_trait_ref, t) - .map(|InferOk { obligations, .. }| obligations) - .map_err(|_| ()) - }) { - Ok(obligations) => { - upcast_trait_ref = Some(t); - nested.extend(obligations); - false - } - Err(_) => true, - } - }); + let mut supertraits = util::supertraits(tcx, ty::Binder::dummy(object_trait_ref)); - // Additionally, for each of the non-matching predicates that - // we pass over, we sum up the set of number of vtable - // entries, so that we can compute the offset for the selected - // trait. - vtable_base = nonmatching.map(|t| super::util::count_own_vtable_entries(tcx, t)).sum(); - } + // For each of the non-matching predicates that + // we pass over, we sum up the set of number of vtable + // entries, so that we can compute the offset for the selected + // trait. + let vtable_base = supertraits + .by_ref() + .take(index) + .map(|t| super::util::count_own_vtable_entries(tcx, t)) + .sum(); + + let unnormalized_upcast_trait_ref = + supertraits.next().expect("supertraits iterator no longer has as many elements"); + + let upcast_trait_ref = normalize_with_depth_to( + self, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth + 1, + &unnormalized_upcast_trait_ref, + &mut nested, + ); - let upcast_trait_ref = upcast_trait_ref.unwrap(); + nested.extend(self.infcx.commit_if_ok(|_| { + self.infcx + .at(&obligation.cause, obligation.param_env) + .sup(obligation_trait_ref, upcast_trait_ref) + .map(|InferOk { obligations, .. }| obligations) + .map_err(|_| Unimplemented) + })?); // Check supertraits hold. This is so that their associated type bounds // will be checked in the code below. @@ -501,7 +492,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } debug!(?nested, "object nested obligations"); - ImplSourceObjectData { upcast_trait_ref, vtable_base, nested } + Ok(ImplSourceObjectData { upcast_trait_ref, vtable_base, nested }) } fn confirm_fn_pointer_candidate( diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index 4d347380c6c77..4cc4bc0acdab6 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -449,16 +449,17 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } let result = ensure_sufficient_stack(|| { - match obligation.predicate.skip_binders() { + let bound_predicate = obligation.predicate.bound_atom(); + match bound_predicate.skip_binder() { ty::PredicateAtom::Trait(t, _) => { - let t = ty::Binder::bind(t); + let t = bound_predicate.rebind(t); debug_assert!(!t.has_escaping_bound_vars()); let obligation = obligation.with(t); self.evaluate_trait_predicate_recursively(previous_stack, obligation) } ty::PredicateAtom::Subtype(p) => { - let p = ty::Binder::bind(p); + let p = bound_predicate.rebind(p); // Does this code ever run? match self.infcx.subtype_predicate(&obligation.cause, obligation.param_env, p) { Some(Ok(InferOk { mut obligations, .. 
})) => { @@ -502,7 +503,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } ty::PredicateAtom::Projection(data) => { - let data = ty::Binder::bind(data); + let data = bound_predicate.rebind(data); let project_obligation = obligation.with(data); match project::poly_project_and_unify_type(self, &project_obligation) { Ok(Ok(Some(mut subobligations))) => { @@ -517,12 +518,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { result } Ok(Ok(None)) => Ok(EvaluatedToAmbig), - // EvaluatedToRecur might also be acceptable here, but use - // Unknown for now because it means that we won't dismiss a - // selection candidate solely because it has a projection - // cycle. This is closest to the previous behavior of - // immediately erroring. - Ok(Err(project::InProgress)) => Ok(EvaluatedToUnknown), + Ok(Err(project::InProgress)) => Ok(EvaluatedToRecur), Err(_) => Ok(EvaluatedToErr), } } @@ -1174,10 +1170,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { .iter() .enumerate() .filter_map(|(idx, bound)| { - if let ty::PredicateAtom::Trait(pred, _) = bound.skip_binders() { - let bound = ty::Binder::bind(pred.trait_ref); + let bound_predicate = bound.bound_atom(); + if let ty::PredicateAtom::Trait(pred, _) = bound_predicate.skip_binder() { + let bound = bound_predicate.rebind(pred.trait_ref); if self.infcx.probe(|_| { - match self.match_projection( + match self.match_normalize_trait_ref( obligation, bound, placeholder_trait_predicate.trait_ref, @@ -1205,7 +1202,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { /// Equates the trait in `obligation` with trait bound. If the two traits /// can be equated and the normalized trait bound doesn't contain inference /// variables or placeholders, the normalized bound is returned. - fn match_projection( + fn match_normalize_trait_ref( &mut self, obligation: &TraitObligation<'tcx>, trait_bound: ty::PolyTraitRef<'tcx>, @@ -1355,10 +1352,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | BuiltinUnsizeCandidate | BuiltinCandidate { .. } | TraitAliasCandidate(..) - | ObjectCandidate + | ObjectCandidate(_) | ProjectionCandidate(_), ) => !is_global(cand), - (ObjectCandidate | ProjectionCandidate(_), ParamCandidate(ref cand)) => { + (ObjectCandidate(_) | ProjectionCandidate(_), ParamCandidate(ref cand)) => { // Prefer these to a global where-clause bound // (see issue #50825). is_global(cand) @@ -1379,20 +1376,20 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { is_global(cand) && other.evaluation.must_apply_modulo_regions() } - (ProjectionCandidate(i), ProjectionCandidate(j)) => { - // Arbitrarily pick the first candidate for backwards + (ProjectionCandidate(i), ProjectionCandidate(j)) + | (ObjectCandidate(i), ObjectCandidate(j)) => { + // Arbitrarily pick the lower numbered candidate for backwards // compatibility reasons. Don't let this affect inference. - i > j && !needs_infer + i < j && !needs_infer } - (ObjectCandidate, ObjectCandidate) => bug!("Duplicate object candidate"), - (ObjectCandidate, ProjectionCandidate(_)) - | (ProjectionCandidate(_), ObjectCandidate) => { + (ObjectCandidate(_), ProjectionCandidate(_)) + | (ProjectionCandidate(_), ObjectCandidate(_)) => { bug!("Have both object and projection candidate") } // Arbitrarily give projection and object candidates priority. ( - ObjectCandidate | ProjectionCandidate(_), + ObjectCandidate(_) | ProjectionCandidate(_), ImplCandidate(..) | ClosureCandidate | GeneratorCandidate @@ -1412,7 +1409,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { | BuiltinUnsizeCandidate | BuiltinCandidate { .. 
} | TraitAliasCandidate(..), - ObjectCandidate | ProjectionCandidate(_), + ObjectCandidate(_) | ProjectionCandidate(_), ) => false, (&ImplCandidate(other_def), &ImplCandidate(victim_def)) => { @@ -1529,16 +1526,20 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => None, - ty::Tuple(tys) => { - Where(ty::Binder::bind(tys.last().into_iter().map(|k| k.expect_ty()).collect())) - } + ty::Tuple(tys) => Where( + obligation + .predicate + .rebind(tys.last().into_iter().map(|k| k.expect_ty()).collect()), + ), ty::Adt(def, substs) => { let sized_crit = def.sized_constraint(self.tcx()); // (*) binder moved here - Where(ty::Binder::bind( - sized_crit.iter().map(|ty| ty.subst(self.tcx(), substs)).collect(), - )) + Where( + obligation.predicate.rebind({ + sized_crit.iter().map(|ty| ty.subst(self.tcx(), substs)).collect() + }), + ) } ty::Projection(_) | ty::Param(_) | ty::Opaque(..) => None, @@ -1561,7 +1562,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { use self::BuiltinImplConditions::{Ambiguous, None, Where}; - match self_ty.kind() { + match *self_ty.kind() { ty::Infer(ty::IntVar(_)) | ty::Infer(ty::FloatVar(_)) | ty::FnDef(..) @@ -1590,12 +1591,12 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { ty::Array(element_ty, _) => { // (*) binder moved here - Where(ty::Binder::bind(vec![element_ty])) + Where(obligation.predicate.rebind(vec![element_ty])) } ty::Tuple(tys) => { // (*) binder moved here - Where(ty::Binder::bind(tys.iter().map(|k| k.expect_ty()).collect())) + Where(obligation.predicate.rebind(tys.iter().map(|k| k.expect_ty()).collect())) } ty::Closure(_, substs) => { @@ -1605,7 +1606,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // Not yet resolved. Ambiguous } else { - Where(ty::Binder::bind(substs.as_closure().upvar_tys().collect())) + Where(obligation.predicate.rebind(substs.as_closure().upvar_tys().collect())) } } @@ -1884,9 +1885,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { /// Normalize `where_clause_trait_ref` and try to match it against /// `obligation`. If successful, return any predicates that - /// result from the normalization. Normalization is necessary - /// because where-clauses are stored in the parameter environment - /// unnormalized. + /// result from the normalization. 
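The `confirm_object_candidate` hunk at the start of this section replaces a `take_while` search with an iterator consumed in two stages: `by_ref().take(index)` sums the vtable entries of the supertraits that are skipped, and the following `next()` yields the selected supertrait. A minimal standalone sketch of that iterator pattern, with plain `usize`s standing in for trait refs (nothing here is rustc API):

```rust
fn main() {
    // Toy model: walk a supertrait-like sequence, sum a "vtable size" for
    // the first `index` entries, then pull the entry at `index` out of the
    // *same* iterator.
    let vtable_sizes = vec![2usize, 1, 3, 4];
    let index = 2;

    let mut iter = vtable_sizes.into_iter();
    let vtable_base: usize = iter.by_ref().take(index).sum();
    let selected = iter.next().expect("iterator should still have an element at `index`");

    assert_eq!(vtable_base, 3); // 2 + 1
    assert_eq!(selected, 3);    // the element at position `index`
}
```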
fn match_where_clause_trait_ref( &mut self, obligation: &TraitObligation<'tcx>, diff --git a/compiler/rustc_traits/src/chalk/lowering.rs b/compiler/rustc_traits/src/chalk/lowering.rs index 391251b6fa554..5ca0fc0c88b54 100644 --- a/compiler/rustc_traits/src/chalk/lowering.rs +++ b/compiler/rustc_traits/src/chalk/lowering.rs @@ -81,8 +81,11 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::InEnvironment, ) -> chalk_ir::InEnvironment>> { let clauses = self.environment.into_iter().map(|predicate| { - let (predicate, binders, _named_regions) = - collect_bound_vars(interner, interner.tcx, &predicate.bound_atom(interner.tcx)); + let (predicate, binders, _named_regions) = collect_bound_vars( + interner, + interner.tcx, + &predicate.bound_atom_with_opt_escaping(interner.tcx), + ); let consequence = match predicate { ty::PredicateAtom::TypeWellFormedFromEnv(ty) => { chalk_ir::DomainGoal::FromEnv(chalk_ir::FromEnv::Ty(ty.lower_into(interner))) @@ -133,8 +136,11 @@ impl<'tcx> LowerInto<'tcx, chalk_ir::InEnvironment LowerInto<'tcx, chalk_ir::GoalData>> for ty::Predicate<'tcx> { fn lower_into(self, interner: &RustInterner<'tcx>) -> chalk_ir::GoalData> { - let (predicate, binders, _named_regions) = - collect_bound_vars(interner, interner.tcx, &self.bound_atom(interner.tcx)); + let (predicate, binders, _named_regions) = collect_bound_vars( + interner, + interner.tcx, + &self.bound_atom_with_opt_escaping(interner.tcx), + ); let value = match predicate { ty::PredicateAtom::Trait(predicate, _) => { @@ -653,8 +659,11 @@ impl<'tcx> LowerInto<'tcx, Option, ) -> Option>> { - let (predicate, binders, _named_regions) = - collect_bound_vars(interner, interner.tcx, &self.bound_atom(interner.tcx)); + let (predicate, binders, _named_regions) = collect_bound_vars( + interner, + interner.tcx, + &self.bound_atom_with_opt_escaping(interner.tcx), + ); let value = match predicate { ty::PredicateAtom::Trait(predicate, _) => { Some(chalk_ir::WhereClause::Implemented(predicate.trait_ref.lower_into(interner))) @@ -762,27 +771,22 @@ impl<'tcx> LowerInto<'tcx, Option, ) -> Option>> { - match self.bound_atom(interner.tcx).skip_binder() { - ty::PredicateAtom::Trait(predicate, _) => { - let (predicate, binders, _named_regions) = - collect_bound_vars(interner, interner.tcx, &ty::Binder::bind(predicate)); - - Some(chalk_ir::Binders::new( - binders, - chalk_solve::rust_ir::InlineBound::TraitBound( - predicate.trait_ref.lower_into(interner), - ), - )) - } - ty::PredicateAtom::Projection(predicate) => { - let (predicate, binders, _named_regions) = - collect_bound_vars(interner, interner.tcx, &ty::Binder::bind(predicate)); - - Some(chalk_ir::Binders::new( - binders, - chalk_solve::rust_ir::InlineBound::AliasEqBound(predicate.lower_into(interner)), - )) - } + let (predicate, binders, _named_regions) = collect_bound_vars( + interner, + interner.tcx, + &self.bound_atom_with_opt_escaping(interner.tcx), + ); + match predicate { + ty::PredicateAtom::Trait(predicate, _) => Some(chalk_ir::Binders::new( + binders, + chalk_solve::rust_ir::InlineBound::TraitBound( + predicate.trait_ref.lower_into(interner), + ), + )), + ty::PredicateAtom::Projection(predicate) => Some(chalk_ir::Binders::new( + binders, + chalk_solve::rust_ir::InlineBound::AliasEqBound(predicate.lower_into(interner)), + )), ty::PredicateAtom::TypeOutlives(_predicate) => None, ty::PredicateAtom::WellFormed(_ty) => None, diff --git a/compiler/rustc_ty/src/ty.rs b/compiler/rustc_ty/src/ty.rs index 1f21d9488a454..2562140bb5d7e 100644 --- a/compiler/rustc_ty/src/ty.rs +++ 
b/compiler/rustc_ty/src/ty.rs @@ -80,7 +80,6 @@ fn sized_constraint_for_ty<'tcx>( fn associated_item_from_trait_item_ref( tcx: TyCtxt<'_>, parent_def_id: LocalDefId, - parent_vis: &hir::Visibility<'_>, trait_item_ref: &hir::TraitItemRef, ) -> ty::AssocItem { let def_id = tcx.hir().local_def_id(trait_item_ref.id.hir_id); @@ -93,8 +92,7 @@ fn associated_item_from_trait_item_ref( ty::AssocItem { ident: trait_item_ref.ident, kind, - // Visibility of trait items is inherited from their traits. - vis: ty::Visibility::from_hir(parent_vis, trait_item_ref.id.hir_id, tcx), + vis: tcx.visibility(def_id), defaultness: trait_item_ref.defaultness, def_id: def_id.to_def_id(), container: ty::TraitContainer(parent_def_id.to_def_id()), @@ -117,8 +115,7 @@ fn associated_item_from_impl_item_ref( ty::AssocItem { ident: impl_item_ref.ident, kind, - // Visibility of trait impl items doesn't matter. - vis: ty::Visibility::from_hir(&impl_item_ref.vis, impl_item_ref.id.hir_id, tcx), + vis: tcx.visibility(def_id), defaultness: impl_item_ref.defaultness, def_id: def_id.to_def_id(), container: ty::ImplContainer(parent_def_id.to_def_id()), @@ -143,12 +140,8 @@ fn associated_item(tcx: TyCtxt<'_>, def_id: DefId) -> ty::AssocItem { hir::ItemKind::Trait(.., ref trait_item_refs) => { if let Some(trait_item_ref) = trait_item_refs.iter().find(|i| i.id.hir_id == id) { - let assoc_item = associated_item_from_trait_item_ref( - tcx, - parent_def_id, - &parent_item.vis, - trait_item_ref, - ); + let assoc_item = + associated_item_from_trait_item_ref(tcx, parent_def_id, trait_item_ref); debug_assert_eq!(assoc_item.def_id, def_id); return assoc_item; } diff --git a/compiler/rustc_typeck/src/astconv/generics.rs b/compiler/rustc_typeck/src/astconv/generics.rs index b867798c76cf7..3bfb2d3f1b0f9 100644 --- a/compiler/rustc_typeck/src/astconv/generics.rs +++ b/compiler/rustc_typeck/src/astconv/generics.rs @@ -548,13 +548,18 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { generics: &ty::Generics, ) -> bool { let explicit = !seg.infer_args; - let impl_trait = generics.params.iter().any(|param| match param.kind { - ty::GenericParamDefKind::Type { - synthetic: Some(hir::SyntheticTyParamKind::ImplTrait), - .. - } => true, - _ => false, - }); + let impl_trait = + generics.params.iter().any(|param| match param.kind { + ty::GenericParamDefKind::Type { + synthetic: + Some( + hir::SyntheticTyParamKind::ImplTrait + | hir::SyntheticTyParamKind::FromAttr, + ), + .. + } => true, + _ => false, + }); if explicit && impl_trait { let spans = seg diff --git a/compiler/rustc_typeck/src/astconv/mod.rs b/compiler/rustc_typeck/src/astconv/mod.rs index 170ca2ce744cd..07e523af3ebf5 100644 --- a/compiler/rustc_typeck/src/astconv/mod.rs +++ b/compiler/rustc_typeck/src/astconv/mod.rs @@ -1095,9 +1095,10 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { obligation.predicate ); - match obligation.predicate.skip_binders() { + let bound_predicate = obligation.predicate.bound_atom(); + match bound_predicate.skip_binder() { ty::PredicateAtom::Trait(pred, _) => { - let pred = ty::Binder::bind(pred); + let pred = bound_predicate.rebind(pred); associated_types.entry(span).or_default().extend( tcx.associated_items(pred.def_id()) .in_definition_order() @@ -1106,7 +1107,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { ); } ty::PredicateAtom::Projection(pred) => { - let pred = ty::Binder::bind(pred); + let pred = bound_predicate.rebind(pred); // A `Self` within the original bound will be substituted with a // `trait_object_dummy_self`, so check for that. 
let references_self = diff --git a/compiler/rustc_typeck/src/check/_match.rs b/compiler/rustc_typeck/src/check/_match.rs index 7cb23dc053795..398e013e62fb5 100644 --- a/compiler/rustc_typeck/src/check/_match.rs +++ b/compiler/rustc_typeck/src/check/_match.rs @@ -201,6 +201,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { expr.span, ObligationCauseCode::MatchExpressionArm(box MatchExpressionArmCause { arm_span, + scrut_span: scrut.span, semi_span, source: match_src, prior_arms: other_arms.clone(), diff --git a/compiler/rustc_typeck/src/check/check.rs b/compiler/rustc_typeck/src/check/check.rs index b9a5db478552e..3d8653b4a6a47 100644 --- a/compiler/rustc_typeck/src/check/check.rs +++ b/compiler/rustc_typeck/src/check/check.rs @@ -348,8 +348,7 @@ pub(super) fn check_union(tcx: TyCtxt<'_>, id: hir::HirId, span: Span) { check_packed(tcx, span, def); } -/// When the `#![feature(untagged_unions)]` gate is active, -/// check that the fields of the `union` does not contain fields that need dropping. +/// Check that the fields of the `union` do not need dropping. pub(super) fn check_union_fields(tcx: TyCtxt<'_>, span: Span, item_def_id: LocalDefId) -> bool { let item_type = tcx.type_of(item_def_id); if let ty::Adt(def, substs) = item_type.kind() { diff --git a/compiler/rustc_typeck/src/check/closure.rs b/compiler/rustc_typeck/src/check/closure.rs index 8bdd933644a33..8cd83c39f9e31 100644 --- a/compiler/rustc_typeck/src/check/closure.rs +++ b/compiler/rustc_typeck/src/check/closure.rs @@ -192,6 +192,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { obligation.predicate ); + let bound_predicate = obligation.predicate.bound_atom(); if let ty::PredicateAtom::Projection(proj_predicate) = obligation.predicate.skip_binders() { @@ -199,7 +200,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // the complete signature. self.deduce_sig_from_projection( Some(obligation.cause.span), - ty::Binder::bind(proj_predicate), + bound_predicate.rebind(proj_predicate), ) } else { None diff --git a/compiler/rustc_typeck/src/check/coercion.rs b/compiler/rustc_typeck/src/check/coercion.rs index db32a73618125..6da3ecde329cf 100644 --- a/compiler/rustc_typeck/src/check/coercion.rs +++ b/compiler/rustc_typeck/src/check/coercion.rs @@ -583,7 +583,8 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { while !queue.is_empty() { let obligation = queue.remove(0); debug!("coerce_unsized resolve step: {:?}", obligation); - let trait_pred = match obligation.predicate.skip_binders() { + let bound_predicate = obligation.predicate.bound_atom(); + let trait_pred = match bound_predicate.skip_binder() { ty::PredicateAtom::Trait(trait_pred, _) if traits.contains(&trait_pred.def_id()) => { @@ -594,7 +595,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { has_unsized_tuple_coercion = true; } } - ty::Binder::bind(trait_pred) + bound_predicate.rebind(trait_pred) } _ => { coercion.obligations.push(obligation); @@ -1474,6 +1475,28 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> { if let (Some(sp), Some(fn_output)) = (fcx.ret_coercion_span.borrow().as_ref(), fn_output) { self.add_impl_trait_explanation(&mut err, cause, fcx, expected, *sp, fn_output); } + + if let Some(sp) = fcx.ret_coercion_span.borrow().as_ref() { + // If the closure has an explicit return type annotation, + // then a type error may occur at the first return expression we + // see in the closure (if it conflicts with the declared + // return type). Skip adding a note in this case, since it + // would be incorrect. 
+ if !err.span.primary_spans().iter().any(|span| span == sp) { + let hir = fcx.tcx.hir(); + let body_owner = hir.body_owned_by(hir.enclosing_body_owner(fcx.body_id)); + if fcx.tcx.is_closure(hir.body_owner_def_id(body_owner).to_def_id()) { + err.span_note( + *sp, + &format!( + "return type inferred to be `{}` here", + fcx.resolve_vars_if_possible(&expected) + ), + ); + } + } + } + err } diff --git a/compiler/rustc_typeck/src/check/demand.rs b/compiler/rustc_typeck/src/check/demand.rs index 3e66885448b9a..b8143787a2ddf 100644 --- a/compiler/rustc_typeck/src/check/demand.rs +++ b/compiler/rustc_typeck/src/check/demand.rs @@ -751,8 +751,20 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - let msg = format!("you can convert an `{}` to `{}`", checked_ty, expected_ty); - let cast_msg = format!("you can cast an `{} to `{}`", checked_ty, expected_ty); + let msg = format!( + "you can convert {} `{}` to {} `{}`", + checked_ty.kind().article(), + checked_ty, + expected_ty.kind().article(), + expected_ty, + ); + let cast_msg = format!( + "you can cast {} `{}` to {} `{}`", + checked_ty.kind().article(), + checked_ty, + expected_ty.kind().article(), + expected_ty, + ); let lit_msg = format!( "change the type of the numeric literal from `{}` to `{}`", checked_ty, expected_ty, @@ -814,7 +826,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let suggestion = format!("{}::from({})", checked_ty, lhs_src); (lhs_expr.span, msg, suggestion) } else { - let msg = format!("{} and panic if the converted value wouldn't fit", msg); + let msg = format!("{} and panic if the converted value doesn't fit", msg); let suggestion = format!("{}{}.try_into().unwrap()", prefix, with_opt_paren(&src)); (expr.span, msg, suggestion) diff --git a/compiler/rustc_typeck/src/check/dropck.rs b/compiler/rustc_typeck/src/check/dropck.rs index ae94a6df5fd47..5650b2cdd3c98 100644 --- a/compiler/rustc_typeck/src/check/dropck.rs +++ b/compiler/rustc_typeck/src/check/dropck.rs @@ -226,12 +226,14 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>( // could be extended easily also to the other `Predicate`. 
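The reworded `demand.rs` suggestions above distinguish an infallible `From` conversion from a fallible `.try_into().unwrap()`, which "panic[s] if the converted value doesn't fit". A small standalone illustration of the two cases those diagnostics point at, using only ordinary `std` APIs:

```rust
use std::convert::TryFrom;

fn main() {
    // Widening never fails, so the diagnostic suggests `u16::from(...)`.
    assert_eq!(u16::from(200u8), 200u16);

    // Narrowing can fail, so the diagnostic suggests `.try_into().unwrap()`,
    // which panics exactly when the value doesn't fit.
    assert!(u8::try_from(300u16).is_err());
    assert_eq!(u8::try_from(255u16).unwrap(), 255u8);
}
```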
let predicate_matches_closure = |p: Predicate<'tcx>| { let mut relator: SimpleEqRelation<'tcx> = SimpleEqRelation::new(tcx, self_param_env); - match (predicate.skip_binders(), p.skip_binders()) { + let predicate = predicate.bound_atom(); + let p = p.bound_atom(); + match (predicate.skip_binder(), p.skip_binder()) { (ty::PredicateAtom::Trait(a, _), ty::PredicateAtom::Trait(b, _)) => { - relator.relate(ty::Binder::bind(a), ty::Binder::bind(b)).is_ok() + relator.relate(predicate.rebind(a), p.rebind(b)).is_ok() } (ty::PredicateAtom::Projection(a), ty::PredicateAtom::Projection(b)) => { - relator.relate(ty::Binder::bind(a), ty::Binder::bind(b)).is_ok() + relator.relate(predicate.rebind(a), p.rebind(b)).is_ok() } _ => predicate == p, } diff --git a/compiler/rustc_typeck/src/check/expr.rs b/compiler/rustc_typeck/src/check/expr.rs index 179e383be2e2b..9990f86a36b13 100644 --- a/compiler/rustc_typeck/src/check/expr.rs +++ b/compiler/rustc_typeck/src/check/expr.rs @@ -286,6 +286,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } ExprKind::DropTemps(ref e) => self.check_expr_with_expectation(e, expected), ExprKind::Array(ref args) => self.check_expr_array(args, expected, expr), + ExprKind::ConstBlock(ref anon_const) => self.to_const(anon_const).ty, ExprKind::Repeat(ref element, ref count) => { self.check_expr_repeat(element, count, expected, expr) } diff --git a/compiler/rustc_typeck/src/check/generator_interior.rs b/compiler/rustc_typeck/src/check/generator_interior.rs index 3fc5f02a4a47d..4473aa2081f23 100644 --- a/compiler/rustc_typeck/src/check/generator_interior.rs +++ b/compiler/rustc_typeck/src/check/generator_interior.rs @@ -250,10 +250,7 @@ impl<'a, 'tcx> Visitor<'tcx> for InteriorVisitor<'a, 'tcx> { let mut scope_var_ids = self.guard_bindings.pop().expect("should have pushed at least one earlier"); for var_id in scope_var_ids.drain(..) { - assert!( - self.guard_bindings_set.remove(&var_id), - "variable should be placed in scope earlier" - ); + self.guard_bindings_set.remove(&var_id); } } self.visit_expr(body); diff --git a/compiler/rustc_typeck/src/check/intrinsic.rs b/compiler/rustc_typeck/src/check/intrinsic.rs index 2ee867c2dd648..f40a250200e4a 100644 --- a/compiler/rustc_typeck/src/check/intrinsic.rs +++ b/compiler/rustc_typeck/src/check/intrinsic.rs @@ -328,14 +328,14 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) { kw::Try => { let mut_u8 = tcx.mk_mut_ptr(tcx.types.u8); - let try_fn_ty = ty::Binder::bind(tcx.mk_fn_sig( + let try_fn_ty = ty::Binder::dummy(tcx.mk_fn_sig( iter::once(mut_u8), tcx.mk_unit(), false, hir::Unsafety::Normal, Abi::Rust, )); - let catch_fn_ty = ty::Binder::bind(tcx.mk_fn_sig( + let catch_fn_ty = ty::Binder::dummy(tcx.mk_fn_sig( [mut_u8, mut_u8].iter().cloned(), tcx.mk_unit(), false, diff --git a/compiler/rustc_typeck/src/check/method/probe.rs b/compiler/rustc_typeck/src/check/method/probe.rs index c1ba29284da16..d403e25939893 100644 --- a/compiler/rustc_typeck/src/check/method/probe.rs +++ b/compiler/rustc_typeck/src/check/method/probe.rs @@ -796,29 +796,29 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> { // FIXME: do we want to commit to this behavior for param bounds? 
debug!("assemble_inherent_candidates_from_param(param_ty={:?})", param_ty); - let bounds = - self.param_env.caller_bounds().iter().map(ty::Predicate::skip_binders).filter_map( - |predicate| match predicate { - ty::PredicateAtom::Trait(trait_predicate, _) => { - match trait_predicate.trait_ref.self_ty().kind() { - ty::Param(ref p) if *p == param_ty => { - Some(ty::Binder::bind(trait_predicate.trait_ref)) - } - _ => None, + let bounds = self.param_env.caller_bounds().iter().filter_map(|predicate| { + let bound_predicate = predicate.bound_atom(); + match bound_predicate.skip_binder() { + ty::PredicateAtom::Trait(trait_predicate, _) => { + match *trait_predicate.trait_ref.self_ty().kind() { + ty::Param(p) if p == param_ty => { + Some(bound_predicate.rebind(trait_predicate.trait_ref)) } + _ => None, } - ty::PredicateAtom::Subtype(..) - | ty::PredicateAtom::Projection(..) - | ty::PredicateAtom::RegionOutlives(..) - | ty::PredicateAtom::WellFormed(..) - | ty::PredicateAtom::ObjectSafe(..) - | ty::PredicateAtom::ClosureKind(..) - | ty::PredicateAtom::TypeOutlives(..) - | ty::PredicateAtom::ConstEvaluatable(..) - | ty::PredicateAtom::ConstEquate(..) - | ty::PredicateAtom::TypeWellFormedFromEnv(..) => None, - }, - ); + } + ty::PredicateAtom::Subtype(..) + | ty::PredicateAtom::Projection(..) + | ty::PredicateAtom::RegionOutlives(..) + | ty::PredicateAtom::WellFormed(..) + | ty::PredicateAtom::ObjectSafe(..) + | ty::PredicateAtom::ClosureKind(..) + | ty::PredicateAtom::TypeOutlives(..) + | ty::PredicateAtom::ConstEvaluatable(..) + | ty::PredicateAtom::ConstEquate(..) + | ty::PredicateAtom::TypeWellFormedFromEnv(..) => None, + } + }); self.elaborate_bounds(bounds, |this, poly_trait_ref, item| { let trait_ref = this.erase_late_bound_regions(&poly_trait_ref); diff --git a/compiler/rustc_typeck/src/check/method/suggest.rs b/compiler/rustc_typeck/src/check/method/suggest.rs index e33a4e98c593a..6d2ffadc20c27 100644 --- a/compiler/rustc_typeck/src/check/method/suggest.rs +++ b/compiler/rustc_typeck/src/check/method/suggest.rs @@ -637,9 +637,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } }; let mut format_pred = |pred: ty::Predicate<'tcx>| { - match pred.skip_binders() { + let bound_predicate = pred.bound_atom(); + match bound_predicate.skip_binder() { ty::PredicateAtom::Projection(pred) => { - let pred = ty::Binder::bind(pred); + let pred = bound_predicate.rebind(pred); // `::Item = String`. 
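The `probe.rs` rewrite above folds the old `map`/`filter_map` pair over the caller bounds into a single `filter_map` that keeps only the trait predicates whose self type is the parameter being probed. A toy stand-in for that filtering shape (the enum and names here are illustrative only, not rustc types):

```rust
// Illustrative only: a miniature "predicate" enum and the filter_map shape
// used to collect the trait bounds that apply to one type parameter.
enum Pred<'a> {
    Trait { self_ty: &'a str, trait_name: &'a str },
    Outlives,
}

fn bounds_for<'a>(preds: &'a [Pred<'a>], param: &str) -> Vec<&'a str> {
    preds
        .iter()
        .filter_map(|p| match p {
            Pred::Trait { self_ty, trait_name } if *self_ty == param => Some(*trait_name),
            _ => None,
        })
        .collect()
}

fn main() {
    let preds = [
        Pred::Trait { self_ty: "T", trait_name: "Clone" },
        Pred::Outlives,
        Pred::Trait { self_ty: "U", trait_name: "Debug" },
    ];
    assert_eq!(bounds_for(&preds, "T"), vec!["Clone"]);
}
```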
let trait_ref = pred.skip_binder().projection_ty.trait_ref(self.tcx); @@ -658,8 +659,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some((obligation, trait_ref.self_ty())) } ty::PredicateAtom::Trait(poly_trait_ref, _) => { - let poly_trait_ref = ty::Binder::bind(poly_trait_ref); - let p = poly_trait_ref.skip_binder().trait_ref; + let p = poly_trait_ref.trait_ref; let self_ty = p.self_ty(); let path = p.print_only_trait_path(); let obligation = format!("{}: {}", self_ty, path); diff --git a/compiler/rustc_typeck/src/check/mod.rs b/compiler/rustc_typeck/src/check/mod.rs index 24ffe944128c9..169ad0df3a5c9 100644 --- a/compiler/rustc_typeck/src/check/mod.rs +++ b/compiler/rustc_typeck/src/check/mod.rs @@ -111,6 +111,7 @@ use rustc_hir::itemlikevisit::ItemLikeVisitor; use rustc_hir::{HirIdMap, Node}; use rustc_index::bit_set::BitSet; use rustc_index::vec::Idx; +use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; use rustc_middle::ty::fold::{TypeFoldable, TypeFolder}; use rustc_middle::ty::query::Providers; use rustc_middle::ty::subst::GenericArgKind; @@ -528,7 +529,20 @@ fn typeck_with_fallback<'tcx>( hir::TyKind::Infer => Some(AstConv::ast_ty_to_ty(&fcx, ty)), _ => None, }) - .unwrap_or_else(fallback); + .unwrap_or_else(|| match tcx.hir().get(id) { + Node::AnonConst(_) => match tcx.hir().get(tcx.hir().get_parent_node(id)) { + Node::Expr(&hir::Expr { + kind: hir::ExprKind::ConstBlock(ref anon_const), + .. + }) if anon_const.hir_id == id => fcx.next_ty_var(TypeVariableOrigin { + kind: TypeVariableOriginKind::TypeInference, + span, + }), + _ => fallback(), + }, + _ => fallback(), + }); + let expected_type = fcx.normalize_associated_types_in(body.value.span, &expected_type); fcx.require_type_is_sized(expected_type, body.value.span, traits::ConstSized); @@ -850,7 +864,8 @@ fn bounds_from_generic_predicates<'tcx>( let mut projections = vec![]; for (predicate, _) in predicates.predicates { debug!("predicate {:?}", predicate); - match predicate.skip_binders() { + let bound_predicate = predicate.bound_atom(); + match bound_predicate.skip_binder() { ty::PredicateAtom::Trait(trait_predicate, _) => { let entry = types.entry(trait_predicate.self_ty()).or_default(); let def_id = trait_predicate.def_id(); @@ -861,7 +876,7 @@ fn bounds_from_generic_predicates<'tcx>( } } ty::PredicateAtom::Projection(projection_pred) => { - projections.push(ty::Binder::bind(projection_pred)); + projections.push(bound_predicate.rebind(projection_pred)); } _ => {} } diff --git a/compiler/rustc_typeck/src/check/op.rs b/compiler/rustc_typeck/src/check/op.rs index 2f3fb49717b9c..02268b11a7a8e 100644 --- a/compiler/rustc_typeck/src/check/op.rs +++ b/compiler/rustc_typeck/src/check/op.rs @@ -302,7 +302,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { true, ), hir::BinOpKind::Mul => ( - format!("cannot multiply `{}` to `{}`", rhs_ty, lhs_ty), + format!("cannot multiply `{}` by `{}`", lhs_ty, rhs_ty), Some("std::ops::Mul"), true, ), diff --git a/compiler/rustc_typeck/src/coherence/orphan.rs b/compiler/rustc_typeck/src/coherence/orphan.rs index 917fc5631c4f5..b2009962aba2e 100644 --- a/compiler/rustc_typeck/src/coherence/orphan.rs +++ b/compiler/rustc_typeck/src/coherence/orphan.rs @@ -110,7 +110,7 @@ impl ItemLikeVisitor<'v> for OrphanChecker<'tcx> { ) .note( "implementing a foreign trait is only possible if at \ - least one of the types for which is it implemented is local, \ + least one of the types for which it is implemented is local, \ and no uncovered type parameters appear before that first \ local type", ) @@ 
-135,7 +135,7 @@ impl ItemLikeVisitor<'v> for OrphanChecker<'tcx> { local type", param_ty, )).note("implementing a foreign trait is only possible if at \ - least one of the types for which is it implemented is local" + least one of the types for which it is implemented is local" ).note("only traits defined in the current crate can be \ implemented for a type parameter" ).emit(); diff --git a/compiler/rustc_typeck/src/collect.rs b/compiler/rustc_typeck/src/collect.rs index 630e80d502efa..b30fb7be273f1 100644 --- a/compiler/rustc_typeck/src/collect.rs +++ b/compiler/rustc_typeck/src/collect.rs @@ -40,7 +40,7 @@ use rustc_middle::ty::query::Providers; use rustc_middle::ty::subst::InternalSubsts; use rustc_middle::ty::util::Discr; use rustc_middle::ty::util::IntTypeExt; -use rustc_middle::ty::{self, AdtKind, Const, ToPolyTraitRef, Ty, TyCtxt}; +use rustc_middle::ty::{self, AdtKind, Const, DefIdTree, ToPolyTraitRef, Ty, TyCtxt}; use rustc_middle::ty::{ReprOptions, ToPredicate, WithConstness}; use rustc_session::config::SanitizerSet; use rustc_session::lint; @@ -851,7 +851,6 @@ fn convert_variant( parent_did: LocalDefId, ) -> ty::VariantDef { let mut seen_fields: FxHashMap = Default::default(); - let hir_id = tcx.hir().local_def_id_to_hir_id(variant_did.unwrap_or(parent_did)); let fields = def .fields() .iter() @@ -868,11 +867,7 @@ fn convert_variant( seen_fields.insert(f.ident.normalize_to_macros_2_0(), f.span); } - ty::FieldDef { - did: fid.to_def_id(), - ident: f.ident, - vis: ty::Visibility::from_hir(&f.vis, hir_id, tcx), - } + ty::FieldDef { did: fid.to_def_id(), ident: f.ident, vis: tcx.visibility(fid) } }) .collect(); let recovered = match def { @@ -2791,6 +2786,14 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs { } }); + // #73631: closures inherit `#[target_feature]` annotations + if tcx.features().target_feature_11 && tcx.is_closure(id) { + let owner_id = tcx.parent(id).expect("closure should have a parent"); + codegen_fn_attrs + .target_features + .extend(tcx.codegen_fn_attrs(owner_id).target_features.iter().copied()) + } + // If a function uses #[target_feature] it can't be inlined into general // purpose functions as they wouldn't have the right target features // enabled. For that reason we also forbid #[inline(always)] as it can't be diff --git a/compiler/rustc_typeck/src/collect/type_of.rs b/compiler/rustc_typeck/src/collect/type_of.rs index 4b3250a1d443a..a754d4dbac713 100644 --- a/compiler/rustc_typeck/src/collect/type_of.rs +++ b/compiler/rustc_typeck/src/collect/type_of.rs @@ -112,12 +112,16 @@ pub(super) fn opt_const_param_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Option< tcx.sess.delay_span_bug(tcx.def_span(def_id), "anon const with Res::Err"); return None; } - _ => span_bug!( - DUMMY_SP, - "unexpected anon const res {:?} in path: {:?}", - res, - path, - ), + _ => { + // If the user tries to specify generics on a type that does not take them, + // e.g. `usize`, we may hit this branch, in which case we treat it as if + // no arguments have been passed. An error should already have been emitted. + tcx.sess.delay_span_bug( + tcx.def_span(def_id), + &format!("unexpected anon const res {:?} in path: {:?}", res, path), + ); + return None; + } }; generics @@ -309,6 +313,12 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> { tcx.types.usize } + Node::Expr(&Expr { kind: ExprKind::ConstBlock(ref anon_const), .. 
}) + if anon_const.hir_id == hir_id => + { + tcx.typeck(def_id).node_type(anon_const.hir_id) + } + Node::Variant(Variant { disr_expr: Some(ref e), .. }) if e.hir_id == hir_id => tcx .adt_def(tcx.hir().get_parent_did(hir_id).to_def_id()) .repr diff --git a/compiler/rustc_typeck/src/expr_use_visitor.rs b/compiler/rustc_typeck/src/expr_use_visitor.rs index e16f26c330401..471909a092f7b 100644 --- a/compiler/rustc_typeck/src/expr_use_visitor.rs +++ b/compiler/rustc_typeck/src/expr_use_visitor.rs @@ -258,7 +258,10 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> { self.consume_exprs(&ia.inputs_exprs); } - hir::ExprKind::Continue(..) | hir::ExprKind::Lit(..) | hir::ExprKind::Err => {} + hir::ExprKind::Continue(..) + | hir::ExprKind::Lit(..) + | hir::ExprKind::ConstBlock(..) + | hir::ExprKind::Err => {} hir::ExprKind::Loop(ref blk, _, _) => { self.walk_block(blk); diff --git a/compiler/rustc_typeck/src/lib.rs b/compiler/rustc_typeck/src/lib.rs index 7efda54fbe035..c1fa39e96eb95 100644 --- a/compiler/rustc_typeck/src/lib.rs +++ b/compiler/rustc_typeck/src/lib.rs @@ -317,7 +317,7 @@ fn check_start_fn_ty(tcx: TyCtxt<'_>, start_def_id: LocalDefId) { } } - let se_ty = tcx.mk_fn_ptr(ty::Binder::bind(tcx.mk_fn_sig( + let se_ty = tcx.mk_fn_ptr(ty::Binder::dummy(tcx.mk_fn_sig( [tcx.types.isize, tcx.mk_imm_ptr(tcx.mk_imm_ptr(tcx.types.u8))].iter().cloned(), tcx.types.isize, false, diff --git a/compiler/rustc_typeck/src/mem_categorization.rs b/compiler/rustc_typeck/src/mem_categorization.rs index 04ead74936f88..f6ac7aa9155fe 100644 --- a/compiler/rustc_typeck/src/mem_categorization.rs +++ b/compiler/rustc_typeck/src/mem_categorization.rs @@ -370,6 +370,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { | hir::ExprKind::Loop(..) | hir::ExprKind::Match(..) | hir::ExprKind::Lit(..) + | hir::ExprKind::ConstBlock(..) | hir::ExprKind::Break(..) | hir::ExprKind::Continue(..) | hir::ExprKind::Struct(..) diff --git a/compiler/rustc_typeck/src/variance/mod.rs b/compiler/rustc_typeck/src/variance/mod.rs index a893f69c48ada..1565efbb022f4 100644 --- a/compiler/rustc_typeck/src/variance/mod.rs +++ b/compiler/rustc_typeck/src/variance/mod.rs @@ -4,7 +4,7 @@ //! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/variance.html use hir::Node; -use rustc_arena::TypedArena; +use rustc_arena::DroplessArena; use rustc_hir as hir; use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use rustc_middle::ty::query::Providers; @@ -32,8 +32,8 @@ pub fn provide(providers: &mut Providers) { fn crate_variances(tcx: TyCtxt<'_>, crate_num: CrateNum) -> CrateVariancesMap<'_> { assert_eq!(crate_num, LOCAL_CRATE); - let mut arena = TypedArena::default(); - let terms_cx = terms::determine_parameters_to_be_inferred(tcx, &mut arena); + let arena = DroplessArena::default(); + let terms_cx = terms::determine_parameters_to_be_inferred(tcx, &arena); let constraints_cx = constraints::add_constraints_from_crate(terms_cx); solve::solve_constraints(constraints_cx) } diff --git a/compiler/rustc_typeck/src/variance/terms.rs b/compiler/rustc_typeck/src/variance/terms.rs index f61a783de694b..81c858c53cb8e 100644 --- a/compiler/rustc_typeck/src/variance/terms.rs +++ b/compiler/rustc_typeck/src/variance/terms.rs @@ -9,7 +9,7 @@ // `InferredIndex` is a newtype'd int representing the index of such // a variable. 
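Several hunks above thread a new `ExprKind::ConstBlock` through type checking (`check_expr`, `typeck_with_fallback`, `type_of`, `expr_use_visitor`, `mem_categorization`). The expression form in question is an inline `const { ... }` block; a minimal user-side sketch of what that plumbing supports (gated by `#![feature(inline_const)]` on compilers contemporary with this diff, stable in current Rust):

```rust
fn main() {
    // The block is evaluated at compile time, and its type is inferred
    // from the surrounding context, which is what the new
    // `typeck_with_fallback` arm arranges for the const block's body.
    let n = const { 6 * 7 };
    assert_eq!(n, 42);
}
```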
-use rustc_arena::TypedArena; +use rustc_arena::DroplessArena; use rustc_hir as hir; use rustc_hir::itemlikevisit::ItemLikeVisitor; use rustc_hir::HirIdMap; @@ -47,7 +47,7 @@ impl<'a> fmt::Debug for VarianceTerm<'a> { pub struct TermsContext<'a, 'tcx> { pub tcx: TyCtxt<'tcx>, - pub arena: &'a TypedArena>, + pub arena: &'a DroplessArena, // For marker types, UnsafeCell, and other lang items where // variance is hardcoded, records the item-id and the hardcoded @@ -64,7 +64,7 @@ pub struct TermsContext<'a, 'tcx> { pub fn determine_parameters_to_be_inferred<'a, 'tcx>( tcx: TyCtxt<'tcx>, - arena: &'a mut TypedArena>, + arena: &'a DroplessArena, ) -> TermsContext<'a, 'tcx> { let mut terms_cx = TermsContext { tcx, diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs index bbde60952d30d..0a4f88dedbb07 100644 --- a/library/alloc/src/alloc.rs +++ b/library/alloc/src/alloc.rs @@ -383,7 +383,7 @@ pub fn handle_alloc_error(layout: Layout) -> ! { unsafe { oom_impl(layout) } } -#[cfg(not(any(test, bootstrap)))] +#[cfg(not(any(target_os = "hermit", test, bootstrap)))] #[doc(hidden)] #[allow(unused_attributes)] #[unstable(feature = "alloc_internals", issue = "none")] diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs index 606bf94f99867..4f9aa44b6b510 100644 --- a/library/alloc/src/collections/btree/map.rs +++ b/library/alloc/src/collections/btree/map.rs @@ -9,12 +9,17 @@ use core::ops::{Index, RangeBounds}; use core::ptr; use super::borrow::DormantMutRef; -use super::node::{self, marker, ForceResult::*, Handle, InsertResult::*, NodeRef}; +use super::node::{self, marker, ForceResult::*, Handle, NodeRef}; use super::search::{self, SearchResult::*}; use super::unwrap_unchecked; +mod entry; +pub use entry::{Entry, OccupiedEntry, VacantEntry}; use Entry::*; -use UnderflowResult::*; + +/// Minimum number of elements in nodes that are not a root. +/// We might temporarily have fewer elements during methods. +pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT; /// A map based on a B-Tree. /// @@ -452,69 +457,6 @@ impl fmt::Debug for RangeMut<'_, K, V> { } } -/// A view into a single entry in a map, which may either be vacant or occupied. -/// -/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`]. -/// -/// [`entry`]: BTreeMap::entry -#[stable(feature = "rust1", since = "1.0.0")] -pub enum Entry<'a, K: 'a, V: 'a> { - /// A vacant entry. - #[stable(feature = "rust1", since = "1.0.0")] - Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V>), - - /// An occupied entry. - #[stable(feature = "rust1", since = "1.0.0")] - Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V>), -} - -#[stable(feature = "debug_btree_map", since = "1.12.0")] -impl Debug for Entry<'_, K, V> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match *self { - Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(), - Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(), - } - } -} - -/// A view into a vacant entry in a `BTreeMap`. -/// It is part of the [`Entry`] enum. 
-#[stable(feature = "rust1", since = "1.0.0")] -pub struct VacantEntry<'a, K: 'a, V: 'a> { - key: K, - handle: Handle, K, V, marker::Leaf>, marker::Edge>, - dormant_map: DormantMutRef<'a, BTreeMap>, - - // Be invariant in `K` and `V` - _marker: PhantomData<&'a mut (K, V)>, -} - -#[stable(feature = "debug_btree_map", since = "1.12.0")] -impl Debug for VacantEntry<'_, K, V> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_tuple("VacantEntry").field(self.key()).finish() - } -} - -/// A view into an occupied entry in a `BTreeMap`. -/// It is part of the [`Entry`] enum. -#[stable(feature = "rust1", since = "1.0.0")] -pub struct OccupiedEntry<'a, K: 'a, V: 'a> { - handle: Handle, K, V, marker::LeafOrInternal>, marker::KV>, - dormant_map: DormantMutRef<'a, BTreeMap>, - - // Be invariant in `K` and `V` - _marker: PhantomData<&'a mut (K, V)>, -} - -#[stable(feature = "debug_btree_map", since = "1.12.0")] -impl Debug for OccupiedEntry<'_, K, V> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish() - } -} - // An iterator for merging two sorted sequences into one struct MergeIter> { left: Peekable, @@ -1156,13 +1098,13 @@ impl BTreeMap { // Check if right-most child is underfull. let mut last_edge = internal.last_edge(); let right_child_len = last_edge.reborrow().descend().len(); - if right_child_len < node::MIN_LEN { + if right_child_len < MIN_LEN { // We need to steal. let mut last_kv = match last_edge.left_kv() { Ok(left) => left, Err(_) => unreachable!(), }; - last_kv.bulk_steal_left(node::MIN_LEN - right_child_len); + last_kv.bulk_steal_left(MIN_LEN - right_child_len); last_edge = last_kv.right_edge(); } @@ -1214,40 +1156,8 @@ impl BTreeMap { let mut right = Self::new(); let right_root = Self::ensure_is_owned(&mut right.root); - for _ in 0..left_root.height() { - right_root.push_internal_level(); - } - { - let mut left_node = left_root.node_as_mut(); - let mut right_node = right_root.node_as_mut(); - - loop { - let mut split_edge = match search::search_node(left_node, key) { - // key is going to the right tree - Found(handle) => handle.left_edge(), - GoDown(handle) => handle, - }; - - split_edge.move_suffix(&mut right_node); - - match (split_edge.force(), right_node.force()) { - (Internal(edge), Internal(node)) => { - left_node = edge.descend(); - right_node = node.first_edge().descend(); - } - (Leaf(_), Leaf(_)) => { - break; - } - _ => { - unreachable!(); - } - } - } - } - - left_root.fix_right_border(); - right_root.fix_left_border(); + left_root.split_off(right_root, key); if left_root.height() < right_root.height() { self.length = left_root.node_as_ref().calc_length(); @@ -2310,596 +2220,6 @@ impl BTreeMap { } } -impl<'a, K: Ord, V> Entry<'a, K, V> { - /// Ensures a value is in the entry by inserting the default if empty, and returns - /// a mutable reference to the value in the entry. 
- /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// - /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); - /// map.entry("poneyland").or_insert(12); - /// - /// assert_eq!(map["poneyland"], 12); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - pub fn or_insert(self, default: V) -> &'a mut V { - match self { - Occupied(entry) => entry.into_mut(), - Vacant(entry) => entry.insert(default), - } - } - - /// Ensures a value is in the entry by inserting the result of the default function if empty, - /// and returns a mutable reference to the value in the entry. - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// - /// let mut map: BTreeMap<&str, String> = BTreeMap::new(); - /// let s = "hoho".to_string(); - /// - /// map.entry("poneyland").or_insert_with(|| s); - /// - /// assert_eq!(map["poneyland"], "hoho".to_string()); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - pub fn or_insert_with V>(self, default: F) -> &'a mut V { - match self { - Occupied(entry) => entry.into_mut(), - Vacant(entry) => entry.insert(default()), - } - } - - #[unstable(feature = "or_insert_with_key", issue = "71024")] - /// Ensures a value is in the entry by inserting, if empty, the result of the default function, - /// which takes the key as its argument, and returns a mutable reference to the value in the - /// entry. - /// - /// # Examples - /// - /// ``` - /// #![feature(or_insert_with_key)] - /// use std::collections::BTreeMap; - /// - /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); - /// - /// map.entry("poneyland").or_insert_with_key(|key| key.chars().count()); - /// - /// assert_eq!(map["poneyland"], 9); - /// ``` - #[inline] - pub fn or_insert_with_key V>(self, default: F) -> &'a mut V { - match self { - Occupied(entry) => entry.into_mut(), - Vacant(entry) => { - let value = default(entry.key()); - entry.insert(value) - } - } - } - - /// Returns a reference to this entry's key. - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// - /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); - /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); - /// ``` - #[stable(feature = "map_entry_keys", since = "1.10.0")] - pub fn key(&self) -> &K { - match *self { - Occupied(ref entry) => entry.key(), - Vacant(ref entry) => entry.key(), - } - } - - /// Provides in-place mutable access to an occupied entry before any - /// potential inserts into the map. - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// - /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); - /// - /// map.entry("poneyland") - /// .and_modify(|e| { *e += 1 }) - /// .or_insert(42); - /// assert_eq!(map["poneyland"], 42); - /// - /// map.entry("poneyland") - /// .and_modify(|e| { *e += 1 }) - /// .or_insert(42); - /// assert_eq!(map["poneyland"], 43); - /// ``` - #[stable(feature = "entry_and_modify", since = "1.26.0")] - pub fn and_modify(self, f: F) -> Self - where - F: FnOnce(&mut V), - { - match self { - Occupied(mut entry) => { - f(entry.get_mut()); - Occupied(entry) - } - Vacant(entry) => Vacant(entry), - } - } -} - -impl<'a, K: Ord, V: Default> Entry<'a, K, V> { - #[stable(feature = "entry_or_default", since = "1.28.0")] - /// Ensures a value is in the entry by inserting the default value if empty, - /// and returns a mutable reference to the value in the entry. 
- /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// - /// let mut map: BTreeMap<&str, Option> = BTreeMap::new(); - /// map.entry("poneyland").or_default(); - /// - /// assert_eq!(map["poneyland"], None); - /// ``` - pub fn or_default(self) -> &'a mut V { - match self { - Occupied(entry) => entry.into_mut(), - Vacant(entry) => entry.insert(Default::default()), - } - } -} - -impl<'a, K: Ord, V> VacantEntry<'a, K, V> { - /// Gets a reference to the key that would be used when inserting a value - /// through the VacantEntry. - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// - /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); - /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); - /// ``` - #[stable(feature = "map_entry_keys", since = "1.10.0")] - pub fn key(&self) -> &K { - &self.key - } - - /// Take ownership of the key. - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// use std::collections::btree_map::Entry; - /// - /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); - /// - /// if let Entry::Vacant(v) = map.entry("poneyland") { - /// v.into_key(); - /// } - /// ``` - #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")] - pub fn into_key(self) -> K { - self.key - } - - /// Sets the value of the entry with the `VacantEntry`'s key, - /// and returns a mutable reference to it. - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// use std::collections::btree_map::Entry; - /// - /// let mut map: BTreeMap<&str, u32> = BTreeMap::new(); - /// - /// if let Entry::Vacant(o) = map.entry("poneyland") { - /// o.insert(37); - /// } - /// assert_eq!(map["poneyland"], 37); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - pub fn insert(self, value: V) -> &'a mut V { - let out_ptr = match self.handle.insert_recursing(self.key, value) { - (Fit(_), val_ptr) => { - // Safety: We have consumed self.handle and the handle returned. - let map = unsafe { self.dormant_map.awaken() }; - map.length += 1; - val_ptr - } - (Split(ins), val_ptr) => { - drop(ins.left); - // Safety: We have consumed self.handle and the reference returned. - let map = unsafe { self.dormant_map.awaken() }; - let root = map.root.as_mut().unwrap(); - root.push_internal_level().push(ins.k, ins.v, ins.right); - map.length += 1; - val_ptr - } - }; - // Now that we have finished growing the tree using borrowed references, - // dereference the pointer to a part of it, that we picked up along the way. - unsafe { &mut *out_ptr } - } -} - -impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> { - /// Gets a reference to the key in the entry. - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// - /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); - /// map.entry("poneyland").or_insert(12); - /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); - /// ``` - #[stable(feature = "map_entry_keys", since = "1.10.0")] - pub fn key(&self) -> &K { - self.handle.reborrow().into_kv().0 - } - - /// Take ownership of the key and value from the map. - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// use std::collections::btree_map::Entry; - /// - /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); - /// map.entry("poneyland").or_insert(12); - /// - /// if let Entry::Occupied(o) = map.entry("poneyland") { - /// // We delete the entry from the map. 
- /// o.remove_entry(); - /// } - /// - /// // If now try to get the value, it will panic: - /// // println!("{}", map["poneyland"]); - /// ``` - #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")] - pub fn remove_entry(self) -> (K, V) { - self.remove_kv() - } - - /// Gets a reference to the value in the entry. - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// use std::collections::btree_map::Entry; - /// - /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); - /// map.entry("poneyland").or_insert(12); - /// - /// if let Entry::Occupied(o) = map.entry("poneyland") { - /// assert_eq!(o.get(), &12); - /// } - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - pub fn get(&self) -> &V { - self.handle.reborrow().into_kv().1 - } - - /// Gets a mutable reference to the value in the entry. - /// - /// If you need a reference to the `OccupiedEntry` that may outlive the - /// destruction of the `Entry` value, see [`into_mut`]. - /// - /// [`into_mut`]: OccupiedEntry::into_mut - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// use std::collections::btree_map::Entry; - /// - /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); - /// map.entry("poneyland").or_insert(12); - /// - /// assert_eq!(map["poneyland"], 12); - /// if let Entry::Occupied(mut o) = map.entry("poneyland") { - /// *o.get_mut() += 10; - /// assert_eq!(*o.get(), 22); - /// - /// // We can use the same Entry multiple times. - /// *o.get_mut() += 2; - /// } - /// assert_eq!(map["poneyland"], 24); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - pub fn get_mut(&mut self) -> &mut V { - self.handle.kv_mut().1 - } - - /// Converts the entry into a mutable reference to its value. - /// - /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`]. - /// - /// [`get_mut`]: OccupiedEntry::get_mut - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// use std::collections::btree_map::Entry; - /// - /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); - /// map.entry("poneyland").or_insert(12); - /// - /// assert_eq!(map["poneyland"], 12); - /// if let Entry::Occupied(o) = map.entry("poneyland") { - /// *o.into_mut() += 10; - /// } - /// assert_eq!(map["poneyland"], 22); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - pub fn into_mut(self) -> &'a mut V { - self.handle.into_val_mut() - } - - /// Sets the value of the entry with the `OccupiedEntry`'s key, - /// and returns the entry's old value. - /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// use std::collections::btree_map::Entry; - /// - /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); - /// map.entry("poneyland").or_insert(12); - /// - /// if let Entry::Occupied(mut o) = map.entry("poneyland") { - /// assert_eq!(o.insert(15), 12); - /// } - /// assert_eq!(map["poneyland"], 15); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - pub fn insert(&mut self, value: V) -> V { - mem::replace(self.get_mut(), value) - } - - /// Takes the value of the entry out of the map, and returns it. 
- /// - /// # Examples - /// - /// ``` - /// use std::collections::BTreeMap; - /// use std::collections::btree_map::Entry; - /// - /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); - /// map.entry("poneyland").or_insert(12); - /// - /// if let Entry::Occupied(o) = map.entry("poneyland") { - /// assert_eq!(o.remove(), 12); - /// } - /// // If we try to get "poneyland"'s value, it'll panic: - /// // println!("{}", map["poneyland"]); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - pub fn remove(self) -> V { - self.remove_kv().1 - } - - // Body of `remove_entry`, separate to keep the above implementations short. - fn remove_kv(self) -> (K, V) { - let mut emptied_internal_root = false; - let (old_kv, _) = self.handle.remove_kv_tracking(|| emptied_internal_root = true); - // SAFETY: we consumed the intermediate root borrow, `self.handle`. - let map = unsafe { self.dormant_map.awaken() }; - map.length -= 1; - if emptied_internal_root { - let root = map.root.as_mut().unwrap(); - root.pop_internal_level(); - } - old_kv - } -} - -impl<'a, K: 'a, V: 'a> Handle, K, V, marker::LeafOrInternal>, marker::KV> { - /// Removes a key/value-pair from the map, and returns that pair, as well as - /// the leaf edge corresponding to that former pair. - fn remove_kv_tracking( - self, - handle_emptied_internal_root: F, - ) -> ((K, V), Handle, K, V, marker::Leaf>, marker::Edge>) { - let (old_kv, mut pos, was_internal) = match self.force() { - Leaf(leaf) => { - let (old_kv, pos) = leaf.remove(); - (old_kv, pos, false) - } - Internal(mut internal) => { - // Replace the location freed in the internal node with an - // adjacent KV, and remove that adjacent KV from its leaf. - // Always choose the adjacent KV on the left side because - // it is typically faster to pop an element from the end - // of the KV arrays without needing to shift other elements. - - let key_loc = internal.kv_mut().0 as *mut K; - let val_loc = internal.kv_mut().1 as *mut V; - - let to_remove = internal.left_edge().descend().last_leaf_edge().left_kv().ok(); - let to_remove = unsafe { unwrap_unchecked(to_remove) }; - - let (kv, pos) = to_remove.remove(); - - let old_key = unsafe { mem::replace(&mut *key_loc, kv.0) }; - let old_val = unsafe { mem::replace(&mut *val_loc, kv.1) }; - - ((old_key, old_val), pos, true) - } - }; - - // Handle underflow - let mut cur_node = unsafe { ptr::read(&pos).into_node().forget_type() }; - let mut at_leaf = true; - while cur_node.len() < node::MIN_LEN { - match handle_underfull_node(cur_node) { - AtRoot => break, - Merged(edge, merged_with_left, offset) => { - // If we merged with our right sibling then our tracked - // position has not changed. However if we merged with our - // left sibling then our tracked position is now dangling. - if at_leaf && merged_with_left { - let idx = pos.idx() + offset; - let node = match unsafe { ptr::read(&edge).descend().force() } { - Leaf(leaf) => leaf, - Internal(_) => unreachable!(), - }; - pos = unsafe { Handle::new_edge(node, idx) }; - } - - let parent = edge.into_node(); - if parent.len() == 0 { - // The parent that was just emptied must be the root, - // because nodes on a lower level would not have been - // left with a single child. - handle_emptied_internal_root(); - break; - } else { - cur_node = parent.forget_type(); - at_leaf = false; - } - } - Stole(stole_from_left) => { - // Adjust the tracked position if we stole from a left sibling - if stole_from_left && at_leaf { - // SAFETY: This is safe since we just added an element to our node. 
- unsafe { - pos.move_next_unchecked(); - } - } - break; - } - } - } - - // If we deleted from an internal node then we need to compensate for - // the earlier swap and adjust the tracked position to point to the - // next element. - if was_internal { - pos = unsafe { unwrap_unchecked(pos.next_kv().ok()).next_leaf_edge() }; - } - - (old_kv, pos) - } -} - -impl node::Root { - /// Removes empty levels on the top, but keep an empty leaf if the entire tree is empty. - fn fix_top(&mut self) { - while self.height() > 0 && self.node_as_ref().len() == 0 { - self.pop_internal_level(); - } - } - - fn fix_right_border(&mut self) { - self.fix_top(); - - { - let mut cur_node = self.node_as_mut(); - - while let Internal(node) = cur_node.force() { - let mut last_kv = node.last_kv(); - - if last_kv.can_merge() { - cur_node = last_kv.merge().descend(); - } else { - let right_len = last_kv.reborrow().right_edge().descend().len(); - // `MINLEN + 1` to avoid readjust if merge happens on the next level. - if right_len < node::MIN_LEN + 1 { - last_kv.bulk_steal_left(node::MIN_LEN + 1 - right_len); - } - cur_node = last_kv.right_edge().descend(); - } - } - } - - self.fix_top(); - } - - /// The symmetric clone of `fix_right_border`. - fn fix_left_border(&mut self) { - self.fix_top(); - - { - let mut cur_node = self.node_as_mut(); - - while let Internal(node) = cur_node.force() { - let mut first_kv = node.first_kv(); - - if first_kv.can_merge() { - cur_node = first_kv.merge().descend(); - } else { - let left_len = first_kv.reborrow().left_edge().descend().len(); - if left_len < node::MIN_LEN + 1 { - first_kv.bulk_steal_right(node::MIN_LEN + 1 - left_len); - } - cur_node = first_kv.left_edge().descend(); - } - } - } - - self.fix_top(); - } -} - -enum UnderflowResult<'a, K, V> { - AtRoot, - Merged(Handle, K, V, marker::Internal>, marker::Edge>, bool, usize), - Stole(bool), -} - -fn handle_underfull_node<'a, K: 'a, V: 'a>( - node: NodeRef, K, V, marker::LeafOrInternal>, -) -> UnderflowResult<'_, K, V> { - let parent = match node.ascend() { - Ok(parent) => parent, - Err(_) => return AtRoot, - }; - - // Prefer the left KV if it exists. Merging with the left side is faster, - // since merging happens towards the left and `node` has fewer elements. - // Stealing from the left side is faster, since we can pop from the end of - // the KV arrays. - let (is_left, mut handle) = match parent.left_kv() { - Ok(left) => (true, left), - Err(parent) => { - let right = unsafe { unwrap_unchecked(parent.right_kv().ok()) }; - (false, right) - } - }; - - if handle.can_merge() { - let offset = if is_left { handle.reborrow().left_edge().descend().len() + 1 } else { 0 }; - Merged(handle.merge(), is_left, offset) - } else { - if is_left { - handle.steal_left(); - } else { - handle.steal_right(); - } - Stole(is_left) - } -} - impl> Iterator for MergeIter { type Item = (K, V); diff --git a/library/alloc/src/collections/btree/map/entry.rs b/library/alloc/src/collections/btree/map/entry.rs new file mode 100644 index 0000000000000..73a0ca21f6733 --- /dev/null +++ b/library/alloc/src/collections/btree/map/entry.rs @@ -0,0 +1,475 @@ +use core::fmt::{self, Debug}; +use core::marker::PhantomData; +use core::mem; + +use super::super::borrow::DormantMutRef; +use super::super::node::{marker, Handle, InsertResult::*, NodeRef}; +use super::BTreeMap; + +use Entry::*; + +/// A view into a single entry in a map, which may either be vacant or occupied. +/// +/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`]. 
+/// +/// [`entry`]: BTreeMap::entry +#[stable(feature = "rust1", since = "1.0.0")] +pub enum Entry<'a, K: 'a, V: 'a> { + /// A vacant entry. + #[stable(feature = "rust1", since = "1.0.0")] + Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V>), + + /// An occupied entry. + #[stable(feature = "rust1", since = "1.0.0")] + Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V>), +} + +#[stable(feature = "debug_btree_map", since = "1.12.0")] +impl Debug for Entry<'_, K, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(), + Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(), + } + } +} + +/// A view into a vacant entry in a `BTreeMap`. +/// It is part of the [`Entry`] enum. +#[stable(feature = "rust1", since = "1.0.0")] +pub struct VacantEntry<'a, K: 'a, V: 'a> { + pub(super) key: K, + pub(super) handle: Handle, K, V, marker::Leaf>, marker::Edge>, + pub(super) dormant_map: DormantMutRef<'a, BTreeMap>, + + // Be invariant in `K` and `V` + pub(super) _marker: PhantomData<&'a mut (K, V)>, +} + +#[stable(feature = "debug_btree_map", since = "1.12.0")] +impl Debug for VacantEntry<'_, K, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_tuple("VacantEntry").field(self.key()).finish() + } +} + +/// A view into an occupied entry in a `BTreeMap`. +/// It is part of the [`Entry`] enum. +#[stable(feature = "rust1", since = "1.0.0")] +pub struct OccupiedEntry<'a, K: 'a, V: 'a> { + pub(super) handle: Handle, K, V, marker::LeafOrInternal>, marker::KV>, + pub(super) dormant_map: DormantMutRef<'a, BTreeMap>, + + // Be invariant in `K` and `V` + pub(super) _marker: PhantomData<&'a mut (K, V)>, +} + +#[stable(feature = "debug_btree_map", since = "1.12.0")] +impl Debug for OccupiedEntry<'_, K, V> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish() + } +} + +impl<'a, K: Ord, V> Entry<'a, K, V> { + /// Ensures a value is in the entry by inserting the default if empty, and returns + /// a mutable reference to the value in the entry. + /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_insert(12); + /// + /// assert_eq!(map["poneyland"], 12); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + pub fn or_insert(self, default: V) -> &'a mut V { + match self { + Occupied(entry) => entry.into_mut(), + Vacant(entry) => entry.insert(default), + } + } + + /// Ensures a value is in the entry by inserting the result of the default function if empty, + /// and returns a mutable reference to the value in the entry. 
+ /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// + /// let mut map: BTreeMap<&str, String> = BTreeMap::new(); + /// let s = "hoho".to_string(); + /// + /// map.entry("poneyland").or_insert_with(|| s); + /// + /// assert_eq!(map["poneyland"], "hoho".to_string()); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + pub fn or_insert_with V>(self, default: F) -> &'a mut V { + match self { + Occupied(entry) => entry.into_mut(), + Vacant(entry) => entry.insert(default()), + } + } + + #[unstable(feature = "or_insert_with_key", issue = "71024")] + /// Ensures a value is in the entry by inserting, if empty, the result of the default function, + /// which takes the key as its argument, and returns a mutable reference to the value in the + /// entry. + /// + /// # Examples + /// + /// ``` + /// #![feature(or_insert_with_key)] + /// use std::collections::BTreeMap; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// + /// map.entry("poneyland").or_insert_with_key(|key| key.chars().count()); + /// + /// assert_eq!(map["poneyland"], 9); + /// ``` + #[inline] + pub fn or_insert_with_key V>(self, default: F) -> &'a mut V { + match self { + Occupied(entry) => entry.into_mut(), + Vacant(entry) => { + let value = default(entry.key()); + entry.insert(value) + } + } + } + + /// Returns a reference to this entry's key. + /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); + /// ``` + #[stable(feature = "map_entry_keys", since = "1.10.0")] + pub fn key(&self) -> &K { + match *self { + Occupied(ref entry) => entry.key(), + Vacant(ref entry) => entry.key(), + } + } + + /// Provides in-place mutable access to an occupied entry before any + /// potential inserts into the map. + /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// + /// map.entry("poneyland") + /// .and_modify(|e| { *e += 1 }) + /// .or_insert(42); + /// assert_eq!(map["poneyland"], 42); + /// + /// map.entry("poneyland") + /// .and_modify(|e| { *e += 1 }) + /// .or_insert(42); + /// assert_eq!(map["poneyland"], 43); + /// ``` + #[stable(feature = "entry_and_modify", since = "1.26.0")] + pub fn and_modify(self, f: F) -> Self + where + F: FnOnce(&mut V), + { + match self { + Occupied(mut entry) => { + f(entry.get_mut()); + Occupied(entry) + } + Vacant(entry) => Vacant(entry), + } + } +} + +impl<'a, K: Ord, V: Default> Entry<'a, K, V> { + #[stable(feature = "entry_or_default", since = "1.28.0")] + /// Ensures a value is in the entry by inserting the default value if empty, + /// and returns a mutable reference to the value in the entry. + /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// + /// let mut map: BTreeMap<&str, Option> = BTreeMap::new(); + /// map.entry("poneyland").or_default(); + /// + /// assert_eq!(map["poneyland"], None); + /// ``` + pub fn or_default(self) -> &'a mut V { + match self { + Occupied(entry) => entry.into_mut(), + Vacant(entry) => entry.insert(Default::default()), + } + } +} + +impl<'a, K: Ord, V> VacantEntry<'a, K, V> { + /// Gets a reference to the key that would be used when inserting a value + /// through the VacantEntry. 
+ /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); + /// ``` + #[stable(feature = "map_entry_keys", since = "1.10.0")] + pub fn key(&self) -> &K { + &self.key + } + + /// Take ownership of the key. + /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// use std::collections::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// + /// if let Entry::Vacant(v) = map.entry("poneyland") { + /// v.into_key(); + /// } + /// ``` + #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")] + pub fn into_key(self) -> K { + self.key + } + + /// Sets the value of the entry with the `VacantEntry`'s key, + /// and returns a mutable reference to it. + /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// use std::collections::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, u32> = BTreeMap::new(); + /// + /// if let Entry::Vacant(o) = map.entry("poneyland") { + /// o.insert(37); + /// } + /// assert_eq!(map["poneyland"], 37); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + pub fn insert(self, value: V) -> &'a mut V { + let out_ptr = match self.handle.insert_recursing(self.key, value) { + (Fit(_), val_ptr) => { + // Safety: We have consumed self.handle and the handle returned. + let map = unsafe { self.dormant_map.awaken() }; + map.length += 1; + val_ptr + } + (Split(ins), val_ptr) => { + drop(ins.left); + // Safety: We have consumed self.handle and the reference returned. + let map = unsafe { self.dormant_map.awaken() }; + let root = map.root.as_mut().unwrap(); + root.push_internal_level().push(ins.k, ins.v, ins.right); + map.length += 1; + val_ptr + } + }; + // Now that we have finished growing the tree using borrowed references, + // dereference the pointer to a part of it, that we picked up along the way. + unsafe { &mut *out_ptr } + } +} + +impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> { + /// Gets a reference to the key in the entry. + /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_insert(12); + /// assert_eq!(map.entry("poneyland").key(), &"poneyland"); + /// ``` + #[stable(feature = "map_entry_keys", since = "1.10.0")] + pub fn key(&self) -> &K { + self.handle.reborrow().into_kv().0 + } + + /// Take ownership of the key and value from the map. + /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// use std::collections::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_insert(12); + /// + /// if let Entry::Occupied(o) = map.entry("poneyland") { + /// // We delete the entry from the map. + /// o.remove_entry(); + /// } + /// + /// // If now try to get the value, it will panic: + /// // println!("{}", map["poneyland"]); + /// ``` + #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")] + pub fn remove_entry(self) -> (K, V) { + self.remove_kv() + } + + /// Gets a reference to the value in the entry. 
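The `Split(ins)` arm of `insert` above is the classic B-tree move: a full node is split around its middle key/value, the left half stays in place, the right half becomes a fresh node, and the middle pair is pushed up one level, growing the tree via `push_internal_level` when the root itself splits. A rough sketch of just the splitting step on a plain `Vec`, with made-up names and no claim to match the real node layout:

```
// Toy stand-in for splitting a full node: the real code operates on the
// fixed-capacity LeafNode/InternalNode buffers with raw pointers, but the
// shape of the operation is the same.
fn split_full_node<K>(mut keys: Vec<K>) -> (Vec<K>, K, Vec<K>) {
    let mid = keys.len() / 2;
    let right = keys.split_off(mid + 1); // everything to the right of the median
    let median = keys.pop().unwrap();    // the key that gets pushed up a level
    (keys, median, right)
}

fn main() {
    let (left, median, right) = split_full_node(vec![1, 3, 5, 7, 9, 11, 13]);
    assert_eq!(left, [1, 3, 5]);
    assert_eq!(median, 7);
    assert_eq!(right, [9, 11, 13]);
}
```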
+ /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// use std::collections::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_insert(12); + /// + /// if let Entry::Occupied(o) = map.entry("poneyland") { + /// assert_eq!(o.get(), &12); + /// } + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + pub fn get(&self) -> &V { + self.handle.reborrow().into_kv().1 + } + + /// Gets a mutable reference to the value in the entry. + /// + /// If you need a reference to the `OccupiedEntry` that may outlive the + /// destruction of the `Entry` value, see [`into_mut`]. + /// + /// [`into_mut`]: OccupiedEntry::into_mut + /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// use std::collections::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_insert(12); + /// + /// assert_eq!(map["poneyland"], 12); + /// if let Entry::Occupied(mut o) = map.entry("poneyland") { + /// *o.get_mut() += 10; + /// assert_eq!(*o.get(), 22); + /// + /// // We can use the same Entry multiple times. + /// *o.get_mut() += 2; + /// } + /// assert_eq!(map["poneyland"], 24); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + pub fn get_mut(&mut self) -> &mut V { + self.handle.kv_mut().1 + } + + /// Converts the entry into a mutable reference to its value. + /// + /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`]. + /// + /// [`get_mut`]: OccupiedEntry::get_mut + /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// use std::collections::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_insert(12); + /// + /// assert_eq!(map["poneyland"], 12); + /// if let Entry::Occupied(o) = map.entry("poneyland") { + /// *o.into_mut() += 10; + /// } + /// assert_eq!(map["poneyland"], 22); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + pub fn into_mut(self) -> &'a mut V { + self.handle.into_val_mut() + } + + /// Sets the value of the entry with the `OccupiedEntry`'s key, + /// and returns the entry's old value. + /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// use std::collections::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_insert(12); + /// + /// if let Entry::Occupied(mut o) = map.entry("poneyland") { + /// assert_eq!(o.insert(15), 12); + /// } + /// assert_eq!(map["poneyland"], 15); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + pub fn insert(&mut self, value: V) -> V { + mem::replace(self.get_mut(), value) + } + + /// Takes the value of the entry out of the map, and returns it. + /// + /// # Examples + /// + /// ``` + /// use std::collections::BTreeMap; + /// use std::collections::btree_map::Entry; + /// + /// let mut map: BTreeMap<&str, usize> = BTreeMap::new(); + /// map.entry("poneyland").or_insert(12); + /// + /// if let Entry::Occupied(o) = map.entry("poneyland") { + /// assert_eq!(o.remove(), 12); + /// } + /// // If we try to get "poneyland"'s value, it'll panic: + /// // println!("{}", map["poneyland"]); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + pub fn remove(self) -> V { + self.remove_kv().1 + } + + // Body of `remove_entry`, separate to keep the above implementations short. 
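When the convenience methods are not enough, matching on `Entry::Vacant` and `Entry::Occupied` directly exposes `VacantEntry::insert` and the `OccupiedEntry` accessors shown above. A short usage sketch (the map contents are invented sample data):

```
use std::collections::btree_map::Entry;
use std::collections::BTreeMap;

fn main() {
    let mut scores: BTreeMap<&str, u32> = BTreeMap::new();
    scores.insert("alice", 3);

    // Bump an existing score, or start a new one at 1.
    for &name in &["alice", "bob"] {
        match scores.entry(name) {
            Entry::Occupied(mut o) => *o.get_mut() += 1,
            Entry::Vacant(v) => {
                v.insert(1);
            }
        }
    }
    assert_eq!(scores["alice"], 4);
    assert_eq!(scores["bob"], 1);

    // `remove` consumes the occupied entry and hands back the value.
    if let Entry::Occupied(o) = scores.entry("bob") {
        assert_eq!(o.remove(), 1);
    }
    assert!(!scores.contains_key("bob"));
}
```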
+ pub(super) fn remove_kv(self) -> (K, V) { + let mut emptied_internal_root = false; + let (old_kv, _) = self.handle.remove_kv_tracking(|| emptied_internal_root = true); + // SAFETY: we consumed the intermediate root borrow, `self.handle`. + let map = unsafe { self.dormant_map.awaken() }; + map.length -= 1; + if emptied_internal_root { + let root = map.root.as_mut().unwrap(); + root.pop_internal_level(); + } + old_kv + } +} diff --git a/library/alloc/src/collections/btree/map/tests.rs b/library/alloc/src/collections/btree/map/tests.rs index 118e173bb16db..adb94972f5bb6 100644 --- a/library/alloc/src/collections/btree/map/tests.rs +++ b/library/alloc/src/collections/btree/map/tests.rs @@ -1,4 +1,4 @@ -use super::super::{navigate::Position, node, DeterministicRng}; +use super::super::{node, DeterministicRng}; use super::Entry::{Occupied, Vacant}; use super::*; use crate::boxed::Box; @@ -7,7 +7,7 @@ use crate::rc::Rc; use crate::string::{String, ToString}; use crate::vec::Vec; use std::convert::TryFrom; -use std::iter::FromIterator; +use std::iter::{self, FromIterator}; use std::mem; use std::ops::Bound::{self, Excluded, Included, Unbounded}; use std::ops::RangeBounds; @@ -42,19 +42,6 @@ fn test_all_refs<'a, T: 'a>(dummy: &mut T, iter: impl Iterator } } -struct SeriesChecker { - previous: Option, -} - -impl SeriesChecker { - fn is_ascending(&mut self, next: T) { - if let Some(previous) = self.previous { - assert!(previous < next, "{:?} >= {:?}", previous, next); - } - self.previous = Some(next); - } -} - impl<'a, K: 'a, V: 'a> BTreeMap { /// Panics if the map (or the code navigating it) is corrupted. fn check(&self) @@ -63,44 +50,15 @@ impl<'a, K: 'a, V: 'a> BTreeMap { { if let Some(root) = &self.root { let root_node = root.node_as_ref(); - let mut checker = SeriesChecker { previous: None }; - let mut internal_length = 0; - let mut internal_kv_count = 0; - let mut leaf_length = 0; - root_node.visit_nodes_in_order(|pos| match pos { - Position::Leaf(node) => { - let is_root = root_node.height() == 0; - let min_len = if is_root { 0 } else { node::MIN_LEN }; - assert!(node.len() >= min_len, "{} < {}", node.len(), min_len); - - for idx in 0..node.len() { - let key = *unsafe { node.key_at(idx) }; - checker.is_ascending(key); - } - leaf_length += node.len(); - } - Position::Internal(node) => { - let is_root = root_node.height() == node.height(); - let min_len = if is_root { 1 } else { node::MIN_LEN }; - assert!(node.len() >= min_len, "{} < {}", node.len(), min_len); - for idx in 0..=node.len() { - let edge = unsafe { node::Handle::new_edge(node, idx) }; - assert!(edge.descend().ascend().ok().unwrap() == edge); - } + assert!(root_node.ascend().is_err()); + root_node.assert_back_pointers(); - internal_length += node.len(); - } - Position::InternalKV(kv) => { - let key = *kv.into_kv().0; - checker.is_ascending(key); + let counted = root_node.assert_ascending(); + assert_eq!(self.length, counted); + assert_eq!(self.length, root_node.calc_length()); - internal_kv_count += 1; - } - }); - assert_eq!(internal_length, internal_kv_count); - assert_eq!(root_node.calc_length(), internal_length + leaf_length); - assert_eq!(self.length, internal_length + leaf_length); + root_node.assert_min_len(if root_node.height() > 0 { 1 } else { 0 }); } else { assert_eq!(self.length, 0); } @@ -116,34 +74,25 @@ impl<'a, K: 'a, V: 'a> BTreeMap { K: Debug, { if let Some(root) = self.root.as_ref() { - let mut result = String::new(); - let root_node = root.node_as_ref(); - root_node.visit_nodes_in_order(|pos| match pos { - 
Position::Leaf(leaf) => { - let depth = root_node.height(); - let indent = " ".repeat(depth); - result += &format!("\n{}", indent); - for idx in 0..leaf.len() { - if idx > 0 { - result += ", "; - } - result += &format!("{:?}", unsafe { leaf.key_at(idx) }); - } - } - Position::Internal(_) => {} - Position::InternalKV(kv) => { - let depth = root_node.height() - kv.into_node().height(); - let indent = " ".repeat(depth); - result += &format!("\n{}{:?}", indent, kv.into_kv().0); - } - }); - result + root.node_as_ref().dump_keys() } else { String::from("not yet allocated") } } } +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { + pub fn assert_min_len(self, min_len: usize) { + assert!(self.len() >= min_len, "{} < {}", self.len(), min_len); + if let node::ForceResult::Internal(node) = self.force() { + for idx in 0..=node.len() { + let edge = unsafe { Handle::new_edge(node, idx) }; + edge.descend().assert_min_len(MIN_LEN); + } + } + } +} + // Test our value of MIN_INSERTS_HEIGHT_2. It may change according to the // implementation of insertion, but it's best to be aware of when it does. #[test] @@ -170,7 +119,6 @@ fn test_levels() { let last_key = *map.last_key_value().unwrap().0; map.insert(last_key + 1, ()); } - println!("{}", map.dump_keys()); map.check(); // Structure: // - 1 element in internal root node with 2 children @@ -372,7 +320,7 @@ fn test_iter_rev() { fn do_test_iter_mut_mutation(size: usize) where T: Copy + Debug + Ord + TryFrom, - >::Error: std::fmt::Debug, + >::Error: Debug, { let zero = T::try_from(0).unwrap(); let mut map: BTreeMap = (0..size).map(|i| (T::try_from(i).unwrap(), zero)).collect(); @@ -857,7 +805,7 @@ mod test_drain_filter { fn consuming_nothing() { let pairs = (0..3).map(|i| (i, i)); let mut map: BTreeMap<_, _> = pairs.collect(); - assert!(map.drain_filter(|_, _| false).eq(std::iter::empty())); + assert!(map.drain_filter(|_, _| false).eq(iter::empty())); map.check(); } @@ -878,7 +826,7 @@ mod test_drain_filter { *v += 6; false }) - .eq(std::iter::empty()) + .eq(iter::empty()) ); assert!(map.keys().copied().eq(0..3)); assert!(map.values().copied().eq(6..9)); diff --git a/library/alloc/src/collections/btree/mod.rs b/library/alloc/src/collections/btree/mod.rs index ecbdacda4b618..bcc50ed561587 100644 --- a/library/alloc/src/collections/btree/mod.rs +++ b/library/alloc/src/collections/btree/mod.rs @@ -2,8 +2,10 @@ mod borrow; pub mod map; mod navigate; mod node; +mod remove; mod search; pub mod set; +mod split; #[doc(hidden)] trait Recover { diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs index 12f29d7efbc9e..c8d3de9e5cd5c 100644 --- a/library/alloc/src/collections/btree/node.rs +++ b/library/alloc/src/collections/btree/node.rs @@ -38,8 +38,8 @@ use crate::alloc::{AllocRef, Global, Layout}; use crate::boxed::Box; const B: usize = 6; -pub const MIN_LEN: usize = B - 1; pub const CAPACITY: usize = 2 * B - 1; +pub const MIN_LEN_AFTER_SPLIT: usize = B - 1; const KV_IDX_CENTER: usize = B - 1; const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1; const EDGE_IDX_RIGHT_OF_CENTER: usize = B; @@ -87,7 +87,6 @@ impl LeafNode { #[repr(C)] // gdb_providers.py uses this type name for introspection. struct InternalNode { - // gdb_providers.py uses this field name for introspection. data: LeafNode, /// The pointers to the children of this node. 
`len + 1` of these are considered @@ -128,10 +127,6 @@ impl BoxedNode { BoxedNode { ptr: Unique::from(&mut Box::leak(node).data) } } - unsafe fn from_ptr(ptr: NonNull>) -> Self { - BoxedNode { ptr: unsafe { Unique::new_unchecked(ptr.as_ptr()) } } - } - fn as_ptr(&self) -> NonNull> { NonNull::from(self.ptr) } @@ -199,7 +194,7 @@ impl Root { /// and is the opposite of `pop_internal_level`. pub fn push_internal_level(&mut self) -> NodeRef, K, V, marker::Internal> { let mut new_node = Box::new(unsafe { InternalNode::new() }); - new_node.edges[0].write(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) }); + new_node.edges[0].write(unsafe { ptr::read(&mut self.node) }); self.node = BoxedNode::from_internal(new_node); self.height += 1; @@ -225,8 +220,8 @@ impl Root { let top = self.node.ptr; - let internal_node = unsafe { self.internal_node_as_mut() }; - self.node = unsafe { BoxedNode::from_ptr(internal_node.first_edge().descend().node) }; + let mut internal_node = unsafe { self.internal_node_as_mut() }; + self.node = unsafe { internal_node.as_internal_mut().edges[0].assume_init_read() }; self.height -= 1; self.node_as_mut().as_leaf_mut().parent = None; @@ -616,7 +611,7 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Leaf> { /// Adds a key/value pair to the beginning of the node. fn push_front(&mut self, key: K, val: V) { - debug_assert!(self.len() < CAPACITY); + assert!(self.len() < CAPACITY); unsafe { slice_insert(self.keys_mut(), 0, key); @@ -669,14 +664,7 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Internal> { unsafe { slice_insert(self.keys_mut(), 0, key); slice_insert(self.vals_mut(), 0, val); - slice_insert( - slice::from_raw_parts_mut( - MaybeUninit::slice_as_mut_ptr(&mut self.as_internal_mut().edges), - self.len() + 1, - ), - 0, - edge.node, - ); + slice_insert(self.edges_mut(), 0, edge.node); } self.as_leaf_mut().len += 1; @@ -926,33 +914,22 @@ fn splitpoint(edge_idx: usize) -> (usize, InsertionPlace) { } } -impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::Edge> { - /// Helps implementations of `insert_fit` for a particular `NodeType`, - /// by taking care of leaf data. +impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::Edge> { /// Inserts a new key/value pair between the key/value pairs to the right and left of /// this edge. This method assumes that there is enough space in the node for the new /// pair to fit. - fn leafy_insert_fit(&mut self, key: K, val: V) { + /// + /// The returned pointer points to the inserted value. + fn insert_fit(&mut self, key: K, val: V) -> *mut V { debug_assert!(self.node.len() < CAPACITY); unsafe { slice_insert(self.node.keys_mut(), self.idx, key); slice_insert(self.node.vals_mut(), self.idx, val); - self.node.as_leaf_mut().len += 1; - } - } -} -impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::Edge> { - /// Inserts a new key/value pair between the key/value pairs to the right and left of - /// this edge. This method assumes that there is enough space in the node for the new - /// pair to fit. - /// - /// The returned pointer points to the inserted value. - fn insert_fit(&mut self, key: K, val: V) -> *mut V { - self.leafy_insert_fit(key, val); - unsafe { self.node.val_mut_at(self.idx) } + self.node.val_mut_at(self.idx) + } } } @@ -1001,11 +978,14 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, /// between this edge and the key/value pair to the right of this edge. This method assumes /// that there is enough space in the node for the new pair to fit. 
fn insert_fit(&mut self, key: K, val: V, edge: Root) { + debug_assert!(self.node.len() < CAPACITY); debug_assert!(edge.height == self.node.height - 1); unsafe { + slice_insert(self.node.keys_mut(), self.idx, key); + slice_insert(self.node.vals_mut(), self.idx, val); slice_insert(self.node.edges_mut(), self.idx + 1, edge.node); - self.leafy_insert_fit(key, val); + self.node.as_leaf_mut().len += 1; self.node.correct_childrens_parent_links((self.idx + 1)..=self.node.len()); } @@ -1136,15 +1116,21 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType> } impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { + /// Helps implementations of `split` for a particular `NodeType`, + /// by calculating the length of the new node. + fn split_new_node_len(&self) -> usize { + debug_assert!(self.idx < self.node.len()); + self.node.len() - self.idx - 1 + } + /// Helps implementations of `split` for a particular `NodeType`, /// by taking care of leaf data. - fn leafy_split(&mut self, new_node: &mut LeafNode) -> (K, V, usize) { + fn split_leaf_data(&mut self, new_node: &mut LeafNode) -> (K, V) { + let new_len = self.split_new_node_len(); unsafe { let k = ptr::read(self.node.key_at(self.idx)); let v = ptr::read(self.node.val_at(self.idx)); - let new_len = self.node.len() - self.idx - 1; - ptr::copy_nonoverlapping( self.node.key_at(self.idx + 1), MaybeUninit::slice_as_mut_ptr(&mut new_node.keys), @@ -1158,7 +1144,7 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType> self.node.as_leaf_mut().len = self.idx as u16; new_node.len = new_len as u16; - (k, v, new_len) + (k, v) } } } @@ -1166,7 +1152,7 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType> impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::KV> { /// Splits the underlying node into three parts: /// - /// - The node is truncated to only contain the key/value pairs to the right of + /// - The node is truncated to only contain the key/value pairs to the left of /// this handle. /// - The key and value pointed to by this handle are extracted. /// - All the key/value pairs to the right of this handle are put into a newly @@ -1175,9 +1161,10 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark unsafe { let mut new_node = Box::new(LeafNode::new()); - let (k, v, _) = self.leafy_split(&mut new_node); + let (k, v) = self.split_leaf_data(&mut new_node); - (self.node, k, v, Root { node: BoxedNode::from_leaf(new_node), height: 0 }) + let right = Root { node: BoxedNode::from_leaf(new_node), height: 0 }; + (self.node, k, v, right) } } @@ -1211,29 +1198,28 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, /// Splits the underlying node into three parts: /// /// - The node is truncated to only contain the edges and key/value pairs to the - /// right of this handle. + /// left of this handle. /// - The key and value pointed to by this handle are extracted. /// - All the edges and key/value pairs to the right of this handle are put into /// a newly allocated node. 
pub fn split(mut self) -> (NodeRef, K, V, marker::Internal>, K, V, Root) { unsafe { let mut new_node = Box::new(InternalNode::new()); - - let (k, v, new_len) = self.leafy_split(&mut new_node.data); - let height = self.node.height; - let old_node = &*self.node.as_internal_ptr(); - + // Move edges out before reducing length: + let new_len = self.split_new_node_len(); ptr::copy_nonoverlapping( - old_node.edges.as_ptr().add(self.idx + 1), - new_node.edges.as_mut_ptr(), + self.node.edge_at(self.idx + 1), + MaybeUninit::slice_as_mut_ptr(&mut new_node.edges), new_len + 1, ); + let (k, v) = self.split_leaf_data(&mut new_node.data); - let mut new_root = Root { node: BoxedNode::from_internal(new_node), height }; + let height = self.node.height; + let mut right = Root { node: BoxedNode::from_internal(new_node), height }; - new_root.internal_node_as_mut().correct_childrens_parent_links(0..=new_len); + right.internal_node_as_mut().correct_childrens_parent_links(0..=new_len); - (self.node, k, v, new_root) + (self.node, k, v, right) } } diff --git a/library/alloc/src/collections/btree/node/tests.rs b/library/alloc/src/collections/btree/node/tests.rs index 2ef9aad0ccdcf..d6527057c5d77 100644 --- a/library/alloc/src/collections/btree/node/tests.rs +++ b/library/alloc/src/collections/btree/node/tests.rs @@ -1,6 +1,87 @@ +use super::super::navigate; use super::*; +use crate::fmt::Debug; +use crate::string::String; use core::cmp::Ordering::*; +impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { + /// Asserts that the back pointer in each reachable node points to its parent. + pub fn assert_back_pointers(self) { + if let ForceResult::Internal(node) = self.force() { + for idx in 0..=node.len() { + let edge = unsafe { Handle::new_edge(node, idx) }; + let child = edge.descend(); + assert!(child.ascend().ok() == Some(edge)); + child.assert_back_pointers(); + } + } + } + + /// Asserts that the keys are in strictly ascending order. + /// Returns how many keys it encountered. 
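`assert_ascending`, whose signature follows immediately below, feeds every key of an in-order traversal through a small `SeriesChecker`. For a flat slice the same strict-ascending invariant can be stated in a single expression; a throwaway sketch, not part of this diff:

```
fn is_strictly_ascending<T: Ord>(keys: &[T]) -> bool {
    // Equivalent to what the SeriesChecker asserts pairwise during the
    // in-order walk: every key is strictly greater than the previous one.
    keys.windows(2).all(|pair| pair[0] < pair[1])
}

fn main() {
    assert!(is_strictly_ascending(&[1, 2, 5, 9]));
    assert!(!is_strictly_ascending(&[1, 2, 2, 9])); // duplicates are rejected
}
```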
+ pub fn assert_ascending(self) -> usize + where + K: Copy + Debug + Ord, + { + struct SeriesChecker { + num_seen: usize, + previous: Option, + } + impl SeriesChecker { + fn is_ascending(&mut self, next: T) { + if let Some(previous) = self.previous { + assert!(previous < next, "{:?} >= {:?}", previous, next); + } + self.previous = Some(next); + self.num_seen += 1; + } + } + + let mut checker = SeriesChecker { num_seen: 0, previous: None }; + self.visit_nodes_in_order(|pos| match pos { + navigate::Position::Leaf(node) => { + for idx in 0..node.len() { + let key = *unsafe { node.key_at(idx) }; + checker.is_ascending(key); + } + } + navigate::Position::InternalKV(kv) => { + let key = *kv.into_kv().0; + checker.is_ascending(key); + } + navigate::Position::Internal(_) => {} + }); + checker.num_seen + } + + pub fn dump_keys(self) -> String + where + K: Debug, + { + let mut result = String::new(); + self.visit_nodes_in_order(|pos| match pos { + navigate::Position::Leaf(leaf) => { + let depth = self.height(); + let indent = " ".repeat(depth); + result += &format!("\n{}", indent); + for idx in 0..leaf.len() { + if idx > 0 { + result += ", "; + } + result += &format!("{:?}", unsafe { leaf.key_at(idx) }); + } + } + navigate::Position::Internal(_) => {} + navigate::Position::InternalKV(kv) => { + let depth = self.height() - kv.into_node().height(); + let indent = " ".repeat(depth); + result += &format!("\n{}{:?}", indent, kv.into_kv().0); + } + }); + result + } +} + #[test] fn test_splitpoint() { for idx in 0..=CAPACITY { @@ -19,8 +100,8 @@ fn test_splitpoint() { right_len += 1; } } - assert!(left_len >= MIN_LEN); - assert!(right_len >= MIN_LEN); + assert!(left_len >= MIN_LEN_AFTER_SPLIT); + assert!(right_len >= MIN_LEN_AFTER_SPLIT); assert!(left_len + right_len == CAPACITY); } } diff --git a/library/alloc/src/collections/btree/remove.rs b/library/alloc/src/collections/btree/remove.rs new file mode 100644 index 0000000000000..99655d3e2bf64 --- /dev/null +++ b/library/alloc/src/collections/btree/remove.rs @@ -0,0 +1,133 @@ +use super::map::MIN_LEN; +use super::node::{marker, ForceResult, Handle, NodeRef}; +use super::unwrap_unchecked; +use core::mem; +use core::ptr; + +impl<'a, K: 'a, V: 'a> Handle, K, V, marker::LeafOrInternal>, marker::KV> { + /// Removes a key/value-pair from the map, and returns that pair, as well as + /// the leaf edge corresponding to that former pair. + pub fn remove_kv_tracking( + self, + handle_emptied_internal_root: F, + ) -> ((K, V), Handle, K, V, marker::Leaf>, marker::Edge>) { + let (old_kv, mut pos, was_internal) = match self.force() { + ForceResult::Leaf(leaf) => { + let (old_kv, pos) = leaf.remove(); + (old_kv, pos, false) + } + ForceResult::Internal(mut internal) => { + // Replace the location freed in the internal node with an + // adjacent KV, and remove that adjacent KV from its leaf. + // Always choose the adjacent KV on the left side because + // it is typically faster to pop an element from the end + // of the KV arrays without needing to shift other elements. 
+ + let key_loc = internal.kv_mut().0 as *mut K; + let val_loc = internal.kv_mut().1 as *mut V; + + let to_remove = internal.left_edge().descend().last_leaf_edge().left_kv().ok(); + let to_remove = unsafe { unwrap_unchecked(to_remove) }; + + let (kv, pos) = to_remove.remove(); + + let old_key = unsafe { mem::replace(&mut *key_loc, kv.0) }; + let old_val = unsafe { mem::replace(&mut *val_loc, kv.1) }; + + ((old_key, old_val), pos, true) + } + }; + + // Handle underflow + let mut cur_node = unsafe { ptr::read(&pos).into_node().forget_type() }; + let mut at_leaf = true; + while cur_node.len() < MIN_LEN { + match handle_underfull_node(cur_node) { + UnderflowResult::AtRoot => break, + UnderflowResult::Merged(edge, merged_with_left, offset) => { + // If we merged with our right sibling then our tracked + // position has not changed. However if we merged with our + // left sibling then our tracked position is now dangling. + if at_leaf && merged_with_left { + let idx = pos.idx() + offset; + let node = match unsafe { ptr::read(&edge).descend().force() } { + ForceResult::Leaf(leaf) => leaf, + ForceResult::Internal(_) => unreachable!(), + }; + pos = unsafe { Handle::new_edge(node, idx) }; + } + + let parent = edge.into_node(); + if parent.len() == 0 { + // The parent that was just emptied must be the root, + // because nodes on a lower level would not have been + // left with a single child. + handle_emptied_internal_root(); + break; + } else { + cur_node = parent.forget_type(); + at_leaf = false; + } + } + UnderflowResult::Stole(stole_from_left) => { + // Adjust the tracked position if we stole from a left sibling + if stole_from_left && at_leaf { + // SAFETY: This is safe since we just added an element to our node. + unsafe { + pos.move_next_unchecked(); + } + } + break; + } + } + } + + // If we deleted from an internal node then we need to compensate for + // the earlier swap and adjust the tracked position to point to the + // next element. + if was_internal { + pos = unsafe { unwrap_unchecked(pos.next_kv().ok()).next_leaf_edge() }; + } + + (old_kv, pos) + } +} + +enum UnderflowResult<'a, K, V> { + AtRoot, + Merged(Handle, K, V, marker::Internal>, marker::Edge>, bool, usize), + Stole(bool), +} + +fn handle_underfull_node<'a, K: 'a, V: 'a>( + node: NodeRef, K, V, marker::LeafOrInternal>, +) -> UnderflowResult<'_, K, V> { + let parent = match node.ascend() { + Ok(parent) => parent, + Err(_) => return UnderflowResult::AtRoot, + }; + + // Prefer the left KV if it exists. Merging with the left side is faster, + // since merging happens towards the left and `node` has fewer elements. + // Stealing from the left side is faster, since we can pop from the end of + // the KV arrays. 
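The underflow handling above comes down to one decision per level: if the underfull node, the chosen sibling, and the separating key all fit in one node, merge them; otherwise rotate elements through the parent slot, mirroring the `merge`/`steal_left`/`steal_right` calls just below. A rough sketch of that decision on plain vectors, with `CAPACITY` and every helper name invented for illustration:

```
const CAPACITY: usize = 11; // 2 * B - 1 with B = 6, matching node.rs

#[allow(dead_code)] // only the Merge case is exercised in this tiny example
enum Fix<K> {
    // Everything fits into one node: merge around the separating key.
    Merge(Vec<K>),
    // Otherwise rotate one element through the parent slot.
    Steal { left: Vec<K>, sep: K, right: Vec<K> },
}

fn fix_underfull<K>(mut left: Vec<K>, sep: K, mut right: Vec<K>) -> Fix<K> {
    if left.len() + 1 + right.len() <= CAPACITY {
        left.push(sep);
        left.append(&mut right);
        Fix::Merge(left)
    } else if left.len() > right.len() {
        // The right node is the underfull one: its new first element is the
        // old separator, and the left sibling's last element moves up.
        let new_sep = left.pop().unwrap();
        right.insert(0, sep);
        Fix::Steal { left, sep: new_sep, right }
    } else {
        // Mirror image of the branch above.
        let new_sep = right.remove(0);
        left.push(sep);
        Fix::Steal { left, sep: new_sep, right }
    }
}

fn main() {
    match fix_underfull(vec![1, 2], 3, vec![4, 5, 6]) {
        Fix::Merge(merged) => assert_eq!(merged, [1, 2, 3, 4, 5, 6]),
        Fix::Steal { .. } => unreachable!("these two nodes are small enough to merge"),
    }
}
```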
+ let (is_left, mut handle) = match parent.left_kv() { + Ok(left) => (true, left), + Err(parent) => { + let right = unsafe { unwrap_unchecked(parent.right_kv().ok()) }; + (false, right) + } + }; + + if handle.can_merge() { + let offset = if is_left { handle.reborrow().left_edge().descend().len() + 1 } else { 0 }; + UnderflowResult::Merged(handle.merge(), is_left, offset) + } else { + if is_left { + handle.steal_left(); + } else { + handle.steal_right(); + } + UnderflowResult::Stole(is_left) + } +} diff --git a/library/alloc/src/collections/btree/split.rs b/library/alloc/src/collections/btree/split.rs new file mode 100644 index 0000000000000..5f00a5a25abad --- /dev/null +++ b/library/alloc/src/collections/btree/split.rs @@ -0,0 +1,106 @@ +use super::map::MIN_LEN; +use super::node::{ForceResult::*, Root}; +use super::search::{search_node, SearchResult::*}; +use core::borrow::Borrow; + +impl Root { + pub fn split_off(&mut self, right_root: &mut Self, key: &Q) + where + K: Borrow, + { + debug_assert!(right_root.height() == 0); + debug_assert!(right_root.node_as_ref().len() == 0); + + let left_root = self; + for _ in 0..left_root.height() { + right_root.push_internal_level(); + } + + { + let mut left_node = left_root.node_as_mut(); + let mut right_node = right_root.node_as_mut(); + + loop { + let mut split_edge = match search_node(left_node, key) { + // key is going to the right tree + Found(handle) => handle.left_edge(), + GoDown(handle) => handle, + }; + + split_edge.move_suffix(&mut right_node); + + match (split_edge.force(), right_node.force()) { + (Internal(edge), Internal(node)) => { + left_node = edge.descend(); + right_node = node.first_edge().descend(); + } + (Leaf(_), Leaf(_)) => { + break; + } + _ => unreachable!(), + } + } + } + + left_root.fix_right_border(); + right_root.fix_left_border(); + } + + /// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty. + fn fix_top(&mut self) { + while self.height() > 0 && self.node_as_ref().len() == 0 { + self.pop_internal_level(); + } + } + + fn fix_right_border(&mut self) { + self.fix_top(); + + { + let mut cur_node = self.node_as_mut(); + + while let Internal(node) = cur_node.force() { + let mut last_kv = node.last_kv(); + + if last_kv.can_merge() { + cur_node = last_kv.merge().descend(); + } else { + let right_len = last_kv.reborrow().right_edge().descend().len(); + // `MIN_LEN + 1` to avoid readjust if merge happens on the next level. + if right_len < MIN_LEN + 1 { + last_kv.bulk_steal_left(MIN_LEN + 1 - right_len); + } + cur_node = last_kv.right_edge().descend(); + } + } + } + + self.fix_top(); + } + + /// The symmetric clone of `fix_right_border`. + fn fix_left_border(&mut self) { + self.fix_top(); + + { + let mut cur_node = self.node_as_mut(); + + while let Internal(node) = cur_node.force() { + let mut first_kv = node.first_kv(); + + if first_kv.can_merge() { + cur_node = first_kv.merge().descend(); + } else { + let left_len = first_kv.reborrow().left_edge().descend().len(); + // `MIN_LEN + 1` to avoid readjust if merge happens on the next level. 
+ if left_len < MIN_LEN + 1 { + first_kv.bulk_steal_right(MIN_LEN + 1 - left_len); + } + cur_node = first_kv.left_edge().descend(); + } + } + } + + self.fix_top(); + } +} diff --git a/library/alloc/src/collections/vec_deque.rs b/library/alloc/src/collections/vec_deque.rs index ff9b1553bf2fc..22b02a4f849b6 100644 --- a/library/alloc/src/collections/vec_deque.rs +++ b/library/alloc/src/collections/vec_deque.rs @@ -1102,7 +1102,7 @@ impl VecDeque { where R: RangeBounds, { - let Range { start, end } = slice::check_range(self.len(), range); + let Range { start, end } = range.assert_len(self.len()); let tail = self.wrap_add(self.tail, start); let head = self.wrap_add(self.tail, end); (tail, head) @@ -2181,7 +2181,7 @@ impl VecDeque { /// /// This method does not allocate and does not change the order of the /// inserted elements. As it returns a mutable slice, this can be used to - /// sort or binary search a deque. + /// sort a deque. /// /// Once the internal storage is contiguous, the [`as_slices`] and /// [`as_mut_slices`] methods will return the entire contents of the @@ -2430,6 +2430,143 @@ impl VecDeque { self.wrap_copy(self.tail, self.head, k); } } + + /// Binary searches this sorted `VecDeque` for a given element. + /// + /// If the value is found then [`Result::Ok`] is returned, containing the + /// index of the matching element. If there are multiple matches, then any + /// one of the matches could be returned. If the value is not found then + /// [`Result::Err`] is returned, containing the index where a matching + /// element could be inserted while maintaining sorted order. + /// + /// # Examples + /// + /// Looks up a series of four elements. The first is found, with a + /// uniquely determined position; the second and third are not + /// found; the fourth could match any position in `[1, 4]`. + /// + /// ``` + /// #![feature(vecdeque_binary_search)] + /// use std::collections::VecDeque; + /// + /// let deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into(); + /// + /// assert_eq!(deque.binary_search(&13), Ok(9)); + /// assert_eq!(deque.binary_search(&4), Err(7)); + /// assert_eq!(deque.binary_search(&100), Err(13)); + /// let r = deque.binary_search(&1); + /// assert!(matches!(r, Ok(1..=4))); + /// ``` + /// + /// If you want to insert an item to a sorted `VecDeque`, while maintaining + /// sort order: + /// + /// ``` + /// #![feature(vecdeque_binary_search)] + /// use std::collections::VecDeque; + /// + /// let mut deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into(); + /// let num = 42; + /// let idx = deque.binary_search(&num).unwrap_or_else(|x| x); + /// deque.insert(idx, num); + /// assert_eq!(deque, &[0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]); + /// ``` + #[unstable(feature = "vecdeque_binary_search", issue = "78021")] + #[inline] + pub fn binary_search(&self, x: &T) -> Result + where + T: Ord, + { + self.binary_search_by(|e| e.cmp(x)) + } + + /// Binary searches this sorted `VecDeque` with a comparator function. + /// + /// The comparator function should implement an order consistent + /// with the sort order of the underlying `VecDeque`, returning an + /// order code that indicates whether its argument is `Less`, + /// `Equal` or `Greater` than the desired target. + /// + /// If the value is found then [`Result::Ok`] is returned, containing the + /// index of the matching element. If there are multiple matches, then any + /// one of the matches could be returned. 
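Because a `VecDeque` can hold its elements in two discontiguous slices, `binary_search_by` below has to pick the correct half before delegating to the slice search; the new `test_binary_search` later in this diff covers exactly that case. A standalone usage sketch (nightly-only while the `vecdeque_binary_search` gate is open; the element values are arbitrary):

```
#![feature(vecdeque_binary_search)]
use std::collections::VecDeque;

fn main() {
    // Pushing to the front after extending at the back typically leaves the
    // elements split across the two internal slices.
    let mut deque: VecDeque<i32> = VecDeque::with_capacity(8);
    deque.extend([4, 6, 8].iter().copied());
    deque.push_front(2);
    deque.push_front(0);
    assert_eq!(deque.iter().copied().collect::<Vec<_>>(), [0, 2, 4, 6, 8]);

    assert_eq!(deque.binary_search(&6), Ok(3));
    assert_eq!(deque.binary_search(&5), Err(3)); // insertion point that keeps it sorted

    let idx = deque.binary_search(&5).unwrap_or_else(|i| i);
    deque.insert(idx, 5);
    assert_eq!(deque.iter().copied().collect::<Vec<_>>(), [0, 2, 4, 5, 6, 8]);
}
```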
If the value is not found then + /// [`Result::Err`] is returned, containing the index where a matching + /// element could be inserted while maintaining sorted order. + /// + /// # Examples + /// + /// Looks up a series of four elements. The first is found, with a + /// uniquely determined position; the second and third are not + /// found; the fourth could match any position in `[1, 4]`. + /// + /// ``` + /// #![feature(vecdeque_binary_search)] + /// use std::collections::VecDeque; + /// + /// let deque: VecDeque<_> = vec![0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55].into(); + /// + /// assert_eq!(deque.binary_search_by(|x| x.cmp(&13)), Ok(9)); + /// assert_eq!(deque.binary_search_by(|x| x.cmp(&4)), Err(7)); + /// assert_eq!(deque.binary_search_by(|x| x.cmp(&100)), Err(13)); + /// let r = deque.binary_search_by(|x| x.cmp(&1)); + /// assert!(matches!(r, Ok(1..=4))); + /// ``` + #[unstable(feature = "vecdeque_binary_search", issue = "78021")] + pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result + where + F: FnMut(&'a T) -> Ordering, + { + let (front, back) = self.as_slices(); + + if let Some(Ordering::Less | Ordering::Equal) = back.first().map(|elem| f(elem)) { + back.binary_search_by(f).map(|idx| idx + front.len()).map_err(|idx| idx + front.len()) + } else { + front.binary_search_by(f) + } + } + + /// Binary searches this sorted `VecDeque` with a key extraction function. + /// + /// Assumes that the `VecDeque` is sorted by the key, for instance with + /// [`make_contiguous().sort_by_key()`](#method.make_contiguous) using the same + /// key extraction function. + /// + /// If the value is found then [`Result::Ok`] is returned, containing the + /// index of the matching element. If there are multiple matches, then any + /// one of the matches could be returned. If the value is not found then + /// [`Result::Err`] is returned, containing the index where a matching + /// element could be inserted while maintaining sorted order. + /// + /// # Examples + /// + /// Looks up a series of four elements in a slice of pairs sorted by + /// their second elements. The first is found, with a uniquely + /// determined position; the second and third are not found; the + /// fourth could match any position in `[1, 4]`. 
+ /// + /// ``` + /// #![feature(vecdeque_binary_search)] + /// use std::collections::VecDeque; + /// + /// let deque: VecDeque<_> = vec![(0, 0), (2, 1), (4, 1), (5, 1), + /// (3, 1), (1, 2), (2, 3), (4, 5), (5, 8), (3, 13), + /// (1, 21), (2, 34), (4, 55)].into(); + /// + /// assert_eq!(deque.binary_search_by_key(&13, |&(a,b)| b), Ok(9)); + /// assert_eq!(deque.binary_search_by_key(&4, |&(a,b)| b), Err(7)); + /// assert_eq!(deque.binary_search_by_key(&100, |&(a,b)| b), Err(13)); + /// let r = deque.binary_search_by_key(&1, |&(a,b)| b); + /// assert!(matches!(r, Ok(1..=4))); + /// ``` + #[unstable(feature = "vecdeque_binary_search", issue = "78021")] + #[inline] + pub fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result + where + F: FnMut(&'a T) -> B, + B: Ord, + { + self.binary_search_by(|k| f(k).cmp(b)) + } } impl VecDeque { diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index 046c3867d8403..0fe15958076c5 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -72,6 +72,7 @@ #![allow(explicit_outlives_requirements)] #![allow(incomplete_features)] #![deny(unsafe_op_in_unsafe_fn)] +#![cfg_attr(not(bootstrap), feature(rustc_allow_const_fn_unstable))] #![cfg_attr(not(test), feature(generator_trait))] #![cfg_attr(test, feature(test))] #![cfg_attr(test, feature(new_uninit))] @@ -114,11 +115,12 @@ #![feature(or_patterns)] #![feature(pattern)] #![feature(ptr_internals)] +#![feature(range_bounds_assert_len)] #![feature(raw_ref_op)] #![feature(rustc_attrs)] #![feature(receiver_trait)] +#![feature(renamed_spin_loop)] #![feature(min_specialization)] -#![feature(slice_check_range)] #![feature(slice_ptr_get)] #![feature(slice_ptr_len)] #![feature(staged_api)] diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs index a60e676fda557..a4240308bb35f 100644 --- a/library/alloc/src/raw_vec.rs +++ b/library/alloc/src/raw_vec.rs @@ -116,7 +116,8 @@ impl RawVec { impl RawVec { /// Like `new`, but parameterized over the choice of allocator for /// the returned `RawVec`. - #[allow_internal_unstable(const_fn)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn))] pub const fn new_in(alloc: A) -> Self { // `cap: 0` means "unallocated". zero-sized types are ignored. Self { ptr: Unique::dangling(), cap: 0, alloc } @@ -259,7 +260,7 @@ impl RawVec { /// Ensures that the buffer contains at least enough space to hold `len + /// additional` elements. If it doesn't already have enough capacity, will /// reallocate enough space plus comfortable slack space to get amortized - /// `O(1)` behavior. Will limit this behavior if it would needlessly cause + /// *O*(1) behavior. Will limit this behavior if it would needlessly cause /// itself to panic. /// /// If `len` exceeds `self.capacity()`, this may fail to actually allocate diff --git a/library/alloc/src/slice.rs b/library/alloc/src/slice.rs index 79403cf86873e..3db66964941c3 100644 --- a/library/alloc/src/slice.rs +++ b/library/alloc/src/slice.rs @@ -91,8 +91,6 @@ use crate::borrow::ToOwned; use crate::boxed::Box; use crate::vec::Vec; -#[unstable(feature = "slice_check_range", issue = "76393")] -pub use core::slice::check_range; #[unstable(feature = "array_chunks", issue = "74985")] pub use core::slice::ArrayChunks; #[unstable(feature = "array_chunks", issue = "74985")] @@ -169,7 +167,7 @@ mod hack { impl [T] { /// Sorts the slice. 
/// - /// This sort is stable (i.e., does not reorder equal elements) and `O(n * log(n))` worst-case. + /// This sort is stable (i.e., does not reorder equal elements) and *O*(*n* \* log(*n*)) worst-case. /// /// When applicable, unstable sorting is preferred because it is generally faster than stable /// sorting and it doesn't allocate auxiliary memory. @@ -204,7 +202,7 @@ impl [T] { /// Sorts the slice with a comparator function. /// - /// This sort is stable (i.e., does not reorder equal elements) and `O(n * log(n))` worst-case. + /// This sort is stable (i.e., does not reorder equal elements) and *O*(*n* \* log(*n*)) worst-case. /// /// The comparator function must define a total ordering for the elements in the slice. If /// the ordering is not total, the order of the elements is unspecified. An order is a @@ -258,8 +256,8 @@ impl [T] { /// Sorts the slice with a key extraction function. /// - /// This sort is stable (i.e., does not reorder equal elements) and `O(m * n * log(n))` - /// worst-case, where the key function is `O(m)`. + /// This sort is stable (i.e., does not reorder equal elements) and *O*(*m* \* *n* \* log(*n*)) + /// worst-case, where the key function is *O*(*m*). /// /// For expensive key functions (e.g. functions that are not simple property accesses or /// basic operations), [`sort_by_cached_key`](#method.sort_by_cached_key) is likely to be @@ -301,8 +299,8 @@ impl [T] { /// /// During sorting, the key function is called only once per element. /// - /// This sort is stable (i.e., does not reorder equal elements) and `O(m * n + n * log(n))` - /// worst-case, where the key function is `O(m)`. + /// This sort is stable (i.e., does not reorder equal elements) and *O*(*m* \* *n* + *n* \* log(*n*)) + /// worst-case, where the key function is *O*(*m*). /// /// For simple key functions (e.g., functions that are property accesses or /// basic operations), [`sort_by_key`](#method.sort_by_key) is likely to be @@ -946,7 +944,7 @@ where /// 1. for every `i` in `1..runs.len()`: `runs[i - 1].len > runs[i].len` /// 2. for every `i` in `2..runs.len()`: `runs[i - 2].len > runs[i - 1].len + runs[i].len` /// -/// The invariants ensure that the total running time is `O(n * log(n))` worst-case. +/// The invariants ensure that the total running time is *O*(*n* \* log(*n*)) worst-case. fn merge_sort(v: &mut [T], mut is_less: F) where F: FnMut(&T, &T) -> bool, diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs index d3598ccfce81f..72ed036637d8b 100644 --- a/library/alloc/src/string.rs +++ b/library/alloc/src/string.rs @@ -1,8 +1,8 @@ -//! A UTF-8 encoded, growable string. +//! A UTF-8–encoded, growable string. //! -//! This module contains the [`String`] type, a trait for converting -//! [`ToString`]s, and several error types that may result from working with -//! [`String`]s. +//! This module contains the [`String`] type, the [`ToString`] trait for +//! converting to strings, and several error types that may result from +//! working with [`String`]s. //! //! # Examples //! @@ -49,7 +49,6 @@ use core::iter::{FromIterator, FusedIterator}; use core::ops::Bound::{Excluded, Included, Unbounded}; use core::ops::{self, Add, AddAssign, Index, IndexMut, Range, RangeBounds}; use core::ptr; -use core::slice; use core::str::{lossy, pattern::Pattern}; use crate::borrow::{Cow, ToOwned}; @@ -58,7 +57,7 @@ use crate::collections::TryReserveError; use crate::str::{self, from_boxed_utf8_unchecked, Chars, FromStr, Utf8Error}; use crate::vec::Vec; -/// A UTF-8 encoded, growable string. 
+/// A UTF-8–encoded, growable string. /// /// The `String` type is the most common string type that has ownership over the /// contents of the string. It has a close relationship with its borrowed @@ -566,7 +565,7 @@ impl String { Cow::Owned(res) } - /// Decode a UTF-16 encoded vector `v` into a `String`, returning [`Err`] + /// Decode a UTF-16–encoded vector `v` into a `String`, returning [`Err`] /// if `v` contains any invalid data. /// /// # Examples @@ -600,7 +599,7 @@ impl String { Ok(ret) } - /// Decode a UTF-16 encoded slice `v` into a `String`, replacing + /// Decode a UTF-16–encoded slice `v` into a `String`, replacing /// invalid data with [the replacement character (`U+FFFD`)][U+FFFD]. /// /// Unlike [`from_utf8_lossy`] which returns a [`Cow<'a, str>`], @@ -1507,14 +1506,14 @@ impl String { // of the vector version. The data is just plain bytes. // Because the range removal happens in Drop, if the Drain iterator is leaked, // the removal will not happen. - let Range { start, end } = slice::check_range(self.len(), range); + let Range { start, end } = range.assert_len(self.len()); assert!(self.is_char_boundary(start)); assert!(self.is_char_boundary(end)); // Take out two simultaneous borrows. The &mut String won't be accessed // until iteration is over, in Drop. let self_ptr = self as *mut _; - // SAFETY: `check_range` and `is_char_boundary` do the appropriate bounds checks. + // SAFETY: `assert_len` and `is_char_boundary` do the appropriate bounds checks. let chars_iter = unsafe { self.get_unchecked(start..end) }.chars(); Drain { start, end, iter: chars_iter, string: self_ptr } @@ -2192,15 +2191,15 @@ pub trait ToString { #[stable(feature = "rust1", since = "1.0.0")] impl ToString for T { // A common guideline is to not inline generic functions. However, - // remove `#[inline]` from this method causes non-negligible regression. - // See as last attempt try to remove it. + // removing `#[inline]` from this method causes non-negligible regressions. + // See , the last attempt + // to try to remove it. #[inline] default fn to_string(&self) -> String { use fmt::Write; let mut buf = String::new(); buf.write_fmt(format_args!("{}", self)) .expect("a Display implementation returned an error unexpectedly"); - buf.shrink_to_fit(); buf } } diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index d70de1163c9ee..73ff795c01aa8 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -10,6 +10,7 @@ use core::cmp::Ordering; use core::convert::{From, TryFrom}; use core::fmt; use core::hash::{Hash, Hasher}; +use core::hint; use core::intrinsics::abort; use core::iter; use core::marker::{PhantomData, Unpin, Unsize}; @@ -764,6 +765,7 @@ impl Arc { loop { // check if the weak counter is currently "locked"; if so, spin. if cur == usize::MAX { + hint::spin_loop(); cur = this.inner().weak.load(Relaxed); continue; } diff --git a/library/alloc/src/vec.rs b/library/alloc/src/vec.rs index 5e68f76693fcf..5b3604db563c6 100644 --- a/library/alloc/src/vec.rs +++ b/library/alloc/src/vec.rs @@ -259,7 +259,7 @@ use crate::raw_vec::RawVec; /// `Vec` does not guarantee any particular growth strategy when reallocating /// when full, nor when [`reserve`] is called. The current strategy is basic /// and it may prove desirable to use a non-constant growth factor. Whatever -/// strategy is used will of course guarantee `O(1)` amortized [`push`]. +/// strategy is used will of course guarantee *O*(1) amortized [`push`]. 
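The `hint::spin_loop()` call added to the weak-counter spin in `Arc` above is the usual courtesy in a busy-wait loop: it signals to the CPU that the thread is spinning. A minimal sketch of the same idiom on a standalone atomic flag (this is not the `Arc` internals, just the pattern; `std::hint::spin_loop` is assumed to be available on the toolchain in use):

```
use std::hint;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::thread;

fn main() {
    let ready = Arc::new(AtomicBool::new(false));
    let flag = Arc::clone(&ready);

    let worker = thread::spawn(move || {
        // Pretend to do some setup work, then publish the result.
        flag.store(true, Ordering::Release);
    });

    // Spin until the flag flips, hinting to the CPU on every iteration.
    while !ready.load(Ordering::Acquire) {
        hint::spin_loop();
    }

    worker.join().unwrap();
    println!("flag observed");
}
```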
/// /// `vec![x; n]`, `vec![a, b, c, d]`, and /// [`Vec::with_capacity(n)`][`Vec::with_capacity`], will all produce a `Vec` @@ -1314,7 +1314,7 @@ impl Vec { // the hole, and the vector length is restored to the new length. // let len = self.len(); - let Range { start, end } = slice::check_range(len, range); + let Range { start, end } = range.assert_len(len); unsafe { // set self.vec length's to start, to be safe in case Drain is leaked @@ -1603,50 +1603,6 @@ impl Vec { } } -impl Vec { - /// Resizes the `Vec` in-place so that `len` is equal to `new_len`. - /// - /// If `new_len` is greater than `len`, the `Vec` is extended by the - /// difference, with each additional slot filled with [`Default::default()`]. - /// If `new_len` is less than `len`, the `Vec` is simply truncated. - /// - /// This method uses [`Default`] to create new values on every push. If - /// you'd rather [`Clone`] a given value, use [`resize`]. - /// - /// # Examples - /// - /// ``` - /// # #![allow(deprecated)] - /// #![feature(vec_resize_default)] - /// - /// let mut vec = vec![1, 2, 3]; - /// vec.resize_default(5); - /// assert_eq!(vec, [1, 2, 3, 0, 0]); - /// - /// let mut vec = vec![1, 2, 3, 4]; - /// vec.resize_default(2); - /// assert_eq!(vec, [1, 2]); - /// ``` - /// - /// [`resize`]: Vec::resize - #[unstable(feature = "vec_resize_default", issue = "41758")] - #[rustc_deprecated( - reason = "This is moving towards being removed in favor \ - of `.resize_with(Default::default)`. If you disagree, please comment \ - in the tracking issue.", - since = "1.33.0" - )] - pub fn resize_default(&mut self, new_len: usize) { - let len = self.len(); - - if new_len > len { - self.extend_with(new_len - len, ExtendDefault); - } else { - self.truncate(new_len); - } - } -} - // This code generalizes `extend_with_{element,default}`. trait ExtendWith { fn next(&mut self) -> T; diff --git a/library/alloc/tests/lib.rs b/library/alloc/tests/lib.rs index cff8ff9ac7ad9..b7cc03f8eb999 100644 --- a/library/alloc/tests/lib.rs +++ b/library/alloc/tests/lib.rs @@ -20,6 +20,7 @@ #![feature(inplace_iteration)] #![feature(iter_map_while)] #![feature(int_bits_const)] +#![feature(vecdeque_binary_search)] use std::collections::hash_map::DefaultHasher; use std::hash::{Hash, Hasher}; diff --git a/library/alloc/tests/str.rs b/library/alloc/tests/str.rs index ed8ee2d8823c0..834dd4656ff76 100644 --- a/library/alloc/tests/str.rs +++ b/library/alloc/tests/str.rs @@ -529,6 +529,13 @@ mod slice_index { message: "out of bounds"; } + in mod rangeinclusive_len { + data: "abcdef"; + good: data[0..=5] == "abcdef"; + bad: data[0..=6]; + message: "out of bounds"; + } + in mod range_len_len { data: "abcdef"; good: data[6..6] == ""; @@ -544,6 +551,28 @@ mod slice_index { } } + panic_cases! { + in mod rangeinclusive_exhausted { + data: "abcdef"; + + good: data[0..=5] == "abcdef"; + good: data[{ + let mut iter = 0..=5; + iter.by_ref().count(); // exhaust it + iter + }] == ""; + + // 0..=6 is out of bounds before exhaustion, so it + // stands to reason that it still would be after. + bad: data[{ + let mut iter = 0..=6; + iter.by_ref().count(); // exhaust it + iter + }]; + message: "out of bounds"; + } + } + panic_cases! 
{ in mod range_neg_width { data: "abcdef"; diff --git a/library/alloc/tests/vec_deque.rs b/library/alloc/tests/vec_deque.rs index 46d8a3c4cb493..05cb3a2c03d79 100644 --- a/library/alloc/tests/vec_deque.rs +++ b/library/alloc/tests/vec_deque.rs @@ -1659,3 +1659,42 @@ fn test_drain_leak() { drop(v); assert_eq!(unsafe { DROPS }, 7); } + +#[test] +fn test_binary_search() { + // Contiguous (front only) search: + let deque: VecDeque<_> = vec![1, 2, 3, 5, 6].into(); + assert!(deque.as_slices().1.is_empty()); + assert_eq!(deque.binary_search(&3), Ok(2)); + assert_eq!(deque.binary_search(&4), Err(3)); + + // Split search (both front & back non-empty): + let mut deque: VecDeque<_> = vec![5, 6].into(); + deque.push_front(3); + deque.push_front(2); + deque.push_front(1); + deque.push_back(10); + assert!(!deque.as_slices().0.is_empty()); + assert!(!deque.as_slices().1.is_empty()); + assert_eq!(deque.binary_search(&0), Err(0)); + assert_eq!(deque.binary_search(&1), Ok(0)); + assert_eq!(deque.binary_search(&5), Ok(3)); + assert_eq!(deque.binary_search(&7), Err(5)); + assert_eq!(deque.binary_search(&20), Err(6)); +} + +#[test] +fn test_binary_search_by() { + let deque: VecDeque<_> = vec![(1,), (2,), (3,), (5,), (6,)].into(); + + assert_eq!(deque.binary_search_by(|&(v,)| v.cmp(&3)), Ok(2)); + assert_eq!(deque.binary_search_by(|&(v,)| v.cmp(&4)), Err(3)); +} + +#[test] +fn test_binary_search_by_key() { + let deque: VecDeque<_> = vec![(1,), (2,), (3,), (5,), (6,)].into(); + + assert_eq!(deque.binary_search_by_key(&3, |&(v,)| v), Ok(2)); + assert_eq!(deque.binary_search_by_key(&4, |&(v,)| v), Err(3)); +} diff --git a/library/backtrace b/library/backtrace index 893fbb23688e9..8b8ea53b56f51 160000 --- a/library/backtrace +++ b/library/backtrace @@ -1 +1 @@ -Subproject commit 893fbb23688e98376e54c26b59432a2966a8cc96 +Subproject commit 8b8ea53b56f519dd7780defdd4254daaec892584 diff --git a/library/core/benches/ascii.rs b/library/core/benches/ascii.rs index 05dd7adff1fbb..bc59c378609f0 100644 --- a/library/core/benches/ascii.rs +++ b/library/core/benches/ascii.rs @@ -253,9 +253,9 @@ macro_rules! repeat { }; } -const SHORT: &'static str = "Alice's"; -const MEDIUM: &'static str = "Alice's Adventures in Wonderland"; -const LONG: &'static str = repeat!( +const SHORT: &str = "Alice's"; +const MEDIUM: &str = "Alice's Adventures in Wonderland"; +const LONG: &str = repeat!( r#" La Guida di Bragia, a Ballad Opera for the Marionette Theatre (around 1850) Alice's Adventures in Wonderland (1865) diff --git a/library/core/src/alloc/mod.rs b/library/core/src/alloc/mod.rs index 6d09b4f02635b..c61c19cc7d1d1 100644 --- a/library/core/src/alloc/mod.rs +++ b/library/core/src/alloc/mod.rs @@ -89,13 +89,11 @@ impl fmt::Display for AllocError { pub unsafe trait AllocRef { /// Attempts to allocate a block of memory. /// - /// On success, returns a [`NonNull<[u8]>`] meeting the size and alignment guarantees of `layout`. + /// On success, returns a [`NonNull<[u8]>`][NonNull] meeting the size and alignment guarantees of `layout`. /// /// The returned block may have a larger size than specified by `layout.size()`, and may or may /// not have its contents initialized. /// - /// [`NonNull<[u8]>`]: NonNull - /// /// # Errors /// /// Returning `Err` indicates that either memory is exhausted or `layout` does not meet @@ -146,7 +144,7 @@ pub unsafe trait AllocRef { /// Attempts to extend the memory block. 
/// - /// Returns a new [`NonNull<[u8]>`] containing a pointer and the actual size of the allocated + /// Returns a new [`NonNull<[u8]>`][NonNull] containing a pointer and the actual size of the allocated /// memory. The pointer is suitable for holding data described by `new_layout`. To accomplish /// this, the allocator may extend the allocation referenced by `ptr` to fit the new layout. /// @@ -158,8 +156,6 @@ pub unsafe trait AllocRef { /// If this method returns `Err`, then ownership of the memory block has not been transferred to /// this allocator, and the contents of the memory block are unaltered. /// - /// [`NonNull<[u8]>`]: NonNull - /// /// # Safety /// /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator. @@ -271,7 +267,7 @@ pub unsafe trait AllocRef { /// Attempts to shrink the memory block. /// - /// Returns a new [`NonNull<[u8]>`] containing a pointer and the actual size of the allocated + /// Returns a new [`NonNull<[u8]>`][NonNull] containing a pointer and the actual size of the allocated /// memory. The pointer is suitable for holding data described by `new_layout`. To accomplish /// this, the allocator may shrink the allocation referenced by `ptr` to fit the new layout. /// @@ -283,8 +279,6 @@ pub unsafe trait AllocRef { /// If this method returns `Err`, then ownership of the memory block has not been transferred to /// this allocator, and the contents of the memory block are unaltered. /// - /// [`NonNull<[u8]>`]: NonNull - /// /// # Safety /// /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator. diff --git a/library/core/src/convert/mod.rs b/library/core/src/convert/mod.rs index 2bfeb49b5fab7..3f7110b34cc67 100644 --- a/library/core/src/convert/mod.rs +++ b/library/core/src/convert/mod.rs @@ -134,6 +134,7 @@ pub const fn identity(x: T) -> T { /// want to accept all references that can be converted to [`&str`] as an argument. /// Since both [`String`] and [`&str`] implement `AsRef` we can accept both as input argument. /// +/// [`&str`]: primitive@str /// [`Option`]: Option /// [`Result`]: Result /// [`Borrow`]: crate::borrow::Borrow diff --git a/library/core/src/convert/num.rs b/library/core/src/convert/num.rs index 336c0b26bc7d7..2dd5e813d6fb7 100644 --- a/library/core/src/convert/num.rs +++ b/library/core/src/convert/num.rs @@ -485,3 +485,49 @@ nzint_impl_try_from_int! { i32, NonZeroI32, #[stable(feature = "nzint_try_from_i nzint_impl_try_from_int! { i64, NonZeroI64, #[stable(feature = "nzint_try_from_int_conv", since = "1.46.0")] } nzint_impl_try_from_int! { i128, NonZeroI128, #[stable(feature = "nzint_try_from_int_conv", since = "1.46.0")] } nzint_impl_try_from_int! { isize, NonZeroIsize, #[stable(feature = "nzint_try_from_int_conv", since = "1.46.0")] } + +macro_rules! nzint_impl_try_from_nzint { + ($From:ty => $To:ty, $doc: expr) => { + #[stable(feature = "nzint_try_from_nzint_conv", since = "1.49.0")] + #[doc = $doc] + impl TryFrom<$From> for $To { + type Error = TryFromIntError; + + #[inline] + fn try_from(value: $From) -> Result { + TryFrom::try_from(value.get()).map(|v| { + // SAFETY: $From is a NonZero type, so v is not zero. + unsafe { Self::new_unchecked(v) } + }) + } + } + }; + ($To:ty: $($From: ty),*) => {$( + nzint_impl_try_from_nzint!( + $From => $To, + concat!( + "Attempts to convert `", + stringify!($From), + "` to `", + stringify!($To), + "`.", + ) + ); + )*}; +} + +// Non-zero int -> non-zero unsigned int +nzint_impl_try_from_nzint! 
{ NonZeroU8: NonZeroI8, NonZeroU16, NonZeroI16, NonZeroU32, NonZeroI32, NonZeroU64, NonZeroI64, NonZeroU128, NonZeroI128, NonZeroUsize, NonZeroIsize } +nzint_impl_try_from_nzint! { NonZeroU16: NonZeroI8, NonZeroI16, NonZeroU32, NonZeroI32, NonZeroU64, NonZeroI64, NonZeroU128, NonZeroI128, NonZeroUsize, NonZeroIsize } +nzint_impl_try_from_nzint! { NonZeroU32: NonZeroI8, NonZeroI16, NonZeroI32, NonZeroU64, NonZeroI64, NonZeroU128, NonZeroI128, NonZeroUsize, NonZeroIsize } +nzint_impl_try_from_nzint! { NonZeroU64: NonZeroI8, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroU128, NonZeroI128, NonZeroUsize, NonZeroIsize } +nzint_impl_try_from_nzint! { NonZeroU128: NonZeroI8, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI128, NonZeroUsize, NonZeroIsize } +nzint_impl_try_from_nzint! { NonZeroUsize: NonZeroI8, NonZeroI16, NonZeroU32, NonZeroI32, NonZeroU64, NonZeroI64, NonZeroU128, NonZeroI128, NonZeroIsize } + +// Non-zero int -> non-zero signed int +nzint_impl_try_from_nzint! { NonZeroI8: NonZeroU8, NonZeroU16, NonZeroI16, NonZeroU32, NonZeroI32, NonZeroU64, NonZeroI64, NonZeroU128, NonZeroI128, NonZeroUsize, NonZeroIsize } +nzint_impl_try_from_nzint! { NonZeroI16: NonZeroU16, NonZeroU32, NonZeroI32, NonZeroU64, NonZeroI64, NonZeroU128, NonZeroI128, NonZeroUsize, NonZeroIsize } +nzint_impl_try_from_nzint! { NonZeroI32: NonZeroU32, NonZeroU64, NonZeroI64, NonZeroU128, NonZeroI128, NonZeroUsize, NonZeroIsize } +nzint_impl_try_from_nzint! { NonZeroI64: NonZeroU64, NonZeroU128, NonZeroI128, NonZeroUsize, NonZeroIsize } +nzint_impl_try_from_nzint! { NonZeroI128: NonZeroU128, NonZeroUsize, NonZeroIsize } +nzint_impl_try_from_nzint! { NonZeroIsize: NonZeroU16, NonZeroU32, NonZeroI32, NonZeroU64, NonZeroI64, NonZeroU128, NonZeroI128, NonZeroUsize } diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs index 2a7c105e807f4..dbc7921a62a67 100644 --- a/library/core/src/intrinsics.rs +++ b/library/core/src/intrinsics.rs @@ -1660,22 +1660,22 @@ extern "rust-intrinsic" { /// Returns (a + b) mod 2N, where N is the width of T in bits. /// /// The stabilized versions of this intrinsic are available on the integer - /// primitives via the `checked_add` method. For example, - /// [`u32::checked_add`] + /// primitives via the `wrapping_add` method. For example, + /// [`u32::wrapping_add`] #[rustc_const_stable(feature = "const_int_wrapping", since = "1.40.0")] pub fn wrapping_add(a: T, b: T) -> T; /// Returns (a - b) mod 2N, where N is the width of T in bits. /// /// The stabilized versions of this intrinsic are available on the integer - /// primitives via the `checked_sub` method. For example, - /// [`u32::checked_sub`] + /// primitives via the `wrapping_sub` method. For example, + /// [`u32::wrapping_sub`] #[rustc_const_stable(feature = "const_int_wrapping", since = "1.40.0")] pub fn wrapping_sub(a: T, b: T) -> T; /// Returns (a * b) mod 2N, where N is the width of T in bits. /// /// The stabilized versions of this intrinsic are available on the integer - /// primitives via the `checked_mul` method. For example, - /// [`u32::checked_mul`] + /// primitives via the `wrapping_mul` method. 
For example, + /// [`u32::wrapping_mul`] #[rustc_const_stable(feature = "const_int_wrapping", since = "1.40.0")] pub fn wrapping_mul(a: T, b: T) -> T; diff --git a/library/core/src/iter/adapters/chain.rs b/library/core/src/iter/adapters/chain.rs index 38fb74372db18..2e070d7122442 100644 --- a/library/core/src/iter/adapters/chain.rs +++ b/library/core/src/iter/adapters/chain.rs @@ -109,7 +109,7 @@ where acc = b.try_fold(acc, f)?; // we don't fuse the second iterator } - Try::from_ok(acc) + try { acc } } fn fold(self, mut acc: Acc, mut f: F) -> Acc @@ -292,7 +292,7 @@ where acc = a.try_rfold(acc, f)?; // we don't fuse the second iterator } - Try::from_ok(acc) + try { acc } } fn rfold(self, mut acc: Acc, mut f: F) -> Acc diff --git a/library/core/src/iter/adapters/flatten.rs b/library/core/src/iter/adapters/flatten.rs index ddb1aaebc1f3e..35adb4f69d854 100644 --- a/library/core/src/iter/adapters/flatten.rs +++ b/library/core/src/iter/adapters/flatten.rs @@ -317,7 +317,7 @@ where } self.backiter = None; - Try::from_ok(init) + try { init } } #[inline] @@ -397,7 +397,7 @@ where } self.frontiter = None; - Try::from_ok(init) + try { init } } #[inline] diff --git a/library/core/src/iter/adapters/fuse.rs b/library/core/src/iter/adapters/fuse.rs index a78da369c241b..60ac3524e6696 100644 --- a/library/core/src/iter/adapters/fuse.rs +++ b/library/core/src/iter/adapters/fuse.rs @@ -303,7 +303,7 @@ where acc = iter.try_fold(acc, fold)?; self.iter = None; } - Try::from_ok(acc) + try { acc } } #[inline] @@ -353,7 +353,7 @@ where acc = iter.try_rfold(acc, fold)?; self.iter = None; } - Try::from_ok(acc) + try { acc } } #[inline] diff --git a/library/core/src/iter/adapters/mod.rs b/library/core/src/iter/adapters/mod.rs index 1e520b62f77c4..9c8e639c2d802 100644 --- a/library/core/src/iter/adapters/mod.rs +++ b/library/core/src/iter/adapters/mod.rs @@ -579,7 +579,7 @@ where })?; if is_empty { - return Try::from_ok(acc); + return try { acc }; } loop { @@ -715,7 +715,7 @@ where if self.first_take { self.first_take = false; match self.iter.next() { - None => return Try::from_ok(acc), + None => return try { acc }, Some(x) => acc = f(acc, x)?, } } @@ -792,7 +792,7 @@ where } match self.next_back() { - None => Try::from_ok(init), + None => try { init }, Some(x) => { let acc = f(init, x)?; from_fn(nth_back(&mut self.iter, self.step)).try_fold(acc, f) @@ -1075,7 +1075,7 @@ fn filter_try_fold<'a, T, Acc, R: Try>( predicate: &'a mut impl FnMut(&T) -> bool, mut fold: impl FnMut(Acc, T) -> R + 'a, ) -> impl FnMut(Acc, T) -> R + 'a { - move |acc, item| if predicate(&item) { fold(acc, item) } else { R::from_ok(acc) } + move |acc, item| if predicate(&item) { fold(acc, item) } else { try { acc } } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1229,7 +1229,7 @@ fn filter_map_try_fold<'a, T, B, Acc, R: Try>( ) -> impl FnMut(Acc, T) -> R + 'a { move |acc, item| match f(item) { Some(x) => fold(acc, x), - None => R::from_ok(acc), + None => try { acc }, } } @@ -1280,7 +1280,7 @@ where #[inline] fn find( f: &mut impl FnMut(T) -> Option, - ) -> impl FnMut((), T) -> ControlFlow<(), B> + '_ { + ) -> impl FnMut((), T) -> ControlFlow + '_ { move |(), x| match f(x) { Some(x) => ControlFlow::Break(x), None => ControlFlow::CONTINUE, @@ -1660,7 +1660,7 @@ impl Iterator for Peekable { R: Try, { let acc = match self.peeked.take() { - Some(None) => return Try::from_ok(init), + Some(None) => return try { init }, Some(Some(v)) => f(init, v)?, None => init, }; @@ -1703,7 +1703,7 @@ where R: Try, { match self.peeked.take() { - Some(None) => 
Try::from_ok(init), + Some(None) => try { init }, Some(Some(v)) => match self.iter.try_rfold(init, &mut f).into_result() { Ok(acc) => f(acc, v), Err(e) => { @@ -1938,7 +1938,7 @@ where if !self.flag { match self.next() { Some(v) => init = fold(init, v)?, - None => return Try::from_ok(init), + None => return try { init }, } } self.iter.try_fold(init, fold) @@ -2059,19 +2059,19 @@ where flag: &'a mut bool, p: &'a mut impl FnMut(&T) -> bool, mut fold: impl FnMut(Acc, T) -> R + 'a, - ) -> impl FnMut(Acc, T) -> ControlFlow + 'a { + ) -> impl FnMut(Acc, T) -> ControlFlow + 'a { move |acc, x| { if p(&x) { ControlFlow::from_try(fold(acc, x)) } else { *flag = true; - ControlFlow::Break(Try::from_ok(acc)) + ControlFlow::Break(try { acc }) } } } if self.flag { - Try::from_ok(init) + try { init } } else { let flag = &mut self.flag; let p = &mut self.predicate; @@ -2180,7 +2180,7 @@ where let Self { iter, predicate } = self; iter.try_fold(init, |acc, x| match predicate(x) { Some(item) => ControlFlow::from_try(fold(acc, item)), - None => ControlFlow::Break(Try::from_ok(acc)), + None => ControlFlow::Break(try { acc }), }) .into_try() } @@ -2316,7 +2316,7 @@ where if n > 0 { // nth(n) skips n+1 if self.iter.nth(n - 1).is_none() { - return Try::from_ok(init); + return try { init }; } } self.iter.try_fold(init, fold) @@ -2372,7 +2372,7 @@ where fn check>( mut n: usize, mut fold: impl FnMut(Acc, T) -> R, - ) -> impl FnMut(Acc, T) -> ControlFlow { + ) -> impl FnMut(Acc, T) -> ControlFlow { move |acc, x| { n -= 1; let r = fold(acc, x); @@ -2381,11 +2381,7 @@ where } let n = self.len(); - if n == 0 { - Try::from_ok(init) - } else { - self.iter.try_rfold(init, check(n, fold)).into_try() - } + if n == 0 { try { init } } else { self.iter.try_rfold(init, check(n, fold)).into_try() } } fn rfold(mut self, init: Acc, fold: Fold) -> Acc @@ -2500,7 +2496,7 @@ where fn check<'a, T, Acc, R: Try>( n: &'a mut usize, mut fold: impl FnMut(Acc, T) -> R + 'a, - ) -> impl FnMut(Acc, T) -> ControlFlow + 'a { + ) -> impl FnMut(Acc, T) -> ControlFlow + 'a { move |acc, x| { *n -= 1; let r = fold(acc, x); @@ -2509,7 +2505,7 @@ where } if self.n == 0 { - Try::from_ok(init) + try { init } } else { let n = &mut self.n; self.iter.try_fold(init, check(n, fold)).into_try() @@ -2587,11 +2583,11 @@ where R: Try, { if self.n == 0 { - Try::from_ok(init) + try { init } } else { let len = self.iter.len(); if len > self.n && self.iter.nth_back(len - self.n - 1).is_none() { - Try::from_ok(init) + try { init } } else { self.iter.try_rfold(init, fold) } @@ -2685,9 +2681,9 @@ where state: &'a mut St, f: &'a mut impl FnMut(&mut St, T) -> Option, mut fold: impl FnMut(Acc, B) -> R + 'a, - ) -> impl FnMut(Acc, T) -> ControlFlow + 'a { + ) -> impl FnMut(Acc, T) -> ControlFlow + 'a { move |acc, x| match f(state, x) { - None => ControlFlow::Break(Try::from_ok(acc)), + None => ControlFlow::Break(try { acc }), Some(x) => ControlFlow::from_try(fold(acc, x)), } } @@ -2951,7 +2947,7 @@ where Ok(x) => ControlFlow::from_try(f(acc, x)), Err(e) => { *error = Err(e); - ControlFlow::Break(Try::from_ok(acc)) + ControlFlow::Break(try { acc }) } }) .into_try() diff --git a/library/core/src/iter/range.rs b/library/core/src/iter/range.rs index 9f34aee1947cd..cd8ab11cb8426 100644 --- a/library/core/src/iter/range.rs +++ b/library/core/src/iter/range.rs @@ -713,7 +713,7 @@ impl Iterator for ops::RangeInclusive { R: Try, { if self.is_empty() { - return Try::from_ok(init); + return try { init }; } let mut accum = init; @@ -731,7 +731,7 @@ impl Iterator for ops::RangeInclusive 
{ accum = f(accum, self.start.clone())?; } - Try::from_ok(accum) + try { accum } } #[inline] @@ -818,7 +818,7 @@ impl DoubleEndedIterator for ops::RangeInclusive { R: Try, { if self.is_empty() { - return Try::from_ok(init); + return try { init }; } let mut accum = init; @@ -836,7 +836,7 @@ impl DoubleEndedIterator for ops::RangeInclusive { accum = f(accum, self.start.clone())?; } - Try::from_ok(accum) + try { accum } } #[inline] diff --git a/library/core/src/iter/traits/double_ended.rs b/library/core/src/iter/traits/double_ended.rs index 16bee0e2eee18..6f8cb6b5a65b6 100644 --- a/library/core/src/iter/traits/double_ended.rs +++ b/library/core/src/iter/traits/double_ended.rs @@ -122,6 +122,9 @@ pub trait DoubleEndedIterator: Iterator { /// assert_eq!(iter.advance_back_by(0), Ok(())); /// assert_eq!(iter.advance_back_by(100), Err(1)); // only `&3` was skipped /// ``` + /// + /// [`Ok(())`]: Ok + /// [`Err(k)`]: Err #[inline] #[unstable(feature = "iter_advance_by", reason = "recently added", issue = "77404")] fn advance_back_by(&mut self, n: usize) -> Result<(), usize> { @@ -221,7 +224,7 @@ pub trait DoubleEndedIterator: Iterator { while let Some(x) = self.next_back() { accum = f(accum, x)?; } - Try::from_ok(accum) + try { accum } } /// An iterator method that reduces the iterator's elements to a single, @@ -336,9 +339,7 @@ pub trait DoubleEndedIterator: Iterator { P: FnMut(&Self::Item) -> bool, { #[inline] - fn check( - mut predicate: impl FnMut(&T) -> bool, - ) -> impl FnMut((), T) -> ControlFlow<(), T> { + fn check(mut predicate: impl FnMut(&T) -> bool) -> impl FnMut((), T) -> ControlFlow { move |(), x| { if predicate(&x) { ControlFlow::Break(x) } else { ControlFlow::CONTINUE } } diff --git a/library/core/src/iter/traits/iterator.rs b/library/core/src/iter/traits/iterator.rs index 813afcc0ec6e4..7fc60caec2a73 100644 --- a/library/core/src/iter/traits/iterator.rs +++ b/library/core/src/iter/traits/iterator.rs @@ -289,12 +289,12 @@ pub trait Iterator { /// This method will eagerly skip `n` elements by calling [`next`] up to `n` /// times until [`None`] is encountered. /// - /// `advance_by(n)` will return [`Ok(())`] if the iterator successfully advances by - /// `n` elements, or [`Err(k)`] if [`None`] is encountered, where `k` is the number + /// `advance_by(n)` will return [`Ok(())`][Ok] if the iterator successfully advances by + /// `n` elements, or [`Err(k)`][Err] if [`None`] is encountered, where `k` is the number /// of elements the iterator is advanced by before running out of elements (i.e. the /// length of the iterator). Note that `k` is always less than `n`. /// - /// Calling `advance_by(0)` does not consume any elements and always returns [`Ok(())`]. + /// Calling `advance_by(0)` does not consume any elements and always returns [`Ok(())`][Ok]. 
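The repeated `Try::from_ok(acc)` → `try { acc }` rewrites running through the iterator adapters above rely on the unstable `try_blocks` feature (enabled for `library/core` later in this diff): a `try` block wraps its final value in the success variant of whatever `Try` type the surrounding context expects. A self-contained sketch of the same idiom, using a made-up `sum_if_all_even` function and requiring a nightly compiler:

```rust
#![feature(try_blocks)]

// Illustrative only: sum the slice, bailing out with `None` on the first odd element.
fn sum_if_all_even(xs: &[i32]) -> Option<i32> {
    let mut acc = 0;
    for &x in xs {
        if x % 2 != 0 {
            return None;
        }
        acc += x;
    }
    // Equivalent to the old `Try::from_ok(acc)`: the block's value is wrapped
    // into the expected `Try` type, here producing `Some(acc)`.
    try { acc }
}

fn main() {
    assert_eq!(sum_if_all_even(&[2, 4, 6]), Some(12));
    assert_eq!(sum_if_all_even(&[2, 3]), None);
}
```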
/// /// [`next`]: Iterator::next /// @@ -1887,7 +1887,7 @@ pub trait Iterator { while let Some(x) = self.next() { accum = f(accum, x)?; } - Try::from_ok(accum) + try { accum } } /// An iterator method that applies a fallible function to each item in the @@ -2109,7 +2109,7 @@ pub trait Iterator { F: FnMut(Self::Item) -> bool, { #[inline] - fn check(mut f: impl FnMut(T) -> bool) -> impl FnMut((), T) -> ControlFlow<(), ()> { + fn check(mut f: impl FnMut(T) -> bool) -> impl FnMut((), T) -> ControlFlow<()> { move |(), x| { if f(x) { ControlFlow::CONTINUE } else { ControlFlow::BREAK } } @@ -2162,7 +2162,7 @@ pub trait Iterator { F: FnMut(Self::Item) -> bool, { #[inline] - fn check(mut f: impl FnMut(T) -> bool) -> impl FnMut((), T) -> ControlFlow<(), ()> { + fn check(mut f: impl FnMut(T) -> bool) -> impl FnMut((), T) -> ControlFlow<()> { move |(), x| { if f(x) { ControlFlow::BREAK } else { ControlFlow::CONTINUE } } @@ -2222,9 +2222,7 @@ pub trait Iterator { P: FnMut(&Self::Item) -> bool, { #[inline] - fn check( - mut predicate: impl FnMut(&T) -> bool, - ) -> impl FnMut((), T) -> ControlFlow<(), T> { + fn check(mut predicate: impl FnMut(&T) -> bool) -> impl FnMut((), T) -> ControlFlow { move |(), x| { if predicate(&x) { ControlFlow::Break(x) } else { ControlFlow::CONTINUE } } @@ -2255,9 +2253,7 @@ pub trait Iterator { F: FnMut(Self::Item) -> Option, { #[inline] - fn check( - mut f: impl FnMut(T) -> Option, - ) -> impl FnMut((), T) -> ControlFlow<(), B> { + fn check(mut f: impl FnMut(T) -> Option) -> impl FnMut((), T) -> ControlFlow { move |(), x| match f(x) { Some(x) => ControlFlow::Break(x), None => ControlFlow::CONTINUE, @@ -2296,7 +2292,7 @@ pub trait Iterator { R: Try, { #[inline] - fn check(mut f: F) -> impl FnMut((), T) -> ControlFlow<(), Result> + fn check(mut f: F) -> impl FnMut((), T) -> ControlFlow> where F: FnMut(&T) -> R, R: Try, diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index 97f27566eb0f4..6cb240d1730ed 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -63,6 +63,7 @@ #![warn(missing_debug_implementations)] #![allow(explicit_outlives_requirements)] #![allow(incomplete_features)] +#![cfg_attr(not(bootstrap), feature(rustc_allow_const_fn_unstable))] #![feature(allow_internal_unstable)] #![feature(arbitrary_self_types)] #![feature(asm)] @@ -129,9 +130,10 @@ #![feature(str_split_as_str)] #![feature(str_split_inclusive_as_str)] #![feature(transparent_unions)] +#![feature(try_blocks)] #![feature(unboxed_closures)] #![feature(unsized_locals)] -#![feature(untagged_unions)] +#![cfg_attr(bootstrap, feature(untagged_unions))] #![feature(unwind_attributes)] #![feature(variant_count)] #![feature(tbm_target_feature)] diff --git a/library/core/src/macros/mod.rs b/library/core/src/macros/mod.rs index 4c62c16f5063c..ac45e819cf67a 100644 --- a/library/core/src/macros/mod.rs +++ b/library/core/src/macros/mod.rs @@ -10,7 +10,7 @@ macro_rules! 
panic { $crate::panicking::panic($msg) ); ($msg:expr) => ( - $crate::panic!("{}", $crate::convert::identity::<&str>($msg)) + $crate::panicking::panic_str($msg) ); ($msg:expr,) => ( $crate::panic!($msg) diff --git a/library/core/src/num/int_macros.rs b/library/core/src/num/int_macros.rs index 33fa26675f610..295a876773c48 100644 --- a/library/core/src/num/int_macros.rs +++ b/library/core/src/num/int_macros.rs @@ -2045,7 +2045,8 @@ assert_eq!( #[rustc_const_stable(feature = "const_int_conversion", since = "1.44.0")] // SAFETY: const sound because integers are plain old datatypes so we can always // transmute them to arrays of bytes - #[allow_internal_unstable(const_fn_transmute)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn_transmute))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn_transmute))] #[inline] pub const fn to_ne_bytes(self) -> [u8; mem::size_of::()] { // SAFETY: integers are plain old datatypes so we can always transmute them to @@ -2193,7 +2194,8 @@ fn read_ne_", stringify!($SelfT), "(input: &mut &[u8]) -> ", stringify!($SelfT), #[rustc_const_stable(feature = "const_int_conversion", since = "1.44.0")] // SAFETY: const sound because integers are plain old datatypes so we can always // transmute to them - #[allow_internal_unstable(const_fn_transmute)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn_transmute))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn_transmute))] #[inline] pub const fn from_ne_bytes(bytes: [u8; mem::size_of::()]) -> Self { // SAFETY: integers are plain old datatypes so we can always transmute to them diff --git a/library/core/src/num/uint_macros.rs b/library/core/src/num/uint_macros.rs index 0de1cc6b1654a..bdea0ea3b08c0 100644 --- a/library/core/src/num/uint_macros.rs +++ b/library/core/src/num/uint_macros.rs @@ -1803,7 +1803,8 @@ assert_eq!( #[rustc_const_stable(feature = "const_int_conversion", since = "1.44.0")] // SAFETY: const sound because integers are plain old datatypes so we can always // transmute them to arrays of bytes - #[allow_internal_unstable(const_fn_transmute)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn_transmute))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn_transmute))] #[inline] pub const fn to_ne_bytes(self) -> [u8; mem::size_of::()] { // SAFETY: integers are plain old datatypes so we can always transmute them to @@ -1951,7 +1952,8 @@ fn read_ne_", stringify!($SelfT), "(input: &mut &[u8]) -> ", stringify!($SelfT), #[rustc_const_stable(feature = "const_int_conversion", since = "1.44.0")] // SAFETY: const sound because integers are plain old datatypes so we can always // transmute to them - #[allow_internal_unstable(const_fn_transmute)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn_transmute))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn_transmute))] #[inline] pub const fn from_ne_bytes(bytes: [u8; mem::size_of::()]) -> Self { // SAFETY: integers are plain old datatypes so we can always transmute to them diff --git a/library/core/src/ops/arith.rs b/library/core/src/ops/arith.rs index 19f86ced5007c..92090d8e6fca7 100644 --- a/library/core/src/ops/arith.rs +++ b/library/core/src/ops/arith.rs @@ -302,7 +302,7 @@ sub_impl! 
{ usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 } #[lang = "mul"] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_on_unimplemented( - message = "cannot multiply `{Rhs}` to `{Self}`", + message = "cannot multiply `{Self}` by `{Rhs}`", label = "no implementation for `{Self} * {Rhs}`" )] #[doc(alias = "*")] @@ -826,7 +826,7 @@ sub_assign_impl! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 } #[lang = "mul_assign"] #[stable(feature = "op_assign_traits", since = "1.8.0")] #[rustc_on_unimplemented( - message = "cannot multiply-assign `{Rhs}` to `{Self}`", + message = "cannot multiply-assign `{Self}` by `{Rhs}`", label = "no implementation for `{Self} *= {Rhs}`" )] #[doc(alias = "*")] diff --git a/library/core/src/ops/control_flow.rs b/library/core/src/ops/control_flow.rs index b0c7dc1a51875..5ede1ba8e2c10 100644 --- a/library/core/src/ops/control_flow.rs +++ b/library/core/src/ops/control_flow.rs @@ -3,7 +3,7 @@ use crate::ops::Try; /// Used to make try_fold closures more like normal loops #[unstable(feature = "control_flow_enum", reason = "new API", issue = "75744")] #[derive(Debug, Clone, Copy, PartialEq)] -pub enum ControlFlow { +pub enum ControlFlow { /// Continue in the loop, using the given value for the next iteration Continue(C), /// Exit the loop, yielding the given value @@ -11,7 +11,7 @@ pub enum ControlFlow { } #[unstable(feature = "control_flow_enum", reason = "new API", issue = "75744")] -impl Try for ControlFlow { +impl Try for ControlFlow { type Ok = C; type Error = B; #[inline] @@ -31,7 +31,21 @@ impl Try for ControlFlow { } } -impl ControlFlow { +impl ControlFlow { + /// Returns `true` if this is a `Break` variant. + #[inline] + #[unstable(feature = "control_flow_enum", reason = "new API", issue = "75744")] + pub fn is_break(&self) -> bool { + matches!(*self, ControlFlow::Break(_)) + } + + /// Returns `true` if this is a `Continue` variant. + #[inline] + #[unstable(feature = "control_flow_enum", reason = "new API", issue = "75744")] + pub fn is_continue(&self) -> bool { + matches!(*self, ControlFlow::Continue(_)) + } + /// Converts the `ControlFlow` into an `Option` which is `Some` if the /// `ControlFlow` was `Break` and `None` otherwise. #[inline] @@ -44,7 +58,7 @@ impl ControlFlow { } } -impl ControlFlow { +impl ControlFlow { /// Create a `ControlFlow` from any type implementing `Try`. #[unstable(feature = "control_flow_enum", reason = "new API", issue = "75744")] #[inline] @@ -66,7 +80,7 @@ impl ControlFlow { } } -impl ControlFlow<(), B> { +impl ControlFlow { /// It's frequently the case that there's no value needed with `Continue`, /// so this provides a way to avoid typing `(())`, if you prefer it. /// @@ -88,7 +102,7 @@ impl ControlFlow<(), B> { pub const CONTINUE: Self = ControlFlow::Continue(()); } -impl ControlFlow { +impl ControlFlow<(), C> { /// APIs like `try_for_each` don't need values with `Break`, /// so this provides a way to avoid typing `(())`, if you prefer it. /// diff --git a/library/core/src/ops/range.rs b/library/core/src/ops/range.rs index 2eaf7601e54de..1d67e65e51f5f 100644 --- a/library/core/src/ops/range.rs +++ b/library/core/src/ops/range.rs @@ -1,5 +1,9 @@ use crate::fmt; use crate::hash::Hash; +use crate::slice::index::{ + slice_end_index_len_fail, slice_end_index_overflow_fail, slice_index_order_fail, + slice_start_index_overflow_fail, +}; /// An unbounded range (`..`). /// @@ -19,7 +23,7 @@ use crate::hash::Hash; /// /// ```compile_fail,E0277 /// for i in .. { -/// // ... +/// // ... 
/// } /// ``` /// @@ -27,12 +31,12 @@ use crate::hash::Hash; /// /// ``` /// let arr = [0, 1, 2, 3, 4]; -/// assert_eq!(arr[ .. ], [0,1,2,3,4]); // RangeFull -/// assert_eq!(arr[ .. 3], [0,1,2 ]); -/// assert_eq!(arr[ ..=3], [0,1,2,3 ]); -/// assert_eq!(arr[1.. ], [ 1,2,3,4]); -/// assert_eq!(arr[1.. 3], [ 1,2 ]); -/// assert_eq!(arr[1..=3], [ 1,2,3 ]); +/// assert_eq!(arr[ .. ], [0, 1, 2, 3, 4]); // This is the `RangeFull` +/// assert_eq!(arr[ .. 3], [0, 1, 2 ]); +/// assert_eq!(arr[ ..=3], [0, 1, 2, 3 ]); +/// assert_eq!(arr[1.. ], [ 1, 2, 3, 4]); +/// assert_eq!(arr[1.. 3], [ 1, 2 ]); +/// assert_eq!(arr[1..=3], [ 1, 2, 3 ]); /// ``` /// /// [slicing index]: crate::slice::SliceIndex @@ -52,22 +56,26 @@ impl fmt::Debug for RangeFull { /// A (half-open) range bounded inclusively below and exclusively above /// (`start..end`). /// -/// The `Range` `start..end` contains all values with `x >= start` and -/// `x < end`. It is empty unless `start < end`. +/// The range `start..end` contains all values with `start <= x < end`. +/// It is empty if `start >= end`. /// /// # Examples /// +/// The `start..end` syntax is a `Range`: +/// /// ``` /// assert_eq!((3..5), std::ops::Range { start: 3, end: 5 }); /// assert_eq!(3 + 4 + 5, (3..6).sum()); +/// ``` /// +/// ``` /// let arr = [0, 1, 2, 3, 4]; -/// assert_eq!(arr[ .. ], [0,1,2,3,4]); -/// assert_eq!(arr[ .. 3], [0,1,2 ]); -/// assert_eq!(arr[ ..=3], [0,1,2,3 ]); -/// assert_eq!(arr[1.. ], [ 1,2,3,4]); -/// assert_eq!(arr[1.. 3], [ 1,2 ]); // Range -/// assert_eq!(arr[1..=3], [ 1,2,3 ]); +/// assert_eq!(arr[ .. ], [0, 1, 2, 3, 4]); +/// assert_eq!(arr[ .. 3], [0, 1, 2 ]); +/// assert_eq!(arr[ ..=3], [0, 1, 2, 3 ]); +/// assert_eq!(arr[1.. ], [ 1, 2, 3, 4]); +/// assert_eq!(arr[1.. 3], [ 1, 2 ]); // This is a `Range` +/// assert_eq!(arr[1..=3], [ 1, 2, 3 ]); /// ``` #[lang = "Range"] #[doc(alias = "..")] @@ -160,17 +168,21 @@ impl> Range { /// /// # Examples /// +/// The `start..` syntax is a `RangeFrom`: +/// /// ``` /// assert_eq!((2..), std::ops::RangeFrom { start: 2 }); /// assert_eq!(2 + 3 + 4, (2..).take(3).sum()); +/// ``` /// +/// ``` /// let arr = [0, 1, 2, 3, 4]; -/// assert_eq!(arr[ .. ], [0,1,2,3,4]); -/// assert_eq!(arr[ .. 3], [0,1,2 ]); -/// assert_eq!(arr[ ..=3], [0,1,2,3 ]); -/// assert_eq!(arr[1.. ], [ 1,2,3,4]); // RangeFrom -/// assert_eq!(arr[1.. 3], [ 1,2 ]); -/// assert_eq!(arr[1..=3], [ 1,2,3 ]); +/// assert_eq!(arr[ .. ], [0, 1, 2, 3, 4]); +/// assert_eq!(arr[ .. 3], [0, 1, 2 ]); +/// assert_eq!(arr[ ..=3], [0, 1, 2, 3 ]); +/// assert_eq!(arr[1.. ], [ 1, 2, 3, 4]); // This is a `RangeFrom` +/// assert_eq!(arr[1.. 3], [ 1, 2 ]); +/// assert_eq!(arr[1..=3], [ 1, 2, 3 ]); /// ``` #[lang = "RangeFrom"] #[doc(alias = "..")] @@ -244,12 +256,12 @@ impl> RangeFrom { /// /// ``` /// let arr = [0, 1, 2, 3, 4]; -/// assert_eq!(arr[ .. ], [0,1,2,3,4]); -/// assert_eq!(arr[ .. 3], [0,1,2 ]); // RangeTo -/// assert_eq!(arr[ ..=3], [0,1,2,3 ]); -/// assert_eq!(arr[1.. ], [ 1,2,3,4]); -/// assert_eq!(arr[1.. 3], [ 1,2 ]); -/// assert_eq!(arr[1..=3], [ 1,2,3 ]); +/// assert_eq!(arr[ .. ], [0, 1, 2, 3, 4]); +/// assert_eq!(arr[ .. 3], [0, 1, 2 ]); // This is a `RangeTo` +/// assert_eq!(arr[ ..=3], [0, 1, 2, 3 ]); +/// assert_eq!(arr[1.. ], [ 1, 2, 3, 4]); +/// assert_eq!(arr[1.. 
3], [ 1, 2 ]); +/// assert_eq!(arr[1..=3], [ 1, 2, 3 ]); /// ``` /// /// [slicing index]: crate::slice::SliceIndex @@ -310,17 +322,21 @@ impl> RangeTo { /// /// # Examples /// +/// The `start..=end` syntax is a `RangeInclusive`: +/// /// ``` /// assert_eq!((3..=5), std::ops::RangeInclusive::new(3, 5)); /// assert_eq!(3 + 4 + 5, (3..=5).sum()); +/// ``` /// +/// ``` /// let arr = [0, 1, 2, 3, 4]; -/// assert_eq!(arr[ .. ], [0,1,2,3,4]); -/// assert_eq!(arr[ .. 3], [0,1,2 ]); -/// assert_eq!(arr[ ..=3], [0,1,2,3 ]); -/// assert_eq!(arr[1.. ], [ 1,2,3,4]); -/// assert_eq!(arr[1.. 3], [ 1,2 ]); -/// assert_eq!(arr[1..=3], [ 1,2,3 ]); // RangeInclusive +/// assert_eq!(arr[ .. ], [0, 1, 2, 3, 4]); +/// assert_eq!(arr[ .. 3], [0, 1, 2 ]); +/// assert_eq!(arr[ ..=3], [0, 1, 2, 3 ]); +/// assert_eq!(arr[1.. ], [ 1, 2, 3, 4]); +/// assert_eq!(arr[1.. 3], [ 1, 2 ]); +/// assert_eq!(arr[1..=3], [ 1, 2, 3 ]); // This is a `RangeInclusive` /// ``` #[lang = "RangeInclusive"] #[doc(alias = "..=")] @@ -430,6 +446,20 @@ impl RangeInclusive { } } +impl RangeInclusive { + /// Converts to an exclusive `Range` for `SliceIndex` implementations. + /// The caller is responsible for dealing with `end == usize::MAX`. + #[inline] + pub(crate) fn into_slice_range(self) -> Range { + // If we're not exhausted, we want to simply slice `start..end + 1`. + // If we are exhausted, then slicing with `end + 1..end + 1` gives us an + // empty range that is still subject to bounds-checks for that endpoint. + let exclusive_end = self.end + 1; + let start = if self.exhausted { exclusive_end } else { self.start }; + start..exclusive_end + } +} + #[stable(feature = "inclusive_range", since = "1.26.0")] impl fmt::Debug for RangeInclusive { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -463,6 +493,16 @@ impl> RangeInclusive { /// assert!(!(0.0..=f32::NAN).contains(&0.0)); /// assert!(!(f32::NAN..=1.0).contains(&1.0)); /// ``` + /// + /// This method always returns `false` after iteration has finished: + /// + /// ``` + /// let mut r = 3..=5; + /// assert!(r.contains(&3) && r.contains(&5)); + /// for _ in r.by_ref() {} + /// // Precise field values are unspecified here + /// assert!(!r.contains(&3) && !r.contains(&5)); + /// ``` #[stable(feature = "range_contains", since = "1.35.0")] pub fn contains(&self, item: &U) -> bool where @@ -534,12 +574,12 @@ impl> RangeInclusive { /// /// ``` /// let arr = [0, 1, 2, 3, 4]; -/// assert_eq!(arr[ .. ], [0,1,2,3,4]); -/// assert_eq!(arr[ .. 3], [0,1,2 ]); -/// assert_eq!(arr[ ..=3], [0,1,2,3 ]); // RangeToInclusive -/// assert_eq!(arr[1.. ], [ 1,2,3,4]); -/// assert_eq!(arr[1.. 3], [ 1,2 ]); -/// assert_eq!(arr[1..=3], [ 1,2,3 ]); +/// assert_eq!(arr[ .. ], [0, 1, 2, 3, 4]); +/// assert_eq!(arr[ .. 3], [0, 1, 2 ]); +/// assert_eq!(arr[ ..=3], [0, 1, 2, 3 ]); // This is a `RangeToInclusive` +/// assert_eq!(arr[1.. ], [ 1, 2, 3, 4]); +/// assert_eq!(arr[1.. 3], [ 1, 2 ]); +/// assert_eq!(arr[1..=3], [ 1, 2, 3 ]); /// ``` /// /// [slicing index]: crate::slice::SliceIndex @@ -661,9 +701,9 @@ impl Bound<&T> { } } -#[stable(feature = "collections_range", since = "1.28.0")] /// `RangeBounds` is implemented by Rust's built-in range types, produced /// by range syntax like `..`, `a..`, `..b`, `..=c`, `d..e`, or `f..=g`. +#[stable(feature = "collections_range", since = "1.28.0")] pub trait RangeBounds { /// Start index bound. 
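One consequence of the new `RangeInclusive::into_slice_range` helper above, confirmed by the `rangeinclusive_exhausted` test added further down: indexing a slice with a `RangeInclusive` that has already been iterated to completion now yields an empty (but still bounds-checked) slice, where the old `(*self.start()..self.end() + 1)` desugaring would have re-included the final element. A small illustration:

```rust
fn main() {
    let data = vec![0, 1, 2, 3, 4, 5];

    let mut r = 0..=5;
    assert_eq!(&data[r.clone()], &[0, 1, 2, 3, 4, 5][..]);

    r.by_ref().count(); // exhaust the iterator
    // With `into_slice_range`, the exhausted range maps to `end + 1..end + 1`,
    // i.e. an empty region that is still subject to the usual bounds check.
    assert!(data[r].is_empty());
}
```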
/// @@ -701,6 +741,92 @@ pub trait RangeBounds { #[stable(feature = "collections_range", since = "1.28.0")] fn end_bound(&self) -> Bound<&T>; + /// Performs bounds-checking of this range. + /// + /// The returned [`Range`] is safe to pass to [`slice::get_unchecked`] and + /// [`slice::get_unchecked_mut`] for slices of the given length. + /// + /// [`slice::get_unchecked`]: ../../std/primitive.slice.html#method.get_unchecked + /// [`slice::get_unchecked_mut`]: ../../std/primitive.slice.html#method.get_unchecked_mut + /// + /// # Panics + /// + /// Panics if the range would be out of bounds. + /// + /// # Examples + /// + /// ``` + /// #![feature(range_bounds_assert_len)] + /// + /// use std::ops::RangeBounds; + /// + /// let v = [10, 40, 30]; + /// assert_eq!(1..2, (1..2).assert_len(v.len())); + /// assert_eq!(0..2, (..2).assert_len(v.len())); + /// assert_eq!(1..3, (1..).assert_len(v.len())); + /// ``` + /// + /// Panics when [`Index::index`] would panic: + /// + /// ```should_panic + /// #![feature(range_bounds_assert_len)] + /// + /// use std::ops::RangeBounds; + /// + /// (2..1).assert_len(3); + /// ``` + /// + /// ```should_panic + /// #![feature(range_bounds_assert_len)] + /// + /// use std::ops::RangeBounds; + /// + /// (1..4).assert_len(3); + /// ``` + /// + /// ```should_panic + /// #![feature(range_bounds_assert_len)] + /// + /// use std::ops::RangeBounds; + /// + /// (1..=usize::MAX).assert_len(3); + /// ``` + /// + /// [`Index::index`]: crate::ops::Index::index + #[track_caller] + #[unstable(feature = "range_bounds_assert_len", issue = "76393")] + fn assert_len(self, len: usize) -> Range + where + Self: RangeBounds, + { + let start: Bound<&usize> = self.start_bound(); + let start = match start { + Bound::Included(&start) => start, + Bound::Excluded(start) => { + start.checked_add(1).unwrap_or_else(|| slice_start_index_overflow_fail()) + } + Bound::Unbounded => 0, + }; + + let end: Bound<&usize> = self.end_bound(); + let end = match end { + Bound::Included(end) => { + end.checked_add(1).unwrap_or_else(|| slice_end_index_overflow_fail()) + } + Bound::Excluded(&end) => end, + Bound::Unbounded => len, + }; + + if start > end { + slice_index_order_fail(start, end); + } + if end > len { + slice_end_index_len_fail(end, len); + } + + Range { start, end } + } + /// Returns `true` if `item` is contained in the range. /// /// # Examples @@ -779,7 +905,13 @@ impl RangeBounds for RangeInclusive { Included(&self.start) } fn end_bound(&self) -> Bound<&T> { - Included(&self.end) + if self.exhausted { + // When the iterator is exhausted, we usually have start == end, + // but we want the range to appear empty, containing nothing. + Excluded(&self.end) + } else { + Included(&self.end) + } } } diff --git a/library/core/src/option.rs b/library/core/src/option.rs index 9cb20a0afdc6e..3daf26208b937 100644 --- a/library/core/src/option.rs +++ b/library/core/src/option.rs @@ -562,6 +562,36 @@ impl Option { } } + /// Inserts `value` into the option then returns a mutable reference to it. + /// + /// If the option already contains a value, the old value is dropped. 
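Stepping back to the new `RangeBounds::assert_len` above (which takes over from the free function `slice::check_range` removed later in this diff): it converts any range syntax into a concrete, bounds-checked `Range<usize>`, which is exactly how `copy_within` uses it further down. A hypothetical helper built on it; `sum_range` is made up for illustration, and the method name and `range_bounds_assert_len` feature gate are as they stand in this diff (being unstable, they may have changed since):

```rust
#![feature(range_bounds_assert_len)]

use std::ops::{Range, RangeBounds};

// Hypothetical helper (not part of this diff): sum a sub-range of a slice,
// accepting `..`, `a..`, `..=b`, `a..b`, ... uniformly.
fn sum_range(data: &[u64], r: impl RangeBounds<usize>) -> u64 {
    // Panics on reversed or out-of-bounds ranges, just like slice indexing.
    let Range { start, end } = r.assert_len(data.len());
    data[start..end].iter().sum()
}

fn main() {
    let v = [1, 2, 3, 4];
    assert_eq!(sum_range(&v, 1..=2), 5);
    assert_eq!(sum_range(&v, ..), 10);
}
```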
+ /// + /// # Example + /// + /// ``` + /// #![feature(option_insert)] + /// + /// let mut opt = None; + /// let val = opt.insert(1); + /// assert_eq!(*val, 1); + /// assert_eq!(opt.unwrap(), 1); + /// let val = opt.insert(2); + /// assert_eq!(*val, 2); + /// *val = 3; + /// assert_eq!(opt.unwrap(), 3); + /// ``` + #[inline] + #[unstable(feature = "option_insert", reason = "newly added", issue = "78271")] + pub fn insert(&mut self, value: T) -> &mut T { + *self = Some(value); + + match self { + Some(v) => v, + // SAFETY: the code above just filled the option + None => unsafe { hint::unreachable_unchecked() }, + } + } + ///////////////////////////////////////////////////////////////////////// // Iterator constructors ///////////////////////////////////////////////////////////////////////// @@ -687,6 +717,7 @@ impl Option { /// assert_eq!(Some(4).filter(is_even), Some(4)); /// ``` /// + /// [`Some(t)`]: Some #[inline] #[stable(feature = "option_filter", since = "1.27.0")] pub fn filter bool>(self, predicate: P) -> Self { @@ -791,7 +822,7 @@ impl Option { // Entry-like operations to insert if None and return a reference ///////////////////////////////////////////////////////////////////////// - /// Inserts `v` into the option if it is [`None`], then + /// Inserts `value` into the option if it is [`None`], then /// returns a mutable reference to the contained value. /// /// # Examples @@ -810,12 +841,12 @@ impl Option { /// ``` #[inline] #[stable(feature = "option_entry", since = "1.20.0")] - pub fn get_or_insert(&mut self, v: T) -> &mut T { - self.get_or_insert_with(|| v) + pub fn get_or_insert(&mut self, value: T) -> &mut T { + self.get_or_insert_with(|| value) } - /// Inserts a value computed from `f` into the option if it is [`None`], then - /// returns a mutable reference to the contained value. + /// Inserts a value computed from `f` into the option if it is [`None`], + /// then returns a mutable reference to the contained value. /// /// # Examples /// @@ -838,8 +869,8 @@ impl Option { *self = Some(f()); } - match *self { - Some(ref mut v) => v, + match self { + Some(v) => v, // SAFETY: a `None` variant for `self` would have been replaced by a `Some` // variant in the code above. None => unsafe { hint::unreachable_unchecked() }, diff --git a/library/core/src/panicking.rs b/library/core/src/panicking.rs index 15fd638bef8ad..09dd19b8f5f93 100644 --- a/library/core/src/panicking.rs +++ b/library/core/src/panicking.rs @@ -50,6 +50,13 @@ pub fn panic(expr: &'static str) -> ! { panic_fmt(fmt::Arguments::new_v1(&[expr], &[])); } +#[inline] +#[track_caller] +#[cfg_attr(not(bootstrap), lang = "panic_str")] // needed for const-evaluated panics +pub fn panic_str(expr: &str) -> ! { + panic_fmt(format_args!("{}", expr)); +} + #[cold] #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))] #[track_caller] diff --git a/library/core/src/pin.rs b/library/core/src/pin.rs index 9f0284d5d9542..0b9c733f7fead 100644 --- a/library/core/src/pin.rs +++ b/library/core/src/pin.rs @@ -781,6 +781,34 @@ impl<'a, T: ?Sized> Pin<&'a mut T> { } } +impl Pin<&'static T> { + /// Get a pinned reference from a static reference. + /// + /// This is safe, because `T` is borrowed for the `'static` lifetime, which + /// never ends. 
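On the new `Option::insert` above: unlike the long-stable `get_or_insert`, it always overwrites an existing value (dropping the old one) and hands back a mutable reference to the new one. A short contrast, assuming a nightly toolchain with the `option_insert` feature introduced here:

```rust
#![feature(option_insert)]

fn main() {
    let mut opt: Option<i32> = Some(1);

    // `get_or_insert` only fills in a `None`, so the existing 1 survives.
    assert_eq!(*opt.get_or_insert(5), 1);

    // The new `insert` unconditionally replaces the value and returns `&mut T`.
    let v = opt.insert(5);
    *v += 1;
    assert_eq!(opt, Some(6));
}
```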
+ #[unstable(feature = "pin_static_ref", issue = "78186")] + #[rustc_const_unstable(feature = "const_pin", issue = "76654")] + pub const fn static_ref(r: &'static T) -> Pin<&'static T> { + // SAFETY: The 'static borrow guarantees the data will not be + // moved/invalidated until it gets dropped (which is never). + unsafe { Pin::new_unchecked(r) } + } +} + +impl Pin<&'static mut T> { + /// Get a pinned mutable reference from a static mutable reference. + /// + /// This is safe, because `T` is borrowed for the `'static` lifetime, which + /// never ends. + #[unstable(feature = "pin_static_ref", issue = "78186")] + #[rustc_const_unstable(feature = "const_pin", issue = "76654")] + pub const fn static_mut(r: &'static mut T) -> Pin<&'static mut T> { + // SAFETY: The 'static borrow guarantees the data will not be + // moved/invalidated until it gets dropped (which is never). + unsafe { Pin::new_unchecked(r) } + } +} + #[stable(feature = "pin", since = "1.33.0")] impl Deref for Pin
{ type Target = P::Target; diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs index 92c4f2ccfe8a0..bca3be56ba5f5 100644 --- a/library/core/src/ptr/mod.rs +++ b/library/core/src/ptr/mod.rs @@ -229,6 +229,16 @@ pub(crate) struct FatPtr { pub(crate) len: usize, } +// Manual impl needed to avoid `T: Clone` bound. +impl Clone for FatPtr { + fn clone(&self) -> Self { + *self + } +} + +// Manual impl needed to avoid `T: Copy` bound. +impl Copy for FatPtr {} + /// Forms a raw slice from a pointer and a length. /// /// The `len` argument is the number of **elements**, not the number of bytes. diff --git a/library/core/src/slice/index.rs b/library/core/src/slice/index.rs index 16fcb6231dc09..660c8a2da5da0 100644 --- a/library/core/src/slice/index.rs +++ b/library/core/src/slice/index.rs @@ -1,6 +1,6 @@ //! Indexing implementations for `[T]`. -use crate::ops::{self, Bound, Range, RangeBounds}; +use crate::ops; use crate::ptr; #[stable(feature = "rust1", since = "1.0.0")] @@ -37,104 +37,31 @@ fn slice_start_index_len_fail(index: usize, len: usize) -> ! { #[inline(never)] #[cold] #[track_caller] -pub(super) fn slice_end_index_len_fail(index: usize, len: usize) -> ! { +pub(crate) fn slice_end_index_len_fail(index: usize, len: usize) -> ! { panic!("range end index {} out of range for slice of length {}", index, len); } #[inline(never)] #[cold] #[track_caller] -pub(super) fn slice_index_order_fail(index: usize, end: usize) -> ! { +pub(crate) fn slice_index_order_fail(index: usize, end: usize) -> ! { panic!("slice index starts at {} but ends at {}", index, end); } #[inline(never)] #[cold] #[track_caller] -pub(super) fn slice_start_index_overflow_fail() -> ! { +pub(crate) fn slice_start_index_overflow_fail() -> ! { panic!("attempted to index slice from after maximum usize"); } #[inline(never)] #[cold] #[track_caller] -pub(super) fn slice_end_index_overflow_fail() -> ! { +pub(crate) fn slice_end_index_overflow_fail() -> ! { panic!("attempted to index slice up to maximum usize"); } -/// Performs bounds-checking of the given range. -/// The returned [`Range`] is safe to pass to [`get_unchecked`] and [`get_unchecked_mut`] -/// for slices of the given length. -/// -/// [`get_unchecked`]: ../../std/primitive.slice.html#method.get_unchecked -/// [`get_unchecked_mut`]: ../../std/primitive.slice.html#method.get_unchecked_mut -/// -/// # Panics -/// -/// Panics if the range is out of bounds. 
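Regarding the manual `Clone`/`Copy` impls added for `FatPtr` in `library/core/src/ptr/mod.rs` above: `#[derive]` would attach `T: Clone` / `T: Copy` bounds to the impls even though only the pointer and length are duplicated, so they are written by hand. A sketch of the same pattern with a made-up `RawSlice` type:

```rust
use std::marker::PhantomData;

// Illustrative stand-in (not the real `FatPtr`): pointer-like data that
// should be `Copy` for *any* `T`, even a non-`Copy` one.
struct RawSlice<T> {
    ptr: *const T,
    len: usize,
    _marker: PhantomData<T>,
}

// `#[derive(Clone, Copy)]` would require `T: Clone` / `T: Copy`; the manual
// impls avoid that bound, since only the pointer and length are copied.
impl<T> Clone for RawSlice<T> {
    fn clone(&self) -> Self {
        *self
    }
}
impl<T> Copy for RawSlice<T> {}

fn main() {
    let v = vec![String::from("not Copy")];
    let raw = RawSlice { ptr: v.as_ptr(), len: v.len(), _marker: PhantomData };
    let copied = raw;        // plain bitwise copy
    let _still_usable = raw; // `raw` was not moved: RawSlice<String> is Copy
    assert_eq!(copied.len, 1);
}
```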
-/// -/// # Examples -/// -/// ``` -/// #![feature(slice_check_range)] -/// use std::slice; -/// -/// let v = [10, 40, 30]; -/// assert_eq!(1..2, slice::check_range(v.len(), 1..2)); -/// assert_eq!(0..2, slice::check_range(v.len(), ..2)); -/// assert_eq!(1..3, slice::check_range(v.len(), 1..)); -/// ``` -/// -/// Panics when [`Index::index`] would panic: -/// -/// ```should_panic -/// #![feature(slice_check_range)] -/// -/// std::slice::check_range(3, 2..1); -/// ``` -/// -/// ```should_panic -/// #![feature(slice_check_range)] -/// -/// std::slice::check_range(3, 1..4); -/// ``` -/// -/// ```should_panic -/// #![feature(slice_check_range)] -/// -/// std::slice::check_range(3, 1..=usize::MAX); -/// ``` -/// -/// [`Index::index`]: ops::Index::index -#[track_caller] -#[unstable(feature = "slice_check_range", issue = "76393")] -pub fn check_range>(len: usize, range: R) -> Range { - let start = match range.start_bound() { - Bound::Included(&start) => start, - Bound::Excluded(start) => { - start.checked_add(1).unwrap_or_else(|| slice_start_index_overflow_fail()) - } - Bound::Unbounded => 0, - }; - - let end = match range.end_bound() { - Bound::Included(end) => { - end.checked_add(1).unwrap_or_else(|| slice_end_index_overflow_fail()) - } - Bound::Excluded(&end) => end, - Bound::Unbounded => len, - }; - - if start > end { - slice_index_order_fail(start, end); - } - if end > len { - slice_end_index_len_fail(end, len); - } - - Range { start, end } -} - mod private_slice_index { use super::ops; #[stable(feature = "slice_get_slice", since = "1.28.0")] @@ -449,28 +376,24 @@ unsafe impl SliceIndex<[T]> for ops::RangeInclusive { #[inline] fn get(self, slice: &[T]) -> Option<&[T]> { - if *self.end() == usize::MAX { None } else { (*self.start()..self.end() + 1).get(slice) } + if *self.end() == usize::MAX { None } else { self.into_slice_range().get(slice) } } #[inline] fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { - if *self.end() == usize::MAX { - None - } else { - (*self.start()..self.end() + 1).get_mut(slice) - } + if *self.end() == usize::MAX { None } else { self.into_slice_range().get_mut(slice) } } #[inline] unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] { // SAFETY: the caller has to uphold the safety contract for `get_unchecked`. - unsafe { (*self.start()..self.end() + 1).get_unchecked(slice) } + unsafe { self.into_slice_range().get_unchecked(slice) } } #[inline] unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] { // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`. 
- unsafe { (*self.start()..self.end() + 1).get_unchecked_mut(slice) } + unsafe { self.into_slice_range().get_unchecked_mut(slice) } } #[inline] @@ -478,7 +401,7 @@ unsafe impl SliceIndex<[T]> for ops::RangeInclusive { if *self.end() == usize::MAX { slice_end_index_overflow_fail(); } - (*self.start()..self.end() + 1).index(slice) + self.into_slice_range().index(slice) } #[inline] @@ -486,7 +409,7 @@ unsafe impl SliceIndex<[T]> for ops::RangeInclusive { if *self.end() == usize::MAX { slice_end_index_overflow_fail(); } - (*self.start()..self.end() + 1).index_mut(slice) + self.into_slice_range().index_mut(slice) } } diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs index 349c2cde274dd..b6fd0c4986b64 100644 --- a/library/core/src/slice/mod.rs +++ b/library/core/src/slice/mod.rs @@ -29,7 +29,7 @@ pub mod memchr; mod ascii; mod cmp; -mod index; +pub(crate) mod index; mod iter; mod raw; mod rotate; @@ -73,9 +73,6 @@ pub use sort::heapsort; #[stable(feature = "slice_get_slice", since = "1.28.0")] pub use index::SliceIndex; -#[unstable(feature = "slice_check_range", issue = "76393")] -pub use index::check_range; - #[lang = "slice"] #[cfg(not(test))] impl [T] { @@ -91,7 +88,8 @@ impl [T] { #[rustc_const_stable(feature = "const_slice_len", since = "1.32.0")] #[inline] // SAFETY: const sound because we transmute out the length field as a usize (which it must be) - #[allow_internal_unstable(const_fn_union)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn_union))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn_union))] pub const fn len(&self) -> usize { // SAFETY: this is safe because `&[T]` and `FatPtr` have the same layout. // Only `std` can make this guarantee. @@ -1948,10 +1946,10 @@ impl [T] { /// /// The comparator function must define a total ordering for the elements in the slice. If /// the ordering is not total, the order of the elements is unspecified. An order is a - /// total order if it is (for all a, b and c): + /// total order if it is (for all `a`, `b` and `c`): /// - /// * total and antisymmetric: exactly one of a < b, a == b or a > b is true; and - /// * transitive, a < b and b < c implies a < c. The same must hold for both == and >. + /// * total and antisymmetric: exactly one of `a < b`, `a == b` or `a > b` is true, and + /// * transitive, `a < b` and `b < c` implies `a < c`. The same must hold for both `==` and `>`. /// /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`. 
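The comparator docs reformatted just above (for `sort_unstable_by` in `library/core/src/slice/mod.rs`) stress that the closure must define a total order, and that `partial_cmp` is acceptable for floats once `NaN` is ruled out. For concreteness, that advice looks like this on plain stable Rust:

```rust
fn main() {
    // `f64` is only `PartialOrd`, so supply a comparator that is total on
    // this NaN-free data, as the doc text above describes.
    let mut v = [2.5_f64, 1.0, 3.25, -0.5];
    v.sort_unstable_by(|a, b| a.partial_cmp(b).unwrap());
    assert_eq!(v, [-0.5, 1.0, 2.5, 3.25]);
}
```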
@@ -2714,7 +2712,7 @@ impl [T] { where T: Copy, { - let Range { start: src_start, end: src_end } = check_range(self.len(), src); + let Range { start: src_start, end: src_end } = src.assert_len(self.len()); let count = src_end - src_start; assert!(dest <= self.len() - count, "dest is out of bounds"); // SAFETY: the conditions for `ptr::copy` have all been checked above, diff --git a/library/core/src/str/converts.rs b/library/core/src/str/converts.rs index de2a93f735032..952d0598a7c5b 100644 --- a/library/core/src/str/converts.rs +++ b/library/core/src/str/converts.rs @@ -157,7 +157,8 @@ pub fn from_utf8_mut(v: &mut [u8]) -> Result<&mut str, Utf8Error> { #[inline] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature = "const_str_from_utf8_unchecked", issue = "75196")] -#[allow_internal_unstable(const_fn_transmute)] +#[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn_transmute))] +#[cfg_attr(bootstrap, allow_internal_unstable(const_fn_transmute))] pub const unsafe fn from_utf8_unchecked(v: &[u8]) -> &str { // SAFETY: the caller must guarantee that the bytes `v` are valid UTF-8. // Also relies on `&str` and `&[u8]` having the same layout. diff --git a/library/core/src/str/mod.rs b/library/core/src/str/mod.rs index 3e18a4e70627d..ee9c09fe186c7 100644 --- a/library/core/src/str/mod.rs +++ b/library/core/src/str/mod.rs @@ -219,7 +219,8 @@ impl str { #[rustc_const_stable(feature = "str_as_bytes", since = "1.32.0")] #[inline(always)] #[allow(unused_attributes)] - #[allow_internal_unstable(const_fn_transmute)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn_transmute))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn_transmute))] pub const fn as_bytes(&self) -> &[u8] { // SAFETY: const sound because we transmute two types with the same layout unsafe { mem::transmute(self) } diff --git a/library/core/src/str/traits.rs b/library/core/src/str/traits.rs index 4f8aa246e5232..9cfb5a8998773 100644 --- a/library/core/src/str/traits.rs +++ b/library/core/src/str/traits.rs @@ -89,7 +89,7 @@ fn str_index_overflow_fail() -> ! { /// self`. Equivalent to `&self[0 .. len]` or `&mut self[0 .. len]`. Unlike /// other indexing operations, this can never panic. /// -/// This operation is `O(1)`. +/// This operation is *O*(1). /// /// Prior to 1.20.0, these indexing operations were still supported by /// direct implementation of `Index` and `IndexMut`. @@ -130,7 +130,7 @@ unsafe impl SliceIndex for ops::RangeFull { /// Returns a slice of the given string from the byte range /// [`begin`, `end`). /// -/// This operation is `O(1)`. +/// This operation is *O*(1). /// /// Prior to 1.20.0, these indexing operations were still supported by /// direct implementation of `Index` and `IndexMut`. @@ -237,7 +237,7 @@ unsafe impl SliceIndex for ops::Range { /// Returns a slice of the given string from the byte range [`0`, `end`). /// Equivalent to `&self[0 .. end]` or `&mut self[0 .. end]`. /// -/// This operation is `O(1)`. +/// This operation is *O*(1). /// /// Prior to 1.20.0, these indexing operations were still supported by /// direct implementation of `Index` and `IndexMut`. @@ -308,7 +308,7 @@ unsafe impl SliceIndex for ops::RangeTo { /// `len`). Equivalent to `&self[begin .. len]` or `&mut self[begin .. /// len]`. /// -/// This operation is `O(1)`. +/// This operation is *O*(1). /// /// Prior to 1.20.0, these indexing operations were still supported by /// direct implementation of `Index` and `IndexMut`. 
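The `copy_within` hunk above switches its range check from `check_range(self.len(), src)` to `src.assert_len(self.len())`; the method itself is stable, so its observable behaviour is unchanged: the source range is validated, then the elements are moved with a `ptr::copy` (memmove-style, overlap-safe). For reference:

```rust
fn main() {
    let mut bytes = *b"Hello, World!";
    // Any `RangeBounds<usize>` is accepted for the source; the range is
    // bounds-checked (now via `assert_len` in this diff) before the raw copy.
    bytes.copy_within(1..5, 8);
    assert_eq!(&bytes, b"Hello, Wello!");
}
```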
@@ -385,7 +385,7 @@ unsafe impl SliceIndex for ops::RangeFrom { /// self[begin .. end + 1]`, except if `end` has the maximum value for /// `usize`. /// -/// This operation is `O(1)`. +/// This operation is *O*(1). /// /// # Panics /// @@ -398,39 +398,35 @@ unsafe impl SliceIndex for ops::RangeInclusive { type Output = str; #[inline] fn get(self, slice: &str) -> Option<&Self::Output> { - if *self.end() == usize::MAX { None } else { (*self.start()..self.end() + 1).get(slice) } + if *self.end() == usize::MAX { None } else { self.into_slice_range().get(slice) } } #[inline] fn get_mut(self, slice: &mut str) -> Option<&mut Self::Output> { - if *self.end() == usize::MAX { - None - } else { - (*self.start()..self.end() + 1).get_mut(slice) - } + if *self.end() == usize::MAX { None } else { self.into_slice_range().get_mut(slice) } } #[inline] unsafe fn get_unchecked(self, slice: *const str) -> *const Self::Output { // SAFETY: the caller must uphold the safety contract for `get_unchecked`. - unsafe { (*self.start()..self.end() + 1).get_unchecked(slice) } + unsafe { self.into_slice_range().get_unchecked(slice) } } #[inline] unsafe fn get_unchecked_mut(self, slice: *mut str) -> *mut Self::Output { // SAFETY: the caller must uphold the safety contract for `get_unchecked_mut`. - unsafe { (*self.start()..self.end() + 1).get_unchecked_mut(slice) } + unsafe { self.into_slice_range().get_unchecked_mut(slice) } } #[inline] fn index(self, slice: &str) -> &Self::Output { if *self.end() == usize::MAX { str_index_overflow_fail(); } - (*self.start()..self.end() + 1).index(slice) + self.into_slice_range().index(slice) } #[inline] fn index_mut(self, slice: &mut str) -> &mut Self::Output { if *self.end() == usize::MAX { str_index_overflow_fail(); } - (*self.start()..self.end() + 1).index_mut(slice) + self.into_slice_range().index_mut(slice) } } @@ -441,7 +437,7 @@ unsafe impl SliceIndex for ops::RangeInclusive { /// Equivalent to `&self [0 .. end + 1]`, except if `end` has the maximum /// value for `usize`. /// -/// This operation is `O(1)`. +/// This operation is *O*(1). /// /// # Panics /// diff --git a/library/core/src/task/wake.rs b/library/core/src/task/wake.rs index 8cca9dc904293..d3c0d9b784139 100644 --- a/library/core/src/task/wake.rs +++ b/library/core/src/task/wake.rs @@ -130,7 +130,8 @@ impl RawWakerVTable { #[rustc_promotable] #[stable(feature = "futures_api", since = "1.36.0")] #[rustc_const_stable(feature = "futures_api", since = "1.36.0")] - #[allow_internal_unstable(const_fn_fn_ptr_basics)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn_fn_ptr_basics))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn_fn_ptr_basics))] pub const fn new( clone: unsafe fn(*const ()) -> RawWaker, wake: unsafe fn(*const ()), diff --git a/library/core/tests/slice.rs b/library/core/tests/slice.rs index ac5c9353ccb46..9ccc5a08dcbea 100644 --- a/library/core/tests/slice.rs +++ b/library/core/tests/slice.rs @@ -1341,6 +1341,14 @@ mod slice_index { message: "out of range"; } + in mod rangeinclusive_len { + data: [0, 1, 2, 3, 4, 5]; + + good: data[0..=5] == [0, 1, 2, 3, 4, 5]; + bad: data[0..=6]; + message: "out of range"; + } + in mod range_len_len { data: [0, 1, 2, 3, 4, 5]; @@ -1358,6 +1366,28 @@ mod slice_index { } } + panic_cases! 
{ + in mod rangeinclusive_exhausted { + data: [0, 1, 2, 3, 4, 5]; + + good: data[0..=5] == [0, 1, 2, 3, 4, 5]; + good: data[{ + let mut iter = 0..=5; + iter.by_ref().count(); // exhaust it + iter + }] == []; + + // 0..=6 is out of range before exhaustion, so it + // stands to reason that it still would be after. + bad: data[{ + let mut iter = 0..=6; + iter.by_ref().count(); // exhaust it + iter + }]; + message: "out of range"; + } + } + panic_cases! { in mod range_neg_width { data: [0, 1, 2, 3, 4, 5]; diff --git a/library/proc_macro/src/bridge/client.rs b/library/proc_macro/src/bridge/client.rs index ba3d4c075e11f..dfe5df965cfac 100644 --- a/library/proc_macro/src/bridge/client.rs +++ b/library/proc_macro/src/bridge/client.rs @@ -401,7 +401,8 @@ fn run_client DecodeMut<'a, 's, ()>, R: Encode<()>>( } impl Client crate::TokenStream> { - #[allow_internal_unstable(const_fn)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn))] pub const fn expand1(f: fn(crate::TokenStream) -> crate::TokenStream) -> Self { extern "C" fn run( bridge: Bridge<'_>, @@ -414,7 +415,8 @@ impl Client crate::TokenStream> { } impl Client crate::TokenStream> { - #[allow_internal_unstable(const_fn)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn))] pub const fn expand2( f: fn(crate::TokenStream, crate::TokenStream) -> crate::TokenStream, ) -> Self { @@ -459,7 +461,8 @@ impl ProcMacro { } } - #[allow_internal_unstable(const_fn)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn))] pub const fn custom_derive( trait_name: &'static str, attributes: &'static [&'static str], @@ -468,7 +471,8 @@ impl ProcMacro { ProcMacro::CustomDerive { trait_name, attributes, client: Client::expand1(expand) } } - #[allow_internal_unstable(const_fn)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn))] pub const fn attr( name: &'static str, expand: fn(crate::TokenStream, crate::TokenStream) -> crate::TokenStream, @@ -476,7 +480,8 @@ impl ProcMacro { ProcMacro::Attr { name, client: Client::expand2(expand) } } - #[allow_internal_unstable(const_fn)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn))] pub const fn bang( name: &'static str, expand: fn(crate::TokenStream) -> crate::TokenStream, diff --git a/library/proc_macro/src/bridge/scoped_cell.rs b/library/proc_macro/src/bridge/scoped_cell.rs index daa577f74bac3..e7c32b10384d4 100644 --- a/library/proc_macro/src/bridge/scoped_cell.rs +++ b/library/proc_macro/src/bridge/scoped_cell.rs @@ -35,7 +35,8 @@ impl<'a, 'b, T: LambdaL> DerefMut for RefMutL<'a, 'b, T> { pub struct ScopedCell(Cell<>::Out>); impl ScopedCell { - #[allow_internal_unstable(const_fn)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn))] pub const fn new(value: >::Out) -> Self { ScopedCell(Cell::new(value)) } diff --git a/library/proc_macro/src/lib.rs b/library/proc_macro/src/lib.rs index 139b3591206e7..5a4b69cf6fc1b 100644 --- a/library/proc_macro/src/lib.rs +++ b/library/proc_macro/src/lib.rs @@ -18,6 +18,7 @@ test(no_crate_inject, attr(deny(warnings))), test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))) )] +#![cfg_attr(not(bootstrap), 
feature(rustc_allow_const_fn_unstable))] #![feature(nll)] #![feature(staged_api)] #![feature(const_fn)] diff --git a/library/std/Cargo.toml b/library/std/Cargo.toml index c08828bc0cde9..5925aa87a3f13 100644 --- a/library/std/Cargo.toml +++ b/library/std/Cargo.toml @@ -24,7 +24,7 @@ hashbrown = { version = "0.9.0", default-features = false, features = ['rustc-de # Dependencies of the `backtrace` crate addr2line = { version = "0.13.0", optional = true, default-features = false } -rustc-demangle = { version = "0.1.4", features = ['rustc-dep-of-std'] } +rustc-demangle = { version = "0.1.18", features = ['rustc-dep-of-std'] } miniz_oxide = { version = "0.4.0", optional = true, default-features = false } [dependencies.object] version = "0.20" @@ -42,7 +42,7 @@ dlmalloc = { version = "0.1", features = ['rustc-dep-of-std'] } fortanix-sgx-abi = { version = "0.3.2", features = ['rustc-dep-of-std'] } [target.'cfg(all(any(target_arch = "x86_64", target_arch = "aarch64"), target_os = "hermit"))'.dependencies] -hermit-abi = { version = "0.1.15", features = ['rustc-dep-of-std'] } +hermit-abi = { version = "0.1.17", features = ['rustc-dep-of-std'] } [target.wasm32-wasi.dependencies] wasi = { version = "0.9.0", features = ['rustc-dep-of-std'], default-features = false } diff --git a/library/std/src/ffi/c_str.rs b/library/std/src/ffi/c_str.rs index 13021738af139..6df4eb992594f 100644 --- a/library/std/src/ffi/c_str.rs +++ b/library/std/src/ffi/c_str.rs @@ -1383,7 +1383,8 @@ impl CStr { /// [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD] and return a /// [`Cow`]`::`[`Owned`]`(`[`String`]`)` with the result. /// - /// [`str`]: prim@str + /// [`str`]: primitive@str + /// [`&str`]: primitive@str /// [`Borrowed`]: Cow::Borrowed /// [`Owned`]: Cow::Owned /// [U+FFFD]: crate::char::REPLACEMENT_CHARACTER diff --git a/library/std/src/fs/tests.rs b/library/std/src/fs/tests.rs index 65a29076fefa8..38fd470a1c322 100644 --- a/library/std/src/fs/tests.rs +++ b/library/std/src/fs/tests.rs @@ -73,10 +73,9 @@ pub fn got_symlink_permission(tmpdir: &TempDir) -> bool { let link = tmpdir.join("some_hopefully_unique_link_name"); match symlink_file(r"nonexisting_target", link) { - Ok(_) => true, // ERROR_PRIVILEGE_NOT_HELD = 1314 Err(ref err) if err.raw_os_error() == Some(1314) => false, - Err(_) => true, + Ok(_) | Err(_) => true, } } diff --git a/library/std/src/keyword_docs.rs b/library/std/src/keyword_docs.rs index 54ce0e7b831f4..9b704ee9ecab2 100644 --- a/library/std/src/keyword_docs.rs +++ b/library/std/src/keyword_docs.rs @@ -102,7 +102,9 @@ mod break_keyword {} #[doc(keyword = "const")] // -/// Compile-time constants and deterministic functions. +/// Compile-time constants and compile-time evaluable functions. +/// +/// ## Compile-time constants /// /// Sometimes a certain value is used many times throughout a program, and it can become /// inconvenient to copy it over and over. What's more, it's not always possible or desirable to @@ -145,15 +147,28 @@ mod break_keyword {} /// /// Constants, like statics, should always be in `SCREAMING_SNAKE_CASE`. /// +/// For more detail on `const`, see the [Rust Book] or the [Reference]. +/// +/// ## Compile-time evaluable functions +/// +/// The other main use of the `const` keyword is in `const fn`. This marks a function as being +/// callable in the body of a `const` or `static` item and in array initializers (commonly called +/// "const contexts"). `const fn` are restricted in the set of operations they can perform, to +/// ensure that they can be evaluated at compile-time. 
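To make the new "compile-time evaluable functions" section of the `const` keyword docs above concrete: a `const fn` can be called in const contexts such as `const`/`static` items and array lengths, while remaining an ordinary function at run time. A small example on stable Rust (the `square` function is made up for illustration):

```rust
// Callable in const contexts...
const fn square(n: u32) -> u32 {
    n * n
}

// ...such as `const` and `static` items and array initializers/lengths...
const N: u32 = square(4);
static TABLE: [u32; square(3) as usize] = [0; square(3) as usize];

fn main() {
    assert_eq!(N, 16);
    assert_eq!(TABLE.len(), 9);
    // ...while run-time callers see no difference from a plain `fn`.
    assert_eq!(square(5), 25);
}
```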
See the [Reference][const-eval] for more +/// detail. +/// +/// Turning a `fn` into a `const fn` has no effect on run-time uses of that function. +/// +/// ## Other uses of `const` +/// /// The `const` keyword is also used in raw pointers in combination with `mut`, as seen in `*const /// T` and `*mut T`. More about `const` as used in raw pointers can be read at the Rust docs for the [pointer primitive]. /// -/// For more detail on `const`, see the [Rust Book] or the [Reference]. -/// /// [pointer primitive]: primitive.pointer.html /// [Rust Book]: /// ../book/ch03-01-variables-and-mutability.html#differences-between-variables-and-constants /// [Reference]: ../reference/items/constant-items.html +/// [const-eval]: ../reference/const_eval.html mod const_keyword {} #[doc(keyword = "continue")] @@ -331,7 +346,7 @@ mod else_keyword {} /// When data follows along with a variant, such as with rust's built-in [`Option`] type, the data /// is added as the type describes, for example `Option::Some(123)`. The same follows with /// struct-like variants, with things looking like `ComplexEnum::LotsOfThings { usual_struct_stuff: -/// true, blah: "hello!".to_string(), }`. Empty Enums are similar to () in that they cannot be +/// true, blah: "hello!".to_string(), }`. Empty Enums are similar to [`!`] in that they cannot be /// instantiated at all, and are used mainly to mess with the type system in interesting ways. /// /// For more information, take a look at the [Rust Book] or the [Reference] @@ -339,6 +354,7 @@ mod else_keyword {} /// [ADT]: https://en.wikipedia.org/wiki/Algebraic_data_type /// [Rust Book]: ../book/ch06-01-defining-an-enum.html /// [Reference]: ../reference/items/enumerations.html +/// [`!`]: primitive.never.html mod enum_keyword {} #[doc(keyword = "extern")] diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index 5224672adb28b..96a7755c68821 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -206,6 +206,7 @@ #![needs_panic_runtime] // std may use features in a platform-specific way #![allow(unused_features)] +#![cfg_attr(not(bootstrap), feature(rustc_allow_const_fn_unstable))] #![cfg_attr(test, feature(print_internals, set_stdio, update_panic_count))] #![cfg_attr( all(target_vendor = "fortanix", target_env = "sgx"), @@ -259,6 +260,7 @@ #![feature(exhaustive_patterns)] #![feature(extend_one)] #![feature(external_doc)] +#![feature(fmt_as_str)] #![feature(fn_traits)] #![feature(format_args_nl)] #![feature(gen_future)] @@ -319,7 +321,7 @@ #![feature(unsafe_block_in_unsafe_fn)] #![feature(unsafe_cell_get_mut)] #![feature(unsafe_cell_raw_get)] -#![feature(untagged_unions)] +#![cfg_attr(bootstrap, feature(untagged_unions))] #![feature(unwind_attributes)] #![feature(vec_into_raw_parts)] #![feature(wake_trait)] diff --git a/library/std/src/net/addr/tests.rs b/library/std/src/net/addr/tests.rs index 43f965de25e65..40f5a84bcd520 100644 --- a/library/std/src/net/addr/tests.rs +++ b/library/std/src/net/addr/tests.rs @@ -68,7 +68,7 @@ fn bind_udp_socket_bad() { // returns its own address, it is still an error to bind a UDP socket to // a non-local address, and so we still get an error here in that case. 
- const INPUT_23076: &'static str = "1200::AB00:1234::2552:7777:1313:34300"; + const INPUT_23076: &str = "1200::AB00:1234::2552:7777:1313:34300"; assert!(crate::net::UdpSocket::bind(INPUT_23076).is_err()) } diff --git a/library/std/src/net/ip.rs b/library/std/src/net/ip.rs index f01a7b72a6559..bb3ece4c2739f 100644 --- a/library/std/src/net/ip.rs +++ b/library/std/src/net/ip.rs @@ -456,10 +456,7 @@ impl Ipv4Addr { #[rustc_const_unstable(feature = "const_ipv4", issue = "76205")] #[stable(since = "1.7.0", feature = "ip_17")] pub const fn is_link_local(&self) -> bool { - match self.octets() { - [169, 254, ..] => true, - _ => false, - } + matches!(self.octets(), [169, 254, ..]) } /// Returns [`true`] if the address appears to be globally routable. @@ -1046,7 +1043,8 @@ impl Ipv6Addr { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_ipv6", since = "1.32.0")] - #[allow_internal_unstable(const_fn_transmute)] + #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn_transmute))] + #[cfg_attr(bootstrap, allow_internal_unstable(const_fn_transmute))] pub const fn new(a: u16, b: u16, c: u16, d: u16, e: u16, f: u16, g: u16, h: u16) -> Ipv6Addr { let addr16 = [ a.to_be(), @@ -1262,10 +1260,7 @@ impl Ipv6Addr { /// [RFC 4291 errata 4406]: https://www.rfc-editor.org/errata/eid4406 #[rustc_const_unstable(feature = "const_ipv6", issue = "76205")] pub const fn is_unicast_link_local_strict(&self) -> bool { - (self.segments()[0] & 0xffff) == 0xfe80 - && (self.segments()[1] & 0xffff) == 0 - && (self.segments()[2] & 0xffff) == 0 - && (self.segments()[3] & 0xffff) == 0 + matches!(self.segments(), [0xfe80, 0, 0, 0, ..]) } /// Returns [`true`] if the address is a unicast link-local address (`fe80::/10`). diff --git a/library/std/src/net/udp/tests.rs b/library/std/src/net/udp/tests.rs index 658369f79aa75..fbed3d32d451a 100644 --- a/library/std/src/net/udp/tests.rs +++ b/library/std/src/net/udp/tests.rs @@ -152,19 +152,13 @@ fn udp_clone_two_write() { let (done, rx) = channel(); let tx2 = tx.clone(); let _t = thread::spawn(move || { - match sock3.send_to(&[1], &addr2) { - Ok(..) => { - let _ = tx2.send(()); - } - Err(..) => {} + if sock3.send_to(&[1], &addr2).is_ok() { + let _ = tx2.send(()); } done.send(()).unwrap(); }); - match sock1.send_to(&[2], &addr2) { - Ok(..) => { - let _ = tx.send(()); - } - Err(..) 
=> {} + if sock1.send_to(&[2], &addr2).is_ok() { + let _ = tx.send(()); } drop(tx); diff --git a/library/std/src/os/vxworks/fs.rs b/library/std/src/os/vxworks/fs.rs index 5a7e5bcaa7600..77e6238ca1f52 100644 --- a/library/std/src/os/vxworks/fs.rs +++ b/library/std/src/os/vxworks/fs.rs @@ -26,10 +26,16 @@ pub trait MetadataExt { #[stable(feature = "metadata_ext2", since = "1.8.0")] fn st_atime(&self) -> i64; #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_atime_nsec(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] fn st_mtime(&self) -> i64; #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_mtime_nsec(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] fn st_ctime(&self) -> i64; #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_ctime_nsec(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] fn st_blksize(&self) -> u64; #[stable(feature = "metadata_ext2", since = "1.8.0")] fn st_blocks(&self) -> u64; @@ -66,12 +72,21 @@ impl MetadataExt for Metadata { fn st_atime(&self) -> i64 { self.as_inner().as_inner().st_atime as i64 } + fn st_atime_nsec(&self) -> i64 { + 0 + } fn st_mtime(&self) -> i64 { self.as_inner().as_inner().st_mtime as i64 } + fn st_mtime_nsec(&self) -> i64 { + 0 + } fn st_ctime(&self) -> i64 { self.as_inner().as_inner().st_ctime as i64 } + fn st_ctime_nsec(&self) -> i64 { + 0 + } fn st_blksize(&self) -> u64 { self.as_inner().as_inner().st_blksize as u64 } diff --git a/library/std/src/os/vxworks/raw.rs b/library/std/src/os/vxworks/raw.rs index 29a0af5645ee1..cb41ddfe2a9bf 100644 --- a/library/std/src/os/vxworks/raw.rs +++ b/library/std/src/os/vxworks/raw.rs @@ -5,3 +5,6 @@ use crate::os::raw::c_ulong; #[stable(feature = "pthread_t", since = "1.8.0")] pub type pthread_t = c_ulong; + +#[stable(feature = "raw_ext", since = "1.1.0")] +pub use libc::{blkcnt_t, blksize_t, dev_t, ino_t, mode_t, nlink_t, off_t, time_t}; diff --git a/library/std/src/panicking.rs b/library/std/src/panicking.rs index 8dceb12de87b8..221ae809e23a2 100644 --- a/library/std/src/panicking.rs +++ b/library/std/src/panicking.rs @@ -478,10 +478,26 @@ pub fn begin_panic_handler(info: &PanicInfo<'_>) -> ! { } } + struct StrPanicPayload(&'static str); + + unsafe impl BoxMeUp for StrPanicPayload { + fn take_box(&mut self) -> *mut (dyn Any + Send) { + Box::into_raw(Box::new(self.0)) + } + + fn get(&mut self) -> &(dyn Any + Send) { + &self.0 + } + } + let loc = info.location().unwrap(); // The current implementation always returns Some let msg = info.message().unwrap(); // The current implementation always returns Some crate::sys_common::backtrace::__rust_end_short_backtrace(move || { - rust_panic_with_hook(&mut PanicPayload::new(msg), info.message(), loc); + if let Some(msg) = msg.as_str() { + rust_panic_with_hook(&mut StrPanicPayload(msg), info.message(), loc); + } else { + rust_panic_with_hook(&mut PanicPayload::new(msg), info.message(), loc); + } }) } diff --git a/library/std/src/process.rs b/library/std/src/process.rs index 3d238b7f764ef..a149946774404 100644 --- a/library/std/src/process.rs +++ b/library/std/src/process.rs @@ -1184,7 +1184,7 @@ impl Stdio { } /// This stream will be ignored. This is the equivalent of attaching the - /// stream to `/dev/null` + /// stream to `/dev/null`. 
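[Editorial aside, not part of the patch: a small usage sketch of the `Stdio::null` behaviour documented above, discarding a child process's output; `ls` is only a placeholder command.]

use std::process::{Command, Stdio};

fn main() -> std::io::Result<()> {
    // The child's stdout is ignored, as if redirected to /dev/null.
    let status = Command::new("ls").stdout(Stdio::null()).status()?;
    println!("child exited with: {}", status);
    Ok(())
}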
/// /// # Examples /// diff --git a/library/std/src/sys/cloudabi/abi/cloudabi.rs b/library/std/src/sys/cloudabi/abi/cloudabi.rs index b02faf1830c53..5c4e3fd85c41c 100644 --- a/library/std/src/sys/cloudabi/abi/cloudabi.rs +++ b/library/std/src/sys/cloudabi/abi/cloudabi.rs @@ -1910,7 +1910,7 @@ extern "C" { /// The resolution of the clock. #[inline] pub unsafe fn clock_res_get(clock_id_: clockid, resolution_: &mut timestamp) -> errno { - cloudabi_sys_clock_res_get(clock_id_, resolution_) + unsafe { cloudabi_sys_clock_res_get(clock_id_, resolution_) } } /// Obtains the time value of a clock. @@ -1934,7 +1934,7 @@ pub unsafe fn clock_time_get( precision_: timestamp, time_: *mut timestamp, ) -> errno { - cloudabi_sys_clock_time_get(clock_id_, precision_, time_) + unsafe { cloudabi_sys_clock_time_get(clock_id_, precision_, time_) } } /// Wakes up threads waiting on a userspace condition variable. @@ -1961,7 +1961,7 @@ pub unsafe fn clock_time_get( /// threads, all threads are woken up. #[inline] pub unsafe fn condvar_signal(condvar_: *mut condvar, scope_: scope, nwaiters_: nthreads) -> errno { - cloudabi_sys_condvar_signal(condvar_, scope_, nwaiters_) + unsafe { cloudabi_sys_condvar_signal(condvar_, scope_, nwaiters_) } } /// Closes a file descriptor. @@ -1972,7 +1972,7 @@ pub unsafe fn condvar_signal(condvar_: *mut condvar, scope_: scope, nwaiters_: n /// The file descriptor that needs to be closed. #[inline] pub unsafe fn fd_close(fd_: fd) -> errno { - cloudabi_sys_fd_close(fd_) + unsafe { cloudabi_sys_fd_close(fd_) } } /// Creates a file descriptor. @@ -1990,7 +1990,7 @@ pub unsafe fn fd_close(fd_: fd) -> errno { /// The file descriptor that has been created. #[inline] pub unsafe fn fd_create1(type_: filetype, fd_: &mut fd) -> errno { - cloudabi_sys_fd_create1(type_, fd_) + unsafe { cloudabi_sys_fd_create1(type_, fd_) } } /// Creates a pair of file descriptors. @@ -2013,7 +2013,8 @@ pub unsafe fn fd_create1(type_: filetype, fd_: &mut fd) -> errno { /// The second file descriptor of the pair. #[inline] pub unsafe fn fd_create2(type_: filetype, fd1_: &mut fd, fd2_: &mut fd) -> errno { - cloudabi_sys_fd_create2(type_, fd1_, fd2_) + // SAFETY: the caller must uphold the safety contract for `cloudabi_sys_fd_create2`. + unsafe { cloudabi_sys_fd_create2(type_, fd1_, fd2_) } } /// Synchronizes the data of a file to disk. @@ -2025,7 +2026,9 @@ pub unsafe fn fd_create2(type_: filetype, fd1_: &mut fd, fd2_: &mut fd) -> errno /// needs to be synchronized to disk. #[inline] pub unsafe fn fd_datasync(fd_: fd) -> errno { - cloudabi_sys_fd_datasync(fd_) + // SAFETY: the caller must guarantee that `fd` is valid + // for synchronization. + unsafe { cloudabi_sys_fd_datasync(fd_) } } /// Duplicates a file descriptor. @@ -2040,7 +2043,7 @@ pub unsafe fn fd_datasync(fd_: fd) -> errno { /// The new file descriptor. #[inline] pub unsafe fn fd_dup(from_: fd, fd_: &mut fd) -> errno { - cloudabi_sys_fd_dup(from_, fd_) + unsafe { cloudabi_sys_fd_dup(from_, fd_) } } /// Reads from a file descriptor, without using and updating the @@ -2064,7 +2067,7 @@ pub unsafe fn fd_dup(from_: fd, fd_: &mut fd) -> errno { /// The number of bytes read. 
#[inline] pub unsafe fn fd_pread(fd_: fd, iovs_: &[iovec], offset_: filesize, nread_: &mut usize) -> errno { - cloudabi_sys_fd_pread(fd_, iovs_.as_ptr(), iovs_.len(), offset_, nread_) + unsafe { cloudabi_sys_fd_pread(fd_, iovs_.as_ptr(), iovs_.len(), offset_, nread_) } } /// Writes to a file descriptor, without using and updating the @@ -2093,7 +2096,7 @@ pub unsafe fn fd_pwrite( offset_: filesize, nwritten_: &mut usize, ) -> errno { - cloudabi_sys_fd_pwrite(fd_, iovs_.as_ptr(), iovs_.len(), offset_, nwritten_) + unsafe { cloudabi_sys_fd_pwrite(fd_, iovs_.as_ptr(), iovs_.len(), offset_, nwritten_) } } /// Reads from a file descriptor. @@ -2112,7 +2115,7 @@ pub unsafe fn fd_pwrite( /// The number of bytes read. #[inline] pub unsafe fn fd_read(fd_: fd, iovs_: &[iovec], nread_: &mut usize) -> errno { - cloudabi_sys_fd_read(fd_, iovs_.as_ptr(), iovs_.len(), nread_) + unsafe { cloudabi_sys_fd_read(fd_, iovs_.as_ptr(), iovs_.len(), nread_) } } /// Atomically replaces a file descriptor by a copy of another @@ -2138,7 +2141,7 @@ pub unsafe fn fd_read(fd_: fd, iovs_: &[iovec], nread_: &mut usize) -> errno { /// overwritten. #[inline] pub unsafe fn fd_replace(from_: fd, to_: fd) -> errno { - cloudabi_sys_fd_replace(from_, to_) + unsafe { cloudabi_sys_fd_replace(from_, to_) } } /// Moves the offset of the file descriptor. @@ -2166,7 +2169,7 @@ pub unsafe fn fd_seek( whence_: whence, newoffset_: &mut filesize, ) -> errno { - cloudabi_sys_fd_seek(fd_, offset_, whence_, newoffset_) + unsafe { cloudabi_sys_fd_seek(fd_, offset_, whence_, newoffset_) } } /// Gets attributes of a file descriptor. @@ -2182,7 +2185,7 @@ pub unsafe fn fd_seek( /// attributes are stored. #[inline] pub unsafe fn fd_stat_get(fd_: fd, buf_: *mut fdstat) -> errno { - cloudabi_sys_fd_stat_get(fd_, buf_) + unsafe { cloudabi_sys_fd_stat_get(fd_, buf_) } } /// Adjusts attributes of a file descriptor. @@ -2202,7 +2205,7 @@ pub unsafe fn fd_stat_get(fd_: fd, buf_: *mut fdstat) -> errno { /// be adjusted. #[inline] pub unsafe fn fd_stat_put(fd_: fd, buf_: *const fdstat, flags_: fdsflags) -> errno { - cloudabi_sys_fd_stat_put(fd_, buf_, flags_) + unsafe { cloudabi_sys_fd_stat_put(fd_, buf_, flags_) } } /// Synchronizes the data and metadata of a file to disk. @@ -2214,7 +2217,7 @@ pub unsafe fn fd_stat_put(fd_: fd, buf_: *const fdstat, flags_: fdsflags) -> err /// and metadata needs to be synchronized to disk. #[inline] pub unsafe fn fd_sync(fd_: fd) -> errno { - cloudabi_sys_fd_sync(fd_) + unsafe { cloudabi_sys_fd_sync(fd_) } } /// Writes to a file descriptor. @@ -2233,7 +2236,7 @@ pub unsafe fn fd_sync(fd_: fd) -> errno { /// The number of bytes written. #[inline] pub unsafe fn fd_write(fd_: fd, iovs_: &[ciovec], nwritten_: &mut usize) -> errno { - cloudabi_sys_fd_write(fd_, iovs_.as_ptr(), iovs_.len(), nwritten_) + unsafe { cloudabi_sys_fd_write(fd_, iovs_.as_ptr(), iovs_.len(), nwritten_) } } /// Provides file advisory information on a file descriptor. @@ -2256,7 +2259,7 @@ pub unsafe fn fd_write(fd_: fd, iovs_: &[ciovec], nwritten_: &mut usize) -> errn /// The advice. #[inline] pub unsafe fn file_advise(fd_: fd, offset_: filesize, len_: filesize, advice_: advice) -> errno { - cloudabi_sys_file_advise(fd_, offset_, len_, advice_) + unsafe { cloudabi_sys_file_advise(fd_, offset_, len_, advice_) } } /// Forces the allocation of space in a file. @@ -2275,7 +2278,7 @@ pub unsafe fn file_advise(fd_: fd, offset_: filesize, len_: filesize, advice_: a /// The length of the area that is allocated. 
#[inline] pub unsafe fn file_allocate(fd_: fd, offset_: filesize, len_: filesize) -> errno { - cloudabi_sys_file_allocate(fd_, offset_, len_) + unsafe { cloudabi_sys_file_allocate(fd_, offset_, len_) } } /// Creates a file of a specified type. @@ -2296,7 +2299,7 @@ pub unsafe fn file_allocate(fd_: fd, offset_: filesize, len_: filesize) -> errno /// Creates a directory. #[inline] pub unsafe fn file_create(fd_: fd, path_: &[u8], type_: filetype) -> errno { - cloudabi_sys_file_create(fd_, path_.as_ptr(), path_.len(), type_) + unsafe { cloudabi_sys_file_create(fd_, path_.as_ptr(), path_.len(), type_)} } /// Creates a hard link. @@ -2320,7 +2323,7 @@ pub unsafe fn file_create(fd_: fd, path_: &[u8], type_: filetype) -> errno { /// should be created. #[inline] pub unsafe fn file_link(fd1_: lookup, path1_: &[u8], fd2_: fd, path2_: &[u8]) -> errno { - cloudabi_sys_file_link(fd1_, path1_.as_ptr(), path1_.len(), fd2_, path2_.as_ptr(), path2_.len()) + unsafe { cloudabi_sys_file_link(fd1_, path1_.as_ptr(), path1_.len(), fd2_, path2_.as_ptr(), path2_.len()) } } /// Opens a file. @@ -2362,7 +2365,7 @@ pub unsafe fn file_open( fds_: *const fdstat, fd_: &mut fd, ) -> errno { - cloudabi_sys_file_open(dirfd_, path_.as_ptr(), path_.len(), oflags_, fds_, fd_) + unsafe { cloudabi_sys_file_open(dirfd_, path_.as_ptr(), path_.len(), oflags_, fds_, fd_) } } /// Reads directory entries from a directory. @@ -2402,7 +2405,7 @@ pub unsafe fn file_readdir( cookie_: dircookie, bufused_: &mut usize, ) -> errno { - cloudabi_sys_file_readdir(fd_, buf_.as_mut_ptr() as *mut (), buf_.len(), cookie_, bufused_) + unsafe { cloudabi_sys_file_readdir(fd_, buf_.as_mut_ptr() as *mut (), buf_.len(), cookie_, bufused_) } } /// Reads the contents of a symbolic link. @@ -2425,14 +2428,16 @@ pub unsafe fn file_readdir( /// The number of bytes placed in the buffer. #[inline] pub unsafe fn file_readlink(fd_: fd, path_: &[u8], buf_: &mut [u8], bufused_: &mut usize) -> errno { - cloudabi_sys_file_readlink( - fd_, - path_.as_ptr(), - path_.len(), - buf_.as_mut_ptr(), - buf_.len(), - bufused_, - ) + unsafe { + cloudabi_sys_file_readlink( + fd_, + path_.as_ptr(), + path_.len(), + buf_.as_mut_ptr(), + buf_.len(), + bufused_, + ) + } } /// Renames a file. @@ -2456,14 +2461,16 @@ pub unsafe fn file_readlink(fd_: fd, path_: &[u8], buf_: &mut [u8], bufused_: &m /// be renamed. #[inline] pub unsafe fn file_rename(fd1_: fd, path1_: &[u8], fd2_: fd, path2_: &[u8]) -> errno { - cloudabi_sys_file_rename( - fd1_, - path1_.as_ptr(), - path1_.len(), - fd2_, - path2_.as_ptr(), - path2_.len(), - ) + unsafe { + cloudabi_sys_file_rename( + fd1_, + path1_.as_ptr(), + path1_.len(), + fd2_, + path2_.as_ptr(), + path2_.len(), + ) + } } /// Gets attributes of a file by file descriptor. @@ -2479,7 +2486,7 @@ pub unsafe fn file_rename(fd1_: fd, path1_: &[u8], fd2_: fd, path2_: &[u8]) -> e /// stored. #[inline] pub unsafe fn file_stat_fget(fd_: fd, buf_: *mut filestat) -> errno { - cloudabi_sys_file_stat_fget(fd_, buf_) + unsafe { cloudabi_sys_file_stat_fget(fd_, buf_) } } /// Adjusts attributes of a file by file descriptor. @@ -2499,7 +2506,7 @@ pub unsafe fn file_stat_fget(fd_: fd, buf_: *mut filestat) -> errno { /// be adjusted. #[inline] pub unsafe fn file_stat_fput(fd_: fd, buf_: *const filestat, flags_: fsflags) -> errno { - cloudabi_sys_file_stat_fput(fd_, buf_, flags_) + unsafe { cloudabi_sys_file_stat_fput(fd_, buf_, flags_) } } /// Gets attributes of a file by path. 
@@ -2520,7 +2527,7 @@ pub unsafe fn file_stat_fput(fd_: fd, buf_: *const filestat, flags_: fsflags) -> /// stored. #[inline] pub unsafe fn file_stat_get(fd_: lookup, path_: &[u8], buf_: *mut filestat) -> errno { - cloudabi_sys_file_stat_get(fd_, path_.as_ptr(), path_.len(), buf_) + unsafe { cloudabi_sys_file_stat_get(fd_, path_.as_ptr(), path_.len(), buf_) } } /// Adjusts attributes of a file by path. @@ -2550,7 +2557,7 @@ pub unsafe fn file_stat_put( buf_: *const filestat, flags_: fsflags, ) -> errno { - cloudabi_sys_file_stat_put(fd_, path_.as_ptr(), path_.len(), buf_, flags_) + unsafe { cloudabi_sys_file_stat_put(fd_, path_.as_ptr(), path_.len(), buf_, flags_) } } /// Creates a symbolic link. @@ -2569,7 +2576,7 @@ pub unsafe fn file_stat_put( /// link should be created. #[inline] pub unsafe fn file_symlink(path1_: &[u8], fd_: fd, path2_: &[u8]) -> errno { - cloudabi_sys_file_symlink(path1_.as_ptr(), path1_.len(), fd_, path2_.as_ptr(), path2_.len()) + unsafe { cloudabi_sys_file_symlink(path1_.as_ptr(), path1_.len(), fd_, path2_.as_ptr(), path2_.len()) } } /// Unlinks a file, or removes a directory. @@ -2591,7 +2598,7 @@ pub unsafe fn file_symlink(path1_: &[u8], fd_: fd, path2_: &[u8]) -> errno { /// Otherwise, unlink a file. #[inline] pub unsafe fn file_unlink(fd_: fd, path_: &[u8], flags_: ulflags) -> errno { - cloudabi_sys_file_unlink(fd_, path_.as_ptr(), path_.len(), flags_) + unsafe { cloudabi_sys_file_unlink(fd_, path_.as_ptr(), path_.len(), flags_) } } /// Unlocks a write-locked userspace lock. @@ -2618,7 +2625,7 @@ pub unsafe fn file_unlink(fd_: fd, path_: &[u8], flags_: ulflags) -> errno { /// shared memory. #[inline] pub unsafe fn lock_unlock(lock_: *mut lock, scope_: scope) -> errno { - cloudabi_sys_lock_unlock(lock_, scope_) + unsafe { cloudabi_sys_lock_unlock(lock_, scope_) } } /// Provides memory advisory information on a region of memory. @@ -2633,7 +2640,7 @@ pub unsafe fn lock_unlock(lock_: *mut lock, scope_: scope) -> errno { /// The advice. #[inline] pub unsafe fn mem_advise(mapping_: &mut [u8], advice_: advice) -> errno { - cloudabi_sys_mem_advise(mapping_.as_mut_ptr() as *mut (), mapping_.len(), advice_) + unsafe { cloudabi_sys_mem_advise(mapping_.as_mut_ptr() as *mut (), mapping_.len(), advice_) } } /// Creates a memory mapping, making the contents of a file @@ -2682,7 +2689,7 @@ pub unsafe fn mem_map( off_: filesize, mem_: &mut *mut (), ) -> errno { - cloudabi_sys_mem_map(addr_, len_, prot_, flags_, fd_, off_, mem_) + unsafe { cloudabi_sys_mem_map(addr_, len_, prot_, flags_, fd_, off_, mem_) } } /// Changes the protection of a memory mapping. @@ -2696,7 +2703,7 @@ pub unsafe fn mem_map( /// New protection options. #[inline] pub unsafe fn mem_protect(mapping_: &mut [u8], prot_: mprot) -> errno { - cloudabi_sys_mem_protect(mapping_.as_mut_ptr() as *mut (), mapping_.len(), prot_) + unsafe { cloudabi_sys_mem_protect(mapping_.as_mut_ptr() as *mut (), mapping_.len(), prot_) } } /// Synchronizes a region of memory with its physical storage. @@ -2710,7 +2717,7 @@ pub unsafe fn mem_protect(mapping_: &mut [u8], prot_: mprot) -> errno { /// The method of synchronization. #[inline] pub unsafe fn mem_sync(mapping_: &mut [u8], flags_: msflags) -> errno { - cloudabi_sys_mem_sync(mapping_.as_mut_ptr() as *mut (), mapping_.len(), flags_) + unsafe { cloudabi_sys_mem_sync(mapping_.as_mut_ptr() as *mut (), mapping_.len(), flags_) } } /// Unmaps a region of memory. 
@@ -2721,7 +2728,7 @@ pub unsafe fn mem_sync(mapping_: &mut [u8], flags_: msflags) -> errno { /// The pages that needs to be unmapped. #[inline] pub unsafe fn mem_unmap(mapping_: &mut [u8]) -> errno { - cloudabi_sys_mem_unmap(mapping_.as_mut_ptr() as *mut (), mapping_.len()) + unsafe { cloudabi_sys_mem_unmap(mapping_.as_mut_ptr() as *mut (), mapping_.len()) } } /// Concurrently polls for the occurrence of a set of events. @@ -2746,7 +2753,7 @@ pub unsafe fn poll( nsubscriptions_: usize, nevents_: *mut usize, ) -> errno { - cloudabi_sys_poll(in_, out_, nsubscriptions_, nevents_) + unsafe { cloudabi_sys_poll(in_, out_, nsubscriptions_, nevents_) } } /// Replaces the process by a new executable. @@ -2784,7 +2791,7 @@ pub unsafe fn poll( /// execution. #[inline] pub unsafe fn proc_exec(fd_: fd, data_: &[u8], fds_: &[fd]) -> errno { - cloudabi_sys_proc_exec(fd_, data_.as_ptr() as *const (), data_.len(), fds_.as_ptr(), fds_.len()) + unsafe { cloudabi_sys_proc_exec(fd_, data_.as_ptr() as *const (), data_.len(), fds_.as_ptr(), fds_.len()) } } /// Terminates the process normally. @@ -2797,7 +2804,7 @@ pub unsafe fn proc_exec(fd_: fd, data_: &[u8], fds_: &[fd]) -> errno { /// through [`event.union.proc_terminate.exitcode`](struct.event_proc_terminate.html#structfield.exitcode). #[inline] pub unsafe fn proc_exit(rval_: exitcode) -> ! { - cloudabi_sys_proc_exit(rval_) + unsafe { cloudabi_sys_proc_exit(rval_) } } /// Forks the process of the calling thread. @@ -2822,7 +2829,7 @@ pub unsafe fn proc_exit(rval_: exitcode) -> ! { /// initial thread of the child process. #[inline] pub unsafe fn proc_fork(fd_: &mut fd, tid_: &mut tid) -> errno { - cloudabi_sys_proc_fork(fd_, tid_) + unsafe { cloudabi_sys_proc_fork(fd_, tid_) } } /// Sends a signal to the process of the calling thread. @@ -2837,7 +2844,7 @@ pub unsafe fn proc_fork(fd_: &mut fd, tid_: &mut tid) -> errno { /// [`event.union.proc_terminate.signal`](struct.event_proc_terminate.html#structfield.signal). #[inline] pub unsafe fn proc_raise(sig_: signal) -> errno { - cloudabi_sys_proc_raise(sig_) + unsafe { cloudabi_sys_proc_raise(sig_) } } /// Obtains random data from the kernel random number generator. @@ -2853,7 +2860,7 @@ pub unsafe fn proc_raise(sig_: signal) -> errno { /// data. #[inline] pub unsafe fn random_get(buf_: &mut [u8]) -> errno { - cloudabi_sys_random_get(buf_.as_mut_ptr() as *mut (), buf_.len()) + unsafe { cloudabi_sys_random_get(buf_.as_mut_ptr() as *mut (), buf_.len()) } } /// Receives a message on a socket. @@ -2871,7 +2878,7 @@ pub unsafe fn random_get(buf_: &mut [u8]) -> errno { /// Output parameters. #[inline] pub unsafe fn sock_recv(sock_: fd, in_: *const recv_in, out_: *mut recv_out) -> errno { - cloudabi_sys_sock_recv(sock_, in_, out_) + unsafe { cloudabi_sys_sock_recv(sock_, in_, out_) } } /// Sends a message on a socket. @@ -2888,7 +2895,7 @@ pub unsafe fn sock_recv(sock_: fd, in_: *const recv_in, out_: *mut recv_out) -> /// Output parameters. #[inline] pub unsafe fn sock_send(sock_: fd, in_: *const send_in, out_: *mut send_out) -> errno { - cloudabi_sys_sock_send(sock_, in_, out_) + unsafe { cloudabi_sys_sock_send(sock_, in_, out_) } } /// Shuts down socket send and receive channels. @@ -2903,7 +2910,7 @@ pub unsafe fn sock_send(sock_: fd, in_: *const send_in, out_: *mut send_out) -> /// down. #[inline] pub unsafe fn sock_shutdown(sock_: fd, how_: sdflags) -> errno { - cloudabi_sys_sock_shutdown(sock_, how_) + unsafe { cloudabi_sys_sock_shutdown(sock_, how_) } } /// Creates a new thread within the current process. 
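[Editorial aside, not part of the patch: the mechanical changes throughout this cloudabi.rs file follow from `#![deny(unsafe_op_in_unsafe_fn)]`, added to cloudabi/mod.rs below and enabled through the `unsafe_block_in_unsafe_fn` feature already present in std's lib.rs. Under that lint the body of an `unsafe fn` is no longer an implicit unsafe context, so every unsafe operation needs its own explicit `unsafe { }` block, ideally with a SAFETY comment. A minimal sketch of the effect:]

#![deny(unsafe_op_in_unsafe_fn)]

unsafe fn read_first(ptr: *const u8) -> u8 {
    // Without this explicit block the lint rejects the dereference,
    // because the body of an `unsafe fn` is no longer implicitly unsafe.
    // SAFETY: the caller must pass a valid pointer to readable memory.
    unsafe { *ptr }
}

fn main() {
    let data = [42u8];
    // SAFETY: `data` is live and its pointer is valid for reads.
    assert_eq!(unsafe { read_first(data.as_ptr()) }, 42);
}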
@@ -2917,7 +2924,7 @@ pub unsafe fn sock_shutdown(sock_: fd, how_: sdflags) -> errno { /// The thread ID of the new thread. #[inline] pub unsafe fn thread_create(attr_: *mut threadattr, tid_: &mut tid) -> errno { - cloudabi_sys_thread_create(attr_, tid_) + unsafe { cloudabi_sys_thread_create(attr_, tid_) } } /// Terminates the calling thread. @@ -2937,11 +2944,11 @@ pub unsafe fn thread_create(attr_: *mut threadattr, tid_: &mut tid) -> errno { /// shared memory. #[inline] pub unsafe fn thread_exit(lock_: *mut lock, scope_: scope) -> ! { - cloudabi_sys_thread_exit(lock_, scope_) + unsafe { cloudabi_sys_thread_exit(lock_, scope_) } } /// Temporarily yields execution of the calling thread. #[inline] pub unsafe fn thread_yield() -> errno { - cloudabi_sys_thread_yield() + unsafe { cloudabi_sys_thread_yield() } } diff --git a/library/std/src/sys/cloudabi/mod.rs b/library/std/src/sys/cloudabi/mod.rs index f7dd2c8d00fd2..13f1bc8826e61 100644 --- a/library/std/src/sys/cloudabi/mod.rs +++ b/library/std/src/sys/cloudabi/mod.rs @@ -1,3 +1,5 @@ +#![deny(unsafe_op_in_unsafe_fn)] + use crate::io::ErrorKind; use crate::mem; diff --git a/library/std/src/sys/cloudabi/mutex.rs b/library/std/src/sys/cloudabi/mutex.rs index 1203d8de0c572..9dafcbc1fba0b 100644 --- a/library/std/src/sys/cloudabi/mutex.rs +++ b/library/std/src/sys/cloudabi/mutex.rs @@ -103,7 +103,9 @@ impl ReentrantMutex { }; let mut event = MaybeUninit::::uninit(); let mut nevents = MaybeUninit::::uninit(); - let ret = abi::poll(&subscription, event.as_mut_ptr(), 1, nevents.as_mut_ptr()); + // SAFE: The caller must to ensure that `event` and `nevents` are initialized. + let ret = + unsafe { abi::poll(&subscription, event.as_mut_ptr(), 1, nevents.as_mut_ptr()) }; assert_eq!(ret, abi::errno::SUCCESS, "Failed to acquire mutex"); let event = event.assume_init(); assert_eq!(event.error, abi::errno::SUCCESS, "Failed to acquire mutex"); diff --git a/library/std/src/sys/hermit/fs.rs b/library/std/src/sys/hermit/fs.rs index 82ccab1462ba8..829d4c943f11b 100644 --- a/library/std/src/sys/hermit/fs.rs +++ b/library/std/src/sys/hermit/fs.rs @@ -334,10 +334,6 @@ impl File { pub fn set_permissions(&self, _perm: FilePermissions) -> io::Result<()> { Err(Error::from_raw_os_error(22)) } - - pub fn diverge(&self) -> ! { - loop {} - } } impl DirBuilder { diff --git a/library/std/src/sys/hermit/mod.rs b/library/std/src/sys/hermit/mod.rs index 8eaf07e52d69a..af05310a8d3ab 100644 --- a/library/std/src/sys/hermit/mod.rs +++ b/library/std/src/sys/hermit/mod.rs @@ -31,6 +31,7 @@ pub mod net; pub mod os; pub mod path; pub mod pipe; +#[path = "../unsupported/process.rs"] pub mod process; pub mod rwlock; pub mod stack_overflow; diff --git a/library/std/src/sys/hermit/mutex.rs b/library/std/src/sys/hermit/mutex.rs index 3d4813209cbc4..f988a019cfedb 100644 --- a/library/std/src/sys/hermit/mutex.rs +++ b/library/std/src/sys/hermit/mutex.rs @@ -1,44 +1,214 @@ +use crate::cell::UnsafeCell; +use crate::collections::VecDeque; use crate::ffi::c_void; +use crate::ops::{Deref, DerefMut, Drop}; use crate::ptr; +use crate::sync::atomic::{spin_loop_hint, AtomicUsize, Ordering}; use crate::sys::hermit::abi; +/// This type provides a lock based on busy waiting to realize mutual exclusion +/// +/// # Description +/// +/// This structure behaves a lot like a common mutex. There are some differences: +/// +/// - By using busy waiting, it can be used outside the runtime. +/// - It is a so called ticket lock and is completly fair. 
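[Editorial aside, not part of the patch: the fairness claimed in the doc comment above comes from the ticket discipline — each waiter takes a strictly increasing ticket and is admitted in ticket order. Stripped of the guard and UnsafeCell machinery used in the actual Spinlock below, the core idea looks roughly like this:]

use std::hint::spin_loop;
use std::sync::atomic::{AtomicUsize, Ordering};

struct TicketLock {
    queue: AtomicUsize,   // next ticket to hand out
    dequeue: AtomicUsize, // ticket currently allowed to enter
}

impl TicketLock {
    const fn new() -> Self {
        TicketLock { queue: AtomicUsize::new(0), dequeue: AtomicUsize::new(1) }
    }

    fn lock(&self) {
        // Take a ticket; waiters are admitted strictly in ticket order,
        // which is what makes the lock fair.
        let ticket = self.queue.fetch_add(1, Ordering::SeqCst) + 1;
        while self.dequeue.load(Ordering::SeqCst) != ticket {
            spin_loop();
        }
    }

    fn unlock(&self) {
        // Admit the next ticket holder.
        self.dequeue.fetch_add(1, Ordering::SeqCst);
    }
}

fn main() {
    let lock = TicketLock::new();
    lock.lock();
    // ... critical section ...
    lock.unlock();
}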
+#[cfg_attr(target_arch = "x86_64", repr(align(128)))] +#[cfg_attr(not(target_arch = "x86_64"), repr(align(64)))] +struct Spinlock { + queue: AtomicUsize, + dequeue: AtomicUsize, + data: UnsafeCell, +} + +unsafe impl Sync for Spinlock {} +unsafe impl Send for Spinlock {} + +/// A guard to which the protected data can be accessed +/// +/// When the guard falls out of scope it will release the lock. +struct SpinlockGuard<'a, T: ?Sized + 'a> { + dequeue: &'a AtomicUsize, + data: &'a mut T, +} + +impl Spinlock { + pub const fn new(user_data: T) -> Spinlock { + Spinlock { + queue: AtomicUsize::new(0), + dequeue: AtomicUsize::new(1), + data: UnsafeCell::new(user_data), + } + } + + #[inline] + fn obtain_lock(&self) { + let ticket = self.queue.fetch_add(1, Ordering::SeqCst) + 1; + while self.dequeue.load(Ordering::SeqCst) != ticket { + spin_loop_hint(); + } + } + + #[inline] + pub unsafe fn lock(&self) -> SpinlockGuard<'_, T> { + self.obtain_lock(); + SpinlockGuard { dequeue: &self.dequeue, data: &mut *self.data.get() } + } +} + +impl Default for Spinlock { + fn default() -> Spinlock { + Spinlock::new(Default::default()) + } +} + +impl<'a, T: ?Sized> Deref for SpinlockGuard<'a, T> { + type Target = T; + fn deref(&self) -> &T { + &*self.data + } +} + +impl<'a, T: ?Sized> DerefMut for SpinlockGuard<'a, T> { + fn deref_mut(&mut self) -> &mut T { + &mut *self.data + } +} + +impl<'a, T: ?Sized> Drop for SpinlockGuard<'a, T> { + /// The dropping of the SpinlockGuard will release the lock it was created from. + fn drop(&mut self) { + self.dequeue.fetch_add(1, Ordering::SeqCst); + } +} + +/// Realize a priority queue for tasks +struct PriorityQueue { + queues: [Option>; abi::NO_PRIORITIES], + prio_bitmap: u64, +} + +impl PriorityQueue { + pub const fn new() -> PriorityQueue { + PriorityQueue { + queues: [ + None, None, None, None, None, None, None, None, None, None, None, None, None, None, + None, None, None, None, None, None, None, None, None, None, None, None, None, None, + None, None, None, + ], + prio_bitmap: 0, + } + } + + /// Add a task id by its priority to the queue + pub fn push(&mut self, prio: abi::Priority, id: abi::Tid) { + let i: usize = prio.into().into(); + self.prio_bitmap |= (1 << i) as u64; + if let Some(queue) = &mut self.queues[i] { + queue.push_back(id); + } else { + let mut queue = VecDeque::new(); + queue.push_back(id); + self.queues[i] = Some(queue); + } + } + + fn pop_from_queue(&mut self, queue_index: usize) -> Option { + if let Some(queue) = &mut self.queues[queue_index] { + let id = queue.pop_front(); + + if queue.is_empty() { + self.prio_bitmap &= !(1 << queue_index as u64); + } + + id + } else { + None + } + } + + /// Pop the task handle with the highest priority from the queue + pub fn pop(&mut self) -> Option { + for i in 0..abi::NO_PRIORITIES { + if self.prio_bitmap & (1 << i) != 0 { + return self.pop_from_queue(i); + } + } + + None + } +} + +struct MutexInner { + locked: bool, + blocked_task: PriorityQueue, +} + +impl MutexInner { + pub const fn new() -> MutexInner { + MutexInner { locked: false, blocked_task: PriorityQueue::new() } + } +} + pub struct Mutex { - inner: *const c_void, + inner: Spinlock, } +pub type MovableMutex = Box; + unsafe impl Send for Mutex {} unsafe impl Sync for Mutex {} impl Mutex { pub const fn new() -> Mutex { - Mutex { inner: ptr::null() } + Mutex { inner: Spinlock::new(MutexInner::new()) } } #[inline] pub unsafe fn init(&mut self) { - let _ = abi::sem_init(&mut self.inner as *mut *const c_void, 1); + self.inner = 
Spinlock::new(MutexInner::new()); } #[inline] pub unsafe fn lock(&self) { - let _ = abi::sem_timedwait(self.inner, 0); + loop { + let mut guard = self.inner.lock(); + if guard.locked == false { + guard.locked = true; + return; + } else { + let prio = abi::get_priority(); + let id = abi::getpid(); + + guard.blocked_task.push(prio, id); + abi::block_current_task(); + drop(guard); + abi::yield_now(); + } + } } #[inline] pub unsafe fn unlock(&self) { - let _ = abi::sem_post(self.inner); + let mut guard = self.inner.lock(); + guard.locked = false; + if let Some(tid) = guard.blocked_task.pop() { + abi::wakeup_task(tid); + } } #[inline] pub unsafe fn try_lock(&self) -> bool { - let result = abi::sem_trywait(self.inner); - result == 0 + let mut guard = self.inner.lock(); + if guard.locked == false { + guard.locked = true; + } + guard.locked } #[inline] - pub unsafe fn destroy(&self) { - let _ = abi::sem_destroy(self.inner); - } + pub unsafe fn destroy(&self) {} } pub struct ReentrantMutex { diff --git a/library/std/src/sys/hermit/process.rs b/library/std/src/sys/hermit/process.rs deleted file mode 100644 index 4702e5c549228..0000000000000 --- a/library/std/src/sys/hermit/process.rs +++ /dev/null @@ -1,149 +0,0 @@ -use crate::ffi::OsStr; -use crate::fmt; -use crate::io; -use crate::sys::fs::File; -use crate::sys::pipe::AnonPipe; -use crate::sys::{unsupported, Void}; -use crate::sys_common::process::CommandEnv; - -pub use crate::ffi::OsString as EnvKey; - -//////////////////////////////////////////////////////////////////////////////// -// Command -//////////////////////////////////////////////////////////////////////////////// - -pub struct Command { - env: CommandEnv, -} - -// passed back to std::process with the pipes connected to the child, if any -// were requested -pub struct StdioPipes { - pub stdin: Option, - pub stdout: Option, - pub stderr: Option, -} - -pub enum Stdio { - Inherit, - Null, - MakePipe, -} - -impl Command { - pub fn new(_program: &OsStr) -> Command { - Command { env: Default::default() } - } - - pub fn arg(&mut self, _arg: &OsStr) {} - - pub fn env_mut(&mut self) -> &mut CommandEnv { - &mut self.env - } - - pub fn cwd(&mut self, _dir: &OsStr) {} - - pub fn stdin(&mut self, _stdin: Stdio) {} - - pub fn stdout(&mut self, _stdout: Stdio) {} - - pub fn stderr(&mut self, _stderr: Stdio) {} - - pub fn spawn( - &mut self, - _default: Stdio, - _needs_stdin: bool, - ) -> io::Result<(Process, StdioPipes)> { - unsupported() - } -} - -impl From for Stdio { - fn from(pipe: AnonPipe) -> Stdio { - pipe.diverge() - } -} - -impl From for Stdio { - fn from(file: File) -> Stdio { - file.diverge() - } -} - -impl fmt::Debug for Command { - fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { - Ok(()) - } -} - -pub struct ExitStatus(Void); - -impl ExitStatus { - pub fn success(&self) -> bool { - match self.0 {} - } - - pub fn code(&self) -> Option { - match self.0 {} - } -} - -impl Clone for ExitStatus { - fn clone(&self) -> ExitStatus { - match self.0 {} - } -} - -impl Copy for ExitStatus {} - -impl PartialEq for ExitStatus { - fn eq(&self, _other: &ExitStatus) -> bool { - match self.0 {} - } -} - -impl Eq for ExitStatus {} - -impl fmt::Debug for ExitStatus { - fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.0 {} - } -} - -impl fmt::Display for ExitStatus { - fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.0 {} - } -} - -#[derive(PartialEq, Eq, Clone, Copy, Debug)] -pub struct ExitCode(bool); - -impl ExitCode { - pub const SUCCESS: 
ExitCode = ExitCode(false); - pub const FAILURE: ExitCode = ExitCode(true); - - pub fn as_i32(&self) -> i32 { - self.0 as i32 - } -} - -pub struct Process(Void); - -impl Process { - pub fn id(&self) -> u32 { - match self.0 {} - } - - pub fn kill(&mut self) -> io::Result<()> { - match self.0 {} - } - - pub fn wait(&mut self) -> io::Result { - match self.0 {} - } - - pub fn try_wait(&mut self) -> io::Result> { - match self.0 {} - } -} diff --git a/library/std/src/sys/mod.rs b/library/std/src/sys/mod.rs index 7b5fac922d08a..b4628b649117e 100644 --- a/library/std/src/sys/mod.rs +++ b/library/std/src/sys/mod.rs @@ -89,6 +89,7 @@ cfg_if::cfg_if! { #[stable(feature = "rust1", since = "1.0.0")] pub use self::ext as windows_ext; } else if #[cfg(any(target_os = "cloudabi", + target_os = "hermit", target_arch = "wasm32", all(target_vendor = "fortanix", target_env = "sgx")))] { // On CloudABI and wasm right now the shim below doesn't compile, so diff --git a/library/std/src/sys/sgx/env.rs b/library/std/src/sys/sgx/env.rs index 6fa0ed7bcf475..8043b7c5213a1 100644 --- a/library/std/src/sys/sgx/env.rs +++ b/library/std/src/sys/sgx/env.rs @@ -1,9 +1,9 @@ pub mod os { - pub const FAMILY: &'static str = ""; - pub const OS: &'static str = ""; - pub const DLL_PREFIX: &'static str = ""; - pub const DLL_SUFFIX: &'static str = ".sgxs"; - pub const DLL_EXTENSION: &'static str = "sgxs"; - pub const EXE_SUFFIX: &'static str = ".sgxs"; - pub const EXE_EXTENSION: &'static str = "sgxs"; + pub const FAMILY: &str = ""; + pub const OS: &str = ""; + pub const DLL_PREFIX: &str = ""; + pub const DLL_SUFFIX: &str = ".sgxs"; + pub const DLL_EXTENSION: &str = "sgxs"; + pub const EXE_SUFFIX: &str = ".sgxs"; + pub const EXE_EXTENSION: &str = "sgxs"; } diff --git a/library/std/src/sys/sgx/path.rs b/library/std/src/sys/sgx/path.rs index 06c9df3ff5427..840a7ae042625 100644 --- a/library/std/src/sys/sgx/path.rs +++ b/library/std/src/sys/sgx/path.rs @@ -15,5 +15,5 @@ pub fn parse_prefix(_: &OsStr) -> Option> { None } -pub const MAIN_SEP_STR: &'static str = "/"; +pub const MAIN_SEP_STR: &str = "/"; pub const MAIN_SEP: char = '/'; diff --git a/library/std/src/sys/unix/args.rs b/library/std/src/sys/unix/args.rs index f7c3f16371818..6967647249390 100644 --- a/library/std/src/sys/unix/args.rs +++ b/library/std/src/sys/unix/args.rs @@ -70,7 +70,8 @@ impl DoubleEndedIterator for Args { target_os = "haiku", target_os = "l4re", target_os = "fuchsia", - target_os = "redox" + target_os = "redox", + target_os = "vxworks" ))] mod imp { use super::Args; diff --git a/library/std/src/sys/unix/ext/fs.rs b/library/std/src/sys/unix/ext/fs.rs index 4b9f4ceb29c49..66bbc1c585413 100644 --- a/library/std/src/sys/unix/ext/fs.rs +++ b/library/std/src/sys/unix/ext/fs.rs @@ -650,6 +650,9 @@ pub trait MetadataExt { /// ``` #[stable(feature = "metadata_ext", since = "1.1.0")] fn blocks(&self) -> u64; + #[cfg(target_os = "vxworks")] + #[stable(feature = "metadata_ext", since = "1.1.0")] + fn attrib(&self) -> u8; } #[stable(feature = "metadata_ext", since = "1.1.0")] @@ -702,6 +705,10 @@ impl MetadataExt for fs::Metadata { fn blocks(&self) -> u64 { self.st_blocks() } + #[cfg(target_os = "vxworks")] + fn attrib(&self) -> u8 { + self.st_attrib() + } } /// Unix-specific extensions for [`fs::FileType`]. 
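[Editorial aside, not part of the patch: the new VxWorks-only `attrib` accessor above slots into the existing `std::os::unix::fs::MetadataExt` pattern, where each method forwards to the corresponding platform `stat` field. For reference, the stable methods are used like this on a Unix target (the path is an arbitrary example):]

use std::fs;
use std::io;
use std::os::unix::fs::MetadataExt;

fn main() -> io::Result<()> {
    let meta = fs::metadata("/etc/hosts")?;
    // blocks() reports the number of 512-byte blocks allocated to the file;
    // the VxWorks-only attrib() added in this patch follows the same shape.
    println!("inode {} uses {} blocks", meta.ino(), meta.blocks());
    Ok(())
}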
diff --git a/library/std/src/sys/unix/ext/process.rs b/library/std/src/sys/unix/ext/process.rs index 82527c40e9138..3615a8a5ee8b0 100644 --- a/library/std/src/sys/unix/ext/process.rs +++ b/library/std/src/sys/unix/ext/process.rs @@ -16,12 +16,20 @@ pub trait CommandExt { /// `setuid` call in the child process. Failure in the `setuid` /// call will cause the spawn to fail. #[stable(feature = "rust1", since = "1.0.0")] - fn uid(&mut self, id: u32) -> &mut process::Command; + fn uid( + &mut self, + #[cfg(not(target_os = "vxworks"))] id: u32, + #[cfg(target_os = "vxworks")] id: u16, + ) -> &mut process::Command; /// Similar to `uid`, but sets the group ID of the child process. This has /// the same semantics as the `uid` field. #[stable(feature = "rust1", since = "1.0.0")] - fn gid(&mut self, id: u32) -> &mut process::Command; + fn gid( + &mut self, + #[cfg(not(target_os = "vxworks"))] id: u32, + #[cfg(target_os = "vxworks")] id: u16, + ) -> &mut process::Command; /// Schedules a closure to be run just before the `exec` function is /// invoked. @@ -115,12 +123,20 @@ pub trait CommandExt { #[stable(feature = "rust1", since = "1.0.0")] impl CommandExt for process::Command { - fn uid(&mut self, id: u32) -> &mut process::Command { + fn uid( + &mut self, + #[cfg(not(target_os = "vxworks"))] id: u32, + #[cfg(target_os = "vxworks")] id: u16, + ) -> &mut process::Command { self.as_inner_mut().uid(id); self } - fn gid(&mut self, id: u32) -> &mut process::Command { + fn gid( + &mut self, + #[cfg(not(target_os = "vxworks"))] id: u32, + #[cfg(target_os = "vxworks")] id: u16, + ) -> &mut process::Command { self.as_inner_mut().gid(id); self } diff --git a/library/std/src/sys/unix/fd.rs b/library/std/src/sys/unix/fd.rs index 2224a055d6d87..d3a279a23553e 100644 --- a/library/std/src/sys/unix/fd.rs +++ b/library/std/src/sys/unix/fd.rs @@ -200,7 +200,8 @@ impl FileDesc { target_os = "l4re", target_os = "linux", target_os = "haiku", - target_os = "redox" + target_os = "redox", + target_os = "vxworks" )))] pub fn set_cloexec(&self) -> io::Result<()> { unsafe { @@ -217,7 +218,8 @@ impl FileDesc { target_os = "l4re", target_os = "linux", target_os = "haiku", - target_os = "redox" + target_os = "redox", + target_os = "vxworks" ))] pub fn set_cloexec(&self) -> io::Result<()> { unsafe { diff --git a/library/std/src/sys/unix/fs.rs b/library/std/src/sys/unix/fs.rs index 8184c25afcfb8..d27d6e2c5659b 100644 --- a/library/std/src/sys/unix/fs.rs +++ b/library/std/src/sys/unix/fs.rs @@ -297,6 +297,7 @@ impl FileAttr { #[cfg(not(target_os = "netbsd"))] impl FileAttr { + #[cfg(not(target_os = "vxworks"))] pub fn modified(&self) -> io::Result { Ok(SystemTime::from(libc::timespec { tv_sec: self.stat.st_mtime as libc::time_t, @@ -304,6 +305,15 @@ impl FileAttr { })) } + #[cfg(target_os = "vxworks")] + pub fn modified(&self) -> io::Result { + Ok(SystemTime::from(libc::timespec { + tv_sec: self.stat.st_mtime as libc::time_t, + tv_nsec: 0, + })) + } + + #[cfg(not(target_os = "vxworks"))] pub fn accessed(&self) -> io::Result { Ok(SystemTime::from(libc::timespec { tv_sec: self.stat.st_atime as libc::time_t, @@ -311,6 +321,14 @@ impl FileAttr { })) } + #[cfg(target_os = "vxworks")] + pub fn accessed(&self) -> io::Result { + Ok(SystemTime::from(libc::timespec { + tv_sec: self.stat.st_atime as libc::time_t, + tv_nsec: 0, + })) + } + #[cfg(any( target_os = "freebsd", target_os = "openbsd", @@ -535,12 +553,22 @@ impl DirEntry { lstat(&self.path()) } - #[cfg(any(target_os = "solaris", target_os = "illumos", target_os = "haiku"))] + 
#[cfg(any( + target_os = "solaris", + target_os = "illumos", + target_os = "haiku", + target_os = "vxworks" + ))] pub fn file_type(&self) -> io::Result { lstat(&self.path()).map(|m| m.file_type()) } - #[cfg(not(any(target_os = "solaris", target_os = "illumos", target_os = "haiku")))] + #[cfg(not(any( + target_os = "solaris", + target_os = "illumos", + target_os = "haiku", + target_os = "vxworks" + )))] pub fn file_type(&self) -> io::Result { match self.entry.d_type { libc::DT_CHR => Ok(FileType { mode: libc::S_IFCHR }), @@ -565,7 +593,8 @@ impl DirEntry { target_os = "haiku", target_os = "l4re", target_os = "fuchsia", - target_os = "redox" + target_os = "redox", + target_os = "vxworks" ))] pub fn ino(&self) -> u64 { self.entry.d_ino as u64 @@ -603,7 +632,8 @@ impl DirEntry { target_os = "linux", target_os = "emscripten", target_os = "l4re", - target_os = "haiku" + target_os = "haiku", + target_os = "vxworks" ))] fn name_bytes(&self) -> &[u8] { unsafe { CStr::from_ptr(self.entry.d_name.as_ptr()).to_bytes() } @@ -757,11 +787,25 @@ impl File { unsafe fn os_datasync(fd: c_int) -> c_int { libc::fcntl(fd, libc::F_FULLFSYNC) } - #[cfg(target_os = "linux")] + #[cfg(any( + target_os = "freebsd", + target_os = "linux", + target_os = "android", + target_os = "netbsd", + target_os = "openbsd" + ))] unsafe fn os_datasync(fd: c_int) -> c_int { libc::fdatasync(fd) } - #[cfg(not(any(target_os = "macos", target_os = "ios", target_os = "linux")))] + #[cfg(not(any( + target_os = "android", + target_os = "freebsd", + target_os = "ios", + target_os = "linux", + target_os = "macos", + target_os = "netbsd", + target_os = "openbsd" + )))] unsafe fn os_datasync(fd: c_int) -> c_int { libc::fsync(fd) } @@ -901,13 +945,25 @@ impl fmt::Debug for File { Some(PathBuf::from(OsString::from_vec(buf))) } - #[cfg(not(any(target_os = "linux", target_os = "macos")))] + #[cfg(target_os = "vxworks")] + fn get_path(fd: c_int) -> Option { + let mut buf = vec![0; libc::PATH_MAX as usize]; + let n = unsafe { libc::ioctl(fd, libc::FIOGETNAME, buf.as_ptr()) }; + if n == -1 { + return None; + } + let l = buf.iter().position(|&c| c == 0).unwrap(); + buf.truncate(l as usize); + Some(PathBuf::from(OsString::from_vec(buf))) + } + + #[cfg(not(any(target_os = "linux", target_os = "macos", target_os = "vxworks")))] fn get_path(_fd: c_int) -> Option { // FIXME(#24570): implement this for other Unix platforms None } - #[cfg(any(target_os = "linux", target_os = "macos"))] + #[cfg(any(target_os = "linux", target_os = "macos", target_os = "vxworks"))] fn get_mode(fd: c_int) -> Option<(bool, bool)> { let mode = unsafe { libc::fcntl(fd, libc::F_GETFL) }; if mode == -1 { @@ -921,7 +977,7 @@ impl fmt::Debug for File { } } - #[cfg(not(any(target_os = "linux", target_os = "macos")))] + #[cfg(not(any(target_os = "linux", target_os = "macos", target_os = "vxworks")))] fn get_mode(_fd: c_int) -> Option<(bool, bool)> { // FIXME(#24570): implement this for other Unix platforms None diff --git a/library/std/src/sys/unix/mod.rs b/library/std/src/sys/unix/mod.rs index 776f4f18ecfa2..b28c6d85b7c72 100644 --- a/library/std/src/sys/unix/mod.rs +++ b/library/std/src/sys/unix/mod.rs @@ -220,6 +220,10 @@ where } } +pub fn cvt_nz(error: libc::c_int) -> crate::io::Result<()> { + if error == 0 { Ok(()) } else { Err(crate::io::Error::from_raw_os_error(error)) } +} + // On Unix-like platforms, libc::abort will unregister signal handlers // including the SIGABRT handler, preventing the abort from being blocked, and // fclose streams, with the side effect of flushing them 
so libc buffered diff --git a/library/std/src/sys/unix/mutex.rs b/library/std/src/sys/unix/mutex.rs index ebc737b75ae0b..89c55eb859d09 100644 --- a/library/std/src/sys/unix/mutex.rs +++ b/library/std/src/sys/unix/mutex.rs @@ -1,5 +1,6 @@ use crate::cell::UnsafeCell; use crate::mem::MaybeUninit; +use crate::sys::cvt_nz; pub struct Mutex { inner: UnsafeCell, @@ -51,14 +52,11 @@ impl Mutex { // PTHREAD_MUTEX_NORMAL which is guaranteed to deadlock if we try to // re-lock it from the same thread, thus avoiding undefined behavior. let mut attr = MaybeUninit::::uninit(); - let r = libc::pthread_mutexattr_init(attr.as_mut_ptr()); - debug_assert_eq!(r, 0); - let r = libc::pthread_mutexattr_settype(attr.as_mut_ptr(), libc::PTHREAD_MUTEX_NORMAL); - debug_assert_eq!(r, 0); - let r = libc::pthread_mutex_init(self.inner.get(), attr.as_ptr()); - debug_assert_eq!(r, 0); - let r = libc::pthread_mutexattr_destroy(attr.as_mut_ptr()); - debug_assert_eq!(r, 0); + cvt_nz(libc::pthread_mutexattr_init(attr.as_mut_ptr())).unwrap(); + let attr = PthreadMutexAttr(&mut attr); + cvt_nz(libc::pthread_mutexattr_settype(attr.0.as_mut_ptr(), libc::PTHREAD_MUTEX_NORMAL)) + .unwrap(); + cvt_nz(libc::pthread_mutex_init(self.inner.get(), attr.0.as_ptr())).unwrap(); } #[inline] pub unsafe fn lock(&self) { @@ -106,15 +104,11 @@ impl ReentrantMutex { pub unsafe fn init(&self) { let mut attr = MaybeUninit::::uninit(); - let result = libc::pthread_mutexattr_init(attr.as_mut_ptr()); - debug_assert_eq!(result, 0); - let result = - libc::pthread_mutexattr_settype(attr.as_mut_ptr(), libc::PTHREAD_MUTEX_RECURSIVE); - debug_assert_eq!(result, 0); - let result = libc::pthread_mutex_init(self.inner.get(), attr.as_ptr()); - debug_assert_eq!(result, 0); - let result = libc::pthread_mutexattr_destroy(attr.as_mut_ptr()); - debug_assert_eq!(result, 0); + cvt_nz(libc::pthread_mutexattr_init(attr.as_mut_ptr())).unwrap(); + let attr = PthreadMutexAttr(&mut attr); + cvt_nz(libc::pthread_mutexattr_settype(attr.0.as_mut_ptr(), libc::PTHREAD_MUTEX_RECURSIVE)) + .unwrap(); + cvt_nz(libc::pthread_mutex_init(self.inner.get(), attr.0.as_ptr())).unwrap(); } pub unsafe fn lock(&self) { @@ -137,3 +131,14 @@ impl ReentrantMutex { debug_assert_eq!(result, 0); } } + +struct PthreadMutexAttr<'a>(&'a mut MaybeUninit); + +impl Drop for PthreadMutexAttr<'_> { + fn drop(&mut self) { + unsafe { + let result = libc::pthread_mutexattr_destroy(self.0.as_mut_ptr()); + debug_assert_eq!(result, 0); + } + } +} diff --git a/library/std/src/sys/unix/net.rs b/library/std/src/sys/unix/net.rs index 011325fddc5b9..74c7db27226ef 100644 --- a/library/std/src/sys/unix/net.rs +++ b/library/std/src/sys/unix/net.rs @@ -77,6 +77,7 @@ impl Socket { } } + #[cfg(not(target_os = "vxworks"))] pub fn new_pair(fam: c_int, ty: c_int) -> io::Result<(Socket, Socket)> { unsafe { let mut fds = [0, 0]; @@ -98,6 +99,11 @@ impl Socket { } } + #[cfg(target_os = "vxworks")] + pub fn new_pair(_fam: c_int, _ty: c_int) -> io::Result<(Socket, Socket)> { + unimplemented!() + } + pub fn connect_timeout(&self, addr: &SocketAddr, timeout: Duration) -> io::Result<()> { self.set_nonblocking(true)?; let r = unsafe { @@ -366,7 +372,7 @@ impl IntoInner for Socket { // res_init unconditionally, we call it only when we detect we're linking // against glibc version < 2.26. (That is, when we both know its needed and // believe it's thread-safe). 
-#[cfg(target_env = "gnu")] +#[cfg(all(target_env = "gnu", not(target_os = "vxworks")))] fn on_resolver_failure() { use crate::sys; @@ -378,5 +384,5 @@ fn on_resolver_failure() { } } -#[cfg(not(target_env = "gnu"))] +#[cfg(any(not(target_env = "gnu"), target_os = "vxworks"))] fn on_resolver_failure() {} diff --git a/library/std/src/sys/unix/os.rs b/library/std/src/sys/unix/os.rs index 2392238d0a194..d5e14bec76572 100644 --- a/library/std/src/sys/unix/os.rs +++ b/library/std/src/sys/unix/os.rs @@ -37,7 +37,7 @@ cfg_if::cfg_if! { } extern "C" { - #[cfg(not(target_os = "dragonfly"))] + #[cfg(not(any(target_os = "dragonfly", target_os = "vxworks")))] #[cfg_attr( any( target_os = "linux", @@ -67,18 +67,28 @@ extern "C" { } /// Returns the platform-specific value of errno -#[cfg(not(target_os = "dragonfly"))] +#[cfg(not(any(target_os = "dragonfly", target_os = "vxworks")))] pub fn errno() -> i32 { unsafe { (*errno_location()) as i32 } } /// Sets the platform-specific value of errno -#[cfg(all(not(target_os = "linux"), not(target_os = "dragonfly")))] // needed for readdir and syscall! +#[cfg(all(not(target_os = "linux"), not(target_os = "dragonfly"), not(target_os = "vxworks")))] // needed for readdir and syscall! #[allow(dead_code)] // but not all target cfgs actually end up using it pub fn set_errno(e: i32) { unsafe { *errno_location() = e as c_int } } +#[cfg(target_os = "vxworks")] +pub fn errno() -> i32 { + unsafe { libc::errnoGet() } +} + +#[cfg(target_os = "vxworks")] +pub fn set_errno(e: i32) { + unsafe { libc::errnoSet(e as c_int) }; +} + #[cfg(target_os = "dragonfly")] pub fn errno() -> i32 { extern "C" { @@ -439,6 +449,19 @@ pub fn current_exe() -> io::Result { Err(io::Error::new(ErrorKind::Other, "Not yet implemented!")) } +#[cfg(target_os = "vxworks")] +pub fn current_exe() -> io::Result { + #[cfg(test)] + use realstd::env; + + #[cfg(not(test))] + use crate::env; + + let exe_path = env::args().next().unwrap(); + let path = path::Path::new(&exe_path); + path.canonicalize() +} + pub struct Env { iter: vec::IntoIter<(OsString, OsString)>, _dont_send_or_sync_me: PhantomData<*mut ()>, @@ -568,7 +591,8 @@ pub fn home_dir() -> Option { target_os = "android", target_os = "ios", target_os = "emscripten", - target_os = "redox" + target_os = "redox", + target_os = "vxworks" ))] unsafe fn fallback() -> Option { None @@ -577,7 +601,8 @@ pub fn home_dir() -> Option { target_os = "android", target_os = "ios", target_os = "emscripten", - target_os = "redox" + target_os = "redox", + target_os = "vxworks" )))] unsafe fn fallback() -> Option { let amt = match libc::sysconf(libc::_SC_GETPW_R_SIZE_MAX) { diff --git a/library/std/src/sys/unix/process/process_common.rs b/library/std/src/sys/unix/process/process_common.rs index 9ddd4ad4000ef..372e5e6a5b367 100644 --- a/library/std/src/sys/unix/process/process_common.rs +++ b/library/std/src/sys/unix/process/process_common.rs @@ -24,6 +24,8 @@ cfg_if::cfg_if! { // fuchsia doesn't have /dev/null } else if #[cfg(target_os = "redox")] { const DEV_NULL: &str = "null:\0"; + } else if #[cfg(target_os = "vxworks")] { + const DEV_NULL: &str = "/null\0"; } else { const DEV_NULL: &str = "/dev/null\0"; } @@ -48,7 +50,7 @@ cfg_if::cfg_if! 
{ raw[bit / 8] |= 1 << (bit % 8); return 0; } - } else { + } else if #[cfg(not(target_os = "vxworks"))] { pub use libc::{sigemptyset, sigaddset}; } } @@ -253,11 +255,17 @@ impl Command { let maybe_env = self.env.capture_if_changed(); maybe_env.map(|env| construct_envp(env, &mut self.saw_nul)) } + #[allow(dead_code)] pub fn env_saw_path(&self) -> bool { self.env.have_changed_path() } + #[allow(dead_code)] + pub fn program_is_path(&self) -> bool { + self.program.to_bytes().contains(&b'/') + } + pub fn setup_io( &self, default: Stdio, diff --git a/library/std/src/sys/unix/process/process_unix.rs b/library/std/src/sys/unix/process/process_unix.rs index 5e55f97705db6..a590c74435639 100644 --- a/library/std/src/sys/unix/process/process_unix.rs +++ b/library/std/src/sys/unix/process/process_unix.rs @@ -6,6 +6,10 @@ use crate::sys; use crate::sys::cvt; use crate::sys::process::process_common::*; +#[cfg(target_os = "vxworks")] +use libc::RTP_ID as pid_t; + +#[cfg(not(target_os = "vxworks"))] use libc::{c_int, gid_t, pid_t, uid_t}; //////////////////////////////////////////////////////////////////////////////// @@ -277,11 +281,11 @@ impl Command { envp: Option<&CStringArray>, ) -> io::Result> { use crate::mem::MaybeUninit; - use crate::sys; + use crate::sys::{self, cvt_nz}; if self.get_gid().is_some() || self.get_uid().is_some() - || self.env_saw_path() + || (self.env_saw_path() && !self.program_is_path()) || !self.get_closures().is_empty() { return Ok(None); @@ -339,10 +343,6 @@ impl Command { } } - fn cvt_nz(error: libc::c_int) -> io::Result<()> { - if error == 0 { Ok(()) } else { Err(io::Error::from_raw_os_error(error)) } - } - unsafe { let mut attrs = MaybeUninit::uninit(); cvt_nz(libc::posix_spawnattr_init(attrs.as_mut_ptr()))?; diff --git a/library/std/src/sys/unix/stack_overflow.rs b/library/std/src/sys/unix/stack_overflow.rs index c74fc2b590316..d84742053524e 100644 --- a/library/std/src/sys/unix/stack_overflow.rs +++ b/library/std/src/sys/unix/stack_overflow.rs @@ -219,7 +219,7 @@ mod imp { target_os = "solaris", target_os = "illumos", all(target_os = "netbsd", not(target_vendor = "rumprun")), - target_os = "openbsd" + target_os = "openbsd", )))] mod imp { pub unsafe fn init() {} diff --git a/library/std/src/sys/unix/thread.rs b/library/std/src/sys/unix/thread.rs index 652219e28f6e0..fdf114d6df6fe 100644 --- a/library/std/src/sys/unix/thread.rs +++ b/library/std/src/sys/unix/thread.rs @@ -6,10 +6,12 @@ use crate::ptr; use crate::sys::{os, stack_overflow}; use crate::time::Duration; -#[cfg(not(target_os = "l4re"))] +#[cfg(not(any(target_os = "l4re", target_os = "vxworks")))] pub const DEFAULT_MIN_STACK_SIZE: usize = 2 * 1024 * 1024; #[cfg(target_os = "l4re")] pub const DEFAULT_MIN_STACK_SIZE: usize = 1024 * 1024; +#[cfg(target_os = "vxworks")] +pub const DEFAULT_MIN_STACK_SIZE: usize = 256 * 1024; pub struct Thread { id: libc::pthread_t, @@ -152,10 +154,11 @@ impl Thread { target_os = "haiku", target_os = "l4re", target_os = "emscripten", - target_os = "redox" + target_os = "redox", + target_os = "vxworks" ))] pub fn set_name(_name: &CStr) { - // Newlib, Haiku, and Emscripten have no way to set a thread name. + // Newlib, Haiku, Emscripten, and VxWorks have no way to set a thread name. 
} #[cfg(target_os = "fuchsia")] pub fn set_name(_name: &CStr) { diff --git a/library/std/src/sys/vxworks/alloc.rs b/library/std/src/sys/vxworks/alloc.rs deleted file mode 100644 index 97a191d7232e0..0000000000000 --- a/library/std/src/sys/vxworks/alloc.rs +++ /dev/null @@ -1,49 +0,0 @@ -use crate::alloc::{GlobalAlloc, Layout, System}; -use crate::ptr; -use crate::sys_common::alloc::{realloc_fallback, MIN_ALIGN}; - -#[stable(feature = "alloc_system_type", since = "1.28.0")] -unsafe impl GlobalAlloc for System { - #[inline] - unsafe fn alloc(&self, layout: Layout) -> *mut u8 { - if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() { - libc::malloc(layout.size()) as *mut u8 - } else { - aligned_malloc(&layout) - } - } - - #[inline] - unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { - if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() { - libc::calloc(layout.size(), 1) as *mut u8 - } else { - let ptr = self.alloc(layout.clone()); - if !ptr.is_null() { - ptr::write_bytes(ptr, 0, layout.size()); - } - ptr - } - } - - #[inline] - unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) { - libc::free(ptr as *mut libc::c_void) - } - - #[inline] - unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { - if layout.align() <= MIN_ALIGN && layout.align() <= new_size { - libc::realloc(ptr as *mut libc::c_void, new_size) as *mut u8 - } else { - realloc_fallback(self, ptr, layout, new_size) - } - } -} - -#[inline] -unsafe fn aligned_malloc(layout: &Layout) -> *mut u8 { - let mut out = ptr::null_mut(); - let ret = libc::posix_memalign(&mut out, layout.align(), layout.size()); - if ret != 0 { ptr::null_mut() } else { out as *mut u8 } -} diff --git a/library/std/src/sys/vxworks/args.rs b/library/std/src/sys/vxworks/args.rs deleted file mode 100644 index 30cf7a707c7af..0000000000000 --- a/library/std/src/sys/vxworks/args.rs +++ /dev/null @@ -1,95 +0,0 @@ -#![allow(dead_code)] // runtime init functions not used during testing -use crate::ffi::OsString; -use crate::marker::PhantomData; -use crate::vec; - -/// One-time global initialization. -pub unsafe fn init(argc: isize, argv: *const *const u8) { - imp::init(argc, argv) -} - -/// One-time global cleanup. 
-pub unsafe fn cleanup() { - imp::cleanup() -} - -/// Returns the command line arguments -pub fn args() -> Args { - imp::args() -} - -pub struct Args { - iter: vec::IntoIter, - _dont_send_or_sync_me: PhantomData<*mut ()>, -} - -impl Args { - pub fn inner_debug(&self) -> &[OsString] { - self.iter.as_slice() - } -} - -impl Iterator for Args { - type Item = OsString; - fn next(&mut self) -> Option { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -impl ExactSizeIterator for Args { - fn len(&self) -> usize { - self.iter.len() - } -} - -impl DoubleEndedIterator for Args { - fn next_back(&mut self) -> Option { - self.iter.next_back() - } -} - -mod imp { - use super::Args; - use crate::ffi::{CStr, OsString}; - use crate::marker::PhantomData; - use crate::ptr; - - use crate::sys_common::mutex::StaticMutex; - - static mut ARGC: isize = 0; - static mut ARGV: *const *const u8 = ptr::null(); - static LOCK: StaticMutex = StaticMutex::new(); - - pub unsafe fn init(argc: isize, argv: *const *const u8) { - let _guard = LOCK.lock(); - ARGC = argc; - ARGV = argv; - } - - pub unsafe fn cleanup() { - let _guard = LOCK.lock(); - ARGC = 0; - ARGV = ptr::null(); - } - - pub fn args() -> Args { - Args { iter: clone().into_iter(), _dont_send_or_sync_me: PhantomData } - } - - fn clone() -> Vec { - unsafe { - let _guard = LOCK.lock(); - let ret = (0..ARGC) - .map(|i| { - let cstr = CStr::from_ptr(*ARGV.offset(i) as *const libc::c_char); - use crate::sys::vxworks::ext::ffi::OsStringExt; - OsStringExt::from_vec(cstr.to_bytes().to_vec()) - }) - .collect(); - return ret; - } - } -} diff --git a/library/std/src/sys/vxworks/cmath.rs b/library/std/src/sys/vxworks/cmath.rs deleted file mode 100644 index f327b69fc7541..0000000000000 --- a/library/std/src/sys/vxworks/cmath.rs +++ /dev/null @@ -1,32 +0,0 @@ -#![cfg(not(test))] - -use libc::{c_double, c_float}; - -extern "C" { - pub fn acos(n: c_double) -> c_double; - pub fn acosf(n: c_float) -> c_float; - pub fn asin(n: c_double) -> c_double; - pub fn asinf(n: c_float) -> c_float; - pub fn atan(n: c_double) -> c_double; - pub fn atan2(a: c_double, b: c_double) -> c_double; - pub fn atan2f(a: c_float, b: c_float) -> c_float; - pub fn atanf(n: c_float) -> c_float; - pub fn cbrt(n: c_double) -> c_double; - pub fn cbrtf(n: c_float) -> c_float; - pub fn cosh(n: c_double) -> c_double; - pub fn coshf(n: c_float) -> c_float; - pub fn expm1(n: c_double) -> c_double; - pub fn expm1f(n: c_float) -> c_float; - pub fn fdim(a: c_double, b: c_double) -> c_double; - pub fn fdimf(a: c_float, b: c_float) -> c_float; - pub fn hypot(x: c_double, y: c_double) -> c_double; - pub fn hypotf(x: c_float, y: c_float) -> c_float; - pub fn log1p(n: c_double) -> c_double; - pub fn log1pf(n: c_float) -> c_float; - pub fn sinh(n: c_double) -> c_double; - pub fn sinhf(n: c_float) -> c_float; - pub fn tan(n: c_double) -> c_double; - pub fn tanf(n: c_float) -> c_float; - pub fn tanh(n: c_double) -> c_double; - pub fn tanhf(n: c_float) -> c_float; -} diff --git a/library/std/src/sys/vxworks/condvar.rs b/library/std/src/sys/vxworks/condvar.rs deleted file mode 100644 index b4724be7c7c3b..0000000000000 --- a/library/std/src/sys/vxworks/condvar.rs +++ /dev/null @@ -1,91 +0,0 @@ -use crate::cell::UnsafeCell; -use crate::sys::mutex::{self, Mutex}; -use crate::time::Duration; - -pub struct Condvar { - inner: UnsafeCell, -} - -pub type MovableCondvar = Box; - -unsafe impl Send for Condvar {} -unsafe impl Sync for Condvar {} - -const TIMESPEC_MAX: libc::timespec = - 
libc::timespec { tv_sec: ::MAX, tv_nsec: 1_000_000_000 - 1 }; - -fn saturating_cast_to_time_t(value: u64) -> libc::time_t { - if value > ::MAX as u64 { ::MAX } else { value as libc::time_t } -} - -impl Condvar { - pub const fn new() -> Condvar { - // Might be moved and address is changing it is better to avoid - // initialization of potentially opaque OS data before it landed - Condvar { inner: UnsafeCell::new(libc::PTHREAD_COND_INITIALIZER) } - } - - pub unsafe fn init(&mut self) { - use crate::mem::MaybeUninit; - let mut attr = MaybeUninit::::uninit(); - let r = libc::pthread_condattr_init(attr.as_mut_ptr()); - assert_eq!(r, 0); - let r = libc::pthread_condattr_setclock(attr.as_mut_ptr(), libc::CLOCK_MONOTONIC); - assert_eq!(r, 0); - let r = libc::pthread_cond_init(self.inner.get(), attr.as_ptr()); - assert_eq!(r, 0); - let r = libc::pthread_condattr_destroy(attr.as_mut_ptr()); - assert_eq!(r, 0); - } - - #[inline] - pub unsafe fn notify_one(&self) { - let r = libc::pthread_cond_signal(self.inner.get()); - debug_assert_eq!(r, 0); - } - - #[inline] - pub unsafe fn notify_all(&self) { - let r = libc::pthread_cond_broadcast(self.inner.get()); - debug_assert_eq!(r, 0); - } - - #[inline] - pub unsafe fn wait(&self, mutex: &Mutex) { - let r = libc::pthread_cond_wait(self.inner.get(), mutex::raw(mutex)); - debug_assert_eq!(r, 0); - } - - // This implementation is used on systems that support pthread_condattr_setclock - // where we configure condition variable to use monotonic clock (instead of - // default system clock). This approach avoids all problems that result - // from changes made to the system time. - pub unsafe fn wait_timeout(&self, mutex: &Mutex, dur: Duration) -> bool { - use crate::mem; - - let mut now: libc::timespec = mem::zeroed(); - let r = libc::clock_gettime(libc::CLOCK_MONOTONIC, &mut now); - assert_eq!(r, 0); - - // Nanosecond calculations can't overflow because both values are below 1e9. - let nsec = dur.subsec_nanos() + now.tv_nsec as u32; - - let sec = saturating_cast_to_time_t(dur.as_secs()) - .checked_add((nsec / 1_000_000_000) as libc::time_t) - .and_then(|s| s.checked_add(now.tv_sec)); - let nsec = nsec % 1_000_000_000; - - let timeout = - sec.map(|s| libc::timespec { tv_sec: s, tv_nsec: nsec as _ }).unwrap_or(TIMESPEC_MAX); - - let r = libc::pthread_cond_timedwait(self.inner.get(), mutex::raw(mutex), &timeout); - assert!(r == libc::ETIMEDOUT || r == 0); - r == 0 - } - - #[inline] - pub unsafe fn destroy(&self) { - let r = libc::pthread_cond_destroy(self.inner.get()); - debug_assert_eq!(r, 0); - } -} diff --git a/library/std/src/sys/vxworks/ext/ffi.rs b/library/std/src/sys/vxworks/ext/ffi.rs deleted file mode 100644 index 76b34a6b5d84a..0000000000000 --- a/library/std/src/sys/vxworks/ext/ffi.rs +++ /dev/null @@ -1,38 +0,0 @@ -//! Unix-specific extension to the primitives in the `std::ffi` module -//! -//! # Examples -//! -//! ``` -//! use std::ffi::OsString; -//! use std::os::unix::ffi::OsStringExt; -//! -//! let bytes = b"foo".to_vec(); -//! -//! // OsStringExt::from_vec -//! let os_string = OsString::from_vec(bytes); -//! assert_eq!(os_string.to_str(), Some("foo")); -//! -//! // OsStringExt::into_vec -//! let bytes = os_string.into_vec(); -//! assert_eq!(bytes, b"foo"); -//! ``` -//! -//! ``` -//! use std::ffi::OsStr; -//! use std::os::unix::ffi::OsStrExt; -//! -//! let bytes = b"foo"; -//! -//! // OsStrExt::from_bytes -//! let os_str = OsStr::from_bytes(bytes); -//! assert_eq!(os_str.to_str(), Some("foo")); -//! -//! // OsStrExt::as_bytes -//! 
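// A standalone sketch mirroring the removed vxworks condvar's
// `wait_timeout` arithmetic above: take a CLOCK_MONOTONIC "now", add the
// relative `Duration`, and saturate to a maximal `timespec` on overflow so
// a huge timeout degrades into "wait forever" instead of wrapping.
// Assumes the external `libc` crate; `deadline_after` is an illustrative name.
use std::time::Duration;

const TIMESPEC_MAX: libc::timespec =
    libc::timespec { tv_sec: <libc::time_t>::MAX, tv_nsec: 1_000_000_000 - 1 };

fn saturating_cast_to_time_t(value: u64) -> libc::time_t {
    if value > <libc::time_t>::MAX as u64 { <libc::time_t>::MAX } else { value as libc::time_t }
}

fn deadline_after(dur: Duration) -> libc::timespec {
    let mut now: libc::timespec = unsafe { std::mem::zeroed() };
    let r = unsafe { libc::clock_gettime(libc::CLOCK_MONOTONIC, &mut now) };
    assert_eq!(r, 0);

    // Both addends are below 1e9, so the u32 sum cannot overflow.
    let nsec = dur.subsec_nanos() + now.tv_nsec as u32;
    let sec = saturating_cast_to_time_t(dur.as_secs())
        .checked_add((nsec / 1_000_000_000) as libc::time_t)
        .and_then(|s| s.checked_add(now.tv_sec));

    // On overflow fall back to the largest representable timespec; a caller
    // would hand this to pthread_cond_timedwait and treat ETIMEDOUT as timeout.
    sec.map(|s| libc::timespec { tv_sec: s, tv_nsec: (nsec % 1_000_000_000) as _ })
        .unwrap_or(TIMESPEC_MAX)
}

fn main() {
    let _deadline = deadline_after(Duration::from_millis(10));
}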
let bytes = os_str.as_bytes(); -//! assert_eq!(bytes, b"foo"); -//! ``` - -#![stable(feature = "rust1", since = "1.0.0")] - -#[stable(feature = "rust1", since = "1.0.0")] -pub use crate::sys_common::os_str_bytes::*; diff --git a/library/std/src/sys/vxworks/ext/fs.rs b/library/std/src/sys/vxworks/ext/fs.rs deleted file mode 100644 index 68dc21b806c0f..0000000000000 --- a/library/std/src/sys/vxworks/ext/fs.rs +++ /dev/null @@ -1,817 +0,0 @@ -#![stable(feature = "rust1", since = "1.0.0")] - -use crate::fs::{self, Permissions}; -use crate::io; -use crate::path::Path; -use crate::sys; -use crate::sys::platform::fs::MetadataExt as UnixMetadataExt; -use crate::sys_common::{AsInner, AsInnerMut, FromInner}; - -/// Unix-specific extensions to [`fs::File`]. -#[stable(feature = "file_offset", since = "1.15.0")] -pub trait FileExt { - /// Reads a number of bytes starting from a given offset. - /// - /// Returns the number of bytes read. - /// - /// The offset is relative to the start of the file and thus independent - /// from the current cursor. - /// - /// The current file cursor is not affected by this function. - /// - /// Note that similar to [`File::read`], it is not an error to return with a - /// short read. - /// - /// [`File::read`]: fs::File::read - /// - /// # Examples - /// - /// ```no_run - /// use std::io; - /// use std::fs::File; - /// use std::os::unix::prelude::FileExt; - /// - /// fn main() -> io::Result<()> { - /// let mut buf = [0u8; 8]; - /// let file = File::open("foo.txt")?; - /// - /// // We now read 8 bytes from the offset 10. - /// let num_bytes_read = file.read_at(&mut buf, 10)?; - /// println!("read {} bytes: {:?}", num_bytes_read, buf); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "file_offset", since = "1.15.0")] - fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result; - - /// Reads the exact number of byte required to fill `buf` from the given offset. - /// - /// The offset is relative to the start of the file and thus independent - /// from the current cursor. - /// - /// The current file cursor is not affected by this function. - /// - /// Similar to [`Read::read_exact`] but uses [`read_at`] instead of `read`. - /// - /// [`Read::read_exact`]: io::Read::read_exact - /// [`read_at`]: FileExt::read_at - /// - /// # Errors - /// - /// If this function encounters an error of the kind - /// [`ErrorKind::Interrupted`] then the error is ignored and the operation - /// will continue. - /// - /// If this function encounters an "end of file" before completely filling - /// the buffer, it returns an error of the kind [`ErrorKind::UnexpectedEof`]. - /// The contents of `buf` are unspecified in this case. - /// - /// If any other read error is encountered then this function immediately - /// returns. The contents of `buf` are unspecified in this case. - /// - /// If this function returns an error, it is unspecified how many bytes it - /// has read, but it will never read more than would be necessary to - /// completely fill the buffer. - /// - /// [`ErrorKind::Interrupted`]: io::ErrorKind::Interrupted - /// [`ErrorKind::UnexpectedEof`]: io::ErrorKind::UnexpectedEof - /// - /// # Examples - /// - /// ```no_run - /// #![feature(rw_exact_all_at)] - /// use std::io; - /// use std::fs::File; - /// use std::os::unix::prelude::FileExt; - /// - /// fn main() -> io::Result<()> { - /// let mut buf = [0u8; 8]; - /// let file = File::open("foo.txt")?; - /// - /// // We now read exactly 8 bytes from the offset 10. 
- /// file.read_exact_at(&mut buf, 10)?; - /// println!("read {} bytes: {:?}", buf.len(), buf); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "rw_exact_all_at", since = "1.33.0")] - fn read_exact_at(&self, mut buf: &mut [u8], mut offset: u64) -> io::Result<()> { - while !buf.is_empty() { - match self.read_at(buf, offset) { - Ok(0) => break, - Ok(n) => { - let tmp = buf; - buf = &mut tmp[n..]; - offset += n as u64; - } - Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {} - Err(e) => return Err(e), - } - } - if !buf.is_empty() { - Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer")) - } else { - Ok(()) - } - } - - /// Writes a number of bytes starting from a given offset. - /// - /// Returns the number of bytes written. - /// - /// The offset is relative to the start of the file and thus independent - /// from the current cursor. - /// - /// The current file cursor is not affected by this function. - /// - /// When writing beyond the end of the file, the file is appropriately - /// extended and the intermediate bytes are initialized with the value 0. - /// - /// Note that similar to [`File::write`], it is not an error to return a - /// short write. - /// - /// [`File::write`]: fs::File::write - /// - /// # Examples - /// - /// ```no_run - /// use std::fs::File; - /// use std::io; - /// use std::os::unix::prelude::FileExt; - /// - /// fn main() -> io::Result<()> { - /// let file = File::open("foo.txt")?; - /// - /// // We now write at the offset 10. - /// file.write_at(b"sushi", 10)?; - /// Ok(()) - /// } - /// ``` - #[stable(feature = "file_offset", since = "1.15.0")] - fn write_at(&self, buf: &[u8], offset: u64) -> io::Result; - - /// Attempts to write an entire buffer starting from a given offset. - /// - /// The offset is relative to the start of the file and thus independent - /// from the current cursor. - /// - /// The current file cursor is not affected by this function. - /// - /// This method will continuously call [`write_at`] until there is no more data - /// to be written or an error of non-[`ErrorKind::Interrupted`] kind is - /// returned. This method will not return until the entire buffer has been - /// successfully written or such an error occurs. The first error that is - /// not of [`ErrorKind::Interrupted`] kind generated from this method will be - /// returned. - /// - /// # Errors - /// - /// This function will return the first error of - /// non-[`ErrorKind::Interrupted`] kind that [`write_at`] returns. - /// - /// [`ErrorKind::Interrupted`]: io::ErrorKind::Interrupted - /// [`write_at`]: FileExt::write_at - /// - /// # Examples - /// - /// ```no_run - /// #![feature(rw_exact_all_at)] - /// use std::fs::File; - /// use std::io; - /// use std::os::unix::prelude::FileExt; - /// - /// fn main() -> io::Result<()> { - /// let file = File::open("foo.txt")?; - /// - /// // We now write at the offset 10. 
- /// file.write_all_at(b"sushi", 10)?; - /// Ok(()) - /// } - /// ``` - #[stable(feature = "rw_exact_all_at", since = "1.33.0")] - fn write_all_at(&self, mut buf: &[u8], mut offset: u64) -> io::Result<()> { - while !buf.is_empty() { - match self.write_at(buf, offset) { - Ok(0) => { - return Err(io::Error::new( - io::ErrorKind::WriteZero, - "failed to write whole buffer", - )); - } - Ok(n) => { - buf = &buf[n..]; - offset += n as u64 - } - Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {} - Err(e) => return Err(e), - } - } - Ok(()) - } -} - -#[stable(feature = "file_offset", since = "1.15.0")] -impl FileExt for fs::File { - fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result { - self.as_inner().read_at(buf, offset) - } - fn write_at(&self, buf: &[u8], offset: u64) -> io::Result { - self.as_inner().write_at(buf, offset) - } -} - -/// Unix-specific extensions to [`fs::Permissions`]. -#[stable(feature = "fs_ext", since = "1.1.0")] -pub trait PermissionsExt { - /// Returns the underlying raw `st_mode` bits that contain the standard - /// Unix permissions for this file. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs::File; - /// use std::os::unix::fs::PermissionsExt; - /// - /// fn main() -> std::io::Result<()> { - /// let f = File::create("foo.txt")?; - /// let metadata = f.metadata()?; - /// let permissions = metadata.permissions(); - /// - /// println!("permissions: {}", permissions.mode()); - /// Ok(()) } - /// ``` - #[stable(feature = "fs_ext", since = "1.1.0")] - fn mode(&self) -> u32; - - /// Sets the underlying raw bits for this set of permissions. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs::File; - /// use std::os::unix::fs::PermissionsExt; - /// - /// fn main() -> std::io::Result<()> { - /// let f = File::create("foo.txt")?; - /// let metadata = f.metadata()?; - /// let mut permissions = metadata.permissions(); - /// - /// permissions.set_mode(0o644); // Read/write for owner and read for others. - /// assert_eq!(permissions.mode(), 0o644); - /// Ok(()) } - /// ``` - #[stable(feature = "fs_ext", since = "1.1.0")] - fn set_mode(&mut self, mode: u32); - - /// Creates a new instance of `Permissions` from the given set of Unix - /// permission bits. - /// - /// # Examples - /// - /// ``` - /// use std::fs::Permissions; - /// use std::os::unix::fs::PermissionsExt; - /// - /// // Read/write for owner and read for others. - /// let permissions = Permissions::from_mode(0o644); - /// assert_eq!(permissions.mode(), 0o644); - /// ``` - #[stable(feature = "fs_ext", since = "1.1.0")] - fn from_mode(mode: u32) -> Self; -} - -#[stable(feature = "fs_ext", since = "1.1.0")] -impl PermissionsExt for Permissions { - fn mode(&self) -> u32 { - self.as_inner().mode() - } - - fn set_mode(&mut self, mode: u32) { - *self = Permissions::from_inner(FromInner::from_inner(mode)); - } - - fn from_mode(mode: u32) -> Permissions { - Permissions::from_inner(FromInner::from_inner(mode)) - } -} - -/// Unix-specific extensions to [`fs::OpenOptions`]. -#[stable(feature = "fs_ext", since = "1.1.0")] -pub trait OpenOptionsExt { - /// Sets the mode bits that a new file will be created with. - /// - /// If a new file is created as part of an `OpenOptions::open` call then this - /// specified `mode` will be used as the permission bits for the new file. - /// If no `mode` is set, the default of `0o666` will be used. - /// The operating system masks out bits with the system's `umask`, to produce - /// the final permissions. 
- /// - /// # Examples - /// - /// ```no_run - /// use std::fs::OpenOptions; - /// use std::os::unix::fs::OpenOptionsExt; - /// - /// # fn main() { - /// let mut options = OpenOptions::new(); - /// options.mode(0o644); // Give read/write for owner and read for others. - /// let file = options.open("foo.txt"); - /// # } - /// ``` - #[stable(feature = "fs_ext", since = "1.1.0")] - fn mode(&mut self, mode: u32) -> &mut Self; - - /// Pass custom flags to the `flags` argument of `open`. - /// - /// The bits that define the access mode are masked out with `O_ACCMODE`, to - /// ensure they do not interfere with the access mode set by Rusts options. - /// - /// Custom flags can only set flags, not remove flags set by Rusts options. - /// This options overwrites any previously set custom flags. - /// - /// # Examples - /// - /// ```no_run - /// # #![feature(libc)] - /// extern crate libc; - /// use std::fs::OpenOptions; - /// use std::os::unix::fs::OpenOptionsExt; - /// - /// # fn main() { - /// let mut options = OpenOptions::new(); - /// options.write(true); - /// if cfg!(unix) { - /// options.custom_flags(libc::O_NOFOLLOW); - /// } - /// let file = options.open("foo.txt"); - /// # } - /// ``` - #[stable(feature = "open_options_ext", since = "1.10.0")] - fn custom_flags(&mut self, flags: i32) -> &mut Self; -} - -/*#[stable(feature = "fs_ext", since = "1.1.0")] -impl OpenOptionsExt for OpenOptions { - fn mode(&mut self, mode: u32) -> &mut OpenOptions { - self.as_inner_mut().mode(mode); self - } - - fn custom_flags(&mut self, flags: i32) -> &mut OpenOptions { - self.as_inner_mut().custom_flags(flags); self - } -} -*/ - -/// Unix-specific extensions to [`fs::Metadata`]. -#[stable(feature = "metadata_ext", since = "1.1.0")] -pub trait MetadataExt { - /// Returns the ID of the device containing the file. - /// - /// # Examples - /// - /// ```no_run - /// use std::io; - /// use std::fs; - /// use std::os::unix::fs::MetadataExt; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// let dev_id = meta.dev(); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext", since = "1.1.0")] - fn dev(&self) -> u64; - /// Returns the inode number. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::MetadataExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// let inode = meta.ino(); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext", since = "1.1.0")] - fn ino(&self) -> u64; - /// Returns the rights applied to this file. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::MetadataExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// let mode = meta.mode(); - /// let user_has_write_access = mode & 0o200; - /// let user_has_read_write_access = mode & 0o600; - /// let group_has_read_access = mode & 0o040; - /// let others_have_exec_access = mode & 0o001; - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext", since = "1.1.0")] - fn mode(&self) -> u32; - /// Returns the number of hard links pointing to this file. 
- /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::MetadataExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// let nb_hard_links = meta.nlink(); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext", since = "1.1.0")] - fn nlink(&self) -> u64; - /// Returns the user ID of the owner of this file. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::MetadataExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// let user_id = meta.uid(); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext", since = "1.1.0")] - fn uid(&self) -> u32; - /// Returns the group ID of the owner of this file. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::MetadataExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// let group_id = meta.gid(); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext", since = "1.1.0")] - fn gid(&self) -> u32; - /// Returns the device ID of this file (if it is a special one). - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::MetadataExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// let device_id = meta.rdev(); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext", since = "1.1.0")] - fn rdev(&self) -> u64; - /// Returns the total size of this file in bytes. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::MetadataExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// let file_size = meta.size(); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext", since = "1.1.0")] - fn size(&self) -> u64; - /// Returns the time of the last access to the file. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::MetadataExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// let last_access_time = meta.atime(); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext", since = "1.1.0")] - fn atime(&self) -> i64; - /// Returns the time of the last access to the file in nanoseconds. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::MetadataExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// let nano_last_access_time = meta.atime_nsec(); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext", since = "1.1.0")] - fn mtime(&self) -> i64; - /// Returns the time of the last modification of the file in nanoseconds. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::MetadataExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// let nano_last_modification_time = meta.mtime_nsec(); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext", since = "1.1.0")] - fn ctime(&self) -> i64; - /// Returns the time of the last status change of the file in nanoseconds. 
- /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::MetadataExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// let nano_last_status_change_time = meta.ctime_nsec(); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext", since = "1.1.0")] - fn blksize(&self) -> u64; - /// Returns the number of blocks allocated to the file, in 512-byte units. - /// - /// Please note that this may be smaller than `st_size / 512` when the file has holes. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::MetadataExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("some_file")?; - /// let blocks = meta.blocks(); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "metadata_ext", since = "1.1.0")] - fn blocks(&self) -> u64; - #[stable(feature = "metadata_ext", since = "1.1.0")] - fn attrib(&self) -> u8; -} - -#[stable(feature = "metadata_ext", since = "1.1.0")] -impl MetadataExt for fs::Metadata { - fn dev(&self) -> u64 { - self.st_dev() - } - fn ino(&self) -> u64 { - self.st_ino() - } - fn mode(&self) -> u32 { - self.st_mode() - } - fn nlink(&self) -> u64 { - self.st_nlink() - } - fn uid(&self) -> u32 { - self.st_uid() - } - fn gid(&self) -> u32 { - self.st_gid() - } - fn rdev(&self) -> u64 { - self.st_rdev() - } - fn size(&self) -> u64 { - self.st_size() - } - fn atime(&self) -> i64 { - self.st_atime() - } - fn mtime(&self) -> i64 { - self.st_mtime() - } - fn ctime(&self) -> i64 { - self.st_ctime() - } - fn blksize(&self) -> u64 { - self.st_blksize() - } - fn blocks(&self) -> u64 { - self.st_blocks() - } - fn attrib(&self) -> u8 { - self.st_attrib() - } -} - -/// Unix-specific extensions for [`fs::FileType`]. -/// -/// Adds support for special Unix file types such as block/character devices, -/// pipes, and sockets. -#[stable(feature = "file_type_ext", since = "1.5.0")] -pub trait FileTypeExt { - /// Returns whether this file type is a block device. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::FileTypeExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("block_device_file")?; - /// let file_type = meta.file_type(); - /// assert!(file_type.is_block_device()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "file_type_ext", since = "1.5.0")] - fn is_block_device(&self) -> bool; - /// Returns whether this file type is a char device. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::FileTypeExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("char_device_file")?; - /// let file_type = meta.file_type(); - /// assert!(file_type.is_char_device()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "file_type_ext", since = "1.5.0")] - fn is_char_device(&self) -> bool; - /// Returns whether this file type is a fifo. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::FileTypeExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("fifo_file")?; - /// let file_type = meta.file_type(); - /// assert!(file_type.is_fifo()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "file_type_ext", since = "1.5.0")] - fn is_fifo(&self) -> bool; - /// Returns whether this file type is a socket. 
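// A usage sketch of the special-file checks listed above, via the stable
// `std::os::unix::fs::FileTypeExt` trait that this removed VxWorks module
// duplicated. `/dev/null` is used purely as an example path.
use std::fs;
use std::io;
use std::os::unix::fs::FileTypeExt;

fn main() -> io::Result<()> {
    let ft = fs::metadata("/dev/null")?.file_type();
    println!(
        "char device: {}, block device: {}, fifo: {}, socket: {}",
        ft.is_char_device(),
        ft.is_block_device(),
        ft.is_fifo(),
        ft.is_socket(),
    );
    Ok(())
}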
- /// - /// # Examples - /// - /// ```no_run - /// use std::fs; - /// use std::os::unix::fs::FileTypeExt; - /// use std::io; - /// - /// fn main() -> io::Result<()> { - /// let meta = fs::metadata("unix.socket")?; - /// let file_type = meta.file_type(); - /// assert!(file_type.is_socket()); - /// Ok(()) - /// } - /// ``` - #[stable(feature = "file_type_ext", since = "1.5.0")] - fn is_socket(&self) -> bool; -} - -#[stable(feature = "file_type_ext", since = "1.5.0")] -impl FileTypeExt for fs::FileType { - fn is_block_device(&self) -> bool { - self.as_inner().is(libc::S_IFBLK) - } - fn is_char_device(&self) -> bool { - self.as_inner().is(libc::S_IFCHR) - } - fn is_fifo(&self) -> bool { - self.as_inner().is(libc::S_IFIFO) - } - fn is_socket(&self) -> bool { - self.as_inner().is(libc::S_IFSOCK) - } -} - -/// Unix-specific extension methods for [`fs::DirEntry`]. -#[stable(feature = "dir_entry_ext", since = "1.1.0")] -pub trait DirEntryExt { - /// Returns the underlying `d_ino` field in the contained `dirent` - /// structure. - /// - /// # Examples - /// - /// ``` - /// use std::fs; - /// use std::os::unix::fs::DirEntryExt; - /// - /// if let Ok(entries) = fs::read_dir(".") { - /// for entry in entries { - /// if let Ok(entry) = entry { - /// // Here, `entry` is a `DirEntry`. - /// println!("{:?}: {}", entry.file_name(), entry.ino()); - /// } - /// } - /// } - /// ``` - #[stable(feature = "dir_entry_ext", since = "1.1.0")] - fn ino(&self) -> u64; -} - -#[stable(feature = "dir_entry_ext", since = "1.1.0")] -impl DirEntryExt for fs::DirEntry { - fn ino(&self) -> u64 { - self.as_inner().ino() - } -} - -/// Creates a new symbolic link on the filesystem. -/// -/// The `dst` path will be a symbolic link pointing to the `src` path. -/// -/// # Examples -/// -/// ```no_run -/// use std::os::unix::fs; -/// -/// fn main() -> std::io::Result<()> { -/// fs::symlink("a.txt", "b.txt")?; -/// Ok(()) -/// } -/// ``` -#[stable(feature = "symlink", since = "1.1.0")] -pub fn symlink, Q: AsRef>(src: P, dst: Q) -> io::Result<()> { - sys::fs::symlink(src.as_ref(), dst.as_ref()) -} - -/// Unix-specific extensions to [`fs::DirBuilder`]. -#[stable(feature = "dir_builder", since = "1.6.0")] -pub trait DirBuilderExt { - /// Sets the mode to create new directories with. This option defaults to - /// 0o777. - /// - /// # Examples - /// - /// ```no_run - /// use std::fs::DirBuilder; - /// use std::os::unix::fs::DirBuilderExt; - /// - /// let mut builder = DirBuilder::new(); - /// builder.mode(0o755); - /// ``` - #[stable(feature = "dir_builder", since = "1.6.0")] - fn mode(&mut self, mode: u32) -> &mut Self; -} - -#[stable(feature = "dir_builder", since = "1.6.0")] -impl DirBuilderExt for fs::DirBuilder { - fn mode(&mut self, mode: u32) -> &mut fs::DirBuilder { - self.as_inner_mut().set_mode(mode); - self - } -} diff --git a/library/std/src/sys/vxworks/ext/io.rs b/library/std/src/sys/vxworks/ext/io.rs deleted file mode 100644 index 8b5a2d12af74f..0000000000000 --- a/library/std/src/sys/vxworks/ext/io.rs +++ /dev/null @@ -1,208 +0,0 @@ -//! Unix-specific extensions to general I/O primitives - -#![stable(feature = "rust1", since = "1.0.0")] - -use crate::fs; -use crate::io; -use crate::net; -use crate::os::raw; -use crate::sys; -use crate::sys_common::{self, AsInner, FromInner, IntoInner}; - -/// Raw file descriptors. -#[stable(feature = "rust1", since = "1.0.0")] -pub type RawFd = raw::c_int; - -/// A trait to extract the raw unix file descriptor from an underlying -/// object. 
-/// -/// This is only available on unix platforms and must be imported in order -/// to call the method. Windows platforms have a corresponding `AsRawHandle` -/// and `AsRawSocket` set of traits. -#[stable(feature = "rust1", since = "1.0.0")] -pub trait AsRawFd { - /// Extracts the raw file descriptor. - /// - /// This method does **not** pass ownership of the raw file descriptor - /// to the caller. The descriptor is only guaranteed to be valid while - /// the original object has not yet been destroyed. - #[stable(feature = "rust1", since = "1.0.0")] - fn as_raw_fd(&self) -> RawFd; -} - -/// A trait to express the ability to construct an object from a raw file -/// descriptor. -#[stable(feature = "from_raw_os", since = "1.1.0")] -pub trait FromRawFd { - /// Constructs a new instance of `Self` from the given raw file - /// descriptor. - /// - /// This function **consumes ownership** of the specified file - /// descriptor. The returned object will take responsibility for closing - /// it when the object goes out of scope. - /// - /// This function is also unsafe as the primitives currently returned - /// have the contract that they are the sole owner of the file - /// descriptor they are wrapping. Usage of this function could - /// accidentally allow violating this contract which can cause memory - /// unsafety in code that relies on it being true. - #[stable(feature = "from_raw_os", since = "1.1.0")] - unsafe fn from_raw_fd(fd: RawFd) -> Self; -} - -/// A trait to express the ability to consume an object and acquire ownership of -/// its raw file descriptor. -#[stable(feature = "into_raw_os", since = "1.4.0")] -pub trait IntoRawFd { - /// Consumes this object, returning the raw underlying file descriptor. - /// - /// This function **transfers ownership** of the underlying file descriptor - /// to the caller. Callers are then the unique owners of the file descriptor - /// and must close the descriptor once it's no longer needed. 
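// A small round-trip sketch of the ownership contract documented above,
// using the stable `std::os::unix::io` traits that this removed VxWorks
// module mirrored: `into_raw_fd` transfers the descriptor to the caller,
// and `from_raw_fd` hands responsibility for closing it back to a `File`.
use std::fs::File;
use std::io::{self, Write};
use std::os::unix::io::{FromRawFd, IntoRawFd, RawFd};

fn main() -> io::Result<()> {
    let file = File::create("fd_roundtrip.txt")?;
    let fd: RawFd = file.into_raw_fd(); // we now own the raw descriptor
    // Safety: `fd` is open and nothing else owns it, so exactly one `File`
    // ends up responsible for closing it.
    let mut file = unsafe { File::from_raw_fd(fd) };
    file.write_all(b"hello")?;
    Ok(())
}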
- #[stable(feature = "into_raw_os", since = "1.4.0")] - fn into_raw_fd(self) -> RawFd; -} - -#[stable(feature = "raw_fd_reflexive_traits", since = "1.48.0")] -impl AsRawFd for RawFd { - fn as_raw_fd(&self) -> RawFd { - *self - } -} -#[stable(feature = "raw_fd_reflexive_traits", since = "1.48.0")] -impl IntoRawFd for RawFd { - fn into_raw_fd(self) -> RawFd { - self - } -} -#[stable(feature = "raw_fd_reflexive_traits", since = "1.48.0")] -impl FromRawFd for RawFd { - unsafe fn from_raw_fd(fd: RawFd) -> RawFd { - fd - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl AsRawFd for fs::File { - fn as_raw_fd(&self) -> RawFd { - self.as_inner().fd().raw() - } -} -#[stable(feature = "from_raw_os", since = "1.1.0")] -impl FromRawFd for fs::File { - unsafe fn from_raw_fd(fd: RawFd) -> fs::File { - fs::File::from_inner(sys::fs::File::from_inner(fd)) - } -} -#[stable(feature = "into_raw_os", since = "1.4.0")] -impl IntoRawFd for fs::File { - fn into_raw_fd(self) -> RawFd { - self.into_inner().into_fd().into_raw() - } -} - -#[stable(feature = "asraw_stdio", since = "1.21.0")] -impl AsRawFd for io::Stdin { - fn as_raw_fd(&self) -> RawFd { - libc::STDIN_FILENO - } -} - -#[stable(feature = "asraw_stdio", since = "1.21.0")] -impl AsRawFd for io::Stdout { - fn as_raw_fd(&self) -> RawFd { - libc::STDOUT_FILENO - } -} - -#[stable(feature = "asraw_stdio", since = "1.21.0")] -impl AsRawFd for io::Stderr { - fn as_raw_fd(&self) -> RawFd { - libc::STDERR_FILENO - } -} - -#[stable(feature = "asraw_stdio_locks", since = "1.35.0")] -impl<'a> AsRawFd for io::StdinLock<'a> { - fn as_raw_fd(&self) -> RawFd { - libc::STDIN_FILENO - } -} - -#[stable(feature = "asraw_stdio_locks", since = "1.35.0")] -impl<'a> AsRawFd for io::StdoutLock<'a> { - fn as_raw_fd(&self) -> RawFd { - libc::STDOUT_FILENO - } -} - -#[stable(feature = "asraw_stdio_locks", since = "1.35.0")] -impl<'a> AsRawFd for io::StderrLock<'a> { - fn as_raw_fd(&self) -> RawFd { - libc::STDERR_FILENO - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl AsRawFd for net::TcpStream { - fn as_raw_fd(&self) -> RawFd { - *self.as_inner().socket().as_inner() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl AsRawFd for net::TcpListener { - fn as_raw_fd(&self) -> RawFd { - *self.as_inner().socket().as_inner() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl AsRawFd for net::UdpSocket { - fn as_raw_fd(&self) -> RawFd { - *self.as_inner().socket().as_inner() - } -} - -#[stable(feature = "from_raw_os", since = "1.1.0")] -impl FromRawFd for net::TcpStream { - unsafe fn from_raw_fd(fd: RawFd) -> net::TcpStream { - let socket = sys::net::Socket::from_inner(fd); - net::TcpStream::from_inner(sys_common::net::TcpStream::from_inner(socket)) - } -} - -#[stable(feature = "from_raw_os", since = "1.1.0")] -impl FromRawFd for net::TcpListener { - unsafe fn from_raw_fd(fd: RawFd) -> net::TcpListener { - let socket = sys::net::Socket::from_inner(fd); - net::TcpListener::from_inner(sys_common::net::TcpListener::from_inner(socket)) - } -} - -#[stable(feature = "from_raw_os", since = "1.1.0")] -impl FromRawFd for net::UdpSocket { - unsafe fn from_raw_fd(fd: RawFd) -> net::UdpSocket { - let socket = sys::net::Socket::from_inner(fd); - net::UdpSocket::from_inner(sys_common::net::UdpSocket::from_inner(socket)) - } -} - -#[stable(feature = "into_raw_os", since = "1.4.0")] -impl IntoRawFd for net::TcpStream { - fn into_raw_fd(self) -> RawFd { - self.into_inner().into_socket().into_inner() - } -} -#[stable(feature = "into_raw_os", since = "1.4.0")] -impl 
IntoRawFd for net::TcpListener { - fn into_raw_fd(self) -> RawFd { - self.into_inner().into_socket().into_inner() - } -} -#[stable(feature = "into_raw_os", since = "1.4.0")] -impl IntoRawFd for net::UdpSocket { - fn into_raw_fd(self) -> RawFd { - self.into_inner().into_socket().into_inner() - } -} diff --git a/library/std/src/sys/vxworks/ext/mod.rs b/library/std/src/sys/vxworks/ext/mod.rs deleted file mode 100644 index 8fa9bd9d1e27f..0000000000000 --- a/library/std/src/sys/vxworks/ext/mod.rs +++ /dev/null @@ -1,24 +0,0 @@ -#![stable(feature = "rust1", since = "1.0.0")] -#![allow(missing_docs)] - -pub mod ffi; -pub mod fs; -pub mod io; -pub mod process; -pub mod raw; - -#[stable(feature = "rust1", since = "1.0.0")] -pub mod prelude { - #[doc(no_inline)] - #[stable(feature = "rust1", since = "1.0.0")] - pub use super::ffi::{OsStrExt, OsStringExt}; - #[doc(no_inline)] - #[stable(feature = "rust1", since = "1.0.0")] - pub use super::fs::{FileTypeExt, MetadataExt, OpenOptionsExt, PermissionsExt}; - #[doc(no_inline)] - #[stable(feature = "rust1", since = "1.0.0")] - pub use super::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd}; - #[doc(no_inline)] - #[stable(feature = "rust1", since = "1.0.0")] - pub use super::process::ExitStatusExt; -} diff --git a/library/std/src/sys/vxworks/ext/process.rs b/library/std/src/sys/vxworks/ext/process.rs deleted file mode 100644 index 3ffa5be1b3bf1..0000000000000 --- a/library/std/src/sys/vxworks/ext/process.rs +++ /dev/null @@ -1,229 +0,0 @@ -//! Unix-specific extensions to primitives in the `std::process` module. - -#![stable(feature = "rust1", since = "1.0.0")] - -use crate::ffi::OsStr; -use crate::io; -use crate::process; -use crate::sys; -use crate::sys::vxworks::ext::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd}; -use crate::sys_common::{AsInner, AsInnerMut, FromInner, IntoInner}; - -/// Unix-specific extensions to the [`process::Command`] builder. -#[stable(feature = "rust1", since = "1.0.0")] -pub trait CommandExt { - /// Sets the child process's user ID. This translates to a - /// `setuid` call in the child process. Failure in the `setuid` - /// call will cause the spawn to fail. - #[stable(feature = "rust1", since = "1.0.0")] - fn uid(&mut self, id: u16) -> &mut process::Command; - - /// Similar to `uid`, but sets the group ID of the child process. This has - /// the same semantics as the `uid` field. - #[stable(feature = "rust1", since = "1.0.0")] - fn gid(&mut self, id: u16) -> &mut process::Command; - - /// Schedules a closure to be run just before the `exec` function is - /// invoked. - /// - /// The closure is allowed to return an I/O error whose OS error code will - /// be communicated back to the parent and returned as an error from when - /// the spawn was requested. - /// - /// Multiple closures can be registered and they will be called in order of - /// their registration. If a closure returns `Err` then no further closures - /// will be called and the spawn operation will immediately return with a - /// failure. - /// - /// # Notes and Safety - /// - /// This closure will be run in the context of the child process after a - /// `fork`. This primarily means that any modifications made to memory on - /// behalf of this closure will **not** be visible to the parent process. - /// This is often a very constrained environment where normal operations - /// like `malloc` or acquiring a mutex are not guaranteed to work (due to - /// other threads perhaps still running when the `fork` was run). 
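// A hedged usage sketch of the `pre_exec` hook described above, via the
// stable `std::os::unix::process::CommandExt`. The closure runs in the
// child after `fork`, so it sticks to async-signal-safe work (a raw
// `libc::setsid` here) and avoids allocation or locking. Assumes the
// external `libc` crate; the `true` command is purely an illustrative child.
use std::io;
use std::os::unix::process::CommandExt;
use std::process::Command;

fn main() -> io::Result<()> {
    let mut cmd = Command::new("true");
    unsafe {
        cmd.pre_exec(|| {
            // Detach from the controlling terminal; returning an io::Error
            // aborts the spawn and reports the failure to the parent.
            if libc::setsid() == -1 {
                return Err(io::Error::last_os_error());
            }
            Ok(())
        });
    }
    cmd.status()?;
    Ok(())
}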
- /// - /// This also means that all resources such as file descriptors and - /// memory-mapped regions got duplicated. It is your responsibility to make - /// sure that the closure does not violate library invariants by making - /// invalid use of these duplicates. - /// - /// When this closure is run, aspects such as the stdio file descriptors and - /// working directory have successfully been changed, so output to these - /// locations may not appear where intended. - #[stable(feature = "process_pre_exec", since = "1.34.0")] - unsafe fn pre_exec(&mut self, f: F) -> &mut process::Command - where - F: FnMut() -> io::Result<()> + Send + Sync + 'static; - - /// Schedules a closure to be run just before the `exec` function is - /// invoked. - /// - /// This method is stable and usable, but it should be unsafe. To fix - /// that, it got deprecated in favor of the unsafe [`pre_exec`]. - /// - /// [`pre_exec`]: CommandExt::pre_exec - #[stable(feature = "process_exec", since = "1.15.0")] - #[rustc_deprecated(since = "1.37.0", reason = "should be unsafe, use `pre_exec` instead")] - fn before_exec(&mut self, f: F) -> &mut process::Command - where - F: FnMut() -> io::Result<()> + Send + Sync + 'static, - { - unsafe { self.pre_exec(f) } - } - - /// Performs all the required setup by this `Command`, followed by calling - /// the `execvp` syscall. - /// - /// On success this function will not return, and otherwise it will return - /// an error indicating why the exec (or another part of the setup of the - /// `Command`) failed. - /// - /// `exec` not returning has the same implications as calling - /// [`process::exit`] – no destructors on the current stack or any other - /// thread’s stack will be run. Therefore, it is recommended to only call - /// `exec` at a point where it is fine to not run any destructors. Note, - /// that the `execvp` syscall independently guarantees that all memory is - /// freed and all file descriptors with the `CLOEXEC` option (set by default - /// on all file descriptors opened by the standard library) are closed. - /// - /// This function, unlike `spawn`, will **not** `fork` the process to create - /// a new child. Like spawn, however, the default behavior for the stdio - /// descriptors will be to inherited from the current process. - /// - /// - /// # Notes - /// - /// The process may be in a "broken state" if this function returns in - /// error. For example the working directory, environment variables, signal - /// handling settings, various user/group information, or aspects of stdio - /// file descriptors may have changed. If a "transactional spawn" is - /// required to gracefully handle errors it is recommended to use the - /// cross-platform `spawn` instead. - #[stable(feature = "process_exec2", since = "1.9.0")] - fn exec(&mut self) -> io::Error; - - /// Set executable argument - /// - /// Set the first process argument, `argv[0]`, to something other than the - /// default executable path. 
- #[stable(feature = "process_set_argv0", since = "1.45.0")] - fn arg0(&mut self, arg: S) -> &mut process::Command - where - S: AsRef; -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl CommandExt for process::Command { - fn uid(&mut self, id: u16) -> &mut process::Command { - self.as_inner_mut().uid(id); - self - } - - fn gid(&mut self, id: u16) -> &mut process::Command { - self.as_inner_mut().gid(id); - self - } - - unsafe fn pre_exec(&mut self, f: F) -> &mut process::Command - where - F: FnMut() -> io::Result<()> + Send + Sync + 'static, - { - self.as_inner_mut().pre_exec(Box::new(f)); - self - } - - fn exec(&mut self) -> io::Error { - self.as_inner_mut().exec(sys::process::Stdio::Inherit) - } - - fn arg0(&mut self, arg: S) -> &mut process::Command - where - S: AsRef, - { - self.as_inner_mut().set_arg_0(arg.as_ref()); - self - } -} - -/// Unix-specific extensions to [`process::ExitStatus`]. -#[stable(feature = "rust1", since = "1.0.0")] -pub trait ExitStatusExt { - /// Creates a new `ExitStatus` from the raw underlying `i32` return value of - /// a process. - #[stable(feature = "exit_status_from", since = "1.12.0")] - fn from_raw(raw: i32) -> Self; - - /// If the process was terminated by a signal, returns that signal. - #[stable(feature = "rust1", since = "1.0.0")] - fn signal(&self) -> Option; -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl ExitStatusExt for process::ExitStatus { - fn from_raw(raw: i32) -> Self { - process::ExitStatus::from_inner(From::from(raw)) - } - - fn signal(&self) -> Option { - self.as_inner().signal() - } -} - -#[stable(feature = "process_extensions", since = "1.2.0")] -impl FromRawFd for process::Stdio { - unsafe fn from_raw_fd(fd: RawFd) -> process::Stdio { - let fd = sys::fd::FileDesc::new(fd); - let io = sys::process::Stdio::Fd(fd); - process::Stdio::from_inner(io) - } -} - -#[stable(feature = "process_extensions", since = "1.2.0")] -impl AsRawFd for process::ChildStdin { - fn as_raw_fd(&self) -> RawFd { - self.as_inner().fd().raw() - } -} - -#[stable(feature = "process_extensions", since = "1.2.0")] -impl AsRawFd for process::ChildStdout { - fn as_raw_fd(&self) -> RawFd { - self.as_inner().fd().raw() - } -} - -#[stable(feature = "process_extensions", since = "1.2.0")] -impl AsRawFd for process::ChildStderr { - fn as_raw_fd(&self) -> RawFd { - self.as_inner().fd().raw() - } -} - -#[stable(feature = "into_raw_os", since = "1.4.0")] -impl IntoRawFd for process::ChildStdin { - fn into_raw_fd(self) -> RawFd { - self.into_inner().into_fd().into_raw() - } -} - -#[stable(feature = "into_raw_os", since = "1.4.0")] -impl IntoRawFd for process::ChildStdout { - fn into_raw_fd(self) -> RawFd { - self.into_inner().into_fd().into_raw() - } -} - -#[stable(feature = "into_raw_os", since = "1.4.0")] -impl IntoRawFd for process::ChildStderr { - fn into_raw_fd(self) -> RawFd { - self.into_inner().into_fd().into_raw() - } -} - -/// Returns the OS-assigned process identifier associated with this process's parent. 
-#[stable(feature = "unix_ppid", since = "1.27.0")] -pub fn parent_id() -> u32 { - crate::sys::os::getppid() -} diff --git a/library/std/src/sys/vxworks/ext/raw.rs b/library/std/src/sys/vxworks/ext/raw.rs deleted file mode 100644 index 1f134f4e2d1bd..0000000000000 --- a/library/std/src/sys/vxworks/ext/raw.rs +++ /dev/null @@ -1,5 +0,0 @@ -#![stable(feature = "raw_ext", since = "1.1.0")] - -#[doc(inline)] -#[stable(feature = "pthread_t", since = "1.8.0")] -pub use crate::sys::platform::raw::pthread_t; diff --git a/library/std/src/sys/vxworks/fd.rs b/library/std/src/sys/vxworks/fd.rs deleted file mode 100644 index d58468ad539ff..0000000000000 --- a/library/std/src/sys/vxworks/fd.rs +++ /dev/null @@ -1,201 +0,0 @@ -#![unstable(reason = "not public", issue = "none", feature = "fd")] - -use crate::cmp; -use crate::io::{self, Initializer, IoSlice, IoSliceMut, Read}; -use crate::mem; -use crate::sys::cvt; -use crate::sys_common::AsInner; - -use libc::{self, c_int, c_void, ssize_t}; - -#[derive(Debug)] -pub struct FileDesc { - fd: c_int, -} - -// The maximum read limit on most POSIX-like systems is `SSIZE_MAX`, -// with the man page quoting that if the count of bytes to read is -// greater than `SSIZE_MAX` the result is "unspecified". -const READ_LIMIT: usize = ssize_t::MAX as usize; - -impl FileDesc { - pub fn new(fd: c_int) -> FileDesc { - FileDesc { fd: fd } - } - - pub fn raw(&self) -> c_int { - self.fd - } - - /// Extracts the actual file descriptor without closing it. - pub fn into_raw(self) -> c_int { - let fd = self.fd; - mem::forget(self); - fd - } - - pub fn read(&self, buf: &mut [u8]) -> io::Result { - let ret = cvt(unsafe { - libc::read(self.fd, buf.as_mut_ptr() as *mut c_void, cmp::min(buf.len(), READ_LIMIT)) - })?; - Ok(ret as usize) - } - - pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { - let ret = cvt(unsafe { - libc::readv( - self.fd, - bufs.as_ptr() as *const libc::iovec, - cmp::min(bufs.len(), c_int::MAX as usize) as c_int, - ) - })?; - Ok(ret as usize) - } - - #[inline] - pub fn is_read_vectored(&self) -> bool { - true - } - - pub fn read_to_end(&self, buf: &mut Vec) -> io::Result { - let mut me = self; - (&mut me).read_to_end(buf) - } - - pub fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result { - unsafe fn cvt_pread( - fd: c_int, - buf: *mut c_void, - count: usize, - offset: i64, - ) -> io::Result { - use libc::pread; - cvt(pread(fd, buf, count, offset)) - } - - unsafe { - cvt_pread( - self.fd, - buf.as_mut_ptr() as *mut c_void, - cmp::min(buf.len(), READ_LIMIT), - offset as i64, - ) - .map(|n| n as usize) - } - } - - pub fn write(&self, buf: &[u8]) -> io::Result { - let ret = cvt(unsafe { - libc::write(self.fd, buf.as_ptr() as *const c_void, cmp::min(buf.len(), READ_LIMIT)) - })?; - Ok(ret as usize) - } - - pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { - let ret = cvt(unsafe { - libc::writev( - self.fd, - bufs.as_ptr() as *const libc::iovec, - cmp::min(bufs.len(), c_int::MAX as usize) as c_int, - ) - })?; - Ok(ret as usize) - } - - #[inline] - pub fn is_write_vectored(&self) -> bool { - true - } - - pub fn write_at(&self, buf: &[u8], offset: u64) -> io::Result { - unsafe fn cvt_pwrite( - fd: c_int, - buf: *const c_void, - count: usize, - offset: i64, - ) -> io::Result { - use libc::pwrite; - cvt(pwrite(fd, buf, count, offset)) - } - - unsafe { - cvt_pwrite( - self.fd, - buf.as_ptr() as *const c_void, - cmp::min(buf.len(), READ_LIMIT), - offset as i64, - ) - .map(|n| n as usize) - } - } - - pub fn get_cloexec(&self) -> 
io::Result { - unsafe { Ok((cvt(libc::fcntl(self.fd, libc::F_GETFD))? & libc::FD_CLOEXEC) != 0) } - } - - pub fn set_cloexec(&self) -> io::Result<()> { - unsafe { - let previous = cvt(libc::fcntl(self.fd, libc::F_GETFD))?; - let new = previous | libc::FD_CLOEXEC; - if new != previous { - cvt(libc::fcntl(self.fd, libc::F_SETFD, new))?; - } - Ok(()) - } - } - - pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { - unsafe { - let v = nonblocking as c_int; - cvt(libc::ioctl(self.fd, libc::FIONBIO, &v))?; - Ok(()) - } - } - - // refer to pxPipeDrv library documentation. - // VxWorks uses fcntl to set O_NONBLOCK to the pipes - pub fn set_nonblocking_pipe(&self, nonblocking: bool) -> io::Result<()> { - unsafe { - let mut flags = cvt(libc::fcntl(self.fd, libc::F_GETFL, 0))?; - flags = if nonblocking { flags | libc::O_NONBLOCK } else { flags & !libc::O_NONBLOCK }; - cvt(libc::fcntl(self.fd, libc::F_SETFL, flags))?; - Ok(()) - } - } - - pub fn duplicate(&self) -> io::Result { - let fd = self.raw(); - match cvt(unsafe { libc::fcntl(fd, libc::F_DUPFD_CLOEXEC, 0) }) { - Ok(newfd) => Ok(FileDesc::new(newfd)), - Err(e) => return Err(e), - } - } -} - -impl<'a> Read for &'a FileDesc { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - (**self).read(buf) - } - - #[inline] - unsafe fn initializer(&self) -> Initializer { - Initializer::nop() - } -} - -impl AsInner for FileDesc { - fn as_inner(&self) -> &c_int { - &self.fd - } -} - -impl Drop for FileDesc { - fn drop(&mut self) { - // Note that errors are ignored when closing a file descriptor. The - // reason for this is that if an error occurs we don't actually know if - // the file descriptor was closed or not, and if we retried (for - // something like EINTR), we might close another valid file descriptor - // (opened after we closed ours. 
- let _ = unsafe { libc::close(self.fd) }; - } -} diff --git a/library/std/src/sys/vxworks/fs.rs b/library/std/src/sys/vxworks/fs.rs deleted file mode 100644 index cb761af1a25c2..0000000000000 --- a/library/std/src/sys/vxworks/fs.rs +++ /dev/null @@ -1,624 +0,0 @@ -// copies from linuxx -use crate::ffi::{CStr, CString, OsStr, OsString}; -use crate::fmt; -use crate::io::{self, Error, ErrorKind, IoSlice, IoSliceMut, SeekFrom}; -use crate::mem; -use crate::path::{Path, PathBuf}; -use crate::ptr; -use crate::sync::Arc; -use crate::sys::fd::FileDesc; -use crate::sys::time::SystemTime; -use crate::sys::vxworks::ext::ffi::OsStrExt; -use crate::sys::vxworks::ext::ffi::OsStringExt; -use crate::sys::{cvt, cvt_r}; -use crate::sys_common::{AsInner, FromInner}; -use libc::{self, c_int, mode_t, off_t, stat64}; -use libc::{dirent, ftruncate, lseek, open, readdir_r as readdir64_r}; -pub struct File(FileDesc); - -#[derive(Clone)] -pub struct FileAttr { - stat: stat64, -} - -// all DirEntry's will have a reference to this struct -struct InnerReadDir { - dirp: Dir, - root: PathBuf, -} - -pub struct ReadDir { - inner: Arc, - end_of_stream: bool, -} - -struct Dir(*mut libc::DIR); - -unsafe impl Send for Dir {} -unsafe impl Sync for Dir {} - -pub struct DirEntry { - entry: dirent, - dir: Arc, -} - -#[derive(Clone, Debug)] -pub struct OpenOptions { - // generic - read: bool, - write: bool, - append: bool, - truncate: bool, - create: bool, - create_new: bool, - // system-specific - custom_flags: i32, - mode: mode_t, -} - -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct FilePermissions { - mode: mode_t, -} - -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] -pub struct FileType { - mode: mode_t, -} - -#[derive(Debug)] -pub struct DirBuilder { - mode: mode_t, -} - -impl FileAttr { - pub fn size(&self) -> u64 { - self.stat.st_size as u64 - } - pub fn perm(&self) -> FilePermissions { - FilePermissions { mode: (self.stat.st_mode as mode_t) } - } - - pub fn file_type(&self) -> FileType { - FileType { mode: self.stat.st_mode as mode_t } - } - - pub fn modified(&self) -> io::Result { - Ok(SystemTime::from(libc::timespec { - tv_sec: self.stat.st_mtime as libc::time_t, - tv_nsec: 0, // hack 2.0; - })) - } - - pub fn accessed(&self) -> io::Result { - Ok(SystemTime::from(libc::timespec { - tv_sec: self.stat.st_atime as libc::time_t, - tv_nsec: 0, // hack - a proper fix would be better - })) - } - - pub fn created(&self) -> io::Result { - Err(io::Error::new( - io::ErrorKind::Other, - "creation time is not available on this platform currently", - )) - } -} - -impl AsInner for FileAttr { - fn as_inner(&self) -> &stat64 { - &self.stat - } -} - -impl FilePermissions { - pub fn readonly(&self) -> bool { - // check if any class (owner, group, others) has write permission - self.mode & 0o222 == 0 - } - - pub fn set_readonly(&mut self, readonly: bool) { - if readonly { - // remove write permission for all classes; equivalent to `chmod a-w ` - self.mode &= !0o222; - } else { - // add write permission for all classes; equivalent to `chmod a+w ` - self.mode |= 0o222; - } - } - pub fn mode(&self) -> u32 { - self.mode as u32 - } -} - -impl FileType { - pub fn is_dir(&self) -> bool { - self.is(libc::S_IFDIR) - } - pub fn is_file(&self) -> bool { - self.is(libc::S_IFREG) - } - pub fn is_symlink(&self) -> bool { - self.is(libc::S_IFLNK) - } - - pub fn is(&self, mode: mode_t) -> bool { - self.mode & libc::S_IFMT == mode - } -} - -impl FromInner for FilePermissions { - fn from_inner(mode: u32) -> FilePermissions { - FilePermissions { 
mode: mode as mode_t } - } -} - -impl fmt::Debug for ReadDir { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // This will only be called from std::fs::ReadDir, which will add a "ReadDir()" frame. - // Thus the result will be e g 'ReadDir("/home")' - fmt::Debug::fmt(&*self.inner.root, f) - } -} - -impl Iterator for ReadDir { - type Item = io::Result; - fn next(&mut self) -> Option> { - if self.end_of_stream { - return None; - } - - unsafe { - let mut ret = DirEntry { entry: mem::zeroed(), dir: Arc::clone(&self.inner) }; - let mut entry_ptr = ptr::null_mut(); - loop { - if readdir64_r(self.inner.dirp.0, &mut ret.entry, &mut entry_ptr) != 0 { - if entry_ptr.is_null() { - // We encountered an error (which will be returned in this iteration), but - // we also reached the end of the directory stream. The `end_of_stream` - // flag is enabled to make sure that we return `None` in the next iteration - // (instead of looping forever) - self.end_of_stream = true; - } - return Some(Err(Error::last_os_error())); - } - if entry_ptr.is_null() { - return None; - } - if ret.name_bytes() != b"." && ret.name_bytes() != b".." { - return Some(Ok(ret)); - } - } - } - } -} - -impl Drop for Dir { - fn drop(&mut self) { - let r = unsafe { libc::closedir(self.0) }; - debug_assert_eq!(r, 0); - } -} - -impl DirEntry { - pub fn path(&self) -> PathBuf { - use crate::sys::vxworks::ext::ffi::OsStrExt; - self.dir.root.join(OsStr::from_bytes(self.name_bytes())) - } - - pub fn file_name(&self) -> OsString { - OsStr::from_bytes(self.name_bytes()).to_os_string() - } - - pub fn metadata(&self) -> io::Result { - lstat(&self.path()) - } - - pub fn file_type(&self) -> io::Result { - lstat(&self.path()).map(|m| m.file_type()) - } - - pub fn ino(&self) -> u64 { - self.entry.d_ino as u64 - } - - fn name_bytes(&self) -> &[u8] { - unsafe { - //&*self.name - CStr::from_ptr(self.entry.d_name.as_ptr()).to_bytes() - } - } -} - -impl OpenOptions { - pub fn new() -> OpenOptions { - OpenOptions { - // generic - read: false, - write: false, - append: false, - truncate: false, - create: false, - create_new: false, - // system-specific - custom_flags: 0, - mode: 0o666, - } - } - - pub fn read(&mut self, read: bool) { - self.read = read; - } - pub fn write(&mut self, write: bool) { - self.write = write; - } - pub fn append(&mut self, append: bool) { - self.append = append; - } - pub fn truncate(&mut self, truncate: bool) { - self.truncate = truncate; - } - pub fn create(&mut self, create: bool) { - self.create = create; - } - pub fn create_new(&mut self, create_new: bool) { - self.create_new = create_new; - } - pub fn mode(&mut self, mode: u32) { - self.mode = mode as mode_t; - } - - fn get_access_mode(&self) -> io::Result { - match (self.read, self.write, self.append) { - (true, false, false) => Ok(libc::O_RDONLY), - (false, true, false) => Ok(libc::O_WRONLY), - (true, true, false) => Ok(libc::O_RDWR), - (false, _, true) => Ok(libc::O_WRONLY | libc::O_APPEND), - (true, _, true) => Ok(libc::O_RDWR | libc::O_APPEND), - (false, false, false) => Err(Error::from_raw_os_error(libc::EINVAL)), - } - } - - fn get_creation_mode(&self) -> io::Result { - match (self.write, self.append) { - (true, false) => {} - (false, false) => { - if self.truncate || self.create || self.create_new { - return Err(Error::from_raw_os_error(libc::EINVAL)); - } - } - (_, true) => { - if self.truncate && !self.create_new { - return Err(Error::from_raw_os_error(libc::EINVAL)); - } - } - } - - Ok(match (self.create, self.truncate, self.create_new) { - (false, 
false, false) => 0, - (true, false, false) => libc::O_CREAT, - (false, true, false) => libc::O_TRUNC, - (true, true, false) => libc::O_CREAT | libc::O_TRUNC, - (_, _, true) => libc::O_CREAT | libc::O_EXCL, - }) - } -} - -impl File { - pub fn open(path: &Path, opts: &OpenOptions) -> io::Result { - let path = cstr(path)?; - File::open_c(&path, opts) - } - - pub fn open_c(path: &CStr, opts: &OpenOptions) -> io::Result { - let flags = libc::O_CLOEXEC - | opts.get_access_mode()? - | opts.get_creation_mode()? - | (opts.custom_flags as c_int & !libc::O_ACCMODE); - let fd = cvt_r(|| unsafe { open(path.as_ptr(), flags, opts.mode as c_int) })?; - Ok(File(FileDesc::new(fd))) - } - - pub fn file_attr(&self) -> io::Result { - let mut stat: stat64 = unsafe { mem::zeroed() }; - cvt(unsafe { ::libc::fstat(self.0.raw(), &mut stat) })?; - Ok(FileAttr { stat: stat }) - } - - pub fn fsync(&self) -> io::Result<()> { - cvt_r(|| unsafe { libc::fsync(self.0.raw()) })?; - Ok(()) - } - - pub fn datasync(&self) -> io::Result<()> { - cvt_r(|| unsafe { os_datasync(self.0.raw()) })?; - return Ok(()); - unsafe fn os_datasync(fd: c_int) -> c_int { - libc::fsync(fd) - } //not supported - } - - pub fn truncate(&self, size: u64) -> io::Result<()> { - return cvt_r(|| unsafe { ftruncate(self.0.raw(), size as off_t) }).map(drop); - } - - pub fn read(&self, buf: &mut [u8]) -> io::Result { - self.0.read(buf) - } - - pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { - self.0.read_vectored(bufs) - } - - #[inline] - pub fn is_read_vectored(&self) -> bool { - self.0.is_read_vectored() - } - - pub fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result { - self.0.read_at(buf, offset) - } - - pub fn write(&self, buf: &[u8]) -> io::Result { - self.0.write(buf) - } - - pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { - self.0.write_vectored(bufs) - } - - #[inline] - pub fn is_write_vectored(&self) -> bool { - self.0.is_write_vectored() - } - - pub fn write_at(&self, buf: &[u8], offset: u64) -> io::Result { - self.0.write_at(buf, offset) - } - - pub fn flush(&self) -> io::Result<()> { - Ok(()) - } - - pub fn seek(&self, pos: SeekFrom) -> io::Result { - let (whence, pos) = match pos { - // Casting to `i64` is fine, too large values will end up as - // negative which will cause an error in `"lseek64"`. - SeekFrom::Start(off) => (libc::SEEK_SET, off as i64), - SeekFrom::End(off) => (libc::SEEK_END, off), - SeekFrom::Current(off) => (libc::SEEK_CUR, off), - }; - let n = cvt(unsafe { lseek(self.0.raw(), pos, whence) })?; - Ok(n as u64) - } - - pub fn duplicate(&self) -> io::Result { - self.0.duplicate().map(File) - } - - pub fn fd(&self) -> &FileDesc { - &self.0 - } - - pub fn into_fd(self) -> FileDesc { - self.0 - } - - pub fn set_permissions(&self, perm: FilePermissions) -> io::Result<()> { - cvt_r(|| unsafe { libc::fchmod(self.0.raw(), perm.mode) })?; - Ok(()) - } - - pub fn diverge(&self) -> ! { - panic!() - } -} - -impl DirBuilder { - pub fn new() -> DirBuilder { - DirBuilder { mode: 0o777 } - } - - pub fn mkdir(&self, p: &Path) -> io::Result<()> { - let p = cstr(p)?; - cvt(unsafe { libc::mkdir(p.as_ptr(), self.mode) })?; - Ok(()) - } - - pub fn set_mode(&mut self, mode: u32) { - self.mode = mode as mode_t; - } -} - -fn cstr(path: &Path) -> io::Result { - use crate::sys::vxworks::ext::ffi::OsStrExt; - Ok(CString::new(path.as_os_str().as_bytes())?) 
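    // Illustrative aside, not part of the original vxworks source: `CString::new`
    // rejects byte strings that contain an interior NUL, and the `?` converts the
    // resulting `NulError` into an `io::Error` (kind `InvalidInput`), so every
    // caller of `cstr` either gets a well-formed, NUL-terminated C string or a
    // clean error instead of handing malformed data to libc.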
-} - -impl FromInner for File { - fn from_inner(fd: c_int) -> File { - File(FileDesc::new(fd)) - } -} - -impl fmt::Debug for File { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fn get_path(fd: c_int) -> Option { - let mut buf = vec![0; libc::PATH_MAX as usize]; - let n = unsafe { libc::ioctl(fd, libc::FIOGETNAME, buf.as_ptr()) }; - if n == -1 { - return None; - } - let l = buf.iter().position(|&c| c == 0).unwrap(); - buf.truncate(l as usize); - Some(PathBuf::from(OsString::from_vec(buf))) - } - fn get_mode(fd: c_int) -> Option<(bool, bool)> { - let mode = unsafe { libc::fcntl(fd, libc::F_GETFL) }; - if mode == -1 { - return None; - } - match mode & libc::O_ACCMODE { - libc::O_RDONLY => Some((true, false)), - libc::O_RDWR => Some((true, true)), - libc::O_WRONLY => Some((false, true)), - _ => None, - } - } - - let fd = self.0.raw(); - let mut b = f.debug_struct("File"); - b.field("fd", &fd); - if let Some(path) = get_path(fd) { - b.field("path", &path); - } - if let Some((read, write)) = get_mode(fd) { - b.field("read", &read).field("write", &write); - } - b.finish() - } -} - -pub fn readdir(p: &Path) -> io::Result { - let root = p.to_path_buf(); - let p = cstr(p)?; - unsafe { - let ptr = libc::opendir(p.as_ptr()); - if ptr.is_null() { - Err(Error::last_os_error()) - } else { - let inner = InnerReadDir { dirp: Dir(ptr), root }; - Ok(ReadDir { inner: Arc::new(inner), end_of_stream: false }) - } - } -} - -pub fn unlink(p: &Path) -> io::Result<()> { - let p = cstr(p)?; - cvt(unsafe { libc::unlink(p.as_ptr()) })?; - Ok(()) -} - -pub fn rename(old: &Path, new: &Path) -> io::Result<()> { - let old = cstr(old)?; - let new = cstr(new)?; - cvt(unsafe { libc::rename(old.as_ptr(), new.as_ptr()) })?; - Ok(()) -} - -pub fn set_perm(p: &Path, perm: FilePermissions) -> io::Result<()> { - let p = cstr(p)?; - cvt_r(|| unsafe { libc::chmod(p.as_ptr(), perm.mode) })?; - Ok(()) -} - -pub fn rmdir(p: &Path) -> io::Result<()> { - let p = cstr(p)?; - cvt(unsafe { libc::rmdir(p.as_ptr()) })?; - Ok(()) -} - -pub fn remove_dir_all(path: &Path) -> io::Result<()> { - let filetype = lstat(path)?.file_type(); - if filetype.is_symlink() { unlink(path) } else { remove_dir_all_recursive(path) } -} - -fn remove_dir_all_recursive(path: &Path) -> io::Result<()> { - for child in readdir(path)? { - let child = child?; - if child.file_type()?.is_dir() { - remove_dir_all_recursive(&child.path())?; - } else { - unlink(&child.path())?; - } - } - rmdir(path) -} - -pub fn readlink(p: &Path) -> io::Result { - let c_path = cstr(p)?; - let p = c_path.as_ptr(); - - let mut buf = Vec::with_capacity(256); - - loop { - let buf_read = - cvt(unsafe { libc::readlink(p, buf.as_mut_ptr() as *mut _, buf.capacity()) })? as usize; - - unsafe { - buf.set_len(buf_read); - } - - if buf_read != buf.capacity() { - buf.shrink_to_fit(); - - return Ok(PathBuf::from(OsString::from_vec(buf))); - } - - // Trigger the internal buffer resizing logic of `Vec` by requiring - // more space than the current capacity. The length is guaranteed to be - // the same as the capacity due to the if statement above. 
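    // Illustrative aside, not part of the original vxworks source: a minimal
    // sketch of the `Vec` behaviour the loop above relies on. Once `len ==
    // capacity`, `reserve(1)` must reallocate, and `Vec` grows with an amortized
    // strategy, so the readlink retry loop does not reallocate byte-by-byte.
    // The helper name below is made up for illustration.
    fn sketch_reserve_forces_growth() {
        let mut buf: Vec<u8> = Vec::with_capacity(16);
        buf.resize(buf.capacity(), 0); // fill it completely: len == capacity
        let old_cap = buf.capacity();
        buf.reserve(1); // ask for more space than the current capacity
        assert!(buf.capacity() > old_cap); // Vec had to reallocate and grow
    }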
- buf.reserve(1); - } -} - -pub fn symlink(src: &Path, dst: &Path) -> io::Result<()> { - let src = cstr(src)?; - let dst = cstr(dst)?; - cvt(unsafe { libc::symlink(src.as_ptr(), dst.as_ptr()) })?; - Ok(()) -} - -pub fn link(src: &Path, dst: &Path) -> io::Result<()> { - let src = cstr(src)?; - let dst = cstr(dst)?; - cvt(unsafe { libc::link(src.as_ptr(), dst.as_ptr()) })?; - Ok(()) -} - -pub fn stat(p: &Path) -> io::Result { - let p = cstr(p)?; - let mut stat: stat64 = unsafe { mem::zeroed() }; - cvt(unsafe { libc::stat(p.as_ptr(), &mut stat as *mut _ as *mut _) })?; - Ok(FileAttr { stat }) -} - -pub fn lstat(p: &Path) -> io::Result { - let p = cstr(p)?; - let mut stat: stat64 = unsafe { mem::zeroed() }; - cvt(unsafe { ::libc::lstat(p.as_ptr(), &mut stat as *mut _ as *mut _) })?; - Ok(FileAttr { stat }) -} - -pub fn canonicalize(p: &Path) -> io::Result { - use crate::sys::vxworks::ext::ffi::OsStrExt; - let path = CString::new(p.as_os_str().as_bytes())?; - let buf; - unsafe { - let r = libc::realpath(path.as_ptr(), ptr::null_mut()); - if r.is_null() { - return Err(io::Error::last_os_error()); - } - buf = CStr::from_ptr(r).to_bytes().to_vec(); - libc::free(r as *mut _); - } - Ok(PathBuf::from(OsString::from_vec(buf))) -} - -pub fn copy(from: &Path, to: &Path) -> io::Result { - use crate::fs::File; - if !from.is_file() { - return Err(Error::new( - ErrorKind::InvalidInput, - "the source path is not an existing regular file", - )); - } - - let mut reader = File::open(from)?; - let mut writer = File::create(to)?; - let perm = reader.metadata()?.permissions(); - - let ret = io::copy(&mut reader, &mut writer)?; - writer.set_permissions(perm)?; - Ok(ret) -} diff --git a/library/std/src/sys/vxworks/io.rs b/library/std/src/sys/vxworks/io.rs deleted file mode 100644 index 0f68ebf8da9cc..0000000000000 --- a/library/std/src/sys/vxworks/io.rs +++ /dev/null @@ -1,75 +0,0 @@ -use crate::marker::PhantomData; -use crate::slice; - -use libc::{c_void, iovec}; - -#[derive(Copy, Clone)] -#[repr(transparent)] -pub struct IoSlice<'a> { - vec: iovec, - _p: PhantomData<&'a [u8]>, -} - -impl<'a> IoSlice<'a> { - #[inline] - pub fn new(buf: &'a [u8]) -> IoSlice<'a> { - IoSlice { - vec: iovec { iov_base: buf.as_ptr() as *mut u8 as *mut c_void, iov_len: buf.len() }, - _p: PhantomData, - } - } - - #[inline] - pub fn advance(&mut self, n: usize) { - if self.vec.iov_len < n { - panic!("advancing IoSlice beyond its length"); - } - - unsafe { - self.vec.iov_len -= n; - self.vec.iov_base = self.vec.iov_base.add(n); - } - } - - #[inline] - pub fn as_slice(&self) -> &[u8] { - unsafe { slice::from_raw_parts(self.vec.iov_base as *mut u8, self.vec.iov_len) } - } -} - -pub struct IoSliceMut<'a> { - vec: iovec, - _p: PhantomData<&'a mut [u8]>, -} - -impl<'a> IoSliceMut<'a> { - #[inline] - pub fn new(buf: &'a mut [u8]) -> IoSliceMut<'a> { - IoSliceMut { - vec: iovec { iov_base: buf.as_mut_ptr() as *mut c_void, iov_len: buf.len() }, - _p: PhantomData, - } - } - - #[inline] - pub fn advance(&mut self, n: usize) { - if self.vec.iov_len < n { - panic!("advancing IoSliceMut beyond its length"); - } - - unsafe { - self.vec.iov_len -= n; - self.vec.iov_base = self.vec.iov_base.add(n); - } - } - - #[inline] - pub fn as_slice(&self) -> &[u8] { - unsafe { slice::from_raw_parts(self.vec.iov_base as *mut u8, self.vec.iov_len) } - } - - #[inline] - pub fn as_mut_slice(&mut self) -> &mut [u8] { - unsafe { slice::from_raw_parts_mut(self.vec.iov_base as *mut u8, self.vec.iov_len) } - } -} diff --git a/library/std/src/sys/vxworks/memchr.rs 
b/library/std/src/sys/vxworks/memchr.rs deleted file mode 100644 index 928100c92ffad..0000000000000 --- a/library/std/src/sys/vxworks/memchr.rs +++ /dev/null @@ -1,21 +0,0 @@ -// Original implementation taken from rust-memchr. -// Copyright 2015 Andrew Gallant, bluss and Nicolas Koch - -pub fn memchr(needle: u8, haystack: &[u8]) -> Option { - let p = unsafe { - libc::memchr( - haystack.as_ptr() as *const libc::c_void, - needle as libc::c_int, - haystack.len(), - ) - }; - if p.is_null() { None } else { Some(p as usize - (haystack.as_ptr() as usize)) } -} - -pub fn memrchr(needle: u8, haystack: &[u8]) -> Option { - fn memrchr_specific(needle: u8, haystack: &[u8]) -> Option { - core::slice::memchr::memrchr(needle, haystack) - } - - memrchr_specific(needle, haystack) -} diff --git a/library/std/src/sys/vxworks/mod.rs b/library/std/src/sys/vxworks/mod.rs index 1132a849e2f18..c20edaa1a4778 100644 --- a/library/std/src/sys/vxworks/mod.rs +++ b/library/std/src/sys/vxworks/mod.rs @@ -7,29 +7,53 @@ pub use self::rand::hashmap_random_keys; pub use crate::os::vxworks as platform; pub use libc::strlen; +#[macro_use] +#[path = "../unix/weak.rs"] +pub mod weak; + +#[path = "../unix/alloc.rs"] pub mod alloc; +#[path = "../unix/args.rs"] pub mod args; +#[path = "../unix/cmath.rs"] pub mod cmath; +#[path = "../unix/condvar.rs"] pub mod condvar; pub mod env; +#[path = "../unix/ext/mod.rs"] pub mod ext; +#[path = "../unix/fd.rs"] pub mod fd; +#[path = "../unix/fs.rs"] pub mod fs; +#[path = "../unix/io.rs"] pub mod io; +#[path = "../unix/memchr.rs"] pub mod memchr; +#[path = "../unix/mutex.rs"] pub mod mutex; +#[path = "../unix/net.rs"] pub mod net; +#[path = "../unix/os.rs"] pub mod os; +#[path = "../unix/path.rs"] pub mod path; +#[path = "../unix/pipe.rs"] pub mod pipe; pub mod process; pub mod rand; +#[path = "../unix/rwlock.rs"] pub mod rwlock; +#[path = "../unix/stack_overflow.rs"] pub mod stack_overflow; +#[path = "../unix/stdio.rs"] pub mod stdio; +#[path = "../unix/thread.rs"] pub mod thread; pub mod thread_local_dtor; +#[path = "../unix/thread_local_key.rs"] pub mod thread_local_key; +#[path = "../unix/time.rs"] pub mod time; pub use crate::sys_common::os_str_bytes as os_str; diff --git a/library/std/src/sys/vxworks/mutex.rs b/library/std/src/sys/vxworks/mutex.rs deleted file mode 100644 index dd7582c21a727..0000000000000 --- a/library/std/src/sys/vxworks/mutex.rs +++ /dev/null @@ -1,133 +0,0 @@ -use crate::cell::UnsafeCell; -use crate::mem::MaybeUninit; - -pub struct Mutex { - inner: UnsafeCell, -} - -pub type MovableMutex = Box; - -#[inline] -pub unsafe fn raw(m: &Mutex) -> *mut libc::pthread_mutex_t { - m.inner.get() -} - -unsafe impl Send for Mutex {} -unsafe impl Sync for Mutex {} - -#[allow(dead_code)] // sys isn't exported yet -impl Mutex { - pub const fn new() -> Mutex { - // Might be moved to a different address, so it is better to avoid - // initialization of potentially opaque OS data before it landed. - // Be very careful using this newly constructed `Mutex`, reentrant - // locking is undefined behavior until `init` is called! - Mutex { inner: UnsafeCell::new(libc::PTHREAD_MUTEX_INITIALIZER) } - } - #[inline] - pub unsafe fn init(&mut self) { - // Issue #33770 - // - // A pthread mutex initialized with PTHREAD_MUTEX_INITIALIZER will have - // a type of PTHREAD_MUTEX_DEFAULT, which has undefined behavior if you - // try to re-lock it from the same thread when you already hold a lock. 
- // - // In practice, glibc takes advantage of this undefined behavior to - // implement hardware lock elision, which uses hardware transactional - // memory to avoid acquiring the lock. While a transaction is in - // progress, the lock appears to be unlocked. This isn't a problem for - // other threads since the transactional memory will abort if a conflict - // is detected, however no abort is generated if re-locking from the - // same thread. - // - // Since locking the same mutex twice will result in two aliasing &mut - // references, we instead create the mutex with type - // PTHREAD_MUTEX_NORMAL which is guaranteed to deadlock if we try to - // re-lock it from the same thread, thus avoiding undefined behavior. - let mut attr = MaybeUninit::::uninit(); - let r = libc::pthread_mutexattr_init(attr.as_mut_ptr()); - debug_assert_eq!(r, 0); - let r = libc::pthread_mutexattr_settype(attr.as_mut_ptr(), libc::PTHREAD_MUTEX_NORMAL); - debug_assert_eq!(r, 0); - let r = libc::pthread_mutex_init(self.inner.get(), attr.as_ptr()); - debug_assert_eq!(r, 0); - let r = libc::pthread_mutexattr_destroy(attr.as_mut_ptr()); - debug_assert_eq!(r, 0); - } - #[inline] - pub unsafe fn lock(&self) { - let r = libc::pthread_mutex_lock(self.inner.get()); - debug_assert_eq!(r, 0); - } - #[inline] - pub unsafe fn unlock(&self) { - let r = libc::pthread_mutex_unlock(self.inner.get()); - debug_assert_eq!(r, 0); - } - #[inline] - pub unsafe fn try_lock(&self) -> bool { - libc::pthread_mutex_trylock(self.inner.get()) == 0 - } - #[inline] - #[cfg(not(target_os = "dragonfly"))] - pub unsafe fn destroy(&self) { - let r = libc::pthread_mutex_destroy(self.inner.get()); - debug_assert_eq!(r, 0); - } - #[inline] - #[cfg(target_os = "dragonfly")] - pub unsafe fn destroy(&self) { - let r = libc::pthread_mutex_destroy(self.inner.get()); - // On DragonFly pthread_mutex_destroy() returns EINVAL if called on a - // mutex that was just initialized with libc::PTHREAD_MUTEX_INITIALIZER. - // Once it is used (locked/unlocked) or pthread_mutex_init() is called, - // this behaviour no longer occurs. 
- debug_assert!(r == 0 || r == libc::EINVAL); - } -} - -pub struct ReentrantMutex { - inner: UnsafeCell, -} - -unsafe impl Send for ReentrantMutex {} -unsafe impl Sync for ReentrantMutex {} - -impl ReentrantMutex { - pub const unsafe fn uninitialized() -> ReentrantMutex { - ReentrantMutex { inner: UnsafeCell::new(libc::PTHREAD_MUTEX_INITIALIZER) } - } - - pub unsafe fn init(&self) { - let mut attr = MaybeUninit::::uninit(); - let result = libc::pthread_mutexattr_init(attr.as_mut_ptr()); - debug_assert_eq!(result, 0); - let result = - libc::pthread_mutexattr_settype(attr.as_mut_ptr(), libc::PTHREAD_MUTEX_RECURSIVE); - debug_assert_eq!(result, 0); - let result = libc::pthread_mutex_init(self.inner.get(), attr.as_ptr()); - debug_assert_eq!(result, 0); - let result = libc::pthread_mutexattr_destroy(attr.as_mut_ptr()); - debug_assert_eq!(result, 0); - } - - pub unsafe fn lock(&self) { - let result = libc::pthread_mutex_lock(self.inner.get()); - debug_assert_eq!(result, 0); - } - - #[inline] - pub unsafe fn try_lock(&self) -> bool { - libc::pthread_mutex_trylock(self.inner.get()) == 0 - } - - pub unsafe fn unlock(&self) { - let result = libc::pthread_mutex_unlock(self.inner.get()); - debug_assert_eq!(result, 0); - } - - pub unsafe fn destroy(&self) { - let result = libc::pthread_mutex_destroy(self.inner.get()); - debug_assert_eq!(result, 0); - } -} diff --git a/library/std/src/sys/vxworks/net.rs b/library/std/src/sys/vxworks/net.rs deleted file mode 100644 index 7613fbec46f39..0000000000000 --- a/library/std/src/sys/vxworks/net.rs +++ /dev/null @@ -1,335 +0,0 @@ -#[cfg(all(test, taget_env = "gnu"))] -mod tests; - -use crate::cmp; -use crate::ffi::CStr; -use crate::io; -use crate::io::{IoSlice, IoSliceMut}; -use crate::mem; -use crate::net::{Shutdown, SocketAddr}; -use crate::str; -use crate::sys::fd::FileDesc; -use crate::sys_common::net::{getsockopt, setsockopt, sockaddr_to_addr}; -use crate::sys_common::{AsInner, FromInner, IntoInner}; -use crate::time::{Duration, Instant}; -use libc::{self, c_int, c_void, size_t, sockaddr, socklen_t, EAI_SYSTEM, MSG_PEEK}; - -pub use crate::sys::{cvt, cvt_r}; - -#[allow(unused_extern_crates)] -pub extern crate libc as netc; - -pub type wrlen_t = size_t; - -pub struct Socket(FileDesc); - -pub fn init() {} - -pub fn cvt_gai(err: c_int) -> io::Result<()> { - if err == 0 { - return Ok(()); - } - - // We may need to trigger a glibc workaround. See on_resolver_failure() for details. - on_resolver_failure(); - - if err == EAI_SYSTEM { - return Err(io::Error::last_os_error()); - } - - let detail = unsafe { - str::from_utf8(CStr::from_ptr(libc::gai_strerror(err)).to_bytes()).unwrap().to_owned() - }; - Err(io::Error::new( - io::ErrorKind::Other, - &format!("failed to lookup address information: {}", detail)[..], - )) -} - -impl Socket { - pub fn new(addr: &SocketAddr, ty: c_int) -> io::Result { - let fam = match *addr { - SocketAddr::V4(..) => libc::AF_INET, - SocketAddr::V6(..) 
=> libc::AF_INET6, - }; - Socket::new_raw(fam, ty) - } - - pub fn new_raw(fam: c_int, ty: c_int) -> io::Result { - unsafe { - let fd = cvt(libc::socket(fam, ty, 0))?; - let fd = FileDesc::new(fd); - fd.set_cloexec()?; - let socket = Socket(fd); - Ok(socket) - } - } - - pub fn new_pair(_fam: c_int, _ty: c_int) -> io::Result<(Socket, Socket)> { - unimplemented!(); - } - - pub fn connect_timeout(&self, addr: &SocketAddr, timeout: Duration) -> io::Result<()> { - self.set_nonblocking(true)?; - let r = unsafe { - let (addrp, len) = addr.into_inner(); - cvt(libc::connect(self.0.raw(), addrp, len)) - }; - self.set_nonblocking(false)?; - - match r { - Ok(_) => return Ok(()), - // there's no ErrorKind for EINPROGRESS :( - Err(ref e) if e.raw_os_error() == Some(libc::EINPROGRESS) => {} - Err(e) => return Err(e), - } - - let mut pollfd = libc::pollfd { fd: self.0.raw(), events: libc::POLLOUT, revents: 0 }; - - if timeout.as_secs() == 0 && timeout.subsec_nanos() == 0 { - return Err(io::Error::new( - io::ErrorKind::InvalidInput, - "cannot set a 0 duration timeout", - )); - } - - let start = Instant::now(); - - loop { - let elapsed = start.elapsed(); - if elapsed >= timeout { - return Err(io::Error::new(io::ErrorKind::TimedOut, "connection timed out")); - } - - let timeout = timeout - elapsed; - let mut timeout = timeout - .as_secs() - .saturating_mul(1_000) - .saturating_add(timeout.subsec_nanos() as u64 / 1_000_000); - if timeout == 0 { - timeout = 1; - } - - let timeout = cmp::min(timeout, c_int::MAX as u64) as c_int; - - match unsafe { libc::poll(&mut pollfd, 1, timeout) } { - -1 => { - let err = io::Error::last_os_error(); - if err.kind() != io::ErrorKind::Interrupted { - return Err(err); - } - } - 0 => {} - _ => { - // linux returns POLLOUT|POLLERR|POLLHUP for refused connections (!), so look - // for POLLHUP rather than read readiness - if pollfd.revents & libc::POLLHUP != 0 { - let e = self.take_error()?.unwrap_or_else(|| { - io::Error::new(io::ErrorKind::Other, "no error set after POLLHUP") - }); - return Err(e); - } - - return Ok(()); - } - } - } - } - - pub fn accept(&self, storage: *mut sockaddr, len: *mut socklen_t) -> io::Result { - let fd = cvt_r(|| unsafe { libc::accept(self.0.raw(), storage, len) })?; - let fd = FileDesc::new(fd); - fd.set_cloexec()?; - Ok(Socket(fd)) - } - - pub fn duplicate(&self) -> io::Result { - self.0.duplicate().map(Socket) - } - - fn recv_with_flags(&self, buf: &mut [u8], flags: c_int) -> io::Result { - let ret = cvt(unsafe { - libc::recv(self.0.raw(), buf.as_mut_ptr() as *mut c_void, buf.len(), flags) - })?; - Ok(ret as usize) - } - - pub fn read(&self, buf: &mut [u8]) -> io::Result { - self.recv_with_flags(buf, 0) - } - - pub fn peek(&self, buf: &mut [u8]) -> io::Result { - self.recv_with_flags(buf, MSG_PEEK) - } - - pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { - self.0.read_vectored(bufs) - } - - #[inline] - pub fn is_read_vectored(&self) -> bool { - self.0.is_read_vectored() - } - - fn recv_from_with_flags( - &self, - buf: &mut [u8], - flags: c_int, - ) -> io::Result<(usize, SocketAddr)> { - let mut storage: libc::sockaddr_storage = unsafe { mem::zeroed() }; - let mut addrlen = mem::size_of_val(&storage) as libc::socklen_t; - - let n = cvt(unsafe { - libc::recvfrom( - self.0.raw(), - buf.as_mut_ptr() as *mut c_void, - buf.len(), - flags, - &mut storage as *mut _ as *mut _, - &mut addrlen, - ) - })?; - Ok((n as usize, sockaddr_to_addr(&storage, addrlen as usize)?)) - } - - pub fn recv_from(&self, buf: &mut [u8]) -> 
io::Result<(usize, SocketAddr)> { - self.recv_from_with_flags(buf, 0) - } - - pub fn peek_from(&self, buf: &mut [u8]) -> io::Result<(usize, SocketAddr)> { - self.recv_from_with_flags(buf, MSG_PEEK) - } - - pub fn write(&self, buf: &[u8]) -> io::Result { - self.0.write(buf) - } - - pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { - self.0.write_vectored(bufs) - } - - #[inline] - pub fn is_write_vectored(&self) -> bool { - self.0.is_write_vectored() - } - - pub fn set_timeout(&self, dur: Option, kind: libc::c_int) -> io::Result<()> { - let timeout = match dur { - Some(dur) => { - if dur.as_secs() == 0 && dur.subsec_nanos() == 0 { - return Err(io::Error::new( - io::ErrorKind::InvalidInput, - "cannot set a 0 duration timeout", - )); - } - - let secs = if dur.as_secs() > libc::time_t::MAX as u64 { - libc::time_t::MAX - } else { - dur.as_secs() as libc::time_t - }; - let mut timeout = libc::timeval { - tv_sec: secs, - tv_usec: (dur.subsec_nanos() / 1000) as libc::suseconds_t, - }; - if timeout.tv_sec == 0 && timeout.tv_usec == 0 { - timeout.tv_usec = 1; - } - timeout - } - None => libc::timeval { tv_sec: 0, tv_usec: 0 }, - }; - setsockopt(self, libc::SOL_SOCKET, kind, timeout) - } - - pub fn timeout(&self, kind: libc::c_int) -> io::Result> { - let raw: libc::timeval = getsockopt(self, libc::SOL_SOCKET, kind)?; - if raw.tv_sec == 0 && raw.tv_usec == 0 { - Ok(None) - } else { - let sec = raw.tv_sec as u64; - let nsec = (raw.tv_usec as u32) * 1000; - Ok(Some(Duration::new(sec, nsec))) - } - } - - pub fn shutdown(&self, how: Shutdown) -> io::Result<()> { - let how = match how { - Shutdown::Write => libc::SHUT_WR, - Shutdown::Read => libc::SHUT_RD, - Shutdown::Both => libc::SHUT_RDWR, - }; - cvt(unsafe { libc::shutdown(self.0.raw(), how) })?; - Ok(()) - } - - pub fn set_nodelay(&self, nodelay: bool) -> io::Result<()> { - setsockopt(self, libc::IPPROTO_TCP, libc::TCP_NODELAY, nodelay as c_int) - } - - pub fn nodelay(&self) -> io::Result { - let raw: c_int = getsockopt(self, libc::IPPROTO_TCP, libc::TCP_NODELAY)?; - Ok(raw != 0) - } - - pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { - let mut nonblocking = nonblocking as libc::c_int; - cvt(unsafe { libc::ioctl(*self.as_inner(), libc::FIONBIO, &mut nonblocking) }).map(drop) - } - - pub fn take_error(&self) -> io::Result> { - let raw: c_int = getsockopt(self, libc::SOL_SOCKET, libc::SO_ERROR)?; - if raw == 0 { Ok(None) } else { Ok(Some(io::Error::from_raw_os_error(raw as i32))) } - } -} - -impl AsInner for Socket { - fn as_inner(&self) -> &c_int { - self.0.as_inner() - } -} - -impl FromInner for Socket { - fn from_inner(fd: c_int) -> Socket { - Socket(FileDesc::new(fd)) - } -} - -impl IntoInner for Socket { - fn into_inner(self) -> c_int { - self.0.into_raw() - } -} - -// In versions of glibc prior to 2.26, there's a bug where the DNS resolver -// will cache the contents of /etc/resolv.conf, so changes to that file on disk -// can be ignored by a long-running program. That can break DNS lookups on e.g. -// laptops where the network comes and goes. See -// https://sourceware.org/bugzilla/show_bug.cgi?id=984. Note however that some -// distros including Debian have patched glibc to fix this for a long time. -// -// A workaround for this bug is to call the res_init libc function, to clear -// the cached configs. Unfortunately, while we believe glibc's implementation -// of res_init is thread-safe, we know that other implementations are not -// (https://github.com/rust-lang/rust/issues/43592). 
Code here in libstd could -// try to synchronize its res_init calls with a Mutex, but that wouldn't -// protect programs that call into libc in other ways. So instead of calling -// res_init unconditionally, we call it only when we detect we're linking -// against glibc version < 2.26. (That is, when we both know its needed and -// believe it's thread-safe). -#[cfg(target_env = "gnu")] -fn on_resolver_failure() { - /* - use crate::sys; - - // If the version fails to parse, we treat it the same as "not glibc". - if let Some(version) = sys::os::glibc_version() { - if version < (2, 26) { - unsafe { libc::res_init() }; - } - } - */ -} - -#[cfg(not(target_env = "gnu"))] -fn on_resolver_failure() {} diff --git a/library/std/src/sys/vxworks/net/tests.rs b/library/std/src/sys/vxworks/net/tests.rs deleted file mode 100644 index e7c6e348f8e5a..0000000000000 --- a/library/std/src/sys/vxworks/net/tests.rs +++ /dev/null @@ -1,23 +0,0 @@ -use super::*; - -#[test] -fn test_res_init() { - // This mostly just tests that the weak linkage doesn't panic wildly... - res_init_if_glibc_before_2_26().unwrap(); -} - -#[test] -fn test_parse_glibc_version() { - let cases = [ - ("0.0", Some((0, 0))), - ("01.+2", Some((1, 2))), - ("3.4.5.six", Some((3, 4))), - ("1", None), - ("1.-2", None), - ("1.foo", None), - ("foo.1", None), - ]; - for &(version_str, parsed) in cases.iter() { - assert_eq!(parsed, parse_glibc_version(version_str)); - } -} diff --git a/library/std/src/sys/vxworks/os.rs b/library/std/src/sys/vxworks/os.rs deleted file mode 100644 index 6eaec6f1e50df..0000000000000 --- a/library/std/src/sys/vxworks/os.rs +++ /dev/null @@ -1,315 +0,0 @@ -use crate::error::Error as StdError; -use crate::ffi::{CStr, CString, OsStr, OsString}; -use crate::fmt; -use crate::io; -use crate::iter; -use crate::marker::PhantomData; -use crate::mem; -use crate::memchr; -use crate::path::{self, Path, PathBuf}; -use crate::slice; -use crate::str; -use crate::sys::cvt; -use crate::sys_common::mutex::{StaticMutex, StaticMutexGuard}; -use libc::{self, c_char /*,c_void */, c_int}; -/*use sys::fd; this one is probably important */ -use crate::vec; - -const TMPBUF_SZ: usize = 128; - -// This is a terrible fix -use crate::sys::os_str::Buf; -use crate::sys_common::{AsInner, FromInner, IntoInner}; - -pub trait OsStringExt { - fn from_vec(vec: Vec) -> Self; - fn into_vec(self) -> Vec; -} - -impl OsStringExt for OsString { - fn from_vec(vec: Vec) -> OsString { - FromInner::from_inner(Buf { inner: vec }) - } - fn into_vec(self) -> Vec { - self.into_inner().inner - } -} - -pub trait OsStrExt { - fn from_bytes(slice: &[u8]) -> &Self; - fn as_bytes(&self) -> &[u8]; -} - -impl OsStrExt for OsStr { - fn from_bytes(slice: &[u8]) -> &OsStr { - unsafe { mem::transmute(slice) } - } - fn as_bytes(&self) -> &[u8] { - &self.as_inner().inner - } -} - -pub fn errno() -> i32 { - unsafe { libc::errnoGet() } -} - -pub fn set_errno(e: i32) { - unsafe { - libc::errnoSet(e as c_int); - } -} - -/// Gets a detailed string description for the given error number. 
-pub fn error_string(errno: i32) -> String { - let mut buf = [0 as c_char; TMPBUF_SZ]; - extern "C" { - fn strerror_r(errnum: c_int, buf: *mut c_char, buflen: libc::size_t) -> c_int; - } - - let p = buf.as_mut_ptr(); - unsafe { - if strerror_r(errno as c_int, p, buf.len()) < 0 { - panic!("strerror_r failure"); - } - let p = p as *const _; - str::from_utf8(CStr::from_ptr(p).to_bytes()).unwrap().to_owned() - } -} - -pub fn getcwd() -> io::Result { - let mut buf = Vec::with_capacity(512); - loop { - unsafe { - let ptr = buf.as_mut_ptr() as *mut libc::c_char; - if !libc::getcwd(ptr, buf.capacity() as libc::size_t).is_null() { - let len = CStr::from_ptr(buf.as_ptr() as *const libc::c_char).to_bytes().len(); - buf.set_len(len); - buf.shrink_to_fit(); - return Ok(PathBuf::from(OsString::from_vec(buf))); - } else { - let error = io::Error::last_os_error(); - if error.raw_os_error() != Some(libc::ERANGE) { - return Err(error); - } - } - // Trigger the internal buffer resizing logic of `Vec` by requiring - // more space than the current capacity. - let cap = buf.capacity(); - buf.set_len(cap); - buf.reserve(1); - } - } -} - -pub fn chdir(p: &path::Path) -> io::Result<()> { - let p: &OsStr = p.as_ref(); - let p = CString::new(p.as_bytes())?; - unsafe { - match libc::chdir(p.as_ptr()) == (0 as c_int) { - true => Ok(()), - false => Err(io::Error::last_os_error()), - } - } -} - -pub struct SplitPaths<'a> { - iter: iter::Map bool>, fn(&'a [u8]) -> PathBuf>, -} - -pub fn split_paths(unparsed: &OsStr) -> SplitPaths<'_> { - fn bytes_to_path(b: &[u8]) -> PathBuf { - PathBuf::from(::from_bytes(b)) - } - fn is_colon(b: &u8) -> bool { - *b == b':' - } - let unparsed = unparsed.as_bytes(); - SplitPaths { - iter: unparsed - .split(is_colon as fn(&u8) -> bool) - .map(bytes_to_path as fn(&[u8]) -> PathBuf), - } -} - -impl<'a> Iterator for SplitPaths<'a> { - type Item = PathBuf; - fn next(&mut self) -> Option { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -#[derive(Debug)] -pub struct JoinPathsError; - -pub fn join_paths(paths: I) -> Result -where - I: Iterator, - T: AsRef, -{ - let mut joined = Vec::new(); - let sep = b':'; - - for (i, path) in paths.enumerate() { - let path = path.as_ref().as_bytes(); - if i > 0 { - joined.push(sep) - } - if path.contains(&sep) { - return Err(JoinPathsError); - } - joined.extend_from_slice(path); - } - Ok(OsStringExt::from_vec(joined)) -} - -impl fmt::Display for JoinPathsError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - "path segment contains separator `:`".fmt(f) - } -} - -impl StdError for JoinPathsError { - #[allow(deprecated)] - fn description(&self) -> &str { - "failed to join paths" - } -} - -pub fn current_exe() -> io::Result { - #[cfg(test)] - use realstd::env; - - #[cfg(not(test))] - use crate::env; - - let exe_path = env::args().next().unwrap(); - let path = Path::new(&exe_path); - path.canonicalize() -} - -pub struct Env { - iter: vec::IntoIter<(OsString, OsString)>, - _dont_send_or_sync_me: PhantomData<*mut ()>, -} - -impl Iterator for Env { - type Item = (OsString, OsString); - fn next(&mut self) -> Option<(OsString, OsString)> { - self.iter.next() - } - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -pub unsafe fn environ() -> *mut *const *const c_char { - extern "C" { - static mut environ: *const *const c_char; - } - &mut environ -} - -pub unsafe fn env_lock() -> StaticMutexGuard { - // It is UB to attempt to acquire this mutex reentrantly! 
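    // Illustrative aside, not part of the original vxworks source: the scoped-guard
    // usage this lock is meant for, sketched with `std::sync::Mutex` standing in
    // for the internal `StaticMutex`. The guard is held across the whole access to
    // the shared state and released when it goes out of scope; acquiring it again
    // on the same thread while a guard is still live is exactly what the warning
    // above forbids.
    fn sketch_scoped_guard() {
        static LOCK: std::sync::Mutex<()> = std::sync::Mutex::new(()); // one process-wide lock
        let _guard = LOCK.lock().unwrap(); // acquired here...
        let path = std::env::var_os("PATH"); // ...held across the shared access
        drop(path);
    } // ...and released here when `_guard` is dropped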
- static ENV_LOCK: StaticMutex = StaticMutex::new(); - ENV_LOCK.lock() -} - -/// Returns a vector of (variable, value) byte-vector pairs for all the -/// environment variables of the current process. -pub fn env() -> Env { - unsafe { - let _guard = env_lock(); - let mut environ = *environ(); - if environ.is_null() { - panic!("os::env() failure getting env string from OS: {}", io::Error::last_os_error()); - } - let mut result = Vec::new(); - while !(*environ).is_null() { - if let Some(key_value) = parse(CStr::from_ptr(*environ).to_bytes()) { - result.push(key_value); - } - environ = environ.add(1); - } - return Env { iter: result.into_iter(), _dont_send_or_sync_me: PhantomData }; - } - - fn parse(input: &[u8]) -> Option<(OsString, OsString)> { - // Strategy (copied from glibc): Variable name and value are separated - // by an ASCII equals sign '='. Since a variable name must not be - // empty, allow variable names starting with an equals sign. Skip all - // malformed lines. - if input.is_empty() { - return None; - } - let pos = memchr::memchr(b'=', &input[1..]).map(|p| p + 1); - pos.map(|p| { - ( - OsStringExt::from_vec(input[..p].to_vec()), - OsStringExt::from_vec(input[p + 1..].to_vec()), - ) - }) - } -} - -pub fn getenv(k: &OsStr) -> io::Result> { - // environment variables with a nul byte can't be set, so their value is - // always None as well - let k = CString::new(k.as_bytes())?; - unsafe { - let _guard = env_lock(); - let s = libc::getenv(k.as_ptr()) as *const libc::c_char; - let ret = if s.is_null() { - None - } else { - Some(OsStringExt::from_vec(CStr::from_ptr(s).to_bytes().to_vec())) - }; - Ok(ret) - } -} - -pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { - let k = CString::new(k.as_bytes())?; - let v = CString::new(v.as_bytes())?; - - unsafe { - let _guard = env_lock(); - cvt(libc::setenv(k.as_ptr(), v.as_ptr(), 1)).map(drop) - } -} - -pub fn unsetenv(n: &OsStr) -> io::Result<()> { - let nbuf = CString::new(n.as_bytes())?; - - unsafe { - let _guard = env_lock(); - cvt(libc::unsetenv(nbuf.as_ptr())).map(drop) - } -} - -pub fn page_size() -> usize { - unsafe { libc::sysconf(libc::_SC_PAGESIZE) as usize } -} - -pub fn temp_dir() -> PathBuf { - crate::env::var_os("TMPDIR").map(PathBuf::from).unwrap_or_else(|| PathBuf::from("/tmp")) -} - -pub fn home_dir() -> Option { - crate::env::var_os("HOME").or_else(|| None).map(PathBuf::from) -} - -pub fn exit(code: i32) -> ! 
{ - unsafe { libc::exit(code as c_int) } -} - -pub fn getpid() -> u32 { - unsafe { libc::getpid() as u32 } -} - -pub fn getppid() -> u32 { - unsafe { libc::getppid() as u32 } -} diff --git a/library/std/src/sys/vxworks/path.rs b/library/std/src/sys/vxworks/path.rs deleted file mode 100644 index 840a7ae042625..0000000000000 --- a/library/std/src/sys/vxworks/path.rs +++ /dev/null @@ -1,19 +0,0 @@ -use crate::ffi::OsStr; -use crate::path::Prefix; - -#[inline] -pub fn is_sep_byte(b: u8) -> bool { - b == b'/' -} - -#[inline] -pub fn is_verbatim_sep(b: u8) -> bool { - b == b'/' -} - -pub fn parse_prefix(_: &OsStr) -> Option> { - None -} - -pub const MAIN_SEP_STR: &str = "/"; -pub const MAIN_SEP: char = '/'; diff --git a/library/std/src/sys/vxworks/pipe.rs b/library/std/src/sys/vxworks/pipe.rs deleted file mode 100644 index a18376212af51..0000000000000 --- a/library/std/src/sys/vxworks/pipe.rs +++ /dev/null @@ -1,107 +0,0 @@ -use crate::io::{self, IoSlice, IoSliceMut}; -use crate::mem; -use crate::sync::atomic::AtomicBool; -use crate::sys::fd::FileDesc; -use crate::sys::{cvt, cvt_r}; -use libc::{self /*, c_int apparently not used? */}; - -pub struct AnonPipe(FileDesc); - -pub fn anon_pipe() -> io::Result<(AnonPipe, AnonPipe)> { - static INVALID: AtomicBool = AtomicBool::new(false); - - let mut fds = [0; 2]; - cvt(unsafe { libc::pipe(fds.as_mut_ptr()) })?; - - let fd0 = FileDesc::new(fds[0]); - let fd1 = FileDesc::new(fds[1]); - fd0.set_cloexec()?; - fd1.set_cloexec()?; - Ok((AnonPipe(fd0), AnonPipe(fd1))) -} - -impl AnonPipe { - pub fn read(&self, buf: &mut [u8]) -> io::Result { - self.0.read(buf) - } - - pub fn read_vectored(&self, bufs: &mut [IoSliceMut<'_>]) -> io::Result { - self.0.read_vectored(bufs) - } - - #[inline] - pub fn is_read_vectored(&self) -> bool { - self.0.is_read_vectored() - } - - pub fn write(&self, buf: &[u8]) -> io::Result { - self.0.write(buf) - } - - pub fn write_vectored(&self, bufs: &[IoSlice<'_>]) -> io::Result { - self.0.write_vectored(bufs) - } - - #[inline] - pub fn is_write_vectored(&self) -> bool { - self.0.is_write_vectored() - } - - pub fn fd(&self) -> &FileDesc { - &self.0 - } - pub fn into_fd(self) -> FileDesc { - self.0 - } - pub fn diverge(&self) -> ! { - panic!() - } -} - -pub fn read2(p1: AnonPipe, v1: &mut Vec, p2: AnonPipe, v2: &mut Vec) -> io::Result<()> { - // Set both pipes into nonblocking mode as we're gonna be reading from both - // in the `select` loop below, and we wouldn't want one to block the other! - let p1 = p1.into_fd(); - let p2 = p2.into_fd(); - p1.set_nonblocking_pipe(true)?; - p2.set_nonblocking_pipe(true)?; - - let mut fds: [libc::pollfd; 2] = unsafe { mem::zeroed() }; - fds[0].fd = p1.raw(); - fds[0].events = libc::POLLIN; - fds[1].fd = p2.raw(); - fds[1].events = libc::POLLIN; - loop { - // wait for either pipe to become readable using `poll` - cvt_r(|| unsafe { libc::poll(fds.as_mut_ptr(), 2, -1) })?; - - if fds[0].revents != 0 && read(&p1, v1)? { - p2.set_nonblocking_pipe(false)?; - return p2.read_to_end(v2).map(drop); - } - if fds[1].revents != 0 && read(&p2, v2)? { - p1.set_nonblocking_pipe(false)?; - return p1.read_to_end(v1).map(drop); - } - } - - // Read as much as we can from each pipe, ignoring EWOULDBLOCK or - // EAGAIN. If we hit EOF, then this will happen because the underlying - // reader will return Ok(0), in which case we'll see `Ok` ourselves. In - // this case we flip the other fd back into blocking mode and read - // whatever's leftover on that file descriptor. 
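    // Illustrative aside, not part of the original vxworks source: the error check
    // performed by the helper below, written as a standalone predicate. std maps
    // both EWOULDBLOCK and EAGAIN to `ErrorKind::WouldBlock`, so matching on the
    // kind is equivalent to comparing the raw OS error codes as done below.
    fn sketch_is_would_block(e: &std::io::Error) -> bool {
        e.kind() == std::io::ErrorKind::WouldBlock
    }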
- fn read(fd: &FileDesc, dst: &mut Vec) -> Result { - match fd.read_to_end(dst) { - Ok(_) => Ok(true), - Err(e) => { - if e.raw_os_error() == Some(libc::EWOULDBLOCK) - || e.raw_os_error() == Some(libc::EAGAIN) - { - Ok(false) - } else { - Err(e) - } - } - } - } -} diff --git a/library/std/src/sys/vxworks/process/mod.rs b/library/std/src/sys/vxworks/process/mod.rs index c59782ff44b0b..dc6130eaa24a8 100644 --- a/library/std/src/sys/vxworks/process/mod.rs +++ b/library/std/src/sys/vxworks/process/mod.rs @@ -1,7 +1,9 @@ -pub use self::process_common::{Command, ExitCode, ExitStatus, Stdio, StdioPipes}; -pub use self::process_inner::Process; +pub use self::process_common::{Command, CommandArgs, ExitCode, Stdio, StdioPipes}; +pub use self::process_inner::{ExitStatus, Process}; pub use crate::ffi::OsString as EnvKey; +pub use crate::sys_common::process::CommandEnvs; +#[path = "../../unix/process/process_common.rs"] mod process_common; #[path = "process_vxworks.rs"] mod process_inner; diff --git a/library/std/src/sys/vxworks/process/process_common.rs b/library/std/src/sys/vxworks/process/process_common.rs deleted file mode 100644 index 6473a0c3cec41..0000000000000 --- a/library/std/src/sys/vxworks/process/process_common.rs +++ /dev/null @@ -1,399 +0,0 @@ -use crate::os::unix::prelude::*; - -use crate::collections::BTreeMap; -use crate::ffi::{CStr, CString, OsStr, OsString}; -use crate::fmt; -use crate::io; -use crate::ptr; -use crate::sys::fd::FileDesc; -use crate::sys::fs::{File, OpenOptions}; -use crate::sys::pipe::{self, AnonPipe}; -use crate::sys_common::process::CommandEnv; - -use libc::{c_char, c_int, gid_t, uid_t, EXIT_FAILURE, EXIT_SUCCESS}; - -//////////////////////////////////////////////////////////////////////////////// -// Command -//////////////////////////////////////////////////////////////////////////////// - -pub struct Command { - // Currently we try hard to ensure that the call to `.exec()` doesn't - // actually allocate any memory. While many platforms try to ensure that - // memory allocation works after a fork in a multithreaded process, it's - // been observed to be buggy and somewhat unreliable, so we do our best to - // just not do it at all! - // - // Along those lines, the `argv` and `envp` raw pointers here are exactly - // what's gonna get passed to `execvp`. The `argv` array starts with the - // `program` and ends with a NULL, and the `envp` pointer, if present, is - // also null-terminated. - // - // Right now we don't support removing arguments, so there's no much fancy - // support there, but we support adding and removing environment variables, - // so a side table is used to track where in the `envp` array each key is - // located. Whenever we add a key we update it in place if it's already - // present, and whenever we remove a key we update the locations of all - // other keys. 
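    // Illustrative aside, not part of the original vxworks source, using made-up
    // arguments to show the intended layout:
    //
    //   args: [ CString("prog"), CString("-v") ]        owns the bytes
    //   argv: [ ptr -> "prog\0", ptr -> "-v\0", NULL ]  raw pointers into `args`,
    //                                                   NULL-terminated as execvp
    //                                                   (and rtpSpawn) expect
    //
    // Keeping the owning `args` alongside the borrowed `argv` is what makes the
    // `Send`/`Sync` impls on `Argv` below sound.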
- program: CString, - args: Vec, - argv: Argv, - env: CommandEnv, - - cwd: Option, - uid: Option, - gid: Option, - saw_nul: bool, - closures: Vec io::Result<()> + Send + Sync>>, - stdin: Option, - stdout: Option, - stderr: Option, -} - -// Create a new type for `Argv`, so that we can make it `Send` and `Sync` -struct Argv(Vec<*const c_char>); - -// It is safe to make `Argv` `Send` and `Sync`, because it contains -// pointers to memory owned by `Command.args` -unsafe impl Send for Argv {} -unsafe impl Sync for Argv {} - -// passed back to std::process with the pipes connected to the child, if any -// were requested -pub struct StdioPipes { - pub stdin: Option, - pub stdout: Option, - pub stderr: Option, -} - -// passed to do_exec() with configuration of what the child stdio should look -// like -pub struct ChildPipes { - pub stdin: ChildStdio, - pub stdout: ChildStdio, - pub stderr: ChildStdio, -} - -pub enum ChildStdio { - Inherit, - Explicit(c_int), - Owned(FileDesc), -} - -pub enum Stdio { - Inherit, - Null, - MakePipe, - Fd(FileDesc), -} - -impl Command { - pub fn new(program: &OsStr) -> Command { - let mut saw_nul = false; - let program = os2c(program, &mut saw_nul); - Command { - argv: Argv(vec![program.as_ptr(), ptr::null()]), - args: vec![program.clone()], - program, - env: Default::default(), - cwd: None, - uid: None, - gid: None, - saw_nul, - closures: Vec::new(), - stdin: None, - stdout: None, - stderr: None, - } - } - - pub fn set_arg_0(&mut self, arg: &OsStr) { - // Set a new arg0 - let arg = os2c(arg, &mut self.saw_nul); - debug_assert!(self.argv.0.len() > 1); - self.argv.0[0] = arg.as_ptr(); - self.args[0] = arg; - } - - pub fn arg(&mut self, arg: &OsStr) { - // Overwrite the trailing NULL pointer in `argv` and then add a new null - // pointer. - let arg = os2c(arg, &mut self.saw_nul); - self.argv.0[self.args.len()] = arg.as_ptr(); - self.argv.0.push(ptr::null()); - - // Also make sure we keep track of the owned value to schedule a - // destructor for this memory. - self.args.push(arg); - } - - pub fn cwd(&mut self, dir: &OsStr) { - self.cwd = Some(os2c(dir, &mut self.saw_nul)); - } - pub fn uid(&mut self, id: uid_t) { - self.uid = Some(id); - } - pub fn gid(&mut self, id: gid_t) { - self.gid = Some(id); - } - - pub fn saw_nul(&self) -> bool { - self.saw_nul - } - pub fn get_argv(&self) -> &Vec<*const c_char> { - &self.argv.0 - } - - pub fn get_program(&self) -> &CStr { - &*self.program - } - - #[allow(dead_code)] - pub fn get_cwd(&self) -> &Option { - &self.cwd - } - #[allow(dead_code)] - pub fn get_uid(&self) -> Option { - self.uid - } - #[allow(dead_code)] - pub fn get_gid(&self) -> Option { - self.gid - } - - pub fn get_closures(&mut self) -> &mut Vec io::Result<()> + Send + Sync>> { - &mut self.closures - } - - pub unsafe fn pre_exec(&mut self, _f: Box io::Result<()> + Send + Sync>) { - // Fork() is not supported in vxWorks so no way to run the closure in the new procecss. 
- unimplemented!(); - } - - pub fn stdin(&mut self, stdin: Stdio) { - self.stdin = Some(stdin); - } - - pub fn stdout(&mut self, stdout: Stdio) { - self.stdout = Some(stdout); - } - - pub fn stderr(&mut self, stderr: Stdio) { - self.stderr = Some(stderr); - } - - pub fn env_mut(&mut self) -> &mut CommandEnv { - &mut self.env - } - - pub fn capture_env(&mut self) -> Option { - let maybe_env = self.env.capture_if_changed(); - maybe_env.map(|env| construct_envp(env, &mut self.saw_nul)) - } - #[allow(dead_code)] - pub fn env_saw_path(&self) -> bool { - self.env.have_changed_path() - } - - pub fn setup_io( - &self, - default: Stdio, - needs_stdin: bool, - ) -> io::Result<(StdioPipes, ChildPipes)> { - let null = Stdio::Null; - let default_stdin = if needs_stdin { &default } else { &null }; - let stdin = self.stdin.as_ref().unwrap_or(default_stdin); - let stdout = self.stdout.as_ref().unwrap_or(&default); - let stderr = self.stderr.as_ref().unwrap_or(&default); - let (their_stdin, our_stdin) = stdin.to_child_stdio(true)?; - let (their_stdout, our_stdout) = stdout.to_child_stdio(false)?; - let (their_stderr, our_stderr) = stderr.to_child_stdio(false)?; - let ours = StdioPipes { stdin: our_stdin, stdout: our_stdout, stderr: our_stderr }; - let theirs = ChildPipes { stdin: their_stdin, stdout: their_stdout, stderr: their_stderr }; - Ok((ours, theirs)) - } -} - -fn os2c(s: &OsStr, saw_nul: &mut bool) -> CString { - CString::new(s.as_bytes()).unwrap_or_else(|_e| { - *saw_nul = true; - CString::new("").unwrap() - }) -} - -// Helper type to manage ownership of the strings within a C-style array. -pub struct CStringArray { - items: Vec, - ptrs: Vec<*const c_char>, -} - -impl CStringArray { - pub fn with_capacity(capacity: usize) -> Self { - let mut result = CStringArray { - items: Vec::with_capacity(capacity), - ptrs: Vec::with_capacity(capacity + 1), - }; - result.ptrs.push(ptr::null()); - result - } - pub fn push(&mut self, item: CString) { - let l = self.ptrs.len(); - self.ptrs[l - 1] = item.as_ptr(); - self.ptrs.push(ptr::null()); - self.items.push(item); - } - pub fn as_ptr(&self) -> *const *const c_char { - self.ptrs.as_ptr() - } -} - -fn construct_envp(env: BTreeMap, saw_nul: &mut bool) -> CStringArray { - let mut result = CStringArray::with_capacity(env.len()); - for (k, v) in env { - let mut k: OsString = k.into(); - - // Reserve additional space for '=' and null terminator - k.reserve_exact(v.len() + 2); - k.push("="); - k.push(&v); - - // Add the new entry into the array - if let Ok(item) = CString::new(k.into_vec()) { - result.push(item); - } else { - *saw_nul = true; - } - } - - result -} - -impl Stdio { - pub fn to_child_stdio(&self, readable: bool) -> io::Result<(ChildStdio, Option)> { - match *self { - Stdio::Inherit => Ok((ChildStdio::Inherit, None)), - - // Make sure that the source descriptors are not an stdio - // descriptor, otherwise the order which we set the child's - // descriptors may blow away a descriptor which we are hoping to - // save. For example, suppose we want the child's stderr to be the - // parent's stdout, and the child's stdout to be the parent's - // stderr. No matter which we dup first, the second will get - // overwritten prematurely. 
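        // Illustrative aside, not part of the original vxworks source, making the
        // scenario above concrete: to give the child stdout = parent fd 2 and
        // stderr = parent fd 1, doing `dup2(2, 1)` first would destroy the old
        // fd 1 before it can become the child's stderr. Duplicating any source fd
        // in the 0..=2 range to a fresh descriptor first (the `Owned` branch
        // below) makes the later dup2 calls order-independent.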
- Stdio::Fd(ref fd) => { - if fd.raw() >= 0 && fd.raw() <= libc::STDERR_FILENO { - Ok((ChildStdio::Owned(fd.duplicate()?), None)) - } else { - Ok((ChildStdio::Explicit(fd.raw()), None)) - } - } - - Stdio::MakePipe => { - let (reader, writer) = pipe::anon_pipe()?; - let (ours, theirs) = if readable { (writer, reader) } else { (reader, writer) }; - Ok((ChildStdio::Owned(theirs.into_fd()), Some(ours))) - } - - Stdio::Null => { - let mut opts = OpenOptions::new(); - opts.read(readable); - opts.write(!readable); - let path = unsafe { CStr::from_ptr("/null\0".as_ptr() as *const _) }; - let fd = File::open_c(&path, &opts)?; - Ok((ChildStdio::Owned(fd.into_fd()), None)) - } - } - } -} - -impl From for Stdio { - fn from(pipe: AnonPipe) -> Stdio { - Stdio::Fd(pipe.into_fd()) - } -} - -impl From for Stdio { - fn from(file: File) -> Stdio { - Stdio::Fd(file.into_fd()) - } -} - -impl ChildStdio { - pub fn fd(&self) -> Option { - match *self { - ChildStdio::Inherit => None, - ChildStdio::Explicit(fd) => Some(fd), - ChildStdio::Owned(ref fd) => Some(fd.raw()), - } - } -} - -impl fmt::Debug for Command { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if self.program != self.args[0] { - write!(f, "[{:?}] ", self.program)?; - } - write!(f, "{:?}", self.args[0])?; - - for arg in &self.args[1..] { - write!(f, " {:?}", arg)?; - } - Ok(()) - } -} - -/// Unix exit statuses -#[derive(PartialEq, Eq, Clone, Copy, Debug)] -pub struct ExitStatus(c_int); - -impl ExitStatus { - pub fn new(status: c_int) -> ExitStatus { - ExitStatus(status) - } - - fn exited(&self) -> bool { - libc::WIFEXITED(self.0) - } - - pub fn success(&self) -> bool { - self.code() == Some(0) - } - - pub fn code(&self) -> Option { - if self.exited() { Some(libc::WEXITSTATUS(self.0)) } else { None } - } - - pub fn signal(&self) -> Option { - if !self.exited() { Some(libc::WTERMSIG(self.0)) } else { None } - } -} - -/// Converts a raw `c_int` to a type-safe `ExitStatus` by wrapping it without copying. 
-impl From for ExitStatus { - fn from(a: c_int) -> ExitStatus { - ExitStatus(a) - } -} - -impl fmt::Display for ExitStatus { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - if let Some(code) = self.code() { - write!(f, "exit code: {}", code) - } else { - let signal = self.signal().unwrap(); - write!(f, "signal: {}", signal) - } - } -} - -#[derive(PartialEq, Eq, Clone, Copy, Debug)] -pub struct ExitCode(u8); - -impl ExitCode { - pub const SUCCESS: ExitCode = ExitCode(EXIT_SUCCESS as _); - pub const FAILURE: ExitCode = ExitCode(EXIT_FAILURE as _); - - #[inline] - pub fn as_i32(&self) -> i32 { - self.0 as i32 - } -} diff --git a/library/std/src/sys/vxworks/process/process_vxworks.rs b/library/std/src/sys/vxworks/process/process_vxworks.rs index f7e84ae3de9c7..69adbcdddc971 100644 --- a/library/std/src/sys/vxworks/process/process_vxworks.rs +++ b/library/std/src/sys/vxworks/process/process_vxworks.rs @@ -1,3 +1,4 @@ +use crate::fmt; use crate::io::{self, Error, ErrorKind}; use crate::sys; use crate::sys::cvt; @@ -67,7 +68,7 @@ impl Command { let _lock = sys::os::env_lock(); let ret = libc::rtpSpawn( - self.get_program().as_ptr(), + self.get_program_cstr().as_ptr(), self.get_argv().as_ptr() as *mut *const c_char, // argv c_envp as *mut *const c_char, 100 as c_int, // initial priority @@ -167,3 +168,47 @@ impl Process { } } } + +/// Unix exit statuses +#[derive(PartialEq, Eq, Clone, Copy, Debug)] +pub struct ExitStatus(c_int); + +impl ExitStatus { + pub fn new(status: c_int) -> ExitStatus { + ExitStatus(status) + } + + fn exited(&self) -> bool { + libc::WIFEXITED(self.0) + } + + pub fn success(&self) -> bool { + self.code() == Some(0) + } + + pub fn code(&self) -> Option { + if self.exited() { Some(libc::WEXITSTATUS(self.0)) } else { None } + } + + pub fn signal(&self) -> Option { + if !self.exited() { Some(libc::WTERMSIG(self.0)) } else { None } + } +} + +/// Converts a raw `c_int` to a type-safe `ExitStatus` by wrapping it without copying. 
+impl From for ExitStatus { + fn from(a: c_int) -> ExitStatus { + ExitStatus(a) + } +} + +impl fmt::Display for ExitStatus { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if let Some(code) = self.code() { + write!(f, "exit code: {}", code) + } else { + let signal = self.signal().unwrap(); + write!(f, "signal: {}", signal) + } + } +} diff --git a/library/std/src/sys/vxworks/rwlock.rs b/library/std/src/sys/vxworks/rwlock.rs deleted file mode 100644 index c90304c2b4a6a..0000000000000 --- a/library/std/src/sys/vxworks/rwlock.rs +++ /dev/null @@ -1,114 +0,0 @@ -use crate::cell::UnsafeCell; -use crate::sync::atomic::{AtomicUsize, Ordering}; - -pub struct RWLock { - inner: UnsafeCell, - write_locked: UnsafeCell, - num_readers: AtomicUsize, -} - -unsafe impl Send for RWLock {} -unsafe impl Sync for RWLock {} - -impl RWLock { - pub const fn new() -> RWLock { - RWLock { - inner: UnsafeCell::new(libc::PTHREAD_RWLOCK_INITIALIZER), - write_locked: UnsafeCell::new(false), - num_readers: AtomicUsize::new(0), - } - } - - #[inline] - pub unsafe fn read(&self) { - let r = libc::pthread_rwlock_rdlock(self.inner.get()); - if r == libc::EAGAIN { - panic!("rwlock maximum reader count exceeded"); - } else if r == libc::EDEADLK || (r == 0 && *self.write_locked.get()) { - if r == 0 { - self.raw_unlock(); - } - panic!("rwlock read lock would result in deadlock"); - } else { - debug_assert_eq!(r, 0); - self.num_readers.fetch_add(1, Ordering::Relaxed); - } - } - - #[inline] - pub unsafe fn try_read(&self) -> bool { - let r = libc::pthread_rwlock_tryrdlock(self.inner.get()); - if r == 0 { - if *self.write_locked.get() { - self.raw_unlock(); - false - } else { - self.num_readers.fetch_add(1, Ordering::Relaxed); - true - } - } else { - false - } - } - - #[inline] - pub unsafe fn write(&self) { - let r = libc::pthread_rwlock_wrlock(self.inner.get()); - // See comments above for why we check for EDEADLK and write_locked. We - // also need to check that num_readers is 0. 
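        // Illustrative aside, not part of the original vxworks source: POSIX does
        // not guarantee an error from `pthread_rwlock_wrlock` when the caller
        // already holds the lock; it may deadlock or it may simply "succeed". A
        // success here would hand out a writer while `write_locked`/`num_readers`
        // show the lock is already held, which would alias a &mut from safe code,
        // so the manual bookkeeping turns that case into a deterministic panic
        // instead.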
- if r == libc::EDEADLK - || *self.write_locked.get() - || self.num_readers.load(Ordering::Relaxed) != 0 - { - if r == 0 { - self.raw_unlock(); - } - panic!("rwlock write lock would result in deadlock"); - } else { - debug_assert_eq!(r, 0); - } - *self.write_locked.get() = true; - } - - #[inline] - pub unsafe fn try_write(&self) -> bool { - let r = libc::pthread_rwlock_trywrlock(self.inner.get()); - if r == 0 { - if *self.write_locked.get() || self.num_readers.load(Ordering::Relaxed) != 0 { - self.raw_unlock(); - false - } else { - *self.write_locked.get() = true; - true - } - } else { - false - } - } - - #[inline] - unsafe fn raw_unlock(&self) { - let r = libc::pthread_rwlock_unlock(self.inner.get()); - debug_assert_eq!(r, 0); - } - - #[inline] - pub unsafe fn read_unlock(&self) { - debug_assert!(!*self.write_locked.get()); - self.num_readers.fetch_sub(1, Ordering::Relaxed); - self.raw_unlock(); - } - - #[inline] - pub unsafe fn write_unlock(&self) { - debug_assert_eq!(self.num_readers.load(Ordering::Relaxed), 0); - debug_assert!(*self.write_locked.get()); - *self.write_locked.get() = false; - self.raw_unlock(); - } - #[inline] - pub unsafe fn destroy(&self) { - let r = libc::pthread_rwlock_destroy(self.inner.get()); - debug_assert_eq!(r, 0); - } -} diff --git a/library/std/src/sys/vxworks/stack_overflow.rs b/library/std/src/sys/vxworks/stack_overflow.rs deleted file mode 100644 index 7b58c83193bf3..0000000000000 --- a/library/std/src/sys/vxworks/stack_overflow.rs +++ /dev/null @@ -1,38 +0,0 @@ -#![cfg_attr(test, allow(dead_code))] - -use self::imp::{drop_handler, make_handler}; - -pub use self::imp::cleanup; -pub use self::imp::init; - -pub struct Handler { - _data: *mut libc::c_void, -} - -impl Handler { - pub unsafe fn new() -> Handler { - make_handler() - } -} - -impl Drop for Handler { - fn drop(&mut self) { - unsafe { - drop_handler(self); - } - } -} - -mod imp { - use crate::ptr; - - pub unsafe fn init() {} - - pub unsafe fn cleanup() {} - - pub unsafe fn make_handler() -> super::Handler { - super::Handler { _data: ptr::null_mut() } - } - - pub unsafe fn drop_handler(_handler: &mut super::Handler) {} -} diff --git a/library/std/src/sys/vxworks/stdio.rs b/library/std/src/sys/vxworks/stdio.rs deleted file mode 100644 index 92e9f205b4e6e..0000000000000 --- a/library/std/src/sys/vxworks/stdio.rs +++ /dev/null @@ -1,69 +0,0 @@ -use crate::io; -use crate::sys::fd::FileDesc; - -pub struct Stdin(()); -pub struct Stdout(()); -pub struct Stderr(()); - -impl Stdin { - pub const fn new() -> Stdin { - Stdin(()) - } -} - -impl io::Read for Stdin { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - let fd = FileDesc::new(libc::STDIN_FILENO); - let ret = fd.read(buf); - fd.into_raw(); // do not close this FD - ret - } -} - -impl Stdout { - pub const fn new() -> Stdout { - Stdout(()) - } -} - -impl io::Write for Stdout { - fn write(&mut self, buf: &[u8]) -> io::Result { - let fd = FileDesc::new(libc::STDOUT_FILENO); - let ret = fd.write(buf); - fd.into_raw(); // do not close this FD - ret - } - - fn flush(&mut self) -> io::Result<()> { - Ok(()) - } -} - -impl Stderr { - pub const fn new() -> Stderr { - Stderr(()) - } -} - -impl io::Write for Stderr { - fn write(&mut self, buf: &[u8]) -> io::Result { - let fd = FileDesc::new(libc::STDERR_FILENO); - let ret = fd.write(buf); - fd.into_raw(); // do not close this FD - ret - } - - fn flush(&mut self) -> io::Result<()> { - Ok(()) - } -} - -pub fn is_ebadf(err: &io::Error) -> bool { - err.raw_os_error() == Some(libc::EBADF as i32) -} - -pub const 
STDIN_BUF_SIZE: usize = crate::sys_common::io::DEFAULT_BUF_SIZE; - -pub fn panic_output() -> Option { - Some(Stderr::new()) -} diff --git a/library/std/src/sys/vxworks/thread.rs b/library/std/src/sys/vxworks/thread.rs deleted file mode 100644 index 24a2e0f965d28..0000000000000 --- a/library/std/src/sys/vxworks/thread.rs +++ /dev/null @@ -1,155 +0,0 @@ -use crate::cmp; -use crate::ffi::CStr; -use crate::io; -use crate::mem; -use crate::ptr; -use crate::sys::{os, stack_overflow}; -use crate::time::Duration; - -pub const DEFAULT_MIN_STACK_SIZE: usize = 0x40000; // 256K - -pub struct Thread { - id: libc::pthread_t, -} - -// Some platforms may have pthread_t as a pointer in which case we still want -// a thread to be Send/Sync -unsafe impl Send for Thread {} -unsafe impl Sync for Thread {} - -// The pthread_attr_setstacksize symbol doesn't exist in the emscripten libc, -// so we have to not link to it to satisfy emcc's ERROR_ON_UNDEFINED_SYMBOLS. -unsafe fn pthread_attr_setstacksize( - attr: *mut libc::pthread_attr_t, - stack_size: libc::size_t, -) -> libc::c_int { - libc::pthread_attr_setstacksize(attr, stack_size) -} - -impl Thread { - // unsafe: see thread::Builder::spawn_unchecked for safety requirements - pub unsafe fn new(stack: usize, p: Box) -> io::Result { - let p = Box::into_raw(box p); - let mut native: libc::pthread_t = mem::zeroed(); - let mut attr: libc::pthread_attr_t = mem::zeroed(); - assert_eq!(libc::pthread_attr_init(&mut attr), 0); - - let stack_size = cmp::max(stack, min_stack_size(&attr)); - - match pthread_attr_setstacksize(&mut attr, stack_size) { - 0 => {} - n => { - assert_eq!(n, libc::EINVAL); - // EINVAL means |stack_size| is either too small or not a - // multiple of the system page size. Because it's definitely - // >= PTHREAD_STACK_MIN, it must be an alignment issue. - // Round up to the nearest page and try again. - let page_size = os::page_size(); - let stack_size = - (stack_size + page_size - 1) & (-(page_size as isize - 1) as usize - 1); - assert_eq!(libc::pthread_attr_setstacksize(&mut attr, stack_size), 0); - } - }; - - let ret = libc::pthread_create(&mut native, &attr, thread_start, p as *mut _); - // Note: if the thread creation fails and this assert fails, then p will - // be leaked. However, an alternative design could cause double-free - // which is clearly worse. - assert_eq!(libc::pthread_attr_destroy(&mut attr), 0); - - return if ret != 0 { - // The thread failed to start and as a result p was not consumed. Therefore, it is - // safe to reconstruct the box so that it gets deallocated. - drop(Box::from_raw(p)); - Err(io::Error::from_raw_os_error(ret)) - } else { - Ok(Thread { id: native }) - }; - - extern "C" fn thread_start(main: *mut libc::c_void) -> *mut libc::c_void { - unsafe { - // Next, set up our stack overflow handler which may get triggered if we run - // out of stack. - let _handler = stack_overflow::Handler::new(); - // Finally, let's run some code. - Box::from_raw(main as *mut Box)(); - } - ptr::null_mut() - } - } - - pub fn yield_now() { - let ret = unsafe { libc::sched_yield() }; - debug_assert_eq!(ret, 0); - } - - pub fn set_name(_name: &CStr) { - // VxWorks does not provide a way to set the task name except at creation time - } - - pub fn sleep(dur: Duration) { - let mut secs = dur.as_secs(); - let mut nsecs = dur.subsec_nanos() as _; - - // If we're awoken with a signal then the return value will be -1 and - // nanosleep will fill in `ts` with the remaining time. 
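        // Illustrative aside, not part of the original vxworks source, with
        // made-up numbers: on a target where `time_t` is 32-bit, a request to
        // sleep 10_000_000_000 seconds is issued as ceil(10e9 / i32::MAX), i.e.
        // five, nanosleep calls, each clamped to `time_t::MAX`. Every completed
        // call subtracts its chunk from `secs`, so the loop below terminates once
        // the whole duration has elapsed, or retries with the remainder reported
        // by nanosleep after an EINTR.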
- unsafe { - while secs > 0 || nsecs > 0 { - let mut ts = libc::timespec { - tv_sec: cmp::min(libc::time_t::MAX as u64, secs) as libc::time_t, - tv_nsec: nsecs, - }; - secs -= ts.tv_sec as u64; - if libc::nanosleep(&ts, &mut ts) == -1 { - assert_eq!(os::errno(), libc::EINTR); - secs += ts.tv_sec as u64; - nsecs = ts.tv_nsec; - } else { - nsecs = 0; - } - } - } - } - - pub fn join(self) { - unsafe { - let ret = libc::pthread_join(self.id, ptr::null_mut()); - mem::forget(self); - assert!(ret == 0, "failed to join thread: {}", io::Error::from_raw_os_error(ret)); - } - } - - pub fn id(&self) -> libc::pthread_t { - self.id - } - - pub fn into_id(self) -> libc::pthread_t { - let id = self.id; - mem::forget(self); - id - } -} - -impl Drop for Thread { - fn drop(&mut self) { - let ret = unsafe { libc::pthread_detach(self.id) }; - debug_assert_eq!(ret, 0); - } -} - -#[cfg_attr(test, allow(dead_code))] -pub mod guard { - use crate::ops::Range; - pub type Guard = Range; - pub unsafe fn current() -> Option { - None - } - pub unsafe fn init() -> Option { - None - } - pub unsafe fn deinit() {} -} - -fn min_stack_size(_: *const libc::pthread_attr_t) -> usize { - libc::PTHREAD_STACK_MIN -} diff --git a/library/std/src/sys/vxworks/thread_local_key.rs b/library/std/src/sys/vxworks/thread_local_key.rs deleted file mode 100644 index 2c5b94b1e61e5..0000000000000 --- a/library/std/src/sys/vxworks/thread_local_key.rs +++ /dev/null @@ -1,34 +0,0 @@ -#![allow(dead_code)] // not used on all platforms - -use crate::mem; - -pub type Key = libc::pthread_key_t; - -#[inline] -pub unsafe fn create(dtor: Option) -> Key { - let mut key = 0; - assert_eq!(libc::pthread_key_create(&mut key, mem::transmute(dtor)), 0); - key -} - -#[inline] -pub unsafe fn set(key: Key, value: *mut u8) { - let r = libc::pthread_setspecific(key, value as *mut _); - debug_assert_eq!(r, 0); -} - -#[inline] -pub unsafe fn get(key: Key) -> *mut u8 { - libc::pthread_getspecific(key) as *mut u8 -} - -#[inline] -pub unsafe fn destroy(key: Key) { - let r = libc::pthread_key_delete(key); - debug_assert_eq!(r, 0); -} - -#[inline] -pub fn requires_synchronized_create() -> bool { - false -} diff --git a/library/std/src/sys/vxworks/time.rs b/library/std/src/sys/vxworks/time.rs deleted file mode 100644 index 8f46f4d284f0b..0000000000000 --- a/library/std/src/sys/vxworks/time.rs +++ /dev/null @@ -1,197 +0,0 @@ -use crate::cmp::Ordering; -use crate::time::Duration; -use core::hash::{Hash, Hasher}; - -pub use self::inner::{Instant, SystemTime, UNIX_EPOCH}; -use crate::convert::TryInto; - -const NSEC_PER_SEC: u64 = 1_000_000_000; - -#[derive(Copy, Clone)] -struct Timespec { - t: libc::timespec, -} - -impl Timespec { - const fn zero() -> Timespec { - Timespec { t: libc::timespec { tv_sec: 0, tv_nsec: 0 } } - } - fn sub_timespec(&self, other: &Timespec) -> Result { - if self >= other { - Ok(if self.t.tv_nsec >= other.t.tv_nsec { - Duration::new( - (self.t.tv_sec - other.t.tv_sec) as u64, - (self.t.tv_nsec - other.t.tv_nsec) as u32, - ) - } else { - Duration::new( - (self.t.tv_sec - 1 - other.t.tv_sec) as u64, - self.t.tv_nsec as u32 + (NSEC_PER_SEC as u32) - other.t.tv_nsec as u32, - ) - }) - } else { - match other.sub_timespec(self) { - Ok(d) => Err(d), - Err(d) => Ok(d), - } - } - } - - fn checked_add_duration(&self, other: &Duration) -> Option { - let mut secs = other - .as_secs() - .try_into() // <- target type would be `libc::time_t` - .ok() - .and_then(|secs| self.t.tv_sec.checked_add(secs))?; - - // Nano calculations can't overflow because nanos are <1B which 
fit - // in a u32. - let mut nsec = other.subsec_nanos() + self.t.tv_nsec as u32; - if nsec >= NSEC_PER_SEC as u32 { - nsec -= NSEC_PER_SEC as u32; - secs = secs.checked_add(1)?; - } - Some(Timespec { t: libc::timespec { tv_sec: secs, tv_nsec: nsec as _ } }) - } - - fn checked_sub_duration(&self, other: &Duration) -> Option { - let mut secs = other - .as_secs() - .try_into() // <- target type would be `libc::time_t` - .ok() - .and_then(|secs| self.t.tv_sec.checked_sub(secs))?; - - // Similar to above, nanos can't overflow. - let mut nsec = self.t.tv_nsec as i32 - other.subsec_nanos() as i32; - if nsec < 0 { - nsec += NSEC_PER_SEC as i32; - secs = secs.checked_sub(1)?; - } - Some(Timespec { t: libc::timespec { tv_sec: secs, tv_nsec: nsec as _ } }) - } -} - -impl PartialEq for Timespec { - fn eq(&self, other: &Timespec) -> bool { - self.t.tv_sec == other.t.tv_sec && self.t.tv_nsec == other.t.tv_nsec - } -} - -impl Eq for Timespec {} - -impl PartialOrd for Timespec { - fn partial_cmp(&self, other: &Timespec) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for Timespec { - fn cmp(&self, other: &Timespec) -> Ordering { - let me = (self.t.tv_sec, self.t.tv_nsec); - let other = (other.t.tv_sec, other.t.tv_nsec); - me.cmp(&other) - } -} - -impl Hash for Timespec { - fn hash(&self, state: &mut H) { - self.t.tv_sec.hash(state); - self.t.tv_nsec.hash(state); - } -} -mod inner { - use crate::fmt; - use crate::sys::cvt; - use crate::time::Duration; - - use super::Timespec; - - #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] - pub struct Instant { - t: Timespec, - } - - #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] - pub struct SystemTime { - t: Timespec, - } - - pub const UNIX_EPOCH: SystemTime = - SystemTime { t: Timespec { t: libc::timespec { tv_sec: 0, tv_nsec: 0 } } }; - - impl Instant { - pub fn now() -> Instant { - Instant { t: now(libc::CLOCK_MONOTONIC) } - } - - pub const fn zero() -> Instant { - Instant { t: Timespec::zero() } - } - - pub fn actually_monotonic() -> bool { - true - } - - pub fn checked_sub_instant(&self, other: &Instant) -> Option { - self.t.sub_timespec(&other.t).ok() - } - - pub fn checked_add_duration(&self, other: &Duration) -> Option { - Some(Instant { t: self.t.checked_add_duration(other)? }) - } - - pub fn checked_sub_duration(&self, other: &Duration) -> Option { - Some(Instant { t: self.t.checked_sub_duration(other)? }) - } - } - - impl fmt::Debug for Instant { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("Instant") - .field("tv_sec", &self.t.t.tv_sec) - .field("tv_nsec", &self.t.t.tv_nsec) - .finish() - } - } - - impl SystemTime { - pub fn now() -> SystemTime { - SystemTime { t: now(libc::CLOCK_REALTIME) } - } - - pub fn sub_time(&self, other: &SystemTime) -> Result { - self.t.sub_timespec(&other.t) - } - - pub fn checked_add_duration(&self, other: &Duration) -> Option { - Some(SystemTime { t: self.t.checked_add_duration(other)? }) - } - - pub fn checked_sub_duration(&self, other: &Duration) -> Option { - Some(SystemTime { t: self.t.checked_sub_duration(other)? 
}) - } - } - - impl From for SystemTime { - fn from(t: libc::timespec) -> SystemTime { - SystemTime { t: Timespec { t: t } } - } - } - - impl fmt::Debug for SystemTime { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("SystemTime") - .field("tv_sec", &self.t.t.tv_sec) - .field("tv_nsec", &self.t.t.tv_nsec) - .finish() - } - } - - pub type clock_t = libc::c_int; - - fn now(clock: clock_t) -> Timespec { - let mut t = Timespec { t: libc::timespec { tv_sec: 0, tv_nsec: 0 } }; - cvt(unsafe { libc::clock_gettime(clock, &mut t.t) }).unwrap(); - t - } -} diff --git a/library/std/src/thread/available_concurrency.rs b/library/std/src/thread/available_concurrency.rs new file mode 100644 index 0000000000000..4e805e4f599a8 --- /dev/null +++ b/library/std/src/thread/available_concurrency.rs @@ -0,0 +1,157 @@ +use crate::io; +use crate::num::NonZeroUsize; + +/// Returns the number of hardware threads available to the program. +/// +/// This value should be considered only a hint. +/// +/// # Platform-specific behavior +/// +/// If interpreted as the number of actual hardware threads, it may undercount on +/// Windows systems with more than 64 hardware threads. If interpreted as the +/// available concurrency for that process, it may overcount on Windows systems +/// when limited by a process wide affinity mask or job object limitations, and +/// it may overcount on Linux systems when limited by a process wide affinity +/// mask or affected by cgroups limits. +/// +/// # Errors +/// +/// This function will return an error in the following situations, but is not +/// limited to just these cases: +/// +/// - If the number of hardware threads is not known for the target platform. +/// - The process lacks permissions to view the number of hardware threads +/// available. +/// +/// # Examples +/// +/// ``` +/// # #![allow(dead_code)] +/// #![feature(available_concurrency)] +/// use std::thread; +/// +/// let count = thread::available_concurrency().map(|n| n.get()).unwrap_or(1); +/// ``` +#[unstable(feature = "available_concurrency", issue = "74479")] +pub fn available_concurrency() -> io::Result { + available_concurrency_internal() +} + +cfg_if::cfg_if! 
{ + if #[cfg(windows)] { + #[allow(nonstandard_style)] + fn available_concurrency_internal() -> io::Result { + #[repr(C)] + struct SYSTEM_INFO { + wProcessorArchitecture: u16, + wReserved: u16, + dwPageSize: u32, + lpMinimumApplicationAddress: *mut u8, + lpMaximumApplicationAddress: *mut u8, + dwActiveProcessorMask: *mut u8, + dwNumberOfProcessors: u32, + dwProcessorType: u32, + dwAllocationGranularity: u32, + wProcessorLevel: u16, + wProcessorRevision: u16, + } + extern "system" { + fn GetSystemInfo(info: *mut SYSTEM_INFO) -> i32; + } + let res = unsafe { + let mut sysinfo = crate::mem::zeroed(); + GetSystemInfo(&mut sysinfo); + sysinfo.dwNumberOfProcessors as usize + }; + match res { + 0 => Err(io::Error::new(io::ErrorKind::NotFound, "The number of hardware threads is not known for the target platform")), + cpus => Ok(unsafe { NonZeroUsize::new_unchecked(cpus) }), + } + } + } else if #[cfg(any( + target_os = "android", + target_os = "cloudabi", + target_os = "emscripten", + target_os = "fuchsia", + target_os = "ios", + target_os = "linux", + target_os = "macos", + target_os = "solaris", + target_os = "illumos", + ))] { + fn available_concurrency_internal() -> io::Result { + match unsafe { libc::sysconf(libc::_SC_NPROCESSORS_ONLN) } { + -1 => Err(io::Error::last_os_error()), + 0 => Err(io::Error::new(io::ErrorKind::NotFound, "The number of hardware threads is not known for the target platform")), + cpus => Ok(unsafe { NonZeroUsize::new_unchecked(cpus as usize) }), + } + } + } else if #[cfg(any(target_os = "freebsd", target_os = "dragonfly", target_os = "netbsd"))] { + fn available_concurrency_internal() -> io::Result { + use crate::ptr; + + let mut cpus: libc::c_uint = 0; + let mut cpus_size = crate::mem::size_of_val(&cpus); + + unsafe { + cpus = libc::sysconf(libc::_SC_NPROCESSORS_ONLN) as libc::c_uint; + } + + // Fallback approach in case of errors or no hardware threads. + if cpus < 1 { + let mut mib = [libc::CTL_HW, libc::HW_NCPU, 0, 0]; + let res = unsafe { + libc::sysctl( + mib.as_mut_ptr(), + 2, + &mut cpus as *mut _ as *mut _, + &mut cpus_size as *mut _ as *mut _, + ptr::null_mut(), + 0, + ) + }; + + // Handle errors if any. + if res == -1 { + return Err(io::Error::last_os_error()); + } else if cpus == 0 { + return Err(io::Error::new(io::ErrorKind::NotFound, "The number of hardware threads is not known for the target platform")); + } + } + Ok(unsafe { NonZeroUsize::new_unchecked(cpus as usize) }) + } + } else if #[cfg(target_os = "openbsd")] { + fn available_concurrency_internal() -> io::Result { + use crate::ptr; + + let mut cpus: libc::c_uint = 0; + let mut cpus_size = crate::mem::size_of_val(&cpus); + let mut mib = [libc::CTL_HW, libc::HW_NCPU, 0, 0]; + + let res = unsafe { + libc::sysctl( + mib.as_mut_ptr(), + 2, + &mut cpus as *mut _ as *mut _, + &mut cpus_size as *mut _ as *mut _, + ptr::null_mut(), + 0, + ) + }; + + // Handle errors if any. 
+ if res == -1 { + return Err(io::Error::last_os_error()); + } else if cpus == 0 { + return Err(io::Error::new(io::ErrorKind::NotFound, "The number of hardware threads is not known for the target platform")); + } + + Ok(unsafe { NonZeroUsize::new_unchecked(cpus as usize) }) + } + } else { + // FIXME: implement on vxWorks, Redox, HermitCore, Haiku, l4re + fn available_concurrency_internal() -> io::Result { + Err(io::Error::new(io::ErrorKind::NotFound, "The number of hardware threads is not known for the target platform")) + } + } +} diff --git a/library/std/src/thread/mod.rs b/library/std/src/thread/mod.rs index 087175bb92ab3..45c10266ba255 100644 --- a/library/std/src/thread/mod.rs +++ b/library/std/src/thread/mod.rs @@ -175,9 +175,15 @@ use crate::time::Duration; #[macro_use] mod local; +#[unstable(feature = "available_concurrency", issue = "74479")] +mod available_concurrency; + #[stable(feature = "rust1", since = "1.0.0")] pub use self::local::{AccessError, LocalKey}; +#[unstable(feature = "available_concurrency", issue = "74479")] +pub use available_concurrency::available_concurrency; + // The types used by the thread_local! macro to access TLS keys. Note that there // are two types, the "OS" type and the "fast" type. The OS thread local key // type is accessed via platform-specific API calls and is slow, while the fast diff --git a/library/test/src/formatters/json.rs b/library/test/src/formatters/json.rs index 41e7e6adcf16d..4dc4162700c6e 100644 --- a/library/test/src/formatters/json.rs +++ b/library/test/src/formatters/json.rs @@ -39,9 +39,12 @@ impl JsonFormatter { stdout: Option>, extra: Option<&str>, ) -> io::Result<()> { + // A doc test's name includes a filename which must be escaped for correct json. self.write_message(&*format!( r#"{{ "type": "{}", "name": "{}", "event": "{}""#, - ty, name, evt + ty, + EscapedString(name), + evt ))?; if let Some(exec_time) = exec_time { self.write_message(&*format!(r#", "exec_time": "{}""#, exec_time))?; @@ -67,7 +70,7 @@ impl OutputFormatter for JsonFormatter { fn write_test_start(&mut self, desc: &TestDesc) -> io::Result<()> { self.writeln_message(&*format!( r#"{{ "type": "test", "event": "started", "name": "{}" }}"#, - desc.name + EscapedString(desc.name.as_slice()) )) } @@ -140,7 +143,10 @@ impl OutputFormatter for JsonFormatter { \"name\": \"{}\", \ \"median\": {}, \ \"deviation\": {}{} }}", - desc.name, median, deviation, mbps + EscapedString(desc.name.as_slice()), + median, + deviation, + mbps ); self.writeln_message(&*line) @@ -151,7 +157,7 @@ impl OutputFormatter for JsonFormatter { fn write_timeout(&mut self, desc: &TestDesc) -> io::Result<()> { self.writeln_message(&*format!( r#"{{ "type": "test", "event": "timeout", "name": "{}" }}"#, - desc.name + EscapedString(desc.name.as_slice()) )) } diff --git a/library/test/src/helpers/concurrency.rs b/library/test/src/helpers/concurrency.rs index 7ca27bf0dc15e..7e9bd50f5566e 100644 --- a/library/test/src/helpers/concurrency.rs +++ b/library/test/src/helpers/concurrency.rs @@ -1,6 +1,7 @@ //! Helper module which helps to determine amount of threads to be used //! during tests execution. use std::env; +use std::thread; #[allow(deprecated)] pub fn get_concurrency() -> usize { @@ -12,106 +13,6 @@ pub fn get_concurrency() -> usize { _ => panic!("RUST_TEST_THREADS is `{}`, should be a positive integer.", s), } } - Err(..) => num_cpus(), - } -} - -cfg_if::cfg_if! 
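The libtest change above drops the hand-rolled `num_cpus` helpers in favor of the new unstable `std::thread::available_concurrency` API. A minimal sketch of the resulting fallback chain, assuming a nightly toolchain with the `available_concurrency` feature; the helper name `default_test_threads` is illustrative and not part of libtest:

```rust
#![feature(available_concurrency)]

use std::env;
use std::thread;

// Illustrative stand-in for libtest's `get_concurrency`: an explicit
// RUST_TEST_THREADS value wins, otherwise ask the platform, and fall
// back to a single thread if the count cannot be determined.
fn default_test_threads() -> usize {
    match env::var("RUST_TEST_THREADS") {
        Ok(value) => match value.parse::<usize>() {
            Ok(n) if n > 0 => n,
            _ => panic!("RUST_TEST_THREADS is `{}`, should be a positive integer.", value),
        },
        Err(_) => thread::available_concurrency().map(|n| n.get()).unwrap_or(1),
    }
}

fn main() {
    println!("would run tests on {} thread(s)", default_test_threads());
}
```

Returning `io::Result<NonZeroUsize>` keeps the "count unknown" case explicit rather than silently reporting one CPU, which is why callers such as the sketch above spell out their own fallback.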
{ - if #[cfg(windows)] { - #[allow(nonstandard_style)] - fn num_cpus() -> usize { - #[repr(C)] - struct SYSTEM_INFO { - wProcessorArchitecture: u16, - wReserved: u16, - dwPageSize: u32, - lpMinimumApplicationAddress: *mut u8, - lpMaximumApplicationAddress: *mut u8, - dwActiveProcessorMask: *mut u8, - dwNumberOfProcessors: u32, - dwProcessorType: u32, - dwAllocationGranularity: u32, - wProcessorLevel: u16, - wProcessorRevision: u16, - } - extern "system" { - fn GetSystemInfo(info: *mut SYSTEM_INFO) -> i32; - } - unsafe { - let mut sysinfo = std::mem::zeroed(); - GetSystemInfo(&mut sysinfo); - sysinfo.dwNumberOfProcessors as usize - } - } - } else if #[cfg(any( - target_os = "android", - target_os = "cloudabi", - target_os = "emscripten", - target_os = "fuchsia", - target_os = "ios", - target_os = "linux", - target_os = "macos", - target_os = "solaris", - target_os = "illumos", - ))] { - fn num_cpus() -> usize { - unsafe { libc::sysconf(libc::_SC_NPROCESSORS_ONLN) as usize } - } - } else if #[cfg(any(target_os = "freebsd", target_os = "dragonfly", target_os = "netbsd"))] { - fn num_cpus() -> usize { - use std::ptr; - - let mut cpus: libc::c_uint = 0; - let mut cpus_size = std::mem::size_of_val(&cpus); - - unsafe { - cpus = libc::sysconf(libc::_SC_NPROCESSORS_ONLN) as libc::c_uint; - } - if cpus < 1 { - let mut mib = [libc::CTL_HW, libc::HW_NCPU, 0, 0]; - unsafe { - libc::sysctl( - mib.as_mut_ptr(), - 2, - &mut cpus as *mut _ as *mut _, - &mut cpus_size as *mut _ as *mut _, - ptr::null_mut(), - 0, - ); - } - if cpus < 1 { - cpus = 1; - } - } - cpus as usize - } - } else if #[cfg(target_os = "openbsd")] { - fn num_cpus() -> usize { - use std::ptr; - - let mut cpus: libc::c_uint = 0; - let mut cpus_size = std::mem::size_of_val(&cpus); - let mut mib = [libc::CTL_HW, libc::HW_NCPU, 0, 0]; - - unsafe { - libc::sysctl( - mib.as_mut_ptr(), - 2, - &mut cpus as *mut _ as *mut _, - &mut cpus_size as *mut _ as *mut _, - ptr::null_mut(), - 0, - ); - } - if cpus < 1 { - cpus = 1; - } - cpus as usize - } - } else { - // FIXME: implement on vxWorks, Redox, HermitCore, Haiku, l4re - fn num_cpus() -> usize { - 1 - } + Err(..) 
=> thread::available_concurrency().map(|n| n.get()).unwrap_or(1), } } diff --git a/library/test/src/lib.rs b/library/test/src/lib.rs index b0b81f85fe08f..9c5bb8957b548 100644 --- a/library/test/src/lib.rs +++ b/library/test/src/lib.rs @@ -24,6 +24,7 @@ #![feature(rustc_private)] #![feature(nll)] #![feature(bool_to_option)] +#![feature(available_concurrency)] #![feature(set_stdio)] #![feature(panic_unwind)] #![feature(staged_api)] diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index ce37adeb28c93..8fe0d2584f7a9 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -435,7 +435,9 @@ def download_stage0(self): llvm_sha = subprocess.check_output([ "git", "log", "--author=bors", "--format=%H", "-n1", "-m", "--first-parent", - "--", "src/llvm-project" + "--", + "src/llvm-project", + "src/bootstrap/download-ci-llvm-stamp", ]).decode(sys.getdefaultencoding()).strip() llvm_assertions = self.get_toml('assertions', 'llvm') == 'true' if self.program_out_of_date(self.llvm_stamp(), llvm_sha + str(llvm_assertions)): @@ -891,10 +893,15 @@ def update_submodules(self): ).decode(default_encoding).splitlines()] filtered_submodules = [] submodules_names = [] + llvm_checked_out = os.path.exists(os.path.join(self.rust_root, "src/llvm-project/.git")) for module in submodules: if module.endswith("llvm-project"): + # Don't sync the llvm-project submodule either if an external LLVM + # was provided, or if we are downloading LLVM. Also, if the + # submodule has been initialized already, sync it anyways so that + # it doesn't mess up contributor pull requests. if self.get_toml('llvm-config') or self.downloading_llvm(): - if self.get_toml('lld') != 'true': + if self.get_toml('lld') != 'true' and not llvm_checked_out: continue check = self.check_submodule(module, slow_submodules) filtered_submodules.append((module, check)) diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index 1887b805da19b..6d2da0c021382 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -2538,8 +2538,15 @@ impl Step for RustDev { let dst_bindir = image.join("bin"); t!(fs::create_dir_all(&dst_bindir)); - let exe = builder.llvm_out(target).join("bin").join(exe("llvm-config", target)); - builder.install(&exe, &dst_bindir, 0o755); + let src_bindir = builder.llvm_out(target).join("bin"); + let install_bin = + |name| builder.install(&src_bindir.join(exe(name, target)), &dst_bindir, 0o755); + install_bin("llvm-config"); + install_bin("llvm-ar"); + install_bin("llvm-objdump"); + install_bin("llvm-profdata"); + install_bin("llvm-bcanalyzer"); + install_bin("llvm-cov"); builder.install(&builder.llvm_filecheck(target), &dst_bindir, 0o755); // Copy the include directory as well; needed mostly to build diff --git a/src/bootstrap/download-ci-llvm-stamp b/src/bootstrap/download-ci-llvm-stamp new file mode 100644 index 0000000000000..d857618eefa8c --- /dev/null +++ b/src/bootstrap/download-ci-llvm-stamp @@ -0,0 +1,4 @@ +Change this file to make users of the `download-ci-llvm` configuration download +a new version of LLVM from CI, even if the LLVM submodule hasn’t changed. 
+ +Last change is for: https://github.com/rust-lang/rust/pull/78131 diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs index 22cfd0c56431d..3834e50e3fa11 100644 --- a/src/bootstrap/flags.rs +++ b/src/bootstrap/flags.rs @@ -232,7 +232,13 @@ To learn more about a subcommand, run `./x.py -h`", match subcommand.as_str() { "test" | "t" => { opts.optflag("", "no-fail-fast", "Run all tests regardless of failure"); - opts.optmulti("", "test-args", "extra arguments", "ARGS"); + opts.optmulti( + "", + "test-args", + "extra arguments to be passed for the test tool being used \ + (e.g. libtest, compiletest or rustdoc)", + "ARGS", + ); opts.optmulti( "", "rustc-args", diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs index 6bba00ee85e14..37d6fab070b8e 100644 --- a/src/bootstrap/native.rs +++ b/src/bootstrap/native.rs @@ -378,6 +378,8 @@ fn configure_cmake( cfg.define("CMAKE_SYSTEM_NAME", "FreeBSD"); } else if target.contains("windows") { cfg.define("CMAKE_SYSTEM_NAME", "Windows"); + } else if target.contains("haiku") { + cfg.define("CMAKE_SYSTEM_NAME", "Haiku"); } // When cross-compiling we should also set CMAKE_SYSTEM_VERSION, but in // that case like CMake we cannot easily determine system version either. diff --git a/src/bootstrap/setup.rs b/src/bootstrap/setup.rs index a281061ace15b..c6e1c99564c09 100644 --- a/src/bootstrap/setup.rs +++ b/src/bootstrap/setup.rs @@ -1,6 +1,7 @@ use crate::{t, VERSION}; use std::fmt::Write as _; use std::path::{Path, PathBuf}; +use std::process::Command; use std::str::FromStr; use std::{ env, fmt, fs, @@ -196,10 +197,17 @@ simply delete the `pre-commit` file from .git/hooks." Ok(if should_install { let src = src_path.join("src").join("etc").join("pre-commit.sh"); - let dst = src_path.join(".git").join("hooks").join("pre-commit"); - match fs::hard_link(src, dst) { + let git = t!(Command::new("git").args(&["rev-parse", "--git-common-dir"]).output().map( + |output| { + assert!(output.status.success(), "failed to run `git`"); + PathBuf::from(t!(String::from_utf8(output.stdout)).trim()) + } + )); + let dst = git.join("hooks").join("pre-commit"); + match fs::hard_link(src, &dst) { Err(e) => println!( - "x.py encountered an error -- do you already have the git hook installed?\n{}", + "error: could not create hook {}: do you already have the git hook installed?\n{}", + dst.display(), e ), Ok(_) => println!("Linked `src/etc/pre-commit.sh` to `.git/hooks/pre-commit`"), diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index bda9e0f57841c..33e85dc5e2a11 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -737,6 +737,7 @@ impl Step for Tidy { let mut cmd = builder.tool_cmd(Tool::Tidy); cmd.arg(&builder.src); cmd.arg(&builder.initial_cargo); + cmd.arg(&builder.out); if builder.is_verbose() { cmd.arg("--verbose"); } diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs index a307ef39d03a8..b35d1b99fa5c7 100644 --- a/src/bootstrap/util.rs +++ b/src/bootstrap/util.rs @@ -122,7 +122,7 @@ impl Drop for TimeIt { fn drop(&mut self) { let time = self.1.elapsed(); if !self.0 { - println!("\tfinished in {}.{:03}", time.as_secs(), time.subsec_millis()); + println!("\tfinished in {}.{:03} seconds", time.as_secs(), time.subsec_millis()); } } } diff --git a/src/ci/docker/host-x86_64/armhf-gnu/Dockerfile b/src/ci/docker/host-x86_64/armhf-gnu/Dockerfile index 9fb5faf3ee0f4..9370f5debb5ea 100644 --- a/src/ci/docker/host-x86_64/armhf-gnu/Dockerfile +++ b/src/ci/docker/host-x86_64/armhf-gnu/Dockerfile @@ -58,7 +58,7 @@ RUN curl 
https://www.busybox.net/downloads/busybox-1.21.1.tar.bz2 | tar xjf - && # Download the ubuntu rootfs, which we'll use as a chroot for all our tests. WORKDIR /tmp RUN mkdir rootfs/ubuntu -RUN curl http://cdimage.ubuntu.com/ubuntu-base/releases/16.04/release/ubuntu-base-16.04-core-armhf.tar.gz | \ +RUN curl http://cdimage.ubuntu.com/ubuntu-base/releases/16.04/release/ubuntu-base-16.04.6-base-armhf.tar.gz | \ tar xzf - -C rootfs/ubuntu && \ cd rootfs && mkdir proc sys dev etc etc/init.d diff --git a/src/ci/docker/scripts/musl-toolchain.sh b/src/ci/docker/scripts/musl-toolchain.sh index 1ae412340cb11..c56338a4f9562 100644 --- a/src/ci/docker/scripts/musl-toolchain.sh +++ b/src/ci/docker/scripts/musl-toolchain.sh @@ -4,6 +4,9 @@ # Versions of the toolchain components are configurable in `musl-cross-make/Makefile` and # musl unlike GLIBC is forward compatible so upgrading it shouldn't break old distributions. # Right now we have: Binutils 2.31.1, GCC 9.2.0, musl 1.1.24. + +# ignore-tidy-linelength + set -ex hide_output() { @@ -26,6 +29,9 @@ exit 1 ARCH=$1 TARGET=$ARCH-linux-musl +# Don't depend on the mirrors of sabotage linux that musl-cross-make uses. +LINUX_HEADERS_SITE=https://ci-mirrors.rust-lang.org/rustc/sabotage-linux-tarballs + OUTPUT=/usr/local shift @@ -38,8 +44,8 @@ cd musl-cross-make # A few commits ahead of v0.9.9 to include the cowpatch fix: git checkout a54eb56f33f255dfca60be045f12a5cfaf5a72a9 -hide_output make -j$(nproc) TARGET=$TARGET MUSL_VER=1.1.24 -hide_output make install TARGET=$TARGET MUSL_VER=1.1.24 OUTPUT=$OUTPUT +hide_output make -j$(nproc) TARGET=$TARGET MUSL_VER=1.1.24 LINUX_HEADERS_SITE=$LINUX_HEADERS_SITE +hide_output make install TARGET=$TARGET MUSL_VER=1.1.24 LINUX_HEADERS_SITE=$LINUX_HEADERS_SITE OUTPUT=$OUTPUT cd - diff --git a/src/doc/embedded-book b/src/doc/embedded-book index 79ab7776929c6..ca8169e69b479 160000 --- a/src/doc/embedded-book +++ b/src/doc/embedded-book @@ -1 +1 @@ -Subproject commit 79ab7776929c66db83203397958fa7037d5d9a30 +Subproject commit ca8169e69b479f615855d0eece7e318138fcfc00 diff --git a/src/doc/rustc/src/targets/custom.md b/src/doc/rustc/src/targets/custom.md index 0691afc60eae1..98e113a663b53 100644 --- a/src/doc/rustc/src/targets/custom.md +++ b/src/doc/rustc/src/targets/custom.md @@ -14,4 +14,4 @@ To see it for a different target, add the `--target` flag: $ rustc +nightly -Z unstable-options --target=wasm32-unknown-unknown --print target-spec-json ``` -To use a custom target, see [`xargo`](https://github.com/japaric/xargo). \ No newline at end of file +To use a custom target, see the (unstable) [`build-std` feature](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-std) of `cargo`. diff --git a/src/doc/rustdoc/src/documentation-tests.md b/src/doc/rustdoc/src/documentation-tests.md index 18010bebcf0e7..387d86189b08f 100644 --- a/src/doc/rustdoc/src/documentation-tests.md +++ b/src/doc/rustdoc/src/documentation-tests.md @@ -16,8 +16,8 @@ The basic idea is this: The triple backticks start and end code blocks. If this were in a file named `foo.rs`, running `rustdoc --test foo.rs` will extract this example, and then run it as a test. -Please note that by default, if no language is set for the block code, `rustdoc` -assumes it is `Rust` code. So the following: +Please note that by default, if no language is set for the block code, rustdoc +assumes it is Rust code. 
So the following: ``````markdown ```rust @@ -44,7 +44,6 @@ the `assert!` family of macros works the same as other Rust code: ```rust let foo = "foo"; - assert_eq!(foo, "foo"); ``` @@ -55,8 +54,9 @@ the code panics and the doctest fails. In the example above, you'll note something strange: there's no `main` function! Forcing you to write `main` for every example, no matter how small, -adds friction. So `rustdoc` processes your examples slightly before -running them. Here's the full algorithm rustdoc uses to preprocess examples: +adds friction and clutters the output. So `rustdoc` processes your examples +slightly before running them. Here's the full algorithm `rustdoc` uses to +preprocess examples: 1. Some common `allow` attributes are inserted, including `unused_variables`, `unused_assignments`, `unused_mut`, @@ -78,10 +78,12 @@ Sometimes, you need some setup code, or other things that would distract from your example, but are important to make the tests work. Consider an example block that looks like this: -```text +```ignore +/// ``` /// /// Some documentation. /// # fn foo() {} // this function will be hidden /// println!("Hello, World!"); +/// ``` ``` It will render like this: @@ -251,7 +253,7 @@ disambiguate the error type: This is an unfortunate consequence of the `?` operator adding an implicit conversion, so type inference fails because the type is not unique. Please note that you must write the `(())` in one sequence without intermediate whitespace -so that rustdoc understands you want an implicit `Result`-returning function. +so that `rustdoc` understands you want an implicit `Result`-returning function. ## Documenting macros @@ -359,7 +361,7 @@ the code with the 2015 edition. ## Syntax reference The *exact* syntax for code blocks, including the edge cases, can be found -in the [Fenced Code Blocks](https://spec.commonmark.org/0.28/#fenced-code-blocks) +in the [Fenced Code Blocks](https://spec.commonmark.org/0.29/#fenced-code-blocks) section of the CommonMark specification. Rustdoc also accepts *indented* code blocks as an alternative to fenced @@ -372,7 +374,7 @@ can indent each line by four or more spaces. `````` These, too, are documented in the CommonMark specification, in the -[Indented Code Blocks](https://spec.commonmark.org/0.28/#indented-code-blocks) +[Indented Code Blocks](https://spec.commonmark.org/0.29/#indented-code-blocks) section. However, it's preferable to use fenced code blocks over indented code blocks. @@ -388,7 +390,7 @@ documentation. To this end, Rustdoc allows you to have certain items only appear collecting doctests, so you can utilize doctest functionality without forcing the test to appear in docs, or to find an arbitrary private item to include it on. -When compiling a crate for use in doctests (with `--test` option), rustdoc will set `cfg(doctest)`. +When compiling a crate for use in doctests (with `--test` option), `rustdoc` will set `#[cfg(doctest)]`. Note that they will still link against only the public items of your crate; if you need to test private items, you need to write a unit test. @@ -407,18 +409,18 @@ pub struct MyStructOnlyTakesUsize; ``` Note that the struct `MyStructOnlyTakesUsize` here isn't actually part of your public crate -API. The use of `#[cfg(doctest)]` makes sure that this struct only exists while rustdoc is +API. The use of `#[cfg(doctest)]` makes sure that this struct only exists while `rustdoc` is collecting doctests. 
This means that its doctest is executed when `--test` is passed to rustdoc, but is hidden from the public documentation. -Another possible use of `cfg(doctest)` is to test doctests that are included in your README file +Another possible use of `#[cfg(doctest)]` is to test doctests that are included in your README file without including it in your main documentation. For example, you could write this into your `lib.rs` to test your README as part of your doctests: ```rust,ignore #![feature(external_doc)] -#[doc(include="../README.md")] +#[doc(include = "../README.md")] #[cfg(doctest)] pub struct ReadmeDoctests; ``` diff --git a/src/doc/unstable-book/src/language-features/inline-const.md b/src/doc/unstable-book/src/language-features/inline-const.md new file mode 100644 index 0000000000000..00e1c79ca3ff3 --- /dev/null +++ b/src/doc/unstable-book/src/language-features/inline-const.md @@ -0,0 +1,45 @@ +# `inline_const` + +The tracking issue for this feature is: [#76001] + +------ + +This feature allows you to use inline constant expressions. For example, you can +turn this code: + +```rust +# fn add_one(x: i32) -> i32 { x + 1 } +const MY_COMPUTATION: i32 = 1 + 2 * 3 / 4; + +fn main() { + let x = add_one(MY_COMPUTATION); +} +``` + +into this code: + +```rust +#![feature(inline_const)] + +# fn add_one(x: i32) -> i32 { x + 1 } +fn main() { + let x = add_one(const { 1 + 2 * 3 / 4 }); +} +``` + +You can also use inline constant expressions in patterns: + +```rust +#![feature(inline_const)] + +const fn one() -> i32 { 1 } + +let some_int = 3; +match some_int { + const { 1 + 2 } => println!("Matched 1 + 2"), + const { one() } => println!("Matched const fn returning 1"), + _ => println!("Didn't match anything :("), +} +``` + +[#76001]: https://github.com/rust-lang/rust/issues/76001 diff --git a/src/doc/unstable-book/src/library-features/default-free-fn.md b/src/doc/unstable-book/src/library-features/default-free-fn.md index 5dff73a94dd87..d40a27dddf362 100644 --- a/src/doc/unstable-book/src/library-features/default-free-fn.md +++ b/src/doc/unstable-book/src/library-features/default-free-fn.md @@ -10,6 +10,8 @@ Adds a free `default()` function to the `std::default` module. This function just forwards to [`Default::default()`], but may remove repetition of the word "default" from the call site. +[`Default::default()`]: https://doc.rust-lang.org/nightly/std/default/trait.Default.html#tymethod.default + Here is an example: ```rust diff --git a/src/doc/unstable-book/src/library-features/range-bounds-assert-len.md b/src/doc/unstable-book/src/library-features/range-bounds-assert-len.md new file mode 100644 index 0000000000000..0e95d5ded9296 --- /dev/null +++ b/src/doc/unstable-book/src/library-features/range-bounds-assert-len.md @@ -0,0 +1,10 @@ +# `range_bounds_assert_len` + +The tracking issue for this feature is: [#76393] + +------------------------ + +This adds [`RangeBounds::assert_len`]. + +[#76393]: https://github.com/rust-lang/rust/issues/76393 +[`RangeBounds::assert_len`]: https://doc.rust-lang.org/nightly/std/ops/trait.RangeBounds.html#method.assert_len diff --git a/src/doc/unstable-book/src/library-features/slice-check-range.md b/src/doc/unstable-book/src/library-features/slice-check-range.md deleted file mode 100644 index 83e5738cf5412..0000000000000 --- a/src/doc/unstable-book/src/library-features/slice-check-range.md +++ /dev/null @@ -1,10 +0,0 @@ -# `slice_check_range` - -The tracking issue for this feature is: [#76393] - ------------------------- - -This adds [`slice::check_range`]. 
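The `default-free-fn` entry above describes a free `default()` function in `std::default` that forwards to `Default::default()`; the book's own example is cut off in this hunk. A small sketch of the call-site ergonomics, assuming a nightly toolchain with the `default_free_fn` feature; the `WindowConfig` type is made up for illustration:

```rust
#![feature(default_free_fn)]

use std::default::default;

// Any type implementing `Default` works; this one is invented for the example.
#[derive(Debug, Default)]
struct WindowConfig {
    width: u32,
    height: u32,
    resizable: bool,
}

fn main() {
    // `default()` forwards to `Default::default()`, so "default" is written
    // once instead of twice at the call site.
    let cfg: WindowConfig = default();
    println!("{:?}", cfg);
}
```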
- -[#76393]: https://github.com/rust-lang/rust/issues/76393 -[`slice::check_range`]: https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.check_range diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index b2d343fd7af6a..eec3027085c91 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -229,8 +229,8 @@ def cast_to_internal(node): yield child if i < length: # Avoid "Cannot perform pointer math on incomplete type" on zero-sized arrays. - key = keys[i]["value"]["value"] if keys.type.sizeof > 0 else None - val = vals[i]["value"]["value"] if vals.type.sizeof > 0 else None + key = keys[i]["value"]["value"] if keys.type.sizeof > 0 else "()" + val = vals[i]["value"]["value"] if vals.type.sizeof > 0 else "()" yield key, val @@ -242,11 +242,8 @@ def children_of_map(map): root = root.cast(gdb.lookup_type(root.type.name[21:-1])) boxed_root_node = root["node"] height = root["height"] - for i, (key, val) in enumerate(children_of_node(boxed_root_node, height)): - if key is not None: - yield "key{}".format(i), key - if val is not None: - yield "val{}".format(i), val + for child in children_of_node(boxed_root_node, height): + yield child class StdBTreeSetProvider: @@ -258,8 +255,8 @@ def to_string(self): def children(self): inner_map = self.valobj["map"] - for child in children_of_map(inner_map): - yield child + for i, (child, _) in enumerate(children_of_map(inner_map)): + yield "[{}]".format(i), child @staticmethod def display_hint(): @@ -274,8 +271,9 @@ def to_string(self): return "BTreeMap(size={})".format(self.valobj["length"]) def children(self): - for child in children_of_map(self.valobj): - yield child + for i, (key, val) in enumerate(children_of_map(self.valobj)): + yield "key{}".format(i), key + yield "val{}".format(i), val @staticmethod def display_hint(): diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs index 1ea1a09106957..f39b53f3c826a 100644 --- a/src/librustdoc/clean/auto_trait.rs +++ b/src/librustdoc/clean/auto_trait.rs @@ -315,12 +315,13 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { tcx: TyCtxt<'tcx>, pred: ty::Predicate<'tcx>, ) -> FxHashSet { - let regions = match pred.skip_binders() { + let bound_predicate = pred.bound_atom(); + let regions = match bound_predicate.skip_binder() { ty::PredicateAtom::Trait(poly_trait_pred, _) => { - tcx.collect_referenced_late_bound_regions(&ty::Binder::bind(poly_trait_pred)) + tcx.collect_referenced_late_bound_regions(&bound_predicate.rebind(poly_trait_pred)) } ty::PredicateAtom::Projection(poly_proj_pred) => { - tcx.collect_referenced_late_bound_regions(&ty::Binder::bind(poly_proj_pred)) + tcx.collect_referenced_late_bound_regions(&bound_predicate.rebind(poly_proj_pred)) } _ => return FxHashSet::default(), }; diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 776b131a07611..bfc747058185e 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -23,9 +23,9 @@ use rustc_middle::ty::fold::TypeFolder; use rustc_middle::ty::subst::{InternalSubsts, Subst}; use rustc_middle::ty::{self, AdtKind, Lift, Ty, TyCtxt}; use rustc_mir::const_eval::{is_const_fn, is_min_const_fn, is_unstable_const_fn}; -use rustc_span::hygiene::MacroKind; +use rustc_span::hygiene::{AstPass, MacroKind}; use rustc_span::symbol::{kw, sym, Ident, Symbol}; -use rustc_span::{self, Pos}; +use rustc_span::{self, ExpnKind, Pos}; use rustc_typeck::hir_ty_to_ty; use std::collections::hash_map::Entry; @@ -1563,7 +1563,7 @@ impl<'tcx> Clean for Ty<'tcx> { ty::Str => 
Primitive(PrimitiveType::Str), ty::Slice(ty) => Slice(box ty.clean(cx)), ty::Array(ty, n) => { - let mut n = cx.tcx.lift(&n).expect("array lift failed"); + let mut n = cx.tcx.lift(n).expect("array lift failed"); n = n.eval(cx.tcx, ty::ParamEnv::reveal_all()); let n = print_const(cx, n); Array(box ty.clean(cx), n) @@ -1573,7 +1573,7 @@ impl<'tcx> Clean for Ty<'tcx> { BorrowedRef { lifetime: r.clean(cx), mutability: mutbl, type_: box ty.clean(cx) } } ty::FnDef(..) | ty::FnPtr(_) => { - let ty = cx.tcx.lift(self).expect("FnPtr lift failed"); + let ty = cx.tcx.lift(*self).expect("FnPtr lift failed"); let sig = ty.fn_sig(cx.tcx); let def_id = DefId::local(CRATE_DEF_INDEX); BareFunction(box BareFunctionDecl { @@ -1675,7 +1675,7 @@ impl<'tcx> Clean for Ty<'tcx> { ty::Opaque(def_id, substs) => { // Grab the "TraitA + TraitB" from `impl TraitA + TraitB`, // by looking up the bounds associated with the def_id. - let substs = cx.tcx.lift(&substs).expect("Opaque lift failed"); + let substs = cx.tcx.lift(substs).expect("Opaque lift failed"); let bounds = cx .tcx .explicit_item_bounds(def_id) @@ -1689,7 +1689,10 @@ impl<'tcx> Clean for Ty<'tcx> { .filter_map(|bound| { // Note: The substs of opaque types can contain unbound variables, // meaning that we have to use `ignore_quantifiers_with_unbound_vars` here. - let trait_ref = match bound.bound_atom(cx.tcx).skip_binder() { + let trait_ref = match bound + .bound_atom_with_opt_escaping(cx.tcx) + .skip_binder() + { ty::PredicateAtom::Trait(tr, _constness) => { ty::Binder::bind(tr.trait_ref) } @@ -1713,7 +1716,7 @@ impl<'tcx> Clean for Ty<'tcx> { .iter() .filter_map(|bound| { if let ty::PredicateAtom::Projection(proj) = - bound.bound_atom(cx.tcx).skip_binder() + bound.bound_atom_with_opt_escaping(cx.tcx).skip_binder() { if proj.projection_ty.trait_ref(cx.tcx) == trait_ref.skip_binder() @@ -2231,6 +2234,13 @@ impl Clean> for doctree::ExternCrate<'_> { impl Clean> for doctree::Import<'_> { fn clean(&self, cx: &DocContext<'_>) -> Vec { + // We need this comparison because some imports (for std types for example) + // are "inserted" as well but directly by the compiler and they should not be + // taken into account. + if self.span.ctxt().outer_expn_data().kind == ExpnKind::AstPass(AstPass::StdImports) { + return Vec::new(); + } + // We consider inlining the documentation of `pub use` statements, but we // forcefully don't inline if this is not public or if the // #[doc(no_inline)] attribute is present. diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index 3b72cf5c4f97a..32b3f69ecd4f0 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -14,6 +14,7 @@ use rustc_ast::{self as ast, AttrStyle}; use rustc_ast::{FloatTy, IntTy, UintTy}; use rustc_attr::{Stability, StabilityLevel}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use rustc_feature::UnstableFeatures; use rustc_hir as hir; use rustc_hir::def::Res; use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; @@ -679,9 +680,13 @@ impl Attributes { "../".repeat(depth) } Some(&(_, _, ExternalLocation::Remote(ref s))) => s.to_string(), - Some(&(_, _, ExternalLocation::Unknown)) | None => { - String::from("https://doc.rust-lang.org/nightly") - } + Some(&(_, _, ExternalLocation::Unknown)) | None => String::from( + if UnstableFeatures::from_environment().is_nightly_build() { + "https://doc.rust-lang.org/nightly" + } else { + "https://doc.rust-lang.org" + }, + ), }; // This is a primitive so the url is done "by hand". 
let tail = fragment.find('#').unwrap_or_else(|| fragment.len()); diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs index 9a7f1964a1157..b0ed233f5ec7f 100644 --- a/src/librustdoc/clean/utils.rs +++ b/src/librustdoc/clean/utils.rs @@ -503,7 +503,7 @@ fn print_const_with_custom_print_scalar(cx: &DocContext<'_>, ct: &'tcx ty::Const format!("{}{}", format_integer_with_underscore_sep(&data.to_string()), ui.name_str()) } (ty::ConstKind::Value(ConstValue::Scalar(Scalar::Raw { data, .. })), ty::Int(i)) => { - let ty = cx.tcx.lift(&ct.ty).unwrap(); + let ty = cx.tcx.lift(ct.ty).unwrap(); let size = cx.tcx.layout_of(ty::ParamEnv::empty().and(ty)).unwrap().size; let sign_extended_data = sign_extend(data, size) as i128; diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs index 22ae7af617fc7..1726093c6facb 100644 --- a/src/librustdoc/html/render/mod.rs +++ b/src/librustdoc/html/render/mod.rs @@ -540,11 +540,11 @@ impl FormatRenderer for Context { }; let sidebar = if let Some(ref version) = cache.crate_version { format!( - "

Crate {}

\ -
\ + "

Crate {}

\ +
\

Version {}

\
\ -

Back to index

", +

Back to index

", crate_name, Escape(version), ) @@ -567,7 +567,7 @@ impl FormatRenderer for Context { page.root_path = "./"; let mut style_files = self.shared.style_files.clone(); - let sidebar = "

Settings

"; + let sidebar = "

Settings

"; style_files.push(StylePath { path: PathBuf::from("settings.css"), disabled: false }); let v = layout::render( &self.shared.layout, @@ -576,7 +576,8 @@ impl FormatRenderer for Context { settings( self.shared.static_root_path.as_deref().unwrap_or("./"), &self.shared.resource_suffix, - ), + &self.shared.style_files, + )?, &style_files, ); self.shared.fs.write(&settings_file, v.as_bytes())?; @@ -807,10 +808,11 @@ function handleThemeButtonsBlur(e) {{ themePicker.onclick = switchThemeButtonState; themePicker.onblur = handleThemeButtonsBlur; {}.forEach(function(item) {{ - var but = document.createElement('button'); + var but = document.createElement("button"); but.textContent = item; but.onclick = function(el) {{ switchTheme(currentTheme, mainTheme, item, true); + useSystemTheme(false); }}; but.onblur = handleThemeButtonsBlur; themes.appendChild(but); @@ -1062,10 +1064,9 @@ themePicker.onblur = handleThemeButtonsBlur; krates.dedup(); let content = format!( - "

\ - List of all crates\ -

\ -
    {}
", + "

\ + List of all crates\ +

    {}
", krates .iter() .map(|s| { @@ -1209,7 +1210,7 @@ impl ItemEntry { impl ItemEntry { crate fn print(&self) -> impl fmt::Display + '_ { crate::html::format::display_fn(move |f| { - write!(f, "{}", self.url, Escape(&self.name)) + write!(f, "{}", self.url, Escape(&self.name)) }) } } @@ -1300,7 +1301,7 @@ fn print_entries(f: &mut Buffer, e: &FxHashSet, title: &str, class: & e.sort(); write!( f, - "

{}

    {}
", + "

{}

    {}
", title, Escape(title), class, @@ -1313,16 +1314,16 @@ impl AllTypes { fn print(self, f: &mut Buffer) { write!( f, - "

\ - \ - \ + "

\ + \ + \ \ - []\ + []\ \ - List of all items\ + List of all items\

" ); print_entries(f, &self.structs, "Structs", "structs"); @@ -1344,40 +1345,75 @@ impl AllTypes { #[derive(Debug)] enum Setting { - Section { description: &'static str, sub_settings: Vec }, - Entry { js_data_name: &'static str, description: &'static str, default_value: bool }, + Section { + description: &'static str, + sub_settings: Vec, + }, + Toggle { + js_data_name: &'static str, + description: &'static str, + default_value: bool, + }, + Select { + js_data_name: &'static str, + description: &'static str, + default_value: &'static str, + options: Vec<(String, String)>, + }, } impl Setting { - fn display(&self) -> String { + fn display(&self, root_path: &str, suffix: &str) -> String { match *self { - Setting::Section { ref description, ref sub_settings } => format!( - "
\ -
{}
\ -
{}
+ Setting::Section { description, ref sub_settings } => format!( + "
\ +
{}
\ +
{}
", description, - sub_settings.iter().map(|s| s.display()).collect::() + sub_settings.iter().map(|s| s.display(root_path, suffix)).collect::() ), - Setting::Entry { ref js_data_name, ref description, ref default_value } => format!( - "
\ -
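The final hunk in this excerpt (truncated here) reworks rustdoc's `Setting` enum from a Section/Entry pair into Section, Toggle, and Select variants, each rendered by a `display` method that now also receives the static root path and resource suffix. Below is a self-contained sketch of that shape; the field layout follows the diff, but the `Vec<Setting>` element type and the plain-text rendering are assumptions for illustration, not rustdoc's actual HTML output:

```rust
// Simplified stand-in for rustdoc's refactored `Setting` enum: a titled
// section of nested settings, a boolean toggle, or a select box.
enum Setting {
    Section { description: &'static str, sub_settings: Vec<Setting> },
    Toggle { js_data_name: &'static str, description: &'static str, default_value: bool },
    Select {
        js_data_name: &'static str,
        description: &'static str,
        default_value: &'static str,
        options: Vec<(String, String)>,
    },
}

impl Setting {
    // Plain-text rendering for the sketch; the real method emits HTML and
    // takes the documentation root path and resource suffix as arguments.
    fn display(&self) -> String {
        match self {
            Setting::Section { description, sub_settings } => format!(
                "[section] {}\n{}",
                description,
                sub_settings.iter().map(|s| s.display()).collect::<String>()
            ),
            Setting::Toggle { js_data_name, description, default_value } => {
                format!("[toggle {}] {} (default: {})\n", js_data_name, description, default_value)
            }
            Setting::Select { js_data_name, description, default_value, options } => format!(
                "[select {}] {} (default: {}, options: {})\n",
                js_data_name,
                description,
                default_value,
                options.iter().map(|(v, _)| v.as_str()).collect::<Vec<_>>().join(", ")
            ),
        }
    }
}

fn main() {
    let settings = Setting::Section {
        description: "Theme preferences",
        sub_settings: vec![
            Setting::Toggle {
                js_data_name: "use-system-theme",
                description: "Use system theme",
                default_value: true,
            },
            Setting::Select {
                js_data_name: "preferred-dark-theme",
                description: "Preferred dark theme",
                default_value: "dark",
                options: vec![("dark".into(), "Dark".into()), ("ayu".into(), "Ayu".into())],
            },
        ],
    };
    print!("{}", settings.display());
}
```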