Auto merge of rust-lang#100123 - matthiaskrgr:rollup-aylwvyc, r=matthiaskrgr

Rollup of 9 pull requests

Successful merges:

 - rust-lang#98877 (Set llvm configs when building lld)
 - rust-lang#100068 (Fix backwards-compatibility check for tests with `+whole-archive`)
 - rust-lang#100083 (rustdoc: use a more compact encoding for source-files.js)
 - rust-lang#100102 (Fix typo)
 - rust-lang#100104 (Remove more Clean trait implementations)
 - rust-lang#100105 (Add regression test for rust-lang#90871)
 - rust-lang#100107 (fix trailing whitespace in error message)
 - rust-lang#100111 (Provide suggestion on missing `let` in binding statement)
 - rust-lang#100119 (FileTimes support does not build for ESP-IDF)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed Aug 4, 2022
2 parents 59d99f5 + a44532c commit 1ad5635
Showing 19 changed files with 208 additions and 94 deletions.
30 changes: 27 additions & 3 deletions compiler/rustc_ast_passes/src/feature_gate.rs
@@ -2,7 +2,7 @@ use rustc_ast as ast;
use rustc_ast::visit::{self, AssocCtxt, FnCtxt, FnKind, Visitor};
use rustc_ast::{AssocConstraint, AssocConstraintKind, NodeId};
use rustc_ast::{PatKind, RangeEnd, VariantData};
use rustc_errors::struct_span_err;
use rustc_errors::{struct_span_err, Applicability};
use rustc_feature::{AttributeGate, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP};
use rustc_feature::{Features, GateIssue};
use rustc_session::parse::{feature_err, feature_err_issue};
@@ -577,6 +577,32 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
}
}

fn visit_stmt(&mut self, stmt: &'a ast::Stmt) {
if let ast::StmtKind::Semi(expr) = &stmt.kind
&& let ast::ExprKind::Assign(lhs, _, _) = &expr.kind
&& let ast::ExprKind::Type(..) = lhs.kind
&& self.sess.parse_sess.span_diagnostic.err_count() == 0
&& !self.features.type_ascription
&& !lhs.span.allows_unstable(sym::type_ascription)
{
// When we encounter a statement of the form `foo: Ty = val;`, this will emit a type
// ascription error, but the likely intention was to write a `let` statement. (#78907).
feature_err_issue(
&self.sess.parse_sess,
sym::type_ascription,
lhs.span,
GateIssue::Language,
"type ascription is experimental",
).span_suggestion_verbose(
lhs.span.shrink_to_lo(),
"you might have meant to introduce a new binding",
"let ".to_string(),
Applicability::MachineApplicable,
).emit();
}
visit::walk_stmt(self, stmt);
}

fn visit_expr(&mut self, e: &'a ast::Expr) {
match e.kind {
ast::ExprKind::Box(_) => {
@@ -795,8 +821,6 @@ fn maybe_stage_features(sess: &Session, krate: &ast::Crate) {
// checks if `#![feature]` has been used to enable any lang feature
// does not check the same for lib features unless there's at least one
// declared lang feature
use rustc_errors::Applicability;

if !sess.opts.unstable_features.is_nightly_build() {
let lang_features = &sess.features_untracked().declared_lang_features;
if lang_features.len() == 0 {
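As a side note, here is a minimal user-side sketch of the scenario the new `visit_stmt` check above targets (the variable name is hypothetical): a binding written without `let` parses as a type-ascribed expression statement, trips the unstable `type_ascription` gate, and now also carries a machine-applicable suggestion to insert `let `.

```rust
fn main() {
    // Writing `answer: i32 = 42;` (without `let`) would parse as a type-ascribed
    // expression statement and hit the `type_ascription` feature gate; the check
    // added above attaches the fix shown here: insert `let ` before the binding.
    let answer: i32 = 42;
    println!("{answer}");
}
```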
2 changes: 1 addition & 1 deletion compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs
@@ -2736,7 +2736,7 @@ impl<'b, 'v> Visitor<'v> for ConditionVisitor<'b> {
self.errors.push((
e.span,
format!(
"if the `for` loop runs 0 times, {} is not initialized ",
"if the `for` loop runs 0 times, {} is not initialized",
self.name
),
));
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_ssa/src/back/link.rs
@@ -2267,7 +2267,7 @@ fn add_local_native_libraries(
// be added explicitly if necessary, see the error in `fn link_rlib`) compiled
// as an executable due to `--test`. Use whole-archive implicitly, like before
// the introduction of native lib modifiers.
|| (bundle != Some(false) && sess.opts.test)
|| (whole_archive == None && bundle != Some(false) && sess.opts.test)
{
cmd.link_whole_staticlib(
name,
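A hedged distillation of just the changed arm of this condition (parameter names are illustrative, not the real locals in `add_local_native_libraries`): the `--test` backward-compatibility path now applies only when no explicit `whole-archive` modifier was given.

```rust
// Sketch of the changed arm only; the other arms of the real condition are omitted.
fn test_backcompat_whole_archive(
    whole_archive: Option<bool>, // explicit `+`/`-whole-archive` modifier, if any
    bundle: Option<bool>,        // explicit `+`/`-bundle` modifier, if any
    is_test: bool,               // compiled as an executable due to `--test`
) -> bool {
    // Before this fix the `whole_archive == None` check was missing, so an
    // explicit modifier could be overridden whenever `--test` was involved.
    whole_archive == None && bundle != Some(false) && is_test
}

fn main() {
    assert!(test_backcompat_whole_archive(None, None, true));
    assert!(!test_backcompat_whole_archive(Some(true), None, true));
    assert!(!test_backcompat_whole_archive(None, Some(false), true));
    println!("backcompat arm behaves as expected");
}
```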
2 changes: 1 addition & 1 deletion compiler/rustc_infer/src/infer/higher_ranked/mod.rs
@@ -17,7 +17,7 @@ impl<'a, 'tcx> CombineFields<'a, 'tcx> {
///
/// This is implemented by first entering a new universe.
/// We then replace all bound variables in `sup` with placeholders,
/// and all bound variables in `sup` with inference vars.
/// and all bound variables in `sub` with inference vars.
/// We can then just relate the two resulting types as normal.
///
/// Note: this is a subtle algorithm. For a full explanation, please see
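To make the corrected doc comment concrete, here is a small user-level example (not taken from the compiler sources) of the kind of higher-ranked subtyping question this routine decides: a `for<'a> fn(&'a u32) -> u32` used where a less general `fn(&'static u32) -> u32` is expected.

```rust
// `more_general` has the higher-ranked type `for<'a> fn(&'a u32) -> u32`.
fn more_general(x: &u32) -> u32 {
    *x + 1
}

// Accepting it as a `fn(&'static u32) -> u32` requires `sub <: sup`:
// bound regions in `sup` are replaced with placeholders, bound regions in
// `sub` with inference variables, and the resulting signatures are then
// related as ordinary types.
fn takes_specific(f: fn(&'static u32) -> u32) -> u32 {
    f(&7)
}

fn main() {
    println!("{}", takes_specific(more_general));
}
```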
10 changes: 7 additions & 3 deletions library/std/src/sys/unix/fs.rs
@@ -544,9 +544,11 @@ impl Default for FileTimes {
fn default() -> Self {
// Redox doesn't appear to support `UTIME_OMIT`, so we stub it out here, and always return
// an error in `set_times`.
#[cfg(target_os = "redox")]
// ESP-IDF does not support `futimens` at all and the behavior for that OS is therefore
// the same as for Redox.
#[cfg(any(target_os = "redox", target_os = "espidf"))]
let omit = libc::timespec { tv_sec: 0, tv_nsec: 0 };
#[cfg(not(target_os = "redox"))]
#[cfg(not(any(target_os = "redox", target_os = "espidf")))]
let omit = libc::timespec { tv_sec: 0, tv_nsec: libc::UTIME_OMIT as _ };
Self([omit; 2])
}
@@ -1077,8 +1079,10 @@ impl File {

pub fn set_times(&self, times: FileTimes) -> io::Result<()> {
cfg_if::cfg_if! {
if #[cfg(target_os = "redox")] {
if #[cfg(any(target_os = "redox", target_os = "espidf"))] {
// Redox doesn't appear to support `UTIME_OMIT`.
// ESP-IDF does not support `futimens` at all and the behavior for that OS is therefore
// the same as for Redox.
drop(times);
Err(io::const_io_error!(
io::ErrorKind::Unsupported,
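For reference, a sketch of how the affected API is used (the file path is hypothetical, and `FileTimes` was still unstable behind the `file_set_times` feature at the time of this commit): on Redox and now ESP-IDF, `set_times` reports `ErrorKind::Unsupported` rather than calling `futimens`.

```rust
use std::fs::{File, FileTimes};
use std::time::SystemTime;

fn main() -> std::io::Result<()> {
    let file = File::options().write(true).open("example.txt")?;
    let now = SystemTime::now();
    let times = FileTimes::new().set_accessed(now).set_modified(now);
    // On Redox and ESP-IDF this returns `ErrorKind::Unsupported`, since neither
    // target has a usable `futimens`/`UTIME_OMIT`; on other Unix targets it
    // updates both timestamps.
    match file.set_times(times) {
        Ok(()) => println!("timestamps updated"),
        Err(e) => println!("set_times unavailable here: {e}"),
    }
    Ok(())
}
```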
40 changes: 21 additions & 19 deletions src/bootstrap/native.rs
@@ -345,13 +345,6 @@ impl Step for Llvm {
cfg.define("LLVM_ENABLE_ZLIB", "OFF");
}

if builder.config.llvm_thin_lto {
cfg.define("LLVM_ENABLE_LTO", "Thin");
if !target.contains("apple") {
cfg.define("LLVM_ENABLE_LLD", "ON");
}
}

// This setting makes the LLVM tools link to the dynamic LLVM library,
// which saves both memory during parallel links and overall disk space
// for the tools. We don't do this on every platform as it doesn't work
@@ -463,15 +456,8 @@ impl Step for Llvm {
cfg.define("LLVM_VERSION_SUFFIX", suffix);
}

if let Some(ref linker) = builder.config.llvm_use_linker {
cfg.define("LLVM_USE_LINKER", linker);
}

if builder.config.llvm_allow_old_toolchain {
cfg.define("LLVM_TEMPORARILY_ALLOW_OLD_TOOLCHAIN", "YES");
}

configure_cmake(builder, target, &mut cfg, true, ldflags);
configure_llvm(builder, target, &mut cfg);

for (key, val) in &builder.config.llvm_build_config {
cfg.define(key, val);
@@ -731,6 +717,25 @@ fn configure_cmake(
}
}

fn configure_llvm(builder: &Builder<'_>, target: TargetSelection, cfg: &mut cmake::Config) {
// ThinLTO is only available when building with LLVM, enabling LLD is required.
// Apple's linker ld64 supports ThinLTO out of the box though, so don't use LLD on Darwin.
if builder.config.llvm_thin_lto {
cfg.define("LLVM_ENABLE_LTO", "Thin");
if !target.contains("apple") {
cfg.define("LLVM_ENABLE_LLD", "ON");
}
}

if let Some(ref linker) = builder.config.llvm_use_linker {
cfg.define("LLVM_USE_LINKER", linker);
}

if builder.config.llvm_allow_old_toolchain {
cfg.define("LLVM_TEMPORARILY_ALLOW_OLD_TOOLCHAIN", "YES");
}
}

// Adapted from https://github.com/alexcrichton/cc-rs/blob/fba7feded71ee4f63cfe885673ead6d7b4f2f454/src/lib.rs#L2347-L2365
fn get_var(var_base: &str, host: &str, target: &str) -> Option<OsString> {
let kind = if host == target { "HOST" } else { "TARGET" };
@@ -794,6 +799,7 @@ impl Step for Lld {
}

configure_cmake(builder, target, &mut cfg, true, ldflags);
configure_llvm(builder, target, &mut cfg);

// This is an awful, awful hack. Discovered when we migrated to using
// clang-cl to compile LLVM/LLD it turns out that LLD, when built out of
@@ -825,10 +831,6 @@ impl Step for Lld {
.define("LLVM_CONFIG_PATH", llvm_config_shim)
.define("LLVM_INCLUDE_TESTS", "OFF");

if builder.config.llvm_allow_old_toolchain {
cfg.define("LLVM_TEMPORARILY_ALLOW_OLD_TOOLCHAIN", "YES");
}

// While we're using this horrible workaround to shim the execution of
// llvm-config, let's just pile on more. I can't seem to figure out how
// to build LLD as a standalone project and also cross-compile it at the
68 changes: 30 additions & 38 deletions src/librustdoc/clean/mod.rs
@@ -247,30 +247,28 @@ pub(crate) fn clean_middle_const<'tcx>(
}
}

impl<'tcx> Clean<'tcx, Option<Lifetime>> for ty::Region<'tcx> {
fn clean(&self, _cx: &mut DocContext<'_>) -> Option<Lifetime> {
match **self {
ty::ReStatic => Some(Lifetime::statik()),
ty::ReLateBound(_, ty::BoundRegion { kind: ty::BrNamed(_, name), .. }) => {
if name != kw::UnderscoreLifetime { Some(Lifetime(name)) } else { None }
}
ty::ReEarlyBound(ref data) => {
if data.name != kw::UnderscoreLifetime {
Some(Lifetime(data.name))
} else {
None
}
}
ty::ReLateBound(..)
| ty::ReFree(..)
| ty::ReVar(..)
| ty::RePlaceholder(..)
| ty::ReEmpty(_)
| ty::ReErased => {
debug!("cannot clean region {:?}", self);
pub(crate) fn clean_middle_region<'tcx>(region: ty::Region<'tcx>) -> Option<Lifetime> {
match *region {
ty::ReStatic => Some(Lifetime::statik()),
ty::ReLateBound(_, ty::BoundRegion { kind: ty::BrNamed(_, name), .. }) => {
if name != kw::UnderscoreLifetime { Some(Lifetime(name)) } else { None }
}
ty::ReEarlyBound(ref data) => {
if data.name != kw::UnderscoreLifetime {
Some(Lifetime(data.name))
} else {
None
}
}
ty::ReLateBound(..)
| ty::ReFree(..)
| ty::ReVar(..)
| ty::RePlaceholder(..)
| ty::ReEmpty(_)
| ty::ReErased => {
debug!("cannot clean region {:?}", region);
None
}
}
}

@@ -321,7 +319,7 @@ impl<'tcx> Clean<'tcx, Option<WherePredicate>> for ty::Predicate<'tcx> {
ty::PredicateKind::Trait(pred) => {
clean_poly_trait_predicate(bound_predicate.rebind(pred), cx)
}
ty::PredicateKind::RegionOutlives(pred) => clean_region_outlives_predicate(pred, cx),
ty::PredicateKind::RegionOutlives(pred) => clean_region_outlives_predicate(pred),
ty::PredicateKind::TypeOutlives(pred) => clean_type_outlives_predicate(pred, cx),
ty::PredicateKind::Projection(pred) => Some(clean_projection_predicate(pred, cx)),
ty::PredicateKind::ConstEvaluatable(..) => None,
@@ -358,7 +356,6 @@ fn clean_poly_trait_predicate<'tcx>(

fn clean_region_outlives_predicate<'tcx>(
pred: ty::OutlivesPredicate<ty::Region<'tcx>, ty::Region<'tcx>>,
cx: &mut DocContext<'tcx>,
) -> Option<WherePredicate> {
let ty::OutlivesPredicate(a, b) = pred;

@@ -367,8 +364,10 @@
}

Some(WherePredicate::RegionPredicate {
lifetime: a.clean(cx).expect("failed to clean lifetime"),
bounds: vec![GenericBound::Outlives(b.clean(cx).expect("failed to clean bounds"))],
lifetime: clean_middle_region(a).expect("failed to clean lifetime"),
bounds: vec![GenericBound::Outlives(
clean_middle_region(b).expect("failed to clean bounds"),
)],
})
}

@@ -384,7 +383,9 @@ fn clean_type_outlives_predicate<'tcx>(

Some(WherePredicate::BoundPredicate {
ty: clean_middle_ty(ty, cx, None),
bounds: vec![GenericBound::Outlives(lt.clean(cx).expect("failed to clean lifetimes"))],
bounds: vec![GenericBound::Outlives(
clean_middle_region(lt).expect("failed to clean lifetimes"),
)],
bound_params: Vec::new(),
})
}
@@ -999,15 +1000,6 @@ impl<'tcx> Clean<'tcx, FnRetTy> for hir::FnRetTy<'tcx> {
}
}

impl<'tcx> Clean<'tcx, bool> for hir::IsAuto {
fn clean(&self, _: &mut DocContext<'tcx>) -> bool {
match *self {
hir::IsAuto::Yes => true,
hir::IsAuto::No => false,
}
}
}

impl<'tcx> Clean<'tcx, Path> for hir::TraitRef<'tcx> {
fn clean(&self, cx: &mut DocContext<'tcx>) -> Path {
let path = clean_path(self.path, cx);
@@ -1597,7 +1589,7 @@ pub(crate) fn clean_middle_ty<'tcx>(
}
ty::RawPtr(mt) => RawPointer(mt.mutbl, Box::new(clean_middle_ty(mt.ty, cx, None))),
ty::Ref(r, ty, mutbl) => BorrowedRef {
lifetime: r.clean(cx),
lifetime: clean_middle_region(r),
mutability: mutbl,
type_: Box::new(clean_middle_ty(ty, cx, None)),
},
@@ -1644,7 +1636,7 @@

inline::record_extern_fqn(cx, did, ItemType::Trait);

let lifetime = reg.clean(cx);
let lifetime = clean_middle_region(*reg);
let mut bounds = vec![];

for did in dids {
@@ -1710,7 +1702,7 @@
let trait_ref = match bound_predicate.skip_binder() {
ty::PredicateKind::Trait(tr) => bound_predicate.rebind(tr.trait_ref),
ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(_ty, reg)) => {
if let Some(r) = reg.clean(cx) {
if let Some(r) = clean_middle_region(reg) {
regions.push(GenericBound::Outlives(r));
}
return None;
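The hunks above continue the migration from rustdoc's `Clean` trait to free functions (here `clean_middle_region`, replacing the `Clean<'tcx, Option<Lifetime>>` impl for `ty::Region<'tcx>`). A small, self-contained sketch of that pattern follows; the types and helper below are hypothetical, not rustdoc's real ones.

```rust
// Before: conversions went through a generic `Clean`-like trait, which hides
// the target type at call sites and drags an unused context parameter along.
trait CleanLike<T> {
    fn clean_like(&self) -> T;
}

struct Region(&'static str);
struct Lifetime(String);

impl CleanLike<Option<Lifetime>> for Region {
    fn clean_like(&self) -> Option<Lifetime> {
        (self.0 != "'_").then(|| Lifetime(self.0.to_string()))
    }
}

// After: a plain free function with an explicit name and no unused arguments.
fn clean_region(region: &Region) -> Option<Lifetime> {
    (region.0 != "'_").then(|| Lifetime(region.0.to_string()))
}

fn main() {
    let r = Region("'a");
    let via_trait: Option<Lifetime> = r.clean_like();
    assert!(via_trait.is_some());
    if let Some(Lifetime(name)) = clean_region(&r) {
        println!("cleaned to {name}"); // both forms agree
    }
}
```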
8 changes: 4 additions & 4 deletions src/librustdoc/clean/utils.rs
@@ -2,9 +2,9 @@ use crate::clean::auto_trait::AutoTraitFinder;
use crate::clean::blanket_impl::BlanketImplFinder;
use crate::clean::render_macro_matchers::render_macro_matcher;
use crate::clean::{
clean_middle_const, clean_middle_ty, inline, Clean, Crate, ExternalCrate, Generic, GenericArg,
GenericArgs, ImportSource, Item, ItemKind, Lifetime, Path, PathSegment, Primitive,
PrimitiveType, Type, TypeBinding, Visibility,
clean_middle_const, clean_middle_region, clean_middle_ty, inline, Clean, Crate, ExternalCrate,
Generic, GenericArg, GenericArgs, ImportSource, Item, ItemKind, Lifetime, Path, PathSegment,
Primitive, PrimitiveType, Type, TypeBinding, Visibility,
};
use crate::core::DocContext;
use crate::formats::item_type::ItemType;
@@ -86,7 +86,7 @@ pub(crate) fn substs_to_args<'tcx>(
Vec::with_capacity(substs.len().saturating_sub(if skip_first { 1 } else { 0 }));
ret_val.extend(substs.iter().filter_map(|kind| match kind.unpack() {
GenericArgKind::Lifetime(lt) => {
Some(GenericArg::Lifetime(lt.clean(cx).unwrap_or(Lifetime::elided())))
Some(GenericArg::Lifetime(clean_middle_region(lt).unwrap_or(Lifetime::elided())))
}
GenericArgKind::Type(_) if skip_first => {
skip_first = false;
33 changes: 20 additions & 13 deletions src/librustdoc/html/render/write_shared.rs
@@ -366,13 +366,15 @@ pub(super) fn write_shared(
.collect::<Vec<_>>();
files.sort_unstable();
let subs = subs.iter().map(|s| s.to_json_string()).collect::<Vec<_>>().join(",");
let dirs =
if subs.is_empty() { String::new() } else { format!(",\"dirs\":[{}]", subs) };
let dirs = if subs.is_empty() && files.is_empty() {
String::new()
} else {
format!(",[{}]", subs)
};
let files = files.join(",");
let files =
if files.is_empty() { String::new() } else { format!(",\"files\":[{}]", files) };
let files = if files.is_empty() { String::new() } else { format!(",[{}]", files) };
format!(
"{{\"name\":\"{name}\"{dirs}{files}}}",
"[\"{name}\"{dirs}{files}]",
name = self.elem.to_str().expect("invalid osstring conversion"),
dirs = dirs,
files = files
@@ -411,18 +413,23 @@
let dst = cx.dst.join(&format!("source-files{}.js", cx.shared.resource_suffix));
let make_sources = || {
let (mut all_sources, _krates) =
try_err!(collect(&dst, krate.name(cx.tcx()).as_str(), "sourcesIndex"), &dst);
try_err!(collect_json(&dst, krate.name(cx.tcx()).as_str()), &dst);
all_sources.push(format!(
"sourcesIndex[\"{}\"] = {};",
r#""{}":{}"#,
&krate.name(cx.tcx()),
hierarchy.to_json_string()
hierarchy
.to_json_string()
// All these `replace` calls are because we have to go through JS string for JSON content.
.replace('\\', r"\\")
.replace('\'', r"\'")
// We need to escape double quotes for the JSON.
.replace("\\\"", "\\\\\"")
));
all_sources.sort();
Ok(format!(
"var sourcesIndex = {{}};\n{}\ncreateSourceSidebar();\n",
all_sources.join("\n")
)
.into_bytes())
let mut v = String::from("var sourcesIndex = JSON.parse('{\\\n");
v.push_str(&all_sources.join(",\\\n"));
v.push_str("\\\n}');\ncreateSourceSidebar();\n");
Ok(v.into_bytes())
};
write_crate("source-files.js", &make_sources)?;
}
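The net effect of this hunk is a more compact `source-files.js`: each directory is encoded as a positional JSON array (name, optional subdirectory list, optional file list) instead of an object with `"name"`, `"dirs"` and `"files"` keys, and the whole index is embedded as a string handed to `JSON.parse`. A hypothetical stand-alone sketch of that encoding (directory and crate names are made up, and escaping of the embedded string is elided):

```rust
// Hypothetical helper mirroring the compact format: ["name", [subdirs], [files]],
// where the subdirectory array is kept (possibly empty) whenever files follow,
// so the positions stay unambiguous.
fn encode_dir(name: &str, subdirs: &[String], files: &[&str]) -> String {
    let dirs = if subdirs.is_empty() && files.is_empty() {
        String::new()
    } else {
        format!(",[{}]", subdirs.join(","))
    };
    let files = if files.is_empty() {
        String::new()
    } else {
        format!(
            ",[{}]",
            files.iter().map(|f| format!("\"{f}\"")).collect::<Vec<_>>().join(",")
        )
    };
    format!("[\"{name}\"{dirs}{files}]")
}

fn main() {
    let leaf = encode_dir("sys", &[], &["mod.rs", "unix.rs"]);
    let root = encode_dir("src", &[leaf], &["lib.rs"]);
    // Mirrors the `var sourcesIndex = JSON.parse('...')` wrapper the new code
    // emits; the real writer also escapes backslashes and quotes for the JS string.
    println!("var sourcesIndex = JSON.parse('{{\"examplecrate\":{root}}}');");
}
```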