From 65d1abae9ef2d3bb291b635cbc2e4a37dc0ed21c Mon Sep 17 00:00:00 2001 From: Stephen Crane Date: Wed, 10 Apr 2019 12:56:42 -0700 Subject: [PATCH] Bump rustc to nightly 2019-04-08 (#49) Major updates to translator: - Remove ThinTokenStream - Update AST - Use libc crate instead of rustc internal version - Add support for varargs Major updates to the refactoring tool: - Remove ThinTokenStream - Update AST - Replace Folder with in-place mutable Visitor - Use new rustc driver interface Note: We need https://github.com/rust-lang/rust/pull/59173 to be in our nightly for LLVM 8 builds against system libLLVM-8.so on linux. --- Cargo.lock | 12 + Cargo.toml | 1 + c2rust-ast-builder/src/builder.rs | 64 +- c2rust-ast-exporter/build.rs | 6 +- c2rust-ast-exporter/src/AstExporter.cpp | 21 +- c2rust-macros/Cargo.toml | 13 + c2rust-macros/src/lib.rs | 177 +++++ c2rust-refactor/Cargo.toml | 1 + c2rust-refactor/gen/ast.txt | 25 +- c2rust-refactor/gen/lr_expr.py | 51 +- c2rust-refactor/src/analysis/labeled_ty.rs | 44 +- c2rust-refactor/src/analysis/mod.rs | 13 +- .../src/analysis/ownership/annot.rs | 52 +- .../src/analysis/ownership/constraint.rs | 114 +-- .../src/analysis/ownership/context.rs | 111 ++- .../src/analysis/ownership/debug.rs | 6 +- .../src/analysis/ownership/inst.rs | 20 +- .../src/analysis/ownership/inter.rs | 22 +- .../src/analysis/ownership/intra.rs | 87 +-- c2rust-refactor/src/analysis/ownership/mod.rs | 278 +++---- .../src/analysis/ownership/mono.rs | 24 +- c2rust-refactor/src/analysis/type_eq.rs | 238 +++--- c2rust-refactor/src/ast_manip/ast_deref.rs | 2 +- c2rust-refactor/src/ast_manip/ast_equiv.rs | 2 +- c2rust-refactor/src/ast_manip/ast_names.rs | 2 +- c2rust-refactor/src/ast_manip/fn_edit.rs | 58 +- c2rust-refactor/src/ast_manip/fold.rs | 441 +++++++---- c2rust-refactor/src/ast_manip/fold_node.rs | 152 ---- c2rust-refactor/src/ast_manip/get_node_id.rs | 2 +- .../src/ast_manip/list_node_ids.rs | 2 +- c2rust-refactor/src/ast_manip/lr_expr.rs | 171 +++-- 
c2rust-refactor/src/ast_manip/mod.rs | 4 +- c2rust-refactor/src/ast_manip/number_nodes.rs | 34 +- c2rust-refactor/src/ast_manip/output_exprs.rs | 181 ++--- c2rust-refactor/src/ast_manip/remove_paren.rs | 38 +- c2rust-refactor/src/ast_manip/seq_edit.rs | 42 +- c2rust-refactor/src/ast_manip/util.rs | 8 +- c2rust-refactor/src/collapse/cfg_attr.rs | 80 +- c2rust-refactor/src/collapse/deleted.rs | 38 +- c2rust-refactor/src/collapse/mac_table.rs | 2 +- c2rust-refactor/src/collapse/macros.rs | 146 ++-- c2rust-refactor/src/collapse/mod.rs | 74 +- c2rust-refactor/src/collapse/node_map.rs | 16 +- c2rust-refactor/src/collapse/nt_match.rs | 2 +- c2rust-refactor/src/command.rs | 475 ++++++------ c2rust-refactor/src/context.rs | 35 +- c2rust-refactor/src/driver.rs | 693 +++++++++++------- c2rust-refactor/src/illtyped.rs | 481 ++++++------ c2rust-refactor/src/interact/main_thread.rs | 54 +- c2rust-refactor/src/lib.rs | 98 ++- c2rust-refactor/src/mark_adjust.rs | 5 +- c2rust-refactor/src/matcher/bindings.rs | 8 +- c2rust-refactor/src/matcher/impls.rs | 2 +- c2rust-refactor/src/matcher/mod.rs | 287 +++++--- c2rust-refactor/src/matcher/subst.rs | 191 ++--- c2rust-refactor/src/node_map.rs | 6 +- c2rust-refactor/src/path_edit.rs | 220 +++--- c2rust-refactor/src/pick_node.rs | 7 +- c2rust-refactor/src/reflect.rs | 47 +- c2rust-refactor/src/resolve.rs | 18 +- c2rust-refactor/src/rewrite/base.rs | 7 +- .../src/rewrite/strategy/item_header.rs | 18 +- c2rust-refactor/src/rewrite/strategy/print.rs | 7 +- .../src/rewrite/strategy/recursive.rs | 2 +- c2rust-refactor/src/scripting.rs | 90 +-- c2rust-refactor/src/select/parse.rs | 41 +- c2rust-refactor/src/span_fix.rs | 49 +- .../src/transform/canonicalize_refs.rs | 79 +- .../src/transform/char_literals.rs | 11 +- c2rust-refactor/src/transform/control_flow.rs | 48 +- c2rust-refactor/src/transform/externs.rs | 37 +- c2rust-refactor/src/transform/format.rs | 20 +- c2rust-refactor/src/transform/funcs.rs | 109 ++- 
c2rust-refactor/src/transform/generics.rs | 18 +- c2rust-refactor/src/transform/ionize.rs | 40 +- c2rust-refactor/src/transform/items.rs | 199 +++-- c2rust-refactor/src/transform/linkage.rs | 31 +- c2rust-refactor/src/transform/literals.rs | 93 +-- c2rust-refactor/src/transform/mod.rs | 8 +- c2rust-refactor/src/transform/ownership.rs | 116 ++- .../src/transform/reorganize_definitions.rs | 34 +- c2rust-refactor/src/transform/retype.rs | 533 ++++++-------- c2rust-refactor/src/transform/rewrite.rs | 33 +- c2rust-refactor/src/transform/statics.rs | 127 ++-- c2rust-refactor/src/transform/structs.rs | 39 +- c2rust-refactor/src/transform/test.rs | 60 +- c2rust-refactor/src/transform/vars.rs | 88 +-- .../src/transform/wrapping_arith.rs | 57 +- .../src/build_files/Cargo.toml.hbs | 4 + c2rust-transpile/src/build_files/build.rs.hbs | 11 + c2rust-transpile/src/build_files/lib.rs.hbs | 6 +- c2rust-transpile/src/build_files/mod.rs | 22 + c2rust-transpile/src/cfg/mod.rs | 7 +- c2rust-transpile/src/convert_type.rs | 20 + c2rust-transpile/src/lib.rs | 1 + c2rust-transpile/src/rust_ast/traverse.rs | 1 + c2rust-transpile/src/translator/assembly.rs | 2 +- c2rust-transpile/src/translator/bitfields.rs | 14 +- c2rust-transpile/src/translator/builtins.rs | 59 +- .../src/translator/main_function.rs | 2 +- c2rust-transpile/src/translator/mod.rs | 183 ++++- c2rust-transpile/src/translator/operators.rs | 8 +- c2rust-transpile/src/translator/simd.rs | 6 +- c2rust-transpile/src/translator/variadic.rs | 132 +++- .../backends/dynamic-dlsym/Cargo.toml | 2 + .../backends/dynamic-dlsym/src/lib.rs | 1 - .../backends/zstd-logging/Cargo.toml | 1 + .../backends/zstd-logging/src/lib.rs | 1 - docs/known-limitations.md | 2 +- rust-toolchain | 2 +- scripts/common.py | 4 +- tests/arrays/Cargo.toml | 3 + tests/builtins/Cargo.toml | 3 + tests/casts/Cargo.toml | 3 + tests/conditionals/Cargo.toml | 3 + tests/enums/Cargo.toml | 3 + tests/example/Cargo.toml | 3 + tests/floats/Cargo.toml | 1 + 
tests/gotos/Cargo.toml | 3 + tests/ints/Cargo.toml | 3 + tests/longdouble/Cargo.toml | 1 + tests/loops/Cargo.toml | 3 + tests/misc/Cargo.toml | 3 + tests/pointers/Cargo.toml | 3 + tests/simd/Cargo.toml | 4 + tests/statics/Cargo.toml | 3 + tests/structs/Cargo.toml | 1 + tests/unions/Cargo.toml | 3 + 128 files changed, 4251 insertions(+), 3784 deletions(-) create mode 100644 c2rust-macros/Cargo.toml create mode 100644 c2rust-macros/src/lib.rs delete mode 100644 c2rust-refactor/src/ast_manip/fold_node.rs create mode 100644 c2rust-transpile/src/build_files/build.rs.hbs diff --git a/Cargo.lock b/Cargo.lock index cf2c88deb..0adc0c4d3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,3 +1,5 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. [[package]] name = "adler32" version = "1.0.3" @@ -197,6 +199,15 @@ dependencies = [ "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "c2rust-macros" +version = "0.9.0" +dependencies = [ + "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)", + "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)", + "syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "c2rust-manual-preprocessors" version = "0.1.0" @@ -217,6 +228,7 @@ name = "c2rust-refactor" version = "0.9.0" dependencies = [ "c2rust-ast-builder 0.9.0", + "c2rust-macros 0.9.0", "cargo 0.32.0 (registry+https://github.com/rust-lang/crates.io-index)", "clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)", "derive_more 0.14.0 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/Cargo.toml b/Cargo.toml index 6731885b1..6039c0f75 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -7,6 +7,7 @@ members = [ "c2rust-ast-exporter", "manual/preprocessors", "c2rust-bitfields", + "c2rust-macros", ] exclude = [ "cross-checks/pointer-tracer", diff --git a/c2rust-ast-builder/src/builder.rs 
b/c2rust-ast-builder/src/builder.rs index 12fae7ede..ac2baeee3 100644 --- a/c2rust-ast-builder/src/builder.rs +++ b/c2rust-ast-builder/src/builder.rs @@ -5,7 +5,7 @@ use syntax::source_map::{DUMMY_SP, Spanned, Span, dummy_spanned}; use syntax::parse::token::{self, Token, DelimToken}; use syntax::attr::{mk_attr_inner}; use syntax::ptr::P; -use syntax::tokenstream::{TokenTree, TokenStream, TokenStreamBuilder, ThinTokenStream}; +use syntax::tokenstream::{TokenTree, TokenStream, TokenStreamBuilder}; use syntax::symbol::keywords; use std::rc::Rc; use rustc_target::spec::abi::{self, Abi}; @@ -183,14 +183,14 @@ impl> Make for Vec { } -impl Make for TokenStream { - fn make(self, _mk: &Builder) -> ThinTokenStream { - self.into() - } -} +//impl Make for TokenStream { +// fn make(self, _mk: &Builder) -> TokenStream { +// self.into() +// } +//} -impl Make for Vec { - fn make(self, _mk: &Builder) -> ThinTokenStream { +impl Make for Vec { + fn make(self, _mk: &Builder) -> TokenStream { self.into_iter().collect::().into() } } @@ -207,7 +207,7 @@ impl Make for AngleBracketedArgs { } } -impl Make for ParenthesisedArgs { +impl Make for ParenthesizedArgs { fn make(self, _mk: &Builder) -> GenericArgs { Parenthesized(self) } @@ -225,15 +225,15 @@ impl Make for Lifetime { } } -impl Make for MetaItem { - fn make(self, _mk: &Builder) -> NestedMetaItemKind { - NestedMetaItemKind::MetaItem(self) +impl Make for MetaItem { + fn make(self, _mk: &Builder) -> NestedMetaItem { + NestedMetaItem::MetaItem(self) } } -impl Make for Lit { - fn make(self, _mk: &Builder) -> NestedMetaItemKind { - NestedMetaItemKind::Literal(self) +impl Make for Lit { + fn make(self, _mk: &Builder) -> NestedMetaItem { + NestedMetaItem::Literal(self) } } @@ -448,11 +448,11 @@ impl Builder { } } - pub fn parenthesized_args(self, tys: Ts) -> ParenthesisedArgs + pub fn parenthesized_args(self, tys: Ts) -> ParenthesizedArgs where Ts: Make>> { let tys = tys.make(&self); - ParenthesisedArgs { + ParenthesizedArgs { span: 
self.span, inputs: tys, output: None, @@ -495,8 +495,8 @@ impl Builder { pub fn abs_path(self, path: Pa) -> Path where Pa: Make { let mut p = path.make(&self); - if !p.segments.get(0).map_or(false, |s| s.ident.name == keywords::CrateRoot.name()) { - p.segments.insert(0, keywords::CrateRoot.ident().make(&self)); + if !p.segments.get(0).map_or(false, |s| s.ident.name == keywords::PathRoot.name()) { + p.segments.insert(0, keywords::PathRoot.ident().make(&self)); } p } @@ -1284,7 +1284,7 @@ impl Builder { let block = block.make(&self); let header = FnHeader { unsafety: self.unsafety, - asyncness: IsAsync::NotAsync, + asyncness: dummy_spanned(IsAsync::NotAsync), constness: dummy_spanned(self.constness), abi: self.abi, }; @@ -1295,12 +1295,12 @@ impl Builder { block)) } - pub fn fn_decl(self, inputs: Vec, output: FunctionRetTy, variadic: bool) -> P + pub fn fn_decl(self, inputs: Vec, output: FunctionRetTy, c_variadic: bool) -> P { P(FnDecl { inputs, output, - variadic, + c_variadic, }) } @@ -1308,7 +1308,7 @@ impl Builder { where I: Make { let name = name.make(&self); Self::item(name, self.attrs, self.vis, self.span, self.id, - ItemKind::Struct(VariantData::Struct(fields, DUMMY_NODE_ID), + ItemKind::Struct(VariantData::Struct(fields, false), self.generics)) } @@ -1316,7 +1316,7 @@ impl Builder { where I: Make { let name = name.make(&self); Self::item(name, self.attrs, self.vis, self.span, self.id, - ItemKind::Union(VariantData::Struct(fields, DUMMY_NODE_ID), + ItemKind::Union(VariantData::Struct(fields, false), self.generics)) } @@ -1367,6 +1367,7 @@ impl Builder { node: Variant_ { ident: name, attrs: self.attrs, + id: DUMMY_NODE_ID, data: dat, disr_expr: None, }, @@ -1382,6 +1383,7 @@ impl Builder { node: Variant_ { ident: name, attrs: self.attrs, + id: DUMMY_NODE_ID, data: VariantData::Unit(self.id), disr_expr: disc, }, @@ -1577,7 +1579,6 @@ impl Builder { Unsafety::Normal => BlockCheckMode::Default, }, span: self.span, - recovered: false, }) } @@ -1637,7 +1638,7 @@ 
impl Builder { } pub fn attribute(self, style: AttrStyle, path: Pa, tokens: Ts) -> Attribute - where Pa: Make, Ts: Make + where Pa: Make, Ts: Make { let path = path.make(&self); let tokens = tokens.make(&self).into(); @@ -1665,17 +1666,16 @@ impl Builder { let path = path.make(&self); let kind = kind.make(&self); MetaItem { - ident: path, + path: path, node: kind, span: DUMMY_SP, } } pub fn nested_meta_item(self, kind: K) -> NestedMetaItem - where K: Make - { - let kind = kind.make(&self); - dummy_spanned(kind) + where K: Make + { + kind.make(&self) } // Convert the current internal list of outer attributes @@ -1691,7 +1691,7 @@ impl Builder { } pub fn mac(self, path: Pa, tts: Ts, delim: MacDelimiter) -> Mac - where Pa: Make, Ts: Make { + where Pa: Make, Ts: Make { let path = path.make(&self); let tts = tts.make(&self); Spanned { diff --git a/c2rust-ast-exporter/build.rs b/c2rust-ast-exporter/build.rs index 674c9790a..09e5f9b5f 100644 --- a/c2rust-ast-exporter/build.rs +++ b/c2rust-ast-exporter/build.rs @@ -290,7 +290,7 @@ variable or make sure `llvm-config` is on $PATH then re-build. For example: .or(invoke_command(llvm_config.as_ref(), &["--system-libs", "--link-static"])) .unwrap_or(String::new()) .split_whitespace() - .map(|lib| String::from(lib.trim_left_matches("-l"))) + .map(|lib| String::from(lib.trim_start_matches("-l"))) .collect(); let llvm_dylib = invoke_command(llvm_config.as_ref(), &["--libs", "--link-shared"]); @@ -314,7 +314,7 @@ variable or make sure `llvm-config` is on $PATH then re-build. For example: } // Windows is not supported }; let mut dylib_file = String::from("lib"); - dylib_file.push_str(llvm_dylib.trim_left_matches("-l")); + dylib_file.push_str(llvm_dylib.trim_start_matches("-l")); dylib_file.push_str(dylib_suffix); let sysroot = invoke_command( env::var("RUSTC").ok().as_ref(), @@ -353,7 +353,7 @@ variable or make sure `llvm-config` is on $PATH then re-build. 
For example: ]) .unwrap_or(String::new()) .split_whitespace() - .map(|lib| String::from(lib.trim_left_matches("-l"))) + .map(|lib| String::from(lib.trim_start_matches("-l"))) .collect(); Self { diff --git a/c2rust-ast-exporter/src/AstExporter.cpp b/c2rust-ast-exporter/src/AstExporter.cpp index db55dec36..408e08ac7 100644 --- a/c2rust-ast-exporter/src/AstExporter.cpp +++ b/c2rust-ast-exporter/src/AstExporter.cpp @@ -635,6 +635,13 @@ class TranslateASTVisitor final return true; } + bool VisitIndirectGotoStmt(IndirectGotoStmt *IGS) { + std:: string msg = "the GNU C labels-as-values extension is not supported. Aborting."; + + printError(msg, IGS); + abort(); + } + bool VisitLabelStmt(LabelStmt *LS) { std::vector childIds = { LS->getSubStmt() }; @@ -645,7 +652,6 @@ class TranslateASTVisitor final return true; } - bool VisitNullStmt(NullStmt *NS) { std::vector childIds; encode_entry(NS, TagNullStmt, childIds); @@ -724,8 +730,7 @@ class TranslateASTVisitor final Expr::EvalResult eval_result; #endif // CLANG_VERSION_MAJOR if (!expr->EvaluateAsInt(eval_result, *Context)) { - std:: string msg = "Aborting due to the expression in `CaseStmt`\ - not being an integer."; + std:: string msg = "Expression in case statement is not an integer. Aborting."; printError(msg, CS); abort(); } @@ -1218,10 +1223,10 @@ class TranslateASTVisitor final if (!FD->isCanonicalDecl()) return true; - if (FD->hasBody() && FD->isVariadic()) { - // auto fname = FD->getNameString(); - printWarning("variadic functions are not fully supported.", FD); - } + // if (FD->hasBody() && FD->isVariadic()) { + // // auto fname = FD->getNameString(); + // printWarning("variadic functions are not fully supported.", FD); + // } // Use the parameters from the function declaration // the defines the body, if one exists. 
@@ -1517,7 +1522,7 @@ class TranslateASTVisitor final if (warnOnFlexibleArrayDecl(D)) { printWarning("this may be an unsupported flexible array member with size of 1, " "omit the size if this field is intended to be a flexible array member. " - "Note that you must be sure to fix any struct size calculations after " + "Note that you must fix any struct size calculations after " "doing so or else it will likely be off (by one). " "See section 6.7.2.1 of the C99 standard.", D); } diff --git a/c2rust-macros/Cargo.toml b/c2rust-macros/Cargo.toml new file mode 100644 index 000000000..89a4810a4 --- /dev/null +++ b/c2rust-macros/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "c2rust-macros" +version = "0.9.0" +authors = ["Stephen Crane ", "The C2Rust Project Developers "] +edition = "2018" + +[lib] +proc-macro = true + +[dependencies] +proc-macro2 = { version = "0.4", features = ["nightly"] } +quote = "0.6" +syn = { version = "0.15", features = ["full", "extra-traits", "visit"] } diff --git a/c2rust-macros/src/lib.rs b/c2rust-macros/src/lib.rs new file mode 100644 index 000000000..45ccdceb8 --- /dev/null +++ b/c2rust-macros/src/lib.rs @@ -0,0 +1,177 @@ +#![recursion_limit = "128"] +extern crate syn; +extern crate proc_macro; +extern crate proc_macro2; +extern crate quote; + +use proc_macro2::{Span, TokenStream}; +use syn::parse_macro_input; +use syn::{ArgCaptured, Block, FnArg, Ident, Pat, TraitItemMethod, Type, TypeReference}; +use syn::visit::Visit; +use quote::quote; + +#[derive(Default)] +struct VisitorImpls { + tokens: TokenStream, + count: usize, +} + +impl VisitorImpls { + fn generate_visit(&mut self, method_name: &Ident, arg_pat: &Pat, ty: &Type, noop: &Option) { + self.tokens.extend(quote! { + impl MutVisit for #ty { + fn visit(&mut self, f: &mut F) { + f.#method_name(self) + } + } + }); + + let folder_name = format!("Folder{}", self.count); + let folder_ident = Ident::new(&folder_name, Span::call_site()); + + let walk = match noop { + Some(block) => quote! 
{ #block }, + None => { + let noop_fn_name = format!("noop_{}", method_name); + let noop_fn = Ident::new(&noop_fn_name, Span::call_site()); + quote! { + syntax::mut_visit::#noop_fn(#arg_pat, self); + } + } + }; + self.tokens.extend(quote! { + struct #folder_ident + where F: FnMut(&mut #ty) + { + callback: F, + } + + impl MutVisitor for #folder_ident + where F: FnMut(&mut #ty) + { + fn #method_name(&mut self, #arg_pat: &mut #ty) { + #walk + (self.callback)(#arg_pat) + } + } + + impl MutVisitNodes for #ty { + fn visit(target: &mut T, callback: F) + where T: MutVisit, + F: FnMut(&mut Self) + { + let mut f = #folder_ident { callback }; + target.visit(&mut f) + } + } + }); + + self.count += 1; + } + + fn generate_flat_map(&mut self, method_name: &Ident, arg_pat: &Pat, ty: &Type, noop: &Option) { + self.tokens.extend(quote! { + impl MutVisit for #ty { + fn visit(&mut self, f: &mut F) { + let new = f.#method_name(self.clone()); + *self = new.lone(); + } + + fn flat_map(self, f: &mut F) -> SmallVec<[#ty; 1]> { + f.#method_name(self) + } + } + }); + + let folder_name = format!("Folder{}", self.count); + let folder_ident = Ident::new(&folder_name, Span::call_site()); + + let walk = match noop { + Some(block) => quote! { #block }, + None => { + let noop_fn_name = format!("noop_{}", method_name); + let noop_fn = Ident::new(&noop_fn_name, Span::call_site()); + quote! { + syntax::mut_visit::#noop_fn(#arg_pat, self); + } + } + }; + self.tokens.extend(quote! 
{ + struct #folder_ident + where F: FnMut(#ty) -> SmallVec<[#ty; 1]> + { + callback: F, + } + + impl MutVisitor for #folder_ident + where F: FnMut(#ty) -> SmallVec<[#ty; 1]> + { + fn #method_name(&mut self, #arg_pat: #ty) -> SmallVec<[#ty; 1]> { + let mut v = #walk; + v.flat_map_in_place(|x| (self.callback)(x)); + v + } + } + + impl FlatMapNodes for #ty { + fn visit(target: &mut T, callback: F) + where T: MutVisit, + F: FnMut(#ty) -> SmallVec<[#ty; 1]> + { + let mut f = #folder_ident { callback }; + target.visit(&mut f) + } + + fn flat_map(target: T, callback: F) -> SmallVec<[T; 1]> + where T: MutVisit, + F: FnMut(#ty) -> SmallVec<[#ty; 1]> + { + let mut f = #folder_ident { callback }; + target.flat_map(&mut f) + } + } + }); + + self.count += 1; + } +} + +impl<'ast> Visit<'ast> for VisitorImpls { + fn visit_trait_item_method(&mut self, m: &TraitItemMethod) { + let method_name = &m.sig.ident; + let method_noop = &m.default; + match &m.sig.decl.inputs[1] { + FnArg::Captured( + ArgCaptured { + pat, + ty, + .. + } + ) => { + match ty { + Type::Reference( + TypeReference { + mutability: Some(_), + elem, + .. 
+ } + ) => self.generate_visit(method_name, &pat, &elem, method_noop), + + _ => self.generate_flat_map(method_name, &pat, &ty, method_noop), + } + } + + _ => {} + } + } +} + +#[proc_macro] +pub fn gen_visitor_impls(tokens: proc_macro::TokenStream) -> proc_macro::TokenStream { + let visitor_trait: syn::ItemTrait = parse_macro_input!(tokens); + let mut visitor = VisitorImpls::default(); + visitor.visit_item_trait(&visitor_trait); + + visitor.tokens.into() +} + diff --git a/c2rust-refactor/Cargo.toml b/c2rust-refactor/Cargo.toml index f96d7ffe5..3bddd1f35 100644 --- a/c2rust-refactor/Cargo.toml +++ b/c2rust-refactor/Cargo.toml @@ -27,6 +27,7 @@ log = "0.4" rlua = "0.16" slotmap = {version = "0.3", features = ["unstable"]} derive_more = "0.14" +c2rust-macros = { version = "0.9.0", path = "../c2rust-macros" } [lib] name = "c2rust_refactor" diff --git a/c2rust-refactor/gen/ast.txt b/c2rust-refactor/gen/ast.txt index a6fc6063a..8c36a5ba9 100644 --- a/c2rust-refactor/gen/ast.txt +++ b/c2rust-refactor/gen/ast.txt @@ -88,7 +88,7 @@ struct TraitRef { path, ref_id } struct EnumDef { variants } #[extend_span] -struct Variant_ { ident, #[match=ignore] attrs, data, disr_expr } +struct Variant_ { ident, #[match=ignore] attrs, id, data, disr_expr } enum VariantData { Struct(fields, id), Tuple(fields, id), @@ -118,6 +118,7 @@ struct GenericParam { id, ident, attrs, bounds, kind } enum GenericParamKind { Lifetime, Type { default }, + Const { ty }, } #[nonterminal] struct WhereClause { id, predicates, span } @@ -165,6 +166,7 @@ enum TyKind { ImplicitSelf, #[mac_table_record] Mac(mac), Err, + CVarArgs, } flag TraitObjectSyntax; @@ -182,7 +184,7 @@ enum GenericBound { struct PolyTraitRef { trait_ref, span, bound_generic_params } -struct FnDecl { inputs, output, variadic } +struct FnDecl { inputs, output, c_variadic } struct FnHeader { unsafety, asyncness, constness, abi } #[rewrite_print] struct Arg { ty, pat, id } @@ -257,6 +259,7 @@ enum ExprKind { Paren(expr), Try(#[prec=POSTFIX] 
expr), Yield(#[prec=JUMP] expr), + Err, } enum IsAsync { @@ -283,7 +286,7 @@ enum Guard { If(expr), } #[match=custom] #[rewrite_print_recover] #[nonterminal] -struct Block { #[mac_table_seq] stmts, id, rules, span, recovered } +struct Block { #[mac_table_seq] stmts, id, rules, span } #[match=custom] #[mac_table_record] #[nonterminal] @@ -317,6 +320,7 @@ enum LitKind { Float(sym, ty), FloatUnsuffixed(sym), Bool(x), + Err(sym), } @@ -364,10 +368,11 @@ enum GenericArgs { Parenthesized(ppd), } struct AngleBracketedArgs { span, args, bindings } -struct ParenthesisedArgs { span, inputs, output } +struct ParenthesizedArgs { span, inputs, output } enum GenericArg { Lifetime(lt), Type(ty), + Const(c), } struct QSelf { ty, path_span, position } @@ -376,8 +381,6 @@ struct Mac_ { path, delim, tts } flag MacStmtStyle; #[equiv_mode=ignore] flag TokenStream; -#[equiv_mode=ignore] -flag ThinTokenStream; struct MacroDef { tokens, legacy } @@ -444,11 +447,7 @@ enum Nonterminal { enum TokenTree { Token(sp, t), - Delimited(sp, d), -} - -struct Delimited { - delim, tts + Delimited(sp, d, tts), } struct DelimSpan { open, close } @@ -456,13 +455,13 @@ struct DelimSpan { open, close } flag DelimToken; flag Token; -struct MetaItem { ident, node, span } +struct MetaItem { path, node, span } enum MetaItemKind { Word, List(l), NameValue(lit), } -enum NestedMetaItemKind { +enum NestedMetaItem { MetaItem(mi), Literal(lit), } diff --git a/c2rust-refactor/gen/lr_expr.py b/c2rust-refactor/gen/lr_expr.py index 51fda323b..f09408a54 100644 --- a/c2rust-refactor/gen/lr_expr.py +++ b/c2rust-refactor/gen/lr_expr.py @@ -23,37 +23,37 @@ def expr_kind_match(d, mode): yield 'match self {' for v, path in variants_paths(d): - yield ' %s => %s(' % (struct_pattern(v, path, bind_mode=''), path) + yield ' %s => {' % struct_pattern(v, path, bind_mode='') for f in v.fields: if 'lvalue_mut' in f.attrs: - yield ' %s.fold_lvalue_mut(lr),' % f.name + yield ' %s.fold_lvalue_mut(lr);' % f.name elif 'lvalue_imm' in 
f.attrs: - yield ' %s.fold_lvalue(lr),' % f.name + yield ' %s.fold_lvalue(lr);' % f.name elif 'lr_propagate' in f.attrs: - yield ' %s.fold_%s(lr),' % (f.name, mode) + yield ' %s.fold_%s(lr);' % (f.name, mode) elif 'lvalue_kind' in f.attrs: yield ' match %s {' % f.attrs['lvalue_kind'] yield ' Mutability::Mutable =>' yield ' %s.fold_lvalue_mut(lr),' % f.name yield ' Mutability::Immutable =>' yield ' %s.fold_lvalue(lr),' % f.name - yield ' },' + yield ' }' else: - yield ' %s.fold_rvalue(lr),' % f.name - yield ' ),' + yield ' %s.fold_rvalue(lr);' % f.name + yield ' }' yield '}' @linewise def expr_kind_impl(d): yield '#[allow(unused)]' yield 'impl LRExpr for %s {' % d.name - yield ' fn fold_rvalue(self, lr: &mut LR) -> Self {' + yield ' fn fold_rvalue(&mut self, lr: &mut LR) {' yield indent(expr_kind_match(d, 'rvalue'), ' ') yield ' }' - yield ' fn fold_lvalue(self, lr: &mut LR) -> Self {' + yield ' fn fold_lvalue(&mut self, lr: &mut LR) {' yield indent(expr_kind_match(d, 'lvalue'), ' ') yield ' }' - yield ' fn fold_lvalue_mut(self, lr: &mut LR) -> Self {' + yield ' fn fold_lvalue_mut(&mut self, lr: &mut LR) {' yield indent(expr_kind_match(d, 'lvalue_mut'), ' ') yield ' }' yield '}' @@ -62,17 +62,17 @@ def expr_kind_impl(d): def expr_impl(d): yield '#[allow(unused)]' yield 'impl LRExpr for %s {' % d.name - yield ' fn fold_rvalue(self, lr: &mut LR) -> Self {' - yield ' let e = Expr { node: self.node.fold_rvalue(lr), ..self };' - yield ' lr.fold_rvalue(e)' + yield ' fn fold_rvalue(&mut self, lr: &mut LR) {' + yield ' self.node.fold_rvalue(lr);' + yield ' lr.fold_rvalue(self)' yield ' }' - yield ' fn fold_lvalue(self, lr: &mut LR) -> Self {' - yield ' let e = Expr { node: self.node.fold_lvalue(lr), ..self };' - yield ' lr.fold_lvalue(e)' + yield ' fn fold_lvalue(&mut self, lr: &mut LR) {' + yield ' self.node.fold_lvalue(lr);' + yield ' lr.fold_lvalue(self)' yield ' }' - yield ' fn fold_lvalue_mut(self, lr: &mut LR) -> Self {' - yield ' let e = Expr { node: 
self.node.fold_lvalue_mut(lr), ..self };' - yield ' lr.fold_lvalue_mut(e)' + yield ' fn fold_lvalue_mut(&mut self, lr: &mut LR) {' + yield ' self.node.fold_lvalue_mut(lr);' + yield ' lr.fold_lvalue_mut(self)' yield ' }' yield '}' @@ -80,14 +80,11 @@ def expr_impl(d): def null_impl(d): yield '#[allow(unused)]' yield 'impl LRExpr for %s {' % d.name - yield ' fn fold_rvalue(self, lr: &mut LR) -> Self {' - yield ' self' + yield ' fn fold_rvalue(&mut self, lr: &mut LR) {' yield ' }' - yield ' fn fold_lvalue(self, lr: &mut LR) -> Self {' - yield ' self' + yield ' fn fold_lvalue(&mut self, lr: &mut LR) {' yield ' }' - yield ' fn fold_lvalue_mut(self, lr: &mut LR) -> Self {' - yield ' self' + yield ' fn fold_lvalue_mut(&mut self, lr: &mut LR) {' yield ' }' yield '}' @@ -99,7 +96,9 @@ def generate(decls): for d in decls: if d.name == 'Expr': - yield expr_impl(d) + # We implement P manually + # yield expr_impl(d) + pass elif d.name == 'ExprKind': yield expr_kind_impl(d) else: diff --git a/c2rust-refactor/src/analysis/labeled_ty.rs b/c2rust-refactor/src/analysis/labeled_ty.rs index 776dbca23..e6564f863 100644 --- a/c2rust-refactor/src/analysis/labeled_ty.rs +++ b/c2rust-refactor/src/analysis/labeled_ty.rs @@ -20,28 +20,28 @@ use crate::type_map; /// `TyKind` would be annoying, so instead, we let labeled types form arbitrary trees, and /// make the `LabeledTyCtxt` responsible for making those trees match the `Ty`'s structure. #[derive(Clone, PartialEq, Eq)] -pub struct LabeledTyS<'tcx, L: 'tcx> { +pub struct LabeledTyS<'lty, 'tcx: 'lty, L: 'lty> { /// The underlying type. pub ty: Ty<'tcx>, /// The arguments of this type constructor. The number and meaning of these arguments depends /// on which type constructor this is (specifically, which `TyKind` variant is used for /// `self.ty.sty`). - pub args: &'tcx [LabeledTy<'tcx, L>], + pub args: &'lty [LabeledTy<'lty, 'tcx, L>], /// The label for the current type constructor. pub label: L, } /// A labeled type. 
Like `rustc::ty::Ty`, this is a reference to some arena-allocated data. -pub type LabeledTy<'tcx, L> = &'tcx LabeledTyS<'tcx, L>; +pub type LabeledTy<'lty, 'tcx, L> = &'lty LabeledTyS<'lty, 'tcx, L>; -impl<'tcx, L: fmt::Debug> fmt::Debug for LabeledTyS<'tcx, L> { +impl<'lty, 'tcx, L: fmt::Debug> fmt::Debug for LabeledTyS<'lty, 'tcx, L> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{:?}#{:?}{:?}", self.label, self.ty, self.args) } } -impl<'tcx, L> LabeledTyS<'tcx, L> { - pub fn for_each_label(&'tcx self, callback: &mut F) { +impl<'lty, 'tcx, L> LabeledTyS<'lty, 'tcx, L> { + pub fn for_each_label(&'lty self, callback: &mut F) { callback(&self.label); for &arg in self.args { arg.for_each_label(callback); @@ -51,15 +51,15 @@ impl<'tcx, L> LabeledTyS<'tcx, L> { /// Context for constructing `LabeledTy`s. -pub struct LabeledTyCtxt<'tcx, L: 'tcx> { - arena: &'tcx SyncDroplessArena, +pub struct LabeledTyCtxt<'lty, L: 'lty> { + arena: &'lty SyncDroplessArena, _marker: PhantomData, } -impl<'tcx, L: Clone> LabeledTyCtxt<'tcx, L> { +impl<'lty, 'tcx: 'lty, L: Clone> LabeledTyCtxt<'lty, L> { /// Build a new `LabeledTyCtxt`. The `arena` must be the same one used by the `TyCtxt` that /// built the underlying `Ty`s to be labeled. - pub fn new(arena: &'tcx SyncDroplessArena) -> LabeledTyCtxt<'tcx, L> { + pub fn new(arena: &'lty SyncDroplessArena) -> LabeledTyCtxt<'lty, L> { LabeledTyCtxt { arena: arena, _marker: PhantomData, @@ -67,7 +67,7 @@ impl<'tcx, L: Clone> LabeledTyCtxt<'tcx, L> { } /// Manually construct a slice in the context's arena. - pub fn mk_slice(&self, ltys: &[LabeledTy<'tcx, L>]) -> &'tcx [LabeledTy<'tcx, L>] { + pub fn mk_slice(&self, ltys: &[LabeledTy<'lty, 'tcx, L>]) -> &'lty [LabeledTy<'lty, 'tcx, L>] { if ltys.len() == 0 { return &[]; } @@ -76,7 +76,7 @@ impl<'tcx, L: Clone> LabeledTyCtxt<'tcx, L> { /// Manually construct a labeled type. Note that this does not do any checks on `args`! 
The /// caller is responsible for making sure the number of arguments matches `ty.sty`. - pub fn mk(&self, ty: Ty<'tcx>, args: &'tcx [LabeledTy<'tcx, L>], label: L) -> LabeledTy<'tcx, L> { + pub fn mk(&self, ty: Ty<'tcx>, args: &'lty [LabeledTy<'lty, 'tcx, L>], label: L) -> LabeledTy<'lty, 'tcx, L> { self.arena.alloc(LabeledTyS { ty: ty, args: args, @@ -87,7 +87,7 @@ impl<'tcx, L: Clone> LabeledTyCtxt<'tcx, L> { /// Label a `Ty` using a callback. The callback runs at every type constructor to produce a /// label for that node in the tree. - pub fn label) -> L>(&self, ty: Ty<'tcx>, f: &mut F) -> LabeledTy<'tcx, L> { + pub fn label) -> L>(&self, ty: Ty<'tcx>, f: &mut F) -> LabeledTy<'lty, 'tcx, L> { use rustc::ty::TyKind::*; let label = f(ty); match ty.sty { @@ -155,7 +155,7 @@ impl<'tcx, L: Clone> LabeledTyCtxt<'tcx, L> { /// Label multiple `Ty`s using a callback. pub fn label_slice(&self, tys: &[Ty<'tcx>], - f: &mut F) -> &'tcx [LabeledTy<'tcx, L>] + f: &mut F) -> &'lty [LabeledTy<'lty, 'tcx, L>] where F: FnMut(Ty<'tcx>) -> L { self.mk_slice(&tys.iter().map(|ty| self.label(ty, f)).collect::>()) } @@ -170,8 +170,8 @@ impl<'tcx, L: Clone> LabeledTyCtxt<'tcx, L> { /// end up with a `LabeledTy` whose `ty` is `S`, but whose args are `[u32]`. By some /// miracle, this hasn't broken anything yet, but we may need to fix it eventually. pub fn subst(&self, - lty: LabeledTy<'tcx, L>, - substs: &[LabeledTy<'tcx, L>]) -> LabeledTy<'tcx, L> { + lty: LabeledTy<'lty, 'tcx, L>, + substs: &[LabeledTy<'lty, 'tcx, L>]) -> LabeledTy<'lty, 'tcx, L> { match lty.ty.sty { TyKind::Param(ref tp) => { substs[tp.idx as usize] @@ -182,14 +182,14 @@ impl<'tcx, L: Clone> LabeledTyCtxt<'tcx, L> { /// Substitute arguments in multiple labeled types. 
pub fn subst_slice(&self, - ltys: &[LabeledTy<'tcx, L>], - substs: &[LabeledTy<'tcx, L>]) -> &'tcx [LabeledTy<'tcx, L>] { + ltys: &[LabeledTy<'lty, 'tcx, L>], + substs: &[LabeledTy<'lty, 'tcx, L>]) -> &'lty [LabeledTy<'lty, 'tcx, L>] { self.mk_slice(<ys.iter().map(|lty| self.subst(lty, substs)).collect::>()) } /// Run a callback to replace the labels on a type. - pub fn relabel(&self, lty: LabeledTy<'tcx, L2>, func: &mut F) -> LabeledTy<'tcx, L> + pub fn relabel(&self, lty: LabeledTy<'lty, 'tcx, L2>, func: &mut F) -> LabeledTy<'lty, 'tcx, L> where F: FnMut(&L2) -> L { let args = self.relabel_slice(lty.args, func); self.mk(lty.ty, args, func(<y.label)) @@ -197,8 +197,8 @@ impl<'tcx, L: Clone> LabeledTyCtxt<'tcx, L> { /// Replace the labels on several labeled types. pub fn relabel_slice(&self, - ltys: &'tcx [LabeledTy<'tcx, L2>], - func: &mut F) -> &'tcx [LabeledTy<'tcx, L>] + ltys: &'lty [LabeledTy<'lty, 'tcx, L2>], + func: &mut F) -> &'lty [LabeledTy<'lty, 'tcx, L>] where F: FnMut(&L2) -> L { let ltys = ltys.iter().cloned().map(|lty| self.relabel(lty, func)).collect::>(); self.mk_slice(<ys) @@ -206,7 +206,7 @@ impl<'tcx, L: Clone> LabeledTyCtxt<'tcx, L> { } -impl<'tcx, L: fmt::Debug> type_map::Type for LabeledTy<'tcx, L> { +impl<'lty, 'tcx, L: fmt::Debug> type_map::Type for LabeledTy<'lty, 'tcx, L> { fn sty(&self) -> &TyKind { &self.ty.sty } diff --git a/c2rust-refactor/src/analysis/mod.rs b/c2rust-refactor/src/analysis/mod.rs index ddfda8c74..95d80b7e5 100644 --- a/c2rust-refactor/src/analysis/mod.rs +++ b/c2rust-refactor/src/analysis/mod.rs @@ -2,6 +2,7 @@ use std::collections::HashSet; +use arena::SyncDroplessArena; use crate::command::{Registry, DriverCommand}; use crate::driver::Phase; use c2rust_ast_builder::IntoSymbol; @@ -22,7 +23,7 @@ pub mod type_eq; fn register_test_analysis_type_eq(reg: &mut Registry) { reg.register("test_analysis_type_eq", |_args| { Box::new(DriverCommand::new(Phase::Phase3, move |st, cx| { - let result = type_eq::analyze(cx.hir_map(), 
cx.ty_ctxt(), cx.ty_arena(), &st.krate()); + let result = type_eq::analyze(cx.ty_ctxt(), &st.krate()); info!("{:?}", result); })) }); @@ -38,7 +39,8 @@ fn register_test_analysis_type_eq(reg: &mut Registry) { fn register_test_analysis_ownership(reg: &mut Registry) { reg.register("test_analysis_ownership", |_args| { Box::new(DriverCommand::new(Phase::Phase3, move |st, cx| { - let results = ownership::analyze(&st, &cx); + let arena = SyncDroplessArena::default(); + let results = ownership::analyze(&st, &cx, &arena); ownership::dump_results(&cx, &results); })) }); @@ -68,12 +70,13 @@ fn register_mark_related_types(reg: &mut Registry) { reg.register("mark_related_types", |args| { let label = args.get(0).map_or("target", |x| x).into_symbol(); Box::new(DriverCommand::new(Phase::Phase3, move |st, cx| { - let ty_class = type_eq::analyze(cx.hir_map(), cx.ty_ctxt(), cx.ty_arena(), &st.krate()); + let ty_class = type_eq::analyze(cx.ty_ctxt(), &st.krate()); let mut related_classes = HashSet::new(); for &(id, l) in st.marks().iter() { if l == label { - if let Some(&cls) = ty_class.get(&id) { + let hir_id = cx.hir_map().node_to_hir_id(id); + if let Some(&cls) = ty_class.get(&hir_id) { related_classes.insert(cls); } } @@ -81,7 +84,7 @@ fn register_mark_related_types(reg: &mut Registry) { for (&id, &cls) in &ty_class { if related_classes.contains(&cls) { - st.add_mark(id, label); + st.add_mark(cx.hir_map().hir_to_node_id(id), label); } } })) diff --git a/c2rust-refactor/src/analysis/ownership/annot.rs b/c2rust-refactor/src/analysis/ownership/annot.rs index 605a3f615..cb0d0fd32 100644 --- a/c2rust-refactor/src/analysis/ownership/annot.rs +++ b/c2rust-refactor/src/analysis/ownership/annot.rs @@ -22,8 +22,8 @@ use super::constraint::{ConstraintSet, Perm}; use super::context::Ctxt; -struct LTySource<'c, 'a: 'c, 'tcx: 'a> { - cx: &'c mut Ctxt<'a, 'tcx>, +struct LTySource<'c, 'lty, 'a: 'lty, 'tcx: 'a> { + cx: &'c mut Ctxt<'lty, 'a, 'tcx>, // XXX - bit of a hack. 
We keep the def id of the last call to `fn_sig`, and refer to that // inside the map_types callback to figure out the right scope for any SigVars in the type. @@ -32,9 +32,9 @@ struct LTySource<'c, 'a: 'c, 'tcx: 'a> { last_sig_did: Option, } -impl<'c, 'a, 'tcx> TypeSource for LTySource<'c, 'a, 'tcx> { - type Type = LTy<'tcx>; - type Signature = LFnSig<'tcx>; +impl<'c, 'lty, 'a: 'lty, 'tcx: 'a> TypeSource for LTySource<'c, 'lty, 'a, 'tcx> { + type Type = LTy<'lty, 'tcx>; + type Signature = LFnSig<'lty, 'tcx>; fn expr_type(&mut self, _e: &ast::Expr) -> Option { self.last_sig_did = None; @@ -63,9 +63,9 @@ impl<'c, 'a, 'tcx> TypeSource for LTySource<'c, 'a, 'tcx> { } } -pub fn handle_marks<'a, 'tcx>(cx: &mut Ctxt<'a, 'tcx>, - st: &CommandState, - dcx: &RefactorCtxt<'a, 'tcx>) { +pub fn handle_marks<'a, 'tcx, 'lty>(cx: &mut Ctxt<'lty, 'a, 'tcx>, + st: &CommandState, + dcx: &RefactorCtxt<'a, 'tcx>) { let mut fixed_vars = Vec::new(); { let source = LTySource { @@ -159,9 +159,9 @@ impl<'ast> Visitor<'ast> for AttrVisitor<'ast> { } } -pub fn handle_attrs<'a, 'hir, 'tcx>(cx: &mut Ctxt<'a, 'tcx>, - st: &CommandState, - dcx: &RefactorCtxt<'a, 'tcx>) { +pub fn handle_attrs<'a, 'hir, 'tcx, 'lty>(cx: &mut Ctxt<'lty, 'a, 'tcx>, + st: &CommandState, + dcx: &RefactorCtxt<'a, 'tcx>) { let krate = st.krate(); let mut v = AttrVisitor { @@ -199,9 +199,9 @@ pub fn handle_attrs<'a, 'hir, 'tcx>(cx: &mut Ctxt<'a, 'tcx>, for attr in attrs { let meta = match_or!([attr.meta()] Some(x) => x; continue); - match &meta.name().as_str() as &str { + match &meta.path.to_string() as &str { "ownership_constraints" => { - let cset = parse_ownership_constraints(&meta, dcx.ty_arena()) + let cset = parse_ownership_constraints(&meta, cx.arena) .unwrap_or_else(|e| panic!("bad #[ownership_constraints] for {:?}: {}", def_id, e)); @@ -268,15 +268,15 @@ fn meta_item_word(meta: &ast::MetaItem) -> Result<(), &'static str> { } fn nested_meta_item(nmeta: &ast::NestedMetaItem) -> Result<&ast::MetaItem, &'static str> { 
- match nmeta.node { - ast::NestedMetaItemKind::MetaItem(ref m) => Ok(m), - _ => Err("expected NestedMetaItemKind::MetaItem"), + match nmeta { + ast::NestedMetaItem::MetaItem(ref m) => Ok(m), + _ => Err("expected NestedMetaItem::MetaItem"), } } fn nested_str(nmeta: &ast::NestedMetaItem) -> Result { - match nmeta.node { - ast::NestedMetaItemKind::Literal(ref lit) => { + match nmeta { + ast::NestedMetaItem::Literal(ref lit) => { match lit.node { ast::LitKind::Str(s, _) => Ok(s), _ => Err("expected str"), @@ -286,9 +286,9 @@ fn nested_str(nmeta: &ast::NestedMetaItem) -> Result { } } -fn parse_ownership_constraints<'tcx>(meta: &ast::MetaItem, - arena: &'tcx SyncDroplessArena) - -> Result, &'static str> { +fn parse_ownership_constraints<'lty, 'tcx>(meta: &ast::MetaItem, + arena: &'lty SyncDroplessArena) + -> Result, &'static str> { let args = meta_item_list(meta)?; let mut cset = ConstraintSet::new(); @@ -311,9 +311,9 @@ fn parse_ownership_constraints<'tcx>(meta: &ast::MetaItem, Ok(cset) } -fn parse_perm<'tcx>(meta: &ast::MetaItem, - arena: &'tcx SyncDroplessArena) - -> Result, &'static str> { +fn parse_perm<'lty, 'tcx>(meta: &ast::MetaItem, + arena: &'lty SyncDroplessArena) + -> Result, &'static str> { if meta.check_name("min") { let args = meta_item_list(meta)?; if args.len() == 0 { @@ -332,7 +332,7 @@ fn parse_perm<'tcx>(meta: &ast::MetaItem, } else { meta_item_word(meta)?; - let name = meta.name().as_str(); + let name = meta.path.to_string(); match &name as &str { "READ" => return Ok(Perm::read()), "WRITE" => return Ok(Perm::write()), @@ -351,7 +351,7 @@ fn parse_perm<'tcx>(meta: &ast::MetaItem, fn parse_concrete(meta: &ast::MetaItem) -> Result { meta_item_word(meta)?; - let name = meta.name().as_str(); + let name = meta.path.to_string(); match &name as &str { "READ" => Ok(ConcretePerm::Read), "WRITE" => Ok(ConcretePerm::Write), diff --git a/c2rust-refactor/src/analysis/ownership/constraint.rs b/c2rust-refactor/src/analysis/ownership/constraint.rs index 
7eaad5845..7ffccf29a 100644 --- a/c2rust-refactor/src/analysis/ownership/constraint.rs +++ b/c2rust-refactor/src/analysis/ownership/constraint.rs @@ -12,7 +12,7 @@ use super::{ConcretePerm, PermVar, Var}; /// A permission expression. #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)] -pub enum Perm<'tcx> { +pub enum Perm<'lty> { /// A concrete permission. Concrete(ConcretePerm), @@ -20,7 +20,7 @@ pub enum Perm<'tcx> { /// permissions, not `Min`s. // Weird ordering, but it's necessary for `perm_range` - we need a way to write down the // largest and smallest possible `Perm`s, and the largest/smallest `Min` is hard to get. - Min(&'tcx [Perm<'tcx>]), + Min(&'lty [Perm<'lty>]), // Wrappers around the various `PermVar`s. StaticVar(Var), @@ -29,20 +29,20 @@ pub enum Perm<'tcx> { LocalVar(Var), } -impl<'tcx> Perm<'tcx> { - pub fn read() -> Perm<'tcx> { +impl<'lty, 'tcx> Perm<'lty> { + pub fn read() -> Perm<'lty> { Perm::Concrete(ConcretePerm::Read) } - pub fn write() -> Perm<'tcx> { + pub fn write() -> Perm<'lty> { Perm::Concrete(ConcretePerm::Write) } - pub fn move_() -> Perm<'tcx> { + pub fn move_() -> Perm<'lty> { Perm::Concrete(ConcretePerm::Move) } - pub fn var(pv: PermVar) -> Perm<'tcx> { + pub fn var(pv: PermVar) -> Perm<'lty> { match pv { PermVar::Static(v) => Perm::StaticVar(v), PermVar::Sig(v) => Perm::SigVar(v), @@ -64,7 +64,7 @@ impl<'tcx> Perm<'tcx> { /// Construct the minimum of two permissions. This needs a reference to the arena, since it /// may need to allocate a new slice for `Min`. - pub fn min(a: Perm<'tcx>, b: Perm<'tcx>, arena: &'tcx SyncDroplessArena) -> Perm<'tcx> { + pub fn min(a: Perm<'lty>, b: Perm<'lty>, arena: &'lty SyncDroplessArena) -> Perm<'lty> { eprintln!("finding min of {:?} and {:?}", a, b); match (a, b) { // A few easy cases @@ -118,7 +118,7 @@ impl<'tcx> Perm<'tcx> { /// Check if `other` appears somewhere within `self`. Note this checks syntactic presence /// only, not any kind of subtyping relation. 
- pub fn contains(&self, other: Perm<'tcx>) -> bool { + pub fn contains(&self, other: Perm<'lty>) -> bool { if *self == other { return true; } @@ -131,11 +131,11 @@ impl<'tcx> Perm<'tcx> { /// Modify `self` by replacing `old` with each element of `news` in turn, yielding each result /// to `callback`. pub fn for_each_replacement(&self, - arena: &'tcx SyncDroplessArena, - old: Perm<'tcx>, - news: &[Perm<'tcx>], + arena: &'lty SyncDroplessArena, + old: Perm<'lty>, + news: &[Perm<'lty>], mut callback: F) - where F: FnMut(Perm<'tcx>) { + where F: FnMut(Perm<'lty>) { if *self == old { // Easy case for &new in news { @@ -185,7 +185,7 @@ impl<'tcx> Perm<'tcx> { } /// Iterater over each atomic (non-`Min`) permission that appears in `self`. - pub fn for_each_atom)>(&self, callback: &mut F) { + pub fn for_each_atom)>(&self, callback: &mut F) { match *self { Perm::Min(ps) => { for &p in ps { @@ -203,37 +203,37 @@ impl<'tcx> Perm<'tcx> { /// Note that most of the more complex operations are imprecise (unsound) in certain cases /// involving `Min`. Fortunately, these cases seem not to come up often in practice. #[derive(Clone, PartialEq, Eq, Debug)] -pub struct ConstraintSet<'tcx> { - less: BTreeSet<(Perm<'tcx>, Perm<'tcx>)>, - greater: BTreeSet<(Perm<'tcx>, Perm<'tcx>)>, +pub struct ConstraintSet<'lty> { + less: BTreeSet<(Perm<'lty>, Perm<'lty>)>, + greater: BTreeSet<(Perm<'lty>, Perm<'lty>)>, } /// Return a pair of bounds, suitable for use with `BTreeSet::range`, covering all pairs of /// permissions whose first element is `p`. 
-fn perm_range(p: Perm) -> (Bound<(Perm, Perm)>, Bound<(Perm, Perm)>) { +fn perm_range<'lty, 'tcx>(p: Perm<'lty>) -> (Bound<(Perm<'lty>, Perm<'lty>)>, Bound<(Perm<'lty>, Perm<'lty>)>) { (Bound::Included((p, Perm::read())), Bound::Included((p, Perm::LocalVar(Var(!0))))) } -impl<'tcx> ConstraintSet<'tcx> { - pub fn new() -> ConstraintSet<'tcx> { +impl<'lty, 'tcx> ConstraintSet<'lty> { + pub fn new() -> ConstraintSet<'lty> { ConstraintSet { less: BTreeSet::new(), greater: BTreeSet::new(), } } - pub fn iter(&self) -> btree_set::Iter<(Perm<'tcx>, Perm<'tcx>)> { + pub fn iter(&self) -> btree_set::Iter<(Perm<'lty>, Perm<'lty>)> { self.less.iter() } - pub fn add(&mut self, a: Perm<'tcx>, b: Perm<'tcx>) { + pub fn add(&mut self, a: Perm<'lty>, b: Perm<'lty>) { self.less.insert((a, b)); self.greater.insert((b, a)); } /// Add all constraints from `other` to `self`. - pub fn import(&mut self, other: &ConstraintSet<'tcx>) { + pub fn import(&mut self, other: &ConstraintSet<'lty>) { debug!("IMPORT {} constraints", other.less.len()); self.less.extend(other.less.iter().cloned().filter(|&(ref a, ref b)| { debug!("IMPORT CONSTRAINT: {:?} <= {:?}", a, b); @@ -245,10 +245,10 @@ impl<'tcx> ConstraintSet<'tcx> { /// For each constraint in `other`, substitute all atomic permissions using the callback `f`, /// then add the constraint to `self`. pub fn import_substituted(&mut self, - other: &ConstraintSet<'tcx>, - arena: &'tcx SyncDroplessArena, + other: &ConstraintSet<'lty>, + arena: &'lty SyncDroplessArena, f: F) - where F: Fn(Perm<'tcx>) -> Perm<'tcx> { + where F: Fn(Perm<'lty>) -> Perm<'lty> { debug!("IMPORT {} constraints (substituted)", other.less.len()); let subst_one = |p| { @@ -277,9 +277,9 @@ impl<'tcx> ConstraintSet<'tcx> { /// Clone `self`, substituting each atomic permission using the callback `f`. 
pub fn clone_substituted(&self, - arena: &'tcx SyncDroplessArena, - f: F) -> ConstraintSet<'tcx> - where F: Fn(Perm<'tcx>) -> Perm<'tcx> { + arena: &'lty SyncDroplessArena, + f: F) -> ConstraintSet<'lty> + where F: Fn(Perm<'lty>) -> Perm<'lty> { let mut new_cset = ConstraintSet::new(); new_cset.import_substituted(self, arena, f); new_cset @@ -287,10 +287,10 @@ impl<'tcx> ConstraintSet<'tcx> { /// Run a breadth-first search over `map`, starting from `p`. Runs callback `f` on each /// encountered permission. - fn traverse_constraints(map: &BTreeSet<(Perm<'tcx>, Perm<'tcx>)>, - p: Perm<'tcx>, + fn traverse_constraints(map: &BTreeSet<(Perm<'lty>, Perm<'lty>)>, + p: Perm<'lty>, mut f: F) - where F: FnMut(Perm<'tcx>) -> bool { + where F: FnMut(Perm<'lty>) -> bool { let mut seen = HashSet::new(); let mut queue = VecDeque::new(); @@ -315,8 +315,8 @@ impl<'tcx> ConstraintSet<'tcx> { /// /// This only traverses chains of `q <= p`, `r <= q`, etc. It doesn't do anything intelligent /// regarding `Min`. - pub fn for_each_less_than(&self, p: Perm<'tcx>, f: F) - where F: FnMut(Perm<'tcx>) -> bool { + pub fn for_each_less_than(&self, p: Perm<'lty>, f: F) + where F: FnMut(Perm<'lty>) -> bool { Self::traverse_constraints(&self.greater, p, f); } @@ -324,8 +324,8 @@ impl<'tcx> ConstraintSet<'tcx> { /// /// This only traverses chains of `p <= q`, `q <= r`, etc. It doesn't do anything intelligent /// regarding `Min`. - pub fn for_each_greater_than(&self, p: Perm<'tcx>, f: F) - where F: FnMut(Perm<'tcx>) -> bool { + pub fn for_each_greater_than(&self, p: Perm<'lty>, f: F) + where F: FnMut(Perm<'lty>) -> bool { Self::traverse_constraints(&self.less, p, f); } @@ -334,7 +334,7 @@ impl<'tcx> ConstraintSet<'tcx> { /// /// This function may return a lower result than necessary due to imprecise reasoning about /// `Min`. 
- pub fn lower_bound(&self, p: Perm<'tcx>) -> ConcretePerm { + pub fn lower_bound(&self, p: Perm<'lty>) -> ConcretePerm { match p { Perm::Concrete(p) => return p, _ => {}, @@ -359,7 +359,7 @@ impl<'tcx> ConstraintSet<'tcx> { /// /// This function may return a higher result than necessary due to imprecise reasoning about /// `Min`. - pub fn upper_bound(&self, p: Perm<'tcx>) -> ConcretePerm { + pub fn upper_bound(&self, p: Perm<'lty>) -> ConcretePerm { match p { Perm::Concrete(p) => return p, _ => {}, @@ -387,11 +387,11 @@ impl<'tcx> ConstraintSet<'tcx> { /// (strictly) partial assignments, it may report that a satisfying assignment is possible when /// it's not, but never the other way around. pub fn check_partial_assignment(&self, eval: F) -> bool - where F: Fn(Perm<'tcx>) -> Option { + where F: Fn(Perm<'lty>) -> Option { /// Evaluate a permission, recursing into `Perm::Min`s. Returns the computed permission /// value along with two boolean flags `any_missing` and `all_missing`. - fn eval_rec<'tcx, F>(p: Perm<'tcx>, eval: &F) -> (ConcretePerm, bool, bool) - where F: Fn(Perm<'tcx>) -> Option { + fn eval_rec<'lty, 'tcx, F>(p: Perm<'lty>, eval: &F) -> (ConcretePerm, bool, bool) + where F: Fn(Perm<'lty>) -> Option { match p { Perm::Concrete(c) => (c, false, false), Perm::Min(ps) => { @@ -445,7 +445,7 @@ impl<'tcx> ConstraintSet<'tcx> { } /// Obtain an editing cursor for this constraint set. - pub fn edit<'a>(&'a mut self) -> EditConstraintSet<'a, 'tcx> { + pub fn edit<'a>(&'a mut self) -> EditConstraintSet<'a, 'lty> { let to_visit = self.less.iter().cloned().collect(); EditConstraintSet { cset: self, @@ -454,7 +454,7 @@ impl<'tcx> ConstraintSet<'tcx> { } /// Iterate over each atomic permission in each constraint. 
- pub fn for_each_perm)>(&self, mut f: F) { + pub fn for_each_perm)>(&self, mut f: F) { for &(a, b) in &self.less { a.for_each_atom(&mut f); b.for_each_atom(&mut f); @@ -463,17 +463,17 @@ impl<'tcx> ConstraintSet<'tcx> { } /// Editing cursor, for visiting every constraint while adding/removing as you go. -pub struct EditConstraintSet<'a, 'tcx: 'a> { +pub struct EditConstraintSet<'a, 'lty> { /// The underlying constraint set. - cset: &'a mut ConstraintSet<'tcx>, + cset: &'a mut ConstraintSet<'lty>, /// Queue of constraints that have yet to be visited. - to_visit: VecDeque<(Perm<'tcx>, Perm<'tcx>)>, + to_visit: VecDeque<(Perm<'lty>, Perm<'lty>)>, } -impl<'a, 'tcx> EditConstraintSet<'a, 'tcx> { +impl<'a, 'lty> EditConstraintSet<'a, 'lty> { /// Obtain the next constraint if there are any left to be processed. - pub fn next(&mut self) -> Option<(Perm<'tcx>, Perm<'tcx>)> { + pub fn next(&mut self) -> Option<(Perm<'lty>, Perm<'lty>)> { while let Some((a, b)) = self.to_visit.pop_front() { if self.cset.less.contains(&(a, b)) { return Some((a, b)); @@ -484,7 +484,7 @@ impl<'a, 'tcx> EditConstraintSet<'a, 'tcx> { /// Add a new constraint. If the constraint didn't already exist, it will be queued up to be /// visited in the future. - pub fn add(&mut self, a: Perm<'tcx>, b: Perm<'tcx>) { + pub fn add(&mut self, a: Perm<'lty>, b: Perm<'lty>) { if self.cset.less.contains(&(a, b)) { return; } @@ -494,7 +494,7 @@ impl<'a, 'tcx> EditConstraintSet<'a, 'tcx> { } /// Add a constraint, but never queue it up for future visiting. - pub fn add_no_visit(&mut self, a: Perm<'tcx>, b: Perm<'tcx>) { + pub fn add_no_visit(&mut self, a: Perm<'lty>, b: Perm<'lty>) { if self.cset.less.contains(&(a, b)) { return; } @@ -503,7 +503,7 @@ impl<'a, 'tcx> EditConstraintSet<'a, 'tcx> { } /// Remove a constraint. 
- pub fn remove(&mut self, a: Perm<'tcx>, b: Perm<'tcx>) { + pub fn remove(&mut self, a: Perm<'lty>, b: Perm<'lty>) { self.cset.less.remove(&(a, b)); self.cset.greater.remove(&(b, a)); // If it remains in `to_visit`, it will be skipped by `next`. @@ -511,7 +511,7 @@ impl<'a, 'tcx> EditConstraintSet<'a, 'tcx> { } -impl<'tcx> ConstraintSet<'tcx> { +impl<'lty, 'tcx> ConstraintSet<'lty> { /// Remove constraints that are obviously useless, like `READ <= p`. pub fn remove_useless(&mut self) { let mut edit = self.edit(); @@ -550,7 +550,7 @@ impl<'tcx> ConstraintSet<'tcx> { /// Simplify `min(...) <= ...` constraints as much as possible. Unlike `... <= min(...)`, it /// may not always be possible to completely eliminate such constraints. - pub fn simplify_min_lhs(&mut self, arena: &'tcx SyncDroplessArena) { + pub fn simplify_min_lhs(&mut self, arena: &'lty SyncDroplessArena) { let mut edit = self.edit(); 'next: while let Some((a, b)) = edit.next() { @@ -639,7 +639,7 @@ impl<'tcx> ConstraintSet<'tcx> { } /// Simplify the constraint set as best we can. - pub fn simplify(&mut self, arena: &'tcx SyncDroplessArena) { + pub fn simplify(&mut self, arena: &'lty SyncDroplessArena) { self.remove_useless(); self.expand_min_rhs(); self.simplify_min_lhs(arena); @@ -651,11 +651,11 @@ impl<'tcx> ConstraintSet<'tcx> { /// /// This may be imprecise if a removed permission appears as an argument of a `Min`. Simplify /// the constraint set first to remove as many `Min`s as possible before using this function. - pub fn retain_perms(&mut self, arena: &'tcx SyncDroplessArena, filter: F) - where F: Fn(Perm<'tcx>) -> bool { + pub fn retain_perms(&mut self, arena: &'lty SyncDroplessArena, filter: F) + where F: Fn(Perm<'lty>) -> bool { // Collect all atomic permissions that appear in the constraint set. 
let mut atomic_perms = HashSet::new(); - fn collect_atomic<'tcx>(p: Perm<'tcx>, dest: &mut HashSet>) { + fn collect_atomic<'lty, 'tcx>(p: Perm<'lty>, dest: &mut HashSet>) { match p { Perm::Min(ps) => { for &p in ps { diff --git a/c2rust-refactor/src/analysis/ownership/context.rs b/c2rust-refactor/src/analysis/ownership/context.rs index 91cc925f6..aaf61c7e7 100644 --- a/c2rust-refactor/src/analysis/ownership/context.rs +++ b/c2rust-refactor/src/analysis/ownership/context.rs @@ -39,15 +39,15 @@ use super::constraint::Perm; // analysis. These structures generally start out minimally initialized, and are populated as // parts of the analysis runs. -pub struct FuncSumm<'tcx> { - pub sig: LFnSig<'tcx>, +pub struct FuncSumm<'lty, 'tcx> { + pub sig: LFnSig<'lty, 'tcx>, pub num_sig_vars: u32, /// Constraints over signature variables only. /// /// Populated by `inter`. May be initialized early by `annot` if an `#[ownership_constraints]` /// attr is present on one of the variants - in this case, `cset_provided` will be set. - pub sig_cset: ConstraintSet<'tcx>, + pub sig_cset: ConstraintSet<'lty>, /// Was the constraint set provided externally? (If so, we don't process this function during /// `inter`.) @@ -59,14 +59,14 @@ pub struct FuncSumm<'tcx> { pub num_monos: usize, } -pub struct VariantSumm<'tcx> { +pub struct VariantSumm<'lty> { pub func_id: DefId, pub variant_idx: usize, /// Constraints over static, instantiation (callee), and signature variables. /// /// Populated by `intra`. - pub inst_cset: ConstraintSet<'tcx>, + pub inst_cset: ConstraintSet<'lty>, /// List of instantiations, or references to functions. /// @@ -97,29 +97,28 @@ pub struct Instantiation { } -pub struct Ctxt<'a, 'tcx: 'a> { +pub struct Ctxt<'lty, 'a: 'lty, 'tcx: 'a> { pub tcx: TyCtxt<'a, 'tcx, 'tcx>, - pub lcx: LabeledTyCtxt<'tcx, Option>, - pub arena: &'tcx SyncDroplessArena, + pub lcx: LabeledTyCtxt<'lty, Option>, + pub arena: &'lty SyncDroplessArena, /// Types of non-`fn` definitions. 
This includes `static`s and also `struct` fields. - pub static_summ: HashMap>, + pub static_summ: HashMap>, /// Assignment of permission values to static vars. This is only here because this is a /// convenient way to communicate it from `annot` to `inter`. pub static_assign: IndexVec, - funcs: HashMap>, - variants: HashMap>, + funcs: HashMap>, + variants: HashMap>, monos: HashMap<(DefId, usize), MonoSumm>, } -impl<'a, 'tcx> Ctxt<'a, 'tcx> { - pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, - arena: &'tcx SyncDroplessArena) -> Ctxt<'a, 'tcx> { +impl<'lty, 'a: 'lty, 'tcx: 'a> Ctxt<'lty, 'a, 'tcx> { + pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, arena: &'lty SyncDroplessArena) -> Ctxt<'lty, 'a, 'tcx> { Ctxt { tcx: tcx, - lcx: LabeledTyCtxt::new(arena), + lcx: LabeledTyCtxt::new(&arena), arena: arena, static_summ: HashMap::new(), @@ -131,7 +130,7 @@ impl<'a, 'tcx> Ctxt<'a, 'tcx> { } } - pub fn static_ty(&mut self, did: DefId) -> LTy<'tcx> { + pub fn static_ty(&mut self, did: DefId) -> LTy<'lty, 'tcx> { let assign = &mut self.static_assign; match self.static_summ.entry(did) { Entry::Vacant(e) => { @@ -151,11 +150,11 @@ impl<'a, 'tcx> Ctxt<'a, 'tcx> { } } - fn func_summ_impl<'b>(funcs: &'b mut HashMap>, - variants: &mut HashMap>, + fn func_summ_impl<'b>(funcs: &'b mut HashMap>, + variants: &mut HashMap>, tcx: TyCtxt<'a, 'tcx, 'tcx>, - lcx: &mut LabeledTyCtxt<'tcx, Option>, - did: DefId) -> &'b mut FuncSumm<'tcx> { + lcx: &mut LabeledTyCtxt<'lty, Option>, + did: DefId) -> &'b mut FuncSumm<'lty, 'tcx> { match funcs.entry(did) { Entry::Vacant(e) => { assert!(!variants.contains_key(&did), @@ -212,7 +211,7 @@ impl<'a, 'tcx> Ctxt<'a, 'tcx> { } } - pub fn func_summ(&mut self, did: DefId) -> &mut FuncSumm<'tcx> { + pub fn func_summ(&mut self, did: DefId) -> &mut FuncSumm<'lty, 'tcx> { Self::func_summ_impl(&mut self.funcs, &mut self.variants, self.tcx, @@ -220,17 +219,17 @@ impl<'a, 'tcx> Ctxt<'a, 'tcx> { did) } - pub fn get_func_summ(&self, did: DefId) -> &FuncSumm<'tcx> { + pub fn 
get_func_summ(&self, did: DefId) -> &FuncSumm<'lty, 'tcx> { self.funcs.get(&did).unwrap() } - pub fn func_ids<'b>(&'b self) -> FuncIds<'b, 'tcx> { + pub fn func_ids<'b>(&'b self) -> FuncIds<'b, 'lty, 'tcx> { FuncIds { inner: self.funcs.keys(), } } - pub fn variant_ids<'b>(&'b self) -> VariantIds<'b, 'tcx> { + pub fn variant_ids<'b>(&'b self) -> VariantIds<'b, 'lty> { VariantIds { inner: self.variants.keys(), } @@ -242,14 +241,14 @@ impl<'a, 'tcx> Ctxt<'a, 'tcx> { } } - fn add_variant_impl<'b>(funcs: &'b mut HashMap>, - variants: &'b mut HashMap>, + fn add_variant_impl<'b>(funcs: &'b mut HashMap>, + variants: &'b mut HashMap>, tcx: TyCtxt<'a, 'tcx, 'tcx>, - lcx: &mut LabeledTyCtxt<'tcx, Option>, + lcx: &'b mut LabeledTyCtxt<'lty, Option>, func_did: DefId, variant_did: DefId) - -> (&'b mut FuncSumm<'tcx>, - &'b mut VariantSumm<'tcx>) { + -> (&'b mut FuncSumm<'lty, 'tcx>, + &'b mut VariantSumm<'lty>) { let func = Self::func_summ_impl(funcs, variants, tcx, @@ -275,8 +274,8 @@ impl<'a, 'tcx> Ctxt<'a, 'tcx> { } pub fn add_variant(&mut self, func_did: DefId, variant_id: DefId) - -> (&mut FuncSumm<'tcx>, - &mut VariantSumm<'tcx>) { + -> (&mut FuncSumm<'lty, 'tcx>, + &mut VariantSumm<'lty>) { Self::add_variant_impl(&mut self.funcs, &mut self.variants, self.tcx, @@ -286,8 +285,8 @@ impl<'a, 'tcx> Ctxt<'a, 'tcx> { } pub fn add_mono(&mut self, variant_did: DefId) - -> (&mut FuncSumm<'tcx>, - &mut VariantSumm<'tcx>, + -> (&mut FuncSumm<'lty, 'tcx>, + &mut VariantSumm<'lty>, &mut MonoSumm) { let (func, variant) = Self::variant_summ_impl(&mut self.funcs, &mut self.variants, @@ -307,13 +306,13 @@ impl<'a, 'tcx> Ctxt<'a, 'tcx> { (func, variant, mono) } - fn variant_summ_impl<'b>(funcs: &'b mut HashMap>, - variants: &'b mut HashMap>, + fn variant_summ_impl<'b>(funcs: &'b mut HashMap>, + variants: &'b mut HashMap>, tcx: TyCtxt<'a, 'tcx, 'tcx>, - lcx: &mut LabeledTyCtxt<'tcx, Option>, + lcx: &'b mut LabeledTyCtxt<'lty, Option>, variant_did: DefId) - -> (&'b mut FuncSumm<'tcx>, - &'b 
mut VariantSumm<'tcx>) { + -> (&'b mut FuncSumm<'lty, 'tcx>, + &'b mut VariantSumm<'lty>) { if variants.contains_key(&variant_did) { let variant = variants.get_mut(&variant_did).unwrap(); let func = funcs.get_mut(&variant.func_id).unwrap(); @@ -326,7 +325,7 @@ impl<'a, 'tcx> Ctxt<'a, 'tcx> { /// Get the variant and function summaries for a `fn`. The summaries will be created if they /// don't already exist. pub fn variant_summ(&mut self, variant_did: DefId) - -> (&mut FuncSumm<'tcx>, &mut VariantSumm<'tcx>) { + -> (&mut FuncSumm<'lty, 'tcx>, &mut VariantSumm<'lty>) { Self::variant_summ_impl(&mut self.funcs, &mut self.variants, self.tcx, @@ -334,25 +333,25 @@ impl<'a, 'tcx> Ctxt<'a, 'tcx> { variant_did) } - pub fn get_variant_summ(&self, did: DefId) -> &VariantSumm<'tcx> { + pub fn get_variant_summ(&self, did: DefId) -> &VariantSumm<'lty> { &self.variants[&did] } - pub fn variant_func_sig(&mut self, variant_did: DefId) -> LFnSig<'tcx> { + pub fn variant_func_sig(&mut self, variant_did: DefId) -> LFnSig<'lty, 'tcx> { self.variant_summ(variant_did).0.sig } pub fn first_variant_summ(&mut self, func_did: DefId) - -> (&mut FuncSumm<'tcx>, - &mut VariantSumm<'tcx>) { + -> (&mut FuncSumm<'lty, 'tcx>, + &mut VariantSumm<'lty>) { let func = self.funcs.get_mut(&func_did).unwrap(); let variant = self.variants.get_mut(&func.variant_ids[0]).unwrap(); (func, variant) } pub fn mono_summ(&mut self, func_did: DefId, mono_idx: usize) - -> (&mut FuncSumm<'tcx>, - &mut VariantSumm<'tcx>, + -> (&mut FuncSumm<'lty, 'tcx>, + &mut VariantSumm<'lty>, &mut MonoSumm) { let func = self.funcs.get_mut(&func_did).unwrap(); @@ -372,7 +371,7 @@ impl<'a, 'tcx> Ctxt<'a, 'tcx> { } /// Get the variant summary for the variant that owns the given mono. 
- pub fn get_mono_variant_summ(&self, func_did: DefId, mono_idx: usize) -> &VariantSumm<'tcx> { + pub fn get_mono_variant_summ(&self, func_did: DefId, mono_idx: usize) -> &VariantSumm<'lty> { let func = self.get_func_summ(func_did); if func.variant_ids.len() == 1 { @@ -383,17 +382,17 @@ impl<'a, 'tcx> Ctxt<'a, 'tcx> { } - pub fn min_perm(&mut self, a: Perm<'tcx>, b: Perm<'tcx>) -> Perm<'tcx> { + pub fn min_perm(&mut self, a: Perm<'lty>, b: Perm<'lty>) -> Perm<'lty> { Perm::min(a, b, self.arena) } } -fn preload_constraints<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, +fn preload_constraints<'lty, 'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId, - sig: LFnSig<'tcx>) -> Option> { + sig: LFnSig<'lty, 'tcx>) -> Option> { let mut cset = ConstraintSet::new(); - let path = tcx.absolute_item_path_str(def_id); + let path = tcx.def_path_str(def_id); match &path as &str { "core::ptr::::offset" | "core::ptr::::offset" => { @@ -413,11 +412,11 @@ fn preload_constraints<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, Some(cset) } -pub struct FuncIds<'a, 'tcx: 'a> { - inner: hash_map::Keys<'a, DefId, FuncSumm<'tcx>>, +pub struct FuncIds<'a, 'lty, 'tcx> { + inner: hash_map::Keys<'a, DefId, FuncSumm<'lty, 'tcx>>, } -impl<'a, 'tcx> Iterator for FuncIds<'a, 'tcx> { +impl<'a, 'lty, 'tcx> Iterator for FuncIds<'a, 'lty, 'tcx> { type Item = DefId; fn next(&mut self) -> Option { @@ -426,11 +425,11 @@ impl<'a, 'tcx> Iterator for FuncIds<'a, 'tcx> { } -pub struct VariantIds<'a, 'tcx: 'a> { - inner: hash_map::Keys<'a, DefId, VariantSumm<'tcx>>, +pub struct VariantIds<'a, 'lty> { + inner: hash_map::Keys<'a, DefId, VariantSumm<'lty>>, } -impl<'a, 'tcx> Iterator for VariantIds<'a, 'tcx> { +impl<'a, 'lty> Iterator for VariantIds<'a, 'lty> { type Item = DefId; fn next(&mut self) -> Option { diff --git a/c2rust-refactor/src/analysis/ownership/debug.rs b/c2rust-refactor/src/analysis/ownership/debug.rs index c7fd0a09d..0b8708e45 100644 --- a/c2rust-refactor/src/analysis/ownership/debug.rs +++ 
b/c2rust-refactor/src/analysis/ownership/debug.rs @@ -11,9 +11,9 @@ use crate::analysis::labeled_ty::LabeledTy; use super::{ConcretePerm, Perm}; -pub struct Pretty<'tcx, L: 'tcx>(pub LabeledTy<'tcx, L>); +pub struct Pretty<'lty, 'tcx, L: 'lty>(pub LabeledTy<'lty, 'tcx, L>); -pub fn pretty_slice<'a, 'tcx, L>(tys: &'a [LabeledTy<'tcx, L>]) -> &'a [Pretty<'tcx, L>] { +pub fn pretty_slice<'lty, 'tcx, L>(tys: &'lty [LabeledTy<'lty, 'tcx, L>]) -> &'lty [Pretty<'lty, 'tcx, L>] { unsafe { ::std::mem::transmute(tys) } } @@ -89,7 +89,7 @@ impl<'tcx> fmt::Debug for PrettyLabel> { -impl<'tcx, L> fmt::Debug for Pretty<'tcx, L> where L: Copy + fmt::Debug, PrettyLabel: fmt::Debug { +impl<'lty, 'tcx, L> fmt::Debug for Pretty<'lty, 'tcx, L> where L: Copy + fmt::Debug, PrettyLabel: fmt::Debug { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match self.0.ty.sty { TyKind::Ref(_, _, m) => diff --git a/c2rust-refactor/src/analysis/ownership/inst.rs b/c2rust-refactor/src/analysis/ownership/inst.rs index 29812027d..c6f3b99c5 100644 --- a/c2rust-refactor/src/analysis/ownership/inst.rs +++ b/c2rust-refactor/src/analysis/ownership/inst.rs @@ -11,11 +11,11 @@ use super::constraint::ConstraintSet; use super::context::{Ctxt, VariantSumm, Instantiation}; -pub struct InstCtxt<'a, 'tcx: 'a> { - cx: &'a Ctxt<'a, 'tcx>, +pub struct InstCtxt<'lty, 'a: 'lty, 'tcx: 'a> { + cx: &'lty Ctxt<'lty, 'a, 'tcx>, - insts: &'a [Instantiation], - cset: ConstraintSet<'tcx>, + insts: &'lty [Instantiation], + cset: ConstraintSet<'lty>, /// Selected mono idx for each instantiation. 
inst_sel: Vec>, @@ -24,11 +24,11 @@ pub struct InstCtxt<'a, 'tcx: 'a> { inst_assign: IndexVec>, } -impl<'a, 'tcx> InstCtxt<'a, 'tcx> { - pub fn new(cx: &'a Ctxt<'a, 'tcx>, +impl<'lty, 'a, 'tcx> InstCtxt<'lty, 'a, 'tcx> { + pub fn new(cx: &'lty Ctxt<'lty, 'a, 'tcx>, func_did: DefId, mono_idx: usize) - -> InstCtxt<'a, 'tcx> { + -> InstCtxt<'lty, 'a, 'tcx> { let variant = cx.get_mono_variant_summ(func_did, mono_idx); let mono = cx.get_mono_summ(func_did, mono_idx); let cset = build_inst_cset(cx, variant, &mono.assign); @@ -154,10 +154,10 @@ pub fn find_instantiations(cx: &mut Ctxt) { } -pub fn build_inst_cset<'a, 'tcx>(cx: &Ctxt<'a, 'tcx>, - variant: &VariantSumm<'tcx>, +pub fn build_inst_cset<'lty, 'a, 'tcx>(cx: &'lty Ctxt<'lty, 'a, 'tcx>, + variant: &VariantSumm<'lty>, assign: &IndexVec) - -> ConstraintSet<'tcx> { + -> ConstraintSet<'lty> { let mut cset = variant.inst_cset.clone_substituted(cx.arena, |p| { match p { Perm::SigVar(v) => Perm::Concrete(assign[v]), diff --git a/c2rust-refactor/src/analysis/ownership/inter.rs b/c2rust-refactor/src/analysis/ownership/inter.rs index f701efb3e..a8c7c55d1 100644 --- a/c2rust-refactor/src/analysis/ownership/inter.rs +++ b/c2rust-refactor/src/analysis/ownership/inter.rs @@ -40,12 +40,12 @@ impl WorkList { } } -pub struct InterCtxt<'c, 'a: 'c, 'tcx: 'a> { - cx: &'c mut Ctxt<'a, 'tcx>, +pub struct InterCtxt<'c, 'lty, 'a: 'lty, 'tcx: 'a> { + cx: &'c mut Ctxt<'lty, 'a, 'tcx>, // Note: all IDs here are function IDs. Variants are ignored. 
- complete_cset: HashMap>, + complete_cset: HashMap>, work_list: WorkList, rev_deps: HashMap>, @@ -53,8 +53,8 @@ pub struct InterCtxt<'c, 'a: 'c, 'tcx: 'a> { static_rev_deps: HashMap>, } -impl<'c, 'a, 'tcx> InterCtxt<'c, 'a, 'tcx> { - pub fn new(cx: &'c mut Ctxt<'a, 'tcx>) -> InterCtxt<'c, 'a, 'tcx> { +impl<'c, 'lty, 'a, 'tcx> InterCtxt<'c, 'lty, 'a, 'tcx> { + pub fn new(cx: &'c mut Ctxt<'lty, 'a, 'tcx>) -> InterCtxt<'c, 'lty, 'a, 'tcx> { InterCtxt { cx: cx, complete_cset: HashMap::new(), @@ -65,9 +65,8 @@ impl<'c, 'a, 'tcx> InterCtxt<'c, 'a, 'tcx> { } /// Recompute the `complete_cset` of one function. Returns the new cset. - fn compute_one_cset(&mut self, def_id: DefId) -> ConstraintSet<'tcx> { + fn compute_one_cset(&mut self, def_id: DefId) -> ConstraintSet<'lty> { let dummy_cset = ConstraintSet::new(); - let arena = self.cx.arena; let mut cset = { let (func, var) = self.cx.first_variant_summ(def_id); @@ -94,6 +93,7 @@ impl<'c, 'a, 'tcx> InterCtxt<'c, 'a, 'tcx> { } // Copy in complete csets for all instantiations. + let arena = self.cx.arena; for inst in &self.cx.first_variant_summ(def_id).1.insts { let complete = self.complete_cset.get(&inst.callee).unwrap_or(&dummy_cset); eprintln!(" instantiate {:?} for vars {}..", inst.callee, inst.first_inst_var); @@ -114,9 +114,9 @@ impl<'c, 'a, 'tcx> InterCtxt<'c, 'a, 'tcx> { } cset.remove_useless(); - cset.simplify_min_lhs(arena); + cset.simplify_min_lhs(self.cx.arena); - cset.retain_perms(arena, |p| { + cset.retain_perms(self.cx.arena, |p| { match p { Perm::LocalVar(_) | Perm::InstVar(_) => false, _ => true, @@ -144,14 +144,14 @@ impl<'c, 'a, 'tcx> InterCtxt<'c, 'a, 'tcx> { } // Simplify away static vars too. 
- cset.retain_perms(arena, |p| { + cset.retain_perms(self.cx.arena, |p| { match p { Perm::LocalVar(_) | Perm::InstVar(_) | Perm::StaticVar(_) => false, _ => true, } }); - cset.simplify(arena); + cset.simplify(self.cx.arena); cset } diff --git a/c2rust-refactor/src/analysis/ownership/intra.rs b/c2rust-refactor/src/analysis/ownership/intra.rs index 0f92b6fc4..99c2855eb 100644 --- a/c2rust-refactor/src/analysis/ownership/intra.rs +++ b/c2rust-refactor/src/analysis/ownership/intra.rs @@ -2,7 +2,6 @@ use rustc::hir::def_id::DefId; use rustc::mir::*; -use rustc::mir::tcx::PlaceTy; use rustc::ty::{Ty, TyKind}; use rustc_data_structures::indexed_vec::IndexVec; use rustc_target::abi::VariantIdx; @@ -15,7 +14,7 @@ use super::context::{Ctxt, Instantiation}; #[derive(Clone, Copy, PartialEq, Eq, Debug)] -enum Label<'tcx> { +enum Label<'lty> { /// Most `TyKind` get no constructor. None, @@ -24,7 +23,7 @@ enum Label<'tcx> { /// Note this can be an arbitrary permission expression, not just a `PermVar`. Taking the /// address of an lvalue gives a pointer whose permission is the lvalue's path permission, /// which can be arbitrary. - Ptr(Perm<'tcx>), + Ptr(Perm<'lty>), /// `FnDef` ought to be labeled with something like an extra set of `Substs`, but for /// permissions instead of type/lifetimes. However, every one of those `Substs` would simply @@ -34,8 +33,8 @@ enum Label<'tcx> { FnDef(usize), } -impl<'tcx> Label<'tcx> { - fn perm(&self) -> Perm<'tcx> { +impl<'lty> Label<'lty> { + fn perm(&self) -> Perm<'lty> { match *self { Label::Ptr(p) => p, _ => panic!("expected Label::Ptr"), @@ -44,15 +43,15 @@ impl<'tcx> Label<'tcx> { } /// Type aliases for `intra`-specific labeled types. -type ITy<'tcx> = LabeledTy<'tcx, Label<'tcx>>; -type IFnSig<'tcx> = FnSig<'tcx, Label<'tcx>>; +type ITy<'lty, 'tcx> = LabeledTy<'lty, 'tcx, Label<'lty>>; +type IFnSig<'lty, 'tcx> = FnSig<'lty, 'tcx, Label<'lty>>; /// Variant-local analysis context. 
We run one of these for each function variant to produce the /// initial (incomplete) summary. -pub struct IntraCtxt<'c, 'a: 'c, 'tcx: 'a> { - cx: &'c mut Ctxt<'a, 'tcx>, - ilcx: LabeledTyCtxt<'tcx, Label<'tcx>>, +pub struct IntraCtxt<'c, 'lty, 'a: 'lty, 'tcx: 'a> { + cx: &'c mut Ctxt<'lty, 'a, 'tcx>, + ilcx: LabeledTyCtxt<'lty, Label<'tcx>>, /// ID of the variant being processed. def_id: DefId, @@ -60,8 +59,8 @@ pub struct IntraCtxt<'c, 'a: 'c, 'tcx: 'a> { bbid: BasicBlock, stmt_idx: usize, - cset: ConstraintSet<'tcx>, - local_tys: IndexVec>, + cset: ConstraintSet<'lty>, + local_tys: IndexVec>, next_local_var: u32, /// List of function instantiation sites. @@ -81,10 +80,10 @@ pub struct IntraCtxt<'c, 'a: 'c, 'tcx: 'a> { next_inst_var: u32, } -impl<'c, 'a, 'tcx> IntraCtxt<'c, 'a, 'tcx> { - pub fn new(cx: &'c mut Ctxt<'a, 'tcx>, +impl<'c, 'lty, 'a: 'lty, 'tcx: 'a> IntraCtxt<'c, 'lty, 'a, 'tcx> { + pub fn new(cx: &'c mut Ctxt<'lty, 'a, 'tcx>, def_id: DefId, - mir: &'a Mir<'tcx>) -> IntraCtxt<'c, 'a, 'tcx> { + mir: &'a Mir<'tcx>) -> IntraCtxt<'c, 'lty, 'a, 'tcx> { let ilcx = LabeledTyCtxt::new(cx.arena); IntraCtxt { cx: cx, @@ -130,7 +129,7 @@ impl<'c, 'a, 'tcx> IntraCtxt<'c, 'a, 'tcx> { self.cset = self.cx.variant_summ(self.def_id).1.inst_cset.clone(); } - fn relabel_ty(&mut self, lty: LTy<'tcx>) -> ITy<'tcx> { + fn relabel_ty(&mut self, lty: LTy<'lty, 'tcx>) -> ITy<'lty, 'tcx> { self.ilcx.relabel(lty, &mut |&l| { match l { Some(pv) => Label::Ptr(Perm::var(pv)), @@ -139,7 +138,7 @@ impl<'c, 'a, 'tcx> IntraCtxt<'c, 'a, 'tcx> { }) } - fn relabel_sig(&mut self, sig: LFnSig<'tcx>) -> IFnSig<'tcx> { + fn relabel_sig(&mut self, sig: LFnSig<'lty, 'tcx>) -> IFnSig<'lty, 'tcx> { let mut f = |&l: &Option<_>| { match l { Some(pv) => Label::Ptr(Perm::var(pv)), @@ -180,7 +179,7 @@ impl<'c, 'a, 'tcx> IntraCtxt<'c, 'a, 'tcx> { var.insts = self.insts; } - fn local_ty(&mut self, ty: Ty<'tcx>) -> ITy<'tcx> { + fn local_ty(&mut self, ty: Ty<'tcx>) -> ITy<'lty, 'tcx> { let Self { ref 
mut cx, ref mut ilcx, ref mut next_local_var, ref mut next_inst_var, ref mut insts, .. } = *self; ilcx.label(ty, &mut |ty| { @@ -212,38 +211,41 @@ impl<'c, 'a, 'tcx> IntraCtxt<'c, 'a, 'tcx> { }) } - fn local_var_ty(&mut self, l: Local) -> ITy<'tcx> { + fn local_var_ty(&mut self, l: Local) -> ITy<'lty, 'tcx> { self.local_tys[l] } - fn static_ty(&mut self, def_id: DefId) -> ITy<'tcx> { + fn static_ty(&mut self, def_id: DefId) -> ITy<'lty, 'tcx> { let lty = self.cx.static_ty(def_id); self.relabel_ty(lty) } /// Compute the type of an `Lvalue` and the maximum permissions for accessing it. - fn place_lty(&mut self, lv: &Place<'tcx>) -> (ITy<'tcx>, Perm<'tcx>) { + fn place_lty(&mut self, lv: &Place<'tcx>) -> (ITy<'lty, 'tcx>, Perm<'lty>) { let (ty, perm, variant) = self.place_lty_downcast(lv); assert!(variant.is_none(), "expected non-Downcast result"); (ty, perm) } - fn place_lty_downcast(&mut self, - lv: &Place<'tcx>) -> (ITy<'tcx>, Perm<'tcx>, Option) { - match *lv { - Place::Local(l) => (self.local_var_ty(l), Perm::move_(), None), - - Place::Static(ref s) => (self.static_ty(s.def_id), Perm::move_(), None), - - Place::Promoted(ref _p) => { - // TODO: test this - let pty = lv.ty(self.mir, self.cx.tcx); - let ty = expect!([pty] PlaceTy::Ty { ty } => ty); - (self.local_ty(ty), Perm::read(), None) - }, + fn place_lty_downcast( + &mut self, + lv: &Place<'tcx>, + ) -> (ITy<'lty, 'tcx>, Perm<'lty>, Option) { + match lv { + Place::Base(PlaceBase::Local(l)) => (self.local_var_ty(*l), Perm::move_(), None), + + Place::Base(PlaceBase::Static(ref s)) => match s.kind { + StaticKind::Static(def_id) => (self.static_ty(def_id), Perm::move_(), None), + StaticKind::Promoted(ref _p) => { + // TODO: test this + let pty = lv.ty(self.mir, self.cx.tcx); + let ty = pty.ty; + (self.local_ty(ty), Perm::read(), None) + } + } - Place::Projection(ref p) => { + Place::Projection(box p) => { let (base_ty, base_perm, base_variant) = self.place_lty_downcast(&p.base); // Sanity check @@ -275,7 +277,7 @@ 
impl<'c, 'a, 'tcx> IntraCtxt<'c, 'a, 'tcx> { } } - fn field_lty(&mut self, base_ty: ITy<'tcx>, v: VariantIdx, f: Field) -> ITy<'tcx> { + fn field_lty(&mut self, base_ty: ITy<'lty, 'tcx>, v: VariantIdx, f: Field) -> ITy<'lty, 'tcx> { match base_ty.ty.sty { TyKind::Adt(adt, _substs) => { let field_def = &adt.variants[v].fields[f.index()]; @@ -287,7 +289,7 @@ impl<'c, 'a, 'tcx> IntraCtxt<'c, 'a, 'tcx> { } } - fn rvalue_lty(&mut self, rv: &Rvalue<'tcx>) -> (ITy<'tcx>, Perm<'tcx>) { + fn rvalue_lty(&mut self, rv: &Rvalue<'tcx>) -> (ITy<'lty, 'tcx>, Perm<'lty>) { let ty = rv.ty(self.mir, self.cx.tcx); match *rv { @@ -375,7 +377,7 @@ impl<'c, 'a, 'tcx> IntraCtxt<'c, 'a, 'tcx> { } } - fn operand_lty(&mut self, op: &Operand<'tcx>) -> (ITy<'tcx>, Perm<'tcx>) { + fn operand_lty(&mut self, op: &Operand<'tcx>) -> (ITy<'lty, 'tcx>, Perm<'lty>) { match *op { Operand::Copy(ref lv) => self.place_lty(lv), Operand::Move(ref lv) => self.place_lty(lv), @@ -396,7 +398,7 @@ impl<'c, 'a, 'tcx> IntraCtxt<'c, 'a, 'tcx> { /// topmost pointer type. The resulting permission must be no higher than the permission of /// the RHS pointer, and also must be no higher than the permission of any pointer dereferenced /// on the path to the RHS. 
- fn propagate(&mut self, lhs: ITy<'tcx>, rhs: ITy<'tcx>, path_perm: Perm<'tcx>) { + fn propagate(&mut self, lhs: ITy<'lty, 'tcx>, rhs: ITy<'lty, 'tcx>, path_perm: Perm<'lty>) { if let (Label::Ptr(l_perm), Label::Ptr(r_perm)) = (lhs.label, rhs.label) { self.propagate_perm(l_perm, r_perm); @@ -418,7 +420,7 @@ impl<'c, 'a, 'tcx> IntraCtxt<'c, 'a, 'tcx> { } } - fn propagate_eq(&mut self, lhs: ITy<'tcx>, rhs: ITy<'tcx>) { + fn propagate_eq(&mut self, lhs: ITy<'lty, 'tcx>, rhs: ITy<'lty, 'tcx>) { if let (Label::Ptr(l_perm), Label::Ptr(r_perm)) = (lhs.label, rhs.label) { self.propagate_perm(l_perm, r_perm); self.propagate_perm(r_perm, l_perm); @@ -433,7 +435,7 @@ impl<'c, 'a, 'tcx> IntraCtxt<'c, 'a, 'tcx> { } } - fn propagate_perm(&mut self, p1: Perm<'tcx>, p2: Perm<'tcx>) { + fn propagate_perm(&mut self, p1: Perm<'lty>, p2: Perm<'lty>) { eprintln!("ADD: {:?} <= {:?}", p1, p2); self.cset.add(p1, p2); } @@ -464,7 +466,7 @@ impl<'c, 'a, 'tcx> IntraCtxt<'c, 'a, 'tcx> { } - fn ty_fn_sig(&mut self, ty: ITy<'tcx>) -> IFnSig<'tcx> { + fn ty_fn_sig(&mut self, ty: ITy<'lty, 'tcx>) -> IFnSig<'lty, 'tcx> { match ty.ty.sty { TyKind::FnDef(did, _substs) => { let idx = expect!([ty.label] Label::FnDef(idx) => idx); @@ -528,7 +530,6 @@ impl<'c, 'a, 'tcx> IntraCtxt<'c, 'a, 'tcx> { // with them without analysing the actual asm code. StatementKind::InlineAsm { .. } | StatementKind::Retag { .. } | - StatementKind::EscapeToRaw(_) | StatementKind::AscribeUserType(..) | StatementKind::Nop => {}, } diff --git a/c2rust-refactor/src/analysis/ownership/mod.rs b/c2rust-refactor/src/analysis/ownership/mod.rs index d81407910..2b83dbf2b 100644 --- a/c2rust-refactor/src/analysis/ownership/mod.rs +++ b/c2rust-refactor/src/analysis/ownership/mod.rs @@ -99,14 +99,14 @@ pub enum PermVar { } /// A type where pointer type constructors are labeled with permission variables. 
-pub type LTy<'tcx> = LabeledTy<'tcx, Option>; -type LFnSig<'tcx> = FnSig<'tcx, Option>; +pub type LTy<'lty, 'tcx> = LabeledTy<'lty, 'tcx, Option>; +type LFnSig<'lty, 'tcx> = FnSig<'lty, 'tcx, Option>; /// A generic labeled function signature. #[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub struct FnSig<'tcx, L: 'tcx> { - pub inputs: &'tcx [LabeledTy<'tcx, L>], - pub output: LabeledTy<'tcx, L>, +pub struct FnSig<'lty, 'tcx, L: 'lty> { + pub inputs: &'lty [LabeledTy<'lty, 'tcx, L>], + pub output: LabeledTy<'lty, 'tcx, L>, } /// One of the concrete permission values, READ, WRITE, or MOVE. @@ -118,16 +118,16 @@ pub enum ConcretePerm { } -impl<'tcx, L: fmt::Debug> type_map::Signature> for FnSig<'tcx, L> { +impl<'lty, 'tcx, L: fmt::Debug> type_map::Signature> for FnSig<'lty, 'tcx, L> { fn num_inputs(&self) -> usize { self.inputs.len() } - fn input(&self, idx: usize) -> LabeledTy<'tcx, L> { + fn input(&self, idx: usize) -> LabeledTy<'lty, 'tcx, L> { self.inputs[idx] } - fn output(&self) -> LabeledTy<'tcx, L> { + fn output(&self) -> LabeledTy<'lty, 'tcx, L> { self.output } } @@ -164,9 +164,9 @@ fn is_fn(hir_map: &hir::map::Map, def_id: DefId) -> bool { /// Run the intraprocedural step of polymorphic signature inference. Results are written back into /// the `Ctxt`. -fn analyze_intra<'a, 'tcx>(cx: &mut Ctxt<'a, 'tcx>, - hir_map: &hir::map::Map<'tcx>, - tcx: TyCtxt<'a, 'tcx, 'tcx>) { +fn analyze_intra<'a, 'tcx, 'lty>(cx: &mut Ctxt<'lty, 'a, 'tcx>, + hir_map: &hir::map::Map<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>) { for &def_id in tcx.mir_keys(LOCAL_CRATE).iter() { // We currently don't process `static` bodies, even though they do have MIR. if !is_fn(hir_map, def_id) { @@ -188,17 +188,19 @@ fn analyze_intra<'a, 'tcx>(cx: &mut Ctxt<'a, 'tcx>, /// Run the interprocedural step of polymorphic signature inference. Results are written back into /// the `Ctxt`. 
-fn analyze_inter(cx: &mut Ctxt) { +fn analyze_inter<'lty, 'a, 'tcx>(cx: &mut Ctxt<'lty, 'a, 'tcx>) { let mut inter_cx = InterCtxt::new(cx); inter_cx.process(); inter_cx.finish(); } /// Run the analysis. -pub fn analyze<'a, 'tcx>(st: &CommandState, - dcx: &RefactorCtxt<'a, 'tcx>) - -> AnalysisResult<'tcx> { - let mut cx = Ctxt::new(dcx.ty_ctxt(), dcx.ty_arena()); +pub fn analyze<'lty, 'a: 'lty, 'tcx: 'a>( + st: &CommandState, + dcx: &RefactorCtxt<'a, 'tcx>, + arena: &'lty SyncDroplessArena, +) -> AnalysisResult<'lty, 'tcx> { + let mut cx = Ctxt::new(dcx.ty_ctxt(), arena); // Process the annotations and marks provided by the user. handle_attrs(&mut cx, st, dcx); @@ -213,30 +215,30 @@ pub fn analyze<'a, 'tcx>(st: &CommandState, find_instantiations(&mut cx); // Convert results to a more usable format. - convert_results(&cx) + cx.into() } /// A type where pointers are labeled with variables. -pub type VTy<'tcx> = LabeledTy<'tcx, Option>; +pub type VTy<'lty, 'tcx> = LabeledTy<'lty, 'tcx, Option>; /// A signature where pointers are labeled with variables. -pub type VFnSig<'tcx> = FnSig<'tcx, Option>; +pub type VFnSig<'lty, 'tcx> = FnSig<'lty, 'tcx, Option>; /// A type where pointers are labeled with concrete permissions. -pub type PTy<'tcx> = LabeledTy<'tcx, Option>; +pub type PTy<'lty, 'tcx> = LabeledTy<'lty, 'tcx, Option>; /// A signature where pointers are labeled with concrete permissions. -pub type PFnSig<'tcx> = FnSig<'tcx, Option>; +pub type PFnSig<'lty, 'tcx> = FnSig<'lty, 'tcx, Option>; /// The collected results of running the analysis. -pub struct AnalysisResult<'tcx> { +pub struct AnalysisResult<'lty, 'tcx> { /// The permission-labeled type of every non-fn item. This includes statics, consts, and /// struct/enum fields. - pub statics: HashMap>, + pub statics: HashMap>, /// Results for to each (analysis-level) function. 
Note that only the primary variant of each /// variant group will have its `DefId` present in this table - look up `variants[&id].func_id` /// first if you aren't sure whether a `fn` is a primary variant. - pub funcs: HashMap>, + pub funcs: HashMap>, /// Results for to each variant `fn`. Every `fn` that was analyzed should have an entry /// in this table. @@ -245,17 +247,20 @@ pub struct AnalysisResult<'tcx> { /// Results for each monomorphization of each analysis-level function, indexed by function ID /// and monomorphization index. pub monos: HashMap<(DefId, usize), MonoResult>, + + /// Arena used to allocate all type wrappers + arena: &'lty SyncDroplessArena, } /// Results specific to an analysis-level function. -pub struct FunctionResult<'tcx> { +pub struct FunctionResult<'lty, 'tcx> { /// Polymorphic function signature. Each pointer is labeled with a `SigVar`. - pub sig: VFnSig<'tcx>, + pub sig: VFnSig<'lty, 'tcx>, pub num_sig_vars: u32, /// Constraint set relating `SigVar`s to each other and to concrete permission values. - pub cset: ConstraintSet<'tcx>, + pub cset: ConstraintSet<'lty>, /// List of variant IDs, for multi-variant functions. If the function has only a single /// variant, this field is `None` and the variant's ID is the same as the function's ID. @@ -317,145 +322,152 @@ pub struct MonoResult { pub callee_mono_idxs: Vec, } -impl<'tcx> AnalysisResult<'tcx> { +impl<'lty, 'tcx> AnalysisResult<'lty, 'tcx> { /// Get the function and variant results for a `fn` item-like. - pub fn fn_results(&self, id: DefId) -> (&FunctionResult<'tcx>, &VariantResult) { + pub fn fn_results(&self, id: DefId) -> (&FunctionResult<'lty, 'tcx>, &VariantResult) { let vr = &self.variants[&id]; let fr = &self.funcs[&vr.func_id]; (fr, vr) } -} - -/// Extract the useful information from the `Ctxt`, and collect it into an `AnalysisResult`. 
-fn convert_results<'a, 'tcx>(cx: &Ctxt<'a, 'tcx>) -> AnalysisResult<'tcx> { - let mut r = AnalysisResult { - statics: HashMap::new(), - funcs: HashMap::new(), - variants: HashMap::new(), - monos: HashMap::new(), - }; - - // statics - let perm_lcx = LabeledTyCtxt::new(cx.arena); - for (&def_id, <y) in cx.static_summ.iter() { - let pty = perm_lcx.relabel(lty, &mut |p| { - if let Some(PermVar::Static(v)) = *p { - Some(cx.static_assign[v]) - } else { - None - } - }); - r.statics.insert(def_id, pty); + pub fn arena(&self) -> &'lty SyncDroplessArena { + self.arena } +} + - // funcs +impl<'lty, 'a, 'tcx> From> for AnalysisResult<'lty, 'tcx> { + /// Extract the useful information from the `Ctxt`, and collect it into an `AnalysisResult`. + fn from(cx: Ctxt<'lty, 'a, 'tcx>) -> AnalysisResult<'lty, 'tcx> { + let mut statics = HashMap::new(); + let mut funcs = HashMap::new(); + let mut variants = HashMap::new(); + let mut monos = HashMap::new(); - let var_lcx = LabeledTyCtxt::new(cx.arena); - for def_id in cx.func_ids() { - let func = cx.get_func_summ(def_id); + // statics - let sig = { - let mut f = |p: &Option<_>| { - if let Some(PermVar::Sig(v)) = *p { - Some(v) + let perm_lcx = LabeledTyCtxt::new(cx.arena); + for (&def_id, <y) in cx.static_summ.iter() { + let pty = perm_lcx.relabel(lty, &mut |p| { + if let Some(PermVar::Static(v)) = *p { + Some(cx.static_assign[v]) } else { None } - }; - FnSig { - inputs: var_lcx.relabel_slice(func.sig.inputs, &mut f), - output: var_lcx.relabel(func.sig.output, &mut f), - } - }; - - let variant_ids = - if func.variant_ids.len() == 1 { None } - else { Some(func.variant_ids.clone()) }; - - r.funcs.insert(def_id, FunctionResult { - sig: sig, - num_sig_vars: func.num_sig_vars, - cset: func.sig_cset.clone(), - variants: variant_ids, - num_monos: func.num_monos, - }); + }); + statics.insert(def_id, pty); + } + // funcs - // func variants + let var_lcx = LabeledTyCtxt::new(cx.arena); + for def_id in cx.func_ids() { + let func = 
cx.get_func_summ(def_id); - for (idx, &var_id) in func.variant_ids.iter().enumerate() { - let variant = cx.get_variant_summ(var_id); - let func_refs = variant.insts.iter().map(|inst| { - FuncRef { - def_id: inst.callee, - span: inst.span, + let sig = { + let mut f = |p: &Option<_>| { + if let Some(PermVar::Sig(v)) = *p { + Some(v) + } else { + None + } + }; + FnSig { + inputs: var_lcx.relabel_slice(func.sig.inputs, &mut f), + output: var_lcx.relabel(func.sig.output, &mut f), } - }).collect(); + }; - r.variants.insert(var_id, VariantResult { - func_id: def_id, - index: idx, - func_refs: func_refs, + let variant_ids = + if func.variant_ids.len() == 1 { None } + else { Some(func.variant_ids.clone()) }; + + funcs.insert(def_id, FunctionResult { + sig: sig, + num_sig_vars: func.num_sig_vars, + cset: func.sig_cset.clone(), + variants: variant_ids, + num_monos: func.num_monos, }); - } - // func monos + // func variants - // Assign suffixes if not provided. - let mut suffixes = Vec::new(); - if func.monos_provided { - // Do nothing. If monos were provided, we'll use their provided names. - } else if func.num_monos == 1 { - // Use the original name. - suffixes.push(String::new()); - } else { - /// Default suffixes corresponding to the three concrete permissions. - static SUFFIX_BASE: [&'static str; 3] = ["", "mut", "take"]; - // If more than one mono tries to use the same default suffix, we need to append a - // number to disambiguate. - let mut suffix_count = [0, 0, 0]; + for (idx, &var_id) in func.variant_ids.iter().enumerate() { + let variant = cx.get_variant_summ(var_id); + let func_refs = variant.insts.iter().map(|inst| { + FuncRef { + def_id: inst.callee, + span: inst.span, + } + }).collect(); - let is_output = mono::infer_outputs(func); + variants.insert(var_id, VariantResult { + func_id: def_id, + index: idx, + func_refs: func_refs, + }); + } - // Guess a suffix for each mono depending on its output types. 
Automatic suffixes look - // like "", "mut", "take", "2", "mut3", "take4", etc. - for idx in 0 .. func.num_monos { - let mono = cx.get_mono_summ(def_id, idx); - let max_perm = is_output.iter_enumerated() - .filter(|&(_, &out)| out) - .map(|(v, _)| mono.assign[v]) - .max().unwrap_or(ConcretePerm::Read); + // func monos - let idx = max_perm as usize; - suffix_count[idx] += 1; - let suffix = if suffix_count[idx] == 1 { - SUFFIX_BASE[idx].to_owned() - } else { - format!("{}{}", SUFFIX_BASE[idx], suffix_count[idx]) - }; - suffixes.push(suffix); + // Assign suffixes if not provided. + let mut suffixes = Vec::new(); + if func.monos_provided { + // Do nothing. If monos were provided, we'll use their provided names. + } else if func.num_monos == 1 { + // Use the original name. + suffixes.push(String::new()); + } else { + /// Default suffixes corresponding to the three concrete permissions. + static SUFFIX_BASE: [&'static str; 3] = ["", "mut", "take"]; + // If more than one mono tries to use the same default suffix, we need to append a + // number to disambiguate. + let mut suffix_count = [0, 0, 0]; + + let is_output = mono::infer_outputs(func); + + // Guess a suffix for each mono depending on its output types. Automatic suffixes look + // like "", "mut", "take", "2", "mut3", "take4", etc. + for idx in 0 .. func.num_monos { + let mono = cx.get_mono_summ(def_id, idx); + + let max_perm = is_output.iter_enumerated() + .filter(|&(_, &out)| out) + .map(|(v, _)| mono.assign[v]) + .max().unwrap_or(ConcretePerm::Read); + + let idx = max_perm as usize; + suffix_count[idx] += 1; + let suffix = if suffix_count[idx] == 1 { + SUFFIX_BASE[idx].to_owned() + } else { + format!("{}{}", SUFFIX_BASE[idx], suffix_count[idx]) + }; + suffixes.push(suffix); + } } - } - for idx in 0 .. func.num_monos { - let mono = cx.get_mono_summ(def_id, idx); + for idx in 0 .. 
func.num_monos { + let mono = cx.get_mono_summ(def_id, idx); - let suffix = - if func.monos_provided { mono.suffix.clone() } + let suffix = + if func.monos_provided { mono.suffix.clone() } else { suffixes[idx].clone() }; - r.monos.insert((def_id, idx), MonoResult { - suffix: suffix, - assign: mono.assign.clone(), - callee_mono_idxs: mono.callee_mono_idxs.clone(), - }); + monos.insert((def_id, idx), MonoResult { + suffix: suffix, + assign: mono.assign.clone(), + callee_mono_idxs: mono.callee_mono_idxs.clone(), + }); + } } - } - r + let Ctxt { arena, .. } = cx; + + AnalysisResult { statics, funcs, variants, monos, arena } + } } /// Print the analysis results to stderr, for debugging. diff --git a/c2rust-refactor/src/analysis/ownership/mono.rs b/c2rust-refactor/src/analysis/ownership/mono.rs index 387220de7..e6e519445 100644 --- a/c2rust-refactor/src/analysis/ownership/mono.rs +++ b/c2rust-refactor/src/analysis/ownership/mono.rs @@ -19,9 +19,9 @@ pub fn infer_outputs(summ: &FuncSumm) -> IndexVec { } } - fn walk_input<'tcx>(ty: LTy<'tcx>, + fn walk_input<'lty, 'tcx>(ty: LTy<'lty, 'tcx>, is_out: &mut IndexVec, - cset: &ConstraintSet<'tcx>) { + cset: &ConstraintSet<'lty>) { let mut target_out = false; if let Some(p) = ty.label { if cset.lower_bound(Perm::var(p)) >= ConcretePerm::Write { @@ -66,16 +66,16 @@ fn for_each_output_assignment(summ: &FuncSumm, mut callback: F) where F: FnMut(&IndexVec>) { - struct State<'a, 'tcx: 'a, F: 'a> { + struct State<'lty, F: 'lty> { max: Var, - is_out: &'a IndexVec, - is_bounded: &'a IndexVec, - cset: &'a ConstraintSet<'tcx>, + is_out: &'lty IndexVec, + is_bounded: &'lty IndexVec, + cset: &'lty ConstraintSet<'lty>, assignment: IndexVec>, - callback: &'a mut F, + callback: &'lty mut F, } - impl<'a, 'tcx, F> State<'a, 'tcx, F> + impl<'lty, 'tcx, F> State<'lty, F> where F: FnMut(&IndexVec>) { fn walk_vars(&mut self, cur: Var) { if cur >= self.max { @@ -124,14 +124,14 @@ fn for_each_output_assignment(summ: &FuncSumm, fn 
find_input_assignment(summ: &FuncSumm, out_assign: &IndexVec>) -> Option> { - struct State<'a, 'tcx: 'a> { + struct State<'lty> { max: Var, - out_assign: &'a IndexVec>, - cset: &'a ConstraintSet<'tcx>, + out_assign: &'lty IndexVec>, + cset: &'lty ConstraintSet<'lty>, assignment: IndexVec, } - impl<'a, 'tcx> State<'a, 'tcx> { + impl<'lty, 'tcx> State<'lty> { fn walk_vars(&mut self, cur: Var) -> bool { if cur >= self.max { return true; diff --git a/c2rust-refactor/src/analysis/type_eq.rs b/c2rust-refactor/src/analysis/type_eq.rs index fca954a1a..30455b9e7 100644 --- a/c2rust-refactor/src/analysis/type_eq.rs +++ b/c2rust-refactor/src/analysis/type_eq.rs @@ -39,7 +39,6 @@ use std::collections::HashMap; use arena::SyncDroplessArena; use ena::unify::{UnificationTable, UnifyKey, InPlace}; -use rustc::hir; use rustc::hir::*; use rustc::hir::def_id::DefId; use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap}; @@ -83,25 +82,25 @@ impl UnifyKey for TyLabel { type Label = Option; /// A `Ty` where every node is labeled with a unification key. -type LTy<'tcx> = LabeledTy<'tcx, Label>; +type LTy<'lty, 'tcx> = LabeledTy<'lty, 'tcx, Label>; /// A `FnSig` where every node is labeled with a unification key. #[derive(Clone, Copy, Debug)] -struct LFnSig<'tcx> { - inputs: &'tcx [LTy<'tcx>], - output: LTy<'tcx>, - variadic: bool, +struct LFnSig<'lty, 'tcx> { + inputs: &'lty [LTy<'lty, 'tcx>], + output: LTy<'lty, 'tcx>, + c_variadic: bool, } /// A table for tracking labeled types and their unifications. 
-struct LTyTable<'tcx> { +struct LTyTable<'lty> { unif: RefCell>>, - lcx: LabeledTyCtxt<'tcx, Label>, + lcx: LabeledTyCtxt<'lty, Label>, } -impl<'tcx> LTyTable<'tcx> { - fn new(arena: &'tcx SyncDroplessArena) -> LTyTable<'tcx> { +impl<'lty, 'tcx> LTyTable<'lty> { + fn new(arena: &'lty SyncDroplessArena) -> LTyTable<'lty> { LTyTable { unif: RefCell::new(UnificationTable::new()), lcx: LabeledTyCtxt::new(arena), @@ -110,61 +109,61 @@ impl<'tcx> LTyTable<'tcx> { /// Label a `Ty` with fresh unification keys. - fn label(&self, ty: ty::Ty<'tcx>) -> LTy<'tcx> { + fn label(&self, ty: ty::Ty<'tcx>) -> LTy<'lty, 'tcx> { self.lcx.label(ty, &mut |_| Some(self.unif.borrow_mut().new_key(()))) } - fn label_slice(&self, tys: &[ty::Ty<'tcx>]) -> &'tcx [LTy<'tcx>] { + fn label_slice(&self, tys: &[ty::Ty<'tcx>]) -> &'lty [LTy<'lty, 'tcx>] { self.lcx.label_slice(tys, &mut |_| Some(self.unif.borrow_mut().new_key(()))) } - fn label_sig(&self, sig: ty::FnSig<'tcx>) -> LFnSig<'tcx> { + fn label_sig(&self, sig: ty::FnSig<'tcx>) -> LFnSig<'lty, 'tcx> { LFnSig { inputs: self.label_slice(sig.inputs()), output: self.label(sig.output()), - variadic: sig.variadic, + c_variadic: sig.c_variadic, } } /// Produce a dummy `LTy` on which unification is a no-op. In effect, every use of the dummy /// type behaves as if it were a fresh type. Running `unify(dummy, a); unify(dummy, b);` does /// not result in the unification of `a` and `b`. 
- fn non_unifiable(&self, ty: ty::Ty<'tcx>) -> LTy<'tcx> { + fn non_unifiable(&self, ty: ty::Ty<'tcx>) -> LTy<'lty, 'tcx> { self.lcx.label(ty, &mut |_| None) } - fn non_unifiable_slice(&self, tys: &[ty::Ty<'tcx>]) -> &'tcx [LTy<'tcx>] { + fn non_unifiable_slice(&self, tys: &[ty::Ty<'tcx>]) -> &'lty [LTy<'lty, 'tcx>] { self.lcx.label_slice(tys, &mut |_| None) } - fn non_unifiable_sig(&self, sig: ty::FnSig<'tcx>) -> LFnSig<'tcx> { + fn non_unifiable_sig(&self, sig: ty::FnSig<'tcx>) -> LFnSig<'lty, 'tcx> { LFnSig { inputs: self.non_unifiable_slice(sig.inputs()), output: self.non_unifiable(sig.output()), - variadic: sig.variadic, + c_variadic: sig.c_variadic, } } - fn subst(&self, lty: LTy<'tcx>, substs: &[LTy<'tcx>]) -> LTy<'tcx> { + fn subst(&self, lty: LTy<'lty, 'tcx>, substs: &[LTy<'lty, 'tcx>]) -> LTy<'lty, 'tcx> { self.lcx.subst(lty, substs) } - fn subst_slice(&self, ltys: &[LTy<'tcx>], substs: &[LTy<'tcx>]) -> &'tcx [LTy<'tcx>] { + fn subst_slice(&self, ltys: &[LTy<'lty, 'tcx>], substs: &[LTy<'lty, 'tcx>]) -> &'lty [LTy<'lty, 'tcx>] { self.lcx.subst_slice(ltys, substs) } - fn subst_sig(&self, sig: LFnSig<'tcx>, substs: &[LTy<'tcx>]) -> LFnSig<'tcx> { + fn subst_sig(&self, sig: LFnSig<'lty, 'tcx>, substs: &[LTy<'lty, 'tcx>]) -> LFnSig<'lty, 'tcx> { LFnSig { inputs: self.subst_slice(sig.inputs, substs), output: self.subst(sig.output, substs), - variadic: sig.variadic, + c_variadic: sig.c_variadic, } } /// Unify two types, including any type arguments they may have. 
- fn unify(&self, lty1: LTy<'tcx>, lty2: LTy<'tcx>) { + fn unify(&self, lty1: LTy<'lty, 'tcx>, lty2: LTy<'lty, 'tcx>) { if let (Some(l1), Some(l2)) = (lty1.label, lty2.label) { self.unif.borrow_mut().union(l1, l2); } @@ -174,7 +173,7 @@ impl<'tcx> LTyTable<'tcx> { } } - fn unify_slices(&self, ltys1: &[LTy<'tcx>], ltys2: &[LTy<'tcx>]) { + fn unify_slices(&self, ltys1: &[LTy<'lty, 'tcx>], ltys2: &[LTy<'lty, 'tcx>]) { for (lty1, lty2) in ltys1.iter().zip(ltys2.iter()) { self.unify(lty1, lty2); } @@ -184,19 +183,19 @@ impl<'tcx> LTyTable<'tcx> { /// Walk over typechecking tables, building a labeled type for each expr and pattern. -struct ExprPatVisitor<'a, 'tcx: 'a> { +struct ExprPatVisitor<'lty, 'a: 'lty, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, - ltt: &'a LTyTable<'tcx>, + ltt: &'lty LTyTable<'lty>, /// The labeled unadjusted type, for every node that has a type. - unadjusted: HashMap>, + unadjusted: HashMap>, /// The labeled adjusted type, for every node that has adjustments. - adjusted: HashMap>, + adjusted: HashMap>, /// The labeled substitutions, for every node where type substitutions were applied. - substs: HashMap]>, + substs: HashMap]>, } -impl<'a, 'tcx> ExprPatVisitor<'a, 'tcx> { +impl<'lty, 'a, 'tcx> ExprPatVisitor<'lty, 'a, 'tcx> { /// Process the type tables for a single body. fn handle_body(&mut self, body_id: BodyId) { let tables = self.tcx.body_tables(body_id); @@ -218,7 +217,7 @@ impl<'a, 'tcx> ExprPatVisitor<'a, 'tcx> { } } -impl<'a, 'hir> ItemLikeVisitor<'hir> for ExprPatVisitor<'a, 'hir> { +impl<'lty, 'a, 'hir> ItemLikeVisitor<'hir> for ExprPatVisitor<'lty, 'a, 'hir> { // Visit every itemlike with a BodyId, and call `handle_body` on each. fn visit_item(&mut self, item: &'hir Item) { @@ -254,50 +253,49 @@ impl<'a, 'hir> ItemLikeVisitor<'hir> for ExprPatVisitor<'a, 'hir> { /// `type_map::TypeSource` for getting `TyCtxt` results. Used when collecting the labeled types /// for `ast::Ty` nodes. 
-struct LabelTysSource<'a, 'tcx: 'a> { - hir_map: &'a hir::map::Map<'tcx>, +struct LabelTysSource<'lty, 'a: 'lty, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, - ltt: &'a LTyTable<'tcx>, + ltt: &'lty LTyTable<'lty>, } -impl<'a, 'tcx> LabelTysSource<'a, 'tcx> { +impl<'lty, 'a, 'tcx> LabelTysSource<'lty, 'a, 'tcx> { fn get_tables(&self, id: NodeId) -> &'tcx TypeckTables<'tcx> { - let parent = self.hir_map.get_parent(id); - let parent_body = self.hir_map.body_owned_by(parent); + let parent = self.tcx.hir().get_parent_item(self.tcx.hir().node_to_hir_id(id)); + let parent_body = self.tcx.hir().body_owned_by(parent); self.tcx.body_tables(parent_body) } - fn node_lty(&self, id: NodeId) -> LTy<'tcx> { + fn node_lty(&self, id: NodeId) -> LTy<'lty, 'tcx> { let tables = self.get_tables(id); - let hir_id = self.hir_map.node_to_hir_id(id); - let ty = tables.node_id_to_type(hir_id); + let hir_id = self.tcx.hir().node_to_hir_id(id); + let ty = tables.node_type(hir_id); self.ltt.label(ty) } } -impl<'a, 'tcx> type_map::TypeSource for LabelTysSource<'a, 'tcx> { - type Type = LTy<'tcx>; - type Signature = LFnSig<'tcx>; +impl<'lty, 'a, 'tcx> type_map::TypeSource for LabelTysSource<'lty, 'a, 'tcx> { + type Type = LTy<'lty, 'tcx>; + type Signature = LFnSig<'lty, 'tcx>; - fn expr_type(&mut self, e: &ast::Expr) -> Option> { + fn expr_type(&mut self, e: &ast::Expr) -> Option> { Some(self.node_lty(e.id)) } - fn pat_type(&mut self, p: &ast::Pat) -> Option> { + fn pat_type(&mut self, p: &ast::Pat) -> Option> { Some(self.node_lty(p.id)) } - fn def_type(&mut self, did: DefId) -> Option> { + fn def_type(&mut self, did: DefId) -> Option> { let ty = self.tcx.type_of(did); Some(self.ltt.label(ty)) } - fn fn_sig(&mut self, did: DefId) -> Option> { + fn fn_sig(&mut self, did: DefId) -> Option> { let sig = self.tcx.fn_sig(did); Some(self.ltt.label_sig(*sig.skip_binder())) } - fn closure_sig(&mut self, did: DefId) -> Option> { + fn closure_sig(&mut self, did: DefId) -> Option> { self.fn_sig(did).map(|sig| { 
// The returned signature has the arguments wrapped in a tuple LFnSig { @@ -308,36 +306,35 @@ impl<'a, 'tcx> type_map::TypeSource for LabelTysSource<'a, 'tcx> { } } -impl<'tcx> type_map::Signature> for LFnSig<'tcx> { +impl<'lty, 'tcx> type_map::Signature> for LFnSig<'lty, 'tcx> { fn num_inputs(&self) -> usize { self.inputs.len() } - fn input(&self, idx: usize) -> LTy<'tcx> { + fn input(&self, idx: usize) -> LTy<'lty, 'tcx> { self.inputs[idx] } - fn output(&self) -> LTy<'tcx> { + fn output(&self) -> LTy<'lty, 'tcx> { self.output } } /// Label the `ty::Ty` for every `ast::Ty` in the crate. -fn label_tys<'a, 'tcx>(hir_map: &hir::map::Map<'tcx>, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - ltt: &'a LTyTable<'tcx>, - krate: &ast::Crate) - -> HashMap> { +fn label_tys<'lty, 'a: 'lty, 'tcx: 'a>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + ltt: &'lty LTyTable<'lty>, + krate: &ast::Crate, +) -> HashMap> { let mut ty_nodes = HashMap::new(); let source = LabelTysSource { - hir_map: hir_map, tcx: tcx, ltt: ltt, }; - type_map::map_types(hir_map, source, krate, |_, ast_ty, lty| { - // Note that HIR `Ty` nodes don't have `HirId`s, so we index everything by the old `NodeId` + type_map::map_types(tcx.hir(), source, krate, |_, ast_ty, lty| { + // Note that AST `Ty` nodes don't have `HirId`s, so we index everything by the old `NodeId` // instead. - ty_nodes.insert(ast_ty.id, lty); + ty_nodes.insert(tcx.hir().node_to_hir_id(ast_ty.id), lty); }); ty_nodes } @@ -346,9 +343,9 @@ fn label_tys<'a, 'tcx>(hir_map: &hir::map::Map<'tcx>, /// Build a map of primitive types used by specific language features, such as `bool`'s usage in /// `if` and `while`. 
-fn prim_tys<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - ltt: &'a LTyTable<'tcx>) - -> HashMap<&'static str, LTy<'tcx>> { +fn prim_tys<'lty, 'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + ltt: &'lty LTyTable<'lty>) + -> HashMap<&'static str, LTy<'lty, 'tcx>> { let mut map = HashMap::new(); map.insert("bool", ltt.label(tcx.mk_bool())); @@ -361,17 +358,16 @@ fn prim_tys<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, /// Walk over the HIR, unifying types as equality constraints are discovered. -struct UnifyVisitor<'a, 'tcx: 'a> { - hir_map: &'a hir::map::Map<'tcx>, +struct UnifyVisitor<'lty, 'a: 'lty, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, - ltt: &'a LTyTable<'tcx>, + ltt: &'lty LTyTable<'lty>, // These are the tables generated by the visitors and functions defined above. - unadjusted_nodes: &'a HashMap>, - nodes: &'a HashMap>, - node_substs: &'a HashMap]>, - ty_nodes: &'a HashMap>, - prims: &'a HashMap<&'static str, LTy<'tcx>>, + unadjusted_nodes: &'lty HashMap>, + nodes: &'lty HashMap>, + node_substs: &'lty HashMap]>, + ty_nodes: &'lty HashMap>, + prims: &'lty HashMap<&'static str, LTy<'lty, 'tcx>>, /// Cache of labeled types for each definition. /// @@ -379,54 +375,54 @@ struct UnifyVisitor<'a, 'tcx: 'a> { /// is for exprs and patterns. And we can't easily visit all defs because some are pulled in /// from other crates. Since we can't precompute the `LTy` for every def, we have to keep this /// cache and add defs to it as we encounter them. - defs: RefCell>>, + defs: RefCell>>, /// Cache of labeled signatures of function/method definitions. For generic functions, this /// always contains the unsubstituted (polymorphic) signature. - def_sigs: RefCell>>, + def_sigs: RefCell>>, } -impl<'a, 'tcx> UnifyVisitor<'a, 'tcx> { +impl<'lty, 'a, 'tcx> UnifyVisitor<'lty, 'a, 'tcx> { // Helpers for looking up labeled types in the various precomputed tables. 
- fn node_lty(&self, id: NodeId) -> LTy<'tcx> { - let tables = self.get_tables(id); - let hir_id = self.hir_map.node_to_hir_id(id); - let ty = tables.node_id_to_type(hir_id); + fn node_lty(&self, id: NodeId) -> LTy<'lty, 'tcx> { + let hir_id = self.tcx.hir().node_to_hir_id(id); + let tables = self.get_tables(hir_id); + let ty = tables.node_type(hir_id); self.ltt.label(ty) } - fn expr_lty(&self, e: &Expr) -> LTy<'tcx> { + fn expr_lty(&self, e: &Expr) -> LTy<'lty, 'tcx> { self.nodes.get(&e.hir_id) .or_else(|| self.unadjusted_nodes.get(&e.hir_id)) .unwrap_or_else(|| panic!("expr_lty: no lty for {:?} @ {:?}", e, self.tcx.sess.source_map().span_to_string(e.span))) } - fn opt_unadjusted_expr_lty(&self, e: &Expr) -> Option> { + fn opt_unadjusted_expr_lty(&self, e: &Expr) -> Option> { self.unadjusted_nodes.get(&e.hir_id).map(|&x| x) } - fn block_lty(&self, b: &Block) -> LTy<'tcx> { + fn block_lty(&self, b: &Block) -> LTy<'lty, 'tcx> { match b.expr { Some(ref e) => self.expr_lty(e), None => self.prim_lty("()"), } } - fn pat_lty(&self, p: &Pat) -> LTy<'tcx> { + fn pat_lty(&self, p: &Pat) -> LTy<'lty, 'tcx> { self.unadjusted_nodes.get(&p.hir_id) .unwrap_or_else(|| panic!("pat_lty: no lty for {:?} @ {:?}", p, self.tcx.sess.source_map().span_to_string(p.span))) } - fn ty_lty(&self, t: &Ty) -> LTy<'tcx> { - self.ty_nodes.get(&t.id) + fn ty_lty(&self, t: &Ty) -> LTy<'lty, 'tcx> { + self.ty_nodes.get(&t.hir_id) .unwrap_or_else(|| panic!("ty_lty: no lty for {:?} @ {:?}", t, self.tcx.sess.source_map().span_to_string(t.span))) } - fn prim_lty(&self, name: &'static str) -> LTy<'tcx> { + fn prim_lty(&self, name: &'static str) -> LTy<'lty, 'tcx> { self.prims.get(&name) .unwrap_or_else(|| panic!("prim_lty: no such prim {:?}", name)) } @@ -434,8 +430,8 @@ impl<'a, 'tcx> UnifyVisitor<'a, 'tcx> { // Functions for accessing the def ty/sig caches - fn compute_def_lty(&self, id: DefId) -> LTy<'tcx> { - match self.hir_map.get_if_local(id) { + fn compute_def_lty(&self, id: DefId) -> LTy<'lty, 
'tcx> { + match self.tcx.hir().get_if_local(id) { Some(Node::Binding(p)) => { return self.pat_lty(p); }, @@ -445,13 +441,13 @@ impl<'a, 'tcx> UnifyVisitor<'a, 'tcx> { self.ltt.label(self.tcx.type_of(id)) } - fn def_lty(&self, id: DefId) -> LTy<'tcx> { + fn def_lty(&self, id: DefId) -> LTy<'lty, 'tcx> { *self.defs.borrow_mut().entry(id) .or_insert_with(|| self.compute_def_lty(id)) } - fn compute_def_sig(&self, id: DefId) -> LFnSig<'tcx> { + fn compute_def_sig(&self, id: DefId) -> LFnSig<'lty, 'tcx> { let sig = self.tcx.fn_sig(id); let is_extern = match sig.skip_binder().abi { Abi::Rust | Abi::RustIntrinsic | Abi::RustCall => false, @@ -465,7 +461,7 @@ impl<'a, 'tcx> UnifyVisitor<'a, 'tcx> { } } - fn def_sig(&self, id: DefId) -> LFnSig<'tcx> { + fn def_sig(&self, id: DefId) -> LFnSig<'lty, 'tcx> { *self.def_sigs.borrow_mut().entry(id) .or_insert_with(|| self.compute_def_sig(id)) } @@ -473,7 +469,7 @@ impl<'a, 'tcx> UnifyVisitor<'a, 'tcx> { // Helpers for extracting information from function types. - fn fn_num_inputs(&self, lty: LTy<'tcx>) -> usize { + fn fn_num_inputs(&self, lty: LTy<'lty, 'tcx>) -> usize { use rustc::ty::TyKind::*; match lty.ty.sty { FnDef(id, _) => self.def_sig(id).inputs.len(), @@ -485,7 +481,7 @@ impl<'a, 'tcx> UnifyVisitor<'a, 'tcx> { } /// Get the input types out of a `FnPtr` or `FnDef` `LTy`. - fn fn_input(&self, lty: LTy<'tcx>, idx: usize) -> LTy<'tcx> { + fn fn_input(&self, lty: LTy<'lty, 'tcx>, idx: usize) -> LTy<'lty, 'tcx> { use rustc::ty::TyKind::*; match lty.ty.sty { FnDef(id, _) => { @@ -504,7 +500,7 @@ impl<'a, 'tcx> UnifyVisitor<'a, 'tcx> { } /// Get the output type out of a `FnPtr` or `FnDef` `LTy`. 
- fn fn_output(&self, lty: LTy<'tcx>) -> LTy<'tcx> { + fn fn_output(&self, lty: LTy<'lty, 'tcx>) -> LTy<'lty, 'tcx> { use rustc::ty::TyKind::*; match lty.ty.sty { FnDef(id, _) => { @@ -519,14 +515,14 @@ impl<'a, 'tcx> UnifyVisitor<'a, 'tcx> { } } - fn fn_is_variadic(&self, lty: LTy<'tcx>) -> bool { + fn fn_is_variadic(&self, lty: LTy<'lty, 'tcx>) -> bool { use rustc::ty::TyKind::*; match lty.ty.sty { FnDef(id, _) => { - self.def_sig(id).variadic + self.def_sig(id).c_variadic }, FnPtr(ty_sig) => { - ty_sig.skip_binder().variadic + ty_sig.skip_binder().c_variadic }, // TODO: Closure _ => panic!("fn_is_variadic: not a fn type"), @@ -534,16 +530,16 @@ impl<'a, 'tcx> UnifyVisitor<'a, 'tcx> { } - fn get_tables(&self, id: NodeId) -> &'tcx TypeckTables<'tcx> { - let parent = self.hir_map.get_parent(id); - let parent_body = self.hir_map.body_owned_by(parent); + fn get_tables(&self, id: HirId) -> &'tcx TypeckTables<'tcx> { + let parent = self.tcx.hir().get_parent_item(id); + let parent_body = self.tcx.hir().body_owned_by(parent); self.tcx.body_tables(parent_body) } /// Get the signature of the method being called by an expression. This includes substituting /// in the type arguments, if the method is generic. - fn method_sig(&self, e: &Expr) -> LFnSig<'tcx> { - let def_id = self.get_tables(e.id).type_dependent_defs()[e.hir_id].def_id(); + fn method_sig(&self, e: &Expr) -> LFnSig<'lty, 'tcx> { + let def_id = self.get_tables(e.hir_id).type_dependent_defs()[e.hir_id].def_id(); let sig = self.def_sig(def_id); let substs = self.node_substs.get(&e.hir_id).map_or_else(|| &[] as &[_], |x| x); self.ltt.subst_sig(sig, substs) @@ -552,7 +548,7 @@ impl<'a, 'tcx> UnifyVisitor<'a, 'tcx> { /// Get the labeled type of a field. For generic structs, this returns the type after /// substitution, using the type arguments from `struct_ty`. 
- fn field_lty(&self, struct_ty: LTy<'tcx>, name: Symbol) -> LTy<'tcx> { + fn field_lty(&self, struct_ty: LTy<'lty, 'tcx>, name: Symbol) -> LTy<'lty, 'tcx> { let adt = match struct_ty.ty.sty { ty::TyKind::Adt(ref adt, _) => adt, _ => panic!("field_lty: not a struct ty: {:?}", struct_ty), @@ -568,9 +564,9 @@ impl<'a, 'tcx> UnifyVisitor<'a, 'tcx> { } } -impl<'a, 'hir> Visitor<'hir> for UnifyVisitor<'a, 'hir> { +impl<'lty, 'a, 'hir> Visitor<'hir> for UnifyVisitor<'lty, 'a, 'hir> { fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'hir> { - NestedVisitorMap::OnlyBodies(self.hir_map) + NestedVisitorMap::OnlyBodies(self.tcx.hir()) } fn visit_expr(&mut self, e: &'hir Expr) { @@ -770,9 +766,11 @@ impl<'a, 'hir> Visitor<'hir> for UnifyVisitor<'a, 'hir> { ExprKind::Repeat(ref e, _) => { self.ltt.unify(rty.args[0], self.expr_lty(e)); }, + + _ => {} } - if let Some(adjs) = self.get_tables(e.id).adjustments().get(e.hir_id) { + if let Some(adjs) = self.get_tables(e.hir_id).adjustments().get(e.hir_id) { // Relate the unadjusted and adjusted types for this expr by stepping through the // intermediate adjustments one by one. let mut prev_ty = rty; @@ -792,7 +790,7 @@ impl<'a, 'hir> Visitor<'hir> for UnifyVisitor<'a, 'hir> { // "unsafe" tag on the function pointer. 
self.ltt.unify(rty, prev_ty); }, - Adjust::ClosureFnPointer => {}, // unsupported + Adjust::ClosureFnPointer(_) => {}, // unsupported Adjust::MutToConstPointer => { // Only the mutability tag changes self.ltt.unify(rty, prev_ty); @@ -825,7 +823,7 @@ impl<'a, 'hir> Visitor<'hir> for UnifyVisitor<'a, 'hir> { PatKind::Binding(_, node_id, _, ref opt_pat) => { - let lty = self.node_lty(node_id); + let lty = self.node_lty(self.tcx.hir().hir_to_node_id(node_id)); self.ltt.unify(rty, lty); if let Some(ref p) = *opt_pat { self.ltt.unify(rty, self.pat_lty(p)); @@ -880,13 +878,13 @@ impl<'a, 'hir> Visitor<'hir> for UnifyVisitor<'a, 'hir> { decl: &'hir FnDecl, body_id: BodyId, span: Span, - id: NodeId) { + id: HirId) { if let intravisit::FnKind::Closure(..) = kind { return; } - let body = self.hir_map.body(body_id); - let def_id = self.hir_map.local_def_id(id); + let body = self.tcx.hir().body(body_id); + let def_id = self.tcx.hir().local_def_id_from_hir_id(id); let sig = self.def_sig(def_id); // The results of `def_sig` and `def_lty` are produced by calling `tcx.fn_sig` / // `tcx.type_of` and giving the results fresh labels, so they initially have no connection @@ -915,13 +913,13 @@ impl<'a, 'hir> Visitor<'hir> for UnifyVisitor<'a, 'hir> { fn visit_struct_field(&mut self, field: &'hir StructField) { // Unify the field's type annotation with the definition type. 
- let def_id = self.hir_map.local_def_id(field.id); + let def_id = self.tcx.hir().local_def_id_from_hir_id(field.hir_id); self.ltt.unify(self.ty_lty(&field.ty), self.def_lty(def_id)); intravisit::walk_struct_field(self, field); } fn visit_foreign_item(&mut self, i: &'hir ForeignItem) { - let def_id = self.hir_map.local_def_id(i.id); + let def_id = self.tcx.hir().local_def_id_from_hir_id(i.hir_id); match i.node { ForeignItemKind::Fn(ref decl, _, _) => { let sig = self.def_sig(def_id); @@ -955,11 +953,10 @@ impl<'a, 'hir> Visitor<'hir> for UnifyVisitor<'a, 'hir> { /// Run the analysis, producing a map from `ast::Ty` `NodeId`s to an equivalence class number. -pub fn analyze<'a, 'tcx>(hir_map: &hir::map::Map<'tcx>, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - tcx_arena: &'tcx SyncDroplessArena, - krate: &ast::Crate) -> HashMap { - let ltt = LTyTable::new(tcx_arena); +pub fn analyze<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + krate: &ast::Crate) -> HashMap { + let arena = SyncDroplessArena::default(); + let ltt = LTyTable::new(&arena); // Collect labeled expr/pat types from the TypeckTables of each item. let mut v = ExprPatVisitor { @@ -969,7 +966,7 @@ pub fn analyze<'a, 'tcx>(hir_map: &hir::map::Map<'tcx>, adjusted: HashMap::new(), substs: HashMap::new(), }; - hir_map.krate().visit_all_item_likes(&mut v); + tcx.hir().krate().visit_all_item_likes(&mut v); let ExprPatVisitor { unadjusted: unadjusted_nodes, adjusted: nodes, @@ -978,7 +975,7 @@ pub fn analyze<'a, 'tcx>(hir_map: &hir::map::Map<'tcx>, } = v; // Construct labeled types for each `ast::Ty` in the program. - let ty_nodes = label_tys(hir_map, tcx, <t, krate); + let ty_nodes = label_tys(tcx, <t, krate); // Construct labeled types for primitive operations. let prims = prim_tys(tcx, <t); @@ -986,7 +983,6 @@ pub fn analyze<'a, 'tcx>(hir_map: &hir::map::Map<'tcx>, // Run the unification pass. 
let mut v = UnifyVisitor { - hir_map: hir_map, tcx: tcx, ltt: <t, @@ -998,7 +994,7 @@ pub fn analyze<'a, 'tcx>(hir_map: &hir::map::Map<'tcx>, defs: RefCell::new(HashMap::new()), def_sigs: RefCell::new(HashMap::new()), }; - hir_map.krate().visit_all_item_likes(&mut v.as_deep_visitor()); + tcx.hir().krate().visit_all_item_likes(&mut v.as_deep_visitor()); // For all `ast::Ty` nodes, build a map with the `NodeId` and the raw label of the root of its diff --git a/c2rust-refactor/src/ast_manip/ast_deref.rs b/c2rust-refactor/src/ast_manip/ast_deref.rs index 511375d2d..e3a8893ec 100644 --- a/c2rust-refactor/src/ast_manip/ast_deref.rs +++ b/c2rust-refactor/src/ast_manip/ast_deref.rs @@ -4,7 +4,7 @@ use syntax::ast::*; use syntax::parse::token::{Token, DelimToken, Nonterminal}; use syntax::ptr::P; use syntax::source_map::{Span, Spanned}; -use syntax::tokenstream::{TokenTree, Delimited, DelimSpan, TokenStream, ThinTokenStream}; +use syntax::tokenstream::{TokenTree, DelimSpan, TokenStream}; use syntax_pos::hygiene::SyntaxContext; diff --git a/c2rust-refactor/src/ast_manip/ast_equiv.rs b/c2rust-refactor/src/ast_manip/ast_equiv.rs index 7befd36fe..58dac4931 100644 --- a/c2rust-refactor/src/ast_manip/ast_equiv.rs +++ b/c2rust-refactor/src/ast_manip/ast_equiv.rs @@ -6,7 +6,7 @@ use syntax::ast::*; use syntax::parse::token::{Token, DelimToken, Nonterminal}; use syntax::ptr::P; use syntax::source_map::{Span, Spanned}; -use syntax::tokenstream::{TokenTree, Delimited, DelimSpan, TokenStream, ThinTokenStream}; +use syntax::tokenstream::{TokenTree, DelimSpan, TokenStream}; use syntax_pos::hygiene::SyntaxContext; /// Trait for checking equivalence of AST nodes. 
This is similar to `PartialEq`, but less strict, diff --git a/c2rust-refactor/src/ast_manip/ast_names.rs b/c2rust-refactor/src/ast_manip/ast_names.rs index 23aee7638..db41440d3 100644 --- a/c2rust-refactor/src/ast_manip/ast_names.rs +++ b/c2rust-refactor/src/ast_manip/ast_names.rs @@ -2,7 +2,7 @@ use syntax::ast::*; use syntax::parse::token::Nonterminal; use syntax::ptr::P; use syntax::source_map::Spanned; -use syntax::tokenstream::{DelimSpan, Delimited, TokenTree}; +use syntax::tokenstream::{DelimSpan, TokenTree}; pub trait AstName { fn ast_name(&self) -> String; diff --git a/c2rust-refactor/src/ast_manip/fn_edit.rs b/c2rust-refactor/src/ast_manip/fn_edit.rs index 0dfc6e967..750e89fe0 100644 --- a/c2rust-refactor/src/ast_manip/fn_edit.rs +++ b/c2rust-refactor/src/ast_manip/fn_edit.rs @@ -1,13 +1,13 @@ //! Helpers for rewriting all `fn` itemlikes, regardless of item kind. use smallvec::SmallVec; use syntax::ast::*; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::ptr::P; -use syntax::util::move_map::MoveMap; +use syntax::util::map_in_place::MapInPlace; use syntax::visit::{self, Visitor}; use syntax_pos::Span; -use crate::ast_manip::{Fold, Visit, GetNodeId, GetSpan}; +use crate::ast_manip::{MutVisit, Visit, GetNodeId, GetSpan}; /// Enum indicating which kind of itemlike a `fn` is. @@ -47,18 +47,18 @@ impl GetSpan for FnLike { } -/// Folder for rewriting `fn`s using a `FnLike` callback. +/// MutVisitor for rewriting `fn`s using a `FnLike` callback. struct FnFolder where F: FnMut(FnLike) -> SmallVec<[FnLike; 1]> { callback: F, } -impl Folder for FnFolder +impl MutVisitor for FnFolder where F: FnMut(FnLike) -> SmallVec<[FnLike; 1]> { - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]> { + fn flat_map_item(&mut self, i: P) -> SmallVec<[P; 1]> { match i.node { ItemKind::Fn(..) 
=> {}, - _ => return fold::noop_fold_item(i, self), + _ => return mut_visit::noop_flat_map_item(i, self), } let i = i.into_inner(); @@ -89,13 +89,13 @@ impl Folder for FnFolder // the signature and body of the function. tokens: None, }) - }).flat_map(|i| fold::noop_fold_item(i, self)).collect() + }).flat_map(|i| mut_visit::noop_flat_map_item(i, self)).collect() } - fn fold_impl_item(&mut self, i: ImplItem) -> SmallVec<[ImplItem; 1]> { + fn flat_map_impl_item(&mut self, i: ImplItem) -> SmallVec<[ImplItem; 1]> { match i.node { ImplItemKind::Method(..) => {}, - _ => return fold::noop_fold_impl_item(i, self), + _ => return mut_visit::noop_flat_map_impl_item(i, self), } unpack!([i.node] ImplItemKind::Method(sig, block)); @@ -132,13 +132,13 @@ impl Folder for FnFolder defaultness: defaultness, tokens: None, } - }).flat_map(|i| fold::noop_fold_impl_item(i, self)).collect() + }).flat_map(|i| mut_visit::noop_flat_map_impl_item(i, self)).collect() } - fn fold_trait_item(&mut self, i: TraitItem) -> SmallVec<[TraitItem; 1]> { + fn flat_map_trait_item(&mut self, i: TraitItem) -> SmallVec<[TraitItem; 1]> { match i.node { TraitItemKind::Method(..) 
=> {}, - _ => return fold::noop_fold_trait_item(i, self), + _ => return mut_visit::noop_flat_map_trait_item(i, self), } unpack!([i.node] TraitItemKind::Method(sig, block)); @@ -170,18 +170,17 @@ impl Folder for FnFolder generics: generics.clone(), tokens: None, } - }).flat_map(|i| fold::noop_fold_trait_item(i, self)).collect() + }).flat_map(|i| mut_visit::noop_flat_map_trait_item(i, self)).collect() } - fn fold_foreign_mod(&mut self, mut nm: ForeignMod) -> ForeignMod { - nm.items = nm.items.move_flat_map(|i| self.fold_foreign_item(i)); - fold::noop_fold_foreign_mod(nm, self) + fn visit_foreign_mod(&mut self, nm: &mut ForeignMod) { + nm.items.flat_map_in_place(|i| self.flat_map_foreign_item(i)); } - fn fold_foreign_item(&mut self, i: ForeignItem) -> SmallVec<[ForeignItem; 1]> { + fn flat_map_foreign_item(&mut self, i: ForeignItem) -> SmallVec<[ForeignItem; 1]> { match i.node { ForeignItemKind::Fn(..) => {}, - _ => return fold::noop_fold_foreign_item(i, self), + _ => return mut_visit::noop_flat_map_foreign_item(i, self), } unpack!([i.node] ForeignItemKind::Fn(decl, generics)); @@ -207,27 +206,30 @@ impl Folder for FnFolder attrs: fl.attrs, vis: vis.clone(), } - }).flat_map(|i| fold::noop_fold_foreign_item(i, self)).collect() + }).flat_map(|i| mut_visit::noop_flat_map_foreign_item(i, self)).collect() } } /// Fold over all item-like function definitions, including `ItemKind::Fn`, `ImplItemKind::Method`, /// `TraitItemKind::Method`, and `ForeignItemKind::Fn`. 
-pub fn fold_fns(target: T, mut callback: F) -> ::Result - where T: Fold, - F: FnMut(FnLike) -> FnLike { - fold_fns_multi(target, |fl| smallvec![callback(fl)]) +pub fn mut_visit_fns(target: &mut T, mut callback: F) + where T: MutVisit, + F: FnMut(&mut FnLike) { + flat_map_fns(target, |mut fl| { + callback(&mut fl); + smallvec![fl] + }) } -/// Similar to `fold_fns`, but allows transforming each `FnLike` into a sequence of zero or more +/// Similar to `mut_visit_fns`, but allows transforming each `FnLike` into a sequence of zero or more /// `FnLike`s. -pub fn fold_fns_multi(target: T, callback: F) -> ::Result - where T: Fold, +pub fn flat_map_fns(target: &mut T, callback: F) + where T: MutVisit, F: FnMut(FnLike) -> SmallVec<[FnLike; 1]> { let mut f = FnFolder { callback: callback, }; - target.fold(&mut f) + target.visit(&mut f) } diff --git a/c2rust-refactor/src/ast_manip/fold.rs b/c2rust-refactor/src/ast_manip/fold.rs index d0c5933bd..6d3275405 100644 --- a/c2rust-refactor/src/ast_manip/fold.rs +++ b/c2rust-refactor/src/ast_manip/fold.rs @@ -1,149 +1,336 @@ -//! `Fold` trait for AST types that can be folded over. +//! `MutVisit` trait for AST types that can be modified. use syntax::ast::*; -use syntax::fold::Folder; +use syntax::mut_visit::*; use syntax::ptr::P; -use syntax::parse::token::{Token, Nonterminal}; +use syntax::parse::token::{self, Token}; use syntax::source_map::Span; use syntax::tokenstream::{TokenTree, TokenStream}; +use syntax::util::map_in_place::MapInPlace; + use smallvec::SmallVec; +use c2rust_macros::gen_visitor_impls; +use crate::util::Lone; +/// A trait for AST nodes that can accept a `MutVisitor`. +pub trait MutVisit: Sized { + fn visit(&mut self, _: &mut F) { + unimplemented!("visit is not implemented for {}", stringify!(Self)); + } -/// A trait for AST nodes that can accept a `Folder`. -pub trait Fold { - /// The result of a fold over `Self`. Typically this is either `Self` or `SmallVector`. 
- type Result; + fn flat_map(mut self, f: &mut F) -> SmallVec<[Self; 1]> { + self.visit(f); + smallvec![self] + } +} - fn fold(self, f: &mut F) -> Self::Result; +/// Trait for AST node types that can be rewritten with a mutable visit. +pub trait MutVisitNodes: MutVisit + Sized { + fn visit(target: &mut T, callback: F) + where T: MutVisit, + F: FnMut(&mut Self); } -// This macro takes as input the definition of `syntax::fold::Folder` as it appears the libsyntax -// docs, and emits a `Fold` impl for each method it finds. -macro_rules! gen_folder_impls { - ( - pub trait Folder: Sized { - $( - fn $fold_fn:ident (&mut self, $arg:ident : $ArgTy:ty) -> $ResultTy:ty { ... } - )* - } - ) => { - $( - impl Fold for $ArgTy { - type Result = $ResultTy; - fn fold(self, f: &mut F) -> Self::Result { - f.$fold_fn(self) - } - } - )* - }; +/// Trait for AST node types that can be rewritten with a flat_map. +pub trait FlatMapNodes: MutVisit + Sized { + fn visit(target: &mut T, callback: F) + where T: MutVisit, + F: FnMut(Self) -> SmallVec<[Self; 1]>; + + fn flat_map(target: T, callback: F) -> SmallVec<[T; 1]> + where T: MutVisit, + F: FnMut(Self) -> SmallVec<[Self; 1]>; } -impl Fold for Vec { - type Result = Vec<::Result>; - fn fold(self, f: &mut F) -> Self::Result { - let mut results = Vec::with_capacity(self.len()); - for x in self { - results.push(x.fold(f)); +impl MutVisit for Vec + where T: MutVisit +{ + fn visit(&mut self, f: &mut F) { + for elem in self { + elem.visit(f); } - results } } -impl Fold for Option { - type Result = Option<::Result>; - fn fold(self, f: &mut F) -> Self::Result { - self.map(|x| x.fold(f)) +impl MutVisit for Option + where T: MutVisit +{ + fn visit(&mut self, f: &mut F) { + if let Some(elem) = self { + elem.visit(f) + } } } -gen_folder_impls! { - // Copy-pasted from the syntax::fold::Folder docs. Omit functions that take Vec or - // Option, so we can write the generic impls above without conflicts. Additional changes - // are noted below. 
- pub trait Folder: Sized { - fn fold_crate(&mut self, c: Crate) -> Crate { ... } - //fn fold_meta_items(&mut self, meta_items: Vec) -> Vec { ... } - fn fold_meta_list_item( - &mut self, - list_item: NestedMetaItem - ) -> NestedMetaItem { ... } - fn fold_meta_item(&mut self, meta_item: MetaItem) -> MetaItem { ... } - fn fold_use_tree(&mut self, use_tree: UseTree) -> UseTree { ... } - fn fold_foreign_item(&mut self, ni: ForeignItem) -> SmallVec<[ForeignItem; 1]> { ... } - //fn fold_foreign_item_simple(&mut self, ni: ForeignItem) -> ForeignItem { ... } - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]> { ... } - //fn fold_item_simple(&mut self, i: Item) -> Item { ... } - fn fold_struct_field(&mut self, sf: StructField) -> StructField { ... } - fn fold_item_kind(&mut self, i: ItemKind) -> ItemKind { ... } - fn fold_trait_item(&mut self, i: TraitItem) -> SmallVec<[TraitItem; 1]> { ... } - fn fold_impl_item(&mut self, i: ImplItem) -> SmallVec<[ImplItem; 1]> { ... } - fn fold_fn_decl(&mut self, d: P) -> P { ... } - fn fold_asyncness(&mut self, a: IsAsync) -> IsAsync { ... } - fn fold_block(&mut self, b: P) -> P { ... } - fn fold_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> { ... } - fn fold_arm(&mut self, a: Arm) -> Arm { ... } - fn fold_guard(&mut self, g: Guard) -> Guard { ... } - fn fold_pat(&mut self, p: P) -> P { ... } - fn fold_anon_const(&mut self, c: AnonConst) -> AnonConst { ... } - fn fold_expr(&mut self, e: P) -> P { ... } - fn fold_range_end(&mut self, re: RangeEnd) -> RangeEnd { ... } - // Skip this method. We already have an impl for P, from fold_expr above - //fn fold_opt_expr(&mut self, e: P) -> Option> { ... } - //fn fold_exprs(&mut self, es: Vec>) -> Vec> { ... } - fn fold_generic_arg(&mut self, arg: GenericArg) -> GenericArg { ... } - fn fold_ty(&mut self, t: P) -> P { ... } - fn fold_lifetime(&mut self, l: Lifetime) -> Lifetime { ... } - fn fold_ty_binding(&mut self, t: TypeBinding) -> TypeBinding { ... 
} - fn fold_mod(&mut self, m: Mod) -> Mod { ... } - fn fold_foreign_mod(&mut self, nm: ForeignMod) -> ForeignMod { ... } - fn fold_global_asm(&mut self, ga: P) -> P { ... } - fn fold_variant(&mut self, v: Variant) -> Variant { ... } - fn fold_ident(&mut self, i: Ident) -> Ident { ... } - fn fold_usize(&mut self, i: usize) -> usize { ... } - fn fold_path(&mut self, p: Path) -> Path { ... } - //fn fold_qpath(&mut self, qs: Option, p: Path) -> (Option, Path) { ... } - fn fold_generic_args(&mut self, p: GenericArgs) -> GenericArgs { ... } - fn fold_angle_bracketed_parameter_data( - &mut self, - p: AngleBracketedArgs - ) -> AngleBracketedArgs { ... } - fn fold_parenthesized_parameter_data( - &mut self, - p: ParenthesisedArgs - ) -> ParenthesisedArgs { ... } - fn fold_local(&mut self, l: P) -> P { ... } - fn fold_mac(&mut self, _mac: Mac) -> Mac { ... } - fn fold_macro_def(&mut self, def: MacroDef) -> MacroDef { ... } - fn fold_label(&mut self, label: Label) -> Label { ... } - fn fold_attribute(&mut self, at: Attribute) -> Option { ... } - fn fold_arg(&mut self, a: Arg) -> Arg { ... } - fn fold_generics(&mut self, generics: Generics) -> Generics { ... } - fn fold_trait_ref(&mut self, p: TraitRef) -> TraitRef { ... } - fn fold_poly_trait_ref(&mut self, p: PolyTraitRef) -> PolyTraitRef { ... } - fn fold_variant_data(&mut self, vdata: VariantData) -> VariantData { ... } - fn fold_generic_param(&mut self, param: GenericParam) -> GenericParam { ... } - //fn fold_generic_params(&mut self, params: Vec) -> Vec { ... } - fn fold_tt(&mut self, tt: TokenTree) -> TokenTree { ... } - fn fold_tts(&mut self, tts: TokenStream) -> TokenStream { ... } - fn fold_token(&mut self, t: Token) -> Token { ... } - fn fold_interpolated(&mut self, nt: Nonterminal) -> Nonterminal { ... } - //fn fold_opt_bounds( - // &mut self, - // b: Option - //) -> Option { ... } - //fn fold_bounds(&mut self, b: GenericBounds) -> GenericBounds { ... 
} - fn fold_param_bound(&mut self, tpb: GenericBound) -> GenericBound { ... } - fn fold_mt(&mut self, mt: MutTy) -> MutTy { ... } - fn fold_field(&mut self, field: Field) -> Field { ... } - fn fold_where_clause(&mut self, where_clause: WhereClause) -> WhereClause { ... } - fn fold_where_predicate( - &mut self, - where_predicate: WherePredicate - ) -> WherePredicate { ... } - fn fold_vis(&mut self, vis: Visibility) -> Visibility { ... } - fn new_id(&mut self, i: NodeId) -> NodeId { ... } - fn new_span(&mut self, sp: Span) -> Span { ... } - } -} +gen_visitor_impls! { + pub trait MutVisitor: Sized { + // Methods in this trait have one of three forms: + // + // fn visit_t(&mut self, t: &mut T); // common + // fn flat_map_t(&mut self, t: T) -> SmallVec<[T; 1]>; // rare + // fn filter_map_t(&mut self, t: T) -> Option; // rarest + // + // Any additions to this trait should happen in form of a call to a public + // `noop_*` function that only calls out to the visitor again, not other + // `noop_*` functions. This is a necessary API workaround to the problem of + // not being able to call out to the super default method in an overridden + // default method. + // + // When writing these methods, it is better to use destructuring like this: + // + // fn visit_abc(&mut self, ABC { a, b, c: _ }: &mut ABC) { + // visit_a(a); + // visit_b(b); + // } + // + // than to use field access like this: + // + // fn visit_abc(&mut self, abc: &mut ABC) { + // visit_a(&mut abc.a); + // visit_b(&mut abc.b); + // // ignore abc.c + // } + // + // As well as being more concise, the former is explicit about which fields + // are skipped. Furthermore, if a new field is added, the destructuring + // version will cause a compile error, which is good. In comparison, the + // field access version will continue working and it would be easy to + // forget to add handling for it. 
+ + fn visit_crate(&mut self, c: &mut Crate) { + noop_visit_crate(c, self) + } + + fn visit_meta_list_item(&mut self, list_item: &mut NestedMetaItem) { + noop_visit_meta_list_item(list_item, self); + } + + fn visit_meta_item(&mut self, meta_item: &mut MetaItem) { + noop_visit_meta_item(meta_item, self); + } + + fn visit_use_tree(&mut self, use_tree: &mut UseTree) { + noop_visit_use_tree(use_tree, self); + } + + fn flat_map_foreign_item(&mut self, ni: ForeignItem) -> SmallVec<[ForeignItem; 1]> { + noop_flat_map_foreign_item(ni, self) + } + + fn flat_map_item(&mut self, i: P) -> SmallVec<[P; 1]> { + noop_flat_map_item(i, self) + } + + fn visit_fn_header(&mut self, header: &mut FnHeader) { + noop_visit_fn_header(header, self); + } + + fn visit_struct_field(&mut self, sf: &mut StructField) { + noop_visit_struct_field(sf, self); + } + + fn visit_item_kind(&mut self, i: &mut ItemKind) { + noop_visit_item_kind(i, self); + } + + fn flat_map_trait_item(&mut self, i: TraitItem) -> SmallVec<[TraitItem; 1]> { + noop_flat_map_trait_item(i, self) + } + + fn flat_map_impl_item(&mut self, i: ImplItem) -> SmallVec<[ImplItem; 1]> { + noop_flat_map_impl_item(i, self) + } + + fn visit_fn_decl(&mut self, d: &mut P) { + noop_visit_fn_decl(d, self); + } + + fn visit_asyncness(&mut self, a: &mut IsAsync) { + noop_visit_asyncness(a, self); + } + + fn visit_block(&mut self, b: &mut P) { + noop_visit_block(b, self); + } + + fn flat_map_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> { + noop_flat_map_stmt(s, self) + } + + fn visit_arm(&mut self, a: &mut Arm) { + noop_visit_arm(a, self); + } + + fn visit_guard(&mut self, g: &mut Guard) { + noop_visit_guard(g, self); + } + + fn visit_pat(&mut self, p: &mut P) { + noop_visit_pat(p, self); + } + + fn visit_anon_const(&mut self, c: &mut AnonConst) { + noop_visit_anon_const(c, self); + } + + fn visit_expr(&mut self, e: &mut P) { + noop_visit_expr(e, self); + } + + // fn filter_map_expr(&mut self, e: P) -> Option> { + // noop_filter_map_expr(e, 
self) + // } + + fn visit_generic_arg(&mut self, arg: &mut GenericArg) { + noop_visit_generic_arg(arg, self); + } + + fn visit_ty(&mut self, t: &mut P) { + noop_visit_ty(t, self); + } + + // noop_visit_lifetime is private, so we can't walk lifetimes + // fn visit_lifetime(&mut self, l: &mut Lifetime) { + // noop_visit_lifetime(l, self); + // } + + fn visit_ty_binding(&mut self, t: &mut TypeBinding) { + noop_visit_ty_binding(t, self); + } + + fn visit_mod(&mut self, m: &mut Mod) { + noop_visit_mod(m, self); + } + + fn visit_foreign_mod(&mut self, nm: &mut ForeignMod) { + noop_visit_foreign_mod(nm, self); + } + fn visit_variant(&mut self, v: &mut Variant) { + noop_visit_variant(v, self); + } + + fn visit_ident(&mut self, i: &mut Ident) { + noop_visit_ident(i, self); + } + + fn visit_path(&mut self, p: &mut Path) { + noop_visit_path(p, self); + } + + fn visit_qself(&mut self, qs: &mut Option) { + noop_visit_qself(qs, self); + } + + fn visit_generic_args(&mut self, p: &mut GenericArgs) { + noop_visit_generic_args(p, self); + } + + fn visit_angle_bracketed_parameter_data(&mut self, p: &mut AngleBracketedArgs) { + noop_visit_angle_bracketed_parameter_data(p, self); + } + + fn visit_parenthesized_parameter_data(&mut self, p: &mut ParenthesizedArgs) { + noop_visit_parenthesized_parameter_data(p, self); + } + + fn visit_local(&mut self, l: &mut P) { + noop_visit_local(l, self); + } + + // fn visit_mac(&mut self, _mac: &mut Mac) { + // panic!("visit_mac disabled by default"); + // // N.B., see note about macros above. 
If you really want a visitor that + // // works on macros, use this definition in your trait impl: + // // mut_visit::noop_visit_mac(_mac, self); + // } + + fn visit_macro_def(&mut self, def: &mut MacroDef) { + noop_visit_macro_def(def, self); + } + + fn visit_label(&mut self, label: &mut Label) { + noop_visit_label(label, self); + } + + fn visit_attribute(&mut self, at: &mut Attribute) { + noop_visit_attribute(at, self); + } + + fn visit_arg(&mut self, a: &mut Arg) { + noop_visit_arg(a, self); + } + + fn visit_generics(&mut self, generics: &mut Generics) { + noop_visit_generics(generics, self); + } + + fn visit_trait_ref(&mut self, tr: &mut TraitRef) { + noop_visit_trait_ref(tr, self); + } + + fn visit_poly_trait_ref(&mut self, p: &mut PolyTraitRef) { + noop_visit_poly_trait_ref(p, self); + } + fn visit_variant_data(&mut self, vdata: &mut VariantData) { + noop_visit_variant_data(vdata, self); + } + + fn visit_generic_param(&mut self, param: &mut GenericParam) { + noop_visit_generic_param(param, self); + } + + // fn visit_generic_params(&mut self, params: &mut Vec) { + // noop_visit_generic_params(params, self); + // } + + fn visit_tt(&mut self, tt: &mut TokenTree) { + noop_visit_tt(tt, self); + } + + fn visit_tts(&mut self, tts: &mut TokenStream) { + noop_visit_tts(tts, self); + } + + fn visit_token(&mut self, t: &mut Token) { + noop_visit_token(t, self); + } + + fn visit_interpolated(&mut self, nt: &mut token::Nonterminal) { + noop_visit_interpolated(nt, self); + } + + fn visit_param_bound(&mut self, tpb: &mut GenericBound) { + noop_visit_param_bound(tpb, self); + } + + fn visit_mt(&mut self, mt: &mut MutTy) { + noop_visit_mt(mt, self); + } + + fn visit_field(&mut self, field: &mut Field) { + noop_visit_field(field, self); + } + + fn visit_where_clause(&mut self, where_clause: &mut WhereClause) { + noop_visit_where_clause(where_clause, self); + } + + fn visit_where_predicate(&mut self, where_predicate: &mut WherePredicate) { + 
noop_visit_where_predicate(where_predicate, self); + } + + fn visit_vis(&mut self, vis: &mut Visibility) { + noop_visit_vis(vis, self); + } + + fn visit_id(&mut self, _id: &mut NodeId) { + // Do nothing. + } + + fn visit_span(&mut self, _sp: &mut Span) { + // Do nothing. + } + } +} diff --git a/c2rust-refactor/src/ast_manip/fold_node.rs b/c2rust-refactor/src/ast_manip/fold_node.rs deleted file mode 100644 index 4c2413164..000000000 --- a/c2rust-refactor/src/ast_manip/fold_node.rs +++ /dev/null @@ -1,152 +0,0 @@ -//! Helper function for performing a fold that transforms only one type of AST node. -use syntax::ast::*; -use syntax::fold::{self, Folder}; -use syntax::ptr::P; -use smallvec::SmallVec; -use syntax::util::move_map::MoveMap; - -use crate::ast_manip::Fold; - - -/// Trait for AST node types that can be rewritten with a fold. -pub trait FoldNode: Fold + Sized { - fn fold_nodes(target: T, callback: F) -> ::Result - where T: Fold, - F: FnMut(Self) -> ::Result; -} - -macro_rules! gen_fold_node_impl { - ( - node = $Node:ty; - folder = $NodeFolder:ident; - - fn $fold_thing:ident ( &mut $slf:ident , $arg:ident : $ArgTy:ty ) -> $RetTy:ty; - walk = $walk:expr; - map = $map:expr; - ) => { - struct $NodeFolder - where F: FnMut($ArgTy) -> $RetTy { - callback: F, - } - - impl Folder for $NodeFolder - where F: FnMut($ArgTy) -> $RetTy { - fn $fold_thing(&mut $slf, $arg: $ArgTy) -> $RetTy { - let $arg = $walk; - $map - } - - fn fold_mac(&mut self, mac: Mac) -> Mac { - fold::noop_fold_mac(mac, self) - } - } - - impl FoldNode for $Node { - fn fold_nodes(target: T, callback: F) -> ::Result - where T: Fold, - F: FnMut(Self) -> ::Result { - let mut f = $NodeFolder { callback: callback }; - target.fold(&mut f) - } - } - }; -} - -// If you want to use `fold_nodes` with more node types, add more `gen_fold_node_impl!` invocations -// below. -gen_fold_node_impl! { - // The node type. - node = P; - // A name to use for the `Folder` that rewrites this node type. 
- folder = ExprNodeFolder; - // The signature of the `Folder` method for this node type. - fn fold_expr(&mut self, e: P) -> P; - // An expression that invokes the default `Folder` behavior for this node type. Can refer to - // the node being folded using the argument name from the signature above. - walk = e.map(|e| fold::noop_fold_expr(e, self)); - // An expression that computes the application of `self.callback` to the result of `walk`. - // This will be more complicated if folding this node type returns a sequence of nodes (see - // below for examples). - map = (self.callback)(e); -} - -gen_fold_node_impl! { - node = P; - folder = TyNodeFolder; - fn fold_ty(&mut self, ty: P) -> P; - walk = fold::noop_fold_ty(ty, self); - map = (self.callback)(ty); -} - -gen_fold_node_impl! { - node = P; - folder = ItemNodeFolder; - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]>; - walk = fold::noop_fold_item(i, self); - map = i.move_flat_map(|i| (self.callback)(i)); -} - -gen_fold_node_impl! { - node = ImplItem; - folder = ImplItemNodeFolder; - fn fold_impl_item(&mut self, i: ImplItem) -> SmallVec<[ImplItem; 1]>; - walk = fold::noop_fold_impl_item(i, self); - map = i.move_flat_map(|i| (self.callback)(i)); -} - -gen_fold_node_impl! { - node = Path; - folder = PathNodeFolder; - fn fold_path(&mut self, p: Path) -> Path; - walk = fold::noop_fold_path(p, self); - map = (self.callback)(p); -} - -gen_fold_node_impl! { - node = P; - folder = BlockNodeFolder; - fn fold_block(&mut self, b: P) -> P; - walk = fold::noop_fold_block(b, self); - map = (self.callback)(b); -} - -gen_fold_node_impl! { - node = P; - folder = LocalNodeFolder; - fn fold_local(&mut self, l: P) -> P; - walk = fold::noop_fold_local(l, self); - map = (self.callback)(l); -} - -gen_fold_node_impl! { - node = ForeignMod; - folder = ForeignModNodeFolder; - fn fold_foreign_mod(&mut self, nm: ForeignMod) -> ForeignMod; - walk = fold::noop_fold_foreign_mod(nm, self); - map = (self.callback)(nm); -} - -gen_fold_node_impl! 
{ - node = ForeignItem; - folder = ForeignItemNodeFolder; - fn fold_foreign_item(&mut self, ni: ForeignItem) -> SmallVec<[ForeignItem; 1]>; - walk = fold::noop_fold_foreign_item(ni, self); - map = ni.move_flat_map(|ni| (self.callback)(ni)); -} - -gen_fold_node_impl! { - node = Stmt; - folder = StmtNodeFolder; - fn fold_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]>; - walk = fold::noop_fold_stmt(s, self); - map = s.move_flat_map(|s| (self.callback)(s)); -} - -/// Rewrite nodes of the callback's argument type within `target`. This function performs a -/// postorder traversal. -pub fn fold_nodes(target: T, callback: F) -> ::Result - where N: FoldNode, - T: Fold, - F: FnMut(N) -> ::Result { - N::fold_nodes(target, callback) -} diff --git a/c2rust-refactor/src/ast_manip/get_node_id.rs b/c2rust-refactor/src/ast_manip/get_node_id.rs index 9781a22ad..f06e4e106 100644 --- a/c2rust-refactor/src/ast_manip/get_node_id.rs +++ b/c2rust-refactor/src/ast_manip/get_node_id.rs @@ -6,7 +6,7 @@ use syntax::ast::*; use syntax::parse::token::{Token, DelimToken, Nonterminal}; use syntax::ptr::P; use syntax::source_map::{Span, Spanned}; -use syntax::tokenstream::{TokenTree, Delimited, DelimSpan, TokenStream, ThinTokenStream}; +use syntax::tokenstream::{TokenTree, DelimSpan, TokenStream}; use syntax_pos::hygiene::SyntaxContext; diff --git a/c2rust-refactor/src/ast_manip/list_node_ids.rs b/c2rust-refactor/src/ast_manip/list_node_ids.rs index 7fe4cada9..42c89dba4 100644 --- a/c2rust-refactor/src/ast_manip/list_node_ids.rs +++ b/c2rust-refactor/src/ast_manip/list_node_ids.rs @@ -6,7 +6,7 @@ use syntax::ast::*; use syntax::parse::token::{Token, DelimToken, Nonterminal}; use syntax::ptr::P; use syntax::source_map::{Span, Spanned}; -use syntax::tokenstream::{TokenTree, Delimited, DelimSpan, TokenStream, ThinTokenStream}; +use syntax::tokenstream::{TokenTree, DelimSpan, TokenStream}; use syntax_pos::hygiene::SyntaxContext; diff --git a/c2rust-refactor/src/ast_manip/lr_expr.rs 
b/c2rust-refactor/src/ast_manip/lr_expr.rs index 84d89e4d9..7e4986a90 100644 --- a/c2rust-refactor/src/ast_manip/lr_expr.rs +++ b/c2rust-refactor/src/ast_manip/lr_expr.rs @@ -5,15 +5,14 @@ use rustc_target::spec::abi::Abi; use smallvec::SmallVec; use syntax::ThinVec; use syntax::ast::*; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::parse::token::{Token, DelimToken, Nonterminal}; use syntax::ptr::P; use syntax::source_map::{Span, Spanned}; -use syntax::tokenstream::{TokenTree, Delimited, DelimSpan, TokenStream, ThinTokenStream}; -use syntax::util::move_map::MoveMap; +use syntax::tokenstream::{TokenTree, DelimSpan, TokenStream}; use syntax_pos::hygiene::SyntaxContext; -use crate::ast_manip::Fold; +use crate::ast_manip::MutVisit; // TODO: Check for autoborrow adjustments. Some method receivers are actually Lvalue / LvalueMut @@ -25,16 +24,16 @@ use crate::ast_manip::Fold; /// Trait implemented by all AST types, allowing folding over exprs while tracking the context. trait LRExpr { - fn fold_rvalue(self, lr: &mut LR) -> Self; - fn fold_lvalue(self, lr: &mut LR) -> Self; - fn fold_lvalue_mut(self, lr: &mut LR) -> Self; + fn fold_rvalue(&mut self, lr: &mut LR); + fn fold_lvalue(&mut self, lr: &mut LR); + fn fold_lvalue_mut(&mut self, lr: &mut LR); } /// A set of expr rewrites, one for each kind of context where an expr may appear. trait LRRewrites { - fn fold_rvalue(&mut self, e: Expr) -> Expr; - fn fold_lvalue(&mut self, e: Expr) -> Expr; - fn fold_lvalue_mut(&mut self, e: Expr) -> Expr; + fn fold_rvalue(&mut self, e: &mut P); + fn fold_lvalue(&mut self, e: &mut P); + fn fold_lvalue_mut(&mut self, e: &mut P); } @@ -43,20 +42,20 @@ trait LRRewrites { macro_rules! 
lr_expr_fn { (($slf:ident, $next:ident($T:ty)) => $e:expr) => { #[allow(unused_mut)] - fn fold_rvalue($slf, lr: &mut LR) -> Self { - let mut $next = |x: $T| x.fold_rvalue(lr); + fn fold_rvalue(&mut $slf, lr: &mut LR) { + let mut $next = |x: &mut $T| x.fold_rvalue(lr); $e } #[allow(unused_mut)] - fn fold_lvalue($slf, lr: &mut LR) -> Self { - let mut $next = |x: $T| x.fold_lvalue(lr); + fn fold_lvalue(&mut $slf, lr: &mut LR) { + let mut $next = |x: &mut $T| x.fold_lvalue(lr); $e } #[allow(unused_mut)] - fn fold_lvalue_mut($slf, lr: &mut LR) -> Self { - let mut $next = |x: $T| x.fold_lvalue_mut(lr); + fn fold_lvalue_mut(&mut $slf, lr: &mut LR) { + let mut $next = |x: &mut $T| x.fold_lvalue_mut(lr); $e } }; @@ -64,86 +63,95 @@ macro_rules! lr_expr_fn { impl LRExpr for Vec { lr_expr_fn!((self, next(T)) => { - self.move_map(next) + mut_visit::visit_vec(self, next) }); } impl LRExpr for ThinVec { - lr_expr_fn!((self, next(Vec)) => { - next(self.into()).into() + lr_expr_fn!((self, next(T)) => { + for x in self.iter_mut() { + next(x); + } }); } impl LRExpr for P { lr_expr_fn!((self, next(T)) => { - self.map(next) + next(self); }); } impl LRExpr for Rc { lr_expr_fn!((self, next(T)) => { - Rc::new(next((*self).clone())) + next(Rc::make_mut(self)); }); } impl LRExpr for Spanned { lr_expr_fn!((self, next(T)) => { - Spanned { node: next(self.node), ..self } + next(&mut self.node) }); } impl LRExpr for Option { lr_expr_fn!((self, next(T)) => { - match self { - Some(x) => Some(next(x)), - None => None, - } + mut_visit::visit_opt(self, next) }); } impl LRExpr for (A, B) { - fn fold_rvalue(self, lr: &mut LR) -> Self { - let (a, b) = self; - (a.fold_rvalue(lr), - b.fold_rvalue(lr)) + fn fold_rvalue(&mut self, lr: &mut LR) { + self.0.fold_rvalue(lr); + self.1.fold_rvalue(lr); } - fn fold_lvalue(self, lr: &mut LR) -> Self { - let (a, b) = self; - (a.fold_lvalue(lr), - b.fold_lvalue(lr)) + fn fold_lvalue(&mut self, lr: &mut LR) { + self.0.fold_lvalue(lr); + self.1.fold_lvalue(lr); } - 
fn fold_lvalue_mut(self, lr: &mut LR) -> Self { - let (a, b) = self; - (a.fold_lvalue_mut(lr), - b.fold_lvalue_mut(lr)) + fn fold_lvalue_mut(&mut self, lr: &mut LR) { + self.0.fold_lvalue_mut(lr); + self.1.fold_lvalue_mut(lr); } } impl LRExpr for (A, B, C) { - fn fold_rvalue(self, lr: &mut LR) -> Self { - let (a, b, c) = self; - (a.fold_rvalue(lr), - b.fold_rvalue(lr), - c.fold_rvalue(lr)) + fn fold_rvalue(&mut self, lr: &mut LR) { + self.0.fold_rvalue(lr); + self.1.fold_rvalue(lr); + self.2.fold_rvalue(lr); } - fn fold_lvalue(self, lr: &mut LR) -> Self { - let (a, b, c) = self; - (a.fold_lvalue(lr), - b.fold_lvalue(lr), - c.fold_lvalue(lr)) + fn fold_lvalue(&mut self, lr: &mut LR) { + self.0.fold_lvalue(lr); + self.1.fold_lvalue(lr); + self.2.fold_lvalue(lr); } - fn fold_lvalue_mut(self, lr: &mut LR) -> Self { - let (a, b, c) = self; - (a.fold_lvalue_mut(lr), - b.fold_lvalue_mut(lr), - c.fold_lvalue_mut(lr)) + fn fold_lvalue_mut(&mut self, lr: &mut LR) { + self.0.fold_lvalue_mut(lr); + self.1.fold_lvalue_mut(lr); + self.2.fold_lvalue_mut(lr); } } +impl LRExpr for P { + fn fold_rvalue(&mut self, lr: &mut LR) { + self.node.fold_rvalue(lr); + lr.fold_rvalue(self) + } + fn fold_lvalue(&mut self, lr: &mut LR) { + self.node.fold_lvalue(lr); + lr.fold_lvalue(self) + } + fn fold_lvalue_mut(&mut self, lr: &mut LR) { + self.node.fold_lvalue_mut(lr); + lr.fold_lvalue_mut(self) + } +} + + include!(concat!(env!("OUT_DIR"), "/lr_expr_gen.inc.rs")); @@ -156,22 +164,23 @@ pub enum Context { } -struct Rewrites, Context) -> P> { +struct Rewrites, Context)> { callback: F, } impl LRRewrites for Rewrites - where F: FnMut(P, Context) -> P { - fn fold_rvalue(&mut self, e: Expr) -> Expr { - (self.callback)(P(e), Context::Rvalue).into_inner() + where F: FnMut(&mut P, Context) +{ + fn fold_rvalue(&mut self, e: &mut P) { + (self.callback)(e, Context::Rvalue) } - fn fold_lvalue(&mut self, e: Expr) -> Expr { - (self.callback)(P(e), Context::Lvalue).into_inner() + fn fold_lvalue(&mut self, e: 
&mut P) { + (self.callback)(e, Context::Lvalue) } - fn fold_lvalue_mut(&mut self, e: Expr) -> Expr { - (self.callback)(P(e), Context::LvalueMut).into_inner() + fn fold_lvalue_mut(&mut self, e: &mut P) { + (self.callback)(e, Context::LvalueMut) } } @@ -179,8 +188,8 @@ impl LRRewrites for Rewrites /// rvalue, (immutable) lvalue, or mutable lvalue context. /// /// `start` is the context of the outermost expression `e`. -pub fn fold_expr_with_context(e: P, start: Context, callback: F) -> P - where F: FnMut(P, Context) -> P { +pub fn fold_expr_with_context(e: &mut P, start: Context, callback: F) + where F: FnMut(&mut P, Context) { let mut lr = Rewrites { callback: callback }; match start { Context::Rvalue => e.fold_rvalue(&mut lr), @@ -190,7 +199,7 @@ pub fn fold_expr_with_context(e: P, start: Context, callback: F) -> P { callback: F, in_expr: bool, @@ -206,39 +215,39 @@ impl TopExprFolder { } } -impl) -> P> Folder for TopExprFolder { - fn fold_expr(&mut self, e: P) -> P { - let e = self.in_expr(true, |this| e.map(|e| fold::noop_fold_expr(e, this))); +impl)> MutVisitor for TopExprFolder { + fn visit_expr(&mut self, e: &mut P) { + self.in_expr(true, |this| mut_visit::noop_visit_expr(e, this)); if !self.in_expr { - (self.callback)(e) - } else { - e + (self.callback)(e); } } // Clear the `in_expr` flag upon entry to a non-expr node that may contain exprs. 
- fn fold_ty(&mut self, ty: P) -> P { - self.in_expr(false, |this| fold::noop_fold_ty(ty, this)) + fn visit_ty(&mut self, ty: &mut P) { + self.in_expr(false, |this| mut_visit::noop_visit_ty(ty, this)) } - fn fold_pat(&mut self, p: P) -> P { - self.in_expr(false, |this| fold::noop_fold_pat(p, this)) + fn visit_pat(&mut self, p: &mut P) { + self.in_expr(false, |this| mut_visit::noop_visit_pat(p, this)) } - fn fold_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> { - self.in_expr(false, |this| fold::noop_fold_stmt(s, this)) + fn flat_map_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> { + self.in_expr(false, |this| mut_visit::noop_flat_map_stmt(s, this)) } } -fn fold_top_exprs(x: T, callback: F) -> ::Result - where T: Fold, F: FnMut(P) -> P { +fn fold_top_exprs(x: &mut T, callback: F) + where T: MutVisit, F: FnMut(&mut P) +{ let mut f = TopExprFolder { callback: callback, in_expr: false }; - x.fold(&mut f) + x.visit(&mut f) } -pub fn fold_exprs_with_context(x: T, mut callback: F) -> ::Result - where T: Fold, F: FnMut(P, Context) -> P { +pub fn fold_exprs_with_context(x: &mut T, mut callback: F) + where T: MutVisit, F: FnMut(&mut P, Context) +{ fold_top_exprs(x, |e| { fold_expr_with_context(e, Context::Rvalue, |e, ctx| callback(e, ctx)) }) diff --git a/c2rust-refactor/src/ast_manip/mod.rs b/c2rust-refactor/src/ast_manip/mod.rs index ee258ca64..6b40a7868 100644 --- a/c2rust-refactor/src/ast_manip/mod.rs +++ b/c2rust-refactor/src/ast_manip/mod.rs @@ -6,7 +6,6 @@ mod ast_deref; mod ast_equiv; mod ast_names; mod fold; -mod fold_node; mod get_node_id; mod get_span; mod list_node_ids; @@ -19,8 +18,7 @@ mod visit_node; pub use self::ast_deref::AstDeref; pub use self::ast_equiv::AstEquiv; pub use self::ast_names::AstName; -pub use self::fold::Fold; -pub use self::fold_node::{FoldNode, fold_nodes}; +pub use self::fold::{FlatMapNodes, MutVisit, MutVisitNodes}; pub use self::get_node_id::{GetNodeId, MaybeGetNodeId}; pub use self::get_span::GetSpan; pub use 
self::list_node_ids::ListNodeIds; diff --git a/c2rust-refactor/src/ast_manip/number_nodes.rs b/c2rust-refactor/src/ast_manip/number_nodes.rs index ad62ff226..41a037736 100644 --- a/c2rust-refactor/src/ast_manip/number_nodes.rs +++ b/c2rust-refactor/src/ast_manip/number_nodes.rs @@ -1,8 +1,8 @@ use std::cell::Cell; use syntax::ast::{NodeId, Mac, DUMMY_NODE_ID}; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; -use crate::ast_manip::Fold; +use crate::ast_manip::MutVisit; pub struct NodeIdCounter(Cell); @@ -24,37 +24,39 @@ struct NumberNodes<'a> { counter: &'a NodeIdCounter, } -impl<'a> Folder for NumberNodes<'a> { - fn new_id(&mut self, _i: NodeId) -> NodeId { - self.counter.next() +impl<'a> MutVisitor for NumberNodes<'a> { + fn visit_id(&mut self, i: &mut NodeId) { + *i = self.counter.next() } - fn fold_mac(&mut self, mac: Mac) -> Mac { - fold::noop_fold_mac(mac, self) + fn visit_mac(&mut self, mac: &mut Mac) { + mut_visit::noop_visit_mac(mac, self) } } /// Assign new `NodeId`s to all nodes in `x`. -pub fn number_nodes(x: T) -> ::Result { +pub fn number_nodes(x: &mut T) { // 0 is a valid node id. DUMMY_NODE_ID is -1. number_nodes_with(x, &NodeIdCounter::new(0)) } /// Assign new `NodeId`s to all nodes in `x`. 
-pub fn number_nodes_with(x: T, counter: &NodeIdCounter) -> ::Result { - x.fold(&mut NumberNodes { counter }) +pub fn number_nodes_with(x: &mut T, counter: &NodeIdCounter) { + x.visit(&mut NumberNodes { counter }) } struct ResetNodeIds; -impl Folder for ResetNodeIds { - fn new_id(&mut self, _i: NodeId) -> NodeId { DUMMY_NODE_ID } +impl MutVisitor for ResetNodeIds { + fn visit_id(&mut self, i: &mut NodeId) { + *i = DUMMY_NODE_ID; + } - fn fold_mac(&mut self, mac: Mac) -> Mac { - fold::noop_fold_mac(mac, self) + fn visit_mac(&mut self, mac: &mut Mac) { + mut_visit::noop_visit_mac(mac, self) } } -pub fn reset_node_ids(x: T) -> ::Result { - x.fold(&mut ResetNodeIds) +pub fn reset_node_ids(x: &mut T) { + x.visit(&mut ResetNodeIds) } diff --git a/c2rust-refactor/src/ast_manip/output_exprs.rs b/c2rust-refactor/src/ast_manip/output_exprs.rs index 48d70685a..942c91fce 100644 --- a/c2rust-refactor/src/ast_manip/output_exprs.rs +++ b/c2rust-refactor/src/ast_manip/output_exprs.rs @@ -1,11 +1,10 @@ //! `fold_output_exprs` function, for visiting return-value expressions. use smallvec::SmallVec; use syntax::ast::*; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor, visit_opt, visit_vec}; use syntax::ptr::P; -use syntax::util::move_map::MoveMap; -use crate::ast_manip::Fold; +use crate::ast_manip::MutVisit; use crate::util::Lone; @@ -16,7 +15,7 @@ struct OutputFolder { trailing: bool, } -impl) -> P> OutputFolder { +impl)> OutputFolder { /// Change the value of `self.trailing` for the duration of the callback `g`. fn with_trailing R, R>(&mut self, trailing: bool, g: G) -> R { let old = self.trailing; @@ -27,125 +26,107 @@ impl) -> P> OutputFolder { } } -impl) -> P> Folder for OutputFolder { - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]> { +impl)> MutVisitor for OutputFolder { + fn flat_map_item(&mut self, i: P) -> SmallVec<[P; 1]> { // The expr within the fn is always trailing match i.node { ItemKind::Fn(..) 
=> - self.with_trailing(true, |f| fold::noop_fold_item(i, f)), - _ => fold::noop_fold_item(i, self), + self.with_trailing(true, |f| mut_visit::noop_flat_map_item(i, f)), + _ => mut_visit::noop_flat_map_item(i, self), } } - fn fold_impl_item(&mut self, i: ImplItem) -> SmallVec<[ImplItem; 1]> { + fn flat_map_impl_item(&mut self, i: ImplItem) -> SmallVec<[ImplItem; 1]> { match i.node { ImplItemKind::Method(..) => - self.with_trailing(true, |f| fold::noop_fold_impl_item(i, f)), - _ => fold::noop_fold_impl_item(i, self), + self.with_trailing(true, |f| mut_visit::noop_flat_map_impl_item(i, f)), + _ => mut_visit::noop_flat_map_impl_item(i, self), } } - fn fold_trait_item(&mut self, i: TraitItem) -> SmallVec<[TraitItem; 1]> { + fn flat_map_trait_item(&mut self, i: TraitItem) -> SmallVec<[TraitItem; 1]> { match i.node { TraitItemKind::Method(..) => - self.with_trailing(true, |f| fold::noop_fold_trait_item(i, f)), - _ => fold::noop_fold_trait_item(i, self), + self.with_trailing(true, |f| mut_visit::noop_flat_map_trait_item(i, f)), + _ => mut_visit::noop_flat_map_trait_item(i, self), } } - fn fold_block(&mut self, b: P) -> P { + fn visit_block(&mut self, b: &mut P) { if b.stmts.len() == 0 { - return b; + return; } - b.map(|b| { - let last = b.stmts.len() - 1; - let new_stmts = b.stmts.into_iter().enumerate().map(|(i, s)| { - if i != last { - self.with_trailing(false, |f| f.fold_stmt(s)).lone() - } else { - // Last stmt is trailing if the block is trailing - self.fold_stmt(s).lone() - } - }).collect(); - - Block { - stmts: new_stmts, - .. 
b + let last = b.stmts.len() - 1; + for (i, s) in b.stmts.iter_mut().enumerate() { + if i != last { + *s = self.with_trailing(false, |f| f.flat_map_stmt(s.clone())).lone(); + } else { + // Last stmt is trailing if the block is trailing + *s = self.flat_map_stmt(s.clone()).lone(); } - }) + } } - fn fold_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> { + fn flat_map_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> { match s.node { - StmtKind::Expr(..) => fold::noop_fold_stmt(s, self), - _ => self.with_trailing(false, |f| fold::noop_fold_stmt(s, f)), + StmtKind::Expr(..) => mut_visit::noop_flat_map_stmt(s, self), + _ => self.with_trailing(false, |f| mut_visit::noop_flat_map_stmt(s, f)), } } - fn fold_expr(&mut self, e: P) -> P { - e.map(|e| { - let node = match e.node { - ExprKind::If(cond, then, rest) => ExprKind::If( - self.with_trailing(false, |f| f.fold_expr(cond)), - self.fold_block(then), - rest.map(|e| self.fold_expr(e)), - ), - - ExprKind::IfLet(pats, cond, then, rest) => ExprKind::IfLet( - pats.move_map(|p| self.fold_pat(p)), - self.with_trailing(false, |f| f.fold_expr(cond)), - self.fold_block(then), - rest.map(|e| self.fold_expr(e)), - ), - - // TODO: Handle `loop` + `break`-with-expr. If the `loop` is a trailing - // expression, then a `break` targeting its label should be treated as a return - // expression. - //ExprKind::Loop(body) => { TODO }, - - ExprKind::Match(target, arms) => ExprKind::Match( - self.with_trailing(false, |f| f.fold_expr(target)), - arms.move_map(|arm| self.fold_arm(arm)), - ), - - ExprKind::Block(b, lbl) => ExprKind::Block(self.fold_block(b), lbl), - - ExprKind::Try(_) => { - // Explicitly unimplemented. Depending on whether `try` winds up - // auto-wrapping its "return" value in `Ok`, we may need to treat the trailing - // expr of a `catch` specially. 
- panic!("output_exprs: ExprKind::Try is not supported") - }, - - ExprKind::Ret(None) => ExprKind::Ret(None), - ExprKind::Ret(Some(ret)) => { - let ret = self.with_trailing(false, |f| f.fold_expr(ret)); - let ret = (self.callback)(ret); - ExprKind::Ret(Some(ret)) - }, - - //ExprKind::Break(Some(label), Some(expr)) => { TODO }, - - // Not sure what to do with ExprKind::Try. It can return (on error), but doesn't - // have an actual output expression. - - node => { - let e = Expr { node: node, .. e }; - let e = self.with_trailing(false, |f| fold::noop_fold_expr(e, f)); - if self.trailing { - return (self.callback)(P(e)).into_inner(); - } else { - return e - } - }, - }; - - Expr { - node: node, - .. e + fn visit_expr(&mut self, e: &mut P) { + match &mut e.node { + ExprKind::If(cond, then, rest) => { + self.with_trailing(false, |f| f.visit_expr(cond)); + self.visit_block(then); + visit_opt(rest, |rest| self.visit_expr(rest)); + } + + ExprKind::IfLet(pats, cond, then, rest) => { + visit_vec(pats, |pat| self.visit_pat(pat)); + self.with_trailing(false, |f| f.visit_expr(cond)); + self.visit_block(then); + visit_opt(rest, |rest| self.visit_expr(rest)); + } + + // TODO: Handle `loop` + `break`-with-expr. If the `loop` is a trailing + // expression, then a `break` targeting its label should be treated as a return + // expression. + //ExprKind::Loop(body) => { TODO }, + + ExprKind::Match(target, arms) => { + self.with_trailing(false, |f| f.visit_expr(target)); + visit_vec(arms, |arm| self.visit_arm(arm)); + } + + ExprKind::Block(b, _lbl) => { + self.visit_block(b); + } + + ExprKind::Try(_) => { + // Explicitly unimplemented. Depending on whether `try` winds up + // auto-wrapping its "return" value in `Ok`, we may need to treat the trailing + // expr of a `catch` specially. 
+ panic!("output_exprs: ExprKind::Try is not supported") + } + + ExprKind::Ret(ret) => { + visit_opt(ret, |ret| self.with_trailing(false, |f| f.visit_expr(ret))); } - }) + + //ExprKind::Break(Some(label), Some(expr)) => { TODO }, + + // Not sure what to do with ExprKind::Try. It can return (on error), but doesn't + // have an actual output expression. + + _ => { + self.with_trailing(false, |f| mut_visit::noop_visit_expr(e, f)); + if self.trailing { + (self.callback)(e); + } + }, + }; } } @@ -155,12 +136,12 @@ impl) -> P> Folder for OutputFolder { /// For the trailing expression of a block, only the leaf expressions will be visited - for /// example, in `fn f() { if c { x } else { y } }`, only `x` and `y` will be visited, not `{ x }`, /// `{ y }`, or the `if`. -pub fn fold_output_exprs(target: T, trailing: bool, callback: F) -> ::Result - where T: Fold, - F: FnMut(P) -> P { +pub fn fold_output_exprs(target: &mut T, trailing: bool, callback: F) + where T: MutVisit, + F: FnMut(&mut P) { let mut f = OutputFolder { callback: callback, trailing: trailing, }; - target.fold(&mut f) + target.visit(&mut f) } diff --git a/c2rust-refactor/src/ast_manip/remove_paren.rs b/c2rust-refactor/src/ast_manip/remove_paren.rs index 609b2b85e..b3b0a0a01 100644 --- a/c2rust-refactor/src/ast_manip/remove_paren.rs +++ b/c2rust-refactor/src/ast_manip/remove_paren.rs @@ -1,9 +1,9 @@ //! `remove_paren` function, for removing unnecessary `ExprKind::Paren` nodes. use syntax::ast::*; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::ptr::P; -use crate::ast_manip::Fold; +use crate::ast_manip::MutVisit; /// AST fold for deleting `ExprKind::Paren` nodes. These are used only for pretty-printing, but @@ -11,35 +11,27 @@ use crate::ast_manip::Fold; /// which parses back as "Mul(Paren(Add(x, y)), z)"). 
struct RemoveParen; -impl Folder for RemoveParen { - fn fold_expr(&mut self, e: P) -> P { - let mut e = e; - while let ExprKind::Paren(_) = e.node { - match e.into_inner().node { - ExprKind::Paren(inner) => { e = inner; }, - _ => unreachable!(), - } +impl MutVisitor for RemoveParen { + fn visit_expr(&mut self, e: &mut P) { + if let ExprKind::Paren(ref inner) = e.node { + *e = inner.clone(); } - e.map(|e| fold::noop_fold_expr(e, self)) + mut_visit::noop_visit_expr(e, self); } - fn fold_ty(&mut self, t: P) -> P { - let mut t = t; - while let TyKind::Paren(_) = t.node { - match t.into_inner().node { - TyKind::Paren(inner) => { t = inner; }, - _ => unreachable!(), - } + fn visit_ty(&mut self, t: &mut P) { + if let TyKind::Paren(ref inner) = t.node { + *t = inner.clone(); } - fold::noop_fold_ty(t, self) + mut_visit::noop_visit_ty(t, self) } // Need a no-op implementation to avoid "fold_mac disabled by default" error. - fn fold_mac(&mut self, mac: Mac) -> Mac { - fold::noop_fold_mac(mac, self) + fn visit_mac(&mut self, mac: &mut Mac) { + mut_visit::noop_visit_mac(mac, self) } } -pub fn remove_paren(x: T) -> ::Result { - x.fold(&mut RemoveParen) +pub fn remove_paren(x: &mut T) { + x.visit(&mut RemoveParen) } diff --git a/c2rust-refactor/src/ast_manip/seq_edit.rs b/c2rust-refactor/src/ast_manip/seq_edit.rs index 1c616c658..54c72d234 100644 --- a/c2rust-refactor/src/ast_manip/seq_edit.rs +++ b/c2rust-refactor/src/ast_manip/seq_edit.rs @@ -1,9 +1,10 @@ //! Functions for rewriting sequences of stmts or items, using `Cursor`. 
+use std::mem; use syntax::ast::{Block, Stmt, Item, Mod}; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::ptr::P; -use crate::ast_manip::Fold; +use crate::ast_manip::MutVisit; use crate::util::cursor::Cursor; @@ -11,23 +12,21 @@ struct BlockFolder)> { f: F, } -impl)> Folder for BlockFolder { - fn fold_block(&mut self, b: P) -> P { - let b = b.map(|mut b| { - let mut stmt_cursor = Cursor::from_vec(b.stmts); - (self.f)(&mut stmt_cursor); - b.stmts = stmt_cursor.into_vec(); - b - }); - fold::noop_fold_block(b, self) +impl)> MutVisitor for BlockFolder { + fn visit_block(&mut self, b: &mut P) { + let stmts = mem::replace(&mut b.stmts, vec![]); + let mut stmt_cursor = Cursor::from_vec(stmts); + (self.f)(&mut stmt_cursor); + b.stmts = stmt_cursor.into_vec(); + mut_visit::noop_visit_block(b, self) } } /// Rewrite every block by manipulating a `Cursor` for the `Stmt`s inside. -pub fn fold_blocks(target: T, callback: F) -> ::Result - where T: Fold, +pub fn fold_blocks(target: &mut T, callback: F) + where T: MutVisit, F: FnMut(&mut Cursor) { - target.fold(&mut BlockFolder { f: callback }) + target.visit(&mut BlockFolder { f: callback }) } @@ -35,18 +34,19 @@ struct ModuleFolder>)> { f: F, } -impl>)> Folder for ModuleFolder { - fn fold_mod(&mut self, mut m: Mod) -> Mod { - let mut curs = Cursor::from_vec(m.items); +impl>)> MutVisitor for ModuleFolder { + fn visit_mod(&mut self, mut m: &mut Mod) { + let items = mem::replace(&mut m.items, vec![]); + let mut curs = Cursor::from_vec(items); (self.f)(&mut curs); m.items = curs.into_vec(); - fold::noop_fold_mod(m, self) + mut_visit::noop_visit_mod(m, self) } } /// Rewrite every module by manipulating a `Cursor` for the `Item`s inside. 
-pub fn fold_modules(target: T, callback: F) -> ::Result - where T: Fold, +pub fn fold_modules(target: &mut T, callback: F) + where T: MutVisit, F: FnMut(&mut Cursor>) { - target.fold(&mut ModuleFolder { f: callback }) + target.visit(&mut ModuleFolder { f: callback }) } diff --git a/c2rust-refactor/src/ast_manip/util.rs b/c2rust-refactor/src/ast_manip/util.rs index e0bf4c998..df8d7b54c 100644 --- a/c2rust-refactor/src/ast_manip/util.rs +++ b/c2rust-refactor/src/ast_manip/util.rs @@ -5,7 +5,7 @@ use syntax::ast::*; use syntax::ptr::P; use syntax::source_map::{SourceMap, Span, DUMMY_SP}; use syntax::symbol::{Symbol, keywords}; -use syntax::tokenstream::{TokenStream, ThinTokenStream}; +use syntax::tokenstream::{TokenStream}; use super::AstEquiv; @@ -79,7 +79,7 @@ impl PatternSymbol for Ty { impl PatternSymbol for Mac { fn pattern_symbol(&self) -> Option { - if self.node.tts != ThinTokenStream::from(TokenStream::empty()) { + if self.node.tts != TokenStream::empty() { return None; } self.node.path.pattern_symbol() @@ -203,7 +203,7 @@ pub fn split_uses(item: P) -> SmallVec<[P; 1]> { /// Is a path relative to the current module? pub fn is_relative_path(path: &Path) -> bool { !path.segments.is_empty() - && (path.segments[0].ident.name == keywords::SelfValue.name() + && (path.segments[0].ident.name == keywords::SelfLower.name() || path.segments[0].ident.name == keywords::Super.name()) } @@ -229,7 +229,7 @@ pub fn namespace(def: &Def) -> Option { | SelfTy(..) | ToolMod => Some(Namespace::TypeNS), - Fn(..) | Const(..) | Static(..) | StructCtor(..) | VariantCtor(..) | SelfCtor(..) + Fn(..) | Const(..) | Static(..) | SelfCtor(..) | Method(..) | AssociatedConst(..) | Local(..) | Upvar(..) | Label(..) 
=> { Some(Namespace::ValueNS) } diff --git a/c2rust-refactor/src/collapse/cfg_attr.rs b/c2rust-refactor/src/collapse/cfg_attr.rs index a4efd9f93..a0a5d1d30 100644 --- a/c2rust-refactor/src/collapse/cfg_attr.rs +++ b/c2rust-refactor/src/collapse/cfg_attr.rs @@ -1,11 +1,13 @@ use std::collections::HashMap; use syntax::ast::*; use syntax::attr::HasAttrs; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::ptr::P; use syntax::visit::{self, Visitor}; -use crate::ast_manip::{Fold, GetNodeId, Visit}; +use smallvec::SmallVec; + +use crate::ast_manip::{MutVisit, GetNodeId, Visit}; struct CollectCfgAttrs { @@ -65,61 +67,65 @@ struct RestoreCfgAttrs { } impl RestoreCfgAttrs { - fn restore(&mut self, x: T) -> T { + fn restore(&mut self, x: &mut T) { if let Some(cfg_attrs) = self.node_attrs.get(&x.get_node_id()) { info!("RESTORE ATTRS {:?} onto {:?}", cfg_attrs.iter().map(|a| ::syntax::print::pprust::attr_to_string(a)) .collect::>(), x.attrs().iter().map(|a| ::syntax::print::pprust::attr_to_string(a)) .collect::>()); - let x2 = - x.map_attrs(|mut attrs| { + x.visit_attrs(|attrs| { // Drop attrs that were produced by evaluation of one of the `#[cfg_attr]`s. attrs.retain(|a| !cfg_attrs.iter().any(|ca| ca.span.contains(a.span))); // Now put the #[cfg_attr]s themselves back in. attrs.extend(cfg_attrs.iter().cloned()); - attrs }); info!(" attrs changed to {:?}", - x2.attrs().iter().map(|a| ::syntax::print::pprust::attr_to_string(a)) + x.attrs().iter().map(|a| ::syntax::print::pprust::attr_to_string(a)) .collect::>()); - x2 - } else { - x } } } -macro_rules! 
restore_cfg_attrs { - ($slf:ident, $thing:ident; - $($fold_thing:ident($Thing:ty), $noop_fold_thing:expr;)*) => { - impl Folder for RestoreCfgAttrs { - $( - fn $fold_thing(&mut $slf, $thing: $Thing) -> <$Thing as Fold>::Result { - let $thing = $slf.restore($thing); - $noop_fold_thing - } - )* +impl MutVisitor for RestoreCfgAttrs { + fn flat_map_item(&mut self, mut i: P) -> SmallVec<[P; 1]> { + self.restore(&mut i); + mut_visit::noop_flat_map_item(i, self) + } - fn fold_mac(&mut self, mac: Mac) -> Mac { - fold::noop_fold_mac(mac, self) - } - } - }; -} + fn flat_map_impl_item(&mut self, mut i: ImplItem) -> SmallVec<[ImplItem; 1]> { + self.restore(&mut i); + mut_visit::noop_flat_map_impl_item(i, self) + } -restore_cfg_attrs! { - self, x; - fold_item(P), fold::noop_fold_item(x, self); - fold_impl_item(ImplItem), fold::noop_fold_impl_item(x, self); - fold_trait_item(TraitItem), fold::noop_fold_trait_item(x, self); - fold_foreign_item(ForeignItem), fold::noop_fold_foreign_item(x, self); - fold_stmt(Stmt), fold::noop_fold_stmt(x, self); - fold_expr(P), x.map(|x| fold::noop_fold_expr(x, self)); - // TODO: extend this list with the remaining node types + fn flat_map_trait_item(&mut self, mut i: TraitItem) -> SmallVec<[TraitItem; 1]> { + self.restore(&mut i); + mut_visit::noop_flat_map_trait_item(i, self) + } + + fn flat_map_foreign_item(&mut self, mut i: ForeignItem) -> SmallVec<[ForeignItem; 1]> { + self.restore(&mut i); + mut_visit::noop_flat_map_foreign_item(i, self) + } + + fn flat_map_stmt(&mut self, mut s: Stmt) -> SmallVec<[Stmt; 1]> { + self.restore(&mut s); + mut_visit::noop_flat_map_stmt(s, self) + } + + fn visit_expr(&mut self, e: &mut P) { + self.restore(e); + mut_visit::noop_visit_expr(e, self) + } + + fn visit_mac(&mut self, mac: &mut Mac) { + mut_visit::noop_visit_mac(mac, self) + } + + // TODO: extend this impl with the remaining node types } -pub fn restore_cfg_attrs(krate: Crate, node_attrs: HashMap>) -> Crate { +pub fn restore_cfg_attrs(krate: &mut Crate, 
node_attrs: HashMap>) { let mut f = RestoreCfgAttrs { node_attrs }; - krate.fold(&mut f) + krate.visit(&mut f) } diff --git a/c2rust-refactor/src/collapse/deleted.rs b/c2rust-refactor/src/collapse/deleted.rs index c482fd55c..35d030271 100644 --- a/c2rust-refactor/src/collapse/deleted.rs +++ b/c2rust-refactor/src/collapse/deleted.rs @@ -5,14 +5,13 @@ use std::collections::{HashMap, HashSet}; use std::mem; use smallvec::SmallVec; use syntax::ast::*; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::ptr::P; use syntax::visit::{self, Visitor}; -use crate::ast_manip::{Fold, GetNodeId, ListNodeIds, Visit}; +use crate::ast_manip::{MutVisit, GetNodeId, ListNodeIds, Visit}; use crate::ast_manip::number_nodes::{number_nodes_with, NodeIdCounter}; use crate::node_map::NodeMap; -use crate::util::Lone; use super::mac_table::{MacTable, MacNodeRef, AsMacNodeRef}; @@ -144,15 +143,14 @@ pub fn collect_deleted_nodes<'ast>(krate: &'ast Crate, struct RestoreDeletedNodes<'a, 'ast> { node_map: &'a mut NodeMap, - counter: &'a mut NodeIdCounter, + counter: &'a NodeIdCounter, /// Map of deleted nodes inside each parent, keyed on collapsed parent `NodeId`. deleted: HashMap>> } impl<'a, 'ast> RestoreDeletedNodes<'a, 'ast> { fn restore_seq(&mut self, parent: NodeId, nodes: &mut Vec) - where T: GetNodeId + ListNodeIds + Fold + AsMacNodeRef, - ::Result: Lone { + where T: GetNodeId + ListNodeIds + MutVisit + AsMacNodeRef { let deleted = match_or!([self.deleted.get(&parent)] Some(x) => x; return); // Set of nodes that are currently present in the parent. 
We use this to find the last @@ -174,12 +172,12 @@ impl<'a, 'ast> RestoreDeletedNodes<'a, 'ast> { let result = Vec::with_capacity(nodes.len() + deleted.len()); let old_nodes = mem::replace(nodes, result); - let counter = &mut *self.counter; + let counter = self.counter; let node_map = &mut *self.node_map; let mut push_ins_after = |nodes: &mut Vec<_>, id| { for dn in ins_after.remove(&id).into_iter().flat_map(|x| x) { - let n = T::clone_from_mac_node_ref(dn.node); - let n = number_nodes_with(n, counter).lone(); + let mut n = T::clone_from_mac_node_ref(dn.node); + number_nodes_with(&mut n, counter); for (id, &origin) in n.list_node_ids().into_iter().zip(&dn.saved_origins) { node_map.restore_origin(id, origin); @@ -199,13 +197,13 @@ impl<'a, 'ast> RestoreDeletedNodes<'a, 'ast> { } } -impl<'a, 'ast> Folder for RestoreDeletedNodes<'a, 'ast> { - fn fold_crate(&mut self, mut x: Crate) -> Crate { +impl<'a, 'ast> MutVisitor for RestoreDeletedNodes<'a, 'ast> { + fn visit_crate(&mut self, x: &mut Crate) { self.restore_seq(CRATE_NODE_ID, &mut x.module.items); - fold::noop_fold_crate(x, self) + mut_visit::noop_visit_crate(x, self) } - fn fold_item(&mut self, x: P) -> SmallVec<[P; 1]> { + fn flat_map_item(&mut self, x: P) -> SmallVec<[P; 1]> { let x = x.map(|mut x| { match x.node { ItemKind::Mod(ref mut m) => @@ -220,11 +218,11 @@ impl<'a, 'ast> Folder for RestoreDeletedNodes<'a, 'ast> { } x }); - fold::noop_fold_item(x, self) + mut_visit::noop_flat_map_item(x, self) } - fn fold_mac(&mut self, mac: Mac) -> Mac { - fold::noop_fold_mac(mac, self) + fn visit_mac(&mut self, mac: &mut Mac) { + mut_visit::noop_visit_mac(mac, self) } } @@ -244,14 +242,14 @@ fn index_deleted_nodes<'ast>(vec: Vec>) /// /// The `DeletedNode`s in `deleted` should use expanded IDs, as returned from /// `collect_deleted_nodes`. 
-pub fn restore_deleted_nodes(krate: Crate, +pub fn restore_deleted_nodes(krate: &mut Crate, node_map: &mut NodeMap, - counter: &mut NodeIdCounter, - deleted: Vec) -> Crate { + counter: &NodeIdCounter, + deleted: Vec) { // Transfer `deleted` to `collapsed` IDs, which is what `krate` is currently using. let deleted = transfer_deleted_nodes(node_map, deleted); let deleted = index_deleted_nodes(deleted); let mut f = RestoreDeletedNodes { node_map, counter, deleted }; - krate.fold(&mut f) + krate.visit(&mut f) } diff --git a/c2rust-refactor/src/collapse/mac_table.rs b/c2rust-refactor/src/collapse/mac_table.rs index 28537ca05..59a83c61b 100644 --- a/c2rust-refactor/src/collapse/mac_table.rs +++ b/c2rust-refactor/src/collapse/mac_table.rs @@ -4,7 +4,7 @@ use syntax::ThinVec; use syntax::ast::*; use syntax::source_map::{Span, SyntaxContext}; use syntax::parse::token::{Token, DelimToken, Nonterminal}; -use syntax::tokenstream::{TokenTree, Delimited, DelimSpan, TokenStream, ThinTokenStream}; +use syntax::tokenstream::{TokenTree, DelimSpan, TokenStream}; use rustc_target::spec::abi::Abi; use std::rc::Rc; diff --git a/c2rust-refactor/src/collapse/macros.rs b/c2rust-refactor/src/collapse/macros.rs index 0ac6c7c61..2aed83a6a 100644 --- a/c2rust-refactor/src/collapse/macros.rs +++ b/c2rust-refactor/src/collapse/macros.rs @@ -6,13 +6,14 @@ //! added in different places: macro invocations in `CollapseMacros`, macro arguments in //! `token_rewrite_map`, and nodes outside of macros in `ReplaceTokens`. 
use std::collections::{HashMap, HashSet, BTreeMap}; +use rustc_data_structures::sync::Lrc; use syntax::ast::*; use syntax::attr; use syntax::source_map::{Span, BytePos}; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::ptr::P; use syntax::parse::token::{Token, Nonterminal}; -use syntax::tokenstream::{self, TokenStream, ThinTokenStream, TokenTree, Delimited}; +use syntax::tokenstream::{self, TokenStream, TokenTree}; use smallvec::SmallVec; use c2rust_ast_builder::mk; @@ -20,8 +21,7 @@ use super::mac_table::{MacTable, InvocId, InvocKind}; use super::nt_match::{self, NtMatch}; use super::root_callsite_span; -use crate::ast_manip::{Fold, ListNodeIds}; -use crate::ast_manip::AstEquiv; +use crate::ast_manip::{AstEquiv, ListNodeIds, MutVisit}; @@ -57,8 +57,10 @@ impl<'a> CollapseMacros<'a> { for (span, nt) in nt_match::match_nonterminals(old, new) { trace!(" got {} at {:?}", ::syntax::print::pprust::token_to_string( - &Token::interpolated(nt.clone())), - span); + &Token::Interpolated(Lrc::new(nt.clone())) + ), + span, + ); self.token_rewrites.push(RewriteItem { invoc_id, span, nt }); } } @@ -68,8 +70,8 @@ impl<'a> CollapseMacros<'a> { } } -impl<'a> Folder for CollapseMacros<'a> { - fn fold_expr(&mut self, e: P) -> P { +impl<'a> MutVisitor for CollapseMacros<'a> { + fn visit_expr(&mut self, e: &mut P) { if let Some(info) = self.mac_table.get(e.id) { if let InvocKind::Mac(mac) = info.invoc { let old = info.expanded.as_expr() @@ -78,15 +80,15 @@ impl<'a> Folder for CollapseMacros<'a> { let new_e = mk().id(e.id).span(root_callsite_span(e.span)).mac_expr(mac); trace!("collapse: {:?} -> {:?}", e, new_e); self.record_matched_ids(e.id, new_e.id); - return new_e; + *e = new_e; } else { warn!("bad macro kind for expr: {:?}", info.invoc); } } - e.map(|e| fold::noop_fold_expr(e, self)) + mut_visit::noop_visit_expr(e, self) } - fn fold_pat(&mut self, p: P) -> P { + fn visit_pat(&mut self, p: &mut P) { if let Some(info) = 
self.mac_table.get(p.id) { if let InvocKind::Mac(mac) = info.invoc { let old = info.expanded.as_pat() @@ -95,15 +97,15 @@ impl<'a> Folder for CollapseMacros<'a> { let new_p = mk().id(p.id).span(root_callsite_span(p.span)).mac_pat(mac); trace!("collapse: {:?} -> {:?}", p, new_p); self.record_matched_ids(p.id, new_p.id); - return new_p; + *p = new_p; } else { warn!("bad macro kind for pat: {:?}", info.invoc); } } - fold::noop_fold_pat(p, self) + mut_visit::noop_visit_pat(p, self) } - fn fold_ty(&mut self, t: P) -> P { + fn visit_ty(&mut self, t: &mut P) { if let Some(info) = self.mac_table.get(t.id) { if let InvocKind::Mac(mac) = info.invoc { let old = info.expanded.as_ty() @@ -112,15 +114,15 @@ impl<'a> Folder for CollapseMacros<'a> { let new_t = mk().id(t.id).span(root_callsite_span(t.span)).mac_ty(mac); trace!("collapse: {:?} -> {:?}", t, new_t); self.record_matched_ids(t.id, new_t.id); - return new_t; + *t = new_t; } else { warn!("bad macro kind for ty: {:?}", info.invoc); } } - fold::noop_fold_ty(t, self) + mut_visit::noop_visit_ty(t, self) } - fn fold_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> { + fn flat_map_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> { if let Some(info) = self.mac_table.get(s.id) { if let InvocKind::Mac(mac) = info.invoc { let old = info.expanded.as_stmt() @@ -141,10 +143,10 @@ impl<'a> Folder for CollapseMacros<'a> { warn!("bad macro kind for stmt: {:?}", info.invoc); } } - fold::noop_fold_stmt(s, self) + mut_visit::noop_flat_map_stmt(s, self) } - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]> { + fn flat_map_item(&mut self, i: P) -> SmallVec<[P; 1]> { if let Some(info) = self.mac_table.get(i.id) { match info.invoc { InvocKind::Mac(mac) => { @@ -175,10 +177,10 @@ impl<'a> Folder for CollapseMacros<'a> { }, } } - fold::noop_fold_item(i, self) + mut_visit::noop_flat_map_item(i, self) } - fn fold_impl_item(&mut self, ii: ImplItem) -> SmallVec<[ImplItem; 1]> { + fn flat_map_impl_item(&mut self, ii: ImplItem) -> SmallVec<[ImplItem; 
1]> { if let Some(info) = self.mac_table.get(ii.id) { if let InvocKind::Mac(mac) = info.invoc { let old = info.expanded.as_impl_item() @@ -200,10 +202,10 @@ impl<'a> Folder for CollapseMacros<'a> { warn!("bad macro kind for impl item: {:?}", info.invoc); } } - fold::noop_fold_impl_item(ii, self) + mut_visit::noop_flat_map_impl_item(ii, self) } - fn fold_trait_item(&mut self, ti: TraitItem) -> SmallVec<[TraitItem; 1]> { + fn flat_map_trait_item(&mut self, ti: TraitItem) -> SmallVec<[TraitItem; 1]> { if let Some(info) = self.mac_table.get(ti.id) { if let InvocKind::Mac(mac) = info.invoc { let old = info.expanded.as_trait_item() @@ -225,10 +227,10 @@ impl<'a> Folder for CollapseMacros<'a> { warn!("bad macro kind for trait item: {:?}", info.invoc); } } - fold::noop_fold_trait_item(ti, self) + mut_visit::noop_flat_map_trait_item(ti, self) } - fn fold_foreign_item(&mut self, fi: ForeignItem) -> SmallVec<[ForeignItem; 1]> { + fn flat_map_foreign_item(&mut self, fi: ForeignItem) -> SmallVec<[ForeignItem; 1]> { if let Some(info) = self.mac_table.get(fi.id) { if let InvocKind::Mac(mac) = info.invoc { let old = info.expanded.as_foreign_item() @@ -250,11 +252,11 @@ impl<'a> Folder for CollapseMacros<'a> { warn!("bad macro kind for trait item: {:?}", info.invoc); } } - fold::noop_fold_foreign_item(fi, self) + mut_visit::noop_flat_map_foreign_item(fi, self) } - fn fold_mac(&mut self, mac: Mac) -> Mac { - fold::noop_fold_mac(mac, self) + fn visit_mac(&mut self, mac: &mut Mac) { + mut_visit::noop_visit_mac(mac, self) } } @@ -380,7 +382,7 @@ fn rewrite_tokens(invoc_id: InvocId, if let Some(item) = rewrites.remove(&tt.span().lo()) { assert!(item.invoc_id == invoc_id); - new_tts.push(TokenTree::Token(item.span, Token::interpolated(item.nt))); + new_tts.push(TokenTree::Token(item.span, Token::Interpolated(Lrc::new(item.nt)))); ignore_until = Some(item.span.hi()); continue; } @@ -389,12 +391,12 @@ fn rewrite_tokens(invoc_id: InvocId, TokenTree::Token(sp, t) => { 
new_tts.push(TokenTree::Token(sp, t)); }, - TokenTree::Delimited(sp, d) => { - let d_tts: TokenStream = d.tts.into(); - new_tts.push(TokenTree::Delimited(sp, Delimited { - tts: rewrite_tokens(invoc_id, d_tts.into_trees(), rewrites).into(), - ..d - })); + TokenTree::Delimited(sp, delim, tts) => { + new_tts.push(TokenTree::Delimited( + sp, + delim, + rewrite_tokens(invoc_id, tts.into_trees(), rewrites).into(), + )); }, } } @@ -405,7 +407,7 @@ fn rewrite_tokens(invoc_id: InvocId, fn convert_token_rewrites(rewrite_vec: Vec, mac_table: &MacTable, matched_ids: &mut Vec<(NodeId, NodeId)>) - -> HashMap { + -> HashMap { let mut rewrite_map = token_rewrite_map(rewrite_vec, matched_ids); let invoc_ids = rewrite_map.values().map(|r| r.invoc_id).collect::>(); invoc_ids.into_iter().filter_map(|invoc_id| { @@ -422,57 +424,48 @@ fn convert_token_rewrites(rewrite_vec: Vec, } -/// Folder for updating the `TokenStream`s of macro invocations. This is where we actually copy +/// MutVisitor for updating the `TokenStream`s of macro invocations. This is where we actually copy /// the rewritten token streams produced by `convert_token_rewrites` into the AST. /// /// As a side effect, this updates `matched_ids` with identity edges (`x.id -> x.id`) for any /// remaining nodes that were unaffected by the collapsing. struct ReplaceTokens<'a> { mac_table: &'a MacTable<'a>, - new_tokens: HashMap, + new_tokens: HashMap, matched_ids: &'a mut Vec<(NodeId, NodeId)>, } -impl<'a> Folder for ReplaceTokens<'a> { - fn fold_expr(&mut self, e: P) -> P { +impl<'a> MutVisitor for ReplaceTokens<'a> { + fn visit_expr(&mut self, e: &mut P) { if let Some(invoc_id) = self.mac_table.get(e.id).map(|m| m.id) { if let Some(new_tts) = self.new_tokens.get(&invoc_id).cloned() { // NB: Don't walk, so we never run `self.new_id` on `e.id`. matched_ids entries // for macro invocations get handled by the CollapseMacros pass. 
- return e.map(|mut e| { - expect!([e.node] ExprKind::Mac(ref mut mac) => mac.node.tts = new_tts); - e - }); + expect!([e.node] ExprKind::Mac(ref mut mac) => mac.node.tts = new_tts); } } - e.map(|e| fold::noop_fold_expr(e, self)) + mut_visit::noop_visit_expr(e, self) } - fn fold_pat(&mut self, p: P) -> P { + fn visit_pat(&mut self, p: &mut P) { if let Some(invoc_id) = self.mac_table.get(p.id).map(|m| m.id) { if let Some(new_tts) = self.new_tokens.get(&invoc_id).cloned() { - return p.map(|mut p| { - expect!([p.node] PatKind::Mac(ref mut mac) => mac.node.tts = new_tts); - p - }); + expect!([p.node] PatKind::Mac(ref mut mac) => mac.node.tts = new_tts); } } - fold::noop_fold_pat(p, self) + mut_visit::noop_visit_pat(p, self) } - fn fold_ty(&mut self, t: P) -> P { + fn visit_ty(&mut self, t: &mut P) { if let Some(invoc_id) = self.mac_table.get(t.id).map(|m| m.id) { if let Some(new_tts) = self.new_tokens.get(&invoc_id).cloned() { - return t.map(|mut t| { - expect!([t.node] TyKind::Mac(ref mut mac) => mac.node.tts = new_tts); - t - }); + expect!([t.node] TyKind::Mac(ref mut mac) => mac.node.tts = new_tts); } } - fold::noop_fold_ty(t, self) + mut_visit::noop_visit_ty(t, self) } - fn fold_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> { + fn flat_map_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> { if let Some(invoc_id) = self.mac_table.get(s.id).map(|m| m.id) { if let Some(new_tts) = self.new_tokens.get(&invoc_id).cloned() { unpack!([s.node] StmtKind::Mac(mac)); @@ -483,10 +476,10 @@ impl<'a> Folder for ReplaceTokens<'a> { return smallvec![Stmt { node: StmtKind::Mac(mac), ..s }]; } } - fold::noop_fold_stmt(s, self) + mut_visit::noop_flat_map_stmt(s, self) } - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]> { + fn flat_map_item(&mut self, i: P) -> SmallVec<[P; 1]> { if let Some(invoc_id) = self.mac_table.get(i.id).map(|m| m.id) { if let Some(new_tts) = self.new_tokens.get(&invoc_id).cloned() { return smallvec![i.map(|mut i| { @@ -495,10 +488,10 @@ impl<'a> Folder for 
ReplaceTokens<'a> { })]; } } - fold::noop_fold_item(i, self) + mut_visit::noop_flat_map_item(i, self) } - fn fold_impl_item(&mut self, ii: ImplItem) -> SmallVec<[ImplItem; 1]> { + fn flat_map_impl_item(&mut self, ii: ImplItem) -> SmallVec<[ImplItem; 1]> { if let Some(invoc_id) = self.mac_table.get(ii.id).map(|m| m.id) { if let Some(new_tts) = self.new_tokens.get(&invoc_id).cloned() { let mut ii = ii; @@ -506,10 +499,10 @@ impl<'a> Folder for ReplaceTokens<'a> { return smallvec![ii]; } } - fold::noop_fold_impl_item(ii, self) + mut_visit::noop_flat_map_impl_item(ii, self) } - fn fold_trait_item(&mut self, ti: TraitItem) -> SmallVec<[TraitItem; 1]> { + fn flat_map_trait_item(&mut self, ti: TraitItem) -> SmallVec<[TraitItem; 1]> { if let Some(invoc_id) = self.mac_table.get(ti.id).map(|m| m.id) { if let Some(new_tts) = self.new_tokens.get(&invoc_id).cloned() { let mut ti = ti; @@ -517,10 +510,10 @@ impl<'a> Folder for ReplaceTokens<'a> { return smallvec![ti]; } } - fold::noop_fold_trait_item(ti, self) + mut_visit::noop_flat_map_trait_item(ti, self) } - fn fold_foreign_item(&mut self, fi: ForeignItem) -> SmallVec<[ForeignItem; 1]> { + fn flat_map_foreign_item(&mut self, fi: ForeignItem) -> SmallVec<[ForeignItem; 1]> { if let Some(invoc_id) = self.mac_table.get(fi.id).map(|m| m.id) { if let Some(new_tts) = self.new_tokens.get(&invoc_id).cloned() { let mut fi = fi; @@ -528,22 +521,21 @@ impl<'a> Folder for ReplaceTokens<'a> { return smallvec![fi]; } } - fold::noop_fold_foreign_item(fi, self) + mut_visit::noop_flat_map_foreign_item(fi, self) } - fn fold_mac(&mut self, mac: Mac) -> Mac { - fold::noop_fold_mac(mac, self) + fn visit_mac(&mut self, mac: &mut Mac) { + mut_visit::noop_visit_mac(mac, self) } - fn new_id(&mut self, i: NodeId) -> NodeId { - self.matched_ids.push((i, i)); - i + fn visit_id(&mut self, i: &mut NodeId) { + self.matched_ids.push((*i, *i)); } } -pub fn collapse_macros(mut krate: Crate, - mac_table: &MacTable) -> (Crate, Vec<(NodeId, NodeId)>) { +pub fn 
collapse_macros(krate: &mut Crate, + mac_table: &MacTable) -> Vec<(NodeId, NodeId)> { let mut matched_ids = Vec::new(); let token_rewrites: Vec; @@ -554,7 +546,7 @@ pub fn collapse_macros(mut krate: Crate, token_rewrites: Vec::new(), matched_ids: &mut matched_ids, }; - krate = krate.fold(&mut collapse_macros); + krate.visit(&mut collapse_macros); token_rewrites = collapse_macros.token_rewrites; } @@ -565,8 +557,8 @@ pub fn collapse_macros(mut krate: Crate, ::syntax::print::pprust::tokens_to_string(v.clone().into())); } - krate = krate.fold(&mut ReplaceTokens { + krate.visit(&mut ReplaceTokens { mac_table, new_tokens, matched_ids: &mut matched_ids }); - (krate, matched_ids) + matched_ids } diff --git a/c2rust-refactor/src/collapse/mod.rs b/c2rust-refactor/src/collapse/mod.rs index 174b5a25c..c9c27eb08 100644 --- a/c2rust-refactor/src/collapse/mod.rs +++ b/c2rust-refactor/src/collapse/mod.rs @@ -14,7 +14,7 @@ //! Though most of the code and comments talk about "macros", we really mean everything that gets //! processed during macro expansion, which includes regular macros, proc macros (`format!`, etc.), //! certain attributes (`#[derive]`, `#[cfg]`), and `std`/prelude injection. 
-use std::collections::HashSet; +use std::collections::{HashMap, HashSet}; use syntax::attr; use syntax::ast::*; use syntax::source_map::Span; @@ -34,6 +34,68 @@ pub use self::mac_table::{MacTable, MacInfo, collect_macro_invocations}; pub use self::node_map::match_nonterminal_ids; pub use self::macros::collapse_macros; +use crate::command::CommandState; +use crate::node_map::NodeMap; +use deleted::DeletedNode; + +pub struct CollapseInfo<'ast> { + mac_table: MacTable<'ast>, + cfg_attr_info: HashMap>, + deleted_info: Vec>, +} + +impl<'ast> CollapseInfo<'ast> { + pub fn collect( + unexpanded: &'ast Crate, + expanded: &'ast Crate, + node_map: &mut NodeMap, + cs: &CommandState, + ) -> Self { + // Collect info + update node_map, then transfer and commit + let (mac_table, matched_ids) = + collect_macro_invocations(unexpanded, expanded); + node_map.add_edges(&matched_ids); + node_map.add_edges(&[(CRATE_NODE_ID, CRATE_NODE_ID)]); + let cfg_attr_info = collect_cfg_attrs(&unexpanded); + let deleted_info = collect_deleted_nodes( + &unexpanded, &node_map, &mac_table); + match_nonterminal_ids(node_map, &mac_table); + + node_map.transfer_marks(&mut cs.marks_mut()); + let cfg_attr_info = node_map.transfer_map(cfg_attr_info); + node_map.commit(); + + CollapseInfo { + mac_table, + cfg_attr_info, + deleted_info, + } + } + + pub fn collapse(self, node_map: &mut NodeMap, cs: &CommandState) { + // Collapse macros + update node_map. 
The cfg_attr step requires the updated node_map + // TODO: we should be able to skip some of these steps if `!cmd_state.krate_changed()` + collapse_injected(&mut cs.krate_mut()); + let matched_ids = collapse_macros(&mut cs.krate_mut(), &self.mac_table); + node_map.add_edges(&matched_ids); + node_map.add_edges(&[(CRATE_NODE_ID, CRATE_NODE_ID)]); + + let cfg_attr_info = node_map.transfer_map(self.cfg_attr_info); + restore_cfg_attrs(&mut cs.krate_mut(), cfg_attr_info); + + restore_deleted_nodes( + &mut cs.krate_mut(), + node_map, + cs.node_id_counter(), + self.deleted_info, + ); + + node_map.transfer_marks(&mut cs.marks_mut()); + node_map.commit(); + } +} + + /// Returns a list of injected crate names, plus a flag indicating whether a prelude import was /// also injected. fn injected_items(krate: &Crate) -> (&'static [&'static str], bool) { @@ -52,11 +114,11 @@ fn injected_items(krate: &Crate) -> (&'static [&'static str], bool) { } /// Reverse the effect of `std`/prelude injection, by deleting the injected items. -pub fn collapse_injected(mut krate: Crate) -> Crate { - let (crate_names, mut expect_prelude) = injected_items(&krate); +pub fn collapse_injected(krate: &mut Crate) { + let (crate_names, mut expect_prelude) = injected_items(krate); let mut crate_names = crate_names.iter().map(|x| x.into_symbol()).collect::>(); - let new_items = krate.module.items.into_iter().filter(|i| { + krate.module.items.retain(|i| { match i.node { ItemKind::ExternCrate(_) => { // Remove the first `extern crate` matching each entry in `crate_names`. @@ -76,9 +138,7 @@ pub fn collapse_injected(mut krate: Crate) -> Crate { }, _ => true, } - }).collect(); - krate.module.items = new_items; - krate + }); } fn root_callsite_span(sp: Span) -> Span { diff --git a/c2rust-refactor/src/collapse/node_map.rs b/c2rust-refactor/src/collapse/node_map.rs index de43ed0fb..20c5cf511 100644 --- a/c2rust-refactor/src/collapse/node_map.rs +++ b/c2rust-refactor/src/collapse/node_map.rs @@ -1,5 +1,6 @@ //! 
`NodeMap` support for macro expansion/collapsing. use std::collections::HashMap; +use rustc_data_structures::sync::Lrc; use syntax::ast::*; use syntax::source_map::Span; use syntax::parse::token::{Token, Nonterminal}; @@ -59,25 +60,24 @@ fn nt_span(nt: &Nonterminal) -> Option { }) } -fn collect_nonterminals(ts: TokenStream, span_map: &mut HashMap) { +fn collect_nonterminals(ts: TokenStream, span_map: &mut HashMap>) { for tt in ts.into_trees() { match tt { - TokenTree::Token(_, Token::Interpolated(nt_tts)) => { - let nt = &nt_tts.0; - if let Some(span) = nt_span(nt) { + TokenTree::Token(_, Token::Interpolated(nt)) => { + if let Some(span) = nt_span(&nt) { span_map.insert(span, nt.clone()); } }, TokenTree::Token(..) => {}, - TokenTree::Delimited(_, d) => { - collect_nonterminals(d.tts.into(), span_map); + TokenTree::Delimited(_, _, tts) => { + collect_nonterminals(tts.into(), span_map); }, } } } struct NtUseVisitor<'a> { - nts: &'a HashMap, + nts: &'a HashMap>, matched_ids: Vec<(NodeId, NodeId)>, } @@ -86,7 +86,7 @@ macro_rules! 
define_nt_use_visitor { impl<'a, 'ast> Visitor<'ast> for NtUseVisitor<'a> { $( fn $visit_thing(&mut self, x: &'ast $Thing) { if let Some(nt) = self.nts.get(&x.span) { - match nt { + match **nt { Nonterminal::$NtThing(ref y) => { if AstEquiv::ast_equiv(x, y) { self.matched_ids.extend( diff --git a/c2rust-refactor/src/collapse/nt_match.rs b/c2rust-refactor/src/collapse/nt_match.rs index edee8ab5b..9efeccb97 100644 --- a/c2rust-refactor/src/collapse/nt_match.rs +++ b/c2rust-refactor/src/collapse/nt_match.rs @@ -4,7 +4,7 @@ use syntax::ThinVec; use syntax::ast::*; use syntax::source_map::{Span, Spanned, SyntaxContext}; use syntax::parse::token::{Token, DelimToken, Nonterminal}; -use syntax::tokenstream::{TokenTree, Delimited, DelimSpan, TokenStream, ThinTokenStream}; +use syntax::tokenstream::{TokenTree, DelimSpan, TokenStream}; use rustc_target::spec::abi::Abi; use std::rc::Rc; diff --git a/c2rust-refactor/src/command.rs b/c2rust-refactor/src/command.rs index f9ba8eb18..8b4ea81de 100644 --- a/c2rust-refactor/src/command.rs +++ b/c2rust-refactor/src/command.rs @@ -5,20 +5,28 @@ use std::collections::{HashMap, HashSet}; use std::iter; use std::mem; use std::sync::Arc; -use rustc::session::Session; -use syntax::ast::{NodeId, Crate, CRATE_NODE_ID, Mod}; +use rustc::hir; +use rustc::hir::def_id::LOCAL_CRATE; +use rustc::session::{self, DiagnosticOutput, Session}; +use rustc_data_structures::sync::Lrc; +use rustc_interface::util; +use rustc_interface::interface; +use rustc_metadata::cstore::CStore; +use syntax::ast::{NodeId, Crate, CRATE_NODE_ID}; use syntax::ast::{Expr, Pat, Ty, Stmt, Item}; -use syntax::source_map::DUMMY_SP; +use syntax::ext::base::NamedSyntaxExtension; +use syntax::feature_gate::AttributeType; +use syntax::source_map::SourceMap; use syntax::ptr::P; use syntax::symbol::Symbol; use syntax::visit::Visitor; -use crate::ast_manip::{ListNodeIds, remove_paren, Visit, Fold}; +use crate::ast_manip::{ListNodeIds, remove_paren, Visit, MutVisit}; use 
crate::ast_manip::ast_map::map_ast_into; use crate::ast_manip::number_nodes::{number_nodes, number_nodes_with, NodeIdCounter, reset_node_ids}; -use crate::collapse; -use crate::driver::{self, Phase, Phase1Bits}; -use crate::file_io::{FileIO, ArcFileIO}; +use crate::collapse::CollapseInfo; +use crate::driver::{self, Phase}; +use crate::file_io::FileIO; use crate::node_map::NodeMap; use crate::rewrite; use crate::rewrite::files; @@ -44,35 +52,28 @@ struct ParsedNodes { impl Visit for ParsedNodes { fn visit<'ast, V: Visitor<'ast>>(&'ast self, v: &mut V) { - self.exprs.iter().for_each(|x| x.visit(v)); - self.pats.iter().for_each(|x| x.visit(v)); - self.tys.iter().for_each(|x| x.visit(v)); + self.exprs.iter().for_each(|x| (&**x).visit(v)); + self.pats.iter().for_each(|x| (&**x).visit(v)); + self.tys.iter().for_each(|x| (&**x).visit(v)); self.stmts.iter().for_each(|x| x.visit(v)); - self.items.iter().for_each(|x| x.visit(v)); + self.items.iter().for_each(|x| (&**x).visit(v)); } } +#[derive(Default)] +struct PluginInfo { + _syntax_exts: Vec, + _attributes: Vec<(String, AttributeType)>, +} + /// Stores the overall state of the refactoring process, which can be read and updated by /// `Command`s. pub struct RefactorState { - file_io: Arc, + config: interface::Config, + compiler: interface::Compiler, cmd_reg: Registry, - session: Session, - - parsed_nodes: ParsedNodes, - /// Counter for assigning fresh `NodeId`s to newly parsed nodes (among others). - /// - /// It's important that this counter is preserved across `transform_crate` calls. Parsed - /// nodes' IDs stick around after the originating `transform_crate` ends: they remain in - /// `parsed_nodes`, and they can be referenced by `node_map` as "old" IDs. Preserving this - /// counter ensures that every parsed node has a distinct `NodeId`. - node_id_counter: NodeIdCounter, - - /// The current crate AST. This is used as the "new" AST when rewriting. 
This is always - /// "unexpanded" - meaning either actually unexpanded, or expanded and then subsequently - /// macro-collapsed. - krate: Crate, + file_io: Arc, /// The original crate AST. This is used as the "old" AST when rewriting. This is always a /// real unexpanded AST, as it was loaded from disk, with full user-provided source text. @@ -81,89 +82,83 @@ pub struct RefactorState { /// Mapping from `krate` IDs to `disk_krate` IDs node_map: NodeMap, - /// Current marks. The `NodeId`s here refer to nodes in `krate`. - marks: HashSet<(NodeId, Symbol)>, + /// Mutable state available to a driver command + cs: CommandState, } -fn dummy_crate() -> Crate { - Crate { - module: Mod { - inner: DUMMY_SP, - items: vec![], - inline: false, - }, - attrs: vec![], - span: DUMMY_SP, - } +fn parse_crate(compiler: &interface::Compiler) -> Crate { + let mut krate = compiler.parse().unwrap().take(); + remove_paren(&mut krate); + number_nodes(&mut krate); + krate } impl RefactorState { - pub fn new(session: Session, - cmd_reg: Registry, - file_io: Arc, - marks: HashSet<(NodeId, Symbol)>) -> RefactorState { - let mut session = session; - // Force disable incremental compilation. It causes panics with multiple typechecking. 
- session.opts.incremental = None; - + pub fn new( + config: interface::Config, + cmd_reg: Registry, + file_io: Arc, + marks: HashSet<(NodeId, Symbol)>, + ) -> RefactorState { + let compiler = driver::make_compiler(&config, file_io.clone()); + let krate = parse_crate(&compiler); + let orig_krate = krate.clone(); + let (node_map, cs) = Self::init(krate, Some(marks)); RefactorState { - file_io, + config, + compiler, cmd_reg, - session, - - parsed_nodes: ParsedNodes::default(), - node_id_counter: NodeIdCounter::new(0x8000_0000), + file_io, - krate: dummy_crate(), - orig_krate: dummy_crate(), + orig_krate, - node_map: NodeMap::new(), + node_map, - marks: marks, + cs, } } - pub fn from_rustc_args(rustc_args: &[String], - cmd_reg: Registry, - file_io: Arc, - marks: HashSet<(NodeId, Symbol)>) -> RefactorState { - let session = driver::build_session_from_args( - rustc_args, Some(Box::new(ArcFileIO(file_io.clone())))); - Self::new(session, cmd_reg, file_io, marks) - } + /// Initialization shared between new() and load_crate() + fn init(krate: Crate, marks: Option>) -> (NodeMap, CommandState) { + // (Re)initialize `node_map` and `marks`. + let mut node_map = NodeMap::new(); + node_map.init(krate.list_node_ids().into_iter()); + // Special case: CRATE_NODE_ID doesn't actually appear anywhere in the AST. + node_map.init(iter::once(CRATE_NODE_ID)); + let marks = marks.unwrap_or_else(|| HashSet::new()); + // The newly loaded `krate` and reinitialized `node_map` reference none of the old + // `parsed_nodes`. That means we can reset the ID counter without risk of ID collisions. 
+ let parsed_nodes = ParsedNodes::default(); + let node_id_counter = NodeIdCounter::new(0x8000_0000); - pub fn session(&self) -> &Session { - &self.session + let cs = CommandState::new( + krate, + marks, + parsed_nodes, + node_id_counter, + ); + + (node_map, cs) } + pub fn session(&self) -> &Session { + self.compiler.session() + } - fn load_crate_inner(&self) -> Crate { - let bits = Phase1Bits::from_session_reparse(&self.session); - bits.into_crate() + pub fn source_map(&self) -> &SourceMap { + self.compiler.source_map() } /// Load the crate from disk. This also resets a bunch of internal state, since we won't be /// rewriting with the previous `orig_crate` any more. pub fn load_crate(&mut self) { - // Discard any existing krate, overwriting it with one loaded from disk. - let krate = self.load_crate_inner(); - let krate = remove_paren(krate); - let krate = number_nodes(krate); + self.compiler = driver::make_compiler(&self.config, self.file_io.clone()); + let krate = parse_crate(&self.compiler); self.orig_krate = krate.clone(); - self.krate = krate; - - // Re-initialize `node_map` and `marks`. - self.node_map = NodeMap::new(); - self.node_map.init(self.krate.list_node_ids().into_iter()); - // Special case: CRATE_NODE_ID doesn't actually appear anywhere in the AST. - self.node_map.init(iter::once(CRATE_NODE_ID)); - self.marks = HashSet::new(); - - // The newly loaded `krate` and reinitialized `node_map` reference none of the old - // `parsed_nodes`. That means we can reset the ID counter without risk of ID collisions. - self.parsed_nodes = ParsedNodes::default(); - self.node_id_counter = NodeIdCounter::new(0x8000_0000); + let (node_map, cs) = Self::init(krate, None); + self.node_map = node_map; + self.cs = cs; } /// Save the crate to disk, by writing out the new source text produced by rewriting. @@ -173,117 +168,162 @@ impl RefactorState { /// matches the text on disk) as the basis for rewriting. 
pub fn save_crate(&mut self) { let old = &self.orig_krate; - let new = &self.krate; + let new = &self.cs.krate(); let node_id_map = self.node_map.clone().into_inner(); self.file_io.save_marks( - new, self.session.source_map(), &node_id_map, &self.marks).unwrap(); + new, self.session().source_map(), &node_id_map, &self.cs.marks()).unwrap(); - let rw = rewrite::rewrite(&self.session, old, new, node_id_map, |map| { - map_ast_into(&self.parsed_nodes, map); + let parsed_nodes = self.cs.parsed_nodes.borrow(); + let rw = rewrite::rewrite(self.session(), old, new, node_id_map, |map| { + map_ast_into(&*parsed_nodes, map); }); // Note that `rewrite_files_with` does not read any files from disk - it uses the // `SourceMap` to get files' original source text. - files::rewrite_files_with(self.session.source_map(), &rw, &*self.file_io).unwrap(); - } - - pub fn transform_crate(&mut self, phase: Phase, f: F) -> R - where F: FnOnce(&CommandState, &RefactorCtxt) -> R { - let krate = mem::replace(&mut self.krate, dummy_crate()); - let marks = mem::replace(&mut self.marks, HashSet::new()); - - let unexpanded = krate.clone(); - let krate = reset_node_ids(krate); - let bits = Phase1Bits::from_session_and_crate(&self.session, krate); - driver::run_compiler_from_phase1(bits, phase, |krate, cx| { - let krate = span_fix::fix_format(krate); - let expanded = krate.clone(); - - // Collect info + update node_map, then transfer and commit - let (mac_table, matched_ids) = - collapse::collect_macro_invocations(&unexpanded, &expanded); - self.node_map.add_edges(&matched_ids); - self.node_map.add_edges(&[(CRATE_NODE_ID, CRATE_NODE_ID)]); - let cfg_attr_info = collapse::collect_cfg_attrs(&unexpanded); - let deleted_info = collapse::collect_deleted_nodes( - &unexpanded, &self.node_map, &mac_table); - collapse::match_nonterminal_ids(&mut self.node_map, &mac_table); - - let marks = self.node_map.transfer_marks(&marks); - let cfg_attr_info = self.node_map.transfer_map(cfg_attr_info); - 
self.node_map.commit(); - - // Run the transform - let r: R; - let new_krate: Crate; - let new_marks: HashSet<(NodeId, Symbol)>; - { - let parsed_nodes = mem::replace(&mut self.parsed_nodes, - ParsedNodes::default()); - let node_id_counter = mem::replace(&mut self.node_id_counter, - NodeIdCounter::new(0)); - let cmd_state = CommandState::new(krate, marks, parsed_nodes, node_id_counter); - r = f(&cmd_state, &cx); - - new_krate = cmd_state.krate.into_inner(); - new_marks = cmd_state.marks.into_inner(); - self.parsed_nodes = cmd_state.parsed_nodes.into_inner(); - self.node_id_counter = cmd_state.counter; - - self.node_map.init(cmd_state.new_parsed_node_ids.into_inner().into_iter()); + files::rewrite_files_with(self.source_map(), &rw, &*self.file_io).unwrap(); + } + + pub fn transform_crate(&mut self, phase: Phase, f: F) -> interface::Result + where F: FnOnce(&CommandState, &RefactorCtxt) -> R + { + // let mut krate = mem::replace(&mut self.krate, dummy_crate()); + // let marks = mem::replace(&mut self.marks, HashSet::new()); + + let unexpanded = self.cs.krate().clone(); + + self.cs.reset(); + + self.rebuild_session(); + + // Immediately fix up the attr spans, since during expansion, any + // `derive` attrs will be removed. + span_fix::fix_attr_spans(self.cs.krate.get_mut()); + + // Replace current parse query results + let parse = self.compiler.parse()?; + let _ = parse.take(); + parse.give(self.cs.krate().clone()); + + match phase { + Phase::Phase1 => {} + + Phase::Phase2 | Phase::Phase3 => { + self.cs.krate.replace(self.compiler.expansion()?.peek().0.clone()); } + } - // Collapse macros + update node_map. 
The cfg_attr step requires the updated node_map - // TODO: we should be able to skip some of these steps if `!cmd_state.krate_changed()` - let new_krate = collapse::collapse_injected(new_krate); - let (new_krate, matched_ids) = collapse::collapse_macros(new_krate, &mac_table); - self.node_map.add_edges(&matched_ids); - self.node_map.add_edges(&[(CRATE_NODE_ID, CRATE_NODE_ID)]); + span_fix::fix_format(self.cs.krate.get_mut()); + let expanded = self.cs.krate().clone(); + let collapse_info = CollapseInfo::collect( + &unexpanded, + &expanded, + &mut self.node_map, + &self.cs, + ); + + // Run the transform + let r = match phase { + Phase::Phase1 => { + let cx = RefactorCtxt::new_phase_1(&self.compiler.session(), &self.compiler.cstore()); + + f(&self.cs, &cx) + } - let cfg_attr_info = self.node_map.transfer_map(cfg_attr_info); - let new_krate = collapse::restore_cfg_attrs(new_krate, cfg_attr_info); + Phase::Phase2 => { + let hir = self.compiler.lower_to_hir()?.take(); + let (ref hir_forest, ref expansion) = hir; + let hir_forest = hir_forest.borrow(); + let defs = expansion.defs.borrow(); + let map = hir::map::map_crate( + self.compiler.session(), + &*self.compiler.cstore().clone(), + &hir_forest, + &defs, + ); + + let cx = RefactorCtxt::new_phase_2(self.compiler.session(), self.compiler.cstore(), &map); + + f(&self.cs, &cx) + } - let new_krate = collapse::restore_deleted_nodes( - new_krate, &mut self.node_map, &mut self.node_id_counter, deleted_info); + Phase::Phase3 => { + let r = self.compiler.global_ctxt()?.take().enter(|tcx| { + let _result = tcx.analysis(LOCAL_CRATE); + let cx = RefactorCtxt::new_phase_3(self.compiler.session(), self.compiler.cstore(), tcx.hir(), tcx); - let new_marks = self.node_map.transfer_marks(&new_marks); - self.node_map.commit(); + f(&self.cs, &cx) + }); - // Write back new crate and marks - self.krate = new_krate; - self.marks = new_marks; + // Ensure that we've dropped any copies of the session Lrc + let _ = 
self.compiler.lower_to_hir()?.take(); + let _ = self.compiler.codegen_channel()?.take(); - r - }) + r + } + }; + + self.node_map.init(self.cs.new_parsed_node_ids.get_mut().drain(..)); + + collapse_info.collapse(&mut self.node_map, &self.cs); + + Ok(r) + } + + fn rebuild_session(&mut self) { + // Ensure we've dropped the resolver since it keeps a copy of the session Rc + if let Ok(expansion) = self.compiler.expansion() { + if let Ok(resolver) = Lrc::try_unwrap(expansion.take().1) { + resolver.map(|x| x.into_inner().complete()); + } else { + panic!("Could not drop resolver"); + } + } + + let compiler: &mut driver::Compiler = unsafe { mem::transmute(&mut self.compiler) }; + let old_session = &compiler.sess; + + let descriptions = util::diagnostics_registry(); + let mut new_sess = session::build_session_with_source_map( + old_session.opts.clone(), + old_session.local_crate_source_file.clone(), + descriptions, + self.compiler.source_map().clone(), + DiagnosticOutput::Default, + Default::default(), + ); + let new_codegen_backend = util::get_codegen_backend(&new_sess); + let new_cstore = CStore::new(new_codegen_backend.metadata_loader()); + + new_sess.parse_sess.config = old_session.parse_sess.config.clone(); + + *Lrc::get_mut(&mut compiler.sess).unwrap() = new_sess; + *Lrc::get_mut(&mut compiler.codegen_backend).unwrap() = new_codegen_backend; + *Lrc::get_mut(&mut compiler.cstore).unwrap() = new_cstore; } pub fn run_typeck_loop(&mut self, mut func: F) -> Result<(), &'static str> - where F: FnMut(Crate, &CommandState, &RefactorCtxt) -> TypeckLoopResult { + where F: FnMut(&mut Crate, &CommandState, &RefactorCtxt) -> TypeckLoopResult { let func = &mut func; let mut result = None; while result.is_none() { self.transform_crate(Phase::Phase3, |st, cx| { - st.map_krate(|krate| { - match func(krate, st, cx) { - TypeckLoopResult::Iterate(krate) => krate, - TypeckLoopResult::Err(e, krate) => { - result = Some(Err(e)); - krate - }, - TypeckLoopResult::Finished(krate) => { - result 
= Some(Ok(())); - krate - }, + match func(&mut st.krate_mut(), st, cx) { + TypeckLoopResult::Iterate => {} + TypeckLoopResult::Err(e) => { + result = Some(Err(e)); } - }); - }); + TypeckLoopResult::Finished => { + result = Some(Ok(())); + } + } + }).expect("Failed to run compiler"); } result.unwrap() } pub fn clear_marks(&mut self) { - self.marks.clear() + self.cs.marks.get_mut().clear() } @@ -298,41 +338,56 @@ impl RefactorState { } - pub fn marks(&self) -> &HashSet<(NodeId, Symbol)> { - &self.marks + pub fn marks(&self) -> cell::Ref> { + self.cs.marks.borrow() } - pub fn marks_mut(&mut self) -> &mut HashSet<(NodeId, Symbol)> { - &mut self.marks + pub fn marks_mut(&mut self) -> cell::RefMut> { + self.cs.marks.borrow_mut() } } pub enum TypeckLoopResult { - Iterate(Crate), - Err(&'static str, Crate), - Finished(Crate), + Iterate, + Err(&'static str), + Finished, } /// Mutable state that can be modified by a "driver" command. This is normally paired with a /// `RefactorCtxt`, which contains immutable analysis results from the original input `Crate`. pub struct CommandState { + parsed_nodes: RefCell, + /// Counter for assigning fresh `NodeId`s to newly parsed nodes (among others). + /// + /// It's important that this counter is preserved across `transform_crate` calls. Parsed + /// nodes' IDs stick around after the originating `transform_crate` ends: they remain in + /// `parsed_nodes`, and they can be referenced by `node_map` as "old" IDs. Preserving this + /// counter ensures that every parsed node has a distinct `NodeId`. + node_id_counter: NodeIdCounter, + + /// The current crate AST. This is used as the "new" AST when rewriting. + /// This always starts "unexpanded" - meaning either actually unexpanded, + /// or expanded and then subsequently macro-collapsed. krate: RefCell, + + /// Current marks. The `NodeId`s here refer to nodes in `krate`.
marks: RefCell>, - parsed_nodes: RefCell, + + // krate: RefCell, + // marks: RefCell>, + // parsed_nodes: RefCell, new_parsed_node_ids: RefCell>, krate_changed: Cell, marks_changed: Cell, - - counter: NodeIdCounter, } impl CommandState { fn new(krate: Crate, marks: HashSet<(NodeId, Symbol)>, parsed_nodes: ParsedNodes, - counter: NodeIdCounter) -> CommandState { + node_id_counter: NodeIdCounter) -> CommandState { CommandState { krate: RefCell::new(krate), marks: RefCell::new(marks), @@ -342,10 +397,19 @@ impl CommandState { krate_changed: Cell::new(false), marks_changed: Cell::new(false), - counter + node_id_counter } } + /// Reset the command state in preparation for a new transform iteration + fn reset(&mut self) { + reset_node_ids(self.krate.get_mut()); + + self.new_parsed_node_ids.get_mut().clear(); + self.krate_changed.set(false); + self.marks_changed.set(false); + } + pub fn krate(&self) -> cell::Ref { self.krate.borrow() @@ -356,19 +420,8 @@ impl CommandState { self.krate.borrow_mut() } - pub fn map_krate Crate>(&self, func: F) { - let dummy_crate = Crate { - module: Mod { - inner: DUMMY_SP, - items: Vec::new(), - inline: true, - }, - attrs: Vec::new(), - span: DUMMY_SP, - }; - let old_krate = mem::replace(&mut *self.krate_mut(), dummy_crate); - let new_krate = func(old_krate); - *self.krate_mut() = new_krate; + pub fn map_krate(&self, func: F) { + func(&mut self.krate_mut()); } pub fn krate_changed(&self) -> bool { @@ -401,10 +454,13 @@ impl CommandState { self.marks_changed.get() } + pub fn node_id_counter(&self) -> &NodeIdCounter { + &self.node_id_counter + } /// Generate a fresh NodeId. pub fn next_node_id(&self) -> NodeId { - self.counter.next() + self.node_id_counter.next() } /// Transfer marks on `old` to a fresh NodeId, and return that fresh NodeId. 
@@ -422,27 +478,25 @@ impl CommandState { } - fn process_parsed(&self, x: T) -> ::Result - where T: Fold, ::Result: ListNodeIds { - let x = number_nodes_with(x, &self.counter); + fn process_parsed(&self, x: &mut T) + where T: MutVisit + ListNodeIds { + number_nodes_with(x, &self.node_id_counter); self.new_parsed_node_ids.borrow_mut() .extend(x.list_node_ids()); - x } /// Parse an `Expr`, keeping the original `src` around for use during rewriting. pub fn parse_expr(&self, cx: &RefactorCtxt, src: &str) -> P { - let e = driver::parse_expr(cx.session(), src); - let e = self.process_parsed(e); + let mut e = driver::parse_expr(cx.session(), src); + self.process_parsed(&mut e); self.parsed_nodes.borrow_mut().exprs.push(e.clone()); e } pub fn parse_items(&self, cx: &RefactorCtxt, src: &str) -> Vec> { - let is = driver::parse_items(cx.session(), src); - let is: Vec> = is.into_iter() - .flat_map(|i| self.process_parsed(i)).collect(); - for i in &is { + let mut is = driver::parse_items(cx.session(), src); + for i in &mut is { + self.process_parsed(i); self.parsed_nodes.borrow_mut().items.push(i.clone()); } is @@ -465,7 +519,7 @@ pub trait Command { } /// A command builder is a function that takes some string arguments and produces a `Command`. -pub type Builder = FnMut(&[String]) -> Box; +pub type Builder = FnMut(&[String]) -> Box + Send; /// Tracks known refactoring command builders, and allows invoking them by name. 
pub struct Registry { @@ -480,7 +534,7 @@ impl Registry { } pub fn register(&mut self, name: &str, builder: B) - where B: FnMut(&[String]) -> Box + 'static { + where B: FnMut(&[String]) -> Box + 'static + Send { self.commands.insert(name.to_owned(), Box::new(builder)); } @@ -523,7 +577,8 @@ impl DriverCommand impl Command for DriverCommand where F: FnMut(&CommandState, &RefactorCtxt) { fn run(&mut self, state: &mut RefactorState) { - state.transform_crate(self.phase, |st, cx| (self.func)(st, cx)); + state.transform_crate(self.phase, |st, cx| (self.func)(st, cx)) + .expect("Failed to run compiler"); } } @@ -550,6 +605,10 @@ fn register_commit(reg: &mut Registry) { reg.register("write", |_args| Box::new(FuncCommand(|rs: &mut RefactorState| { rs.save_crate(); }))); + + reg.register("dump_crate", |_args| Box::new(FuncCommand(|rs: &mut RefactorState| { + eprintln!("{:#?}", rs.cs.krate()); + }))); } pub fn register_commands(reg: &mut Registry) { diff --git a/c2rust-refactor/src/context.rs b/c2rust-refactor/src/context.rs index 687392ff8..7d026495f 100644 --- a/c2rust-refactor/src/context.rs +++ b/c2rust-refactor/src/context.rs @@ -1,11 +1,10 @@ -use arena::SyncDroplessArena; use rustc::hir::def::Def; use rustc::hir::def_id::DefId; use rustc::hir::map as hir_map; use rustc::hir::{self, Node}; use rustc::session::Session; -use rustc::ty::subst::Substs; use rustc::ty::{FnSig, ParamEnv, PolyFnSig, Ty, TyCtxt, TyKind}; +use rustc::ty::subst::InternalSubsts; use rustc_metadata::cstore::CStore; use syntax::ast::{self, Expr, ExprKind, FnDecl, FunctionRetTy, Item, NodeId, Path, QSelf, DUMMY_NODE_ID}; use syntax::ptr::P; @@ -20,15 +19,10 @@ use crate::reflect; #[derive(Clone)] pub struct RefactorCtxt<'a, 'tcx: 'a> { sess: &'a Session, + cstore: &'a CStore, + map: Option<&'a hir_map::Map<'tcx>>, tcx: Option>, - - /// This is a reference to the same `DroplessArena` used in `tcx`. 
Analyses working with types - /// use this to allocate extra values with the same lifetime `'tcx` as the types themselves. - /// This way `Ty` wrappers don't need two lifetime parameters everywhere. - tcx_arena: Option<&'tcx SyncDroplessArena>, - - cstore: &'a CStore, } impl<'a, 'tcx: 'a> RefactorCtxt<'a, 'tcx> { @@ -37,9 +31,8 @@ impl<'a, 'tcx: 'a> RefactorCtxt<'a, 'tcx> { cstore: &'a CStore, map: Option<&'a hir_map::Map<'tcx>>, tcx: Option>, - tcx_arena: Option<&'tcx SyncDroplessArena>, ) -> Self { - Self {sess, cstore, map, tcx, tcx_arena} + Self {sess, cstore, map, tcx} } } @@ -62,11 +55,6 @@ impl<'a, 'tcx: 'a> RefactorCtxt<'a, 'tcx> { .expect("ty ctxt is not available in this context (requires phase 3)") } - pub fn ty_arena(&self) -> &'tcx SyncDroplessArena { - self.tcx_arena - .expect("ty ctxt is not available in this context (requires phase 3)") - } - pub fn has_ty_ctxt(&self) -> bool { self.tcx.is_some() } @@ -79,7 +67,7 @@ impl<'a, 'tcx: 'a> RefactorCtxt<'a, 'tcx> { let parent = self.hir_map().get_parent_did(id); let tables = self.ty_ctxt().typeck_tables_of(parent); let hir_id = self.hir_map().node_to_hir_id(id); - tables.node_id_to_type(hir_id) + tables.node_type(hir_id) } pub fn opt_node_type(&self, id: NodeId) -> Option> { @@ -90,7 +78,7 @@ impl<'a, 'tcx: 'a> RefactorCtxt<'a, 'tcx> { } let tables = self.ty_ctxt().typeck_tables_of(parent); let hir_id = self.hir_map().node_to_hir_id(id); - tables.node_id_to_type_opt(hir_id) + tables.node_type_opt(hir_id) } /// Get the `ty::Ty` computed for a node, taking into account any @@ -111,7 +99,7 @@ impl<'a, 'tcx: 'a> RefactorCtxt<'a, 'tcx> { if let Some(adj) = tables.adjustments().get(hir_id).and_then(|adjs| adjs.last()) { Some(adj.target) } else { - tables.node_id_to_type_opt(hir_id) + tables.node_type_opt(hir_id) } } @@ -133,13 +121,12 @@ impl<'a, 'tcx: 'a> RefactorCtxt<'a, 'tcx> { pub fn node_def_id(&self, id: NodeId) -> DefId { match self.hir_map().find(id) { Some(Node::Binding(_)) => 
self.node_def_id(self.hir_map().get_parent_node(id)), - Some(Node::Item(item)) => self.hir_map().local_def_id(item.id), + Some(Node::Item(item)) => self.hir_map().local_def_id_from_hir_id(item.hir_id), _ => self.hir_map().local_def_id(id), } } pub fn def_to_hir_id(&self, def: &hir::def::Def) -> Option { - use rustc::hir::def::Def; match def { Def::Mod(did) | Def::Struct(did) | @@ -155,9 +142,9 @@ impl<'a, 'tcx: 'a> RefactorCtxt<'a, 'tcx> { Def::TyParam(did) | Def::Fn(did) | Def::Const(did) | + Def::ConstParam(did) | Def::Static(did, _) | - Def::StructCtor(did, _) | - Def::VariantCtor(did, _) | + Def::Ctor(did, ..) | Def::SelfCtor(did) | Def::Method(did) | Def::AssociatedConst(did) | @@ -549,5 +536,5 @@ pub struct CalleeInfo<'tcx> { pub def_id: Option, /// The type and region arguments that were substituted in at the call site. - pub substs: Option<&'tcx Substs<'tcx>>, + pub substs: Option<&'tcx InternalSubsts<'tcx>>, } diff --git a/c2rust-refactor/src/driver.rs b/c2rust-refactor/src/driver.rs index 54953d413..bbacc0be2 100644 --- a/c2rust-refactor/src/driver.rs +++ b/c2rust-refactor/src/driver.rs @@ -1,26 +1,41 @@ //! Frontend logic for parsing and expanding ASTs. This code largely mimics the behavior of //! `rustc_driver::driver::compile_input`. 
-use std::mem::{self, ManuallyDrop}; +use std::any::Any; +use std::cell::RefCell; +use std::collections::HashSet; +use std::mem; use std::path::{Path, PathBuf}; use std::rc::Rc; +use std::sync::Arc; +use std::sync::mpsc; +use rustc::dep_graph::DepGraph; use rustc::hir::map as hir_map; -use rustc::ty::{TyCtxt, AllArenas}; -use rustc::ty::query::Providers; -use rustc::session::{self, Session}; -use rustc::session::config::{Input, Options}; +use rustc::util::common::ErrorReported; +use rustc::ty::{self, GlobalCtxt, Resolutions, TyCtxt}; +use rustc::ty::steal::Steal; +use rustc_data_structures::declare_box_region_type; +use rustc_data_structures::sync::{Lock, Lrc}; +use rustc_incremental::DepGraphFuture; +use rustc_interface::{util, Config}; +use rustc_interface::interface::BoxedResolver; +use rustc::session::{self, DiagnosticOutput, Session}; +use rustc::session::config::{Input, OutputFilenames}; +use rustc::session::config::Options as SessionOptions; use rustc_driver; -use rustc_driver::driver::{self, build_output_filenames, CompileController}; use rustc_errors::DiagnosticBuilder; +use rustc_interface::util::get_codegen_backend; use rustc_metadata::cstore::CStore; -use rustc_resolve::MakeGlobMap; -use rustc_codegen_utils::link; use rustc_codegen_utils::codegen_backend::CodegenBackend; +use rustc_interface::interface; +use syntax::ast; use syntax::ast::{ - Crate, Expr, Pat, Ty, Stmt, Item, ImplItem, ForeignItem, ItemKind, Block, Arg, BlockCheckMode, - UnsafeSource, + Expr, Pat, Ty, Stmt, Item, ImplItem, ForeignItem, ItemKind, Block, Arg, BlockCheckMode, + UnsafeSource, NodeId }; use syntax::ast::DUMMY_NODE_ID; +use syntax::ext::base::NamedSyntaxExtension; +use syntax::feature_gate::AttributeType; use syntax::source_map::SourceMap; use syntax::source_map::{FileLoader, RealFileLoader}; use syntax::ext::hygiene::SyntaxContext; @@ -28,14 +43,16 @@ use syntax::parse::{self, PResult}; use syntax::parse::token::Token; use syntax::parse::parser::Parser; use syntax::ptr::P; 
-use syntax::symbol::keywords; +use syntax::symbol::{keywords, Symbol}; use syntax::tokenstream::TokenTree; use syntax_pos::FileName; use syntax_pos::Span; -use arena::SyncDroplessArena; use crate::ast_manip::remove_paren; -use crate::span_fix; +use crate::command::{Registry, RefactorState}; +use crate::file_io::{ArcFileIO, FileIO}; +// TODO: don't forget to call span_fix after parsing +// use crate::span_fix; use crate::util::Lone; use crate::RefactorCtxt; @@ -53,139 +70,139 @@ pub enum Phase { } impl<'a, 'tcx: 'a> RefactorCtxt<'a, 'tcx> { - fn new_phase_1(sess: &'a Session, cstore: &'a CStore) -> RefactorCtxt<'a, 'tcx> { - RefactorCtxt::new(sess, cstore, None, None, None) + pub fn new_phase_1(sess: &'a Session, cstore: &'a CStore) -> RefactorCtxt<'a, 'tcx> { + RefactorCtxt::new(sess, cstore, None, None) } - fn new_phase_2(sess: &'a Session, - cstore: &'a CStore, - map: &'a hir_map::Map<'tcx>) -> RefactorCtxt<'a, 'tcx> { - RefactorCtxt::new(sess, cstore, Some(map), None, None) + pub fn new_phase_2(sess: &'a Session, + cstore: &'a CStore, + map: &'a hir_map::Map<'tcx>) -> RefactorCtxt<'a, 'tcx> { + RefactorCtxt::new(sess, cstore, Some(map), None) } - fn new_phase_3(sess: &'a Session, - cstore: &'a CStore, - map: &'a hir_map::Map<'tcx>, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - tcx_arena: &'tcx SyncDroplessArena) -> RefactorCtxt<'a, 'tcx> { - RefactorCtxt::new(sess, cstore, Some(map), Some(tcx), Some(tcx_arena)) + pub fn new_phase_3(sess: &'a Session, + cstore: &'a CStore, + map: &'a hir_map::Map<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + ) -> RefactorCtxt<'a, 'tcx> { + RefactorCtxt::new(sess, cstore, Some(map), Some(tcx)) } } -/// Various driver bits that we have lying around at the end of `phase_1_parse_input`. This is -/// everything we need to (re-)run the compiler from phase 1 onward. 
-pub struct Phase1Bits { - session: Session, - cstore: CStore, - codegen_backend: Box, - input: Input, - output: Option, - out_dir: Option, - control: CompileController<'static>, - krate: Crate, -} - -impl Phase1Bits { - /// Set up the compiler again, using a previously-constructed `Session`. - /// - /// A `Crate` is mostly self-contained, but its `Span`s are really indexes into external - /// tables. So if you actually plan to run the compiler after calling `reset()`, the new - /// `krate` passed here should satisfy a few properties: - /// - /// 1. The crate must have been parsed under the same `SourceMap` used by `session`. Spans' - /// `hi` and `lo` byte positions are indices into the `SourceMap` used for parsing, so - /// transferring those spans to a different `SourceMap` produces nonsensical results. - /// - /// 2. The crate must not contain any paths starting with `$crate` from a non-empty - /// `SyntaxCtxt`. These types of paths appear during macro expansion, and can only be - /// resolved using tables populated by the macro expander. - /// - /// 3. All `NodeId`s in the crate must be DUMMY_NODE_ID. - /// - /// 4. The crate must not contain automatically-injected `extern crate` declarations. The - /// compilation process will inject new copies of these, and then fail due to the name - /// collision. - /// - /// A crate that has only been compiled to `Phase1` already satisfies points 2-4. If you want - /// to re-compile a crate from `Phase2` or later, use `recheck::prepare_recheck` to fix things - /// up first. 
- pub fn from_session_and_crate(old_session: &Session, krate: Crate) -> Phase1Bits { - let (session, cstore, codegen_backend) = rebuild_session(old_session); - - let in_path = old_session.local_crate_source_file.clone(); - let input = Input::File(in_path.unwrap()); - - let mut control = CompileController::basic(); - control.provide = Box::new(move |providers| { - use rustc::hir::def_id::CrateNum; - use rustc::middle::privacy::AccessLevels; - use rustc_data_structures::sync::Lrc; - use rustc_privacy; - - fn privacy_access_levels<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, - krate: CrateNum) -> Lrc { - // Get and call the original implementation, resetting the error count before - // returning so that `abort_if_errors` won't abort. - // NOTE: It's possible in theory for the codegen_backend to override the - // implementation, since `codegen_backend.provide` runs after `default_provide`. - // We wouldn't handle that since we call only `rustc_privacy::provide` here. - // Privacy checking would be a weird thing for a backend to override, though. - let mut p = Providers::default(); - rustc_privacy::provide(&mut p); - let r = (p.privacy_access_levels)(tcx, krate); - tcx.sess.diagnostic().reset_err_count(); - r - } - providers.privacy_access_levels = privacy_access_levels; - - // TODO: provide error-resetting versions of other "query + `abort_if_errors`" passes - // in `phase_3_run_analysis_passes`. - }); - - Phase1Bits { - session, cstore, codegen_backend, - - input, - output: None, - out_dir: None, - - control, krate, - } - } - - /// Set up the compiler using a previously-created session, repeating phase 1 (input parsing). 
- pub fn from_session_reparse(old_session: &Session) -> Phase1Bits { - let (session, cstore, codegen_backend) = rebuild_session(old_session); - - let in_path = old_session.local_crate_source_file.clone(); - let input = Input::File(in_path.unwrap()); - - let control = CompileController::basic(); - - // Start of `compile_input` code - let krate = driver::phase_1_parse_input(&control, &session, &input).unwrap(); - - Phase1Bits { - session, cstore, codegen_backend, - - input, - output: None, - out_dir: None, - - control, krate, - } - } - - pub fn into_crate(self) -> Crate { - self.krate - } -} +// /// Various driver bits that we have lying around at the end of `phase_1_parse_input`. This is +// /// everything we need to (re-)run the compiler from phase 1 onward. +// pub struct Phase1Bits { +// session: Session, +// cstore: CStore, +// codegen_backend: Box, +// input: Input, +// output: Option, +// out_dir: Option, +// control: CompileController<'static>, +// krate: Crate, +// } + +// impl Phase1Bits { +// /// Set up the compiler again, using a previously-constructed `Session`. +// /// +// /// A `Crate` is mostly self-contained, but its `Span`s are really indexes into external +// /// tables. So if you actually plan to run the compiler after calling `reset()`, the new +// /// `krate` passed here should satisfy a few properties: +// /// +// /// 1. The crate must have been parsed under the same `SourceMap` used by `session`. Spans' +// /// `hi` and `lo` byte positions are indices into the `SourceMap` used for parsing, so +// /// transferring those spans to a different `SourceMap` produces nonsensical results. +// /// +// /// 2. The crate must not contain any paths starting with `$crate` from a non-empty +// /// `SyntaxCtxt`. These types of paths appear during macro expansion, and can only be +// /// resolved using tables populated by the macro expander. +// /// +// /// 3. All `NodeId`s in the crate must be DUMMY_NODE_ID. +// /// +// /// 4. 
The crate must not contain automatically-injected `extern crate` declarations. The +// /// compilation process will inject new copies of these, and then fail due to the name +// /// collision. +// /// +// /// A crate that has only been compiled to `Phase1` already satisfies points 2-4. If you want +// /// to re-compile a crate from `Phase2` or later, use `recheck::prepare_recheck` to fix things +// /// up first. +// pub fn from_session_and_crate(old_session: &Session, krate: Crate) -> Phase1Bits { +// let (session, cstore, codegen_backend) = rebuild_session(old_session); + +// let in_path = old_session.local_crate_source_file.clone(); +// let input = Input::File(in_path.unwrap()); + +// let mut control = CompileController::basic(); +// control.provide = Box::new(move |providers| { +// use rustc::hir::def_id::CrateNum; +// use rustc::middle::privacy::AccessLevels; +// use rustc_data_structures::sync::Lrc; +// use rustc_privacy; + +// fn privacy_access_levels<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, +// krate: CrateNum) -> Lrc { +// // Get and call the original implementation, resetting the error count before +// // returning so that `abort_if_errors` won't abort. +// // NOTE: It's possible in theory for the codegen_backend to override the +// // implementation, since `codegen_backend.provide` runs after `default_provide`. +// // We wouldn't handle that since we call only `rustc_privacy::provide` here. +// // Privacy checking would be a weird thing for a backend to override, though. +// let mut p = Providers::default(); +// rustc_privacy::provide(&mut p); +// let r = (p.privacy_access_levels)(tcx, krate); +// tcx.sess.diagnostic().reset_err_count(); +// r +// } +// providers.privacy_access_levels = privacy_access_levels; + +// // TODO: provide error-resetting versions of other "query + `abort_if_errors`" passes +// // in `phase_3_run_analysis_passes`. 
+// }); + +// Phase1Bits { +// session, cstore, codegen_backend, + +// input, +// output: None, +// out_dir: None, + +// control, krate, +// } +// } + +// /// Set up the compiler using a previously-created session, repeating phase 1 (input parsing). +// pub fn from_session_reparse(old_session: &Session) -> Phase1Bits { +// let (session, cstore, codegen_backend) = rebuild_session(old_session); + +// let in_path = old_session.local_crate_source_file.clone(); +// let input = Input::File(in_path.unwrap()); + +// let control = CompileController::basic(); + +// // Start of `compile_input` code +// let krate = driver::phase_1_parse_input(&control, &session, &input).unwrap(); + +// Phase1Bits { +// session, cstore, codegen_backend, + +// input, +// output: None, +// out_dir: None, + +// control, krate, +// } +// } + +// pub fn into_crate(self) -> Crate { +// self.krate +// } +// } /// Sysroot adjustment: if the sysroot is unset, and args[0] is an absolute path, use args[0] to /// infer a sysroot. Rustc's own sysroot detection (filesearch::get_or_default_sysroot) uses /// env::current_exe, which will point to c2rust-refactor, not rustc. 
-fn maybe_set_sysroot(mut sopts: Options, args: &[String]) -> Options { +fn maybe_set_sysroot(mut sopts: SessionOptions, args: &[String]) -> SessionOptions { if sopts.maybe_sysroot.is_none() && args.len() > 0 { let p = Path::new(&args[0]); if p.is_absolute() { @@ -197,114 +214,251 @@ fn maybe_set_sysroot(mut sopts: Options, args: &[String]) -> Options { sopts } -pub fn run_compiler_to_phase1(args: &[String], - file_loader: Option>) -> Phase1Bits { +pub fn clone_config(config: &interface::Config) -> interface::Config { + let input = match &config.input { + Input::File(f) => Input::File(f.clone()), + Input::Str { name, input } => Input::Str { + name: name.clone(), + input: input.clone(), + }, + }; + interface::Config { + opts: config.opts.clone(), + crate_cfg: config.crate_cfg.clone(), + input, + input_path: config.input_path.clone(), + output_file: config.output_file.clone(), + output_dir: config.output_dir.clone(), + file_loader: None, + diagnostic_output: DiagnosticOutput::Default, + stderr: config.stderr.clone(), + crate_name: config.crate_name.clone(), + lint_caps: config.lint_caps.clone(), + } +} + +pub fn create_config(args: &[String]) -> interface::Config { let matches = rustc_driver::handle_options(args) .expect("rustc arg parsing failed"); - let (sopts, _cfg) = session::config::build_session_options_and_crate_config(&matches); + let (sopts, cfg) = session::config::build_session_options_and_crate_config(&matches); let sopts = maybe_set_sysroot(sopts, args); - let out_dir = matches.opt_str("out-dir").map(|o| PathBuf::from(&o)); - let output = matches.opt_str("o").map(|o| PathBuf::from(&o)); + let output_dir = matches.opt_str("out-dir").map(|o| PathBuf::from(&o)); + let output_file = matches.opt_str("o").map(|o| PathBuf::from(&o)); assert!(matches.free.len() == 1, - "expected exactly one input file"); - let in_path = Some(Path::new(&matches.free[0]).to_owned()); - let input = Input::File(in_path.as_ref().unwrap().clone()); + "expected exactly one input file"); 
+ let input_path = Some(Path::new(&matches.free[0]).to_owned()); + let input = Input::File(input_path.as_ref().unwrap().clone()); + + interface::Config { + opts: sopts, + crate_cfg: cfg, + input, + input_path, + output_file, + output_dir, + file_loader: None, + diagnostic_output: DiagnosticOutput::Default, + stderr: None, + crate_name: None, + lint_caps: Default::default(), + } +} + +pub fn run_compiler(mut config: interface::Config, file_loader: Option>, f: F) -> R + where F: FnOnce(&interface::Compiler) -> R, + R: Send, +{ + // Force disable incremental compilation. It causes panics with multiple typechecking. + config.opts.incremental = None; + config.file_loader = file_loader; + + syntax::with_globals(move || { + ty::tls::GCX_PTR.set(&Lock::new(0), || { + ty::tls::with_thread_locals(|| { + interface::run_compiler_in_existing_thread_pool(config, f) + }) + }) + }) +} - let (session, cstore, codegen_backend) = build_session(sopts, in_path, file_loader); +pub fn run_refactoring( + mut config: interface::Config, + cmd_reg: Registry, + file_io: Arc, + marks: HashSet<(NodeId, Symbol)>, + f: F, +) -> R + where F: FnOnce(RefactorState) -> R, + R: Send, +{ + // Force disable incremental compilation. It causes panics with multiple typechecking. + config.opts.incremental = None; + + syntax::with_globals(move || { + ty::tls::GCX_PTR.set(&Lock::new(0), || { + ty::tls::with_thread_locals(|| { + let state = RefactorState::new(config, cmd_reg, file_io, marks); + f(state) + }) + }) + }) +} - // It might seem tempting to set up a custom CompileController and invoke `compile_input` here, - // in order to avoid duplicating a bunch of `compile_input`'s logic. Unfortunately, that - // doesn't work well with the current API. The `CompileState`s provided to the PhaseController - // callbacks only contain the data relevant to th ecurrent phase - for example, in the - // after_analysis callback, `tcx` is available but `krate`, `arena`, and `hir_map` are not. 
- // Furthermore, the callback type is such that the `CompileState`s for separate callbacks have - // unrelated lifetimes, so we can't (safely) collect up the relevant pieces ourselves from - // multiple callback invocations. +#[allow(dead_code)] +pub struct Compiler { + pub sess: Lrc, + pub codegen_backend: Lrc>, + source_map: Lrc, + input: Input, + input_path: Option, + output_dir: Option, + output_file: Option, + queries: Queries, + pub cstore: Lrc, + crate_name: Option, +} - let control = CompileController::basic(); +#[allow(dead_code)] +#[derive(Default)] +struct Queries { + dep_graph_future: Query>, + parse: Query, + crate_name: Query, + register_plugins: Query<(ast::Crate, PluginInfo)>, + expansion: Query<(ast::Crate, Rc>>)>, + dep_graph: Query, + lower_to_hir: Query<(Steal, ExpansionResult)>, + prepare_outputs: Query, + codegen_channel: Query<(Steal>>, + Steal>>)>, + global_ctxt: Query, + ongoing_codegen: Query>, + link: Query<()>, +} - // Start of `compile_input` code - let krate = driver::phase_1_parse_input(&control, &session, &input).unwrap(); +#[allow(dead_code)] +struct Query { + result: RefCell>>, +} - Phase1Bits { - session, cstore, codegen_backend, - input, output, out_dir, - control, krate, +impl Default for Query { + fn default() -> Self { + Query { + result: RefCell::new(None), + } } } -pub fn run_compiler_from_phase1(bits: Phase1Bits, - phase: Phase, - func: F) -> R - where F: FnOnce(Crate, RefactorCtxt) -> R { - let Phase1Bits { - session, cstore, codegen_backend, input, output, out_dir, control, krate, - } = bits; +#[allow(dead_code)] +struct PluginInfo { + syntax_exts: Vec, + attributes: Vec<(String, AttributeType)>, +} - // Immediately fix up the attr spans, since during expansion, any `derive` attrs will be - // removed. 
- let krate = span_fix::fix_attr_spans(krate); +struct ExpansionResult { + pub defs: Steal, + pub resolutions: Steal, +} - if phase == Phase::Phase1 { - let cx = RefactorCtxt::new_phase_1(&session, &cstore); - return func(krate, cx); - } +declare_box_region_type!( + pub BoxedGlobalCtxt, + for('gcx), + (&'gcx GlobalCtxt<'gcx>) -> ((), ()) +); + + +pub fn make_compiler(config: &Config, file_io: Arc) -> interface::Compiler { + let mut config = clone_config(config); + config.file_loader = Some(Box::new(ArcFileIO(file_io))); + let (sess, codegen_backend, source_map) = util::create_session( + config.opts, + config.crate_cfg, + config.diagnostic_output, + config.file_loader, + config.input_path.clone(), + config.lint_caps, + ); - let outputs = build_output_filenames(&input, &out_dir, &output, &krate.attrs, &session); - let crate_name = link::find_crate_name(Some(&session), &krate.attrs, &input); - let mut expand_result = driver::phase_2_configure_and_expand( - &session, - &cstore, - krate, - /*registry*/ None, - &crate_name, - /*addl_plugins*/ None, - MakeGlobMap::No, - |_| Ok(()) - ).unwrap_or_else(|e| panic!("Error running compiler phase 2: {:?}", e)); - let krate = expand_result.expanded_crate; - - let arenas = AllArenas::new(); - - let hir_map = hir_map::map_crate(&session, &cstore, &mut expand_result.hir_forest, &expand_result.defs); - - if phase == Phase::Phase2 { - let cx = RefactorCtxt::new_phase_2(&session, &cstore, &hir_map); - return func(krate, cx); - } + // Put a dummy file at the beginning of the source_map, so that no real `Span` will accidentally + // collide with `DUMMY_SP` (which is `0 .. 0`). 
+ source_map.new_source_file(FileName::Custom("".to_string()), " ".to_string()); - let mut result = None; - let _ = driver::phase_3_run_analysis_passes( - &*codegen_backend, - &control, - &session, &cstore, hir_map, expand_result.analysis, expand_result.resolutions, - &arenas, &crate_name, &outputs, - |tcx, _analysis, _incremental_hashes_map, _result| { - if phase == Phase::Phase3 { - let cx = RefactorCtxt::new_phase_3(&session, &cstore, &tcx.hir, tcx, &arenas.interner); - result = Some(func(krate, cx)); - return; - } - unreachable!(); - }); - result.unwrap() + let cstore = Lrc::new(CStore::new(codegen_backend.metadata_loader())); + + let compiler = Compiler { + sess, + codegen_backend, + source_map, + cstore, + input: config.input, + input_path: config.input_path, + output_dir: config.output_dir, + output_file: config.output_file, + queries: Default::default(), + crate_name: config.crate_name, + }; + + let compiler = unsafe { mem::transmute(compiler) }; + compiler } + + + + +// pub fn run_compiler_to_phase1(args: &[String], +// file_loader: Option>) -> Phase1Bits { +// let matches = rustc_driver::handle_options(args) +// .expect("rustc arg parsing failed"); +// let (sopts, _cfg) = session::config::build_session_options_and_crate_config(&matches); +// let sopts = maybe_set_sysroot(sopts, args); +// let out_dir = matches.opt_str("out-dir").map(|o| PathBuf::from(&o)); +// let output = matches.opt_str("o").map(|o| PathBuf::from(&o)); + +// assert!(matches.free.len() == 1, +// "expected exactly one input file"); +// let in_path = Some(Path::new(&matches.free[0]).to_owned()); +// let input = Input::File(in_path.as_ref().unwrap().clone()); + +// let (session, cstore, codegen_backend) = build_session(sopts, in_path, file_loader); + +// // It might seem tempting to set up a custom CompileController and invoke `compile_input` here, +// // in order to avoid duplicating a bunch of `compile_input`'s logic. Unfortunately, that +// // doesn't work well with the current API. 
The `CompileState`s provided to the PhaseController +// // callbacks only contain the data relevant to the current phase - for example, in the +// // after_analysis callback, `tcx` is available but `krate`, `arena`, and `hir_map` are not. +// // Furthermore, the callback type is such that the `CompileState`s for separate callbacks have +// // unrelated lifetimes, so we can't (safely) collect up the relevant pieces ourselves from +// // multiple callback invocations. + +// let control = CompileController::basic(); + +// // Start of `compile_input` code +// let krate = driver::phase_1_parse_input(&control, &session, &input).unwrap(); + +// Phase1Bits { +// session, cstore, codegen_backend, +// input, output, out_dir, +// control, krate, +// } +// } + + /// Run the compiler with some command line `args`. Stops compiling and invokes the callback /// `func` after the indicated `phase`. /// /// `file_loader` can be `None` to read source code from the file system. Otherwise, the provided /// loader will be used within the compiler. For example, editor integration uses a custom file /// loader to provide the compiler with buffer contents for currently open files. 
-pub fn run_compiler(args: &[String], - file_loader: Option>, - phase: Phase, - func: F) -> R - where F: FnOnce(Crate, RefactorCtxt) -> R { - let bits = run_compiler_to_phase1(args, file_loader); - run_compiler_from_phase1(bits, phase, func) -} +// pub fn run_compiler(args: &[String], +// file_loader: Option>, +// phase: Phase, +// func: F) -> R +// where F: FnOnce(Crate, RefactorCtxt) -> R { +// let bits = run_compiler_to_phase1(args, file_loader); +// run_compiler_from_phase1(bits, phase, func) +// } pub fn build_session_from_args(args: &[String], file_loader: Option>) -> Session { @@ -321,11 +475,11 @@ pub fn build_session_from_args(args: &[String], session } -fn build_session(sopts: Options, +fn build_session(sopts: SessionOptions, in_path: Option, file_loader: Option>) -> (Session, CStore, Box) { // Corresponds roughly to `run_compiler`. - let descriptions = rustc_driver::diagnostics_registry(); + let descriptions = rustc_interface::util::diagnostics_registry(); let file_loader = file_loader.unwrap_or_else(|| Box::new(RealFileLoader)); // Note: `source_map` is expected to be an `Lrc`, which is an alias for `Rc`. // If this ever changes, we'll need a new trick to obtain the `SourceMap` in `rebuild_session`. @@ -334,51 +488,19 @@ fn build_session(sopts: Options, // collide with `DUMMY_SP` (which is `0 .. 0`). source_map.new_source_file(FileName::Custom("".to_string()), " ".to_string()); - let emitter_dest = None; - let sess = session::build_session_with_source_map( - sopts, in_path, descriptions, source_map, emitter_dest + sopts, in_path, descriptions, source_map, DiagnosticOutput::Default, Default::default(), ); - let codegen_backend = rustc_driver::get_codegen_backend(&sess); + let codegen_backend = get_codegen_backend(&sess); let cstore = CStore::new(codegen_backend.metadata_loader()); (sess, cstore, codegen_backend) } -/// Build a new session from an existing one. This uses the same `SourceMap`, so spans will be -/// compatible across both sessions. 
-fn rebuild_session(old_session: &Session) -> (Session, CStore, Box) { - let descriptions = rustc_driver::diagnostics_registry(); - - // We happen to know that the `&SourceMap` we get from `old_session.source_map()` is inside an `Rc` - // pointer, so we can clone that `Rc` with a little unsafe code. - let source_map = unsafe { - let temp_rc = ManuallyDrop::new(Rc::from_raw(old_session.source_map())); - let source_map = (*temp_rc).clone(); - mem::forget(temp_rc); - source_map - }; - - let emitter_dest = None; - - let session = session::build_session_with_source_map( - old_session.opts.clone(), - old_session.local_crate_source_file.clone(), - descriptions, - source_map, - emitter_dest, - ); - - let codegen_backend = rustc_driver::get_codegen_backend(&session); - let cstore = CStore::new(codegen_backend.metadata_loader()); - - (session, cstore, codegen_backend) -} - -fn make_parser<'a>(sess: &'a Session, name: &str, src: &str) -> Parser<'a> { +fn make_parser<'a>(sess: &'a Session, src: &str) -> Parser<'a> { parse::new_parser_from_source_str(&sess.parse_sess, - FileName::Real(PathBuf::from(name)), + FileName::anon_source_code(src), src.to_owned()) } @@ -389,25 +511,34 @@ pub fn emit_and_panic(mut db: DiagnosticBuilder, what: &str) -> ! { // Helper functions for parsing source code in an existing `Session`. 
pub fn parse_expr(sess: &Session, src: &str) -> P { - let mut p = make_parser(sess, "", src); + let mut p = make_parser(sess, src); match p.parse_expr() { - Ok(expr) => remove_paren(expr), + Ok(mut expr) => { + remove_paren(&mut expr); + expr + } Err(db) => emit_and_panic(db, "expr"), } } pub fn parse_pat(sess: &Session, src: &str) -> P { - let mut p = make_parser(sess, "", src); + let mut p = make_parser(sess, src); match p.parse_pat(None) { - Ok(pat) => remove_paren(pat), + Ok(mut pat) => { + remove_paren(&mut pat); + pat + } Err(db) => emit_and_panic(db, "pat"), } } pub fn parse_ty(sess: &Session, src: &str) -> P { - let mut p = make_parser(sess, "", src); + let mut p = make_parser(sess, src); match p.parse_ty() { - Ok(ty) => remove_paren(ty), + Ok(mut ty) => { + remove_paren(&mut ty); + ty + } Err(db) => emit_and_panic(db, "ty"), } } @@ -415,19 +546,25 @@ pub fn parse_ty(sess: &Session, src: &str) -> P { pub fn parse_stmts(sess: &Session, src: &str) -> Vec { // TODO: rustc no longer exposes `parse_full_stmt`. `parse_block` is a hacky // workaround that may cause suboptimal error messages. 
- let mut p = make_parser(sess, "", &format!("{{ {} }}", src)); + let mut p = make_parser(sess, &format!("{{ {} }}", src)); match p.parse_block() { - Ok(blk) => blk.into_inner().stmts.into_iter().map(|s| remove_paren(s).lone()).collect(), + Ok(blk) => blk.into_inner().stmts.into_iter().map(|mut s| { + remove_paren(&mut s); + s.lone() + }).collect(), Err(db) => emit_and_panic(db, "stmts"), } } pub fn parse_items(sess: &Session, src: &str) -> Vec> { - let mut p = make_parser(sess, "", src); + let mut p = make_parser(sess, src); let mut items = Vec::new(); loop { match p.parse_item() { - Ok(Some(item)) => items.push(remove_paren(item).lone()), + Ok(Some(mut item)) => { + remove_paren(&mut item); + items.push(item.lone()); + } Ok(None) => break, Err(db) => emit_and_panic(db, "items"), } @@ -438,7 +575,7 @@ pub fn parse_items(sess: &Session, src: &str) -> Vec> { pub fn parse_impl_items(sess: &Session, src: &str) -> Vec { // TODO: rustc no longer exposes `parse_impl_item_`. `parse_item` is a hacky // workaround that may cause suboptimal error messages. - let mut p = make_parser(sess, "", &format!("impl ! {{ {} }}", src)); + let mut p = make_parser(sess, &format!("impl ! {{ {} }}", src)); match p.parse_item() { Ok(item) => { match item.expect("expected to find an item").into_inner().node { @@ -453,7 +590,7 @@ pub fn parse_impl_items(sess: &Session, src: &str) -> Vec { pub fn parse_foreign_items(sess: &Session, src: &str) -> Vec { // TODO: rustc no longer exposes a method for parsing ForeignItems. `parse_item` is a hacky // workaround that may cause suboptimal error messages. 
- let mut p = make_parser(sess, "", &format!("extern {{ {} }}", src)); + let mut p = make_parser(sess, &format!("extern {{ {} }}", src)); match p.parse_item() { Ok(item) => { match item.expect("expected to find an item").into_inner().node { @@ -466,7 +603,7 @@ pub fn parse_foreign_items(sess: &Session, src: &str) -> Vec { } pub fn parse_block(sess: &Session, src: &str) -> P { - let mut p = make_parser(sess, "", src); + let mut p = make_parser(sess, src); let rules = if p.eat_keyword(keywords::Unsafe) { BlockCheckMode::Unsafe(UnsafeSource::UserProvided) @@ -475,9 +612,10 @@ pub fn parse_block(sess: &Session, src: &str) -> P { }; match p.parse_block() { - Ok(block) => { - let block = remove_paren(block); - block.map(|b| Block { rules, ..b }) + Ok(mut block) => { + remove_paren(&mut block); + block.rules = rules; + block }, Err(db) => emit_and_panic(db, "block"), } @@ -492,9 +630,12 @@ fn parse_arg_inner<'a>(p: &mut Parser<'a>) -> PResult<'a, Arg> { } pub fn parse_arg(sess: &Session, src: &str) -> Arg { - let mut p = make_parser(sess, "", src); + let mut p = make_parser(sess, src); match parse_arg_inner(&mut p) { - Ok(arg) => remove_paren(arg), + Ok(mut arg) => { + remove_paren(&mut arg); + arg + } Err(db) => emit_and_panic(db, "arg"), } } @@ -502,7 +643,7 @@ pub fn parse_arg(sess: &Session, src: &str) -> Arg { pub fn run_parser(sess: &Session, src: &str, f: F) -> R where F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, R> { - let mut p = make_parser(sess, "", src); + let mut p = make_parser(sess, src); match f(&mut p) { Ok(x) => x, Err(db) => emit_and_panic(db, "src"), @@ -520,7 +661,7 @@ pub fn run_parser_tts(sess: &Session, tts: Vec, f: F) -> R pub fn try_run_parser(sess: &Session, src: &str, f: F) -> Option where F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, R> { - let mut p = make_parser(sess, "", src); + let mut p = make_parser(sess, src); match f(&mut p) { Ok(x) => Some(x), Err(mut db) => { @@ -546,6 +687,6 @@ pub fn try_run_parser_tts(sess: &Session, tts: 
Vec, f: F) -> Op /// Create a span whose text is `s`. Note this is somewhat expensive, as it adds a new dummy file /// to the `SourceMap` on every call. pub fn make_span_for_text(cm: &SourceMap, s: &str) -> Span { - let fm = cm.new_source_file(FileName::Custom("".to_string()), s.to_string()); + let fm = cm.new_source_file(FileName::anon_source_code(s), s.to_string()); Span::new(fm.start_pos, fm.end_pos, SyntaxContext::empty()) } diff --git a/c2rust-refactor/src/illtyped.rs b/c2rust-refactor/src/illtyped.rs index 25b8d28e4..ba226d84e 100644 --- a/c2rust-refactor/src/illtyped.rs +++ b/c2rust-refactor/src/illtyped.rs @@ -1,14 +1,12 @@ -use std::cell::{Cell, RefCell}; use rustc::hir; use rustc::hir::def::Def; use rustc::ty::{self, TyCtxt, ParamEnv}; use smallvec::SmallVec; use syntax::ast::*; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::ptr::P; -use syntax::util::move_map::MoveMap; -use crate::ast_manip::Fold; +use crate::ast_manip::MutVisit; use crate::RefactorCtxt; @@ -36,10 +34,9 @@ pub trait IlltypedFolder<'tcx> { /// type `expected`. #[allow(unused)] fn fix_expr(&mut self, - e: P, + e: &mut P, actual: ty::Ty<'tcx>, - expected: ty::Ty<'tcx>) -> P { - e + expected: ty::Ty<'tcx>) { } /// Called on each expr `e` that is the subject of an invalid cast: `e` has type `actual`, @@ -49,35 +46,34 @@ pub trait IlltypedFolder<'tcx> { /// The default implementation dispatches to `fix_expr`, since fixing `e` to have type exactly /// `target` will certainly make the cast succeed. fn fix_expr_cast(&mut self, - e: P, + e: &mut P, actual: ty::Ty<'tcx>, - target: ty::Ty<'tcx>) -> P { + target: ty::Ty<'tcx>) { self.fix_expr(e, actual, target) } /// Called on each expr `e` that contains a subexpr whose actual type doesn't match the /// expected type propagated down from `e`. 
- fn fix_expr_parent(&mut self, e: P) -> P { - e + fn fix_expr_parent(&mut self, _e: &mut P) { } } impl<'a, 'tcx, F: IlltypedFolder<'tcx>> IlltypedFolder<'tcx> for &'a mut F { fn fix_expr(&mut self, - e: P, + e: &mut P, actual: ty::Ty<'tcx>, - expected: ty::Ty<'tcx>) -> P { + expected: ty::Ty<'tcx>) { ::fix_expr(self, e, actual, expected) } fn fix_expr_cast(&mut self, - e: P, + e: &mut P, actual: ty::Ty<'tcx>, - target: ty::Ty<'tcx>) -> P { + target: ty::Ty<'tcx>) { ::fix_expr_cast(self, e, actual, target) } - fn fix_expr_parent(&mut self, e: P) -> P { + fn fix_expr_parent(&mut self, e: &mut P) { ::fix_expr_parent(self, e) } } @@ -89,269 +85,220 @@ struct FoldIlltyped<'a, 'tcx, F> { } impl<'a, 'tcx, F: IlltypedFolder<'tcx>> FoldIlltyped<'a, 'tcx, F> { - fn ensure(&mut self, - sub_e: P, - expected_ty: ty::Ty<'tcx>, - illtyped: Option<&Cell>) -> P { - if let Some(actual_ty) = self.cx.opt_adjusted_node_type(sub_e.id) { + /// Attempt to ensure that `expr` has the type `expected_ty`. Return true if + /// retyping was needed. + fn ensure( + &mut self, + expr: &mut P, + expected_ty: ty::Ty<'tcx>, + ) -> bool { + if let Some(actual_ty) = self.cx.opt_adjusted_node_type(expr.id) { if !types_approx_equal(self.cx.ty_ctxt(), actual_ty, expected_ty) { - illtyped.map(|i| i.set(true)); - return self.inner.fix_expr(sub_e, actual_ty, expected_ty); + self.inner.fix_expr(expr, actual_ty, expected_ty); + return true; } } - sub_e + false } - fn ensure_cast(&mut self, - sub_e: P, - target_ty: ty::Ty<'tcx>, - illtyped: Option<&Cell>) -> P { + /// Attempt to ensure that `expr` has the type `expected_ty`, inserting + /// casts if needed. Return true if retyping was needed. 
+ // TODO: Use this when checking casts + #[allow(dead_code)] + fn ensure_cast( + &mut self, + sub_e: &mut P, + target_ty: ty::Ty<'tcx>, + ) -> bool { if let Some(actual_ty) = self.cx.opt_adjusted_node_type(sub_e.id) { - illtyped.map(|i| i.set(true)); - return self.inner.fix_expr_cast(sub_e, actual_ty, target_ty); + self.inner.fix_expr_cast(sub_e, actual_ty, target_ty); + return true; } - sub_e + false } } -impl<'a, 'tcx, F: IlltypedFolder<'tcx>> Folder for FoldIlltyped<'a, 'tcx, F> { - fn fold_expr(&mut self, e: P) -> P { - let cx = self.cx; - let this = RefCell::new(self); - - let illtyped = Cell::new(false); - let ensure = |sub_e, expected_ty| { - this.borrow_mut().ensure(sub_e, expected_ty, Some(&illtyped)) - }; - let ensure_cast = |sub_e, target_ty| { - this.borrow_mut().ensure_cast(sub_e, target_ty, Some(&illtyped)) - }; +impl<'a, 'tcx, F: IlltypedFolder<'tcx>> MutVisitor for FoldIlltyped<'a, 'tcx, F> { + fn visit_expr(&mut self, e: &mut P) { + let mut illtyped = false; - let e = e.map(|e| { - let e = fold::noop_fold_expr(e, &mut **this.borrow_mut()); + mut_visit::noop_visit_expr(e, self); + let ty = match self.cx.opt_node_type(e.id) { + Some(x) => x, + None => return, + }; + if let ty::TyKind::Error = ty.sty { + return; + } - let ty = match cx.opt_node_type(e.id) { - Some(x) => x, - None => return e, - }; - if let ty::TyKind::Error = ty.sty { - return e; - } + // We need the whole `Expr` to do this lookup, so it can't happen inside the match. + let opt_fn_sig = self.cx.opt_callee_fn_sig(&e); - // We need the whole `Expr` to do this lookup, so it can't happen inside the match. - let opt_fn_sig = cx.opt_callee_fn_sig(&e); + let tcx = self.cx.ty_ctxt(); - let tcx = cx.ty_ctxt(); + let id = e.id; - let node = match e.node { - ExprKind::Box(content) => { - ExprKind::Box(ensure(content, ty.boxed_ty())) - } - n @ ExprKind::ObsoleteInPlace(..) 
=> n, // NYI - ExprKind::Array(elems) => { - let expected_elem_ty = ty.builtin_index().unwrap(); - ExprKind::Array(elems.move_map(|e| ensure(e, expected_elem_ty))) - } - ExprKind::Repeat(elem, count) => { - let expected_elem_ty = ty.builtin_index().unwrap(); - ExprKind::Repeat(ensure(elem, expected_elem_ty), count) + match &mut e.node { + ExprKind::Box(content) => { + illtyped |= self.ensure(content, ty.boxed_ty()); + } + ExprKind::ObsoleteInPlace(..) => {} // NYI + ExprKind::Array(elems) => { + let expected_elem_ty = ty.builtin_index().unwrap(); + for e in elems { + illtyped |= self.ensure(e, expected_elem_ty); } - ExprKind::Tup(elems) => { - let elem_tys = expect!([ty.sty] ty::TyKind::Tuple(elem_tys) => elem_tys); - ExprKind::Tup(elems.into_iter().zip(elem_tys) - .map(|(elem, elem_ty)| ensure(elem, elem_ty)) - .collect()) + } + ExprKind::Repeat(elem, _count) => { + let expected_elem_ty = ty.builtin_index().unwrap(); + illtyped |= self.ensure(elem, expected_elem_ty); + } + ExprKind::Tup(elems) => { + let elem_tys = expect!([ty.sty] ty::TyKind::Tuple(elem_tys) => elem_tys); + for (elem, elem_ty) in elems.iter_mut().zip(elem_tys) { + illtyped |= self.ensure(elem, elem_ty); } - ExprKind::Call(callee, args) => { - if let Some(fn_sig) = opt_fn_sig { - let mut retyped_args = Vec::with_capacity(args.len()); - for (i, arg) in args.into_iter().enumerate() { + } + ExprKind::Call(_callee, args) => { + if let Some(fn_sig) = opt_fn_sig { + for (i, arg) in args.iter_mut().enumerate() { + if !fn_sig.c_variadic || i < fn_sig.inputs().len() - 1 { if let Some(&ty) = fn_sig.inputs().get(i) { - retyped_args.push(ensure(arg, ty)); - } else { - retyped_args.push(arg); + illtyped |= self.ensure(arg, ty); } } - ExprKind::Call(callee, retyped_args) - } else { - ExprKind::Call(callee, args) } } - ExprKind::MethodCall(seg, args) => { - if let Some(fn_sig) = opt_fn_sig { - let mut retyped_args = Vec::with_capacity(args.len()); - for (i, arg) in args.into_iter().enumerate() { - if let 
Some(&ty) = fn_sig.inputs().get(i) { - retyped_args.push(ensure(arg, ty)); - } else { - retyped_args.push(arg); - } + } + ExprKind::MethodCall(_seg, args) => { + if let Some(fn_sig) = opt_fn_sig { + for (i, arg) in args.iter_mut().enumerate() { + if let Some(&ty) = fn_sig.inputs().get(i) { + illtyped |= self.ensure(arg, ty); } - ExprKind::MethodCall(seg, retyped_args) - } else { - ExprKind::MethodCall(seg, args) } - }, - ExprKind::Binary(binop, lhs, rhs) => { - use syntax::ast::BinOpKind::*; - // TODO: check for overloads - match binop.node { - Add | Sub | Mul | Div | Rem | - BitXor | BitAnd | BitOr => - ExprKind::Binary(binop, ensure(lhs, ty), ensure(rhs, ty)), - Eq | Lt | Le | Ne | Ge | Gt => { - if let Some(lhs_ty) = cx.opt_node_type(lhs.id) { - ExprKind::Binary(binop, lhs, ensure(rhs, lhs_ty)) - } else if let Some(rhs_ty) = cx.opt_node_type(rhs.id) { - ExprKind::Binary(binop, ensure(lhs, rhs_ty), rhs) - } else { - ExprKind::Binary(binop, lhs, rhs) - } - }, - Shl | Shr => - ExprKind::Binary(binop, ensure(lhs, ty), rhs), - And | Or => - ExprKind::Binary(binop, ensure(lhs, ty), ensure(rhs, ty)), - } - } - ExprKind::Unary(binop, ohs) => { - // TODO: need cases for deref, neg/not, and a check for overloads - ExprKind::Unary(binop, ohs) } - ExprKind::Lit(l) => ExprKind::Lit(l), // TODO - ExprKind::Cast(sub_e, target) => { - // Check if the cast is erroneous. We do this by looking up the subexpression - // (yes, the subexpression) in the `cast_kinds` table - if there's nothing - // there, it's not a valid cast. 
- let parent = cx.hir_map().get_parent_did(sub_e.id); - let tables = cx.ty_ctxt().typeck_tables_of(parent); - let hir_id = cx.hir_map().node_to_hir_id(sub_e.id); - if tables.cast_kinds().get(hir_id).is_none() { - ExprKind::Cast(ensure_cast(sub_e, ty), target) - } else { - ExprKind::Cast(sub_e, target) + } + ExprKind::Binary(binop, lhs, rhs) => { + use syntax::ast::BinOpKind::*; + // TODO: check for overloads + match binop.node { + Add | Sub | Mul | Div | Rem | + BitXor | BitAnd | BitOr => { + illtyped |= self.ensure(lhs, ty); + illtyped |= self.ensure(rhs, ty); + } + Eq | Lt | Le | Ne | Ge | Gt => { + if let Some(lhs_ty) = self.cx.opt_node_type(lhs.id) { + illtyped |= self.ensure(rhs, lhs_ty); + } else if let Some(rhs_ty) = self.cx.opt_node_type(rhs.id) { + illtyped |= self.ensure(lhs, rhs_ty); + } + } + Shl | Shr => { + illtyped |= self.ensure(lhs, ty); + } + And | Or => { + illtyped |= self.ensure(lhs, ty); + illtyped |= self.ensure(rhs, ty); } } - ExprKind::Type(sub_e, ascribed) => { - ExprKind::Type(sub_e, ascribed) - } - ExprKind::AddrOf(m, ohs) => ExprKind::AddrOf(m, ohs), // TODO - ExprKind::If(cond, tr, fl) => { - // TODO: do something clever with tr + fl - ExprKind::If(ensure(cond, tcx.mk_bool()), tr, fl) - } - ExprKind::IfLet(pats, expr, tr, fl) => { - let expr = if let Some(pat_ty) = cx.opt_node_type(pats[0].id) { - ensure(expr, pat_ty) - } else { - expr - }; - // TODO: do something clever with tr + fl - // TODO: handle discrepancies between different pattern tys - ExprKind::IfLet(pats, expr, tr, fl) - } - ExprKind::While(cond, body, opt_label) => { - ExprKind::While(ensure(cond, tcx.mk_bool()), body, opt_label) - } - ExprKind::WhileLet(pats, expr, body, opt_label) => { - let expr = if let Some(pat_ty) = cx.opt_node_type(pats[0].id) { - ensure(expr, pat_ty) - } else { - expr - }; - ExprKind::WhileLet(pats, expr, body, opt_label) - } - ExprKind::ForLoop(pat, iter, body, opt_label) => { - ExprKind::ForLoop(pat, iter, body, opt_label) - } - 
ExprKind::Loop(body, opt_label) => { - ExprKind::Loop(body, opt_label) - } - ExprKind::Match(expr, arms) => { - let expr = if let Some(pat_ty) = arms.get(0).and_then( - |arm| cx.opt_node_type(arm.pats[0].id)) { - ensure(expr, pat_ty) - } else { - expr - }; - // TODO: ensure arm bodies match ty - ExprKind::Match(expr, arms) - } - ExprKind::Closure(capture_clause, asyncness, movability, decl, body, span) => { - ExprKind::Closure(capture_clause, asyncness, movability, decl, body, span) - } - ExprKind::Block(blk, opt_label) => { - // TODO: ensure last expr matches ty - ExprKind::Block(blk, opt_label) - } - ExprKind::Async(capture_clause, node_id, body) => { - ExprKind::Async(capture_clause, node_id, body) - } - ExprKind::Assign(el, er) => { - let lhs_ty = cx.node_type(el.id); - ExprKind::Assign(el, ensure(er, lhs_ty)) - } - ExprKind::AssignOp(op, el, er) => { - // TODO: need cases for arith/bitwise, shift, &&/||, and a check for overloads - let lhs_ty = cx.node_type(el.id); - ExprKind::AssignOp(op, el, ensure(er, lhs_ty)) - } - ExprKind::Field(el, ident) => { - ExprKind::Field(el, ident) - } - ExprKind::Index(el, er) => { - // TODO: check for overloads - ExprKind::Index(el, ensure(er, tcx.mk_mach_uint(UintTy::Usize))) - } - ExprKind::Range(e1, e2, lim) => { - // TODO: e1 & e2 should have the same type if both present - ExprKind::Range(e1, e2, lim) - } - ExprKind::Path(qself, path) => { - ExprKind::Path(qself, path) + } + ExprKind::Unary(_binop, _ohs) => { + // TODO: need cases for deref, neg/not, and a check for overloads + } + ExprKind::Lit(_l) => {} // TODO + ExprKind::Cast(_sub_e, _target) => { + // Check if the cast is erroneous. We do this by looking up the subexpression + // (yes, the subexpression) in the `cast_kinds` table - if there's nothing + // there, it's not a valid cast. + + // Updating to nightly-2019-04-08 note: cast_kinds is gone now, + // and cast checking only marks coercion casts. 
We don't need to + // implement the logic for coercions, but it looks like we need + // to implement logic for real cast typechecking. + + // TODO: Implement + + // let parent = self.cx.hir_map().get_parent_did(sub_e.id); + // let tables = self.cx.ty_ctxt().typeck_tables_of(parent); + // let hir_id = self.cx.hir_map().node_to_hir_id(sub_e.id); + // if tables.cast_kinds().get(hir_id).is_none() { + // illtyped |= self.ensure_cast(sub_e, ty); + // } + } + ExprKind::AddrOf(_m, _ohs) => {} // TODO + ExprKind::If(cond, _tr, _fl) => { + // TODO: do something clever with tr + fl + illtyped |= self.ensure(cond, tcx.mk_bool()); + } + ExprKind::IfLet(pats, expr, _tr, _fl) => { + if let Some(pat_ty) = self.cx.opt_node_type(pats[0].id) { + illtyped |= self.ensure(expr, pat_ty); } - ExprKind::Break(opt_label, opt_expr) => { - ExprKind::Break(opt_label, opt_expr) + // TODO: do something clever with tr + fl + // TODO: handle discrepancies between different pattern tys + } + ExprKind::While(cond, _body, _opt_label) => { + illtyped |= self.ensure(cond, tcx.mk_bool()); + } + ExprKind::WhileLet(pats, expr, _body, _opt_label) => { + if let Some(pat_ty) = self.cx.opt_node_type(pats[0].id) { + illtyped |= self.ensure(expr, pat_ty); } - ExprKind::Continue(opt_label) => { - ExprKind::Continue(opt_label) + } + ExprKind::Match(expr, arms) => { + if let Some(pat_ty) = arms.get(0).and_then( + |arm| self.cx.opt_node_type(arm.pats[0].id)) { + illtyped |= self.ensure(expr, pat_ty); } - ExprKind::Ret(e) => ExprKind::Ret(e), - ExprKind::InlineAsm(asm) => ExprKind::InlineAsm(asm), - ExprKind::Mac(mac) => ExprKind::Mac(mac), - ExprKind::Struct(path, fields, maybe_expr) => { - let (fields, maybe_expr) = handle_struct( - cx, e.id, ty, fields, maybe_expr, |e, ty| ensure(e, ty)); - ExprKind::Struct(path, fields, maybe_expr) - }, - ExprKind::Paren(ex) => ExprKind::Paren(ex), - ExprKind::Yield(ex) => ExprKind::Yield(ex), - ExprKind::Try(ex) => ExprKind::Try(ex), - ExprKind::TryBlock(body) => 
ExprKind::TryBlock(body), - }; - - Expr { node, ..e } - }); - - if illtyped.get() { - this.borrow_mut().inner.fix_expr_parent(e) - } else { - e + // TODO: self.ensure arm bodies match ty + } + ExprKind::Block(_blk, _opt_label) => { + // TODO: self.ensure last expr matches ty + } + ExprKind::Assign(el, er) => { + let lhs_ty = self.cx.node_type(el.id); + illtyped |= self.ensure(er, lhs_ty); + } + ExprKind::AssignOp(_op, el, er) => { + // TODO: need cases for arith/bitwise, shift, &&/||, and a check for overloads + let lhs_ty = self.cx.node_type(el.id); + illtyped |= self.ensure(er, lhs_ty); + } + ExprKind::Index(_el, er) => { + // TODO: check for overloads + illtyped |= self.ensure(er, tcx.mk_mach_uint(UintTy::Usize)); + } + ExprKind::Range(_e1, _e2, _lim) => { + // TODO: e1 & e2 should have the same type if both present + } + ExprKind::Struct(_path, fields, maybe_expr) => { + handle_struct(self.cx, id, ty, fields, maybe_expr, |e, ty| illtyped |= self.ensure(e, ty)); + } + + _ => {} + }; + + if illtyped { + self.inner.fix_expr_parent(e) } } - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]> { - fold::noop_fold_item(i, self).move_map(|i| i.map(|mut i| { - i.node = match i.node { - ItemKind::Static(ty, mutbl, expr) => { - let did = self.cx.node_def_id(i.id); + fn flat_map_item(&mut self, i: P) -> SmallVec<[P; 1]> { + let mut items = mut_visit::noop_flat_map_item(i, self); + for i in items.iter_mut() { + let id = i.id; + match &mut i.node { + ItemKind::Static(_ty, _mutbl, expr) => { + let did = self.cx.node_def_id(id); let expected_ty = self.cx.ty_ctxt().type_of(did); info!("STATIC: expected ty {:?}, expr {:?}", expected_ty, expr); let tcx = self.cx.ty_ctxt(); - let node_id = tcx.hir.as_local_node_id(did).unwrap(); - match tcx.hir.get(node_id) { + let node_id = tcx.hir().as_local_node_id(did).unwrap(); + match tcx.hir().get(node_id) { hir::Node::Item(item) => { match item.node { hir::ItemKind::Static(ref t, ..) 
=> info!(" - ty hir = {:?}", t), @@ -361,60 +308,58 @@ impl<'a, 'tcx, F: IlltypedFolder<'tcx>> Folder for FoldIlltyped<'a, 'tcx, F> { _ => {}, } - let expr = self.ensure(expr, expected_ty, None); - ItemKind::Static(ty, mutbl, expr) - }, - ItemKind::Const(ty, expr) => { - let did = self.cx.node_def_id(i.id); + self.ensure(expr, expected_ty); + } + ItemKind::Const(_ty, expr) => { + let did = self.cx.node_def_id(id); let expected_ty = self.cx.ty_ctxt().type_of(did); - let expr = self.ensure(expr, expected_ty, None); - ItemKind::Const(ty, expr) - }, - n => n, - }; - i - })) + self.ensure(expr, expected_ty); + } + _ => {} + } + } + + items } } fn handle_struct<'tcx, F>(cx: &RefactorCtxt<'_, 'tcx>, expr_id: NodeId, ty: ty::Ty<'tcx>, - fields: Vec, - maybe_expr: Option>, - mut ensure: F) -> (Vec, Option>) - where F: FnMut(P, ty::Ty<'tcx>) -> P { + fields: &mut Vec, + maybe_expr: &mut Option>, + mut ensure: F) + where F: FnMut(&mut P, ty::Ty<'tcx>) { let (adt_def, substs) = match ty.sty { ty::TyKind::Adt(a, s) => (a, s), - _ => return (fields, maybe_expr), + _ => return, }; // Get the variant def using the resolution of the path. 
let variant_hir_def = match_or!([resolve_struct_path(cx, expr_id)] Some(x) => x; - return (fields, maybe_expr)); + return); let vdef = adt_def.variant_of_def(variant_hir_def); - let fields = fields.move_map(|f| { - let idx = match_or!([cx.ty_ctxt().find_field_index(f.ident, vdef)] Some(x) => x; return f); + mut_visit::visit_vec(fields, |f| { + let idx = match_or!([cx.ty_ctxt().find_field_index(f.ident, vdef)] Some(x) => x; return); let fdef = &vdef.fields[idx]; let field_ty = fdef.ty(cx.ty_ctxt(), substs); - Field { expr: ensure(f.expr, field_ty), ..f } + ensure(&mut f.expr, field_ty); }); - let maybe_expr = maybe_expr.map(|e| ensure(e, ty)); - (fields, maybe_expr) + mut_visit::visit_opt(maybe_expr, |e| ensure(e, ty)); } fn resolve_struct_path(cx: &RefactorCtxt, id: NodeId) -> Option { let node = match_or!([cx.hir_map().find(id)] Some(x) => x; return None); let expr = match_or!([node] hir::Node::Expr(e) => e; return None); - let qpath = match_or!([expr.node] hir::ExprKind::Struct(ref q, ..) => q; return None); + let qpath: &hir::QPath = match_or!([expr.node] hir::ExprKind::Struct(ref q, ..) 
=> q; return None); let path = match_or!([qpath] hir::QPath::Resolved(_, ref path) => path; return None); Some(path.def) } -pub fn fold_illtyped<'tcx, F, T>(cx: &RefactorCtxt<'_, 'tcx>, x: T, f: F) -> ::Result - where F: IlltypedFolder<'tcx>, T: Fold { +pub fn fold_illtyped<'tcx, F, T>(cx: &RefactorCtxt<'_, 'tcx>, x: &mut T, f: F) + where F: IlltypedFolder<'tcx>, T: MutVisit { let mut f2 = FoldIlltyped { cx, inner: f }; - x.fold(&mut f2) + x.visit(&mut f2) } diff --git a/c2rust-refactor/src/interact/main_thread.rs b/c2rust-refactor/src/interact/main_thread.rs index f8d04467a..77315a9c9 100644 --- a/c2rust-refactor/src/interact/main_thread.rs +++ b/c2rust-refactor/src/interact/main_thread.rs @@ -10,6 +10,7 @@ use std::str::FromStr; use std::sync::{Arc, Mutex}; use std::sync::mpsc::{self, SyncSender, Receiver}; use std::thread; +use rustc_interface::interface::{self, Config}; use syntax::ast::*; use syntax::source_map::{FileLoader, RealFileLoader}; use syntax::source_map::Span; @@ -20,7 +21,7 @@ use syntax_pos::FileName; use crate::ast_manip::{GetNodeId, GetSpan, Visit}; use crate::command::{self, RefactorState}; use crate::driver; -use crate::file_io::FileIO; +use crate::file_io::{FileIO}; use crate::interact::{ToServer, ToClient}; use crate::interact::WrapSender; use crate::interact::{plain_backend, vim8_backend}; @@ -40,22 +41,11 @@ struct InteractState { } impl InteractState { - fn new(rustc_args: Vec, - registry: command::Registry, - to_worker: SyncSender, + fn new(state: RefactorState, + buffers_available: Arc>>, + _to_worker: SyncSender, to_client: SyncSender) -> InteractState { - let buffers_available = Arc::new(Mutex::new(HashSet::new())); - - let file_io = Arc::new(InteractiveFileIO { - buffers_available: buffers_available.clone(), - to_worker: to_worker.clone(), - to_client: to_client.clone(), - }); - - let state = RefactorState::from_rustc_args( - &rustc_args, registry, file_io, HashSet::new()); - InteractState { to_client, buffers_available, state } } @@ 
-77,7 +67,7 @@ impl InteractState { } } - fn run_compiler(&mut self, phase: driver::Phase, func: F) -> R + fn run_compiler(&mut self, phase: driver::Phase, func: F) -> interface::Result where F: FnOnce(&Crate, &RefactorCtxt) -> R { self.state.transform_crate(phase, |st, cx| { func(&st.krate(), cx) @@ -95,7 +85,7 @@ impl InteractState { let (id, mark_info) = self.run_compiler(driver::Phase::Phase2, |krate, cx| { let info = pick_node::pick_node_at_loc( - &krate, &cx, kind, &file, line, col) + &krate, cx.session(), kind, &file, line, col) .unwrap_or_else( || panic!("no {:?} node at {}:{}:{}", kind, file, line, col)); @@ -112,7 +102,7 @@ impl InteractState { end_col: hi.col.0 as u32, labels: vec![(&label.as_str() as &str).to_owned()], }) - }); + }).expect("Failed to run compiler"); self.state.marks_mut().insert((id, label)); self.to_client.send(Mark { info: mark_info }).unwrap(); @@ -126,7 +116,7 @@ impl InteractState { let id = NodeId::from_usize(id); let mut labels = Vec::new(); - for &(mark_id, label) in self.state.marks() { + for &(mark_id, label) in &*self.state.marks() { if mark_id == id { labels.push((&label.as_str() as &str).to_owned()); } @@ -148,7 +138,7 @@ impl InteractState { labels: labels, }; Mark { info: info } - }); + }).expect("Failed to run compiler"); self.to_client.send(msg).unwrap(); }, @@ -156,7 +146,7 @@ impl InteractState { let msg = self.state.transform_crate(driver::Phase::Phase2, |st, cx| { let infos = collect_mark_infos(&st.marks(), &st.krate(), &cx); MarkList { infos: infos } - }); + }).expect("Failed to run compiler"); self.to_client.send(msg).unwrap(); }, @@ -232,9 +222,11 @@ fn collect_mark_infos(marks: &HashSet<(NodeId, Symbol)>, infos_vec } -pub fn interact_command(args: &[String], - rustc_args: Vec, - registry: command::Registry) { +pub fn interact_command( + args: &[String], + config: Config, + registry: command::Registry, +) { let (to_main, main_recv) = mpsc::channel(); let (to_worker, worker_recv) = mpsc::sync_channel(1); @@ -248,8 
+240,18 @@ pub fn interact_command(args: &[String], worker::run_worker(worker_recv, to_client_, to_main); }); - InteractState::new(rustc_args, registry, to_worker, to_client) - .run_loop(main_recv); + let buffers_available = Arc::new(Mutex::new(HashSet::new())); + + let file_io = Arc::new(InteractiveFileIO { + buffers_available: buffers_available.clone(), + to_worker: to_worker.clone(), + to_client: to_client.clone(), + }); + + driver::run_refactoring(config, registry, file_io, HashSet::new(), |state| { + InteractState::new(state, buffers_available, to_worker, to_client) + .run_loop(main_recv); + }); } diff --git a/c2rust-refactor/src/lib.rs b/c2rust-refactor/src/lib.rs index 5ad881a65..0a89f87cd 100644 --- a/c2rust-refactor/src/lib.rs +++ b/c2rust-refactor/src/lib.rs @@ -3,7 +3,7 @@ trace_macros, specialization, box_patterns, - try_from, + generator_trait, )] extern crate arena; extern crate ena; @@ -20,6 +20,8 @@ extern crate rustc; extern crate rustc_data_structures; extern crate rustc_driver; extern crate rustc_errors; +extern crate rustc_incremental; +extern crate rustc_interface; extern crate rustc_metadata; extern crate rustc_privacy; extern crate rustc_resolve; @@ -77,9 +79,7 @@ use std::str::{self, FromStr}; use std::sync::Arc; use cargo::util::paths; use syntax::ast::NodeId; -use rustc::ty; -use rustc_driver::CompilationFailure; -use rustc_data_structures::sync::Lock; +use rustc_interface::interface; use c2rust_ast_builder::IntoSymbol; @@ -203,7 +203,7 @@ fn get_rustc_arg_strings(src: RustcArgSource) -> Vec { } fn get_rustc_cargo_args() -> Vec { - use std::sync::{Arc, Mutex}; + use std::sync::Mutex; use cargo::Config; use cargo::core::{Workspace, PackageId, Target, maybe_allow_nightly_features}; use cargo::core::compiler::{CompileMode, Executor, DefaultExecutor, Context, Unit}; @@ -307,7 +307,17 @@ fn get_rustc_cargo_args() -> Vec { args } -fn main_impl(opts: Options) { +pub fn lib_main(opts: Options) -> interface::Result<()> { + env_logger::init(); + + 
// Make sure we compile with the toolchain version that the refactoring tool + // is built against. + env::set_var("RUSTUP_TOOLCHAIN", env!("RUSTUP_TOOLCHAIN")); + + rustc_driver::report_ices_to_stderr_if_any(move || main_impl(opts)).and_then(|x| x) +} + +fn main_impl(opts: Options) -> interface::Result<()> { let mut marks = HashSet::new(); for m in &opts.marks { let label = m.label.as_ref().map_or("target", |s| s).into_symbol(); @@ -316,8 +326,14 @@ fn main_impl(opts: Options) { let rustc_args = get_rustc_arg_strings(opts.rustc_args.clone()); + // TODO: interface::run_compiler() here and create a RefactorState with the + // callback. RefactorState should know how to reset the compiler when needed + // and can handle querying the compiler. + if opts.cursors.len() > 0 { - driver::run_compiler(&rustc_args, None, driver::Phase::Phase2, |krate, cx| { + let config = driver::create_config(&rustc_args); + driver::run_compiler(config, None, |compiler| { + let expanded_crate = compiler.expansion().unwrap().take().0; for c in &opts.cursors { let kind_result = c.kind.clone().map_or(Ok(pick_node::NodeKind::Any), |s| pick_node::NodeKind::from_str(&s)); @@ -330,7 +346,7 @@ fn main_impl(opts: Options) { }; let id = match pick_node::pick_node_at_loc( - &krate, &cx, kind, &c.file, c.line, c.col) { + &expanded_crate, compiler.session(), kind, &c.file, c.line, c.col) { Some(info) => info.id, None => { info!("Failed to find {:?} at {}:{}:{}", @@ -361,59 +377,35 @@ fn main_impl(opts: Options) { plugin::load_plugins(&opts.plugin_dirs, &opts.plugins, &mut cmd_reg); + let config = driver::create_config(&rustc_args); + if opts.commands.len() == 1 && opts.commands[0].name == "interact" { - interact::interact_command(&opts.commands[0].args, - rustc_args, - cmd_reg); + interact::interact_command(&opts.commands[0].args, config, cmd_reg); } else if opts.commands.len() == 1 && opts.commands[0].name == "script" { assert_eq!(opts.commands[0].args.len(), 1); - scripting::run_lua_file( - 
Path::new(&opts.commands[0].args[0]), - rustc_args, - cmd_reg, - opts.rewrite_modes, - ).expect("Error loading user script"); + scripting::run_lua_file(Path::new(&opts.commands[0].args[0]), config, cmd_reg, opts.rewrite_modes) + .expect("Error loading user script"); } else { - let mut state = command::RefactorState::from_rustc_args( - &rustc_args, - cmd_reg, - Arc::new(file_io::RealFileIO::new(opts.rewrite_modes)), - marks, - ); - - state.load_crate(); - - for cmd in opts.commands.clone() { - if &cmd.name == "interact" { - panic!("`interact` must be the only command"); - } else { - match state.run(&cmd.name, &cmd.args) { - Ok(_)=> {}, - Err(e) => { - eprintln!("{:?}", e); - std::process::exit(1); - } + let file_io = Arc::new(file_io::RealFileIO::new(opts.rewrite_modes.clone())); + driver::run_refactoring(config, cmd_reg, file_io, marks, |mut state| { + for cmd in opts.commands.clone() { + if &cmd.name == "interact" { + panic!("`interact` must be the only command"); + } else { + match state.run(&cmd.name, &cmd.args) { + Ok(_)=> {}, + Err(e) => { + eprintln!("{:?}", e); + std::process::exit(1); + } + } } } - } - state.save_crate(); + state.save_crate(); + }); } -} - -pub fn lib_main(opts: Options) -> Result<(), CompilationFailure> { - env_logger::init(); - - // Make sure we compile with the toolchain version that the refactoring tool - // is built against. 
- env::set_var("RUSTUP_TOOLCHAIN", env!("RUSTUP_TOOLCHAIN")); - ty::tls::GCX_PTR.set(&Lock::new(0), || { - rustc_driver::monitor(move || { - syntax::with_globals(move || { - main_impl(opts); - }); - }) - }) + Ok(()) } diff --git a/c2rust-refactor/src/mark_adjust.rs b/c2rust-refactor/src/mark_adjust.rs index 071d32153..bfc492e0b 100644 --- a/c2rust-refactor/src/mark_adjust.rs +++ b/c2rust-refactor/src/mark_adjust.rs @@ -33,8 +33,7 @@ impl<'a, 'tcx> MarkUseVisitor<'a, 'tcx> { } // For struct and node constructors, also check the parent item - if matches!([path.def] Def::StructCtor(..)) || - matches!([path.def] Def::VariantCtor(..)) { + if matches!([path.def] Def::Ctor(..)) { let parent_id = self.cx.hir_map().get_parent(id); if self.st.marked(parent_id, self.label) { self.st.add_mark(use_id, self.label); @@ -168,7 +167,7 @@ pub fn find_mark_uses(target: &T, /// For every top-level definition bearing `MARK`, apply `MARK` to uses of that /// definition. Removes `MARK` from the original definitions. 
pub fn find_mark_uses_command(st: &CommandState, cx: &RefactorCtxt, label: &str) { - find_mark_uses(&*st.krate(), st, cx, label); + find_mark_uses(&*st.krate_mut(), st, cx, label); } diff --git a/c2rust-refactor/src/matcher/bindings.rs b/c2rust-refactor/src/matcher/bindings.rs index 7cfca6193..c2067dc93 100644 --- a/c2rust-refactor/src/matcher/bindings.rs +++ b/c2rust-refactor/src/matcher/bindings.rs @@ -7,7 +7,7 @@ use syntax::ast::{Expr, Ident, Item, Pat, Path, Stmt, Ty}; use syntax::parse::token::Token; use syntax::ptr::P; use syntax::symbol::Symbol; -use syntax::tokenstream::{Cursor, Delimited, TokenTree, TokenStream, TokenStreamBuilder}; +use syntax::tokenstream::{Cursor, TokenTree, TokenStream, TokenStreamBuilder}; use crate::ast_manip::AstEquiv; use c2rust_ast_builder::IntoSymbol; @@ -320,11 +320,9 @@ fn rewrite_token_stream(ts: TokenStream, bt: &mut BindingTypes) -> TokenStream { _ => TokenTree::Token(sp, Token::Dollar) }, - TokenTree::Delimited(sp, del) => { - let Delimited { delim, tts } = del; + TokenTree::Delimited(sp, delim, tts) => { let dts = rewrite_token_stream(tts.into(), bt); - let del = Delimited { delim, tts: dts.into() }; - TokenTree::Delimited(sp, del) + TokenTree::Delimited(sp, delim, dts.into() ) } tt @ _ => tt diff --git a/c2rust-refactor/src/matcher/impls.rs b/c2rust-refactor/src/matcher/impls.rs index 6a406eb69..5ad81a26d 100644 --- a/c2rust-refactor/src/matcher/impls.rs +++ b/c2rust-refactor/src/matcher/impls.rs @@ -7,7 +7,7 @@ use syntax::source_map::{Span, Spanned}; use syntax::ext::hygiene::SyntaxContext; use syntax::parse::token::{Token, DelimToken, Nonterminal}; use syntax::ptr::P; -use syntax::tokenstream::{TokenTree, Delimited, DelimSpan, TokenStream, ThinTokenStream}; +use syntax::tokenstream::{TokenTree, DelimSpan, TokenStream}; use crate::ast_manip::util::{macro_name, PatternSymbol}; use crate::matcher::{self, TryMatch, MatchCtxt}; diff --git a/c2rust-refactor/src/matcher/mod.rs b/c2rust-refactor/src/matcher/mod.rs index 
dc30adbd1..d47064204 100644 --- a/c2rust-refactor/src/matcher/mod.rs +++ b/c2rust-refactor/src/matcher/mod.rs @@ -36,25 +36,22 @@ use rustc::session::Session; use smallvec::SmallVec; use std::cmp; use std::result; -use std::path::PathBuf; use syntax::ast::{Block, Expr, ExprKind, Ident, Item, Label, Pat, Path, Stmt, Ty}; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::parse::parser::{Parser, PathStyle}; use syntax::parse::token::Token; use syntax::parse::{self, PResult}; use syntax::ptr::P; use syntax::symbol::Symbol; -use syntax::tokenstream::{TokenStream, ThinTokenStream}; -use syntax::util::move_map::MoveMap; +use syntax::tokenstream::TokenStream; use syntax_pos::FileName; use crate::ast_manip::util::PatternSymbol; -use crate::ast_manip::{Fold, GetNodeId, remove_paren}; +use crate::ast_manip::{MutVisit, GetNodeId, remove_paren}; use crate::command::CommandState; use crate::driver::{self, emit_and_panic}; use crate::RefactorCtxt; use crate::reflect; -use crate::util::Lone; use c2rust_ast_builder::IntoSymbol; mod bindings; @@ -119,33 +116,36 @@ impl<'a, 'tcx> MatchCtxt<'a, 'tcx> { pub fn parse_expr(&mut self, src: &str) -> P { - let (mut p, bt) = make_bindings_parser(self.cx.session(), "", src); + let (mut p, bt) = make_bindings_parser(self.cx.session(), src); match p.parse_expr() { - Ok(expr) => { + Ok(mut expr) => { self.types.merge(bt); - remove_paren(expr) + remove_paren(&mut expr); + expr } Err(db) => emit_and_panic(db, "expr"), } } pub fn parse_pat(&mut self, src: &str) -> P { - let (mut p, bt) = make_bindings_parser(self.cx.session(), "", src); + let (mut p, bt) = make_bindings_parser(self.cx.session(), src); match p.parse_pat(None) { - Ok(pat) => { + Ok(mut pat) => { self.types.merge(bt); - remove_paren(pat) + remove_paren(&mut pat); + pat } Err(db) => emit_and_panic(db, "pat"), } } pub fn parse_ty(&mut self, src: &str) -> P { - let (mut p, bt) = make_bindings_parser(self.cx.session(), "", src); + let (mut p, bt) = 
make_bindings_parser(self.cx.session(), src); match p.parse_ty() { - Ok(ty) => { + Ok(mut ty) => { self.types.merge(bt); - remove_paren(ty) + remove_paren(&mut ty); + ty } Err(db) => emit_and_panic(db, "ty"), } @@ -154,22 +154,29 @@ impl<'a, 'tcx> MatchCtxt<'a, 'tcx> { pub fn parse_stmts(&mut self, src: &str) -> Vec { // TODO: rustc no longer exposes `parse_full_stmt`. `parse_block` is a hacky // workaround that may cause suboptimal error messages. - let (mut p, bt) = make_bindings_parser(self.cx.session(), "", &format!("{{ {} }}", src)); + let (mut p, bt) = make_bindings_parser(self.cx.session(), &format!("{{ {} }}", src)); match p.parse_block() { Ok(blk) => { self.types.merge(bt); - blk.into_inner().stmts.into_iter().map(|s| remove_paren(s).lone()).collect() + let mut stmts = blk.into_inner().stmts; + for s in stmts.iter_mut() { + remove_paren(s); + } + stmts } Err(db) => emit_and_panic(db, "stmts"), } } pub fn parse_items(&mut self, src: &str) -> Vec> { - let (mut p, bt) = make_bindings_parser(self.cx.session(), "", src); + let (mut p, bt) = make_bindings_parser(self.cx.session(), src); let mut items = Vec::new(); loop { match p.parse_item() { - Ok(Some(item)) => items.push(remove_paren(item).lone()), + Ok(Some(mut item)) => { + remove_paren(&mut item); + items.push(item); + } Ok(None) => break, Err(db) => emit_and_panic(db, "items"), } @@ -393,7 +400,7 @@ impl<'a, 'tcx> MatchCtxt<'a, 'tcx> { /// Handle the `marked!(...)` matching form. pub fn do_marked(&mut self, - tts: &ThinTokenStream, + tts: &TokenStream, func: F, target: &T) -> Result<()> where T: TryMatch + GetNodeId, @@ -419,7 +426,7 @@ impl<'a, 'tcx> MatchCtxt<'a, 'tcx> { /// Core implementation of the `def!(...)` matching form. 
fn do_def_impl(&mut self, - tts: &ThinTokenStream, + tts: &TokenStream, style: PathStyle, opt_def_id: Option) -> Result<()> { let mut p = Parser::new(&self.cx.session().parse_sess, @@ -445,20 +452,20 @@ impl<'a, 'tcx> MatchCtxt<'a, 'tcx> { } /// Handle the `def!(...)` matching form for exprs. - pub fn do_def_expr(&mut self, tts: &ThinTokenStream, target: &Expr) -> Result<()> { + pub fn do_def_expr(&mut self, tts: &TokenStream, target: &Expr) -> Result<()> { let opt_def_id = self.cx.try_resolve_expr(target); self.do_def_impl(tts, PathStyle::Expr, opt_def_id) } /// Handle the `def!(...)` matching form for exprs. - pub fn do_def_ty(&mut self, tts: &ThinTokenStream, target: &Ty) -> Result<()> { + pub fn do_def_ty(&mut self, tts: &TokenStream, target: &Ty) -> Result<()> { let opt_def_id = self.cx.try_resolve_ty(target); self.do_def_impl(tts, PathStyle::Type, opt_def_id) } /// Handle the `typed!(...)` matching form. pub fn do_typed(&mut self, - tts: &ThinTokenStream, + tts: &TokenStream, func: F, target: &T) -> Result<()> where T: TryMatch + GetNodeId, @@ -485,7 +492,7 @@ impl<'a, 'tcx> MatchCtxt<'a, 'tcx> { self.try_match(&pattern, target) } - pub fn do_cast(&mut self, tts: &ThinTokenStream, func: F, target: &Expr) -> Result<()> + pub fn do_cast(&mut self, tts: &TokenStream, func: F, target: &Expr) -> Result<()> where F: for<'b> FnOnce(&mut Parser<'b>) -> PResult<'b, P> { let ts: TokenStream = tts.clone().into(); let pattern = driver::run_parser_tts(self.cx.session(), ts.into_trees().collect(), func); @@ -509,9 +516,9 @@ impl<'a, 'tcx> MatchCtxt<'a, 'tcx> { } } -fn make_bindings_parser<'a>(sess: &'a Session, name: &str, src: &str) -> (Parser<'a>, BindingTypes) { +fn make_bindings_parser<'a>(sess: &'a Session, src: &str) -> (Parser<'a>, BindingTypes) { let ts = - parse::parse_stream_from_source_str(FileName::Real(PathBuf::from(name)), + parse::parse_stream_from_source_str(FileName::anon_source_code(src), src.to_owned(), &sess.parse_sess, None); @@ -525,16 +532,25 @@ pub 
trait TryMatch { -/// Trait for AST types that can be used as patterns in a search-and-replace (`fold_match`). -pub trait Pattern: TryMatch+Sized { - fn apply_folder<'a, 'tcx, T, F>( +/// Trait for AST types that can be used as patterns in a search-and-replace (`mut_visit_match`). +pub trait Pattern: TryMatch+Sized { + fn visit<'a, 'tcx, T, F>( self, - init_mcx: MatchCtxt<'a, 'tcx>, - callback: F, - target: T, - ) -> ::Result - where T: Fold, - F: FnMut(Self, MatchCtxt<'a, 'tcx>) -> Self; + _init_mcx: MatchCtxt<'a, 'tcx>, + _callback: F, + _target: &mut T, + ) + where T: MutVisit, + F: FnMut(&mut V, MatchCtxt<'a, 'tcx>) {} + + fn flat_map<'a, 'tcx, T, F>( + self, + _init_mcx: MatchCtxt<'a, 'tcx>, + _callback: F, + _target: &mut T, + ) + where T: MutVisit, + F: FnMut(V, MatchCtxt<'a, 'tcx>) -> SmallVec<[V; 1]> {} } @@ -550,44 +566,93 @@ macro_rules! gen_pattern_impl { ) => { /// Automatically generated `Folder` implementation, for use by `Pattern`. pub struct $PatternFolder<'a, 'tcx: 'a, F> - where F: FnMut($Pat, MatchCtxt<'a, 'tcx>) -> $Pat { + where F: FnMut(&mut $Pat, MatchCtxt<'a, 'tcx>) { pattern: $Pat, init_mcx: MatchCtxt<'a, 'tcx>, callback: F, } - impl<'a, 'tcx, F> Folder for $PatternFolder<'a, 'tcx, F> - where F: FnMut($Pat, MatchCtxt<'a, 'tcx>) -> $Pat { + impl<'a, 'tcx, F> MutVisitor for $PatternFolder<'a, 'tcx, F> + where F: FnMut(&mut $Pat, MatchCtxt<'a, 'tcx>) { #[allow(unused_mut)] fn $fold_thing(&mut $slf, $arg: $ArgTy) -> $RetTy { let $arg = $walk; - let mut $match_one = |x| { + let mut $match_one = |x: &mut $ArgTy| { if let Ok(mcx) = $slf.init_mcx.clone_match(&$slf.pattern, &x) { ($slf.callback)(x, mcx) - } else { - x } }; $map } } - impl Pattern for $Pat { - fn apply_folder<'a, 'tcx, T, F>( + impl Pattern<$Pat> for $Pat { + fn visit<'a, 'tcx, T, F>( self, init_mcx: MatchCtxt<'a, 'tcx>, callback: F, - target: T, - ) -> ::Result - where T: Fold, - F: FnMut(Self, MatchCtxt<'a, 'tcx>) -> Self + target: &mut T, + ) + where T: MutVisit, + F: FnMut(&mut 
Self, MatchCtxt<'a, 'tcx>) { let mut f = $PatternFolder { pattern: self, init_mcx: init_mcx, callback: callback, }; - target.fold(&mut f) + target.visit(&mut f) + } + } + }; + ( + pattern = $Pat:ty; + folder = $PatternFolder:ident; + + // Capture the ident "self" from the outer context, so it can be used in the expressions. + fn $fold_thing:ident ( &mut $slf:ident , $arg:ident : &mut $ArgTy:ty ); + walk = $walk:expr; + map($match_one:ident) = $map:expr; + ) => { + /// Automatically generated `Folder` implementation, for use by `Pattern`. + pub struct $PatternFolder<'a, 'tcx: 'a, F> + where F: FnMut(&mut $Pat, MatchCtxt<'a, 'tcx>) { + pattern: $Pat, + init_mcx: MatchCtxt<'a, 'tcx>, + callback: F, + } + + impl<'a, 'tcx, F> MutVisitor for $PatternFolder<'a, 'tcx, F> + where F: FnMut(&mut $Pat, MatchCtxt<'a, 'tcx>) + { + #[allow(unused_mut)] + fn $fold_thing(&mut $slf, $arg: &mut $ArgTy) { + $walk; + let mut $match_one = |x: &mut $ArgTy| { + if let Ok(mcx) = $slf.init_mcx.clone_match(&$slf.pattern, &x) { + ($slf.callback)(x, mcx); + } + }; + $map + } + } + + impl Pattern<$Pat> for $Pat { + fn visit<'a, 'tcx, T, F>( + self, + init_mcx: MatchCtxt<'a, 'tcx>, + callback: F, + target: &mut T, + ) + where T: MutVisit, + F: FnMut(&mut Self, MatchCtxt<'a, 'tcx>) + { + let mut f = $PatternFolder { + pattern: self, + init_mcx: init_mcx, + callback: callback, + }; + target.visit(&mut f) } } }; @@ -600,10 +665,10 @@ gen_pattern_impl! { folder = ExprPatternFolder; // Signature of the corresponding `Folder` method. - fn fold_expr(&mut self, e: P) -> P; + fn visit_expr(&mut self, e: &mut P); // Expr that runs the default `Folder` action for this node type. Can refer to the argument of // the `Folder` method using the name that appears in the signature above. - walk = e.map(|e| fold::noop_fold_expr(e, self)); + walk = mut_visit::noop_visit_expr(e, self); // Expr that runs the callback on the result of the `walk` expression. This is parameterized // by the `match_one` closure. 
map(match_one) = match_one(e); @@ -613,8 +678,8 @@ gen_pattern_impl! { pattern = P; folder = TyPatternFolder; - fn fold_ty(&mut self, t: P) -> P; - walk = fold::noop_fold_ty(t, self); + fn visit_ty(&mut self, t: &mut P); + walk = mut_visit::noop_visit_ty(t, self); map(match_one) = match_one(t); } @@ -622,9 +687,9 @@ gen_pattern_impl! { pattern = Stmt; folder = StmtPatternFolder; - fn fold_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]>; - walk = fold::noop_fold_stmt(s, self); - map(match_one) = s.move_map(match_one); + fn flat_map_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]>; + walk = mut_visit::noop_flat_map_stmt(s, self); + map(match_one) = { let mut s = s; s.iter_mut().for_each(match_one); s }; } @@ -632,18 +697,18 @@ gen_pattern_impl! { /// Custom `Folder` for multi-statement `Pattern`s. pub struct MultiStmtPatternFolder<'a, 'tcx: 'a, F> - where F: FnMut(Vec, MatchCtxt<'a, 'tcx>) -> Vec { + where F: FnMut(&mut Vec, MatchCtxt<'a, 'tcx>) { pattern: Vec, init_mcx: MatchCtxt<'a, 'tcx>, callback: F, } -impl<'a, 'tcx, F> Folder for MultiStmtPatternFolder<'a, 'tcx, F> - where F: FnMut(Vec, MatchCtxt<'a, 'tcx>) -> Vec { - fn fold_block(&mut self, b: P) -> P { +impl<'a, 'tcx, F> MutVisitor for MultiStmtPatternFolder<'a, 'tcx, F> + where F: FnMut(&mut Vec, MatchCtxt<'a, 'tcx>) { + fn visit_block(&mut self, b: &mut P) { assert!(self.pattern.len() > 0); - let b = fold::noop_fold_block(b, self); + mut_visit::noop_visit_block(b, self); let mut new_stmts = Vec::with_capacity(b.stmts.len()); let mut last = 0; @@ -655,9 +720,9 @@ impl<'a, 'tcx, F> Folder for MultiStmtPatternFolder<'a, 'tcx, F> if let Some(consumed) = result { new_stmts.extend_from_slice(&b.stmts[last .. i]); - let consumed_stmts = b.stmts[i .. i + consumed].to_owned(); - let mut replacement = (self.callback)(consumed_stmts, mcx); - new_stmts.append(&mut replacement); + let mut consumed_stmts = b.stmts[i .. 
i + consumed].to_owned(); + (self.callback)(&mut consumed_stmts, mcx); + new_stmts.extend(consumed_stmts); i += cmp::max(consumed, 1); last = i; @@ -672,11 +737,9 @@ impl<'a, 'tcx, F> Folder for MultiStmtPatternFolder<'a, 'tcx, F> } } - if last == 0 { - b - } else { + if last != 0 { new_stmts.extend_from_slice(&b.stmts[last ..]); - b.map(|b| Block { stmts: new_stmts, ..b }) + b.stmts = new_stmts; } } } @@ -741,61 +804,75 @@ fn is_multi_stmt_glob(mcx: &MatchCtxt, pattern: &Stmt) -> bool { true } -impl Pattern for Vec { - fn apply_folder<'a, 'tcx, T, F>(self, - init_mcx: MatchCtxt<'a, 'tcx>, - callback: F, - target: T) -> ::Result - where T: Fold, - F: FnMut(Self, MatchCtxt<'a, 'tcx>) -> Self { +impl Pattern> for Vec { + fn visit<'a, 'tcx, T, F>( + self, + init_mcx: MatchCtxt<'a, 'tcx>, + callback: F, + target: &mut T, + ) where T: MutVisit, + F: FnMut(&mut Vec, MatchCtxt<'a, 'tcx>) + { let mut f = MultiStmtPatternFolder { pattern: self, init_mcx: init_mcx, callback: callback, }; - target.fold(&mut f) + target.visit(&mut f) } } /// Find every match for `pattern` within `target`, and rewrite each one by invoking `callback`. -pub fn fold_match(st: &CommandState, +pub fn mut_visit_match(st: &CommandState, cx: &RefactorCtxt, pattern: P, - target: T, - callback: F) -> ::Result - where P: Pattern, - T: Fold, - F: FnMut(P, MatchCtxt) -> P { - fold_match_with(MatchCtxt::new(st, cx), pattern, target, callback) + target: &mut T, + callback: F) + where P: Pattern

, + T: MutVisit, + F: FnMut(&mut P, MatchCtxt) { + mut_visit_match_with(MatchCtxt::new(st, cx), pattern, target, callback) } /// Find every match for `pattern` within `target`, and rewrite each one by invoking `callback`. -pub fn fold_match_with<'a, 'tcx, P, T, F>( +pub fn mut_visit_match_with<'a, 'tcx, P, T, V, F>( + init_mcx: MatchCtxt<'a, 'tcx>, + pattern: P, + target: &mut T, + callback: F, +) +where P: Pattern, + T: MutVisit, + F: FnMut(&mut V, MatchCtxt<'a, 'tcx>) +{ + pattern.visit(init_mcx, callback, target) +} + +pub fn flat_map_match_with<'a, 'tcx, P, T, V, F>( init_mcx: MatchCtxt<'a, 'tcx>, pattern: P, - target: T, + target: &mut T, callback: F, -) -> ::Result -where P: Pattern, - T: Fold, - F: FnMut(P, MatchCtxt<'a, 'tcx>) -> P +) +where P: Pattern, + T: MutVisit, + F: FnMut(V, MatchCtxt<'a, 'tcx>) -> SmallVec<[V; 1]> { - pattern.apply_folder(init_mcx, callback, target) + pattern.flat_map(init_mcx, callback, target) } /// Find the first place where `pattern` matches under initial context `init_mcx`, and return the /// resulting `Bindings`. pub fn find_first_with(init_mcx: MatchCtxt, pattern: P, - target: T) -> Option - where P: Pattern, T: Fold { + target: &mut T) -> Option + where P: Pattern

, T: MutVisit { let mut result = None; - fold_match_with(init_mcx, pattern, target, |p, mcx| { + mut_visit_match_with(init_mcx, pattern, target, |_p, mcx| { if result.is_none() { result = Some(mcx.bindings); } - p }); result } @@ -804,32 +881,34 @@ pub fn find_first_with(init_mcx: MatchCtxt, pub fn find_first(st: &CommandState, cx: &RefactorCtxt, pattern: P, - target: T) -> Option - where P: Pattern, T: Fold { + target: &mut T) -> Option + where P: Pattern

, T: MutVisit { find_first_with(MatchCtxt::new(st, cx), pattern, target) } // TODO: find a better place to put this /// Replace all instances of expression `pat` with expression `repl`. -pub fn replace_expr(st: &CommandState, +pub fn replace_expr(st: &CommandState, cx: &RefactorCtxt, - ast: T, + ast: &mut T, pat: &str, - repl: &str) -> ::Result { + repl: &str) { let mut mcx = MatchCtxt::new(st, cx); let pat = mcx.parse_expr(pat); let repl = mcx.parse_expr(repl); - fold_match_with(mcx, pat, ast, |_, mcx| repl.clone().subst(st, cx, &mcx.bindings)) + // TODO: Make Subst modify in place + mut_visit_match_with(mcx, pat, ast, |x, mcx| *x = repl.clone().subst(st, cx, &mcx.bindings)) } /// Replace all instances of the statement sequence `pat` with `repl`. -pub fn replace_stmts(st: &CommandState, +pub fn replace_stmts(st: &CommandState, cx: &RefactorCtxt, - ast: T, + ast: &mut T, pat: &str, - repl: &str) -> ::Result { + repl: &str) { let mut mcx = MatchCtxt::new(st, cx); let pat = mcx.parse_stmts(pat); let repl = mcx.parse_stmts(repl); - fold_match_with(mcx, pat, ast, |_, mcx| repl.clone().subst(st, cx, &mcx.bindings)) + // TODO: Make Subst modify in place + mut_visit_match_with(mcx, pat, ast, |x, mcx| *x = repl.clone().subst(st, cx, &mcx.bindings)) } diff --git a/c2rust-refactor/src/matcher/subst.rs b/c2rust-refactor/src/matcher/subst.rs index c9862eadb..71f39d357 100644 --- a/c2rust-refactor/src/matcher/subst.rs +++ b/c2rust-refactor/src/matcher/subst.rs @@ -17,18 +17,16 @@ //! For itemlikes, a lone ident can't be used as a placeholder because it's not a valid //! itemlike. Use a zero-argument macro invocation `__x!()` instead. 
-use syntax::ast::{Ident, Path, Expr, ExprKind, Pat, Ty, TyKind, Stmt, Item, ImplItem, Label, Local}; +use syntax::ast::{Ident, Path, Expr, Pat, Ty, Stmt, Item, ImplItem, Label}; use syntax::ast::Mac; -use syntax::fold::{self, Folder, fold_attrs}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::ptr::P; -use syntax::util::move_map::MoveMap; use smallvec::SmallVec; -use crate::ast_manip::Fold; -use crate::ast_manip::util::{PatternSymbol, macro_name}; +use crate::ast_manip::MutVisit; +use crate::ast_manip::util::PatternSymbol; use crate::command::CommandState; use crate::matcher::Bindings; -use crate::util::Lone; use crate::RefactorCtxt; // `st` and `cx` were previously used for `def!` substitution, which has been removed. I expect @@ -40,89 +38,8 @@ struct SubstFolder<'a, 'tcx: 'a> { bindings: &'a Bindings, } -impl<'a, 'tcx> SubstFolder<'a, 'tcx> { - fn fold_opt_label(&mut self, l: Option

>::Error: Debug, + ) where + P: Pattern, + V: TryFrom + Into + Clone, + >::Error: Debug, { - fold_match_with(self.mcx.clone(), pattern, krate, |x, mcx| { - let orig_node = self.transform.intern(x); + mut_visit_match_with(self.mcx.clone(), pattern, krate, |x, mcx| { + let orig_node = self.transform.intern(x.clone()); let mcx = ScriptingMatchCtxt::wrap(self.transform.clone(), mcx); let new_node = lua_ctx .scope(|scope| { @@ -347,7 +347,7 @@ impl<'a, 'tcx> ScriptingMatchCtxt<'a, 'tcx> { .unwrap_or_else(|e| { panic!("Could not execute callback in match:fold_with {:#?}", e) }); - self.transform.remove_ast(new_node).try_into().unwrap() + *x = self.transform.remove_ast(new_node).try_into().unwrap(); }) } } @@ -396,13 +396,13 @@ impl<'a, 'tcx> UserData for ScriptingMatchCtxt<'a, 'tcx> { methods.add_method( "fold_with", |lua_ctx, this, (needle, krate, f): (LuaAstNode, LuaAstNode, LuaFunction)| { - let krate = ast::Crate::try_from(this.transform.remove_ast(krate)).unwrap(); - let krate = match this.transform.remove_ast(needle).clone() { - RustAstNode::Expr(pattern) => this.fold_with(lua_ctx, pattern, krate, f), - RustAstNode::Ty(pattern) => this.fold_with(lua_ctx, pattern, krate, f), - RustAstNode::Stmts(pattern) => this.fold_with(lua_ctx, pattern, krate, f), + let mut krate = ast::Crate::try_from(this.transform.remove_ast(krate)).unwrap(); + match this.transform.remove_ast(needle).clone() { + RustAstNode::Expr(pattern) => this.fold_with(lua_ctx, pattern, &mut krate, f), + RustAstNode::Ty(pattern) => this.fold_with(lua_ctx, pattern, &mut krate, f), + RustAstNode::Stmts(pattern) => this.fold_with(lua_ctx, pattern, &mut krate, f), _ => return Err(LuaError::external("Unexpected Ast node type")), - }; + } Ok(this.transform.intern(krate)) }, ); @@ -522,14 +522,14 @@ impl<'a, 'tcx> UserData for TransformCtxt<'a, 'tcx> { this.st.map_krate(|krate| { let mut mcx = MatchCtxt::new(this.st, this.cx); let pat = mcx.parse_stmts(&pat); - fold_match_with(mcx, pat, krate, |pat, _mcx| { - let 
i = f.call::<_, LuaAstNode>(this.intern(pat)).unwrap(); - this.nodes + mut_visit_match_with(mcx, pat, krate, |pat, _mcx| { + let i = f.call::<_, LuaAstNode>(this.intern(pat.clone())).unwrap(); + *pat = this.nodes .borrow_mut() .remove(i) .unwrap() .try_into() - .unwrap() + .unwrap(); }) }); Ok(()) @@ -546,14 +546,14 @@ impl<'a, 'tcx> UserData for TransformCtxt<'a, 'tcx> { this.st.map_krate(|krate| { let mut mcx = MatchCtxt::new(this.st, this.cx); let pat = mcx.parse_expr(&pat); - fold_match_with(mcx, pat, krate, |pat, _mcx| { - let i = f.call::<_, LuaAstNode>(this.intern(pat)).unwrap(); - this.nodes + mut_visit_match_with(mcx, pat, krate, |pat, _mcx| { + let i = f.call::<_, LuaAstNode>(this.intern(pat.clone())).unwrap(); + *pat = this.nodes .borrow_mut() .remove(i) .unwrap() .try_into() - .unwrap() + .unwrap(); }) }); Ok(()) diff --git a/c2rust-refactor/src/select/parse.rs b/c2rust-refactor/src/select/parse.rs index 2648bd6c5..a5d13498d 100644 --- a/c2rust-refactor/src/select/parse.rs +++ b/c2rust-refactor/src/select/parse.rs @@ -15,7 +15,6 @@ use syntax_pos::FileName; use crate::pick_node::NodeKind; use crate::ast_manip::remove_paren; use crate::select::{SelectOp, Filter, AnyPattern, ItemLikeKind}; -use crate::util::Lone; type PResult = Result; @@ -98,11 +97,11 @@ impl<'a> Stream<'a> { fn parens_raw(&mut self) -> PResult { match self.take()? 
{ - TokenTree::Delimited(_, d) => { - if d.delim != DelimToken::Paren { - fail!("expected parens, but got {:?}", d.delim); + TokenTree::Delimited(_, delim, tts) => { + if delim != DelimToken::Paren { + fail!("expected parens, but got {:?}", delim); } - Ok(d.tts.into()) + Ok(tts.into()) }, TokenTree::Token(_, tok) => fail!("expected parens, but got {:?}", tok), } @@ -114,7 +113,7 @@ impl<'a> Stream<'a> { fn maybe_parens(&mut self) -> Option> { let has_parens = match self.peek() { - Some(&TokenTree::Delimited(_, ref d)) => d.delim == DelimToken::Paren, + Some(&TokenTree::Delimited(_, delim, _)) => delim == DelimToken::Paren, _ => false, }; @@ -272,12 +271,12 @@ impl<'a> Stream<'a> { let ts = self.parens_raw()?; let mut p = Parser::new(self.sess, ts, None, false, false); - let x = p.parse_expr() + let mut x = p.parse_expr() .map_err(|e| format!("error parsing expr: {}", e.message()))?; p.expect(&Token::Eof) .map_err(|e| format!("error parsing expr: {}", e.message()))?; - let x = remove_paren(x); + remove_paren(&mut x); Ok(Filter::Matches(AnyPattern::Expr(x))) }, @@ -285,12 +284,12 @@ impl<'a> Stream<'a> { let ts = self.parens_raw()?; let mut p = Parser::new(self.sess, ts, None, false, false); - let x = p.parse_pat(None) + let mut x = p.parse_pat(None) .map_err(|e| format!("error parsing pat: {}", e.message()))?; p.expect(&Token::Eof) .map_err(|e| format!("error parsing pat: {}", e.message()))?; - let x = remove_paren(x); + remove_paren(&mut x); Ok(Filter::Matches(AnyPattern::Pat(x))) }, @@ -298,12 +297,12 @@ impl<'a> Stream<'a> { let ts = self.parens_raw()?; let mut p = Parser::new(self.sess, ts, None, false, false); - let x = p.parse_ty() + let mut x = p.parse_ty() .map_err(|e| format!("error parsing ty: {}", e.message()))?; p.expect(&Token::Eof) .map_err(|e| format!("error parsing ty: {}", e.message()))?; - let x = remove_paren(x); + remove_paren(&mut x); Ok(Filter::Matches(AnyPattern::Ty(x))) }, @@ -311,7 +310,7 @@ impl<'a> Stream<'a> { let ts = self.parens_raw()?; 
let mut p = Parser::new(self.sess, ts, None, false, false); - let x = match p.parse_stmt() { + let mut x = match p.parse_stmt() { Ok(Some(x)) => x, Ok(None) => fail!("expected stmt"), Err(e) => fail!("error parsing stmt: {}", e.message()), @@ -322,7 +321,7 @@ impl<'a> Stream<'a> { p.expect(&Token::Eof) .map_err(|e| format!("error parsing stmt: {}", e.message()))?; - let x = remove_paren(x).lone(); + remove_paren(&mut x); Ok(Filter::Matches(AnyPattern::Stmt(x))) }, @@ -453,12 +452,14 @@ impl<'a> Stream<'a> { pub fn parse(sess: &Session, src: &str) -> Vec { - let fm = sess.source_map().new_source_file(FileName::Macros("select".to_owned()), - src.to_owned()); - eprintln!("src = {:?}", src); - eprintln!("fm = {:?}", fm); - let ts = parse::source_file_to_stream(&sess.parse_sess, fm, None); - eprintln!("tokens = {:?}", ts); + debug!("src = {:?}", src); + let ts = parse::parse_stream_from_source_str( + FileName::macro_expansion_source_code(src), + src.to_string(), + &sess.parse_sess, + None + ); + debug!("tokens = {:?}", ts); let mut stream = Stream::new(&sess.parse_sess, ts.into_trees().collect()); let mut ops = Vec::new(); diff --git a/c2rust-refactor/src/span_fix.rs b/c2rust-refactor/src/span_fix.rs index d3da71123..14643597d 100644 --- a/c2rust-refactor/src/span_fix.rs +++ b/c2rust-refactor/src/span_fix.rs @@ -10,15 +10,15 @@ use std::mem; use smallvec::SmallVec; use syntax::ast::*; use syntax::source_map::{Span, DUMMY_SP}; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::ptr::P; use syntax_pos::hygiene::SyntaxContext; -use crate::ast_manip::Fold; +use crate::ast_manip::MutVisit; use crate::ast_manip::util::extended_span; -/// Folder for fixing expansions of `format!`. `format!(..., foo)` generates an expression `&foo`, +/// MutVisitor for fixing expansions of `format!`. 
`format!(..., foo)` generates an expression `&foo`, /// and gives it the same span as `foo` itself (notably, *not* a macro generated span), which /// causes problems for us later on. This folder detects nodes like `&foo` and gives them a /// macro-generated span to fix the problem. @@ -60,8 +60,8 @@ impl FixFormat { } } -impl Folder for FixFormat { - fn fold_expr(&mut self, e: P) -> P { +impl MutVisitor for FixFormat { + fn visit_expr(&mut self, e: &mut P) { if self.in_format && e.span.ctxt() == SyntaxContext::empty() && matches!([e.node] ExprKind::AddrOf(..)) { @@ -69,32 +69,31 @@ impl Folder for FixFormat { // Current node is the `&foo`. We need to change its span. On recursing into `foo`, // we are no longer inside a `format!` invocation. let new_span = self.parent_span; - self.descend(false, e.span, |this| e.map(|e| { - let mut e = fold::noop_fold_expr(e, this); + self.descend(false, e.span, |this| { + mut_visit::noop_visit_expr(e, this); e.span = new_span; - e - })) + }) } else if !self.in_format && self.is_format_entry(&e) { trace!("ENTERING format! at {:?}", e); - self.descend(true, e.span, |this| e.map(|e| fold::noop_fold_expr(e, this))) + self.descend(true, e.span, |this| mut_visit::noop_visit_expr(e, this)) } else { let in_format = self.in_format; - self.descend(in_format, e.span, |this| e.map(|e| fold::noop_fold_expr(e, this))) + self.descend(in_format, e.span, |this| mut_visit::noop_visit_expr(e, this)) } } - fn fold_mac(&mut self, mac: Mac) -> Mac { - fold::noop_fold_mac(mac, self) + fn visit_mac(&mut self, mac: &mut Mac) { + mut_visit::noop_visit_mac(mac, self) } } -/// Folder for fixing up spans of items with attributes. We set the span of the item to include +/// MutVisitor for fixing up spans of items with attributes. We set the span of the item to include /// all its attrs, so that removing the item will also remove the attrs from the source text. 
struct FixAttrs; -impl Folder for FixAttrs { - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]> { +impl MutVisitor for FixAttrs { + fn flat_map_item(&mut self, i: P) -> SmallVec<[P; 1]> { let new_span = extended_span(i.span, &i.attrs); let i = if new_span != i.span { @@ -102,10 +101,10 @@ impl Folder for FixAttrs { } else { i }; - fold::noop_fold_item(i, self) + mut_visit::noop_flat_map_item(i, self) } - fn fold_foreign_item(&mut self, fi: ForeignItem) -> SmallVec<[ForeignItem; 1]> { + fn flat_map_foreign_item(&mut self, fi: ForeignItem) -> SmallVec<[ForeignItem; 1]> { let new_span = extended_span(fi.span, &fi.attrs); let fi = if new_span != fi.span { @@ -113,23 +112,23 @@ impl Folder for FixAttrs { } else { fi }; - fold::noop_fold_foreign_item(fi, self) + mut_visit::noop_flat_map_foreign_item(fi, self) } - fn fold_mac(&mut self, mac: Mac) -> Mac { - fold::noop_fold_mac(mac, self) + fn visit_mac(&mut self, mac: &mut Mac) { + mut_visit::noop_visit_mac(mac, self) } } -pub fn fix_format(node: T) -> ::Result { +pub fn fix_format(node: &mut T) { let mut fix_format = FixFormat { parent_span: DUMMY_SP, in_format: false, }; - node.fold(&mut fix_format) + node.visit(&mut fix_format) } -pub fn fix_attr_spans(node: T) -> ::Result { - node.fold(&mut FixAttrs) +pub fn fix_attr_spans(node: &mut T) { + node.visit(&mut FixAttrs) } diff --git a/c2rust-refactor/src/transform/canonicalize_refs.rs b/c2rust-refactor/src/transform/canonicalize_refs.rs index 9ed6b1d20..bc8f9319b 100644 --- a/c2rust-refactor/src/transform/canonicalize_refs.rs +++ b/c2rust-refactor/src/transform/canonicalize_refs.rs @@ -3,7 +3,7 @@ use syntax::ast::{Crate, Expr, ExprKind, Mutability, UnOp}; use syntax::ptr::P; use c2rust_ast_builder::mk; -use crate::ast_manip::fold_nodes; +use crate::ast_manip::MutVisitNodes; use crate::command::{CommandState, Registry}; use crate::driver::Phase; use crate::transform::Transform; @@ -13,28 +13,27 @@ use crate::RefactorCtxt; struct CanonicalizeRefs; impl Transform for 
CanonicalizeRefs { - fn transform(&self, krate: Crate, _st: &CommandState, cx: &RefactorCtxt) -> Crate { - fold_nodes(krate, |mut expr: P| { + fn transform(&self, krate: &mut Crate, _st: &CommandState, cx: &RefactorCtxt) { + MutVisitNodes::visit(krate, |expr: &mut P| { let hir_expr = cx.hir_map().expect_expr(expr.id); let parent = cx.hir_map().get_parent_did(expr.id); let tables = cx.ty_ctxt().typeck_tables_of(parent); for adjustment in tables.expr_adjustments(hir_expr) { match adjustment.kind { Adjust::Deref(_) => { - expr = mk().unary_expr(UnOp::Deref, expr); + *expr = mk().unary_expr(UnOp::Deref, expr.clone()); } Adjust::Borrow(AutoBorrow::Ref(_, ref mutability)) => { let mutability = match mutability { AutoBorrowMutability::Mutable{..} => Mutability::Mutable, AutoBorrowMutability::Immutable => Mutability::Immutable, }; - expr = mk().set_mutbl(mutability).addr_of_expr(expr); + *expr = mk().set_mutbl(mutability).addr_of_expr(expr.clone()); } _ => {}, } } - expr - }) + }); } fn min_phase(&self) -> Phase { @@ -47,31 +46,26 @@ impl Transform for CanonicalizeRefs { struct RemoveUnnecessaryRefs; impl Transform for RemoveUnnecessaryRefs { - fn transform(&self, krate: Crate, _st: &CommandState, _cx: &RefactorCtxt) -> Crate { - fold_nodes(krate, |expr: P| { - expr.map(|expr| match expr.node { - ExprKind::MethodCall(path, args) => { - let (receiver, rest) = args.split_first().unwrap(); - let receiver = remove_all_derefs(remove_ref(remove_reborrow(receiver.clone()))); - let rest = rest.iter().map(|arg| remove_reborrow(arg.clone())); - let mut args = Vec::with_capacity(args.len() + 1); - args.push(receiver); - args.extend(rest); - Expr { - node: ExprKind::MethodCall(path, args), - ..expr + fn transform(&self, krate: &mut Crate, _st: &CommandState, _cx: &RefactorCtxt) { + MutVisitNodes::visit(krate, |expr: &mut P| { + match &mut expr.node { + ExprKind::MethodCall(_path, args) => { + let (receiver, rest) = args.split_first_mut().unwrap(); + remove_reborrow(receiver); + 
remove_ref(receiver); + remove_all_derefs(receiver); + for arg in rest { + remove_reborrow(arg); } } - ExprKind::Call(callee, args) => { - let args = args.iter().map(|arg| remove_reborrow(arg.clone())).collect(); - Expr { - node: ExprKind::Call(callee, args), - ..expr + ExprKind::Call(_callee, args) => { + for arg in args.iter_mut() { + remove_reborrow(arg); } } - _ => expr, - }) - }) + _ => {} + } + }); } fn min_phase(&self) -> Phase { @@ -79,27 +73,30 @@ impl Transform for RemoveUnnecessaryRefs { } } -fn remove_ref(expr: P) -> P { - expr.map(|expr| match expr.node { - ExprKind::AddrOf(_, expr) => expr.into_inner(), - _ => expr, - }) +fn remove_ref(expr: &mut P) { + match &expr.node { + ExprKind::AddrOf(_, inner) => *expr = inner.clone(), + _ => {} + } } -fn remove_all_derefs(expr: P) -> P { - expr.map(|expr| match expr.node { - ExprKind::Unary(UnOp::Deref, expr) => remove_all_derefs(expr).into_inner(), - _ => expr, - }) +fn remove_all_derefs(expr: &mut P) { + match &expr.node { + ExprKind::Unary(UnOp::Deref, inner) => { + *expr = inner.clone(); + remove_all_derefs(expr); + } + _ => {} + } } -fn remove_reborrow(expr: P) -> P { +fn remove_reborrow(expr: &mut P) { if let ExprKind::AddrOf(_, ref subexpr) = expr.node { if let ExprKind::Unary(UnOp::Deref, ref subexpr) = subexpr.node { - return remove_reborrow(subexpr.clone()); + *expr = subexpr.clone(); + remove_reborrow(expr); } } - expr } pub fn register_commands(reg: &mut Registry) { diff --git a/c2rust-refactor/src/transform/char_literals.rs b/c2rust-refactor/src/transform/char_literals.rs index c409d9a66..d8b54831f 100644 --- a/c2rust-refactor/src/transform/char_literals.rs +++ b/c2rust-refactor/src/transform/char_literals.rs @@ -5,7 +5,7 @@ use c2rust_ast_builder::mk; use crate::command::{CommandState, Registry}; use crate::RefactorCtxt; use crate::driver::{self, Phase}; -use crate::matcher::{Bindings, BindingType, MatchCtxt, Subst, fold_match_with}; +use crate::matcher::{Bindings, BindingType, MatchCtxt, Subst, 
mut_visit_match_with}; use crate::transform::Transform; /// # `char_literals` Command @@ -21,27 +21,24 @@ struct CharLits { impl Transform for CharLits { fn min_phase(&self) -> Phase { Phase::Phase2 } - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let pattern = driver::parse_expr(cx.session(), "__number as libc::c_char"); let mut mcx = MatchCtxt::new(st, cx); mcx.set_type("__number", BindingType::Expr); - let krate = fold_match_with(mcx, pattern.clone(), krate, |e, mcx| { + mut_visit_match_with(mcx, pattern.clone(), krate, |e, mcx| { let field: &P = mcx.bindings.get::<_, P>("__number").unwrap(); if let ExprKind::Lit(ref l) = field.node { if let LitKind::Int(i, _) = l.node { if i < 256 { let mut bnd = Bindings::new(); bnd.add("__number", mk().lit_expr(mk().char_lit(i as u8 as char))); - return pattern.clone().subst(st, cx, &bnd) + *e = pattern.clone().subst(st, cx, &bnd); } } } - e }); - - krate } } diff --git a/c2rust-refactor/src/transform/control_flow.rs b/c2rust-refactor/src/transform/control_flow.rs index e676627c1..183c5e6b3 100644 --- a/c2rust-refactor/src/transform/control_flow.rs +++ b/c2rust-refactor/src/transform/control_flow.rs @@ -2,7 +2,7 @@ use syntax::ast::{Crate, Expr, ExprKind, Lit, LitKind, Stmt, StmtKind}; use syntax::ptr::P; use crate::command::{CommandState, Registry}; -use crate::matcher::{MatchCtxt, Subst, replace_expr, fold_match_with, find_first}; +use crate::matcher::{MatchCtxt, Subst, replace_expr, mut_visit_match_with, find_first}; use crate::transform::Transform; use crate::RefactorCtxt; @@ -17,7 +17,7 @@ use crate::RefactorCtxt; pub struct ReconstructWhile; impl Transform for ReconstructWhile { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let krate = replace_expr( st, cx, krate, r#" @@ -47,7 +47,7 @@ 
impl Transform for ReconstructWhile { pub struct ReconstructForRange; impl Transform for ReconstructForRange { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let mut mcx = MatchCtxt::new(st, cx); let pat_str = r#" $i:Ident = $start:Expr; @@ -68,24 +68,24 @@ impl Transform for ReconstructForRange { let range_step_excl = mcx.parse_stmts("$'label: for $i in ($start .. $end).step_by($step) { $body; }"); let range_step_incl = mcx.parse_stmts("$'label: for $i in ($start ..= $end).step_by($step) { $body; }"); - fold_match_with(mcx, pat, krate, |orig, mut mcx| { + mut_visit_match_with(mcx, pat, krate, |orig, mut mcx| { let cond = mcx.bindings.get::<_, P>("$cond").unwrap().clone(); let range_excl = if mcx.try_match(&*lt_cond, &cond).is_ok() { true } else if mcx.try_match(&*le_cond, &cond).is_ok() { false } else { - return orig; + return; }; let incr = match mcx.bindings.get::<_, Stmt>("$incr").unwrap().node { StmtKind::Semi(ref e) | StmtKind::Expr(ref e) => e.clone(), - _ => { return orig; } + _ => { return; } }; if !mcx.try_match(&*i_plus_eq, &incr).is_ok() && !mcx.try_match(&*i_eq_plus, &incr).is_ok() { - return orig; + return; } let step = mcx.bindings.get::<_, P>("$step").unwrap(); @@ -95,8 +95,8 @@ impl Transform for ReconstructForRange { (false, true) => range_step_excl.clone(), (false, false) => range_step_incl.clone(), }; - repl_step.subst(st, cx, &mcx.bindings) - }) + *orig = repl_step.subst(st, cx, &mcx.bindings); + }); } } @@ -121,11 +121,11 @@ fn is_one_lit(l: &Lit) -> bool { /// Removes loop labels that are not used in a named `break` or `continue`. 
pub struct RemoveUnusedLabels; -fn remove_unused_labels_from_loop_kind(krate: Crate, +fn remove_unused_labels_from_loop_kind(krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt, pat: &str, - repl: &str) -> Crate { + repl: &str) { let mut mcx = MatchCtxt::new(st, cx); let pat = mcx.parse_expr(pat); let repl = mcx.parse_expr(repl); @@ -134,36 +134,34 @@ fn remove_unused_labels_from_loop_kind(krate: Crate, let find_break = mcx.parse_expr("break $'label"); let find_break_expr = mcx.parse_expr("break $'label $bv:Expr"); - fold_match_with(mcx, pat, krate, |orig, mcx| { + mut_visit_match_with(mcx, pat, krate, |orig, mcx| { let body = mcx.bindings.get::<_, Vec>("$body").unwrap(); // TODO: Would be nice to get rid of the clones of body. Might require making // `find_first` use a visitor instead of a `fold`, which means duplicating a lot of the // `PatternFolder` definitions in matcher.rs to make `PatternVisitor` variants. - if find_first(st, cx, find_continue.clone().subst(st, cx, &mcx.bindings), body.clone()).is_none() && - find_first(st, cx, find_break.clone().subst(st, cx, &mcx.bindings), body.clone()).is_none() && - find_first(st, cx, find_break_expr.clone().subst(st, cx, &mcx.bindings), body.clone()).is_none() { - repl.clone().subst(st, cx, &mcx.bindings) - } else { - orig + if find_first(st, cx, find_continue.clone().subst(st, cx, &mcx.bindings), &mut body.clone()).is_none() && + find_first(st, cx, find_break.clone().subst(st, cx, &mcx.bindings), &mut body.clone()).is_none() && + find_first(st, cx, find_break_expr.clone().subst(st, cx, &mcx.bindings), &mut body.clone()).is_none() + { + *orig = repl.clone().subst(st, cx, &mcx.bindings); } - }) + }); } impl Transform for RemoveUnusedLabels { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { - let krate = remove_unused_labels_from_loop_kind(krate, st, cx, + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { + remove_unused_labels_from_loop_kind(krate, st, 
cx, "$'label:Ident: loop { $body:MultiStmt; }", "loop { $body; }"); - let krate = remove_unused_labels_from_loop_kind(krate, st, cx, + remove_unused_labels_from_loop_kind(krate, st, cx, "$'label:Ident: while $cond:Expr { $body:MultiStmt; }", "while $cond { $body; }"); - let krate = remove_unused_labels_from_loop_kind(krate, st, cx, + remove_unused_labels_from_loop_kind(krate, st, cx, "$'label:Ident: while let $pat:Pat = $init:Expr { $body:MultiStmt; }", "while let $pat = $init { $body; }"); - let krate = remove_unused_labels_from_loop_kind(krate, st, cx, + remove_unused_labels_from_loop_kind(krate, st, cx, "$'label:Ident: for $pat:Pat in $iter { $body:MultiStmt; }", "for $pat in $iter { $body; }"); - krate } } diff --git a/c2rust-refactor/src/transform/externs.rs b/c2rust-refactor/src/transform/externs.rs index a4c8f51f5..febc48164 100644 --- a/c2rust-refactor/src/transform/externs.rs +++ b/c2rust-refactor/src/transform/externs.rs @@ -6,7 +6,7 @@ use syntax::ast::*; use syntax::ptr::P; use c2rust_ast_builder::mk; -use crate::ast_manip::{fold_nodes, visit_nodes}; +use crate::ast_manip::{MutVisitNodes, visit_nodes}; use crate::command::{CommandState, Registry}; use crate::driver::{Phase}; use crate::path_edit::fold_resolved_paths_with_id; @@ -42,7 +42,7 @@ fn is_foreign_symbol(tcx: TyCtxt, did: DefId) -> bool { } impl Transform for CanonicalizeExterns { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let tcx = cx.ty_ctxt(); @@ -75,7 +75,7 @@ impl Transform for CanonicalizeExterns { // Map from replaced fn DefId to replacement fn DefId let mut replace_map = HashMap::new(); - visit_nodes(&krate, |fi: &ForeignItem| { + visit_nodes(krate, |fi: &ForeignItem| { if !st.marked(fi.id, "target") { return; } @@ -141,7 +141,7 @@ impl Transform for CanonicalizeExterns { bail!("old and new sig differ in arg count"); } - if old_sig.variadic != new_sig.variadic { + 
if old_sig.c_variadic != new_sig.c_variadic { bail!("old and new sig differ in variadicness"); } @@ -177,7 +177,7 @@ impl Transform for CanonicalizeExterns { // Maps the NodeId of each rewritten path expr to the DefId of the old extern that was // previously referenced by that path. let mut path_ids = HashMap::new(); - let krate = fold_resolved_paths_with_id(krate, cx, |id, qself, path, def| { + fold_resolved_paths_with_id(krate, cx, |id, qself, path, def| { let old_did = match_or!([def.opt_def_id()] Some(x) => x; return (qself, path)); let new_did = match_or!([replace_map.get(&old_did)] Some(&x) => x; return (qself, path)); @@ -188,13 +188,13 @@ impl Transform for CanonicalizeExterns { // Add casts to rewritten calls and exprs - let krate = fold_nodes(krate, |mut e: P| { + MutVisitNodes::visit(krate, |e: &mut P| { if let Some(&old_did) = path_ids.get(&e.id) { // This whole expr was a reference to the old extern `old_did`. See if we need a // cast around the whole thing. (This should only be true for statics.) if let Some(&(old_ty, _new_ty)) = ty_replace_map.get(&(old_did, TyLoc::Whole)) { // The rewritten expr has type `new_ty`, but its context expects `old_ty`. - e = mk().cast_expr(e, reflect::reflect_tcx_ty(tcx, old_ty)); + *e = mk().cast_expr(e.clone(), reflect::reflect_tcx_ty(tcx, old_ty)); } } @@ -213,14 +213,11 @@ impl Transform for CanonicalizeExterns { for i in 0 .. arg_count { let k = (old_did, TyLoc::Arg(i)); if let Some(&(_old_ty, new_ty)) = ty_replace_map.get(&k) { - e = e.map(|mut e| { - expect!([e.node] ExprKind::Call(_, ref mut args) => { - // The new fn requires `new_ty`, where the old one needed `old_ty`. - let ty_ast = reflect::reflect_tcx_ty(tcx, new_ty); - let new_arg = mk().cast_expr(&args[i], ty_ast); - args[i] = new_arg; - }); - e + expect!([e.node] ExprKind::Call(_, ref mut args) => { + // The new fn requires `new_ty`, where the old one needed `old_ty`. 
+ let ty_ast = reflect::reflect_tcx_ty(tcx, new_ty); + let new_arg = mk().cast_expr(&args[i], ty_ast); + args[i] = new_arg; }); info!(" arg {} - rewrote e = {:?}", i, e); } @@ -228,27 +225,21 @@ impl Transform for CanonicalizeExterns { if let Some(&(old_ty, _new_ty)) = ty_replace_map.get(&(old_did, TyLoc::Ret)) { // The new fn returns `new_ty`, where the old context requires `old_ty`. - e = mk().cast_expr(e, reflect::reflect_tcx_ty(tcx, old_ty)); + *e = mk().cast_expr(e.clone(), reflect::reflect_tcx_ty(tcx, old_ty)); info!(" return - rewrote e = {:?}", e); } } - - e }); // Remove the old externs - let krate = fold_nodes(krate, |mut fm: ForeignMod| { + MutVisitNodes::visit(krate, |fm: &mut ForeignMod| { fm.items.retain(|fi| { let did = cx.node_def_id(fi.id); !replace_map.contains_key(&did) }); - fm }); - - - krate } fn min_phase(&self) -> Phase { diff --git a/c2rust-refactor/src/transform/format.rs b/c2rust-refactor/src/transform/format.rs index 7824f9942..81e16a5bc 100644 --- a/c2rust-refactor/src/transform/format.rs +++ b/c2rust-refactor/src/transform/format.rs @@ -1,6 +1,7 @@ use std::collections::{HashMap, HashSet}; use std::str; use std::str::FromStr; +use rustc_data_structures::sync::Lrc; use rustc::hir::def_id::DefId; use syntax::ast::*; use syntax::attr; @@ -11,7 +12,7 @@ use syntax::tokenstream::TokenTree; use syntax_pos::Span; use c2rust_ast_builder::mk; -use crate::ast_manip::{fold_nodes, visit_nodes}; +use crate::ast_manip::{FlatMapNodes, MutVisitNodes, visit_nodes}; use crate::command::{CommandState, Registry}; use crate::transform::Transform; use crate::RefactorCtxt; @@ -48,15 +49,15 @@ use crate::RefactorCtxt; pub struct ConvertFormatArgs; impl Transform for ConvertFormatArgs { - fn transform(&self, krate: Crate, st: &CommandState, _cx: &RefactorCtxt) -> Crate { - fold_nodes(krate, |e: P| { + fn transform(&self, krate: &mut Crate, st: &CommandState, _cx: &RefactorCtxt) { + MutVisitNodes::visit(krate, |e: &mut P| { let fmt_idx = match e.node { 
ExprKind::Call(_, ref args) => args.iter().position(|e| st.marked(e.id, "target")), _ => None, }; if fmt_idx.is_none() { - return e; + return; } let fmt_idx = fmt_idx.unwrap(); @@ -81,7 +82,7 @@ impl Transform for ConvertFormatArgs { let mut new_args = args[..fmt_idx].to_owned(); new_args.push(mk().mac_expr(mac)); - mk().id(st.transfer_marks(e.id)).call_expr(func, new_args) + *e = mk().id(st.transfer_marks(e.id)).call_expr(func, new_args) }) } } @@ -168,8 +169,7 @@ fn build_format_macro( let expr_tt = |mut e: P| { let span = e.span; e.span = DUMMY_SP; - TokenTree::Token(span, Token::interpolated( - Nonterminal::NtExpr(e))) + TokenTree::Token(span, Token::Interpolated(Lrc::new(Nonterminal::NtExpr(e)))) }; macro_tts.push(expr_tt(new_fmt_str_expr)); for (i, arg) in fmt_args[1..].iter().enumerate() { @@ -214,11 +214,11 @@ fn build_format_macro( pub struct ConvertPrintfs; impl Transform for ConvertPrintfs { - fn transform(&self, krate: Crate, _st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, _st: &CommandState, cx: &RefactorCtxt) { let mut printf_defs = HashSet::::new(); let mut fprintf_defs = HashSet::::new(); let mut stderr_defs = HashSet::::new(); - visit_nodes(&krate, |fi: &ForeignItem| { + visit_nodes(krate, |fi: &ForeignItem| { if attr::contains_name(&fi.attrs, "no_mangle") { match (&*fi.ident.as_str(), &fi.node) { ("printf", ForeignItemKind::Fn(_, _)) => { @@ -234,7 +234,7 @@ impl Transform for ConvertPrintfs { } } }); - fold_nodes(krate, |s: Stmt| { + FlatMapNodes::visit(krate, |s: Stmt| { match s.node { StmtKind::Semi(ref expr) => { if let ExprKind::Call(ref f, ref args) = expr.node { diff --git a/c2rust-refactor/src/transform/funcs.rs b/c2rust-refactor/src/transform/funcs.rs index 44fcfe298..16948c47a 100644 --- a/c2rust-refactor/src/transform/funcs.rs +++ b/c2rust-refactor/src/transform/funcs.rs @@ -5,15 +5,15 @@ use rustc_target::spec::abi::Abi; use syntax::ast; use syntax::ast::*; use syntax::attr; -use 
syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::ptr::P; use smallvec::SmallVec; use c2rust_ast_builder::{mk, IntoSymbol}; -use crate::ast_manip::{fold_nodes, fold_modules, visit_nodes, Fold}; +use crate::ast_manip::{FlatMapNodes, MutVisitNodes, fold_modules, visit_nodes, MutVisit}; use crate::command::{CommandState, Registry}; use crate::driver::{Phase, parse_expr}; -use crate::matcher::{BindingType, MatchCtxt, Subst, fold_match_with}; +use crate::matcher::{BindingType, MatchCtxt, Subst, mut_visit_match_with}; use crate::path_edit::{fold_resolved_paths, fold_resolved_paths_with_id}; use crate::transform::Transform; use crate::util::Lone; @@ -38,12 +38,12 @@ use crate::RefactorCtxt; pub struct ToMethod; impl Transform for ToMethod { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { // (1) Find the impl we're inserting into. let mut dest = None; - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { // We're looking for an inherent impl (no `TraitRef`) marked with a cursor. if !st.marked(i.id, "dest") || !matches!([i.node] ItemKind::Impl(_, _, _, _, None, _, _)) { @@ -58,7 +58,7 @@ impl Transform for ToMethod { }); if dest.is_none() { - return krate; + return; } let dest = dest.unwrap(); @@ -80,7 +80,7 @@ impl Transform for ToMethod { } let mut fns = Vec::new(); - let krate = fold_modules(krate, |curs| { + fold_modules(krate, |curs| { while let Some(arg_idx) = curs.advance_until_match(|i| { // Find the argument under the cursor. let decl = match_or!([i.node] ItemKind::Fn(ref decl, ..) => decl; return None); @@ -173,7 +173,7 @@ impl Transform for ToMethod { // FIXME: rustc changed how locals args are represented, and we // don't have a Def for locals any more, and thus no def_id. 
We need // to fix this in path_edit.rs - f.block = fold_resolved_paths(f.block.clone(), cx, |qself, path, def| { + fold_resolved_paths(&mut f.block, cx, |qself, path, def| { match cx.def_to_hir_id(&def) { Some(hir_id) => if hir_id == arg_hir_id { @@ -192,7 +192,7 @@ impl Transform for ToMethod { let mut fns = Some(fns); - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { if i.id != dest.id || fns.is_none() { return smallvec![i]; } @@ -230,14 +230,14 @@ impl Transform for ToMethod { // (5) Find all uses of marked functions, and rewrite them into method calls. - let krate = fold_nodes(krate, |e: P| { + MutVisitNodes::visit(krate, |e: &mut P| { if !matches!([e.node] ExprKind::Call(..)) { - return e; + return; } unpack!([e.node.clone()] ExprKind::Call(func, args)); - let def_id = match_or!([cx.try_resolve_expr(&func)] Some(x) => x; return e); - let info = match_or!([fn_ref_info.get(&def_id)] Some(x) => x; return e); + let def_id = match_or!([cx.try_resolve_expr(&func)] Some(x) => x; return); + let info = match_or!([fn_ref_info.get(&def_id)] Some(x) => x; return); // At this point, we know `func` is a reference to a marked function, and we have the // function's `FnRefInfo`. @@ -248,30 +248,18 @@ impl Transform for ToMethod { let self_arg = args.remove(arg_idx); args.insert(0, self_arg); - e.map(|e| { - Expr { - node: ExprKind::MethodCall( - mk().path_segment(&info.ident), - args), - .. e - } - }) + e.node = ExprKind::MethodCall( + mk().path_segment(&info.ident), + args + ); } else { // There is no `self` argument, but change the function reference to the new path. let mut new_path = cx.def_path(cx.node_def_id(dest.id)); new_path.segments.push(mk().path_segment(&info.ident)); - e.map(|e| { - Expr { - node: ExprKind::Call(mk().path_expr(new_path), args), - .. 
e - } - }) + e.node = ExprKind::Call(mk().path_expr(new_path), args); } }); - - - krate } fn min_phase(&self) -> Phase { @@ -288,23 +276,19 @@ impl Transform for ToMethod { pub struct FixUnusedUnsafe; impl Transform for FixUnusedUnsafe { - fn transform(&self, krate: Crate, _st: &CommandState, cx: &RefactorCtxt) -> Crate { - fold_nodes(krate, |mut b: P| { + fn transform(&self, krate: &mut Crate, _st: &CommandState, cx: &RefactorCtxt) { + MutVisitNodes::visit(krate, |b: &mut P| { if let BlockCheckMode::Unsafe(UnsafeSource::UserProvided) = b.rules { let parent = cx.hir_map().get_parent_did(b.id); let result = cx.ty_ctxt().unsafety_check_result(parent); let unused = result.unsafe_blocks.iter().any(|&(id, used)| { - id == b.id && !used + id == cx.hir_map().node_to_hir_id(b.id) && !used }); if unused { - b = b.map(|b| Block { - rules: BlockCheckMode::Default, - .. b - }); + b.rules = BlockCheckMode::Default; } } - b - }) + }); } fn min_phase(&self) -> Phase { @@ -328,8 +312,8 @@ struct SinkUnsafeFolder<'a> { st: &'a CommandState, } -impl<'a> Folder for SinkUnsafeFolder<'a> { - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]> { +impl<'a> MutVisitor for SinkUnsafeFolder<'a> { + fn flat_map_item(&mut self, i: P) -> SmallVec<[P; 1]> { let i = if self.st.marked(i.id, "target") { i.map(|mut i| { match i.node { @@ -345,10 +329,10 @@ impl<'a> Folder for SinkUnsafeFolder<'a> { }; - fold::noop_fold_item(i, self) + mut_visit::noop_flat_map_item(i, self) } - fn fold_impl_item(&mut self, mut i: ImplItem) -> SmallVec<[ImplItem; 1]> { + fn flat_map_impl_item(&mut self, mut i: ImplItem) -> SmallVec<[ImplItem; 1]> { if self.st.marked(i.id, "target") { match i.node { ImplItemKind::Method(MethodSig { ref mut header, .. 
}, ref mut block) => { @@ -358,7 +342,7 @@ impl<'a> Folder for SinkUnsafeFolder<'a> { } } - fold::noop_fold_impl_item(i, self) + mut_visit::noop_flat_map_impl_item(i, self) } } @@ -372,8 +356,8 @@ fn sink_unsafe(unsafety: &mut Unsafety, block: &mut P) { } impl Transform for SinkUnsafe { - fn transform(&self, krate: Crate, st: &CommandState, _cx: &RefactorCtxt) -> Crate { - krate.fold(&mut SinkUnsafeFolder { st }) + fn transform(&self, krate: &mut Crate, st: &CommandState, _cx: &RefactorCtxt) { + krate.visit(&mut SinkUnsafeFolder { st }) } } @@ -426,7 +410,7 @@ impl Transform for SinkUnsafe { pub struct WrapExtern; impl Transform for WrapExtern { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { // (1) Collect the marked externs. #[derive(Debug)] struct FuncInfo { @@ -437,7 +421,7 @@ impl Transform for WrapExtern { } let mut fns = Vec::new(); - visit_nodes(&krate, |fi: &ForeignItem| { + visit_nodes(krate, |fi: &ForeignItem| { if !st.marked(fi.id, "target") { return; } @@ -463,7 +447,7 @@ impl Transform for WrapExtern { // (2) Generate wrappers in the destination module. let mut dest_path = None; - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { if !st.marked(i.id, "dest") { return smallvec![i]; } @@ -507,7 +491,7 @@ impl Transform for WrapExtern { let decl = P(FnDecl { inputs: wrapper_args, output: f.decl.output.clone(), - variadic: false, + c_variadic: false, }); let body = mk().block(vec![ mk().expr_stmt(mk().call_expr( @@ -526,13 +510,13 @@ impl Transform for WrapExtern { if dest_path.is_none() { info!("warning: found no \"dest\" mark"); - return krate; + return; } let dest_path = dest_path.unwrap(); // (3) Rewrite call sites to use the new wrappers. 
let ident_map = fns.iter().map(|f| (f.def_id, f.ident)).collect::>(); - let krate = fold_resolved_paths(krate, cx, |qself, path, def| { + fold_resolved_paths(krate, cx, |qself, path, def| { match def.opt_def_id() { Some(def_id) if ident_map.contains_key(&def_id) => { let ident = ident_map.get(&def_id).unwrap(); @@ -543,8 +527,6 @@ impl Transform for WrapExtern { _ => (qself, path), } }); - - krate } fn min_phase(&self) -> Phase { @@ -572,12 +554,12 @@ impl Transform for WrapExtern { pub struct WrapApi; impl Transform for WrapApi { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { // Map from original function HirId to new function name let mut wrapper_map = HashMap::new(); // Add wrapper functions - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { if !st.marked(i.id, "target") { return smallvec![i]; } @@ -688,13 +670,13 @@ impl Transform for WrapApi { // these, we edit them to refer to the wrapper, which has the same type (specifically, the // same ABI) as the old function. 
let mut callees = HashSet::new(); - visit_nodes(&krate, |e: &Expr| { + visit_nodes(krate, |e: &Expr| { if let ExprKind::Call(ref callee, _) = e.node { callees.insert(callee.id); } }); - let krate = fold_resolved_paths_with_id(krate, cx, |id, q, p, d| { + fold_resolved_paths_with_id(krate, cx, |id, q, p, d| { if callees.contains(&id) || q.is_some() { return (q, p); } @@ -706,8 +688,6 @@ impl Transform for WrapApi { new_path.segments.push(mk().path_segment(name)); (q, new_path) }); - - krate } fn min_phase(&self) -> Phase { @@ -751,7 +731,7 @@ struct Abstract { } impl Transform for Abstract { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let pat = parse_expr(cx.session(), &self.pat); let func_src = format!("unsafe fn {} {{\n {}\n}}", @@ -794,21 +774,18 @@ impl Transform for Abstract { init_mcx.set_type(name.name, BindingType::Ty); } - let krate = fold_match_with(init_mcx, pat, krate, |_ast, mut mcx| { + mut_visit_match_with(init_mcx, pat, krate, |ast, mut mcx| { for name in &type_args { if mcx.bindings.get::<_, P>(name.name).is_none() { mcx.bindings.add(name.name, mk().infer_ty()); } } - call_expr.clone().subst(st, cx, &mcx.bindings) + *ast = call_expr.clone().subst(st, cx, &mcx.bindings); }); // Add the function definition to the crate - let mut krate = krate; krate.module.items.push(func); - - krate } fn min_phase(&self) -> Phase { diff --git a/c2rust-refactor/src/transform/generics.rs b/c2rust-refactor/src/transform/generics.rs index 6abacbe5f..dd77999f6 100644 --- a/c2rust-refactor/src/transform/generics.rs +++ b/c2rust-refactor/src/transform/generics.rs @@ -3,7 +3,7 @@ use syntax::ast::*; use syntax::ptr::P; use syntax::symbol::Symbol; -use crate::ast_manip::fold_nodes; +use crate::ast_manip::{FlatMapNodes, MutVisitNodes}; use crate::command::{CommandState, Registry}; use crate::driver::{parse_ty}; use 
crate::path_edit::fold_resolved_paths_with_id; @@ -69,7 +69,7 @@ pub struct GeneralizeItems { } impl Transform for GeneralizeItems { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { // (1) Find marked types and replace with the named type variable. // Map from item NodeId to the concrete type that was replaced with the type variable. @@ -78,26 +78,26 @@ impl Transform for GeneralizeItems { let mut replacement_ty = self.replacement_ty.as_ref() .map(|s| parse_ty(cx.session(), s)); - let krate = fold_nodes(krate, |ty: P| { + MutVisitNodes::visit(krate, |ty: &mut P| { if !st.marked(ty.id, "target") { - return ty; + return; } let parent_id = cx.hir_map().get_parent(ty.id); if !st.marked(parent_id, "target") { - return ty; + return; } if replacement_ty.is_none() { replacement_ty = Some(ty.clone()); } - mk().ident_ty(self.ty_var_name) + *ty = mk().ident_ty(self.ty_var_name) }); // (2) Add parameters to rewritten items. 
let mut item_def_ids = HashSet::new(); - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { if !st.marked(i.id, "target") { return smallvec![i]; } @@ -126,7 +126,7 @@ impl Transform for GeneralizeItems { let replacement_ty = replacement_ty .expect("must provide a replacement type argument or mark"); - let krate = fold_resolved_paths_with_id(krate, cx, |path_id, qself, mut path, def| { + fold_resolved_paths_with_id(krate, cx, |path_id, qself, mut path, def| { match def.opt_def_id() { Some(def_id) if item_def_ids.contains(&def_id) => (), _ => return (qself, path), @@ -159,8 +159,6 @@ impl Transform for GeneralizeItems { (qself, path) }); - - krate } } diff --git a/c2rust-refactor/src/transform/ionize.rs b/c2rust-refactor/src/transform/ionize.rs index ec28e9676..da5b33160 100644 --- a/c2rust-refactor/src/transform/ionize.rs +++ b/c2rust-refactor/src/transform/ionize.rs @@ -3,16 +3,16 @@ use rustc::ty::TyKind; use std::collections::HashSet; use std::fmt::Display; use syntax::ast::*; -use syntax::fold::Folder; +use syntax::mut_visit::MutVisitor; use syntax::ptr::P; use c2rust_ast_builder::mk; -use crate::ast_manip::{Fold, visit_nodes, fold_nodes}; +use crate::ast_manip::{FlatMapNodes, MutVisit, visit_nodes}; use crate::ast_manip::lr_expr::{self, fold_expr_with_context}; use crate::command::{CommandState, Registry}; use crate::driver::{Phase, parse_impl_items, parse_stmts, parse_expr}; use crate::reflect::reflect_def_path; -use crate::matcher::{Bindings, BindingType, MatchCtxt, Subst, fold_match_with}; +use crate::matcher::{Bindings, BindingType, MatchCtxt, Subst, mut_visit_match_with}; use crate::transform::Transform; use crate::RefactorCtxt; @@ -35,16 +35,16 @@ struct ExprFolder { callback: F, } -impl) -> P> Folder for ExprFolder { - fn fold_expr(&mut self, e: P) -> P { +impl)> MutVisitor for ExprFolder { + fn visit_expr(&mut self, e: &mut P) { (self.callback)(e) } } -fn fold_top_exprs(x: T, callback: F) -> ::Result - where T: Fold, F: 
FnMut(P) -> P { +fn fold_top_exprs(x: &mut T, callback: F) + where T: MutVisit, F: FnMut(&mut P) { let mut f = ExprFolder { callback: callback }; - x.fold(&mut f) + x.visit(&mut f) } fn accessor_name(fieldname: T) -> Ident { @@ -77,7 +77,7 @@ fn generate_enum_accessors(cx: &RefactorCtxt) -> Vec { impl Transform for Ionize { fn min_phase(&self) -> Phase { Phase::Phase3 } - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let _as_variant_methods = generate_enum_accessors(cx); let outer_assignment_pat = parse_stmts(cx.session(), "__val.__field = __expr;"); @@ -87,7 +87,7 @@ impl Transform for Ionize { let mut targets: HashSet = HashSet::new(); // Find marked unions - visit_nodes(&krate, |i: &Item| { + visit_nodes(krate, |i: &Item| { if st.marked(i.id, "target") { if let ItemKind::Union(VariantData::Struct(ref _fields, _), _) = i.node { if let Some(def_id) = cx.hir_map().opt_local_def_id(i.id) { @@ -107,7 +107,7 @@ impl Transform for Ionize { mcx.set_type("__val", BindingType::Expr); // Replace union assignment with enum assignment - let krate = fold_match_with(mcx, outer_assignment_pat, krate, |e, mcx| { + mut_visit_match_with(mcx, outer_assignment_pat, krate, |e, mcx| { let field = mcx.bindings.get::<_, Ident>("__field").unwrap(); let _expr = mcx.bindings.get::<_, P>("__expr").unwrap(); let val = mcx.bindings.get::<_, P>("__val").unwrap(); @@ -122,9 +122,9 @@ impl Transform for Ionize { let mut bnd1 = mcx.bindings.clone(); bnd1.add("__con", mk().path_expr(path)); - outer_assignment_repl.clone().subst(st, cx, &bnd1) + *e = outer_assignment_repl.clone().subst(st, cx, &bnd1); } - _ => e + _ => {} } }); @@ -134,7 +134,7 @@ impl Transform for Ionize { mcx.set_type("__field", BindingType::Ident); mcx.set_type("__val", BindingType::Expr); - let krate = fold_top_exprs(krate, |e: P| { + fold_top_exprs(krate, |e: &mut P| { fold_expr_with_context(e, 
lr_expr::Context::Rvalue, |e, context| { if lr_expr::Context::Rvalue == context { match mcx.clone_match(&*outer_access_pat, &*e) { @@ -142,18 +142,16 @@ impl Transform for Ionize { let mut bnd = mcx1.bindings.clone(); let accessor = accessor_name(bnd.get::<_, Ident>("__field").unwrap()); bnd.add("__accessor", accessor); - outer_access_repl.clone().subst(st, cx, &bnd) + *e = outer_access_repl.clone().subst(st, cx, &bnd); } - Err(_) => e, + Err(_) => {} } - } else { - e } - }) + }); }); // Replace union with enum - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { match cx.hir_map().opt_local_def_id(i.id) { Some(ref def_id) if targets.contains(def_id) => {} _ => return smallvec![i] @@ -189,8 +187,6 @@ impl Transform for Ionize { panic!("ionize: Marked target not a union") } }); - - krate } } diff --git a/c2rust-refactor/src/transform/items.rs b/c2rust-refactor/src/transform/items.rs index 0ba1e6dc7..198c217ab 100644 --- a/c2rust-refactor/src/transform/items.rs +++ b/c2rust-refactor/src/transform/items.rs @@ -5,13 +5,13 @@ use rustc::hir::HirId; use syntax::attr; use syntax::ast::*; use syntax::source_map::DUMMY_SP; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::ptr::P; use syntax::symbol::Symbol; use smallvec::SmallVec; use c2rust_ast_builder::{mk, Make, IntoSymbol}; -use crate::ast_manip::{fold_nodes, Fold, AstEquiv}; +use crate::ast_manip::{FlatMapNodes, MutVisit, AstEquiv}; use crate::command::{CommandState, Registry}; use crate::driver::{self, Phase}; use crate::path_edit::fold_resolved_paths; @@ -34,14 +34,14 @@ pub struct RenameRegex { } impl Transform for RenameRegex { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let re = Regex::new(&self.pattern).unwrap(); // (1) Fold over items and rewrite their `ident`s. Records the new paths of modified items // into `new_paths`. 
let mut new_idents = HashMap::new(); - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { if let Some(label) = self.filter { if !st.marked(i.id, label) { return smallvec![i]; @@ -66,7 +66,7 @@ impl Transform for RenameRegex { // (2) Rewrite paths referring to renamed defs - let krate = fold_resolved_paths(krate, cx, |qself, mut path, def| { + fold_resolved_paths(krate, cx, |qself, mut path, def| { if let Some(hir_id) = cx.def_to_hir_id(def) { if let Some(new_ident) = new_idents.get(&hir_id) { path.segments.last_mut().unwrap().ident = new_ident.clone(); @@ -74,8 +74,6 @@ impl Transform for RenameRegex { } (qself, path) }); - - krate } } @@ -120,7 +118,7 @@ impl Transform for RenameRegex { pub struct RenameUnnamed; impl Transform for RenameUnnamed { - fn transform(&self, krate: Crate, _st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, _st: &CommandState, cx: &RefactorCtxt) { #[derive(Debug, Default)] struct Renamer { items_to_change: HashSet, @@ -135,7 +133,7 @@ impl Transform for RenameUnnamed { let make_name = |counter| { Ident::from_str(&format!("unnamed_{}", counter)) }; // 1. Rename Anonymous types to the unique Ident - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { if attr::contains_name(&i.attrs, "header_src") && !renamer.is_source { renamer.is_source = true; } @@ -163,7 +161,7 @@ impl Transform for RenameUnnamed { }); // 2. 
Update types to match the new renamed Anonymous Types - let krate = fold_resolved_paths(krate, cx, |qself, mut path, def| { + fold_resolved_paths(krate, cx, |qself, mut path, def| { if let Some(hir_id) = cx.def_to_hir_id(def) { if let Some(new_ident) = renamer.new_idents.get(&hir_id) { path.segments.last_mut().unwrap().ident = new_ident.clone(); @@ -175,11 +173,11 @@ impl Transform for RenameUnnamed { // No need to update paths if the project wasn't transpiled // with `--reorganize-definitions` flag if !renamer.is_source { - return krate; + return; } // 3. Update paths to from the old AnonymousType `Ident` to the new AnonymousType `Ident` - let krate = fold_nodes(krate, |mut i: P| { + FlatMapNodes::visit(krate, |mut i: P| { // This pass is only intended to be ran when the `--reorganize-definition` flag is used // on `c2rust-transpile`, and the reason is due to having use statements importing // `Item`s within submodules (also the only time the `c2rust-transpile`r uses use @@ -274,8 +272,6 @@ impl Transform for RenameUnnamed { } smallvec![i] }); - - krate } fn min_phase(&self) -> Phase { @@ -295,7 +291,7 @@ impl Transform for RenameUnnamed { pub struct ReplaceItems; impl Transform for ReplaceItems { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { // (1) Scan items for `target` and `repl` marks, collecting the relevant `DefId`s and // removing all `target` items. @@ -303,7 +299,7 @@ impl Transform for ReplaceItems { let mut repl_id = None; // (1a) Top-level items - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { if st.marked(i.id, "repl") { if repl_id.is_none() { repl_id = Some(cx.node_def_id(i.id)); @@ -322,7 +318,7 @@ impl Transform for ReplaceItems { // (1b) Impl items // TODO: Only inherent impls are supported for now. May not work on trait impls. 
- let krate = fold_nodes(krate, |i: ImplItem| { + FlatMapNodes::visit(krate, |i: ImplItem| { if st.marked(i.id, "repl") { if repl_id.is_none() { repl_id = Some(cx.node_def_id(i.id)); @@ -343,7 +339,7 @@ impl Transform for ReplaceItems { // (2) Rewrite references to `target` items to refer to `repl` instead. - let krate = fold_resolved_paths(krate, cx, |qself, path, def| { + fold_resolved_paths(krate, cx, |qself, path, def| { match def.opt_def_id() { Some(def_id) if target_ids.contains(&def_id) => (None, cx.def_path(repl_id)), @@ -354,7 +350,7 @@ impl Transform for ReplaceItems { // (3) Find impls for `target` types, and remove them. This way, if a struct is removed, // we also remove the associated `Clone` impl. - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { let opt_def_id = match i.node { ItemKind::Impl(_, _, _, _, _, ref ty, _) => cx.try_resolve_ty(ty), _ => None, @@ -367,8 +363,6 @@ impl Transform for ReplaceItems { } smallvec![i] }); - - krate } fn min_phase(&self) -> Phase { @@ -392,7 +386,7 @@ pub struct SetVisibility { } impl Transform for SetVisibility { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let vis = driver::run_parser(cx.session(), &self.vis_str, |p| p.parse_visibility(false)); @@ -405,8 +399,8 @@ impl Transform for SetVisibility { in_trait_impl: bool, } - impl<'a> Folder for SetVisFolder<'a> { - fn fold_item(&mut self, mut i: P) -> SmallVec<[P; 1]> { + impl<'a> MutVisitor for SetVisFolder<'a> { + fn flat_map_item(&mut self, mut i: P) -> SmallVec<[P; 1]> { if self.st.marked(i.id, "target") && !i.vis.ast_equiv(&self.vis) { i = i.map(|mut i| { i.vis = self.vis.clone(); @@ -417,34 +411,34 @@ impl Transform for SetVisibility { let was_in_trait_impl = self.in_trait_impl; self.in_trait_impl = matches!([i.node] ItemKind::Impl(_, _, _, _, Some(_), _, _)); - let r = fold::noop_fold_item(i, self); + let r = 
mut_visit::noop_flat_map_item(i, self); self.in_trait_impl = was_in_trait_impl; r } - fn fold_impl_item(&mut self, mut i: ImplItem) -> SmallVec<[ImplItem; 1]> { + fn flat_map_impl_item(&mut self, mut i: ImplItem) -> SmallVec<[ImplItem; 1]> { if self.in_trait_impl { - return fold::noop_fold_impl_item(i, self); + return mut_visit::noop_flat_map_impl_item(i, self); } if self.st.marked(i.id, "target") { i.vis = self.vis.clone(); } - fold::noop_fold_impl_item(i, self) + mut_visit::noop_flat_map_impl_item(i, self) } - fn fold_foreign_item(&mut self, mut i: ForeignItem) -> SmallVec<[ForeignItem; 1]> { + fn flat_map_foreign_item(&mut self, mut i: ForeignItem) -> SmallVec<[ForeignItem; 1]> { if self.st.marked(i.id, "target") { i.vis = self.vis.clone(); } - fold::noop_fold_foreign_item(i, self) + mut_visit::noop_flat_map_foreign_item(i, self) } // Trait items have no visibility. } - krate.fold(&mut SetVisFolder { st, vis, in_trait_impl: false }) + krate.visit(&mut SetVisFolder { st, vis, in_trait_impl: false }) } } @@ -462,7 +456,7 @@ pub struct SetMutability { } impl Transform for SetMutability { - fn transform(&self, krate: Crate, st: &CommandState, _cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, _cx: &RefactorCtxt) { let mutbl = <&str as Make>::make(&self.mut_str, &mk()); struct SetMutFolder<'a> { @@ -470,8 +464,8 @@ impl Transform for SetMutability { mutbl: Mutability, } - impl<'a> Folder for SetMutFolder<'a> { - fn fold_item(&mut self, mut i: P) -> SmallVec<[P; 1]> { + impl<'a> MutVisitor for SetMutFolder<'a> { + fn flat_map_item(&mut self, mut i: P) -> SmallVec<[P; 1]> { if self.st.marked(i.id, "target") { i = i.map(|mut i| { match i.node { @@ -481,10 +475,10 @@ impl Transform for SetMutability { i }); } - fold::noop_fold_item(i, self) + mut_visit::noop_flat_map_item(i, self) } - fn fold_foreign_item(&mut self, mut i: ForeignItem) -> SmallVec<[ForeignItem; 1]> { + fn flat_map_foreign_item(&mut self, mut i: ForeignItem) -> 
SmallVec<[ForeignItem; 1]> { if self.st.marked(i.id, "target") { match i.node { ForeignItemKind::Static(_, ref mut is_mutbl) => @@ -492,11 +486,11 @@ impl Transform for SetMutability { _ => {}, } } - fold::noop_fold_foreign_item(i, self) + mut_visit::noop_flat_map_foreign_item(i, self) } } - krate.fold(&mut SetMutFolder { st, mutbl }) + krate.visit(&mut SetMutFolder { st, mutbl }) } } @@ -507,7 +501,7 @@ pub struct SetUnsafety { } impl Transform for SetUnsafety { - fn transform(&self, krate: Crate, st: &CommandState, _cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, _cx: &RefactorCtxt) { let unsafety = <&str as Make>::make(&self.unsafe_str, &mk()); struct SetUnsafetyFolder<'a> { @@ -515,8 +509,8 @@ impl Transform for SetUnsafety { unsafety: Unsafety, } - impl<'a> Folder for SetUnsafetyFolder<'a> { - fn fold_item(&mut self, mut i: P) -> SmallVec<[P; 1]> { + impl<'a> MutVisitor for SetUnsafetyFolder<'a> { + fn flat_map_item(&mut self, mut i: P) -> SmallVec<[P; 1]> { if self.st.marked(i.id, "target") { i = i.map(|mut i| { match i.node { @@ -531,10 +525,10 @@ impl Transform for SetUnsafety { i }); } - fold::noop_fold_item(i, self) + mut_visit::noop_flat_map_item(i, self) } - fn fold_trait_item(&mut self, mut i: TraitItem) -> SmallVec<[TraitItem; 1]> { + fn flat_map_trait_item(&mut self, mut i: TraitItem) -> SmallVec<[TraitItem; 1]> { if self.st.marked(i.id, "target") { match i.node { TraitItemKind::Method(ref mut sig, _) => @@ -542,10 +536,10 @@ impl Transform for SetUnsafety { _ => {}, } } - fold::noop_fold_trait_item(i, self) + mut_visit::noop_flat_map_trait_item(i, self) } - fn fold_impl_item(&mut self, mut i: ImplItem) -> SmallVec<[ImplItem; 1]> { + fn flat_map_impl_item(&mut self, mut i: ImplItem) -> SmallVec<[ImplItem; 1]> { if self.st.marked(i.id, "target") { match i.node { ImplItemKind::Method(ref mut sig, _) => @@ -553,11 +547,11 @@ impl Transform for SetUnsafety { _ => {}, } } - fold::noop_fold_impl_item(i, self) + 
mut_visit::noop_flat_map_impl_item(i, self) } } - krate.fold(&mut SetUnsafetyFolder { st, unsafety }) + krate.visit(&mut SetUnsafetyFolder { st, unsafety }) } } @@ -580,7 +574,7 @@ pub struct CreateItem { } impl Transform for CreateItem { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let mark = self.mark; let inside = match &self.pos as &str { @@ -604,13 +598,13 @@ impl Transform for CreateItem { } impl<'a> CreateFolder<'a> { - fn handle_mod(&mut self, parent_id: NodeId, m: Mod, skip_dummy: bool) -> Mod { + fn handle_mod(&mut self, parent_id: NodeId, m: &mut Mod, skip_dummy: bool) { let mut items = Vec::with_capacity(m.items.len()); // When true, insert before the next item that satisfies `skip_dummy` let mut insert_inside = self.inside && self.st.marked(parent_id, self.mark); - for i in m.items { + for i in &m.items { if insert_inside { // Special case for `inside` mode with the Crate marked. We want to insert // after the injected std and prelude items, because inserting before an @@ -622,7 +616,7 @@ impl Transform for CreateItem { } let insert = !self.inside && self.st.marked(i.id, self.mark); - items.push(i); + items.push(i.clone()); if insert { items.extend(self.items.iter().cloned()); } @@ -633,69 +627,55 @@ impl Transform for CreateItem { items.extend(self.items.iter().cloned()); } - Mod { items, ..m } + m.items = items; } } - impl<'a> Folder for CreateFolder<'a> { - fn fold_crate(&mut self, c: Crate) -> Crate { - let c = Crate { - module: self.handle_mod(CRATE_NODE_ID, c.module, true), - ..c - }; - - // We do this instead of noop_fold_module, because noop_fold_crate makes up a dummy - // Item for the crate, causing us to try and insert into c.module a second time. - // (We don't just omit fold_crate and rely on this dummy item because the dummy - // item has DUMMY_NODE_ID instead of CRATE_NODE_ID.) 
- Crate { - module: fold::noop_fold_mod(c.module, self), - ..c - } + impl<'a> MutVisitor for CreateFolder<'a> { + fn visit_crate(&mut self, c: &mut Crate) { + self.handle_mod(CRATE_NODE_ID, &mut c.module, true); + + // We do this instead of noop_visit_crate, because + // noop_visit_crate makes up a dummy Item for the crate, causing + // us to try and insert into c.module a second time. (We don't + // just omit fold_crate and rely on this dummy item because the + // dummy item has DUMMY_NODE_ID instead of CRATE_NODE_ID.) + mut_visit::noop_visit_mod(&mut c.module, self); } - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]> { - let i = if !matches!([i.node] ItemKind::Mod(..)) { - i - } else { - i.map(|i| { - unpack!([i.node] ItemKind::Mod(m)); - Item { - node: ItemKind::Mod(self.handle_mod(i.id, m, false)), - .. i - } - }) - }; - fold::noop_fold_item(i, self) + fn flat_map_item(&mut self, mut i: P) -> SmallVec<[P; 1]> { + let id = i.id; + if let ItemKind::Mod(m) = &mut i.node { + self.handle_mod(id, m, false); + } + mut_visit::noop_flat_map_item(i, self) } - fn fold_block(&mut self, b: P) -> P { - let b = b.map(|b| { - let mut stmts = Vec::with_capacity(b.stmts.len()); + fn visit_block(&mut self, b: &mut P) { + let mut stmts = Vec::with_capacity(b.stmts.len()); - if self.inside && self.st.marked(b.id, self.mark) { - stmts.extend(self.items.iter().cloned().map(|i| mk().item_stmt(i))); - } + if self.inside && self.st.marked(b.id, self.mark) { + stmts.extend(self.items.iter().cloned().map(|i| mk().item_stmt(i))); + } - for s in b.stmts { - let insert = !self.inside && self.st.marked(s.id, self.mark); - stmts.push(s); - if insert { - stmts.extend(self.items.iter().cloned().map(|i| mk().item_stmt(i))); - } + for s in &b.stmts { + let insert = !self.inside && self.st.marked(s.id, self.mark); + stmts.push(s.clone()); + if insert { + stmts.extend(self.items.iter().cloned().map(|i| mk().item_stmt(i))); } + } + b.stmts = stmts; - Block { stmts, .. 
b } - }); - fold::noop_fold_block(b, self) + mut_visit::noop_visit_block(b, self) } - fn fold_mac(&mut self, mac: Mac) -> Mac { - fold::noop_fold_mac(mac, self) + fn visit_mac(&mut self, mac: &mut Mac) { + mut_visit::noop_visit_mac(mac, self) } } - krate.fold(&mut CreateFolder { st, mark, inside, items }) + krate.visit(&mut CreateFolder { st, mark, inside, items }) } } @@ -711,7 +691,7 @@ impl Transform for CreateItem { pub struct DeleteItems; impl Transform for DeleteItems { - fn transform(&self, krate: Crate, st: &CommandState, _cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, _cx: &RefactorCtxt) { let mark = "target".into_symbol(); struct DeleteFolder<'a> { @@ -719,25 +699,22 @@ impl Transform for DeleteItems { mark: Symbol, } - impl<'a> Folder for DeleteFolder<'a> { - fn fold_mod(&mut self, mut m: Mod) -> Mod { + impl<'a> MutVisitor for DeleteFolder<'a> { + fn visit_mod(&mut self, m: &mut Mod) { m.items.retain(|i| !self.st.marked(i.id, self.mark)); - fold::noop_fold_mod(m, self) + mut_visit::noop_visit_mod(m, self) } - fn fold_block(&mut self, b: P) -> P { - let b = b.map(|mut b| { - b.stmts.retain(|s| match s.node { - StmtKind::Item(ref i) => !self.st.marked(i.id, self.mark), - _ => true, - }); - b + fn visit_block(&mut self, b: &mut P) { + b.stmts.retain(|s| match s.node { + StmtKind::Item(ref i) => !self.st.marked(i.id, self.mark), + _ => true, }); - fold::noop_fold_block(b, self) + mut_visit::noop_visit_block(b, self) } } - krate.fold(&mut DeleteFolder { st, mark }) + krate.visit(&mut DeleteFolder { st, mark }) } } diff --git a/c2rust-refactor/src/transform/linkage.rs b/c2rust-refactor/src/transform/linkage.rs index 6905f3425..f835427b1 100644 --- a/c2rust-refactor/src/transform/linkage.rs +++ b/c2rust-refactor/src/transform/linkage.rs @@ -5,7 +5,7 @@ use syntax::attr; use syntax::ptr::P; use syntax::symbol::Symbol; -use crate::ast_manip::{fold_nodes, visit_nodes}; +use crate::ast_manip::{FlatMapNodes, MutVisitNodes, 
visit_nodes}; use crate::ast_manip::fn_edit::{visit_fns, FnKind}; use crate::command::{CommandState, Registry}; use crate::driver::{Phase}; @@ -58,13 +58,13 @@ use crate::RefactorCtxt; pub struct LinkFuncs; impl Transform for LinkFuncs { - fn transform(&self, krate: Crate, _st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, _st: &CommandState, cx: &RefactorCtxt) { // (1) Find all `#[no_mangle]` or `#[export_name=...]` functions, and index them by symbol. // (2) Find all extern fns, and index them by def_id. let mut symbol_to_def = HashMap::new(); let mut extern_def_to_symbol = HashMap::new(); - visit_fns(&krate, |fl| { + visit_fns(krate, |fl| { let def_id = cx.node_def_id(fl.id); if fl.kind != FnKind::Foreign { if let Some(name) = attr::first_attr_value_str_by_name(&fl.attrs, "export_name") { @@ -78,7 +78,7 @@ impl Transform for LinkFuncs { }); // (3) Adjust references to extern fns to refer to the `#[no_mangle]` definition instead. - let krate = fold_resolved_paths(krate, cx, |qself, path, def| { + fold_resolved_paths(krate, cx, |qself, path, def| { if let Some(def_id) = def.opt_def_id() { if let Some(&symbol) = extern_def_to_symbol.get(&def_id) { if let Some(&real_def_id) = symbol_to_def.get(&symbol) { @@ -90,7 +90,7 @@ impl Transform for LinkFuncs { }); // (4) Remove unused externs - let krate = fold_nodes(krate, |mut fm: ForeignMod| { + MutVisitNodes::visit(krate, |fm: &mut ForeignMod| { fm.items.retain(|i| { let def_id = cx.node_def_id(i.id); // Drop any items that resolve to a symbol in another module. 
@@ -101,10 +101,7 @@ impl Transform for LinkFuncs { } true }); - fm }); - - krate } fn min_phase(&self) -> Phase { @@ -150,12 +147,12 @@ impl Transform for LinkFuncs { pub struct LinkIncompleteTypes; impl Transform for LinkIncompleteTypes { - fn transform(&self, krate: Crate, _st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, _st: &CommandState, cx: &RefactorCtxt) { // (1) Find complete type definitions, and index them by name. let mut name_to_complete = HashMap::new(); let mut incomplete_to_name = HashMap::new(); - visit_nodes(&krate, |i: &Item| { + visit_nodes(krate, |i: &Item| { let complete = match i.node { ItemKind::Struct(..) => true, ItemKind::Union(..) => true, @@ -171,7 +168,7 @@ impl Transform for LinkIncompleteTypes { }); // (2) Find incomplete type definitions (extern types), and index them by name. - visit_nodes(&krate, |i: &ForeignItem| { + visit_nodes(krate, |i: &ForeignItem| { let incomplete = match i.node { ForeignItemKind::Ty => true, _ => false, @@ -244,11 +241,11 @@ impl Transform for LinkIncompleteTypes { pub struct CanonicalizeStructs; impl Transform for CanonicalizeStructs { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { // (1) Find all marked structs. let mut canon_ids: HashMap = HashMap::new(); - visit_nodes(&krate, |i: &Item| { + visit_nodes(krate, |i: &Item| { if st.marked(i.id, "target") { canon_ids.insert(i.ident.name, cx.node_def_id(i.id)); } @@ -259,7 +256,7 @@ impl Transform for CanonicalizeStructs { // Map removed struct IDs to their replacements. let mut removed_id_map = HashMap::new(); - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { let should_remove = match i.node { ItemKind::Struct(..) 
=> { if let Some(&canon_def_id) = canon_ids.get(&i.ident.name) { @@ -286,7 +283,7 @@ impl Transform for CanonicalizeStructs { // (3) Remove impls for removed structs. - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { let should_remove = match i.node { ItemKind::Impl(_, _, _, _, _, ref ty, _) => { if let Some(ty_def_id) = cx.try_resolve_ty(ty) { @@ -307,7 +304,7 @@ impl Transform for CanonicalizeStructs { // (4) Rewrite references to removed structs. - let krate = fold_resolved_paths(krate, cx, |qself, path, def| { + fold_resolved_paths(krate, cx, |qself, path, def| { if let Some(&canon_def_id) = def.opt_def_id().as_ref() .and_then(|x| removed_id_map.get(&x)) { (None, cx.def_path(canon_def_id)) @@ -315,8 +312,6 @@ impl Transform for CanonicalizeStructs { (qself, path) } }); - - krate } fn min_phase(&self) -> Phase { diff --git a/c2rust-refactor/src/transform/literals.rs b/c2rust-refactor/src/transform/literals.rs index 260329fd5..6ee4b3a15 100644 --- a/c2rust-refactor/src/transform/literals.rs +++ b/c2rust-refactor/src/transform/literals.rs @@ -3,7 +3,7 @@ use syntax::ast::*; use syntax::ptr::P; use syntax::symbol::Symbol; -use crate::ast_manip::fold_nodes; +use crate::ast_manip::MutVisitNodes; use crate::command::{CommandState, Registry}; use crate::transform::Transform; use crate::RefactorCtxt; @@ -23,28 +23,24 @@ use crate::RefactorCtxt; pub struct ByteStrToStr; impl Transform for ByteStrToStr { - fn transform(&self, krate: Crate, st: &CommandState, _cx: &RefactorCtxt) -> Crate { - fold_nodes(krate, |e: P| { + fn transform(&self, krate: &mut Crate, st: &CommandState, _cx: &RefactorCtxt) { + MutVisitNodes::visit(krate, |e: &mut P| { if !st.marked(e.id, "target") { - return e; + return; } - e.map(|e| { - let node = match e.node { - ExprKind::Lit(l) => { - let node = match l.node { - LitKind::ByteStr(bs) => { - let s = String::from_utf8((*bs).clone()).unwrap(); - LitKind::Str(Symbol::intern(&s), StrStyle::Cooked) - }, - n => n, - }; - 
ExprKind::Lit(Lit { node, ..l }) - }, - n => n, - }; - Expr { node, ..e } - }) + match &mut e.node { + ExprKind::Lit(l) => { + match l.node { + LitKind::ByteStr(ref bs) => { + let s = String::from_utf8((**bs).clone()).unwrap(); + l.node = LitKind::Str(Symbol::intern(&s), StrStyle::Cooked) + } + _ => {} + } + } + _ => {} + } }) } } @@ -64,43 +60,32 @@ impl Transform for ByteStrToStr { pub struct RemoveNullTerminator; impl Transform for RemoveNullTerminator { - fn transform(&self, krate: Crate, st: &CommandState, _cx: &RefactorCtxt) -> Crate { - fold_nodes(krate, |e: P| { + fn transform(&self, krate: &mut Crate, st: &CommandState, _cx: &RefactorCtxt) { + MutVisitNodes::visit(krate, |e: &mut P| { if !st.marked(e.id, "target") { - return e; + return; } - e.map(|e| { - let node = match e.node { - ExprKind::Lit(l) => { - let node = match l.node { - LitKind::ByteStr(bs) => { - if bs.last() == Some(&0) { - let mut bs = (*bs).clone(); - bs.pop(); - LitKind::ByteStr(Lrc::new(bs)) - } else { - LitKind::ByteStr(bs) - } - }, - LitKind::Str(s, style) => { - if s.as_str().ends_with("\0") { - let end = s.as_str().len() - 1; - let new_s = Symbol::intern(&s.as_str()[..end]); - LitKind::Str(new_s, style) - } else { - LitKind::Str(s, style) - } - }, - n => n, - }; - ExprKind::Lit(Lit { node, ..l }) - }, - n => n, - }; - Expr { node, ..e } - }) - }) + match &mut e.node { + ExprKind::Lit(l) => { + match &mut l.node { + LitKind::ByteStr(bs) => { + if bs.last() == Some(&0) { + Lrc::get_mut(bs).unwrap().pop(); + } + } + LitKind::Str(ref mut s, _style) => { + if s.as_str().ends_with("\0") { + let end = s.as_str().len() - 1; + *s = Symbol::intern(&s.as_str()[..end]); + } + } + _ => {} + } + } + _ => {} + } + }); } } diff --git a/c2rust-refactor/src/transform/mod.rs b/c2rust-refactor/src/transform/mod.rs index edce778d0..d9acea917 100644 --- a/c2rust-refactor/src/transform/mod.rs +++ b/c2rust-refactor/src/transform/mod.rs @@ -10,7 +10,7 @@ use crate::RefactorCtxt; /// An AST transformation 
that can be applied to a crate. pub trait Transform { /// Apply the transformation. - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate; + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt); /// Return the minimum phase at which this transform can operate. See the `Phase` docs for /// details. The default is `Phase2`. @@ -27,10 +27,8 @@ pub struct TransformCommand(pub T); impl Command for TransformCommand { fn run(&mut self, state: &mut RefactorState) { state.transform_crate(self.0.min_phase(), |st, cx| { - st.map_krate(|krate| { - self.0.transform(krate, st, cx) - }); - }); + self.0.transform(&mut *st.krate_mut(), st, cx) + }).expect("Failed to run compiler"); } } diff --git a/c2rust-refactor/src/transform/ownership.rs b/c2rust-refactor/src/transform/ownership.rs index a75fff1a7..5b31b601c 100644 --- a/c2rust-refactor/src/transform/ownership.rs +++ b/c2rust-refactor/src/transform/ownership.rs @@ -7,15 +7,15 @@ use rustc::hir::def_id::DefId; use rustc_data_structures::indexed_vec::IndexVec; use syntax::ast::*; use syntax::source_map::DUMMY_SP; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::parse::token::{self, Token, DelimToken}; use syntax::ptr::P; use syntax::symbol::Symbol; -use syntax::tokenstream::{TokenTree, TokenStream, Delimited, DelimSpan}; +use syntax::tokenstream::{TokenTree, TokenStream, DelimSpan}; use smallvec::SmallVec; -use crate::ast_manip::{fold_nodes, Fold}; -use crate::ast_manip::fn_edit::fold_fns_multi; +use crate::ast_manip::{MutVisitNodes, MutVisit}; +use crate::ast_manip::fn_edit::flat_map_fns; use crate::analysis::labeled_ty::LabeledTyCtxt; use crate::analysis::ownership::{self, ConcretePerm, Var, PTy}; use crate::analysis::ownership::constraint::{ConstraintSet, Perm}; @@ -62,16 +62,17 @@ pub fn register_commands(reg: &mut Registry) { fn do_annotate(st: &CommandState, cx: &RefactorCtxt, label: Symbol) { - let analysis = 
ownership::analyze(&st, &cx); + let arena = SyncDroplessArena::default(); + let analysis = ownership::analyze(&st, &cx, &arena); struct AnnotateFolder<'a, 'tcx: 'a> { label: Symbol, - ana: ownership::AnalysisResult<'tcx>, + ana: ownership::AnalysisResult<'tcx, 'tcx>, hir_map: &'a hir::map::Map<'tcx>, st: &'a CommandState, } - impl<'a, 'tcx> AnnotateFolder<'a, 'tcx> { + impl<'lty, 'a, 'tcx> AnnotateFolder<'a, 'tcx> { fn static_attr_for(&self, id: NodeId) -> Option { self.hir_map.opt_local_def_id(id) .and_then(|def_id| self.ana.statics.get(&def_id)) @@ -105,7 +106,7 @@ fn do_annotate(st: &CommandState, fn clean_attrs(&self, attrs: &mut Vec) { attrs.retain(|a| { - match &a.name().as_str() as &str { + match &a.path.to_string() as &str { "ownership_mono" | "ownership_constraints" | "ownership_static" => false, @@ -115,13 +116,13 @@ fn do_annotate(st: &CommandState, } } - impl<'a, 'tcx> Folder for AnnotateFolder<'a, 'tcx> { - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]> { + impl<'lty, 'a, 'tcx> MutVisitor for AnnotateFolder<'a, 'tcx> { + fn flat_map_item(&mut self, i: P) -> SmallVec<[P; 1]> { if !self.st.marked(i.id, self.label) { - return fold::noop_fold_item(i, self); + return mut_visit::noop_flat_map_item(i, self); } - fold::noop_fold_item(i.map(|mut i| { + mut_visit::noop_flat_map_item(i.map(|mut i| { match i.node { ItemKind::Static(..) | ItemKind::Const(..) 
=> { self.clean_attrs(&mut i.attrs); @@ -145,17 +146,17 @@ fn do_annotate(st: &CommandState, }), self) } - fn fold_impl_item(&mut self, i: ImplItem) -> SmallVec<[ImplItem; 1]> { + fn flat_map_impl_item(&mut self, i: ImplItem) -> SmallVec<[ImplItem; 1]> { if !self.st.marked(i.id, self.label) { - return fold::noop_fold_impl_item(i, self); + return mut_visit::noop_flat_map_impl_item(i, self); } - fold::noop_fold_impl_item(i, self) + mut_visit::noop_flat_map_impl_item(i, self) } - fn fold_struct_field(&mut self, mut sf: StructField) -> StructField { + fn visit_struct_field(&mut self, sf: &mut StructField) { if !self.st.marked(sf.id, self.label) { - return fold::noop_fold_struct_field(sf, self); + return mut_visit::noop_visit_struct_field(sf, self); } self.clean_attrs(&mut sf.attrs); @@ -163,12 +164,12 @@ fn do_annotate(st: &CommandState, sf.attrs.push(attr); } - fold::noop_fold_struct_field(sf, self) + mut_visit::noop_visit_struct_field(sf, self) } } st.map_krate(|krate| { - krate.fold(&mut AnnotateFolder { + krate.visit(&mut AnnotateFolder { label: label, ana: analysis, hir_map: cx.hir_map(), @@ -263,10 +264,11 @@ fn token(t: Token) -> TokenTree { } fn parens(ts: Vec) -> TokenTree { - TokenTree::Delimited(DelimSpan::dummy(), Delimited { - delim: DelimToken::Paren, - tts: ts.into_iter().collect::().into(), - }) + TokenTree::Delimited( + DelimSpan::dummy(), + DelimToken::Paren, + ts.into_iter().collect::().into(), + ) } fn make_attr(name: &str, tokens: TokenStream) -> Attribute { @@ -300,7 +302,8 @@ fn build_variant_attr(group: &str) -> Attribute { fn do_split_variants(st: &CommandState, cx: &RefactorCtxt, label: Symbol) { - let ana = ownership::analyze(&st, &cx); + let arena = SyncDroplessArena::default(); + let ana = ownership::analyze(&st, &cx, &arena); // Map from ExprPath/ExprMethodCall span to function ref idx within the caller. 
let mut span_fref_idx = HashMap::new(); @@ -318,7 +321,7 @@ fn do_split_variants(st: &CommandState, // (1) Duplicate marked fns with `mono` attrs to produce multiple variants. We rewrite // references to other fns during this process, since afterward it would be difficult to // distinguish the different copies - their bodies have identical spans and `NodeId`s. - let krate = fold_fns_multi(krate, |fl| { + flat_map_fns(krate, |fl| { if !st.marked(fl.id, label) { return smallvec![fl]; } @@ -366,9 +369,9 @@ fn do_split_variants(st: &CommandState, fl.attrs.push(build_mono_attr(&mr.suffix, &mr.assign)); fl.attrs.push(build_variant_attr(&path_str)); - fl.block = fl.block.map(|b| fold_nodes(b, |e: P| { + fl.block.as_mut().map(|b| MutVisitNodes::visit(b, |e: &mut P| { let fref_idx = match_or!([span_fref_idx.get(&e.span)] - Some(&x) => x; return e); + Some(&x) => x; return); handled_spans.insert(e.span); let dest = vr.func_refs[fref_idx].def_id; @@ -382,12 +385,12 @@ fn do_split_variants(st: &CommandState, if !dest_marked && dest_fr.variants.is_none() { // A call from a split function to a non-split function. Leave the call // unchanged. - return e; + return; } let dest_mono_idx = mr.callee_mono_idxs[fref_idx]; let new_name = callee_new_name(cx, &ana, dest, dest_mono_idx); - rename_callee(e, &new_name) + rename_callee(e, &new_name); })); fls.push(fl); @@ -397,12 +400,12 @@ fn do_split_variants(st: &CommandState, // (2) Find calls from other functions into functions being split. Retarget those calls to // an appropriate monomorphization. - let krate = fold_nodes(krate, |e: P| { + MutVisitNodes::visit(krate, |e: &mut P| { let fref_idx = match_or!([span_fref_idx.get(&e.span)] - Some(&x) => x; return e); + Some(&x) => x; return); if handled_spans.contains(&e.span) { // This span was handled while splitting a function into variants. - return e; + return; } // Figure out where we are. 
@@ -416,7 +419,7 @@ fn do_split_variants(st: &CommandState, let dest_marked = cx.hir_map().as_local_node_id(dest) .map_or(false, |id| st.marked(id, label)); if !dest_marked && dest_fr.variants.is_none() { - return e; + return; } // Pick a monomorphization. @@ -435,29 +438,23 @@ fn do_split_variants(st: &CommandState, let new_name = callee_new_name(cx, &ana, dest, dest_mono_idx); rename_callee(e, &new_name) }); - - krate }); } -fn rename_callee(e: P, new_name: &str) -> P { - e.map(|mut e| { - match e.node { - ExprKind::Path(_, ref mut path) => { - // Change the last path segment. - let seg = path.segments.last_mut().unwrap(); - seg.ident = mk().ident(new_name); - }, - - ExprKind::MethodCall(ref mut seg, _) => { - seg.ident = mk().ident(new_name); - }, +fn rename_callee(e: &mut P, new_name: &str) { + match &mut e.node { + ExprKind::Path(_, ref mut path) => { + // Change the last path segment. + let seg = path.segments.last_mut().unwrap(); + seg.ident = mk().ident(new_name); + }, - _ => panic!("rename_callee: unexpected expr kind: {:?}", e), - } + ExprKind::MethodCall(ref mut seg, _) => { + seg.ident = mk().ident(new_name); + }, - e - }) + _ => panic!("rename_callee: unexpected expr kind: {:?}", e), + } } fn callee_new_name(cx: &RefactorCtxt, @@ -497,16 +494,16 @@ fn callee_new_name(cx: &RefactorCtxt, /// of the ownership analysis. /// See `analysis/ownership/README.md` for details on ownership inference. 
fn do_mark_pointers(st: &CommandState, cx: &RefactorCtxt) { - let ana = ownership::analyze(&st, &cx); + let arena = SyncDroplessArena::default(); + let ana = ownership::analyze(&st, &cx, &arena); - struct AnalysisTypeSource<'a, 'tcx: 'a> { - ana: &'a ownership::AnalysisResult<'tcx>, - arena: &'tcx SyncDroplessArena, + struct AnalysisTypeSource<'lty, 'tcx: 'lty> { + ana: &'lty ownership::AnalysisResult<'lty, 'tcx>, } - impl<'a, 'tcx> type_map::TypeSource for AnalysisTypeSource<'a, 'tcx> { - type Type = ownership::PTy<'tcx>; - type Signature = ownership::PFnSig<'tcx>; + impl<'lty, 'tcx> type_map::TypeSource for AnalysisTypeSource<'lty, 'tcx> { + type Type = ownership::PTy<'lty, 'tcx>; + type Signature = ownership::PFnSig<'lty, 'tcx>; fn def_type(&mut self, did: DefId) -> Option { self.ana.statics.get(&did).cloned() @@ -527,7 +524,7 @@ fn do_mark_pointers(st: &CommandState, cx: &RefactorCtxt) { let mr = &self.ana.monos[&(vr.func_id, mono_idx)]; - let lcx = LabeledTyCtxt::new(self.arena); + let lcx = LabeledTyCtxt::new(self.ana.arena()); let sig = { let mut f = |l: &Option<_>| { @@ -551,7 +548,6 @@ fn do_mark_pointers(st: &CommandState, cx: &RefactorCtxt) { let source = AnalysisTypeSource { ana: &ana, - arena: cx.ty_arena(), }; let s_ref = "ref".into_symbol(); diff --git a/c2rust-refactor/src/transform/reorganize_definitions.rs b/c2rust-refactor/src/transform/reorganize_definitions.rs index 6bd62292a..43e935a20 100644 --- a/c2rust-refactor/src/transform/reorganize_definitions.rs +++ b/c2rust-refactor/src/transform/reorganize_definitions.rs @@ -13,7 +13,7 @@ use syntax::symbol::keywords; use c2rust_ast_builder::mk; use crate::ast_manip::util::{join_visibility, is_relative_path, namespace, split_uses}; -use crate::ast_manip::{AstEquiv, fold_nodes, visit_nodes}; +use crate::ast_manip::{AstEquiv, FlatMapNodes, visit_nodes}; use crate::command::{CommandState, Registry}; use crate::driver::{Phase}; use crate::path_edit::fold_resolved_paths_with_id; @@ -68,13 +68,13 @@ 
impl<'a, 'tcx> Reorganizer<'a, 'tcx> { } /// Run the reorganization pass - pub fn run(&mut self, krate: Crate) -> Crate { + pub fn run(&mut self, krate: &mut Crate) { self.find_destination_modules(&krate); let mut module_items = HashMap::new(); - let krate = self.remove_header_items(krate, &mut module_items); + self.remove_header_items(krate, &mut module_items); - let krate = self.move_items(krate, module_items); + self.move_items(krate, module_items); self.update_paths(krate) } @@ -127,10 +127,10 @@ impl<'a, 'tcx> Reorganizer<'a, 'tcx> { /// mapping. fn remove_header_items( &mut self, - krate: Crate, + krate: &mut Crate, module_items: &mut HashMap>, - ) -> Crate { - fold_nodes(krate, |item: P| { + ) { + FlatMapNodes::visit(krate, |item: P| { if has_source_header(&item.attrs) { let header_item = item; if let ItemKind::Mod(_) = &header_item.node { @@ -147,7 +147,7 @@ impl<'a, 'tcx> Reorganizer<'a, 'tcx> { if let ItemKind::ForeignMod(m) = &item.node { for foreign_item in &m.items { let dest_path = mk().path(vec![ - keywords::CrateRoot.ident(), + keywords::PathRoot.ident(), dest_module_ident, foreign_item.ident, ]); @@ -162,7 +162,7 @@ impl<'a, 'tcx> Reorganizer<'a, 'tcx> { // a simple path in the crate root and it is flat, // i.e. has no submodules which contain target items. let dest_path = mk().path(vec![ - keywords::CrateRoot.ident(), + keywords::PathRoot.ident(), dest_module_ident, item.ident, ]); @@ -188,8 +188,8 @@ impl<'a, 'tcx> Reorganizer<'a, 'tcx> { /// Add items in `module_items` to their respective modules and create any /// new modules. 
- fn move_items(&self, krate: Crate, mut module_items: HashMap) -> Crate { - let mut krate = fold_nodes(krate, |item: P| { + fn move_items(&self, krate: &mut Crate, mut module_items: HashMap) { + FlatMapNodes::visit(krate, |item: P| { smallvec![if let Some(new_defines) = module_items.remove(&item.id) { new_defines.move_into_module(item) } else { @@ -208,17 +208,15 @@ impl<'a, 'tcx> Reorganizer<'a, 'tcx> { } } } - - krate } /// Update paths to moved items and remove redundant imports. - fn update_paths(&self, krate: Crate) -> Crate { + fn update_paths(&self, krate: &mut Crate) { // Maps NodeId of an AST element with an updated path to the NodeId of // the module it's target is now located in. let mut remapped_path_nodes = HashMap::new(); - let krate = fold_resolved_paths_with_id(krate, self.cx, |id, qself, path, def| { + fold_resolved_paths_with_id(krate, self.cx, |id, qself, path, def| { debug!("Folding path {:?} (def: {:?})", path, def); if let Some(def_id) = def.opt_def_id() { if let Some((new_path, mod_id)) = self.path_mapping.get(&def_id) { @@ -237,7 +235,7 @@ impl<'a, 'tcx> Reorganizer<'a, 'tcx> { }); // Remove use statements that now refer to their self module. 
- fold_nodes(krate, |mut item: P| { + FlatMapNodes::visit(krate, |mut item: P| { let parent_id = item.id; if let ItemKind::Mod(m) = &mut item.node { let mut uses: HashMap = HashMap::new(); @@ -281,7 +279,7 @@ impl<'a, 'tcx> Reorganizer<'a, 'tcx> { }); } smallvec![item] - }) + }); } } @@ -615,7 +613,7 @@ fn is_std(attrs: &Vec) -> bool { } impl Transform for ReorganizeDefinitions { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let mut reorg = Reorganizer::new(st, cx); reorg.run(krate) } diff --git a/c2rust-refactor/src/transform/retype.rs b/c2rust-refactor/src/transform/retype.rs index bd874d2ee..295dfa056 100644 --- a/c2rust-refactor/src/transform/retype.rs +++ b/c2rust-refactor/src/transform/retype.rs @@ -1,27 +1,26 @@ use std::collections::{HashMap, HashSet}; -use std::mem; +use std::ops::DerefMut; use rustc::hir; use rustc::hir::def_id::DefId; use rustc::ty::{self, TyKind, TyCtxt, ParamEnv}; use syntax::ast::*; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor}; use syntax::parse::PResult; use syntax::parse::parser::Parser; use syntax::parse::token::{Token, BinOpToken}; use syntax::print::pprust; use syntax::ptr::P; -use syntax::util::move_map::MoveMap; use syntax_pos::Span; use smallvec::SmallVec; use c2rust_ast_builder::mk; -use crate::ast_manip::{Fold, fold_nodes, fold_output_exprs}; -use crate::ast_manip::fn_edit::{fold_fns, visit_fns}; +use crate::ast_manip::{FlatMapNodes, MutVisit, MutVisitNodes, fold_output_exprs}; +use crate::ast_manip::fn_edit::{mut_visit_fns, visit_fns}; use crate::ast_manip::lr_expr::{self, fold_expr_with_context, fold_exprs_with_context}; use crate::command::{Command, CommandState, RefactorState, Registry, TypeckLoopResult}; use crate::driver::{self, Phase, parse_ty, parse_expr}; use crate::illtyped::{IlltypedFolder, fold_illtyped}; -use crate::matcher::{Bindings, MatchCtxt, Subst, 
fold_match, fold_match_with, replace_expr}; +use crate::matcher::{Bindings, MatchCtxt, Subst, mut_visit_match, mut_visit_match_with, replace_expr}; use crate::reflect::{self, reflect_tcx_ty}; use crate::transform::Transform; use crate::RefactorCtxt; @@ -47,7 +46,7 @@ pub struct RetypeArgument { } impl Transform for RetypeArgument { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { // (1) Change argument types and rewrite function bodies. let new_ty = parse_ty(cx.session(), &self.new_ty); @@ -58,75 +57,61 @@ impl Transform for RetypeArgument { // modified. let mut mod_fns: HashMap> = HashMap::new(); - let krate = fold_fns(krate, |mut fl| { + mut_visit_fns(krate, |fl| { let fn_id = fl.id; // Def IDs of changed arguments. let mut changed_args = HashSet::new(); - fl.decl = fl.decl.map(|mut decl| { - for (i, arg) in decl.inputs.iter_mut().enumerate() { - if st.marked(arg.id, "target") { - arg.ty = new_ty.clone(); - mod_fns.entry(cx.node_def_id(fn_id)).or_insert_with(HashSet::new).insert(i); + for (i, arg) in fl.decl.inputs.iter_mut().enumerate() { + if st.marked(arg.id, "target") { + arg.ty = new_ty.clone(); + mod_fns.entry(cx.node_def_id(fn_id)).or_insert_with(HashSet::new).insert(i); - changed_args.insert(cx.hir_map().node_to_hir_id(arg.pat.id)); - } + changed_args.insert(cx.hir_map().node_to_hir_id(arg.pat.id)); } - decl - }); + } if changed_args.len() == 0 { - return fl; + return; } // An argument was changed, so we need to rewrite uses of that argument inside the // function body. - // `fold_nodes` does a preorder traversal, so if we replace `x` with `wrap(x)`, we will + // `mut_visit_nodes` does a preorder traversal, so if we replace `x` with `wrap(x)`, we will // see `x` again in the recursive call. We keep track of which nodes have already been // rewritten so that we don't end up with a stack overflow. 
let mut rewritten_nodes = HashSet::new(); - fl.block = fold_nodes(fl.block.take(), |e: P| { + fl.block.as_mut().map(|b| MutVisitNodes::visit(b, |e: &mut P| { if let Some(hir_id) = cx.try_resolve_expr_to_hid(&e) { if changed_args.contains(&hir_id) && !rewritten_nodes.contains(&e.id) { rewritten_nodes.insert(e.id); let mut bnd = Bindings::new(); bnd.add("__new", e.clone()); - return unwrap.clone().subst(st, cx, &bnd); + *e = unwrap.clone().subst(st, cx, &bnd); } } - e - }); - - fl + })); }); // (2) Rewrite callsites of modified functions. // We don't need any protection against infinite recursion here, because it doesn't make // sense for `wrap` to call the function whose args we're changing. - let krate = fold_nodes(krate, |e: P| { - let callee = match_or!([cx.opt_callee(&e)] Some(x) => x; return e); - let mod_args = match_or!([mod_fns.get(&callee)] Some(x) => x; return e); - e.map(|mut e| { - { - let args: &mut [P] = - match e.node { - ExprKind::Call(_, ref mut args) => args, - ExprKind::MethodCall(_, ref mut args) => args, - _ => panic!("expected Call or MethodCall"), - }; - for &idx in mod_args { - let mut bnd = Bindings::new(); - bnd.add("__old", args[idx].clone()); - args[idx] = wrap.clone().subst(st, cx, &bnd); - } - } - e - }) + MutVisitNodes::visit(krate, |e: &mut P| { + let callee = match_or!([cx.opt_callee(&e)] Some(x) => x; return); + let mod_args = match_or!([mod_fns.get(&callee)] Some(x) => x; return); + let args: &mut [P] = match e.node { + ExprKind::Call(_, ref mut args) => args, + ExprKind::MethodCall(_, ref mut args) => args, + _ => panic!("expected Call or MethodCall"), + }; + for &idx in mod_args { + let mut bnd = Bindings::new(); + bnd.add("__old", args[idx].clone()); + args[idx] = wrap.clone().subst(st, cx, &bnd); + } }); - - krate } fn min_phase(&self) -> Phase { @@ -156,7 +141,7 @@ pub struct RetypeReturn { } impl Transform for RetypeReturn { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn 
transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { // (1) Change argument types and rewrite function bodies. let new_ty = parse_ty(cx.session(), &self.new_ty); @@ -166,43 +151,37 @@ impl Transform for RetypeReturn { // Modified functions, by DefId. let mut mod_fns: HashSet = HashSet::new(); - let krate = fold_fns(krate, |mut fl| { + mut_visit_fns(krate, |fl| { if !st.marked(fl.id, "target") { - return fl; + return; } // Change the return type annotation - fl.decl = fl.decl.map(|mut decl| { - decl.output = FunctionRetTy::Ty(new_ty.clone()); - decl - }); + fl.decl.output = FunctionRetTy::Ty(new_ty.clone()); // Rewrite output expressions using `wrap`. - fl.block = fl.block.map(|b| fold_output_exprs(b, true, |e| { + fl.block.as_mut().map(|b| fold_output_exprs(b, true, |e| { let mut bnd = Bindings::new(); bnd.add("__old", e.clone()); - return wrap.clone().subst(st, cx, &bnd); + *e = wrap.clone().subst(st, cx, &bnd); })); mod_fns.insert(cx.node_def_id(fl.id)); - fl }); // (2) Rewrite callsites of modified functions. // We don't need any protection against infinite recursion here, because it doesn't make // sense for `unwrap` to call the function whose args we're changing. 
- let krate = fold_nodes(krate, |e: P| { - let callee = match_or!([cx.opt_callee(&e)] Some(x) => x; return e); + MutVisitNodes::visit(krate, |e: &mut P| { + let callee = match_or!([cx.opt_callee(&e)] Some(x) => x; return); if !mod_fns.contains(&callee) { - return e; + return; } let mut bnd = Bindings::new(); - bnd.add("__new", e); - unwrap.clone().subst(st, cx, &bnd) + bnd.add("__new", e.clone()); + *e = unwrap.clone().subst(st, cx, &bnd) }); - - krate } fn min_phase(&self) -> Phase { @@ -243,7 +222,7 @@ pub struct RetypeStatic { } impl Transform for RetypeStatic { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { // (1) Change the types of marked statics, and update their initializer expressions. let new_ty = parse_ty(cx.session(), &self.new_ty); @@ -255,7 +234,7 @@ impl Transform for RetypeStatic { // Modified statics, by DefId. let mut mod_statics: HashSet = HashSet::new(); - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { if !st.marked(i.id, "target") { return smallvec![i]; } @@ -275,7 +254,7 @@ impl Transform for RetypeStatic { })] }); - let krate = fold_nodes(krate, |mut fi: ForeignItem| { + FlatMapNodes::visit(krate, |mut fi: ForeignItem| { if !st.marked(fi.id, "target") { return smallvec![fi]; } @@ -297,52 +276,48 @@ impl Transform for RetypeStatic { // its own thing with them. Note we assume the input AST is properly numbered. 
let mut handled_ids: HashSet = HashSet::new(); - let krate = fold_nodes(krate, |e: P| { + MutVisitNodes::visit(krate, |e: &mut P| { if !matches!([e.node] ExprKind::Assign(..), ExprKind::AssignOp(..)) { - return e; + return; } - e.map(|mut e| { - match e.node { - ExprKind::Assign(ref lhs, ref mut rhs) | - ExprKind::AssignOp(_, ref lhs, ref mut rhs) => { - if cx.try_resolve_expr(lhs) - .map_or(false, |did| mod_statics.contains(&did)) { + match e.node { + ExprKind::Assign(ref lhs, ref mut rhs) | + ExprKind::AssignOp(_, ref lhs, ref mut rhs) => { + if cx.try_resolve_expr(lhs) + .map_or(false, |did| mod_statics.contains(&did)) { let mut bnd = Bindings::new(); bnd.add("__old", rhs.clone()); *rhs = rev_conv_assign.clone().subst(st, cx, &bnd); handled_ids.insert(lhs.id); } - }, - _ => {}, - } - e - }) + }, + _ => {}, + } }); // (3) Rewrite use sites of modified statics. - let krate = fold_exprs_with_context(krate, |e, ectx| { + fold_exprs_with_context(krate, |e, ectx| { if !matches!([e.node] ExprKind::Path(..)) || handled_ids.contains(&e.id) || !cx.try_resolve_expr(&e).map_or(false, |did| mod_statics.contains(&did)) { - return e; + return; } let mut bnd = Bindings::new(); bnd.add("__new", e.clone()); - match ectx { + *e = match ectx { lr_expr::Context::Rvalue => conv_rval.clone().subst(st, cx, &bnd), lr_expr::Context::Lvalue => conv_lval.clone().subst(st, cx, &bnd), - lr_expr::Context::LvalueMut => + lr_expr::Context::LvalueMut => { conv_lval_mut.clone().unwrap_or_else( || panic!("need conv_lval_mut to handle LvalueMut expression `{}`", pprust::expr_to_string(&e))) - .subst(st, cx, &bnd), - } + .subst(st, cx, &bnd) + } + }; }); - - krate } fn min_phase(&self) -> Phase { @@ -356,8 +331,8 @@ impl Transform for RetypeStatic { /// /// This function currently handles only direct function calls. Creation and use of function /// pointers is not handled correctly yet. 
-pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, retype: F) -> Crate - where F: FnMut(&P) -> Option> { +pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: &mut Crate, retype: F) + where F: FnMut(&mut P) -> bool { // (1) Walk over all supported nodes, replacing type annotations. Also record which nodes had // type annotations replaced, for future reference. @@ -370,9 +345,9 @@ pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, ret changed_defs: HashMap, P)>, } - impl Folder for ChangeTypeFolder - where F: FnMut(&P) -> Option> { - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]> { + impl MutVisitor for ChangeTypeFolder + where F: FnMut(&mut P) -> bool { + fn flat_map_item(&mut self, i: P) -> SmallVec<[P; 1]> { let i = if matches!([i.node] ItemKind::Fn(..)) { i.map(|mut i| { let mut fd = expect!([i.node] @@ -380,10 +355,10 @@ pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, ret fd.clone().into_inner()); for (j, arg) in fd.inputs.iter_mut().enumerate() { - if let Some(new_ty) = (self.retype)(&arg.ty) { - let old_ty = mem::replace(&mut arg.ty, new_ty.clone()); + let old_ty = arg.ty.clone(); + if (self.retype)(&mut arg.ty) { self.changed_inputs.insert((i.id, j), - (old_ty.clone(), new_ty.clone())); + (old_ty.clone(), arg.ty.clone())); self.changed_funcs.insert(i.id); // Also record that the type of the variable declared here has changed. @@ -391,7 +366,7 @@ pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, ret // Note that `PatKind::Ident` doesn't guarantee that this is a // variable binding. But if it's not, then no name will ever // resolve to `arg.pat`'s DefId, so it doesn't matter. 
- self.changed_defs.insert(arg.pat.id, (old_ty, new_ty)); + self.changed_defs.insert(arg.pat.id, (old_ty, arg.ty.clone())); } else { // TODO: Would be nice to warn the user (or skip rewriting) if a // nontrivial pattern gets its type changed, as we'll likely miss @@ -401,9 +376,9 @@ pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, ret } if let FunctionRetTy::Ty(ref mut ty) = fd.output { - if let Some(new_ty) = (self.retype)(ty) { - let old_ty = mem::replace(ty, new_ty.clone()); - self.changed_outputs.insert(i.id, (old_ty, new_ty)); + let old_ty = ty.clone(); + if (self.retype)(ty) { + self.changed_outputs.insert(i.id, (old_ty, ty.clone())); self.changed_funcs.insert(i.id); } } @@ -422,9 +397,9 @@ pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, ret i.map(|mut i| { { let ty = expect!([i.node] ItemKind::Static(ref mut ty, _, _) => ty); - if let Some(new_ty) = (self.retype)(ty) { - let old_ty = mem::replace(ty, new_ty.clone()); - self.changed_defs.insert(i.id, (old_ty, new_ty)); + let old_ty = ty.clone(); + if (self.retype)(ty) { + self.changed_defs.insert(i.id, (old_ty, ty.clone())); } } i @@ -434,9 +409,9 @@ pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, ret i.map(|mut i| { { let ty = expect!([i.node] ItemKind::Const(ref mut ty, _) => ty); - if let Some(new_ty) = (self.retype)(ty) { - let old_ty = mem::replace(ty, new_ty.clone()); - self.changed_defs.insert(i.id, (old_ty, new_ty)); + let old_ty = ty.clone(); + if (self.retype)(ty) { + self.changed_defs.insert(i.id, (old_ty, ty.clone())); } } i @@ -446,15 +421,15 @@ pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, ret i }; - fold::noop_fold_item(i, self) + mut_visit::noop_flat_map_item(i, self) } - fn fold_struct_field(&mut self, mut sf: StructField) -> StructField { - if let Some(new_ty) = (self.retype)(&sf.ty) { - let old_ty = mem::replace(&mut sf.ty, new_ty.clone()); - self.changed_defs.insert(sf.id, (old_ty, 
new_ty)); + fn visit_struct_field(&mut self, sf: &mut StructField) { + let old_ty = sf.ty.clone(); + if (self.retype)(&mut sf.ty) { + self.changed_defs.insert(sf.id, (old_ty, sf.ty.clone())); } - fold::noop_fold_struct_field(sf, self) + mut_visit::noop_visit_struct_field(sf, self) } } @@ -465,7 +440,7 @@ pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, ret changed_funcs: HashSet::new(), changed_defs: HashMap::new(), }; - let krate = krate.fold(&mut f); + krate.visit(&mut f); let ChangeTypeFolder { changed_inputs, changed_outputs, changed_funcs, changed_defs, .. } = f; @@ -479,21 +454,21 @@ pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, ret let lvalue_mut_repl = parse_expr(cx.session(), "*::std::mem::transmute::<&mut __old_ty, &mut __new_ty>(&mut __e)"); - // Folder for rewriting top-level exprs only + // MutVisitor for rewriting top-level exprs only struct ExprFolder { callback: F, } - impl) -> P> Folder for ExprFolder { - fn fold_expr(&mut self, e: P) -> P { + impl)> MutVisitor for ExprFolder { + fn visit_expr(&mut self, e: &mut P) { (self.callback)(e) } } - fn fold_top_exprs(x: T, callback: F) -> ::Result - where T: Fold, F: FnMut(P) -> P { + fn fold_top_exprs(x: &mut T, callback: F) + where T: MutVisit, F: FnMut(&mut P) { let mut f = ExprFolder { callback: callback }; - x.fold(&mut f) + x.visit(&mut f) } let transmute = |e, context, old_ty: &P, new_ty: &P| { @@ -510,13 +485,13 @@ pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, ret repl.subst(st, cx, &bnd) }; - let krate = fold_top_exprs(krate, |e: P| { + fold_top_exprs(krate, |e: &mut P| { fold_expr_with_context(e, lr_expr::Context::Rvalue, |e, context| { match e.node { ExprKind::Path(..) 
=> { if let Some(&(ref old_ty, ref new_ty)) = cx.try_resolve_expr_to_hid(&e) .and_then(|hid| changed_defs.get(&cx.hir_map().hir_to_node_id(hid))) { - return transmute(e.clone(), context, new_ty, old_ty); + *e = transmute(e.clone(), context, new_ty, old_ty); } }, @@ -531,7 +506,7 @@ pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, ret if let Some(&(ref old_ty, ref new_ty)) = cx.hir_map() .as_local_node_id(did) .and_then(|id| changed_defs.get(&id)) { - return transmute(e.clone(), context, new_ty, old_ty); + *e = transmute(e.clone(), context, new_ty, old_ty); } }, _ => panic!("field access on non-adt"), @@ -542,8 +517,6 @@ pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, ret if let Some(func_id) = cx.opt_callee(&e) .and_then(|did| cx.hir_map().as_local_node_id(did)) { if changed_funcs.contains(&func_id) { - let mut e = e.clone(); - let new_args = args.iter().enumerate().map(|(i, a)| { if let Some(&(ref old_ty, ref new_ty)) = changed_inputs.get(&(func_id, i)) { @@ -555,18 +528,12 @@ pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, ret a.clone() } }).collect(); - e = e.map(move |mut e| { - expect!([e.node] - ExprKind::Call(_, ref mut args) => *args = new_args); - e - }); - - if let Some(&(ref old_ty, ref new_ty)) = - changed_outputs.get(&func_id) { - e = transmute(e, context, new_ty, old_ty); - } + expect!([e.node] + ExprKind::Call(_, ref mut args) => *args = new_args); - return e; + if let Some(&(ref old_ty, ref new_ty)) = changed_outputs.get(&func_id) { + *e = transmute(e.clone(), context, new_ty, old_ty); + } } } }, @@ -576,26 +543,19 @@ pub fn bitcast_retype(st: &CommandState, cx: &RefactorCtxt, krate: Crate, ret _ => {}, }; - - e }) }); // (3) Wrap output expressions from functions whose return types were modified. 
- let krate = fold_fns(krate, |mut fl| { + mut_visit_fns(krate, |fl| { if let Some(&(ref old_ty, ref new_ty)) = changed_outputs.get(&fl.id) { - fl.block = fl.block.map(|b| fold_output_exprs(b, true, |e| { - transmute(e, lr_expr::Context::Rvalue, old_ty, new_ty) + fl.block.as_mut().map(|b| fold_output_exprs(b, true, |e| { + *e = transmute(e.clone(), lr_expr::Context::Rvalue, old_ty, new_ty); })); } - - fl }); - - - krate } @@ -617,7 +577,7 @@ pub struct BitcastRetype { } impl Transform for BitcastRetype { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let pat = parse_ty(cx.session(), &self.pat); let repl = parse_ty(cx.session(), &self.repl); @@ -628,15 +588,11 @@ impl Transform for BitcastRetype { // and `U::SomeTy` could be totally unrelated). let mut matched = false; - let new_ty = fold_match(st, cx, pat.clone(), ty.clone(), |_, mcx| { + mut_visit_match(st, cx, pat.clone(), ty, |ty, mcx| { matched = true; - repl.clone().subst(st, cx, &mcx.bindings) + *ty = repl.clone().subst(st, cx, &mcx.bindings); }); - if matched { - Some(new_ty) - } else { - None - } + matched }) } @@ -714,27 +670,26 @@ impl Command for TypeFixRules { info!("Starting retyping iteration"); let mut lr_map = HashMap::new(); - let krate = lr_expr::fold_exprs_with_context(krate, |e, ectx| { + lr_expr::fold_exprs_with_context(krate, |e, ectx| { // This crate was just expanded (inside run_typeck_loop), so all nodes should be // numbered. 
assert!(e.id != DUMMY_NODE_ID); if ectx != lr_expr::Context::Rvalue { lr_map.insert(e.id, ectx); } - e }); let mut inserted = 0; - let krate = fold_illtyped(cx, krate, TypeFixRulesFolder { + fold_illtyped(cx, &mut *st.krate_mut(), TypeFixRulesFolder { st, cx, rules: &rules, num_inserted_casts: &mut inserted, lr_map: &lr_map, }); if inserted > 0 { - TypeckLoopResult::Iterate(krate) + TypeckLoopResult::Iterate } else { - TypeckLoopResult::Finished(krate) + TypeckLoopResult::Finished } }).expect("Could not retype crate!"); } @@ -751,9 +706,9 @@ struct TypeFixRulesFolder<'a, 'tcx: 'a> { impl<'a, 'tcx> IlltypedFolder<'tcx> for TypeFixRulesFolder<'a, 'tcx> { fn fix_expr(&mut self, - e: P, + e: &mut P, actual: ty::Ty<'tcx>, - expected: ty::Ty<'tcx>) -> P { + expected: ty::Ty<'tcx>) { let ectx = self.lr_map.get(&e.id).cloned().unwrap_or(lr_expr::Context::Rvalue); let actual_ty_ast = reflect::reflect_tcx_ty(self.cx.ty_ctxt(), actual); let expected_ty_ast = reflect::reflect_tcx_ty(self.cx.ty_ctxt(), expected); @@ -779,13 +734,12 @@ impl<'a, 'tcx> IlltypedFolder<'tcx> for TypeFixRulesFolder<'a, 'tcx> { } let mut bnd = mcx.bindings; - bnd.add("__old", e); + bnd.add("__old", e.clone()); info!("rewriting with bindings {:?}", bnd); *self.num_inserted_casts += 1; - return r.cast_expr.clone().subst(self.st, self.cx, &bnd); + *e = r.cast_expr.clone().subst(self.st, self.cx, &bnd); + return; } - - e } } @@ -830,10 +784,10 @@ impl Command for AutoRetype { let type_annotations = state.transform_crate(Phase::Phase3, |st, cx| { let mut retype_prep = RetypePrepFolder::new(st, cx, &self.mark_types); st.map_krate(|krate| { - krate.fold(&mut retype_prep) + krate.visit(&mut retype_prep) }); retype_prep.type_annotations - }); + }).expect("Failed to run compiler"); state.run_typeck_loop(|krate, _st, cx| { info!("Starting retyping iteration"); RetypeIteration::new(cx, &type_annotations).run(krate) @@ -846,9 +800,9 @@ impl Command for AutoRetype { state.transform_crate(Phase::Phase3, |st, cx| { 
st.map_krate(|krate| { let mut folder = RestoreAnnotationsFolder::new(cx, type_annotations); - krate.fold(&mut folder) + krate.visit(&mut folder) }); - }); + }).expect("Failed to run compiler"); } } @@ -882,53 +836,42 @@ impl<'a> RetypePrepFolder<'a> { /// Check type node for marks and return the new type if found in /// `mark_types`, otherwise return the original type. - fn map_type(&self, old_ty: P) -> P { + fn map_type(&self, ty: &mut P) { for (label, new_ty) in self.mark_types.iter() { - if self.st.marked(old_ty.id, label) { - return new_ty.clone(); + if self.st.marked(ty.id, label) { + *ty = new_ty.clone(); + return; } } - old_ty } } -impl<'a> Folder for RetypePrepFolder<'a> { +impl<'a> MutVisitor for RetypePrepFolder<'a> { /// Replace marked argument types with their new types - fn fold_fn_decl(&mut self, decl: P) -> P { - decl.map(|FnDecl {inputs, output, variadic}| FnDecl { - inputs: inputs.move_map(|arg| Arg { - ty: self.map_type(arg.ty), - ..arg - }), - output: match output { - FunctionRetTy::Ty(ty) => FunctionRetTy::Ty(self.map_type(ty)), - _ => output, - }, - variadic, - }) + fn visit_fn_decl(&mut self, decl: &mut P) { + let FnDecl { inputs, output, c_variadic: _ } = decl.deref_mut(); + for arg in inputs { + self.map_type(&mut arg.ty); + } + match output { + FunctionRetTy::Ty(ty) => self.map_type(ty), + _ => {} + } } /// Replace marked struct field types with their new types - fn fold_struct_field(&mut self, field: StructField) -> StructField { - StructField { - ty: self.map_type(field.ty), - ..field - } + fn visit_struct_field(&mut self, field: &mut StructField) { + self.map_type(&mut field.ty); } /// Remove all local variable types forcing type inference to update their /// types. We will replace these types if needed. 
- fn fold_local(&mut self, local: P) -> P { - local.map(|local| { - if let Some(ty) = &local.ty { - self.type_annotations.insert(local.span, ty.clone()); - } - Local { - ty: None, - init: local.init.map(|e| self.fold_expr(e)), - ..local - } - }) + fn visit_local(&mut self, local: &mut P) { + if let Some(ty) = &local.ty { + self.type_annotations.insert(local.span, ty.clone()); + } + local.ty = None; + local.init.as_mut().map(|i| self.visit_expr(i)); } } @@ -952,29 +895,21 @@ impl<'a, 'tcx> RestoreAnnotationsFolder<'a, 'tcx> { } } -impl<'a, 'tcx> Folder for RestoreAnnotationsFolder<'a, 'tcx> { - fn fold_local(&mut self, local: P) -> P { - local.map(|local| { - let ty = local.ty.clone().or_else(|| { - if self.type_annotations.contains_key(&local.span) { - let new_ty = self.cx.node_type(local.id); - // Reflect the type back to an AST type. Since - // we don't (yet) have a way to determine if an - // AST Ty is equivalent to a TyCtxt Ty, we just - // drop the old type and recreate it. Ideally we - // would only change the old AST Ty if it was - // changed in retyping. - let new_ast_ty = reflect_tcx_ty(self.cx.ty_ctxt(), new_ty); - Some(new_ast_ty) - } else { - None - } - }); - Local { - ty, - ..local +impl<'a, 'tcx> MutVisitor for RestoreAnnotationsFolder<'a, 'tcx> { + fn visit_local(&mut self, local: &mut P) { + if local.ty.is_none() { + if self.type_annotations.contains_key(&local.span) { + let new_ty = self.cx.node_type(local.id); + // Reflect the type back to an AST type. Since + // we don't (yet) have a way to determine if an + // AST Ty is equivalent to a TyCtxt Ty, we just + // drop the old type and recreate it. Ideally we + // would only change the old AST Ty if it was + // changed in retyping. 
+ let new_ast_ty = reflect_tcx_ty(self.cx.ty_ctxt(), new_ty); + local.ty = Some(new_ast_ty); } - }) + } } } @@ -1002,40 +937,32 @@ impl<'a, 'tcx, 'b> RetypeIteration<'a, 'tcx, 'b> { } } - fn run(&mut self, krate: Crate) -> TypeckLoopResult { - let krate = { - fold_illtyped(self.cx, krate, RetypeIterationFolder { iteration: self }) - }; + fn run(&mut self, krate: &mut Crate) -> TypeckLoopResult { + fold_illtyped(self.cx, krate, RetypeIterationFolder { iteration: self }); if self.num_inserted_casts > 0 { - return TypeckLoopResult::Iterate(krate); + return TypeckLoopResult::Iterate; } // If we find any remaining type errors, restore the explicit type // annotation to see if that will fix the error. let mut local_type_restored = false; - let krate = fold_nodes(krate, |local: P| { - local.map(|local| { - let ty = self.cx.node_type(local.id); - if let TyKind::Error = ty.sty { - if let Some(old_ty) = self.type_annotations.get(&local.span) { - local_type_restored = true; - return Local { - ty: Some(old_ty.clone()), - ..local - } - } + MutVisitNodes::visit(krate, |local: &mut P| { + let ty = self.cx.node_type(local.id); + if let TyKind::Error = ty.sty { + if let Some(old_ty) = self.type_annotations.get(&local.span) { + local_type_restored = true; + local.ty = Some(old_ty.clone()); } - local - }) + } }); if local_type_restored { - return TypeckLoopResult::Iterate(krate); + return TypeckLoopResult::Iterate; } let mut errors = false; - visit_fns(&krate, |func| { + visit_fns(krate, |func| { if func.block.is_some() { let def_id = self.cx.hir_map().local_def_id(func.id); let tables = self.cx.ty_ctxt().typeck_tables_of(def_id); @@ -1047,9 +974,9 @@ impl<'a, 'tcx, 'b> RetypeIteration<'a, 'tcx, 'b> { if errors { debug!("{:#?}", krate); - TypeckLoopResult::Err("Typechecking failed", krate) + TypeckLoopResult::Err("Typechecking failed") } else { - TypeckLoopResult::Finished(krate) + TypeckLoopResult::Finished } } } @@ -1061,25 +988,21 @@ struct RetypeIterationFolder<'a, 'b, 'tcx, 'c> 
{ impl<'a, 'b, 'tcx, 'c> IlltypedFolder<'tcx> for RetypeIterationFolder<'a, 'b, 'tcx, 'c> { fn fix_expr( &mut self, - e: P, + e: &mut P, actual: ty::Ty<'tcx>, expected: ty::Ty<'tcx> - ) -> P { + ) { info!("Retyping {:?} into type {:?}", e, expected); if let TyKind::Error = actual.sty { - return e; + return; } - match self.iteration.try_retype(e, TypeExpectation::new(expected)) { - Ok(e) => { - info!("Retyped into {:?} with type {:?}", e, expected); - e - }, - Err(e) => { - // With a bottom-up retyping, I'm not sure we want to panic - // here. We may be able to retype a parent and eliminate the - // need to retype the child. - panic!("Could not transform expression {:?} from type {:?} into type {:?}", e, actual, expected) - } + if self.iteration.try_retype(e, TypeExpectation::new(expected)) { + info!("Retyped into {:?} with type {:?}", e, expected); + } else { + // With a bottom-up retyping, I'm not sure we want to panic + // here. We may be able to retype a parent and eliminate the + // need to retype the child. + panic!("Could not transform expression {:?} from type {:?} into type {:?}", e, actual, expected) } } } @@ -1307,17 +1230,21 @@ impl<'a, 'tcx, 'b> RetypeIteration<'a, 'tcx, 'b> { /// Attempt to remove transmutes, optionally with as_ptr and as_mut_ptr /// calls. This is exclusively for readability, not correctness. 
- fn try_transmute_fix(&mut self, expr: &P, expected: TypeExpectation<'tcx>) -> Option> { - match (&expr.node, &expected.ty.sty) { + fn try_transmute_fix(&mut self, expr: &mut P, expected: TypeExpectation<'tcx>) -> bool { + match (&mut expr.node, &expected.ty.sty) { (ExprKind::Call(ref callee, ref arguments), _) => { let callee_did = self.cx.try_resolve_expr(callee); if let Some(callee_did) = callee_did { - let callee_str = self.cx.ty_ctxt().absolute_item_path_str(callee_did); + let callee_str = self.cx.ty_ctxt().def_path_str(callee_did); // intrinsics are in an anonymous namespace, so the full // path is actually core::intrinsics::::transmute - if callee_str == "core::intrinsics::::transmute" { - if let Ok(new_subexpr) = self.try_retype(arguments[0].clone(), expected) { - return Some(new_subexpr); + if callee_str == "std::intrinsics::transmute" || + callee_str == "core::intrinsics::transmute" + { + let mut e = arguments[0].clone(); + if self.try_retype(&mut e, expected) { + *expr = e; + return true; } } } @@ -1335,18 +1262,21 @@ impl<'a, 'tcx, 'b> RetypeIteration<'a, 'tcx, 'b> { let mut sub_expected = expected; sub_expected.ty = self.cx.ty_ctxt().mk_slice(inner_ty); sub_expected.mutability = Some(*mutbl); - if let Ok(new_subexpr) = self.try_retype(arguments[0].clone(), sub_expected.clone()) { - return Some(mk().method_call_expr(new_subexpr, new_method_name, Vec::>::new())); + let mut e = arguments[0].clone(); + if self.try_retype(&mut e, sub_expected.clone()) { + *expr = mk().method_call_expr(e, new_method_name, Vec::>::new()); + return true; } sub_expected.ty = self.cx.ty_ctxt().mk_ref( &ty::ReEmpty, ty::TypeAndMut{ty: sub_expected.ty, mutbl: *mutbl}, ); - if let Ok(new_subexpr) = self.try_retype(arguments[0].clone(), sub_expected) { - return Some(mk().method_call_expr(new_subexpr, new_method_name, Vec::>::new())); + if self.try_retype(&mut e, sub_expected) { + *expr = mk().method_call_expr(e, new_method_name, Vec::>::new()); + return true; } } - 
(ExprKind::Unary(UnOp::Deref, ref e), _) => { + (ExprKind::Unary(UnOp::Deref, e), _) => { let mut sub_expected = expected.clone(); let old_subtype = self.cx.node_type(e.id); sub_expected.ty = match old_subtype.sty { @@ -1366,62 +1296,66 @@ impl<'a, 'tcx, 'b> RetypeIteration<'a, 'tcx, 'b> { } _ => panic!("Unsupported type for dereference"), }; - if let Ok(new_expr) = self.try_retype(e.clone(), sub_expected) { - return Some(mk().unary_expr(UnOp::Deref, new_expr)); - } + return self.try_retype(e, sub_expected); } - (ExprKind::AddrOf(expr_mut, ref e), TyKind::Ref(_, subty, expected_mut)) => { - let mutbl = match (expr_mut, expected_mut) { + (ExprKind::AddrOf(expr_mut, e), TyKind::Ref(_, subty, expected_mut)) => { + let mutbl = match (&expr_mut, expected_mut) { (Mutability::Mutable, _) | (Mutability::Immutable, hir::Mutability::MutImmutable) => expected_mut, - _ => return None, + _ => return false, }; let mut sub_expected = expected; sub_expected.ty = subty; sub_expected.mutability = Some(*mutbl); - if let Ok(new_expr) = self.try_retype(e.clone(), sub_expected) { - return Some(mk().set_mutbl(*mutbl).addr_of_expr(new_expr)); + if self.try_retype(e, sub_expected) { + *expr_mut = match *mutbl { + hir::Mutability::MutImmutable => Mutability::Immutable, + hir::Mutability::MutMutable => Mutability::Mutable, + }; + return true; } } _ => (), }; - None + false } /// Attempt to coerce or cast an expression into the expected type fn try_retype( &mut self, - expr: P, + expr: &mut P, expected: TypeExpectation<'tcx>, - ) -> Result, P> { + ) -> bool { let cur_ty = self.cx.node_type(expr.id); debug!("Attempting to retype {:?} from {:?} to {:?}", expr, cur_ty, expected); if can_coerce(cur_ty, expected.ty, self.cx.ty_ctxt()) { - return Ok(expr); + return true; } - match expr.node { - ExprKind::Cast(ref expr, _) => { - return self.try_retype(expr.clone(), expected); + match &mut expr.node { + ExprKind::Cast(expr, _) => { + return self.try_retype(expr, expected); } - ExprKind::Lit(ref lit) 
=> { + ExprKind::Lit(lit) => { if let Some(e) = self.retype_int_lit(lit.clone(), expected.clone()) { - return Ok(e); + *expr = e; + return true; } } _ => (), }; - if let Some(e) = self.try_transmute_fix(&expr, expected.clone()) { - return Ok(e); + if self.try_transmute_fix(expr, expected.clone()) { + return true; } if self.can_cast(cur_ty, expected.ty, self.cx.hir_map().get_parent_did(expr.id)) { self.num_inserted_casts += 1; - return Ok(mk().cast_expr(expr, reflect_tcx_ty(self.cx.ty_ctxt(), expected.ty))); + *expr = mk().cast_expr(expr.clone(), reflect_tcx_ty(self.cx.ty_ctxt(), expected.ty)); + return true; } - Err(expr) + false } } @@ -1483,11 +1417,11 @@ fn can_coerce<'a, 'tcx>( pub struct RemoveRedundantCasts; impl Transform for RemoveRedundantCasts { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let tcx = cx.ty_ctxt(); let mut mcx = MatchCtxt::new(st, cx); let pat = mcx.parse_expr("$e:Expr as $t:Ty"); - fold_match_with(mcx, pat, krate, |ast, mcx| { + mut_visit_match_with(mcx, pat, krate, |ast, mcx| { let e = mcx.bindings.get::<_, P>("$e").unwrap(); let e_ty = cx.adjusted_node_type(e.id); let e_ty = tcx.normalize_erasing_regions(ParamEnv::empty(), e_ty); @@ -1496,9 +1430,7 @@ impl Transform for RemoveRedundantCasts { let t_ty = cx.adjusted_node_type(t.id); let t_ty = tcx.normalize_erasing_regions(ParamEnv::empty(), t_ty); if e_ty == t_ty { - e.clone() - } else { - ast + *ast = e.clone(); } }) } @@ -1517,20 +1449,19 @@ impl Transform for RemoveRedundantCasts { pub struct ConvertCastAsPtr; impl Transform for ConvertCastAsPtr { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { - let krate = replace_expr(st, cx, krate, + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { + replace_expr(st, cx, krate, "typed!($expr:Expr, &[$ty:Ty]) as *const $ty", "$expr.as_ptr()"); - let krate = 
replace_expr(st, cx, krate, + replace_expr(st, cx, krate, "typed!($expr:Expr, &[$ty:Ty]) as *mut $ty", "$expr.as_mut_ptr()"); - let krate = replace_expr(st, cx, krate, + replace_expr(st, cx, krate, "typed!($expr:Expr, &[$ty:Ty; $len]) as *const $ty", "$expr.as_ptr()"); - let krate = replace_expr(st, cx, krate, + replace_expr(st, cx, krate, "typed!($expr:Expr, &[$ty:Ty; $len]) as *mut $ty", "$expr.as_mut_ptr()"); - krate } fn min_phase(&self) -> Phase { diff --git a/c2rust-refactor/src/transform/rewrite.rs b/c2rust-refactor/src/transform/rewrite.rs index 2c53192f2..3b16754c6 100644 --- a/c2rust-refactor/src/transform/rewrite.rs +++ b/c2rust-refactor/src/transform/rewrite.rs @@ -4,7 +4,7 @@ use syntax::symbol::Symbol; use crate::command::{CommandState, Registry}; use crate::contains_mark::contains_mark; use crate::driver::Phase; -use crate::matcher::{MatchCtxt, Subst, fold_match_with}; +use crate::matcher::{MatchCtxt, Subst, mut_visit_match_with}; use crate::transform::Transform; use c2rust_ast_builder::IntoSymbol; use crate::RefactorCtxt; @@ -47,18 +47,18 @@ pub struct RewriteExpr { } impl Transform for RewriteExpr { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let mut mcx = MatchCtxt::new(st, cx); let pat = mcx.parse_expr(&self.pat); let repl = mcx.parse_expr(&self.repl); - fold_match_with(mcx, pat, krate, |ast, mcx| { + mut_visit_match_with(mcx, pat, krate, |ast, mcx| { if let Some(filter) = self.filter { - if !contains_mark(&*ast, filter, st) { - return ast; + if !contains_mark(&**ast, filter, st) { + return; } } - repl.clone().subst(st, cx, &mcx.bindings) + *ast = repl.clone().subst(st, cx, &mcx.bindings); }) } @@ -93,18 +93,18 @@ pub struct RewriteTy { } impl Transform for RewriteTy { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: 
&RefactorCtxt) { let mut mcx = MatchCtxt::new(st, cx); let pat = mcx.parse_ty(&self.pat); let repl = mcx.parse_ty(&self.repl); - fold_match_with(mcx, pat, krate, |ast, mcx| { + mut_visit_match_with(mcx, pat, krate, |ast, mcx| { if let Some(filter) = self.filter { - if !contains_mark(&*ast, filter, st) { - return ast; + if !contains_mark(&**ast, filter, st) { + return; } } - repl.clone().subst(st, cx, &mcx.bindings) + *ast = repl.clone().subst(st, cx, &mcx.bindings); }) } @@ -132,12 +132,12 @@ pub struct RewriteStmts { } impl Transform for RewriteStmts { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let mut mcx = MatchCtxt::new(st, cx); let pat = mcx.parse_stmts(&self.pat); let repl = mcx.parse_stmts(&self.repl); - fold_match_with(mcx, pat, krate, |_, mcx| { - repl.clone().subst(st, cx, &mcx.bindings) + mut_visit_match_with(mcx, pat, krate, |ast, mcx| { + *ast = repl.clone().subst(st, cx, &mcx.bindings); }) } @@ -152,14 +152,13 @@ pub struct DebugMatchExpr { } impl Transform for DebugMatchExpr { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let mut init_mcx = MatchCtxt::new(st, cx); init_mcx.debug = true; let pat = init_mcx.parse_expr(&self.pat); - fold_match_with(init_mcx, pat, krate, |ast, _mcx| { + mut_visit_match_with(init_mcx, pat, krate, |ast, _mcx| { eprintln!("matched node {:?}", ast); - ast }) } diff --git a/c2rust-refactor/src/transform/statics.rs b/c2rust-refactor/src/transform/statics.rs index 543771d37..546284aa4 100644 --- a/c2rust-refactor/src/transform/statics.rs +++ b/c2rust-refactor/src/transform/statics.rs @@ -1,14 +1,15 @@ use std::collections::{HashMap, HashSet}; +use std::mem; use rustc::hir::def_id::DefId; use syntax::ast::*; use syntax::ptr::P; use syntax::symbol::Symbol; -use crate::ast_manip::{fold_nodes, 
fold_modules}; -use crate::ast_manip::fn_edit::fold_fns; +use crate::ast_manip::{FlatMapNodes, MutVisitNodes, fold_modules}; +use crate::ast_manip::fn_edit::mut_visit_fns; use crate::command::{CommandState, Registry}; use crate::driver::{parse_expr}; -use crate::matcher::{Bindings, BindingType, MatchCtxt, Subst, fold_match_with}; +use crate::matcher::{Bindings, BindingType, MatchCtxt, Subst, mut_visit_match_with}; use crate::path_edit::fold_resolved_paths; use crate::transform::Transform; use c2rust_ast_builder::{mk, IntoSymbol}; @@ -65,11 +66,11 @@ pub struct CollectToStruct { } impl Transform for CollectToStruct { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { // Map from Symbol (the name) to the DefId of the old `static`. let mut old_statics = HashMap::new(); - let krate = fold_modules(krate, |curs| { + fold_modules(krate, |curs| { let mut matches = Vec::new(); let mut insert_point = None; @@ -116,22 +117,20 @@ impl Transform for CollectToStruct { init_mcx.bindings.add( "__s", Ident::with_empty_ctxt((&self.instance_name as &str).into_symbol())); - let krate = fold_match_with(init_mcx, ident_pat, krate, |orig, mcx| { + mut_visit_match_with(init_mcx, ident_pat, krate, |orig, mcx| { let static_id = match old_statics.get(&mcx.bindings.get::<_, Ident>("__x").unwrap().name) { Some(&x) => x, - None => return orig, + None => return, }; if cx.resolve_expr(&orig) != static_id { - return orig; + return; } // This really is a reference to one of the collected statics. Replace it with a // reference to the generated struct. 
- ident_repl.clone().subst(st, cx, &mcx.bindings) + *orig = ident_repl.clone().subst(st, cx, &mcx.bindings) }); - - krate } } @@ -212,7 +211,7 @@ fn build_struct_instance(struct_name: &str, pub struct Localize; impl Transform for Localize { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { // (1) Collect all marked statics. struct StaticInfo { @@ -223,7 +222,7 @@ impl Transform for Localize { } let mut statics = HashMap::new(); - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { if !st.marked(i.id, "target") { return smallvec![i]; } @@ -251,24 +250,21 @@ impl Transform for Localize { // Collect all outgoing references from marked functions. let mut fn_refs = HashMap::new(); - let krate = fold_fns(krate, |mut fl| { + mut_visit_fns(krate, |fl| { if !st.marked(fl.id, "user") { - return fl; + return; } let fn_def_id = cx.node_def_id(fl.id); let mut refs = HashSet::new(); - let block = fold_resolved_paths(fl.block, cx, |qself, path, def| { + fold_resolved_paths(&mut fl.block, cx, |qself, path, def| { if let Some(def_id) = def.opt_def_id() { refs.insert(def_id); } (qself, path) }); fn_refs.insert(fn_def_id, refs); - - fl.block = block; - fl }); // Sort the references, collecting those that point to other marked functions and those @@ -317,42 +313,31 @@ impl Transform for Localize { // the statics they reference. Replace uses of statics in the bodies of marked functions // with the corresponding parameter. - let krate = fold_fns(krate, |mut fl| { + mut_visit_fns(krate, |fl| { let fn_def_id = cx.node_def_id(fl.id); if let Some(static_ids) = fn_statics.get(&fn_def_id) { // Add new argument to function signature. 
- fl.decl = fl.decl.map(|mut decl| { - for &static_id in static_ids { - let info = &statics[&static_id]; - decl.inputs.push(mk().arg( - mk().set_mutbl(info.mutbl).ref_ty(&info.ty), - mk().ident_pat(info.arg_name))); - } - decl - }); + for &static_id in static_ids { + let info = &statics[&static_id]; + fl.decl.inputs.push(mk().arg( + mk().set_mutbl(info.mutbl).ref_ty(&info.ty), + mk().ident_pat(info.arg_name))); + } // Update uses of statics. - fl.block = fold_nodes(fl.block, |e: P| { + MutVisitNodes::visit(&mut fl.block, |e: &mut P| { if let Some(def_id) = cx.try_resolve_expr(&e) { if let Some(info) = statics.get(&def_id) { - return mk().unary_expr("*", mk().ident_expr(info.arg_name)); + *e = mk().unary_expr("*", mk().ident_expr(info.arg_name)); + return; } } - e }); // Update calls to other marked functions. - fl.block = fold_nodes(fl.block, |e: P| { - match e.node { - ExprKind::Call(_, _) => {}, - _ => return e, - } - - e.map(|e| { - unpack!([e.node] ExprKind::Call(func, args)); - let mut args = args; - + MutVisitNodes::visit(&mut fl.block, |e: &mut P| { + if let ExprKind::Call(func, args) = &mut e.node { if let Some(func_id) = cx.try_resolve_expr(&func) { if let Some(func_static_ids) = fn_statics.get(&func_id) { for &static_id in func_static_ids { @@ -360,26 +345,13 @@ impl Transform for Localize { } } } - - Expr { - node: ExprKind::Call(func, args), - .. e - } - }) + } }); } else { // Update calls only. - fl.block = fold_nodes(fl.block, |e: P| { - match e.node { - ExprKind::Call(_, _) => {}, - _ => return e, - } - - e.map(|e| { - unpack!([e.node] ExprKind::Call(func, args)); - let mut args = args; - + MutVisitNodes::visit(&mut fl.block, |e: &mut P| { + if let ExprKind::Call(func, args) = &mut e.node { if let Some(func_id) = cx.try_resolve_expr(&func) { if let Some(func_static_ids) = fn_statics.get(&func_id) { for &static_id in func_static_ids { @@ -389,20 +361,10 @@ impl Transform for Localize { } } } - - Expr { - node: ExprKind::Call(func, args), - .. 
e - } - }) + } }); } - - fl }); - - krate - } } @@ -446,7 +408,7 @@ impl Transform for Localize { struct StaticToLocal; impl Transform for StaticToLocal { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { // (1) Collect all marked statics. struct StaticInfo { @@ -457,7 +419,7 @@ impl Transform for StaticToLocal { } let mut statics = HashMap::new(); - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { if !st.marked(i.id, "target") { return smallvec![i]; } @@ -482,11 +444,11 @@ impl Transform for StaticToLocal { // (2) Add a new local to every function that uses a marked static. - let krate = fold_fns(krate, |mut fl| { + mut_visit_fns(krate, |fl| { // Figure out which statics (if any) this function uses. let mut ref_ids = HashSet::new(); let mut refs = Vec::new(); - fl.block = fold_resolved_paths(fl.block, cx, |qself, path, def| { + fold_resolved_paths(&mut fl.block, cx, |qself, path, def| { if let Some(def_id) = def.opt_def_id() { if ref_ids.insert(def_id) { if let Some(info) = statics.get(&def_id) { @@ -498,30 +460,25 @@ impl Transform for StaticToLocal { }); if refs.len() == 0 { - return fl; + return; } refs.sort_by_key(|info| info.name.name); - fl.block = fl.block.map(|b| b.map(|mut b| { - let mut new_stmts = Vec::with_capacity(refs.len() + b.stmts.len()); + if let Some(block) = &mut fl.block { + let new_stmts = Vec::with_capacity(refs.len() + block.stmts.len()); + let old_stmts = mem::replace(&mut block.stmts, new_stmts); for &info in &refs { let pat = mk().set_mutbl(info.mutbl).ident_pat(info.name); let local = mk().local(pat, Some(info.ty.clone()), Some(info.expr.clone())); let stmt = mk().local_stmt(P(local)); - new_stmts.push(stmt); + block.stmts.push(stmt); } - new_stmts.extend(b.stmts.into_iter()); - b.stmts = new_stmts; - b - })); - - fl + block.stmts.extend(old_stmts.into_iter()); + } }); - - krate } } diff --git 
a/c2rust-refactor/src/transform/structs.rs b/c2rust-refactor/src/transform/structs.rs index a5af79282..cb5b786aa 100644 --- a/c2rust-refactor/src/transform/structs.rs +++ b/c2rust-refactor/src/transform/structs.rs @@ -2,10 +2,10 @@ use rustc::ty; use syntax::ast::*; use syntax::ptr::P; -use crate::ast_manip::{fold_blocks, fold_nodes, AstEquiv}; +use crate::ast_manip::{fold_blocks, FlatMapNodes, AstEquiv}; use crate::command::{CommandState, Registry}; use crate::driver::{Phase, parse_expr}; -use crate::matcher::{fold_match, Subst}; +use crate::matcher::{mut_visit_match, Subst}; use crate::path_edit::fold_resolved_paths; use crate::transform::Transform; use c2rust_ast_builder::{mk, IntoSymbol}; @@ -32,22 +32,22 @@ use crate::RefactorCtxt; pub struct AssignToUpdate; impl Transform for AssignToUpdate { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let pat = parse_expr(cx.session(), "__x.__f = __y"); let repl = parse_expr(cx.session(), "__x = __s { __f: __y, .. __x }"); - fold_match(st, cx, pat, krate, |orig, mut mcx| { + mut_visit_match(st, cx, pat, krate, |orig, mut mcx| { let x = mcx.bindings.get::<_, P>("__x").unwrap().clone(); let struct_def_id = match cx.node_type(x.id).sty { ty::TyKind::Adt(ref def, _) => def.did, - _ => return orig, + _ => return, }; let struct_path = cx.def_path(struct_def_id); mcx.bindings.add("__s", struct_path); - repl.clone().subst(st, cx, &mcx.bindings) - }) + *orig = repl.clone().subst(st, cx, &mcx.bindings); + }); } fn min_phase(&self) -> Phase { @@ -75,7 +75,7 @@ impl Transform for AssignToUpdate { pub struct MergeUpdates; impl Transform for MergeUpdates { - fn transform(&self, krate: Crate, _st: &CommandState, _cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, _st: &CommandState, _cx: &RefactorCtxt) { fold_blocks(krate, |curs| { loop { // Find a struct update. 
@@ -142,19 +142,21 @@ fn build_struct_update(path: Path, fields: Vec, base: P) -> Stmt { pub struct Rename(pub String); impl Transform for Rename { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let new_ident = Ident::with_empty_ctxt((&self.0 as &str).into_symbol()); let mut target_def_id = None; // Find the struct definition and rename it. - let krate = fold_nodes(krate, |i: P| { + FlatMapNodes::visit(krate, |i: P| { if target_def_id.is_some() || !st.marked(i.id, "target") { return smallvec![i]; } // Make sure this is actually a struct declaration, and not, say, the target // declaration's containing module. - match_or!([struct_item_id(&i)] Some(x) => x; return smallvec![i]); + if !is_struct(&i) { + return smallvec![i]; + } target_def_id = Some(cx.node_def_id(i.id)); smallvec![i.map(|i| { @@ -172,7 +174,7 @@ impl Transform for Rename { let target_def_id = target_def_id .expect("found no struct to rename"); - let krate = fold_resolved_paths(krate, cx, |qself, mut path, def| { + fold_resolved_paths(krate, cx, |qself, mut path, def| { if let Some(def_id) = def.opt_def_id() { if def_id == target_def_id { path.segments.last_mut().unwrap().ident = new_ident; @@ -180,8 +182,6 @@ impl Transform for Rename { } (qself, path) }); - - krate } fn min_phase(&self) -> Phase { @@ -189,10 +189,13 @@ impl Transform for Rename { } } -fn struct_item_id(i: &Item) -> Option { - let vd = match_or!([i.node] ItemKind::Struct(ref vd, _) => vd; return None); - let id = match_or!([*vd] VariantData::Struct(_, id) => id; return None); - Some(id) +fn is_struct(i: &Item) -> bool { + if let ItemKind::Struct(ref vd, _) = i.node { + if let VariantData::Struct(..) 
= *vd { + return true; + } + } + false } diff --git a/c2rust-refactor/src/transform/test.rs b/c2rust-refactor/src/transform/test.rs index 32f542b74..566b37cc6 100644 --- a/c2rust-refactor/src/transform/test.rs +++ b/c2rust-refactor/src/transform/test.rs @@ -6,11 +6,11 @@ use syntax::ast::*; use syntax::ptr::P; use rustc::hir; use rustc::ty::{self, TyCtxt, ParamEnv}; -use rustc::ty::subst::Substs; +use rustc::ty::subst::InternalSubsts; use c2rust_ast_builder::mk; use crate::ast_manip::{visit_nodes}; -use crate::ast_manip::fn_edit::fold_fns; +use crate::ast_manip::fn_edit::mut_visit_fns; use crate::command::{RefactorState, CommandState, Command, Registry, TypeckLoopResult}; use crate::driver::{Phase}; use crate::matcher::{replace_expr, replace_stmts}; @@ -28,7 +28,7 @@ use crate::RefactorCtxt; pub struct OnePlusOne; impl Transform for OnePlusOne { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let krate = replace_expr(st, cx, krate, "2", "1 + 1"); krate } @@ -45,7 +45,7 @@ impl Transform for OnePlusOne { pub struct FPlusOne; impl Transform for FPlusOne { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let krate = replace_expr(st, cx, krate, "f(__x)", "__x + 1"); krate } @@ -62,7 +62,7 @@ impl Transform for FPlusOne { pub struct ReplaceStmts(pub String, pub String); impl Transform for ReplaceStmts { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let krate = replace_stmts(st, cx, krate, &self.0, &self.1); krate } @@ -86,10 +86,10 @@ pub struct InsertRemoveArgs { } impl Transform for InsertRemoveArgs { - fn transform(&self, krate: Crate, st: &CommandState, _cx: &RefactorCtxt) -> Crate { - let krate = fold_fns(krate, |mut 
fl| { + fn transform(&self, krate: &mut Crate, st: &CommandState, _cx: &RefactorCtxt) { + mut_visit_fns(krate, |fl| { if !st.marked(fl.id, "target") { - return fl; + return; } let mut counter = 0; @@ -100,31 +100,24 @@ impl Transform for InsertRemoveArgs { arg }; - fl.decl = fl.decl.clone().map(|mut decl| { - let mut new_args = Vec::new(); - let old_arg_count = decl.inputs.len(); - for (i, arg) in decl.inputs.into_iter().enumerate() { - for _ in 0 .. self.insert_idxs.get(&i).cloned().unwrap_or(0) { - new_args.push(mk_arg()); - } - - if !self.remove_idxs.contains(&i) { - new_args.push(arg); - } + let mut new_args = Vec::new(); + let old_arg_count = fl.decl.inputs.len(); + for (i, arg) in fl.decl.inputs.iter().enumerate() { + for _ in 0 .. self.insert_idxs.get(&i).cloned().unwrap_or(0) { + new_args.push(mk_arg()); } - for _ in 0 .. self.insert_idxs.get(&old_arg_count).cloned().unwrap_or(0) { - new_args.push(mk_arg()); + if !self.remove_idxs.contains(&i) { + new_args.push(arg.clone()); } + } - decl.inputs = new_args; - decl - }); + for _ in 0 .. 
self.insert_idxs.get(&old_arg_count).cloned().unwrap_or(0) { + new_args.push(mk_arg()); + } - fl + fl.decl.inputs = new_args; }); - - krate } } @@ -142,13 +135,13 @@ pub struct TestTypeckLoop; impl Command for TestTypeckLoop { fn run(&mut self, state: &mut RefactorState) { let mut i = 3; - state.run_typeck_loop(|krate, _st, _cx| { + state.run_typeck_loop(|_krate, _st, _cx| { i -= 1; info!("ran typeck loop iteration {}", i); if i == 0 { - TypeckLoopResult::Finished(krate) + TypeckLoopResult::Finished } else { - TypeckLoopResult::Iterate(krate) + TypeckLoopResult::Iterate } }).unwrap(); } @@ -166,8 +159,8 @@ impl Command for TestTypeckLoop { pub struct TestDebugCallees; impl Transform for TestDebugCallees { - fn transform(&self, krate: Crate, _st: &CommandState, cx: &RefactorCtxt) -> Crate { - visit_nodes(&krate, |e: &Expr| { + fn transform(&self, krate: &mut Crate, _st: &CommandState, cx: &RefactorCtxt) { + visit_nodes(krate, |e: &Expr| { let tcx = cx.ty_ctxt(); let hir_map = cx.hir_map(); @@ -186,7 +179,7 @@ impl Transform for TestDebugCallees { fn describe_ty<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, desc: &str, ty: ty::Ty<'tcx>, - substs: Option<&'tcx Substs<'tcx>>) { + substs: Option<&'tcx InternalSubsts<'tcx>>) { info!(" {}: {:?}", desc, ty); if let Some(substs) = substs { info!(" subst: {:?}", @@ -261,7 +254,6 @@ impl Transform for TestDebugCallees { _ => {}, } }); - krate } fn min_phase(&self) -> Phase { diff --git a/c2rust-refactor/src/transform/vars.rs b/c2rust-refactor/src/transform/vars.rs index 146a938bb..7db6e184c 100644 --- a/c2rust-refactor/src/transform/vars.rs +++ b/c2rust-refactor/src/transform/vars.rs @@ -9,10 +9,10 @@ use syntax::ptr::P; use syntax::visit::{self, Visitor}; use c2rust_ast_builder::mk; -use crate::ast_manip::{fold_nodes, fold_blocks, visit_nodes}; +use crate::ast_manip::{MutVisitNodes, fold_blocks, visit_nodes}; use crate::command::{CommandState, Registry}; use crate::driver::{Phase}; -use crate::matcher::{MatchCtxt, Subst, 
fold_match_with, replace_stmts}; +use crate::matcher::{MatchCtxt, Subst, mut_visit_match_with, replace_stmts}; use crate::transform::Transform; use rustc::middle::cstore::CrateStore; use crate::RefactorCtxt; @@ -29,14 +29,13 @@ use crate::RefactorCtxt; pub struct LetXUninitialized; impl Transform for LetXUninitialized { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { - let krate = replace_stmts(st, cx, krate, + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { + replace_stmts(st, cx, krate, "let __pat;", "let __pat = ::std::mem::uninitialized();"); - let krate = replace_stmts(st, cx, krate, + replace_stmts(st, cx, krate, "let __pat: __ty;", "let __pat: __ty = ::std::mem::uninitialized();"); - krate } } @@ -54,7 +53,7 @@ impl Transform for LetXUninitialized { pub struct SinkLets; impl Transform for SinkLets { - fn transform(&self, krate: Crate, _st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, _st: &CommandState, cx: &RefactorCtxt) { // (1) Collect info on every local that might be worth moving. struct LocalInfo { @@ -63,7 +62,7 @@ impl Transform for SinkLets { } let mut locals: HashMap = HashMap::new(); - visit_nodes(&krate, |l: &Local| { + visit_nodes(krate, |l: &Local| { if let PatKind::Ident(BindingMode::ByValue(_), _, None) = l.pat.node { if l.init.is_none() || !expr_has_side_effects(cx, l.init.as_ref().unwrap()) { let hir_id = cx.hir_map().node_to_hir_id(l.pat.id); @@ -158,13 +157,13 @@ impl Transform for SinkLets { // This is separate from the actual rewrite because we need to do a preorder traversal, but // folds are always postorder to avoid infinite recursion. - visit_nodes(&krate, |b: &Block| { + visit_nodes(krate, |b: &Block| { let used_locals = &block_locals[&b.id]; // Check if there are any locals we should place in this block. We place a local here // if its use kind is `Other` and it hasn't been placed already. 
A use kind of // `InsideOneBlock` means the local can be placed somewhere deeper, so this strategy - // ensures we place the local in the deepest legal position. We rely on `fold_nodes` + // ensures we place the local in the deepest legal position. We rely on `mut_visit_nodes` // doing a preorder traversal to avoid placing them too deep. let mut place_here = used_locals.iter() .filter(|&(&id, &kind)| kind == UseKind::Other && !placed_locals.contains(&id)) @@ -184,20 +183,15 @@ impl Transform for SinkLets { // (4) Place new locals in the appropriate locations. - let krate = fold_nodes(krate, |b: P| { + MutVisitNodes::visit(krate, |b: &mut P| { let place_here = match_or!([local_placement.get(&b.id)] - Some(x) => x; return b); - - b.map(|mut b| { - let mut new_stmts = place_here.iter() - .map(|&id| mk().local_stmt(&locals[&id].local)) - .collect::>(); - new_stmts.append(&mut b.stmts); - Block { - stmts: new_stmts, - .. b - } - }) + Some(x) => x; return); + + let mut new_stmts = place_here.iter() + .map(|&id| mk().local_stmt(&locals[&id].local)) + .collect::>(); + new_stmts.append(&mut b.stmts); + b.stmts = new_stmts; }); // (5) Remove old locals @@ -210,19 +204,14 @@ impl Transform for SinkLets { .map(|(_, info)| info.old_node_id) .collect::>(); - let krate = fold_nodes(krate, |b: P| { - b.map(|mut b| { - b.stmts.retain(|s| { - match s.node { - StmtKind::Local(ref l) => !remove_local_ids.contains(&l.id), - _ => true, - } - }); - b - }) + MutVisitNodes::visit(krate, |b: &mut P| { + b.stmts.retain(|s| { + match s.node { + StmtKind::Local(ref l) => !remove_local_ids.contains(&l.id), + _ => true, + } + }); }); - - krate } fn min_phase(&self) -> Phase { @@ -285,11 +274,11 @@ fn is_uninit_call(cx: &RefactorCtxt, e: &Expr) -> bool { pub struct FoldLetAssign; impl Transform for FoldLetAssign { - fn transform(&self, krate: Crate, _st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, _st: &CommandState, cx: &RefactorCtxt) { // (1) Find all 
locals that might be foldable. let mut locals: HashMap> = HashMap::new(); - visit_nodes(&krate, |l: &Local| { + visit_nodes(krate, |l: &Local| { if let PatKind::Ident(BindingMode::ByValue(_), _, None) = l.pat.node { if l.init.is_none() || !expr_has_side_effects(cx, l.init.as_ref().unwrap()) { let hir_id = cx.hir_map().node_to_hir_id(l.pat.id); @@ -344,7 +333,7 @@ impl Transform for FoldLetAssign { cx: cx, locals: &locals, }; - visit::walk_crate(&mut v, &krate); + visit::walk_crate(&mut v, krate); v.stmt_locals }; @@ -445,10 +434,10 @@ impl Transform for FoldLetAssign { pub struct UninitToDefault; impl Transform for UninitToDefault { - fn transform(&self, krate: Crate, _st: &CommandState, cx: &RefactorCtxt) -> Crate { - fold_nodes(krate, |l: P| { + fn transform(&self, krate: &mut Crate, _st: &CommandState, cx: &RefactorCtxt) { + MutVisitNodes::visit(krate, |l: &mut P| { if !l.init.as_ref().map_or(false, |e| is_uninit_call(cx, e)) { - return l; + return; } let init = l.init.as_ref().unwrap().clone(); @@ -459,14 +448,9 @@ impl Transform for UninitToDefault { TyKind::Int(ity) => mk().int_lit(0, ity), TyKind::Uint(uty) => mk().int_lit(0, uty), TyKind::Float(fty) => mk().float_lit("0", fty), - _ => return l, + _ => return, }; - l.map(|l| { - Local { - init: Some(mk().lit_expr(new_init_lit)), - .. 
l - } - }) + l.init = Some(mk().lit_expr(new_init_lit)); }) } @@ -486,12 +470,12 @@ impl Transform for UninitToDefault { pub struct RemoveRedundantLetTypes; impl Transform for RemoveRedundantLetTypes { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { let tcx = cx.ty_ctxt(); let mut mcx = MatchCtxt::new(st, cx); let pat = mcx.parse_stmts("let $pat:Pat : $ty:Ty = $init:Expr;"); let repl = mcx.parse_stmts("let $pat = $init;"); - fold_match_with(mcx, pat, krate, |ast, mcx| { + mut_visit_match_with(mcx, pat, krate, |ast, mcx| { let e = mcx.bindings.get::<_, P>("$init").unwrap(); let e_ty = cx.adjusted_node_type(e.id); let e_ty = tcx.normalize_erasing_regions(ParamEnv::empty(), e_ty); @@ -500,9 +484,7 @@ impl Transform for RemoveRedundantLetTypes { let t_ty = cx.adjusted_node_type(t.id); let t_ty = tcx.normalize_erasing_regions(ParamEnv::empty(), t_ty); if e_ty == t_ty { - repl.clone().subst(st, cx, &mcx.bindings) - } else { - ast + *ast = repl.clone().subst(st, cx, &mcx.bindings); } }) } diff --git a/c2rust-refactor/src/transform/wrapping_arith.rs b/c2rust-refactor/src/transform/wrapping_arith.rs index 854044d7b..8b87a051c 100644 --- a/c2rust-refactor/src/transform/wrapping_arith.rs +++ b/c2rust-refactor/src/transform/wrapping_arith.rs @@ -15,35 +15,34 @@ use crate::RefactorCtxt; pub struct WrappingToNormal; impl Transform for WrappingToNormal { - fn transform(&self, krate: Crate, st: &CommandState, cx: &RefactorCtxt) -> Crate { - let krate = replace_expr(st, cx, krate, - "$x:Expr.wrapping_add($y:Expr)", - "$x + $y"); - let krate = replace_expr(st, cx, krate, - "$x:Expr.wrapping_sub($y:Expr)", - "$x - $y"); - let krate = replace_expr(st, cx, krate, - "$x:Expr.wrapping_mul($y:Expr)", - "$x * $y"); - let krate = replace_expr(st, cx, krate, - "$x:Expr.wrapping_div($y:Expr)", - "$x / $y"); - let krate = replace_expr(st, cx, krate, - 
"$x:Expr.wrapping_rem($y:Expr)", - "$x % $y"); - let krate = replace_expr(st, cx, krate, - "$x:Expr.wrapping_neg()", - "-$x"); - let krate = replace_expr(st, cx, krate, - "$x:Expr.wrapping_shl($y:Expr)", - "$x << $y"); - let krate = replace_expr(st, cx, krate, - "$x:Expr.wrapping_shr($y:Expr)", - "$x >> $y"); - let krate = replace_expr(st, cx, krate, - "$x:Expr.wrapping_abs()", - "$x:Expr.abs()"); - krate + fn transform(&self, krate: &mut Crate, st: &CommandState, cx: &RefactorCtxt) { + replace_expr(st, cx, krate, + "$x:Expr.wrapping_add($y:Expr)", + "$x + $y"); + replace_expr(st, cx, krate, + "$x:Expr.wrapping_sub($y:Expr)", + "$x - $y"); + replace_expr(st, cx, krate, + "$x:Expr.wrapping_mul($y:Expr)", + "$x * $y"); + replace_expr(st, cx, krate, + "$x:Expr.wrapping_div($y:Expr)", + "$x / $y"); + replace_expr(st, cx, krate, + "$x:Expr.wrapping_rem($y:Expr)", + "$x % $y"); + replace_expr(st, cx, krate, + "$x:Expr.wrapping_neg()", + "-$x"); + replace_expr(st, cx, krate, + "$x:Expr.wrapping_shl($y:Expr)", + "$x << $y"); + replace_expr(st, cx, krate, + "$x:Expr.wrapping_shr($y:Expr)", + "$x >> $y"); + replace_expr(st, cx, krate, + "$x:Expr.wrapping_abs()", + "$x:Expr.abs()"); } } diff --git a/c2rust-transpile/src/build_files/Cargo.toml.hbs b/c2rust-transpile/src/build_files/Cargo.toml.hbs index 913da256b..1d779eb80 100644 --- a/c2rust-transpile/src/build_files/Cargo.toml.hbs +++ b/c2rust-transpile/src/build_files/Cargo.toml.hbs @@ -3,6 +3,7 @@ name = "{{crate_name}}" authors = ["C2Rust"] version = "0.0.0" publish = false +edition = 2018 {{#if main_module~}} [[bin]] @@ -14,6 +15,9 @@ path = "{{root_rs_file}}" crate-type = ["staticlib"] {{~/if}} +[dependencies] +libc = "0.2" + {{#if cross_checks~}} [dependencies.c2rust-xcheck-plugin] version = "*" diff --git a/c2rust-transpile/src/build_files/build.rs.hbs b/c2rust-transpile/src/build_files/build.rs.hbs new file mode 100644 index 000000000..ff4d4e9c0 --- /dev/null +++ b/c2rust-transpile/src/build_files/build.rs.hbs @@ 
-0,0 +1,11 @@ +#[cfg(target_os = "linux")] +fn main() { + // add linux dependencies here below + // println!("cargo:rustc-flags=-l readline"); +} + +#[cfg(target_os = "macos")] +fn main() { + // add macos dependencies below + // println!("cargo:rustc-flags=-l edit"); +} \ No newline at end of file diff --git a/c2rust-transpile/src/build_files/lib.rs.hbs b/c2rust-transpile/src/build_files/lib.rs.hbs index 6cc333c4e..68996586d 100644 --- a/c2rust-transpile/src/build_files/lib.rs.hbs +++ b/c2rust-transpile/src/build_files/lib.rs.hbs @@ -1,4 +1,3 @@ -#![feature(libc)] #![feature(extern_types)] #![feature(asm)] #![feature(ptr_wrapping_offset_from)] @@ -12,9 +11,12 @@ #![allow(mutable_transmutes)] #![allow(unused_mut)] -{{~#if reorganize_definitions~}} +{{#if reorganize_definitions~}} #![feature(custom_attribute)] {{/if}} +{{#if translate_valist~}} + #![feature(c_variadic)] +{{/if}} {{#if cross_checks~}} #![feature(plugin, custom_attribute)] diff --git a/c2rust-transpile/src/build_files/mod.rs b/c2rust-transpile/src/build_files/mod.rs index e98522167..c5460cf61 100644 --- a/c2rust-transpile/src/build_files/mod.rs +++ b/c2rust-transpile/src/build_files/mod.rs @@ -65,8 +65,11 @@ pub fn emit_build_files(tcfg: &TranspilerConfig, build_dir: &Path, reg.register_template_string("Cargo.toml", include_str!("Cargo.toml.hbs")).unwrap(); reg.register_template_string("lib.rs", include_str!("lib.rs.hbs")).unwrap(); + reg.register_template_string("build.rs", include_str!("build.rs.hbs")).unwrap(); emit_cargo_toml(tcfg,®, &build_dir); + if tcfg.translate_valist { emit_rust_toolchain(tcfg, &build_dir); } + emit_build_rs(tcfg, ®, &build_dir); emit_lib_rs(tcfg, ®, &build_dir, modules) } @@ -93,6 +96,15 @@ fn get_module_name(main: &Option) -> Option { None } +/// Emit `build.rs` to make it easier to link in native libraries +fn emit_build_rs(tcfg: &TranspilerConfig, reg: &Handlebars, build_dir: &Path) + -> Option { + let json = json!({}); + let output = reg.render("build.rs", 
&json).unwrap(); + let output_path = build_dir.join("build.rs"); + maybe_write_to_file(&output_path, output, tcfg.overwrite_existing) +} + /// Emit `lib.rs` for a library or `main.rs` for a binary. Returns the path /// to `lib.rs` or `main.rs` (or `None` if the output file existed already). fn emit_lib_rs(tcfg: &TranspilerConfig, reg: &Handlebars, build_dir: &Path, @@ -125,6 +137,7 @@ fn emit_lib_rs(tcfg: &TranspilerConfig, reg: &Handlebars, build_dir: &Path, let json = json!({ "root_rs_file": file_name, "reorganize_definitions": tcfg.reorganize_definitions, + "translate_valist": tcfg.translate_valist, "cross_checks": tcfg.cross_checks, "cross_check_backend": rs_xcheck_backend, "main_module": get_module_name(&tcfg.main), @@ -138,6 +151,15 @@ fn emit_lib_rs(tcfg: &TranspilerConfig, reg: &Handlebars, build_dir: &Path, maybe_write_to_file(&output_path, output, tcfg.overwrite_existing) } +/// If we translate variadic functions, the output will only compile +/// on a nightly toolchain until the `c_variadics` feature is stable. +fn emit_rust_toolchain(tcfg: &TranspilerConfig, build_dir: &Path) { + let output_path = build_dir.join("rust-toolchain"); + // TODO: use value of $C2RUST_HOME/rust-toolchain? + let output = String::from("nightly-2019-03-13\n"); + maybe_write_to_file(&output_path, output, tcfg.overwrite_existing); +} + fn emit_cargo_toml(tcfg: &TranspilerConfig, reg: &Handlebars, build_dir: &Path) { // rust_checks_path is gone because we don't want to refer to the source // path but instead want the cross-check libs to be installed via cargo. 
diff --git a/c2rust-transpile/src/cfg/mod.rs b/c2rust-transpile/src/cfg/mod.rs index 176d14b41..50e659af4 100644 --- a/c2rust-transpile/src/cfg/mod.rs +++ b/c2rust-transpile/src/cfg/mod.rs @@ -73,7 +73,7 @@ impl Label { } fn debug_print(&self) -> String { - String::from(self.pretty_print().trim_left_matches('\'')) + String::from(self.pretty_print().trim_start_matches('\'')) } fn to_num_expr(&self) -> P { @@ -496,7 +496,8 @@ pub enum ImplicitReturnType { /// > used by the caller, the behavior is undefined." NoImplicitReturnType, - /// This is for handling statement expressions + /// This is for handling GNU C statement expressions + /// https://gcc.gnu.org/onlinedocs/gcc-3.2.3/gcc/Statement-Exprs.html /// /// TODO: document StmtExpr(ExprContext, CExprId, Label), @@ -549,6 +550,8 @@ impl Cfg { wip.body.push(StmtOrDecl::Stmt(mk().semi_stmt(mk().return_expr(None as Option>)))); }, ImplicitReturnType::NoImplicitReturnType => { + // NOTE: emitting `ret_expr` is not necessarily an error. For instance, + // this statement exit may be dominated by one or more return statements. let ret_expr: P = translator.panic("Reached end of non-void function without returning"); wip.body.push(StmtOrDecl::Stmt(mk().semi_stmt(ret_expr))); diff --git a/c2rust-transpile/src/convert_type.rs b/c2rust-transpile/src/convert_type.rs index 663003a40..7eac210f4 100644 --- a/c2rust-transpile/src/convert_type.rs +++ b/c2rust-transpile/src/convert_type.rs @@ -185,6 +185,26 @@ impl TypeConverter { Ok(mk().set_mutbl(mutbl).ptr_ty(child_ty)) } + pub fn is_inner_type_valist( + ctxt: &TypedAstContext, + qtype: CQualTypeId + ) -> bool { + match ctxt.resolve_type(qtype.ctype).kind { + CTypeKind::Struct(struct_id) => { + if let CDeclKind::Struct { name: Some(ref struct_name), .. 
} = ctxt[struct_id].kind { + if struct_name == "__va_list_tag" { + return true; + } + } + false + }, + CTypeKind::Pointer(pointer_id) => { + Self::is_inner_type_valist(ctxt, pointer_id) + }, + _ => false, + } + } + /// Convert a `C` type to a `Rust` one. For the moment, these are expected to have compatible /// memory layouts. pub fn convert(&mut self, ctxt: &TypedAstContext, ctype: CTypeId) -> Result, TranslationError> { diff --git a/c2rust-transpile/src/lib.rs b/c2rust-transpile/src/lib.rs index 8bf3681e1..815d89afd 100644 --- a/c2rust-transpile/src/lib.rs +++ b/c2rust-transpile/src/lib.rs @@ -2,6 +2,7 @@ #![feature(label_break_value)] extern crate dtoa; extern crate rustc_target; +extern crate rustc_data_structures; extern crate serde_cbor; extern crate syntax; extern crate syntax_pos; diff --git a/c2rust-transpile/src/rust_ast/traverse.rs b/c2rust-transpile/src/rust_ast/traverse.rs index 14c6c1b3c..0d0f76e7c 100644 --- a/c2rust-transpile/src/rust_ast/traverse.rs +++ b/c2rust-transpile/src/rust_ast/traverse.rs @@ -198,6 +198,7 @@ pub fn traverse_expr_def(walk: &mut W, mut e: Expr) -> Expr { ), ExprKind::Async(cap, nod, block) => ExprKind::Async(cap, nod, block.traverse(walk)), ExprKind::TryBlock(blk) => ExprKind::TryBlock(blk.traverse(walk)), + ExprKind::Err => unimplemented!(), }; e } diff --git a/c2rust-transpile/src/translator/assembly.rs b/c2rust-transpile/src/translator/assembly.rs index a5a27c05f..a4b40d523 100644 --- a/c2rust-transpile/src/translator/assembly.rs +++ b/c2rust-transpile/src/translator/assembly.rs @@ -29,7 +29,7 @@ impl<'c> Translation<'c> { self.use_feature("asm"); fn push_expr(tokens: &mut Vec, expr: P) { - tokens.push(Token::interpolated(Nonterminal::NtExpr(expr))); + tokens.push(Token::Interpolated(Lrc::new(Nonterminal::NtExpr(expr)))); } let mut stmts: Vec = vec![]; diff --git a/c2rust-transpile/src/translator/bitfields.rs b/c2rust-transpile/src/translator/bitfields.rs index 8be2547eb..f2c9f563a 100644 --- 
a/c2rust-transpile/src/translator/bitfields.rs +++ b/c2rust-transpile/src/translator/bitfields.rs @@ -7,11 +7,10 @@ use std::ops::Index; use c_ast::{BinOp, CDeclId, CDeclKind, CExprId, CExprKind, CQualTypeId, CTypeId, MemberKind, UnOp}; use c2rust_ast_builder::mk; -use syntax::ast::{AttrStyle, BinOpKind, Expr, ExprKind, MetaItemKind, NestedMetaItem, NestedMetaItemKind, Lit, LitIntType, LitKind, StmtKind, StrStyle, StructField, Ty, TyKind, self}; -use syntax::ext::quote::rt::Span; +use syntax::ast::{AttrStyle, BinOpKind, Expr, ExprKind, MetaItemKind, NestedMetaItem, Lit, LitIntType, LitKind, StmtKind, StrStyle, StructField, Ty, TyKind, self}; use syntax::ptr::P; use syntax::source_map::symbol::Symbol; -use syntax_pos::DUMMY_SP; +use syntax_pos::{Span, DUMMY_SP}; use translator::{ExprContext, Translation, ConvertedDecl, simple_metaitem}; use with_stmts::WithStmts; use super::TranslationError; @@ -56,7 +55,7 @@ fn assigment_metaitem(lhs: &str, rhs: &str) -> NestedMetaItem { }), ); - mk().nested_meta_item(NestedMetaItemKind::MetaItem(meta_item)) + mk().nested_meta_item(NestedMetaItem::MetaItem(meta_item)) } impl<'a> Translation<'a> { @@ -431,8 +430,10 @@ impl<'a> Translation<'a> { ) -> Result, TranslationError> { let field_info: Vec = field_ids.iter() .map(|field_id| match self.ast_context.index(*field_id).kind { - CDeclKind::Field { ref name, typ, bitfield_width, platform_bit_offset, platform_type_bitwidth, .. } => - (name.clone(), typ, bitfield_width, platform_bit_offset, platform_type_bitwidth), + CDeclKind::Field { typ, bitfield_width, platform_bit_offset, platform_type_bitwidth, .. 
} => { + let name = self.type_converter.borrow().resolve_field_name(None, *field_id).unwrap(); + (name, typ, bitfield_width, platform_bit_offset, platform_type_bitwidth) + }, _ => unreachable!("Found non-field in record field list"), }).collect(); let reorganized_fields = self.get_field_types(field_info, platform_byte_size)?; @@ -468,7 +469,6 @@ impl<'a> Translation<'a> { }, FieldType::Regular { ctype, name, .. } => { let field_init = self.implicit_default_expr(ctype, is_static)?; - fields.push(mk().field(name, field_init)); }, } diff --git a/c2rust-transpile/src/translator/builtins.rs b/c2rust-transpile/src/translator/builtins.rs index 1d5f64d0d..6a09863e9 100644 --- a/c2rust-transpile/src/translator/builtins.rs +++ b/c2rust-transpile/src/translator/builtins.rs @@ -131,21 +131,64 @@ impl<'c> Translation<'c> { "__builtin_va_start" => { if ctx.is_unused() && args.len() == 2 { if let Some(va_id) = self.match_vastart(args[0]) { - if ctx.is_va_decl(va_id) { - return Ok(WithStmts::new(self.panic("va_start stub"))) + if self.is_promoted_va_decl(va_id) { + // `va_start` is automatically called for the promoted decl. 
+ return Ok(WithStmts::new(self.panic_or_err("va_start stub"))) } } } Err(TranslationError::generic("Unsupported va_start")) }, - "__builtin_va_copy" => Err(TranslationError::generic( - "va_copy not supported" - )), + "__builtin_va_copy" => { + if ctx.is_unused() && args.len() == 2 { + if let Some((_dst_va_id, _src_va_id)) = self.match_vacopy(args[0], args[1]) { + + let dst = self.convert_expr(ctx.used(), args[0])?; + let src = self.convert_expr(ctx.used(), args[1])?; + + let path = { + let std_or_core = if self.tcfg.emit_no_std { "core" } else { "std" }; + let path = vec!["", std_or_core, "intrinsics", "va_copy"]; + mk().path_expr(path) + }; + let mut_ref_src = mk().mutbl().addr_of_expr(src.val); + let call_expr = mk().call_expr(path, vec![mut_ref_src] as Vec>); + let assign_expr = mk().assign_expr(dst.val, call_expr); + let stmt = mk().semi_stmt(assign_expr); + + let mut res = WithStmts::new(self.panic_or_err("va_copy stub")); + res.stmts.push(stmt); + return Ok(res); + } + } + Err(TranslationError::generic("Unsupported va_copy")) + }, "__builtin_va_end" => { if ctx.is_unused() && args.len() == 1 { if let Some(va_id) = self.match_vaend(args[0]) { - if ctx.is_va_decl(va_id) { - return Ok(WithStmts::new(self.panic("va_end stub"))) + if self.is_promoted_va_decl(va_id) { + // no need to call end on `va_end` on `va_list` promoted to arg + return Ok(WithStmts::new(self.panic_or_err("va_end stub"))) + } else if self.is_copied_va_decl(va_id) { + // call to `va_end` on non-promoted `va_list` + + let val = self.convert_expr(ctx.used(), args[0])?; + + let path = { + let std_or_core = if self.tcfg.emit_no_std { "core" } else { "std" }; + let path = vec!["", std_or_core, "intrinsics", "va_end"]; + mk().path_expr(path) + }; + let ref_val = mk().mutbl().addr_of_expr(val.val); + let call_expr = mk().call_expr(path, vec![ref_val] as Vec>); + + let stmt = mk().semi_stmt(call_expr); + + let mut res = WithStmts::new(self.panic_or_err("va_end stub")); + res.stmts.push(stmt); + return 
Ok(res); + + // return Ok(WithStmts::new(self.panic("va_end stub"))) } } } @@ -524,7 +567,7 @@ impl<'c> Translation<'c> { memcpy_expr } else { stmts.push(mk().semi_stmt(memcpy_expr)); - self.panic("__builtin_memcpy not used") + self.panic_or_err("__builtin_memcpy not used") }; Ok(WithStmts { stmts, val }) diff --git a/c2rust-transpile/src/translator/main_function.rs b/c2rust-transpile/src/translator/main_function.rs index aafd536de..c086e76c0 100644 --- a/c2rust-transpile/src/translator/main_function.rs +++ b/c2rust-transpile/src/translator/main_function.rs @@ -141,7 +141,7 @@ impl<'c> Translation<'c> { Some(mk().mac_expr(mk().mac( vec!["format"], vec![ - Token::interpolated(Nonterminal::NtExpr(mk().lit_expr(mk().str_lit("{}={}")))), + Token::Interpolated(Lrc::new(Nonterminal::NtExpr(mk().lit_expr(mk().str_lit("{}={}"))))), Token::Comma, Token::from_ast_ident(mk().ident("var_name")), Token::Comma, diff --git a/c2rust-transpile/src/translator/mod.rs b/c2rust-transpile/src/translator/mod.rs index 07a8b1af7..37f407bb3 100644 --- a/c2rust-transpile/src/translator/mod.rs +++ b/c2rust-transpile/src/translator/mod.rs @@ -17,6 +17,7 @@ use syntax::ptr::*; use syntax::tokenstream::{TokenStream, TokenTree}; use syntax::{with_globals, ast}; use syntax_pos::{DUMMY_SP, Span}; +use rustc_data_structures::sync::Lrc; use rust_ast::comment_store::CommentStore; use rust_ast::item_store::ItemStore; @@ -200,7 +201,6 @@ pub struct ExprContext { used: bool, is_static: bool, decay_ref: DecayRef, - va_decl: Option, is_bitfield_write: bool, needs_address: bool, } @@ -214,7 +214,6 @@ impl ExprContext { pub fn not_static(self) -> Self { ExprContext { is_static: false, .. self } } pub fn static_(self) -> Self { ExprContext { is_static: true, .. self } } pub fn set_static(self, is_static: bool) -> Self { ExprContext { is_static, .. 
self } } - pub fn is_va_decl(&self, decl_id: CDeclId) -> bool { Some(decl_id) == self.va_decl } pub fn is_bitfield_write(&self) -> bool { self.is_bitfield_write } pub fn set_bitfield_write(self, is_bitfield_write: bool) -> Self { ExprContext { is_bitfield_write, .. self } @@ -225,6 +224,36 @@ impl ExprContext { } } +#[derive(Clone, Debug)] +pub struct FunContext { + /// The name of the function we're currently translating + name: Option, + /// The va_list decl that we promote to a Rust function arg + promoted_va_decl: Option, + /// The va_list decls that we did not promote because they were `va_copy`ed. + copied_va_decls: Option> +} + +impl FunContext { + pub fn new() -> Self { + FunContext { + name: None, + promoted_va_decl: None, + copied_va_decls: None + } + } + + pub fn enter_new(&mut self, fn_name: &str) { + self.name = Some(fn_name.to_string()); + self.promoted_va_decl = None; + self.copied_va_decls = None; + } + + pub fn get_name<'a>(&'a self) -> &'a str { + return self.name.as_ref().unwrap() + } +} + pub struct Translation<'c> { // Translation environment @@ -241,6 +270,7 @@ pub struct Translation<'c> { type_converter: RefCell, renamer: RefCell>, zero_inits: RefCell, TranslationError>>>, + function_context: RefCell, // Comment support pub comment_context: RefCell, // Incoming comments @@ -259,7 +289,7 @@ pub struct Translation<'c> { fn simple_metaitem(name: &str) -> NestedMetaItem { let meta_item = mk().meta_item(vec![name], MetaItemKind::Word); - mk().nested_meta_item(NestedMetaItemKind::MetaItem(meta_item)) + mk().nested_meta_item(NestedMetaItem::MetaItem(meta_item)) } fn cast_int(val: P, name: &str) -> P { @@ -446,7 +476,6 @@ pub fn translate(ast_context: TypedAstContext, tcfg: &TranspilerConfig, main_fil used: true, is_static: false, decay_ref: DecayRef::Default, - va_decl: None, is_bitfield_write: false, needs_address: false, }; @@ -750,7 +779,7 @@ fn print_header(s: &mut State, t: &Translation) -> io::Result<()> { if t.tcfg.emit_modules { 
s.print_item(&mk().use_item(vec!["libc"], None as Option))?; } else { - let mut features = vec!["libc"]; + let mut features = vec![]; features.extend(t.features.borrow().iter()); features.extend(t.type_converter.borrow().features_used()); let mut pragmas: Vec<(&str, Vec<&str>)> = @@ -898,6 +927,7 @@ impl<'c> Translation<'c> { "drop", "Some", "None", "Ok", "Err", ])), zero_inits: RefCell::new(IndexMap::new()), + function_context: RefCell::new(FunContext::new()), comment_context, comment_store: RefCell::new(CommentStore::new()), sectioned_static_initializers: RefCell::new(Vec::new()), @@ -915,14 +945,24 @@ impl<'c> Translation<'c> { // This node should _never_ show up in the final generated code. This is an easy way to notice // if it does. + pub fn panic_or_err(&self, msg: &str) -> P { + self.panic_or_err_helper(msg, self.tcfg.panic_on_translator_failure) + } + pub fn panic(&self, msg: &str) -> P { - let macro_name = if self.tcfg.panic_on_translator_failure { "panic" } else { "compile_error" }; + self.panic_or_err_helper(msg, true) + } + + fn panic_or_err_helper(&self, msg: &str, panic: bool) -> P { + let macro_name = if panic { "panic" } else { "compile_error" }; let macro_msg = vec![ - Token::interpolated(Nonterminal::NtExpr(mk().lit_expr(mk().str_lit(msg)))), + Token::Interpolated(Lrc::new(Nonterminal::NtExpr(mk().lit_expr(mk().str_lit(msg))))), ].into_iter().collect::(); mk().mac_expr(mk().mac(vec![macro_name], macro_msg, MacDelimiter::Parenthesis)) } + + fn mk_cross_check(&self, mk: Builder, args: Vec<&str>) -> Builder { if self.tcfg.cross_checks { mk.call_attr("cross_check", args) @@ -1155,8 +1195,8 @@ impl<'c> Translation<'c> { vec!["align"], MetaItemKind::List( vec![mk().nested_meta_item( - NestedMetaItemKind::Literal(lit))])); - reprs.push(mk().nested_meta_item(NestedMetaItemKind::MetaItem(inner))); + NestedMetaItem::Literal(lit))])); + reprs.push(mk().nested_meta_item(NestedMetaItem::MetaItem(inner))); }; let repr_attr = mk().meta_item(vec!["repr"], 
MetaItemKind::List(reprs)); @@ -1379,9 +1419,31 @@ impl<'c> Translation<'c> { } } + /// Returns true iff type is a (pointer to)* the `va_list` structure type. + /// Note: the logic is based on `TypeConverter::convert_pointer`. + pub fn is_inner_type_valist( + ctxt: &TypedAstContext, + qtype: CQualTypeId + ) -> bool { + match ctxt.resolve_type(qtype.ctype).kind { + CTypeKind::Struct(struct_id) => { + if let CDeclKind::Struct { name: Some(ref struct_name), .. } = ctxt[struct_id].kind { + if struct_name == "__va_list_tag" { + return true; + } + } + false + }, + CTypeKind::Pointer(pointer_id) => { + Self::is_inner_type_valist(ctxt, pointer_id) + }, + _ => false, + } + } + fn convert_function( &self, - mut ctx: ExprContext, + ctx: ExprContext, span: Span, is_global: bool, is_inline: bool, @@ -1396,16 +1458,16 @@ impl<'c> Translation<'c> { attrs: &IndexSet, ) -> Result { - if is_variadic { + self.function_context.borrow_mut().enter_new(name); + + let is_valist: bool = arguments + .iter() + .any(|&(_, _, typ)| Self::is_inner_type_valist(&self.ast_context, typ)); + if is_variadic || is_valist { if let Some(body_id) = body { - match self.well_formed_variadic(body_id) { - None => - return Err(format_err!( - "Failed to translate {}; unsupported variadic function.", name).into()), - Some(va_id) => { - self.register_va_arg(va_id); - ctx.va_decl = Some(va_id); - } + if !self.is_well_formed_variadic(body_id) { + return Err(format_err!( + "Failed to translate {}; unsupported variadic function.", name).into()); } } } @@ -1413,6 +1475,7 @@ impl<'c> Translation<'c> { self.with_scope(|| { let mut args: Vec = vec![]; + // handle regular (non-variadic) arguments for &(decl_id, ref var, typ) in arguments { @@ -1434,6 +1497,24 @@ impl<'c> Translation<'c> { args.push(mk().arg(ty, pat)) } + // handle variadic arguments + if is_variadic { + let ty = mk().ident_ty("..."); + if let Some(va_decl_id) = self.get_promoted_va_decl() { + // `register_va_arg` succeeded + let var = 
self.renamer.borrow_mut() + .get(&va_decl_id) + .expect(&format!("Failed to get name for variadic argument")); + + // FIXME: detect mutability requirements + let pat = mk().set_mutbl(Mutability::Mutable).ident_pat(var); + args.push(mk().arg(ty, pat)) + } else { + args.push(mk().arg(ty, mk().wild_pat())) + } + } + + // handle return type let ret = match return_type { Some(return_type) => self.convert_type(return_type.ctype)?, None => mk().never_ty(), @@ -1728,9 +1809,11 @@ impl<'c> Translation<'c> { } pub fn convert_decl_stmt_info(&self, ctx: ExprContext, decl_id: CDeclId) -> Result { - if ctx.is_va_decl(decl_id) { + if self.is_promoted_va_decl(decl_id) { + // `va_list` decl was promoted to arg + self.use_feature("c_variadic"); return Ok(cfg::DeclStmtInfo::empty()) - } + } match self.ast_context.index(decl_id).kind { CDeclKind::Variable { ref ident, has_static_duration: true, is_externally_visible: false, is_defn: true, initializer, typ, .. } => { @@ -1765,6 +1848,28 @@ impl<'c> Translation<'c> { CDeclKind::Variable { has_static_duration: false, has_thread_duration: false, is_externally_visible: false, is_defn, ref ident, initializer, typ, .. } => { assert!(is_defn, "Only local variable definitions should be extracted"); + let rust_name = self.renamer.borrow_mut() + .insert(decl_id, &ident) + .expect(&format!("Failed to insert variable '{}'", ident)); + + if self.is_copied_va_decl(decl_id) { + // translate `va_list` declarations not promoted to an arg + // to `VaList` and do not emit an initializer. 
+ let pat_mut = mk().set_mutbl("mut").ident_pat(rust_name.clone()); + let ty = { + let std_or_core = if self.tcfg.emit_no_std { "core" } else { "std" }; + let path = vec!["", std_or_core, "ffi", "VaList"]; + mk().path_ty(path) + }; + let local_mut = mk().local::<_, _, P>(pat_mut, Some(ty), None); + + return Ok(cfg::DeclStmtInfo::new( + vec![], // decl + vec![], // assign + vec![mk().local_stmt(P(local_mut))], // decl_and_assign + )); + } + let has_self_reference = if let Some(expr_id) = initializer { self.has_decl_reference(decl_id, expr_id) @@ -1773,10 +1878,7 @@ impl<'c> Translation<'c> { }; let mut stmts = self.compute_variable_array_sizes(ctx, typ.ctype)?; - - let rust_name = self.renamer.borrow_mut() - .insert(decl_id, &ident) - .expect(&format!("Failed to insert variable '{}'", ident)); + let (ty, mutbl, init) = self.convert_variable(ctx, initializer, typ)?; let mut init = init?; @@ -2318,11 +2420,11 @@ impl<'c> Translation<'c> { // offset_of!(Struct, field[expr as usize]) as ty let mut macro_body = vec![ - TokenTree::Token(DUMMY_SP, Token::interpolated(ty_ident)), + TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(ty_ident))), TokenTree::Token(DUMMY_SP, Token::Comma), - TokenTree::Token(DUMMY_SP, Token::interpolated(field_ident)), + TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(field_ident))), TokenTree::Token(DUMMY_SP, Token::OpenDelim(DelimToken::Bracket)), - TokenTree::Token(DUMMY_SP, Token::interpolated(index_expr)), + TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(index_expr))), TokenTree::Token(DUMMY_SP, Token::CloseDelim(DelimToken::Bracket)), ]; let path = mk().path("offset_of"); @@ -2359,7 +2461,7 @@ impl<'c> Translation<'c> { Ok(cond.and_then(|c| WithStmts { stmts: vec![mk().semi_stmt(mk().ifte_expr(c, then, Some(els)))], - val: self.panic("Conditional expression is not supposed to be used"), + val: self.panic_or_err("Conditional expression is not supposed to be used"), })) } else { let then: P = lhs.to_block(); @@ -2380,7 
+2482,7 @@ impl<'c> Translation<'c> { None as Option>))); Ok(WithStmts { stmts: lhs.stmts, - val: self.panic("Binary conditional expression is not supposed to be used"), + val: self.panic_or_err("Binary conditional expression is not supposed to be used"), }) } else { self.name_reference_write_read(ctx, lhs)?.result_map(|(_, lhs_val)| { @@ -2597,7 +2699,7 @@ impl<'c> Translation<'c> { // Recall that if `used` is false, the `stmts` field of the output must contain // all side-effects (and a function call can always have side-effects) stmts.push(mk().semi_stmt(expr)); - WithStmts { stmts, val: self.panic(panic_msg) } + WithStmts { stmts, val: self.panic_or_err(panic_msg) } } else { WithStmts { stmts, val: expr } } @@ -2641,12 +2743,17 @@ impl<'c> Translation<'c> { if let Some(stmt) = stmts.pop() { match as_semi_break_stmt(&stmt, &lbl) { - Some(val) => return Ok(WithStmts::new(mk().block_expr({ - match val { - None => mk().block(stmts), - Some(val) => WithStmts { stmts, val }.to_block() - } - }))), + Some(val) => { + let block = mk().block_expr({ + match val { + None => mk().block(stmts), + Some(val) => WithStmts { stmts, val }.to_block() + } + }); + // enclose block in parentheses to work around + // https://github.com/rust-lang/rust/issues/54482 + return Ok(WithStmts::new(mk().paren_expr(block))) + }, _ => { self.use_feature("label_break_value"); stmts.push(stmt) @@ -2661,7 +2768,7 @@ impl<'c> Translation<'c> { } _ => { if ctx.is_unused() { - let val = self.panic("Empty statement expression is not supposed to be used"); + let val = self.panic_or_err("Empty statement expression is not supposed to be used"); Ok(WithStmts { stmts: vec![], val }) } else { Err(TranslationError::generic("Bad statement expression")) diff --git a/c2rust-transpile/src/translator/operators.rs b/c2rust-transpile/src/translator/operators.rs index 596260038..955e38f9e 100644 --- a/c2rust-transpile/src/translator/operators.rs +++ b/c2rust-transpile/src/translator/operators.rs @@ -42,7 +42,7 @@ 
impl<'c> Translation<'c> { if ctx.is_unused() { let out_val = mem::replace( &mut out.val, - self.panic("Binary expression is not supposed to be used"), + self.panic_or_err("Binary expression is not supposed to be used"), ); out.stmts.push(mk().semi_stmt(out_val)); } @@ -59,7 +59,7 @@ impl<'c> Translation<'c> { if ctx.is_unused() { let out_val = mem::replace( &mut out.val, - self.panic("Binary expression is not supposed to be used"), + self.panic_or_err("Binary expression is not supposed to be used"), ); out.stmts.push(mk().semi_stmt(out_val)); } @@ -124,7 +124,7 @@ impl<'c> Translation<'c> { Ok(WithStmts { stmts, - val: self.panic("Binary expression is not supposed to be used"), + val: self.panic_or_err("Binary expression is not supposed to be used"), }) } else { let WithStmts { @@ -319,7 +319,7 @@ impl<'c> Translation<'c> { } = self.name_reference_write(ctx, lhs)?; ( write, - self.panic("Volatile value is not supposed to be read"), + self.panic_or_err("Volatile value is not supposed to be read"), lhs_stmts, ) }; diff --git a/c2rust-transpile/src/translator/simd.rs b/c2rust-transpile/src/translator/simd.rs index cf1d8e5a7..7d4410c8c 100644 --- a/c2rust-transpile/src/translator/simd.rs +++ b/c2rust-transpile/src/translator/simd.rs @@ -227,7 +227,7 @@ impl<'c> Translation<'c> { } else { Ok(WithStmts { stmts: vec![mk().expr_stmt(call)], - val: self.panic("No value for unused shuffle vector return"), + val: self.panic_or_err("No value for unused shuffle vector return"), }) } } @@ -338,7 +338,7 @@ impl<'c> Translation<'c> { } else { Ok(WithStmts { stmts: vec![mk().expr_stmt(call)], - val: self.panic("No value for unused shuffle vector return"), + val: self.panic_or_err("No value for unused shuffle vector return"), }) } } @@ -455,7 +455,7 @@ impl<'c> Translation<'c> { } else { Ok(WithStmts { stmts: vec![mk().expr_stmt(call)], - val: self.panic("No value for unused shuffle vector return"), + val: self.panic_or_err("No value for unused shuffle vector return"), }) } } diff 
--git a/c2rust-transpile/src/translator/variadic.rs b/c2rust-transpile/src/translator/variadic.rs index 94497a3fd..535c6ab7f 100644 --- a/c2rust-transpile/src/translator/variadic.rs +++ b/c2rust-transpile/src/translator/variadic.rs @@ -1,10 +1,11 @@ use super::*; +use std::collections::{HashMap}; #[derive(Copy, Clone, Debug)] pub enum VaPart { Start(CDeclId), End(CDeclId), - Copy, + Copy(CDeclId, CDeclId), } macro_rules! match_or { @@ -20,18 +21,40 @@ impl<'c> Translation<'c> { /// Install a fake variable into the renamer as a kludge until we have /// proper variadic function definition support - pub fn register_va_arg(&self, decl_id: CDeclId) { - - match self.ast_context[decl_id].kind { - CDeclKind::Variable { ref ident, .. } => { - self.renamer.borrow_mut() + fn register_va_decls(&self, promoted_decl_id: Option, copied_decl_ids: IndexSet) { + let mut fn_ctx = self.function_context.borrow_mut(); + fn_ctx.copied_va_decls = Some(copied_decl_ids); + + if let Some(decl_id) = promoted_decl_id { + // found a promotable `va_list` + fn_ctx.promoted_va_decl = Some(decl_id); + match self.ast_context[decl_id].kind { + CDeclKind::Variable { ref ident, .. } => { + self.renamer.borrow_mut() .insert(decl_id, ident) .expect(&format!("Failed to install variadic function kludge")); + } + _ => panic!("va_arg was not a variable"), } - _ => panic!("va_arg was not a variable"), } } + pub fn is_promoted_va_decl(&self, decl_id: CDeclId) -> bool { + let fn_ctx = self.function_context.borrow(); + fn_ctx.promoted_va_decl == Some(decl_id) + } + + pub fn is_copied_va_decl(&self, decl_id: CDeclId) -> bool { + let fn_ctx = self.function_context.borrow(); + if let Some(ref decls) = fn_ctx.copied_va_decls { + decls.contains(&decl_id) + } else { false } + } + + pub fn get_promoted_va_decl(&self) -> Option { + self.function_context.borrow().promoted_va_decl + } + pub fn match_vastart(&self, expr: CExprId) -> Option { match_or! 
{ [self.ast_context[expr].kind] CExprKind::ImplicitCast(_, e, _, _, _) => e } @@ -41,11 +64,16 @@ impl<'c> Translation<'c> { } pub fn match_vaend(&self, expr: CExprId) -> Option { - match_or! { [self.ast_context[expr].kind] - CExprKind::ImplicitCast(_, e, _, _, _) => e } - match_or! { [self.ast_context[e].kind] - CExprKind::DeclRef(_, va_id, _) => va_id } - Some(va_id) + self.match_vastart(expr) + } + + pub fn match_vacopy(&self, dst_expr: CExprId, src_expr: CExprId) -> Option<(CDeclId, CDeclId)> { + let dst_id = self.match_vastart(dst_expr); + let src_id = self.match_vastart(src_expr); + if let (Some(did), Some(sid)) = (dst_id, src_id) { + return Some((did, sid)); + } + None } pub fn match_vapart(&self, expr: CExprId) -> Option { @@ -63,7 +91,12 @@ impl<'c> Translation<'c> { self.match_vastart(args[0]).map(VaPart::Start) } - "__builtin_va_copy" => Some(VaPart::Copy), + "__builtin_va_copy" => { + if args.len() != 2 { return None } + self.match_vacopy(args[0], args[1]).map( + |(did, sid)| VaPart::Copy(did, sid) + ) + } "__builtin_va_end" => { if args.len() != 1 { return None } @@ -89,7 +122,7 @@ impl<'c> Translation<'c> { }); if ctx.is_unused() { res.stmts.push(mk().expr_stmt(res.val)); - res.val = self.panic("convert_vaarg unused"); + res.val = self.panic_or_err("convert_vaarg unused"); } Ok(res) @@ -100,38 +133,65 @@ impl<'c> Translation<'c> { /// Determine if a variadic function body declares a va_list argument /// and contains a va_start and va_end call for that argument list. - /// If it does the declaration ID for that variable is returned so that + /// If it does, the declaration ID for that variable is registered so that /// the resulting Rust function can have that variable argument list /// variable moved up to the argument list. 
- pub fn well_formed_variadic(&self, body: CStmtId) -> Option { - - let mut va_started: Option = None; - let mut va_end_found = false; + pub fn is_well_formed_variadic(&self, body: CStmtId) -> bool { + // maps each va_list to the operations performed on it (e.g. va_start, va_end) + let mut candidates: HashMap> = HashMap::new(); let mut iter = DFExpr::new(&self.ast_context, body.into()); while let Some(s) = iter.next() { if let SomeId::Expr(e) = s { if let Some(part) = self.match_vapart(e) { -// println!("Found: {:?}", part); - match part { - VaPart::Start(va_id) => { - if va_started.is_some() { - return None - } - va_started = Some(va_id); - } - VaPart::Copy => return None, - VaPart::End(va_id) => { - if va_started != Some(va_id) || va_end_found { - return None - } - va_end_found = true; - } - } + let id = match part { + VaPart::Start(va_id) | VaPart::End(va_id) => va_id, + VaPart::Copy(dst_va_id, _src_va_id) => dst_va_id, + }; + candidates.entry(id).or_insert(vec![]).push(part); } } } - if va_end_found { va_started } else { None } + if candidates.len() == 0 { + // no calls to `va_start`, `va_copy` or `va_end` is fine + return true + } + + let start_called = |k: &CDeclId| candidates[k] + .iter() + .any(|e| if let VaPart::Start( _ ) = e { true } else { false }); + let copy_called = |k: &CDeclId| candidates[k] + .iter() + .any(|e| if let VaPart::Copy(_, _) = e { true } else { false }); + let end_called = |k: &CDeclId| candidates[k] + .iter() + .any(|e| if let VaPart::End( _ ) = e { true } else { false }); + + // va_lists initialized by `va_copy` and finalized by `va_end` + let copied = candidates + .keys() + .filter_map(|k| if copy_called(k) && end_called(k) { Some(*k) } else { None }) + .collect::>(); + + // va_lists initialized by `va_start` and finalized by `va_end` + let promotable = candidates + .keys() + .filter_map(|k| if start_called(k) && end_called(k) { Some(*k) } else { None }) + .collect::>(); + + if promotable.len() + copied.len() > 0 { + // have 
promotable and/or copied va_lists that need registration + let promoted = match promotable.len() { + 0 => None, + 1 => Some(promotable[0]), + _ => panic!("couldn't determine which va_list to promote in {}", + self.function_context.borrow().get_name() + ) + }; + self.register_va_decls(promoted, copied); + return true + } + false } } diff --git a/cross-checks/rust-checks/backends/dynamic-dlsym/Cargo.toml b/cross-checks/rust-checks/backends/dynamic-dlsym/Cargo.toml index 7943923b7..266b19f5e 100644 --- a/cross-checks/rust-checks/backends/dynamic-dlsym/Cargo.toml +++ b/cross-checks/rust-checks/backends/dynamic-dlsym/Cargo.toml @@ -7,3 +7,5 @@ license = "BSD-3-Clause" homepage = "https://c2rust.com/" repository = "https://github.com/immunant/c2rust" +[dependencies] +libc = "0.2.50" diff --git a/cross-checks/rust-checks/backends/dynamic-dlsym/src/lib.rs b/cross-checks/rust-checks/backends/dynamic-dlsym/src/lib.rs index 4136d4a94..2f0d6f73d 100644 --- a/cross-checks/rust-checks/backends/dynamic-dlsym/src/lib.rs +++ b/cross-checks/rust-checks/backends/dynamic-dlsym/src/lib.rs @@ -1,4 +1,3 @@ -#![feature(libc)] extern crate libc; diff --git a/cross-checks/rust-checks/backends/zstd-logging/Cargo.toml b/cross-checks/rust-checks/backends/zstd-logging/Cargo.toml index 7dd64b0f5..d79d063cd 100644 --- a/cross-checks/rust-checks/backends/zstd-logging/Cargo.toml +++ b/cross-checks/rust-checks/backends/zstd-logging/Cargo.toml @@ -17,3 +17,4 @@ path = "src/bin/printer.rs" [dependencies] lazy_static = "1.1.0" zstd = "0.4" +libc = "0.2.50" diff --git a/cross-checks/rust-checks/backends/zstd-logging/src/lib.rs b/cross-checks/rust-checks/backends/zstd-logging/src/lib.rs index 507c36293..3132dbb69 100644 --- a/cross-checks/rust-checks/backends/zstd-logging/src/lib.rs +++ b/cross-checks/rust-checks/backends/zstd-logging/src/lib.rs @@ -1,4 +1,3 @@ -#![feature(libc)] #[macro_use] extern crate lazy_static; diff --git a/docs/known-limitations.md b/docs/known-limitations.md index 
5bb6b038f..8eab427d9 100644 --- a/docs/known-limitations.md +++ b/docs/known-limitations.md @@ -3,7 +3,7 @@ This document tracks things that we know the translator can't handle, as well as ## Unimplemented - * variadic function definitions (blocking [Rust issue](https://github.com/rust-lang/rust/issues/44930)) + * variadic function definitions and macros that operate on `va_list`s (work in progress) * preserving comments (work in progress) * `long double` and `_Complex` types (partially blocked by Rust language) * Non x86/64 SIMD function/types and x86/64 SIMD function/types which have no rust equivalent diff --git a/rust-toolchain b/rust-toolchain index 087b7c664..db159ee67 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -nightly-2018-12-03 +nightly-2019-04-08 diff --git a/scripts/common.py b/scripts/common.py index c0ee7fc06..cdc1f6d7d 100644 --- a/scripts/common.py +++ b/scripts/common.py @@ -95,9 +95,9 @@ class Config: MIN_PLUMBUM_VERSION = (1, 6, 3) CC_DB_JSON = "compile_commands.json" - CUSTOM_RUST_NAME = 'nightly-2018-12-03' + CUSTOM_RUST_NAME = 'nightly-2019-04-08' # output of `rustup run $CUSTOM_RUST_NAME -- rustc --version` - CUSTOM_RUST_RUSTC_VERSION = "rustc 1.32.0-nightly (21f268495 2018-12-02)" + # CUSTOM_RUST_RUSTC_VERSION = "rustc 1.32.0-nightly (21f268495 2018-12-02)" """ Reflect changes to all configuration variables that depend on LLVM_VER diff --git a/tests/arrays/Cargo.toml b/tests/arrays/Cargo.toml index c7248ab67..622775a1e 100644 --- a/tests/arrays/Cargo.toml +++ b/tests/arrays/Cargo.toml @@ -1,3 +1,6 @@ [package] name = "array-tests" version = "0.1.0" + +[dependencies] +libc = "0.2" diff --git a/tests/builtins/Cargo.toml b/tests/builtins/Cargo.toml index 5228b9959..9d45a4ff8 100644 --- a/tests/builtins/Cargo.toml +++ b/tests/builtins/Cargo.toml @@ -1,3 +1,6 @@ [package] name = "builtins-tests" version = "0.1.0" + +[dependencies] +libc = "0.2" diff --git a/tests/casts/Cargo.toml b/tests/casts/Cargo.toml index 
56a2ece90..d49dd12ed 100644 --- a/tests/casts/Cargo.toml +++ b/tests/casts/Cargo.toml @@ -1,3 +1,6 @@ [package] name = "cast-tests" version = "0.1.0" + +[dependencies] +libc = "0.2" diff --git a/tests/conditionals/Cargo.toml b/tests/conditionals/Cargo.toml index 565b1f729..cd2cb5ca4 100644 --- a/tests/conditionals/Cargo.toml +++ b/tests/conditionals/Cargo.toml @@ -1,3 +1,6 @@ [package] name = "conditional-tests" version = "0.1.0" + +[dependencies] +libc = "0.2" diff --git a/tests/enums/Cargo.toml b/tests/enums/Cargo.toml index 92bdc48df..f4e61c0f7 100644 --- a/tests/enums/Cargo.toml +++ b/tests/enums/Cargo.toml @@ -1,3 +1,6 @@ [package] name = "enum-tests" version = "0.1.0" + +[dependencies] +libc = "0.2" diff --git a/tests/example/Cargo.toml b/tests/example/Cargo.toml index a366f0c57..cfacb04ad 100644 --- a/tests/example/Cargo.toml +++ b/tests/example/Cargo.toml @@ -1,3 +1,6 @@ [package] name = "example-tests" version = "0.1.0" + +[dependencies] +libc = "0.2" diff --git a/tests/floats/Cargo.toml b/tests/floats/Cargo.toml index a94c904cb..ad84a59de 100644 --- a/tests/floats/Cargo.toml +++ b/tests/floats/Cargo.toml @@ -3,3 +3,4 @@ name = "float-tests" version = "0.1.0" [dependencies] +libc = "0.2" diff --git a/tests/gotos/Cargo.toml b/tests/gotos/Cargo.toml index 44c008b7b..74494fb5d 100644 --- a/tests/gotos/Cargo.toml +++ b/tests/gotos/Cargo.toml @@ -1,3 +1,6 @@ [package] name = "goto-tests" version = "0.1.0" + +[dependencies] +libc = "0.2" diff --git a/tests/ints/Cargo.toml b/tests/ints/Cargo.toml index 2688b66b5..f7dfd1f7d 100644 --- a/tests/ints/Cargo.toml +++ b/tests/ints/Cargo.toml @@ -1,3 +1,6 @@ [package] name = "int-tests" version = "0.1.0" + +[dependencies] +libc = "0.2" diff --git a/tests/longdouble/Cargo.toml b/tests/longdouble/Cargo.toml index bf850dab9..329eee857 100644 --- a/tests/longdouble/Cargo.toml +++ b/tests/longdouble/Cargo.toml @@ -5,3 +5,4 @@ version = "0.1.0" [dependencies] f128 = { git = "https://github.com/jkarns275/f128", rev = 
"da362b10704a0ab1f05b8aef34156aaf38779116" } num-traits = "0.2.6" +libc = "0.2" diff --git a/tests/loops/Cargo.toml b/tests/loops/Cargo.toml index 09b8d03f1..29bffc43c 100644 --- a/tests/loops/Cargo.toml +++ b/tests/loops/Cargo.toml @@ -1,3 +1,6 @@ [package] name = "loops-tests" version = "0.1.0" + +[dependencies] +libc = "0.2" diff --git a/tests/misc/Cargo.toml b/tests/misc/Cargo.toml index ceb4403ea..cd25406a5 100644 --- a/tests/misc/Cargo.toml +++ b/tests/misc/Cargo.toml @@ -1,3 +1,6 @@ [package] name = "misc-tests" version = "0.1.0" + +[dependencies] +libc = "0.2" diff --git a/tests/pointers/Cargo.toml b/tests/pointers/Cargo.toml index ed4861763..940c7ab27 100644 --- a/tests/pointers/Cargo.toml +++ b/tests/pointers/Cargo.toml @@ -1,3 +1,6 @@ [package] name = "pointer-tests" version = "0.1.0" + +[dependencies] +libc = "0.2" diff --git a/tests/simd/Cargo.toml b/tests/simd/Cargo.toml index 66c6757bb..a5e8e6523 100644 --- a/tests/simd/Cargo.toml +++ b/tests/simd/Cargo.toml @@ -1,3 +1,7 @@ [package] name = "simd-tests" version = "0.1.0" + +[dependencies] +libc = "0.2" + diff --git a/tests/statics/Cargo.toml b/tests/statics/Cargo.toml index 1d969dc86..a53ffba3f 100644 --- a/tests/statics/Cargo.toml +++ b/tests/statics/Cargo.toml @@ -1,3 +1,6 @@ [package] name = "statics-tests" version = "0.1.0" + +[dependencies] +libc = "0.2" diff --git a/tests/structs/Cargo.toml b/tests/structs/Cargo.toml index ddf68c9a8..1c58deab4 100644 --- a/tests/structs/Cargo.toml +++ b/tests/structs/Cargo.toml @@ -5,3 +5,4 @@ version = "0.1.0" [dependencies] c2rust-bitfields = { path = "../../c2rust-bitfields" } memoffset = "0.2" +libc = "0.2" diff --git a/tests/unions/Cargo.toml b/tests/unions/Cargo.toml index a078929c7..5c1fbeed2 100644 --- a/tests/unions/Cargo.toml +++ b/tests/unions/Cargo.toml @@ -1,3 +1,6 @@ [package] name = "union-tests" version = "0.1.0" + +[dependencies] +libc = "0.2"