From 33e98e1869c64ad277130807afe989988e83063c Mon Sep 17 00:00:00 2001 From: Stu Hood Date: Tue, 18 Jul 2017 16:50:30 -0700 Subject: [PATCH] Zinc 1.0.0-X20 upgrade: JVM portion (#4728) ### Problem Pants is on an older version of zinc (one that does not use class-based name-hashing), and the modern zinc project is moving quickly thanks to @jvican and others. We had previously been on `X7` but it was reverted in #4510 because benchmarks showed that no incremental compilation was happening for scala code. ### Solution * Upgrade to zinc `1.0.0-X20` * Use the zinc `AnalysisMappers` API described in #4513 to make analysis files portable without parsing * Extract options parsing out of the `Settings` object and into its own module, to allow for reuse in multiple binary entrypoints * Refactor and split our zinc wrapper into `zinc-compiler` and `zinc-extractor` to support parsing the `product_deps_by_src` and `classes_by_source` products directly (in order to move toward making analysis a black box) * Switch to usage of new builder-pattern APIs for constructing zinc objects * Remove the `Loggers`/`Reporters` facades in favor of built-in support for filtering log messages ### Result The new version of the zinc wrapper correctly supports incremental compile (with the exception of https://github.com/sbt/zinc/issues/355), and the python portions of pants no longer require any internal knowledge of zinc analysis. The python half of this change will remove that code. 
--- .../jvm/com/fasterxml/jackson/module/BUILD | 9 + 3rdparty/jvm/org/scala-lang/modules/BUILD | 9 + 3rdparty/jvm/org/scala-sbt/BUILD | 13 +- .../org/pantsbuild/zinc/AnalysisMap.scala | 186 ---------- src/scala/org/pantsbuild/zinc/Compiler.scala | 224 ------------ src/scala/org/pantsbuild/zinc/Inputs.scala | 199 ----------- src/scala/org/pantsbuild/zinc/Main.scala | 114 ------ src/scala/org/pantsbuild/zinc/Settings.scala | 335 ------------------ src/scala/org/pantsbuild/zinc/Setup.scala | 260 -------------- .../zinc/SimpleCompileProgress.scala | 48 --- .../zinc/analysis/AnalysisMap.scala | 153 ++++++++ .../zinc/analysis/AnalysisOptions.scala | 23 ++ src/scala/org/pantsbuild/zinc/analysis/BUILD | 22 ++ .../analysis/PortableAnalysisMappers.scala | 82 +++++ src/scala/org/pantsbuild/zinc/cache/BUILD | 8 +- .../org/pantsbuild/zinc/{ => compiler}/BUILD | 12 +- .../zinc/compiler/CompilerCacheKey.scala | 70 ++++ .../zinc/compiler/CompilerUtils.scala | 154 ++++++++ .../pantsbuild/zinc/compiler/InputUtils.scala | 206 +++++++++++ .../org/pantsbuild/zinc/compiler/Main.scala | 131 +++++++ .../pantsbuild/zinc/compiler/Settings.scala | 319 +++++++++++++++++ src/scala/org/pantsbuild/zinc/extractor/BUILD | 20 ++ .../pantsbuild/zinc/extractor/Extractor.scala | 78 ++++ .../org/pantsbuild/zinc/extractor/Main.scala | 78 ++++ .../pantsbuild/zinc/extractor/Settings.scala | 42 +++ .../org/pantsbuild/zinc/logging/Loggers.scala | 75 ---- .../pantsbuild/zinc/logging/Reporters.scala | 58 --- src/scala/org/pantsbuild/zinc/options/BUILD | 13 + .../pantsbuild/zinc/options/OptionSet.scala | 72 ++++ .../zinc/{ => options}/Options.scala | 2 +- .../pantsbuild/zinc/{logging => util}/BUILD | 8 +- .../org/pantsbuild/zinc/{ => util}/Util.scala | 2 +- tests/scala/org/pantsbuild/zinc/BUILD | 19 - .../zinc/{ => analysis}/AnalysisMapSpec.scala | 13 +- .../scala/org/pantsbuild/zinc/analysis/BUILD | 18 + tests/scala/org/pantsbuild/zinc/logging/BUILD | 11 - .../pantsbuild/zinc/logging/LoggersSpec.scala | 41 
--- zinc/BUILD | 20 +- zinc/README.md | 3 +- 39 files changed, 1555 insertions(+), 1595 deletions(-) create mode 100644 3rdparty/jvm/com/fasterxml/jackson/module/BUILD create mode 100644 3rdparty/jvm/org/scala-lang/modules/BUILD delete mode 100644 src/scala/org/pantsbuild/zinc/AnalysisMap.scala delete mode 100644 src/scala/org/pantsbuild/zinc/Compiler.scala delete mode 100644 src/scala/org/pantsbuild/zinc/Inputs.scala delete mode 100644 src/scala/org/pantsbuild/zinc/Main.scala delete mode 100644 src/scala/org/pantsbuild/zinc/Settings.scala delete mode 100644 src/scala/org/pantsbuild/zinc/Setup.scala delete mode 100644 src/scala/org/pantsbuild/zinc/SimpleCompileProgress.scala create mode 100644 src/scala/org/pantsbuild/zinc/analysis/AnalysisMap.scala create mode 100644 src/scala/org/pantsbuild/zinc/analysis/AnalysisOptions.scala create mode 100644 src/scala/org/pantsbuild/zinc/analysis/BUILD create mode 100644 src/scala/org/pantsbuild/zinc/analysis/PortableAnalysisMappers.scala rename src/scala/org/pantsbuild/zinc/{ => compiler}/BUILD (53%) create mode 100644 src/scala/org/pantsbuild/zinc/compiler/CompilerCacheKey.scala create mode 100644 src/scala/org/pantsbuild/zinc/compiler/CompilerUtils.scala create mode 100644 src/scala/org/pantsbuild/zinc/compiler/InputUtils.scala create mode 100644 src/scala/org/pantsbuild/zinc/compiler/Main.scala create mode 100644 src/scala/org/pantsbuild/zinc/compiler/Settings.scala create mode 100644 src/scala/org/pantsbuild/zinc/extractor/BUILD create mode 100644 src/scala/org/pantsbuild/zinc/extractor/Extractor.scala create mode 100644 src/scala/org/pantsbuild/zinc/extractor/Main.scala create mode 100644 src/scala/org/pantsbuild/zinc/extractor/Settings.scala delete mode 100644 src/scala/org/pantsbuild/zinc/logging/Loggers.scala delete mode 100644 src/scala/org/pantsbuild/zinc/logging/Reporters.scala create mode 100644 src/scala/org/pantsbuild/zinc/options/BUILD create mode 100644 src/scala/org/pantsbuild/zinc/options/OptionSet.scala 
rename src/scala/org/pantsbuild/zinc/{ => options}/Options.scala (99%) rename src/scala/org/pantsbuild/zinc/{logging => util}/BUILD (59%) rename src/scala/org/pantsbuild/zinc/{ => util}/Util.scala (99%) delete mode 100644 tests/scala/org/pantsbuild/zinc/BUILD rename tests/scala/org/pantsbuild/zinc/{ => analysis}/AnalysisMapSpec.scala (66%) create mode 100644 tests/scala/org/pantsbuild/zinc/analysis/BUILD delete mode 100644 tests/scala/org/pantsbuild/zinc/logging/BUILD delete mode 100644 tests/scala/org/pantsbuild/zinc/logging/LoggersSpec.scala diff --git a/3rdparty/jvm/com/fasterxml/jackson/module/BUILD b/3rdparty/jvm/com/fasterxml/jackson/module/BUILD new file mode 100644 index 00000000000..5edf166928a --- /dev/null +++ b/3rdparty/jvm/com/fasterxml/jackson/module/BUILD @@ -0,0 +1,9 @@ +# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +jar_library( + name='scala', + jars = [ + scala_jar(org='com.fasterxml.jackson.module', name='jackson-module-scala', rev='2.8.4'), + ] +) diff --git a/3rdparty/jvm/org/scala-lang/modules/BUILD b/3rdparty/jvm/org/scala-lang/modules/BUILD new file mode 100644 index 00000000000..f1844fd2800 --- /dev/null +++ b/3rdparty/jvm/org/scala-lang/modules/BUILD @@ -0,0 +1,9 @@ +# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +jar_library( + name='scala-java8-compat', + jars=[ + scala_jar(org='org.scala-lang.modules', name='scala-java8-compat', rev='0.8.0'), + ], +) diff --git a/3rdparty/jvm/org/scala-sbt/BUILD b/3rdparty/jvm/org/scala-sbt/BUILD index 429b9e3b14f..5212472d8d5 100644 --- a/3rdparty/jvm/org/scala-sbt/BUILD +++ b/3rdparty/jvm/org/scala-sbt/BUILD @@ -1,12 +1,19 @@ +# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
+ jar_library( name='zinc', jars=[ - scala_jar(org='org.scala-sbt', name='zinc', rev='1.0.0-X7', + scala_jar(org='org.scala-sbt', name='zinc', rev='1.0.0-X20', excludes=[ exclude(org='org.scala-sbt', name='io_2.11'), exclude(org='org.scala-sbt', name='util-logging_2.11'), ]), ], + dependencies=[ + ':io', + ':util-logging', + ], ) jar_library( @@ -14,7 +21,7 @@ jar_library( jars=[ # TODO: `zinc` only declares a dep on the `tests` classifier for # util-logging for some reason. We redefine the dep here to get the full package. - scala_jar(org='org.scala-sbt', name='util-logging', rev='1.0.0-M17', force=True), + scala_jar(org='org.scala-sbt', name='util-logging', rev='1.0.0-M27', force=True), ], ) @@ -23,6 +30,6 @@ jar_library( jars=[ # TODO: `zinc` only declares a dep on the `tests` classifier for # io. We redefine the dep here to get the full package. - scala_jar(org='org.scala-sbt', name='io', rev='1.0.0-M9', force=True), + scala_jar(org='org.scala-sbt', name='io', rev='1.0.0-M13', force=True), ], ) diff --git a/src/scala/org/pantsbuild/zinc/AnalysisMap.scala b/src/scala/org/pantsbuild/zinc/AnalysisMap.scala deleted file mode 100644 index 7c8d4c0d57a..00000000000 --- a/src/scala/org/pantsbuild/zinc/AnalysisMap.scala +++ /dev/null @@ -1,186 +0,0 @@ -/** - * Copyright (C) 2015 Pants project contributors (see CONTRIBUTORS.md). - * Licensed under the Apache License, Version 2.0 (see LICENSE). 
- */ - -package org.pantsbuild.zinc - -import java.io.{File, IOException} -import java.nio.file.Files -import java.nio.file.StandardCopyOption -import xsbti.Maybe -import xsbti.compile.{CompileAnalysis, DefinesClass, MiniSetup, PerClasspathEntryLookup} -import sbt.internal.inc.{Analysis, AnalysisStore, CompanionsStore, Locate, TextAnalysisFormat} -import sbt.io.{IO, Using} -import sbt.util.Logger -import sbt.util.Logger.o2m -import org.pantsbuild.zinc.cache.{Cache, FileFPrint} -import org.pantsbuild.zinc.cache.Cache.Implicits -import xsbti.api.Companions - -/** - * A facade around the analysis cache to: - * 1) map between classpath entries and cache locations - * 2) use analysis for `definesClass` when it is available - * - * SBT uses the `definesClass` and `getAnalysis` methods in order to load the APIs for upstream - * classes. For a classpath containing multiple entries, sbt will call `definesClass` sequentially - * on classpath entries until it finds a classpath entry defining a particular class. When it finds - * the appropriate classpath entry, it will use `getAnalysis` to fetch the API for that class. - */ -case class AnalysisMap private[AnalysisMap] ( - // a map of classpath entries to cache file fingerprints, excluding the current compile destination - analysisLocations: Map[File, FileFPrint], - // log - log: Logger -) { - - def getPCELookup = new PerClasspathEntryLookup { - /** - * Gets analysis for a classpath entry (if it exists) by translating its path to a potential - * cache location and then checking the cache. - */ - def analysis(classpathEntry: File): Maybe[CompileAnalysis] = - o2m(analysisLocations.get(classpathEntry).flatMap(AnalysisMap.get)) - - /** - * An implementation of definesClass that will use analysis for an input directory to determine - * whether it defines a particular class. - * - * TODO: This optimization is unnecessary for jars on the classpath, which are already indexed. - * Can remove after the sbt jar output patch lands. 
- */ - def definesClass(classpathEntry: File): DefinesClass = { - getAnalysis(classpathEntry).map { analysis => - log.debug(s"Hit analysis cache for class definitions with ${classpathEntry}") - // strongly hold the classNames, and transform them to ensure that they are unlinked from - // the remainder of the analysis - val classNames = analysis.asInstanceOf[Analysis].relations.srcProd.reverseMap.keys.toList.toSet.map( - (f: File) => filePathToClassName(f)) - new ClassNamesDefinesClass(classNames) - }.getOrElse { - // no analysis: return a function that will scan instead - Locate.definesClass(classpathEntry) - } - } - - private class ClassNamesDefinesClass(classes: Set[String]) extends DefinesClass { - override def apply(className: String): Boolean = classes(className) - } - - private def filePathToClassName(file: File): String = { - // Extract className from path, for example: - // .../.pants.d/compile/zinc/.../current/classes/org/pantsbuild/example/hello/exe/Exe.class - // => org.pantsbuild.example.hello.exe.Exe - file.getAbsolutePath.split("current/classes")(1).drop(1).replace(".class", "").replaceAll("/", ".") - } - - /** - * Gets analysis for a classpath entry (if it exists) by translating its path to a potential - * cache location and then checking the cache. - */ - def getAnalysis(classpathEntry: File): Option[CompileAnalysis] = - analysisLocations.get(classpathEntry).flatMap(AnalysisMap.get) - } -} - -object AnalysisMap { - /** - * Static cache for compile analyses. Values must be Options because in get() we don't yet - * know if, on a cache miss, the underlying file will yield a valid Analysis. 
- */ - private val analysisCache = - Cache[FileFPrint, Option[(CompileAnalysis, MiniSetup)]](Setup.Defaults.analysisCacheLimit) - - def create( - // a map of classpath entries to cache file locations, excluding the current compile destination - analysisLocations: Map[File, File], - // log - log: Logger - ): AnalysisMap = - AnalysisMap( - // create fingerprints for all inputs at startup - analysisLocations.flatMap { - case (classpathEntry, cacheFile) => FileFPrint.fprint(cacheFile).map(classpathEntry -> _) - }, - log - ) - - private def get(cacheFPrint: FileFPrint): Option[CompileAnalysis] = - analysisCache.getOrElseUpdate(cacheFPrint) { - // re-fingerprint the file on miss, to ensure that analysis hasn't changed since we started - if (!FileFPrint.fprint(cacheFPrint.file).exists(_ == cacheFPrint)) { - throw new IOException(s"Analysis at $cacheFPrint has changed since startup!") - } - AnalysisStore.cached(SafeFileBasedStore(cacheFPrint.file)).get() - }.map(_._1) - - /** - * Create an analysis store backed by analysisCache. - */ - def cachedStore(cacheFile: File): AnalysisStore = { - val fileStore = AnalysisStore.cached(SafeFileBasedStore(cacheFile)) - - val fprintStore = new AnalysisStore { - def set(analysis: CompileAnalysis, setup: MiniSetup) { - fileStore.set(analysis, setup) - FileFPrint.fprint(cacheFile) foreach { analysisCache.put(_, Some((analysis, setup))) } - } - def get(): Option[(CompileAnalysis, MiniSetup)] = { - FileFPrint.fprint(cacheFile) flatMap { fprint => - analysisCache.getOrElseUpdate(fprint) { - fileStore.get - } - } - } - } - - AnalysisStore.sync(AnalysisStore.cached(fprintStore)) - } -} - -/** - * Safely update analysis file by writing to a temp file first - * and only rename to the original file upon successful write. 
- * - * TODO: merge this upstream https://github.com/sbt/zinc/issues/178 - */ -object SafeFileBasedStore { - def apply(file: File): AnalysisStore = new AnalysisStore { - override def set(analysis: CompileAnalysis, setup: MiniSetup): Unit = { - val tmpAnalysisFile = File.createTempFile(file.getName, ".tmp") - val analysisStore = PlainTextFileBasedStore(tmpAnalysisFile) - analysisStore.set(analysis, setup) - Files.move(tmpAnalysisFile.toPath, file.toPath, StandardCopyOption.REPLACE_EXISTING) - } - - override def get(): Option[(CompileAnalysis, MiniSetup)] = - PlainTextFileBasedStore(file).get - } -} - -/** - * Zinc 1.0 changes its analysis file format to zip, and split into two files. - * The following provides a plain text adaptor for pants parser. Long term though, - * we should consider define an internal analysis format that's 1) more stable - * 2) better performance because we can pick and choose only the fields we care about - * - string processing in rebase can be slow for example. 
- * https://github.com/pantsbuild/pants/issues/4039 - */ -object PlainTextFileBasedStore { - def apply(file: File): AnalysisStore = new AnalysisStore { - override def set(analysis: CompileAnalysis, setup: MiniSetup): Unit = { - Using.fileWriter(IO.utf8)(file) { writer => TextAnalysisFormat.write(writer, analysis, setup) } - } - - override def get(): Option[(CompileAnalysis, MiniSetup)] = - try { Some(getUncaught()) } catch { case _: Exception => None } - def getUncaught(): (CompileAnalysis, MiniSetup) = - Using.fileReader(IO.utf8)(file) { reader => TextAnalysisFormat.read(reader, noopCompanionsStore) } - } - - val noopCompanionsStore = new CompanionsStore { - override def get(): Option[(Map[String, Companions], Map[String, Companions])] = Some(getUncaught()) - override def getUncaught(): (Map[String, Companions], Map[String, Companions]) = (Map(), Map()) - } -} diff --git a/src/scala/org/pantsbuild/zinc/Compiler.scala b/src/scala/org/pantsbuild/zinc/Compiler.scala deleted file mode 100644 index 5f661463753..00000000000 --- a/src/scala/org/pantsbuild/zinc/Compiler.scala +++ /dev/null @@ -1,224 +0,0 @@ -/** - * Copyright (C) 2012 Typesafe, Inc. - */ - -package org.pantsbuild.zinc - -import java.io.File -import java.net.URLClassLoader -import sbt.internal.inc.{ - AnalyzingCompiler, - CompileOutput, - CompilerCache, - CompilerBridgeProvider, - IncrementalCompilerImpl, - RawCompiler, - ScalaInstance, - javac -} -import sbt.io.Path -import sbt.io.syntax._ -import sbt.util.Logger -import xsbti.compile.{ - GlobalsCache, - JavaCompiler, - ScalaInstance => XScalaInstance -} - -import org.pantsbuild.zinc.cache.Cache -import org.pantsbuild.zinc.cache.Cache.Implicits - -object Compiler { - val CompilerInterfaceId = "compiler-interface" - val JavaClassVersion = System.getProperty("java.class.version") - - /** - * Static cache for zinc compilers. 
- */ - private val compilerCache = Cache[Setup, Compiler](Setup.Defaults.compilerCacheLimit) - - /** - * Static cache for resident scala compilers. - */ - private val residentCache: GlobalsCache = createResidentCache(Setup.Defaults.residentCacheLimit) - - /** - * Get or create a zinc compiler based on compiler setup. - */ - def apply(setup: Setup, log: Logger): Compiler = - compilerCache.getOrElseUpdate(setup) { - create(setup, log) - } - - /** - * Java API for creating compiler. - */ - def getOrCreate(setup: Setup, log: Logger): Compiler = apply(setup, log) - - /** - * Create a new zinc compiler based on compiler setup. - */ - def create(setup: Setup, log: Logger): Compiler = { - val instance = scalaInstance(setup) - val interfaceJar = compilerInterface(setup, instance, log) - val scalac = newScalaCompiler(instance, interfaceJar) - val javac = newJavaCompiler(instance, setup.javaHome, setup.forkJava) - new Compiler(scalac, javac, setup) - } - - /** - * Create a new scala compiler. - */ - def newScalaCompiler(instance: XScalaInstance, interfaceJar: File): AnalyzingCompiler = - new AnalyzingCompiler( - instance, - CompilerBridgeProvider.constant(interfaceJar), - sbt.internal.inc.ClasspathOptionsUtil.auto, - _ => (), None - ) - - /** - * Create a new java compiler. - */ - def newJavaCompiler(instance: XScalaInstance, javaHome: Option[File], fork: Boolean): JavaCompiler = - if (fork || javaHome.isDefined) { - javac.JavaCompiler.fork(javaHome) - } else { - javac.JavaCompiler.local.getOrElse { - throw new RuntimeException( - "Unable to locate javac directly. Please ensure that a JDK is on zinc's classpath." - ) - } - } - - /** - * Create new globals cache. - */ - def createResidentCache(maxCompilers: Int): GlobalsCache = { - if (maxCompilers <= 0) CompilerCache.fresh else CompilerCache(maxCompilers) - } - - /** - * Create the scala instance for the compiler. Includes creating the classloader. 
- */ - def scalaInstance(setup: Setup): XScalaInstance = { - import setup.{scalaCompiler, scalaExtra, scalaLibrary} - val allJars = scalaLibrary +: scalaCompiler +: scalaExtra - val loader = scalaLoader(allJars) - val version = scalaVersion(loader) - new ScalaInstance(version.getOrElse("unknown"), loader, scalaLibrary, scalaCompiler, allJars.toArray, version) - } - - /** - * Create a new classloader with the root loader as parent (to avoid zinc itself being included). - */ - def scalaLoader(jars: Seq[File]) = - new URLClassLoader( - Path.toURLs(jars), - sbt.internal.inc.classpath.ClasspathUtilities.rootLoader - ) - - /** - * Get the actual scala version from the compiler.properties in a classloader. - * The classloader should only contain one version of scala. - */ - def scalaVersion(scalaLoader: ClassLoader): Option[String] = { - Util.propertyFromResource("compiler.properties", "version.number", scalaLoader) - } - - /** - * Get the compiler interface for this compiler setup. Compile it if not already cached. - * NB: This usually occurs within the compilerCache entry lock, but in the presence of - * multiple zinc processes (ie, without nailgun) we need to be more careful not to clobber - * another compilation attempt. 
- */ - def compilerInterface(setup: Setup, scalaInstance: XScalaInstance, log: Logger): File = { - def compile(targetJar: File): Unit = - AnalyzingCompiler.compileSources( - Seq(setup.compilerBridgeSrc), - targetJar, - Seq(setup.compilerInterface), - CompilerInterfaceId, - new RawCompiler(scalaInstance, sbt.internal.inc.ClasspathOptionsUtil.auto, log), - log - ) - val dir = setup.cacheDir / interfaceId(scalaInstance.actualVersion) - val interfaceJar = dir / (CompilerInterfaceId + ".jar") - if (!interfaceJar.isFile) { - dir.mkdirs() - val tempJar = File.createTempFile("interface-", ".jar.tmp", dir) - try { - compile(tempJar) - tempJar.renameTo(interfaceJar) - } finally { - tempJar.delete() - } - } - interfaceJar - } - - def interfaceId(scalaVersion: String) = CompilerInterfaceId + "-" + scalaVersion + "-" + JavaClassVersion -} - -/** - * A zinc compiler for incremental recompilation. - */ -class Compiler(scalac: AnalyzingCompiler, javac: JavaCompiler, setup: Setup) { - - private[this] val compiler = new IncrementalCompilerImpl() - - /** - * Run a compile. The resulting analysis is pesisted to `inputs.cacheFile`. 
- */ - def compile(inputs: Inputs, cwd: Option[File], reporter: xsbti.Reporter, progress: xsbti.compile.CompileProgress)(log: Logger): Unit = { - import inputs._ - - // load the existing analysis - val targetAnalysisStore = AnalysisMap.cachedStore(cacheFile) - val (previousAnalysis, previousSetup) = - targetAnalysisStore.get().map { - case (a, s) => (Some(a), Some(s)) - } getOrElse { - (None, None) - } - - val result = - compiler.incrementalCompile( - scalac, - javac, - sources, - classpath = autoClasspath(classesDirectory, scalac.scalaInstance.allJars, javaOnly, classpath), - output = CompileOutput(classesDirectory), - cache = Compiler.residentCache, - Some(progress), - options = scalacOptions, - javacOptions, - previousAnalysis, - previousSetup, - perClasspathEntryLookup = analysisMap.getPCELookup, - reporter, - compileOrder, - skip = false, - incOptions.options(log), - extra = Nil - )(log) - - // if the compile resulted in modified analysis, persist it - if (result.hasModified) { - targetAnalysisStore.set(result.analysis, result.setup) - } - } - - /** - * Automatically add the output directory and scala library to the classpath. - */ - def autoClasspath(classesDirectory: File, allScalaJars: Seq[File], javaOnly: Boolean, classpath: Seq[File]): Seq[File] = { - if (javaOnly) classesDirectory +: classpath - else Setup.splitScala(allScalaJars) match { - case Some(scalaJars) => classesDirectory +: scalaJars.library +: classpath - case None => classesDirectory +: classpath - } - } - - override def toString = "Compiler(Scala %s)" format scalac.scalaInstance.actualVersion -} diff --git a/src/scala/org/pantsbuild/zinc/Inputs.scala b/src/scala/org/pantsbuild/zinc/Inputs.scala deleted file mode 100644 index 25f38d7dc1b..00000000000 --- a/src/scala/org/pantsbuild/zinc/Inputs.scala +++ /dev/null @@ -1,199 +0,0 @@ -/** - * Copyright (C) 2012 Typesafe, Inc. 
- */ - -package org.pantsbuild.zinc - -import java.io.{File, IOException} -import java.util.{ List => JList, Map => JMap } - -import scala.collection.JavaConverters._ - -import sbt.io.syntax._ -import sbt.util.Logger -import xsbti.compile.CompileOrder - -/** - * All inputs for a compile run. - */ -case class Inputs( - classpath: Seq[File], - sources: Seq[File], - classesDirectory: File, - scalacOptions: Seq[String], - javacOptions: Seq[String], - cacheFile: File, - analysisMap: AnalysisMap, - javaOnly: Boolean, - compileOrder: CompileOrder, - incOptions: IncOptions) - -object Inputs { - /** - * Create inputs based on command-line settings. - */ - def apply(log: Logger, settings: Settings): Inputs = { - import settings._ - inputs( - log, - classpath, - sources, - classesDirectory, - scalacOptions, - javacOptions, - analysis.cache, - analysis.cacheMap, - javaOnly, - compileOrder, - incOptions) - } - - /** - * Create normalised and defaulted Inputs. - */ - def inputs( - log: Logger, - classpath: Seq[File], - sources: Seq[File], - classesDirectory: File, - scalacOptions: Seq[String], - javacOptions: Seq[String], - analysisCache: Option[File], - analysisCacheMap: Map[File, File], - javaOnly: Boolean, - compileOrder: CompileOrder, - incOptions: IncOptions): Inputs = - { - val normalise: File => File = { _.getAbsoluteFile } - val cp = classpath map normalise - val srcs = sources map normalise - val classes = normalise(classesDirectory) - val cacheFile = normalise(analysisCache.getOrElse(defaultCacheLocation(classesDirectory))) - val analysisMap = - AnalysisMap.create( - analysisCacheMap.collect { - case (k, v) if normalise(k) != classes => - (normalise(k), normalise(v)) - }, - log - ) - val incOpts = updateIncOptions(incOptions, classesDirectory, normalise) - new Inputs( - cp, srcs, classes, scalacOptions, javacOptions, cacheFile, analysisMap, - javaOnly, compileOrder, incOpts - ) - } - - /** - * Java API for creating Inputs. 
- */ - def create( - log: Logger, - classpath: JList[File], - sources: JList[File], - classesDirectory: File, - scalacOptions: JList[String], - javacOptions: JList[String], - analysisCache: File, - analysisMap: JMap[File, File], - compileOrder: String, - incOptions: IncOptions): Inputs = - inputs( - log, - classpath.asScala, - sources.asScala, - classesDirectory, - scalacOptions.asScala, - javacOptions.asScala, - Option(analysisCache), - analysisMap.asScala.toMap, - javaOnly = false, - Settings.compileOrder(compileOrder), - incOptions - ) - - /** - * By default the cache location is relative to the classes directory (for example, target/classes/../cache/classes). - */ - def defaultCacheLocation(classesDir: File) = { - classesDir.getParentFile / "cache" / classesDir.getName - } - - /** - * Normalise files and default the backup directory. - */ - def updateIncOptions(incOptions: IncOptions, classesDir: File, normalise: File => File): IncOptions = { - incOptions.copy( - apiDumpDirectory = incOptions.apiDumpDirectory map normalise, - backup = getBackupDirectory(incOptions, classesDir, normalise) - ) - } - - /** - * Get normalised, default if not specified, backup directory. If transactional. - */ - def getBackupDirectory(incOptions: IncOptions, classesDir: File, normalise: File => File): Option[File] = { - if (incOptions.transactional) - Some(normalise(incOptions.backup.getOrElse(defaultBackupLocation(classesDir)))) - else - None - } - - /** - * By default the backup location is relative to the classes directory (for example, target/classes/../backup/classes). - */ - def defaultBackupLocation(classesDir: File) = { - classesDir.getParentFile / "backup" / classesDir.getName - } - - /** - * Verify inputs. - * Currently checks that the cache file is writable. 
- */ - def verify(inputs: Inputs): Unit = { - if (!Util.checkWritable(inputs.cacheFile)) { - throw new IOException(s"Configured cache file ${inputs.cacheFile} is not writable!") - } - } - - /** - * Debug output for inputs. - */ - def debug(inputs: Inputs, log: xsbti.Logger): Unit = { - show(inputs, s => log.debug(Logger.f0(s))) - } - - /** - * Debug output for inputs. - */ - def show(inputs: Inputs, output: String => Unit): Unit = { - import inputs._ - - val incOpts = Seq( - "transitive step" -> incOptions.transitiveStep, - "recompile all fraction" -> incOptions.recompileAllFraction, - "debug relations" -> incOptions.relationsDebug, - "debug api" -> incOptions.apiDebug, - "api dump" -> incOptions.apiDumpDirectory, - "api diff context size" -> incOptions.apiDiffContextSize, - "transactional" -> incOptions.transactional, - "use zinc provided file manager" -> incOptions.useZincFileManager, - "backup directory" -> incOptions.backup, - "recompile on macro def" -> incOptions.recompileOnMacroDef - ) - - val values = Seq( - "classpath" -> classpath, - "sources" -> sources, - "output directory" -> classesDirectory, - "scalac options" -> scalacOptions, - "javac options" -> javacOptions, - "cache file" -> cacheFile, - "analysis map" -> analysisMap, - "java only" -> javaOnly, - "compile order" -> compileOrder, - "incremental compiler options" -> incOpts) - - Util.show(("Inputs", values), output) - } -} diff --git a/src/scala/org/pantsbuild/zinc/Main.scala b/src/scala/org/pantsbuild/zinc/Main.scala deleted file mode 100644 index 71026a6d62f..00000000000 --- a/src/scala/org/pantsbuild/zinc/Main.scala +++ /dev/null @@ -1,114 +0,0 @@ -/** - * Copyright (C) 2012 Typesafe, Inc. - */ - -package org.pantsbuild.zinc - -import java.io.File -import sbt.util.Level -import xsbti.CompileFailed -import org.pantsbuild.zinc.logging.{ Loggers, Reporters } - -/** - * Command-line main class. - */ -object Main { - def main(args: Array[String]): Unit = run(args, None) - - /** - * Compile run. 
Current working directory can be provided (for nailed zinc). - */ - def run(args: Array[String], cwd: Option[File]): Unit = { - val startTime = System.currentTimeMillis - - val Parsed(rawSettings, residual, errors) = Settings.parse(args) - - // normalise relative paths to the current working directory (if provided) - val settings = Settings.normalise(rawSettings, cwd) - - // if nailed then also set any system properties provided - if (cwd.isDefined) Util.setProperties(settings.properties) - - val log = - Loggers.create( - settings.consoleLog.logLevel, - settings.consoleLog.color, - captureLog = settings.captureLog - ) - val isDebug = settings.consoleLog.logLevel == Level.Debug - val reporter = - Reporters.create( - log, - settings.consoleLog.fileFilters, - settings.consoleLog.msgFilters - ) - val progress = - new SimpleCompileProgress( - settings.consoleLog.logPhases, - settings.consoleLog.printProgress, - settings.consoleLog.heartbeatSecs - )(log) - - // bail out on any command-line option errors - if (errors.nonEmpty) { - for (error <- errors) log.error(error) - log.error("See %s -help for information about options" format Setup.Command) - sys.exit(1) - } - - if (settings.version) Setup.printVersion() - - if (settings.help) Settings.printUsage() - - val inputs = Inputs(log, settings) - val setup = Setup(settings) - - // if there are no sources provided, print outputs based on current analysis if requested, - // else print version and usage by default - if (inputs.sources.isEmpty) { - if (!settings.version && !settings.help) { - Setup.printVersion() - Settings.printUsage() - sys.exit(1) - } - sys.exit(0) - } - - // check we have all necessary files - if (!Setup.verify(setup, log)) { - log.error("See %s -help for information about locating necessary files" format Setup.Command) - sys.exit(1) - } - - // verify inputs - Inputs.verify(inputs) - - if (isDebug) { - val debug: String => Unit = log.debug(_) - Setup.show(setup, debug) - Inputs.show(inputs, debug) - 
debug("Setup and Inputs parsed " + Util.timing(startTime)) - } - - // run the compile - try { - val compiler = Compiler(setup, log) - log.debug("Zinc compiler = %s [%s]" format (compiler, compiler.hashCode.toHexString)) - compiler.compile(inputs, cwd, reporter, progress)(log) - log.info("Compile success " + Util.timing(startTime)) - } catch { - case e: CompileFailed => - log.error("Compile failed " + Util.timing(startTime)) - sys.exit(1) - case e: Exception => - if (isDebug) e.printStackTrace - val message = e.getMessage - if (message ne null) log.error(message) - sys.exit(1) - } finally { - if (settings.consoleLog.printProgress || settings.consoleLog.heartbeatSecs > 0) { - System.out.println("Done.") - } - } - } -} diff --git a/src/scala/org/pantsbuild/zinc/Settings.scala b/src/scala/org/pantsbuild/zinc/Settings.scala deleted file mode 100644 index 6ee01387511..00000000000 --- a/src/scala/org/pantsbuild/zinc/Settings.scala +++ /dev/null @@ -1,335 +0,0 @@ -/** - * Copyright (C) 2012 Typesafe, Inc. - */ - -package org.pantsbuild.zinc - -import java.io.File -import java.util.{ List => JList } - -import scala.collection.JavaConverters._ -import scala.util.matching.Regex - -import sbt.io.Path._ -import sbt.util.{Level, Logger} -import sbt.util.Logger.{m2o, o2m} -import xsbti.Maybe -import xsbti.compile.{ - ClassfileManagerType, - CompileOrder, - TransactionalManagerType -} -import xsbti.compile.IncOptionsUtil.defaultIncOptions - - -/** - * All parsed command-line options. 
- */ -case class Settings( - help: Boolean = false, - version: Boolean = false, - consoleLog: ConsoleOptions = ConsoleOptions(), - captureLog: Option[File] = None, - sources: Seq[File] = Seq.empty, - classpath: Seq[File] = Seq.empty, - classesDirectory: File = new File("."), - scala: ScalaLocation = ScalaLocation(), - scalacOptions: Seq[String] = Seq.empty, - javaHome: Option[File] = None, - forkJava: Boolean = false, - _zincCacheDir: Option[File] = None, - javaOnly: Boolean = false, - javacOptions: Seq[String] = Seq.empty, - compileOrder: CompileOrder = CompileOrder.Mixed, - sbt: SbtJars = SbtJars(), - incOptions: IncOptions = IncOptions(), - analysis: AnalysisOptions = AnalysisOptions(), - properties: Seq[String] = Seq.empty -) { - def zincCacheDir: File = _zincCacheDir.getOrElse { - throw new RuntimeException(s"The ${Settings.ZincCacheDirName} option is required.") - } -} - -/** Due to the limit of 22 elements in a case class, options must get broken down into sub-groups. - * TODO: further break options into sensible subgroups. */ -case class ConsoleOptions( - logLevel: Level.Value = Level.Info, - color: Boolean = true, - logPhases: Boolean = false, - printProgress: Boolean = false, - heartbeatSecs: Int = 0, - fileFilters: Seq[Regex] = Seq.empty, - msgFilters: Seq[Regex] = Seq.empty -) - -/** - * Alternative ways to locate the scala jars. - */ -case class ScalaLocation( - home: Option[File] = None, - path: Seq[File] = Seq.empty, - compiler: Option[File] = None, - library: Option[File] = None, - extra: Seq[File] = Seq.empty -) - -object ScalaLocation { - /** - * Java API for creating ScalaLocation. - */ - def create( - home: File, - path: JList[File], - compiler: File, - library: File, - extra: JList[File]): ScalaLocation = - ScalaLocation( - Option(home), - path.asScala, - Option(compiler), - Option(library), - extra.asScala - ) - - /** - * Java API for creating ScalaLocation with scala home. 
- */ - def fromHome(home: File) = ScalaLocation(home = Option(home)) - - /** - * Java API for creating ScalaLocation with scala path. - */ - def fromPath(path: JList[File]) = ScalaLocation(path = path.asScala) -} - -/** - * Locating the sbt jars needed for zinc compile. - */ -case class SbtJars( - compilerBridgeSrc: Option[File] = None, - compilerInterface: Option[File] = None -) - -/** - * Wrapper around incremental compiler options. - */ -case class IncOptions( - transitiveStep: Int = defaultIncOptions.transitiveStep, - recompileAllFraction: Double = defaultIncOptions.recompileAllFraction, - relationsDebug: Boolean = defaultIncOptions.relationsDebug, - apiDebug: Boolean = defaultIncOptions.apiDebug, - apiDiffContextSize: Int = defaultIncOptions.apiDiffContextSize, - apiDumpDirectory: Option[File] = m2o(defaultIncOptions.apiDumpDirectory), - transactional: Boolean = false, - useZincFileManager: Boolean = true, - backup: Option[File] = None, - recompileOnMacroDef: Option[Boolean] = m2o(defaultIncOptions.recompileOnMacroDef).map(_.booleanValue) -) { - def options(log: Logger): xsbti.compile.IncOptions = { - new xsbti.compile.IncOptions( - transitiveStep, - recompileAllFraction, - relationsDebug, - apiDebug, - apiDiffContextSize, - o2m(apiDumpDirectory), - classfileManager(log), - useZincFileManager, - o2m(recompileOnMacroDef.map(java.lang.Boolean.valueOf)), - true, // nameHashing - false, // storeApis, apis is stored separately after 1.0.0 - false, // antStyle - Map.empty.asJava, // extra - defaultIncOptions.logRecompileOnMacro, - defaultIncOptions.externalHooks - ) - } - - def defaultApiDumpDirectory = - defaultIncOptions.apiDumpDirectory - - def classfileManager(log: Logger): Maybe[ClassfileManagerType] = - if (transactional && backup.isDefined) - Maybe.just(new TransactionalManagerType(backup.get, log)) - else - Maybe.nothing[ClassfileManagerType] -} - -/** - * Configuration for sbt analysis and analysis output options. 
- */ -case class AnalysisOptions( - cache: Option[File] = None, - cacheMap: Map[File, File] = Map.empty -) - -object Settings { - val ZincCacheDirName = "-zinc-cache-dir" - /** - * All available command-line options. - */ - val options = Seq( - header("Output options:"), - boolean( ("-help", "-h"), "Print this usage message", (s: Settings) => s.copy(help = true)), - boolean( "-version", "Print version", (s: Settings) => s.copy(version = true)), - - header("Logging Options:"), - boolean( "-debug", "Set log level for stdout to debug", - (s: Settings) => s.copy(consoleLog = s.consoleLog.copy(logLevel = Level.Debug))), - string( "-log-level", "level", "Set log level for stdout (debug|info|warn|error)", - (s: Settings, l: String) => s.copy(consoleLog = s.consoleLog.copy(logLevel = Level.withName(l)))), - boolean( "-no-color", "No color in logging to stdout", - (s: Settings) => s.copy(consoleLog = s.consoleLog.copy(color = false))), - boolean( "-log-phases", "Log phases of compilation for each file to stdout", - (s: Settings) => s.copy(consoleLog = s.consoleLog.copy(logPhases = true))), - boolean( "-print-progress", "Periodically print compilation progress to stdout", - (s: Settings) => s.copy(consoleLog = s.consoleLog.copy(printProgress = true))), - int( "-heartbeat", "interval (sec)", "Print '.' 
to stdout every n seconds while compiling", - (s: Settings, b: Int) => s.copy(consoleLog = s.consoleLog.copy(heartbeatSecs = b))), - string( "-msg-filter", "regex", "Filter warning messages matching the given regex", - (s: Settings, re: String) => s.copy(consoleLog = s.consoleLog.copy(msgFilters = s.consoleLog.msgFilters :+ re.r))), - string( "-file-filter", "regex", "Filter warning messages from filenames matching the given regex", - (s: Settings, re: String) => s.copy(consoleLog = s.consoleLog.copy(fileFilters = s.consoleLog.fileFilters :+ re.r))), - file( "-capture-log", "file", "Captures all logging (unfiltered) to the given file", - (s: Settings, f: File) => s.copy(captureLog = Some(f))), - - header("Compile options:"), - path( ("-classpath", "-cp"), "path", "Specify the classpath", (s: Settings, cp: Seq[File]) => s.copy(classpath = cp)), - file( "-d", "directory", "Destination for compiled classes", (s: Settings, f: File) => s.copy(classesDirectory = f)), - - header("Scala options:"), - file( "-scala-home", "directory", "Scala home directory (for locating jars)", (s: Settings, f: File) => s.copy(scala = s.scala.copy(home = Some(f)))), - path( "-scala-path", "path", "Specify all Scala jars directly", (s: Settings, sp: Seq[File]) => s.copy(scala = s.scala.copy(path = sp))), - file( "-scala-compiler", "file", "Specify Scala compiler jar directly" , (s: Settings, f: File) => s.copy(scala = s.scala.copy(compiler = Some(f)))), - file( "-scala-library", "file", "Specify Scala library jar directly" , (s: Settings, f: File) => s.copy(scala = s.scala.copy(library = Some(f)))), - path( "-scala-extra", "path", "Specify extra Scala jars directly", (s: Settings, e: Seq[File]) => s.copy(scala = s.scala.copy(extra = e))), - prefix( "-S", "", "Pass option to scalac", (s: Settings, o: String) => s.copy(scalacOptions = s.scalacOptions :+ o)), - - header("Java options:"), - file( "-java-home", "directory", "Select javac home directory (and fork)", (s: Settings, f: File) => 
s.copy(javaHome = Some(f))), - boolean( "-fork-java", "Run java compiler in separate process", (s: Settings) => s.copy(forkJava = true)), - string( "-compile-order", "order", "Compile order for Scala and Java sources", (s: Settings, o: String) => s.copy(compileOrder = compileOrder(o))), - boolean( "-java-only", "Don't add scala library to classpath", (s: Settings) => s.copy(javaOnly = true)), - prefix( "-C", "", "Pass option to javac", (s: Settings, o: String) => s.copy(javacOptions = s.javacOptions :+ o)), - - header("sbt options:"), - file( "-compiler-bridge", "file", "Specify compiler bridge sources jar", (s: Settings, f: File) => s.copy(sbt = s.sbt.copy(compilerBridgeSrc = Some(f)))), - file( "-compiler-interface", "file", "Specify compiler interface jar", (s: Settings, f: File) => s.copy(sbt = s.sbt.copy(compilerInterface = Some(f)))), - file( ZincCacheDirName, "file", "A cache directory for compiler interfaces", (s: Settings, f: File) => s.copy(_zincCacheDir = Some(f))), - - header("Incremental compiler options:"), - int( "-transitive-step", "n", "Steps before transitive closure", (s: Settings, i: Int) => s.copy(incOptions = s.incOptions.copy(transitiveStep = i))), - fraction( "-recompile-all-fraction", "x", "Limit before recompiling all sources", (s: Settings, d: Double) => s.copy(incOptions = s.incOptions.copy(recompileAllFraction = d))), - boolean( "-debug-relations", "Enable debug logging of analysis relations", (s: Settings) => s.copy(incOptions = s.incOptions.copy(relationsDebug = true))), - boolean( "-debug-api", "Enable analysis API debugging", (s: Settings) => s.copy(incOptions = s.incOptions.copy(apiDebug = true))), - file( "-api-dump", "directory", "Destination for analysis API dump", (s: Settings, f: File) => s.copy(incOptions = s.incOptions.copy(apiDumpDirectory = Some(f)))), - int( "-api-diff-context-size", "n", "Diff context size (in lines) for API debug", (s: Settings, i: Int) => s.copy(incOptions = s.incOptions.copy(apiDiffContextSize = i))), 
- boolean( "-transactional", "Restore previous class files on failure", (s: Settings) => s.copy(incOptions = s.incOptions.copy(transactional = true))), - boolean( "-no-zinc-file-manager", "Disable zinc provided file manager", (s: Settings) => s.copy(incOptions = s.incOptions.copy(useZincFileManager = false))), - file( "-backup", "directory", "Backup location (if transactional)", (s: Settings, f: File) => s.copy(incOptions = s.incOptions.copy(backup = Some(f)))), - boolean( "-recompileOnMacroDefDisabled", "Disable recompilation of all dependencies of a macro def", - (s: Settings) => s.copy(incOptions = s.incOptions.copy(recompileOnMacroDef = Some(false)))), - - header("Analysis options:"), - file( "-analysis-cache", "file", "Cache file for compile analysis", (s: Settings, f: File) => s.copy(analysis = s.analysis.copy(cache = Some(f)))), - fileMap( "-analysis-map", "Upstream analysis mapping (file:file,...)", (s: Settings, m: Map[File, File]) => s.copy(analysis = s.analysis.copy(cacheMap = m))), - - header("JVM options:"), - prefix( "-D", "property=value", "Pass property to runtime system", (s: Settings, o: String) => s.copy(properties = s.properties :+ o)), - dummy( "-J", "Set JVM flag directly for this process") - ) - - val allOptions: Set[OptionDef[Settings]] = options.toSet - - /** - * Print out the usage message. - */ - def printUsage(): Unit = { - val column = options.map(_.length).max + 2 - println("Usage: %s " format Setup.Command) - options foreach { opt => if (opt.extraline) println(); println(opt.usage(column)) } - println() - } - - /** - * Anything starting with '-' is considered an option, not a source file. - */ - def isOpt(s: String) = s startsWith "-" - - /** - * Parse all args into a Settings object. - * Residual args are either unknown options or source files. 
- */ - def parse(args: Seq[String]): Parsed[Settings] = { - val Parsed(settings, remaining, errors) = Options.parse(Settings(), allOptions, args, stopOnError = false) - val (unknown, residual) = remaining partition isOpt - val sources = residual map (new File(_)) - val unknownErrors = unknown map ("Unknown option: " + _) - Parsed(settings.copy(sources = sources), Seq.empty, errors ++ unknownErrors) - } - - /** - * Create a CompileOrder value based on string input. - */ - def compileOrder(order: String): CompileOrder = { - order.toLowerCase match { - case "mixed" => CompileOrder.Mixed - case "java" | "java-then-scala" | "javathenscala" => CompileOrder.JavaThenScala - case "scala" | "scala-then-java" | "scalathenjava" => CompileOrder.ScalaThenJava - } - } - - /** - * Normalise all relative paths to the actual current working directory, if provided. - */ - def normalise(settings: Settings, cwd: Option[File]): Settings = { - if (cwd.isEmpty) settings - else { - import settings._ - settings.copy( - sources = Util.normaliseSeq(cwd)(sources), - classpath = Util.normaliseSeq(cwd)(classpath), - classesDirectory = Util.normalise(cwd)(classesDirectory), - scala = scala.copy( - home = Util.normaliseOpt(cwd)(scala.home), - path = Util.normaliseSeq(cwd)(scala.path), - compiler = Util.normaliseOpt(cwd)(scala.compiler), - library = Util.normaliseOpt(cwd)(scala.library), - extra = Util.normaliseSeq(cwd)(scala.extra) - ), - javaHome = Util.normaliseOpt(cwd)(javaHome), - sbt = sbt.copy( - compilerBridgeSrc = Util.normaliseOpt(cwd)(sbt.compilerBridgeSrc), - compilerInterface = Util.normaliseOpt(cwd)(sbt.compilerInterface) - ), - incOptions = incOptions.copy( - apiDumpDirectory = Util.normaliseOpt(cwd)(incOptions.apiDumpDirectory), - backup = Util.normaliseOpt(cwd)(incOptions.backup) - ), - analysis = analysis.copy( - cache = Util.normaliseOpt(cwd)(analysis.cache), - cacheMap = Util.normaliseMap(cwd)(analysis.cacheMap) - ) - ) - } - } - - // helpers for creating options - - def 
boolean(opt: String, desc: String, action: Settings => Settings) = new BooleanOption[Settings](Seq(opt), desc, action) - def boolean(opts: (String, String), desc: String, action: Settings => Settings) = new BooleanOption[Settings](Seq(opts._1, opts._2), desc, action) - def string(opt: String, arg: String, desc: String, action: (Settings, String) => Settings) = new StringOption[Settings](Seq(opt), arg, desc, action) - def int(opt: String, arg: String, desc: String, action: (Settings, Int) => Settings) = new IntOption[Settings](Seq(opt), arg, desc, action) - def double(opt: String, arg: String, desc: String, action: (Settings, Double) => Settings) = new DoubleOption[Settings](Seq(opt), arg, desc, action) - def fraction(opt: String, arg: String, desc: String, action: (Settings, Double) => Settings) = new FractionOption[Settings](Seq(opt), arg, desc, action) - def file(opt: String, arg: String, desc: String, action: (Settings, File) => Settings) = new FileOption[Settings](Seq(opt), arg, desc, action) - def path(opt: String, arg: String, desc: String, action: (Settings, Seq[File]) => Settings) = new PathOption[Settings](Seq(opt), arg, desc, action) - def path(opts: (String, String), arg: String, desc: String, action: (Settings, Seq[File]) => Settings) = new PathOption[Settings](Seq(opts._1, opts._2), arg, desc, action) - def prefix(pre: String, arg: String, desc: String, action: (Settings, String) => Settings) = new PrefixOption[Settings](pre, arg, desc, action) - def filePair(opt: String, arg: String, desc: String, action: (Settings, (File, File)) => Settings) = new FilePairOption[Settings](Seq(opt), arg, desc, action) - def fileMap(opt: String, desc: String, action: (Settings, Map[File, File]) => Settings) = new FileMapOption[Settings](Seq(opt), desc, action) - def fileSeqMap(opt: String, desc: String, action: (Settings, Map[Seq[File], File]) => Settings) = new FileSeqMapOption[Settings](Seq(opt), desc, action) - def header(label: String) = new 
HeaderOption[Settings](label) - def dummy(opt: String, desc: String) = new DummyOption[Settings](opt, desc) -} diff --git a/src/scala/org/pantsbuild/zinc/Setup.scala b/src/scala/org/pantsbuild/zinc/Setup.scala deleted file mode 100644 index 951b2526405..00000000000 --- a/src/scala/org/pantsbuild/zinc/Setup.scala +++ /dev/null @@ -1,260 +0,0 @@ -/** - * Copyright (C) 2012 Typesafe, Inc. - */ - -package org.pantsbuild.zinc - -import java.io.File -import java.util.{ List => JList } -import scala.collection.JavaConverters._ - -import sbt.io.syntax._ -import sbt.util.Logger - -/** - * All identity-affecting options for a zinc compiler. All fields in this struct - * must have a useful definition of equality. - */ -case class Setup( - scalaCompiler: File, - scalaLibrary: File, - scalaExtra: Seq[File], - compilerBridgeSrc: File, - compilerInterface: File, - javaHome: Option[File], - forkJava: Boolean, - cacheDir: File) - -/** - * Jar file description for locating jars. - */ -case class JarFile(name: String, classifier: Option[String] = None) { - val versionPattern = "(-.*)?" - val classifierString = classifier map ("-" + _) getOrElse "" - val extension = "jar" - val pattern = name + versionPattern + classifierString + "." + extension - val default = name + classifierString + "." + extension -} - -object JarFile { - def apply(name: String, classifier: String): JarFile = JarFile(name, Some(classifier)) -} - -/** - * The scala jars split into compiler, library, and extra. 
- */ -case class ScalaJars(compiler: File, library: File, extra: Seq[File]) - -object Setup { - val Command = "zinc" - val Description = "scala incremental compiler" - - val HomeProperty = prop("home") - val DirProperty = prop("dir") - - val ScalaCompiler = JarFile("scala-compiler") - val ScalaLibrary = JarFile("scala-library") - val ScalaReflect = JarFile("scala-reflect") - val CompilerBridgeSources = JarFile("compiler-bridge", "sources") - val CompilerInterface = JarFile("compiler-interface") - - /** - * Create compiler setup from command-line settings. - */ - def apply(settings: Settings): Setup = { - val scalaJars = selectScalaJars(settings.scala) - val (compilerBridgeSrc, compilerInterface) = selectSbtJars(settings.sbt) - setup( - scalaJars.compiler, - scalaJars.library, - scalaJars.extra, - compilerBridgeSrc, - compilerInterface, - settings.javaHome, - settings.forkJava, - settings.zincCacheDir - ) - } - - /** - * Create normalised and defaulted Setup. - */ - def setup( - scalaCompiler: File, - scalaLibrary: File, - scalaExtra: Seq[File], - compilerBridgeSrc: File, - compilerInterface: File, - javaHomeDir: Option[File], - forkJava: Boolean, - cacheDir: File - ): Setup = { - val normalise: File => File = { _.getAbsoluteFile } - val compilerJar = normalise(scalaCompiler) - val libraryJar = normalise(scalaLibrary) - val extraJars = scalaExtra map normalise - val compilerBridgeJar = normalise(compilerBridgeSrc) - val compilerInterfaceJar = normalise(compilerInterface) - val javaHome = javaHomeDir map normalise - Setup(compilerJar, libraryJar, extraJars, compilerBridgeJar, compilerInterfaceJar, javaHome, forkJava, cacheDir) - } - - /** - * Select the scala jars. - * - * Prefer the explicit scala-compiler, scala-library, and scala-extra settings, - * then the scala-path setting, then the scala-home setting. Default to bundled scala. 
- */ - def selectScalaJars(scala: ScalaLocation): ScalaJars = { - val jars = { - splitScala(scala.path) orElse - splitScala(allLibs(scala.home), Defaults.scalaExcluded) getOrElse - Defaults.scalaJars - } - ScalaJars( - scala.compiler getOrElse jars.compiler, - scala.library getOrElse jars.library, - scala.extra ++ jars.extra - ) - } - - /** - * Distinguish the compiler and library jars. - */ - def splitScala(jars: Seq[File], excluded: Set[String] = Set.empty): Option[ScalaJars] = { - val filtered = jars filterNot (excluded contains _.getName) - val (compiler, other) = filtered partition (_.getName matches ScalaCompiler.pattern) - val (library, extra) = other partition (_.getName matches ScalaLibrary.pattern) - if (compiler.nonEmpty && library.nonEmpty) Some(ScalaJars(compiler(0), library(0), extra)) else None - } - - /** - * Select the sbt jars. - */ - def selectSbtJars(sbt: SbtJars): (File, File) = { - val compilerBridgeSrc = sbt.compilerBridgeSrc getOrElse Defaults.compilerBridgeSrc - val compilerInterface = sbt.compilerInterface getOrElse Defaults.compilerInterface - (compilerBridgeSrc, compilerInterface) - } - - /** - * Verify that necessary jars exist. - */ - def verify(setup: Setup, log: Logger): Boolean = { - requireFile(setup.scalaCompiler, log) && - requireFile(setup.scalaLibrary, log) && - requireFile(setup.compilerBridgeSrc, log) && - requireFile(setup.compilerInterface, log) - } - - /** - * Check file exists. Log error if it doesn't. 
- */ - def requireFile(file: File, log: Logger): Boolean = { - val exists = file.exists - if (!exists) log.error("Required file not found: " + file.getName) - exists - } - - // - // Default setup - // - - object Defaults { - val userHome = Util.fileProperty("user.home") - val userDir = Util.fileProperty("user.dir") - val zincHome = Util.optFileProperty(HomeProperty).map(_.getCanonicalFile) - - val compilerBridgeSrc = optLibOrDefault(zincHome, CompilerBridgeSources) - val compilerInterface = optLibOrDefault(zincHome, CompilerInterface) - - val scalaCompiler = optLibOrDefault(zincHome, ScalaCompiler) - val scalaLibrary = optLibOrDefault(zincHome, ScalaLibrary) - val scalaExtra = Seq(optLibOrDefault(zincHome, ScalaReflect)) - val scalaJars = ScalaJars(scalaCompiler, scalaLibrary, scalaExtra) - val defaultScalaExcluded = Set("jansi.jar", "jline.jar", "scala-partest.jar", "scala-swing.jar", "scalacheck.jar", "scalap.jar") - val scalaExcluded = Util.stringSetProperty(prop("scala.excluded"), defaultScalaExcluded) - - val cacheLimit = Util.intProperty(prop("cache.limit"), 5) - val analysisCacheLimit = Util.intProperty(prop("analysis.cache.limit"), cacheLimit) - val compilerCacheLimit = Util.intProperty(prop("compiler.cache.limit"), cacheLimit) - val residentCacheLimit = Util.intProperty(prop("resident.cache.limit"), 0) - } - - def prop(name: String) = Command + "." + name - - def allLibs(homeDir: Option[File]): Seq[File] = { - homeDir map { home => (home / "lib" ** "*.jar").get } getOrElse Seq.empty - } - - def optLib(homeDir: Option[File], jar: JarFile): Option[File] = { - allLibs(homeDir) find (_.getName matches jar.pattern) - } - - def optLibOrDefault(homeDir: Option[File], jar: JarFile): File = { - optLib(homeDir, jar) getOrElse new File(jar.default) - } - - // - // Zinc version - // - - /** - * Full zinc version info. - */ - case class Version(published: String, timestamp: String, commit: String) - - /** - * Get the zinc version from a generated properties file. 
- */ - lazy val zincVersion: Version = { - val props = Util.propertiesFromResource("zinc.version.properties", getClass.getClassLoader) - Version( - props.getProperty("version", "unknown"), - props.getProperty("timestamp", ""), - props.getProperty("commit", "") - ) - } - - /** - * For snapshots the zinc version includes timestamp and commit. - */ - lazy val versionString: String = { - import zincVersion._ - if (published.endsWith("-SNAPSHOT")) "%s %s-%s" format (published, timestamp, commit take 10) - else published - } - - /** - * Print the zinc version to standard out. - */ - def printVersion(): Unit = println("%s (%s) %s" format (Command, Description, versionString)) - - // - // Debug - // - - /** - * Debug output for inputs. - */ - def debug(setup: Setup, log: xsbti.Logger): Unit = { - show(setup, s => log.debug(Logger.f0(s))) - } - - /** - * Debug output for compiler setup. - */ - def show(setup: Setup, output: String => Unit): Unit = { - import setup._ - val values = Seq( - "scala compiler" -> scalaCompiler, - "scala library" -> scalaLibrary, - "scala extra" -> scalaExtra, - "compiler bridge sources" -> compilerBridgeSrc, - "compiler interface" -> compilerInterface, - "java home" -> javaHome, - "fork java" -> forkJava, - "cache directory" -> cacheDir) - Util.show(("Setup", values), output) - } -} diff --git a/src/scala/org/pantsbuild/zinc/SimpleCompileProgress.scala b/src/scala/org/pantsbuild/zinc/SimpleCompileProgress.scala deleted file mode 100644 index 9ba8612416e..00000000000 --- a/src/scala/org/pantsbuild/zinc/SimpleCompileProgress.scala +++ /dev/null @@ -1,48 +0,0 @@ -package org.pantsbuild.zinc - -import sbt.util.Logger -import xsbti.compile.CompileProgress - -/** - * SimpleCompileProgress implements CompileProgress to add output to zinc scala compilations, but - * does not implement the capability to cancel compilations via the `advance` method. 
- */ -class SimpleCompileProgress(logPhases: Boolean, printProgress: Boolean, heartbeatSecs: Int)(log: Logger) extends CompileProgress { - @volatile private var lastHeartbeatMillis: Long = 0 - - /** - * startUnit Optionally reports to stdout when a phase of compilation has begun for a file. - */ - def startUnit(phase: String, unitPath: String): Unit = { - if (logPhases) { - log.info(phase + " " + unitPath + "...") - } - } - - /** - * advance Optionally emit the percentage of steps completed, and/or a heartbeat ('.' character) - * roughly every `heartbeatSecs` seconds (though buffering may make the actual interval - * imprecise.) If `heartbeatSecs` is not greater than 0, no heartbeat is emitted. - * - * advance is periodically called during compilation, indicating the total number of compilation - * steps completed (`current`) out of the total number of steps necessary. The method returns - * false if the user wishes to cancel compilation, or true otherwise. Currently, Zinc never - * requests to cancel compilation. - */ - def advance(current: Int, total: Int): Boolean = { - if (printProgress) { - val percent = (current * 100) / total - System.out.print(s"\rProgress: ${percent}%") - } - if (heartbeatSecs > 0) { - val currentTimeMillis = System.currentTimeMillis - val delta = currentTimeMillis - lastHeartbeatMillis - if (delta > (1000 * heartbeatSecs)) { - System.out.print(".") - lastHeartbeatMillis = currentTimeMillis - } - } - /* Always continue compiling. */ - true - } -} diff --git a/src/scala/org/pantsbuild/zinc/analysis/AnalysisMap.scala b/src/scala/org/pantsbuild/zinc/analysis/AnalysisMap.scala new file mode 100644 index 00000000000..42b82b4e829 --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/analysis/AnalysisMap.scala @@ -0,0 +1,153 @@ +/** + * Copyright (C) 2017 Pants project contributors (see CONTRIBUTORS.md). + * Licensed under the Apache License, Version 2.0 (see LICENSE). 
+ */ + +package org.pantsbuild.zinc.analysis + +import java.io.{File, IOException} +import java.util.Optional + +import scala.compat.java8.OptionConverters._ + +import sbt.internal.inc.{ + Analysis, + CompanionsStore, + Locate +} +import sbt.util.Logger +import xsbti.api.Companions +import xsbti.compile.{ + AnalysisContents, + AnalysisStore, + CompileAnalysis, + DefinesClass, + FileAnalysisStore, + MiniSetup, + PerClasspathEntryLookup +} + +import org.pantsbuild.zinc.cache.Cache.Implicits +import org.pantsbuild.zinc.cache.{Cache, FileFPrint} +import org.pantsbuild.zinc.util.Util + +/** + * A facade around the analysis cache to: + * 1) map between classpath entries and cache locations + * 2) use analysis for `definesClass` when it is available + * + * SBT uses the `definesClass` and `getAnalysis` methods in order to load the APIs for upstream + * classes. For a classpath containing multiple entries, sbt will call `definesClass` sequentially + * on classpath entries until it finds a classpath entry defining a particular class. When it finds + * the appropriate classpath entry, it will use `getAnalysis` to fetch the API for that class. + */ +class AnalysisMap private[AnalysisMap] ( + // a map of classpath entries to cache file fingerprints, excluding the current compile destination + analysisLocations: Map[File, FileFPrint], + // a Map of File bases to destinations to re-relativize them to + rebases: Map[File, File] +) { + private val analysisMappers = PortableAnalysisMappers.create(rebases) + + def getPCELookup = new PerClasspathEntryLookup { + /** + * Gets analysis for a classpath entry (if it exists) by translating its path to a potential + * cache location and then checking the cache. 
+ */ + def analysis(classpathEntry: File): Optional[CompileAnalysis] = + analysisLocations.get(classpathEntry).flatMap(cacheLookup).asJava + + /** + * An implementation of definesClass that will use analysis for an input directory to determine + * whether it defines a particular class. + * + * TODO: This optimization is unnecessary for jars on the classpath, which are already indexed. + * Can remove after the sbt jar output patch lands. + */ + def definesClass(classpathEntry: File): DefinesClass = { + getAnalysis(classpathEntry).map { analysis => + // strongly hold the classNames, and transform them to ensure that they are unlinked from + // the remainder of the analysis + val classNames = analysis.asInstanceOf[Analysis].relations.srcProd.reverseMap.keys.toList.toSet.map( + (f: File) => filePathToClassName(f)) + new ClassNamesDefinesClass(classNames) + }.getOrElse { + // no analysis: return a function that will scan instead + Locate.definesClass(classpathEntry) + } + } + + private class ClassNamesDefinesClass(classes: Set[String]) extends DefinesClass { + override def apply(className: String): Boolean = classes(className) + } + + private def filePathToClassName(file: File): String = { + // Extract className from path, for example: + // .../.pants.d/compile/zinc/.../current/classes/org/pantsbuild/example/hello/exe/Exe.class + // => org.pantsbuild.example.hello.exe.Exe + file.getAbsolutePath.split("current/classes")(1).drop(1).replace(".class", "").replaceAll("/", ".") + } + + /** + * Gets analysis for a classpath entry (if it exists) by translating its path to a potential + * cache location and then checking the cache. 
+ */ + def getAnalysis(classpathEntry: File): Option[CompileAnalysis] = + analysisLocations.get(classpathEntry).flatMap(cacheLookup) + } + + def cachedStore(cacheFile: File): AnalysisStore = + AnalysisStore.getThreadSafeStore( + new AnalysisStore { + val fileStore = mkFileAnalysisStore(cacheFile) + + def set(analysis: AnalysisContents) { + fileStore.set(analysis) + FileFPrint.fprint(cacheFile).foreach { fprint => + AnalysisMap.analysisCache.put(fprint, Some(analysis)) + } + } + + def get(): Optional[AnalysisContents] = { + val res = + FileFPrint.fprint(cacheFile) flatMap { fprint => + AnalysisMap.analysisCache.getOrElseUpdate(fprint) { + fileStore.get().asScala + } + } + res.asJava + } + } + ) + + private def cacheLookup(cacheFPrint: FileFPrint): Option[CompileAnalysis] = + AnalysisMap.analysisCache.getOrElseUpdate(cacheFPrint) { + // re-fingerprint the file on miss, to ensure that analysis hasn't changed since we started + if (!FileFPrint.fprint(cacheFPrint.file).exists(_ == cacheFPrint)) { + throw new IOException(s"Analysis at $cacheFPrint has changed since startup!") + } + mkFileAnalysisStore(cacheFPrint.file).get().asScala + }.map(_.getAnalysis) + + private def mkFileAnalysisStore(file: File): AnalysisStore = + FileAnalysisStore.getDefault(file, analysisMappers) +} + +object AnalysisMap { + private val analysisCacheLimit = Util.intProperty("zinc.analysis.cache.limit", 100) + /** + * Static cache for compile analyses. Values must be Options because in get() we don't yet + * know if, on a cache miss, the underlying file will yield a valid Analysis. 
+ */ + private val analysisCache = + Cache[FileFPrint, Option[AnalysisContents]](analysisCacheLimit) + + def create(options: AnalysisOptions): AnalysisMap = + new AnalysisMap( + // create fingerprints for all inputs at startup + options.cacheMap.flatMap { + case (classpathEntry, cacheFile) => FileFPrint.fprint(cacheFile).map(classpathEntry -> _) + }, + options.rebaseMap + ) +} diff --git a/src/scala/org/pantsbuild/zinc/analysis/AnalysisOptions.scala b/src/scala/org/pantsbuild/zinc/analysis/AnalysisOptions.scala new file mode 100644 index 00000000000..5efff1fb97d --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/analysis/AnalysisOptions.scala @@ -0,0 +1,23 @@ +/** + * Copyright (C) 2017 Pants project contributors (see CONTRIBUTORS.md). + * Licensed under the Apache License, Version 2.0 (see LICENSE). + */ + +package org.pantsbuild.zinc.analysis + +import java.io.File + +/** + * Configuration for sbt analysis and analysis output options. + */ +case class AnalysisOptions( + _cache: Option[File] = None, + cacheMap: Map[File, File] = Map.empty, + rebaseMap: Map[File, File] = Map.empty, + clearInvalid: Boolean = true +) { + lazy val cache: File = + _cache.getOrElse { + throw new RuntimeException(s"An analysis cache file is required.") + } +} diff --git a/src/scala/org/pantsbuild/zinc/analysis/BUILD b/src/scala/org/pantsbuild/zinc/analysis/BUILD new file mode 100644 index 00000000000..2cb5044212b --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/analysis/BUILD @@ -0,0 +1,22 @@ +# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
+ +scala_library( + provides=scala_artifact( + org='org.pantsbuild', + name='zinc-analysis', + repo=public, + publication_metadata=pants_library('The SBT incremental compiler for nailgun') + ), + dependencies=[ + '3rdparty/jvm/org/scala-lang/modules:scala-java8-compat', + '3rdparty/jvm/org/scala-sbt:zinc', + 'src/scala/org/pantsbuild/zinc/cache', + 'src/scala/org/pantsbuild/zinc/util', + ], + exports=[ + 'src/scala/org/pantsbuild/zinc/cache', + ], + strict_deps=True, + platform='java8', +) diff --git a/src/scala/org/pantsbuild/zinc/analysis/PortableAnalysisMappers.scala b/src/scala/org/pantsbuild/zinc/analysis/PortableAnalysisMappers.scala new file mode 100644 index 00000000000..b3847ebab23 --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/analysis/PortableAnalysisMappers.scala @@ -0,0 +1,82 @@ +/** + * Copyright (C) 2017 Pants project contributors (see CONTRIBUTORS.md). + * Licensed under the Apache License, Version 2.0 (see LICENSE). + */ + +package org.pantsbuild.zinc.analysis + +import java.io.File +import java.nio.file.Path + +import xsbti.compile.analysis.{GenericMapper, ReadMapper, ReadWriteMappers, WriteMapper} + +import xsbti.compile.MiniSetup +import xsbti.compile.analysis.Stamp + + +/** + * Given a Set of Path bases and destination bases, adapts written analysis to rewrite + * all of the bases. 
+ * + * Intended usecase is to rebase each distinct non-portable base path contained in the analysis: + * in pants this is generally + * 1) the buildroot + * 2) the workdir (generally named `.pants.d`, but not always located under the buildroot) + * 3) the base of the JVM that is in use + */ +object PortableAnalysisMappers { + def create(rebaseMap: Map[File, File]): ReadWriteMappers = { + val rebases = + rebaseMap + .toSeq + .map { + case (k, v) => (k.toPath, v.toPath) + } + .toSet + val forWrite = mkFileRebaser(rebases) + val forRead = mkFileRebaser(rebases.map { case (src, dst) => (dst, src) }) + new ReadWriteMappers(PortableReadMapper(forRead), PortableWriteMapper(forWrite)) + } + + private def mkFileRebaser(rebases: Set[(Path, Path)]): File => File = { + // Sort the rebases from longest to shortest (to ensure that a prefix is rebased + // before a suffix). + val orderedRebases = + rebases.toSeq.sortBy { + case (path, slug) => -path.toString.size + } + val rebaser: File => File = { f => + val p = f.toPath + // Attempt each rebase in length order, applying the longest one that matches. + orderedRebases + .collectFirst { + case (from, to) if p.startsWith(from) => + to.resolve(from.relativize(p)).toFile + } + .getOrElse(f) + } + rebaser + } +} + +case class PortableReadMapper(mapper: File => File) extends PortableMapper with ReadMapper +case class PortableWriteMapper(mapper: File => File) extends PortableMapper with WriteMapper + +trait PortableMapper extends GenericMapper { + def mapper: File => File + + def mapSourceFile(x: File): File = mapper(x) + def mapBinaryFile(x: File): File = mapper(x) + def mapProductFile(x: File): File = mapper(x) + def mapOutputDir(x: File): File = mapper(x) + def mapSourceDir(x: File): File = mapper(x) + def mapClasspathEntry(x: File): File = mapper(x) + + // TODO: Determine whether the rest of these need to be overridden in practice. 
+ def mapJavacOption(x: String): String = x + def mapScalacOption(x: String): String = x + def mapBinaryStamp(f: File, x: Stamp): Stamp = x + def mapSourceStamp(f: File, x: Stamp): Stamp = x + def mapProductStamp(f: File, x: Stamp): Stamp = x + def mapMiniSetup(x: MiniSetup): MiniSetup = x +} diff --git a/src/scala/org/pantsbuild/zinc/cache/BUILD b/src/scala/org/pantsbuild/zinc/cache/BUILD index 0297d2d0f95..307f01a2eb9 100644 --- a/src/scala/org/pantsbuild/zinc/cache/BUILD +++ b/src/scala/org/pantsbuild/zinc/cache/BUILD @@ -1,3 +1,6 @@ +# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + scala_library( provides=scala_artifact( org='org.pantsbuild', @@ -9,6 +12,9 @@ scala_library( '3rdparty:guava', '3rdparty:jsr305', ], + exports=[ + '3rdparty:guava', + ], strict_deps=True, - platform='java7', + platform='java8', ) diff --git a/src/scala/org/pantsbuild/zinc/BUILD b/src/scala/org/pantsbuild/zinc/compiler/BUILD similarity index 53% rename from src/scala/org/pantsbuild/zinc/BUILD rename to src/scala/org/pantsbuild/zinc/compiler/BUILD index 02dd409f8d7..cf1e97d8887 100644 --- a/src/scala/org/pantsbuild/zinc/BUILD +++ b/src/scala/org/pantsbuild/zinc/compiler/BUILD @@ -1,19 +1,25 @@ +# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
+ scala_library( provides=scala_artifact( org='org.pantsbuild', - name='zinc', + name='zinc-compiler', repo=public, publication_metadata=pants_library('The SBT incremental compiler for nailgun') ), dependencies=[ + '3rdparty/jvm/org/scala-lang/modules:scala-java8-compat', '3rdparty/jvm/org/scala-sbt:io', '3rdparty/jvm/org/scala-sbt:util-logging', '3rdparty/jvm/org/scala-sbt:zinc', '3rdparty:guava', '3rdparty:jsr305', + 'src/scala/org/pantsbuild/zinc/analysis', 'src/scala/org/pantsbuild/zinc/cache', - 'src/scala/org/pantsbuild/zinc/logging', + 'src/scala/org/pantsbuild/zinc/options', + 'src/scala/org/pantsbuild/zinc/util', ], strict_deps=True, - platform='java7', + platform='java8', ) diff --git a/src/scala/org/pantsbuild/zinc/compiler/CompilerCacheKey.scala b/src/scala/org/pantsbuild/zinc/compiler/CompilerCacheKey.scala new file mode 100644 index 00000000000..593baf898d5 --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/compiler/CompilerCacheKey.scala @@ -0,0 +1,70 @@ +/** + * Copyright (C) 2012 Typesafe, Inc. + */ + +package org.pantsbuild.zinc.compiler + +import java.io.File +import java.util.{ List => JList } +import scala.collection.JavaConverters._ + +import sbt.io.syntax._ +import sbt.util.Logger + +/** + * All identity-affecting options for a zinc compiler. All fields in this struct + * must have a useful definition of equality. + */ +case class CompilerCacheKey( + scalaCompiler: File, + scalaLibrary: File, + scalaExtra: Seq[File], + compilerBridgeSrc: File, + compilerInterface: File, + javaHome: Option[File], + forkJava: Boolean, + cacheDir: File) + +object CompilerCacheKey { + + /** + * Create compiler setup from command-line settings. 
+ */ + def apply(settings: Settings): CompilerCacheKey = { + val scalaJars = InputUtils.selectScalaJars(settings.scala) + val (compilerBridgeSrc, compilerInterface) = settings.sbt.jars + setup( + scalaJars.compiler, + scalaJars.library, + scalaJars.extra, + compilerBridgeSrc, + compilerInterface, + settings.javaHome, + settings.forkJava, + settings.zincCacheDir + ) + } + + /** + * Create normalised and defaulted CompilerCacheKey. + */ + def setup( + scalaCompiler: File, + scalaLibrary: File, + scalaExtra: Seq[File], + compilerBridgeSrc: File, + compilerInterface: File, + javaHomeDir: Option[File], + forkJava: Boolean, + cacheDir: File + ): CompilerCacheKey = { + val normalise: File => File = { _.getAbsoluteFile } + val compilerJar = normalise(scalaCompiler) + val libraryJar = normalise(scalaLibrary) + val extraJars = scalaExtra map normalise + val compilerBridgeJar = normalise(compilerBridgeSrc) + val compilerInterfaceJar = normalise(compilerInterface) + val javaHome = javaHomeDir map normalise + CompilerCacheKey(compilerJar, libraryJar, extraJars, compilerBridgeJar, compilerInterfaceJar, javaHome, forkJava, cacheDir) + } +} diff --git a/src/scala/org/pantsbuild/zinc/compiler/CompilerUtils.scala b/src/scala/org/pantsbuild/zinc/compiler/CompilerUtils.scala new file mode 100644 index 00000000000..89f34b62c50 --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/compiler/CompilerUtils.scala @@ -0,0 +1,154 @@ +/** + * Copyright (C) 2012 Typesafe, Inc. 
+ */ + +package org.pantsbuild.zinc.compiler + +import java.io.File +import java.net.URLClassLoader +import sbt.internal.inc.{ + AnalyzingCompiler, + CompileOutput, + IncrementalCompilerImpl, + RawCompiler, + ScalaInstance, + javac, + ZincUtil +} +import sbt.io.Path +import sbt.io.syntax._ +import sbt.util.Logger +import xsbti.compile.{ + ClasspathOptionsUtil, + CompilerCache, + Compilers, + GlobalsCache, + Inputs, + JavaTools, + ScalaCompiler, + ScalaInstance => XScalaInstance, + ZincCompilerUtil +} + +import scala.compat.java8.OptionConverters._ + +import org.pantsbuild.zinc.cache.Cache +import org.pantsbuild.zinc.cache.Cache.Implicits +import org.pantsbuild.zinc.util.Util + +object CompilerUtils { + val CompilerInterfaceId = "compiler-interface" + val JavaClassVersion = System.getProperty("java.class.version") + + private val compilerCacheLimit = Util.intProperty("zinc.compiler.cache.limit", 5) + private val residentCacheLimit = Util.intProperty("zinc.resident.cache.limit", 0) + + /** + * Static cache for zinc compilers. + */ + private val compilerCache = Cache[CompilerCacheKey, Compilers](compilerCacheLimit) + + /** + * Static cache for resident scala compilers. + */ + private val residentCache: GlobalsCache = { + val maxCompilers = residentCacheLimit + if (maxCompilers <= 0) + CompilerCache.fresh + else + CompilerCache.createCacheFor(maxCompilers) + } + + /** + * Get or create a zinc compiler based on compiler setup. + */ + def getOrCreate(settings: Settings, log: Logger): Compilers = { + val setup = CompilerCacheKey(settings) + compilerCache.getOrElseUpdate(setup) { + val instance = scalaInstance(setup) + val interfaceJar = compilerInterface(setup, instance, log) + val scalac = newScalaCompiler(instance, interfaceJar) + ZincUtil.compilers(instance, ClasspathOptionsUtil.auto, setup.javaHome, scalac) + } + } + + /** + * Get the instance of the GlobalsCache. + */ + def getGlobalsCache = residentCache + + /** + * Create a new scala compiler. 
+ */ + def newScalaCompiler(instance: XScalaInstance, interfaceJar: File): AnalyzingCompiler = + new AnalyzingCompiler( + instance, + ZincCompilerUtil.constantBridgeProvider(instance, interfaceJar), + ClasspathOptionsUtil.auto, + _ => (), + // TODO: Should likely use the classloader cache here: + // see https://github.com/pantsbuild/pants/issues/4744 + None + ) + + /** + * Create the scala instance for the compiler. Includes creating the classloader. + */ + def scalaInstance(setup: CompilerCacheKey): XScalaInstance = { + import setup.{scalaCompiler, scalaExtra, scalaLibrary} + val allJars = scalaLibrary +: scalaCompiler +: scalaExtra + val loader = scalaLoader(allJars) + val version = scalaVersion(loader) + new ScalaInstance(version.getOrElse("unknown"), loader, scalaLibrary, scalaCompiler, allJars.toArray, version) + } + + /** + * Create a new classloader with the root loader as parent (to avoid zinc itself being included). + */ + def scalaLoader(jars: Seq[File]) = + new URLClassLoader( + Path.toURLs(jars), + sbt.internal.inc.classpath.ClasspathUtilities.rootLoader + ) + + /** + * Get the actual scala version from the compiler.properties in a classloader. + * The classloader should only contain one version of scala. + */ + def scalaVersion(scalaLoader: ClassLoader): Option[String] = { + Util.propertyFromResource("compiler.properties", "version.number", scalaLoader) + } + + /** + * Get the compiler interface for this compiler setup. Compile it if not already cached. + * NB: This usually occurs within the compilerCache entry lock, but in the presence of + * multiple zinc processes (ie, without nailgun) we need to be more careful not to clobber + * another compilation attempt. 
+ */ + def compilerInterface(setup: CompilerCacheKey, scalaInstance: XScalaInstance, log: Logger): File = { + def compile(targetJar: File): Unit = + AnalyzingCompiler.compileSources( + Seq(setup.compilerBridgeSrc), + targetJar, + Seq(setup.compilerInterface), + CompilerInterfaceId, + new RawCompiler(scalaInstance, ClasspathOptionsUtil.auto, log), + log + ) + val dir = setup.cacheDir / interfaceId(scalaInstance.actualVersion) + val interfaceJar = dir / (CompilerInterfaceId + ".jar") + if (!interfaceJar.isFile) { + dir.mkdirs() + val tempJar = File.createTempFile("interface-", ".jar.tmp", dir) + try { + compile(tempJar) + tempJar.renameTo(interfaceJar) + } finally { + tempJar.delete() + } + } + interfaceJar + } + + def interfaceId(scalaVersion: String) = CompilerInterfaceId + "-" + scalaVersion + "-" + JavaClassVersion +} diff --git a/src/scala/org/pantsbuild/zinc/compiler/InputUtils.scala b/src/scala/org/pantsbuild/zinc/compiler/InputUtils.scala new file mode 100644 index 00000000000..86f39a5749a --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/compiler/InputUtils.scala @@ -0,0 +1,206 @@ +/** + * Copyright (C) 2012 Typesafe, Inc. + */ + +package org.pantsbuild.zinc.compiler + +import java.io.{File, IOException} +import java.lang.{ Boolean => JBoolean } +import java.util.function.{ Function => JFunction } +import java.util.{ List => JList, Map => JMap } + +import scala.collection.JavaConverters._ +import scala.compat.java8.OptionConverters._ +import scala.util.matching.Regex + +import sbt.io.IO +import sbt.util.Logger +import xsbti.{Position, Problem, Severity, ReporterConfig, ReporterUtil} +import xsbti.compile.{ + AnalysisStore, + CompileOptions, + CompileOrder, + Compilers, + Inputs, + PreviousResult, + Setup +} + +import org.pantsbuild.zinc.analysis.AnalysisMap + +object InputUtils { + /** + * Create Inputs based on command-line settings. 
+ */ + def create( + settings: Settings, + analysisMap: AnalysisMap, + previousResult: PreviousResult, + log: Logger + ): Inputs = { + import settings._ + + val compilers = CompilerUtils.getOrCreate(settings, log) + + // TODO: Remove duplication once on Scala 2.12.x. + val positionMapper = + new JFunction[Position, Position] { + override def apply(p: Position): Position = p + } + + val compileOptions = + CompileOptions + .create() + .withClasspath( + autoClasspath( + classesDirectory, + compilers.scalac().scalaInstance().allJars, + javaOnly, + classpath + ).toArray + ) + .withSources(sources.toArray) + .withClassesDirectory(classesDirectory) + .withScalacOptions(scalacOptions.toArray) + .withJavacOptions(javacOptions.toArray) + .withOrder(compileOrder) + val reporter = + ReporterUtil.getDefault( + ReporterConfig.create( + "", + Int.MaxValue, + true, + settings.consoleLog.msgPredicates.toArray, + settings.consoleLog.filePredicates.toArray, + settings.consoleLog.javaLogLevel, + positionMapper + ) + ) + val setup = + Setup.create( + analysisMap.getPCELookup, + false, + settings.analysis.cache, + CompilerUtils.getGlobalsCache, + incOptions.options(log), + reporter, + None.asJava, + Array() + ) + + Inputs.create( + compilers, + compileOptions, + setup, + previousResult + ) + } + + /** + * Load the analysis for the destination, creating it if necessary. + */ + def loadDestinationAnalysis( + settings: Settings, + analysisMap: AnalysisMap, + log: Logger + ): (AnalysisStore, PreviousResult) = { + def load() = { + val analysisStore = analysisMap.cachedStore(settings.analysis.cache) + analysisStore.get().asScala match { + case Some(a) => (analysisStore, Some(a.getAnalysis), Some(a.getMiniSetup)) + case _ => (analysisStore, None, None) + } + } + + // Try loading, and optionally remove/retry on failure. 
+ val (analysisStore, previousAnalysis, previousSetup) = + try { + load() + } catch { + case e: Throwable if settings.analysis.clearInvalid => + // Remove the corrupted analysis and output directory. + log.warn(s"Failed to load analysis from ${settings.analysis.cache} ($e): will execute a clean compile.") + IO.delete(settings.analysis.cache) + IO.delete(settings.classesDirectory) + load() + } + (analysisStore, PreviousResult.create(previousAnalysis.asJava, previousSetup.asJava)) + } + + /** + * Automatically add the output directory and scala library to the classpath. + */ + def autoClasspath(classesDirectory: File, allScalaJars: Seq[File], javaOnly: Boolean, classpath: Seq[File]): Seq[File] = { + if (javaOnly) classesDirectory +: classpath + else splitScala(allScalaJars) match { + case Some(scalaJars) => classesDirectory +: scalaJars.library +: classpath + case None => classesDirectory +: classpath + } + } + + /** + * Select the scala jars. + * + * Prefer the explicit scala-compiler, scala-library, and scala-extra settings, + * then the scala-path setting, then the scala-home setting. Default to bundled scala. + */ + def selectScalaJars(scala: ScalaLocation): ScalaJars = { + val jars = splitScala(scala.path) getOrElse Defaults.scalaJars + ScalaJars( + scala.compiler getOrElse jars.compiler, + scala.library getOrElse jars.library, + scala.extra ++ jars.extra + ) + } + + /** + * Distinguish the compiler and library jars. 
+ */ + def splitScala(jars: Seq[File], excluded: Set[String] = Set.empty): Option[ScalaJars] = { + val filtered = jars filterNot (excluded contains _.getName) + val (compiler, other) = filtered partition (_.getName matches ScalaCompiler.pattern) + val (library, extra) = other partition (_.getName matches ScalaLibrary.pattern) + if (compiler.nonEmpty && library.nonEmpty) Some(ScalaJars(compiler(0), library(0), extra)) else None + } + + // + // Default setup + // + + val ScalaCompiler = JarFile("scala-compiler") + val ScalaLibrary = JarFile("scala-library") + val ScalaReflect = JarFile("scala-reflect") + val CompilerBridgeSources = JarFile("compiler-bridge", "sources") + val CompilerInterface = JarFile("compiler-interface") + + // TODO: The default jar locations here are definitely not helpful, but the existence + // of "some" value for each of these is assumed in a few places. Should remove and make + // them optional to more cleanly support Java-only compiles. + object Defaults { + val scalaCompiler = ScalaCompiler.default + val scalaLibrary = ScalaLibrary.default + val scalaExtra = Seq(ScalaReflect.default) + val scalaJars = ScalaJars(scalaCompiler, scalaLibrary, scalaExtra) + val scalaExcluded = Set("jansi.jar", "jline.jar", "scala-partest.jar", "scala-swing.jar", "scalacheck.jar", "scalap.jar") + } + + /** + * Jar file description for locating jars. + */ + case class JarFile(name: String, classifier: Option[String] = None) { + val versionPattern = "(-.*)?" + val classifierString = classifier map ("-" + _) getOrElse "" + val extension = "jar" + val pattern = name + versionPattern + classifierString + "." + extension + val default = new File(name + classifierString + "." + extension) + } + + object JarFile { + def apply(name: String, classifier: String): JarFile = JarFile(name, Some(classifier)) + } + + /** + * The scala jars split into compiler, library, and extra. 
+ */ + case class ScalaJars(compiler: File, library: File, extra: Seq[File]) +} diff --git a/src/scala/org/pantsbuild/zinc/compiler/Main.scala b/src/scala/org/pantsbuild/zinc/compiler/Main.scala new file mode 100644 index 00000000000..a58a36b7e71 --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/compiler/Main.scala @@ -0,0 +1,131 @@ +/** + * Copyright (C) 2012 Typesafe, Inc. + */ + +package org.pantsbuild.zinc.compiler + +import sbt.internal.inc.IncrementalCompilerImpl +import sbt.internal.util.{ ConsoleLogger, ConsoleOut } +import sbt.util.Level +import xsbti.CompileFailed + +import org.pantsbuild.zinc.analysis.AnalysisMap +import org.pantsbuild.zinc.options.Parsed +import org.pantsbuild.zinc.util.Util + +/** + * Command-line main class. + */ +object Main { + val Command = "zinc-compiler" + val Description = "scala incremental compiler" + + /** + * Full zinc version info. + */ + case class Version(published: String, timestamp: String, commit: String) + + /** + * Get the zinc version from a generated properties file. + */ + lazy val zincVersion: Version = { + val props = Util.propertiesFromResource("zinc.version.properties", getClass.getClassLoader) + Version( + props.getProperty("version", "unknown"), + props.getProperty("timestamp", ""), + props.getProperty("commit", "") + ) + } + + /** + * For snapshots the zinc version includes timestamp and commit. + */ + lazy val versionString: String = { + import zincVersion._ + if (published.endsWith("-SNAPSHOT")) "%s %s-%s" format (published, timestamp, commit take 10) + else published + } + + /** + * Print the zinc version to standard out. + */ + def printVersion(): Unit = println("%s (%s) %s" format (Command, Description, versionString)) + + def mkLogger(settings: Settings) = { + val cl = + ConsoleLogger( + out = ConsoleOut.systemOut, + ansiCodesSupported = settings.consoleLog.color + ) + cl.setLevel(settings.consoleLog.logLevel) + cl + } + + /** + * Run a compile. 
+ */ + def main(args: Array[String]): Unit = { + val startTime = System.currentTimeMillis + + val Parsed(settings, residual, errors) = Settings.parse(args) + + val log = mkLogger(settings) + val isDebug = settings.consoleLog.logLevel <= Level.Debug + + // bail out on any command-line option errors + if (errors.nonEmpty) { + for (error <- errors) log.error(error) + log.error("See %s -help for information about options" format Command) + sys.exit(1) + } + + if (settings.version) printVersion() + + if (settings.help) Settings.printUsage(Command, residualArgs = "") + + // if there are no sources provided, print outputs based on current analysis if requested, + // else print version and usage by default + if (settings.sources.isEmpty) { + if (!settings.version && !settings.help) { + printVersion() + Settings.printUsage(Command) + sys.exit(1) + } + sys.exit(0) + } + + // Load the existing analysis for the destination, if any. + val analysisMap = AnalysisMap.create(settings.analysis) + val (targetAnalysisStore, previousResult) = + InputUtils.loadDestinationAnalysis(settings, analysisMap, log) + val inputs = InputUtils.create(settings, analysisMap, previousResult, log) + + if (isDebug) { + log.debug(s"Inputs: $inputs") + } + + try { + // Run the compile. + val result = new IncrementalCompilerImpl().compile(inputs, log) + + // Store the output if the result changed. 
+ if (result.hasModified) { + targetAnalysisStore.set( + // TODO + sbt.internal.inc.ConcreteAnalysisContents(result.analysis, result.setup) + ) + } + + log.info("Compile success " + Util.timing(startTime)) + } catch { + case e: CompileFailed => + log.error("Compile failed " + Util.timing(startTime)) + sys.exit(1) + case e: Exception => + if (isDebug) e.printStackTrace + val message = e.getMessage + if (message ne null) log.error(message) + sys.exit(1) + } + } +} diff --git a/src/scala/org/pantsbuild/zinc/compiler/Settings.scala b/src/scala/org/pantsbuild/zinc/compiler/Settings.scala new file mode 100644 index 00000000000..c18d2208f8a --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/compiler/Settings.scala @@ -0,0 +1,319 @@ +/** + * Copyright (C) 2012 Typesafe, Inc. + */ + +package org.pantsbuild.zinc.compiler + +import java.io.File +import java.nio.file.Path +import java.lang.{ Boolean => JBoolean } +import java.util.function.{ Function => JFunction } +import java.util.{ List => JList, logging => jlogging } + +import scala.collection.JavaConverters._ +import scala.compat.java8.OptionConverters._ +import scala.util.matching.Regex + +import sbt.io.Path._ +import sbt.io.syntax._ +import sbt.util.{Level, Logger} +import xsbti.compile.{ + ClassFileManagerType, + CompileOrder, + IncOptionsUtil, + TransactionalManagerType +} +import xsbti.compile.{IncOptions => ZincIncOptions} + +import org.pantsbuild.zinc.analysis.AnalysisOptions +import org.pantsbuild.zinc.options.OptionSet + +/** + * All parsed command-line options. 
+ */ +case class Settings( + help: Boolean = false, + version: Boolean = false, + consoleLog: ConsoleOptions = ConsoleOptions(), + _sources: Seq[File] = Seq.empty, + classpath: Seq[File] = Seq.empty, + _classesDirectory: Option[File] = None, + scala: ScalaLocation = ScalaLocation(), + scalacOptions: Seq[String] = Seq.empty, + javaHome: Option[File] = None, + forkJava: Boolean = false, + _zincCacheDir: Option[File] = None, + javaOnly: Boolean = false, + javacOptions: Seq[String] = Seq.empty, + compileOrder: CompileOrder = CompileOrder.Mixed, + sbt: SbtJars = SbtJars(), + _incOptions: IncOptions = IncOptions(), + analysis: AnalysisOptions = AnalysisOptions() +) { + import Settings._ + + lazy val zincCacheDir: File = _zincCacheDir.getOrElse { + throw new RuntimeException(s"The ${Settings.ZincCacheDirOpt} option is required.") + } + + lazy val sources: Seq[File] = _sources map normalise + + lazy val classesDirectory: File = + normalise( + _classesDirectory.getOrElse { + throw new RuntimeException(s"The ${Settings.ZincCacheDirOpt} option is required.") + } + ) + + lazy val incOptions: IncOptions = { + _incOptions.copy( + apiDumpDirectory = _incOptions.apiDumpDirectory map normalise, + backup = { + if (_incOptions.transactional) + Some(normalise(_incOptions.backup.getOrElse(defaultBackupLocation(classesDirectory)))) + else + None + } + ) + } +} + +/** + * Console logging options. + */ +case class ConsoleOptions( + logLevel: Level.Value = Level.Info, + color: Boolean = true, + fileFilters: Seq[Regex] = Seq.empty, + msgFilters: Seq[Regex] = Seq.empty +) { + def javaLogLevel: jlogging.Level = logLevel match { + case Level.Info => + jlogging.Level.INFO + case Level.Warn => + jlogging.Level.WARNING + case Level.Error => + jlogging.Level.SEVERE + case Level.Debug => + jlogging.Level.FINE + case x => + sys.error(s"Unsupported log level: $x") + } + + /** + * Because filtering Path objects requires first converting to a String, we compose + * the regexes into one predicate. 
+ */ + def filePredicates: Seq[JFunction[Path, JBoolean]] = + Seq( + new JFunction[Path, JBoolean] { + def apply(path: Path) = { + val pathStr = path.toString + fileFilters.exists(_.findFirstIn(pathStr).isDefined) + } + } + ) + + def msgPredicates: Seq[JFunction[String, JBoolean]] = + msgFilters.map { regex => + new JFunction[String, JBoolean] { + def apply(msg: String) = regex.findFirstIn(msg).isDefined + } + } +} + +/** + * Alternative ways to locate the scala jars. + */ +case class ScalaLocation( + home: Option[File] = None, + path: Seq[File] = Seq.empty, + compiler: Option[File] = None, + library: Option[File] = None, + extra: Seq[File] = Seq.empty +) + +object ScalaLocation { + /** + * Java API for creating ScalaLocation. + */ + def create( + home: File, + path: JList[File], + compiler: File, + library: File, + extra: JList[File]): ScalaLocation = + ScalaLocation( + Option(home), + path.asScala, + Option(compiler), + Option(library), + extra.asScala + ) + + /** + * Java API for creating ScalaLocation with scala home. + */ + def fromHome(home: File) = ScalaLocation(home = Option(home)) + + /** + * Java API for creating ScalaLocation with scala path. + */ + def fromPath(path: JList[File]) = ScalaLocation(path = path.asScala) +} + +/** + * Locating the sbt jars needed for zinc compile. + */ +case class SbtJars( + compilerBridgeSrc: Option[File] = None, + compilerInterface: Option[File] = None +) { + lazy val jars: (File, File) = (compilerBridgeSrc, compilerInterface) match { + case (Some(x), Some(y)) if x.exists && y.exists => (x, y) + case (Some(x), Some(y)) => + throw new RuntimeException(s"One or both of $x and $y do not exist.") + case _ => + throw new RuntimeException( + s"Both the ${Settings.CompilerBridgeOpt} and " + + s"${Settings.CompilerInterfaceOpt} options are required." + ) + } +} + +/** + * Wrapper around incremental compiler options. 
+ */ +case class IncOptions( + transitiveStep: Int = ZincIncOptions.defaultTransitiveStep, + recompileAllFraction: Double = ZincIncOptions.defaultRecompileAllFraction, + relationsDebug: Boolean = ZincIncOptions.defaultRelationsDebug, + apiDebug: Boolean = ZincIncOptions.defaultApiDebug, + apiDiffContextSize: Int = ZincIncOptions.defaultApiDiffContextSize, + apiDumpDirectory: Option[File] = ZincIncOptions.defaultApiDumpDirectory.asScala, + transactional: Boolean = false, + useZincFileManager: Boolean = true, + backup: Option[File] = None +) { + def options(log: Logger): ZincIncOptions = + ZincIncOptions.create() + .withTransitiveStep(transitiveStep) + .withRecompileAllFraction(recompileAllFraction) + .withRelationsDebug(relationsDebug) + .withApiDebug(apiDebug) + .withApiDiffContextSize(apiDiffContextSize) + .withApiDumpDirectory(apiDumpDirectory.asJava) + .withClassfileManagerType(classfileManager(log).asJava) + .withUseCustomizedFileManager(useZincFileManager) + + def classfileManager(log: Logger): Option[ClassFileManagerType] = + if (transactional && backup.isDefined) + Some(TransactionalManagerType.create(backup.get, log)) + else + None +} + +object Settings extends OptionSet[Settings] { + val DestinationOpt = "-d" + val ZincCacheDirOpt = "-zinc-cache-dir" + val CompilerBridgeOpt = "-compiler-bridge" + val CompilerInterfaceOpt = "-compiler-interface" + + override def empty = Settings() + + override def applyResidual(t: Settings, residualArgs: Seq[String]) = + t.copy(_sources = residualArgs map (new File(_))) + + override val options = Seq( + header("Output options:"), + boolean( ("-help", "-h"), "Print this usage message", (s: Settings) => s.copy(help = true)), + boolean( "-version", "Print version", (s: Settings) => s.copy(version = true)), + + header("Logging Options:"), + boolean( "-debug", "Set log level for stdout to debug", + (s: Settings) => s.copy(consoleLog = s.consoleLog.copy(logLevel = Level.Debug))), + string( "-log-level", "level", "Set log level 
for stdout (debug|info|warn|error)", + (s: Settings, l: String) => s.copy(consoleLog = s.consoleLog.copy(logLevel = Level.withName(l)))), + boolean( "-no-color", "No color in logging to stdout", + (s: Settings) => s.copy(consoleLog = s.consoleLog.copy(color = false))), + string( "-msg-filter", "regex", "Filter warning messages matching the given regex", + (s: Settings, re: String) => s.copy(consoleLog = s.consoleLog.copy(msgFilters = s.consoleLog.msgFilters :+ re.r))), + string( "-file-filter", "regex", "Filter warning messages from filenames matching the given regex", + (s: Settings, re: String) => s.copy(consoleLog = s.consoleLog.copy(fileFilters = s.consoleLog.fileFilters :+ re.r))), + + header("Compile options:"), + path( ("-classpath", "-cp"), "path", "Specify the classpath", (s: Settings, cp: Seq[File]) => s.copy(classpath = cp)), + file( DestinationOpt, "directory", "Destination for compiled classes", (s: Settings, f: File) => s.copy(_classesDirectory = Some(f))), + + header("Scala options:"), + file( "-scala-home", "directory", "Scala home directory (for locating jars)", (s: Settings, f: File) => s.copy(scala = s.scala.copy(home = Some(f)))), + path( "-scala-path", "path", "Specify all Scala jars directly", (s: Settings, sp: Seq[File]) => s.copy(scala = s.scala.copy(path = sp))), + file( "-scala-compiler", "file", "Specify Scala compiler jar directly" , (s: Settings, f: File) => s.copy(scala = s.scala.copy(compiler = Some(f)))), + file( "-scala-library", "file", "Specify Scala library jar directly" , (s: Settings, f: File) => s.copy(scala = s.scala.copy(library = Some(f)))), + path( "-scala-extra", "path", "Specify extra Scala jars directly", (s: Settings, e: Seq[File]) => s.copy(scala = s.scala.copy(extra = e))), + prefix( "-S", "", "Pass option to scalac", (s: Settings, o: String) => s.copy(scalacOptions = s.scalacOptions :+ o)), + + header("Java options:"), + file( "-java-home", "directory", "Select javac home directory (and fork)", (s: Settings, f: 
File) => s.copy(javaHome = Some(f))), + boolean( "-fork-java", "Run java compiler in separate process", (s: Settings) => s.copy(forkJava = true)), + string( "-compile-order", "order", "Compile order for Scala and Java sources", (s: Settings, o: String) => s.copy(compileOrder = compileOrder(o))), + boolean( "-java-only", "Don't add scala library to classpath", (s: Settings) => s.copy(javaOnly = true)), + prefix( "-C", "", "Pass option to javac", (s: Settings, o: String) => s.copy(javacOptions = s.javacOptions :+ o)), + + header("sbt options:"), + file( CompilerBridgeOpt, "file", "Specify compiler bridge sources jar", (s: Settings, f: File) => s.copy(sbt = s.sbt.copy(compilerBridgeSrc = Some(f)))), + file( CompilerInterfaceOpt, "file", "Specify compiler interface jar", (s: Settings, f: File) => s.copy(sbt = s.sbt.copy(compilerInterface = Some(f)))), + file( ZincCacheDirOpt, "file", "A cache directory for compiler interfaces", (s: Settings, f: File) => s.copy(_zincCacheDir = Some(f))), + + header("Incremental compiler options:"), + int( "-transitive-step", "n", "Steps before transitive closure", (s: Settings, i: Int) => s.copy(_incOptions = s._incOptions.copy(transitiveStep = i))), + fraction( "-recompile-all-fraction", "x", "Limit before recompiling all sources", (s: Settings, d: Double) => s.copy(_incOptions = s._incOptions.copy(recompileAllFraction = d))), + boolean( "-debug-relations", "Enable debug logging of analysis relations", (s: Settings) => s.copy(_incOptions = s._incOptions.copy(relationsDebug = true))), + boolean( "-debug-api", "Enable analysis API debugging", (s: Settings) => s.copy(_incOptions = s._incOptions.copy(apiDebug = true))), + file( "-api-dump", "directory", "Destination for analysis API dump", (s: Settings, f: File) => s.copy(_incOptions = s._incOptions.copy(apiDumpDirectory = Some(f)))), + int( "-api-diff-context-size", "n", "Diff context size (in lines) for API debug", (s: Settings, i: Int) => s.copy(_incOptions = 
s._incOptions.copy(apiDiffContextSize = i))), + boolean( "-transactional", "Restore previous class files on failure", (s: Settings) => s.copy(_incOptions = s._incOptions.copy(transactional = true))), + boolean( "-no-zinc-file-manager", "Disable zinc provided file manager", (s: Settings) => s.copy(_incOptions = s._incOptions.copy(useZincFileManager = false))), + file( "-backup", "directory", "Backup location (if transactional)", (s: Settings, f: File) => s.copy(_incOptions = s._incOptions.copy(backup = Some(f)))), + + header("Analysis options:"), + file( "-analysis-cache", "file", "Cache file for compile analysis", (s: Settings, f: File) => s.copy(analysis = + s.analysis.copy(_cache = Some(f)))), + fileMap( "-analysis-map", "Upstream analysis mapping (file:file,...)", + (s: Settings, m: Map[File, File]) => s.copy(analysis = s.analysis.copy(cacheMap = m))), + fileMap( "-rebase-map", "Source and destination paths to rebase in persisted analysis (file:file,...)", + (s: Settings, m: Map[File, File]) => s.copy(analysis = s.analysis.copy(rebaseMap = m))), + boolean( "-no-clear-invalid-analysis", "If set, zinc will fail rather than purging illegal analysis.", + (s: Settings) => s.copy(analysis = s.analysis.copy(clearInvalid = false))) + ) + + /** + * Create a CompileOrder value based on string input. + */ + def compileOrder(order: String): CompileOrder = { + order.toLowerCase match { + case "mixed" => CompileOrder.Mixed + case "java" | "java-then-scala" | "javathenscala" => CompileOrder.JavaThenScala + case "scala" | "scala-then-java" | "scalathenjava" => CompileOrder.ScalaThenJava + } + } + + /** + * Normalise all relative paths to absolute paths. + */ + def normalise(f: File): File = f.getAbsoluteFile + + /** + * By default the cache location is relative to the classes directory (for example, target/classes/../cache/classes). 
+ */ + def defaultCacheLocation(classesDir: File) = { + classesDir.getParentFile / "cache" / classesDir.getName + } + + /** + * By default the backup location is relative to the classes directory (for example, target/classes/../backup/classes). + */ + def defaultBackupLocation(classesDir: File) = { + classesDir.getParentFile / "backup" / classesDir.getName + } +} diff --git a/src/scala/org/pantsbuild/zinc/extractor/BUILD b/src/scala/org/pantsbuild/zinc/extractor/BUILD new file mode 100644 index 00000000000..303c45f5324 --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/extractor/BUILD @@ -0,0 +1,20 @@ +# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +scala_library( + provides=scala_artifact( + org='org.pantsbuild', + name='zinc-extractor', + repo=public, + publication_metadata=pants_library('The SBT incremental compiler for nailgun') + ), + dependencies=[ + '3rdparty/jvm/com/fasterxml/jackson/module:scala', + '3rdparty/jvm/org/scala-lang/modules:scala-java8-compat', + '3rdparty/jvm/org/scala-sbt:zinc', + 'src/scala/org/pantsbuild/zinc/analysis', + 'src/scala/org/pantsbuild/zinc/options', + ], + strict_deps=True, + platform='java8', +) diff --git a/src/scala/org/pantsbuild/zinc/extractor/Extractor.scala b/src/scala/org/pantsbuild/zinc/extractor/Extractor.scala new file mode 100644 index 00000000000..e4efc5ef264 --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/extractor/Extractor.scala @@ -0,0 +1,78 @@ +/** + * Copyright (C) 2017 Pants project contributors (see CONTRIBUTORS.md). + * Licensed under the Apache License, Version 2.0 (see LICENSE). + */ + +package org.pantsbuild.zinc.extractor + +import java.io.File + +import scala.collection.mutable + +import sbt.internal.inc.{Analysis, FileAnalysisStore, Locate} + +import xsbti.compile.CompileAnalysis + +import org.pantsbuild.zinc.analysis.AnalysisMap + +/** + * Class to encapsulate extracting information from zinc analysis. 
+ */ +class Extractor( + classpath: Seq[File], + analysis: CompileAnalysis, + analysisMap: AnalysisMap +) { + private val relations = analysis.asInstanceOf[Analysis].relations + + // A lookup from classname to defining classpath entry File. + private val definesClass = Locate.entry(classpath, analysisMap.getPCELookup) + + /** + * Extract a mapping from source file to produced classfiles. + */ + def products: Map[File, Set[File]] = + relations + .allSources + .toSeq + .map { source => + source -> relations.products(source) + } + .toMap + + /** + * Extract all file or classname dependencies of this compilation unit that can be + * determined from analysis. + */ + def dependencies: collection.Map[File, collection.Set[File]] = { + val mm = new mutable.HashMap[File, mutable.Set[File]] with mutable.MultiMap[File, File] + + // Look up the external deps for each classfile for each sourcefile. + for { + source <- relations.allSources + sourceClassname <- relations.classNames(source) + classname <- relations.externalDeps(sourceClassname) + dep <- warningDefinesClass(classname) + } { + mm.addBinding(source, dep) + } + + // And library dependencies. + for { + source <- relations.allSources + dep <- relations.libraryDeps(source) + } { + mm.addBinding(source, dep) + } + + mm + } + + private def warningDefinesClass(classname: String): Option[File] = + definesClass(classname).orElse { + // This case should be rare: should only occur when a compiler plugin generates + // additional classes. + System.err.println(s"No analysis declares class $classname") + None + } +} diff --git a/src/scala/org/pantsbuild/zinc/extractor/Main.scala b/src/scala/org/pantsbuild/zinc/extractor/Main.scala new file mode 100644 index 00000000000..b444ae0e8a5 --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/extractor/Main.scala @@ -0,0 +1,78 @@ +/** + * Copyright (C) 2017 Pants project contributors (see CONTRIBUTORS.md). + * Licensed under the Apache License, Version 2.0 (see LICENSE). 
+ */ + +package org.pantsbuild.zinc.extractor + +import java.io.File + +import scala.compat.java8.OptionConverters._ + +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.module.scala.DefaultScalaModule +import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper + +import org.pantsbuild.zinc.analysis.{AnalysisMap, PortableAnalysisMappers} +import org.pantsbuild.zinc.options.Parsed + +/** + * Command-line main class for analysis extraction. + */ +object Main { + val Command = "zinc-extractor" + + private val om = { + val mapper = new ObjectMapper with ScalaObjectMapper + mapper.registerModule(DefaultScalaModule) + mapper + } + + def main(args: Array[String]): Unit = { + val Parsed(settings, residual, errors) = Settings.parse(args) + + // bail out on any command-line option errors + if (errors.nonEmpty) { + for (error <- errors) System.err.println(error) + System.err.println("See %s -help for information about options" format Command) + sys.exit(1) + } + + if (settings.help) { + Settings.printUsage(Command) + return + } + + val summaryJson = + settings.summaryJson.getOrElse { + throw new RuntimeException(s"An output file is required.") + } + + // Load relevant analysis. + val analysisMap = AnalysisMap.create(settings.analysis) + val analysis = + analysisMap.cachedStore(settings.analysis.cache) + .get() + .asScala + .getOrElse { + throw new RuntimeException(s"Failed to load analysis from ${settings.analysis.cache}") + } + .getAnalysis + + // Extract products and dependencies. 
+ val extractor = new Extractor(settings.classpath, analysis, analysisMap) + + om.writeValue( + summaryJson, + Summary( + extractor.products, + extractor.dependencies + ) + ) + } +} + +case class Summary( + products: collection.Map[File, collection.Set[File]], + dependencies: collection.Map[File, collection.Set[File]] +) diff --git a/src/scala/org/pantsbuild/zinc/extractor/Settings.scala b/src/scala/org/pantsbuild/zinc/extractor/Settings.scala new file mode 100644 index 00000000000..1c9d54dd387 --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/extractor/Settings.scala @@ -0,0 +1,42 @@ +/** + * Copyright (C) 2012 Typesafe, Inc. + */ + +package org.pantsbuild.zinc.extractor + +import java.io.File + +import org.pantsbuild.zinc.options.OptionSet +import org.pantsbuild.zinc.analysis.AnalysisOptions + +/** + * All parsed command-line options. + */ +case class Settings( + help: Boolean = false, + summaryJson: Option[File] = None, + classpath: Seq[File] = Seq(), + analysis: AnalysisOptions = AnalysisOptions() +) + +object Settings extends OptionSet[Settings] { + override def empty = Settings() + + override val options = Seq( + header("Output options:"), + boolean( ("-help", "-h"), "Print this usage message", + (s: Settings) => s.copy(help = true)), + file( "-summary-json", "file", "Output file to write an analysis summary to.", + (s: Settings, f: File) => s.copy(summaryJson = Some(f))), + + header("Input options:"), + path( ("-classpath", "-cp"), "path", "Specify the classpath", + (s: Settings, cp: Seq[File]) => s.copy(classpath = cp)), + file( "-analysis-cache", "file", "Cache file for compile analysis", + (s: Settings, f: File) => s.copy(analysis = s.analysis.copy(_cache = Some(f)))), + fileMap( "-analysis-map", "Upstream analysis mapping (file:file,...)", + (s: Settings, m: Map[File, File]) => s.copy(analysis = s.analysis.copy(cacheMap = m))), + fileMap( "-rebase-map", "Source and destination paths to rebase in persisted analysis (file:file,...)", + (s: Settings, m: 
Map[File, File]) => s.copy(analysis = s.analysis.copy(rebaseMap = m))) + ) +} diff --git a/src/scala/org/pantsbuild/zinc/logging/Loggers.scala b/src/scala/org/pantsbuild/zinc/logging/Loggers.scala deleted file mode 100644 index 974dc5e4a1d..00000000000 --- a/src/scala/org/pantsbuild/zinc/logging/Loggers.scala +++ /dev/null @@ -1,75 +0,0 @@ -/** - * Copyright (C) 2012 Typesafe, Inc. - * Copyright (C) 2015 Pants project contributors (see CONTRIBUTORS.md). - * Licensed under the Apache License, Version 2.0 (see LICENSE). - */ - -package org.pantsbuild.zinc.logging - -import java.io.{ BufferedOutputStream, File, FileOutputStream, PrintWriter } - -import sbt.util.{ - AbstractLogger, - Level, - Logger -} - -import sbt.internal.util.{ - ConsoleLogger, - ConsoleOut, - MultiLogger, - FullLogger -} - -object Loggers { - /** - * Create a new console logger based on level and color settings. If captureLog is - * specified, a compound logger is created that will additionally log all output (unfiltered) - * to a file. - */ - def create( - level: Level.Value, - color: Boolean, - out: ConsoleOut = ConsoleOut.systemOut, - captureLog: Option[File] = None - ): Logger = { - // log to the console at the configured levels - val consoleLogger = { - val cl = ConsoleLogger(out, useColor = ConsoleLogger.formatEnabled && color) - cl.setLevel(level) - cl - } - // if a capture log was specified, add it as an additional destination - captureLog.map { captureLogFile => - // NB: we append to the capture log, in order to record the complete history of a compile - val fileLogger = { - val fl = new FullLogger(new FileLogger(captureLogFile, true)) - fl.setLevel(Level.Debug) - fl - } - new MultiLogger(List(consoleLogger, fileLogger)) - }.getOrElse { - consoleLogger - } - } -} - -/** - * A logger for an output file. - * - * TODO: The sbt logging interface doesn't expose `close`, so this flushes for every - * line to avoid dropping output on shutdown. 
- */ -class FileLogger(file: File, append: Boolean) extends Logger { - private val out = new PrintWriter(new BufferedOutputStream(new FileOutputStream(file, append))) - - override def log(level: Level.Value, msg: => String): Unit = { - out.println(s"[${level}]\t${msg}") - out.flush() - } - - def success(message: => String): Unit = - log(Level.Info, message) - - def trace(t: => Throwable): Unit = () -} diff --git a/src/scala/org/pantsbuild/zinc/logging/Reporters.scala b/src/scala/org/pantsbuild/zinc/logging/Reporters.scala deleted file mode 100644 index 9d69a2793d5..00000000000 --- a/src/scala/org/pantsbuild/zinc/logging/Reporters.scala +++ /dev/null @@ -1,58 +0,0 @@ -/** - * Copyright (C) 2015 Pants project contributors (see CONTRIBUTORS.md). - * Licensed under the Apache License, Version 2.0 (see LICENSE). - */ - -package org.pantsbuild.zinc.logging - -import sbt.internal.inc.LoggerReporter -import sbt.util.Logger -import xsbti.{ Position, Reporter, Severity } - -import scala.util.matching.Regex - -object Reporters { - def create( - log: Logger, - fileFilters: Seq[Regex], - msgFilters: Seq[Regex], - maximumErrors: Int = 100 - ): Reporter = - if (fileFilters.isEmpty && msgFilters.isEmpty) { - new LoggerReporter(maximumErrors, log) - } else { - new RegexFilterReporter(fileFilters, msgFilters, maximumErrors, log) - } -} - -/** - * Extends LoggerReporter to filter compile warnings that match various patterns. 
- */ -class RegexFilterReporter( - fileFilters: Seq[Regex], - msgFilters: Seq[Regex], - maximumErrors: Int, - log: Logger -) extends LoggerReporter( - maximumErrors, - log -) { - - private final def isFiltered(filters: Seq[Regex], str: String): Boolean = - filters.exists(_.findFirstIn(str).isDefined) - - private final def isFiltered(pos: Position, msg: String, severity: Severity): Boolean = - severity != Severity.Error && ( - (!pos.sourceFile.isEmpty && isFiltered(fileFilters, pos.sourceFile.get.getPath)) || ( - isFiltered(msgFilters, msg) - ) - ) - - override def display(pos: Position, msg: String, severity: Severity): Unit = - if (isFiltered(pos, msg, severity)) { - // the only side-effecting operation in the superclass - inc(severity) - } else { - super.display(pos, msg, severity) - } -} diff --git a/src/scala/org/pantsbuild/zinc/options/BUILD b/src/scala/org/pantsbuild/zinc/options/BUILD new file mode 100644 index 00000000000..45185453426 --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/options/BUILD @@ -0,0 +1,13 @@ +# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +scala_library( + provides=scala_artifact( + org='org.pantsbuild', + name='zinc-options', + repo=public, + publication_metadata=pants_library('The SBT incremental compiler for nailgun') + ), + strict_deps=True, + platform='java8', +) diff --git a/src/scala/org/pantsbuild/zinc/options/OptionSet.scala b/src/scala/org/pantsbuild/zinc/options/OptionSet.scala new file mode 100644 index 00000000000..6b7534dbd71 --- /dev/null +++ b/src/scala/org/pantsbuild/zinc/options/OptionSet.scala @@ -0,0 +1,72 @@ +/** + * Copyright (C) 2017 Pants project contributors (see CONTRIBUTORS.md). + * Licensed under the Apache License, Version 2.0 (see LICENSE). + */ + +package org.pantsbuild.zinc.options + +import java.io.File + +trait OptionSet[T] { + /** An empty set of options. 
*/ + def empty: T + + /** Apply any residual entries to an instance of T and return a new T. */ + def applyResidual(t: T, residualArgs: Seq[String]): T = + if (residualArgs.nonEmpty) { + throw new RuntimeException( + s"Unexpected residual arguments: ${residualArgs.mkString("[", ", ", "]")}" + ) + } else { + t + } + + /** All available command-line options. */ + def options: Seq[OptionDef[T]] + + private def allOptions: Set[OptionDef[T]] = options.toSet + + /** + * Print out the usage message. + */ + def printUsage(cmdName: String, residualArgs: String = ""): Unit = { + val column = options.map(_.length).max + 2 + println(s"Usage: ${cmdName} ${residualArgs}") + options foreach { opt => if (opt.extraline) println(); println(opt.usage(column)) } + println() + } + + /** + * Anything starting with '-' is considered an option, not a source file. + */ + private def isOpt(s: String) = s startsWith "-" + + /** + * Parse all args into a T. + * Residual args are either unknown options or applied. + */ + def parse(args: Seq[String]): Parsed[T] = { + val Parsed(instance, remaining, errors) = Options.parse(empty, allOptions, args, stopOnError = false) + val (unknown, residual) = remaining partition isOpt + val unknownErrors = unknown map ("Unknown option: " + _) + Parsed(applyResidual(instance, residual), Seq.empty, errors ++ unknownErrors) + } + + // helpers for creating options + + def boolean(opt: String, desc: String, action: T => T) = new BooleanOption[T](Seq(opt), desc, action) + def boolean(opts: (String, String), desc: String, action: T => T) = new BooleanOption[T](Seq(opts._1, opts._2), desc, action) + def string(opt: String, arg: String, desc: String, action: (T, String) => T) = new StringOption[T](Seq(opt), arg, desc, action) + def int(opt: String, arg: String, desc: String, action: (T, Int) => T) = new IntOption[T](Seq(opt), arg, desc, action) + def double(opt: String, arg: String, desc: String, action: (T, Double) => T) = new DoubleOption[T](Seq(opt), arg, desc, 
action) + def fraction(opt: String, arg: String, desc: String, action: (T, Double) => T) = new FractionOption[T](Seq(opt), arg, desc, action) + def file(opt: String, arg: String, desc: String, action: (T, File) => T) = new FileOption[T](Seq(opt), arg, desc, action) + def path(opt: String, arg: String, desc: String, action: (T, Seq[File]) => T) = new PathOption[T](Seq(opt), arg, desc, action) + def path(opts: (String, String), arg: String, desc: String, action: (T, Seq[File]) => T) = new PathOption[T](Seq(opts._1, opts._2), arg, desc, action) + def prefix(pre: String, arg: String, desc: String, action: (T, String) => T) = new PrefixOption[T](pre, arg, desc, action) + def filePair(opt: String, arg: String, desc: String, action: (T, (File, File)) => T) = new FilePairOption[T](Seq(opt), arg, desc, action) + def fileMap(opt: String, desc: String, action: (T, Map[File, File]) => T) = new FileMapOption[T](Seq(opt), desc, action) + def fileSeqMap(opt: String, desc: String, action: (T, Map[Seq[File], File]) => T) = new FileSeqMapOption[T](Seq(opt), desc, action) + def header(label: String) = new HeaderOption[T](label) + def dummy(opt: String, desc: String) = new DummyOption[T](opt, desc) +} diff --git a/src/scala/org/pantsbuild/zinc/Options.scala b/src/scala/org/pantsbuild/zinc/options/Options.scala similarity index 99% rename from src/scala/org/pantsbuild/zinc/Options.scala rename to src/scala/org/pantsbuild/zinc/options/Options.scala index 5faf8e7ab05..abaeabfb051 100644 --- a/src/scala/org/pantsbuild/zinc/Options.scala +++ b/src/scala/org/pantsbuild/zinc/options/Options.scala @@ -2,7 +2,7 @@ * Copyright (C) 2012 Typesafe, Inc. 
*/ -package org.pantsbuild.zinc +package org.pantsbuild.zinc.options import java.io.File import scala.annotation.tailrec diff --git a/src/scala/org/pantsbuild/zinc/logging/BUILD b/src/scala/org/pantsbuild/zinc/util/BUILD similarity index 59% rename from src/scala/org/pantsbuild/zinc/logging/BUILD rename to src/scala/org/pantsbuild/zinc/util/BUILD index 61582dfba78..4505a3f2bda 100644 --- a/src/scala/org/pantsbuild/zinc/logging/BUILD +++ b/src/scala/org/pantsbuild/zinc/util/BUILD @@ -1,14 +1,16 @@ +# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + scala_library( provides=scala_artifact( org='org.pantsbuild', - name='zinc-logging', + name='zinc-util', repo=public, publication_metadata=pants_library('The SBT incremental compiler for nailgun') ), dependencies=[ - '3rdparty/jvm/org/scala-sbt:util-logging', '3rdparty/jvm/org/scala-sbt:zinc', ], strict_deps=True, - platform='java7', + platform='java8', ) diff --git a/src/scala/org/pantsbuild/zinc/Util.scala b/src/scala/org/pantsbuild/zinc/util/Util.scala similarity index 99% rename from src/scala/org/pantsbuild/zinc/Util.scala rename to src/scala/org/pantsbuild/zinc/util/Util.scala index 696742ca7d1..9389490e26e 100644 --- a/src/scala/org/pantsbuild/zinc/Util.scala +++ b/src/scala/org/pantsbuild/zinc/util/Util.scala @@ -2,7 +2,7 @@ * Copyright (C) 2012 Typesafe, Inc. */ -package org.pantsbuild.zinc +package org.pantsbuild.zinc.util import java.io.File import sbt.io.{ Hash, IO } diff --git a/tests/scala/org/pantsbuild/zinc/BUILD b/tests/scala/org/pantsbuild/zinc/BUILD deleted file mode 100644 index a0a4caa3da8..00000000000 --- a/tests/scala/org/pantsbuild/zinc/BUILD +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -junit_tests( - dependencies=[ - '3rdparty/jvm/org/scala-sbt:io', - '3rdparty/jvm/org/scala-sbt:util-logging', - '3rdparty/jvm/org/scala-sbt:zinc', - '3rdparty:jsr305', - '3rdparty:junit', - '3rdparty:scalatest', - 'src/scala/org/pantsbuild/zinc', - # TODO fix zinc unused deps false positives due to zinc not recording - # dependencies of erased generic types: https://github.com/sbt/zinc/issues/147 - scoped('3rdparty:guava', scope='forced'), - scoped('src/scala/org/pantsbuild/zinc/cache', scope='forced'), - ], - strict_deps=True, -) diff --git a/tests/scala/org/pantsbuild/zinc/AnalysisMapSpec.scala b/tests/scala/org/pantsbuild/zinc/analysis/AnalysisMapSpec.scala similarity index 66% rename from tests/scala/org/pantsbuild/zinc/AnalysisMapSpec.scala rename to tests/scala/org/pantsbuild/zinc/analysis/AnalysisMapSpec.scala index 877a816f2fe..f08e2106fec 100644 --- a/tests/scala/org/pantsbuild/zinc/AnalysisMapSpec.scala +++ b/tests/scala/org/pantsbuild/zinc/analysis/AnalysisMapSpec.scala @@ -1,13 +1,9 @@ -// Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). +// Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). // Licensed under the Apache License, Version 2.0 (see LICENSE). 
-package org.pantsbuild.zinc +package org.pantsbuild.zinc.analysis import sbt.io.IO -import sbt.internal.util.{ - ConsoleLogger, - ConsoleOut -} import org.junit.runner.RunWith import org.scalatest.WordSpec @@ -19,11 +15,12 @@ class AnalysisMapSpec extends WordSpec with MustMatchers { "AnalysisMap" should { "succeed for empty analysis" in { IO.withTemporaryDirectory { classpathEntry => - val am = AnalysisMap.create(Map(), ConsoleLogger(ConsoleOut.systemOut)) + val am = AnalysisMap.create(AnalysisOptions()) val dc = am.getPCELookup.definesClass(classpathEntry) dc("NonExistent.class") must be(false) } } - // TODO: needs more testing with spoofed analysis + // TODO: needs more testing with spoofed analysis: + // see https://github.com/pantsbuild/pants/issues/4756 } } diff --git a/tests/scala/org/pantsbuild/zinc/analysis/BUILD b/tests/scala/org/pantsbuild/zinc/analysis/BUILD new file mode 100644 index 00000000000..1da582aafed --- /dev/null +++ b/tests/scala/org/pantsbuild/zinc/analysis/BUILD @@ -0,0 +1,18 @@ +# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +junit_tests( + dependencies=[ + '3rdparty/jvm/org/scala-sbt:io', + '3rdparty/jvm/org/scala-sbt:util-logging', + '3rdparty/jvm/org/scala-sbt:zinc', + '3rdparty:guava', + '3rdparty:jsr305', + '3rdparty:junit', + '3rdparty:scalatest', + 'src/scala/org/pantsbuild/zinc/analysis', + 'src/scala/org/pantsbuild/zinc/cache', + ], + strict_deps=True, + platform='java8', +) diff --git a/tests/scala/org/pantsbuild/zinc/logging/BUILD b/tests/scala/org/pantsbuild/zinc/logging/BUILD deleted file mode 100644 index cc3fae01a43..00000000000 --- a/tests/scala/org/pantsbuild/zinc/logging/BUILD +++ /dev/null @@ -1,11 +0,0 @@ -# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). 
- -junit_tests( - dependencies=[ - '3rdparty:guava', - '3rdparty:junit', - '3rdparty:scalatest', - 'src/scala/org/pantsbuild/zinc/logging', - ], -) diff --git a/tests/scala/org/pantsbuild/zinc/logging/LoggersSpec.scala b/tests/scala/org/pantsbuild/zinc/logging/LoggersSpec.scala deleted file mode 100644 index f9147463548..00000000000 --- a/tests/scala/org/pantsbuild/zinc/logging/LoggersSpec.scala +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). -// Licensed under the Apache License, Version 2.0 (see LICENSE). - -package org.pantsbuild.zinc.logging - -import java.io.{ File, PrintWriter, StringWriter } - -import com.google.common.base.Charsets -import com.google.common.io.Files - -import sbt.util.Level -import sbt.internal.util.ConsoleOut - -import org.junit.runner.RunWith -import org.scalatest.WordSpec -import org.scalatest.junit.JUnitRunner -import org.scalatest.MustMatchers - -@RunWith(classOf[JUnitRunner]) -class LoggersSpec extends WordSpec with MustMatchers { - "Loggers" should { - "be compound" in { - // create a compound logger - val stdout = new StringWriter() - val captureFile = File.createTempFile("loggers", "spec") - val log = - Loggers.create( - Level.Debug, - false, - ConsoleOut.printWriterOut(new PrintWriter(stdout)), - Some(captureFile) - ) - - // log something, and confirm it's captured in both locations - val msg = "this is a log message!" - log.debug(msg) - stdout.toString must include(msg) - Files.toString(captureFile, Charsets.UTF_8) must include(msg) - } - } -} diff --git a/zinc/BUILD b/zinc/BUILD index 94a1c142ae5..fcb8e22a64e 100644 --- a/zinc/BUILD +++ b/zinc/BUILD @@ -1,8 +1,20 @@ +# Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
+ jvm_binary( - basename='zinc', - main='org.pantsbuild.zinc.Main', + name='compiler', + main='org.pantsbuild.zinc.compiler.Main', dependencies=[ - 'src/scala/org/pantsbuild/zinc', + 'src/scala/org/pantsbuild/zinc/compiler', ], - description='zinc -- the scala compiler in nailgun', + description='zinc compiler -- the scala compiler in nailgun', +) + +jvm_binary( + name='extractor', + main='org.pantsbuild.zinc.extractor.Main', + dependencies=[ + 'src/scala/org/pantsbuild/zinc/extractor', + ], + description='zinc extractor -- extractor for zinc analysis', ) diff --git a/zinc/README.md b/zinc/README.md index 2b0fe16491c..e8e2189e2c9 100644 --- a/zinc/README.md +++ b/zinc/README.md @@ -38,7 +38,8 @@ Options To get information about options - ./pants run zinc: -- -help + ./pants run zinc:compiler -- -help + ./pants run zinc:extractor -- -help ### Compile