
Commit

follow comment
AngersZhuuuu committed Dec 30, 2020
1 parent e09d8e0 commit c7d1aee
Showing 2 changed files with 21 additions and 19 deletions.
16 changes: 8 additions & 8 deletions core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -304,8 +304,8 @@ private[spark] class SparkSubmit extends Logging {
     // Resolve maven dependencies if there are any and add classpath to jars. Add them to py-files
     // too for packages that include Python code
     val resolvedMavenCoordinates = DependencyUtils.resolveMavenDependencies(
-      packagesTransitive = true, args.packagesExclusions, args.packages,
-      args.repositories, args.ivyRepoPath, args.ivySettingsPath)
+      packagesTransitive = true, Option(args.packagesExclusions), Option(args.packages),
+      Option(args.repositories), Option(args.ivyRepoPath), args.ivySettingsPath)
 
     if (resolvedMavenCoordinates.nonEmpty) {
       // In K8s client mode, when in the driver, add resolved jars early as we might need
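Note on the hunk above: the submit arguments revert to plain, possibly-null Strings (see the SparkSubmitArguments diff below), so the call site now lifts each one with Option(...) before handing it to DependencyUtils.resolveMavenDependencies, which, judging from this call, keeps Option-typed parameters. A minimal sketch of the standard-library behaviour this relies on (not Spark code; the coordinate string is made up):

```scala
// Minimal sketch (not Spark code): Option(_) is the standard way to lift a
// possibly-null Java-style String into Scala's Option type.
object OptionWrappingSketch {
  def main(args: Array[String]): Unit = {
    val unset: String = null
    val set: String = "org.apache.spark:fake-artifact:1.0" // hypothetical coordinate

    // Option(null) collapses to None, so downstream code never sees a null.
    assert(Option(unset).isEmpty)
    assert(Option(set).contains(set))
  }
}
```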
@@ -589,7 +589,7 @@ private[spark] class SparkSubmit extends Logging {
       OptionAssigner(args.deployMode, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES,
         confKey = SUBMIT_DEPLOY_MODE.key),
       OptionAssigner(args.name, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES, confKey = "spark.app.name"),
-      OptionAssigner(args.ivyRepoPath.orNull, ALL_CLUSTER_MGRS, CLIENT, confKey = "spark.jars.ivy"),
+      OptionAssigner(args.ivyRepoPath, ALL_CLUSTER_MGRS, CLIENT, confKey = "spark.jars.ivy"),
       OptionAssigner(args.driverMemory, ALL_CLUSTER_MGRS, CLIENT,
         confKey = DRIVER_MEMORY.key),
       OptionAssigner(args.driverExtraClassPath, ALL_CLUSTER_MGRS, ALL_DEPLOY_MODES,
@@ -605,13 +605,13 @@ private[spark] class SparkSubmit extends Logging {
       OptionAssigner(args.pyFiles, ALL_CLUSTER_MGRS, CLUSTER, confKey = SUBMIT_PYTHON_FILES.key),
 
       // Propagate attributes for dependency resolution at the driver side
-      OptionAssigner(args.packages.orNull, STANDALONE | MESOS | KUBERNETES,
+      OptionAssigner(args.packages, STANDALONE | MESOS | KUBERNETES,
         CLUSTER, confKey = "spark.jars.packages"),
-      OptionAssigner(args.repositories.orNull, STANDALONE | MESOS | KUBERNETES,
+      OptionAssigner(args.repositories, STANDALONE | MESOS | KUBERNETES,
         CLUSTER, confKey = "spark.jars.repositories"),
-      OptionAssigner(args.ivyRepoPath.orNull, STANDALONE | MESOS | KUBERNETES,
+      OptionAssigner(args.ivyRepoPath, STANDALONE | MESOS | KUBERNETES,
         CLUSTER, confKey = "spark.jars.ivy"),
-      OptionAssigner(args.packagesExclusions.orNull, STANDALONE | MESOS | KUBERNETES,
+      OptionAssigner(args.packagesExclusions, STANDALONE | MESOS | KUBERNETES,
         CLUSTER, confKey = "spark.jars.excludes"),
 
       // Yarn only
@@ -647,7 +647,7 @@ private[spark] class SparkSubmit extends Logging {
         confKey = DRIVER_CORES.key),
       OptionAssigner(args.supervise.toString, STANDALONE | MESOS, CLUSTER,
         confKey = DRIVER_SUPERVISE.key),
-      OptionAssigner(args.ivyRepoPath.orNull, STANDALONE, CLUSTER, confKey = "spark.jars.ivy"),
+      OptionAssigner(args.ivyRepoPath, STANDALONE, CLUSTER, confKey = "spark.jars.ivy"),
 
       // An internal option used only for spark-shell to add user jars to repl's classloader,
       // previously it uses "spark.jars" or "spark.yarn.dist.jars" which now may be pointed to
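The .orNull calls dropped above were only needed while the fields were Option[String]. With the fields back to nullable Strings, the value can be passed to OptionAssigner directly; as far as one can tell from SparkSubmit, entries whose value is null are simply skipped when the options are applied to the Spark configuration. A simplified sketch of that null-skipping pattern (hypothetical types, not the real OptionAssigner):

```scala
// Simplified sketch of the null-skipping pattern (not the actual SparkSubmit code):
// entries whose value is null are never applied, so a plain, possibly-null
// String works without an .orNull conversion.
final case class Assigner(value: String, confKey: String)

object AssignerSketch {
  def main(args: Array[String]): Unit = {
    val ivyRepoPath: String = null                 // user did not set it
    val packages: String = "com.example:demo:0.1"  // hypothetical value

    val entries = Seq(
      Assigner(ivyRepoPath, "spark.jars.ivy"),
      Assigner(packages, "spark.jars.packages"))

    // Only non-null values end up in the resulting config map.
    val conf = entries.collect { case Assigner(v, k) if v != null => k -> v }.toMap
    println(conf) // Map(spark.jars.packages -> com.example:demo:0.1)
  }
}
```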
24 changes: 13 additions & 11 deletions core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -60,11 +60,11 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
   var name: String = null
   var childArgs: ArrayBuffer[String] = new ArrayBuffer[String]()
   var jars: String = null
-  var packages: Option[String] = None
-  var repositories: Option[String] = None
-  var ivyRepoPath: Option[String] = None
+  var packages: String = null
+  var repositories: String = null
+  var ivyRepoPath: String = null
   var ivySettingsPath: Option[String] = None
-  var packagesExclusions: Option[String] = None
+  var packagesExclusions: String = null
   var verbose: Boolean = false
   var isPython: Boolean = false
   var pyFiles: String = null
@@ -185,11 +185,13 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
     files = Option(files).orElse(sparkProperties.get(config.FILES.key)).orNull
     archives = Option(archives).orElse(sparkProperties.get(config.ARCHIVES.key)).orNull
     pyFiles = Option(pyFiles).orElse(sparkProperties.get(config.SUBMIT_PYTHON_FILES.key)).orNull
-    ivyRepoPath = sparkProperties.get("spark.jars.ivy")
+    ivyRepoPath = sparkProperties.get("spark.jars.ivy").orNull
     ivySettingsPath = sparkProperties.get("spark.jars.ivySettings")
-    packages = packages.orElse(sparkProperties.get("spark.jars.packages"))
-    packagesExclusions = packagesExclusions.orElse(sparkProperties.get("spark.jars.excludes"))
-    repositories = repositories.orElse(sparkProperties.get("spark.jars.repositories"))
+    packages = Option(packages).orElse(sparkProperties.get("spark.jars.packages")).orNull
+    packagesExclusions = Option(packagesExclusions)
+      .orElse(sparkProperties.get("spark.jars.excludes")).orNull
+    repositories = Option(repositories)
+      .orElse(sparkProperties.get("spark.jars.repositories")).orNull
     deployMode = Option(deployMode)
       .orElse(sparkProperties.get(config.SUBMIT_DEPLOY_MODE.key))
       .orElse(env.get("DEPLOY_MODE"))
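The pattern restored above, Option(field).orElse(sparkProperties.get(key)).orNull, keeps the usual precedence: a value given on the command line wins over one read from the properties file, and if neither is set the field stays null. A small self-contained sketch of that rule (hypothetical key and values, not Spark code):

```scala
// Minimal sketch of the precedence rule: CLI value > properties-file value > null.
object PrecedenceSketch {
  def resolve(cliValue: String, props: Map[String, String], key: String): String =
    Option(cliValue).orElse(props.get(key)).orNull

  def main(args: Array[String]): Unit = {
    val props = Map("spark.jars.packages" -> "com.example:from-defaults:1.0")

    // A command-line value takes precedence over the properties file.
    assert(resolve("com.example:from-cli:1.0", props, "spark.jars.packages")
      == "com.example:from-cli:1.0")
    // With no CLI value, the properties file supplies the default.
    assert(resolve(null, props, "spark.jars.packages")
      == "com.example:from-defaults:1.0")
    // With neither, the field simply stays null.
    assert(resolve(null, Map.empty, "spark.jars.packages") == null)
  }
}
```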
@@ -407,13 +409,13 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
         jars = Utils.resolveURIs(value)
 
       case PACKAGES =>
-        packages = Some(value)
+        packages = value
 
       case PACKAGES_EXCLUDE =>
-        packagesExclusions = Some(value)
+        packagesExclusions = value
 
       case REPOSITORIES =>
-        repositories = Some(value)
+        repositories = value
 
       case CONF =>
         val (confName, confValue) = SparkSubmitUtils.parseSparkConfProperty(value)
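With this hunk the option handler stores the raw flag value as-is; the lift to Option (and the merge with values from spark-defaults shown earlier) happens in a later pass. A stripped-down, hypothetical illustration of this parse-then-merge split (not the real SparkSubmitOptionParser API):

```scala
// Hypothetical, stripped-down illustration (not the real SparkSubmitOptionParser):
// the handler stores the raw flag value; defaults are merged afterwards.
object HandleSketch {
  var packages: String = null

  def handle(opt: String, value: String): Boolean = opt match {
    case "--packages" => packages = value; true
    case _ => false
  }

  def mergeDefaults(props: Map[String, String]): Unit = {
    packages = Option(packages).orElse(props.get("spark.jars.packages")).orNull
  }

  def main(args: Array[String]): Unit = {
    handle("--packages", "com.example:demo:0.1") // made-up coordinate
    mergeDefaults(Map("spark.jars.packages" -> "com.example:default:1.0"))
    println(packages) // com.example:demo:0.1 -- the CLI value wins
  }
}
```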
