From b771beca9ab6eb5274bb6eb3bdc95e4269ac09e3 Mon Sep 17 00:00:00 2001
From: Yuanjian Li
Date: Tue, 5 Sep 2023 09:37:53 -0700
Subject: [PATCH] fix

---
 .../org/apache/spark/SparkBuildInfo.scala     |  2 +-
 .../apache/spark/util/SparkClassUtils.scala   |  4 +--
 .../spark/util/SparkCollectionUtils.scala     |  4 +--
 .../apache/spark/util/SparkErrorUtils.scala   |  2 +-
 .../apache/spark/util/SparkSerDeUtils.scala   |  4 +--
 .../apache/spark/sql/avro/CustomDecimal.scala |  4 +--
 .../apache/spark/util/StubClassLoader.scala   |  4 +--
 .../spark/sql/errors/CompilationErrors.scala  |  2 +-
 .../spark/sql/types/DataTypeExpression.scala  | 30 +++++++++----------
 .../apache/spark/sql/jdbc/JdbcDialects.scala  |  2 +-
 10 files changed, 29 insertions(+), 29 deletions(-)

diff --git a/common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala b/common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala
index 23f671f9d7647..ebc62460d2318 100644
--- a/common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala
+++ b/common/utils/src/main/scala/org/apache/spark/SparkBuildInfo.scala
@@ -18,7 +18,7 @@ package org.apache.spark

 import java.util.Properties

-object SparkBuildInfo {
+private[spark] object SparkBuildInfo {

   val (
     spark_version: String,
diff --git a/common/utils/src/main/scala/org/apache/spark/util/SparkClassUtils.scala b/common/utils/src/main/scala/org/apache/spark/util/SparkClassUtils.scala
index 679d546d04c9f..5984eaee42e73 100644
--- a/common/utils/src/main/scala/org/apache/spark/util/SparkClassUtils.scala
+++ b/common/utils/src/main/scala/org/apache/spark/util/SparkClassUtils.scala
@@ -20,7 +20,7 @@ import java.util.Random

 import scala.util.Try

-trait SparkClassUtils {
+private[spark] trait SparkClassUtils {
   val random = new Random()

   def getSparkClassLoader: ClassLoader = getClass.getClassLoader
@@ -80,4 +80,4 @@
   }
 }

-object SparkClassUtils extends SparkClassUtils
+private[spark] object SparkClassUtils extends SparkClassUtils
diff --git a/common/utils/src/main/scala/org/apache/spark/util/SparkCollectionUtils.scala b/common/utils/src/main/scala/org/apache/spark/util/SparkCollectionUtils.scala
index 7fecc9ccb664d..be8282db31bee 100644
--- a/common/utils/src/main/scala/org/apache/spark/util/SparkCollectionUtils.scala
+++ b/common/utils/src/main/scala/org/apache/spark/util/SparkCollectionUtils.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.catalyst.util

 import scala.collection.immutable

-trait SparkCollectionUtils {
+private[spark] trait SparkCollectionUtils {
   /**
    * Same function as `keys.zipWithIndex.toMap`, but has perf gain.
    */
@@ -34,4 +34,4 @@ trait SparkCollectionUtils {
   }
 }

-object SparkCollectionUtils extends SparkCollectionUtils
+private[spark] object SparkCollectionUtils extends SparkCollectionUtils
diff --git a/common/utils/src/main/scala/org/apache/spark/util/SparkErrorUtils.scala b/common/utils/src/main/scala/org/apache/spark/util/SparkErrorUtils.scala
index 97a07984a228a..8194d1e424173 100644
--- a/common/utils/src/main/scala/org/apache/spark/util/SparkErrorUtils.scala
+++ b/common/utils/src/main/scala/org/apache/spark/util/SparkErrorUtils.scala
@@ -90,4 +90,4 @@ private[spark] trait SparkErrorUtils extends Logging {
   }
 }

-object SparkErrorUtils extends SparkErrorUtils
+private[spark] object SparkErrorUtils extends SparkErrorUtils
diff --git a/common/utils/src/main/scala/org/apache/spark/util/SparkSerDeUtils.scala b/common/utils/src/main/scala/org/apache/spark/util/SparkSerDeUtils.scala
index 9b6174c47bde3..2cc14fea5f307 100644
--- a/common/utils/src/main/scala/org/apache/spark/util/SparkSerDeUtils.scala
+++ b/common/utils/src/main/scala/org/apache/spark/util/SparkSerDeUtils.scala
@@ -18,7 +18,7 @@ package org.apache.spark.util

 import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream, ObjectStreamClass}

-trait SparkSerDeUtils {
+private[spark] trait SparkSerDeUtils {
   /** Serialize an object using Java serialization */
   def serialize[T](o: T): Array[Byte] = {
     val bos = new ByteArrayOutputStream()
@@ -51,4 +51,4 @@ trait SparkSerDeUtils {
   }
 }

-object SparkSerDeUtils extends SparkSerDeUtils
+private[spark] object SparkSerDeUtils extends SparkSerDeUtils
diff --git a/connector/avro/src/main/java/org/apache/spark/sql/avro/CustomDecimal.scala b/connector/avro/src/main/java/org/apache/spark/sql/avro/CustomDecimal.scala
index d76f40c7635c4..fab3d4493e344 100644
--- a/connector/avro/src/main/java/org/apache/spark/sql/avro/CustomDecimal.scala
+++ b/connector/avro/src/main/java/org/apache/spark/sql/avro/CustomDecimal.scala
@@ -22,14 +22,14 @@ import org.apache.avro.Schema

 import org.apache.spark.sql.types.DecimalType

-object CustomDecimal {
+private[spark] object CustomDecimal {
   val TYPE_NAME = "custom-decimal"
 }

 // A customized logical type, which will be registered to Avro. This logical type is similar to
 // Avro's builtin Decimal type, but is meant to be registered for the long type. It indicates that
 // the long type should be converted to Spark's Decimal type, with provided precision and scale.
-private class CustomDecimal(schema: Schema) extends LogicalType(CustomDecimal.TYPE_NAME) {
+private[spark] class CustomDecimal(schema: Schema) extends LogicalType(CustomDecimal.TYPE_NAME) {
   val scale : Int = {
     val obj = schema.getObjectProp("scale")
     obj match {
diff --git a/core/src/main/scala/org/apache/spark/util/StubClassLoader.scala b/core/src/main/scala/org/apache/spark/util/StubClassLoader.scala
index 8d903c2a3e400..ed58ccf1bcf15 100644
--- a/core/src/main/scala/org/apache/spark/util/StubClassLoader.scala
+++ b/core/src/main/scala/org/apache/spark/util/StubClassLoader.scala
@@ -28,7 +28,7 @@ import org.apache.spark.internal.Logging
  * whose capturing class contains unknown (and unneeded) classes. The lambda itself does not need
  * the class and therefore is safe to replace by a stub.
  */
-class StubClassLoader(parent: ClassLoader, shouldStub: String => Boolean)
+private[spark] class StubClassLoader(parent: ClassLoader, shouldStub: String => Boolean)
   extends ClassLoader(parent) with Logging {
   override def findClass(name: String): Class[_] = {
     if (!shouldStub(name)) {
@@ -40,7 +40,7 @@ class StubClassLoader(parent: ClassLoader, shouldStub: String => Boolean)
   }
 }
-object StubClassLoader {
+private[spark] object StubClassLoader {
   def apply(parent: ClassLoader, binaryName: Seq[String]): StubClassLoader = {
     new StubClassLoader(parent, name => binaryName.exists(p => name.startsWith(p)))
   }
 }
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala b/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala
index deae1198d9cb9..7c0b3c6cf3083 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/errors/CompilationErrors.scala
@@ -51,4 +51,4 @@ private[sql] trait CompilationErrors extends DataTypeErrorsBase {
   }
 }

-object CompilationErrors extends CompilationErrors
+private[sql] object CompilationErrors extends CompilationErrors
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeExpression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeExpression.scala
index 1b74419a4af7b..026272a0f2d85 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeExpression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeExpression.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.types

 import org.apache.spark.sql.catalyst.expressions.Expression

-abstract class DataTypeExpression(val dataType: DataType) {
+private[sql] abstract class DataTypeExpression(val dataType: DataType) {
   /**
    * Enables matching against DataType for expressions:
    * {{{
@@ -29,18 +29,18 @@ abstract class DataTypeExpression(val dataType: DataType) {
   private[sql] def unapply(e: Expression): Boolean = e.dataType == dataType
 }

-case object BooleanTypeExpression extends DataTypeExpression(BooleanType)
-case object StringTypeExpression extends DataTypeExpression(StringType)
-case object TimestampTypeExpression extends DataTypeExpression(TimestampType)
-case object DateTypeExpression extends DataTypeExpression(DateType)
-case object ByteTypeExpression extends DataTypeExpression(ByteType)
-case object ShortTypeExpression extends DataTypeExpression(ShortType)
-case object IntegerTypeExpression extends DataTypeExpression(IntegerType)
-case object LongTypeExpression extends DataTypeExpression(LongType)
-case object DoubleTypeExpression extends DataTypeExpression(DoubleType)
-case object FloatTypeExpression extends DataTypeExpression(FloatType)
+private[sql] case object BooleanTypeExpression extends DataTypeExpression(BooleanType)
+private[sql] case object StringTypeExpression extends DataTypeExpression(StringType)
+private[sql] case object TimestampTypeExpression extends DataTypeExpression(TimestampType)
+private[sql] case object DateTypeExpression extends DataTypeExpression(DateType)
+private[sql] case object ByteTypeExpression extends DataTypeExpression(ByteType)
+private[sql] case object ShortTypeExpression extends DataTypeExpression(ShortType)
+private[sql] case object IntegerTypeExpression extends DataTypeExpression(IntegerType)
+private[sql] case object LongTypeExpression extends DataTypeExpression(LongType)
+private[sql] case object DoubleTypeExpression extends DataTypeExpression(DoubleType)
+private[sql] case object FloatTypeExpression extends DataTypeExpression(FloatType)

-object NumericTypeExpression {
+private[sql] object NumericTypeExpression {
   /**
    * Enables matching against NumericType for expressions:
    * {{{
@@ -53,7 +53,7 @@ object NumericTypeExpression {
   }
 }

-object IntegralTypeExpression {
+private[sql] object IntegralTypeExpression {
   /**
    * Enables matching against IntegralType for expressions:
    * {{{
@@ -66,12 +66,12 @@ object IntegralTypeExpression {
   }
 }

-object AnyTimestampTypeExpression {
+private[sql] object AnyTimestampTypeExpression {
   def unapply(e: Expression): Boolean =
     e.dataType.isInstanceOf[TimestampType] || e.dataType.isInstanceOf[TimestampNTZType]
 }

-object DecimalExpression {
+private[sql] object DecimalExpression {
   def unapply(e: Expression): Option[(Int, Int)] = e.dataType match {
     case t: DecimalType => Some((t.precision, t.scale))
     case _ => None
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
index fac3cc60d952a..2f5e813dcb618 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
@@ -719,6 +719,6 @@ object JdbcDialects {
 /**
  * NOOP dialect object, always returning the neutral element.
  */
-object NoopDialect extends JdbcDialect {
+private[spark] object NoopDialect extends JdbcDialect {
   override def canHandle(url : String): Boolean = true
 }
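
Notes on the change

The whole patch is one mechanical edit applied ten times: adding a package-qualified
access modifier. In Scala, `private[spark]` makes a definition visible to everything
inside the org.apache.spark package tree while hiding it from external clients, so
these helpers drop out of the published API without any code moving. A minimal,
self-contained sketch of the semantics (toy packages, not the real Spark layout):

package org.apache.spark.util {
  // Visible anywhere under org.apache.spark, invisible outside it.
  private[spark] object InternalUtil {
    def stamp(): String = "internal"
  }
}

package org.apache.spark.sql {
  object InsideSpark {
    // Compiles: org.apache.spark.sql sits inside org.apache.spark.
    def ok(): String = org.apache.spark.util.InternalUtil.stamp()
  }
}

package com.example.app {
  object OutsideSpark {
    // Would not compile if uncommented: InternalUtil is not visible here.
    // def bad(): String = org.apache.spark.util.InternalUtil.stamp()
  }
}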
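
SparkCollectionUtils' scaladoc promises `keys.zipWithIndex.toMap` semantics with
better performance, but the hunk cuts off before the implementation. The single-pass
version below is only a sketch of how such a helper can avoid the intermediate tuple
collection; the method name and body are assumptions, not the Spark source:

import scala.collection.immutable

object CollectionSketch {
  // Same result as keys.zipWithIndex.toMap, built in one pass instead of
  // materializing an intermediate sequence of (key, index) pairs.
  def toMapWithIndex[K](keys: Iterable[K]): Map[K, Int] = {
    val builder = immutable.Map.newBuilder[K, Int]
    var index = 0
    val it = keys.iterator
    while (it.hasNext) {
      builder += it.next() -> index
      index += 1
    }
    builder.result()
  }

  def main(args: Array[String]): Unit = {
    val keys = Seq("a", "b", "c")
    assert(toMapWithIndex(keys) == keys.zipWithIndex.toMap)
    println(toMapWithIndex(keys)) // Map(a -> 0, b -> 1, c -> 2)
  }
}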
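
SparkSerDeUtils is a thin wrapper over plain Java serialization: write the object
graph through an ObjectOutputStream into a byte array, then read it back through an
ObjectInputStream and cast. A standalone sketch of the same round trip (not the
Spark source; the ObjectStreamClass import in the hunk suggests the real trait also
has a ClassLoader-aware deserialize variant):

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

object SerDeSketch {
  // Serialize a (Serializable) object graph to bytes.
  def serialize[T](o: T): Array[Byte] = {
    val bos = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(bos)
    try oos.writeObject(o) finally oos.close()
    bos.toByteArray
  }

  // Read the object graph back; the cast is the caller's promise about T.
  def deserialize[T](bytes: Array[Byte]): T = {
    val ois = new ObjectInputStream(new ByteArrayInputStream(bytes))
    try ois.readObject().asInstanceOf[T] finally ois.close()
  }

  def main(args: Array[String]): Unit = {
    val roundTripped = deserialize[List[Int]](serialize(List(1, 2, 3)))
    assert(roundTripped == List(1, 2, 3))
  }
}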
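
StubClassLoader's contract, per its scaladoc and the apply method in the hunks:
resolve everything through the parent loader, and only for binary names matching
the predicate substitute a stub instead of failing. Spark fabricates stub bytecode
on the fly; the self-contained sketch below swaps in a pre-compiled placeholder
class so it runs on its own:

// Toy stand-in that every stubbed name resolves to.
class Stub

class StubbingLoader(parent: ClassLoader, shouldStub: String => Boolean)
    extends ClassLoader(parent) {
  override def findClass(name: String): Class[_] = {
    if (!shouldStub(name)) {
      throw new ClassNotFoundException(name)
    }
    classOf[Stub] // the real loader defines fresh bytecode for `name` here
  }
}

object StubbingLoaderDemo {
  def main(args: Array[String]): Unit = {
    val loader = new StubbingLoader(
      getClass.getClassLoader, name => name.startsWith("com.example."))
    // Missing class under the stubbed prefix: resolves to the stand-in.
    println(loader.loadClass("com.example.Missing"))
    // Missing class outside the prefix: still fails as usual.
    try loader.loadClass("does.not.Exist")
    catch { case e: ClassNotFoundException => println(s"not stubbed: $e") }
  }
}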
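
The DataTypeExpression family exists for pattern matching: the Boolean-returning
unapply lets analyzer rules write `case IntegerTypeExpression() =>` against any
Expression and have it succeed exactly when the expression's dataType matches. A
self-contained sketch with toy stand-ins for Spark's Expression and DataType
hierarchies (the names mirror the patch, the toy bodies do not):

sealed trait DataType
case object IntegerType extends DataType
case object StringType extends DataType

trait Expression { def dataType: DataType }
case class Literal(value: Any, dataType: DataType) extends Expression

// Boolean unapply: the pattern matches when the wrapped type lines up.
abstract class DataTypeExpression(val dataType: DataType) {
  def unapply(e: Expression): Boolean = e.dataType == dataType
}
object IntegerTypeExpression extends DataTypeExpression(IntegerType)
object StringTypeExpression extends DataTypeExpression(StringType)

object ExtractorDemo {
  def describe(e: Expression): String = e match {
    case IntegerTypeExpression() => "integer-typed expression"
    case StringTypeExpression()  => "string-typed expression"
    case _                       => "something else"
  }

  def main(args: Array[String]): Unit = {
    println(describe(Literal(1, IntegerType)))  // integer-typed expression
    println(describe(Literal("a", StringType))) // string-typed expression
  }
}

DecimalExpression in the same file uses the Option-returning unapply form instead,
so a match can bind precision and scale directly: `case DecimalExpression(p, s) =>`.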
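
Finally, NoopDialect is the fallback a dialect registry hands back when no dialect
claims a JDBC URL; its `canHandle` always returns true, which is what "returning the
neutral element" means here. Making it `private[spark]` hides the fallback object
without changing dispatch. A simplified sketch of that lookup pattern (not the
actual JdbcDialects registry):

// First registered dialect whose canHandle accepts the URL wins; otherwise
// the always-accepting no-op dialect is returned.
trait Dialect { def canHandle(url: String): Boolean }

object PostgresDialect extends Dialect {
  def canHandle(url: String): Boolean = url.startsWith("jdbc:postgresql")
}

object NoopDialect extends Dialect {
  def canHandle(url: String): Boolean = true // neutral fallback
}

object Registry {
  private val dialects = Seq(PostgresDialect)
  def get(url: String): Dialect =
    dialects.find(_.canHandle(url)).getOrElse(NoopDialect)
}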