
Commit b771bec

fix

xuanyuanking committed Sep 5, 2023
1 parent 40943c2 commit b771bec
Showing 10 changed files with 29 additions and 29 deletions.
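Every change below is the same one-line edit repeated across ten files: a previously public declaration gains Scala's scoped access modifier, private[spark] or private[sql], which hides it from user code while keeping it visible inside the named enclosing package tree. A minimal sketch of how scoped visibility behaves (the package and object names here are illustrative, not from this commit):

    package org.apache.spark {
      package util {
        // Visible anywhere under org.apache.spark, hidden from user code.
        private[spark] object InternalUtil {
          def helper(): Int = 42
        }
      }
      package sql {
        object Caller {
          // Compiles: org.apache.spark.sql sits inside org.apache.spark.
          val fromInside: Int = util.InternalUtil.helper()
        }
      }
    }
    // From any package outside org.apache.spark, a reference to
    // org.apache.spark.util.InternalUtil would fail to compile.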
@@ -18,7 +18,7 @@ package org.apache.spark

import java.util.Properties

-object SparkBuildInfo {
+private[spark] object SparkBuildInfo {

val (
spark_version: String,
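The hunk above shows SparkBuildInfo binding several build fields at once by destructuring values read from a java.util.Properties file. A sketch of that pattern; the object name, resource path, and property keys below are guesses for illustration, not taken from the commit:

    import java.util.Properties

    object BuildInfoSketch {
      val (version: String, revision: String) = {
        val props = new Properties()
        // Hypothetical resource bundled on the classpath.
        val in = getClass.getResourceAsStream("/build-info.properties")
        if (in != null) {
          try props.load(in) finally in.close()
        }
        (props.getProperty("version", "unknown"),
         props.getProperty("revision", "unknown"))
      }
    }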
@@ -20,7 +20,7 @@ import java.util.Random

import scala.util.Try

-trait SparkClassUtils {
+private[spark] trait SparkClassUtils {
val random = new Random()

def getSparkClassLoader: ClassLoader = getClass.getClassLoader
@@ -80,4 +80,4 @@ trait SparkClassUtils {
}
}

-object SparkClassUtils extends SparkClassUtils
+private[spark] object SparkClassUtils extends SparkClassUtils
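After this change only code compiled under the org.apache.spark package tree can reach SparkClassUtils. A usage sketch for the getSparkClassLoader member shown in the hunk above (the caller is assumed to live inside org.apache.spark; loadClass is the standard java.lang.ClassLoader API):

    val loader: ClassLoader = SparkClassUtils.getSparkClassLoader
    val listClass: Class[_] = loader.loadClass("java.util.ArrayList")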
@@ -18,7 +18,7 @@ package org.apache.spark.sql.catalyst.util

import scala.collection.immutable

-trait SparkCollectionUtils {
+private[spark] trait SparkCollectionUtils {
/**
* Same function as `keys.zipWithIndex.toMap`, but has perf gain.
*/
@@ -34,4 +34,4 @@ trait SparkCollectionUtils {
}
}

-object SparkCollectionUtils extends SparkCollectionUtils
+private[spark] object SparkCollectionUtils extends SparkCollectionUtils
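The method body is collapsed in this diff, but its scaladoc above describes it as the same function as keys.zipWithIndex.toMap with a performance gain, i.e. it maps each key to its position. What that baseline expression computes:

    val keys = Seq("a", "b", "c")
    val indexOf: Map[String, Int] = keys.zipWithIndex.toMap
    // Map("a" -> 0, "b" -> 1, "c" -> 2)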
@@ -90,4 +90,4 @@ private[spark] trait SparkErrorUtils extends Logging {
}
}

-object SparkErrorUtils extends SparkErrorUtils
+private[spark] object SparkErrorUtils extends SparkErrorUtils
@@ -18,7 +18,7 @@ package org.apache.spark.util

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream, ObjectStreamClass}

-trait SparkSerDeUtils {
+private[spark] trait SparkSerDeUtils {
/** Serialize an object using Java serialization */
def serialize[T](o: T): Array[Byte] = {
val bos = new ByteArrayOutputStream()
@@ -51,4 +51,4 @@ trait SparkSerDeUtils {
}
}

-object SparkSerDeUtils extends SparkSerDeUtils
+private[spark] object SparkSerDeUtils extends SparkSerDeUtils
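A round-trip sketch for these Java-serialization helpers: serialize is shown in the hunk above, and a matching deserialize[T](bytes) is assumed to live in the collapsed part of the trait. After this commit, a caller like this must itself sit under org.apache.spark:

    val bytes: Array[Byte] = SparkSerDeUtils.serialize(Seq(1, 2, 3))
    val restored: Seq[Int] = SparkSerDeUtils.deserialize[Seq[Int]](bytes)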
@@ -22,14 +22,14 @@ import org.apache.avro.Schema

import org.apache.spark.sql.types.DecimalType

-object CustomDecimal {
+private[spark] object CustomDecimal {
val TYPE_NAME = "custom-decimal"
}

// A customized logical type, which will be registered to Avro. This logical type is similar to
// Avro's builtin Decimal type, but is meant to be registered for long type. It indicates that
// the long type should be converted to Spark's Decimal type, with provided precision and scale.
-private class CustomDecimal(schema: Schema) extends LogicalType(CustomDecimal.TYPE_NAME) {
+private[spark] class CustomDecimal(schema: Schema) extends LogicalType(CustomDecimal.TYPE_NAME) {
val scale : Int = {
val obj = schema.getObjectProp("scale")
obj match {
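The comment above says this logical type will be registered to Avro. A sketch of what such a registration typically looks like using Avro's standard LogicalTypes.register API; the registration call is ordinary Avro usage, not part of this commit, and the caller is assumed to sit inside org.apache.spark now that CustomDecimal is package-private:

    import org.apache.avro.{LogicalType, LogicalTypes, Schema}

    // Register a factory so Avro resolves "custom-decimal" annotations
    // in schemas to instances of CustomDecimal.
    LogicalTypes.register(CustomDecimal.TYPE_NAME, new LogicalTypes.LogicalTypeFactory {
      override def fromSchema(schema: Schema): LogicalType = new CustomDecimal(schema)
    })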
@@ -28,7 +28,7 @@ import org.apache.spark.internal.Logging
* whose capturing class contains unknown (and unneeded) classes. The lambda itself does not need
the class and therefore is safe to replace by a stub.
*/
-class StubClassLoader(parent: ClassLoader, shouldStub: String => Boolean)
+private[spark] class StubClassLoader(parent: ClassLoader, shouldStub: String => Boolean)
extends ClassLoader(parent) with Logging {
override def findClass(name: String): Class[_] = {
if (!shouldStub(name)) {
@@ -40,7 +40,7 @@ class StubClassLoader(parent: ClassLoader, shouldStub: String => Boolean)
}
}

-object StubClassLoader {
+private[spark] object StubClassLoader {
def apply(parent: ClassLoader, binaryName: Seq[String]): StubClassLoader = {
new StubClassLoader(parent, name => binaryName.exists(p => name.startsWith(p)))
}
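A usage sketch for the apply factory shown above, which stubs every class whose binary name starts with one of the given prefixes; the package prefix below is a made-up example:

    val loader: StubClassLoader =
      StubClassLoader(getClass.getClassLoader, Seq("com.example.userlib."))
    // Classes under com.example.userlib. now resolve to stub classes
    // rather than failing to load with ClassNotFoundException.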
@@ -51,4 +51,4 @@ private[sql] trait CompilationErrors extends DataTypeErrorsBase {
}
}

-object CompilationErrors extends CompilationErrors
+private[sql] object CompilationErrors extends CompilationErrors
@@ -18,7 +18,7 @@ package org.apache.spark.sql.types

import org.apache.spark.sql.catalyst.expressions.Expression

-abstract class DataTypeExpression(val dataType: DataType) {
+private[sql] abstract class DataTypeExpression(val dataType: DataType) {
/**
* Enables matching against DataType for expressions:
* {{{
@@ -29,18 +29,18 @@ abstract class DataTypeExpression(val dataType: DataType) {
private[sql] def unapply(e: Expression): Boolean = e.dataType == dataType
}

-case object BooleanTypeExpression extends DataTypeExpression(BooleanType)
-case object StringTypeExpression extends DataTypeExpression(StringType)
-case object TimestampTypeExpression extends DataTypeExpression(TimestampType)
-case object DateTypeExpression extends DataTypeExpression(DateType)
-case object ByteTypeExpression extends DataTypeExpression(ByteType)
-case object ShortTypeExpression extends DataTypeExpression(ShortType)
-case object IntegerTypeExpression extends DataTypeExpression(IntegerType)
-case object LongTypeExpression extends DataTypeExpression(LongType)
-case object DoubleTypeExpression extends DataTypeExpression(DoubleType)
-case object FloatTypeExpression extends DataTypeExpression(FloatType)
+private[sql] case object BooleanTypeExpression extends DataTypeExpression(BooleanType)
+private[sql] case object StringTypeExpression extends DataTypeExpression(StringType)
+private[sql] case object TimestampTypeExpression extends DataTypeExpression(TimestampType)
+private[sql] case object DateTypeExpression extends DataTypeExpression(DateType)
+private[sql] case object ByteTypeExpression extends DataTypeExpression(ByteType)
+private[sql] case object ShortTypeExpression extends DataTypeExpression(ShortType)
+private[sql] case object IntegerTypeExpression extends DataTypeExpression(IntegerType)
+private[sql] case object LongTypeExpression extends DataTypeExpression(LongType)
+private[sql] case object DoubleTypeExpression extends DataTypeExpression(DoubleType)
+private[sql] case object FloatTypeExpression extends DataTypeExpression(FloatType)

-object NumericTypeExpression {
+private[sql] object NumericTypeExpression {
/**
* Enables matching against NumericType for expressions:
* {{{
@@ -53,7 +53,7 @@ object NumericTypeExpression {
}
}

-object IntegralTypeExpression {
+private[sql] object IntegralTypeExpression {
/**
* Enables matching against IntegralType for expressions:
* {{{
@@ -66,12 +66,12 @@ object IntegralTypeExpression {
}
}

-object AnyTimestampTypeExpression {
+private[sql] object AnyTimestampTypeExpression {
def unapply(e: Expression): Boolean =
e.dataType.isInstanceOf[TimestampType] || e.dataType.isInstanceOf[TimestampNTZType]
}

-object DecimalExpression {
+private[sql] object DecimalExpression {
def unapply(e: Expression): Option[(Int, Int)] = e.dataType match {
case t: DecimalType => Some((t.precision, t.scale))
case _ => None
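These extractors exist so expression code can match on an Expression's data type directly; with the new modifier, the matching code must itself live under org.apache.spark.sql. A sketch of the patterns they enable (describe and expr are hypothetical; DecimalExpression.unapply yields the precision and scale shown above):

    import org.apache.spark.sql.catalyst.expressions.Expression

    def describe(expr: Expression): String = expr match {
      case IntegerTypeExpression() => "integer-typed expression"
      case DecimalExpression(precision, scale) => s"decimal($precision, $scale)"
      case AnyTimestampTypeExpression() => "timestamp or timestamp_ntz"
      case _ => "something else"
    }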
@@ -719,6 +719,6 @@ object JdbcDialects {
/**
* NOOP dialect object, always returning the neutral element.
*/
-object NoopDialect extends JdbcDialect {
+private[spark] object NoopDialect extends JdbcDialect {
override def canHandle(url : String): Boolean = true
}
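NoopDialect becomes internal here, while third-party dialects remain supported through the public JdbcDialect class and JdbcDialects.registerDialect. A sketch of a user-defined dialect; the dialect name and URL prefix are illustrative:

    object MyDialect extends JdbcDialect {
      // Claim only URLs for this hypothetical database.
      override def canHandle(url: String): Boolean = url.startsWith("jdbc:mydb:")
    }
    JdbcDialects.registerDialect(MyDialect)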
