Commit a52200b

marmbrus committed Jan 10, 2016
1 parent e06c0e3 commit a52200b
Showing 3 changed files with 5 additions and 7 deletions.
@@ -135,8 +135,6 @@ final class DataStreamWriter private[sql](df: DataFrame) {
 
   private var source: String = df.sqlContext.conf.defaultDataSourceName
 
-  private var mode: SaveMode = SaveMode.ErrorIfExists
-
   private var extraOptions = new scala.collection.mutable.HashMap[String, String]
 
   private var partitioningColumns: Option[Seq[String]] = None
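
For context, SaveMode is a batch-write setting, which is presumably why the streaming DataStreamWriter drops the field here. A minimal sketch of where it still applies, assuming the Spark 1.6-era batch DataFrameWriter API, an existing DataFrame df, and a hypothetical output path:

    import org.apache.spark.sql.SaveMode

    // Batch writes still take a SaveMode; ErrorIfExists fails the job
    // if /tmp/out already exists.
    df.write
      .mode(SaveMode.ErrorIfExists)
      .parquet("/tmp/out")
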
@@ -30,7 +30,7 @@ import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.UnsafeRow
 import org.apache.spark.sql.catalyst.expressions.codegen.{UnsafeRowWriter, BufferHolder}
-import org.apache.spark.sql.execution.streaming.{FileStreamSink, Sink, FileStreamSouce, Source}
+import org.apache.spark.sql.execution.streaming.{FileStreamSink, Sink, FileStreamSource, Source}
 import org.apache.spark.sql.{AnalysisException, Row, SQLContext}
 import org.apache.spark.sql.execution.datasources.PartitionSpec
 import org.apache.spark.sql.sources._

@@ -77,7 +77,7 @@ class DefaultSource
     val path = parameters("path")
     val metadataPath = parameters.getOrElse("metadataPath", s"$path/_metadata")
 
-    new FileStreamSouce(sqlContext, metadataPath, path)
+    new FileStreamSource(sqlContext, metadataPath, path)
   }
 
   override def createSink(sqlContext: SQLContext, parameters: Map[String, String]): Sink = {
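
A hedged usage sketch of the parameter handling above, assuming createSource takes the same (sqlContext, parameters) arguments as the createSink signature shown in this hunk; the input path is hypothetical:

    // "metadataPath" is omitted from the options, so the source falls back
    // to the s"$path/_metadata" default seen above.
    val params = Map("path" -> "/data/incoming")
    val source = new DefaultSource().createSource(sqlContext, params)
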
@@ -30,7 +30,7 @@ import scala.collection.mutable.ArrayBuffer
 /**
  * A very simple source that reads text files from the given directory as they appear.
  */
-class FileStreamSouce(
+class FileStreamSource(
     val sqlContext: SQLContext,
     val metadataPath: String,
     val path: String) extends Source with Logging {

@@ -89,8 +89,8 @@ class FileStreamSouce(
     }
   }
 
-  def restart(): FileStreamSouce = {
-    new FileStreamSouce(sqlContext, metadataPath, path)
+  def restart(): FileStreamSource = {
+    new FileStreamSource(sqlContext, metadataPath, path)
   }
 
   private def sparkContext = sqlContext.sparkContext
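
As the diff makes explicit, restart() is plain re-construction: it returns a new FileStreamSource built from the same (sqlContext, metadataPath, path) triple. A short sketch with hypothetical paths:

    val src = new FileStreamSource(sqlContext, "/data/in/_metadata", "/data/in")
    // Equivalent to new FileStreamSource(sqlContext, "/data/in/_metadata", "/data/in")
    val resumed = src.restart()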
