
Commit

add testcase
jackylk committed Dec 3, 2014
1 parent 578d167 commit f57f15c
Showing 3 changed files with 89 additions and 1 deletion.
@@ -58,7 +58,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
   self =>

   @transient
-  protected[sql] lazy val catalog: Catalog = new SimpleCatalog(true)
+  protected[sql] lazy val catalog: Catalog = new SimpleCatalog(caseSensitive)

   @transient
   protected[sql] lazy val functionRegistry: FunctionRegistry = new SimpleFunctionRegistry
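The one-line change above stops hard-coding case sensitivity and instead passes a caseSensitive flag into SimpleCatalog, so temp-table lookups can ignore identifier casing when SQLConf.CASE_SENSITIVE is set to "false" (as the new test context below does). A minimal standalone sketch of the behavior that flag controls; this is a toy, not Spark's SimpleCatalog, and the lower-casing normalization is an assumption about how case-insensitive lookup is implemented:

import scala.collection.mutable

// Toy catalog illustrating case-(in)sensitive table lookup; names and structure are
// illustrative only and do not mirror Spark's Catalog API.
class ToyCatalog(caseSensitive: Boolean) {
  private val tables = mutable.Map.empty[String, String]

  // When case-insensitive, normalize names before storing and looking them up.
  private def key(name: String): String =
    if (caseSensitive) name else name.toLowerCase

  def registerTable(name: String, plan: String): Unit = tables(key(name)) = plan

  def lookupTable(name: String): Option[String] = tables.get(key(name))
}

object ToyCatalogDemo extends App {
  val catalog = new ToyCatalog(caseSensitive = false)
  catalog.registerTable("caseInsensitiveTable", "<logical plan>")

  // Resolves despite the different casing -- the scenario the new test exercises.
  assert(catalog.lookupTable("CASEINSENSITIVETABLE").isDefined)
  println("case-insensitive lookup succeeded")
}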
@@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.test

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLConf, SQLContext}

/** A case insensitive SQLContext that can be used for local testing. */
object TestCaseInsensitiveSQLContext
  extends SQLContext(
    new SparkContext(
      "local[2]",
      "CaseInsensitiveSQLContext",
      new SparkConf().set(SQLConf.CASE_SENSITIVE, "false"))) {

  /** Fewer partitions to speed up testing. */
  override private[spark] def numShufflePartitions: Int =
    getConf(SQLConf.SHUFFLE_PARTITIONS, "5").toInt
}

@@ -0,0 +1,53 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql

import java.util.TimeZone

import org.apache.spark.sql.test.TestCaseInsensitiveSQLContext
import org.scalatest.BeforeAndAfterAll

/* Implicits */

import org.apache.spark.sql.test.TestCaseInsensitiveSQLContext._

object CaseInsensitiveTestData {
  case class StringData(s: String)
  val table = TestCaseInsensitiveSQLContext.sparkContext.parallelize(StringData("test") :: Nil)
  table.registerTempTable("caseInsensitiveTable")
}

class SQLQueryCaseInsensitiveSuite extends QueryTest with BeforeAndAfterAll {
  CaseInsensitiveTestData

  var origZone: TimeZone = _

  override protected def beforeAll() {
    origZone = TimeZone.getDefault
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"))
  }

  override protected def afterAll() {
    TimeZone.setDefault(origZone)
  }

  test("SPARK-4699 case sensitivity SQL query") {
    checkAnswer(sql("SELECT S FROM CASEINSENSITIVETABLE"), "test")
  }
}
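For context, the suite leans on the implicits imported from TestCaseInsensitiveSQLContext: an RDD of case-class rows is registered as a temp table and then queried through sql(...) with identifiers in a different casing. A usage sketch of the same pattern outside ScalaTest; the Person case class and table name here are illustrative and not part of this commit:

import org.apache.spark.sql.test.TestCaseInsensitiveSQLContext
import org.apache.spark.sql.test.TestCaseInsensitiveSQLContext._

object CaseInsensitiveDemo extends App {
  case class Person(name: String)

  // Register a temp table under a lower-case name...
  val people = TestCaseInsensitiveSQLContext.sparkContext.parallelize(Person("alice") :: Nil)
  people.registerTempTable("people")

  // ...and query it with upper-case identifiers; with CASE_SENSITIVE set to "false"
  // both the table name and the NAME column should still resolve.
  sql("SELECT NAME FROM PEOPLE").collect().foreach(println)
}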
