Skip to content

Commit

Permalink
Tests pass with Spark 3.3.0
Browse files Browse the repository at this point in the history
  • Loading branch information
andygrove committed Mar 8, 2022
1 parent c5e6c58 commit 8b6796f
Show file tree
Hide file tree
Showing 6 changed files with 6 additions and 29 deletions.
6 changes: 2 additions & 4 deletions integration_tests/src/main/python/json_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,8 +236,7 @@ def test_basic_json_read(std_input_path, filename, schema, read_func, allow_non_
@pytest.mark.parametrize('read_func', [read_json_df, read_json_sql])
@pytest.mark.parametrize('ansi_enabled', ["true", "false"])
@pytest.mark.parametrize('time_parser_policy', [
pytest.param('LEGACY', marks=[pytest.mark.allow_non_gpu('FileSourceScanExec'), \
pytest.mark.xfail(is_spark_330_or_later(), reason="https://github.com/NVIDIA/spark-rapids/issues/4912")]),
pytest.param('LEGACY', marks=pytest.mark.allow_non_gpu('FileSourceScanExec')),
'CORRECTED',
'EXCEPTION'
])
Expand All @@ -263,8 +262,7 @@ def test_json_read_valid_dates(std_input_path, filename, schema, read_func, ansi
@pytest.mark.parametrize('read_func', [read_json_df, read_json_sql])
@pytest.mark.parametrize('ansi_enabled', ["true", "false"])
@pytest.mark.parametrize('time_parser_policy', [
pytest.param('LEGACY', marks=[pytest.mark.allow_non_gpu('FileSourceScanExec'), \
pytest.mark.xfail(is_spark_330_or_later(), reason="https://github.com/NVIDIA/spark-rapids/issues/4912")]),
pytest.param('LEGACY', marks=pytest.mark.allow_non_gpu('FileSourceScanExec')),
'CORRECTED',
'EXCEPTION'
])
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,6 @@ import org.apache.spark.sql.catalyst.json.JSONOptions

trait Spark30Xuntil33XFileOptionsShims extends SparkShims {

/**
 * Extracts the date format configured for reading from a set of file options.
 *
 * @param fileOptions either a [[CSVOptions]] or a [[JSONOptions]] instance
 * @return the configured date format wrapped in an Option (None if the
 *         underlying format is null)
 * @throws RuntimeException if the options are neither CSV nor JSON options
 */
def dateFormatInRead(fileOptions: Serializable): Option[String] =
  fileOptions match {
    // Option(...) maps a null dateFormat to None rather than Some(null)
    case opts: CSVOptions => Option(opts.dateFormat)
    case opts: JSONOptions => Option(opts.dateFormat)
    case other => throw new RuntimeException("Wrong file options.")
  }

def timestampFormatInRead(fileOptions: Serializable): Option[String] = {
fileOptions match {
case csvOpts: CSVOptions => Option(csvOpts.timestampFormat)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,6 @@ import org.apache.spark.sql.catalyst.json.JSONOptions

trait Spark33XFileOptionsShims extends Spark321PlusShims {

/**
 * Returns the date format used when reading, as exposed by Spark 3.3.0+
 * options classes via their `dateFormatInRead` accessor.
 *
 * @param fileOptions either a [[CSVOptions]] or a [[JSONOptions]] instance
 * @return the read-side date format, if one is configured
 * @throws RuntimeException if the options are neither CSV nor JSON options
 */
def dateFormatInRead(fileOptions: Serializable): Option[String] =
  fileOptions match {
    // In Spark 3.3.0+ these accessors already return Option[String]
    case opts: CSVOptions => opts.dateFormatInRead
    case opts: JSONOptions => opts.dateFormatInRead
    case other => throw new RuntimeException("Wrong file options.")
  }

def timestampFormatInRead(fileOptions: Serializable): Option[String] = {
fileOptions match {
case csvOpts: CSVOptions => csvOpts.dateFormatInRead
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -218,9 +218,8 @@ object GpuCSVScan {
// so we fall back to CPU
meta.willNotWorkOnGpu(s"GpuCSVScan does not support timeParserPolicy=LEGACY")
}
ShimLoader.getSparkShims.dateFormatInRead(parsedOptions).foreach { dateFormat =>
DateUtils.tagAndGetCudfFormat(meta, dateFormat, parseString = true)
}
DateUtils.tagAndGetCudfFormat(meta,
GpuCsvUtils.dateFormatInRead(parsedOptions), parseString = true)
}

if (readSchema.map(_.dataType).contains(TimestampType)) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -303,9 +303,6 @@ trait SparkShims {
*/
def getLegacyStatisticalAggregate(): Boolean


def dateFormatInRead(fileOptions: Serializable): Option[String]

def timestampFormatInRead(fileOptions: Serializable): Option[String]

def neverReplaceShowCurrentNamespaceCommand: ExecRule[_ <: SparkPlan]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -147,9 +147,8 @@ object GpuJsonScan {
})

if (readSchema.map(_.dataType).contains(DateType)) {
ShimLoader.getSparkShims.dateFormatInRead(parsedOptions).foreach { dateFormat =>
DateUtils.tagAndGetCudfFormat(meta, dateFormat, parseString = true)
}
DateUtils.tagAndGetCudfFormat(meta,
GpuJsonUtils.dateFormatInRead(parsedOptions), parseString = true)
}

if (readSchema.map(_.dataType).contains(TimestampType)) {
Expand Down

0 comments on commit 8b6796f

Please sign in to comment.