Skip to content

Commit

Permalink
Release host buffers when read schema is empty (#6221)
Browse files Browse the repository at this point in the history
Signed-off-by: Firestarman <firestarmanllc@gmail.com>
  • Loading branch information
firestarman authored Aug 4, 2022
1 parent 528c67b commit 6008322
Show file tree
Hide file tree
Showing 2 changed files with 16 additions and 2 deletions.
17 changes: 15 additions & 2 deletions integration_tests/src/main/python/avro_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,9 @@
from spark_session import with_cpu_session, with_gpu_session
import pytest

from asserts import assert_gpu_and_cpu_are_equal_collect
from asserts import assert_gpu_and_cpu_are_equal_collect, assert_gpu_and_cpu_row_counts_equal
from data_gen import *
from marks import *
from pyspark.sql.types import *

if os.environ.get('INCLUDE_SPARK_AVRO_JAR', 'false') == 'false':
pytestmark = pytest.mark.skip(reason=str("INCLUDE_SPARK_AVRO_JAR is disabled"))
Expand Down Expand Up @@ -137,3 +136,17 @@ def test_avro_read_with_corrupt_files(spark_tmp_path, reader_type, v1_enabled_li
assert_gpu_and_cpu_are_equal_collect(
lambda spark : spark.read.format("avro").load([first_dpath, second_dpath, third_dpath]),
conf=all_confs)


@pytest.mark.parametrize('v1_enabled_list', ["avro", ""], ids=["v1", "v2"])
@pytest.mark.parametrize('reader_type', ['PERFILE', 'MULTITHREADED'])
def test_read_count(spark_tmp_path, v1_enabled_list, reader_type):
    # Row-count-only queries let Spark prune the read schema down to empty,
    # so this checks that the CPU and GPU report the same number of rows
    # for the generated Avro files under both reader types and both
    # datasource (v1/v2) code paths.
    data_path = spark_tmp_path + '/AVRO_DATA'
    gen_avro_files([('_c0', int_gen)], data_path)

    conf_overrides = {
        'spark.rapids.sql.format.avro.reader.type': reader_type,
        'spark.sql.sources.useV1SourceList': v1_enabled_list}
    read_confs = copy_and_update(_enable_all_types_conf, conf_overrides)
    assert_gpu_and_cpu_row_counts_equal(
        lambda spark: spark.read.format("avro").load(data_path),
        conf=read_confs)
Original file line number Diff line number Diff line change
Expand Up @@ -777,6 +777,7 @@ class GpuMultiFileCloudAvroPartitionReader(

val bufAndSize: Array[(HostMemoryBuffer, Long)] = if (readDataSchema.isEmpty) {
// Overload the size to be the number of rows with null buffer
hostBuffers.foreach(_._1.safeClose(new Exception))
Array((null, totalRowsNum))
} else if (isDone) {
// got close before finishing, return null buffer and zero size
Expand Down

0 comments on commit 6008322

Please sign in to comment.