From 4568d625695b2f305352e44a819a88f01e1cf349 Mon Sep 17 00:00:00 2001
From: Alexander Kuzmenkov <36882414+akuzm@users.noreply.github.com>
Date: Wed, 10 Apr 2024 12:25:26 +0200
Subject: [PATCH] Rename DT_Default decompression type to DT_Scalar

---
 tsl/src/nodes/decompress_chunk/compressed_batch.c | 10 +++++-----
 tsl/src/nodes/decompress_chunk/compressed_batch.h | 14 ++++++++++++--
 2 files changed, 17 insertions(+), 7 deletions(-)

diff --git a/tsl/src/nodes/decompress_chunk/compressed_batch.c b/tsl/src/nodes/decompress_chunk/compressed_batch.c
index 3407b45f125..8865b977e33 100644
--- a/tsl/src/nodes/decompress_chunk/compressed_batch.c
+++ b/tsl/src/nodes/decompress_chunk/compressed_batch.c
@@ -177,7 +177,7 @@ decompress_column(DecompressContext *dcontext, DecompressBatchState *batch_state
          * The column will have a default value for the entire batch,
          * set it now.
          */
-        column_values->decompression_type = DT_Default;
+        column_values->decompression_type = DT_Scalar;
 
         *column_values->output_value =
             getmissingattr(dcontext->decompressed_slot->tts_tupleDescriptor,
@@ -443,7 +443,7 @@ compute_plain_qual(DecompressContext *dcontext, DecompressBatchState *batch_stat
          * with this default value, check if it passes the predicate, and apply
          * it to the entire batch.
          */
-        Assert(column_values->decompression_type == DT_Default);
+        Assert(column_values->decompression_type == DT_Scalar);
 
         /*
          * We saved the actual default value into the decompressed scan slot
@@ -547,7 +547,7 @@ compute_plain_qual(DecompressContext *dcontext, DecompressBatchState *batch_stat
     /* Translate the result if the column had a default value. */
     if (column_values->arrow == NULL)
     {
-        Assert(column_values->decompression_type == DT_Default);
+        Assert(column_values->decompression_type == DT_Scalar);
         if (!(default_value_predicate_result[0] & 1))
         {
             /*
@@ -837,7 +837,7 @@ compressed_batch_set_compressed_tuple(DecompressContext *dcontext,
 
         Assert(i < dcontext->num_data_columns);
         CompressedColumnValues *column_values = &batch_state->compressed_columns[i];
-        column_values->decompression_type = DT_Default;
+        column_values->decompression_type = DT_Scalar;
 
         /*
          * Note that if it's not a by-value type, we should copy it into
@@ -1038,7 +1038,7 @@ make_next_tuple(DecompressBatchState *batch_state, uint16 arrow_row, int num_dat
         else
         {
             /* A compressed column with default value, do nothing. */
-            Assert(column_values->decompression_type == DT_Default);
+            Assert(column_values->decompression_type == DT_Scalar);
         }
     }
 
diff --git a/tsl/src/nodes/decompress_chunk/compressed_batch.h b/tsl/src/nodes/decompress_chunk/compressed_batch.h
index e70142a82fd..b4e93d1d8a8 100644
--- a/tsl/src/nodes/decompress_chunk/compressed_batch.h
+++ b/tsl/src/nodes/decompress_chunk/compressed_batch.h
@@ -14,13 +14,23 @@ typedef struct ArrowArray ArrowArray;
 typedef enum
 {
     DT_ArrowTextDict = -4,
+
     DT_ArrowText = -3,
-    DT_Default = -2,
+
+    /*
+     * The decompressed value is already in the decompressed slot. This is used
+     * for segmentby and compressed columns with default value in batch.
+     */
+    DT_Scalar = -2,
+
     DT_Iterator = -1,
+
     DT_Invalid = 0,
+
     /*
      * Any positive number is also valid for the decompression type. It means
-     * arrow array of a fixed-size by-value type, with size given by the number.
+     * arrow array of a fixed-size by-value type, with size in bytes given by
+     * the number.
     */
 } DecompressionType;
 
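
For readers unfamiliar with how DecompressionType is consumed, the sketch below illustrates the convention described in the enum comments: negative values are special markers (DT_Scalar meaning the value already sits in the decompressed slot, which after this patch covers both segmentby columns and columns taking a default value for the whole batch; DT_Iterator meaning row-by-row decompression), while any positive value is the element size in bytes of a fixed-size by-value arrow array. This is a minimal standalone illustration, not part of the patch: the simplified ColumnValues struct and the describe_column function are hypothetical and do not exist in the TimescaleDB code.

/*
 * Illustrative sketch only: how a consumer might branch on DecompressionType.
 * The enum mirrors the patched header; ColumnValues and describe_column are
 * hypothetical simplifications.
 */
#include <stdio.h>

typedef enum
{
    DT_ArrowTextDict = -4,
    DT_ArrowText = -3,
    DT_Scalar = -2,   /* value already stored in the decompressed slot */
    DT_Iterator = -1, /* decompressed row by row through an iterator */
    DT_Invalid = 0,
    /* any positive value: fixed-size by-value arrow array, element size in bytes */
} DecompressionType;

typedef struct
{
    DecompressionType decompression_type;
} ColumnValues;

static void
describe_column(const ColumnValues *column)
{
    if (column->decompression_type > 0)
        printf("arrow array of %d-byte by-value elements\n", (int) column->decompression_type);
    else if (column->decompression_type == DT_Scalar)
        printf("single scalar, already placed in the decompressed slot\n");
    else if (column->decompression_type == DT_Iterator)
        printf("decompressed row by row through an iterator\n");
    else
        printf("text column or invalid state\n");
}

int
main(void)
{
    ColumnValues bigint_column = { .decompression_type = 8 }; /* e.g. an 8-byte by-value type */
    ColumnValues segmentby_column = { .decompression_type = DT_Scalar };

    describe_column(&bigint_column);
    describe_column(&segmentby_column);
    return 0;
}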