Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

support arrow functions with ExprPlanner #26

Merged
merged 9 commits into from
Jul 9, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,13 @@ datafusion = "39"
clap = "4"
tokio = { version = "1.37", features = ["full"] }

[patch.crates-io]
# TODO: remove this once we upgrade to DataFusion 40.0
datafusion = { git = "https://github.com/samuelcolvin/datafusion.git", branch = "register_user_defined_sql_planners" }
datafusion-execution = { git = "https://github.com/samuelcolvin/datafusion.git", branch = "register_user_defined_sql_planners" }
datafusion-common = { git = "https://github.com/samuelcolvin/datafusion.git", branch = "register_user_defined_sql_planners" }
datafusion-expr = { git = "https://github.com/samuelcolvin/datafusion.git", branch = "register_user_defined_sql_planners" }

[lints.clippy]
dbg_macro = "deny"
print_stdout = "deny"
Expand Down
1 change: 1 addition & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ pub fn register_all(registry: &mut dyn FunctionRegistry) -> Result<()> {
Ok(()) as Result<()>
})?;
registry.register_function_rewrite(Arc::new(rewrite::JsonFunctionRewriter))?;
registry.register_user_defined_sql_planner(Arc::new(rewrite::JsonSQLPlanner))?;

Ok(())
}
82 changes: 50 additions & 32 deletions src/rewrite.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,10 @@ use datafusion_common::config::ConfigOptions;
use datafusion_common::tree_node::Transformed;
use datafusion_common::DFSchema;
use datafusion_common::Result;
use datafusion_expr::expr::ScalarFunction;
use datafusion_expr::expr::{Cast, Expr, ScalarFunction};
use datafusion_expr::expr_rewriter::FunctionRewrite;
use datafusion_expr::Expr;
use datafusion_expr::planner::{PlannerResult, RawBinaryExpr, UserDefinedSQLPlanner};
use datafusion_expr::sqlparser::ast::BinaryOperator;

pub(crate) struct JsonFunctionRewriter;

Expand All @@ -15,25 +16,38 @@ impl FunctionRewrite for JsonFunctionRewriter {
}

/// Rewrites JSON expressions after planning:
/// - a `Cast` around `json_get(..)` is replaced by the matching typed UDF
///   (see `optimise_json_get_cast`), and
/// - nested `json_get`-family calls are flattened (see `unnest_json_calls`).
///
/// NOTE(review): this span contained both the pre- and post-change bodies
/// from the diff; only the new implementation is kept here.
fn rewrite(&self, expr: Expr, _schema: &DFSchema, _config: &ConfigOptions) -> Result<Transformed<Expr>> {
    // Each helper returns `None` when it does not apply; in that case the
    // expression is handed back unchanged.
    let transform = match &expr {
        Expr::Cast(cast) => optimise_json_get_cast(cast),
        Expr::ScalarFunction(func) => unnest_json_calls(func),
        _ => None,
    };
    Ok(transform.unwrap_or_else(|| Transformed::no(expr)))
}
}

/// If a `json_get(..)` call is wrapped in a cast to a supported scalar type,
/// replace the cast+call pair with the dedicated typed UDF
/// (e.g. `json_get(..)::int` becomes `json_get_int(..)`).
///
/// Returns `None` when the inner expression is not a `json_get` call or the
/// target type has no typed counterpart, leaving the original cast in place.
fn optimise_json_get_cast(cast: &Cast) -> Option<Transformed<Expr>> {
    let inner = match &*cast.expr {
        Expr::ScalarFunction(sf) if sf.func.name() == "json_get" => sf,
        _ => return None,
    };
    let typed_udf = match &cast.data_type {
        DataType::Boolean => crate::json_get_bool::json_get_bool_udf(),
        DataType::Float32 | DataType::Float64 => crate::json_get_float::json_get_float_udf(),
        DataType::Int32 | DataType::Int64 => crate::json_get_int::json_get_int_udf(),
        DataType::Utf8 => crate::json_get_str::json_get_str_udf(),
        // Any other target type: keep the explicit cast.
        _ => return None,
    };
    Some(Transformed::yes(Expr::ScalarFunction(ScalarFunction {
        func: typed_udf,
        args: inner.args.clone(),
    })))
}

// Replace nested JSON functions e.g. `json_get(json_get(col, 'foo'), 'bar')` with `json_get(col, 'foo', 'bar')`
fn unnest_json_calls(func: &ScalarFunction) -> Option<ScalarFunction> {
fn unnest_json_calls(func: &ScalarFunction) -> Option<Transformed<Expr>> {
if !matches!(
func.func.name(),
"json_get" | "json_get_bool" | "json_get_float" | "json_get_int" | "json_get_json" | "json_get_str"
Expand All @@ -53,26 +67,30 @@ fn unnest_json_calls(func: &ScalarFunction) -> Option<ScalarFunction> {
args.extend(outer_args_iter.cloned());
// See #23, unnest only when all lookup arguments are literals
if args.iter().skip(1).all(|arg| matches!(arg, Expr::Literal(_))) {
Some(ScalarFunction {
Some(Transformed::yes(Expr::ScalarFunction(ScalarFunction {
func: func.func.clone(),
args,
})
})))
} else {
None
}
}

fn switch_json_get(cast_data_type: &DataType, args: &[Expr]) -> Option<Transformed<Expr>> {
let func = match cast_data_type {
DataType::Boolean => crate::json_get_bool::json_get_bool_udf(),
DataType::Float64 | DataType::Float32 => crate::json_get_float::json_get_float_udf(),
DataType::Int64 | DataType::Int32 => crate::json_get_int::json_get_int_udf(),
DataType::Utf8 => crate::json_get_str::json_get_str_udf(),
_ => return None,
};
let f = ScalarFunction {
func,
args: args.to_vec(),
};
Some(Transformed::yes(Expr::ScalarFunction(f)))
/// Plans the JSON SQL operators (`->`, `->>`, `?`) as calls to the
/// corresponding JSON UDFs via DataFusion's `UserDefinedSQLPlanner` hook.
#[derive(Debug, Default)]
pub struct JsonSQLPlanner;

impl UserDefinedSQLPlanner for JsonSQLPlanner {
    /// Maps a binary operator to a JSON UDF:
    /// `->` => `json_get`, `->>` => `json_get_str`, `?` => `json_contains`.
    /// Any other operator is handed back unchanged for default planning.
    fn plan_binary_op(&self, expr: RawBinaryExpr, _schema: &DFSchema) -> Result<PlannerResult<RawBinaryExpr>> {
        let func = match &expr.op {
            BinaryOperator::Arrow => crate::json_get::json_get_udf(),
            BinaryOperator::LongArrow => crate::json_get_str::json_get_str_udf(),
            BinaryOperator::Question => crate::json_contains::json_contains_udf(),
            _ => return Ok(PlannerResult::Original(expr)),
        };

        // `expr` is owned here, so move the operands instead of cloning them.
        Ok(PlannerResult::Planned(Expr::ScalarFunction(ScalarFunction {
            func,
            args: vec![expr.left, expr.right],
        })))
    }
}
190 changes: 185 additions & 5 deletions tests/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -362,7 +362,7 @@ async fn test_json_length_object_nested() {
async fn test_json_contains_large() {
let expected = [
"+----------+",
"| COUNT(*) |",
"| count(*) |",
"+----------+",
"| 4 |",
"+----------+",
Expand All @@ -378,7 +378,7 @@ async fn test_json_contains_large() {
async fn test_json_contains_large_vec() {
let expected = [
"+----------+",
"| COUNT(*) |",
"| count(*) |",
"+----------+",
"| 0 |",
"+----------+",
Expand All @@ -394,7 +394,7 @@ async fn test_json_contains_large_vec() {
async fn test_json_contains_large_both() {
let expected = [
"+----------+",
"| COUNT(*) |",
"| count(*) |",
"+----------+",
"| 0 |",
"+----------+",
Expand All @@ -410,7 +410,7 @@ async fn test_json_contains_large_both() {
async fn test_json_contains_large_params() {
let expected = [
"+----------+",
"| COUNT(*) |",
"| count(*) |",
"+----------+",
"| 4 |",
"+----------+",
Expand All @@ -426,7 +426,7 @@ async fn test_json_contains_large_params() {
async fn test_json_contains_large_both_params() {
let expected = [
"+----------+",
"| COUNT(*) |",
"| count(*) |",
"+----------+",
"| 4 |",
"+----------+",
Expand Down Expand Up @@ -713,3 +713,183 @@ async fn test_json_get_union_array_skip_double_nested() {
let batches = run_query(sql).await.unwrap();
assert_batches_eq!(expected, &batches);
}

#[tokio::test]
async fn test_arrow() {
let batches = run_query("select name, json_data->'foo' from test").await.unwrap();

let expected = [
"+------------------+--------------------------------------+",
"| name | json_get(test.json_data,Utf8(\"foo\")) |",
Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@alamb it's worth noting that using UserDefinedSQLPlanner means the default title for columns becomes the applied function rather than a pretty representation of the actual operator.

This differs from what we got from apache/datafusion#11137, see #22 where the column title for this was test.json_data -> Utf8(\"foo\")

I don't think this is a show stopper, just pointing it out.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

One potential workaround is to use Expr::alias to name the expression something less unpleasant

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, I actually tried that; unfortunately it got even more ugly when you have a complex type and the column heading becomes something like alias(json_get(test.json_data,Utf8(\"foo\")), "json_data ->> "foo"") = "bar"

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That certainly sounds worse -- I would expect the heading to only be "json_data ->> "foo" (not alias(...)) which is how it works for simple types

use alias

> select count(*), count(distinct stop_name) as foo, trip_tid  from stops group by trip_tid limit 10;
+----------+-----+----------+
| count(*) | foo | trip_tid |
+----------+-----+----------+
| 18       | 0   | 54778923 |
| 17       | 0   | 54787869 |
| 1        | 0   | 54787875 |
| 5        | 2   | 54756517 |
| 6        | 2   | 54756510 |
| 19       | 0   | 54825475 |
| 18       | 0   | 54807326 |
| 27       | 0   | 54825423 |
| 20       | 0   | 54825510 |
| 20       | 0   | 54807384 |
+----------+-----+----------+
10 row(s) fetched.
Elapsed 0.043 seconds.

Non alias

> select count(*), count(distinct stop_name), trip_tid  from stops group by trip_tid limit 10;
+----------+---------------------------------+----------+
| count(*) | count(DISTINCT stops.stop_name) | trip_tid |
+----------+---------------------------------+----------+
| 6        | 0                               | 54804223 |
| 17       | 1                               | 54804238 |
| 2        | 0                               | 54804334 |
| 22       | 0                               | 54779192 |
| 19       | 0                               | 54825475 |
| 18       | 0                               | 54807326 |
| 27       | 0                               | 54825423 |
| 20       | 0                               | 54825510 |
| 20       | 0                               | 54807384 |
| 15       | 0                               | 54825364 |
+----------+---------------------------------+----------+
10 row(s) fetched.
Elapsed 0.014 seconds.

Can you provide an example of what you mean by "complex types"? It sounds like it would be good bug / limitation to fix

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Okay, I've had another try, and manually inserting aliases is mostly working — see the penultimate commit below.

The only remaining strange case is when doing (field->'foo')::int, where my alias logic means that the cast is lost in the alias. See the tests test_arrow_cast_int and test_arrow_double_nested_cast.

I can't for the life of me work out how to make the alias (for test_arrow_cast_int) (Utf8(\"{\"foo\": 42}\") -> Utf8(\"foo\"))::int instead of just Utf8(\"{\"foo\": 42}\") -> Utf8(\"foo\").

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

(ignore that message)

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

apache/datafusion#11306 is causing issues.

"+------------------+--------------------------------------+",
"| object_foo | {str=abc} |",
"| object_foo_array | {array=[1]} |",
"| object_foo_obj | {object={}} |",
"| object_foo_null | {null=true} |",
"| object_bar | {null=} |",
"| list_foo | {null=} |",
"| invalid_json | {null=} |",
"+------------------+--------------------------------------+",
];
assert_batches_eq!(expected, &batches);
}

#[tokio::test]
async fn test_arrow_plan() {
    // The `->` operator should plan directly to a `json_get` projection.
    let plan = logical_plan(r#"explain select json_data->'foo' from test"#).await;
    assert_eq!(
        plan,
        [
            "Projection: json_get(test.json_data, Utf8(\"foo\"))",
            " TableScan: test projection=[json_data]",
        ]
    );
}

#[tokio::test]
async fn test_long_arrow() {
    // The `->>` operator should plan to `json_get_str`, yielding plain
    // strings for string values and null for everything else.
    let results = run_query("select name, json_data->>'foo' from test").await.unwrap();

    let expected = [
        "+------------------+------------------------------------------+",
        "| name | json_get_str(test.json_data,Utf8(\"foo\")) |",
        "+------------------+------------------------------------------+",
        "| object_foo | abc |",
        "| object_foo_array | |",
        "| object_foo_obj | |",
        "| object_foo_null | |",
        "| object_bar | |",
        "| list_foo | |",
        "| invalid_json | |",
        "+------------------+------------------------------------------+",
    ];
    assert_batches_eq!(expected, &results);
}

#[tokio::test]
async fn test_long_arrow_plan() {
    // The `->>` operator should plan directly to a `json_get_str` projection.
    let plan = logical_plan(r#"explain select json_data->>'foo' from test"#).await;
    assert_eq!(
        plan,
        [
            "Projection: json_get_str(test.json_data, Utf8(\"foo\"))",
            " TableScan: test projection=[json_data]",
        ]
    );
}

#[tokio::test]
async fn test_long_arrow_cast_str() {
    // Comparing the `->>` result with a string literal should work directly,
    // since `json_get_str` already returns Utf8.
    let sql = r"select name, (json_data->>'foo')='abc' from test";
    let results = run_query(sql).await.unwrap();

    let expected = [
        "+------------------+--------------------------------------------------------+",
        "| name | json_get_str(test.json_data,Utf8(\"foo\")) = Utf8(\"abc\") |",
        "+------------------+--------------------------------------------------------+",
        "| object_foo | true |",
        "| object_foo_array | |",
        "| object_foo_obj | |",
        "| object_foo_null | |",
        "| object_bar | |",
        "| list_foo | |",
        "| invalid_json | |",
        "+------------------+--------------------------------------------------------+",
    ];
    assert_batches_eq!(expected, &results);
}

#[tokio::test]
async fn test_long_arrow_cast_int() {
    // Casting the `->` result to int should be rewritten to `json_get_int`,
    // so the value comes back as a plain Int64.
    let batches = run_query(r#"select ('{"foo": 42}'->'foo')::int"#).await.unwrap();
    assert_eq!(display_val(batches).await, (DataType::Int64, "42".to_string()));
}

#[tokio::test]
async fn test_arrow_cast_plan() {
    // The cast rewrite swaps in `json_get_int` but aliases the projection back
    // to the original `json_get` name so the column title is stable.
    let plan = logical_plan(r#"explain select (json_data->'foo')::int from test"#).await;
    assert_eq!(
        plan,
        [
            "Projection: json_get_int(test.json_data, Utf8(\"foo\")) AS json_get(test.json_data,Utf8(\"foo\"))",
            " TableScan: test projection=[json_data]",
        ]
    );
}

#[tokio::test]
async fn test_arrow_nested() {
    // Chained `->` operators followed by a cast: only the array row has an
    // int at index 0, every other row yields null.
    let sql = "select name, (json_data->'foo'->0)::int from test";
    let results = run_query(sql).await.unwrap();

    let expected = [
        "+------------------+---------------------------------------------------------+",
        "| name | json_get(json_get(test.json_data,Utf8(\"foo\")),Int64(0)) |",
        "+------------------+---------------------------------------------------------+",
        "| object_foo | |",
        "| object_foo_array | 1 |",
        "| object_foo_obj | |",
        "| object_foo_null | |",
        "| object_bar | |",
        "| list_foo | |",
        "| invalid_json | |",
        "+------------------+---------------------------------------------------------+",
    ];
    assert_batches_eq!(expected, &results);
}

#[tokio::test]
async fn test_arrow_nested_plan() {
    // Nested `->` calls are flattened into one variadic `json_get`, aliased
    // back to the nested form so the column title is unchanged.
    let plan = logical_plan(r#"explain select json_data->'foo'->0 from test"#).await;
    assert_eq!(
        plan,
        [
            "Projection: json_get(test.json_data, Utf8(\"foo\"), Int64(0)) AS json_get(json_get(test.json_data,Utf8(\"foo\")),Int64(0))",
            " TableScan: test projection=[json_data]",
        ]
    );
}

#[tokio::test]
async fn test_arrow_nested_columns() {
    // Column references (not literals) as lookup keys: chained `->` must not
    // be flattened here (see #23), but should still evaluate correctly.
    let sql = "select json_data->str_key1->str_key2 v from more_nested";
    let results = run_query(sql).await.unwrap();

    let expected = [
        "+-------------+",
        "| v |",
        "+-------------+",
        "| {array=[0]} |",
        "| {null=} |",
        "| {null=true} |",
        "+-------------+",
    ];
    assert_batches_eq!(expected, &results);
}

#[tokio::test]
async fn test_arrow_nested_double_columns() {
    // Three chained column-keyed lookups, mixing string and int keys.
    let sql = "select json_data->str_key1->str_key2->int_key v from more_nested";
    let results = run_query(sql).await.unwrap();

    let expected = [
        "+---------+",
        "| v |",
        "+---------+",
        "| {int=0} |",
        "| {null=} |",
        "| {null=} |",
        "+---------+",
    ];
    assert_batches_eq!(expected, &results);
}

#[tokio::test]
async fn test_lexical_precedence_wrong() {
    // NOTE(review): the parser appears to bind `=` tighter than `->>` here,
    // so the Boolean comparison reaches `json_get_str` as its second
    // argument and planning fails — the error message pins that behavior.
    let err = run_query(r#"select '{"a": "b"}'->>'a'='b' as v"#).await.unwrap_err();
    assert_eq!(
        err.to_string(),
        "Error during planning: Unexpected argument type to 'json_get_str' at position 2, expected string or int, got Boolean."
    )
}
Loading