From ab894fa8e9de691d1f8eee2698b9948e6dbd961a Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 31 Oct 2023 03:41:57 -0700 Subject: [PATCH 001/104] Work on convert.FromTyped --- libs/config/convert/from_typed.go | 214 +++++++++++++++++++++ libs/config/convert/from_typed_test.go | 247 +++++++++++++++++++++++++ libs/config/convert/struct_info.go | 26 +++ 3 files changed, 487 insertions(+) create mode 100644 libs/config/convert/from_typed.go create mode 100644 libs/config/convert/from_typed_test.go diff --git a/libs/config/convert/from_typed.go b/libs/config/convert/from_typed.go new file mode 100644 index 0000000000..d2c166ff8c --- /dev/null +++ b/libs/config/convert/from_typed.go @@ -0,0 +1,214 @@ +package convert + +import ( + "fmt" + "reflect" + + "github.com/databricks/cli/libs/config" +) + +// FromTyped converts changes made in the typed structure w.r.t. the configuration value +// back to the configuration value, retaining existing location information where possible. +func FromTyped(src any, ref config.Value) (config.Value, error) { + srcv := reflect.ValueOf(src) + + // Dereference pointer if necessary + for srcv.Kind() == reflect.Pointer { + if srcv.IsNil() { + return config.NilValue, nil + } + srcv = srcv.Elem() + } + + switch srcv.Kind() { + case reflect.Struct: + return fromTypedStruct(srcv, ref) + case reflect.Map: + return fromTypedMap(srcv, ref) + case reflect.Slice: + return fromTypedSlice(srcv, ref) + case reflect.String: + return fromTypedString(srcv, ref) + case reflect.Bool: + return fromTypedBool(srcv, ref) + case reflect.Int, reflect.Int32, reflect.Int64: + return fromTypedInt(srcv, ref) + case reflect.Float32, reflect.Float64: + // return fromTypedFloat(srcv, dst) + } + + return config.NilValue, fmt.Errorf("unsupported type: %s", srcv.Kind()) +} + +func fromTypedStruct(src reflect.Value, ref config.Value) (config.Value, error) { + switch ref.Kind() { + case config.KindMap, config.KindNil: + // Nothing to do. + default: + panic("type error") + } + + out := make(map[string]config.Value) + info := getStructInfo(src.Type()) + for k, v := range info.FieldValues(src) { + // Convert the field taking into account the reference value (may be equal to config.NilValue). + nv, err := FromTyped(v.Interface(), ref.Get(k)) + if err != nil { + return config.Value{}, err + } + + if nv != config.NilValue { + out[k] = nv + } + } + + // If the struct was equal to its zero value, emit a nil. + if len(out) == 0 { + return config.NilValue, nil + } + + return config.NewValue(out, ref.Location()), nil + + // what are my options + // totyped / fromtyped at every mutator boundary + // pro's -- minimal changes to existing mutators + // con's -- doesn't hold for all mutators, so we need different interface ANYWAY + // (e.g. get/set config.Value instances) + // cons -- lossy (cannot do all to/from conversions, lose location, lose variables) + + // explicit mutator interface + // pro's -- very clear what's happening + // cons -- all code + tests need to be changed + // + + // need an incremental approach + // thus, we run totyped + fromtyped at mutator boundary + // can eventually move this into the mutators themselves? + // can treat the typed structure as read-only, perhaps? + // can generate wrapper type that exposes a Get + GetValue at every node + +} + +func fromTypedMap(src reflect.Value, ref config.Value) (config.Value, error) { + switch ref.Kind() { + case config.KindMap, config.KindNil: + // Nothing to do. 
+	default:
+		panic("type error")
+	}
+
+	out := make(map[string]config.Value)
+	iter := src.MapRange()
+	for iter.Next() {
+		k := iter.Key().String()
+		v := iter.Value()
+
+		// Convert entry taking into account the reference value (may be equal to config.NilValue).
+		nv, err := FromTyped(v.Interface(), ref.Get(k))
+		if err != nil {
+			return config.Value{}, err
+		}
+
+		// Every entry is represented, even if it is a nil.
+		// Otherwise, a map with zero-valued structs would yield a nil as well.
+		out[k] = nv
+	}
+
+	// If the map has no entries, emit a nil.
+	if len(out) == 0 {
+		return config.NilValue, nil
+	}
+
+	return config.NewValue(out, ref.Location()), nil
+}
+
+func fromTypedSlice(src reflect.Value, ref config.Value) (config.Value, error) {
+	switch ref.Kind() {
+	case config.KindSequence, config.KindNil:
+		// Nothing to do.
+	default:
+		panic("type error")
+	}
+
+	out := make([]config.Value, src.Len())
+	for i := 0; i < src.Len(); i++ {
+		v := src.Index(i)
+
+		// Convert entry taking into account the reference value (may be equal to config.NilValue).
+		nv, err := FromTyped(v.Interface(), ref.Index(i))
+		if err != nil {
+			return config.Value{}, err
+		}
+
+		out[i] = nv
+	}
+
+	// If the slice has no entries, emit a nil.
+	if len(out) == 0 {
+		return config.NilValue, nil
+	}
+
+	return config.NewValue(out, ref.Location()), nil
+}
+
+func fromTypedString(src reflect.Value, ref config.Value) (config.Value, error) {
+	if src.IsZero() {
+		return config.NilValue, nil
+	}
+
+	switch ref.Kind() {
+	case config.KindString:
+		value := src.String()
+		if value == ref.MustString() {
+			return ref, nil
+		}
+
+		return config.V(value), nil
+	case config.KindNil:
+		return config.V(src.String()), nil
+	}
+
+	return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind())
+}
+
+func fromTypedBool(src reflect.Value, ref config.Value) (config.Value, error) {
+	// Note: this means it's not possible to flip a boolean to false on a typed
+	// structure and see it reflected in the dynamic configuration.
+	// This case is not handled as is, so we punt on it until the mutators
+	// modify the dynamic configuration directly.
+ if src.IsZero() { + return config.NilValue, nil + } + + switch ref.Kind() { + case config.KindBool: + value := src.Bool() + if value == ref.MustBool() { + return ref, nil + } + return config.V(value), nil + case config.KindNil: + return config.V(src.Bool()), nil + } + + return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) +} + +func fromTypedInt(src reflect.Value, ref config.Value) (config.Value, error) { + if src.IsZero() { + return config.NilValue, nil + } + + switch ref.Kind() { + case config.KindInt: + value := src.Int() + if value == ref.MustInt() { + return ref, nil + } + return config.V(value), nil + case config.KindNil: + return config.V(src.Bool()), nil + } + + return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) +} diff --git a/libs/config/convert/from_typed_test.go b/libs/config/convert/from_typed_test.go new file mode 100644 index 0000000000..cb5400b9fd --- /dev/null +++ b/libs/config/convert/from_typed_test.go @@ -0,0 +1,247 @@ +package convert + +import ( + "testing" + + "github.com/databricks/cli/libs/config" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestFromTypedStructZeroFields(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + Bar string `json:"bar"` + } + + src := Tmp{} + ref := config.V(map[string]config.Value{ + "foo": config.V("bar"), + "bar": config.V("baz"), + }) + + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.NilValue, nv) +} + +func TestFromTypedStructSetFields(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + Bar string `json:"bar"` + } + + src := Tmp{ + Foo: "foo", + Bar: "bar", + } + + ref := config.NilValue + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.V(map[string]config.Value{ + "foo": config.V("foo"), + "bar": config.V("bar"), + }), nv) +} + +func TestFromTypedStructSetFieldsRetainLocationIfUnchanged(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + Bar string `json:"bar"` + } + + src := Tmp{ + Foo: "bar", + Bar: "qux", + } + + ref := config.V(map[string]config.Value{ + "foo": config.NewValue("bar", config.Location{File: "foo"}), + "bar": config.NewValue("baz", config.Location{File: "bar"}), + }) + + nv, err := FromTyped(src, ref) + require.NoError(t, err) + + // Assert foo has retained its location. + assert.Equal(t, config.NewValue("bar", config.Location{File: "foo"}), nv.Get("foo")) + + // Assert bar lost its location (because it was overwritten). 
+ assert.Equal(t, config.NewValue("qux", config.Location{}), nv.Get("bar")) +} + +func TestFromTypedMapEmpty(t *testing.T) { + var src = map[string]string{} + + ref := config.V(map[string]config.Value{ + "foo": config.V("bar"), + "bar": config.V("baz"), + }) + + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.NilValue, nv) +} + +func TestFromTypedMapNonEmpty(t *testing.T) { + var src = map[string]string{ + "foo": "foo", + "bar": "bar", + } + + ref := config.NilValue + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.V(map[string]config.Value{ + "foo": config.V("foo"), + "bar": config.V("bar"), + }), nv) +} + +func TestFromTypedMapNonEmptyRetainLocationIfUnchanged(t *testing.T) { + var src = map[string]string{ + "foo": "bar", + "bar": "qux", + } + + ref := config.V(map[string]config.Value{ + "foo": config.NewValue("bar", config.Location{File: "foo"}), + "bar": config.NewValue("baz", config.Location{File: "bar"}), + }) + + nv, err := FromTyped(src, ref) + require.NoError(t, err) + + // Assert foo has retained its location. + assert.Equal(t, config.NewValue("bar", config.Location{File: "foo"}), nv.Get("foo")) + + // Assert bar lost its location (because it was overwritten). + assert.Equal(t, config.NewValue("qux", config.Location{}), nv.Get("bar")) +} + +func TestFromTypedMapFieldWithZeroValue(t *testing.T) { + var src = map[string]string{ + "foo": "", + } + + ref := config.NilValue + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.V(map[string]config.Value{ + "foo": config.NilValue, + }), nv) +} + +func TestFromTypedSliceEmpty(t *testing.T) { + var src = []string{} + + ref := config.V([]config.Value{ + config.V("bar"), + config.V("baz"), + }) + + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.NilValue, nv) +} + +func TestFromTypedSliceNonEmpty(t *testing.T) { + var src = []string{ + "foo", + "bar", + } + + ref := config.NilValue + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.V([]config.Value{ + config.V("foo"), + config.V("bar"), + }), nv) +} + +func TestFromTypedSliceNonEmptyRetainLocationIfUnchanged(t *testing.T) { + var src = []string{ + "foo", + "bar", + } + + ref := config.V([]config.Value{ + config.NewValue("foo", config.Location{File: "foo"}), + config.NewValue("baz", config.Location{File: "baz"}), + }) + + nv, err := FromTyped(src, ref) + require.NoError(t, err) + + // Assert foo has retained its location. + assert.Equal(t, config.NewValue("foo", config.Location{File: "foo"}), nv.Index(0)) + + // Assert bar lost its location (because it was overwritten). 
+ assert.Equal(t, config.NewValue("bar", config.Location{}), nv.Index(1)) +} + +func TestFromTypedStringEmpty(t *testing.T) { + var src string + var ref = config.V("string") + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.NilValue, nv) +} + +func TestFromTypedStringNonEmpty(t *testing.T) { + var src = "new" + var ref = config.NilValue + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.V("new"), nv) +} + +func TestFromTypedStringNonEmptyOverwrite(t *testing.T) { + var src = "new" + var ref = config.V("old") + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.V("new"), nv) +} + +func TestFromTypedStringRetainsLocationsIfUnchanged(t *testing.T) { + var src = "foo" + var ref = config.NewValue("foo", config.Location{File: "foo"}) + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.NewValue("foo", config.Location{File: "foo"}), nv) +} + +func TestFromTypedBoolEmpty(t *testing.T) { + var src bool + var ref = config.V(true) + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.NilValue, nv) +} + +func TestFromTypedBoolNonEmpty(t *testing.T) { + var src bool = true + var ref = config.NilValue + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.V(true), nv) +} + +func TestFromTypedBoolNonEmptyOverwrite(t *testing.T) { + var src bool = true + var ref = config.V(false) + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.V(true), nv) +} + +func TestFromTypedBoolRetainsLocationsIfUnchanged(t *testing.T) { + var src = true + var ref = config.NewValue(true, config.Location{File: "foo"}) + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.NewValue(true, config.Location{File: "foo"}), nv) +} diff --git a/libs/config/convert/struct_info.go b/libs/config/convert/struct_info.go index 367b9ecdc4..cc899542c0 100644 --- a/libs/config/convert/struct_info.go +++ b/libs/config/convert/struct_info.go @@ -85,3 +85,29 @@ func buildStructInfo(typ reflect.Type) structInfo { return out } + +func (s *structInfo) FieldValues(v reflect.Value) map[string]reflect.Value { + var out = make(map[string]reflect.Value) + + for k, index := range s.Fields { + fv := v + + // Locate value in struct (it could be an embedded type). 
+ for i, x := range index { + if i > 0 { + if fv.Kind() == reflect.Pointer && fv.Type().Elem().Kind() == reflect.Struct { + if fv.IsNil() { + fv = reflect.Value{} + break + } + fv = fv.Elem() + } + } + fv = fv.Field(x) + } + + out[k] = fv + } + + return out +} From d23e201b61cf8b5cfc89f33e9e53e37f093b8c53 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 31 Oct 2023 04:57:38 -0700 Subject: [PATCH 002/104] More work on ToTyped --- libs/config/convert/from_typed.go | 23 +++++- libs/config/convert/from_typed_test.go | 100 ++++++++++++++++++++++++- 2 files changed, 117 insertions(+), 6 deletions(-) diff --git a/libs/config/convert/from_typed.go b/libs/config/convert/from_typed.go index d2c166ff8c..852c479ca0 100644 --- a/libs/config/convert/from_typed.go +++ b/libs/config/convert/from_typed.go @@ -34,7 +34,7 @@ func FromTyped(src any, ref config.Value) (config.Value, error) { case reflect.Int, reflect.Int32, reflect.Int64: return fromTypedInt(srcv, ref) case reflect.Float32, reflect.Float64: - // return fromTypedFloat(srcv, dst) + return fromTypedFloat(srcv, ref) } return config.NilValue, fmt.Errorf("unsupported type: %s", srcv.Kind()) @@ -207,7 +207,26 @@ func fromTypedInt(src reflect.Value, ref config.Value) (config.Value, error) { } return config.V(value), nil case config.KindNil: - return config.V(src.Bool()), nil + return config.V(src.Int()), nil + } + + return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) +} + +func fromTypedFloat(src reflect.Value, ref config.Value) (config.Value, error) { + if src.IsZero() { + return config.NilValue, nil + } + + switch ref.Kind() { + case config.KindFloat: + value := src.Float() + if value == ref.MustFloat() { + return ref, nil + } + return config.V(value), nil + case config.KindNil: + return config.V(src.Float()), nil } return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) diff --git a/libs/config/convert/from_typed_test.go b/libs/config/convert/from_typed_test.go index cb5400b9fd..f07c75e83c 100644 --- a/libs/config/convert/from_typed_test.go +++ b/libs/config/convert/from_typed_test.go @@ -191,7 +191,7 @@ func TestFromTypedStringEmpty(t *testing.T) { } func TestFromTypedStringNonEmpty(t *testing.T) { - var src = "new" + var src string = "new" var ref = config.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) @@ -199,7 +199,7 @@ func TestFromTypedStringNonEmpty(t *testing.T) { } func TestFromTypedStringNonEmptyOverwrite(t *testing.T) { - var src = "new" + var src string = "new" var ref = config.V("old") nv, err := FromTyped(src, ref) require.NoError(t, err) @@ -207,13 +207,20 @@ func TestFromTypedStringNonEmptyOverwrite(t *testing.T) { } func TestFromTypedStringRetainsLocationsIfUnchanged(t *testing.T) { - var src = "foo" + var src string = "foo" var ref = config.NewValue("foo", config.Location{File: "foo"}) nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, config.NewValue("foo", config.Location{File: "foo"}), nv) } +func TestFromTypedStringTypeError(t *testing.T) { + var src string = "foo" + var ref = config.V(1234) + _, err := FromTyped(src, ref) + require.Error(t, err) +} + func TestFromTypedBoolEmpty(t *testing.T) { var src bool var ref = config.V(true) @@ -239,9 +246,94 @@ func TestFromTypedBoolNonEmptyOverwrite(t *testing.T) { } func TestFromTypedBoolRetainsLocationsIfUnchanged(t *testing.T) { - var src = true + var src bool = true var ref = config.NewValue(true, config.Location{File: "foo"}) nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, 
config.NewValue(true, config.Location{File: "foo"}), nv) } + +func TestFromTypedBoolTypeError(t *testing.T) { + var src bool = true + var ref = config.V("string") + _, err := FromTyped(src, ref) + require.Error(t, err) +} + +func TestFromTypedIntEmpty(t *testing.T) { + var src int + var ref = config.V(true) + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.NilValue, nv) +} + +func TestFromTypedIntNonEmpty(t *testing.T) { + var src int = 1234 + var ref = config.NilValue + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.V(int64(1234)), nv) +} + +func TestFromTypedIntNonEmptyOverwrite(t *testing.T) { + var src int = 1234 + var ref = config.V(1233) + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.V(int64(1234)), nv) +} + +func TestFromTypedIntRetainsLocationsIfUnchanged(t *testing.T) { + var src int = 1234 + var ref = config.NewValue(1234, config.Location{File: "foo"}) + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.NewValue(1234, config.Location{File: "foo"}), nv) +} + +func TestFromTypedIntTypeError(t *testing.T) { + var src int = 1234 + var ref = config.V("string") + _, err := FromTyped(src, ref) + require.Error(t, err) +} + +func TestFromTypedFloatEmpty(t *testing.T) { + var src float64 + var ref = config.V(1.23) + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.NilValue, nv) +} + +func TestFromTypedFloatNonEmpty(t *testing.T) { + var src float64 = 1.23 + var ref = config.NilValue + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.V(1.23), nv) +} + +func TestFromTypedFloatNonEmptyOverwrite(t *testing.T) { + var src float64 = 1.23 + var ref = config.V(1.24) + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.V(1.23), nv) +} + +func TestFromTypedFloatRetainsLocationsIfUnchanged(t *testing.T) { + var src float64 = 1.23 + var ref = config.NewValue(1.23, config.Location{File: "foo"}) + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, config.NewValue(1.23, config.Location{File: "foo"}), nv) +} + +func TestFromTypedFloatTypeError(t *testing.T) { + var src float64 = 1.23 + var ref = config.V("string") + _, err := FromTyped(src, ref) + require.Error(t, err) +} From 36ea807339c0184517ba49697bbcb98677f367c0 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 31 Oct 2023 05:10:11 -0700 Subject: [PATCH 003/104] More tests --- libs/config/convert/struct_info.go | 4 +- libs/config/convert/struct_info_test.go | 107 ++++++++++++++++++++++++ 2 files changed, 110 insertions(+), 1 deletion(-) diff --git a/libs/config/convert/struct_info.go b/libs/config/convert/struct_info.go index cc899542c0..2457b3c297 100644 --- a/libs/config/convert/struct_info.go +++ b/libs/config/convert/struct_info.go @@ -106,7 +106,9 @@ func (s *structInfo) FieldValues(v reflect.Value) map[string]reflect.Value { fv = fv.Field(x) } - out[k] = fv + if fv.IsValid() { + out[k] = fv + } } return out diff --git a/libs/config/convert/struct_info_test.go b/libs/config/convert/struct_info_test.go index 3079958b2b..2e31adac16 100644 --- a/libs/config/convert/struct_info_test.go +++ b/libs/config/convert/struct_info_test.go @@ -87,3 +87,110 @@ func TestStructInfoAnonymousByPointer(t *testing.T) { assert.Equal(t, []int{0, 0}, si.Fields["foo"]) assert.Equal(t, []int{0, 1, 0}, si.Fields["bar"]) } + +func TestStructInfoFieldValues(t *testing.T) { + type Tmp struct { + Foo string 
`json:"foo"` + Bar string `json:"bar"` + } + + var src = Tmp{ + Foo: "foo", + Bar: "bar", + } + + si := getStructInfo(reflect.TypeOf(Tmp{})) + fv := si.FieldValues(reflect.ValueOf(src)) + assert.Len(t, fv, 2) + assert.Equal(t, "foo", fv["foo"].String()) + assert.Equal(t, "bar", fv["bar"].String()) +} + +func TestStructInfoFieldValuesAnonymousByValue(t *testing.T) { + type Bar struct { + Bar string `json:"bar"` + } + + type Foo struct { + Foo string `json:"foo"` + Bar + } + + type Tmp struct { + Foo + } + + var src = Tmp{ + Foo: Foo{ + Foo: "foo", + Bar: Bar{ + Bar: "bar", + }, + }, + } + + si := getStructInfo(reflect.TypeOf(Tmp{})) + fv := si.FieldValues(reflect.ValueOf(src)) + assert.Len(t, fv, 2) + assert.Equal(t, "foo", fv["foo"].String()) + assert.Equal(t, "bar", fv["bar"].String()) +} + +func TestStructInfoFieldValuesAnonymousByPointer(t *testing.T) { + type Bar struct { + Bar string `json:"bar"` + } + + type Foo struct { + Foo string `json:"foo"` + *Bar + } + + type Tmp struct { + *Foo + } + + // Test that the embedded fields are dereferenced properly. + t.Run("all are set", func(t *testing.T) { + src := Tmp{ + Foo: &Foo{ + Foo: "foo", + Bar: &Bar{ + Bar: "bar", + }, + }, + } + + si := getStructInfo(reflect.TypeOf(Tmp{})) + fv := si.FieldValues(reflect.ValueOf(src)) + assert.Len(t, fv, 2) + assert.Equal(t, "foo", fv["foo"].String()) + assert.Equal(t, "bar", fv["bar"].String()) + }) + + // Test that fields of embedded types are skipped if the embedded type is nil. + t.Run("top level is set", func(t *testing.T) { + src := Tmp{ + Foo: &Foo{ + Foo: "foo", + Bar: nil, + }, + } + + si := getStructInfo(reflect.TypeOf(Tmp{})) + fv := si.FieldValues(reflect.ValueOf(src)) + assert.Len(t, fv, 1) + assert.Equal(t, "foo", fv["foo"].String()) + }) + + // Test that fields of embedded types are skipped if the embedded type is nil. + t.Run("none are set", func(t *testing.T) { + src := Tmp{ + Foo: nil, + } + + si := getStructInfo(reflect.TypeOf(Tmp{})) + fv := si.FieldValues(reflect.ValueOf(src)) + assert.Empty(t, fv) + }) +} From b9191a404748240db05f3801ae3180ff6f3d3f9c Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 31 Oct 2023 05:24:06 -0700 Subject: [PATCH 004/104] Cleanup --- libs/config/convert/from_typed.go | 31 ++++++------------------------- 1 file changed, 6 insertions(+), 25 deletions(-) diff --git a/libs/config/convert/from_typed.go b/libs/config/convert/from_typed.go index 852c479ca0..8f247853e1 100644 --- a/libs/config/convert/from_typed.go +++ b/libs/config/convert/from_typed.go @@ -41,11 +41,11 @@ func FromTyped(src any, ref config.Value) (config.Value, error) { } func fromTypedStruct(src reflect.Value, ref config.Value) (config.Value, error) { + // Check that the reference value is compatible or nil. switch ref.Kind() { case config.KindMap, config.KindNil: - // Nothing to do. default: - panic("type error") + return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) } out := make(map[string]config.Value) @@ -68,33 +68,14 @@ func fromTypedStruct(src reflect.Value, ref config.Value) (config.Value, error) } return config.NewValue(out, ref.Location()), nil - - // what are my options - // totyped / fromtyped at every mutator boundary - // pro's -- minimal changes to existing mutators - // con's -- doesn't hold for all mutators, so we need different interface ANYWAY - // (e.g. 
get/set config.Value instances) - // cons -- lossy (cannot do all to/from conversions, lose location, lose variables) - - // explicit mutator interface - // pro's -- very clear what's happening - // cons -- all code + tests need to be changed - // - - // need an incremental approach - // thus, we run totyped + fromtyped at mutator boundary - // can eventually move this into the mutators themselves? - // can treat the typed structure as read-only, perhaps? - // can generate wrapper type that exposes a Get + GetValue at every node - } func fromTypedMap(src reflect.Value, ref config.Value) (config.Value, error) { + // Check that the reference value is compatible or nil. switch ref.Kind() { case config.KindMap, config.KindNil: - // Nothing to do. default: - panic("type error") + return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) } out := make(map[string]config.Value) @@ -123,11 +104,11 @@ func fromTypedMap(src reflect.Value, ref config.Value) (config.Value, error) { } func fromTypedSlice(src reflect.Value, ref config.Value) (config.Value, error) { + // Check that the reference value is compatible or nil. switch ref.Kind() { case config.KindSequence, config.KindNil: - // Nothing to do. default: - panic("type error") + return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) } out := make([]config.Value, src.Len()) From 7968bcb62cfb2ab6c2689f1b93ca4a1690e30a99 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 31 Oct 2023 06:53:23 -0700 Subject: [PATCH 005/104] wip --- bundle/config/mutator/mutator.go | 4 +- bundle/config/mutator/select_target.go | 2 +- bundle/config/root.go | 238 ++++++++++++++++--------- bundle/mutator.go | 4 + 4 files changed, 156 insertions(+), 92 deletions(-) diff --git a/bundle/config/mutator/mutator.go b/bundle/config/mutator/mutator.go index aa762e8e6e..231487cd4b 100644 --- a/bundle/config/mutator/mutator.go +++ b/bundle/config/mutator/mutator.go @@ -2,13 +2,11 @@ package mutator import ( "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/scripts" ) func DefaultMutators() []bundle.Mutator { return []bundle.Mutator{ - scripts.Execute(config.ScriptPreInit), + // scripts.Execute(config.ScriptPreInit), ProcessRootIncludes(), DefineDefaultTarget(), LoadGitDetails(), diff --git a/bundle/config/mutator/select_target.go b/bundle/config/mutator/select_target.go index 2ad4311280..c14122cf52 100644 --- a/bundle/config/mutator/select_target.go +++ b/bundle/config/mutator/select_target.go @@ -36,7 +36,7 @@ func (m *selectTarget) Apply(_ context.Context, b *bundle.Bundle) error { } // Merge specified target into root configuration structure. - err := b.Config.MergeTargetOverrides(target) + err := b.Config.MergeTargetOverrides(m.name) if err != nil { return err } diff --git a/bundle/config/root.go b/bundle/config/root.go index 31867c6cb7..5c14cfd172 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -1,18 +1,26 @@ package config import ( + "bytes" "fmt" "os" "path/filepath" "strings" "github.com/databricks/cli/bundle/config/variable" + "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/config/convert" + "github.com/databricks/cli/libs/config/yamlloader" + "github.com/databricks/cli/libs/diag" "github.com/databricks/databricks-sdk-go/service/jobs" - "github.com/ghodss/yaml" "github.com/imdario/mergo" ) type Root struct { + value config.Value + diags diag.Diagnostics + needsToTyped bool + // Path contains the directory path to the root of the bundle. 
// It is set when loading `databricks.yml`. Path string `json:"-" bundle:"readonly"` @@ -66,30 +74,70 @@ func Load(path string) (*Root, error) { } var r Root - err = yaml.Unmarshal(raw, &r) + v, err := yamlloader.LoadYAML(path, bytes.NewBuffer(raw)) if err != nil { return nil, fmt.Errorf("failed to load %s: %w", path, err) } - if r.Environments != nil && r.Targets != nil { - return nil, fmt.Errorf("both 'environments' and 'targets' are specified, only 'targets' should be used: %s", path) + // Normalize dynamic configuration tree according to configuration type. + v, diags := convert.Normalize(r, v) + if diags != nil { + r.diags = diags } - if r.Environments != nil { - //TODO: add a command line notice that this is a deprecated option. - r.Targets = r.Environments - } + // Store dynamic configuration for later reference (e.g. location information on all nodes). + r.value = v + r.needsToTyped = true + + // // Convert normalized configuration tree to typed configuration. + // err = convert.ToTyped(&r, v) + // if err != nil { + // return nil, fmt.Errorf("failed to load %s: %w", path, err) + // } + + // if r.Environments != nil && r.Targets != nil { + // return nil, fmt.Errorf("both 'environments' and 'targets' are specified, only 'targets' should be used: %s", path) + // } + + // if r.Environments != nil { + // //TODO: add a command line notice that this is a deprecated option. + // r.Targets = r.Environments + // } r.Path = filepath.Dir(path) - r.SetConfigFilePath(path) + // r.SetConfigFilePath(path) - _, err = r.Resources.VerifyUniqueResourceIdentifiers() + // _, err = r.Resources.VerifyUniqueResourceIdentifiers() return &r, err } +func (r *Root) MarkBoundary() { + if r.needsToTyped { + // Convert normalized configuration tree to typed configuration. + err := convert.ToTyped(r, r.value) + if err != nil { + panic(err) + } + } + + nv, err := convert.FromTyped(r, r.value) + if err != nil { + panic(err) + } + + r.value = nv + r.needsToTyped = false +} + +func (r *Root) Diagnostics() diag.Diagnostics { + return r.diags +} + // SetConfigFilePath configures the path that its configuration // was loaded from in configuration leafs that require it. func (r *Root) SetConfigFilePath(path string) { + panic("nope") + r.Resources.SetConfigFilePath(path) if r.Artifacts != nil { r.Artifacts.SetConfigFilePath(path) @@ -114,6 +162,8 @@ func (r *Root) SetConfigFilePath(path string) { // Input has to be a string of the form `foo=bar`. In this case the variable with // name `foo` is assigned the value `bar` func (r *Root) InitializeVariables(vars []string) error { + panic("nope") + for _, variable := range vars { parsedVariable := strings.SplitN(variable, "=", 2) if len(parsedVariable) != 2 { @@ -134,6 +184,18 @@ func (r *Root) InitializeVariables(vars []string) error { } func (r *Root) Merge(other *Root) error { + panic("nope") + + // Merge dynamic configuration values. + // v, err := merge.Merge(r.value, other.value) + // if err != nil { + // return err + // } + // r.value = v + + // Merge diagnostics. + r.diags = append(r.diags, other.diags...) + err := r.Sync.Merge(r, other) if err != nil { return err @@ -153,89 +215,89 @@ func (r *Root) Merge(other *Root) error { return mergo.Merge(r, other, mergo.WithOverride) } -func (r *Root) MergeTargetOverrides(target *Target) error { +func (r *Root) MergeTargetOverrides(name string) error { var err error - // Target may be nil if it's empty. 
- if target == nil { + target := r.value.Get("targets").Get(name) + if target == config.NilValue { return nil } - if target.Bundle != nil { - err = mergo.Merge(&r.Bundle, target.Bundle, mergo.WithOverride) - if err != nil { - return err - } - } - - if target.Workspace != nil { - err = mergo.Merge(&r.Workspace, target.Workspace, mergo.WithOverride) - if err != nil { - return err - } - } - - if target.Artifacts != nil { - err = mergo.Merge(&r.Artifacts, target.Artifacts, mergo.WithOverride, mergo.WithAppendSlice) - if err != nil { - return err - } - } - - if target.Resources != nil { - err = mergo.Merge(&r.Resources, target.Resources, mergo.WithOverride, mergo.WithAppendSlice) - if err != nil { - return err - } - - err = r.Resources.Merge() - if err != nil { - return err - } - } - - if target.Variables != nil { - for k, v := range target.Variables { - variable, ok := r.Variables[k] - if !ok { - return fmt.Errorf("variable %s is not defined but is assigned a value", k) - } - // we only allow overrides of the default value for a variable - defaultVal := v - variable.Default = &defaultVal - } - } - - if target.RunAs != nil { - r.RunAs = target.RunAs - } - - if target.Mode != "" { - r.Bundle.Mode = target.Mode - } - - if target.ComputeID != "" { - r.Bundle.ComputeID = target.ComputeID - } - - git := &r.Bundle.Git - if target.Git.Branch != "" { - git.Branch = target.Git.Branch - git.Inferred = false - } - if target.Git.Commit != "" { - git.Commit = target.Git.Commit - } - if target.Git.OriginURL != "" { - git.OriginURL = target.Git.OriginURL - } - - if target.Sync != nil { - err = mergo.Merge(&r.Sync, target.Sync, mergo.WithAppendSlice) - if err != nil { - return err - } - } + // if target.Bundle != nil { + // err = mergo.Merge(&r.Bundle, target.Bundle, mergo.WithOverride) + // if err != nil { + // return err + // } + // } + + // if target.Workspace != nil { + // err = mergo.Merge(&r.Workspace, target.Workspace, mergo.WithOverride) + // if err != nil { + // return err + // } + // } + + // if target.Artifacts != nil { + // err = mergo.Merge(&r.Artifacts, target.Artifacts, mergo.WithOverride, mergo.WithAppendSlice) + // if err != nil { + // return err + // } + // } + + // if target.Resources != nil { + // err = mergo.Merge(&r.Resources, target.Resources, mergo.WithOverride, mergo.WithAppendSlice) + // if err != nil { + // return err + // } + + // err = r.Resources.Merge() + // if err != nil { + // return err + // } + // } + + // if target.Variables != nil { + // for k, v := range target.Variables { + // variable, ok := r.Variables[k] + // if !ok { + // return fmt.Errorf("variable %s is not defined but is assigned a value", k) + // } + // // we only allow overrides of the default value for a variable + // defaultVal := v + // variable.Default = &defaultVal + // } + // } + + // if target.RunAs != nil { + // r.RunAs = target.RunAs + // } + + // if target.Mode != "" { + // r.Bundle.Mode = target.Mode + // } + + // if target.ComputeID != "" { + // r.Bundle.ComputeID = target.ComputeID + // } + + // git := &r.Bundle.Git + // if target.Git.Branch != "" { + // git.Branch = target.Git.Branch + // git.Inferred = false + // } + // if target.Git.Commit != "" { + // git.Commit = target.Git.Commit + // } + // if target.Git.OriginURL != "" { + // git.OriginURL = target.Git.OriginURL + // } + + // if target.Sync != nil { + // err = mergo.Merge(&r.Sync, target.Sync, mergo.WithAppendSlice) + // if err != nil { + // return err + // } + // } return nil } diff --git a/bundle/mutator.go b/bundle/mutator.go index 
e559d2375f..734df0caba 100644 --- a/bundle/mutator.go +++ b/bundle/mutator.go @@ -20,7 +20,11 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) error { ctx = log.NewContext(ctx, log.GetLogger(ctx).With("mutator", m.Name())) log.Debugf(ctx, "Apply") + + b.Config.MarkBoundary() + err := m.Apply(ctx, b) + if err != nil { log.Errorf(ctx, "Error: %s", err) return err From 9b1ed86634a0da90a79154165256883487523fae Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 31 Oct 2023 07:13:00 -0700 Subject: [PATCH 006/104] Function to merge two instances of config.Value --- libs/config/merge/merge.go | 92 ++++++++++++++ libs/config/merge/merge_test.go | 207 ++++++++++++++++++++++++++++++++ 2 files changed, 299 insertions(+) create mode 100644 libs/config/merge/merge.go create mode 100644 libs/config/merge/merge_test.go diff --git a/libs/config/merge/merge.go b/libs/config/merge/merge.go new file mode 100644 index 0000000000..4d78bb1ded --- /dev/null +++ b/libs/config/merge/merge.go @@ -0,0 +1,92 @@ +package merge + +import ( + "fmt" + + "github.com/databricks/cli/libs/config" +) + +func Merge(a, b config.Value) (config.Value, error) { + return merge(a, b) +} + +func merge(a, b config.Value) (config.Value, error) { + ak := a.Kind() + bk := b.Kind() + + // If a is nil, return b. + if ak == config.KindNil { + return b, nil + } + + // If b is nil, return a. + if bk == config.KindNil { + return a, nil + } + + // Call the appropriate merge function based on the kind of a and b. + switch ak { + case config.KindMap: + if bk != config.KindMap { + return config.NilValue, fmt.Errorf("cannot merge map with %s", bk) + } + return mergeMap(a, b) + case config.KindSequence: + if bk != config.KindSequence { + return config.NilValue, fmt.Errorf("cannot merge sequence with %s", bk) + } + return mergeSequence(a, b) + default: + if ak != bk { + return config.NilValue, fmt.Errorf("cannot merge %s with %s", ak, bk) + } + return mergePrimitive(a, b) + } +} + +func mergeMap(a, b config.Value) (config.Value, error) { + out := make(map[string]config.Value) + am := a.MustMap() + bm := b.MustMap() + + // Add the values from a into the output map. + for k, v := range am { + out[k] = v + } + + // Merge the values from b into the output map. + for k, v := range bm { + if _, ok := out[k]; ok { + // If the key already exists, merge the values. + merged, err := merge(out[k], v) + if err != nil { + return config.NilValue, err + } + out[k] = merged + } else { + // Otherwise, just set the value. + out[k] = v + } + } + + // Preserve the location of the first value. + return config.NewValue(out, a.Location()), nil +} + +func mergeSequence(a, b config.Value) (config.Value, error) { + as := a.MustSequence() + bs := b.MustSequence() + + // Merging sequences means concatenating them. + out := make([]config.Value, len(as)+len(bs)) + copy(out[:], as) + copy(out[len(as):], bs) + + // Preserve the location of the first value. + return config.NewValue(out, a.Location()), nil +} + +func mergePrimitive(a, b config.Value) (config.Value, error) { + // Merging primitive values means using the incoming value. 
+ return b, nil +} diff --git a/libs/config/merge/merge_test.go b/libs/config/merge/merge_test.go new file mode 100644 index 0000000000..c2e89f60a4 --- /dev/null +++ b/libs/config/merge/merge_test.go @@ -0,0 +1,207 @@ +package merge + +import ( + "testing" + + "github.com/databricks/cli/libs/config" + "github.com/stretchr/testify/assert" +) + +func TestMergeMaps(t *testing.T) { + v1 := config.V(map[string]config.Value{ + "foo": config.V("bar"), + "bar": config.V("baz"), + }) + + v2 := config.V(map[string]config.Value{ + "bar": config.V("qux"), + "qux": config.V("foo"), + }) + + // Merge v2 into v1. + { + out, err := Merge(v1, v2) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": "bar", + "bar": "qux", + "qux": "foo", + }, out.AsAny()) + } + + // Merge v1 into v2. + { + out, err := Merge(v2, v1) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": "bar", + "bar": "baz", + "qux": "foo", + }, out.AsAny()) + } +} + +func TestMergeMapsNil(t *testing.T) { + v := config.V(map[string]config.Value{ + "foo": config.V("bar"), + }) + + // Merge nil into v. + { + out, err := Merge(v, config.NilValue) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": "bar", + }, out.AsAny()) + } + + // Merge v into nil. + { + out, err := Merge(config.NilValue, v) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": "bar", + }, out.AsAny()) + } +} + +func TestMergeMapsError(t *testing.T) { + v := config.V(map[string]config.Value{ + "foo": config.V("bar"), + }) + + other := config.V("string") + + // Merge a string into v. + { + out, err := Merge(v, other) + assert.EqualError(t, err, "cannot merge map with string") + assert.Equal(t, config.NilValue, out) + } +} + +func TestMergeSequences(t *testing.T) { + v1 := config.V([]config.Value{ + config.V("bar"), + config.V("baz"), + }) + + v2 := config.V([]config.Value{ + config.V("qux"), + config.V("foo"), + }) + + // Merge v2 into v1. + { + out, err := Merge(v1, v2) + assert.NoError(t, err) + assert.Equal(t, []any{ + "bar", + "baz", + "qux", + "foo", + }, out.AsAny()) + } + + // Merge v1 into v2. + { + out, err := Merge(v2, v1) + assert.NoError(t, err) + assert.Equal(t, []any{ + "qux", + "foo", + "bar", + "baz", + }, out.AsAny()) + } +} + +func TestMergeSequencesNil(t *testing.T) { + v := config.V([]config.Value{ + config.V("bar"), + }) + + // Merge nil into v. + { + out, err := Merge(v, config.NilValue) + assert.NoError(t, err) + assert.Equal(t, []any{ + "bar", + }, out.AsAny()) + } + + // Merge v into nil. + { + out, err := Merge(config.NilValue, v) + assert.NoError(t, err) + assert.Equal(t, []any{ + "bar", + }, out.AsAny()) + } +} + +func TestMergeSequencesError(t *testing.T) { + v := config.V([]config.Value{ + config.V("bar"), + }) + + other := config.V("string") + + // Merge a string into v. + { + out, err := Merge(v, other) + assert.EqualError(t, err, "cannot merge sequence with string") + assert.Equal(t, config.NilValue, out) + } +} + +func TestMergePrimitives(t *testing.T) { + v1 := config.V("bar") + v2 := config.V("baz") + + // Merge v2 into v1. + { + out, err := Merge(v1, v2) + assert.NoError(t, err) + assert.Equal(t, "baz", out.AsAny()) + } + + // Merge v1 into v2. + { + out, err := Merge(v2, v1) + assert.NoError(t, err) + assert.Equal(t, "bar", out.AsAny()) + } +} + +func TestMergePrimitivesNil(t *testing.T) { + v := config.V("bar") + + // Merge nil into v. + { + out, err := Merge(v, config.NilValue) + assert.NoError(t, err) + assert.Equal(t, "bar", out.AsAny()) + } + + // Merge v into nil. 
+ { + out, err := Merge(config.NilValue, v) + assert.NoError(t, err) + assert.Equal(t, "bar", out.AsAny()) + } +} + +func TestMergePrimitivesError(t *testing.T) { + v := config.V("bar") + other := config.V(map[string]config.Value{ + "foo": config.V("bar"), + }) + + // Merge a map into v. + { + out, err := Merge(v, other) + assert.EqualError(t, err, "cannot merge string with map") + assert.Equal(t, config.NilValue, out) + } +} From 314ab30b4184cdd2ecf5412eab283212433eb453 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 1 Nov 2023 05:57:15 -0700 Subject: [PATCH 007/104] tmp --- bundle/config/mutator/process_include_test.go | 10 +- .../mutator/process_root_includes_test.go | 117 ++++++------- bundle/config/mutator/select_target.go | 2 +- bundle/config/mutator/select_target_test.go | 10 +- bundle/config/root.go | 162 +++++++++++------- bundle/config/root_test.go | 8 +- bundle/mutator.go | 3 +- libs/config/value.go | 4 + 8 files changed, 184 insertions(+), 132 deletions(-) diff --git a/bundle/config/mutator/process_include_test.go b/bundle/config/mutator/process_include_test.go index e5e27f9e75..7ca5d19811 100644 --- a/bundle/config/mutator/process_include_test.go +++ b/bundle/config/mutator/process_include_test.go @@ -15,7 +15,7 @@ import ( ) func TestProcessInclude(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: t.TempDir(), Workspace: config.Workspace{ @@ -25,14 +25,14 @@ func TestProcessInclude(t *testing.T) { } relPath := "./file.yml" - fullPath := filepath.Join(bundle.Config.Path, relPath) + fullPath := filepath.Join(b.Config.Path, relPath) f, err := os.Create(fullPath) require.NoError(t, err) fmt.Fprint(f, "workspace:\n host: bar\n") f.Close() - assert.Equal(t, "foo", bundle.Config.Workspace.Host) - err = mutator.ProcessInclude(fullPath, relPath).Apply(context.Background(), bundle) + assert.Equal(t, "foo", b.Config.Workspace.Host) + err = bundle.Apply(context.Background(), b, mutator.ProcessInclude(fullPath, relPath)) require.NoError(t, err) - assert.Equal(t, "bar", bundle.Config.Workspace.Host) + assert.Equal(t, "bar", b.Config.Workspace.Host) } diff --git a/bundle/config/mutator/process_root_includes_test.go b/bundle/config/mutator/process_root_includes_test.go index aec9b32dff..5f04dcc41c 100644 --- a/bundle/config/mutator/process_root_includes_test.go +++ b/bundle/config/mutator/process_root_includes_test.go @@ -3,16 +3,13 @@ package mutator_test import ( "context" "os" - "path" "path/filepath" "runtime" - "strings" "testing" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" - "github.com/databricks/cli/bundle/env" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -24,12 +21,13 @@ func touch(t *testing.T, path, file string) { } func TestProcessRootIncludesEmpty(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: ".", }, } - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + + err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.NoError(t, err) } @@ -41,7 +39,7 @@ func TestProcessRootIncludesAbs(t *testing.T) { t.Skip("skipping temperorilty to make windows unit tests green") } - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: ".", Include: []string{ @@ -49,13 +47,14 @@ func TestProcessRootIncludesAbs(t *testing.T) { }, }, } - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + + 
err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.Error(t, err) assert.Contains(t, err.Error(), "must be relative paths") } func TestProcessRootIncludesSingleGlob(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: t.TempDir(), Include: []string{ @@ -64,18 +63,17 @@ func TestProcessRootIncludesSingleGlob(t *testing.T) { }, } - touch(t, bundle.Config.Path, "databricks.yml") - touch(t, bundle.Config.Path, "a.yml") - touch(t, bundle.Config.Path, "b.yml") + touch(t, b.Config.Path, "databricks.yml") + touch(t, b.Config.Path, "a.yml") + touch(t, b.Config.Path, "b.yml") - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.NoError(t, err) - - assert.Equal(t, []string{"a.yml", "b.yml"}, bundle.Config.Include) + assert.Equal(t, []string{"a.yml", "b.yml"}, b.Config.Include) } func TestProcessRootIncludesMultiGlob(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: t.TempDir(), Include: []string{ @@ -85,17 +83,16 @@ func TestProcessRootIncludesMultiGlob(t *testing.T) { }, } - touch(t, bundle.Config.Path, "a1.yml") - touch(t, bundle.Config.Path, "b1.yml") + touch(t, b.Config.Path, "a1.yml") + touch(t, b.Config.Path, "b1.yml") - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.NoError(t, err) - - assert.Equal(t, []string{"a1.yml", "b1.yml"}, bundle.Config.Include) + assert.Equal(t, []string{"a1.yml", "b1.yml"}, b.Config.Include) } func TestProcessRootIncludesRemoveDups(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: t.TempDir(), Include: []string{ @@ -105,15 +102,15 @@ func TestProcessRootIncludesRemoveDups(t *testing.T) { }, } - touch(t, bundle.Config.Path, "a.yml") + touch(t, b.Config.Path, "a.yml") - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.NoError(t, err) - assert.Equal(t, []string{"a.yml"}, bundle.Config.Include) + assert.Equal(t, []string{"a.yml"}, b.Config.Include) } func TestProcessRootIncludesNotExists(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: t.TempDir(), Include: []string{ @@ -121,47 +118,47 @@ func TestProcessRootIncludesNotExists(t *testing.T) { }, }, } - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.Error(t, err) assert.Contains(t, err.Error(), "notexist.yml defined in 'include' section does not match any files") } func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) { - rootPath := t.TempDir() - testYamlName := "extra_include_path.yml" - touch(t, rootPath, testYamlName) - t.Setenv(env.IncludesVariable, path.Join(rootPath, testYamlName)) - - bundle := &bundle.Bundle{ - Config: config.Root{ - Path: rootPath, - }, - } - - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) - require.NoError(t, err) - assert.Contains(t, bundle.Config.Include, testYamlName) + // rootPath := t.TempDir() + // testYamlName := "extra_include_path.yml" + // touch(t, rootPath, testYamlName) + // t.Setenv(env.IncludesVariable, path.Join(rootPath, testYamlName)) + + // b := &bundle.Bundle{ + // Config: config.Root{ + // Path: rootPath, + 
// }, + // } + + // err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + // require.NoError(t, err) + // assert.Contains(t, b.Config.Include, testYamlName) } func TestProcessRootIncludesDedupExtrasFromEnvVar(t *testing.T) { - rootPath := t.TempDir() - testYamlName := "extra_include_path.yml" - touch(t, rootPath, testYamlName) - t.Setenv(env.IncludesVariable, strings.Join( - []string{ - path.Join(rootPath, testYamlName), - path.Join(rootPath, testYamlName), - }, - string(os.PathListSeparator), - )) - - bundle := &bundle.Bundle{ - Config: config.Root{ - Path: rootPath, - }, - } - - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) - require.NoError(t, err) - assert.Equal(t, []string{testYamlName}, bundle.Config.Include) + // rootPath := t.TempDir() + // testYamlName := "extra_include_path.yml" + // touch(t, rootPath, testYamlName) + // t.Setenv(env.IncludesVariable, strings.Join( + // []string{ + // path.Join(rootPath, testYamlName), + // path.Join(rootPath, testYamlName), + // }, + // string(os.PathListSeparator), + // )) + + // b := &bundle.Bundle{ + // Config: config.Root{ + // Path: rootPath, + // }, + // } + + // err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) + // require.NoError(t, err) + // assert.Equal(t, []string{testYamlName}, b.Config.Include) } diff --git a/bundle/config/mutator/select_target.go b/bundle/config/mutator/select_target.go index c14122cf52..95558f030f 100644 --- a/bundle/config/mutator/select_target.go +++ b/bundle/config/mutator/select_target.go @@ -30,7 +30,7 @@ func (m *selectTarget) Apply(_ context.Context, b *bundle.Bundle) error { } // Get specified target - target, ok := b.Config.Targets[m.name] + _, ok := b.Config.Targets[m.name] if !ok { return fmt.Errorf("%s: no such target. 
Available targets: %s", m.name, strings.Join(maps.Keys(b.Config.Targets), ", ")) } diff --git a/bundle/config/mutator/select_target_test.go b/bundle/config/mutator/select_target_test.go index dfcd8cb089..20467270b8 100644 --- a/bundle/config/mutator/select_target_test.go +++ b/bundle/config/mutator/select_target_test.go @@ -12,7 +12,7 @@ import ( ) func TestSelectTarget(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ Host: "foo", @@ -26,19 +26,19 @@ func TestSelectTarget(t *testing.T) { }, }, } - err := mutator.SelectTarget("default").Apply(context.Background(), bundle) + err := bundle.Apply(context.Background(), b, mutator.SelectTarget("default")) require.NoError(t, err) - assert.Equal(t, "bar", bundle.Config.Workspace.Host) + assert.Equal(t, "bar", b.Config.Workspace.Host) } func TestSelectTargetNotFound(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Targets: map[string]*config.Target{ "default": {}, }, }, } - err := mutator.SelectTarget("doesnt-exist").Apply(context.Background(), bundle) + err := bundle.Apply(context.Background(), b, mutator.SelectTarget("doesnt-exist")) require.Error(t, err, "no targets defined") } diff --git a/bundle/config/root.go b/bundle/config/root.go index 5c14cfd172..aeb5ca7392 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -10,16 +10,17 @@ import ( "github.com/databricks/cli/bundle/config/variable" "github.com/databricks/cli/libs/config" "github.com/databricks/cli/libs/config/convert" + "github.com/databricks/cli/libs/config/merge" "github.com/databricks/cli/libs/config/yamlloader" "github.com/databricks/cli/libs/diag" "github.com/databricks/databricks-sdk-go/service/jobs" - "github.com/imdario/mergo" ) type Root struct { - value config.Value - diags diag.Diagnostics - needsToTyped bool + value config.Value + diags diag.Diagnostics + + depth int // Path contains the directory path to the root of the bundle. // It is set when loading `databricks.yml`. @@ -81,19 +82,17 @@ func Load(path string) (*Root, error) { // Normalize dynamic configuration tree according to configuration type. v, diags := convert.Normalize(r, v) - if diags != nil { - r.diags = diags + + // Convert normalized configuration tree to typed configuration. + err = convert.ToTyped(&r, v) + if err != nil { + return nil, fmt.Errorf("failed to load %s: %w", path, err) } + r.diags = diags + // Store dynamic configuration for later reference (e.g. location information on all nodes). r.value = v - r.needsToTyped = true - - // // Convert normalized configuration tree to typed configuration. - // err = convert.ToTyped(&r, v) - // if err != nil { - // return nil, fmt.Errorf("failed to load %s: %w", path, err) - // } // if r.Environments != nil && r.Targets != nil { // return nil, fmt.Errorf("both 'environments' and 'targets' are specified, only 'targets' should be used: %s", path) @@ -111,22 +110,53 @@ func Load(path string) (*Root, error) { return &r, err } -func (r *Root) MarkBoundary() { - if r.needsToTyped { +func (r *Root) MarkMutatorEntry() { + r.depth++ + + // Many test cases initialize a config as a Go struct literal. + // The zero-initialized value for [wasLoaded] will be false, + // and indicates we need to populate [r.value]. 
+ if !r.value.IsValid() { + nv, err := convert.FromTyped(r, config.NilValue) + if err != nil { + panic(err) + } + + r.value = nv + } + + // If we are entering a mutator at depth 1, we need to convert + // the dynamic configuration tree to typed configuration. + if r.depth == 1 { + // Always run ToTyped upon entering a mutator. // Convert normalized configuration tree to typed configuration. err := convert.ToTyped(r, r.value) if err != nil { panic(err) } - } + } else { + nv, err := convert.FromTyped(r, config.NilValue) + if err != nil { + panic(err) + } - nv, err := convert.FromTyped(r, r.value) - if err != nil { - panic(err) + r.value = nv } +} - r.value = nv - r.needsToTyped = false +func (r *Root) MarkMutatorExit() { + r.depth-- + + // If we are exiting a mutator at depth 0, we need to convert + // the typed configuration to a dynamic configuration tree. + if r.depth == 0 { + nv, err := convert.FromTyped(r, config.NilValue) + if err != nil { + panic(err) + } + + r.value = nv + } } func (r *Root) Diagnostics() diag.Diagnostics { @@ -184,38 +214,45 @@ func (r *Root) InitializeVariables(vars []string) error { } func (r *Root) Merge(other *Root) error { - panic("nope") + // // Merge diagnostics. + // r.diags = append(r.diags, other.diags...) - // Merge dynamic configuration values. - // v, err := merge.Merge(r.value, other.value) + // err := r.Sync.Merge(r, other) // if err != nil { // return err // } - // r.value = v + // other.Sync = Sync{} - // Merge diagnostics. - r.diags = append(r.diags, other.diags...) + // // TODO: when hooking into merge semantics, disallow setting path on the target instance. + // other.Path = "" - err := r.Sync.Merge(r, other) + // Check for safe merge, protecting against duplicate resource identifiers + err := r.Resources.VerifySafeMerge(&other.Resources) if err != nil { return err } - other.Sync = Sync{} - - // TODO: when hooking into merge semantics, disallow setting path on the target instance. - other.Path = "" - // Check for safe merge, protecting against duplicate resource identifiers - err = r.Resources.VerifySafeMerge(&other.Resources) + // Merge dynamic configuration values. + nv, err := merge.Merge(r.value, other.value) if err != nil { return err } + r.value = nv + + // Convert normalized configuration tree to typed configuration. + err = convert.ToTyped(r, r.value) + if err != nil { + panic(err) + } + // TODO: define and test semantics for merging. 
- return mergo.Merge(r, other, mergo.WithOverride) + // return mergo.Merge(r, other, mergo.WithOverride) + return nil } func (r *Root) MergeTargetOverrides(name string) error { + var tmp config.Value var err error target := r.value.Get("targets").Get(name) @@ -223,32 +260,41 @@ func (r *Root) MergeTargetOverrides(name string) error { return nil } - // if target.Bundle != nil { - // err = mergo.Merge(&r.Bundle, target.Bundle, mergo.WithOverride) - // if err != nil { - // return err - // } - // } + mergeField := func(name string) error { + tmp, err = merge.Merge(r.value.Get(name), target.Get(name)) + if err != nil { + return err + } - // if target.Workspace != nil { - // err = mergo.Merge(&r.Workspace, target.Workspace, mergo.WithOverride) - // if err != nil { - // return err - // } - // } + r.value.MustMap()[name] = tmp + return nil + } - // if target.Artifacts != nil { - // err = mergo.Merge(&r.Artifacts, target.Artifacts, mergo.WithOverride, mergo.WithAppendSlice) - // if err != nil { - // return err - // } - // } + if err = mergeField("bundle"); err != nil { + return err + } - // if target.Resources != nil { - // err = mergo.Merge(&r.Resources, target.Resources, mergo.WithOverride, mergo.WithAppendSlice) - // if err != nil { - // return err - // } + if err = mergeField("workspace"); err != nil { + return err + } + + if err = mergeField("artifacts"); err != nil { + return err + } + + if err = mergeField("resources"); err != nil { + return err + } + + if err = mergeField("sync"); err != nil { + return err + } + + // Convert normalized configuration tree to typed configuration. + err = convert.ToTyped(r, r.value) + if err != nil { + panic(err) + } // err = r.Resources.Merge() // if err != nil { diff --git a/bundle/config/root_test.go b/bundle/config/root_test.go index 3f37da07a5..7b86c32676 100644 --- a/bundle/config/root_test.go +++ b/bundle/config/root_test.go @@ -154,8 +154,12 @@ func TestInitializeVariablesUndefinedVariables(t *testing.T) { func TestRootMergeTargetOverridesWithMode(t *testing.T) { root := &Root{ Bundle: Bundle{}, + Targets: map[string]*Target{ + "development": { + Mode: Development, + }, + }, } - env := &Target{Mode: Development} - require.NoError(t, root.MergeTargetOverrides(env)) + require.NoError(t, root.MergeTargetOverrides("development")) assert.Equal(t, Development, root.Bundle.Mode) } diff --git a/bundle/mutator.go b/bundle/mutator.go index 734df0caba..73d11d4934 100644 --- a/bundle/mutator.go +++ b/bundle/mutator.go @@ -21,7 +21,8 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) error { log.Debugf(ctx, "Apply") - b.Config.MarkBoundary() + b.Config.MarkMutatorEntry() + defer b.Config.MarkMutatorExit() err := m.Apply(ctx, b) diff --git a/libs/config/value.go b/libs/config/value.go index c77f8147dc..fe0ced9bdf 100644 --- a/libs/config/value.go +++ b/libs/config/value.go @@ -51,6 +51,10 @@ func (v Value) Location() Location { return v.l } +func (v Value) IsValid() bool { + return v.k != KindInvalid +} + func (v Value) AsAny() any { switch v.k { case KindInvalid: From bacac76fdfaf919159520ac7a0094fe9673ab188 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 15 Nov 2023 11:15:57 +0100 Subject: [PATCH 008/104] Rename variable bundle -> b --- bundle/bundle.go | 10 +- bundle/config/mutator/default_target_test.go | 12 +- .../mutator/default_workspace_paths_test.go | 20 +-- .../mutator/default_workspace_root_test.go | 6 +- .../mutator/expand_workspace_root_test.go | 20 +-- .../config/mutator/override_compute_test.go | 40 ++--- 
bundle/config/mutator/process_include_test.go | 10 +- .../mutator/process_root_includes_test.go | 54 +++---- .../mutator/process_target_mode_test.go | 138 +++++++++--------- .../mutator/select_default_target_test.go | 28 ++-- bundle/config/mutator/select_target_test.go | 10 +- bundle/config/mutator/set_variables_test.go | 10 +- bundle/config/mutator/translate_paths_test.go | 86 +++++------ .../mutator/validate_git_details_test.go | 12 +- bundle/context.go | 6 +- bundle/deferred_test.go | 20 +-- bundle/deploy/terraform/init_test.go | 6 +- bundle/mutator_test.go | 4 +- bundle/python/transform_test.go | 4 +- bundle/seq_test.go | 20 +-- 20 files changed, 258 insertions(+), 258 deletions(-) diff --git a/bundle/bundle.go b/bundle/bundle.go index a2d774bbd0..b4f5ee10ec 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -63,7 +63,7 @@ type Bundle struct { } func Load(ctx context.Context, path string) (*Bundle, error) { - bundle := &Bundle{} + b := &Bundle{} stat, err := os.Stat(path) if err != nil { return nil, err @@ -74,13 +74,13 @@ func Load(ctx context.Context, path string) (*Bundle, error) { _, hasIncludesEnv := env.Includes(ctx) if hasRootEnv && hasIncludesEnv && stat.IsDir() { log.Debugf(ctx, "No bundle configuration; using bundle root: %s", path) - bundle.Config = config.Root{ + b.Config = config.Root{ Path: path, Bundle: config.Bundle{ Name: filepath.Base(path), }, } - return bundle, nil + return b, nil } return nil, err } @@ -89,8 +89,8 @@ func Load(ctx context.Context, path string) (*Bundle, error) { if err != nil { return nil, err } - bundle.Config = *root - return bundle, nil + b.Config = *root + return b, nil } // MustLoad returns a bundle configuration. diff --git a/bundle/config/mutator/default_target_test.go b/bundle/config/mutator/default_target_test.go index 49fbe6de2c..9214c4eff7 100644 --- a/bundle/config/mutator/default_target_test.go +++ b/bundle/config/mutator/default_target_test.go @@ -12,24 +12,24 @@ import ( ) func TestDefaultTarget(t *testing.T) { - bundle := &bundle.Bundle{} - err := mutator.DefineDefaultTarget().Apply(context.Background(), bundle) + b := &bundle.Bundle{} + err := mutator.DefineDefaultTarget().Apply(context.Background(), b) require.NoError(t, err) - env, ok := bundle.Config.Targets["default"] + env, ok := b.Config.Targets["default"] assert.True(t, ok) assert.Equal(t, &config.Target{}, env) } func TestDefaultTargetAlreadySpecified(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Targets: map[string]*config.Target{ "development": {}, }, }, } - err := mutator.DefineDefaultTarget().Apply(context.Background(), bundle) + err := mutator.DefineDefaultTarget().Apply(context.Background(), b) require.NoError(t, err) - _, ok := bundle.Config.Targets["default"] + _, ok := b.Config.Targets["default"] assert.False(t, ok) } diff --git a/bundle/config/mutator/default_workspace_paths_test.go b/bundle/config/mutator/default_workspace_paths_test.go index 308f82c4d5..0f99555817 100644 --- a/bundle/config/mutator/default_workspace_paths_test.go +++ b/bundle/config/mutator/default_workspace_paths_test.go @@ -12,22 +12,22 @@ import ( ) func TestDefineDefaultWorkspacePaths(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ RootPath: "/", }, }, } - err := mutator.DefineDefaultWorkspacePaths().Apply(context.Background(), bundle) + err := mutator.DefineDefaultWorkspacePaths().Apply(context.Background(), b) require.NoError(t, err) - assert.Equal(t, "/files", 
bundle.Config.Workspace.FilesPath) - assert.Equal(t, "/artifacts", bundle.Config.Workspace.ArtifactsPath) - assert.Equal(t, "/state", bundle.Config.Workspace.StatePath) + assert.Equal(t, "/files", b.Config.Workspace.FilesPath) + assert.Equal(t, "/artifacts", b.Config.Workspace.ArtifactsPath) + assert.Equal(t, "/state", b.Config.Workspace.StatePath) } func TestDefineDefaultWorkspacePathsAlreadySet(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ RootPath: "/", @@ -37,9 +37,9 @@ func TestDefineDefaultWorkspacePathsAlreadySet(t *testing.T) { }, }, } - err := mutator.DefineDefaultWorkspacePaths().Apply(context.Background(), bundle) + err := mutator.DefineDefaultWorkspacePaths().Apply(context.Background(), b) require.NoError(t, err) - assert.Equal(t, "/foo/bar", bundle.Config.Workspace.FilesPath) - assert.Equal(t, "/foo/bar", bundle.Config.Workspace.ArtifactsPath) - assert.Equal(t, "/foo/bar", bundle.Config.Workspace.StatePath) + assert.Equal(t, "/foo/bar", b.Config.Workspace.FilesPath) + assert.Equal(t, "/foo/bar", b.Config.Workspace.ArtifactsPath) + assert.Equal(t, "/foo/bar", b.Config.Workspace.StatePath) } diff --git a/bundle/config/mutator/default_workspace_root_test.go b/bundle/config/mutator/default_workspace_root_test.go index 1822dca0f1..ad921f6fdf 100644 --- a/bundle/config/mutator/default_workspace_root_test.go +++ b/bundle/config/mutator/default_workspace_root_test.go @@ -12,7 +12,7 @@ import ( ) func TestDefaultWorkspaceRoot(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ Name: "name", @@ -20,7 +20,7 @@ func TestDefaultWorkspaceRoot(t *testing.T) { }, }, } - err := mutator.DefineDefaultWorkspaceRoot().Apply(context.Background(), bundle) + err := mutator.DefineDefaultWorkspaceRoot().Apply(context.Background(), b) require.NoError(t, err) - assert.Equal(t, "~/.bundle/name/environment", bundle.Config.Workspace.RootPath) + assert.Equal(t, "~/.bundle/name/environment", b.Config.Workspace.RootPath) } diff --git a/bundle/config/mutator/expand_workspace_root_test.go b/bundle/config/mutator/expand_workspace_root_test.go index 0ec11a07db..217c07c512 100644 --- a/bundle/config/mutator/expand_workspace_root_test.go +++ b/bundle/config/mutator/expand_workspace_root_test.go @@ -13,7 +13,7 @@ import ( ) func TestExpandWorkspaceRoot(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ CurrentUser: &config.User{ @@ -25,13 +25,13 @@ func TestExpandWorkspaceRoot(t *testing.T) { }, }, } - err := mutator.ExpandWorkspaceRoot().Apply(context.Background(), bundle) + err := mutator.ExpandWorkspaceRoot().Apply(context.Background(), b) require.NoError(t, err) - assert.Equal(t, "/Users/jane@doe.com/foo", bundle.Config.Workspace.RootPath) + assert.Equal(t, "/Users/jane@doe.com/foo", b.Config.Workspace.RootPath) } func TestExpandWorkspaceRootDoesNothing(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ CurrentUser: &config.User{ @@ -43,13 +43,13 @@ func TestExpandWorkspaceRootDoesNothing(t *testing.T) { }, }, } - err := mutator.ExpandWorkspaceRoot().Apply(context.Background(), bundle) + err := mutator.ExpandWorkspaceRoot().Apply(context.Background(), b) require.NoError(t, err) - assert.Equal(t, "/Users/charly@doe.com/foo", bundle.Config.Workspace.RootPath) + assert.Equal(t, "/Users/charly@doe.com/foo", b.Config.Workspace.RootPath) } func 
TestExpandWorkspaceRootWithoutRoot(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ CurrentUser: &config.User{ @@ -60,18 +60,18 @@ func TestExpandWorkspaceRootWithoutRoot(t *testing.T) { }, }, } - err := mutator.ExpandWorkspaceRoot().Apply(context.Background(), bundle) + err := mutator.ExpandWorkspaceRoot().Apply(context.Background(), b) require.Error(t, err) } func TestExpandWorkspaceRootWithoutCurrentUser(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ RootPath: "~/foo", }, }, } - err := mutator.ExpandWorkspaceRoot().Apply(context.Background(), bundle) + err := mutator.ExpandWorkspaceRoot().Apply(context.Background(), b) require.Error(t, err) } diff --git a/bundle/config/mutator/override_compute_test.go b/bundle/config/mutator/override_compute_test.go index cb37eeb5f4..70d7f238da 100644 --- a/bundle/config/mutator/override_compute_test.go +++ b/bundle/config/mutator/override_compute_test.go @@ -16,7 +16,7 @@ import ( func TestOverrideDevelopment(t *testing.T) { t.Setenv("DATABRICKS_CLUSTER_ID", "") - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ Mode: config.Development, @@ -47,22 +47,22 @@ func TestOverrideDevelopment(t *testing.T) { } m := mutator.OverrideCompute() - err := m.Apply(context.Background(), bundle) + err := m.Apply(context.Background(), b) require.NoError(t, err) - assert.Nil(t, bundle.Config.Resources.Jobs["job1"].Tasks[0].NewCluster) - assert.Equal(t, "newClusterID", bundle.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) - assert.Equal(t, "newClusterID", bundle.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) - assert.Equal(t, "newClusterID", bundle.Config.Resources.Jobs["job1"].Tasks[2].ExistingClusterId) - assert.Equal(t, "newClusterID", bundle.Config.Resources.Jobs["job1"].Tasks[3].ExistingClusterId) + assert.Nil(t, b.Config.Resources.Jobs["job1"].Tasks[0].NewCluster) + assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) + assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) + assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[2].ExistingClusterId) + assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[3].ExistingClusterId) - assert.Nil(t, bundle.Config.Resources.Jobs["job1"].Tasks[0].NewCluster) - assert.Empty(t, bundle.Config.Resources.Jobs["job1"].Tasks[2].ComputeKey) - assert.Empty(t, bundle.Config.Resources.Jobs["job1"].Tasks[3].JobClusterKey) + assert.Nil(t, b.Config.Resources.Jobs["job1"].Tasks[0].NewCluster) + assert.Empty(t, b.Config.Resources.Jobs["job1"].Tasks[2].ComputeKey) + assert.Empty(t, b.Config.Resources.Jobs["job1"].Tasks[3].JobClusterKey) } func TestOverrideDevelopmentEnv(t *testing.T) { t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Resources: config.Resources{ Jobs: map[string]*resources.Job{ @@ -83,14 +83,14 @@ func TestOverrideDevelopmentEnv(t *testing.T) { } m := mutator.OverrideCompute() - err := m.Apply(context.Background(), bundle) + err := m.Apply(context.Background(), b) require.NoError(t, err) - assert.Equal(t, "cluster2", bundle.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) + assert.Equal(t, "cluster2", b.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) } func TestOverridePipelineTask(t *testing.T) { 
t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Resources: config.Resources{ Jobs: map[string]*resources.Job{ @@ -108,13 +108,13 @@ func TestOverridePipelineTask(t *testing.T) { } m := mutator.OverrideCompute() - err := m.Apply(context.Background(), bundle) + err := m.Apply(context.Background(), b) require.NoError(t, err) - assert.Empty(t, bundle.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) + assert.Empty(t, b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) } func TestOverrideProduction(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ ComputeID: "newClusterID", @@ -138,13 +138,13 @@ func TestOverrideProduction(t *testing.T) { } m := mutator.OverrideCompute() - err := m.Apply(context.Background(), bundle) + err := m.Apply(context.Background(), b) require.Error(t, err) } func TestOverrideProductionEnv(t *testing.T) { t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Resources: config.Resources{ Jobs: map[string]*resources.Job{ @@ -165,6 +165,6 @@ func TestOverrideProductionEnv(t *testing.T) { } m := mutator.OverrideCompute() - err := m.Apply(context.Background(), bundle) + err := m.Apply(context.Background(), b) require.NoError(t, err) } diff --git a/bundle/config/mutator/process_include_test.go b/bundle/config/mutator/process_include_test.go index e5e27f9e75..eb1cb29151 100644 --- a/bundle/config/mutator/process_include_test.go +++ b/bundle/config/mutator/process_include_test.go @@ -15,7 +15,7 @@ import ( ) func TestProcessInclude(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: t.TempDir(), Workspace: config.Workspace{ @@ -25,14 +25,14 @@ func TestProcessInclude(t *testing.T) { } relPath := "./file.yml" - fullPath := filepath.Join(bundle.Config.Path, relPath) + fullPath := filepath.Join(b.Config.Path, relPath) f, err := os.Create(fullPath) require.NoError(t, err) fmt.Fprint(f, "workspace:\n host: bar\n") f.Close() - assert.Equal(t, "foo", bundle.Config.Workspace.Host) - err = mutator.ProcessInclude(fullPath, relPath).Apply(context.Background(), bundle) + assert.Equal(t, "foo", b.Config.Workspace.Host) + err = mutator.ProcessInclude(fullPath, relPath).Apply(context.Background(), b) require.NoError(t, err) - assert.Equal(t, "bar", bundle.Config.Workspace.Host) + assert.Equal(t, "bar", b.Config.Workspace.Host) } diff --git a/bundle/config/mutator/process_root_includes_test.go b/bundle/config/mutator/process_root_includes_test.go index aec9b32dff..7a0b9e65a3 100644 --- a/bundle/config/mutator/process_root_includes_test.go +++ b/bundle/config/mutator/process_root_includes_test.go @@ -24,12 +24,12 @@ func touch(t *testing.T, path, file string) { } func TestProcessRootIncludesEmpty(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: ".", }, } - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + err := mutator.ProcessRootIncludes().Apply(context.Background(), b) require.NoError(t, err) } @@ -41,7 +41,7 @@ func TestProcessRootIncludesAbs(t *testing.T) { t.Skip("skipping temperorilty to make windows unit tests green") } - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: ".", Include: []string{ @@ -49,13 +49,13 @@ func TestProcessRootIncludesAbs(t *testing.T) { }, }, } - err := mutator.ProcessRootIncludes().Apply(context.Background(), 
bundle) + err := mutator.ProcessRootIncludes().Apply(context.Background(), b) require.Error(t, err) assert.Contains(t, err.Error(), "must be relative paths") } func TestProcessRootIncludesSingleGlob(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: t.TempDir(), Include: []string{ @@ -64,18 +64,18 @@ func TestProcessRootIncludesSingleGlob(t *testing.T) { }, } - touch(t, bundle.Config.Path, "databricks.yml") - touch(t, bundle.Config.Path, "a.yml") - touch(t, bundle.Config.Path, "b.yml") + touch(t, b.Config.Path, "databricks.yml") + touch(t, b.Config.Path, "a.yml") + touch(t, b.Config.Path, "b.yml") - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + err := mutator.ProcessRootIncludes().Apply(context.Background(), b) require.NoError(t, err) - assert.Equal(t, []string{"a.yml", "b.yml"}, bundle.Config.Include) + assert.Equal(t, []string{"a.yml", "b.yml"}, b.Config.Include) } func TestProcessRootIncludesMultiGlob(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: t.TempDir(), Include: []string{ @@ -85,17 +85,17 @@ func TestProcessRootIncludesMultiGlob(t *testing.T) { }, } - touch(t, bundle.Config.Path, "a1.yml") - touch(t, bundle.Config.Path, "b1.yml") + touch(t, b.Config.Path, "a1.yml") + touch(t, b.Config.Path, "b1.yml") - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + err := mutator.ProcessRootIncludes().Apply(context.Background(), b) require.NoError(t, err) - assert.Equal(t, []string{"a1.yml", "b1.yml"}, bundle.Config.Include) + assert.Equal(t, []string{"a1.yml", "b1.yml"}, b.Config.Include) } func TestProcessRootIncludesRemoveDups(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: t.TempDir(), Include: []string{ @@ -105,15 +105,15 @@ func TestProcessRootIncludesRemoveDups(t *testing.T) { }, } - touch(t, bundle.Config.Path, "a.yml") + touch(t, b.Config.Path, "a.yml") - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + err := mutator.ProcessRootIncludes().Apply(context.Background(), b) require.NoError(t, err) - assert.Equal(t, []string{"a.yml"}, bundle.Config.Include) + assert.Equal(t, []string{"a.yml"}, b.Config.Include) } func TestProcessRootIncludesNotExists(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: t.TempDir(), Include: []string{ @@ -121,7 +121,7 @@ func TestProcessRootIncludesNotExists(t *testing.T) { }, }, } - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + err := mutator.ProcessRootIncludes().Apply(context.Background(), b) require.Error(t, err) assert.Contains(t, err.Error(), "notexist.yml defined in 'include' section does not match any files") } @@ -132,15 +132,15 @@ func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) { touch(t, rootPath, testYamlName) t.Setenv(env.IncludesVariable, path.Join(rootPath, testYamlName)) - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: rootPath, }, } - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + err := mutator.ProcessRootIncludes().Apply(context.Background(), b) require.NoError(t, err) - assert.Contains(t, bundle.Config.Include, testYamlName) + assert.Contains(t, b.Config.Include, testYamlName) } func TestProcessRootIncludesDedupExtrasFromEnvVar(t *testing.T) { @@ -155,13 +155,13 @@ func TestProcessRootIncludesDedupExtrasFromEnvVar(t *testing.T) { string(os.PathListSeparator), )) - bundle := &bundle.Bundle{ 
+ b := &bundle.Bundle{ Config: config.Root{ Path: rootPath, }, } - err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + err := mutator.ProcessRootIncludes().Apply(context.Background(), b) require.NoError(t, err) - assert.Equal(t, []string{testYamlName}, bundle.Config.Include) + assert.Equal(t, []string{testYamlName}, b.Config.Include) } diff --git a/bundle/config/mutator/process_target_mode_test.go b/bundle/config/mutator/process_target_mode_test.go index f7e78da2d4..120c882b40 100644 --- a/bundle/config/mutator/process_target_mode_test.go +++ b/bundle/config/mutator/process_target_mode_test.go @@ -89,111 +89,111 @@ func mockBundle(mode config.Mode) *bundle.Bundle { } func TestProcessTargetModeDevelopment(t *testing.T) { - bundle := mockBundle(config.Development) + b := mockBundle(config.Development) m := ProcessTargetMode() - err := m.Apply(context.Background(), bundle) + err := m.Apply(context.Background(), b) require.NoError(t, err) // Job 1 - assert.Equal(t, "[dev lennart] job1", bundle.Config.Resources.Jobs["job1"].Name) - assert.Equal(t, bundle.Config.Resources.Jobs["job1"].Tags["dev"], "lennart") - assert.Equal(t, bundle.Config.Resources.Jobs["job1"].Schedule.PauseStatus, jobs.PauseStatusPaused) + assert.Equal(t, "[dev lennart] job1", b.Config.Resources.Jobs["job1"].Name) + assert.Equal(t, b.Config.Resources.Jobs["job1"].Tags["dev"], "lennart") + assert.Equal(t, b.Config.Resources.Jobs["job1"].Schedule.PauseStatus, jobs.PauseStatusPaused) // Job 2 - assert.Equal(t, "[dev lennart] job2", bundle.Config.Resources.Jobs["job2"].Name) - assert.Equal(t, bundle.Config.Resources.Jobs["job2"].Tags["dev"], "lennart") - assert.Equal(t, bundle.Config.Resources.Jobs["job2"].Schedule.PauseStatus, jobs.PauseStatusUnpaused) + assert.Equal(t, "[dev lennart] job2", b.Config.Resources.Jobs["job2"].Name) + assert.Equal(t, b.Config.Resources.Jobs["job2"].Tags["dev"], "lennart") + assert.Equal(t, b.Config.Resources.Jobs["job2"].Schedule.PauseStatus, jobs.PauseStatusUnpaused) // Pipeline 1 - assert.Equal(t, "[dev lennart] pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name) - assert.True(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) + assert.Equal(t, "[dev lennart] pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name) + assert.True(t, b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) // Experiment 1 - assert.Equal(t, "/Users/lennart.kats@databricks.com/[dev lennart] experiment1", bundle.Config.Resources.Experiments["experiment1"].Name) - assert.Contains(t, bundle.Config.Resources.Experiments["experiment1"].Experiment.Tags, ml.ExperimentTag{Key: "dev", Value: "lennart"}) - assert.Equal(t, "dev", bundle.Config.Resources.Experiments["experiment1"].Experiment.Tags[0].Key) + assert.Equal(t, "/Users/lennart.kats@databricks.com/[dev lennart] experiment1", b.Config.Resources.Experiments["experiment1"].Name) + assert.Contains(t, b.Config.Resources.Experiments["experiment1"].Experiment.Tags, ml.ExperimentTag{Key: "dev", Value: "lennart"}) + assert.Equal(t, "dev", b.Config.Resources.Experiments["experiment1"].Experiment.Tags[0].Key) // Experiment 2 - assert.Equal(t, "[dev lennart] experiment2", bundle.Config.Resources.Experiments["experiment2"].Name) - assert.Contains(t, bundle.Config.Resources.Experiments["experiment2"].Experiment.Tags, ml.ExperimentTag{Key: "dev", Value: "lennart"}) + assert.Equal(t, "[dev lennart] experiment2", b.Config.Resources.Experiments["experiment2"].Name) + assert.Contains(t, 
b.Config.Resources.Experiments["experiment2"].Experiment.Tags, ml.ExperimentTag{Key: "dev", Value: "lennart"}) // Model 1 - assert.Equal(t, "[dev lennart] model1", bundle.Config.Resources.Models["model1"].Name) + assert.Equal(t, "[dev lennart] model1", b.Config.Resources.Models["model1"].Name) // Model serving endpoint 1 - assert.Equal(t, "dev_lennart_servingendpoint1", bundle.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name) + assert.Equal(t, "dev_lennart_servingendpoint1", b.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name) // Registered model 1 - assert.Equal(t, "dev_lennart_registeredmodel1", bundle.Config.Resources.RegisteredModels["registeredmodel1"].Name) + assert.Equal(t, "dev_lennart_registeredmodel1", b.Config.Resources.RegisteredModels["registeredmodel1"].Name) } func TestProcessTargetModeDevelopmentTagNormalizationForAws(t *testing.T) { - bundle := mockBundle(config.Development) - bundle.Tagging = tags.ForCloud(&sdkconfig.Config{ + b := mockBundle(config.Development) + b.Tagging = tags.ForCloud(&sdkconfig.Config{ Host: "https://dbc-XXXXXXXX-YYYY.cloud.databricks.com/", }) - bundle.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" - err := ProcessTargetMode().Apply(context.Background(), bundle) + b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" + err := ProcessTargetMode().Apply(context.Background(), b) require.NoError(t, err) // Assert that tag normalization took place. - assert.Equal(t, "Hello world__", bundle.Config.Resources.Jobs["job1"].Tags["dev"]) + assert.Equal(t, "Hello world__", b.Config.Resources.Jobs["job1"].Tags["dev"]) } func TestProcessTargetModeDevelopmentTagNormalizationForAzure(t *testing.T) { - bundle := mockBundle(config.Development) - bundle.Tagging = tags.ForCloud(&sdkconfig.Config{ + b := mockBundle(config.Development) + b.Tagging = tags.ForCloud(&sdkconfig.Config{ Host: "https://adb-xxx.y.azuredatabricks.net/", }) - bundle.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" - err := ProcessTargetMode().Apply(context.Background(), bundle) + b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" + err := ProcessTargetMode().Apply(context.Background(), b) require.NoError(t, err) // Assert that tag normalization took place (Azure allows more characters than AWS). - assert.Equal(t, "Héllö wörld?!", bundle.Config.Resources.Jobs["job1"].Tags["dev"]) + assert.Equal(t, "Héllö wörld?!", b.Config.Resources.Jobs["job1"].Tags["dev"]) } func TestProcessTargetModeDevelopmentTagNormalizationForGcp(t *testing.T) { - bundle := mockBundle(config.Development) - bundle.Tagging = tags.ForCloud(&sdkconfig.Config{ + b := mockBundle(config.Development) + b.Tagging = tags.ForCloud(&sdkconfig.Config{ Host: "https://123.4.gcp.databricks.com/", }) - bundle.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" - err := ProcessTargetMode().Apply(context.Background(), bundle) + b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" + err := ProcessTargetMode().Apply(context.Background(), b) require.NoError(t, err) // Assert that tag normalization took place. 
- assert.Equal(t, "Hello_world", bundle.Config.Resources.Jobs["job1"].Tags["dev"]) + assert.Equal(t, "Hello_world", b.Config.Resources.Jobs["job1"].Tags["dev"]) } func TestProcessTargetModeDefault(t *testing.T) { - bundle := mockBundle("") + b := mockBundle("") m := ProcessTargetMode() - err := m.Apply(context.Background(), bundle) + err := m.Apply(context.Background(), b) require.NoError(t, err) - assert.Equal(t, "job1", bundle.Config.Resources.Jobs["job1"].Name) - assert.Equal(t, "pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name) - assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) - assert.Equal(t, "servingendpoint1", bundle.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name) - assert.Equal(t, "registeredmodel1", bundle.Config.Resources.RegisteredModels["registeredmodel1"].Name) + assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name) + assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name) + assert.False(t, b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) + assert.Equal(t, "servingendpoint1", b.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name) + assert.Equal(t, "registeredmodel1", b.Config.Resources.RegisteredModels["registeredmodel1"].Name) } func TestProcessTargetModeProduction(t *testing.T) { - bundle := mockBundle(config.Production) + b := mockBundle(config.Production) - err := validateProductionMode(context.Background(), bundle, false) + err := validateProductionMode(context.Background(), b, false) require.ErrorContains(t, err, "state_path") - bundle.Config.Workspace.StatePath = "/Shared/.bundle/x/y/state" - bundle.Config.Workspace.ArtifactsPath = "/Shared/.bundle/x/y/artifacts" - bundle.Config.Workspace.FilesPath = "/Shared/.bundle/x/y/files" + b.Config.Workspace.StatePath = "/Shared/.bundle/x/y/state" + b.Config.Workspace.ArtifactsPath = "/Shared/.bundle/x/y/artifacts" + b.Config.Workspace.FilesPath = "/Shared/.bundle/x/y/files" - err = validateProductionMode(context.Background(), bundle, false) + err = validateProductionMode(context.Background(), b, false) require.ErrorContains(t, err, "production") permissions := []resources.Permission{ @@ -202,41 +202,41 @@ func TestProcessTargetModeProduction(t *testing.T) { UserName: "user@company.com", }, } - bundle.Config.Resources.Jobs["job1"].Permissions = permissions - bundle.Config.Resources.Jobs["job1"].RunAs = &jobs.JobRunAs{UserName: "user@company.com"} - bundle.Config.Resources.Jobs["job2"].RunAs = &jobs.JobRunAs{UserName: "user@company.com"} - bundle.Config.Resources.Pipelines["pipeline1"].Permissions = permissions - bundle.Config.Resources.Experiments["experiment1"].Permissions = permissions - bundle.Config.Resources.Experiments["experiment2"].Permissions = permissions - bundle.Config.Resources.Models["model1"].Permissions = permissions - bundle.Config.Resources.ModelServingEndpoints["servingendpoint1"].Permissions = permissions - - err = validateProductionMode(context.Background(), bundle, false) + b.Config.Resources.Jobs["job1"].Permissions = permissions + b.Config.Resources.Jobs["job1"].RunAs = &jobs.JobRunAs{UserName: "user@company.com"} + b.Config.Resources.Jobs["job2"].RunAs = &jobs.JobRunAs{UserName: "user@company.com"} + b.Config.Resources.Pipelines["pipeline1"].Permissions = permissions + b.Config.Resources.Experiments["experiment1"].Permissions = permissions + b.Config.Resources.Experiments["experiment2"].Permissions = permissions + b.Config.Resources.Models["model1"].Permissions = 
permissions + b.Config.Resources.ModelServingEndpoints["servingendpoint1"].Permissions = permissions + + err = validateProductionMode(context.Background(), b, false) require.NoError(t, err) - assert.Equal(t, "job1", bundle.Config.Resources.Jobs["job1"].Name) - assert.Equal(t, "pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name) - assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) - assert.Equal(t, "servingendpoint1", bundle.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name) - assert.Equal(t, "registeredmodel1", bundle.Config.Resources.RegisteredModels["registeredmodel1"].Name) + assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name) + assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name) + assert.False(t, b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) + assert.Equal(t, "servingendpoint1", b.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name) + assert.Equal(t, "registeredmodel1", b.Config.Resources.RegisteredModels["registeredmodel1"].Name) } func TestProcessTargetModeProductionOkForPrincipal(t *testing.T) { - bundle := mockBundle(config.Production) + b := mockBundle(config.Production) // Our target has all kinds of problems when not using service principals ... - err := validateProductionMode(context.Background(), bundle, false) + err := validateProductionMode(context.Background(), b, false) require.Error(t, err) // ... but we're much less strict when a principal is used - err = validateProductionMode(context.Background(), bundle, true) + err = validateProductionMode(context.Background(), b, true) require.NoError(t, err) } // Make sure that we have test coverage for all resource types func TestAllResourcesMocked(t *testing.T) { - bundle := mockBundle(config.Development) - resources := reflect.ValueOf(bundle.Config.Resources) + b := mockBundle(config.Development) + resources := reflect.ValueOf(b.Config.Resources) for i := 0; i < resources.NumField(); i++ { field := resources.Field(i) @@ -253,11 +253,11 @@ func TestAllResourcesMocked(t *testing.T) { // Make sure that we at least rename all resources func TestAllResourcesRenamed(t *testing.T) { - bundle := mockBundle(config.Development) - resources := reflect.ValueOf(bundle.Config.Resources) + b := mockBundle(config.Development) + resources := reflect.ValueOf(b.Config.Resources) m := ProcessTargetMode() - err := m.Apply(context.Background(), bundle) + err := m.Apply(context.Background(), b) require.NoError(t, err) for i := 0; i < resources.NumField(); i++ { diff --git a/bundle/config/mutator/select_default_target_test.go b/bundle/config/mutator/select_default_target_test.go index 5d7b93b283..cb595f56ae 100644 --- a/bundle/config/mutator/select_default_target_test.go +++ b/bundle/config/mutator/select_default_target_test.go @@ -11,30 +11,30 @@ import ( ) func TestSelectDefaultTargetNoTargets(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Targets: map[string]*config.Target{}, }, } - err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + err := mutator.SelectDefaultTarget().Apply(context.Background(), b) assert.ErrorContains(t, err, "no targets defined") } func TestSelectDefaultTargetSingleTargets(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Targets: map[string]*config.Target{ "foo": {}, }, }, } - err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + err := 
mutator.SelectDefaultTarget().Apply(context.Background(), b) assert.NoError(t, err) - assert.Equal(t, "foo", bundle.Config.Bundle.Target) + assert.Equal(t, "foo", b.Config.Bundle.Target) } func TestSelectDefaultTargetNoDefaults(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Targets: map[string]*config.Target{ "foo": {}, @@ -43,12 +43,12 @@ func TestSelectDefaultTargetNoDefaults(t *testing.T) { }, }, } - err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + err := mutator.SelectDefaultTarget().Apply(context.Background(), b) assert.ErrorContains(t, err, "please specify target") } func TestSelectDefaultTargetNoDefaultsWithNil(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Targets: map[string]*config.Target{ "foo": nil, @@ -56,12 +56,12 @@ func TestSelectDefaultTargetNoDefaultsWithNil(t *testing.T) { }, }, } - err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + err := mutator.SelectDefaultTarget().Apply(context.Background(), b) assert.ErrorContains(t, err, "please specify target") } func TestSelectDefaultTargetMultipleDefaults(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Targets: map[string]*config.Target{ "foo": {Default: true}, @@ -70,12 +70,12 @@ func TestSelectDefaultTargetMultipleDefaults(t *testing.T) { }, }, } - err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + err := mutator.SelectDefaultTarget().Apply(context.Background(), b) assert.ErrorContains(t, err, "multiple targets are marked as default") } func TestSelectDefaultTargetSingleDefault(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Targets: map[string]*config.Target{ "foo": {}, @@ -84,7 +84,7 @@ func TestSelectDefaultTargetSingleDefault(t *testing.T) { }, }, } - err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + err := mutator.SelectDefaultTarget().Apply(context.Background(), b) assert.NoError(t, err) - assert.Equal(t, "bar", bundle.Config.Bundle.Target) + assert.Equal(t, "bar", b.Config.Bundle.Target) } diff --git a/bundle/config/mutator/select_target_test.go b/bundle/config/mutator/select_target_test.go index dfcd8cb089..6fae0ca22a 100644 --- a/bundle/config/mutator/select_target_test.go +++ b/bundle/config/mutator/select_target_test.go @@ -12,7 +12,7 @@ import ( ) func TestSelectTarget(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ Host: "foo", @@ -26,19 +26,19 @@ func TestSelectTarget(t *testing.T) { }, }, } - err := mutator.SelectTarget("default").Apply(context.Background(), bundle) + err := mutator.SelectTarget("default").Apply(context.Background(), b) require.NoError(t, err) - assert.Equal(t, "bar", bundle.Config.Workspace.Host) + assert.Equal(t, "bar", b.Config.Workspace.Host) } func TestSelectTargetNotFound(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Targets: map[string]*config.Target{ "default": {}, }, }, } - err := mutator.SelectTarget("doesnt-exist").Apply(context.Background(), bundle) + err := mutator.SelectTarget("doesnt-exist").Apply(context.Background(), b) require.Error(t, err, "no targets defined") } diff --git a/bundle/config/mutator/set_variables_test.go b/bundle/config/mutator/set_variables_test.go index 323f1e8646..c450041312 100644 --- a/bundle/config/mutator/set_variables_test.go +++ b/bundle/config/mutator/set_variables_test.go @@ -87,7 
+87,7 @@ func TestSetVariablesMutator(t *testing.T) { defaultValForA := "default-a" defaultValForB := "default-b" valForC := "assigned-val-c" - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Variables: map[string]*variable.Variable{ "a": { @@ -108,9 +108,9 @@ func TestSetVariablesMutator(t *testing.T) { t.Setenv("BUNDLE_VAR_b", "env-var-b") - err := SetVariables().Apply(context.Background(), bundle) + err := SetVariables().Apply(context.Background(), b) require.NoError(t, err) - assert.Equal(t, "default-a", *bundle.Config.Variables["a"].Value) - assert.Equal(t, "env-var-b", *bundle.Config.Variables["b"].Value) - assert.Equal(t, "assigned-val-c", *bundle.Config.Variables["c"].Value) + assert.Equal(t, "default-a", *b.Config.Variables["a"].Value) + assert.Equal(t, "env-var-b", *b.Config.Variables["b"].Value) + assert.Equal(t, "assigned-val-c", *b.Config.Variables["c"].Value) } diff --git a/bundle/config/mutator/translate_paths_test.go b/bundle/config/mutator/translate_paths_test.go index c24fd2e713..98ce2eb242 100644 --- a/bundle/config/mutator/translate_paths_test.go +++ b/bundle/config/mutator/translate_paths_test.go @@ -35,7 +35,7 @@ func touchEmptyFile(t *testing.T, path string) { func TestTranslatePathsSkippedWithGitSource(t *testing.T) { dir := t.TempDir() - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: dir, Workspace: config.Workspace{ @@ -80,23 +80,23 @@ func TestTranslatePathsSkippedWithGitSource(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), bundle) + err := mutator.TranslatePaths().Apply(context.Background(), b) require.NoError(t, err) assert.Equal( t, "my_job_notebook.py", - bundle.Config.Resources.Jobs["job"].Tasks[0].NotebookTask.NotebookPath, + b.Config.Resources.Jobs["job"].Tasks[0].NotebookTask.NotebookPath, ) assert.Equal( t, "foo", - bundle.Config.Resources.Jobs["job"].Tasks[1].PythonWheelTask.PackageName, + b.Config.Resources.Jobs["job"].Tasks[1].PythonWheelTask.PackageName, ) assert.Equal( t, "my_python_file.py", - bundle.Config.Resources.Jobs["job"].Tasks[2].SparkPythonTask.PythonFile, + b.Config.Resources.Jobs["job"].Tasks[2].SparkPythonTask.PythonFile, ) } @@ -107,7 +107,7 @@ func TestTranslatePaths(t *testing.T) { touchEmptyFile(t, filepath.Join(dir, "my_python_file.py")) touchEmptyFile(t, filepath.Join(dir, "dist", "task.jar")) - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: dir, Workspace: config.Workspace{ @@ -207,66 +207,66 @@ func TestTranslatePaths(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), bundle) + err := mutator.TranslatePaths().Apply(context.Background(), b) require.NoError(t, err) // Assert that the path in the tasks now refer to the artifact. 
assert.Equal( t, "/bundle/my_job_notebook", - bundle.Config.Resources.Jobs["job"].Tasks[0].NotebookTask.NotebookPath, + b.Config.Resources.Jobs["job"].Tasks[0].NotebookTask.NotebookPath, ) assert.Equal( t, filepath.Join("dist", "task.whl"), - bundle.Config.Resources.Jobs["job"].Tasks[0].Libraries[0].Whl, + b.Config.Resources.Jobs["job"].Tasks[0].Libraries[0].Whl, ) assert.Equal( t, "/Users/jane.doe@databricks.com/doesnt_exist.py", - bundle.Config.Resources.Jobs["job"].Tasks[1].NotebookTask.NotebookPath, + b.Config.Resources.Jobs["job"].Tasks[1].NotebookTask.NotebookPath, ) assert.Equal( t, "/bundle/my_job_notebook", - bundle.Config.Resources.Jobs["job"].Tasks[2].NotebookTask.NotebookPath, + b.Config.Resources.Jobs["job"].Tasks[2].NotebookTask.NotebookPath, ) assert.Equal( t, "/bundle/my_python_file.py", - bundle.Config.Resources.Jobs["job"].Tasks[4].SparkPythonTask.PythonFile, + b.Config.Resources.Jobs["job"].Tasks[4].SparkPythonTask.PythonFile, ) assert.Equal( t, "/bundle/dist/task.jar", - bundle.Config.Resources.Jobs["job"].Tasks[5].Libraries[0].Jar, + b.Config.Resources.Jobs["job"].Tasks[5].Libraries[0].Jar, ) assert.Equal( t, "dbfs:/bundle/dist/task_remote.jar", - bundle.Config.Resources.Jobs["job"].Tasks[6].Libraries[0].Jar, + b.Config.Resources.Jobs["job"].Tasks[6].Libraries[0].Jar, ) // Assert that the path in the libraries now refer to the artifact. assert.Equal( t, "/bundle/my_pipeline_notebook", - bundle.Config.Resources.Pipelines["pipeline"].Libraries[0].Notebook.Path, + b.Config.Resources.Pipelines["pipeline"].Libraries[0].Notebook.Path, ) assert.Equal( t, "/Users/jane.doe@databricks.com/doesnt_exist.py", - bundle.Config.Resources.Pipelines["pipeline"].Libraries[1].Notebook.Path, + b.Config.Resources.Pipelines["pipeline"].Libraries[1].Notebook.Path, ) assert.Equal( t, "/bundle/my_pipeline_notebook", - bundle.Config.Resources.Pipelines["pipeline"].Libraries[2].Notebook.Path, + b.Config.Resources.Pipelines["pipeline"].Libraries[2].Notebook.Path, ) assert.Equal( t, "/bundle/my_python_file.py", - bundle.Config.Resources.Pipelines["pipeline"].Libraries[4].File.Path, + b.Config.Resources.Pipelines["pipeline"].Libraries[4].File.Path, ) } @@ -278,7 +278,7 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { touchEmptyFile(t, filepath.Join(dir, "job", "my_sql_file.sql")) touchEmptyFile(t, filepath.Join(dir, "job", "my_dbt_project", "dbt_project.yml")) - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: dir, Workspace: config.Workspace{ @@ -342,41 +342,41 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), bundle) + err := mutator.TranslatePaths().Apply(context.Background(), b) require.NoError(t, err) assert.Equal( t, "/bundle/job/my_python_file.py", - bundle.Config.Resources.Jobs["job"].Tasks[0].SparkPythonTask.PythonFile, + b.Config.Resources.Jobs["job"].Tasks[0].SparkPythonTask.PythonFile, ) assert.Equal( t, "/bundle/job/dist/task.jar", - bundle.Config.Resources.Jobs["job"].Tasks[1].Libraries[0].Jar, + b.Config.Resources.Jobs["job"].Tasks[1].Libraries[0].Jar, ) assert.Equal( t, "/bundle/job/my_sql_file.sql", - bundle.Config.Resources.Jobs["job"].Tasks[2].SqlTask.File.Path, + b.Config.Resources.Jobs["job"].Tasks[2].SqlTask.File.Path, ) assert.Equal( t, "/bundle/job/my_dbt_project", - bundle.Config.Resources.Jobs["job"].Tasks[3].DbtTask.ProjectDirectory, + b.Config.Resources.Jobs["job"].Tasks[3].DbtTask.ProjectDirectory, ) assert.Equal( t, "/bundle/pipeline/my_python_file.py", - 
bundle.Config.Resources.Pipelines["pipeline"].Libraries[0].File.Path, + b.Config.Resources.Pipelines["pipeline"].Libraries[0].File.Path, ) } func TestTranslatePathsOutsideBundleRoot(t *testing.T) { dir := t.TempDir() - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: dir, Workspace: config.Workspace{ @@ -403,14 +403,14 @@ func TestTranslatePathsOutsideBundleRoot(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), bundle) + err := mutator.TranslatePaths().Apply(context.Background(), b) assert.ErrorContains(t, err, "is not contained in bundle root") } func TestJobNotebookDoesNotExistError(t *testing.T) { dir := t.TempDir() - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: dir, Resources: config.Resources{ @@ -434,14 +434,14 @@ func TestJobNotebookDoesNotExistError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), bundle) + err := mutator.TranslatePaths().Apply(context.Background(), b) assert.EqualError(t, err, "notebook ./doesnt_exist.py not found") } func TestJobFileDoesNotExistError(t *testing.T) { dir := t.TempDir() - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: dir, Resources: config.Resources{ @@ -465,14 +465,14 @@ func TestJobFileDoesNotExistError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), bundle) + err := mutator.TranslatePaths().Apply(context.Background(), b) assert.EqualError(t, err, "file ./doesnt_exist.py not found") } func TestPipelineNotebookDoesNotExistError(t *testing.T) { dir := t.TempDir() - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: dir, Resources: config.Resources{ @@ -496,14 +496,14 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), bundle) + err := mutator.TranslatePaths().Apply(context.Background(), b) assert.EqualError(t, err, "notebook ./doesnt_exist.py not found") } func TestPipelineFileDoesNotExistError(t *testing.T) { dir := t.TempDir() - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: dir, Resources: config.Resources{ @@ -527,7 +527,7 @@ func TestPipelineFileDoesNotExistError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), bundle) + err := mutator.TranslatePaths().Apply(context.Background(), b) assert.EqualError(t, err, "file ./doesnt_exist.py not found") } @@ -535,7 +535,7 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) { dir := t.TempDir() touchNotebookFile(t, filepath.Join(dir, "my_notebook.py")) - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: dir, Workspace: config.Workspace{ @@ -562,7 +562,7 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), bundle) + err := mutator.TranslatePaths().Apply(context.Background(), b) assert.ErrorContains(t, err, `expected a file for "tasks.spark_python_task.python_file" but got a notebook`) } @@ -570,7 +570,7 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) { dir := t.TempDir() touchEmptyFile(t, filepath.Join(dir, "my_file.py")) - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: dir, Workspace: config.Workspace{ @@ -597,7 +597,7 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), bundle) + err := 
mutator.TranslatePaths().Apply(context.Background(), b) assert.ErrorContains(t, err, `expected a notebook for "tasks.notebook_task.notebook_path" but got a file`) } @@ -605,7 +605,7 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) { dir := t.TempDir() touchEmptyFile(t, filepath.Join(dir, "my_file.py")) - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: dir, Workspace: config.Workspace{ @@ -632,7 +632,7 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), bundle) + err := mutator.TranslatePaths().Apply(context.Background(), b) assert.ErrorContains(t, err, `expected a notebook for "libraries.notebook.path" but got a file`) } @@ -640,7 +640,7 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) { dir := t.TempDir() touchNotebookFile(t, filepath.Join(dir, "my_notebook.py")) - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: dir, Workspace: config.Workspace{ @@ -667,6 +667,6 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), bundle) + err := mutator.TranslatePaths().Apply(context.Background(), b) assert.ErrorContains(t, err, `expected a file for "libraries.file.path" but got a notebook`) } diff --git a/bundle/config/mutator/validate_git_details_test.go b/bundle/config/mutator/validate_git_details_test.go index 252964eeb7..eedef12609 100644 --- a/bundle/config/mutator/validate_git_details_test.go +++ b/bundle/config/mutator/validate_git_details_test.go @@ -10,7 +10,7 @@ import ( ) func TestValidateGitDetailsMatchingBranches(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ Git: config.Git{ @@ -22,13 +22,13 @@ func TestValidateGitDetailsMatchingBranches(t *testing.T) { } m := ValidateGitDetails() - err := m.Apply(context.Background(), bundle) + err := m.Apply(context.Background(), b) assert.NoError(t, err) } func TestValidateGitDetailsNonMatchingBranches(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ Git: config.Git{ @@ -40,14 +40,14 @@ func TestValidateGitDetailsNonMatchingBranches(t *testing.T) { } m := ValidateGitDetails() - err := m.Apply(context.Background(), bundle) + err := m.Apply(context.Background(), b) expectedError := "not on the right Git branch:\n expected according to configuration: main\n actual: feature\nuse --force to override" assert.EqualError(t, err, expectedError) } func TestValidateGitDetailsNotUsingGit(t *testing.T) { - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ Git: config.Git{ @@ -59,7 +59,7 @@ func TestValidateGitDetailsNotUsingGit(t *testing.T) { } m := ValidateGitDetails() - err := m.Apply(context.Background(), bundle) + err := m.Apply(context.Background(), b) assert.NoError(t, err) } diff --git a/bundle/context.go b/bundle/context.go index 9287afd1ef..3e6ed751c5 100644 --- a/bundle/context.go +++ b/bundle/context.go @@ -26,9 +26,9 @@ func GetOrNil(ctx context.Context) *Bundle { // Get returns the bundle as configured on the context. // It panics if it isn't configured. 
func Get(ctx context.Context) *Bundle { - bundle := GetOrNil(ctx) - if bundle == nil { + b := GetOrNil(ctx) + if b == nil { panic("context not configured with bundle") } - return bundle + return b } diff --git a/bundle/deferred_test.go b/bundle/deferred_test.go index 46d5e6412b..f75867d696 100644 --- a/bundle/deferred_test.go +++ b/bundle/deferred_test.go @@ -29,8 +29,8 @@ func TestDeferredMutatorWhenAllMutatorsSucceed(t *testing.T) { cleanup := &testMutator{} deferredMutator := Defer(Seq(m1, m2, m3), cleanup) - bundle := &Bundle{} - err := Apply(context.Background(), bundle, deferredMutator) + b := &Bundle{} + err := Apply(context.Background(), b, deferredMutator) assert.NoError(t, err) assert.Equal(t, 1, m1.applyCalled) @@ -46,8 +46,8 @@ func TestDeferredMutatorWhenFirstFails(t *testing.T) { cleanup := &testMutator{} deferredMutator := Defer(Seq(mErr, m1, m2), cleanup) - bundle := &Bundle{} - err := Apply(context.Background(), bundle, deferredMutator) + b := &Bundle{} + err := Apply(context.Background(), b, deferredMutator) assert.ErrorContains(t, err, "mutator error occurred") assert.Equal(t, 1, mErr.applyCalled) @@ -63,8 +63,8 @@ func TestDeferredMutatorWhenMiddleOneFails(t *testing.T) { cleanup := &testMutator{} deferredMutator := Defer(Seq(m1, mErr, m2), cleanup) - bundle := &Bundle{} - err := Apply(context.Background(), bundle, deferredMutator) + b := &Bundle{} + err := Apply(context.Background(), b, deferredMutator) assert.ErrorContains(t, err, "mutator error occurred") assert.Equal(t, 1, m1.applyCalled) @@ -80,8 +80,8 @@ func TestDeferredMutatorWhenLastOneFails(t *testing.T) { cleanup := &testMutator{} deferredMutator := Defer(Seq(m1, m2, mErr), cleanup) - bundle := &Bundle{} - err := Apply(context.Background(), bundle, deferredMutator) + b := &Bundle{} + err := Apply(context.Background(), b, deferredMutator) assert.ErrorContains(t, err, "mutator error occurred") assert.Equal(t, 1, m1.applyCalled) @@ -97,8 +97,8 @@ func TestDeferredMutatorCombinesErrorMessages(t *testing.T) { cleanupErr := &mutatorWithError{errorMsg: "cleanup error occurred"} deferredMutator := Defer(Seq(m1, m2, mErr), cleanupErr) - bundle := &Bundle{} - err := Apply(context.Background(), bundle, deferredMutator) + b := &Bundle{} + err := Apply(context.Background(), b, deferredMutator) assert.ErrorContains(t, err, "mutator error occurred\ncleanup error occurred") assert.Equal(t, 1, m1.applyCalled) diff --git a/bundle/deploy/terraform/init_test.go b/bundle/deploy/terraform/init_test.go index 001e7a2206..266f1c431f 100644 --- a/bundle/deploy/terraform/init_test.go +++ b/bundle/deploy/terraform/init_test.go @@ -27,7 +27,7 @@ func TestInitEnvironmentVariables(t *testing.T) { t.Skipf("cannot find terraform binary: %s", err) } - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ @@ -43,9 +43,9 @@ func TestInitEnvironmentVariables(t *testing.T) { // TODO(pietern): create test fixture that initializes a mocked client. 
t.Setenv("DATABRICKS_HOST", "https://x") t.Setenv("DATABRICKS_TOKEN", "foobar") - bundle.WorkspaceClient() + b.WorkspaceClient() - err = Initialize().Apply(context.Background(), bundle) + err = Initialize().Apply(context.Background(), b) require.NoError(t, err) } diff --git a/bundle/mutator_test.go b/bundle/mutator_test.go index 127f566858..c1f3c075f8 100644 --- a/bundle/mutator_test.go +++ b/bundle/mutator_test.go @@ -34,8 +34,8 @@ func TestMutator(t *testing.T) { }, } - bundle := &Bundle{} - err := Apply(context.Background(), bundle, m) + b := &Bundle{} + err := Apply(context.Background(), b, m) assert.NoError(t, err) assert.Equal(t, 1, m.applyCalled) diff --git a/bundle/python/transform_test.go b/bundle/python/transform_test.go index 1ccdba563d..b6427ccd8e 100644 --- a/bundle/python/transform_test.go +++ b/bundle/python/transform_test.go @@ -73,7 +73,7 @@ func TestGenerateBoth(t *testing.T) { func TestTransformFiltersWheelTasksOnly(t *testing.T) { trampoline := pythonTrampoline{} - bundle := &bundle.Bundle{ + b := &bundle.Bundle{ Config: config.Root{ Resources: config.Resources{ Jobs: map[string]*resources.Job{ @@ -106,7 +106,7 @@ func TestTransformFiltersWheelTasksOnly(t *testing.T) { }, } - tasks := trampoline.GetTasks(bundle) + tasks := trampoline.GetTasks(b) require.Len(t, tasks, 1) require.Equal(t, "job1", tasks[0].JobKey) require.Equal(t, "key1", tasks[0].Task.TaskKey) diff --git a/bundle/seq_test.go b/bundle/seq_test.go index 26ae37f8f0..d5c229e3cd 100644 --- a/bundle/seq_test.go +++ b/bundle/seq_test.go @@ -13,8 +13,8 @@ func TestSeqMutator(t *testing.T) { m3 := &testMutator{} seqMutator := Seq(m1, m2, m3) - bundle := &Bundle{} - err := Apply(context.Background(), bundle, seqMutator) + b := &Bundle{} + err := Apply(context.Background(), b, seqMutator) assert.NoError(t, err) assert.Equal(t, 1, m1.applyCalled) @@ -29,8 +29,8 @@ func TestSeqWithDeferredMutator(t *testing.T) { m4 := &testMutator{} seqMutator := Seq(m1, Defer(m2, m3), m4) - bundle := &Bundle{} - err := Apply(context.Background(), bundle, seqMutator) + b := &Bundle{} + err := Apply(context.Background(), b, seqMutator) assert.NoError(t, err) assert.Equal(t, 1, m1.applyCalled) @@ -46,8 +46,8 @@ func TestSeqWithErrorAndDeferredMutator(t *testing.T) { m3 := &testMutator{} seqMutator := Seq(errorMut, Defer(m1, m2), m3) - bundle := &Bundle{} - err := Apply(context.Background(), bundle, seqMutator) + b := &Bundle{} + err := Apply(context.Background(), b, seqMutator) assert.Error(t, err) assert.Equal(t, 1, errorMut.applyCalled) @@ -63,8 +63,8 @@ func TestSeqWithErrorInsideDeferredMutator(t *testing.T) { m3 := &testMutator{} seqMutator := Seq(m1, Defer(errorMut, m2), m3) - bundle := &Bundle{} - err := Apply(context.Background(), bundle, seqMutator) + b := &Bundle{} + err := Apply(context.Background(), b, seqMutator) assert.Error(t, err) assert.Equal(t, 1, m1.applyCalled) @@ -80,8 +80,8 @@ func TestSeqWithErrorInsideFinallyStage(t *testing.T) { m3 := &testMutator{} seqMutator := Seq(m1, Defer(m2, errorMut), m3) - bundle := &Bundle{} - err := Apply(context.Background(), bundle, seqMutator) + b := &Bundle{} + err := Apply(context.Background(), b, seqMutator) assert.Error(t, err) assert.Equal(t, 1, m1.applyCalled) From 33f5e759c4cd52c583ae1d36719c9e985e452402 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 15 Nov 2023 11:25:37 +0100 Subject: [PATCH 009/104] Replace direct calls with bundle.Apply --- bundle/config/mutator/default_target_test.go | 4 ++-- .../mutator/default_workspace_paths_test.go | 4 ++-- 
.../mutator/default_workspace_root_test.go | 2 +- .../mutator/expand_workspace_root_test.go | 8 +++---- .../config/mutator/override_compute_test.go | 10 ++++---- bundle/config/mutator/process_include_test.go | 2 +- .../mutator/process_root_includes_test.go | 16 ++++++------- .../mutator/process_target_mode_test.go | 12 +++++----- .../config/mutator/select_default_target.go | 4 ++-- .../mutator/select_default_target_test.go | 12 +++++----- bundle/config/mutator/select_target_test.go | 4 ++-- bundle/config/mutator/set_variables_test.go | 2 +- bundle/config/mutator/translate_paths_test.go | 24 +++++++++---------- .../mutator/validate_git_details_test.go | 6 ++--- bundle/deploy/metadata/compute_test.go | 2 +- bundle/deploy/terraform/init_test.go | 2 +- bundle/tests/bundle/wheel_test.go | 16 ++++++------- 17 files changed, 65 insertions(+), 65 deletions(-) diff --git a/bundle/config/mutator/default_target_test.go b/bundle/config/mutator/default_target_test.go index 9214c4eff7..61a5a01384 100644 --- a/bundle/config/mutator/default_target_test.go +++ b/bundle/config/mutator/default_target_test.go @@ -13,7 +13,7 @@ import ( func TestDefaultTarget(t *testing.T) { b := &bundle.Bundle{} - err := mutator.DefineDefaultTarget().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.DefineDefaultTarget()) require.NoError(t, err) env, ok := b.Config.Targets["default"] assert.True(t, ok) @@ -28,7 +28,7 @@ func TestDefaultTargetAlreadySpecified(t *testing.T) { }, }, } - err := mutator.DefineDefaultTarget().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.DefineDefaultTarget()) require.NoError(t, err) _, ok := b.Config.Targets["default"] assert.False(t, ok) diff --git a/bundle/config/mutator/default_workspace_paths_test.go b/bundle/config/mutator/default_workspace_paths_test.go index 0f99555817..02b3bb4af0 100644 --- a/bundle/config/mutator/default_workspace_paths_test.go +++ b/bundle/config/mutator/default_workspace_paths_test.go @@ -19,7 +19,7 @@ func TestDefineDefaultWorkspacePaths(t *testing.T) { }, }, } - err := mutator.DefineDefaultWorkspacePaths().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspacePaths()) require.NoError(t, err) assert.Equal(t, "/files", b.Config.Workspace.FilesPath) assert.Equal(t, "/artifacts", b.Config.Workspace.ArtifactsPath) @@ -37,7 +37,7 @@ func TestDefineDefaultWorkspacePathsAlreadySet(t *testing.T) { }, }, } - err := mutator.DefineDefaultWorkspacePaths().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspacePaths()) require.NoError(t, err) assert.Equal(t, "/foo/bar", b.Config.Workspace.FilesPath) assert.Equal(t, "/foo/bar", b.Config.Workspace.ArtifactsPath) diff --git a/bundle/config/mutator/default_workspace_root_test.go b/bundle/config/mutator/default_workspace_root_test.go index ad921f6fdf..9dd549a390 100644 --- a/bundle/config/mutator/default_workspace_root_test.go +++ b/bundle/config/mutator/default_workspace_root_test.go @@ -20,7 +20,7 @@ func TestDefaultWorkspaceRoot(t *testing.T) { }, }, } - err := mutator.DefineDefaultWorkspaceRoot().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.DefineDefaultWorkspaceRoot()) require.NoError(t, err) assert.Equal(t, "~/.bundle/name/environment", b.Config.Workspace.RootPath) } diff --git a/bundle/config/mutator/expand_workspace_root_test.go b/bundle/config/mutator/expand_workspace_root_test.go index 
217c07c512..17ee065097 100644 --- a/bundle/config/mutator/expand_workspace_root_test.go +++ b/bundle/config/mutator/expand_workspace_root_test.go @@ -25,7 +25,7 @@ func TestExpandWorkspaceRoot(t *testing.T) { }, }, } - err := mutator.ExpandWorkspaceRoot().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) require.NoError(t, err) assert.Equal(t, "/Users/jane@doe.com/foo", b.Config.Workspace.RootPath) } @@ -43,7 +43,7 @@ func TestExpandWorkspaceRootDoesNothing(t *testing.T) { }, }, } - err := mutator.ExpandWorkspaceRoot().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) require.NoError(t, err) assert.Equal(t, "/Users/charly@doe.com/foo", b.Config.Workspace.RootPath) } @@ -60,7 +60,7 @@ func TestExpandWorkspaceRootWithoutRoot(t *testing.T) { }, }, } - err := mutator.ExpandWorkspaceRoot().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) require.Error(t, err) } @@ -72,6 +72,6 @@ func TestExpandWorkspaceRootWithoutCurrentUser(t *testing.T) { }, }, } - err := mutator.ExpandWorkspaceRoot().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.ExpandWorkspaceRoot()) require.Error(t, err) } diff --git a/bundle/config/mutator/override_compute_test.go b/bundle/config/mutator/override_compute_test.go index 70d7f238da..4c5d4427db 100644 --- a/bundle/config/mutator/override_compute_test.go +++ b/bundle/config/mutator/override_compute_test.go @@ -47,7 +47,7 @@ func TestOverrideDevelopment(t *testing.T) { } m := mutator.OverrideCompute() - err := m.Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, m) require.NoError(t, err) assert.Nil(t, b.Config.Resources.Jobs["job1"].Tasks[0].NewCluster) assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) @@ -83,7 +83,7 @@ func TestOverrideDevelopmentEnv(t *testing.T) { } m := mutator.OverrideCompute() - err := m.Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, m) require.NoError(t, err) assert.Equal(t, "cluster2", b.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) } @@ -108,7 +108,7 @@ func TestOverridePipelineTask(t *testing.T) { } m := mutator.OverrideCompute() - err := m.Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, m) require.NoError(t, err) assert.Empty(t, b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) } @@ -138,7 +138,7 @@ func TestOverrideProduction(t *testing.T) { } m := mutator.OverrideCompute() - err := m.Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, m) require.Error(t, err) } @@ -165,6 +165,6 @@ func TestOverrideProductionEnv(t *testing.T) { } m := mutator.OverrideCompute() - err := m.Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, m) require.NoError(t, err) } diff --git a/bundle/config/mutator/process_include_test.go b/bundle/config/mutator/process_include_test.go index eb1cb29151..7ca5d19811 100644 --- a/bundle/config/mutator/process_include_test.go +++ b/bundle/config/mutator/process_include_test.go @@ -32,7 +32,7 @@ func TestProcessInclude(t *testing.T) { f.Close() assert.Equal(t, "foo", b.Config.Workspace.Host) - err = mutator.ProcessInclude(fullPath, relPath).Apply(context.Background(), b) + err = bundle.Apply(context.Background(), b, mutator.ProcessInclude(fullPath, relPath)) require.NoError(t, err) assert.Equal(t, "bar", 
b.Config.Workspace.Host) } diff --git a/bundle/config/mutator/process_root_includes_test.go b/bundle/config/mutator/process_root_includes_test.go index 7a0b9e65a3..88a6c7433c 100644 --- a/bundle/config/mutator/process_root_includes_test.go +++ b/bundle/config/mutator/process_root_includes_test.go @@ -29,7 +29,7 @@ func TestProcessRootIncludesEmpty(t *testing.T) { Path: ".", }, } - err := mutator.ProcessRootIncludes().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.NoError(t, err) } @@ -49,7 +49,7 @@ func TestProcessRootIncludesAbs(t *testing.T) { }, }, } - err := mutator.ProcessRootIncludes().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.Error(t, err) assert.Contains(t, err.Error(), "must be relative paths") } @@ -68,7 +68,7 @@ func TestProcessRootIncludesSingleGlob(t *testing.T) { touch(t, b.Config.Path, "a.yml") touch(t, b.Config.Path, "b.yml") - err := mutator.ProcessRootIncludes().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.NoError(t, err) assert.Equal(t, []string{"a.yml", "b.yml"}, b.Config.Include) @@ -88,7 +88,7 @@ func TestProcessRootIncludesMultiGlob(t *testing.T) { touch(t, b.Config.Path, "a1.yml") touch(t, b.Config.Path, "b1.yml") - err := mutator.ProcessRootIncludes().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.NoError(t, err) assert.Equal(t, []string{"a1.yml", "b1.yml"}, b.Config.Include) @@ -107,7 +107,7 @@ func TestProcessRootIncludesRemoveDups(t *testing.T) { touch(t, b.Config.Path, "a.yml") - err := mutator.ProcessRootIncludes().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.NoError(t, err) assert.Equal(t, []string{"a.yml"}, b.Config.Include) } @@ -121,7 +121,7 @@ func TestProcessRootIncludesNotExists(t *testing.T) { }, }, } - err := mutator.ProcessRootIncludes().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.Error(t, err) assert.Contains(t, err.Error(), "notexist.yml defined in 'include' section does not match any files") } @@ -138,7 +138,7 @@ func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) { }, } - err := mutator.ProcessRootIncludes().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.NoError(t, err) assert.Contains(t, b.Config.Include, testYamlName) } @@ -161,7 +161,7 @@ func TestProcessRootIncludesDedupExtrasFromEnvVar(t *testing.T) { }, } - err := mutator.ProcessRootIncludes().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.ProcessRootIncludes()) require.NoError(t, err) assert.Equal(t, []string{testYamlName}, b.Config.Include) } diff --git a/bundle/config/mutator/process_target_mode_test.go b/bundle/config/mutator/process_target_mode_test.go index 120c882b40..05705fecbc 100644 --- a/bundle/config/mutator/process_target_mode_test.go +++ b/bundle/config/mutator/process_target_mode_test.go @@ -92,7 +92,7 @@ func TestProcessTargetModeDevelopment(t *testing.T) { b := mockBundle(config.Development) m := ProcessTargetMode() - err := m.Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, m) require.NoError(t, err) // Job 1 @@ -135,7 +135,7 @@ func TestProcessTargetModeDevelopmentTagNormalizationForAws(t *testing.T) { }) 
b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" - err := ProcessTargetMode().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, ProcessTargetMode()) require.NoError(t, err) // Assert that tag normalization took place. @@ -149,7 +149,7 @@ func TestProcessTargetModeDevelopmentTagNormalizationForAzure(t *testing.T) { }) b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" - err := ProcessTargetMode().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, ProcessTargetMode()) require.NoError(t, err) // Assert that tag normalization took place (Azure allows more characters than AWS). @@ -163,7 +163,7 @@ func TestProcessTargetModeDevelopmentTagNormalizationForGcp(t *testing.T) { }) b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!" - err := ProcessTargetMode().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, ProcessTargetMode()) require.NoError(t, err) // Assert that tag normalization took place. @@ -174,7 +174,7 @@ func TestProcessTargetModeDefault(t *testing.T) { b := mockBundle("") m := ProcessTargetMode() - err := m.Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, m) require.NoError(t, err) assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name) assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name) @@ -257,7 +257,7 @@ func TestAllResourcesRenamed(t *testing.T) { resources := reflect.ValueOf(b.Config.Resources) m := ProcessTargetMode() - err := m.Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, m) require.NoError(t, err) for i := 0; i < resources.NumField(); i++ { diff --git a/bundle/config/mutator/select_default_target.go b/bundle/config/mutator/select_default_target.go index 8abcfe4ff2..be5046f82c 100644 --- a/bundle/config/mutator/select_default_target.go +++ b/bundle/config/mutator/select_default_target.go @@ -28,7 +28,7 @@ func (m *selectDefaultTarget) Apply(ctx context.Context, b *bundle.Bundle) error // One target means there's only one default. names := maps.Keys(b.Config.Targets) if len(names) == 1 { - return SelectTarget(names[0]).Apply(ctx, b) + return bundle.Apply(ctx, b, SelectTarget(names[0])) } // Multiple targets means we look for the `default` flag. @@ -50,5 +50,5 @@ func (m *selectDefaultTarget) Apply(ctx context.Context, b *bundle.Bundle) error } // One default remaining. 
- return SelectTarget(defaults[0]).Apply(ctx, b) + return bundle.Apply(ctx, b, SelectTarget(defaults[0])) } diff --git a/bundle/config/mutator/select_default_target_test.go b/bundle/config/mutator/select_default_target_test.go index cb595f56ae..1c2e451fea 100644 --- a/bundle/config/mutator/select_default_target_test.go +++ b/bundle/config/mutator/select_default_target_test.go @@ -16,7 +16,7 @@ func TestSelectDefaultTargetNoTargets(t *testing.T) { Targets: map[string]*config.Target{}, }, } - err := mutator.SelectDefaultTarget().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) assert.ErrorContains(t, err, "no targets defined") } @@ -28,7 +28,7 @@ func TestSelectDefaultTargetSingleTargets(t *testing.T) { }, }, } - err := mutator.SelectDefaultTarget().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) assert.NoError(t, err) assert.Equal(t, "foo", b.Config.Bundle.Target) } @@ -43,7 +43,7 @@ func TestSelectDefaultTargetNoDefaults(t *testing.T) { }, }, } - err := mutator.SelectDefaultTarget().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) assert.ErrorContains(t, err, "please specify target") } @@ -56,7 +56,7 @@ func TestSelectDefaultTargetNoDefaultsWithNil(t *testing.T) { }, }, } - err := mutator.SelectDefaultTarget().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) assert.ErrorContains(t, err, "please specify target") } @@ -70,7 +70,7 @@ func TestSelectDefaultTargetMultipleDefaults(t *testing.T) { }, }, } - err := mutator.SelectDefaultTarget().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) assert.ErrorContains(t, err, "multiple targets are marked as default") } @@ -84,7 +84,7 @@ func TestSelectDefaultTargetSingleDefault(t *testing.T) { }, }, } - err := mutator.SelectDefaultTarget().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.SelectDefaultTarget()) assert.NoError(t, err) assert.Equal(t, "bar", b.Config.Bundle.Target) } diff --git a/bundle/config/mutator/select_target_test.go b/bundle/config/mutator/select_target_test.go index 6fae0ca22a..20467270b8 100644 --- a/bundle/config/mutator/select_target_test.go +++ b/bundle/config/mutator/select_target_test.go @@ -26,7 +26,7 @@ func TestSelectTarget(t *testing.T) { }, }, } - err := mutator.SelectTarget("default").Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.SelectTarget("default")) require.NoError(t, err) assert.Equal(t, "bar", b.Config.Workspace.Host) } @@ -39,6 +39,6 @@ func TestSelectTargetNotFound(t *testing.T) { }, }, } - err := mutator.SelectTarget("doesnt-exist").Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.SelectTarget("doesnt-exist")) require.Error(t, err, "no targets defined") } diff --git a/bundle/config/mutator/set_variables_test.go b/bundle/config/mutator/set_variables_test.go index c450041312..15a98e5cf6 100644 --- a/bundle/config/mutator/set_variables_test.go +++ b/bundle/config/mutator/set_variables_test.go @@ -108,7 +108,7 @@ func TestSetVariablesMutator(t *testing.T) { t.Setenv("BUNDLE_VAR_b", "env-var-b") - err := SetVariables().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, SetVariables()) require.NoError(t, err) assert.Equal(t, "default-a", *b.Config.Variables["a"].Value) 
assert.Equal(t, "env-var-b", *b.Config.Variables["b"].Value) diff --git a/bundle/config/mutator/translate_paths_test.go b/bundle/config/mutator/translate_paths_test.go index 98ce2eb242..49cc694863 100644 --- a/bundle/config/mutator/translate_paths_test.go +++ b/bundle/config/mutator/translate_paths_test.go @@ -80,7 +80,7 @@ func TestTranslatePathsSkippedWithGitSource(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) require.NoError(t, err) assert.Equal( @@ -207,7 +207,7 @@ func TestTranslatePaths(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) require.NoError(t, err) // Assert that the path in the tasks now refer to the artifact. @@ -342,7 +342,7 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) require.NoError(t, err) assert.Equal( @@ -403,7 +403,7 @@ func TestTranslatePathsOutsideBundleRoot(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, "is not contained in bundle root") } @@ -434,7 +434,7 @@ func TestJobNotebookDoesNotExistError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "notebook ./doesnt_exist.py not found") } @@ -465,7 +465,7 @@ func TestJobFileDoesNotExistError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "file ./doesnt_exist.py not found") } @@ -496,7 +496,7 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "notebook ./doesnt_exist.py not found") } @@ -527,7 +527,7 @@ func TestPipelineFileDoesNotExistError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "file ./doesnt_exist.py not found") } @@ -562,7 +562,7 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a file for "tasks.spark_python_task.python_file" but got a notebook`) } @@ -597,7 +597,7 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a notebook for "tasks.notebook_task.notebook_path" but got a file`) } @@ -632,7 +632,7 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a notebook for "libraries.notebook.path" but got a file`) } @@ -667,6 +667,6 
@@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) { }, } - err := mutator.TranslatePaths().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a file for "libraries.file.path" but got a notebook`) } diff --git a/bundle/config/mutator/validate_git_details_test.go b/bundle/config/mutator/validate_git_details_test.go index eedef12609..f207d9cf96 100644 --- a/bundle/config/mutator/validate_git_details_test.go +++ b/bundle/config/mutator/validate_git_details_test.go @@ -22,7 +22,7 @@ func TestValidateGitDetailsMatchingBranches(t *testing.T) { } m := ValidateGitDetails() - err := m.Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, m) assert.NoError(t, err) } @@ -40,7 +40,7 @@ func TestValidateGitDetailsNonMatchingBranches(t *testing.T) { } m := ValidateGitDetails() - err := m.Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, m) expectedError := "not on the right Git branch:\n expected according to configuration: main\n actual: feature\nuse --force to override" assert.EqualError(t, err, expectedError) @@ -59,7 +59,7 @@ func TestValidateGitDetailsNotUsingGit(t *testing.T) { } m := ValidateGitDetails() - err := m.Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, m) assert.NoError(t, err) } diff --git a/bundle/deploy/metadata/compute_test.go b/bundle/deploy/metadata/compute_test.go index 9e4b475c7a..f6429877f3 100644 --- a/bundle/deploy/metadata/compute_test.go +++ b/bundle/deploy/metadata/compute_test.go @@ -93,7 +93,7 @@ func TestComputeMetadataMutator(t *testing.T) { }, } - err := Compute().Apply(context.Background(), b) + err := bundle.Apply(context.Background(), b, Compute()) require.NoError(t, err) assert.Equal(t, expectedMetadata, b.Metadata) diff --git a/bundle/deploy/terraform/init_test.go b/bundle/deploy/terraform/init_test.go index 266f1c431f..a3a9e0e42c 100644 --- a/bundle/deploy/terraform/init_test.go +++ b/bundle/deploy/terraform/init_test.go @@ -45,7 +45,7 @@ func TestInitEnvironmentVariables(t *testing.T) { t.Setenv("DATABRICKS_TOKEN", "foobar") b.WorkspaceClient() - err = Initialize().Apply(context.Background(), b) + err = bundle.Apply(context.Background(), b, Initialize()) require.NoError(t, err) } diff --git a/bundle/tests/bundle/wheel_test.go b/bundle/tests/bundle/wheel_test.go index f7f0e75e5f..57ecb54b90 100644 --- a/bundle/tests/bundle/wheel_test.go +++ b/bundle/tests/bundle/wheel_test.go @@ -17,7 +17,7 @@ func TestBundlePythonWheelBuild(t *testing.T) { require.NoError(t, err) m := phases.Build() - err = m.Apply(ctx, b) + err = bundle.Apply(ctx, b, m) require.NoError(t, err) matches, err := filepath.Glob("python_wheel/my_test_code/dist/my_test_code-*.whl") @@ -25,7 +25,7 @@ func TestBundlePythonWheelBuild(t *testing.T) { require.Equal(t, 1, len(matches)) match := libraries.MatchWithArtifacts() - err = match.Apply(ctx, b) + err = bundle.Apply(ctx, b, match) require.NoError(t, err) } @@ -35,7 +35,7 @@ func TestBundlePythonWheelBuildAutoDetect(t *testing.T) { require.NoError(t, err) m := phases.Build() - err = m.Apply(ctx, b) + err = bundle.Apply(ctx, b, m) require.NoError(t, err) matches, err := filepath.Glob("python_wheel/my_test_code/dist/my_test_code-*.whl") @@ -43,7 +43,7 @@ func TestBundlePythonWheelBuildAutoDetect(t *testing.T) { require.Equal(t, 1, len(matches)) match := libraries.MatchWithArtifacts() - err = match.Apply(ctx, b) + err = bundle.Apply(ctx, b, match) 
require.NoError(t, err) } @@ -53,11 +53,11 @@ func TestBundlePythonWheelWithDBFSLib(t *testing.T) { require.NoError(t, err) m := phases.Build() - err = m.Apply(ctx, b) + err = bundle.Apply(ctx, b, m) require.NoError(t, err) match := libraries.MatchWithArtifacts() - err = match.Apply(ctx, b) + err = bundle.Apply(ctx, b, match) require.NoError(t, err) } @@ -67,11 +67,11 @@ func TestBundlePythonWheelBuildNoBuildJustUpload(t *testing.T) { require.NoError(t, err) m := phases.Build() - err = m.Apply(ctx, b) + err = bundle.Apply(ctx, b, m) require.NoError(t, err) match := libraries.MatchWithArtifacts() - err = match.Apply(ctx, b) + err = bundle.Apply(ctx, b, match) require.ErrorContains(t, err, "./non-existing/*.whl") require.NotZero(t, len(b.Config.Artifacts)) From bf54271b81ea3b2688b0f6f21d48366a5d20a4e4 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 15 Nov 2023 14:19:10 +0100 Subject: [PATCH 010/104] Tmp --- .../mutator/process_root_includes_test.go | 31 ++++++++++--------- bundle/config/root.go | 11 +++---- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/bundle/config/mutator/process_root_includes_test.go b/bundle/config/mutator/process_root_includes_test.go index 8bf2ad9b19..88a6c7433c 100644 --- a/bundle/config/mutator/process_root_includes_test.go +++ b/bundle/config/mutator/process_root_includes_test.go @@ -3,13 +3,16 @@ package mutator_test import ( "context" "os" + "path" "path/filepath" "runtime" + "strings" "testing" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/env" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -124,10 +127,10 @@ func TestProcessRootIncludesNotExists(t *testing.T) { } func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) { - // rootPath := t.TempDir() - // testYamlName := "extra_include_path.yml" - // touch(t, rootPath, testYamlName) - // t.Setenv(env.IncludesVariable, path.Join(rootPath, testYamlName)) + rootPath := t.TempDir() + testYamlName := "extra_include_path.yml" + touch(t, rootPath, testYamlName) + t.Setenv(env.IncludesVariable, path.Join(rootPath, testYamlName)) b := &bundle.Bundle{ Config: config.Root{ @@ -141,16 +144,16 @@ func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) { } func TestProcessRootIncludesDedupExtrasFromEnvVar(t *testing.T) { - // rootPath := t.TempDir() - // testYamlName := "extra_include_path.yml" - // touch(t, rootPath, testYamlName) - // t.Setenv(env.IncludesVariable, strings.Join( - // []string{ - // path.Join(rootPath, testYamlName), - // path.Join(rootPath, testYamlName), - // }, - // string(os.PathListSeparator), - // )) + rootPath := t.TempDir() + testYamlName := "extra_include_path.yml" + touch(t, rootPath, testYamlName) + t.Setenv(env.IncludesVariable, strings.Join( + []string{ + path.Join(rootPath, testYamlName), + path.Join(rootPath, testYamlName), + }, + string(os.PathListSeparator), + )) b := &bundle.Bundle{ Config: config.Root{ diff --git a/bundle/config/root.go b/bundle/config/root.go index ff3255d740..34870aa0b8 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -295,18 +295,15 @@ func (r *Root) MergeTargetOverrides(name string) error { return err } + if err = mergeField("permissions"); err != nil { + return err + } + // Convert normalized configuration tree to typed configuration. 
err = convert.ToTyped(r, r.value) if err != nil { panic(err) } - if target.Permissions != nil { - err = mergo.Merge(&r.Permissions, target.Permissions, mergo.WithAppendSlice) - if err != nil { - return err - } - } - return nil } From a68f1346a9a263938afe7ad8bb9c92a643857237 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 24 Nov 2023 14:15:27 +0100 Subject: [PATCH 011/104] Add IsValid function to config.Value --- libs/config/value.go | 4 ++++ libs/config/value_test.go | 7 +++++++ 2 files changed, 11 insertions(+) diff --git a/libs/config/value.go b/libs/config/value.go index c77f8147dc..fe0ced9bdf 100644 --- a/libs/config/value.go +++ b/libs/config/value.go @@ -51,6 +51,10 @@ func (v Value) Location() Location { return v.l } +func (v Value) IsValid() bool { + return v.k != KindInvalid +} + func (v Value) AsAny() any { switch v.k { case KindInvalid: diff --git a/libs/config/value_test.go b/libs/config/value_test.go index cb8ef16aae..6c8befc7ed 100644 --- a/libs/config/value_test.go +++ b/libs/config/value_test.go @@ -35,3 +35,10 @@ func TestValueAsMap(t *testing.T) { assert.True(t, ok) assert.Len(t, m, 1) } + +func TestValueIsValid(t *testing.T) { + var zeroValue config.Value + assert.False(t, zeroValue.IsValid()) + var intValue = config.NewValue(1, config.Location{}) + assert.True(t, intValue.IsValid()) +} From a93b508bc01ad9818ba40658461079f3c28cfc8b Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 24 Nov 2023 14:12:37 +0100 Subject: [PATCH 012/104] Populate struct field with `config.Value` instance if possible --- libs/config/convert/struct_info.go | 18 +++++++++++++++ libs/config/convert/struct_info_test.go | 30 +++++++++++++++++++++++++ libs/config/convert/to_typed.go | 6 +++++ libs/config/convert/to_typed_test.go | 18 +++++++++++++++ 4 files changed, 72 insertions(+) diff --git a/libs/config/convert/struct_info.go b/libs/config/convert/struct_info.go index 2457b3c297..80cfabb692 100644 --- a/libs/config/convert/struct_info.go +++ b/libs/config/convert/struct_info.go @@ -4,6 +4,8 @@ import ( "reflect" "strings" "sync" + + "github.com/databricks/cli/libs/config" ) // structInfo holds the type information we need to efficiently @@ -11,6 +13,10 @@ import ( type structInfo struct { // Fields maps the JSON-name of the field to the field's index for use with [FieldByIndex]. Fields map[string][]int + + // ValueField maps to the field with a [config.Value]. + // The underlying type is expected to only have one of these. + ValueField []int } // structInfoCache caches type information. @@ -68,6 +74,15 @@ func buildStructInfo(typ reflect.Type) structInfo { continue } + // If this field has type [config.Value], we populate it with the source [config.Value] from [ToTyped]. + if sf.IsExported() && sf.Type == configValueType { + if out.ValueField != nil { + panic("multiple config.Value fields") + } + out.ValueField = append(prefix, sf.Index...) + continue + } + name, _, _ := strings.Cut(sf.Tag.Get("json"), ",") if name == "" || name == "-" { continue @@ -113,3 +128,6 @@ func (s *structInfo) FieldValues(v reflect.Value) map[string]reflect.Value { return out } + +// Type of [config.Value]. 
+var configValueType = reflect.TypeOf((*config.Value)(nil)).Elem() diff --git a/libs/config/convert/struct_info_test.go b/libs/config/convert/struct_info_test.go index 2e31adac16..685679aecd 100644 --- a/libs/config/convert/struct_info_test.go +++ b/libs/config/convert/struct_info_test.go @@ -4,6 +4,7 @@ import ( "reflect" "testing" + "github.com/databricks/cli/libs/config" "github.com/stretchr/testify/assert" ) @@ -194,3 +195,32 @@ func TestStructInfoFieldValuesAnonymousByPointer(t *testing.T) { assert.Empty(t, fv) }) } + +func TestStructInfoValueFieldAbsent(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + } + + si := getStructInfo(reflect.TypeOf(Tmp{})) + assert.Nil(t, si.ValueField) +} + +func TestStructInfoValueFieldPresent(t *testing.T) { + type Tmp struct { + Foo config.Value + } + + si := getStructInfo(reflect.TypeOf(Tmp{})) + assert.NotNil(t, si.ValueField) +} + +func TestStructInfoValueFieldMultiple(t *testing.T) { + type Tmp struct { + Foo config.Value + Bar config.Value + } + + assert.Panics(t, func() { + getStructInfo(reflect.TypeOf(Tmp{})) + }) +} diff --git a/libs/config/convert/to_typed.go b/libs/config/convert/to_typed.go index ca09fce42b..8c43d97434 100644 --- a/libs/config/convert/to_typed.go +++ b/libs/config/convert/to_typed.go @@ -83,6 +83,12 @@ func toTypedStruct(dst reflect.Value, src config.Value) error { } } + // Populate field(s) for [config.Value], if any. + if info.ValueField != nil { + vv := dst.FieldByIndex(info.ValueField) + vv.Set(reflect.ValueOf(src)) + } + return nil case config.KindNil: dst.SetZero() diff --git a/libs/config/convert/to_typed_test.go b/libs/config/convert/to_typed_test.go index 26e17dcce5..2845bddae0 100644 --- a/libs/config/convert/to_typed_test.go +++ b/libs/config/convert/to_typed_test.go @@ -133,6 +133,24 @@ func TestToTypedStructNilOverwrite(t *testing.T) { assert.Equal(t, Tmp{}, out) } +func TestToTypedStructWithValueField(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + + ConfigValue config.Value + } + + var out Tmp + v := config.V(map[string]config.Value{ + "foo": config.V("bar"), + }) + + err := ToTyped(&out, v) + require.NoError(t, err) + assert.Equal(t, "bar", out.Foo) + assert.Equal(t, v, out.ConfigValue) +} + func TestToTypedMap(t *testing.T) { var out = map[string]string{} From 41658d4506421934d279bf4cbf55da5bafffa681 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 24 Nov 2023 14:38:33 +0100 Subject: [PATCH 013/104] Fix --- bundle/config/mutator/translate_paths_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bundle/config/mutator/translate_paths_test.go b/bundle/config/mutator/translate_paths_test.go index 41d031caad..67f15d4076 100644 --- a/bundle/config/mutator/translate_paths_test.go +++ b/bundle/config/mutator/translate_paths_test.go @@ -238,7 +238,7 @@ func TestTranslatePaths(t *testing.T) { ) assert.Equal( t, - "/bundle/dist/task.jar", + filepath.Join("dist", "task.jar"), b.Config.Resources.Jobs["job"].Tasks[5].Libraries[0].Jar, ) assert.Equal( @@ -352,7 +352,7 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { ) assert.Equal( t, - "/bundle/job/dist/task.jar", + filepath.Join("job", "dist", "task.jar"), b.Config.Resources.Jobs["job"].Tasks[1].Libraries[0].Jar, ) assert.Equal( From f18ba50096b505778b821d0b50cf995e98de51a3 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 18 Dec 2023 16:40:14 +0100 Subject: [PATCH 014/104] Add config walker --- bundle/config/root.go | 11 +++++++++++ libs/config/value.go | 9 +++++++++ 
libs/config/walk.go | 37 +++++++++++++++++++++++++++++++++++++ libs/config/walk_test.go | 1 + 4 files changed, 58 insertions(+) create mode 100644 libs/config/walk.go create mode 100644 libs/config/walk_test.go diff --git a/bundle/config/root.go b/bundle/config/root.go index 065491d54e..04d9382c32 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -307,3 +307,14 @@ func (r *Root) MergeTargetOverrides(name string) error { return nil } + +// ForceLocationInConfig walks all nodes in the configuration tree and +// sets their location to the specified value. +func (r *Root) ForceLocationInConfig(path string) { + out, _ := config.Walk(r.value, func(v config.Value) (config.Value, error) { + return v.WithLocation(config.Location{ + File: path, + }), nil + }) + r.value = out +} diff --git a/libs/config/value.go b/libs/config/value.go index fe0ced9bdf..862427d335 100644 --- a/libs/config/value.go +++ b/libs/config/value.go @@ -38,6 +38,15 @@ func NewValue(v any, loc Location) Value { } } +// WithLocation returns a new Value with its location set to the given value. +func (v Value) WithLocation(loc Location) Value { + return Value{ + v: v.v, + k: v.k, + l: loc, + } +} + func (v Value) AsMap() (map[string]Value, bool) { m, ok := v.v.(map[string]Value) return m, ok diff --git a/libs/config/walk.go b/libs/config/walk.go new file mode 100644 index 0000000000..c1fd0cdcc0 --- /dev/null +++ b/libs/config/walk.go @@ -0,0 +1,37 @@ +package config + +// Walk walks the configuration tree and calls the given function on each node. +// This given function must return the new value for the node. Traversal is depth-first. +func Walk(v Value, fn func(Value) (Value, error)) (Value, error) { + var err error + + switch v.Kind() { + case KindMap: + m := v.v.(map[string]Value) + out := make(map[string]Value, len(m)) + for k, v := range m { + out[k], err = Walk(v, fn) + if err != nil { + return NilValue, err + } + } + v.v = out + case KindSequence: + s := v.v.([]Value) + out := make([]Value, len(s)) + for i, v := range s { + out[i], err = Walk(v, fn) + if err != nil { + return NilValue, err + } + } + v.v = out + } + + v, err = fn(v) + if err != nil { + return NilValue, err + } + + return v, nil +} diff --git a/libs/config/walk_test.go b/libs/config/walk_test.go new file mode 100644 index 0000000000..d7b82b3200 --- /dev/null +++ b/libs/config/walk_test.go @@ -0,0 +1 @@ +package config_test From f7b32900b14c14c31b602895037ae9288c6fe954 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 18 Dec 2023 20:56:26 +0100 Subject: [PATCH 015/104] Capabilities to walk the config tree --- bundle/internal/bundletest/location.go | 33 ++++++++++ libs/config/kind_test.go | 14 +++++ libs/config/path.go | 83 ++++++++++++++++++++++++++ libs/config/path_test.go | 76 +++++++++++++++++++++++ libs/config/walk.go | 45 ++++++++++---- 5 files changed, 240 insertions(+), 11 deletions(-) create mode 100644 bundle/internal/bundletest/location.go create mode 100644 libs/config/kind_test.go create mode 100644 libs/config/path.go create mode 100644 libs/config/path_test.go diff --git a/bundle/internal/bundletest/location.go b/bundle/internal/bundletest/location.go new file mode 100644 index 0000000000..1e6f52bb49 --- /dev/null +++ b/bundle/internal/bundletest/location.go @@ -0,0 +1,33 @@ +package bundletest + +import ( + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/config" +) + +// SetLocation sets the location of all values in the bundle to the given path. 
+// This is useful for testing where we need to associate configuration +// with the path it is loaded from. +func SetLocation(b *bundle.Bundle, pathPrefix config.Path, filePath string) { + b.Config.Mutate(func(root config.Value) (config.Value, error) { + return config.Walk(root, func(p config.Path, v config.Value) (config.Value, error) { + // If the path has the given prefix, set the location. + if p.HasPrefix(pathPrefix) { + return v.WithLocation(config.Location{ + File: filePath, + }), nil + } + + // The path is not nested under the given prefix. + // If the path is a prefix of the prefix, keep traversing and return the node verbatim. + if pathPrefix.HasPrefix(p) { + return v, nil + } + + // Return verbatim, but skip traversal. + return v, config.ErrSkip + }) + }) + + b.Config.ConfigureConfigFilePath() +} diff --git a/libs/config/kind_test.go b/libs/config/kind_test.go new file mode 100644 index 0000000000..6478ced212 --- /dev/null +++ b/libs/config/kind_test.go @@ -0,0 +1,14 @@ +package config_test + +import ( + "testing" + + "github.com/databricks/cli/libs/config" + "github.com/stretchr/testify/assert" +) + +func TestKindZero(t *testing.T) { + var k config.Kind + assert.Equal(t, config.KindInvalid, k) + assert.Equal(t, "invalid", k.String()) +} diff --git a/libs/config/path.go b/libs/config/path.go new file mode 100644 index 0000000000..b3b9c3dbc7 --- /dev/null +++ b/libs/config/path.go @@ -0,0 +1,83 @@ +package config + +import ( + "bytes" + "fmt" +) + +type pathComponent struct { + key string + index int +} + +type Path []pathComponent + +var EmptyPath = Path{} + +func Key(k string) pathComponent { + return pathComponent{key: k} +} + +func Index(i int) pathComponent { + return pathComponent{index: i} +} + +func NewPath(cs ...pathComponent) Path { + return cs +} + +func (p Path) Join(qs ...Path) Path { + for _, q := range qs { + p = p.Append(q...) + } + return p +} + +func (p Path) Append(cs ...pathComponent) Path { + return append(p, cs...) +} + +func (p Path) Equal(q Path) bool { + pl := len(p) + ql := len(q) + if pl != ql { + return false + } + for i := 0; i < pl; i++ { + if p[i] != q[i] { + return false + } + } + return true +} + +func (p Path) HasPrefix(q Path) bool { + pl := len(p) + ql := len(q) + if pl < ql { + return false + } + for i := 0; i < ql; i++ { + if p[i] != q[i] { + return false + } + } + return true +} + +func (p Path) String() string { + var buf bytes.Buffer + + for i, c := range p { + if i > 0 && c.key != "" { + buf.WriteRune('.') + } + if c.key != "" { + buf.WriteString(c.key) + } else { + buf.WriteString(fmt.Sprintf("[%d]", c.index)) + } + } + + return buf.String() +} diff --git a/libs/config/path_test.go b/libs/config/path_test.go new file mode 100644 index 0000000000..3fdd848e60 --- /dev/null +++ b/libs/config/path_test.go @@ -0,0 +1,76 @@ +package config_test + +import ( + "testing" + + "github.com/databricks/cli/libs/config" + "github.com/stretchr/testify/assert" +) + +func TestPathAppend(t *testing.T) { + p := config.NewPath(config.Key("foo")) + + // Single arg. + p1 := p.Append(config.Key("bar")) + assert.True(t, p1.Equal(config.NewPath(config.Key("foo"), config.Key("bar")))) + + // Multiple args. + p2 := p.Append(config.Key("bar"), config.Index(1)) + assert.True(t, p2.Equal(config.NewPath(config.Key("foo"), config.Key("bar"), config.Index(1)))) +} + +func TestPathJoin(t *testing.T) { + p := config.NewPath(config.Key("foo")) + + // Single arg. 
+ p1 := p.Join(config.NewPath(config.Key("bar"))) + assert.True(t, p1.Equal(config.NewPath(config.Key("foo"), config.Key("bar")))) + + // Multiple args. + p2 := p.Join(config.NewPath(config.Key("bar")), config.NewPath(config.Index(1))) + assert.True(t, p2.Equal(config.NewPath(config.Key("foo"), config.Key("bar"), config.Index(1)))) +} + +func TestPathEqualEmpty(t *testing.T) { + assert.True(t, config.EmptyPath.Equal(config.EmptyPath)) +} + +func TestPathEqual(t *testing.T) { + p1 := config.NewPath(config.Key("foo"), config.Index(1)) + p2 := config.NewPath(config.Key("bar"), config.Index(2)) + assert.False(t, p1.Equal(p2), "expected %q to not equal %q", p1, p2) + + p3 := config.NewPath(config.Key("foo"), config.Index(1)) + assert.True(t, p1.Equal(p3), "expected %q to equal %q", p1, p3) + + p4 := config.NewPath(config.Key("foo"), config.Index(1), config.Key("bar"), config.Index(2)) + assert.False(t, p1.Equal(p4), "expected %q to not equal %q", p1, p4) +} + +func TestPathHasPrefixEmpty(t *testing.T) { + empty := config.EmptyPath + nonEmpty := config.NewPath(config.Key("foo")) + assert.True(t, empty.HasPrefix(empty)) + assert.True(t, nonEmpty.HasPrefix(empty)) + assert.False(t, empty.HasPrefix(nonEmpty)) +} + +func TestPathHasPrefix(t *testing.T) { + p1 := config.NewPath(config.Key("foo"), config.Index(1)) + p2 := config.NewPath(config.Key("bar"), config.Index(2)) + assert.False(t, p1.HasPrefix(p2), "expected %q to not have prefix %q", p1, p2) + + p3 := config.NewPath(config.Key("foo")) + assert.True(t, p1.HasPrefix(p3), "expected %q to have prefix %q", p1, p3) +} + +func TestPathString(t *testing.T) { + p1 := config.NewPath(config.Key("foo"), config.Index(1)) + assert.Equal(t, "foo[1]", p1.String()) + + p2 := config.NewPath(config.Key("bar"), config.Index(2), config.Key("baz")) + assert.Equal(t, "bar[2].baz", p2.String()) + + p3 := config.NewPath(config.Key("foo"), config.Index(1), config.Key("bar"), config.Index(2), config.Key("baz")) + assert.Equal(t, "foo[1].bar[2].baz", p3.String()) +} diff --git a/libs/config/walk.go b/libs/config/walk.go index c1fd0cdcc0..e0310674b5 100644 --- a/libs/config/walk.go +++ b/libs/config/walk.go @@ -1,37 +1,60 @@ package config +import "errors" + +// WalkValueFunc is the type of the function called by Walk to traverse the configuration tree. +type WalkValueFunc func(p Path, v Value) (Value, error) + +var ErrDrop = errors.New("drop value from subtree") + +var ErrSkip = errors.New("skip traversal of subtree") + // Walk walks the configuration tree and calls the given function on each node. -// This given function must return the new value for the node. Traversal is depth-first. 
-func Walk(v Value, fn func(Value) (Value, error)) (Value, error) { +func Walk(v Value, fn func(p Path, v Value) (Value, error)) (Value, error) { + return walk(v, EmptyPath, fn) +} + +func walk(v Value, p Path, fn func(p Path, v Value) (Value, error)) (Value, error) { var err error + v, err = fn(p, v) + if err != nil { + if err == ErrSkip { + return v, nil + } + return NilValue, err + } + switch v.Kind() { case KindMap: m := v.v.(map[string]Value) out := make(map[string]Value, len(m)) - for k, v := range m { - out[k], err = Walk(v, fn) + for k := range m { + nv, err := walk(m[k], p.Append(Key(k)), fn) + if err == ErrDrop { + continue + } if err != nil { return NilValue, err } + out[k] = nv } v.v = out case KindSequence: s := v.v.([]Value) out := make([]Value, len(s)) - for i, v := range s { - out[i], err = Walk(v, fn) + for i := range s { + nv, err := walk(s[i], p.Append(Index(i)), fn) + if err == ErrDrop { + continue + } if err != nil { return NilValue, err } + out[i] = nv } v.v = out } - v, err = fn(v) - if err != nil { - return NilValue, err - } - return v, nil } From 21006e5a544e03a9619da9c5fbca7de48fb247bc Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 18 Dec 2023 22:06:36 +0100 Subject: [PATCH 016/104] Mutator tests pass --- bundle/config/artifact.go | 4 +- .../expand_pipeline_glob_paths_test.go | 8 +- .../config/mutator/override_compute_test.go | 4 +- .../mutator/process_target_mode_test.go | 2 +- bundle/config/mutator/translate_paths_test.go | 72 +++++------ bundle/config/paths/paths.go | 13 ++ bundle/config/resources.go | 16 +-- bundle/config/root.go | 112 +++++++++++------- 8 files changed, 125 insertions(+), 106 deletions(-) diff --git a/bundle/config/artifact.go b/bundle/config/artifact.go index 63ab6c489d..470e023f0c 100644 --- a/bundle/config/artifact.go +++ b/bundle/config/artifact.go @@ -14,9 +14,9 @@ import ( type Artifacts map[string]*Artifact -func (artifacts Artifacts) SetConfigFilePath(path string) { +func (artifacts Artifacts) ConfigureConfigFilePath() { for _, artifact := range artifacts { - artifact.ConfigFilePath = path + artifact.ConfigureConfigFilePath() } } diff --git a/bundle/config/mutator/expand_pipeline_glob_paths_test.go b/bundle/config/mutator/expand_pipeline_glob_paths_test.go index ad86865af7..2dfdd464c2 100644 --- a/bundle/config/mutator/expand_pipeline_glob_paths_test.go +++ b/bundle/config/mutator/expand_pipeline_glob_paths_test.go @@ -8,8 +8,9 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/cli/bundle/internal/bundletest" + cv "github.com/databricks/cli/libs/config" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/stretchr/testify/require" @@ -42,9 +43,6 @@ func TestExpandGlobPathsInPipelines(t *testing.T) { Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "resource.yml"), - }, PipelineSpec: &pipelines.PipelineSpec{ Libraries: []pipelines.PipelineLibrary{ { @@ -98,6 +96,8 @@ func TestExpandGlobPathsInPipelines(t *testing.T) { }, } + bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "resource.yml")) + m := ExpandPipelineGlobPaths() err := bundle.Apply(context.Background(), b, m) require.NoError(t, err) diff --git a/bundle/config/mutator/override_compute_test.go 
b/bundle/config/mutator/override_compute_test.go index 4c5d4427db..7cc500c608 100644 --- a/bundle/config/mutator/override_compute_test.go +++ b/bundle/config/mutator/override_compute_test.go @@ -28,7 +28,9 @@ func TestOverrideDevelopment(t *testing.T) { Name: "job1", Tasks: []jobs.Task{ { - NewCluster: &compute.ClusterSpec{}, + NewCluster: &compute.ClusterSpec{ + SparkVersion: "14.2.x-scala2.12", + }, }, { ExistingClusterId: "cluster2", diff --git a/bundle/config/mutator/process_target_mode_test.go b/bundle/config/mutator/process_target_mode_test.go index 8feab19116..de9149e66e 100644 --- a/bundle/config/mutator/process_target_mode_test.go +++ b/bundle/config/mutator/process_target_mode_test.go @@ -274,12 +274,12 @@ func TestAllResourcesMocked(t *testing.T) { // Make sure that we at least rename all resources func TestAllResourcesRenamed(t *testing.T) { b := mockBundle(config.Development) - resources := reflect.ValueOf(b.Config.Resources) m := ProcessTargetMode() err := bundle.Apply(context.Background(), b, m) require.NoError(t, err) + resources := reflect.ValueOf(b.Config.Resources) for i := 0; i < resources.NumField(); i++ { field := resources.Field(i) diff --git a/bundle/config/mutator/translate_paths_test.go b/bundle/config/mutator/translate_paths_test.go index 67f15d4076..65d64cca50 100644 --- a/bundle/config/mutator/translate_paths_test.go +++ b/bundle/config/mutator/translate_paths_test.go @@ -9,8 +9,9 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" - "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/cli/bundle/internal/bundletest" + cv "github.com/databricks/cli/libs/config" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/pipelines" @@ -44,10 +45,6 @@ func TestTranslatePathsSkippedWithGitSource(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "resource.yml"), - }, JobSettings: &jobs.JobSettings{ GitSource: &jobs.GitSource{ GitBranch: "somebranch", @@ -80,6 +77,8 @@ func TestTranslatePathsSkippedWithGitSource(t *testing.T) { }, } + bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "resource.yml")) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) require.NoError(t, err) @@ -116,9 +115,6 @@ func TestTranslatePaths(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "resource.yml"), - }, JobSettings: &jobs.JobSettings{ Tasks: []jobs.Task{ { @@ -171,9 +167,6 @@ func TestTranslatePaths(t *testing.T) { }, Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "resource.yml"), - }, PipelineSpec: &pipelines.PipelineSpec{ Libraries: []pipelines.PipelineLibrary{ { @@ -207,6 +200,8 @@ func TestTranslatePaths(t *testing.T) { }, } + bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "resource.yml")) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) require.NoError(t, err) @@ -287,9 +282,6 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "job/resource.yml"), - }, 
JobSettings: &jobs.JobSettings{ Tasks: []jobs.Task{ { @@ -323,10 +315,6 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { }, Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "pipeline/resource.yml"), - }, - PipelineSpec: &pipelines.PipelineSpec{ Libraries: []pipelines.PipelineLibrary{ { @@ -342,6 +330,9 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { }, } + bundletest.SetLocation(b, cv.NewPath(cv.Key("resources"), cv.Key("jobs")), filepath.Join(dir, "job/resource.yml")) + bundletest.SetLocation(b, cv.NewPath(cv.Key("resources"), cv.Key("pipelines")), filepath.Join(dir, "pipeline/resource.yml")) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) require.NoError(t, err) @@ -385,9 +376,6 @@ func TestTranslatePathsOutsideBundleRoot(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "../resource.yml"), - }, JobSettings: &jobs.JobSettings{ Tasks: []jobs.Task{ { @@ -403,6 +391,8 @@ func TestTranslatePathsOutsideBundleRoot(t *testing.T) { }, } + bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "../resource.yml")) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, "is not contained in bundle root") } @@ -416,9 +406,6 @@ func TestJobNotebookDoesNotExistError(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "fake.yml"), - }, JobSettings: &jobs.JobSettings{ Tasks: []jobs.Task{ { @@ -434,6 +421,8 @@ func TestJobNotebookDoesNotExistError(t *testing.T) { }, } + bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "fake.yml")) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "notebook ./doesnt_exist.py not found") } @@ -447,9 +436,6 @@ func TestJobFileDoesNotExistError(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "fake.yml"), - }, JobSettings: &jobs.JobSettings{ Tasks: []jobs.Task{ { @@ -465,6 +451,8 @@ func TestJobFileDoesNotExistError(t *testing.T) { }, } + bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "fake.yml")) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "file ./doesnt_exist.py not found") } @@ -478,9 +466,6 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) { Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "fake.yml"), - }, PipelineSpec: &pipelines.PipelineSpec{ Libraries: []pipelines.PipelineLibrary{ { @@ -496,6 +481,8 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) { }, } + bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "fake.yml")) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "notebook ./doesnt_exist.py not found") } @@ -509,9 +496,6 @@ func TestPipelineFileDoesNotExistError(t *testing.T) { Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "fake.yml"), - }, PipelineSpec: &pipelines.PipelineSpec{ Libraries: []pipelines.PipelineLibrary{ { @@ -527,6 +511,8 @@ func TestPipelineFileDoesNotExistError(t *testing.T) { }, } + bundletest.SetLocation(b, 
cv.EmptyPath, filepath.Join(dir, "fake.yml")) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "file ./doesnt_exist.py not found") } @@ -544,9 +530,6 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "resource.yml"), - }, JobSettings: &jobs.JobSettings{ Tasks: []jobs.Task{ { @@ -562,6 +545,8 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) { }, } + bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "resource.yml")) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a file for "tasks.spark_python_task.python_file" but got a notebook`) } @@ -579,9 +564,6 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "resource.yml"), - }, JobSettings: &jobs.JobSettings{ Tasks: []jobs.Task{ { @@ -597,6 +579,8 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) { }, } + bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "resource.yml")) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a notebook for "tasks.notebook_task.notebook_path" but got a file`) } @@ -614,9 +598,6 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) { Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "resource.yml"), - }, PipelineSpec: &pipelines.PipelineSpec{ Libraries: []pipelines.PipelineLibrary{ { @@ -632,6 +613,8 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) { }, } + bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "resource.yml")) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a notebook for "libraries.notebook.path" but got a file`) } @@ -649,9 +632,6 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) { Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: paths.Paths{ - ConfigFilePath: filepath.Join(dir, "resource.yml"), - }, PipelineSpec: &pipelines.PipelineSpec{ Libraries: []pipelines.PipelineLibrary{ { @@ -667,6 +647,8 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) { }, } + bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "resource.yml")) + err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a file for "libraries.file.path" but got a notebook`) } diff --git a/bundle/config/paths/paths.go b/bundle/config/paths/paths.go index 2c9ecb8c0d..58fb9c14c4 100644 --- a/bundle/config/paths/paths.go +++ b/bundle/config/paths/paths.go @@ -3,12 +3,25 @@ package paths import ( "fmt" "path/filepath" + + "github.com/databricks/cli/libs/config" ) type Paths struct { // Absolute path on the local file system to the configuration file that holds // the definition of this resource. ConfigFilePath string `json:"-" bundle:"readonly"` + + // DynamicValue stores the [config.Value] of the containing struct. + // This assumes that this struct is always embedded. 
+ DynamicValue config.Value +} + +func (p *Paths) ConfigureConfigFilePath() { + if !p.DynamicValue.IsValid() { + panic("DynamicValue not set") + } + p.ConfigFilePath = p.DynamicValue.Location().File } func (p *Paths) ConfigFileDirectory() (string, error) { diff --git a/bundle/config/resources.go b/bundle/config/resources.go index 2b453c666e..c39cf4f38a 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -124,27 +124,27 @@ func (r *Resources) VerifyUniqueResourceIdentifiers() (*UniqueResourceIdTracker, return tracker, nil } -// SetConfigFilePath sets the specified path for all resources contained in this instance. +// ConfigureConfigFilePath sets the specified path for all resources contained in this instance. // This property is used to correctly resolve paths relative to the path // of the configuration file they were defined in. -func (r *Resources) SetConfigFilePath(path string) { +func (r *Resources) ConfigureConfigFilePath() { for _, e := range r.Jobs { - e.ConfigFilePath = path + e.ConfigureConfigFilePath() } for _, e := range r.Pipelines { - e.ConfigFilePath = path + e.ConfigureConfigFilePath() } for _, e := range r.Models { - e.ConfigFilePath = path + e.ConfigureConfigFilePath() } for _, e := range r.Experiments { - e.ConfigFilePath = path + e.ConfigureConfigFilePath() } for _, e := range r.ModelServingEndpoints { - e.ConfigFilePath = path + e.ConfigureConfigFilePath() } for _, e := range r.RegisteredModels { - e.ConfigFilePath = path + e.ConfigureConfigFilePath() } } diff --git a/bundle/config/root.go b/bundle/config/root.go index 04d9382c32..61a8ae9beb 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -115,32 +115,70 @@ func Load(path string) (*Root, error) { return &r, err } -func (r *Root) MarkMutatorEntry() { - r.depth++ - +func (r *Root) initializeValue() { // Many test cases initialize a config as a Go struct literal. - // The zero-initialized value for [wasLoaded] will be false, - // and indicates we need to populate [r.value]. - if !r.value.IsValid() { - nv, err := convert.FromTyped(r, config.NilValue) - if err != nil { - panic(err) - } + // The value will be invalid and we need to populate it from the typed configuration. + if r.value.IsValid() { + return + } - r.value = nv + nv, err := convert.FromTyped(r, config.NilValue) + if err != nil { + panic(err) + } + + r.value = nv +} + +func (r *Root) toTyped(v config.Value) error { + // Hack: restore path; it may be cleared by [ToTyped] if + // the configuration equals nil (happens in tests). + p := r.Path + defer func() { + r.Path = p + }() + + // Convert normalized configuration tree to typed configuration. + err := convert.ToTyped(r, v) + if err != nil { + return err } + return nil +} + +func (r *Root) Mutate(fn func(config.Value) (config.Value, error)) error { + r.initializeValue() + nv, err := fn(r.value) + if err != nil { + return err + } + err = r.toTyped(nv) + if err != nil { + return err + } + r.value = nv + return nil +} + +func (r *Root) MarkMutatorEntry() { + r.initializeValue() + r.depth++ + // If we are entering a mutator at depth 1, we need to convert // the dynamic configuration tree to typed configuration. if r.depth == 1 { // Always run ToTyped upon entering a mutator. // Convert normalized configuration tree to typed configuration. 
- err := convert.ToTyped(r, r.value) + err := r.toTyped(r.value) if err != nil { panic(err) } + + r.ConfigureConfigFilePath() + } else { - nv, err := convert.FromTyped(r, config.NilValue) + nv, err := convert.FromTyped(r, r.value) if err != nil { panic(err) } @@ -155,7 +193,7 @@ func (r *Root) MarkMutatorExit() { // If we are exiting a mutator at depth 0, we need to convert // the typed configuration to a dynamic configuration tree. if r.depth == 0 { - nv, err := convert.FromTyped(r, config.NilValue) + nv, err := convert.FromTyped(r, r.value) if err != nil { panic(err) } @@ -170,26 +208,10 @@ func (r *Root) Diagnostics() diag.Diagnostics { // SetConfigFilePath configures the path that its configuration // was loaded from in configuration leafs that require it. -func (r *Root) SetConfigFilePath(path string) { - panic("nope") - - r.Resources.SetConfigFilePath(path) +func (r *Root) ConfigureConfigFilePath() { + r.Resources.ConfigureConfigFilePath() if r.Artifacts != nil { - r.Artifacts.SetConfigFilePath(path) - } - - if r.Targets != nil { - for _, env := range r.Targets { - if env == nil { - continue - } - if env.Resources != nil { - env.Resources.SetConfigFilePath(path) - } - if env.Artifacts != nil { - env.Artifacts.SetConfigFilePath(path) - } - } + r.Artifacts.ConfigureConfigFilePath() } } @@ -246,7 +268,7 @@ func (r *Root) Merge(other *Root) error { r.value = nv // Convert normalized configuration tree to typed configuration. - err = convert.ToTyped(r, r.value) + err = r.toTyped(r.value) if err != nil { panic(err) } @@ -300,7 +322,7 @@ func (r *Root) MergeTargetOverrides(name string) error { } // Convert normalized configuration tree to typed configuration. - err = convert.ToTyped(r, r.value) + err = r.toTyped(r.value) if err != nil { panic(err) } @@ -308,13 +330,13 @@ func (r *Root) MergeTargetOverrides(name string) error { return nil } -// ForceLocationInConfig walks all nodes in the configuration tree and -// sets their location to the specified value. -func (r *Root) ForceLocationInConfig(path string) { - out, _ := config.Walk(r.value, func(v config.Value) (config.Value, error) { - return v.WithLocation(config.Location{ - File: path, - }), nil - }) - r.value = out -} +// // ForceLocationInConfig walks all nodes in the configuration tree and +// // sets their location to the specified value. +// func (r *Root) ForceLocationInConfig(path string) { +// out, _ := config.Walk(r.value, func(v config.Value) (config.Value, error) { +// return v.WithLocation(config.Location{ +// File: path, +// }), nil +// }) +// r.value = out +// } From 97908b6fe86650c72bf88289e9b002b15889f336 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 20 Dec 2023 10:01:51 +0100 Subject: [PATCH 017/104] Construct path from string --- libs/config/path_string.go | 58 ++++++++++++++++++++++++++ libs/config/path_string_test.go | 72 +++++++++++++++++++++++++++++++++ 2 files changed, 130 insertions(+) create mode 100644 libs/config/path_string.go create mode 100644 libs/config/path_string_test.go diff --git a/libs/config/path_string.go b/libs/config/path_string.go new file mode 100644 index 0000000000..a79e3b6823 --- /dev/null +++ b/libs/config/path_string.go @@ -0,0 +1,58 @@ +package config + +import ( + "fmt" + "strconv" + "strings" +) + +func NewPathFromString(input string) (Path, error) { + var path Path + + p := input + for p != "" { + // Every component may have a leading dot. + if p != "" && p[0] == '.' 
{ + p = p[1:] + } + + if p[0] == '[' { + // Find next ] + i := strings.Index(p, "]") + if i < 0 { + return nil, fmt.Errorf("invalid path: %s", input) + } + + // Parse index + j, err := strconv.Atoi(p[1:i]) + if err != nil { + return nil, fmt.Errorf("invalid path: %s", input) + } + + // Append index + path = append(path, Index(j)) + p = p[i+1:] + + // The next character must be a . or [ + if p != "" && strings.IndexAny(p, ".[") != 0 { + return nil, fmt.Errorf("invalid path: %s", input) + } + } else { + // Find next . or [ + i := strings.IndexAny(p, ".[") + if i < 0 { + i = len(p) + } + + if i == 0 { + return nil, fmt.Errorf("invalid path: %s", input) + } + + // Append key + path = append(path, Key(p[:i])) + p = p[i:] + } + } + + return path, nil +} diff --git a/libs/config/path_string_test.go b/libs/config/path_string_test.go new file mode 100644 index 0000000000..20c4c25aa3 --- /dev/null +++ b/libs/config/path_string_test.go @@ -0,0 +1,72 @@ +package config_test + +import ( + "fmt" + "testing" + + . "github.com/databricks/cli/libs/config" + "github.com/stretchr/testify/assert" +) + +func TestNewPathFromString(t *testing.T) { + for _, tc := range []struct { + input string + output Path + err error + }{ + { + input: "foo.bar", + output: NewPath(Key("foo"), Key("bar")), + }, + { + input: "[1]", + output: NewPath(Index(1)), + }, + { + input: "foo[1].bar", + output: NewPath(Key("foo"), Index(1), Key("bar")), + }, + { + input: "foo.bar[1]", + output: NewPath(Key("foo"), Key("bar"), Index(1)), + }, + { + input: "foo.bar[1][2]", + output: NewPath(Key("foo"), Key("bar"), Index(1), Index(2)), + }, + { + input: "foo[1234]", + output: NewPath(Key("foo"), Index(1234)), + }, + { + input: "foo[123", + err: fmt.Errorf("invalid path: foo[123"), + }, + { + input: "foo[foo]", + err: fmt.Errorf("invalid path: foo[foo]"), + }, + { + input: "foo..bar", + err: fmt.Errorf("invalid path: foo..bar"), + }, + { + // Every component may have a leading dot. + input: ".foo.[1].bar", + output: NewPath(Key("foo"), Index(1), Key("bar")), + }, + { + // But after an index there must be a dot. 
+ input: "foo[1]bar", + err: fmt.Errorf("invalid path: foo[1]bar"), + }, + } { + p, err := NewPathFromString(tc.input) + if tc.err != nil { + assert.EqualError(t, err, tc.err.Error(), tc.input) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.output, p) + } + } +} From 4543d2d71b11f81e1242b7a5c2895cf7435c7bd9 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 20 Dec 2023 10:05:37 +0100 Subject: [PATCH 018/104] wip --- bundle/config/mutator/environments_compat.go | 50 +++++ .../mutator/environments_compat_test.go | 1 + .../config/mutator/merge_pipeline_clusters.go | 104 ++++++++++ .../mutator/merge_pipeline_clusters_test.go | 62 ++++++ bundle/config/mutator/mutator.go | 2 + bundle/config/mutator/rewrite_sync_paths.go | 86 ++++++++ .../config/mutator/rewrite_sync_paths_test.go | 103 ++++++++++ bundle/config/root.go | 71 ++++--- bundle/config/sync.go | 18 -- .../resources/databricks.yml | 2 - bundle/tests/environment_overrides_test.go | 7 +- .../tests/relative_path_with_includes_test.go | 18 +- bundle/tests/run_as/databricks.yml | 20 +- bundle/tests/run_as_test.go | 16 +- bundle/tests/variables_test.go | 190 +++++++++--------- libs/config/value.go | 91 +++++++++ 16 files changed, 677 insertions(+), 164 deletions(-) create mode 100644 bundle/config/mutator/environments_compat.go create mode 100644 bundle/config/mutator/environments_compat_test.go create mode 100644 bundle/config/mutator/merge_pipeline_clusters.go create mode 100644 bundle/config/mutator/merge_pipeline_clusters_test.go create mode 100644 bundle/config/mutator/rewrite_sync_paths.go create mode 100644 bundle/config/mutator/rewrite_sync_paths_test.go diff --git a/bundle/config/mutator/environments_compat.go b/bundle/config/mutator/environments_compat.go new file mode 100644 index 0000000000..f6daed9c6e --- /dev/null +++ b/bundle/config/mutator/environments_compat.go @@ -0,0 +1,50 @@ +package mutator + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/config" + cv "github.com/databricks/cli/libs/config" +) + +type environmentsToTargets struct{} + +func EnvironmentsToTargets() bundle.Mutator { + return &environmentsToTargets{} +} + +func (m *environmentsToTargets) Name() string { + return "EnvironmentsToTargets" +} + +func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) error { + // Short circuit if the "environments" key is not set. + // This is the common case. + if b.Config.Environments == nil { + return nil + } + + // The "environments" key is set; validate and rewrite it to "targets". + return b.Config.Mutate(func(v config.Value) (config.Value, error) { + environments := v.Get("environments") + targets := v.Get("targets") + + // Return an error if both "environments" and "targets" are set. + if environments != cv.NilValue && targets != cv.NilValue { + return cv.NilValue, fmt.Errorf( + "both 'environments' and 'targets' are specified; only 'targets' should be used. "+ + "Instance of 'environments' found at %s.", + environments.Location().String(), + ) + } + + // Rewrite "environments" to "targets". 
+ if environments != cv.NilValue && targets == cv.NilValue { + return v.SetKey("targets", environments), nil + } + + return v, nil + }) +} diff --git a/bundle/config/mutator/environments_compat_test.go b/bundle/config/mutator/environments_compat_test.go new file mode 100644 index 0000000000..020332f271 --- /dev/null +++ b/bundle/config/mutator/environments_compat_test.go @@ -0,0 +1 @@ +package mutator_test diff --git a/bundle/config/mutator/merge_pipeline_clusters.go b/bundle/config/mutator/merge_pipeline_clusters.go new file mode 100644 index 0000000000..683cf581a1 --- /dev/null +++ b/bundle/config/mutator/merge_pipeline_clusters.go @@ -0,0 +1,104 @@ +package mutator + +import ( + "context" + "strings" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/config/merge" +) + +type mergePipelineClusters struct{} + +func MergePipelineClusters() bundle.Mutator { + return &mergePipelineClusters{} +} + +func (m *mergePipelineClusters) Name() string { + return "MergePipelineClusters" +} + +func clusterLabel(cluster config.Value) (label string) { + v := cluster.Get("label") + if v == config.NilValue { + return "default" + } + + if v.Kind() != config.KindString { + panic("cluster label must be a string") + } + + return strings.ToLower(v.MustString()) +} + +func mergeClustersForPipeline(v config.Value) (config.Value, error) { + clusters, ok := v.Get("clusters").AsSequence() + if !ok { + return v, nil + } + + seen := make(map[string]config.Value) + keys := make([]string, 0, len(clusters)) + + // Target overrides are always appended, so we can iterate in natural order to + // first find the base definition, and merge instances we encounter later. + for i := range clusters { + label := clusterLabel(clusters[i]) + + // Register pipeline cluster with label if not yet seen before. + ref, ok := seen[label] + if !ok { + keys = append(keys, label) + seen[label] = clusters[i] + continue + } + + // Merge this instance into the reference. + var err error + seen[label], err = merge.Merge(ref, clusters[i]) + if err != nil { + return v, err + } + } + + // Gather resulting clusters in natural order. + out := make([]config.Value, 0, len(keys)) + for _, key := range keys { + out = append(out, seen[key]) + } + + return v.SetKey("clusters", config.NewValue(out, config.Location{})), nil +} + +func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) error { + + // // MergeClusters merges cluster definitions with same label. + // // The clusters field is a slice, and as such, overrides are appended to it. + // // We can identify a cluster by its label, however, so we can use this label + // // to figure out which definitions are actually overrides and merge them. + // // + // // Note: the cluster label is optional and defaults to 'default'. + // // We therefore ALSO merge all clusters without a label. 
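	// // For example (mirroring the accompanying unit test): two entries
	// // without a label both resolve to the label 'default', so
	// //
	// //   {node_type_id: i3.xlarge,  num_workers: 2, policy_id: "1234"}
	// //   {node_type_id: i3.2xlarge, num_workers: 4}
	// //
	// // are merged into a single cluster
	// //
	// //   {node_type_id: i3.2xlarge, num_workers: 4, policy_id: "1234"}
	// //
	// // with fields from later definitions taking precedence.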
+ + return b.Config.Mutate(func(v config.Value) (config.Value, error) { + p := config.NewPathFromString("resources.pipelines") + + pv := v.Get("resources").Get("pipelines") + pipelines, ok := pv.AsMap() + if !ok { + return v, nil + } + + out := make(map[string]config.Value) + for key, pipeline := range pipelines { + var err error + out[key], err = mergeClustersForPipeline(pipeline) + if err != nil { + return v, err + } + } + + v.Set(p, config.NewValue(out, config.Location{})) + }) +} diff --git a/bundle/config/mutator/merge_pipeline_clusters_test.go b/bundle/config/mutator/merge_pipeline_clusters_test.go new file mode 100644 index 0000000000..ca3825fb2d --- /dev/null +++ b/bundle/config/mutator/merge_pipeline_clusters_test.go @@ -0,0 +1,62 @@ +package mutator_test + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go/service/pipelines" + "github.com/stretchr/testify/assert" +) + +func TestMergePipelineClusters(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Pipelines: map[string]*resources.Pipeline{ + "foo": &resources.Pipeline{ + PipelineSpec: &pipelines.PipelineSpec{ + Clusters: []pipelines.PipelineCluster{ + { + NodeTypeId: "i3.xlarge", + NumWorkers: 2, + PolicyId: "1234", + }, + { + Label: "maintenance", + NodeTypeId: "i3.2xlarge", + }, + { + NodeTypeId: "i3.2xlarge", + NumWorkers: 4, + }, + }, + }, + }, + }, + }, + }, + } + + err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) + assert.NoError(t, err) + + p := b.Config.Resources.Pipelines["foo"] + + assert.Len(t, p.Clusters, 2) + assert.Equal(t, "default", p.Clusters[0].Label) + assert.Equal(t, "maintenance", p.Clusters[1].Label) + + // The default cluster was merged with a subsequent one. + pc0 := p.Clusters[0] + assert.Equal(t, "i3.2xlarge", pc0.NodeTypeId) + assert.Equal(t, 4, pc0.NumWorkers) + assert.Equal(t, "1234", pc0.PolicyId) + + // The maintenance cluster was left untouched. 
+ pc1 := p.Clusters[1] + assert.Equal(t, "i3.2xlarge", pc1.NodeTypeId) +} diff --git a/bundle/config/mutator/mutator.go b/bundle/config/mutator/mutator.go index 14c4b38e7f..797bd6d413 100644 --- a/bundle/config/mutator/mutator.go +++ b/bundle/config/mutator/mutator.go @@ -8,6 +8,8 @@ func DefaultMutators() []bundle.Mutator { return []bundle.Mutator{ // scripts.Execute(config.ScriptPreInit), ProcessRootIncludes(), + RewriteSyncPaths(), + EnvironmentsToTargets(), InitializeVariables(), DefineDefaultTarget(), LoadGitDetails(), diff --git a/bundle/config/mutator/rewrite_sync_paths.go b/bundle/config/mutator/rewrite_sync_paths.go new file mode 100644 index 0000000000..351a65fb62 --- /dev/null +++ b/bundle/config/mutator/rewrite_sync_paths.go @@ -0,0 +1,86 @@ +package mutator + +import ( + "context" + "maps" + "path/filepath" + "slices" + + "github.com/databricks/cli/bundle" + + cv "github.com/databricks/cli/libs/config" +) + +type rewriteSyncPaths struct{} + +func RewriteSyncPaths() bundle.Mutator { + return &rewriteSyncPaths{} +} + +func (m *rewriteSyncPaths) Name() string { + return "RewriteSyncPaths" +} + +func (m *rewriteSyncPaths) makeRelativeTo(root string, seq cv.Value) (cv.Value, error) { + if seq == cv.NilValue || seq.Kind() != cv.KindSequence { + return cv.NilValue, nil + } + + out, ok := seq.AsSequence() + if !ok { + return seq, nil + } + + out = slices.Clone(out) + for i, v := range out { + if v.Kind() != cv.KindString { + continue + } + + dir := filepath.Dir(v.Location().File) + rel, err := filepath.Rel(root, dir) + if err != nil { + return cv.NilValue, err + } + + out[i] = cv.NewValue(filepath.Join(rel, v.MustString()), v.Location()) + } + + return cv.NewValue(out, seq.Location()), nil +} + +func (m *rewriteSyncPaths) fn(root string) func(c cv.Value) (cv.Value, error) { + return func(c cv.Value) (cv.Value, error) { + var err error + + // First build a new sync object + sync := c.Get("sync") + if sync == cv.NilValue { + return c, nil + } + + out, ok := sync.AsMap() + if !ok { + return c, nil + } + + out = maps.Clone(out) + + out["include"], err = m.makeRelativeTo(root, out["include"]) + if err != nil { + return c, err + } + + out["exclude"], err = m.makeRelativeTo(root, out["exclude"]) + if err != nil { + return c, err + } + + // Then replace the sync object with the new one + return c.SetKey("sync", cv.NewValue(out, sync.Location())), nil + } +} + +func (m *rewriteSyncPaths) Apply(ctx context.Context, b *bundle.Bundle) error { + return b.Config.Mutate(m.fn(b.Config.Path)) +} diff --git a/bundle/config/mutator/rewrite_sync_paths_test.go b/bundle/config/mutator/rewrite_sync_paths_test.go new file mode 100644 index 0000000000..91c8c05592 --- /dev/null +++ b/bundle/config/mutator/rewrite_sync_paths_test.go @@ -0,0 +1,103 @@ +package mutator_test + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/internal/bundletest" + cv "github.com/databricks/cli/libs/config" + "github.com/stretchr/testify/assert" +) + +func TestRewriteSyncPathsRelative(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Path: ".", + Sync: config.Sync{ + Include: []string{ + "foo", + "bar", + }, + Exclude: []string{ + "baz", + "qux", + }, + }, + }, + } + + bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("include"), cv.Index(0)), "./file.yml") + bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("include"), cv.Index(1)), 
"./a/file.yml") + bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("exclude"), cv.Index(0)), "./a/b/file.yml") + bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("exclude"), cv.Index(1)), "./a/b/c/file.yml") + + err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) + assert.NoError(t, err) + + assert.Equal(t, "foo", b.Config.Sync.Include[0]) + assert.Equal(t, "a/bar", b.Config.Sync.Include[1]) + assert.Equal(t, "a/b/baz", b.Config.Sync.Exclude[0]) + assert.Equal(t, "a/b/c/qux", b.Config.Sync.Exclude[1]) +} + +func TestRewriteSyncPathsAbsolute(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Path: "/tmp/dir", + Sync: config.Sync{ + Include: []string{ + "foo", + "bar", + }, + Exclude: []string{ + "baz", + "qux", + }, + }, + }, + } + + bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("include"), cv.Index(0)), "/tmp/dir/file.yml") + bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("include"), cv.Index(1)), "/tmp/dir/a/file.yml") + bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("exclude"), cv.Index(0)), "/tmp/dir/a/b/file.yml") + bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("exclude"), cv.Index(1)), "/tmp/dir/a/b/c/file.yml") + + err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) + assert.NoError(t, err) + + assert.Equal(t, "foo", b.Config.Sync.Include[0]) + assert.Equal(t, "a/bar", b.Config.Sync.Include[1]) + assert.Equal(t, "a/b/baz", b.Config.Sync.Exclude[0]) + assert.Equal(t, "a/b/c/qux", b.Config.Sync.Exclude[1]) +} + +func TestRewriteSyncPathsErrorPaths(t *testing.T) { + t.Run("no sync block", func(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Path: ".", + }, + } + + err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) + assert.NoError(t, err) + }) + + t.Run("empty include/exclude blocks", func(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Path: ".", + Sync: config.Sync{ + Include: []string{}, + Exclude: []string{}, + }, + }, + } + + err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) + assert.NoError(t, err) + }) +} diff --git a/bundle/config/root.go b/bundle/config/root.go index 61a8ae9beb..50e8d1808d 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -20,7 +20,6 @@ import ( type Root struct { value config.Value diags diag.Diagnostics - depth int // Path contains the directory path to the root of the bundle. @@ -99,15 +98,6 @@ func Load(path string) (*Root, error) { // Store dynamic configuration for later reference (e.g. location information on all nodes). r.value = v - // if r.Environments != nil && r.Targets != nil { - // return nil, fmt.Errorf("both 'environments' and 'targets' are specified, only 'targets' should be used: %s", path) - // } - - // if r.Environments != nil { - // //TODO: add a command line notice that this is a deprecated option. - // r.Targets = r.Environments - // } - r.Path = filepath.Dir(path) // r.SetConfigFilePath(path) @@ -131,11 +121,18 @@ func (r *Root) initializeValue() { } func (r *Root) toTyped(v config.Value) error { - // Hack: restore path; it may be cleared by [ToTyped] if + // Hack: restore state; it may be cleared by [ToTyped] if // the configuration equals nil (happens in tests). - p := r.Path + value := r.value + diags := r.diags + depth := r.depth + path := r.Path + defer func() { - r.Path = p + r.value = value + r.diags = diags + r.depth = depth + r.Path = path }() // Convert normalized configuration tree to typed configuration. 
@@ -244,12 +241,6 @@ func (r *Root) Merge(other *Root) error { // // Merge diagnostics. // r.diags = append(r.diags, other.diags...) - // err := r.Sync.Merge(r, other) - // if err != nil { - // return err - // } - // other.Sync = Sync{} - // // TODO: when hooking into merge semantics, disallow setting path on the target instance. // other.Path = "" @@ -273,6 +264,8 @@ func (r *Root) Merge(other *Root) error { panic(err) } + r.ConfigureConfigFilePath() + // TODO: define and test semantics for merging. // return mergo.Merge(r, other, mergo.WithOverride) return nil @@ -297,6 +290,34 @@ func (r *Root) MergeTargetOverrides(name string) error { return nil } + if mode := target.Get("mode"); mode != config.NilValue { + bundle := r.value.Get("bundle") + if bundle == config.NilValue { + bundle = config.NewValue(map[string]config.Value{}, config.Location{}) + } + bundle.MustMap()["mode"] = mode + r.value.MustMap()["bundle"] = bundle + } + + // if target.Mode != "" { + // r.Bundle.Mode = target.Mode + // } + + // if target.ComputeID != "" { + // r.Bundle.ComputeID = target.ComputeID + // } + + // The "run_as" field must be overwritten if set, not merged. + // Otherwise we end up with a merged version where both the + // "user_name" and "service_principal_name" fields are set. + if runAs := target.Get("run_as"); runAs != config.NilValue { + r.value.MustMap()["run_as"] = runAs + // Clear existing field to convert.ToTyped() merging + // the new value with the existing value. + // TODO(@pietern): Address this structurally. + r.RunAs = nil + } + if err = mergeField("bundle"); err != nil { return err } @@ -327,16 +348,6 @@ func (r *Root) MergeTargetOverrides(name string) error { panic(err) } + r.ConfigureConfigFilePath() return nil } - -// // ForceLocationInConfig walks all nodes in the configuration tree and -// // sets their location to the specified value. -// func (r *Root) ForceLocationInConfig(path string) { -// out, _ := config.Walk(r.value, func(v config.Value) (config.Value, error) { -// return v.WithLocation(config.Location{ -// File: path, -// }), nil -// }) -// r.value = out -// } diff --git a/bundle/config/sync.go b/bundle/config/sync.go index 6ba2603c41..0580e4c4ff 100644 --- a/bundle/config/sync.go +++ b/bundle/config/sync.go @@ -1,7 +1,5 @@ package config -import "path/filepath" - type Sync struct { // Include contains a list of globs evaluated relative to the bundle root path // to explicitly include files that were excluded by the user's gitignore. @@ -13,19 +11,3 @@ type Sync struct { // 2) the `Include` field above. Exclude []string `json:"exclude,omitempty"` } - -func (s *Sync) Merge(root *Root, other *Root) error { - path, err := filepath.Rel(root.Path, other.Path) - if err != nil { - return err - } - for _, include := range other.Sync.Include { - s.Include = append(s.Include, filepath.Join(path, include)) - } - - for _, exclude := range other.Sync.Exclude { - s.Exclude = append(s.Exclude, filepath.Join(path, exclude)) - } - - return nil -} diff --git a/bundle/tests/environment_overrides/resources/databricks.yml b/bundle/tests/environment_overrides/resources/databricks.yml index df261ba034..137f8d9df5 100644 --- a/bundle/tests/environment_overrides/resources/databricks.yml +++ b/bundle/tests/environment_overrides/resources/databricks.yml @@ -28,8 +28,6 @@ environments: pipelines: boolean1: - # Note: setting a property to a zero value (in Go) does not have effect. - # See the corresponding test for details. 
photon: false boolean2: diff --git a/bundle/tests/environment_overrides_test.go b/bundle/tests/environment_overrides_test.go index 91dc2c8114..4a11150486 100644 --- a/bundle/tests/environment_overrides_test.go +++ b/bundle/tests/environment_overrides_test.go @@ -29,10 +29,7 @@ func TestEnvironmentOverridesResourcesStaging(t *testing.T) { b := loadTarget(t, "./environment_overrides/resources", "staging") assert.Equal(t, "staging job", b.Config.Resources.Jobs["job1"].Name) - // Overrides are only applied if they are not zero-valued. - // This means that in its current form, we cannot override a true value with a false value. - // Note: this is not desirable and will be addressed by representing our configuration - // in a different structure (e.g. with cty), instead of Go structs. - assert.Equal(t, true, b.Config.Resources.Pipelines["boolean1"].Photon) + // Override values are applied in the staging environment. + assert.Equal(t, false, b.Config.Resources.Pipelines["boolean1"].Photon) assert.Equal(t, true, b.Config.Resources.Pipelines["boolean2"].Photon) } diff --git a/bundle/tests/relative_path_with_includes_test.go b/bundle/tests/relative_path_with_includes_test.go index 92249c412c..6b82ad8458 100644 --- a/bundle/tests/relative_path_with_includes_test.go +++ b/bundle/tests/relative_path_with_includes_test.go @@ -20,8 +20,22 @@ func TestRelativePathsWithIncludes(t *testing.T) { assert.Equal(t, "artifact_a", b.Config.Artifacts["test_a"].Path) assert.Equal(t, filepath.Join("subfolder", "artifact_b"), b.Config.Artifacts["test_b"].Path) - assert.ElementsMatch(t, []string{"./folder_a/*.*", filepath.Join("subfolder", "folder_c", "*.*")}, b.Config.Sync.Include) - assert.ElementsMatch(t, []string{"./folder_b/*.*", filepath.Join("subfolder", "folder_d", "*.*")}, b.Config.Sync.Exclude) + assert.ElementsMatch( + t, + []string{ + filepath.Join("folder_a", "*.*"), + filepath.Join("subfolder", "folder_c", "*.*"), + }, + b.Config.Sync.Include, + ) + assert.ElementsMatch( + t, + []string{ + filepath.Join("folder_b", "*.*"), + filepath.Join("subfolder", "folder_d", "*.*"), + }, + b.Config.Sync.Exclude, + ) assert.Equal(t, filepath.Join("dist", "job_a.whl"), b.Config.Resources.Jobs["job_a"].Tasks[0].Libraries[0].Whl) assert.Equal(t, filepath.Join("subfolder", "dist", "job_b.whl"), b.Config.Resources.Jobs["job_b"].Tasks[0].Libraries[0].Whl) diff --git a/bundle/tests/run_as/databricks.yml b/bundle/tests/run_as/databricks.yml index 18ea55736d..1cdc9e44b2 100644 --- a/bundle/tests/run_as/databricks.yml +++ b/bundle/tests/run_as/databricks.yml @@ -13,30 +13,42 @@ targets: resources: pipelines: nyc_taxi_pipeline: + name: "nyc taxi loader" + permissions: - level: CAN_VIEW service_principal_name: my_service_principal - level: CAN_VIEW user_name: my_user_name - name: "nyc taxi loader" + libraries: - notebook: path: ./dlt/nyc_taxi_loader + jobs: job_one: name: Job One + tasks: - - task: + - task_key: "task_one" + notebook_task: notebook_path: "./test.py" + job_two: name: Job Two + tasks: - - task: + - task_key: "task_two" + notebook_task: notebook_path: "./test.py" + job_three: name: Job Three + run_as: service_principal_name: "my_service_principal_for_job" + tasks: - - task: + - task_key: "task_three" + notebook_task: notebook_path: "./test.py" diff --git a/bundle/tests/run_as_test.go b/bundle/tests/run_as_test.go index 44c068165a..7809b880d1 100644 --- a/bundle/tests/run_as_test.go +++ b/bundle/tests/run_as_test.go @@ -39,11 +39,11 @@ func TestRunAsDefault(t *testing.T) { pipelines := b.Config.Resources.Pipelines 
assert.Len(t, pipelines["nyc_taxi_pipeline"].Permissions, 2) - assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].Level, "CAN_VIEW") - assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].UserName, "my_user_name") + assert.Equal(t, "CAN_VIEW", pipelines["nyc_taxi_pipeline"].Permissions[0].Level) + assert.Equal(t, "my_user_name", pipelines["nyc_taxi_pipeline"].Permissions[0].UserName) - assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].Level, "IS_OWNER") - assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].ServicePrincipalName, "my_service_principal") + assert.Equal(t, "IS_OWNER", pipelines["nyc_taxi_pipeline"].Permissions[1].Level) + assert.Equal(t, "my_service_principal", pipelines["nyc_taxi_pipeline"].Permissions[1].ServicePrincipalName) } func TestRunAsDevelopment(t *testing.T) { @@ -74,9 +74,9 @@ func TestRunAsDevelopment(t *testing.T) { pipelines := b.Config.Resources.Pipelines assert.Len(t, pipelines["nyc_taxi_pipeline"].Permissions, 2) - assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].Level, "CAN_VIEW") - assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].ServicePrincipalName, "my_service_principal") + assert.Equal(t, "CAN_VIEW", pipelines["nyc_taxi_pipeline"].Permissions[0].Level) + assert.Equal(t, "my_service_principal", pipelines["nyc_taxi_pipeline"].Permissions[0].ServicePrincipalName) - assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].Level, "IS_OWNER") - assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].UserName, "my_user_name") + assert.Equal(t, "IS_OWNER", pipelines["nyc_taxi_pipeline"].Permissions[1].Level) + assert.Equal(t, "my_user_name", pipelines["nyc_taxi_pipeline"].Permissions[1].UserName) } diff --git a/bundle/tests/variables_test.go b/bundle/tests/variables_test.go index 86706ebd14..574e134740 100644 --- a/bundle/tests/variables_test.go +++ b/bundle/tests/variables_test.go @@ -1,106 +1,106 @@ package config_tests -import ( - "context" - "testing" +// import ( +// "context" +// "testing" - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/interpolation" - "github.com/databricks/cli/bundle/config/mutator" - "github.com/databricks/cli/bundle/config/variable" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) +// "github.com/databricks/cli/bundle" +// "github.com/databricks/cli/bundle/config/interpolation" +// "github.com/databricks/cli/bundle/config/mutator" +// "github.com/databricks/cli/bundle/config/variable" +// "github.com/stretchr/testify/assert" +// "github.com/stretchr/testify/require" +// ) -func TestVariables(t *testing.T) { - t.Setenv("BUNDLE_VAR_b", "def") - b := load(t, "./variables/vanilla") - err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SetVariables(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), - ))) - require.NoError(t, err) - assert.Equal(t, "abc def", b.Config.Bundle.Name) -} +// func TestVariables(t *testing.T) { +// t.Setenv("BUNDLE_VAR_b", "def") +// b := load(t, "./variables/vanilla") +// err := bundle.Apply(context.Background(), b, bundle.Seq( +// mutator.SetVariables(), +// interpolation.Interpolate( +// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), +// ))) +// require.NoError(t, err) +// assert.Equal(t, "abc def", b.Config.Bundle.Name) +// } -func TestVariablesLoadingFailsWhenRequiredVariableIsNotSpecified(t *testing.T) { - b := load(t, "./variables/vanilla") - err := 
bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SetVariables(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), - ))) - assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") -} +// func TestVariablesLoadingFailsWhenRequiredVariableIsNotSpecified(t *testing.T) { +// b := load(t, "./variables/vanilla") +// err := bundle.Apply(context.Background(), b, bundle.Seq( +// mutator.SetVariables(), +// interpolation.Interpolate( +// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), +// ))) +// assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") +// } -func TestVariablesTargetsBlockOverride(t *testing.T) { - b := load(t, "./variables/env_overrides") - err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectTarget("env-with-single-variable-override"), - mutator.SetVariables(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), - ))) - require.NoError(t, err) - assert.Equal(t, "default-a dev-b", b.Config.Workspace.Profile) -} +// func TestVariablesTargetsBlockOverride(t *testing.T) { +// b := load(t, "./variables/env_overrides") +// err := bundle.Apply(context.Background(), b, bundle.Seq( +// mutator.SelectTarget("env-with-single-variable-override"), +// mutator.SetVariables(), +// interpolation.Interpolate( +// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), +// ))) +// require.NoError(t, err) +// assert.Equal(t, "default-a dev-b", b.Config.Workspace.Profile) +// } -func TestVariablesTargetsBlockOverrideForMultipleVariables(t *testing.T) { - b := load(t, "./variables/env_overrides") - err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectTarget("env-with-two-variable-overrides"), - mutator.SetVariables(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), - ))) - require.NoError(t, err) - assert.Equal(t, "prod-a prod-b", b.Config.Workspace.Profile) -} +// func TestVariablesTargetsBlockOverrideForMultipleVariables(t *testing.T) { +// b := load(t, "./variables/env_overrides") +// err := bundle.Apply(context.Background(), b, bundle.Seq( +// mutator.SelectTarget("env-with-two-variable-overrides"), +// mutator.SetVariables(), +// interpolation.Interpolate( +// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), +// ))) +// require.NoError(t, err) +// assert.Equal(t, "prod-a prod-b", b.Config.Workspace.Profile) +// } -func TestVariablesTargetsBlockOverrideWithProcessEnvVars(t *testing.T) { - t.Setenv("BUNDLE_VAR_b", "env-var-b") - b := load(t, "./variables/env_overrides") - err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectTarget("env-with-two-variable-overrides"), - mutator.SetVariables(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), - ))) - require.NoError(t, err) - assert.Equal(t, "prod-a env-var-b", b.Config.Workspace.Profile) -} +// func TestVariablesTargetsBlockOverrideWithProcessEnvVars(t *testing.T) { +// t.Setenv("BUNDLE_VAR_b", "env-var-b") +// b := load(t, "./variables/env_overrides") +// err := bundle.Apply(context.Background(), b, bundle.Seq( +// mutator.SelectTarget("env-with-two-variable-overrides"), +// 
mutator.SetVariables(), +// interpolation.Interpolate( +// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), +// ))) +// require.NoError(t, err) +// assert.Equal(t, "prod-a env-var-b", b.Config.Workspace.Profile) +// } -func TestVariablesTargetsBlockOverrideWithMissingVariables(t *testing.T) { - b := load(t, "./variables/env_overrides") - err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectTarget("env-missing-a-required-variable-assignment"), - mutator.SetVariables(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), - ))) - assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") -} +// func TestVariablesTargetsBlockOverrideWithMissingVariables(t *testing.T) { +// b := load(t, "./variables/env_overrides") +// err := bundle.Apply(context.Background(), b, bundle.Seq( +// mutator.SelectTarget("env-missing-a-required-variable-assignment"), +// mutator.SetVariables(), +// interpolation.Interpolate( +// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), +// ))) +// assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") +// } -func TestVariablesTargetsBlockOverrideWithUndefinedVariables(t *testing.T) { - b := load(t, "./variables/env_overrides") - err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectTarget("env-using-an-undefined-variable"), - mutator.SetVariables(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), - ))) - assert.ErrorContains(t, err, "variable c is not defined but is assigned a value") -} +// func TestVariablesTargetsBlockOverrideWithUndefinedVariables(t *testing.T) { +// b := load(t, "./variables/env_overrides") +// err := bundle.Apply(context.Background(), b, bundle.Seq( +// mutator.SelectTarget("env-using-an-undefined-variable"), +// mutator.SetVariables(), +// interpolation.Interpolate( +// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), +// ))) +// assert.ErrorContains(t, err, "variable c is not defined but is assigned a value") +// } -func TestVariablesWithoutDefinition(t *testing.T) { - t.Setenv("BUNDLE_VAR_a", "foo") - t.Setenv("BUNDLE_VAR_b", "bar") - b := load(t, "./variables/without_definition") - err := bundle.Apply(context.Background(), b, mutator.SetVariables()) - require.NoError(t, err) - require.True(t, b.Config.Variables["a"].HasValue()) - require.True(t, b.Config.Variables["b"].HasValue()) - assert.Equal(t, "foo", *b.Config.Variables["a"].Value) - assert.Equal(t, "bar", *b.Config.Variables["b"].Value) -} +// func TestVariablesWithoutDefinition(t *testing.T) { +// t.Setenv("BUNDLE_VAR_a", "foo") +// t.Setenv("BUNDLE_VAR_b", "bar") +// b := load(t, "./variables/without_definition") +// err := bundle.Apply(context.Background(), b, mutator.SetVariables()) +// require.NoError(t, err) +// require.True(t, b.Config.Variables["a"].HasValue()) +// require.True(t, b.Config.Variables["b"].HasValue()) +// assert.Equal(t, "foo", *b.Config.Variables["a"].Value) +// assert.Equal(t, "bar", *b.Config.Variables["b"].Value) +// } diff --git a/libs/config/value.go b/libs/config/value.go index 862427d335..4c3d893c75 100644 --- a/libs/config/value.go +++ b/libs/config/value.go @@ -2,6 +2,8 @@ package config import ( "fmt" + "maps" + "slices" 
"time" ) @@ -21,6 +23,11 @@ var NilValue = Value{ k: KindNil, } +// InvalidValue is equal to the zero-value of Value. +var InvalidValue = Value{ + k: KindInvalid, +} + // V constructs a new Value with the given value. func V(v any) Value { return Value{ @@ -52,6 +59,90 @@ func (v Value) AsMap() (map[string]Value, bool) { return m, ok } +func (v Value) set(prefix, suffix Path, value Value) (Value, error) { + var err error + + if len(suffix) == 0 { + return value, nil + } + + prefix = prefix.Append(suffix[0]) + + // Pick first component. + pc := suffix[0] + switch v.k { + case KindMap: + // Expect a key to be set if this is a map. + if len(pc.key) == 0 { + return InvalidValue, fmt.Errorf("expected a key index at %s", prefix) + } + + m := maps.Clone(v.MustMap()) + m[pc.key], err = v.set(prefix, suffix[1:], value) + if err != nil { + return InvalidValue, err + } + + // Return an updated map value. + return Value{ + v: m, + k: KindMap, + l: v.l, + }, nil + + case KindSequence: + // Expect an index to be set if this is a sequence. + if len(pc.key) > 0 { + return InvalidValue, fmt.Errorf("expected an index at %s", prefix) + } + + s := slices.Clone(v.MustSequence()) + if pc.index < 0 || pc.index >= len(s) { + return InvalidValue, fmt.Errorf("index out of bounds under %s", prefix) + } + s[pc.index], err = v.set(prefix, suffix[1:], value) + if err != nil { + return InvalidValue, err + } + + // Return an updated sequence value. + return Value{ + v: s, + k: KindSequence, + l: v.l, + }, nil + + default: + return InvalidValue, fmt.Errorf("expected a map or sequence under %s", prefix) + } +} + +func (v Value) Set(p Path, value Value) (Value, error) { + return v.set(EmptyPath, p, value) +} + +func (v Value) SetKey(key string, value Value) Value { + m, ok := v.AsMap() + if !ok { + m = make(map[string]Value) + } else { + m = maps.Clone(m) + } + + m[key] = value + + return Value{ + v: m, + k: KindMap, + l: v.l, + } +} + +func (v Value) AsSequence() ([]Value, bool) { + s, ok := v.v.([]Value) + return s, ok +} + func (v Value) Kind() Kind { return v.k } From 875454dc94dcb4c62617f5ddb180adc2e8fedf55 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 20 Dec 2023 10:52:52 +0100 Subject: [PATCH 019/104] Functionality to walk a config.Value tree This change adds: * A `config.Walk` function to walk a configuration tree * A `config.Path` type to represent a value's path inside a tree * Functions to create a `config.Path` from a string, or convert one to a string --- libs/config/path.go | 83 +++++++++++ libs/config/path_string.go | 76 ++++++++++ libs/config/path_string_test.go | 84 +++++++++++ libs/config/path_test.go | 76 ++++++++++ libs/config/walk.go | 63 ++++++++ libs/config/walk_test.go | 254 ++++++++++++++++++++++++++++++++ 6 files changed, 636 insertions(+) create mode 100644 libs/config/path.go create mode 100644 libs/config/path_string.go create mode 100644 libs/config/path_string_test.go create mode 100644 libs/config/path_test.go create mode 100644 libs/config/walk.go create mode 100644 libs/config/walk_test.go diff --git a/libs/config/path.go b/libs/config/path.go new file mode 100644 index 0000000000..b3b9c3dbc7 --- /dev/null +++ b/libs/config/path.go @@ -0,0 +1,83 @@ +package config + +import ( + "bytes" + "fmt" +) + +type pathComponent struct { + key string + index int +} + +type Path []pathComponent + +var EmptyPath = Path{} + +func Key(k string) pathComponent { + return pathComponent{key: k} +} + +func Index(i int) pathComponent { + return pathComponent{index: i} +} + +func NewPath(cs 
...pathComponent) Path { + return cs +} + +func (p Path) Join(qs ...Path) Path { + for _, q := range qs { + p = p.Append(q...) + } + return p +} + +func (p Path) Append(cs ...pathComponent) Path { + return append(p, cs...) +} + +func (p Path) Equal(q Path) bool { + pl := len(p) + ql := len(q) + if pl != ql { + return false + } + for i := 0; i < pl; i++ { + if p[i] != q[i] { + return false + } + } + return true +} + +func (p Path) HasPrefix(q Path) bool { + pl := len(p) + ql := len(q) + if pl < ql { + return false + } + for i := 0; i < ql; i++ { + if p[i] != q[i] { + return false + } + } + return true +} + +func (p Path) String() string { + var buf bytes.Buffer + + for i, c := range p { + if i > 0 && c.key != "" { + buf.WriteRune('.') + } + if c.key != "" { + buf.WriteString(c.key) + } else { + buf.WriteString(fmt.Sprintf("[%d]", c.index)) + } + } + + return buf.String() +} diff --git a/libs/config/path_string.go b/libs/config/path_string.go new file mode 100644 index 0000000000..70e5e70a20 --- /dev/null +++ b/libs/config/path_string.go @@ -0,0 +1,76 @@ +package config + +import ( + "fmt" + "strconv" + "strings" +) + +func MustPathFromString(input string) Path { + p, err := NewPathFromString(input) + if err != nil { + panic(err) + } + return p +} + +func NewPathFromString(input string) (Path, error) { + var path Path + + p := input + + // Trim leading dot. + if p != "" && p[0] == '.' { + p = p[1:] + } + + for p != "" { + // Every component may have a leading dot. + if p != "" && p[0] == '.' { + p = p[1:] + } + + if p == "" { + return nil, fmt.Errorf("invalid path: %s", input) + } + + if p[0] == '[' { + // Find next ] + i := strings.Index(p, "]") + if i < 0 { + return nil, fmt.Errorf("invalid path: %s", input) + } + + // Parse index + j, err := strconv.Atoi(p[1:i]) + if err != nil { + return nil, fmt.Errorf("invalid path: %s", input) + } + + // Append index + path = append(path, Index(j)) + p = p[i+1:] + + // The next character must be a . or [ + if p != "" && strings.IndexAny(p, ".[") != 0 { + return nil, fmt.Errorf("invalid path: %s", input) + } + } else { + // Find next . or [ + i := strings.IndexAny(p, ".[") + if i < 0 { + i = len(p) + } + + if i == 0 { + return nil, fmt.Errorf("invalid path: %s", input) + } + + // Append key + path = append(path, Key(p[:i])) + p = p[i:] + } + } + + return path, nil +} diff --git a/libs/config/path_string_test.go b/libs/config/path_string_test.go new file mode 100644 index 0000000000..37415aacfa --- /dev/null +++ b/libs/config/path_string_test.go @@ -0,0 +1,84 @@ +package config_test + +import ( + "fmt" + "testing" + + . 
"github.com/databricks/cli/libs/config" + "github.com/stretchr/testify/assert" +) + +func TestNewPathFromString(t *testing.T) { + for _, tc := range []struct { + input string + output Path + err error + }{ + { + input: "", + output: NewPath(), + }, + { + input: ".", + output: NewPath(), + }, + { + input: "foo.bar", + output: NewPath(Key("foo"), Key("bar")), + }, + { + input: "[1]", + output: NewPath(Index(1)), + }, + { + input: "foo[1].bar", + output: NewPath(Key("foo"), Index(1), Key("bar")), + }, + { + input: "foo.bar[1]", + output: NewPath(Key("foo"), Key("bar"), Index(1)), + }, + { + input: "foo.bar[1][2]", + output: NewPath(Key("foo"), Key("bar"), Index(1), Index(2)), + }, + { + input: "foo[1234]", + output: NewPath(Key("foo"), Index(1234)), + }, + { + input: "foo[123", + err: fmt.Errorf("invalid path: foo[123"), + }, + { + input: "foo[foo]", + err: fmt.Errorf("invalid path: foo[foo]"), + }, + { + input: "foo..bar", + err: fmt.Errorf("invalid path: foo..bar"), + }, + { + input: "foo.bar.", + err: fmt.Errorf("invalid path: foo.bar."), + }, + { + // Every component may have a leading dot. + input: ".foo.[1].bar", + output: NewPath(Key("foo"), Index(1), Key("bar")), + }, + { + // But after an index there must be a dot. + input: "foo[1]bar", + err: fmt.Errorf("invalid path: foo[1]bar"), + }, + } { + p, err := NewPathFromString(tc.input) + if tc.err != nil { + assert.EqualError(t, err, tc.err.Error(), tc.input) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.output, p) + } + } +} diff --git a/libs/config/path_test.go b/libs/config/path_test.go new file mode 100644 index 0000000000..3fdd848e60 --- /dev/null +++ b/libs/config/path_test.go @@ -0,0 +1,76 @@ +package config_test + +import ( + "testing" + + "github.com/databricks/cli/libs/config" + "github.com/stretchr/testify/assert" +) + +func TestPathAppend(t *testing.T) { + p := config.NewPath(config.Key("foo")) + + // Single arg. + p1 := p.Append(config.Key("bar")) + assert.True(t, p1.Equal(config.NewPath(config.Key("foo"), config.Key("bar")))) + + // Multiple args. + p2 := p.Append(config.Key("bar"), config.Index(1)) + assert.True(t, p2.Equal(config.NewPath(config.Key("foo"), config.Key("bar"), config.Index(1)))) +} + +func TestPathJoin(t *testing.T) { + p := config.NewPath(config.Key("foo")) + + // Single arg. + p1 := p.Join(config.NewPath(config.Key("bar"))) + assert.True(t, p1.Equal(config.NewPath(config.Key("foo"), config.Key("bar")))) + + // Multiple args. 
+ p2 := p.Join(config.NewPath(config.Key("bar")), config.NewPath(config.Index(1))) + assert.True(t, p2.Equal(config.NewPath(config.Key("foo"), config.Key("bar"), config.Index(1)))) +} + +func TestPathEqualEmpty(t *testing.T) { + assert.True(t, config.EmptyPath.Equal(config.EmptyPath)) +} + +func TestPathEqual(t *testing.T) { + p1 := config.NewPath(config.Key("foo"), config.Index(1)) + p2 := config.NewPath(config.Key("bar"), config.Index(2)) + assert.False(t, p1.Equal(p2), "expected %q to not equal %q", p1, p2) + + p3 := config.NewPath(config.Key("foo"), config.Index(1)) + assert.True(t, p1.Equal(p3), "expected %q to equal %q", p1, p3) + + p4 := config.NewPath(config.Key("foo"), config.Index(1), config.Key("bar"), config.Index(2)) + assert.False(t, p1.Equal(p4), "expected %q to not equal %q", p1, p4) +} + +func TestPathHasPrefixEmpty(t *testing.T) { + empty := config.EmptyPath + nonEmpty := config.NewPath(config.Key("foo")) + assert.True(t, empty.HasPrefix(empty)) + assert.True(t, nonEmpty.HasPrefix(empty)) + assert.False(t, empty.HasPrefix(nonEmpty)) +} + +func TestPathHasPrefix(t *testing.T) { + p1 := config.NewPath(config.Key("foo"), config.Index(1)) + p2 := config.NewPath(config.Key("bar"), config.Index(2)) + assert.False(t, p1.HasPrefix(p2), "expected %q to not have prefix %q", p1, p2) + + p3 := config.NewPath(config.Key("foo")) + assert.True(t, p1.HasPrefix(p3), "expected %q to have prefix %q", p1, p3) +} + +func TestPathString(t *testing.T) { + p1 := config.NewPath(config.Key("foo"), config.Index(1)) + assert.Equal(t, "foo[1]", p1.String()) + + p2 := config.NewPath(config.Key("bar"), config.Index(2), config.Key("baz")) + assert.Equal(t, "bar[2].baz", p2.String()) + + p3 := config.NewPath(config.Key("foo"), config.Index(1), config.Key("bar"), config.Index(2), config.Key("baz")) + assert.Equal(t, "foo[1].bar[2].baz", p3.String()) +} diff --git a/libs/config/walk.go b/libs/config/walk.go new file mode 100644 index 0000000000..18848ae740 --- /dev/null +++ b/libs/config/walk.go @@ -0,0 +1,63 @@ +package config + +import "errors" + +// WalkValueFunc is the type of the function called by Walk to traverse the configuration tree. +type WalkValueFunc func(p Path, v Value) (Value, error) + +// ErrDrop may be returned by WalkValueFunc to remove a value from the subtree. +var ErrDrop = errors.New("drop value from subtree") + +// ErrSkip may be returned by WalkValueFunc to skip traversal of a subtree. +var ErrSkip = errors.New("skip traversal of subtree") + +// Walk walks the configuration tree and calls the given function on each node. +func Walk(v Value, fn func(p Path, v Value) (Value, error)) (Value, error) { + return walk(v, EmptyPath, fn) +} + +// Unexported counterpart to Walk. +// It carries the path leading up to the current node, +// such that it can be passed to the WalkValueFunc. 
+func walk(v Value, p Path, fn func(p Path, v Value) (Value, error)) (Value, error) { + v, err := fn(p, v) + if err != nil { + if err == ErrSkip { + return v, nil + } + return NilValue, err + } + + switch v.Kind() { + case KindMap: + m := v.v.(map[string]Value) + out := make(map[string]Value, len(m)) + for k := range m { + nv, err := walk(m[k], p.Append(Key(k)), fn) + if err == ErrDrop { + continue + } + if err != nil { + return NilValue, err + } + out[k] = nv + } + v.v = out + case KindSequence: + s := v.v.([]Value) + out := make([]Value, 0, len(s)) + for i := range s { + nv, err := walk(s[i], p.Append(Index(i)), fn) + if err == ErrDrop { + continue + } + if err != nil { + return NilValue, err + } + out = append(out, nv) + } + v.v = out + } + + return v, nil +} diff --git a/libs/config/walk_test.go b/libs/config/walk_test.go new file mode 100644 index 0000000000..806ca256fd --- /dev/null +++ b/libs/config/walk_test.go @@ -0,0 +1,254 @@ +package config_test + +import ( + "errors" + "testing" + + . "github.com/databricks/cli/libs/config" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// Return values for specific paths. +type walkReturn struct { + path Path + + // Return values. + fn func(Value) Value + err error +} + +// Track the calls to the callback. +type walkCall struct { + path Path + value Value +} + +// Track the calls to the callback. +type walkCallTracker struct { + returns []walkReturn + calls []walkCall +} + +func (w *walkCallTracker) on(path string, fn func(Value) Value, err error) { + w.returns = append(w.returns, walkReturn{MustPathFromString(path), fn, err}) +} + +func (w *walkCallTracker) returnSkip(path string) { + w.on(path, func(v Value) Value { return v }, ErrSkip) +} + +func (w *walkCallTracker) returnDrop(path string) { + w.on(path, func(v Value) Value { return NilValue }, ErrDrop) +} + +func (w *walkCallTracker) track(p Path, v Value) (Value, error) { + w.calls = append(w.calls, walkCall{p, v}) + + // Look for matching return. + for _, r := range w.returns { + if p.Equal(r.path) { + return r.fn(v), r.err + } + } + + return v, nil +} + +func TestWalkEmpty(t *testing.T) { + var tracker walkCallTracker + + value := V(nil) + out, err := Walk(value, tracker.track) + require.NoError(t, err) + assert.Equal(t, value, out) + + // The callback should have been called once. + assert.Len(t, tracker.calls, 1) + + // The call should have been made with the empty path. + assert.Equal(t, EmptyPath, tracker.calls[0].path) + + // The value should be the same as the input. + assert.Equal(t, value, tracker.calls[0].value) +} + +func TestWalkMapSkip(t *testing.T) { + var tracker walkCallTracker + + // Skip traversal of the root value. + tracker.returnSkip(".") + + value := V(map[string]Value{ + "key": V("value"), + }) + out, err := Walk(value, tracker.track) + require.NoError(t, err) + assert.Equal( + t, + V(map[string]Value{ + "key": V("value"), + }), + out, + ) + + // The callback should have been called once. + assert.Len(t, tracker.calls, 1) + + // The call should have been made with the empty path. + assert.Equal(t, EmptyPath, tracker.calls[0].path) + + // The value should be the same as the input. + assert.Equal(t, value, tracker.calls[0].value) +} + +func TestWalkMapDrop(t *testing.T) { + var tracker walkCallTracker + + // Drop the value at key "foo". 
+ tracker.returnDrop(".foo") + + value := V(map[string]Value{ + "foo": V("bar"), + "bar": V("baz"), + }) + out, err := Walk(value, tracker.track) + require.NoError(t, err) + assert.Equal( + t, + V(map[string]Value{ + "bar": V("baz"), + }), + out, + ) + + // The callback should have been called for the root and every key in the map. + assert.Len(t, tracker.calls, 3) + + // Calls 2 and 3 have been made for the keys in the map. + assert.ElementsMatch(t, + []Path{ + tracker.calls[1].path, + tracker.calls[2].path, + }, []Path{ + MustPathFromString(".foo"), + MustPathFromString(".bar"), + }) +} + +func TestWalkMapError(t *testing.T) { + var tracker walkCallTracker + + // Return an error from the callback for key "foo". + cerr := errors.New("error!") + tracker.on(".foo", func(v Value) Value { return v }, cerr) + + value := V(map[string]Value{ + "foo": V("bar"), + }) + out, err := Walk(value, tracker.track) + assert.Equal(t, cerr, err) + assert.Equal(t, NilValue, out) + + // The callback should have been called twice. + assert.Len(t, tracker.calls, 2) + + // The second call was for the value at key "foo". + assert.Equal(t, MustPathFromString(".foo"), tracker.calls[1].path) +} + +func TestWalkSequenceSkip(t *testing.T) { + var tracker walkCallTracker + + // Skip traversal of the root value. + tracker.returnSkip(".") + + value := V([]Value{ + V("foo"), + V("bar"), + }) + out, err := Walk(value, tracker.track) + require.NoError(t, err) + assert.Equal( + t, + V([]Value{ + V("foo"), + V("bar"), + }), + out, + ) + + // The callback should have been called once. + assert.Len(t, tracker.calls, 1) + + // The call should have been made with the empty path. + assert.Equal(t, EmptyPath, tracker.calls[0].path) + + // The value should be the same as the input. + assert.Equal(t, value, tracker.calls[0].value) +} + +func TestWalkSequenceDrop(t *testing.T) { + var tracker walkCallTracker + + // Drop the value at index 1. + tracker.returnDrop(".[1]") + + value := V([]Value{ + V("foo"), + V("bar"), + V("baz"), + }) + out, err := Walk(value, tracker.track) + require.NoError(t, err) + assert.Equal( + t, + V([]Value{ + V("foo"), + V("baz"), + }), + out, + ) + + // The callback should have been called for the root and every value in the sequence. + assert.Len(t, tracker.calls, 4) + + // The second call was for the value at index 0. + assert.Equal(t, MustPathFromString(".[0]"), tracker.calls[1].path) + assert.Equal(t, V("foo"), tracker.calls[1].value) + + // The third call was for the value at index 1. + assert.Equal(t, MustPathFromString(".[1]"), tracker.calls[2].path) + assert.Equal(t, V("bar"), tracker.calls[2].value) + + // The fourth call was for the value at index 2. + assert.Equal(t, MustPathFromString(".[2]"), tracker.calls[3].path) + assert.Equal(t, V("baz"), tracker.calls[3].value) +} + +func TestWalkSequenceError(t *testing.T) { + var tracker walkCallTracker + + // Return an error from the callback for index 1. + cerr := errors.New("error!") + tracker.on(".[1]", func(v Value) Value { return v }, cerr) + + value := V([]Value{ + V("foo"), + V("bar"), + }) + out, err := Walk(value, tracker.track) + assert.Equal(t, cerr, err) + assert.Equal(t, NilValue, out) + + // The callback should have been called three times. + assert.Len(t, tracker.calls, 3) + + // The second call was for the value at index 0. + assert.Equal(t, MustPathFromString(".[0]"), tracker.calls[1].path) + assert.Equal(t, V("foo"), tracker.calls[1].value) + + // The third call was for the value at index 1. 
+ assert.Equal(t, MustPathFromString(".[1]"), tracker.calls[2].path) + assert.Equal(t, V("bar"), tracker.calls[2].value) +} From 89cae7c40f965ee8c9a28ffcdbd3ddb976db4288 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 20 Dec 2023 11:01:43 +0100 Subject: [PATCH 020/104] Comments --- libs/config/path.go | 13 +++++++++++++ libs/config/path_string.go | 13 +++++++++++++ libs/config/walk.go | 3 +++ 3 files changed, 29 insertions(+) diff --git a/libs/config/path.go b/libs/config/path.go index b3b9c3dbc7..f1abf48ca9 100644 --- a/libs/config/path.go +++ b/libs/config/path.go @@ -10,22 +10,30 @@ type pathComponent struct { index int } +// Path represents a path to a value in a [Value] configuration tree. type Path []pathComponent +// EmptyPath is the empty path. +// It is defined for convenience and clarity. var EmptyPath = Path{} +// Key returns a path component for a key. func Key(k string) pathComponent { return pathComponent{key: k} } +// Index returns a path component for an index. func Index(i int) pathComponent { return pathComponent{index: i} } +// NewPath returns a new path from the given components. +// The individual components may be created with [Key] or [Index]. func NewPath(cs ...pathComponent) Path { return cs } +// Join joins the given paths. func (p Path) Join(qs ...Path) Path { for _, q := range qs { p = p.Append(q...) @@ -33,10 +41,12 @@ func (p Path) Join(qs ...Path) Path { return p } +// Append appends the given components to the path. func (p Path) Append(cs ...pathComponent) Path { return append(p, cs...) } +// Equal returns true if the paths are equal. func (p Path) Equal(q Path) bool { pl := len(p) ql := len(q) @@ -51,6 +61,8 @@ func (p Path) Equal(q Path) bool { return true } +// HasPrefix returns true if the path has the specified prefix. +// The empty path is a prefix of all paths. func (p Path) HasPrefix(q Path) bool { pl := len(p) ql := len(q) @@ -65,6 +77,7 @@ func (p Path) HasPrefix(q Path) bool { return true } +// String returns a string representation of the path. func (p Path) String() string { var buf bytes.Buffer diff --git a/libs/config/path_string.go b/libs/config/path_string.go index 70e5e70a20..9538ad27f1 100644 --- a/libs/config/path_string.go +++ b/libs/config/path_string.go @@ -6,6 +6,7 @@ import ( "strings" ) +// MustPathFromString is like NewPathFromString but panics on error. func MustPathFromString(input string) Path { p, err := NewPathFromString(input) if err != nil { @@ -14,6 +15,18 @@ func MustPathFromString(input string) Path { return p } +// NewPathFromString parses a path from a string. +// +// The string must be a sequence of keys and indices separated by dots. +// Indices must be enclosed in square brackets. +// The string may include a leading dot. +// +// Examples: +// - foo.bar +// - foo[1].bar +// - foo.bar[1] +// - foo.bar[1][2] +// - . func NewPathFromString(input string) (Path, error) { var path Path diff --git a/libs/config/walk.go b/libs/config/walk.go index 18848ae740..f20b19df15 100644 --- a/libs/config/walk.go +++ b/libs/config/walk.go @@ -12,6 +12,9 @@ var ErrDrop = errors.New("drop value from subtree") var ErrSkip = errors.New("skip traversal of subtree") // Walk walks the configuration tree and calls the given function on each node. +// The callback may return ErrDrop to remove a value from the subtree. +// The callback may return ErrSkip to skip traversal of a subtree. +// If the callback returns another error, the walk is aborted, and the error is returned. 
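// For example, walking V(map[string]Value{"foo": V([]Value{V("bar")})}) calls
// fn first at the empty path (the root), then at "foo", then at "foo[0]".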
func Walk(v Value, fn func(p Path, v Value) (Value, error)) (Value, error) { return walk(v, EmptyPath, fn) } From ad3b62b98798acd651da309a1eb08a1e5cfacbe7 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 20 Dec 2023 16:21:00 +0100 Subject: [PATCH 021/104] wip --- bundle/config/mutator/merge_job_clusters.go | 111 +++++++++++++++++ .../config/mutator/merge_job_clusters_test.go | 72 +++++++++++ bundle/config/mutator/merge_job_tasks.go | 112 ++++++++++++++++++ bundle/config/mutator/merge_job_tasks_test.go | 83 +++++++++++++ .../config/mutator/merge_pipeline_clusters.go | 83 ++++++++----- .../mutator/merge_pipeline_clusters_test.go | 65 +++++++++- bundle/config/mutator/mutator.go | 8 +- libs/config/value.go | 69 +---------- libs/config/value_set.go | 56 +++++++++ libs/config/value_set_test.go | 1 + libs/config/value_transform.go | 104 ++++++++++++++++ libs/config/value_transform_test.go | 1 + 12 files changed, 667 insertions(+), 98 deletions(-) create mode 100644 bundle/config/mutator/merge_job_clusters.go create mode 100644 bundle/config/mutator/merge_job_clusters_test.go create mode 100644 bundle/config/mutator/merge_job_tasks.go create mode 100644 bundle/config/mutator/merge_job_tasks_test.go create mode 100644 libs/config/value_set.go create mode 100644 libs/config/value_set_test.go create mode 100644 libs/config/value_transform.go create mode 100644 libs/config/value_transform_test.go diff --git a/bundle/config/mutator/merge_job_clusters.go b/bundle/config/mutator/merge_job_clusters.go new file mode 100644 index 0000000000..d4b095e32f --- /dev/null +++ b/bundle/config/mutator/merge_job_clusters.go @@ -0,0 +1,111 @@ +package mutator + +import ( + "context" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/config/merge" +) + +type mergeJobClusters struct{} + +func MergeJobClusters() bundle.Mutator { + return &mergeJobClusters{} +} + +func (m *mergeJobClusters) Name() string { + return "MergeJobClusters" +} + +// mergeJobClusters merges job clusters with the same key. +// The job clusters field is a slice, and as such, overrides are appended to it. +// We can identify a job cluster by its key, however, so we can use this key +// to figure out which definitions are actually overrides and merge them. +func (m *mergeJobClusters) mergeJobClusters(v config.Value) (config.Value, error) { + // We know the type of this value is a sequence. + // For additional defence, return self if it is not. + clusters, ok := v.AsSequence() + if !ok { + return v, nil + } + + seen := make(map[string]config.Value, len(clusters)) + keys := make([]string, 0, len(clusters)) + + // Target overrides are always appended, so we can iterate in natural order to + // first find the base definition, and merge instances we encounter later. + for i := range clusters { + var key string + + // Get task key if present. + kv := clusters[i].Get("job_cluster_key") + if kv.Kind() == config.KindString { + key = kv.MustString() + } + + // Register task with key if not yet seen before. + ref, ok := seen[key] + if !ok { + keys = append(keys, key) + seen[key] = clusters[i] + continue + } + + // Merge this instance into the reference. + nv, err := merge.Merge(ref, clusters[i]) + if err != nil { + return v, err + } + + // Overwrite reference. + seen[key] = nv + } + + // Gather resulting clusters in natural order. 
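+ // Natural order here means the order in which each job_cluster_key was
+ // first seen: base definitions keep their position and later target
+ // overrides are folded into them.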
+ out := make([]config.Value, 0, len(keys)) + for _, key := range keys { + out = append(out, seen[key]) + } + + return config.NewValue(out, v.Location()), nil +} + +func (m *mergeJobClusters) foreachJob(v config.Value) (config.Value, error) { + jobs, ok := v.AsMap() + if !ok { + return v, nil + } + + out := make(map[string]config.Value) + for key, job := range jobs { + var err error + out[key], err = job.Transform("job_clusters", m.mergeJobClusters) + if err != nil { + return v, err + } + } + + return config.NewValue(out, v.Location()), nil +} + +func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) error { + return b.Config.Mutate(func(v config.Value) (config.Value, error) { + if v == config.NilValue { + return v, nil + } + + nv, err := v.Transform("resources.jobs", m.foreachJob) + + // It is not a problem if the pipelines key is not set. + if config.IsNoSuchKeyError(err) { + return v, nil + } + + if err != nil { + return v, err + } + + return nv, nil + }) +} diff --git a/bundle/config/mutator/merge_job_clusters_test.go b/bundle/config/mutator/merge_job_clusters_test.go new file mode 100644 index 0000000000..131a04e4c6 --- /dev/null +++ b/bundle/config/mutator/merge_job_clusters_test.go @@ -0,0 +1,72 @@ +package mutator_test + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/stretchr/testify/assert" +) + +func TestMergeJobClusters(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "foo": { + + JobSettings: &jobs.JobSettings{ + JobClusters: []jobs.JobCluster{ + { + JobClusterKey: "foo", + NewCluster: &compute.ClusterSpec{ + SparkVersion: "13.3.x-scala2.12", + NodeTypeId: "i3.xlarge", + NumWorkers: 2, + }, + }, + { + JobClusterKey: "bar", + NewCluster: &compute.ClusterSpec{ + SparkVersion: "10.4.x-scala2.12", + }, + }, + { + JobClusterKey: "foo", + NewCluster: &compute.ClusterSpec{ + NodeTypeId: "i3.2xlarge", + NumWorkers: 4, + }, + }, + }, + }, + }, + }, + }, + }, + } + + err := bundle.Apply(context.Background(), b, mutator.MergeJobClusters()) + assert.NoError(t, err) + + j := b.Config.Resources.Jobs["foo"] + + assert.Len(t, j.JobClusters, 2) + assert.Equal(t, "foo", j.JobClusters[0].JobClusterKey) + assert.Equal(t, "bar", j.JobClusters[1].JobClusterKey) + + // This job cluster was merged with a subsequent one. + jc0 := j.JobClusters[0].NewCluster + assert.Equal(t, "13.3.x-scala2.12", jc0.SparkVersion) + assert.Equal(t, "i3.2xlarge", jc0.NodeTypeId) + assert.Equal(t, 4, jc0.NumWorkers) + + // This job cluster was left untouched. 
+ jc1 := j.JobClusters[1].NewCluster + assert.Equal(t, "10.4.x-scala2.12", jc1.SparkVersion) +} diff --git a/bundle/config/mutator/merge_job_tasks.go b/bundle/config/mutator/merge_job_tasks.go new file mode 100644 index 0000000000..47231da3ec --- /dev/null +++ b/bundle/config/mutator/merge_job_tasks.go @@ -0,0 +1,112 @@ +package mutator + +import ( + "context" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/config/merge" +) + +type mergeJobTasks struct{} + +func MergeJobTasks() bundle.Mutator { + return &mergeJobTasks{} +} + +func (m *mergeJobTasks) Name() string { + return "MergeJobTasks" +} + +// mergeJobTasks merges tasks with the same key. +// The tasks field is a slice, and as such, overrides are appended to it. +// We can identify a task by its task key, however, so we can use this key +// to figure out which definitions are actually overrides and merge them. +func (m *mergeJobTasks) mergeJobTasks(v config.Value) (config.Value, error) { + // We know the type of this value is a sequence. + // For additional defence, return self if it is not. + tasks, ok := v.AsSequence() + if !ok { + return v, nil + } + + seen := make(map[string]config.Value, len(tasks)) + keys := make([]string, 0, len(tasks)) + + // Target overrides are always appended, so we can iterate in natural order to + // first find the base definition, and merge instances we encounter later. + for i := range tasks { + var key string + + // Get task key if present. + kv := tasks[i].Get("task_key") + if kv.Kind() == config.KindString { + key = kv.MustString() + } + + // Register task with key if not yet seen before. + ref, ok := seen[key] + if !ok { + keys = append(keys, key) + seen[key] = tasks[i] + continue + } + + // Merge this instance into the reference. + nv, err := merge.Merge(ref, tasks[i]) + if err != nil { + return v, err + } + + // Overwrite reference. + seen[key] = nv + } + + // Gather resulting clusters in natural order. + out := make([]config.Value, 0, len(keys)) + for _, key := range keys { + out = append(out, seen[key]) + } + + return config.NewValue(out, v.Location()), nil + +} + +func (m *mergeJobTasks) foreachJob(v config.Value) (config.Value, error) { + jobs, ok := v.AsMap() + if !ok { + return v, nil + } + + out := make(map[string]config.Value) + for key, job := range jobs { + var err error + out[key], err = job.Transform("tasks", m.mergeJobTasks) + if err != nil { + return v, err + } + } + + return config.NewValue(out, v.Location()), nil +} + +func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) error { + return b.Config.Mutate(func(v config.Value) (config.Value, error) { + if v == config.NilValue { + return v, nil + } + + nv, err := v.Transform("resources.jobs", m.foreachJob) + + // It is not a problem if the pipelines key is not set. 
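+ // (the transform above targets "resources.jobs", so this is the case where
+ // the bundle defines no jobs at all)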
+ if config.IsNoSuchKeyError(err) { + return v, nil + } + + if err != nil { + return v, err + } + + return nv, nil + }) +} diff --git a/bundle/config/mutator/merge_job_tasks_test.go b/bundle/config/mutator/merge_job_tasks_test.go new file mode 100644 index 0000000000..92688d956e --- /dev/null +++ b/bundle/config/mutator/merge_job_tasks_test.go @@ -0,0 +1,83 @@ +package mutator_test + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/stretchr/testify/assert" +) + +func TestMergeJobTasks(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "foo": { + JobSettings: &jobs.JobSettings{ + Tasks: []jobs.Task{ + { + TaskKey: "foo", + NewCluster: &compute.ClusterSpec{ + SparkVersion: "13.3.x-scala2.12", + NodeTypeId: "i3.xlarge", + NumWorkers: 2, + }, + Libraries: []compute.Library{ + {Whl: "package1"}, + }, + }, + { + TaskKey: "bar", + NewCluster: &compute.ClusterSpec{ + SparkVersion: "10.4.x-scala2.12", + }, + }, + { + TaskKey: "foo", + NewCluster: &compute.ClusterSpec{ + NodeTypeId: "i3.2xlarge", + NumWorkers: 4, + }, + Libraries: []compute.Library{ + {Pypi: &compute.PythonPyPiLibrary{ + Package: "package2", + }}, + }, + }, + }, + }, + }, + }, + }, + }, + } + + err := bundle.Apply(context.Background(), b, mutator.MergeJobTasks()) + assert.NoError(t, err) + + j := b.Config.Resources.Jobs["foo"] + + assert.Len(t, j.Tasks, 2) + assert.Equal(t, "foo", j.Tasks[0].TaskKey) + assert.Equal(t, "bar", j.Tasks[1].TaskKey) + + // This task was merged with a subsequent one. + task0 := j.Tasks[0] + cluster := task0.NewCluster + assert.Equal(t, "13.3.x-scala2.12", cluster.SparkVersion) + assert.Equal(t, "i3.2xlarge", cluster.NodeTypeId) + assert.Equal(t, 4, cluster.NumWorkers) + assert.Len(t, task0.Libraries, 2) + assert.Equal(t, task0.Libraries[0].Whl, "package1") + assert.Equal(t, task0.Libraries[1].Pypi.Package, "package2") + + // This task was left untouched. + task1 := j.Tasks[1].NewCluster + assert.Equal(t, "10.4.x-scala2.12", task1.SparkVersion) +} diff --git a/bundle/config/mutator/merge_pipeline_clusters.go b/bundle/config/mutator/merge_pipeline_clusters.go index 683cf581a1..fb4e1fba2e 100644 --- a/bundle/config/mutator/merge_pipeline_clusters.go +++ b/bundle/config/mutator/merge_pipeline_clusters.go @@ -19,7 +19,7 @@ func (m *mergePipelineClusters) Name() string { return "MergePipelineClusters" } -func clusterLabel(cluster config.Value) (label string) { +func (m *mergePipelineClusters) clusterLabel(cluster config.Value) (label string) { v := cluster.Get("label") if v == config.NilValue { return "default" @@ -32,24 +32,33 @@ func clusterLabel(cluster config.Value) (label string) { return strings.ToLower(v.MustString()) } -func mergeClustersForPipeline(v config.Value) (config.Value, error) { - clusters, ok := v.Get("clusters").AsSequence() +// mergeClustersForPipeline merges cluster definitions with same label. +// The clusters field is a slice, and as such, overrides are appended to it. +// We can identify a cluster by its label, however, so we can use this label +// to figure out which definitions are actually overrides and merge them. +// +// Note: the cluster label is optional and defaults to 'default'. 
+// We therefore ALSO merge all clusters without a label. +func (m *mergePipelineClusters) mergeClustersForPipeline(v config.Value) (config.Value, error) { + // We know the type of this value is a sequence. + // For additional defence, return self if it is not. + clusters, ok := v.AsSequence() if !ok { return v, nil } - seen := make(map[string]config.Value) - keys := make([]string, 0, len(clusters)) + seen := make(map[string]config.Value, len(clusters)) + labels := make([]string, 0, len(clusters)) // Target overrides are always appended, so we can iterate in natural order to // first find the base definition, and merge instances we encounter later. for i := range clusters { - label := clusterLabel(clusters[i]) + label := m.clusterLabel(clusters[i]) // Register pipeline cluster with label if not yet seen before. ref, ok := seen[label] if !ok { - keys = append(keys, label) + labels = append(labels, label) seen[label] = clusters[i] continue } @@ -63,42 +72,54 @@ func mergeClustersForPipeline(v config.Value) (config.Value, error) { } // Gather resulting clusters in natural order. - out := make([]config.Value, 0, len(keys)) - for _, key := range keys { - out = append(out, seen[key]) + out := make([]config.Value, 0, len(labels)) + for _, label := range labels { + // Overwrite the label with the normalized version. + nv, err := seen[label].Set("label", config.V(label)) + if err != nil { + return config.InvalidValue, err + } + out = append(out, nv) } - return v.SetKey("clusters", config.NewValue(out, config.Location{})), nil + return config.NewValue(out, v.Location()), nil } -func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *mergePipelineClusters) foreachPipeline(v config.Value) (config.Value, error) { + pipelines, ok := v.AsMap() + if !ok { + return v, nil + } + + out := make(map[string]config.Value) + for key, pipeline := range pipelines { + var err error + out[key], err = pipeline.Transform("clusters", m.mergeClustersForPipeline) + if err != nil { + return v, err + } + } - // // MergeClusters merges cluster definitions with same label. - // // The clusters field is a slice, and as such, overrides are appended to it. - // // We can identify a cluster by its label, however, so we can use this label - // // to figure out which definitions are actually overrides and merge them. - // // - // // Note: the cluster label is optional and defaults to 'default'. - // // We therefore ALSO merge all clusters without a label. + return config.NewValue(out, v.Location()), nil +} +func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) error { return b.Config.Mutate(func(v config.Value) (config.Value, error) { - p := config.NewPathFromString("resources.pipelines") + if v == config.NilValue { + return v, nil + } - pv := v.Get("resources").Get("pipelines") - pipelines, ok := pv.AsMap() - if !ok { + nv, err := v.Transform("resources.pipelines", m.foreachPipeline) + + // It is not a problem if the pipelines key is not set. 
+ if config.IsNoSuchKeyError(err) { return v, nil } - out := make(map[string]config.Value) - for key, pipeline := range pipelines { - var err error - out[key], err = mergeClustersForPipeline(pipeline) - if err != nil { - return v, err - } + if err != nil { + return v, err } - v.Set(p, config.NewValue(out, config.Location{})) + return nv, nil }) } diff --git a/bundle/config/mutator/merge_pipeline_clusters_test.go b/bundle/config/mutator/merge_pipeline_clusters_test.go index ca3825fb2d..fb54a67d24 100644 --- a/bundle/config/mutator/merge_pipeline_clusters_test.go +++ b/bundle/config/mutator/merge_pipeline_clusters_test.go @@ -2,6 +2,7 @@ package mutator_test import ( "context" + "strings" "testing" "github.com/databricks/cli/bundle" @@ -17,7 +18,7 @@ func TestMergePipelineClusters(t *testing.T) { Config: config.Root{ Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ - "foo": &resources.Pipeline{ + "foo": { PipelineSpec: &pipelines.PipelineSpec{ Clusters: []pipelines.PipelineCluster{ { @@ -60,3 +61,65 @@ func TestMergePipelineClusters(t *testing.T) { pc1 := p.Clusters[1] assert.Equal(t, "i3.2xlarge", pc1.NodeTypeId) } + +func TestMergePipelineClustersCaseInsensitive(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Pipelines: map[string]*resources.Pipeline{ + "foo": { + PipelineSpec: &pipelines.PipelineSpec{ + Clusters: []pipelines.PipelineCluster{ + { + Label: "default", + NumWorkers: 2, + }, + { + Label: "DEFAULT", + NumWorkers: 4, + }, + }, + }, + }, + }, + }, + }, + } + + err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) + assert.NoError(t, err) + + p := b.Config.Resources.Pipelines["foo"] + assert.Len(t, p.Clusters, 1) + + // The default cluster was merged with a subsequent one. + pc0 := p.Clusters[0] + assert.Equal(t, "default", strings.ToLower(pc0.Label)) + assert.Equal(t, 4, pc0.NumWorkers) +} + +func TestMergePipelineClustersNilPipelines(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Pipelines: nil, + }, + }, + } + + err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) + assert.NoError(t, err) +} + +func TestMergePipelineClustersEmptyPipelines(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Pipelines: map[string]*resources.Pipeline{}, + }, + }, + } + + err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters()) + assert.NoError(t, err) +} diff --git a/bundle/config/mutator/mutator.go b/bundle/config/mutator/mutator.go index 797bd6d413..d6b38803b3 100644 --- a/bundle/config/mutator/mutator.go +++ b/bundle/config/mutator/mutator.go @@ -17,5 +17,11 @@ func DefaultMutators() []bundle.Mutator { } func DefaultMutatorsForTarget(env string) []bundle.Mutator { - return append(DefaultMutators(), SelectTarget(env)) + return append( + DefaultMutators(), + SelectTarget(env), + MergeJobClusters(), + MergeJobTasks(), + MergePipelineClusters(), + ) } diff --git a/libs/config/value.go b/libs/config/value.go index 4c3d893c75..9da280ba1b 100644 --- a/libs/config/value.go +++ b/libs/config/value.go @@ -3,7 +3,6 @@ package config import ( "fmt" "maps" - "slices" "time" ) @@ -59,68 +58,6 @@ func (v Value) AsMap() (map[string]Value, bool) { return m, ok } -func (v Value) set(prefix, suffix Path, value Value) (Value, error) { - var err error - - if len(suffix) == 0 { - return value, nil - } - - prefix = prefix.Append(suffix[0]) - - // Pick first component. 
- pc := suffix[0] - switch v.k { - case KindMap: - // Expect a key to be set if this is a map. - if len(pc.key) == 0 { - return InvalidValue, fmt.Errorf("expected a key index at %s", prefix) - } - - m := maps.Clone(v.MustMap()) - m[pc.key], err = v.set(prefix, suffix[1:], value) - if err != nil { - return InvalidValue, err - } - - // Return an updated map value. - return Value{ - v: m, - k: KindMap, - l: v.l, - }, nil - - case KindSequence: - // Expect an index to be set if this is a sequence. - if len(pc.key) > 0 { - return InvalidValue, fmt.Errorf("expected an index at %s", prefix) - } - - s := slices.Clone(v.MustSequence()) - if pc.index < 0 || pc.index >= len(s) { - return InvalidValue, fmt.Errorf("index out of bounds under %s", prefix) - } - s[pc.index], err = v.set(prefix, suffix[1:], value) - if err != nil { - return InvalidValue, err - } - - // Return an updated sequence value. - return Value{ - v: s, - k: KindSequence, - l: v.l, - }, nil - - default: - return InvalidValue, fmt.Errorf("expected a map or sequence under %s", prefix) - } -} - -func (v Value) Set(p Path, value Value) (Value, error) { - return v.set(EmptyPath, p, value) -} - func (v Value) SetKey(key string, value Value) Value { m, ok := v.AsMap() if !ok { @@ -139,8 +76,10 @@ func (v Value) SetKey(key string, value Value) Value { } func (v Value) AsSequence() ([]Value, bool) { - s, ok := v.v.([]Value) - return s, ok + if v.k != KindSequence { + return nil, false + } + return v.v.([]Value), true } func (v Value) Kind() Kind { diff --git a/libs/config/value_set.go b/libs/config/value_set.go new file mode 100644 index 0000000000..b921faf3d2 --- /dev/null +++ b/libs/config/value_set.go @@ -0,0 +1,56 @@ +package config + +import ( + "fmt" + "maps" +) + +func (v Value) SetByPath(p Path, value Value) (Value, error) { + return v.set(EmptyPath, p, value) +} + +func (v Value) Set(path string, value Value) (Value, error) { + p, err := NewPathFromString(path) + if err != nil { + return InvalidValue, err + } + return v.set(EmptyPath, p, value) +} + +func (v Value) set(prefix, suffix Path, nv Value) (Value, error) { + if len(suffix) == 0 { + return nv, nil + } + + component := suffix[0] + prefix = prefix.Append(component) + suffix = suffix[1:] + + // Resolve first component. + switch v.k { + case KindMap: + // Expect a key to be set if this is a map. + if len(component.key) == 0 { + return InvalidValue, fmt.Errorf("expected a key index at %s", prefix) + } + + // Recurse on set to get a new map entry. + m := v.MustMap() + nv, err := m[component.key].set(prefix, suffix, nv) + if err != nil { + return InvalidValue, err + } + + // Return an updated map value. 
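+ // Clone before assigning so the receiver's map is left untouched;
+ // SetByPath/Set return a new Value rather than mutating in place.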
+ m = maps.Clone(m) + m[component.key] = nv + return Value{ + v: m, + k: KindMap, + l: v.l, + }, nil + + default: + return InvalidValue, fmt.Errorf("expected a map under %s", prefix) + } +} diff --git a/libs/config/value_set_test.go b/libs/config/value_set_test.go new file mode 100644 index 0000000000..d7b82b3200 --- /dev/null +++ b/libs/config/value_set_test.go @@ -0,0 +1 @@ +package config_test diff --git a/libs/config/value_transform.go b/libs/config/value_transform.go new file mode 100644 index 0000000000..43cea4e120 --- /dev/null +++ b/libs/config/value_transform.go @@ -0,0 +1,104 @@ +package config + +import ( + "errors" + "fmt" + "maps" + "slices" +) + +type noSuchKeyError struct { + p Path +} + +func (e noSuchKeyError) Error() string { + return fmt.Sprintf("no such key: %s", e.p) +} + +func IsNoSuchKeyError(err error) bool { + var target noSuchKeyError + return errors.As(err, &target) +} + +func (v Value) TransformByPath(p Path, value Value) (Value, error) { + return v.set(EmptyPath, p, value) +} + +func (v Value) Transform(path string, fn func(Value) (Value, error)) (Value, error) { + p, err := NewPathFromString(path) + if err != nil { + return InvalidValue, err + } + return v.transform(EmptyPath, p, fn) +} + +func (v Value) transform(prefix, suffix Path, fn func(Value) (Value, error)) (Value, error) { + if len(suffix) == 0 { + return fn(v) + } + + component := suffix[0] + prefix = prefix.Append(component) + suffix = suffix[1:] + + // Resolve first component. + switch v.k { + case KindMap: + // Expect a key to be set if this is a map. + if len(component.key) == 0 { + return InvalidValue, fmt.Errorf("expected a key index at %s", prefix) + } + + // Lookup current value in the map. + m := v.MustMap() + nv, ok := m[component.key] + if !ok { + return InvalidValue, noSuchKeyError{prefix} + } + + // Recursively transform the value. + nv, err := nv.transform(prefix, suffix, fn) + if err != nil { + return InvalidValue, err + } + + // Return an updated map value. + m = maps.Clone(m) + m[component.key] = nv + return Value{ + v: m, + k: KindMap, + l: v.l, + }, nil + + case KindSequence: + // Expect an index to be set if this is a sequence. + if len(component.key) > 0 { + return InvalidValue, fmt.Errorf("expected an index at %s", prefix) + } + + // Lookup current value in the sequence. + s := v.MustSequence() + if component.index < 0 || component.index >= len(s) { + return InvalidValue, fmt.Errorf("index out of bounds under %s", prefix) + } + + // Recursively transform the value. + nv, err := s[component.index].transform(prefix, suffix, fn) + if err != nil { + return InvalidValue, err + } + + // Return an updated sequence value. 
+ s = slices.Clone(s) + s[component.index] = nv + return Value{ + v: s, + k: KindSequence, + l: v.l, + }, nil + + default: + return InvalidValue, fmt.Errorf("expected a map or sequence at %s", prefix) + } +} diff --git a/libs/config/value_transform_test.go b/libs/config/value_transform_test.go new file mode 100644 index 0000000000..d7b82b3200 --- /dev/null +++ b/libs/config/value_transform_test.go @@ -0,0 +1 @@ +package config_test From 43470076a1bbaf83f6dc2ad3a81c31554e6d1c38 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 20 Dec 2023 16:23:20 +0100 Subject: [PATCH 022/104] wip --- bundle/config/resources.go | 21 ---- bundle/config/resources/job.go | 67 ------------- bundle/config/resources/job_test.go | 116 ----------------------- bundle/config/resources/pipeline.go | 50 ---------- bundle/config/resources/pipeline_test.go | 76 --------------- bundle/config/root.go | 4 + bundle/tests/loader.go | 10 +- 7 files changed, 13 insertions(+), 331 deletions(-) delete mode 100644 bundle/config/resources/job_test.go delete mode 100644 bundle/config/resources/pipeline_test.go diff --git a/bundle/config/resources.go b/bundle/config/resources.go index c39cf4f38a..82d780ac62 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -147,24 +147,3 @@ func (r *Resources) ConfigureConfigFilePath() { e.ConfigureConfigFilePath() } } - -// Merge iterates over all resources and merges chunks of the -// resource configuration that can be merged. For example, for -// jobs, this merges job cluster definitions and tasks that -// use the same `job_cluster_key`, or `task_key`, respectively. -func (r *Resources) Merge() error { - for _, job := range r.Jobs { - if err := job.MergeJobClusters(); err != nil { - return err - } - if err := job.MergeTasks(); err != nil { - return err - } - } - for _, pipeline := range r.Pipelines { - if err := pipeline.MergeClusters(); err != nil { - return err - } - } - return nil -} diff --git a/bundle/config/resources/job.go b/bundle/config/resources/job.go index bf29106a03..650702e355 100644 --- a/bundle/config/resources/job.go +++ b/bundle/config/resources/job.go @@ -4,7 +4,6 @@ import ( "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/databricks-sdk-go/marshal" "github.com/databricks/databricks-sdk-go/service/jobs" - "github.com/imdario/mergo" ) type Job struct { @@ -23,69 +22,3 @@ func (s *Job) UnmarshalJSON(b []byte) error { func (s Job) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } - -// MergeJobClusters merges job clusters with the same key. -// The job clusters field is a slice, and as such, overrides are appended to it. -// We can identify a job cluster by its key, however, so we can use this key -// to figure out which definitions are actually overrides and merge them. -func (j *Job) MergeJobClusters() error { - keys := make(map[string]*jobs.JobCluster) - output := make([]jobs.JobCluster, 0, len(j.JobClusters)) - - // Target overrides are always appended, so we can iterate in natural order to - // first find the base definition, and merge instances we encounter later. - for i := range j.JobClusters { - key := j.JobClusters[i].JobClusterKey - - // Register job cluster with key if not yet seen before. - ref, ok := keys[key] - if !ok { - output = append(output, j.JobClusters[i]) - keys[key] = &output[len(output)-1] - continue - } - - // Merge this instance into the reference. 
- err := mergo.Merge(ref, &j.JobClusters[i], mergo.WithOverride, mergo.WithAppendSlice) - if err != nil { - return err - } - } - - // Overwrite resulting slice. - j.JobClusters = output - return nil -} - -// MergeTasks merges tasks with the same key. -// The tasks field is a slice, and as such, overrides are appended to it. -// We can identify a task by its task key, however, so we can use this key -// to figure out which definitions are actually overrides and merge them. -func (j *Job) MergeTasks() error { - keys := make(map[string]*jobs.Task) - tasks := make([]jobs.Task, 0, len(j.Tasks)) - - // Target overrides are always appended, so we can iterate in natural order to - // first find the base definition, and merge instances we encounter later. - for i := range j.Tasks { - key := j.Tasks[i].TaskKey - - // Register the task with key if not yet seen before. - ref, ok := keys[key] - if !ok { - tasks = append(tasks, j.Tasks[i]) - keys[key] = &tasks[len(tasks)-1] - continue - } - - // Merge this instance into the reference. - err := mergo.Merge(ref, &j.Tasks[i], mergo.WithOverride, mergo.WithAppendSlice) - if err != nil { - return err - } - } - - // Overwrite resulting slice. - j.Tasks = tasks - return nil -} diff --git a/bundle/config/resources/job_test.go b/bundle/config/resources/job_test.go deleted file mode 100644 index 24b82fabbe..0000000000 --- a/bundle/config/resources/job_test.go +++ /dev/null @@ -1,116 +0,0 @@ -package resources - -import ( - "testing" - - "github.com/databricks/databricks-sdk-go/service/compute" - "github.com/databricks/databricks-sdk-go/service/jobs" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestJobMergeJobClusters(t *testing.T) { - j := &Job{ - JobSettings: &jobs.JobSettings{ - JobClusters: []jobs.JobCluster{ - { - JobClusterKey: "foo", - NewCluster: &compute.ClusterSpec{ - SparkVersion: "13.3.x-scala2.12", - NodeTypeId: "i3.xlarge", - NumWorkers: 2, - }, - }, - { - JobClusterKey: "bar", - NewCluster: &compute.ClusterSpec{ - SparkVersion: "10.4.x-scala2.12", - }, - }, - { - JobClusterKey: "foo", - NewCluster: &compute.ClusterSpec{ - NodeTypeId: "i3.2xlarge", - NumWorkers: 4, - }, - }, - }, - }, - } - - err := j.MergeJobClusters() - require.NoError(t, err) - - assert.Len(t, j.JobClusters, 2) - assert.Equal(t, "foo", j.JobClusters[0].JobClusterKey) - assert.Equal(t, "bar", j.JobClusters[1].JobClusterKey) - - // This job cluster was merged with a subsequent one. - jc0 := j.JobClusters[0].NewCluster - assert.Equal(t, "13.3.x-scala2.12", jc0.SparkVersion) - assert.Equal(t, "i3.2xlarge", jc0.NodeTypeId) - assert.Equal(t, 4, jc0.NumWorkers) - - // This job cluster was left untouched. 
- jc1 := j.JobClusters[1].NewCluster - assert.Equal(t, "10.4.x-scala2.12", jc1.SparkVersion) -} - -func TestJobMergeTasks(t *testing.T) { - j := &Job{ - JobSettings: &jobs.JobSettings{ - Tasks: []jobs.Task{ - { - TaskKey: "foo", - NewCluster: &compute.ClusterSpec{ - SparkVersion: "13.3.x-scala2.12", - NodeTypeId: "i3.xlarge", - NumWorkers: 2, - }, - Libraries: []compute.Library{ - {Whl: "package1"}, - }, - }, - { - TaskKey: "bar", - NewCluster: &compute.ClusterSpec{ - SparkVersion: "10.4.x-scala2.12", - }, - }, - { - TaskKey: "foo", - NewCluster: &compute.ClusterSpec{ - NodeTypeId: "i3.2xlarge", - NumWorkers: 4, - }, - Libraries: []compute.Library{ - {Pypi: &compute.PythonPyPiLibrary{ - Package: "package2", - }}, - }, - }, - }, - }, - } - - err := j.MergeTasks() - require.NoError(t, err) - - assert.Len(t, j.Tasks, 2) - assert.Equal(t, "foo", j.Tasks[0].TaskKey) - assert.Equal(t, "bar", j.Tasks[1].TaskKey) - - // This task was merged with a subsequent one. - task0 := j.Tasks[0] - cluster := task0.NewCluster - assert.Equal(t, "13.3.x-scala2.12", cluster.SparkVersion) - assert.Equal(t, "i3.2xlarge", cluster.NodeTypeId) - assert.Equal(t, 4, cluster.NumWorkers) - assert.Len(t, task0.Libraries, 2) - assert.Equal(t, task0.Libraries[0].Whl, "package1") - assert.Equal(t, task0.Libraries[1].Pypi.Package, "package2") - - // This task was left untouched. - task1 := j.Tasks[1].NewCluster - assert.Equal(t, "10.4.x-scala2.12", task1.SparkVersion) -} diff --git a/bundle/config/resources/pipeline.go b/bundle/config/resources/pipeline.go index 5c741f8af6..5ff400f90f 100644 --- a/bundle/config/resources/pipeline.go +++ b/bundle/config/resources/pipeline.go @@ -1,12 +1,9 @@ package resources import ( - "strings" - "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/databricks-sdk-go/marshal" "github.com/databricks/databricks-sdk-go/service/pipelines" - "github.com/imdario/mergo" ) type Pipeline struct { @@ -25,50 +22,3 @@ func (s *Pipeline) UnmarshalJSON(b []byte) error { func (s Pipeline) MarshalJSON() ([]byte, error) { return marshal.Marshal(s) } - -// MergeClusters merges cluster definitions with same label. -// The clusters field is a slice, and as such, overrides are appended to it. -// We can identify a cluster by its label, however, so we can use this label -// to figure out which definitions are actually overrides and merge them. -// -// Note: the cluster label is optional and defaults to 'default'. -// We therefore ALSO merge all clusters without a label. -func (p *Pipeline) MergeClusters() error { - clusters := make(map[string]*pipelines.PipelineCluster) - output := make([]pipelines.PipelineCluster, 0, len(p.Clusters)) - - // Normalize cluster labels. - // If empty, this defaults to "default". - // To make matching case insensitive, labels are lowercased. - for i := range p.Clusters { - label := p.Clusters[i].Label - if label == "" { - label = "default" - } - p.Clusters[i].Label = strings.ToLower(label) - } - - // Target overrides are always appended, so we can iterate in natural order to - // first find the base definition, and merge instances we encounter later. - for i := range p.Clusters { - label := p.Clusters[i].Label - - // Register pipeline cluster with label if not yet seen before. - ref, ok := clusters[label] - if !ok { - output = append(output, p.Clusters[i]) - clusters[label] = &output[len(output)-1] - continue - } - - // Merge this instance into the reference. 
- err := mergo.Merge(ref, &p.Clusters[i], mergo.WithOverride, mergo.WithAppendSlice) - if err != nil { - return err - } - } - - // Overwrite resulting slice. - p.Clusters = output - return nil -} diff --git a/bundle/config/resources/pipeline_test.go b/bundle/config/resources/pipeline_test.go deleted file mode 100644 index 316e3d1459..0000000000 --- a/bundle/config/resources/pipeline_test.go +++ /dev/null @@ -1,76 +0,0 @@ -package resources - -import ( - "strings" - "testing" - - "github.com/databricks/databricks-sdk-go/service/pipelines" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestPipelineMergeClusters(t *testing.T) { - p := &Pipeline{ - PipelineSpec: &pipelines.PipelineSpec{ - Clusters: []pipelines.PipelineCluster{ - { - NodeTypeId: "i3.xlarge", - NumWorkers: 2, - PolicyId: "1234", - }, - { - Label: "maintenance", - NodeTypeId: "i3.2xlarge", - }, - { - NodeTypeId: "i3.2xlarge", - NumWorkers: 4, - }, - }, - }, - } - - err := p.MergeClusters() - require.NoError(t, err) - - assert.Len(t, p.Clusters, 2) - assert.Equal(t, "default", p.Clusters[0].Label) - assert.Equal(t, "maintenance", p.Clusters[1].Label) - - // The default cluster was merged with a subsequent one. - pc0 := p.Clusters[0] - assert.Equal(t, "i3.2xlarge", pc0.NodeTypeId) - assert.Equal(t, 4, pc0.NumWorkers) - assert.Equal(t, "1234", pc0.PolicyId) - - // The maintenance cluster was left untouched. - pc1 := p.Clusters[1] - assert.Equal(t, "i3.2xlarge", pc1.NodeTypeId) -} - -func TestPipelineMergeClustersCaseInsensitive(t *testing.T) { - p := &Pipeline{ - PipelineSpec: &pipelines.PipelineSpec{ - Clusters: []pipelines.PipelineCluster{ - { - Label: "default", - NumWorkers: 2, - }, - { - Label: "DEFAULT", - NumWorkers: 4, - }, - }, - }, - } - - err := p.MergeClusters() - require.NoError(t, err) - - assert.Len(t, p.Clusters, 1) - - // The default cluster was merged with a subsequent one. - pc0 := p.Clusters[0] - assert.Equal(t, "default", strings.ToLower(pc0.Label)) - assert.Equal(t, 4, pc0.NumWorkers) -} diff --git a/bundle/config/root.go b/bundle/config/root.go index 50e8d1808d..7e216a180e 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -155,6 +155,10 @@ func (r *Root) Mutate(fn func(config.Value) (config.Value, error)) error { return err } r.value = nv + + // Assign config file paths after mutating the configuration. 
+ r.ConfigureConfigFilePath() + return nil } diff --git a/bundle/tests/loader.go b/bundle/tests/loader.go index f23b107649..5aeed0550e 100644 --- a/bundle/tests/loader.go +++ b/bundle/tests/loader.go @@ -20,7 +20,15 @@ func load(t *testing.T, path string) *bundle.Bundle { func loadTarget(t *testing.T, path, env string) *bundle.Bundle { b := load(t, path) - err := bundle.Apply(context.Background(), b, mutator.SelectTarget(env)) + err := bundle.Apply( + context.Background(), b, + bundle.Seq( + mutator.SelectTarget(env), + mutator.MergeJobClusters(), + mutator.MergeJobTasks(), + mutator.MergePipelineClusters(), + ), + ) require.NoError(t, err) return b } From bd64d1d6656615eaeea714a1067a6cd3ed1a8705 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 20 Dec 2023 17:40:25 +0100 Subject: [PATCH 023/104] Fix job with spark conf test --- bundle/tests/job_with_spark_conf_test.go | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/bundle/tests/job_with_spark_conf_test.go b/bundle/tests/job_with_spark_conf_test.go index a2c04c5eea..abc0a58170 100644 --- a/bundle/tests/job_with_spark_conf_test.go +++ b/bundle/tests/job_with_spark_conf_test.go @@ -4,6 +4,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestJobWithSparkConf(t *testing.T) { @@ -14,9 +15,17 @@ func TestJobWithSparkConf(t *testing.T) { assert.Len(t, job.JobClusters, 1) assert.Equal(t, "test_cluster", job.JobClusters[0].JobClusterKey) - // Existing behavior is such that including non-string values - // in the spark_conf map will cause the job to fail to load. - // This is expected to be solved once we switch to the custom YAML loader. - tasks := job.Tasks - assert.Len(t, tasks, 0, "see https://github.com/databricks/cli/issues/992") + // This test exists because of https://github.com/databricks/cli/issues/992. + // It is solved as of **TODO**. 
+ require.Len(t, job.JobClusters, 1) + cluster := job.JobClusters[0] + assert.Equal(t, "14.2.x-scala2.12", cluster.NewCluster.SparkVersion) + assert.Equal(t, "i3.xlarge", cluster.NewCluster.NodeTypeId) + assert.Equal(t, 2, cluster.NewCluster.NumWorkers) + assert.Equal(t, map[string]string{ + "spark.string": "string", + "spark.int": "1", + "spark.bool": "true", + "spark.float": "1.2", + }, cluster.NewCluster.SparkConf) } From 40e1425a141093893b4320abd5aa41903e3935f9 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 22 Dec 2023 10:59:45 +0100 Subject: [PATCH 024/104] Address comments --- libs/config/path_string_test.go | 16 ++++++++++++++++ libs/config/walk.go | 4 ++-- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/libs/config/path_string_test.go b/libs/config/path_string_test.go index 37415aacfa..89e645615f 100644 --- a/libs/config/path_string_test.go +++ b/libs/config/path_string_test.go @@ -42,6 +42,10 @@ func TestNewPathFromString(t *testing.T) { input: "foo.bar[1][2]", output: NewPath(Key("foo"), Key("bar"), Index(1), Index(2)), }, + { + input: "foo.bar[1][2][3]", + output: NewPath(Key("foo"), Key("bar"), Index(1), Index(2), Index(3)), + }, { input: "foo[1234]", output: NewPath(Key("foo"), Index(1234)), @@ -50,6 +54,18 @@ func TestNewPathFromString(t *testing.T) { input: "foo[123", err: fmt.Errorf("invalid path: foo[123"), }, + { + input: "foo[123]]", + err: fmt.Errorf("invalid path: foo[123]]"), + }, + { + input: "foo[[123]", + err: fmt.Errorf("invalid path: foo[[123]"), + }, + { + input: "foo[[123]]", + err: fmt.Errorf("invalid path: foo[[123]]"), + }, { input: "foo[foo]", err: fmt.Errorf("invalid path: foo[foo]"), diff --git a/libs/config/walk.go b/libs/config/walk.go index f20b19df15..ce05833804 100644 --- a/libs/config/walk.go +++ b/libs/config/walk.go @@ -33,7 +33,7 @@ func walk(v Value, p Path, fn func(p Path, v Value) (Value, error)) (Value, erro switch v.Kind() { case KindMap: - m := v.v.(map[string]Value) + m := v.MustMap() out := make(map[string]Value, len(m)) for k := range m { nv, err := walk(m[k], p.Append(Key(k)), fn) @@ -47,7 +47,7 @@ func walk(v Value, p Path, fn func(p Path, v Value) (Value, error)) (Value, erro } v.v = out case KindSequence: - s := v.v.([]Value) + s := v.MustSequence() out := make([]Value, 0, len(s)) for i := range s { nv, err := walk(s[i], p.Append(Index(i)), fn) From fb7ec6ba17207c4edfdddb59153fc30a77794247 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 22 Dec 2023 13:41:37 +0100 Subject: [PATCH 025/104] Rename libs/config -> libs/dyn The name "dynamic value", or "dyn" for short, is more descriptive than the opaque "config". Also, it conveniently does not alias with other packages in the repository, or (popular ones) elsewhere. 
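
Call sites only change their import path and package qualifier. A
representative (illustrative) example of the mechanical changes in this diff:

    import "github.com/databricks/cli/libs/dyn"

    var v dyn.Value // previously config.Value from libs/config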
--- libs/config/path_test.go | 76 ------- libs/diag/diagnostic.go | 4 +- .../convert/end_to_end_test.go | 4 +- libs/{config => dyn}/convert/error.go | 4 +- libs/{config => dyn}/convert/from_typed.go | 108 +++++----- .../convert/from_typed_test.go | 186 +++++++++--------- libs/{config => dyn}/convert/normalize.go | 96 ++++----- .../{config => dyn}/convert/normalize_test.go | 132 ++++++------- libs/{config => dyn}/convert/struct_info.go | 14 +- .../convert/struct_info_test.go | 8 +- libs/{config => dyn}/convert/to_typed.go | 54 ++--- libs/{config => dyn}/convert/to_typed_test.go | 126 ++++++------ libs/{config => dyn}/kind.go | 2 +- libs/{config => dyn}/location.go | 2 +- libs/{config => dyn}/location_test.go | 6 +- libs/{config => dyn}/merge/merge.go | 40 ++-- libs/{config => dyn}/merge/merge_test.go | 76 +++---- libs/{config => dyn}/path.go | 2 +- libs/{config => dyn}/path_string.go | 2 +- libs/{config => dyn}/path_string_test.go | 4 +- libs/dyn/path_test.go | 76 +++++++ libs/{config => dyn}/value.go | 2 +- libs/{config => dyn}/value_test.go | 22 +-- libs/{config => dyn}/walk.go | 2 +- libs/{config => dyn}/walk_test.go | 4 +- libs/{config => dyn}/yamlloader/loader.go | 80 ++++---- .../yamlloader/testdata/anchor_01.yml | 0 .../yamlloader/testdata/anchor_02.yml | 0 .../yamlloader/testdata/anchor_03.yml | 0 .../yamlloader/testdata/anchor_04.yml | 0 .../yamlloader/testdata/anchor_05.yml | 0 .../yamlloader/testdata/anchor_06.yml | 0 .../yamlloader/testdata/anchor_07.yml | 0 .../yamlloader/testdata/anchor_08.yml | 0 .../yamlloader/testdata/empty.yml | 0 .../yamlloader/testdata/error_01.yml | 0 .../yamlloader/testdata/error_02.yml | 0 .../yamlloader/testdata/error_03.yml | 0 .../yamlloader/testdata/mix_01.yml | 0 .../yamlloader/testdata/mix_02.yml | 0 libs/{config => dyn}/yamlloader/yaml.go | 8 +- .../yamlloader/yaml_anchor_test.go | 44 ++--- .../yamlloader/yaml_error_test.go | 2 +- .../yamlloader/yaml_mix_test.go | 6 +- libs/{config => dyn}/yamlloader/yaml_test.go | 8 +- 45 files changed, 600 insertions(+), 600 deletions(-) delete mode 100644 libs/config/path_test.go rename libs/{config => dyn}/convert/end_to_end_test.go (93%) rename libs/{config => dyn}/convert/error.go (73%) rename libs/{config => dyn}/convert/from_typed.go (58%) rename libs/{config => dyn}/convert/from_typed_test.go (60%) rename libs/{config => dyn}/convert/normalize.go (58%) rename libs/{config => dyn}/convert/normalize_test.go (74%) rename libs/{config => dyn}/convert/struct_info.go (87%) rename libs/{config => dyn}/convert/struct_info_test.go (97%) rename libs/{config => dyn}/convert/to_typed.go (81%) rename libs/{config => dyn}/convert/to_typed_test.go (76%) rename libs/{config => dyn}/kind.go (98%) rename libs/{config => dyn}/location.go (92%) rename libs/{config => dyn}/location_test.go (54%) rename libs/{config => dyn}/merge/merge.go (60%) rename libs/{config => dyn}/merge/merge_test.go (67%) rename libs/{config => dyn}/path.go (99%) rename libs/{config => dyn}/path_string.go (99%) rename libs/{config => dyn}/path_string_test.go (96%) create mode 100644 libs/dyn/path_test.go rename libs/{config => dyn}/value.go (99%) rename libs/{config => dyn}/value_test.go (55%) rename libs/{config => dyn}/walk.go (99%) rename libs/{config => dyn}/walk_test.go (98%) rename libs/{config => dyn}/yamlloader/loader.go (61%) rename libs/{config => dyn}/yamlloader/testdata/anchor_01.yml (100%) rename libs/{config => dyn}/yamlloader/testdata/anchor_02.yml (100%) rename libs/{config => dyn}/yamlloader/testdata/anchor_03.yml (100%) rename 
libs/{config => dyn}/yamlloader/testdata/anchor_04.yml (100%) rename libs/{config => dyn}/yamlloader/testdata/anchor_05.yml (100%) rename libs/{config => dyn}/yamlloader/testdata/anchor_06.yml (100%) rename libs/{config => dyn}/yamlloader/testdata/anchor_07.yml (100%) rename libs/{config => dyn}/yamlloader/testdata/anchor_08.yml (100%) rename libs/{config => dyn}/yamlloader/testdata/empty.yml (100%) rename libs/{config => dyn}/yamlloader/testdata/error_01.yml (100%) rename libs/{config => dyn}/yamlloader/testdata/error_02.yml (100%) rename libs/{config => dyn}/yamlloader/testdata/error_03.yml (100%) rename libs/{config => dyn}/yamlloader/testdata/mix_01.yml (100%) rename libs/{config => dyn}/yamlloader/testdata/mix_02.yml (100%) rename libs/{config => dyn}/yamlloader/yaml.go (56%) rename libs/{config => dyn}/yamlloader/yaml_anchor_test.go (61%) rename libs/{config => dyn}/yamlloader/yaml_error_test.go (94%) rename libs/{config => dyn}/yamlloader/yaml_mix_test.go (79%) rename libs/{config => dyn}/yamlloader/yaml_test.go (76%) diff --git a/libs/config/path_test.go b/libs/config/path_test.go deleted file mode 100644 index 3fdd848e60..0000000000 --- a/libs/config/path_test.go +++ /dev/null @@ -1,76 +0,0 @@ -package config_test - -import ( - "testing" - - "github.com/databricks/cli/libs/config" - "github.com/stretchr/testify/assert" -) - -func TestPathAppend(t *testing.T) { - p := config.NewPath(config.Key("foo")) - - // Single arg. - p1 := p.Append(config.Key("bar")) - assert.True(t, p1.Equal(config.NewPath(config.Key("foo"), config.Key("bar")))) - - // Multiple args. - p2 := p.Append(config.Key("bar"), config.Index(1)) - assert.True(t, p2.Equal(config.NewPath(config.Key("foo"), config.Key("bar"), config.Index(1)))) -} - -func TestPathJoin(t *testing.T) { - p := config.NewPath(config.Key("foo")) - - // Single arg. - p1 := p.Join(config.NewPath(config.Key("bar"))) - assert.True(t, p1.Equal(config.NewPath(config.Key("foo"), config.Key("bar")))) - - // Multiple args. 
- p2 := p.Join(config.NewPath(config.Key("bar")), config.NewPath(config.Index(1))) - assert.True(t, p2.Equal(config.NewPath(config.Key("foo"), config.Key("bar"), config.Index(1)))) -} - -func TestPathEqualEmpty(t *testing.T) { - assert.True(t, config.EmptyPath.Equal(config.EmptyPath)) -} - -func TestPathEqual(t *testing.T) { - p1 := config.NewPath(config.Key("foo"), config.Index(1)) - p2 := config.NewPath(config.Key("bar"), config.Index(2)) - assert.False(t, p1.Equal(p2), "expected %q to not equal %q", p1, p2) - - p3 := config.NewPath(config.Key("foo"), config.Index(1)) - assert.True(t, p1.Equal(p3), "expected %q to equal %q", p1, p3) - - p4 := config.NewPath(config.Key("foo"), config.Index(1), config.Key("bar"), config.Index(2)) - assert.False(t, p1.Equal(p4), "expected %q to not equal %q", p1, p4) -} - -func TestPathHasPrefixEmpty(t *testing.T) { - empty := config.EmptyPath - nonEmpty := config.NewPath(config.Key("foo")) - assert.True(t, empty.HasPrefix(empty)) - assert.True(t, nonEmpty.HasPrefix(empty)) - assert.False(t, empty.HasPrefix(nonEmpty)) -} - -func TestPathHasPrefix(t *testing.T) { - p1 := config.NewPath(config.Key("foo"), config.Index(1)) - p2 := config.NewPath(config.Key("bar"), config.Index(2)) - assert.False(t, p1.HasPrefix(p2), "expected %q to not have prefix %q", p1, p2) - - p3 := config.NewPath(config.Key("foo")) - assert.True(t, p1.HasPrefix(p3), "expected %q to have prefix %q", p1, p3) -} - -func TestPathString(t *testing.T) { - p1 := config.NewPath(config.Key("foo"), config.Index(1)) - assert.Equal(t, "foo[1]", p1.String()) - - p2 := config.NewPath(config.Key("bar"), config.Index(2), config.Key("baz")) - assert.Equal(t, "bar[2].baz", p2.String()) - - p3 := config.NewPath(config.Key("foo"), config.Index(1), config.Key("bar"), config.Index(2), config.Key("baz")) - assert.Equal(t, "foo[1].bar[2].baz", p3.String()) -} diff --git a/libs/diag/diagnostic.go b/libs/diag/diagnostic.go index c5757a58e8..02d2e7c176 100644 --- a/libs/diag/diagnostic.go +++ b/libs/diag/diagnostic.go @@ -3,7 +3,7 @@ package diag import ( "fmt" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" ) type Diagnostic struct { @@ -19,7 +19,7 @@ type Diagnostic struct { // Location is a source code location associated with the diagnostic message. // It may be zero if there is no associated location. - Location config.Location + Location dyn.Location } // Errorf creates a new error diagnostic. 
diff --git a/libs/config/convert/end_to_end_test.go b/libs/dyn/convert/end_to_end_test.go similarity index 93% rename from libs/config/convert/end_to_end_test.go rename to libs/dyn/convert/end_to_end_test.go index c06830e83e..fbb8433629 100644 --- a/libs/config/convert/end_to_end_test.go +++ b/libs/dyn/convert/end_to_end_test.go @@ -3,13 +3,13 @@ package convert import ( "testing" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func assertFromTypedToTypedEqual[T any](t *testing.T, src T) { - nv, err := FromTyped(src, config.NilValue) + nv, err := FromTyped(src, dyn.NilValue) require.NoError(t, err) var dst T diff --git a/libs/config/convert/error.go b/libs/dyn/convert/error.go similarity index 73% rename from libs/config/convert/error.go rename to libs/dyn/convert/error.go index b55668d67f..d3770d82d7 100644 --- a/libs/config/convert/error.go +++ b/libs/dyn/convert/error.go @@ -3,11 +3,11 @@ package convert import ( "fmt" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" ) type TypeError struct { - value config.Value + value dyn.Value msg string } diff --git a/libs/config/convert/from_typed.go b/libs/dyn/convert/from_typed.go similarity index 58% rename from libs/config/convert/from_typed.go rename to libs/dyn/convert/from_typed.go index e3911a9e5a..0659d1cd78 100644 --- a/libs/config/convert/from_typed.go +++ b/libs/dyn/convert/from_typed.go @@ -4,18 +4,18 @@ import ( "fmt" "reflect" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" ) // FromTyped converts changes made in the typed structure w.r.t. the configuration value // back to the configuration value, retaining existing location information where possible. -func FromTyped(src any, ref config.Value) (config.Value, error) { +func FromTyped(src any, ref dyn.Value) (dyn.Value, error) { srcv := reflect.ValueOf(src) // Dereference pointer if necessary for srcv.Kind() == reflect.Pointer { if srcv.IsNil() { - return config.NilValue, nil + return dyn.NilValue, nil } srcv = srcv.Elem() } @@ -37,53 +37,53 @@ func FromTyped(src any, ref config.Value) (config.Value, error) { return fromTypedFloat(srcv, ref) } - return config.NilValue, fmt.Errorf("unsupported type: %s", srcv.Kind()) + return dyn.NilValue, fmt.Errorf("unsupported type: %s", srcv.Kind()) } -func fromTypedStruct(src reflect.Value, ref config.Value) (config.Value, error) { +func fromTypedStruct(src reflect.Value, ref dyn.Value) (dyn.Value, error) { // Check that the reference value is compatible or nil. switch ref.Kind() { - case config.KindMap, config.KindNil: + case dyn.KindMap, dyn.KindNil: default: - return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) + return dyn.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) } - out := make(map[string]config.Value) + out := make(map[string]dyn.Value) info := getStructInfo(src.Type()) for k, v := range info.FieldValues(src) { // Convert the field taking into account the reference value (may be equal to config.NilValue). nv, err := FromTyped(v.Interface(), ref.Get(k)) if err != nil { - return config.Value{}, err + return dyn.Value{}, err } - if nv != config.NilValue { + if nv != dyn.NilValue { out[k] = nv } } // If the struct was equal to its zero value, emit a nil. 
if len(out) == 0 { - return config.NilValue, nil + return dyn.NilValue, nil } - return config.NewValue(out, ref.Location()), nil + return dyn.NewValue(out, ref.Location()), nil } -func fromTypedMap(src reflect.Value, ref config.Value) (config.Value, error) { +func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) { // Check that the reference value is compatible or nil. switch ref.Kind() { - case config.KindMap, config.KindNil: + case dyn.KindMap, dyn.KindNil: default: - return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) + return dyn.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) } // Return nil if the map is nil. if src.IsNil() { - return config.NilValue, nil + return dyn.NilValue, nil } - out := make(map[string]config.Value) + out := make(map[string]dyn.Value) iter := src.MapRange() for iter.Next() { k := iter.Key().String() @@ -92,7 +92,7 @@ func fromTypedMap(src reflect.Value, ref config.Value) (config.Value, error) { // Convert entry taking into account the reference value (may be equal to config.NilValue). nv, err := FromTyped(v.Interface(), ref.Get(k)) if err != nil { - return config.Value{}, err + return dyn.Value{}, err } // Every entry is represented, even if it is a nil. @@ -100,115 +100,115 @@ func fromTypedMap(src reflect.Value, ref config.Value) (config.Value, error) { out[k] = nv } - return config.NewValue(out, ref.Location()), nil + return dyn.NewValue(out, ref.Location()), nil } -func fromTypedSlice(src reflect.Value, ref config.Value) (config.Value, error) { +func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) { // Check that the reference value is compatible or nil. switch ref.Kind() { - case config.KindSequence, config.KindNil: + case dyn.KindSequence, dyn.KindNil: default: - return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) + return dyn.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) } // Return nil if the slice is nil. if src.IsNil() { - return config.NilValue, nil + return dyn.NilValue, nil } - out := make([]config.Value, src.Len()) + out := make([]dyn.Value, src.Len()) for i := 0; i < src.Len(); i++ { v := src.Index(i) // Convert entry taking into account the reference value (may be equal to config.NilValue). nv, err := FromTyped(v.Interface(), ref.Index(i)) if err != nil { - return config.Value{}, err + return dyn.Value{}, err } out[i] = nv } - return config.NewValue(out, ref.Location()), nil + return dyn.NewValue(out, ref.Location()), nil } -func fromTypedString(src reflect.Value, ref config.Value) (config.Value, error) { +func fromTypedString(src reflect.Value, ref dyn.Value) (dyn.Value, error) { switch ref.Kind() { - case config.KindString: + case dyn.KindString: value := src.String() if value == ref.MustString() { return ref, nil } - return config.V(value), nil - case config.KindNil: + return dyn.V(value), nil + case dyn.KindNil: // This field is not set in the reference, so we only include it if it has a non-zero value. // Otherwise, we would always include all zero valued fields. 
if src.IsZero() { - return config.NilValue, nil + return dyn.NilValue, nil } - return config.V(src.String()), nil + return dyn.V(src.String()), nil } - return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) + return dyn.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) } -func fromTypedBool(src reflect.Value, ref config.Value) (config.Value, error) { +func fromTypedBool(src reflect.Value, ref dyn.Value) (dyn.Value, error) { switch ref.Kind() { - case config.KindBool: + case dyn.KindBool: value := src.Bool() if value == ref.MustBool() { return ref, nil } - return config.V(value), nil - case config.KindNil: + return dyn.V(value), nil + case dyn.KindNil: // This field is not set in the reference, so we only include it if it has a non-zero value. // Otherwise, we would always include all zero valued fields. if src.IsZero() { - return config.NilValue, nil + return dyn.NilValue, nil } - return config.V(src.Bool()), nil + return dyn.V(src.Bool()), nil } - return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) + return dyn.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) } -func fromTypedInt(src reflect.Value, ref config.Value) (config.Value, error) { +func fromTypedInt(src reflect.Value, ref dyn.Value) (dyn.Value, error) { switch ref.Kind() { - case config.KindInt: + case dyn.KindInt: value := src.Int() if value == ref.MustInt() { return ref, nil } - return config.V(value), nil - case config.KindNil: + return dyn.V(value), nil + case dyn.KindNil: // This field is not set in the reference, so we only include it if it has a non-zero value. // Otherwise, we would always include all zero valued fields. if src.IsZero() { - return config.NilValue, nil + return dyn.NilValue, nil } - return config.V(src.Int()), nil + return dyn.V(src.Int()), nil } - return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) + return dyn.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) } -func fromTypedFloat(src reflect.Value, ref config.Value) (config.Value, error) { +func fromTypedFloat(src reflect.Value, ref dyn.Value) (dyn.Value, error) { switch ref.Kind() { - case config.KindFloat: + case dyn.KindFloat: value := src.Float() if value == ref.MustFloat() { return ref, nil } - return config.V(value), nil - case config.KindNil: + return dyn.V(value), nil + case dyn.KindNil: // This field is not set in the reference, so we only include it if it has a non-zero value. // Otherwise, we would always include all zero valued fields. 
if src.IsZero() { - return config.NilValue, nil + return dyn.NilValue, nil } - return config.V(src.Float()), nil + return dyn.V(src.Float()), nil } - return config.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) + return dyn.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) } diff --git a/libs/config/convert/from_typed_test.go b/libs/dyn/convert/from_typed_test.go similarity index 60% rename from libs/config/convert/from_typed_test.go rename to libs/dyn/convert/from_typed_test.go index 2b28f549cd..0e9b9c7cd5 100644 --- a/libs/config/convert/from_typed_test.go +++ b/libs/dyn/convert/from_typed_test.go @@ -3,7 +3,7 @@ package convert import ( "testing" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -15,11 +15,11 @@ func TestFromTypedStructZeroFields(t *testing.T) { } src := Tmp{} - ref := config.NilValue + ref := dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.NilValue, nv) + assert.Equal(t, dyn.NilValue, nv) } func TestFromTypedStructSetFields(t *testing.T) { @@ -33,12 +33,12 @@ func TestFromTypedStructSetFields(t *testing.T) { Bar: "bar", } - ref := config.NilValue + ref := dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V(map[string]config.Value{ - "foo": config.V("foo"), - "bar": config.V("bar"), + assert.Equal(t, dyn.V(map[string]dyn.Value{ + "foo": dyn.V("foo"), + "bar": dyn.V("bar"), }), nv) } @@ -53,45 +53,45 @@ func TestFromTypedStructSetFieldsRetainLocationIfUnchanged(t *testing.T) { Bar: "qux", } - ref := config.V(map[string]config.Value{ - "foo": config.NewValue("bar", config.Location{File: "foo"}), - "bar": config.NewValue("baz", config.Location{File: "bar"}), + ref := dyn.V(map[string]dyn.Value{ + "foo": dyn.NewValue("bar", dyn.Location{File: "foo"}), + "bar": dyn.NewValue("baz", dyn.Location{File: "bar"}), }) nv, err := FromTyped(src, ref) require.NoError(t, err) // Assert foo has retained its location. - assert.Equal(t, config.NewValue("bar", config.Location{File: "foo"}), nv.Get("foo")) + assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "foo"}), nv.Get("foo")) // Assert bar lost its location (because it was overwritten). 
- assert.Equal(t, config.NewValue("qux", config.Location{}), nv.Get("bar")) + assert.Equal(t, dyn.NewValue("qux", dyn.Location{}), nv.Get("bar")) } func TestFromTypedMapNil(t *testing.T) { var src map[string]string = nil - ref := config.V(map[string]config.Value{ - "foo": config.V("bar"), - "bar": config.V("baz"), + ref := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + "bar": dyn.V("baz"), }) nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.NilValue, nv) + assert.Equal(t, dyn.NilValue, nv) } func TestFromTypedMapEmpty(t *testing.T) { var src = map[string]string{} - ref := config.V(map[string]config.Value{ - "foo": config.V("bar"), - "bar": config.V("baz"), + ref := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + "bar": dyn.V("baz"), }) nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V(map[string]config.Value{}), nv) + assert.Equal(t, dyn.V(map[string]dyn.Value{}), nv) } func TestFromTypedMapNonEmpty(t *testing.T) { @@ -100,12 +100,12 @@ func TestFromTypedMapNonEmpty(t *testing.T) { "bar": "bar", } - ref := config.NilValue + ref := dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V(map[string]config.Value{ - "foo": config.V("foo"), - "bar": config.V("bar"), + assert.Equal(t, dyn.V(map[string]dyn.Value{ + "foo": dyn.V("foo"), + "bar": dyn.V("bar"), }), nv) } @@ -115,19 +115,19 @@ func TestFromTypedMapNonEmptyRetainLocationIfUnchanged(t *testing.T) { "bar": "qux", } - ref := config.V(map[string]config.Value{ - "foo": config.NewValue("bar", config.Location{File: "foo"}), - "bar": config.NewValue("baz", config.Location{File: "bar"}), + ref := dyn.V(map[string]dyn.Value{ + "foo": dyn.NewValue("bar", dyn.Location{File: "foo"}), + "bar": dyn.NewValue("baz", dyn.Location{File: "bar"}), }) nv, err := FromTyped(src, ref) require.NoError(t, err) // Assert foo has retained its location. - assert.Equal(t, config.NewValue("bar", config.Location{File: "foo"}), nv.Get("foo")) + assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "foo"}), nv.Get("foo")) // Assert bar lost its location (because it was overwritten). 
- assert.Equal(t, config.NewValue("qux", config.Location{}), nv.Get("bar")) + assert.Equal(t, dyn.NewValue("qux", dyn.Location{}), nv.Get("bar")) } func TestFromTypedMapFieldWithZeroValue(t *testing.T) { @@ -135,38 +135,38 @@ func TestFromTypedMapFieldWithZeroValue(t *testing.T) { "foo": "", } - ref := config.NilValue + ref := dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V(map[string]config.Value{ - "foo": config.NilValue, + assert.Equal(t, dyn.V(map[string]dyn.Value{ + "foo": dyn.NilValue, }), nv) } func TestFromTypedSliceNil(t *testing.T) { var src []string = nil - ref := config.V([]config.Value{ - config.V("bar"), - config.V("baz"), + ref := dyn.V([]dyn.Value{ + dyn.V("bar"), + dyn.V("baz"), }) nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.NilValue, nv) + assert.Equal(t, dyn.NilValue, nv) } func TestFromTypedSliceEmpty(t *testing.T) { var src = []string{} - ref := config.V([]config.Value{ - config.V("bar"), - config.V("baz"), + ref := dyn.V([]dyn.Value{ + dyn.V("bar"), + dyn.V("baz"), }) nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V([]config.Value{}), nv) + assert.Equal(t, dyn.V([]dyn.Value{}), nv) } func TestFromTypedSliceNonEmpty(t *testing.T) { @@ -175,12 +175,12 @@ func TestFromTypedSliceNonEmpty(t *testing.T) { "bar", } - ref := config.NilValue + ref := dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V([]config.Value{ - config.V("foo"), - config.V("bar"), + assert.Equal(t, dyn.V([]dyn.Value{ + dyn.V("foo"), + dyn.V("bar"), }), nv) } @@ -190,205 +190,205 @@ func TestFromTypedSliceNonEmptyRetainLocationIfUnchanged(t *testing.T) { "bar", } - ref := config.V([]config.Value{ - config.NewValue("foo", config.Location{File: "foo"}), - config.NewValue("baz", config.Location{File: "baz"}), + ref := dyn.V([]dyn.Value{ + dyn.NewValue("foo", dyn.Location{File: "foo"}), + dyn.NewValue("baz", dyn.Location{File: "baz"}), }) nv, err := FromTyped(src, ref) require.NoError(t, err) // Assert foo has retained its location. - assert.Equal(t, config.NewValue("foo", config.Location{File: "foo"}), nv.Index(0)) + assert.Equal(t, dyn.NewValue("foo", dyn.Location{File: "foo"}), nv.Index(0)) // Assert bar lost its location (because it was overwritten). 
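[Reviewer aside, not part of the patch] The RetainLocationIfUnchanged tests in this file capture the core contract of FromTyped after the rename: a field whose typed value still matches the reference keeps the reference's location, while an overwritten field comes back with an empty location. Below is a minimal sketch of that behavior, assuming the post-rename import paths (github.com/databricks/cli/libs/dyn and .../libs/dyn/convert); the file name inside the location is illustrative only.

package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
)

func main() {
	// Reference value as it would have been loaded from a file (file name is illustrative).
	ref := dyn.V(map[string]dyn.Value{
		"foo": dyn.NewValue("bar", dyn.Location{File: "databricks.yml", Line: 3, Column: 5}),
		"bar": dyn.NewValue("baz", dyn.Location{File: "databricks.yml", Line: 4, Column: 5}),
	})

	// Typed view with "bar" modified and "foo" left as-is.
	src := map[string]string{
		"foo": "bar",
		"bar": "qux",
	}

	nv, err := convert.FromTyped(src, ref)
	if err != nil {
		panic(err)
	}

	// "foo" is unchanged w.r.t. the reference, so its location is retained.
	fmt.Println(nv.Get("foo").Location())
	// "bar" was overwritten, so it comes back with an empty location.
	fmt.Println(nv.Get("bar").Location())
}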
- assert.Equal(t, config.NewValue("bar", config.Location{}), nv.Index(1)) + assert.Equal(t, dyn.NewValue("bar", dyn.Location{}), nv.Index(1)) } func TestFromTypedStringEmpty(t *testing.T) { var src string - var ref = config.NilValue + var ref = dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.NilValue, nv) + assert.Equal(t, dyn.NilValue, nv) } func TestFromTypedStringEmptyOverwrite(t *testing.T) { var src string - var ref = config.V("old") + var ref = dyn.V("old") nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V(""), nv) + assert.Equal(t, dyn.V(""), nv) } func TestFromTypedStringNonEmpty(t *testing.T) { var src string = "new" - var ref = config.NilValue + var ref = dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V("new"), nv) + assert.Equal(t, dyn.V("new"), nv) } func TestFromTypedStringNonEmptyOverwrite(t *testing.T) { var src string = "new" - var ref = config.V("old") + var ref = dyn.V("old") nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V("new"), nv) + assert.Equal(t, dyn.V("new"), nv) } func TestFromTypedStringRetainsLocationsIfUnchanged(t *testing.T) { var src string = "foo" - var ref = config.NewValue("foo", config.Location{File: "foo"}) + var ref = dyn.NewValue("foo", dyn.Location{File: "foo"}) nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.NewValue("foo", config.Location{File: "foo"}), nv) + assert.Equal(t, dyn.NewValue("foo", dyn.Location{File: "foo"}), nv) } func TestFromTypedStringTypeError(t *testing.T) { var src string = "foo" - var ref = config.V(1234) + var ref = dyn.V(1234) _, err := FromTyped(src, ref) require.Error(t, err) } func TestFromTypedBoolEmpty(t *testing.T) { var src bool - var ref = config.NilValue + var ref = dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.NilValue, nv) + assert.Equal(t, dyn.NilValue, nv) } func TestFromTypedBoolEmptyOverwrite(t *testing.T) { var src bool - var ref = config.V(true) + var ref = dyn.V(true) nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V(false), nv) + assert.Equal(t, dyn.V(false), nv) } func TestFromTypedBoolNonEmpty(t *testing.T) { var src bool = true - var ref = config.NilValue + var ref = dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V(true), nv) + assert.Equal(t, dyn.V(true), nv) } func TestFromTypedBoolNonEmptyOverwrite(t *testing.T) { var src bool = true - var ref = config.V(false) + var ref = dyn.V(false) nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V(true), nv) + assert.Equal(t, dyn.V(true), nv) } func TestFromTypedBoolRetainsLocationsIfUnchanged(t *testing.T) { var src bool = true - var ref = config.NewValue(true, config.Location{File: "foo"}) + var ref = dyn.NewValue(true, dyn.Location{File: "foo"}) nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.NewValue(true, config.Location{File: "foo"}), nv) + assert.Equal(t, dyn.NewValue(true, dyn.Location{File: "foo"}), nv) } func TestFromTypedBoolTypeError(t *testing.T) { var src bool = true - var ref = config.V("string") + var ref = dyn.V("string") _, err := FromTyped(src, ref) require.Error(t, err) } func TestFromTypedIntEmpty(t *testing.T) { var src int - var ref = config.NilValue + var ref = dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.NilValue, nv) + 
assert.Equal(t, dyn.NilValue, nv) } func TestFromTypedIntEmptyOverwrite(t *testing.T) { var src int - var ref = config.V(1234) + var ref = dyn.V(1234) nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V(int64(0)), nv) + assert.Equal(t, dyn.V(int64(0)), nv) } func TestFromTypedIntNonEmpty(t *testing.T) { var src int = 1234 - var ref = config.NilValue + var ref = dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V(int64(1234)), nv) + assert.Equal(t, dyn.V(int64(1234)), nv) } func TestFromTypedIntNonEmptyOverwrite(t *testing.T) { var src int = 1234 - var ref = config.V(1233) + var ref = dyn.V(1233) nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V(int64(1234)), nv) + assert.Equal(t, dyn.V(int64(1234)), nv) } func TestFromTypedIntRetainsLocationsIfUnchanged(t *testing.T) { var src int = 1234 - var ref = config.NewValue(1234, config.Location{File: "foo"}) + var ref = dyn.NewValue(1234, dyn.Location{File: "foo"}) nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.NewValue(1234, config.Location{File: "foo"}), nv) + assert.Equal(t, dyn.NewValue(1234, dyn.Location{File: "foo"}), nv) } func TestFromTypedIntTypeError(t *testing.T) { var src int = 1234 - var ref = config.V("string") + var ref = dyn.V("string") _, err := FromTyped(src, ref) require.Error(t, err) } func TestFromTypedFloatEmpty(t *testing.T) { var src float64 - var ref = config.NilValue + var ref = dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.NilValue, nv) + assert.Equal(t, dyn.NilValue, nv) } func TestFromTypedFloatEmptyOverwrite(t *testing.T) { var src float64 - var ref = config.V(1.23) + var ref = dyn.V(1.23) nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V(0.0), nv) + assert.Equal(t, dyn.V(0.0), nv) } func TestFromTypedFloatNonEmpty(t *testing.T) { var src float64 = 1.23 - var ref = config.NilValue + var ref = dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V(1.23), nv) + assert.Equal(t, dyn.V(1.23), nv) } func TestFromTypedFloatNonEmptyOverwrite(t *testing.T) { var src float64 = 1.23 - var ref = config.V(1.24) + var ref = dyn.V(1.24) nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.V(1.23), nv) + assert.Equal(t, dyn.V(1.23), nv) } func TestFromTypedFloatRetainsLocationsIfUnchanged(t *testing.T) { var src float64 = 1.23 - var ref = config.NewValue(1.23, config.Location{File: "foo"}) + var ref = dyn.NewValue(1.23, dyn.Location{File: "foo"}) nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, config.NewValue(1.23, config.Location{File: "foo"}), nv) + assert.Equal(t, dyn.NewValue(1.23, dyn.Location{File: "foo"}), nv) } func TestFromTypedFloatTypeError(t *testing.T) { var src float64 = 1.23 - var ref = config.V("string") + var ref = dyn.V("string") _, err := FromTyped(src, ref) require.Error(t, err) } diff --git a/libs/config/convert/normalize.go b/libs/dyn/convert/normalize.go similarity index 58% rename from libs/config/convert/normalize.go rename to libs/dyn/convert/normalize.go index d7d2b1dff5..7a652cbc7c 100644 --- a/libs/config/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ -5,15 +5,15 @@ import ( "reflect" "strconv" - "github.com/databricks/cli/libs/config" "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" ) -func Normalize(dst any, src config.Value) (config.Value, diag.Diagnostics) { 
+func Normalize(dst any, src dyn.Value) (dyn.Value, diag.Diagnostics) { return normalizeType(reflect.TypeOf(dst), src) } -func normalizeType(typ reflect.Type, src config.Value) (config.Value, diag.Diagnostics) { +func normalizeType(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { for typ.Kind() == reflect.Pointer { typ = typ.Elem() } @@ -35,10 +35,10 @@ func normalizeType(typ reflect.Type, src config.Value) (config.Value, diag.Diagn return normalizeFloat(typ, src) } - return config.NilValue, diag.Errorf("unsupported type: %s", typ.Kind()) + return dyn.NilValue, diag.Errorf("unsupported type: %s", typ.Kind()) } -func typeMismatch(expected config.Kind, src config.Value) diag.Diagnostic { +func typeMismatch(expected dyn.Kind, src dyn.Value) diag.Diagnostic { return diag.Diagnostic{ Severity: diag.Error, Summary: fmt.Sprintf("expected %s, found %s", expected, src.Kind()), @@ -46,12 +46,12 @@ func typeMismatch(expected config.Kind, src config.Value) diag.Diagnostic { } } -func normalizeStruct(typ reflect.Type, src config.Value) (config.Value, diag.Diagnostics) { +func normalizeStruct(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics switch src.Kind() { - case config.KindMap: - out := make(map[string]config.Value) + case dyn.KindMap: + out := make(map[string]dyn.Value) info := getStructInfo(typ) for k, v := range src.MustMap() { index, ok := info.Fields[k] @@ -77,20 +77,20 @@ func normalizeStruct(typ reflect.Type, src config.Value) (config.Value, diag.Dia out[k] = v } - return config.NewValue(out, src.Location()), diags - case config.KindNil: + return dyn.NewValue(out, src.Location()), diags + case dyn.KindNil: return src, diags } - return config.NilValue, diags.Append(typeMismatch(config.KindMap, src)) + return dyn.NilValue, diags.Append(typeMismatch(dyn.KindMap, src)) } -func normalizeMap(typ reflect.Type, src config.Value) (config.Value, diag.Diagnostics) { +func normalizeMap(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics switch src.Kind() { - case config.KindMap: - out := make(map[string]config.Value) + case dyn.KindMap: + out := make(map[string]dyn.Value) for k, v := range src.MustMap() { // Normalize the value according to the map element type. v, err := normalizeType(typ.Elem(), v) @@ -105,20 +105,20 @@ func normalizeMap(typ reflect.Type, src config.Value) (config.Value, diag.Diagno out[k] = v } - return config.NewValue(out, src.Location()), diags - case config.KindNil: + return dyn.NewValue(out, src.Location()), diags + case dyn.KindNil: return src, diags } - return config.NilValue, diags.Append(typeMismatch(config.KindMap, src)) + return dyn.NilValue, diags.Append(typeMismatch(dyn.KindMap, src)) } -func normalizeSlice(typ reflect.Type, src config.Value) (config.Value, diag.Diagnostics) { +func normalizeSlice(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics switch src.Kind() { - case config.KindSequence: - out := make([]config.Value, 0, len(src.MustSequence())) + case dyn.KindSequence: + out := make([]dyn.Value, 0, len(src.MustSequence())) for _, v := range src.MustSequence() { // Normalize the value according to the slice element type. 
v, err := normalizeType(typ.Elem(), v) @@ -133,42 +133,42 @@ func normalizeSlice(typ reflect.Type, src config.Value) (config.Value, diag.Diag out = append(out, v) } - return config.NewValue(out, src.Location()), diags - case config.KindNil: + return dyn.NewValue(out, src.Location()), diags + case dyn.KindNil: return src, diags } - return config.NilValue, diags.Append(typeMismatch(config.KindSequence, src)) + return dyn.NilValue, diags.Append(typeMismatch(dyn.KindSequence, src)) } -func normalizeString(typ reflect.Type, src config.Value) (config.Value, diag.Diagnostics) { +func normalizeString(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics var out string switch src.Kind() { - case config.KindString: + case dyn.KindString: out = src.MustString() - case config.KindBool: + case dyn.KindBool: out = strconv.FormatBool(src.MustBool()) - case config.KindInt: + case dyn.KindInt: out = strconv.FormatInt(src.MustInt(), 10) - case config.KindFloat: + case dyn.KindFloat: out = strconv.FormatFloat(src.MustFloat(), 'f', -1, 64) default: - return config.NilValue, diags.Append(typeMismatch(config.KindString, src)) + return dyn.NilValue, diags.Append(typeMismatch(dyn.KindString, src)) } - return config.NewValue(out, src.Location()), diags + return dyn.NewValue(out, src.Location()), diags } -func normalizeBool(typ reflect.Type, src config.Value) (config.Value, diag.Diagnostics) { +func normalizeBool(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics var out bool switch src.Kind() { - case config.KindBool: + case dyn.KindBool: out = src.MustBool() - case config.KindString: + case dyn.KindString: // See https://github.com/go-yaml/yaml/blob/f6f7691b1fdeb513f56608cd2c32c51f8194bf51/decode.go#L684-L693. switch src.MustString() { case "true", "y", "Y", "yes", "Yes", "YES", "on", "On", "ON": @@ -177,59 +177,59 @@ func normalizeBool(typ reflect.Type, src config.Value) (config.Value, diag.Diagn out = false default: // Cannot interpret as a boolean. 
- return config.NilValue, diags.Append(typeMismatch(config.KindBool, src)) + return dyn.NilValue, diags.Append(typeMismatch(dyn.KindBool, src)) } default: - return config.NilValue, diags.Append(typeMismatch(config.KindBool, src)) + return dyn.NilValue, diags.Append(typeMismatch(dyn.KindBool, src)) } - return config.NewValue(out, src.Location()), diags + return dyn.NewValue(out, src.Location()), diags } -func normalizeInt(typ reflect.Type, src config.Value) (config.Value, diag.Diagnostics) { +func normalizeInt(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics var out int64 switch src.Kind() { - case config.KindInt: + case dyn.KindInt: out = src.MustInt() - case config.KindString: + case dyn.KindString: var err error out, err = strconv.ParseInt(src.MustString(), 10, 64) if err != nil { - return config.NilValue, diags.Append(diag.Diagnostic{ + return dyn.NilValue, diags.Append(diag.Diagnostic{ Severity: diag.Error, Summary: fmt.Sprintf("cannot parse %q as an integer", src.MustString()), Location: src.Location(), }) } default: - return config.NilValue, diags.Append(typeMismatch(config.KindInt, src)) + return dyn.NilValue, diags.Append(typeMismatch(dyn.KindInt, src)) } - return config.NewValue(out, src.Location()), diags + return dyn.NewValue(out, src.Location()), diags } -func normalizeFloat(typ reflect.Type, src config.Value) (config.Value, diag.Diagnostics) { +func normalizeFloat(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics var out float64 switch src.Kind() { - case config.KindFloat: + case dyn.KindFloat: out = src.MustFloat() - case config.KindString: + case dyn.KindString: var err error out, err = strconv.ParseFloat(src.MustString(), 64) if err != nil { - return config.NilValue, diags.Append(diag.Diagnostic{ + return dyn.NilValue, diags.Append(diag.Diagnostic{ Severity: diag.Error, Summary: fmt.Sprintf("cannot parse %q as a floating point number", src.MustString()), Location: src.Location(), }) } default: - return config.NilValue, diags.Append(typeMismatch(config.KindFloat, src)) + return dyn.NilValue, diags.Append(typeMismatch(dyn.KindFloat, src)) } - return config.NewValue(out, src.Location()), diags + return dyn.NewValue(out, src.Location()), diags } diff --git a/libs/config/convert/normalize_test.go b/libs/dyn/convert/normalize_test.go similarity index 74% rename from libs/config/convert/normalize_test.go rename to libs/dyn/convert/normalize_test.go index 9c4b10bbd4..13b1ed52f5 100644 --- a/libs/config/convert/normalize_test.go +++ b/libs/dyn/convert/normalize_test.go @@ -3,8 +3,8 @@ package convert import ( "testing" - "github.com/databricks/cli/libs/config" "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" ) @@ -15,9 +15,9 @@ func TestNormalizeStruct(t *testing.T) { } var typ Tmp - vin := config.V(map[string]config.Value{ - "foo": config.V("bar"), - "bar": config.V("baz"), + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + "bar": dyn.V("baz"), }) vout, err := Normalize(typ, vin) @@ -32,9 +32,9 @@ func TestNormalizeStructElementDiagnostic(t *testing.T) { } var typ Tmp - vin := config.V(map[string]config.Value{ - "foo": config.V("bar"), - "bar": config.V(map[string]config.Value{"an": config.V("error")}), + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + "bar": dyn.V(map[string]dyn.Value{"an": dyn.V("error")}), }) vout, err := Normalize(typ, vin) @@ -42,7 +42,7 @@ func TestNormalizeStructElementDiagnostic(t 
*testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Error, Summary: `expected string, found map`, - Location: config.Location{}, + Location: dyn.Location{}, }, err[0]) // Elements that encounter an error during normalization are dropped. @@ -57,9 +57,9 @@ func TestNormalizeStructUnknownField(t *testing.T) { } var typ Tmp - vin := config.V(map[string]config.Value{ - "foo": config.V("bar"), - "bar": config.V("baz"), + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + "bar": dyn.V("baz"), }) vout, err := Normalize(typ, vin) @@ -82,7 +82,7 @@ func TestNormalizeStructNil(t *testing.T) { } var typ Tmp - vin := config.NilValue + vin := dyn.NilValue vout, err := Normalize(typ, vin) assert.Empty(t, err) assert.Equal(t, vin, vout) @@ -94,7 +94,7 @@ func TestNormalizeStructError(t *testing.T) { } var typ Tmp - vin := config.V("string") + vin := dyn.V("string") _, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ @@ -106,9 +106,9 @@ func TestNormalizeStructError(t *testing.T) { func TestNormalizeMap(t *testing.T) { var typ map[string]string - vin := config.V(map[string]config.Value{ - "foo": config.V("bar"), - "bar": config.V("baz"), + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + "bar": dyn.V("baz"), }) vout, err := Normalize(typ, vin) @@ -118,9 +118,9 @@ func TestNormalizeMap(t *testing.T) { func TestNormalizeMapElementDiagnostic(t *testing.T) { var typ map[string]string - vin := config.V(map[string]config.Value{ - "foo": config.V("bar"), - "bar": config.V(map[string]config.Value{"an": config.V("error")}), + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + "bar": dyn.V(map[string]dyn.Value{"an": dyn.V("error")}), }) vout, err := Normalize(typ, vin) @@ -128,7 +128,7 @@ func TestNormalizeMapElementDiagnostic(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Error, Summary: `expected string, found map`, - Location: config.Location{}, + Location: dyn.Location{}, }, err[0]) // Elements that encounter an error during normalization are dropped. 
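[Reviewer aside, not part of the patch] For readers skimming the renamed normalize.go above: Normalize never writes into its first argument; it only uses that argument's Go type to coerce the dynamic value, and anything it cannot coerce is reported as a diagnostic and dropped, as the tests here show. A small sketch under the post-rename import paths (variable names are illustrative):

package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
)

func main() {
	var n int

	// A string-typed value, e.g. as produced by a YAML loader.
	vin := dyn.NewValue("123", dyn.Location{File: "databricks.yml", Line: 1, Column: 1})

	// Normalize coerces it to the kind expected by *int and keeps the source location.
	vout, diags := convert.Normalize(&n, vin)
	fmt.Println(vout.MustInt(), len(diags))

	// A value that cannot be coerced is reported as a diagnostic instead of an error.
	_, diags = convert.Normalize(&n, dyn.V("abc"))
	fmt.Println(diags[0].Summary)
}

Note that the destination itself is left untouched; ToTyped is the function that actually populates Go values.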
@@ -139,7 +139,7 @@ func TestNormalizeMapElementDiagnostic(t *testing.T) { func TestNormalizeMapNil(t *testing.T) { var typ map[string]string - vin := config.NilValue + vin := dyn.NilValue vout, err := Normalize(typ, vin) assert.Empty(t, err) assert.Equal(t, vin, vout) @@ -147,7 +147,7 @@ func TestNormalizeMapNil(t *testing.T) { func TestNormalizeMapError(t *testing.T) { var typ map[string]string - vin := config.V("string") + vin := dyn.V("string") _, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ @@ -159,9 +159,9 @@ func TestNormalizeMapError(t *testing.T) { func TestNormalizeSlice(t *testing.T) { var typ []string - vin := config.V([]config.Value{ - config.V("foo"), - config.V("bar"), + vin := dyn.V([]dyn.Value{ + dyn.V("foo"), + dyn.V("bar"), }) vout, err := Normalize(typ, vin) @@ -171,10 +171,10 @@ func TestNormalizeSlice(t *testing.T) { func TestNormalizeSliceElementDiagnostic(t *testing.T) { var typ []string - vin := config.V([]config.Value{ - config.V("foo"), - config.V("bar"), - config.V(map[string]config.Value{"an": config.V("error")}), + vin := dyn.V([]dyn.Value{ + dyn.V("foo"), + dyn.V("bar"), + dyn.V(map[string]dyn.Value{"an": dyn.V("error")}), }) vout, err := Normalize(typ, vin) @@ -182,7 +182,7 @@ func TestNormalizeSliceElementDiagnostic(t *testing.T) { assert.Equal(t, diag.Diagnostic{ Severity: diag.Error, Summary: `expected string, found map`, - Location: config.Location{}, + Location: dyn.Location{}, }, err[0]) // Elements that encounter an error during normalization are dropped. @@ -191,7 +191,7 @@ func TestNormalizeSliceElementDiagnostic(t *testing.T) { func TestNormalizeSliceNil(t *testing.T) { var typ []string - vin := config.NilValue + vin := dyn.NilValue vout, err := Normalize(typ, vin) assert.Empty(t, err) assert.Equal(t, vin, vout) @@ -199,7 +199,7 @@ func TestNormalizeSliceNil(t *testing.T) { func TestNormalizeSliceError(t *testing.T) { var typ []string - vin := config.V("string") + vin := dyn.V("string") _, err := Normalize(typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ @@ -211,7 +211,7 @@ func TestNormalizeSliceError(t *testing.T) { func TestNormalizeString(t *testing.T) { var typ string - vin := config.V("string") + vin := dyn.V("string") vout, err := Normalize(&typ, vin) assert.Empty(t, err) assert.Equal(t, vin, vout) @@ -219,7 +219,7 @@ func TestNormalizeString(t *testing.T) { func TestNormalizeStringNil(t *testing.T) { var typ string - vin := config.NewValue(nil, config.Location{File: "file", Line: 1, Column: 1}) + vin := dyn.NewValue(nil, dyn.Location{File: "file", Line: 1, Column: 1}) _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ @@ -231,51 +231,51 @@ func TestNormalizeStringNil(t *testing.T) { func TestNormalizeStringFromBool(t *testing.T) { var typ string - vin := config.NewValue(true, config.Location{File: "file", Line: 1, Column: 1}) + vin := dyn.NewValue(true, dyn.Location{File: "file", Line: 1, Column: 1}) vout, err := Normalize(&typ, vin) assert.Empty(t, err) - assert.Equal(t, config.NewValue("true", vin.Location()), vout) + assert.Equal(t, dyn.NewValue("true", vin.Location()), vout) } func TestNormalizeStringFromInt(t *testing.T) { var typ string - vin := config.NewValue(123, config.Location{File: "file", Line: 1, Column: 1}) + vin := dyn.NewValue(123, dyn.Location{File: "file", Line: 1, Column: 1}) vout, err := Normalize(&typ, vin) assert.Empty(t, err) - assert.Equal(t, config.NewValue("123", vin.Location()), vout) + assert.Equal(t, dyn.NewValue("123", 
vin.Location()), vout) } func TestNormalizeStringFromFloat(t *testing.T) { var typ string - vin := config.NewValue(1.20, config.Location{File: "file", Line: 1, Column: 1}) + vin := dyn.NewValue(1.20, dyn.Location{File: "file", Line: 1, Column: 1}) vout, err := Normalize(&typ, vin) assert.Empty(t, err) - assert.Equal(t, config.NewValue("1.2", vin.Location()), vout) + assert.Equal(t, dyn.NewValue("1.2", vin.Location()), vout) } func TestNormalizeStringError(t *testing.T) { var typ string - vin := config.V(map[string]config.Value{"an": config.V("error")}) + vin := dyn.V(map[string]dyn.Value{"an": dyn.V("error")}) _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ Severity: diag.Error, Summary: `expected string, found map`, - Location: config.Location{}, + Location: dyn.Location{}, }, err[0]) } func TestNormalizeBool(t *testing.T) { var typ bool - vin := config.V(true) + vin := dyn.V(true) vout, err := Normalize(&typ, vin) assert.Empty(t, err) - assert.Equal(t, config.V(true), vout) + assert.Equal(t, dyn.V(true), vout) } func TestNormalizeBoolNil(t *testing.T) { var typ bool - vin := config.NewValue(nil, config.Location{File: "file", Line: 1, Column: 1}) + vin := dyn.NewValue(nil, dyn.Location{File: "file", Line: 1, Column: 1}) _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ @@ -299,16 +299,16 @@ func TestNormalizeBoolFromString(t *testing.T) { {"on", true}, {"off", false}, } { - vin := config.V(c.Input) + vin := dyn.V(c.Input) vout, err := Normalize(&typ, vin) assert.Empty(t, err) - assert.Equal(t, config.V(c.Output), vout) + assert.Equal(t, dyn.V(c.Output), vout) } } func TestNormalizeBoolFromStringError(t *testing.T) { var typ bool - vin := config.V("abc") + vin := dyn.V("abc") _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ @@ -320,27 +320,27 @@ func TestNormalizeBoolFromStringError(t *testing.T) { func TestNormalizeBoolError(t *testing.T) { var typ bool - vin := config.V(map[string]config.Value{"an": config.V("error")}) + vin := dyn.V(map[string]dyn.Value{"an": dyn.V("error")}) _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ Severity: diag.Error, Summary: `expected bool, found map`, - Location: config.Location{}, + Location: dyn.Location{}, }, err[0]) } func TestNormalizeInt(t *testing.T) { var typ int - vin := config.V(123) + vin := dyn.V(123) vout, err := Normalize(&typ, vin) assert.Empty(t, err) - assert.Equal(t, config.V(int64(123)), vout) + assert.Equal(t, dyn.V(int64(123)), vout) } func TestNormalizeIntNil(t *testing.T) { var typ int - vin := config.NewValue(nil, config.Location{File: "file", Line: 1, Column: 1}) + vin := dyn.NewValue(nil, dyn.Location{File: "file", Line: 1, Column: 1}) _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ @@ -352,15 +352,15 @@ func TestNormalizeIntNil(t *testing.T) { func TestNormalizeIntFromString(t *testing.T) { var typ int - vin := config.V("123") + vin := dyn.V("123") vout, err := Normalize(&typ, vin) assert.Empty(t, err) - assert.Equal(t, config.V(int64(123)), vout) + assert.Equal(t, dyn.V(int64(123)), vout) } func TestNormalizeIntFromStringError(t *testing.T) { var typ int - vin := config.V("abc") + vin := dyn.V("abc") _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ @@ -372,27 +372,27 @@ func TestNormalizeIntFromStringError(t *testing.T) { func TestNormalizeIntError(t *testing.T) { var typ int - vin := 
config.V(map[string]config.Value{"an": config.V("error")}) + vin := dyn.V(map[string]dyn.Value{"an": dyn.V("error")}) _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ Severity: diag.Error, Summary: `expected int, found map`, - Location: config.Location{}, + Location: dyn.Location{}, }, err[0]) } func TestNormalizeFloat(t *testing.T) { var typ float64 - vin := config.V(1.2) + vin := dyn.V(1.2) vout, err := Normalize(&typ, vin) assert.Empty(t, err) - assert.Equal(t, config.V(1.2), vout) + assert.Equal(t, dyn.V(1.2), vout) } func TestNormalizeFloatNil(t *testing.T) { var typ float64 - vin := config.NewValue(nil, config.Location{File: "file", Line: 1, Column: 1}) + vin := dyn.NewValue(nil, dyn.Location{File: "file", Line: 1, Column: 1}) _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ @@ -404,15 +404,15 @@ func TestNormalizeFloatNil(t *testing.T) { func TestNormalizeFloatFromString(t *testing.T) { var typ float64 - vin := config.V("1.2") + vin := dyn.V("1.2") vout, err := Normalize(&typ, vin) assert.Empty(t, err) - assert.Equal(t, config.V(1.2), vout) + assert.Equal(t, dyn.V(1.2), vout) } func TestNormalizeFloatFromStringError(t *testing.T) { var typ float64 - vin := config.V("abc") + vin := dyn.V("abc") _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ @@ -424,12 +424,12 @@ func TestNormalizeFloatFromStringError(t *testing.T) { func TestNormalizeFloatError(t *testing.T) { var typ float64 - vin := config.V(map[string]config.Value{"an": config.V("error")}) + vin := dyn.V(map[string]dyn.Value{"an": dyn.V("error")}) _, err := Normalize(&typ, vin) assert.Len(t, err, 1) assert.Equal(t, diag.Diagnostic{ Severity: diag.Error, Summary: `expected float, found map`, - Location: config.Location{}, + Location: dyn.Location{}, }, err[0]) } diff --git a/libs/config/convert/struct_info.go b/libs/dyn/convert/struct_info.go similarity index 87% rename from libs/config/convert/struct_info.go rename to libs/dyn/convert/struct_info.go index 80cfabb692..dc3ed4da40 100644 --- a/libs/config/convert/struct_info.go +++ b/libs/dyn/convert/struct_info.go @@ -5,16 +5,16 @@ import ( "strings" "sync" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" ) // structInfo holds the type information we need to efficiently -// convert data from a [config.Value] to a Go struct. +// convert data from a [dyn.Value] to a Go struct. type structInfo struct { // Fields maps the JSON-name of the field to the field's index for use with [FieldByIndex]. Fields map[string][]int - // ValueField maps to the field with a [config.Value]. + // ValueField maps to the field with a [dyn.Value]. // The underlying type is expected to only have one of these. ValueField []int } @@ -74,10 +74,10 @@ func buildStructInfo(typ reflect.Type) structInfo { continue } - // If this field has type [config.Value], we populate it with the source [config.Value] from [ToTyped]. + // If this field has type [dyn.Value], we populate it with the source [dyn.Value] from [ToTyped]. if sf.IsExported() && sf.Type == configValueType { if out.ValueField != nil { - panic("multiple config.Value fields") + panic("multiple dyn.Value fields") } out.ValueField = append(prefix, sf.Index...) continue @@ -129,5 +129,5 @@ func (s *structInfo) FieldValues(v reflect.Value) map[string]reflect.Value { return out } -// Type of [config.Value]. -var configValueType = reflect.TypeOf((*config.Value)(nil)).Elem() +// Type of [dyn.Value]. 
+var configValueType = reflect.TypeOf((*dyn.Value)(nil)).Elem() diff --git a/libs/config/convert/struct_info_test.go b/libs/dyn/convert/struct_info_test.go similarity index 97% rename from libs/config/convert/struct_info_test.go rename to libs/dyn/convert/struct_info_test.go index 685679aecd..08be3c47ef 100644 --- a/libs/config/convert/struct_info_test.go +++ b/libs/dyn/convert/struct_info_test.go @@ -4,7 +4,7 @@ import ( "reflect" "testing" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" ) @@ -207,7 +207,7 @@ func TestStructInfoValueFieldAbsent(t *testing.T) { func TestStructInfoValueFieldPresent(t *testing.T) { type Tmp struct { - Foo config.Value + Foo dyn.Value } si := getStructInfo(reflect.TypeOf(Tmp{})) @@ -216,8 +216,8 @@ func TestStructInfoValueFieldPresent(t *testing.T) { func TestStructInfoValueFieldMultiple(t *testing.T) { type Tmp struct { - Foo config.Value - Bar config.Value + Foo dyn.Value + Bar dyn.Value } assert.Panics(t, func() { diff --git a/libs/config/convert/to_typed.go b/libs/dyn/convert/to_typed.go similarity index 81% rename from libs/config/convert/to_typed.go rename to libs/dyn/convert/to_typed.go index 8c43d97434..209de12cbd 100644 --- a/libs/config/convert/to_typed.go +++ b/libs/dyn/convert/to_typed.go @@ -5,17 +5,17 @@ import ( "reflect" "strconv" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" ) -func ToTyped(dst any, src config.Value) error { +func ToTyped(dst any, src dyn.Value) error { dstv := reflect.ValueOf(dst) // Dereference pointer if necessary for dstv.Kind() == reflect.Pointer { // If the source value is nil and the destination is a settable pointer, // set the destination to nil. Also see `end_to_end_test.go`. - if dstv.CanSet() && src == config.NilValue { + if dstv.CanSet() && src == dyn.NilValue { dstv.SetZero() return nil } @@ -50,9 +50,9 @@ func ToTyped(dst any, src config.Value) error { return fmt.Errorf("unsupported type: %s", dstv.Kind()) } -func toTypedStruct(dst reflect.Value, src config.Value) error { +func toTypedStruct(dst reflect.Value, src dyn.Value) error { switch src.Kind() { - case config.KindMap: + case dyn.KindMap: info := getStructInfo(dst.Type()) for k, v := range src.MustMap() { index, ok := info.Fields[k] @@ -83,14 +83,14 @@ func toTypedStruct(dst reflect.Value, src config.Value) error { } } - // Populate field(s) for [config.Value], if any. + // Populate field(s) for [dyn.Value], if any. if info.ValueField != nil { vv := dst.FieldByIndex(info.ValueField) vv.Set(reflect.ValueOf(src)) } return nil - case config.KindNil: + case dyn.KindNil: dst.SetZero() return nil } @@ -101,9 +101,9 @@ func toTypedStruct(dst reflect.Value, src config.Value) error { } } -func toTypedMap(dst reflect.Value, src config.Value) error { +func toTypedMap(dst reflect.Value, src dyn.Value) error { switch src.Kind() { - case config.KindMap: + case dyn.KindMap: m := src.MustMap() // Always overwrite. @@ -118,7 +118,7 @@ func toTypedMap(dst reflect.Value, src config.Value) error { dst.SetMapIndex(kv, vv.Elem()) } return nil - case config.KindNil: + case dyn.KindNil: dst.SetZero() return nil } @@ -129,9 +129,9 @@ func toTypedMap(dst reflect.Value, src config.Value) error { } } -func toTypedSlice(dst reflect.Value, src config.Value) error { +func toTypedSlice(dst reflect.Value, src dyn.Value) error { switch src.Kind() { - case config.KindSequence: + case dyn.KindSequence: seq := src.MustSequence() // Always overwrite. 
@@ -143,7 +143,7 @@ func toTypedSlice(dst reflect.Value, src config.Value) error { } } return nil - case config.KindNil: + case dyn.KindNil: dst.SetZero() return nil } @@ -154,18 +154,18 @@ func toTypedSlice(dst reflect.Value, src config.Value) error { } } -func toTypedString(dst reflect.Value, src config.Value) error { +func toTypedString(dst reflect.Value, src dyn.Value) error { switch src.Kind() { - case config.KindString: + case dyn.KindString: dst.SetString(src.MustString()) return nil - case config.KindBool: + case dyn.KindBool: dst.SetString(strconv.FormatBool(src.MustBool())) return nil - case config.KindInt: + case dyn.KindInt: dst.SetString(strconv.FormatInt(src.MustInt(), 10)) return nil - case config.KindFloat: + case dyn.KindFloat: dst.SetString(strconv.FormatFloat(src.MustFloat(), 'f', -1, 64)) return nil } @@ -176,12 +176,12 @@ func toTypedString(dst reflect.Value, src config.Value) error { } } -func toTypedBool(dst reflect.Value, src config.Value) error { +func toTypedBool(dst reflect.Value, src dyn.Value) error { switch src.Kind() { - case config.KindBool: + case dyn.KindBool: dst.SetBool(src.MustBool()) return nil - case config.KindString: + case dyn.KindString: // See https://github.com/go-yaml/yaml/blob/f6f7691b1fdeb513f56608cd2c32c51f8194bf51/decode.go#L684-L693. switch src.MustString() { case "y", "Y", "yes", "Yes", "YES", "on", "On", "ON": @@ -199,12 +199,12 @@ func toTypedBool(dst reflect.Value, src config.Value) error { } } -func toTypedInt(dst reflect.Value, src config.Value) error { +func toTypedInt(dst reflect.Value, src dyn.Value) error { switch src.Kind() { - case config.KindInt: + case dyn.KindInt: dst.SetInt(src.MustInt()) return nil - case config.KindString: + case dyn.KindString: if i64, err := strconv.ParseInt(src.MustString(), 10, 64); err == nil { dst.SetInt(i64) return nil @@ -217,12 +217,12 @@ func toTypedInt(dst reflect.Value, src config.Value) error { } } -func toTypedFloat(dst reflect.Value, src config.Value) error { +func toTypedFloat(dst reflect.Value, src dyn.Value) error { switch src.Kind() { - case config.KindFloat: + case dyn.KindFloat: dst.SetFloat(src.MustFloat()) return nil - case config.KindString: + case dyn.KindString: if f64, err := strconv.ParseFloat(src.MustString(), 64); err == nil { dst.SetFloat(f64) return nil diff --git a/libs/config/convert/to_typed_test.go b/libs/dyn/convert/to_typed_test.go similarity index 76% rename from libs/config/convert/to_typed_test.go rename to libs/dyn/convert/to_typed_test.go index 2845bddae0..3adc94c799 100644 --- a/libs/config/convert/to_typed_test.go +++ b/libs/dyn/convert/to_typed_test.go @@ -3,7 +3,7 @@ package convert import ( "testing" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -21,9 +21,9 @@ func TestToTypedStruct(t *testing.T) { } var out Tmp - v := config.V(map[string]config.Value{ - "foo": config.V("bar"), - "bar": config.V("baz"), + v := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + "bar": dyn.V("baz"), }) err := ToTyped(&out, v) @@ -48,9 +48,9 @@ func TestToTypedStructOverwrite(t *testing.T) { Foo: "baz", Bar: "qux", } - v := config.V(map[string]config.Value{ - "foo": config.V("bar"), - "bar": config.V("baz"), + v := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + "bar": dyn.V("baz"), }) err := ToTyped(&out, v) @@ -74,9 +74,9 @@ func TestToTypedStructAnonymousByValue(t *testing.T) { } var out Tmp - v := config.V(map[string]config.Value{ - "foo": 
config.V("bar"), - "bar": config.V("baz"), + v := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + "bar": dyn.V("baz"), }) err := ToTyped(&out, v) @@ -100,9 +100,9 @@ func TestToTypedStructAnonymousByPointer(t *testing.T) { } var out Tmp - v := config.V(map[string]config.Value{ - "foo": config.V("bar"), - "bar": config.V("baz"), + v := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + "bar": dyn.V("baz"), }) err := ToTyped(&out, v) @@ -117,7 +117,7 @@ func TestToTypedStructNil(t *testing.T) { } var out = Tmp{} - err := ToTyped(&out, config.NilValue) + err := ToTyped(&out, dyn.NilValue) require.NoError(t, err) assert.Equal(t, Tmp{}, out) } @@ -128,7 +128,7 @@ func TestToTypedStructNilOverwrite(t *testing.T) { } var out = Tmp{"bar"} - err := ToTyped(&out, config.NilValue) + err := ToTyped(&out, dyn.NilValue) require.NoError(t, err) assert.Equal(t, Tmp{}, out) } @@ -137,12 +137,12 @@ func TestToTypedStructWithValueField(t *testing.T) { type Tmp struct { Foo string `json:"foo"` - ConfigValue config.Value + ConfigValue dyn.Value } var out Tmp - v := config.V(map[string]config.Value{ - "foo": config.V("bar"), + v := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), }) err := ToTyped(&out, v) @@ -154,8 +154,8 @@ func TestToTypedStructWithValueField(t *testing.T) { func TestToTypedMap(t *testing.T) { var out = map[string]string{} - v := config.V(map[string]config.Value{ - "key": config.V("value"), + v := dyn.V(map[string]dyn.Value{ + "key": dyn.V("value"), }) err := ToTyped(&out, v) @@ -169,8 +169,8 @@ func TestToTypedMapOverwrite(t *testing.T) { "foo": "bar", } - v := config.V(map[string]config.Value{ - "bar": config.V("qux"), + v := dyn.V(map[string]dyn.Value{ + "bar": dyn.V("qux"), }) err := ToTyped(&out, v) @@ -182,8 +182,8 @@ func TestToTypedMapOverwrite(t *testing.T) { func TestToTypedMapWithPointerElement(t *testing.T) { var out map[string]*string - v := config.V(map[string]config.Value{ - "key": config.V("value"), + v := dyn.V(map[string]dyn.Value{ + "key": dyn.V("value"), }) err := ToTyped(&out, v) @@ -194,7 +194,7 @@ func TestToTypedMapWithPointerElement(t *testing.T) { func TestToTypedMapNil(t *testing.T) { var out = map[string]string{} - err := ToTyped(&out, config.NilValue) + err := ToTyped(&out, dyn.NilValue) require.NoError(t, err) assert.Nil(t, out) } @@ -203,7 +203,7 @@ func TestToTypedMapNilOverwrite(t *testing.T) { var out = map[string]string{ "foo": "bar", } - err := ToTyped(&out, config.NilValue) + err := ToTyped(&out, dyn.NilValue) require.NoError(t, err) assert.Nil(t, out) } @@ -211,9 +211,9 @@ func TestToTypedMapNilOverwrite(t *testing.T) { func TestToTypedSlice(t *testing.T) { var out []string - v := config.V([]config.Value{ - config.V("foo"), - config.V("bar"), + v := dyn.V([]dyn.Value{ + dyn.V("foo"), + dyn.V("bar"), }) err := ToTyped(&out, v) @@ -226,9 +226,9 @@ func TestToTypedSlice(t *testing.T) { func TestToTypedSliceOverwrite(t *testing.T) { var out = []string{"qux"} - v := config.V([]config.Value{ - config.V("foo"), - config.V("bar"), + v := dyn.V([]dyn.Value{ + dyn.V("foo"), + dyn.V("bar"), }) err := ToTyped(&out, v) @@ -241,9 +241,9 @@ func TestToTypedSliceOverwrite(t *testing.T) { func TestToTypedSliceWithPointerElement(t *testing.T) { var out []*string - v := config.V([]config.Value{ - config.V("foo"), - config.V("bar"), + v := dyn.V([]dyn.Value{ + dyn.V("foo"), + dyn.V("bar"), }) err := ToTyped(&out, v) @@ -255,63 +255,63 @@ func TestToTypedSliceWithPointerElement(t *testing.T) { func TestToTypedSliceNil(t *testing.T) { var out []string - err := 
ToTyped(&out, config.NilValue) + err := ToTyped(&out, dyn.NilValue) require.NoError(t, err) assert.Nil(t, out) } func TestToTypedSliceNilOverwrite(t *testing.T) { var out = []string{"foo"} - err := ToTyped(&out, config.NilValue) + err := ToTyped(&out, dyn.NilValue) require.NoError(t, err) assert.Nil(t, out) } func TestToTypedString(t *testing.T) { var out string - err := ToTyped(&out, config.V("foo")) + err := ToTyped(&out, dyn.V("foo")) require.NoError(t, err) assert.Equal(t, "foo", out) } func TestToTypedStringOverwrite(t *testing.T) { var out string = "bar" - err := ToTyped(&out, config.V("foo")) + err := ToTyped(&out, dyn.V("foo")) require.NoError(t, err) assert.Equal(t, "foo", out) } func TestToTypedStringFromBool(t *testing.T) { var out string - err := ToTyped(&out, config.V(true)) + err := ToTyped(&out, dyn.V(true)) require.NoError(t, err) assert.Equal(t, "true", out) } func TestToTypedStringFromInt(t *testing.T) { var out string - err := ToTyped(&out, config.V(123)) + err := ToTyped(&out, dyn.V(123)) require.NoError(t, err) assert.Equal(t, "123", out) } func TestToTypedStringFromFloat(t *testing.T) { var out string - err := ToTyped(&out, config.V(1.2)) + err := ToTyped(&out, dyn.V(1.2)) require.NoError(t, err) assert.Equal(t, "1.2", out) } func TestToTypedBool(t *testing.T) { var out bool - err := ToTyped(&out, config.V(true)) + err := ToTyped(&out, dyn.V(true)) require.NoError(t, err) assert.Equal(t, true, out) } func TestToTypedBoolOverwrite(t *testing.T) { var out bool = true - err := ToTyped(&out, config.V(false)) + err := ToTyped(&out, dyn.V(false)) require.NoError(t, err) assert.Equal(t, false, out) } @@ -321,128 +321,128 @@ func TestToTypedBoolFromString(t *testing.T) { // True-ish for _, v := range []string{"y", "yes", "on"} { - err := ToTyped(&out, config.V(v)) + err := ToTyped(&out, dyn.V(v)) require.NoError(t, err) assert.Equal(t, true, out) } // False-ish for _, v := range []string{"n", "no", "off"} { - err := ToTyped(&out, config.V(v)) + err := ToTyped(&out, dyn.V(v)) require.NoError(t, err) assert.Equal(t, false, out) } // Other - err := ToTyped(&out, config.V("${var.foo}")) + err := ToTyped(&out, dyn.V("${var.foo}")) require.Error(t, err) } func TestToTypedInt(t *testing.T) { var out int - err := ToTyped(&out, config.V(1234)) + err := ToTyped(&out, dyn.V(1234)) require.NoError(t, err) assert.Equal(t, int(1234), out) } func TestToTypedInt32(t *testing.T) { var out32 int32 - err := ToTyped(&out32, config.V(1235)) + err := ToTyped(&out32, dyn.V(1235)) require.NoError(t, err) assert.Equal(t, int32(1235), out32) } func TestToTypedInt64(t *testing.T) { var out64 int64 - err := ToTyped(&out64, config.V(1236)) + err := ToTyped(&out64, dyn.V(1236)) require.NoError(t, err) assert.Equal(t, int64(1236), out64) } func TestToTypedIntOverwrite(t *testing.T) { var out int = 123 - err := ToTyped(&out, config.V(1234)) + err := ToTyped(&out, dyn.V(1234)) require.NoError(t, err) assert.Equal(t, int(1234), out) } func TestToTypedInt32Overwrite(t *testing.T) { var out32 int32 = 123 - err := ToTyped(&out32, config.V(1234)) + err := ToTyped(&out32, dyn.V(1234)) require.NoError(t, err) assert.Equal(t, int32(1234), out32) } func TestToTypedInt64Overwrite(t *testing.T) { var out64 int64 = 123 - err := ToTyped(&out64, config.V(1234)) + err := ToTyped(&out64, dyn.V(1234)) require.NoError(t, err) assert.Equal(t, int64(1234), out64) } func TestToTypedIntFromStringError(t *testing.T) { var out int - err := ToTyped(&out, config.V("abc")) + err := ToTyped(&out, dyn.V("abc")) require.Error(t, err) } 
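[Reviewer aside, not part of the patch] The to_typed tests here and the from_typed tests earlier are two halves of the same round trip: ToTyped projects a dyn.Value onto a Go struct, and FromTyped folds edits made to that struct back into a dyn.Value, using the original value as the reference so that unchanged fields keep their locations. A sketch of that flow under the post-rename import paths; the resource struct is hypothetical and exists only for the example.

package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
)

// resource is a stand-in for a typed configuration struct; the JSON tag is
// what the struct info lookup uses to map dynamic keys to fields.
type resource struct {
	Name string `json:"name"`
}

func main() {
	src := dyn.V(map[string]dyn.Value{
		"name": dyn.NewValue("job", dyn.Location{File: "databricks.yml", Line: 2, Column: 3}),
	})

	// Dynamic -> typed.
	var r resource
	if err := convert.ToTyped(&r, src); err != nil {
		panic(err)
	}

	// Mutate the typed struct, then fold the change back into the dynamic value,
	// passing the original value as the reference.
	r.Name = "job-renamed"
	out, err := convert.FromTyped(r, src)
	if err != nil {
		panic(err)
	}

	fmt.Println(out.Get("name").MustString())
}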
func TestToTypedIntFromStringInt(t *testing.T) { var out int - err := ToTyped(&out, config.V("123")) + err := ToTyped(&out, dyn.V("123")) require.NoError(t, err) assert.Equal(t, int(123), out) } func TestToTypedFloat32(t *testing.T) { var out float32 - err := ToTyped(&out, config.V(float32(1.0))) + err := ToTyped(&out, dyn.V(float32(1.0))) require.NoError(t, err) assert.Equal(t, float32(1.0), out) } func TestToTypedFloat64(t *testing.T) { var out float64 - err := ToTyped(&out, config.V(float64(1.0))) + err := ToTyped(&out, dyn.V(float64(1.0))) require.NoError(t, err) assert.Equal(t, float64(1.0), out) } func TestToTypedFloat32Overwrite(t *testing.T) { var out float32 = 1.0 - err := ToTyped(&out, config.V(float32(2.0))) + err := ToTyped(&out, dyn.V(float32(2.0))) require.NoError(t, err) assert.Equal(t, float32(2.0), out) } func TestToTypedFloat64Overwrite(t *testing.T) { var out float64 = 1.0 - err := ToTyped(&out, config.V(float64(2.0))) + err := ToTyped(&out, dyn.V(float64(2.0))) require.NoError(t, err) assert.Equal(t, float64(2.0), out) } func TestToTypedFloat32FromStringError(t *testing.T) { var out float32 - err := ToTyped(&out, config.V("abc")) + err := ToTyped(&out, dyn.V("abc")) require.Error(t, err) } func TestToTypedFloat64FromStringError(t *testing.T) { var out float64 - err := ToTyped(&out, config.V("abc")) + err := ToTyped(&out, dyn.V("abc")) require.Error(t, err) } func TestToTypedFloat32FromString(t *testing.T) { var out float32 - err := ToTyped(&out, config.V("1.2")) + err := ToTyped(&out, dyn.V("1.2")) require.NoError(t, err) assert.Equal(t, float32(1.2), out) } func TestToTypedFloat64FromString(t *testing.T) { var out float64 - err := ToTyped(&out, config.V("1.2")) + err := ToTyped(&out, dyn.V("1.2")) require.NoError(t, err) assert.Equal(t, float64(1.2), out) } diff --git a/libs/config/kind.go b/libs/dyn/kind.go similarity index 98% rename from libs/config/kind.go rename to libs/dyn/kind.go index 5ed1a6650b..ba093341e9 100644 --- a/libs/config/kind.go +++ b/libs/dyn/kind.go @@ -1,4 +1,4 @@ -package config +package dyn import "time" diff --git a/libs/config/location.go b/libs/dyn/location.go similarity index 92% rename from libs/config/location.go rename to libs/dyn/location.go index 534b21c2c4..cd369193e0 100644 --- a/libs/config/location.go +++ b/libs/dyn/location.go @@ -1,4 +1,4 @@ -package config +package dyn import "fmt" diff --git a/libs/config/location_test.go b/libs/dyn/location_test.go similarity index 54% rename from libs/config/location_test.go rename to libs/dyn/location_test.go index 31013193c7..29226d73d0 100644 --- a/libs/config/location_test.go +++ b/libs/dyn/location_test.go @@ -1,13 +1,13 @@ -package config_test +package dyn_test import ( "testing" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" ) func TestLocation(t *testing.T) { - loc := config.Location{File: "file", Line: 1, Column: 2} + loc := dyn.Location{File: "file", Line: 1, Column: 2} assert.Equal(t, "file:1:2", loc.String()) } diff --git a/libs/config/merge/merge.go b/libs/dyn/merge/merge.go similarity index 60% rename from libs/config/merge/merge.go rename to libs/dyn/merge/merge.go index 896e212923..1cadbea608 100644 --- a/libs/config/merge/merge.go +++ b/libs/dyn/merge/merge.go @@ -3,7 +3,7 @@ package merge import ( "fmt" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" ) // Merge recursively merges the specified values. 
@@ -12,46 +12,46 @@ import ( // * Merging x with nil or nil with x always yields x. // * Merging maps a and b means entries from map b take precedence. // * Merging sequences a and b means concatenating them. -func Merge(a, b config.Value) (config.Value, error) { +func Merge(a, b dyn.Value) (dyn.Value, error) { return merge(a, b) } -func merge(a, b config.Value) (config.Value, error) { +func merge(a, b dyn.Value) (dyn.Value, error) { ak := a.Kind() bk := b.Kind() // If a is nil, return b. - if ak == config.KindNil { + if ak == dyn.KindNil { return b, nil } // If b is nil, return a. - if bk == config.KindNil { + if bk == dyn.KindNil { return a, nil } // Call the appropriate merge function based on the kind of a and b. switch ak { - case config.KindMap: - if bk != config.KindMap { - return config.NilValue, fmt.Errorf("cannot merge map with %s", bk) + case dyn.KindMap: + if bk != dyn.KindMap { + return dyn.NilValue, fmt.Errorf("cannot merge map with %s", bk) } return mergeMap(a, b) - case config.KindSequence: - if bk != config.KindSequence { - return config.NilValue, fmt.Errorf("cannot merge sequence with %s", bk) + case dyn.KindSequence: + if bk != dyn.KindSequence { + return dyn.NilValue, fmt.Errorf("cannot merge sequence with %s", bk) } return mergeSequence(a, b) default: if ak != bk { - return config.NilValue, fmt.Errorf("cannot merge %s with %s", ak, bk) + return dyn.NilValue, fmt.Errorf("cannot merge %s with %s", ak, bk) } return mergePrimitive(a, b) } } -func mergeMap(a, b config.Value) (config.Value, error) { - out := make(map[string]config.Value) +func mergeMap(a, b dyn.Value) (dyn.Value, error) { + out := make(map[string]dyn.Value) am := a.MustMap() bm := b.MustMap() @@ -66,7 +66,7 @@ func mergeMap(a, b config.Value) (config.Value, error) { // If the key already exists, merge the values. merged, err := merge(out[k], v) if err != nil { - return config.NilValue, err + return dyn.NilValue, err } out[k] = merged } else { @@ -76,23 +76,23 @@ func mergeMap(a, b config.Value) (config.Value, error) { } // Preserve the location of the first value. - return config.NewValue(out, a.Location()), nil + return dyn.NewValue(out, a.Location()), nil } -func mergeSequence(a, b config.Value) (config.Value, error) { +func mergeSequence(a, b dyn.Value) (dyn.Value, error) { as := a.MustSequence() bs := b.MustSequence() // Merging sequences means concatenating them. - out := make([]config.Value, len(as)+len(bs)) + out := make([]dyn.Value, len(as)+len(bs)) copy(out[:], as) copy(out[len(as):], bs) // Preserve the location of the first value. - return config.NewValue(out, a.Location()), nil + return dyn.NewValue(out, a.Location()), nil } -func mergePrimitive(a, b config.Value) (config.Value, error) { +func mergePrimitive(a, b dyn.Value) (dyn.Value, error) { // Merging primitive values means using the incoming value. 
return b, nil } diff --git a/libs/config/merge/merge_test.go b/libs/dyn/merge/merge_test.go similarity index 67% rename from libs/config/merge/merge_test.go rename to libs/dyn/merge/merge_test.go index c2e89f60a4..c4928e3536 100644 --- a/libs/config/merge/merge_test.go +++ b/libs/dyn/merge/merge_test.go @@ -3,19 +3,19 @@ package merge import ( "testing" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" ) func TestMergeMaps(t *testing.T) { - v1 := config.V(map[string]config.Value{ - "foo": config.V("bar"), - "bar": config.V("baz"), + v1 := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + "bar": dyn.V("baz"), }) - v2 := config.V(map[string]config.Value{ - "bar": config.V("qux"), - "qux": config.V("foo"), + v2 := dyn.V(map[string]dyn.Value{ + "bar": dyn.V("qux"), + "qux": dyn.V("foo"), }) // Merge v2 into v1. @@ -42,13 +42,13 @@ func TestMergeMaps(t *testing.T) { } func TestMergeMapsNil(t *testing.T) { - v := config.V(map[string]config.Value{ - "foo": config.V("bar"), + v := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), }) // Merge nil into v. { - out, err := Merge(v, config.NilValue) + out, err := Merge(v, dyn.NilValue) assert.NoError(t, err) assert.Equal(t, map[string]any{ "foo": "bar", @@ -57,7 +57,7 @@ func TestMergeMapsNil(t *testing.T) { // Merge v into nil. { - out, err := Merge(config.NilValue, v) + out, err := Merge(dyn.NilValue, v) assert.NoError(t, err) assert.Equal(t, map[string]any{ "foo": "bar", @@ -66,29 +66,29 @@ func TestMergeMapsNil(t *testing.T) { } func TestMergeMapsError(t *testing.T) { - v := config.V(map[string]config.Value{ - "foo": config.V("bar"), + v := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), }) - other := config.V("string") + other := dyn.V("string") // Merge a string into v. { out, err := Merge(v, other) assert.EqualError(t, err, "cannot merge map with string") - assert.Equal(t, config.NilValue, out) + assert.Equal(t, dyn.NilValue, out) } } func TestMergeSequences(t *testing.T) { - v1 := config.V([]config.Value{ - config.V("bar"), - config.V("baz"), + v1 := dyn.V([]dyn.Value{ + dyn.V("bar"), + dyn.V("baz"), }) - v2 := config.V([]config.Value{ - config.V("qux"), - config.V("foo"), + v2 := dyn.V([]dyn.Value{ + dyn.V("qux"), + dyn.V("foo"), }) // Merge v2 into v1. @@ -117,13 +117,13 @@ func TestMergeSequences(t *testing.T) { } func TestMergeSequencesNil(t *testing.T) { - v := config.V([]config.Value{ - config.V("bar"), + v := dyn.V([]dyn.Value{ + dyn.V("bar"), }) // Merge nil into v. { - out, err := Merge(v, config.NilValue) + out, err := Merge(v, dyn.NilValue) assert.NoError(t, err) assert.Equal(t, []any{ "bar", @@ -132,7 +132,7 @@ func TestMergeSequencesNil(t *testing.T) { // Merge v into nil. { - out, err := Merge(config.NilValue, v) + out, err := Merge(dyn.NilValue, v) assert.NoError(t, err) assert.Equal(t, []any{ "bar", @@ -141,23 +141,23 @@ func TestMergeSequencesNil(t *testing.T) { } func TestMergeSequencesError(t *testing.T) { - v := config.V([]config.Value{ - config.V("bar"), + v := dyn.V([]dyn.Value{ + dyn.V("bar"), }) - other := config.V("string") + other := dyn.V("string") // Merge a string into v. { out, err := Merge(v, other) assert.EqualError(t, err, "cannot merge sequence with string") - assert.Equal(t, config.NilValue, out) + assert.Equal(t, dyn.NilValue, out) } } func TestMergePrimitives(t *testing.T) { - v1 := config.V("bar") - v2 := config.V("baz") + v1 := dyn.V("bar") + v2 := dyn.V("baz") // Merge v2 into v1. 
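[Reviewer aside, not part of the patch] The merge semantics stated in the package comment above — nil yields the other value, map entries from the second argument take precedence, sequences concatenate — look like this in practice, assuming the post-rename libs/dyn/merge import path:

package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/merge"
)

func main() {
	base := dyn.V(map[string]dyn.Value{
		"foo":  dyn.V("bar"),
		"tags": dyn.V([]dyn.Value{dyn.V("a")}),
	})
	override := dyn.V(map[string]dyn.Value{
		"foo":  dyn.V("qux"),
		"tags": dyn.V([]dyn.Value{dyn.V("b")}),
	})

	out, err := merge.Merge(base, override)
	if err != nil {
		panic(err)
	}

	// Scalar entries from the second argument win; sequences are concatenated.
	fmt.Println(out.AsAny())
}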
{ @@ -175,33 +175,33 @@ func TestMergePrimitives(t *testing.T) { } func TestMergePrimitivesNil(t *testing.T) { - v := config.V("bar") + v := dyn.V("bar") // Merge nil into v. { - out, err := Merge(v, config.NilValue) + out, err := Merge(v, dyn.NilValue) assert.NoError(t, err) assert.Equal(t, "bar", out.AsAny()) } // Merge v into nil. { - out, err := Merge(config.NilValue, v) + out, err := Merge(dyn.NilValue, v) assert.NoError(t, err) assert.Equal(t, "bar", out.AsAny()) } } func TestMergePrimitivesError(t *testing.T) { - v := config.V("bar") - other := config.V(map[string]config.Value{ - "foo": config.V("bar"), + v := dyn.V("bar") + other := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), }) // Merge a map into v. { out, err := Merge(v, other) assert.EqualError(t, err, "cannot merge string with map") - assert.Equal(t, config.NilValue, out) + assert.Equal(t, dyn.NilValue, out) } } diff --git a/libs/config/path.go b/libs/dyn/path.go similarity index 99% rename from libs/config/path.go rename to libs/dyn/path.go index f1abf48ca9..bfd93dad5b 100644 --- a/libs/config/path.go +++ b/libs/dyn/path.go @@ -1,4 +1,4 @@ -package config +package dyn import ( "bytes" diff --git a/libs/config/path_string.go b/libs/dyn/path_string.go similarity index 99% rename from libs/config/path_string.go rename to libs/dyn/path_string.go index 9538ad27f1..0fa0c682d4 100644 --- a/libs/config/path_string.go +++ b/libs/dyn/path_string.go @@ -1,4 +1,4 @@ -package config +package dyn import ( "fmt" diff --git a/libs/config/path_string_test.go b/libs/dyn/path_string_test.go similarity index 96% rename from libs/config/path_string_test.go rename to libs/dyn/path_string_test.go index 89e645615f..9af394c6f1 100644 --- a/libs/config/path_string_test.go +++ b/libs/dyn/path_string_test.go @@ -1,10 +1,10 @@ -package config_test +package dyn_test import ( "fmt" "testing" - . "github.com/databricks/cli/libs/config" + . "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" ) diff --git a/libs/dyn/path_test.go b/libs/dyn/path_test.go new file mode 100644 index 0000000000..c4ea26c4aa --- /dev/null +++ b/libs/dyn/path_test.go @@ -0,0 +1,76 @@ +package dyn_test + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" +) + +func TestPathAppend(t *testing.T) { + p := dyn.NewPath(dyn.Key("foo")) + + // Single arg. + p1 := p.Append(dyn.Key("bar")) + assert.True(t, p1.Equal(dyn.NewPath(dyn.Key("foo"), dyn.Key("bar")))) + + // Multiple args. + p2 := p.Append(dyn.Key("bar"), dyn.Index(1)) + assert.True(t, p2.Equal(dyn.NewPath(dyn.Key("foo"), dyn.Key("bar"), dyn.Index(1)))) +} + +func TestPathJoin(t *testing.T) { + p := dyn.NewPath(dyn.Key("foo")) + + // Single arg. + p1 := p.Join(dyn.NewPath(dyn.Key("bar"))) + assert.True(t, p1.Equal(dyn.NewPath(dyn.Key("foo"), dyn.Key("bar")))) + + // Multiple args. 
+ p2 := p.Join(dyn.NewPath(dyn.Key("bar")), dyn.NewPath(dyn.Index(1))) + assert.True(t, p2.Equal(dyn.NewPath(dyn.Key("foo"), dyn.Key("bar"), dyn.Index(1)))) +} + +func TestPathEqualEmpty(t *testing.T) { + assert.True(t, dyn.EmptyPath.Equal(dyn.EmptyPath)) +} + +func TestPathEqual(t *testing.T) { + p1 := dyn.NewPath(dyn.Key("foo"), dyn.Index(1)) + p2 := dyn.NewPath(dyn.Key("bar"), dyn.Index(2)) + assert.False(t, p1.Equal(p2), "expected %q to not equal %q", p1, p2) + + p3 := dyn.NewPath(dyn.Key("foo"), dyn.Index(1)) + assert.True(t, p1.Equal(p3), "expected %q to equal %q", p1, p3) + + p4 := dyn.NewPath(dyn.Key("foo"), dyn.Index(1), dyn.Key("bar"), dyn.Index(2)) + assert.False(t, p1.Equal(p4), "expected %q to not equal %q", p1, p4) +} + +func TestPathHasPrefixEmpty(t *testing.T) { + empty := dyn.EmptyPath + nonEmpty := dyn.NewPath(dyn.Key("foo")) + assert.True(t, empty.HasPrefix(empty)) + assert.True(t, nonEmpty.HasPrefix(empty)) + assert.False(t, empty.HasPrefix(nonEmpty)) +} + +func TestPathHasPrefix(t *testing.T) { + p1 := dyn.NewPath(dyn.Key("foo"), dyn.Index(1)) + p2 := dyn.NewPath(dyn.Key("bar"), dyn.Index(2)) + assert.False(t, p1.HasPrefix(p2), "expected %q to not have prefix %q", p1, p2) + + p3 := dyn.NewPath(dyn.Key("foo")) + assert.True(t, p1.HasPrefix(p3), "expected %q to have prefix %q", p1, p3) +} + +func TestPathString(t *testing.T) { + p1 := dyn.NewPath(dyn.Key("foo"), dyn.Index(1)) + assert.Equal(t, "foo[1]", p1.String()) + + p2 := dyn.NewPath(dyn.Key("bar"), dyn.Index(2), dyn.Key("baz")) + assert.Equal(t, "bar[2].baz", p2.String()) + + p3 := dyn.NewPath(dyn.Key("foo"), dyn.Index(1), dyn.Key("bar"), dyn.Index(2), dyn.Key("baz")) + assert.Equal(t, "foo[1].bar[2].baz", p3.String()) +} diff --git a/libs/config/value.go b/libs/dyn/value.go similarity index 99% rename from libs/config/value.go rename to libs/dyn/value.go index fe0ced9bdf..9ac738f9cb 100644 --- a/libs/config/value.go +++ b/libs/dyn/value.go @@ -1,4 +1,4 @@ -package config +package dyn import ( "fmt" diff --git a/libs/config/value_test.go b/libs/dyn/value_test.go similarity index 55% rename from libs/config/value_test.go rename to libs/dyn/value_test.go index 6c8befc7ed..5fa45f15a5 100644 --- a/libs/config/value_test.go +++ b/libs/dyn/value_test.go @@ -1,35 +1,35 @@ -package config_test +package dyn_test import ( "testing" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" ) func TestValueIsAnchor(t *testing.T) { - var zero config.Value + var zero dyn.Value assert.False(t, zero.IsAnchor()) mark := zero.MarkAnchor() assert.True(t, mark.IsAnchor()) } func TestValueAsMap(t *testing.T) { - var zeroValue config.Value + var zeroValue dyn.Value m, ok := zeroValue.AsMap() assert.False(t, ok) assert.Nil(t, m) - var intValue = config.NewValue(1, config.Location{}) + var intValue = dyn.NewValue(1, dyn.Location{}) m, ok = intValue.AsMap() assert.False(t, ok) assert.Nil(t, m) - var mapValue = config.NewValue( - map[string]config.Value{ - "key": config.NewValue("value", config.Location{File: "file", Line: 1, Column: 2}), + var mapValue = dyn.NewValue( + map[string]dyn.Value{ + "key": dyn.NewValue("value", dyn.Location{File: "file", Line: 1, Column: 2}), }, - config.Location{File: "file", Line: 1, Column: 2}, + dyn.Location{File: "file", Line: 1, Column: 2}, ) m, ok = mapValue.AsMap() assert.True(t, ok) @@ -37,8 +37,8 @@ func TestValueAsMap(t *testing.T) { } func TestValueIsValid(t *testing.T) { - var zeroValue config.Value + var zeroValue dyn.Value assert.False(t, 
zeroValue.IsValid()) - var intValue = config.NewValue(1, config.Location{}) + var intValue = dyn.NewValue(1, dyn.Location{}) assert.True(t, intValue.IsValid()) } diff --git a/libs/config/walk.go b/libs/dyn/walk.go similarity index 99% rename from libs/config/walk.go rename to libs/dyn/walk.go index ce05833804..138816be6e 100644 --- a/libs/config/walk.go +++ b/libs/dyn/walk.go @@ -1,4 +1,4 @@ -package config +package dyn import "errors" diff --git a/libs/config/walk_test.go b/libs/dyn/walk_test.go similarity index 98% rename from libs/config/walk_test.go rename to libs/dyn/walk_test.go index 806ca256fd..1b94ad9027 100644 --- a/libs/config/walk_test.go +++ b/libs/dyn/walk_test.go @@ -1,10 +1,10 @@ -package config_test +package dyn_test import ( "errors" "testing" - . "github.com/databricks/cli/libs/config" + . "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/libs/config/yamlloader/loader.go b/libs/dyn/yamlloader/loader.go similarity index 61% rename from libs/config/yamlloader/loader.go rename to libs/dyn/yamlloader/loader.go index 6472c13731..899e1d7b8a 100644 --- a/libs/config/yamlloader/loader.go +++ b/libs/dyn/yamlloader/loader.go @@ -7,7 +7,7 @@ import ( "strings" "time" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" "gopkg.in/yaml.v3" ) @@ -15,7 +15,7 @@ type loader struct { path string } -func errorf(loc config.Location, format string, args ...interface{}) error { +func errorf(loc dyn.Location, format string, args ...interface{}) error { return fmt.Errorf("yaml (%s): %s", loc, fmt.Sprintf(format, args...)) } @@ -25,22 +25,22 @@ func newLoader(path string) *loader { } } -func (d *loader) location(node *yaml.Node) config.Location { - return config.Location{ +func (d *loader) location(node *yaml.Node) dyn.Location { + return dyn.Location{ File: d.path, Line: node.Line, Column: node.Column, } } -func (d *loader) load(node *yaml.Node) (config.Value, error) { - loc := config.Location{ +func (d *loader) load(node *yaml.Node) (dyn.Value, error) { + loc := dyn.Location{ File: d.path, Line: node.Line, Column: node.Column, } - var value config.Value + var value dyn.Value var err error switch node.Kind { @@ -55,7 +55,7 @@ func (d *loader) load(node *yaml.Node) (config.Value, error) { case yaml.AliasNode: value, err = d.loadAlias(node, loc) default: - return config.NilValue, errorf(loc, "unknown node kind: %v", node.Kind) + return dyn.NilValue, errorf(loc, "unknown node kind: %v", node.Kind) } if err != nil { @@ -71,35 +71,35 @@ func (d *loader) load(node *yaml.Node) (config.Value, error) { return value, nil } -func (d *loader) loadDocument(node *yaml.Node, loc config.Location) (config.Value, error) { +func (d *loader) loadDocument(node *yaml.Node, loc dyn.Location) (dyn.Value, error) { return d.load(node.Content[0]) } -func (d *loader) loadSequence(node *yaml.Node, loc config.Location) (config.Value, error) { - acc := make([]config.Value, len(node.Content)) +func (d *loader) loadSequence(node *yaml.Node, loc dyn.Location) (dyn.Value, error) { + acc := make([]dyn.Value, len(node.Content)) for i, n := range node.Content { v, err := d.load(n) if err != nil { - return config.NilValue, err + return dyn.NilValue, err } acc[i] = v } - return config.NewValue(acc, loc), nil + return dyn.NewValue(acc, loc), nil } -func (d *loader) loadMapping(node *yaml.Node, loc config.Location) (config.Value, error) { +func (d *loader) loadMapping(node *yaml.Node, loc dyn.Location) (dyn.Value, error) { var 
merge *yaml.Node - acc := make(map[string]config.Value) + acc := make(map[string]dyn.Value) for i := 0; i < len(node.Content); i += 2 { key := node.Content[i] val := node.Content[i+1] // Assert that keys are strings if key.Kind != yaml.ScalarNode { - return config.NilValue, errorf(loc, "key is not a scalar") + return dyn.NilValue, errorf(loc, "key is not a scalar") } st := key.ShortTag() @@ -113,19 +113,19 @@ func (d *loader) loadMapping(node *yaml.Node, loc config.Location) (config.Value merge = val continue default: - return config.NilValue, errorf(loc, "invalid key tag: %v", st) + return dyn.NilValue, errorf(loc, "invalid key tag: %v", st) } v, err := d.load(val) if err != nil { - return config.NilValue, err + return dyn.NilValue, err } acc[key.Value] = v } if merge == nil { - return config.NewValue(acc, loc), nil + return dyn.NewValue(acc, loc), nil } // Build location for the merge node. @@ -141,68 +141,68 @@ func (d *loader) loadMapping(node *yaml.Node, loc config.Location) (config.Value case yaml.AliasNode: mnodes = []*yaml.Node{merge} default: - return config.NilValue, merr + return dyn.NilValue, merr } // Build a sequence of values to merge. // The entries that we already accumulated have precedence. - var seq []map[string]config.Value + var seq []map[string]dyn.Value for _, n := range mnodes { v, err := d.load(n) if err != nil { - return config.NilValue, err + return dyn.NilValue, err } m, ok := v.AsMap() if !ok { - return config.NilValue, merr + return dyn.NilValue, merr } seq = append(seq, m) } // Append the accumulated entries to the sequence. seq = append(seq, acc) - out := make(map[string]config.Value) + out := make(map[string]dyn.Value) for _, m := range seq { for k, v := range m { out[k] = v } } - return config.NewValue(out, loc), nil + return dyn.NewValue(out, loc), nil } -func (d *loader) loadScalar(node *yaml.Node, loc config.Location) (config.Value, error) { +func (d *loader) loadScalar(node *yaml.Node, loc dyn.Location) (dyn.Value, error) { st := node.ShortTag() switch st { case "!!str": - return config.NewValue(node.Value, loc), nil + return dyn.NewValue(node.Value, loc), nil case "!!bool": switch strings.ToLower(node.Value) { case "true": - return config.NewValue(true, loc), nil + return dyn.NewValue(true, loc), nil case "false": - return config.NewValue(false, loc), nil + return dyn.NewValue(false, loc), nil default: - return config.NilValue, errorf(loc, "invalid bool value: %v", node.Value) + return dyn.NilValue, errorf(loc, "invalid bool value: %v", node.Value) } case "!!int": i64, err := strconv.ParseInt(node.Value, 10, 64) if err != nil { - return config.NilValue, errorf(loc, "invalid int value: %v", node.Value) + return dyn.NilValue, errorf(loc, "invalid int value: %v", node.Value) } // Use regular int type instead of int64 if possible. 
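A minimal end-to-end sketch of this loader, assuming only the exported yamlloader.LoadYAML entry point shown further below and the Get/AsAny/Location accessors on dyn.Value (strings.NewReader stands in for a real file):

    v, err := yamlloader.LoadYAML("inline.yml", strings.NewReader("foo: bar\ncount: 42\npi: 3.14\n"))
    if err != nil {
        panic(err)
    }
    // Scalars are converted according to their YAML tags:
    // v.Get("foo").AsAny()   == "bar"
    // v.Get("count").AsAny() == 42    (plain int, since it fits in 32 bits)
    // v.Get("pi").AsAny()    == 3.14
    // Every value also carries its file/line/column via v.Get("foo").Location().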
if i64 >= math.MinInt32 && i64 <= math.MaxInt32 { - return config.NewValue(int(i64), loc), nil + return dyn.NewValue(int(i64), loc), nil } - return config.NewValue(i64, loc), nil + return dyn.NewValue(i64, loc), nil case "!!float": f64, err := strconv.ParseFloat(node.Value, 64) if err != nil { - return config.NilValue, errorf(loc, "invalid float value: %v", node.Value) + return dyn.NilValue, errorf(loc, "invalid float value: %v", node.Value) } - return config.NewValue(f64, loc), nil + return dyn.NewValue(f64, loc), nil case "!!null": - return config.NewValue(nil, loc), nil + return dyn.NewValue(nil, loc), nil case "!!timestamp": // Try a couple of layouts for _, layout := range []string{ @@ -213,15 +213,15 @@ func (d *loader) loadScalar(node *yaml.Node, loc config.Location) (config.Value, } { t, terr := time.Parse(layout, node.Value) if terr == nil { - return config.NewValue(t, loc), nil + return dyn.NewValue(t, loc), nil } } - return config.NilValue, errorf(loc, "invalid timestamp value: %v", node.Value) + return dyn.NilValue, errorf(loc, "invalid timestamp value: %v", node.Value) default: - return config.NilValue, errorf(loc, "unknown tag: %v", st) + return dyn.NilValue, errorf(loc, "unknown tag: %v", st) } } -func (d *loader) loadAlias(node *yaml.Node, loc config.Location) (config.Value, error) { +func (d *loader) loadAlias(node *yaml.Node, loc dyn.Location) (dyn.Value, error) { return d.load(node.Alias) } diff --git a/libs/config/yamlloader/testdata/anchor_01.yml b/libs/dyn/yamlloader/testdata/anchor_01.yml similarity index 100% rename from libs/config/yamlloader/testdata/anchor_01.yml rename to libs/dyn/yamlloader/testdata/anchor_01.yml diff --git a/libs/config/yamlloader/testdata/anchor_02.yml b/libs/dyn/yamlloader/testdata/anchor_02.yml similarity index 100% rename from libs/config/yamlloader/testdata/anchor_02.yml rename to libs/dyn/yamlloader/testdata/anchor_02.yml diff --git a/libs/config/yamlloader/testdata/anchor_03.yml b/libs/dyn/yamlloader/testdata/anchor_03.yml similarity index 100% rename from libs/config/yamlloader/testdata/anchor_03.yml rename to libs/dyn/yamlloader/testdata/anchor_03.yml diff --git a/libs/config/yamlloader/testdata/anchor_04.yml b/libs/dyn/yamlloader/testdata/anchor_04.yml similarity index 100% rename from libs/config/yamlloader/testdata/anchor_04.yml rename to libs/dyn/yamlloader/testdata/anchor_04.yml diff --git a/libs/config/yamlloader/testdata/anchor_05.yml b/libs/dyn/yamlloader/testdata/anchor_05.yml similarity index 100% rename from libs/config/yamlloader/testdata/anchor_05.yml rename to libs/dyn/yamlloader/testdata/anchor_05.yml diff --git a/libs/config/yamlloader/testdata/anchor_06.yml b/libs/dyn/yamlloader/testdata/anchor_06.yml similarity index 100% rename from libs/config/yamlloader/testdata/anchor_06.yml rename to libs/dyn/yamlloader/testdata/anchor_06.yml diff --git a/libs/config/yamlloader/testdata/anchor_07.yml b/libs/dyn/yamlloader/testdata/anchor_07.yml similarity index 100% rename from libs/config/yamlloader/testdata/anchor_07.yml rename to libs/dyn/yamlloader/testdata/anchor_07.yml diff --git a/libs/config/yamlloader/testdata/anchor_08.yml b/libs/dyn/yamlloader/testdata/anchor_08.yml similarity index 100% rename from libs/config/yamlloader/testdata/anchor_08.yml rename to libs/dyn/yamlloader/testdata/anchor_08.yml diff --git a/libs/config/yamlloader/testdata/empty.yml b/libs/dyn/yamlloader/testdata/empty.yml similarity index 100% rename from libs/config/yamlloader/testdata/empty.yml rename to libs/dyn/yamlloader/testdata/empty.yml diff 
--git a/libs/config/yamlloader/testdata/error_01.yml b/libs/dyn/yamlloader/testdata/error_01.yml similarity index 100% rename from libs/config/yamlloader/testdata/error_01.yml rename to libs/dyn/yamlloader/testdata/error_01.yml diff --git a/libs/config/yamlloader/testdata/error_02.yml b/libs/dyn/yamlloader/testdata/error_02.yml similarity index 100% rename from libs/config/yamlloader/testdata/error_02.yml rename to libs/dyn/yamlloader/testdata/error_02.yml diff --git a/libs/config/yamlloader/testdata/error_03.yml b/libs/dyn/yamlloader/testdata/error_03.yml similarity index 100% rename from libs/config/yamlloader/testdata/error_03.yml rename to libs/dyn/yamlloader/testdata/error_03.yml diff --git a/libs/config/yamlloader/testdata/mix_01.yml b/libs/dyn/yamlloader/testdata/mix_01.yml similarity index 100% rename from libs/config/yamlloader/testdata/mix_01.yml rename to libs/dyn/yamlloader/testdata/mix_01.yml diff --git a/libs/config/yamlloader/testdata/mix_02.yml b/libs/dyn/yamlloader/testdata/mix_02.yml similarity index 100% rename from libs/config/yamlloader/testdata/mix_02.yml rename to libs/dyn/yamlloader/testdata/mix_02.yml diff --git a/libs/config/yamlloader/yaml.go b/libs/dyn/yamlloader/yaml.go similarity index 56% rename from libs/config/yamlloader/yaml.go rename to libs/dyn/yamlloader/yaml.go index a3cc7284f0..a18324ffad 100644 --- a/libs/config/yamlloader/yaml.go +++ b/libs/dyn/yamlloader/yaml.go @@ -3,19 +3,19 @@ package yamlloader import ( "io" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" "gopkg.in/yaml.v3" ) -func LoadYAML(path string, r io.Reader) (config.Value, error) { +func LoadYAML(path string, r io.Reader) (dyn.Value, error) { var node yaml.Node dec := yaml.NewDecoder(r) err := dec.Decode(&node) if err != nil { if err == io.EOF { - return config.NilValue, nil + return dyn.NilValue, nil } - return config.NilValue, err + return dyn.NilValue, err } return newLoader(path).load(&node) diff --git a/libs/config/yamlloader/yaml_anchor_test.go b/libs/dyn/yamlloader/yaml_anchor_test.go similarity index 61% rename from libs/config/yamlloader/yaml_anchor_test.go rename to libs/dyn/yamlloader/yaml_anchor_test.go index a8b666868c..05beb5401d 100644 --- a/libs/config/yamlloader/yaml_anchor_test.go +++ b/libs/dyn/yamlloader/yaml_anchor_test.go @@ -3,14 +3,14 @@ package yamlloader_test import ( "testing" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" ) func TestYAMLAnchor01(t *testing.T) { file := "testdata/anchor_01.yml" self := loadYAML(t, file) - assert.NotEqual(t, config.NilValue, self) + assert.NotEqual(t, dyn.NilValue, self) assert.True(t, self.Get("defaults").IsAnchor()) assert.False(t, self.Get("shirt1").IsAnchor()) @@ -18,31 +18,31 @@ func TestYAMLAnchor01(t *testing.T) { pattern := self.Get("shirt1").Get("pattern") assert.Equal(t, "striped", pattern.AsAny()) - assert.Equal(t, config.Location{File: file, Line: 8, Column: 12}, pattern.Location()) + assert.Equal(t, dyn.Location{File: file, Line: 8, Column: 12}, pattern.Location()) } func TestYAMLAnchor02(t *testing.T) { file := "testdata/anchor_02.yml" self := loadYAML(t, file) - assert.NotEqual(t, config.NilValue, self) + assert.NotEqual(t, dyn.NilValue, self) color := self.Get("shirt").Get("color") assert.Equal(t, "red", color.AsAny()) - assert.Equal(t, config.Location{File: file, Line: 4, Column: 10}, color.Location()) + assert.Equal(t, dyn.Location{File: file, Line: 4, Column: 10}, color.Location()) primary := 
self.Get("shirt").Get("primary") assert.Equal(t, "cotton", primary.AsAny()) - assert.Equal(t, config.Location{File: file, Line: 8, Column: 12}, primary.Location()) + assert.Equal(t, dyn.Location{File: file, Line: 8, Column: 12}, primary.Location()) pattern := self.Get("shirt").Get("pattern") assert.Equal(t, "striped", pattern.AsAny()) - assert.Equal(t, config.Location{File: file, Line: 13, Column: 12}, pattern.Location()) + assert.Equal(t, dyn.Location{File: file, Line: 13, Column: 12}, pattern.Location()) } func TestYAMLAnchor03(t *testing.T) { file := "testdata/anchor_03.yml" self := loadYAML(t, file) - assert.NotEqual(t, config.NilValue, self) + assert.NotEqual(t, dyn.NilValue, self) // Assert the override took place. blue := self.Get("shirt").Get("color") @@ -55,63 +55,63 @@ func TestYAMLAnchor03(t *testing.T) { func TestYAMLAnchor04(t *testing.T) { file := "testdata/anchor_04.yml" self := loadYAML(t, file) - assert.NotEqual(t, config.NilValue, self) + assert.NotEqual(t, dyn.NilValue, self) p1 := self.Get("person1").Get("address").Get("city") assert.Equal(t, "San Francisco", p1.AsAny()) - assert.Equal(t, config.Location{File: file, Line: 4, Column: 9}, p1.Location()) + assert.Equal(t, dyn.Location{File: file, Line: 4, Column: 9}, p1.Location()) p2 := self.Get("person2").Get("address").Get("city") assert.Equal(t, "Los Angeles", p2.AsAny()) - assert.Equal(t, config.Location{File: file, Line: 16, Column: 11}, p2.Location()) + assert.Equal(t, dyn.Location{File: file, Line: 16, Column: 11}, p2.Location()) } func TestYAMLAnchor05(t *testing.T) { file := "testdata/anchor_05.yml" self := loadYAML(t, file) - assert.NotEqual(t, config.NilValue, self) + assert.NotEqual(t, dyn.NilValue, self) features := self.Get("phone1").Get("features") assert.Equal(t, "wifi", features.Index(0).AsAny()) - assert.Equal(t, config.Location{File: file, Line: 4, Column: 5}, features.Index(0).Location()) + assert.Equal(t, dyn.Location{File: file, Line: 4, Column: 5}, features.Index(0).Location()) assert.Equal(t, "bluetooth", features.Index(1).AsAny()) - assert.Equal(t, config.Location{File: file, Line: 5, Column: 5}, features.Index(1).Location()) + assert.Equal(t, dyn.Location{File: file, Line: 5, Column: 5}, features.Index(1).Location()) } func TestYAMLAnchor06(t *testing.T) { file := "testdata/anchor_06.yml" self := loadYAML(t, file) - assert.NotEqual(t, config.NilValue, self) + assert.NotEqual(t, dyn.NilValue, self) greeting := self.Get("greeting1") assert.Equal(t, "Hello, World!", greeting.AsAny()) - assert.Equal(t, config.Location{File: file, Line: 2, Column: 16}, greeting.Location()) + assert.Equal(t, dyn.Location{File: file, Line: 2, Column: 16}, greeting.Location()) } func TestYAMLAnchor07(t *testing.T) { file := "testdata/anchor_07.yml" self := loadYAML(t, file) - assert.NotEqual(t, config.NilValue, self) + assert.NotEqual(t, dyn.NilValue, self) name := self.Get("person1").Get("name") assert.Equal(t, "Alice", name.AsAny()) - assert.Equal(t, config.Location{File: file, Line: 5, Column: 9}, name.Location()) + assert.Equal(t, dyn.Location{File: file, Line: 5, Column: 9}, name.Location()) age := self.Get("person1").Get("age") assert.Equal(t, 25, age.AsAny()) - assert.Equal(t, config.Location{File: file, Line: 2, Column: 13}, age.Location()) + assert.Equal(t, dyn.Location{File: file, Line: 2, Column: 13}, age.Location()) } func TestYAMLAnchor08(t *testing.T) { file := "testdata/anchor_08.yml" self := loadYAML(t, file) - assert.NotEqual(t, config.NilValue, self) + assert.NotEqual(t, dyn.NilValue, self) username := 
self.Get("user1").Get("username") assert.Equal(t, "user1", username.AsAny()) - assert.Equal(t, config.Location{File: file, Line: 5, Column: 13}, username.Location()) + assert.Equal(t, dyn.Location{File: file, Line: 5, Column: 13}, username.Location()) active := self.Get("user1").Get("active") assert.Equal(t, true, active.AsAny()) - assert.Equal(t, config.Location{File: file, Line: 2, Column: 11}, active.Location()) + assert.Equal(t, dyn.Location{File: file, Line: 2, Column: 11}, active.Location()) } diff --git a/libs/config/yamlloader/yaml_error_test.go b/libs/dyn/yamlloader/yaml_error_test.go similarity index 94% rename from libs/config/yamlloader/yaml_error_test.go rename to libs/dyn/yamlloader/yaml_error_test.go index 2685042fd9..11c444ad36 100644 --- a/libs/config/yamlloader/yaml_error_test.go +++ b/libs/dyn/yamlloader/yaml_error_test.go @@ -5,7 +5,7 @@ import ( "os" "testing" - "github.com/databricks/cli/libs/config/yamlloader" + "github.com/databricks/cli/libs/dyn/yamlloader" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gopkg.in/yaml.v3" diff --git a/libs/config/yamlloader/yaml_mix_test.go b/libs/dyn/yamlloader/yaml_mix_test.go similarity index 79% rename from libs/config/yamlloader/yaml_mix_test.go rename to libs/dyn/yamlloader/yaml_mix_test.go index 9cd0753de5..307b93dbf3 100644 --- a/libs/config/yamlloader/yaml_mix_test.go +++ b/libs/dyn/yamlloader/yaml_mix_test.go @@ -3,14 +3,14 @@ package yamlloader_test import ( "testing" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" ) func TestYAMLMix01(t *testing.T) { file := "testdata/mix_01.yml" self := loadYAML(t, file) - assert.NotEqual(t, config.NilValue, self) + assert.NotEqual(t, dyn.NilValue, self) assert.True(t, self.Get("base_address").IsAnchor()) assert.False(t, self.Get("office_address").IsAnchor()) @@ -19,7 +19,7 @@ func TestYAMLMix01(t *testing.T) { func TestYAMLMix02(t *testing.T) { file := "testdata/mix_02.yml" self := loadYAML(t, file) - assert.NotEqual(t, config.NilValue, self) + assert.NotEqual(t, dyn.NilValue, self) assert.True(t, self.Get("base_colors").IsAnchor()) assert.False(t, self.Get("theme").IsAnchor()) diff --git a/libs/config/yamlloader/yaml_test.go b/libs/dyn/yamlloader/yaml_test.go similarity index 76% rename from libs/config/yamlloader/yaml_test.go rename to libs/dyn/yamlloader/yaml_test.go index ab61f0718f..14269feeef 100644 --- a/libs/config/yamlloader/yaml_test.go +++ b/libs/dyn/yamlloader/yaml_test.go @@ -5,14 +5,14 @@ import ( "os" "testing" - "github.com/databricks/cli/libs/config" - "github.com/databricks/cli/libs/config/yamlloader" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/yamlloader" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gopkg.in/yaml.v3" ) -func loadYAML(t *testing.T, path string) config.Value { +func loadYAML(t *testing.T, path string) dyn.Value { input, err := os.ReadFile(path) require.NoError(t, err) @@ -31,5 +31,5 @@ func loadYAML(t *testing.T, path string) config.Value { func TestYAMLEmpty(t *testing.T) { self := loadYAML(t, "testdata/empty.yml") - assert.Equal(t, config.NilValue, self) + assert.Equal(t, dyn.NilValue, self) } From 8fc3ab5e9e422f8fb078b321fd17f010739bedc8 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 22 Dec 2023 14:36:36 +0100 Subject: [PATCH 026/104] Finalize package rename --- bundle/config/mutator/environments_compat.go | 11 +++--- .../expand_pipeline_glob_paths_test.go | 2 +- 
bundle/config/mutator/merge_job_clusters.go | 26 +++++++------- bundle/config/mutator/merge_job_tasks.go | 26 +++++++------- .../config/mutator/merge_pipeline_clusters.go | 34 +++++++++---------- bundle/config/mutator/rewrite_sync_paths.go | 2 +- .../config/mutator/rewrite_sync_paths_test.go | 2 +- bundle/config/mutator/translate_paths_test.go | 2 +- bundle/config/paths/paths.go | 6 ++-- bundle/config/root.go | 28 +++++++-------- bundle/internal/bundletest/location.go | 12 +++---- 11 files changed, 75 insertions(+), 76 deletions(-) diff --git a/bundle/config/mutator/environments_compat.go b/bundle/config/mutator/environments_compat.go index f6daed9c6e..f6d04d4928 100644 --- a/bundle/config/mutator/environments_compat.go +++ b/bundle/config/mutator/environments_compat.go @@ -5,8 +5,7 @@ import ( "fmt" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/libs/config" - cv "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" ) type environmentsToTargets struct{} @@ -27,13 +26,13 @@ func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) err } // The "environments" key is set; validate and rewrite it to "targets". - return b.Config.Mutate(func(v config.Value) (config.Value, error) { + return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { environments := v.Get("environments") targets := v.Get("targets") // Return an error if both "environments" and "targets" are set. - if environments != cv.NilValue && targets != cv.NilValue { - return cv.NilValue, fmt.Errorf( + if environments != dyn.NilValue && targets != dyn.NilValue { + return dyn.NilValue, fmt.Errorf( "both 'environments' and 'targets' are specified; only 'targets' should be used. "+ "Instance of 'environments' found at %s.", environments.Location().String(), @@ -41,7 +40,7 @@ func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) err } // Rewrite "environments" to "targets". - if environments != cv.NilValue && targets == cv.NilValue { + if environments != dyn.NilValue && targets == dyn.NilValue { return v.SetKey("targets", environments), nil } diff --git a/bundle/config/mutator/expand_pipeline_glob_paths_test.go b/bundle/config/mutator/expand_pipeline_glob_paths_test.go index 2dfdd464c2..0f84c9fcb8 100644 --- a/bundle/config/mutator/expand_pipeline_glob_paths_test.go +++ b/bundle/config/mutator/expand_pipeline_glob_paths_test.go @@ -10,7 +10,7 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/internal/bundletest" - cv "github.com/databricks/cli/libs/config" + cv "github.com/databricks/cli/libs/dyn" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/stretchr/testify/require" diff --git a/bundle/config/mutator/merge_job_clusters.go b/bundle/config/mutator/merge_job_clusters.go index d4b095e32f..21dfc9cbb9 100644 --- a/bundle/config/mutator/merge_job_clusters.go +++ b/bundle/config/mutator/merge_job_clusters.go @@ -4,8 +4,8 @@ import ( "context" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/libs/config" - "github.com/databricks/cli/libs/config/merge" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/merge" ) type mergeJobClusters struct{} @@ -22,7 +22,7 @@ func (m *mergeJobClusters) Name() string { // The job clusters field is a slice, and as such, overrides are appended to it. 
// We can identify a job cluster by its key, however, so we can use this key // to figure out which definitions are actually overrides and merge them. -func (m *mergeJobClusters) mergeJobClusters(v config.Value) (config.Value, error) { +func (m *mergeJobClusters) mergeJobClusters(v dyn.Value) (dyn.Value, error) { // We know the type of this value is a sequence. // For additional defence, return self if it is not. clusters, ok := v.AsSequence() @@ -30,7 +30,7 @@ func (m *mergeJobClusters) mergeJobClusters(v config.Value) (config.Value, error return v, nil } - seen := make(map[string]config.Value, len(clusters)) + seen := make(map[string]dyn.Value, len(clusters)) keys := make([]string, 0, len(clusters)) // Target overrides are always appended, so we can iterate in natural order to @@ -40,7 +40,7 @@ func (m *mergeJobClusters) mergeJobClusters(v config.Value) (config.Value, error // Get task key if present. kv := clusters[i].Get("job_cluster_key") - if kv.Kind() == config.KindString { + if kv.Kind() == dyn.KindString { key = kv.MustString() } @@ -63,21 +63,21 @@ func (m *mergeJobClusters) mergeJobClusters(v config.Value) (config.Value, error } // Gather resulting clusters in natural order. - out := make([]config.Value, 0, len(keys)) + out := make([]dyn.Value, 0, len(keys)) for _, key := range keys { out = append(out, seen[key]) } - return config.NewValue(out, v.Location()), nil + return dyn.NewValue(out, v.Location()), nil } -func (m *mergeJobClusters) foreachJob(v config.Value) (config.Value, error) { +func (m *mergeJobClusters) foreachJob(v dyn.Value) (dyn.Value, error) { jobs, ok := v.AsMap() if !ok { return v, nil } - out := make(map[string]config.Value) + out := make(map[string]dyn.Value) for key, job := range jobs { var err error out[key], err = job.Transform("job_clusters", m.mergeJobClusters) @@ -86,19 +86,19 @@ func (m *mergeJobClusters) foreachJob(v config.Value) (config.Value, error) { } } - return config.NewValue(out, v.Location()), nil + return dyn.NewValue(out, v.Location()), nil } func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) error { - return b.Config.Mutate(func(v config.Value) (config.Value, error) { - if v == config.NilValue { + return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + if v == dyn.NilValue { return v, nil } nv, err := v.Transform("resources.jobs", m.foreachJob) // It is not a problem if the pipelines key is not set. - if config.IsNoSuchKeyError(err) { + if dyn.IsNoSuchKeyError(err) { return v, nil } diff --git a/bundle/config/mutator/merge_job_tasks.go b/bundle/config/mutator/merge_job_tasks.go index 47231da3ec..f094bc65bc 100644 --- a/bundle/config/mutator/merge_job_tasks.go +++ b/bundle/config/mutator/merge_job_tasks.go @@ -4,8 +4,8 @@ import ( "context" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/libs/config" - "github.com/databricks/cli/libs/config/merge" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/merge" ) type mergeJobTasks struct{} @@ -22,7 +22,7 @@ func (m *mergeJobTasks) Name() string { // The tasks field is a slice, and as such, overrides are appended to it. // We can identify a task by its task key, however, so we can use this key // to figure out which definitions are actually overrides and merge them. -func (m *mergeJobTasks) mergeJobTasks(v config.Value) (config.Value, error) { +func (m *mergeJobTasks) mergeJobTasks(v dyn.Value) (dyn.Value, error) { // We know the type of this value is a sequence. // For additional defence, return self if it is not. 
tasks, ok := v.AsSequence() @@ -30,7 +30,7 @@ func (m *mergeJobTasks) mergeJobTasks(v config.Value) (config.Value, error) { return v, nil } - seen := make(map[string]config.Value, len(tasks)) + seen := make(map[string]dyn.Value, len(tasks)) keys := make([]string, 0, len(tasks)) // Target overrides are always appended, so we can iterate in natural order to @@ -40,7 +40,7 @@ func (m *mergeJobTasks) mergeJobTasks(v config.Value) (config.Value, error) { // Get task key if present. kv := tasks[i].Get("task_key") - if kv.Kind() == config.KindString { + if kv.Kind() == dyn.KindString { key = kv.MustString() } @@ -63,22 +63,22 @@ func (m *mergeJobTasks) mergeJobTasks(v config.Value) (config.Value, error) { } // Gather resulting clusters in natural order. - out := make([]config.Value, 0, len(keys)) + out := make([]dyn.Value, 0, len(keys)) for _, key := range keys { out = append(out, seen[key]) } - return config.NewValue(out, v.Location()), nil + return dyn.NewValue(out, v.Location()), nil } -func (m *mergeJobTasks) foreachJob(v config.Value) (config.Value, error) { +func (m *mergeJobTasks) foreachJob(v dyn.Value) (dyn.Value, error) { jobs, ok := v.AsMap() if !ok { return v, nil } - out := make(map[string]config.Value) + out := make(map[string]dyn.Value) for key, job := range jobs { var err error out[key], err = job.Transform("tasks", m.mergeJobTasks) @@ -87,19 +87,19 @@ func (m *mergeJobTasks) foreachJob(v config.Value) (config.Value, error) { } } - return config.NewValue(out, v.Location()), nil + return dyn.NewValue(out, v.Location()), nil } func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) error { - return b.Config.Mutate(func(v config.Value) (config.Value, error) { - if v == config.NilValue { + return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + if v == dyn.NilValue { return v, nil } nv, err := v.Transform("resources.jobs", m.foreachJob) // It is not a problem if the pipelines key is not set. - if config.IsNoSuchKeyError(err) { + if dyn.IsNoSuchKeyError(err) { return v, nil } diff --git a/bundle/config/mutator/merge_pipeline_clusters.go b/bundle/config/mutator/merge_pipeline_clusters.go index fb4e1fba2e..2d384078e2 100644 --- a/bundle/config/mutator/merge_pipeline_clusters.go +++ b/bundle/config/mutator/merge_pipeline_clusters.go @@ -5,8 +5,8 @@ import ( "strings" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/libs/config" - "github.com/databricks/cli/libs/config/merge" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/merge" ) type mergePipelineClusters struct{} @@ -19,13 +19,13 @@ func (m *mergePipelineClusters) Name() string { return "MergePipelineClusters" } -func (m *mergePipelineClusters) clusterLabel(cluster config.Value) (label string) { +func (m *mergePipelineClusters) clusterLabel(cluster dyn.Value) (label string) { v := cluster.Get("label") - if v == config.NilValue { + if v == dyn.NilValue { return "default" } - if v.Kind() != config.KindString { + if v.Kind() != dyn.KindString { panic("cluster label must be a string") } @@ -39,7 +39,7 @@ func (m *mergePipelineClusters) clusterLabel(cluster config.Value) (label string // // Note: the cluster label is optional and defaults to 'default'. // We therefore ALSO merge all clusters without a label. -func (m *mergePipelineClusters) mergeClustersForPipeline(v config.Value) (config.Value, error) { +func (m *mergePipelineClusters) mergeClustersForPipeline(v dyn.Value) (dyn.Value, error) { // We know the type of this value is a sequence. 
// For additional defence, return self if it is not. clusters, ok := v.AsSequence() @@ -47,7 +47,7 @@ func (m *mergePipelineClusters) mergeClustersForPipeline(v config.Value) (config return v, nil } - seen := make(map[string]config.Value, len(clusters)) + seen := make(map[string]dyn.Value, len(clusters)) labels := make([]string, 0, len(clusters)) // Target overrides are always appended, so we can iterate in natural order to @@ -72,26 +72,26 @@ func (m *mergePipelineClusters) mergeClustersForPipeline(v config.Value) (config } // Gather resulting clusters in natural order. - out := make([]config.Value, 0, len(labels)) + out := make([]dyn.Value, 0, len(labels)) for _, label := range labels { // Overwrite the label with the normalized version. - nv, err := seen[label].Set("label", config.V(label)) + nv, err := seen[label].Set("label", dyn.V(label)) if err != nil { - return config.InvalidValue, err + return dyn.InvalidValue, err } out = append(out, nv) } - return config.NewValue(out, v.Location()), nil + return dyn.NewValue(out, v.Location()), nil } -func (m *mergePipelineClusters) foreachPipeline(v config.Value) (config.Value, error) { +func (m *mergePipelineClusters) foreachPipeline(v dyn.Value) (dyn.Value, error) { pipelines, ok := v.AsMap() if !ok { return v, nil } - out := make(map[string]config.Value) + out := make(map[string]dyn.Value) for key, pipeline := range pipelines { var err error out[key], err = pipeline.Transform("clusters", m.mergeClustersForPipeline) @@ -100,19 +100,19 @@ func (m *mergePipelineClusters) foreachPipeline(v config.Value) (config.Value, e } } - return config.NewValue(out, v.Location()), nil + return dyn.NewValue(out, v.Location()), nil } func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) error { - return b.Config.Mutate(func(v config.Value) (config.Value, error) { - if v == config.NilValue { + return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + if v == dyn.NilValue { return v, nil } nv, err := v.Transform("resources.pipelines", m.foreachPipeline) // It is not a problem if the pipelines key is not set. 
- if config.IsNoSuchKeyError(err) { + if dyn.IsNoSuchKeyError(err) { return v, nil } diff --git a/bundle/config/mutator/rewrite_sync_paths.go b/bundle/config/mutator/rewrite_sync_paths.go index 351a65fb62..9b578d567d 100644 --- a/bundle/config/mutator/rewrite_sync_paths.go +++ b/bundle/config/mutator/rewrite_sync_paths.go @@ -8,7 +8,7 @@ import ( "github.com/databricks/cli/bundle" - cv "github.com/databricks/cli/libs/config" + cv "github.com/databricks/cli/libs/dyn" ) type rewriteSyncPaths struct{} diff --git a/bundle/config/mutator/rewrite_sync_paths_test.go b/bundle/config/mutator/rewrite_sync_paths_test.go index 91c8c05592..e4a6f86f49 100644 --- a/bundle/config/mutator/rewrite_sync_paths_test.go +++ b/bundle/config/mutator/rewrite_sync_paths_test.go @@ -8,7 +8,7 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/internal/bundletest" - cv "github.com/databricks/cli/libs/config" + cv "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" ) diff --git a/bundle/config/mutator/translate_paths_test.go b/bundle/config/mutator/translate_paths_test.go index 65d64cca50..3541d7c4b8 100644 --- a/bundle/config/mutator/translate_paths_test.go +++ b/bundle/config/mutator/translate_paths_test.go @@ -11,7 +11,7 @@ import ( "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/internal/bundletest" - cv "github.com/databricks/cli/libs/config" + cv "github.com/databricks/cli/libs/dyn" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/pipelines" diff --git a/bundle/config/paths/paths.go b/bundle/config/paths/paths.go index 58fb9c14c4..307f61eff0 100644 --- a/bundle/config/paths/paths.go +++ b/bundle/config/paths/paths.go @@ -4,7 +4,7 @@ import ( "fmt" "path/filepath" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" ) type Paths struct { @@ -12,9 +12,9 @@ type Paths struct { // the definition of this resource. ConfigFilePath string `json:"-" bundle:"readonly"` - // DynamicValue stores the [config.Value] of the containing struct. + // DynamicValue stores the [dyn.Value] of the containing struct. // This assumes that this struct is always embedded. 
- DynamicValue config.Value + DynamicValue dyn.Value } func (p *Paths) ConfigureConfigFilePath() { diff --git a/bundle/config/root.go b/bundle/config/root.go index 7e216a180e..b2428f16ef 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -9,16 +9,16 @@ import ( "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/config/variable" - "github.com/databricks/cli/libs/config" - "github.com/databricks/cli/libs/config/convert" - "github.com/databricks/cli/libs/config/merge" - "github.com/databricks/cli/libs/config/yamlloader" "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/convert" + "github.com/databricks/cli/libs/dyn/merge" + "github.com/databricks/cli/libs/dyn/yamlloader" "github.com/databricks/databricks-sdk-go/service/jobs" ) type Root struct { - value config.Value + value dyn.Value diags diag.Diagnostics depth int @@ -112,7 +112,7 @@ func (r *Root) initializeValue() { return } - nv, err := convert.FromTyped(r, config.NilValue) + nv, err := convert.FromTyped(r, dyn.NilValue) if err != nil { panic(err) } @@ -120,7 +120,7 @@ func (r *Root) initializeValue() { r.value = nv } -func (r *Root) toTyped(v config.Value) error { +func (r *Root) toTyped(v dyn.Value) error { // Hack: restore state; it may be cleared by [ToTyped] if // the configuration equals nil (happens in tests). value := r.value @@ -144,7 +144,7 @@ func (r *Root) toTyped(v config.Value) error { return nil } -func (r *Root) Mutate(fn func(config.Value) (config.Value, error)) error { +func (r *Root) Mutate(fn func(dyn.Value) (dyn.Value, error)) error { r.initializeValue() nv, err := fn(r.value) if err != nil { @@ -276,11 +276,11 @@ func (r *Root) Merge(other *Root) error { } func (r *Root) MergeTargetOverrides(name string) error { - var tmp config.Value + var tmp dyn.Value var err error target := r.value.Get("targets").Get(name) - if target == config.NilValue { + if target == dyn.NilValue { return nil } @@ -294,10 +294,10 @@ func (r *Root) MergeTargetOverrides(name string) error { return nil } - if mode := target.Get("mode"); mode != config.NilValue { + if mode := target.Get("mode"); mode != dyn.NilValue { bundle := r.value.Get("bundle") - if bundle == config.NilValue { - bundle = config.NewValue(map[string]config.Value{}, config.Location{}) + if bundle == dyn.NilValue { + bundle = dyn.NewValue(map[string]dyn.Value{}, dyn.Location{}) } bundle.MustMap()["mode"] = mode r.value.MustMap()["bundle"] = bundle @@ -314,7 +314,7 @@ func (r *Root) MergeTargetOverrides(name string) error { // The "run_as" field must be overwritten if set, not merged. // Otherwise we end up with a merged version where both the // "user_name" and "service_principal_name" fields are set. - if runAs := target.Get("run_as"); runAs != config.NilValue { + if runAs := target.Get("run_as"); runAs != dyn.NilValue { r.value.MustMap()["run_as"] = runAs // Clear existing field to convert.ToTyped() merging // the new value with the existing value. diff --git a/bundle/internal/bundletest/location.go b/bundle/internal/bundletest/location.go index 1e6f52bb49..fb8bc81315 100644 --- a/bundle/internal/bundletest/location.go +++ b/bundle/internal/bundletest/location.go @@ -2,18 +2,18 @@ package bundletest import ( "github.com/databricks/cli/bundle" - "github.com/databricks/cli/libs/config" + "github.com/databricks/cli/libs/dyn" ) // SetLocation sets the location of all values in the bundle to the given path. 
// This is useful for testing where we need to associate configuration // with the path it is loaded from. -func SetLocation(b *bundle.Bundle, pathPrefix config.Path, filePath string) { - b.Config.Mutate(func(root config.Value) (config.Value, error) { - return config.Walk(root, func(p config.Path, v config.Value) (config.Value, error) { +func SetLocation(b *bundle.Bundle, pathPrefix dyn.Path, filePath string) { + b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) { + return dyn.Walk(root, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { // If the path has the given prefix, set the location. if p.HasPrefix(pathPrefix) { - return v.WithLocation(config.Location{ + return v.WithLocation(dyn.Location{ File: filePath, }), nil } @@ -25,7 +25,7 @@ func SetLocation(b *bundle.Bundle, pathPrefix config.Path, filePath string) { } // Return verbatim, but skip traversal. - return v, config.ErrSkip + return v, dyn.ErrSkip }) }) From 3dbadb4997a9bea7ca92b6b3d56ed4991609c2b6 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 22 Dec 2023 14:42:03 +0100 Subject: [PATCH 027/104] More renaming --- .../expand_pipeline_glob_paths_test.go | 4 +-- bundle/config/mutator/rewrite_sync_paths.go | 24 ++++++++-------- .../config/mutator/rewrite_sync_paths_test.go | 18 ++++++------ bundle/config/mutator/translate_paths_test.go | 28 +++++++++---------- 4 files changed, 37 insertions(+), 37 deletions(-) diff --git a/bundle/config/mutator/expand_pipeline_glob_paths_test.go b/bundle/config/mutator/expand_pipeline_glob_paths_test.go index 0f84c9fcb8..2fa4b8ddd1 100644 --- a/bundle/config/mutator/expand_pipeline_glob_paths_test.go +++ b/bundle/config/mutator/expand_pipeline_glob_paths_test.go @@ -10,7 +10,7 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/internal/bundletest" - cv "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/stretchr/testify/require" @@ -96,7 +96,7 @@ func TestExpandGlobPathsInPipelines(t *testing.T) { }, } - bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "resource.yml")) + bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "resource.yml")) m := ExpandPipelineGlobPaths() err := bundle.Apply(context.Background(), b, m) diff --git a/bundle/config/mutator/rewrite_sync_paths.go b/bundle/config/mutator/rewrite_sync_paths.go index 9b578d567d..f218a0419c 100644 --- a/bundle/config/mutator/rewrite_sync_paths.go +++ b/bundle/config/mutator/rewrite_sync_paths.go @@ -8,7 +8,7 @@ import ( "github.com/databricks/cli/bundle" - cv "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn" ) type rewriteSyncPaths struct{} @@ -21,9 +21,9 @@ func (m *rewriteSyncPaths) Name() string { return "RewriteSyncPaths" } -func (m *rewriteSyncPaths) makeRelativeTo(root string, seq cv.Value) (cv.Value, error) { - if seq == cv.NilValue || seq.Kind() != cv.KindSequence { - return cv.NilValue, nil +func (m *rewriteSyncPaths) makeRelativeTo(root string, seq dyn.Value) (dyn.Value, error) { + if seq == dyn.NilValue || seq.Kind() != dyn.KindSequence { + return dyn.NilValue, nil } out, ok := seq.AsSequence() @@ -33,29 +33,29 @@ func (m *rewriteSyncPaths) makeRelativeTo(root string, seq cv.Value) (cv.Value, out = slices.Clone(out) for i, v := range out { - if v.Kind() != cv.KindString { + if v.Kind() != dyn.KindString { continue } dir := 
filepath.Dir(v.Location().File) rel, err := filepath.Rel(root, dir) if err != nil { - return cv.NilValue, err + return dyn.NilValue, err } - out[i] = cv.NewValue(filepath.Join(rel, v.MustString()), v.Location()) + out[i] = dyn.NewValue(filepath.Join(rel, v.MustString()), v.Location()) } - return cv.NewValue(out, seq.Location()), nil + return dyn.NewValue(out, seq.Location()), nil } -func (m *rewriteSyncPaths) fn(root string) func(c cv.Value) (cv.Value, error) { - return func(c cv.Value) (cv.Value, error) { +func (m *rewriteSyncPaths) fn(root string) func(c dyn.Value) (dyn.Value, error) { + return func(c dyn.Value) (dyn.Value, error) { var err error // First build a new sync object sync := c.Get("sync") - if sync == cv.NilValue { + if sync == dyn.NilValue { return c, nil } @@ -77,7 +77,7 @@ func (m *rewriteSyncPaths) fn(root string) func(c cv.Value) (cv.Value, error) { } // Then replace the sync object with the new one - return c.SetKey("sync", cv.NewValue(out, sync.Location())), nil + return c.SetKey("sync", dyn.NewValue(out, sync.Location())), nil } } diff --git a/bundle/config/mutator/rewrite_sync_paths_test.go b/bundle/config/mutator/rewrite_sync_paths_test.go index e4a6f86f49..34b8a70bec 100644 --- a/bundle/config/mutator/rewrite_sync_paths_test.go +++ b/bundle/config/mutator/rewrite_sync_paths_test.go @@ -8,7 +8,7 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/internal/bundletest" - cv "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" ) @@ -29,10 +29,10 @@ func TestRewriteSyncPathsRelative(t *testing.T) { }, } - bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("include"), cv.Index(0)), "./file.yml") - bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("include"), cv.Index(1)), "./a/file.yml") - bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("exclude"), cv.Index(0)), "./a/b/file.yml") - bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("exclude"), cv.Index(1)), "./a/b/c/file.yml") + bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("include"), dyn.Index(0)), "./file.yml") + bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("include"), dyn.Index(1)), "./a/file.yml") + bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("exclude"), dyn.Index(0)), "./a/b/file.yml") + bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("exclude"), dyn.Index(1)), "./a/b/c/file.yml") err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) assert.NoError(t, err) @@ -60,10 +60,10 @@ func TestRewriteSyncPathsAbsolute(t *testing.T) { }, } - bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("include"), cv.Index(0)), "/tmp/dir/file.yml") - bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("include"), cv.Index(1)), "/tmp/dir/a/file.yml") - bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("exclude"), cv.Index(0)), "/tmp/dir/a/b/file.yml") - bundletest.SetLocation(b, cv.NewPath(cv.Key("sync"), cv.Key("exclude"), cv.Index(1)), "/tmp/dir/a/b/c/file.yml") + bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("include"), dyn.Index(0)), "/tmp/dir/file.yml") + bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("include"), dyn.Index(1)), "/tmp/dir/a/file.yml") + bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("exclude"), dyn.Index(0)), "/tmp/dir/a/b/file.yml") + bundletest.SetLocation(b, 
dyn.NewPath(dyn.Key("sync"), dyn.Key("exclude"), dyn.Index(1)), "/tmp/dir/a/b/c/file.yml") err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) assert.NoError(t, err) diff --git a/bundle/config/mutator/translate_paths_test.go b/bundle/config/mutator/translate_paths_test.go index 3541d7c4b8..f5f9f8499b 100644 --- a/bundle/config/mutator/translate_paths_test.go +++ b/bundle/config/mutator/translate_paths_test.go @@ -11,7 +11,7 @@ import ( "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/internal/bundletest" - cv "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/pipelines" @@ -77,7 +77,7 @@ func TestTranslatePathsSkippedWithGitSource(t *testing.T) { }, } - bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "resource.yml")) + bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) require.NoError(t, err) @@ -200,7 +200,7 @@ func TestTranslatePaths(t *testing.T) { }, } - bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "resource.yml")) + bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) require.NoError(t, err) @@ -330,8 +330,8 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { }, } - bundletest.SetLocation(b, cv.NewPath(cv.Key("resources"), cv.Key("jobs")), filepath.Join(dir, "job/resource.yml")) - bundletest.SetLocation(b, cv.NewPath(cv.Key("resources"), cv.Key("pipelines")), filepath.Join(dir, "pipeline/resource.yml")) + bundletest.SetLocation(b, dyn.NewPath(dyn.Key("resources"), dyn.Key("jobs")), filepath.Join(dir, "job/resource.yml")) + bundletest.SetLocation(b, dyn.NewPath(dyn.Key("resources"), dyn.Key("pipelines")), filepath.Join(dir, "pipeline/resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) require.NoError(t, err) @@ -391,7 +391,7 @@ func TestTranslatePathsOutsideBundleRoot(t *testing.T) { }, } - bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "../resource.yml")) + bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "../resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, "is not contained in bundle root") @@ -421,7 +421,7 @@ func TestJobNotebookDoesNotExistError(t *testing.T) { }, } - bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "fake.yml")) + bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "fake.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "notebook ./doesnt_exist.py not found") @@ -451,7 +451,7 @@ func TestJobFileDoesNotExistError(t *testing.T) { }, } - bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "fake.yml")) + bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "fake.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "file ./doesnt_exist.py not found") @@ -481,7 +481,7 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) { }, } - bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "fake.yml")) + bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "fake.yml")) err := 
bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "notebook ./doesnt_exist.py not found") @@ -511,7 +511,7 @@ func TestPipelineFileDoesNotExistError(t *testing.T) { }, } - bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "fake.yml")) + bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "fake.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "file ./doesnt_exist.py not found") @@ -545,7 +545,7 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) { }, } - bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "resource.yml")) + bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a file for "tasks.spark_python_task.python_file" but got a notebook`) @@ -579,7 +579,7 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) { }, } - bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "resource.yml")) + bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a notebook for "tasks.notebook_task.notebook_path" but got a file`) @@ -613,7 +613,7 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) { }, } - bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "resource.yml")) + bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a notebook for "libraries.notebook.path" but got a file`) @@ -647,7 +647,7 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) { }, } - bundletest.SetLocation(b, cv.EmptyPath, filepath.Join(dir, "resource.yml")) + bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a file for "libraries.file.path" but got a notebook`) From 8a11ceeeeb773d5dd207dd85616a627a93f628a4 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 22 Dec 2023 14:58:08 +0100 Subject: [PATCH 028/104] Use string path in bundletest.SetLocation --- .../expand_pipeline_glob_paths_test.go | 3 +-- .../config/mutator/rewrite_sync_paths_test.go | 17 ++++++------ bundle/config/mutator/translate_paths_test.go | 27 +++++++++---------- bundle/deploy/metadata/compute_test.go | 18 +++++-------- bundle/internal/bundletest/location.go | 7 ++--- 5 files changed, 32 insertions(+), 40 deletions(-) diff --git a/bundle/config/mutator/expand_pipeline_glob_paths_test.go b/bundle/config/mutator/expand_pipeline_glob_paths_test.go index 2fa4b8ddd1..e2cba80e25 100644 --- a/bundle/config/mutator/expand_pipeline_glob_paths_test.go +++ b/bundle/config/mutator/expand_pipeline_glob_paths_test.go @@ -10,7 +10,6 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/internal/bundletest" - "github.com/databricks/cli/libs/dyn" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/stretchr/testify/require" @@ -96,7 +95,7 @@ func TestExpandGlobPathsInPipelines(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "resource.yml")) + 
bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) m := ExpandPipelineGlobPaths() err := bundle.Apply(context.Background(), b, m) diff --git a/bundle/config/mutator/rewrite_sync_paths_test.go b/bundle/config/mutator/rewrite_sync_paths_test.go index 34b8a70bec..6e7f9b4836 100644 --- a/bundle/config/mutator/rewrite_sync_paths_test.go +++ b/bundle/config/mutator/rewrite_sync_paths_test.go @@ -8,7 +8,6 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/internal/bundletest" - "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" ) @@ -29,10 +28,10 @@ func TestRewriteSyncPathsRelative(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("include"), dyn.Index(0)), "./file.yml") - bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("include"), dyn.Index(1)), "./a/file.yml") - bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("exclude"), dyn.Index(0)), "./a/b/file.yml") - bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("exclude"), dyn.Index(1)), "./a/b/c/file.yml") + bundletest.SetLocation(b, "sync.include[0]", "./file.yml") + bundletest.SetLocation(b, "sync.include[1]", "./a/file.yml") + bundletest.SetLocation(b, "sync.exclude[0]", "./a/b/file.yml") + bundletest.SetLocation(b, "sync.exclude[1]", "./a/b/c/file.yml") err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) assert.NoError(t, err) @@ -60,10 +59,10 @@ func TestRewriteSyncPathsAbsolute(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("include"), dyn.Index(0)), "/tmp/dir/file.yml") - bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("include"), dyn.Index(1)), "/tmp/dir/a/file.yml") - bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("exclude"), dyn.Index(0)), "/tmp/dir/a/b/file.yml") - bundletest.SetLocation(b, dyn.NewPath(dyn.Key("sync"), dyn.Key("exclude"), dyn.Index(1)), "/tmp/dir/a/b/c/file.yml") + bundletest.SetLocation(b, "sync.include[0]", "/tmp/dir/file.yml") + bundletest.SetLocation(b, "sync.include[1]", "/tmp/dir/a/file.yml") + bundletest.SetLocation(b, "sync.exclude[0]", "/tmp/dir/a/b/file.yml") + bundletest.SetLocation(b, "sync.exclude[1]", "/tmp/dir/a/b/c/file.yml") err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) assert.NoError(t, err) diff --git a/bundle/config/mutator/translate_paths_test.go b/bundle/config/mutator/translate_paths_test.go index f5f9f8499b..96ff88f3f0 100644 --- a/bundle/config/mutator/translate_paths_test.go +++ b/bundle/config/mutator/translate_paths_test.go @@ -11,7 +11,6 @@ import ( "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/internal/bundletest" - "github.com/databricks/cli/libs/dyn" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/pipelines" @@ -77,7 +76,7 @@ func TestTranslatePathsSkippedWithGitSource(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "resource.yml")) + bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) require.NoError(t, err) @@ -200,7 +199,7 @@ func TestTranslatePaths(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "resource.yml")) + 
bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) require.NoError(t, err) @@ -330,8 +329,8 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.NewPath(dyn.Key("resources"), dyn.Key("jobs")), filepath.Join(dir, "job/resource.yml")) - bundletest.SetLocation(b, dyn.NewPath(dyn.Key("resources"), dyn.Key("pipelines")), filepath.Join(dir, "pipeline/resource.yml")) + bundletest.SetLocation(b, "resources.jobs", filepath.Join(dir, "job/resource.yml")) + bundletest.SetLocation(b, "resources.pipelines", filepath.Join(dir, "pipeline/resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) require.NoError(t, err) @@ -391,7 +390,7 @@ func TestTranslatePathsOutsideBundleRoot(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "../resource.yml")) + bundletest.SetLocation(b, ".", filepath.Join(dir, "../resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, "is not contained in bundle root") @@ -421,7 +420,7 @@ func TestJobNotebookDoesNotExistError(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "fake.yml")) + bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "notebook ./doesnt_exist.py not found") @@ -451,7 +450,7 @@ func TestJobFileDoesNotExistError(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "fake.yml")) + bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "file ./doesnt_exist.py not found") @@ -481,7 +480,7 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "fake.yml")) + bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "notebook ./doesnt_exist.py not found") @@ -511,7 +510,7 @@ func TestPipelineFileDoesNotExistError(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "fake.yml")) + bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.EqualError(t, err, "file ./doesnt_exist.py not found") @@ -545,7 +544,7 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "resource.yml")) + bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a file for "tasks.spark_python_task.python_file" but got a notebook`) @@ -579,7 +578,7 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "resource.yml")) + bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a notebook for "tasks.notebook_task.notebook_path" but got a file`) @@ -613,7 +612,7 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, 
"resource.yml")) + bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a notebook for "libraries.notebook.path" but got a file`) @@ -647,7 +646,7 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) { }, } - bundletest.SetLocation(b, dyn.EmptyPath, filepath.Join(dir, "resource.yml")) + bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml")) err := bundle.Apply(context.Background(), b, mutator.TranslatePaths()) assert.ErrorContains(t, err, `expected a file for "libraries.file.path" but got a notebook`) diff --git a/bundle/deploy/metadata/compute_test.go b/bundle/deploy/metadata/compute_test.go index c3cb029d15..57899c4b38 100644 --- a/bundle/deploy/metadata/compute_test.go +++ b/bundle/deploy/metadata/compute_test.go @@ -6,8 +6,8 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/cli/bundle/internal/bundletest" "github.com/databricks/cli/bundle/metadata" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/stretchr/testify/assert" @@ -35,18 +35,12 @@ func TestComputeMetadataMutator(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "my-job-1": { - Paths: paths.Paths{ - ConfigFilePath: "a/b/c", - }, ID: "1111", JobSettings: &jobs.JobSettings{ Name: "My Job One", }, }, "my-job-2": { - Paths: paths.Paths{ - ConfigFilePath: "d/e/f", - }, ID: "2222", JobSettings: &jobs.JobSettings{ Name: "My Job Two", @@ -54,16 +48,16 @@ func TestComputeMetadataMutator(t *testing.T) { }, }, Pipelines: map[string]*resources.Pipeline{ - "my-pipeline": { - Paths: paths.Paths{ - ConfigFilePath: "abc", - }, - }, + "my-pipeline": {}, }, }, }, } + bundletest.SetLocation(b, "resources.jobs.my-job-1", "a/b/c") + bundletest.SetLocation(b, "resources.jobs.my-job-2", "d/e/f") + bundletest.SetLocation(b, "resources.pipelines.my-pipeline", "abc") + expectedMetadata := metadata.Metadata{ Version: metadata.Version, Config: metadata.Config{ diff --git a/bundle/internal/bundletest/location.go b/bundle/internal/bundletest/location.go index fb8bc81315..1fd6f968c2 100644 --- a/bundle/internal/bundletest/location.go +++ b/bundle/internal/bundletest/location.go @@ -8,11 +8,12 @@ import ( // SetLocation sets the location of all values in the bundle to the given path. // This is useful for testing where we need to associate configuration // with the path it is loaded from. -func SetLocation(b *bundle.Bundle, pathPrefix dyn.Path, filePath string) { +func SetLocation(b *bundle.Bundle, prefix string, filePath string) { + start := dyn.MustPathFromString(prefix) b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) { return dyn.Walk(root, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { // If the path has the given prefix, set the location. - if p.HasPrefix(pathPrefix) { + if p.HasPrefix(start) { return v.WithLocation(dyn.Location{ File: filePath, }), nil @@ -20,7 +21,7 @@ func SetLocation(b *bundle.Bundle, pathPrefix dyn.Path, filePath string) { // The path is not nested under the given prefix. // If the path is a prefix of the prefix, keep traversing and return the node verbatim. 
- if pathPrefix.HasPrefix(p) { + if start.HasPrefix(p) { return v, nil } From 69370c4313a554ca2e56ad92e437a3c0010fa64a Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 22 Dec 2023 15:33:26 +0100 Subject: [PATCH 029/104] Return empty map for non-nil structs --- libs/dyn/convert/from_typed.go | 5 ----- libs/dyn/convert/from_typed_test.go | 2 +- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/libs/dyn/convert/from_typed.go b/libs/dyn/convert/from_typed.go index 0659d1cd78..1353d19f07 100644 --- a/libs/dyn/convert/from_typed.go +++ b/libs/dyn/convert/from_typed.go @@ -62,11 +62,6 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value) (dyn.Value, error) { } } - // If the struct was equal to its zero value, emit a nil. - if len(out) == 0 { - return dyn.NilValue, nil - } - return dyn.NewValue(out, ref.Location()), nil } diff --git a/libs/dyn/convert/from_typed_test.go b/libs/dyn/convert/from_typed_test.go index 0e9b9c7cd5..7213072a61 100644 --- a/libs/dyn/convert/from_typed_test.go +++ b/libs/dyn/convert/from_typed_test.go @@ -19,7 +19,7 @@ func TestFromTypedStructZeroFields(t *testing.T) { nv, err := FromTyped(src, ref) require.NoError(t, err) - assert.Equal(t, dyn.NilValue, nv) + assert.Equal(t, dyn.V(map[string]dyn.Value{}), nv) } func TestFromTypedStructSetFields(t *testing.T) { From cdbea58cf52e93d5435a616636d4e835c2a866a8 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 22 Dec 2023 15:38:28 +0100 Subject: [PATCH 030/104] Use resource key as name in permissions code The code relied on the `Name` property being accessible for every resource. This is generally true, but because these property structs are embedded as pointer, they can be nil. This is also why the tests had to initialize the embedded struct to pass. This changes the approach to use the keys from the resource map instead, so that we no longer rely on the non-nil embedded struct. Note: we should evaluate whether we should turn these into values instead of pointers. I don't recall if we get value from them being pointers. --- bundle/permissions/mutator.go | 20 ++++++++++---------- bundle/permissions/mutator_test.go | 26 ++++++++++---------------- 2 files changed, 20 insertions(+), 26 deletions(-) diff --git a/bundle/permissions/mutator.go b/bundle/permissions/mutator.go index 025556f31e..54925d1c8d 100644 --- a/bundle/permissions/mutator.go +++ b/bundle/permissions/mutator.go @@ -72,60 +72,60 @@ func validate(b *bundle.Bundle) error { } func applyForJobs(ctx context.Context, b *bundle.Bundle) { - for _, job := range b.Config.Resources.Jobs { + for key, job := range b.Config.Resources.Jobs { job.Permissions = append(job.Permissions, convert( ctx, b.Config.Permissions, job.Permissions, - job.Name, + key, levelsMap["jobs"], )...) } } func applyForPipelines(ctx context.Context, b *bundle.Bundle) { - for _, pipeline := range b.Config.Resources.Pipelines { + for key, pipeline := range b.Config.Resources.Pipelines { pipeline.Permissions = append(pipeline.Permissions, convert( ctx, b.Config.Permissions, pipeline.Permissions, - pipeline.Name, + key, levelsMap["pipelines"], )...) } } func applyForMlExperiments(ctx context.Context, b *bundle.Bundle) { - for _, experiment := range b.Config.Resources.Experiments { + for key, experiment := range b.Config.Resources.Experiments { experiment.Permissions = append(experiment.Permissions, convert( ctx, b.Config.Permissions, experiment.Permissions, - experiment.Name, + key, levelsMap["mlflow_experiments"], )...) 
} } func applyForMlModels(ctx context.Context, b *bundle.Bundle) { - for _, model := range b.Config.Resources.Models { + for key, model := range b.Config.Resources.Models { model.Permissions = append(model.Permissions, convert( ctx, b.Config.Permissions, model.Permissions, - model.Name, + key, levelsMap["mlflow_models"], )...) } } func applyForModelServiceEndpoints(ctx context.Context, b *bundle.Bundle) { - for _, model := range b.Config.Resources.ModelServingEndpoints { + for key, model := range b.Config.Resources.ModelServingEndpoints { model.Permissions = append(model.Permissions, convert( ctx, b.Config.Permissions, model.Permissions, - model.Name, + key, levelsMap["model_serving_endpoints"], )...) } diff --git a/bundle/permissions/mutator_test.go b/bundle/permissions/mutator_test.go index d9bf3efe76..62c0589d3e 100644 --- a/bundle/permissions/mutator_test.go +++ b/bundle/permissions/mutator_test.go @@ -7,10 +7,6 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" - "github.com/databricks/databricks-sdk-go/service/jobs" - "github.com/databricks/databricks-sdk-go/service/ml" - "github.com/databricks/databricks-sdk-go/service/pipelines" - "github.com/databricks/databricks-sdk-go/service/serving" "github.com/stretchr/testify/require" ) @@ -27,24 +23,24 @@ func TestApplyBundlePermissions(t *testing.T) { }, Resources: config.Resources{ Jobs: map[string]*resources.Job{ - "job_1": {JobSettings: &jobs.JobSettings{}}, - "job_2": {JobSettings: &jobs.JobSettings{}}, + "job_1": {}, + "job_2": {}, }, Pipelines: map[string]*resources.Pipeline{ - "pipeline_1": {PipelineSpec: &pipelines.PipelineSpec{}}, - "pipeline_2": {PipelineSpec: &pipelines.PipelineSpec{}}, + "pipeline_1": {}, + "pipeline_2": {}, }, Models: map[string]*resources.MlflowModel{ - "model_1": {Model: &ml.Model{}}, - "model_2": {Model: &ml.Model{}}, + "model_1": {}, + "model_2": {}, }, Experiments: map[string]*resources.MlflowExperiment{ - "experiment_1": {Experiment: &ml.Experiment{}}, - "experiment_2": {Experiment: &ml.Experiment{}}, + "experiment_1": {}, + "experiment_2": {}, }, ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{ - "endpoint_1": {CreateServingEndpoint: &serving.CreateServingEndpoint{}}, - "endpoint_2": {CreateServingEndpoint: &serving.CreateServingEndpoint{}}, + "endpoint_1": {}, + "endpoint_2": {}, }, }, }, @@ -116,13 +112,11 @@ func TestWarningOnOverlapPermission(t *testing.T) { Permissions: []resources.Permission{ {Level: CAN_VIEW, UserName: "TestUser"}, }, - JobSettings: &jobs.JobSettings{}, }, "job_2": { Permissions: []resources.Permission{ {Level: CAN_VIEW, UserName: "TestUser2"}, }, - JobSettings: &jobs.JobSettings{}, }, }, }, From 68135cbaa8c355f6478b2caca9b18940a9be6485 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 4 Jan 2024 09:50:08 +0100 Subject: [PATCH 031/104] wip git --- bundle/config/root.go | 100 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 100 insertions(+) diff --git a/bundle/config/root.go b/bundle/config/root.go index b2428f16ef..f8f2636ed0 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -322,6 +322,18 @@ func (r *Root) MergeTargetOverrides(name string) error { r.RunAs = nil } + if git := target.Get("git"); git != dyn.NilValue { + bundle := r.value.Get("bundle") + if bundle == dyn.NilValue { + bundle = dyn.NewValue(map[string]dyn.Value{}, dyn.Location{}) + } + + bundle.MustMap()["git"] = git + r.value.MustMap()["bundle"] = bundle + r.value.SetByPath() 
+ + } + if err = mergeField("bundle"); err != nil { return err } @@ -355,3 +367,91 @@ func (r *Root) MergeTargetOverrides(name string) error { r.ConfigureConfigFilePath() return nil } + +// // Target may be nil if it's empty. +// if target == nil { +// return nil +// } + +// if target.Bundle != nil { +// err = mergo.Merge(&r.Bundle, target.Bundle, mergo.WithOverride) +// if err != nil { +// return err +// } +// } + +// if target.Workspace != nil { +// err = mergo.Merge(&r.Workspace, target.Workspace, mergo.WithOverride) +// if err != nil { +// return err +// } +// } + +// if target.Artifacts != nil { +// err = mergo.Merge(&r.Artifacts, target.Artifacts, mergo.WithOverride, mergo.WithAppendSlice) +// if err != nil { +// return err +// } +// } + +// if target.Resources != nil { +// err = mergo.Merge(&r.Resources, target.Resources, mergo.WithOverride, mergo.WithAppendSlice) +// if err != nil { +// return err +// } + +// err = r.Resources.Merge() +// if err != nil { +// return err +// } +// } + +// if target.Variables != nil { +// for k, v := range target.Variables { +// variable, ok := r.Variables[k] +// if !ok { +// return fmt.Errorf("variable %s is not defined but is assigned a value", k) +// } +// // we only allow overrides of the default value for a variable +// defaultVal := v +// variable.Default = &defaultVal +// } +// } + +// if target.RunAs != nil { +// r.RunAs = target.RunAs +// } + +// if target.Mode != "" { +// r.Bundle.Mode = target.Mode +// } + +// if target.ComputeID != "" { +// r.Bundle.ComputeID = target.ComputeID +// } + +// git := &r.Bundle.Git +// if target.Git.Branch != "" { +// git.Branch = target.Git.Branch +// git.Inferred = false +// } +// if target.Git.Commit != "" { +// git.Commit = target.Git.Commit +// } +// if target.Git.OriginURL != "" { +// git.OriginURL = target.Git.OriginURL +// } + +// if target.Sync != nil { +// err = mergo.Merge(&r.Sync, target.Sync, mergo.WithAppendSlice) +// if err != nil { +// return err +// } +// } + +// if target.Permissions != nil { +// err = mergo.Merge(&r.Permissions, target.Permissions, mergo.WithAppendSlice) +// if err != nil { +// return err +// } +// } From b50cc8a4f704b4182b7202e19fffb70e497f89e2 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 4 Jan 2024 17:41:12 +0100 Subject: [PATCH 032/104] More dyn work --- libs/dyn/kind.go | 10 +- libs/dyn/value.go | 75 -------------- libs/dyn/value_transform.go | 23 +++-- libs/dyn/value_underlying.go | 127 +++++++++++++++++++++++ libs/dyn/value_underlying_test.go | 161 ++++++++++++++++++++++++++++++ 5 files changed, 310 insertions(+), 86 deletions(-) create mode 100644 libs/dyn/value_underlying.go create mode 100644 libs/dyn/value_underlying_test.go diff --git a/libs/dyn/kind.go b/libs/dyn/kind.go index ba093341e9..203e6df498 100644 --- a/libs/dyn/kind.go +++ b/libs/dyn/kind.go @@ -9,12 +9,12 @@ const ( KindInvalid Kind = iota KindMap KindSequence - KindNil KindString KindBool KindInt KindFloat KindTime + KindNil ) func kindOf(v any) Kind { @@ -23,8 +23,6 @@ func kindOf(v any) Kind { return KindMap case []Value: return KindSequence - case nil: - return KindNil case string: return KindString case bool: @@ -35,6 +33,8 @@ func kindOf(v any) Kind { return KindFloat case time.Time: return KindTime + case nil: + return KindNil default: panic("not handled") } @@ -46,8 +46,6 @@ func (k Kind) String() string { return "map" case KindSequence: return "sequence" - case KindNil: - return "nil" case KindString: return "string" case KindBool: @@ -58,6 +56,8 @@ func (k Kind) String() string { 
return "float" case KindTime: return "time" + case KindNil: + return "nil" default: return "invalid" } diff --git a/libs/dyn/value.go b/libs/dyn/value.go index 8df1d478f4..7c63f3d241 100644 --- a/libs/dyn/value.go +++ b/libs/dyn/value.go @@ -2,8 +2,6 @@ package dyn import ( "fmt" - "maps" - "time" ) type Value struct { @@ -53,35 +51,6 @@ func (v Value) WithLocation(loc Location) Value { } } -func (v Value) AsMap() (map[string]Value, bool) { - m, ok := v.v.(map[string]Value) - return m, ok -} - -func (v Value) SetKey(key string, value Value) Value { - m, ok := v.AsMap() - if !ok { - m = make(map[string]Value) - } else { - m = maps.Clone(m) - } - - m[key] = value - - return Value{ - v: m, - k: KindMap, - l: v.l, - } -} - -func (v Value) AsSequence() ([]Value, bool) { - if v.k != KindSequence { - return nil, false - } - return v.v.([]Value), true -} - func (v Value) Kind() Kind { return v.k } @@ -170,47 +139,3 @@ func (v Value) MarkAnchor() Value { func (v Value) IsAnchor() bool { return v.anchor } - -func (v Value) MustMap() map[string]Value { - return v.v.(map[string]Value) -} - -func (v Value) MustSequence() []Value { - return v.v.([]Value) -} - -func (v Value) MustString() string { - return v.v.(string) -} - -func (v Value) MustBool() bool { - return v.v.(bool) -} - -func (v Value) MustInt() int64 { - switch vv := v.v.(type) { - case int: - return int64(vv) - case int32: - return int64(vv) - case int64: - return int64(vv) - default: - panic("not an int") - } -} - -func (v Value) MustFloat() float64 { - switch vv := v.v.(type) { - case float32: - return float64(vv) - case float64: - return float64(vv) - default: - panic("not a float") - } -} - -func (v Value) MustTime() time.Time { - return v.v.(time.Time) -} diff --git a/libs/dyn/value_transform.go b/libs/dyn/value_transform.go index 8e8d8f4820..5570b973ad 100644 --- a/libs/dyn/value_transform.go +++ b/libs/dyn/value_transform.go @@ -20,9 +20,9 @@ func IsNoSuchKeyError(err error) bool { return errors.As(err, &target) } -func (v Value) TransformByPath(p Path, value Value) (Value, error) { - return v.set(EmptyPath, p, value) -} +// func (v Value) TransformByPath(p Path, value Value) (Value, error) { +// return v.transform(EmptyPath, p, value) +// } func (v Value) Transform(path string, fn func(Value) (Value, error)) (Value, error) { p, err := NewPathFromString(path) @@ -51,17 +51,22 @@ func (v Value) transform(prefix, suffix Path, fn func(Value) (Value, error)) (Va // Lookup current value in the map. m := v.MustMap() - nv, ok := m[component.key] + ev, ok := m[component.key] if !ok { return InvalidValue, noSuchKeyError{prefix} } // Recursively transform the value. - nv, err := nv.transform(prefix, suffix, fn) + nv, err := ev.transform(prefix, suffix, fn) if err != nil { return InvalidValue, err } + // Return the original value if the value hasn't changed. + if nv == ev { + return v, nil + } + // Return an updated map value. m = maps.Clone(m) m[component.key] = nv @@ -84,11 +89,17 @@ func (v Value) transform(prefix, suffix Path, fn func(Value) (Value, error)) (Va } // Recursively transform the value. - nv, err := s[component.index].transform(prefix, suffix, fn) + ev := s[component.index] + nv, err := ev.transform(prefix, suffix, fn) if err != nil { return InvalidValue, err } + // Return the original value if the value hasn't changed. + if nv == ev { + return v, nil + } + // Return an updated sequence value. 
s = slices.Clone(s) s[component.index] = nv diff --git a/libs/dyn/value_underlying.go b/libs/dyn/value_underlying.go new file mode 100644 index 0000000000..e885f558b4 --- /dev/null +++ b/libs/dyn/value_underlying.go @@ -0,0 +1,127 @@ +package dyn + +import ( + "fmt" + "time" +) + +// panicOnTypeMismatch is a helper function for the MustZZZ functions in this file. +// We rather panic with a descriptive error message than a generic one. +func panicOnTypeMismatch[T any](v Value, vv T, ok bool, k Kind) T { + if !ok || v.k != k { + panic(fmt.Sprintf("expected kind %s, got %s", k, v.k)) + } + return vv +} + +// AsMap returns the underlying map if this value is a map, +// the zero value and false otherwise. +func (v Value) AsMap() (map[string]Value, bool) { + vv, ok := v.v.(map[string]Value) + return vv, ok +} + +// MustMap returns the underlying map if this value is a map, +// panics otherwise. +func (v Value) MustMap() map[string]Value { + vv, ok := v.AsMap() + return panicOnTypeMismatch(v, vv, ok, KindMap) +} + +// AsSequence returns the underlying sequence if this value is a sequence, +// the zero value and false otherwise. +func (v Value) AsSequence() ([]Value, bool) { + vv, ok := v.v.([]Value) + return vv, ok +} + +// MustSequence returns the underlying sequence if this value is a sequence, +// panics otherwise. +func (v Value) MustSequence() []Value { + vv, ok := v.AsSequence() + return panicOnTypeMismatch(v, vv, ok, KindSequence) +} + +// AsString returns the underlying string if this value is a string, +// the zero value and false otherwise. +func (v Value) AsString() (string, bool) { + vv, ok := v.v.(string) + return vv, ok +} + +// MustString returns the underlying string if this value is a string, +// panics otherwise. +func (v Value) MustString() string { + vv, ok := v.AsString() + return panicOnTypeMismatch(v, vv, ok, KindString) +} + +// AsBool returns the underlying bool if this value is a bool, +// the zero value and false otherwise. +func (v Value) AsBool() (bool, bool) { + vv, ok := v.v.(bool) + return vv, ok +} + +// MustBool returns the underlying bool if this value is a bool, +// panics otherwise. +func (v Value) MustBool() bool { + vv, ok := v.AsBool() + return panicOnTypeMismatch(v, vv, ok, KindBool) +} + +// AsInt returns the underlying int if this value is an int, +// the zero value and false otherwise. +func (v Value) AsInt() (int64, bool) { + switch vv := v.v.(type) { + case int: + return int64(vv), true + case int32: + return int64(vv), true + case int64: + return int64(vv), true + default: + return 0, false + } +} + +// MustInt returns the underlying int if this value is an int, +// panics otherwise. +func (v Value) MustInt() int64 { + vv, ok := v.AsInt() + return panicOnTypeMismatch(v, vv, ok, KindInt) +} + +// AsFloat returns the underlying float if this value is a float, +// the zero value and false otherwise. +func (v Value) AsFloat() (float64, bool) { + switch vv := v.v.(type) { + case float32: + return float64(vv), true + case float64: + return float64(vv), true + default: + return 0, false + } +} + +// MustFloat returns the underlying float if this value is a float, +// panics otherwise. +func (v Value) MustFloat() float64 { + vv, ok := v.AsFloat() + return panicOnTypeMismatch(v, vv, ok, KindFloat) +} + +// AsTime returns the underlying time if this value is a time, +// the zero value and false otherwise. 
+func (v Value) AsTime() (time.Time, bool) { + vv, ok := v.v.(time.Time) + return vv, ok +} + +// MustTime returns the underlying time if this value is a time, +// panics otherwise. +func (v Value) MustTime() time.Time { + vv, ok := v.AsTime() + return panicOnTypeMismatch(v, vv, ok, KindTime) +} diff --git a/libs/dyn/value_underlying_test.go b/libs/dyn/value_underlying_test.go new file mode 100644 index 0000000000..17cb959418 --- /dev/null +++ b/libs/dyn/value_underlying_test.go @@ -0,0 +1,161 @@ +package dyn_test + +import ( + "testing" + "time" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" +) + +func TestValueUnderlyingMap(t *testing.T) { + v := dyn.V( + map[string]dyn.Value{ + "key": dyn.NewValue("value", dyn.Location{File: "file", Line: 1, Column: 2}), + }, + ) + + vv1, ok := v.AsMap() + assert.True(t, ok) + + _, ok = dyn.NilValue.AsMap() + assert.False(t, ok) + + vv2 := v.MustMap() + assert.Equal(t, vv1, vv2) + + // Test panic. + assert.PanicsWithValue(t, "expected kind map, got nil", func() { + dyn.NilValue.MustMap() + }) +} + +func TestValueUnderlyingSequence(t *testing.T) { + v := dyn.V( + []dyn.Value{ + dyn.NewValue("value", dyn.Location{File: "file", Line: 1, Column: 2}), + }, + ) + + vv1, ok := v.AsSequence() + assert.True(t, ok) + + _, ok = dyn.NilValue.AsSequence() + assert.False(t, ok) + + vv2 := v.MustSequence() + assert.Equal(t, vv1, vv2) + + // Test panic. + assert.PanicsWithValue(t, "expected kind sequence, got nil", func() { + dyn.NilValue.MustSequence() + }) +} + +func TestValueUnderlyingString(t *testing.T) { + v := dyn.V("value") + + vv1, ok := v.AsString() + assert.True(t, ok) + + _, ok = dyn.NilValue.AsString() + assert.False(t, ok) + + vv2 := v.MustString() + assert.Equal(t, vv1, vv2) + + // Test panic. + assert.PanicsWithValue(t, "expected kind string, got nil", func() { + dyn.NilValue.MustString() + }) +} + +func TestValueUnderlyingBool(t *testing.T) { + v := dyn.V(true) + + vv1, ok := v.AsBool() + assert.True(t, ok) + + _, ok = dyn.NilValue.AsBool() + assert.False(t, ok) + + vv2 := v.MustBool() + assert.Equal(t, vv1, vv2) + + // Test panic. + assert.PanicsWithValue(t, "expected kind bool, got nil", func() { + dyn.NilValue.MustBool() + }) +} + +func TestValueUnderlyingInt(t *testing.T) { + v := dyn.V(int(1)) + + vv1, ok := v.AsInt() + assert.True(t, ok) + + _, ok = dyn.NilValue.AsInt() + assert.False(t, ok) + + vv2 := v.MustInt() + assert.Equal(t, vv1, vv2) + + // Test panic. + assert.PanicsWithValue(t, "expected kind int, got nil", func() { + dyn.NilValue.MustInt() + }) + + // Test int32 type specifically. + v = dyn.V(int32(1)) + vv1, ok = v.AsInt() + assert.True(t, ok) + assert.Equal(t, int64(1), vv1) + + // Test int64 type specifically. + v = dyn.V(int64(1)) + vv1, ok = v.AsInt() + assert.True(t, ok) + assert.Equal(t, int64(1), vv1) +} + +func TestValueUnderlyingFloat(t *testing.T) { + v := dyn.V(float32(1.0)) + + vv1, ok := v.AsFloat() + assert.True(t, ok) + + _, ok = dyn.NilValue.AsFloat() + assert.False(t, ok) + + vv2 := v.MustFloat() + assert.Equal(t, vv1, vv2) + + // Test panic. + assert.PanicsWithValue(t, "expected kind float, got nil", func() { + dyn.NilValue.MustFloat() + }) + + // Test float64 type specifically. 
+ v = dyn.V(float64(1.0)) + vv1, ok = v.AsFloat() + assert.True(t, ok) + assert.Equal(t, float64(1.0), vv1) +} + +func TestValueUnderlyingTime(t *testing.T) { + v := dyn.V(time.Now()) + + vv1, ok := v.AsTime() + assert.True(t, ok) + + _, ok = dyn.NilValue.AsTime() + assert.False(t, ok) + + vv2 := v.MustTime() + assert.Equal(t, vv1, vv2) + + // Test panic. + assert.PanicsWithValue(t, "expected kind time, got nil", func() { + dyn.NilValue.MustTime() + }) +} From 2f36c7e26c4b30247791546f83ccb36b1bf8b342 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 4 Jan 2024 19:48:05 +0100 Subject: [PATCH 033/104] remove nop --- bundle/config/root.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/bundle/config/root.go b/bundle/config/root.go index f8f2636ed0..118ae003c2 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -330,8 +330,6 @@ func (r *Root) MergeTargetOverrides(name string) error { bundle.MustMap()["git"] = git r.value.MustMap()["bundle"] = bundle - r.value.SetByPath() - } if err = mergeField("bundle"); err != nil { From aa19b48df06630d47af40812d4908030a7386740 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 4 Jan 2024 19:52:22 +0100 Subject: [PATCH 034/104] Consolidate functions to convert `dyn.Value` to native types --- libs/dyn/value.go | 50 ---------- libs/dyn/value_underlying.go | 127 +++++++++++++++++++++++ libs/dyn/value_underlying_test.go | 161 ++++++++++++++++++++++++++++++ 3 files changed, 288 insertions(+), 50 deletions(-) create mode 100644 libs/dyn/value_underlying.go create mode 100644 libs/dyn/value_underlying_test.go diff --git a/libs/dyn/value.go b/libs/dyn/value.go index 9ac738f9cb..e33e10cffa 100644 --- a/libs/dyn/value.go +++ b/libs/dyn/value.go @@ -2,7 +2,6 @@ package dyn import ( "fmt" - "time" ) type Value struct { @@ -38,11 +37,6 @@ func NewValue(v any, loc Location) Value { } } -func (v Value) AsMap() (map[string]Value, bool) { - m, ok := v.v.(map[string]Value) - return m, ok -} - func (v Value) Kind() Kind { return v.k } @@ -131,47 +125,3 @@ func (v Value) MarkAnchor() Value { func (v Value) IsAnchor() bool { return v.anchor } - -func (v Value) MustMap() map[string]Value { - return v.v.(map[string]Value) -} - -func (v Value) MustSequence() []Value { - return v.v.([]Value) -} - -func (v Value) MustString() string { - return v.v.(string) -} - -func (v Value) MustBool() bool { - return v.v.(bool) -} - -func (v Value) MustInt() int64 { - switch vv := v.v.(type) { - case int: - return int64(vv) - case int32: - return int64(vv) - case int64: - return int64(vv) - default: - panic("not an int") - } -} - -func (v Value) MustFloat() float64 { - switch vv := v.v.(type) { - case float32: - return float64(vv) - case float64: - return float64(vv) - default: - panic("not a float") - } -} - -func (v Value) MustTime() time.Time { - return v.v.(time.Time) -} diff --git a/libs/dyn/value_underlying.go b/libs/dyn/value_underlying.go new file mode 100644 index 0000000000..e885f558b4 --- /dev/null +++ b/libs/dyn/value_underlying.go @@ -0,0 +1,127 @@ +package dyn + +import ( + "fmt" + "time" +) + +// panicOnTypeMismatch is a helper function for the MustZZZ functions in this file. +// We rather panic with a descriptive error message than a generic one. +func panicOnTypeMismatch[T any](v Value, vv T, ok bool, k Kind) T { + if !ok || v.k != k { + panic(fmt.Sprintf("expected kind %s, got %s", k, v.k)) + } + return vv +} + +// AsMap returns the underlying map if this value is a map, +// the zero value and false otherwise. 
+func (v Value) AsMap() (map[string]Value, bool) { + vv, ok := v.v.(map[string]Value) + return vv, ok +} + +// MustMap returns the underlying map if this value is a map, +// panics otherwise. +func (v Value) MustMap() map[string]Value { + vv, ok := v.AsMap() + return panicOnTypeMismatch(v, vv, ok, KindMap) +} + +// AsSequence returns the underlying sequence if this value is a sequence, +// the zero value and false otherwise. +func (v Value) AsSequence() ([]Value, bool) { + vv, ok := v.v.([]Value) + return vv, ok +} + +// MustSequence returns the underlying sequence if this value is a sequence, +// panics otherwise. +func (v Value) MustSequence() []Value { + vv, ok := v.AsSequence() + return panicOnTypeMismatch(v, vv, ok, KindSequence) +} + +// AsString returns the underlying string if this value is a string, +// the zero value and false otherwise. +func (v Value) AsString() (string, bool) { + vv, ok := v.v.(string) + return vv, ok +} + +// MustString returns the underlying string if this value is a string, +// panics otherwise. +func (v Value) MustString() string { + vv, ok := v.AsString() + return panicOnTypeMismatch(v, vv, ok, KindString) +} + +// AsBool returns the underlying bool if this value is a bool, +// the zero value and false otherwise. +func (v Value) AsBool() (bool, bool) { + vv, ok := v.v.(bool) + return vv, ok +} + +// MustBool returns the underlying bool if this value is a bool, +// panics otherwise. +func (v Value) MustBool() bool { + vv, ok := v.AsBool() + return panicOnTypeMismatch(v, vv, ok, KindBool) +} + +// AsInt returns the underlying int if this value is an int, +// the zero value and false otherwise. +func (v Value) AsInt() (int64, bool) { + switch vv := v.v.(type) { + case int: + return int64(vv), true + case int32: + return int64(vv), true + case int64: + return int64(vv), true + default: + return 0, false + } +} + +// MustInt returns the underlying int if this value is an int, +// panics otherwise. +func (v Value) MustInt() int64 { + vv, ok := v.AsInt() + return panicOnTypeMismatch(v, vv, ok, KindInt) +} + +// AsFloat returns the underlying float if this value is a float, +// the zero value and false otherwise. +func (v Value) AsFloat() (float64, bool) { + switch vv := v.v.(type) { + case float32: + return float64(vv), true + case float64: + return float64(vv), true + default: + return 0, false + } +} + +// MustFloat returns the underlying float if this value is a float, +// panics otherwise. +func (v Value) MustFloat() float64 { + vv, ok := v.AsFloat() + return panicOnTypeMismatch(v, vv, ok, KindFloat) +} + +// AsTime returns the underlying time if this value is a time, +// the zero value and false otherwise. +func (v Value) AsTime() (time.Time, bool) { + vv, ok := v.v.(time.Time) + return vv, ok +} + +// MustTime returns the underlying time if this value is a time, +// panics otherwise. 
+func (v Value) MustTime() time.Time { + vv, ok := v.AsTime() + return panicOnTypeMismatch(v, vv, ok, KindTime) +} diff --git a/libs/dyn/value_underlying_test.go b/libs/dyn/value_underlying_test.go new file mode 100644 index 0000000000..17cb959418 --- /dev/null +++ b/libs/dyn/value_underlying_test.go @@ -0,0 +1,161 @@ +package dyn_test + +import ( + "testing" + "time" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" +) + +func TestValueUnderlyingMap(t *testing.T) { + v := dyn.V( + map[string]dyn.Value{ + "key": dyn.NewValue("value", dyn.Location{File: "file", Line: 1, Column: 2}), + }, + ) + + vv1, ok := v.AsMap() + assert.True(t, ok) + + _, ok = dyn.NilValue.AsMap() + assert.False(t, ok) + + vv2 := v.MustMap() + assert.Equal(t, vv1, vv2) + + // Test panic. + assert.PanicsWithValue(t, "expected kind map, got nil", func() { + dyn.NilValue.MustMap() + }) +} + +func TestValueUnderlyingSequence(t *testing.T) { + v := dyn.V( + []dyn.Value{ + dyn.NewValue("value", dyn.Location{File: "file", Line: 1, Column: 2}), + }, + ) + + vv1, ok := v.AsSequence() + assert.True(t, ok) + + _, ok = dyn.NilValue.AsSequence() + assert.False(t, ok) + + vv2 := v.MustSequence() + assert.Equal(t, vv1, vv2) + + // Test panic. + assert.PanicsWithValue(t, "expected kind sequence, got nil", func() { + dyn.NilValue.MustSequence() + }) +} + +func TestValueUnderlyingString(t *testing.T) { + v := dyn.V("value") + + vv1, ok := v.AsString() + assert.True(t, ok) + + _, ok = dyn.NilValue.AsString() + assert.False(t, ok) + + vv2 := v.MustString() + assert.Equal(t, vv1, vv2) + + // Test panic. + assert.PanicsWithValue(t, "expected kind string, got nil", func() { + dyn.NilValue.MustString() + }) +} + +func TestValueUnderlyingBool(t *testing.T) { + v := dyn.V(true) + + vv1, ok := v.AsBool() + assert.True(t, ok) + + _, ok = dyn.NilValue.AsBool() + assert.False(t, ok) + + vv2 := v.MustBool() + assert.Equal(t, vv1, vv2) + + // Test panic. + assert.PanicsWithValue(t, "expected kind bool, got nil", func() { + dyn.NilValue.MustBool() + }) +} + +func TestValueUnderlyingInt(t *testing.T) { + v := dyn.V(int(1)) + + vv1, ok := v.AsInt() + assert.True(t, ok) + + _, ok = dyn.NilValue.AsInt() + assert.False(t, ok) + + vv2 := v.MustInt() + assert.Equal(t, vv1, vv2) + + // Test panic. + assert.PanicsWithValue(t, "expected kind int, got nil", func() { + dyn.NilValue.MustInt() + }) + + // Test int32 type specifically. + v = dyn.V(int32(1)) + vv1, ok = v.AsInt() + assert.True(t, ok) + assert.Equal(t, int64(1), vv1) + + // Test int64 type specifically. + v = dyn.V(int64(1)) + vv1, ok = v.AsInt() + assert.True(t, ok) + assert.Equal(t, int64(1), vv1) +} + +func TestValueUnderlyingFloat(t *testing.T) { + v := dyn.V(float32(1.0)) + + vv1, ok := v.AsFloat() + assert.True(t, ok) + + _, ok = dyn.NilValue.AsFloat() + assert.False(t, ok) + + vv2 := v.MustFloat() + assert.Equal(t, vv1, vv2) + + // Test panic. + assert.PanicsWithValue(t, "expected kind float, got nil", func() { + dyn.NilValue.MustFloat() + }) + + // Test float64 type specifically. + v = dyn.V(float64(1.0)) + vv1, ok = v.AsFloat() + assert.True(t, ok) + assert.Equal(t, float64(1.0), vv1) +} + +func TestValueUnderlyingTime(t *testing.T) { + v := dyn.V(time.Now()) + + vv1, ok := v.AsTime() + assert.True(t, ok) + + _, ok = dyn.NilValue.AsTime() + assert.False(t, ok) + + vv2 := v.MustTime() + assert.Equal(t, vv1, vv2) + + // Test panic. 
+ assert.PanicsWithValue(t, "expected kind time, got nil", func() { + dyn.NilValue.MustTime() + }) +} From 021386139c8fd5aac2abe14e849b3fa346f3f2d8 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 4 Jan 2024 19:59:42 +0100 Subject: [PATCH 035/104] Inline --- libs/dyn/value_underlying.go | 44 +++++++++++++++++++++++------------- 1 file changed, 28 insertions(+), 16 deletions(-) diff --git a/libs/dyn/value_underlying.go b/libs/dyn/value_underlying.go index e885f558b4..c8c5037900 100644 --- a/libs/dyn/value_underlying.go +++ b/libs/dyn/value_underlying.go @@ -5,15 +5,6 @@ import ( "time" ) -// panicOnTypeMismatch is a helper function for the MustZZZ functions in this file. -// We rather panic with a descriptive error message than a generic one. -func panicOnTypeMismatch[T any](v Value, vv T, ok bool, k Kind) T { - if !ok || v.k != k { - panic(fmt.Sprintf("expected kind %s, got %s", k, v.k)) - } - return vv -} - // AsMap returns the underlying map if this value is a map, // the zero value and false otherwise. func (v Value) AsMap() (map[string]Value, bool) { @@ -25,7 +16,10 @@ func (v Value) AsMap() (map[string]Value, bool) { // panics otherwise. func (v Value) MustMap() map[string]Value { vv, ok := v.AsMap() - return panicOnTypeMismatch(v, vv, ok, KindMap) + if !ok || v.k != KindMap { + panic(fmt.Sprintf("expected kind %s, got %s", KindMap, v.k)) + } + return vv } // AsSequence returns the underlying sequence if this value is a sequence, @@ -39,7 +33,10 @@ func (v Value) AsSequence() ([]Value, bool) { // panics otherwise. func (v Value) MustSequence() []Value { vv, ok := v.AsSequence() - return panicOnTypeMismatch(v, vv, ok, KindSequence) + if !ok || v.k != KindSequence { + panic(fmt.Sprintf("expected kind %s, got %s", KindSequence, v.k)) + } + return vv } // AsString returns the underlying string if this value is a string, @@ -53,7 +50,10 @@ func (v Value) AsString() (string, bool) { // panics otherwise. func (v Value) MustString() string { vv, ok := v.AsString() - return panicOnTypeMismatch(v, vv, ok, KindString) + if !ok || v.k != KindString { + panic(fmt.Sprintf("expected kind %s, got %s", KindString, v.k)) + } + return vv } // AsBool returns the underlying bool if this value is a bool, @@ -67,7 +67,10 @@ func (v Value) AsBool() (bool, bool) { // panics otherwise. func (v Value) MustBool() bool { vv, ok := v.AsBool() - return panicOnTypeMismatch(v, vv, ok, KindBool) + if !ok || v.k != KindBool { + panic(fmt.Sprintf("expected kind %s, got %s", KindBool, v.k)) + } + return vv } // AsInt returns the underlying int if this value is an int, @@ -89,7 +92,10 @@ func (v Value) AsInt() (int64, bool) { // panics otherwise. func (v Value) MustInt() int64 { vv, ok := v.AsInt() - return panicOnTypeMismatch(v, vv, ok, KindInt) + if !ok || v.k != KindInt { + panic(fmt.Sprintf("expected kind %s, got %s", KindInt, v.k)) + } + return vv } // AsFloat returns the underlying float if this value is a float, @@ -109,7 +115,10 @@ func (v Value) AsFloat() (float64, bool) { // panics otherwise. func (v Value) MustFloat() float64 { vv, ok := v.AsFloat() - return panicOnTypeMismatch(v, vv, ok, KindFloat) + if !ok || v.k != KindFloat { + panic(fmt.Sprintf("expected kind %s, got %s", KindFloat, v.k)) + } + return vv } // AsTime returns the underlying time if this value is a time, @@ -123,5 +132,8 @@ func (v Value) AsTime() (time.Time, bool) { // panics otherwise. 
func (v Value) MustTime() time.Time { vv, ok := v.AsTime() - return panicOnTypeMismatch(v, vv, ok, KindTime) + if !ok || v.k != KindTime { + panic(fmt.Sprintf("expected kind %s, got %s", KindTime, v.k)) + } + return vv } From 8f3cd2e8a0fc8cc65e6b0b58cd361ccbd614346d Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 4 Jan 2024 20:19:48 +0100 Subject: [PATCH 036/104] Define dyn.InvalidValue --- libs/dyn/kind.go | 19 ++++++++++++------- libs/dyn/kind_test.go | 38 ++++++++++++++++++++++++++++++++++++++ libs/dyn/value.go | 7 ++++++- libs/dyn/value_test.go | 6 ++++++ 4 files changed, 62 insertions(+), 8 deletions(-) create mode 100644 libs/dyn/kind_test.go diff --git a/libs/dyn/kind.go b/libs/dyn/kind.go index ba093341e9..8f51c25c66 100644 --- a/libs/dyn/kind.go +++ b/libs/dyn/kind.go @@ -1,6 +1,9 @@ package dyn -import "time" +import ( + "fmt" + "time" +) type Kind int @@ -9,12 +12,12 @@ const ( KindInvalid Kind = iota KindMap KindSequence - KindNil KindString KindBool KindInt KindFloat KindTime + KindNil ) func kindOf(v any) Kind { @@ -23,8 +26,6 @@ func kindOf(v any) Kind { return KindMap case []Value: return KindSequence - case nil: - return KindNil case string: return KindString case bool: @@ -35,6 +36,8 @@ func kindOf(v any) Kind { return KindFloat case time.Time: return KindTime + case nil: + return KindNil default: panic("not handled") } @@ -42,12 +45,12 @@ func kindOf(v any) Kind { func (k Kind) String() string { switch k { + case KindInvalid: + return "invalid" case KindMap: return "map" case KindSequence: return "sequence" - case KindNil: - return "nil" case KindString: return "string" case KindBool: @@ -58,7 +61,9 @@ func (k Kind) String() string { return "float" case KindTime: return "time" + case KindNil: + return "nil" default: - return "invalid" + panic(fmt.Sprintf("invalid kind value: %d", k)) } } diff --git a/libs/dyn/kind_test.go b/libs/dyn/kind_test.go new file mode 100644 index 0000000000..84c90713fb --- /dev/null +++ b/libs/dyn/kind_test.go @@ -0,0 +1,38 @@ +package dyn_test + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" +) + +func TestKindZeroValue(t *testing.T) { + // Assert that the zero value of [dyn.Kind] is the invalid kind. + var k dyn.Kind + assert.Equal(t, dyn.KindInvalid, k) +} + +func TestKindToString(t *testing.T) { + for _, tt := range []struct { + k dyn.Kind + s string + }{ + {dyn.KindInvalid, "invalid"}, + {dyn.KindMap, "map"}, + {dyn.KindSequence, "sequence"}, + {dyn.KindString, "string"}, + {dyn.KindBool, "bool"}, + {dyn.KindInt, "int"}, + {dyn.KindFloat, "float"}, + {dyn.KindTime, "time"}, + {dyn.KindNil, "nil"}, + } { + assert.Equal(t, tt.s, tt.k.String()) + } + + // Panic on unknown kind. + assert.PanicsWithValue(t, "invalid kind value: 100", func() { + _ = dyn.Kind(100).String() + }) +} diff --git a/libs/dyn/value.go b/libs/dyn/value.go index 9ac738f9cb..c719632226 100644 --- a/libs/dyn/value.go +++ b/libs/dyn/value.go @@ -16,7 +16,12 @@ type Value struct { anchor bool } -// NilValue is equal to the zero-value of Value. +// InvalidValue is equal to the zero-value of Value. +var InvalidValue = Value{ + k: KindInvalid, +} + +// NilValue is a convenient constant for a nil value. 
var NilValue = Value{ k: KindNil, } diff --git a/libs/dyn/value_test.go b/libs/dyn/value_test.go index 5fa45f15a5..7c9a9d990e 100644 --- a/libs/dyn/value_test.go +++ b/libs/dyn/value_test.go @@ -7,6 +7,12 @@ import ( "github.com/stretchr/testify/assert" ) +func TestInvalidValue(t *testing.T) { + // Assert that the zero value of [dyn.Value] is the invalid value. + var zero dyn.Value + assert.Equal(t, zero, dyn.InvalidValue) +} + func TestValueIsAnchor(t *testing.T) { var zero dyn.Value assert.False(t, zero.IsAnchor()) From 83fb21c0ec809f25eaf9eedea95b2cfbf4f6767e Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 4 Jan 2024 23:19:29 +0100 Subject: [PATCH 037/104] dyn wip --- libs/dyn/path.go | 8 + libs/dyn/value_set.go | 56 ------- libs/dyn/value_transform.go | 115 --------------- libs/dyn/value_transform_test.go | 1 - libs/dyn/visit.go | 139 ++++++++++++++++++ libs/dyn/visit_get.go | 25 ++++ libs/dyn/visit_get_test.go | 76 ++++++++++ libs/dyn/visit_map.go | 35 +++++ libs/dyn/visit_map_test.go | 78 ++++++++++ libs/dyn/visit_set.go | 24 +++ libs/dyn/visit_set_test.go | 90 ++++++++++++ libs/dyn/{value_set_test.go => visit_test.go} | 0 12 files changed, 475 insertions(+), 172 deletions(-) delete mode 100644 libs/dyn/value_set.go delete mode 100644 libs/dyn/value_transform.go delete mode 100644 libs/dyn/value_transform_test.go create mode 100644 libs/dyn/visit.go create mode 100644 libs/dyn/visit_get.go create mode 100644 libs/dyn/visit_get_test.go create mode 100644 libs/dyn/visit_map.go create mode 100644 libs/dyn/visit_map_test.go create mode 100644 libs/dyn/visit_set.go create mode 100644 libs/dyn/visit_set_test.go rename libs/dyn/{value_set_test.go => visit_test.go} (100%) diff --git a/libs/dyn/path.go b/libs/dyn/path.go index bfd93dad5b..34285de145 100644 --- a/libs/dyn/path.go +++ b/libs/dyn/path.go @@ -10,6 +10,14 @@ type pathComponent struct { index int } +func (c pathComponent) isKey() bool { + return c.key != "" +} + +func (c pathComponent) isIndex() bool { + return c.key == "" +} + // Path represents a path to a value in a [Value] configuration tree. type Path []pathComponent diff --git a/libs/dyn/value_set.go b/libs/dyn/value_set.go deleted file mode 100644 index 4308bb367f..0000000000 --- a/libs/dyn/value_set.go +++ /dev/null @@ -1,56 +0,0 @@ -package dyn - -import ( - "fmt" - "maps" -) - -func (v Value) SetByPath(p Path, value Value) (Value, error) { - return v.set(EmptyPath, p, value) -} - -func (v Value) Set(path string, value Value) (Value, error) { - p, err := NewPathFromString(path) - if err != nil { - return InvalidValue, err - } - return v.set(EmptyPath, p, value) -} - -func (v Value) set(prefix, suffix Path, nv Value) (Value, error) { - if len(suffix) == 0 { - return nv, nil - } - - component := suffix[0] - prefix = prefix.Append(component) - suffix = suffix[1:] - - // Resolve first component. - switch v.k { - case KindMap: - // Expect a key to be set if this is a map. - if len(component.key) == 0 { - return InvalidValue, fmt.Errorf("expected a key index at %s", prefix) - } - - // Recurse on set to get a new map entry. - m := v.MustMap() - nv, err := m[component.key].set(prefix, suffix, nv) - if err != nil { - return InvalidValue, err - } - - // Return an updated map value. 
- m = maps.Clone(m) - m[component.key] = nv - return Value{ - v: m, - k: KindMap, - l: v.l, - }, nil - - default: - return InvalidValue, fmt.Errorf("expected a map under %s", prefix) - } -} diff --git a/libs/dyn/value_transform.go b/libs/dyn/value_transform.go deleted file mode 100644 index 5570b973ad..0000000000 --- a/libs/dyn/value_transform.go +++ /dev/null @@ -1,115 +0,0 @@ -package dyn - -import ( - "errors" - "fmt" - "maps" - "slices" -) - -type noSuchKeyError struct { - p Path -} - -func (e noSuchKeyError) Error() string { - return fmt.Sprintf("no such key: %s", e.p) -} - -func IsNoSuchKeyError(err error) bool { - var target noSuchKeyError - return errors.As(err, &target) -} - -// func (v Value) TransformByPath(p Path, value Value) (Value, error) { -// return v.transform(EmptyPath, p, value) -// } - -func (v Value) Transform(path string, fn func(Value) (Value, error)) (Value, error) { - p, err := NewPathFromString(path) - if err != nil { - return InvalidValue, err - } - return v.transform(EmptyPath, p, fn) -} - -func (v Value) transform(prefix, suffix Path, fn func(Value) (Value, error)) (Value, error) { - if len(suffix) == 0 { - return fn(v) - } - - component := suffix[0] - prefix = prefix.Append(component) - suffix = suffix[1:] - - // Resolve first component. - switch v.k { - case KindMap: - // Expect a key to be set if this is a map. - if len(component.key) == 0 { - return InvalidValue, fmt.Errorf("expected a key index at %s", prefix) - } - - // Lookup current value in the map. - m := v.MustMap() - ev, ok := m[component.key] - if !ok { - return InvalidValue, noSuchKeyError{prefix} - } - - // Recursively transform the value. - nv, err := ev.transform(prefix, suffix, fn) - if err != nil { - return InvalidValue, err - } - - // Return the original value if the value hasn't changed. - if nv == ev { - return v, nil - } - - // Return an updated map value. - m = maps.Clone(m) - m[component.key] = nv - return Value{ - v: m, - k: KindMap, - l: v.l, - }, nil - - case KindSequence: - // Expect an index to be set if this is a sequence. - if len(component.key) > 0 { - return InvalidValue, fmt.Errorf("expected an index at %s", prefix) - } - - // Lookup current value in the sequence. - s := v.MustSequence() - if component.index < 0 || component.index >= len(s) { - return InvalidValue, fmt.Errorf("index out of bounds under %s", prefix) - } - - // Recursively transform the value. - ev := s[component.index] - nv, err := ev.transform(prefix, suffix, fn) - if err != nil { - return InvalidValue, err - } - - // Return the original value if the value hasn't changed. - if nv == ev { - return v, nil - } - - // Return an updated sequence value. 
- s = slices.Clone(s) - s[component.index] = nv - return Value{ - v: s, - k: KindSequence, - l: v.l, - }, nil - - default: - return InvalidValue, fmt.Errorf("expected a map or sequence at %s", prefix) - } -} diff --git a/libs/dyn/value_transform_test.go b/libs/dyn/value_transform_test.go deleted file mode 100644 index 8f715b8619..0000000000 --- a/libs/dyn/value_transform_test.go +++ /dev/null @@ -1 +0,0 @@ -package dyn_test diff --git a/libs/dyn/visit.go b/libs/dyn/visit.go new file mode 100644 index 0000000000..f74ef48938 --- /dev/null +++ b/libs/dyn/visit.go @@ -0,0 +1,139 @@ +package dyn + +import ( + "errors" + "fmt" + "maps" + "slices" +) + +type noSuchKeyError struct { + p Path +} + +func (e noSuchKeyError) Error() string { + return fmt.Sprintf("key not found at %q", e.p) +} + +func IsNoSuchKeyError(err error) bool { + var target noSuchKeyError + return errors.As(err, &target) +} + +type indexOutOfBoundsError struct { + p Path +} + +func (e indexOutOfBoundsError) Error() string { + return fmt.Sprintf("index out of bounds at %q", e.p) +} + +func IsIndexOutOfBoundsError(err error) bool { + var target indexOutOfBoundsError + return errors.As(err, &target) +} + +type visitOptions struct { + // The function to apply to the value once found. + // + // If this function returns the same value as it receives as argument, + // the original visit function call returns the original value unmodified. + // + // If this function returns a new value, the original visit function call + // returns a value with all the intermediate values updated. + // + // If this function returns an error, the original visit function call + // returns this error and the value is left unmodified. + fn func(Value) (Value, error) + + // If set, tolerate the absence of the last component in the path. + // This option is needed to set a key in a map that is not yet present. + allowMissingKeyInMap bool +} + +func visit(v Value, prefix, suffix Path, opts visitOptions) (Value, error) { + if len(suffix) == 0 { + return opts.fn(v) + } + + // Initialize prefix if it is empty. + // It is pre-allocated to its maximum size to avoid additional allocations. + if len(prefix) == 0 { + prefix = make(Path, 0, len(suffix)) + } + + component := suffix[0] + prefix = prefix.Append(component) + suffix = suffix[1:] + + switch { + case component.isKey(): + // Expect a map to be set if this is a key. + m, ok := v.AsMap() + if !ok { + return InvalidValue, fmt.Errorf("expected a map to index %q, found %s", prefix, v.Kind()) + } + + // Lookup current value in the map. + ev, ok := m[component.key] + if !ok && !opts.allowMissingKeyInMap { + return InvalidValue, noSuchKeyError{prefix} + } + + // Recursively transform the value. + nv, err := visit(ev, prefix, suffix, opts) + if err != nil { + return InvalidValue, err + } + + // Return the original value if the value hasn't changed. + if nv == ev { + return v, nil + } + + // Return an updated map value. + m = maps.Clone(m) + m[component.key] = nv + return Value{ + v: m, + k: KindMap, + l: v.l, + }, nil + + case component.isIndex(): + // Expect a sequence to be set if this is an index. + s, ok := v.AsSequence() + if !ok { + return InvalidValue, fmt.Errorf("expected a sequence to index %q, found %s", prefix, v.Kind()) + } + + // Lookup current value in the sequence. + if component.index < 0 || component.index >= len(s) { + return InvalidValue, indexOutOfBoundsError{prefix} + } + + // Recursively transform the value. 
+ ev := s[component.index] + nv, err := visit(ev, prefix, suffix, opts) + if err != nil { + return InvalidValue, err + } + + // Return the original value if the value hasn't changed. + if nv == ev { + return v, nil + } + + // Return an updated sequence value. + s = slices.Clone(s) + s[component.index] = nv + return Value{ + v: s, + k: KindSequence, + l: v.l, + }, nil + + default: + panic("invalid component") + } +} diff --git a/libs/dyn/visit_get.go b/libs/dyn/visit_get.go new file mode 100644 index 0000000000..a0f848cddb --- /dev/null +++ b/libs/dyn/visit_get.go @@ -0,0 +1,25 @@ +package dyn + +// Get returns the value inside the specified value at the specified path. +// It is identical to [GetByPath], except that it takes a string path instead of a [Path]. +func Get(v Value, path string) (Value, error) { + p, err := NewPathFromString(path) + if err != nil { + return InvalidValue, err + } + return GetByPath(v, p) +} + +// GetByPath returns the value inside the specified value at the specified path. +// If the path doesn't exist, it returns InvalidValue and an error. +func GetByPath(v Value, p Path) (Value, error) { + out := InvalidValue + _, err := visit(v, EmptyPath, p, visitOptions{ + fn: func(ev Value) (Value, error) { + // Capture the value argument to return it. + out = ev + return ev, nil + }, + }) + return out, err +} diff --git a/libs/dyn/visit_get_test.go b/libs/dyn/visit_get_test.go new file mode 100644 index 0000000000..22dce0858b --- /dev/null +++ b/libs/dyn/visit_get_test.go @@ -0,0 +1,76 @@ +package dyn_test + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" +) + +func TestGetWithEmptyPath(t *testing.T) { + // An empty path means to return the value itself. + vin := dyn.V(42) + vout, err := dyn.GetByPath(vin, dyn.NewPath()) + assert.NoError(t, err) + assert.Equal(t, vin, vout) +} + +func TestGetOnNilValue(t *testing.T) { + var err error + _, err = dyn.GetByPath(dyn.NilValue, dyn.NewPath(dyn.Key("foo"))) + assert.ErrorContains(t, err, `expected a map to index "foo", found nil`) + _, err = dyn.GetByPath(dyn.NilValue, dyn.NewPath(dyn.Index(42))) + assert.ErrorContains(t, err, `expected a sequence to index "[42]", found nil`) +} + +func TestGetOnMap(t *testing.T) { + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V(42), + "bar": dyn.V(43), + }) + + var err error + + _, err = dyn.GetByPath(vin, dyn.NewPath(dyn.Index(42))) + assert.ErrorContains(t, err, `expected a sequence to index "[42]", found map`) + + _, err = dyn.GetByPath(vin, dyn.NewPath(dyn.Key("baz"))) + assert.True(t, dyn.IsNoSuchKeyError(err)) + assert.ErrorContains(t, err, `key not found at "baz"`) + + vfoo, err := dyn.GetByPath(vin, dyn.NewPath(dyn.Key("foo"))) + assert.NoError(t, err) + assert.Equal(t, dyn.V(42), vfoo) + + vbar, err := dyn.GetByPath(vin, dyn.NewPath(dyn.Key("bar"))) + assert.NoError(t, err) + assert.Equal(t, dyn.V(43), vbar) +} + +func TestGetOnSequence(t *testing.T) { + vin := dyn.V([]dyn.Value{ + dyn.V(42), + dyn.V(43), + }) + + var err error + + _, err = dyn.GetByPath(vin, dyn.NewPath(dyn.Key("foo"))) + assert.ErrorContains(t, err, `expected a map to index "foo", found sequence`) + + _, err = dyn.GetByPath(vin, dyn.NewPath(dyn.Index(-1))) + assert.True(t, dyn.IsIndexOutOfBoundsError(err)) + assert.ErrorContains(t, err, `index out of bounds at "[-1]"`) + + _, err = dyn.GetByPath(vin, dyn.NewPath(dyn.Index(2))) + assert.True(t, dyn.IsIndexOutOfBoundsError(err)) + assert.ErrorContains(t, err, `index out of bounds at "[2]"`) + + v0, err := 
dyn.GetByPath(vin, dyn.NewPath(dyn.Index(0))) + assert.NoError(t, err) + assert.Equal(t, dyn.V(42), v0) + + v1, err := dyn.GetByPath(vin, dyn.NewPath(dyn.Index(1))) + assert.NoError(t, err) + assert.Equal(t, dyn.V(43), v1) +} diff --git a/libs/dyn/visit_map.go b/libs/dyn/visit_map.go new file mode 100644 index 0000000000..624383a402 --- /dev/null +++ b/libs/dyn/visit_map.go @@ -0,0 +1,35 @@ +package dyn + +// MapFunc is a function that maps a value to another value. +type MapFunc func(Value) (Value, error) + +// Map applies the given function to the value at the specified path in the specified value. +// It is identical to [MapByPath], except that it takes a string path instead of a [Path]. +func Map(v Value, path string, fn MapFunc) (Value, error) { + p, err := NewPathFromString(path) + if err != nil { + return InvalidValue, err + } + return MapByPath(v, p, fn) +} + +// Map applies the given function to the value at the specified path in the specified value. +// If successful, it returns the new value with all intermediate values copied and updated. +// +// If the path contains a key that doesn't exist, or an index that is out of bounds, +// it returns the original value and no error. This is because setting a value at a path +// that doesn't exist is a no-op. +// +// If the path is invalid for the given value, it returns InvalidValue and an error. +func MapByPath(v Value, p Path, fn MapFunc) (Value, error) { + nv, err := visit(v, EmptyPath, p, visitOptions{ + fn: fn, + }) + + // Check for success. + if err == nil || IsNoSuchKeyError(err) || IsIndexOutOfBoundsError(err) { + return nv, nil + } + + return nv, err +} diff --git a/libs/dyn/visit_map_test.go b/libs/dyn/visit_map_test.go new file mode 100644 index 0000000000..050ac12a38 --- /dev/null +++ b/libs/dyn/visit_map_test.go @@ -0,0 +1,78 @@ +package dyn_test + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" +) + +func TestMapWithEmptyPath(t *testing.T) { + // An empty path means to return the value itself. + vin := dyn.V(42) + vout, err := dyn.MapByPath(dyn.InvalidValue, dyn.EmptyPath, func(v dyn.Value) (dyn.Value, error) { + return vin, nil + }) + assert.NoError(t, err) + assert.Equal(t, vin, vout) +} + +func TestMapOnNilValue(t *testing.T) { + var err error + _, err = dyn.MapByPath(dyn.NilValue, dyn.NewPath(dyn.Key("foo")), nil) + assert.ErrorContains(t, err, `expected a map to index "foo", found nil`) + _, err = dyn.MapByPath(dyn.NilValue, dyn.NewPath(dyn.Index(42)), nil) + assert.ErrorContains(t, err, `expected a sequence to index "[42]", found nil`) +} + +func TestMapFuncOnMap(t *testing.T) { + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V(42), + "bar": dyn.V(43), + }) + + var err error + + _, err = dyn.MapByPath(vin, dyn.NewPath(dyn.Index(42)), nil) + assert.ErrorContains(t, err, `expected a sequence to index "[42]", found map`) + + // A key that does not exist is not an error. + vout, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Key("baz")), nil) + assert.NoError(t, err) + assert.Equal(t, vin, vout) + + // Note: in the test cases below we implicitly test that the original + // value is not modified as we repeatedly set values on it. 
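	// (Editor's aside, illustrative and not part of this patch: the contract exercised
	// here is that MapByPath only fails when the path is structurally invalid for the
	// value, such as indexing a map with an integer, while a key or index that simply
	// does not exist is treated as a no-op. A minimal sketch:
	//
	//	v := dyn.V(map[string]dyn.Value{"foo": dyn.V(42)})
	//	out, err := dyn.Map(v, "bar", func(v dyn.Value) (dyn.Value, error) {
	//		return dyn.V(0), nil
	//	})
	//	// err is nil, out equals v, and the callback never runs.
	// )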
+ + var m mock.Mock + + vfoo, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Key("foo")), func(v dyn.Value) (dyn.Value, error) { + return dyn.V(44), nil + }) + + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": 44, + "bar": 43, + }, vfoo.AsAny()) + + vbar, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Key("bar")), func(v dyn.Value) (dyn.Value, error) { + return dyn.V(45), nil + }) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": 42, + "bar": 45, + }, vbar.AsAny()) + + vbaz, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Key("baz")), func(v dyn.Value) (dyn.Value, error) { + return dyn.V(46), nil + }) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": 42, + "bar": 43, + "baz": 46, + }, vbaz.AsAny()) + +} diff --git a/libs/dyn/visit_set.go b/libs/dyn/visit_set.go new file mode 100644 index 0000000000..fdbf41c2cf --- /dev/null +++ b/libs/dyn/visit_set.go @@ -0,0 +1,24 @@ +package dyn + +// Set assigns a new value at the specified path in the specified value. +// It is identical to [SetByPath], except that it takes a string path instead of a [Path]. +func Set(v Value, path string, nv Value) (Value, error) { + p, err := NewPathFromString(path) + if err != nil { + return InvalidValue, err + } + return SetByPath(v, p, nv) +} + +// SetByPath assigns the given value at the specified path in the specified value. +// If successful, it returns the new value with all intermediate values copied and updated. +// If the path doesn't exist, it returns InvalidValue and an error. +func SetByPath(v Value, p Path, nv Value) (Value, error) { + return visit(v, EmptyPath, p, visitOptions{ + fn: func(_ Value) (Value, error) { + // Return the incoming value to set it. + return nv, nil + }, + allowMissingKeyInMap: true, + }) +} diff --git a/libs/dyn/visit_set_test.go b/libs/dyn/visit_set_test.go new file mode 100644 index 0000000000..b384715875 --- /dev/null +++ b/libs/dyn/visit_set_test.go @@ -0,0 +1,90 @@ +package dyn_test + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" +) + +func TestSetWithEmptyPath(t *testing.T) { + // An empty path means to return the value itself. + vin := dyn.V(42) + vout, err := dyn.SetByPath(dyn.InvalidValue, dyn.EmptyPath, vin) + assert.NoError(t, err) + assert.Equal(t, vin, vout) +} + +func TestSetOnNilValue(t *testing.T) { + var err error + _, err = dyn.SetByPath(dyn.NilValue, dyn.NewPath(dyn.Key("foo")), dyn.V(42)) + assert.ErrorContains(t, err, `expected a map to index "foo", found nil`) + _, err = dyn.SetByPath(dyn.NilValue, dyn.NewPath(dyn.Index(42)), dyn.V(42)) + assert.ErrorContains(t, err, `expected a sequence to index "[42]", found nil`) +} + +func TestSetOnMap(t *testing.T) { + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V(42), + "bar": dyn.V(43), + }) + + var err error + + _, err = dyn.SetByPath(vin, dyn.NewPath(dyn.Index(42)), dyn.V(42)) + assert.ErrorContains(t, err, `expected a sequence to index "[42]", found map`) + + // Note: in the test cases below we implicitly test that the original + // value is not modified as we repeatedly set values on it. 
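	// (Editor's aside, illustrative: note the asymmetry with MapByPath above. Because
	// SetByPath passes allowMissingKeyInMap to visit, assigning to a map key that does
	// not exist yet creates it, while an out-of-bounds sequence index remains an error.
	// A hedged sketch:
	//
	//	v := dyn.V(map[string]dyn.Value{"foo": dyn.V(42)})
	//	out, _ := dyn.Set(v, "baz", dyn.V(1))
	//	// out carries both "foo" and "baz"; v itself is left untouched.
	// )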
+ + vfoo, err := dyn.SetByPath(vin, dyn.NewPath(dyn.Key("foo")), dyn.V(44)) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": 44, + "bar": 43, + }, vfoo.AsAny()) + + vbar, err := dyn.SetByPath(vin, dyn.NewPath(dyn.Key("bar")), dyn.V(45)) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": 42, + "bar": 45, + }, vbar.AsAny()) + + vbaz, err := dyn.SetByPath(vin, dyn.NewPath(dyn.Key("baz")), dyn.V(46)) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": 42, + "bar": 43, + "baz": 46, + }, vbaz.AsAny()) +} + +func TestSetOnSequence(t *testing.T) { + vin := dyn.V([]dyn.Value{ + dyn.V(42), + dyn.V(43), + }) + + var err error + + _, err = dyn.SetByPath(vin, dyn.NewPath(dyn.Key("foo")), dyn.V(42)) + assert.ErrorContains(t, err, `expected a map to index "foo", found sequence`) + + // It is not allowed to set a value at an index that is out of bounds. + _, err = dyn.SetByPath(vin, dyn.NewPath(dyn.Index(-1)), dyn.V(42)) + assert.True(t, dyn.IsIndexOutOfBoundsError(err)) + _, err = dyn.SetByPath(vin, dyn.NewPath(dyn.Index(2)), dyn.V(42)) + assert.True(t, dyn.IsIndexOutOfBoundsError(err)) + + // Note: in the test cases below we implicitly test that the original + // value is not modified as we repeatedly set values on it. + + v0, err := dyn.SetByPath(vin, dyn.NewPath(dyn.Index(0)), dyn.V(44)) + assert.NoError(t, err) + assert.Equal(t, []any{44, 43}, v0.AsAny()) + + v1, err := dyn.SetByPath(vin, dyn.NewPath(dyn.Index(1)), dyn.V(45)) + assert.NoError(t, err) + assert.Equal(t, []any{42, 45}, v1.AsAny()) +} diff --git a/libs/dyn/value_set_test.go b/libs/dyn/visit_test.go similarity index 100% rename from libs/dyn/value_set_test.go rename to libs/dyn/visit_test.go From d80a8ce1d46d12b02702e2b86a01c8ad096208f2 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 4 Jan 2024 23:28:30 +0100 Subject: [PATCH 038/104] More dyn work --- libs/dyn/visit_map.go | 7 ++++- libs/dyn/visit_map_test.go | 59 +++++++++++++++++++++++++++++++------- 2 files changed, 54 insertions(+), 12 deletions(-) diff --git a/libs/dyn/visit_map.go b/libs/dyn/visit_map.go index 624383a402..e10f8ca4fc 100644 --- a/libs/dyn/visit_map.go +++ b/libs/dyn/visit_map.go @@ -27,9 +27,14 @@ func MapByPath(v Value, p Path, fn MapFunc) (Value, error) { }) // Check for success. - if err == nil || IsNoSuchKeyError(err) || IsIndexOutOfBoundsError(err) { + if err == nil { return nv, nil } + // Return original value if a key or index is missing. + if IsNoSuchKeyError(err) || IsIndexOutOfBoundsError(err) { + return v, nil + } + return nv, err } diff --git a/libs/dyn/visit_map_test.go b/libs/dyn/visit_map_test.go index 050ac12a38..7e66ba8edb 100644 --- a/libs/dyn/visit_map_test.go +++ b/libs/dyn/visit_map_test.go @@ -1,6 +1,7 @@ package dyn_test import ( + "fmt" "testing" "github.com/databricks/cli/libs/dyn" @@ -43,13 +44,10 @@ func TestMapFuncOnMap(t *testing.T) { // Note: in the test cases below we implicitly test that the original // value is not modified as we repeatedly set values on it. 
- - var m mock.Mock - vfoo, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Key("foo")), func(v dyn.Value) (dyn.Value, error) { + assert.Equal(t, dyn.V(42), v) return dyn.V(44), nil }) - assert.NoError(t, err) assert.Equal(t, map[string]any{ "foo": 44, @@ -57,6 +55,7 @@ func TestMapFuncOnMap(t *testing.T) { }, vfoo.AsAny()) vbar, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Key("bar")), func(v dyn.Value) (dyn.Value, error) { + assert.Equal(t, dyn.V(43), v) return dyn.V(45), nil }) assert.NoError(t, err) @@ -65,14 +64,52 @@ func TestMapFuncOnMap(t *testing.T) { "bar": 45, }, vbar.AsAny()) - vbaz, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Key("baz")), func(v dyn.Value) (dyn.Value, error) { - return dyn.V(46), nil + // Return error from map function. + var ref = fmt.Errorf("error") + verr, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Key("foo")), func(v dyn.Value) (dyn.Value, error) { + return dyn.InvalidValue, ref }) + assert.Equal(t, dyn.InvalidValue, verr) + assert.ErrorIs(t, err, ref) +} + +func TestMapFuncOnSequence(t *testing.T) { + vin := dyn.V([]dyn.Value{ + dyn.V(42), + dyn.V(43), + }) + + var err error + + _, err = dyn.MapByPath(vin, dyn.NewPath(dyn.Key("foo")), nil) + assert.ErrorContains(t, err, `expected a map to index "foo", found sequence`) + + // An index that does not exist is not an error. + vout, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Index(2)), nil) assert.NoError(t, err) - assert.Equal(t, map[string]any{ - "foo": 42, - "bar": 43, - "baz": 46, - }, vbaz.AsAny()) + assert.Equal(t, vin, vout) + + // Note: in the test cases below we implicitly test that the original + // value is not modified as we repeatedly set values on it. + v0, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Index(0)), func(v dyn.Value) (dyn.Value, error) { + assert.Equal(t, dyn.V(42), v) + return dyn.V(44), nil + }) + assert.NoError(t, err) + assert.Equal(t, []any{44, 43}, v0.AsAny()) + + v1, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Index(1)), func(v dyn.Value) (dyn.Value, error) { + assert.Equal(t, dyn.V(43), v) + return dyn.V(45), nil + }) + assert.NoError(t, err) + assert.Equal(t, []any{42, 45}, v1.AsAny()) + // Return error from map function. + var ref = fmt.Errorf("error") + verr, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Index(0)), func(v dyn.Value) (dyn.Value, error) { + return dyn.InvalidValue, ref + }) + assert.Equal(t, dyn.InvalidValue, verr) + assert.ErrorIs(t, err, ref) } From b825ccd6721dd126a331e7c7a6552066663c6736 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 4 Jan 2024 23:58:10 +0100 Subject: [PATCH 039/104] More dyn work --- libs/dyn/value.go | 25 +++++++++++++++++++++++++ libs/dyn/visit.go | 4 ++-- libs/dyn/visit_map.go | 37 +++++++++++++++++++++++++++++++++++++ 3 files changed, 64 insertions(+), 2 deletions(-) diff --git a/libs/dyn/value.go b/libs/dyn/value.go index e4d7f8056b..d46c9f3e0c 100644 --- a/libs/dyn/value.go +++ b/libs/dyn/value.go @@ -139,3 +139,28 @@ func (v Value) MarkAnchor() Value { func (v Value) IsAnchor() bool { return v.anchor } + +// eq is an internal only method that compares two values. +// It is used to determine if a value has changed during a visit. +// We need a custom implementation because maps and slices +// cannot be compared with the regular == operator. +func (v Value) eq(w Value) bool { + if v.k != w.k || v.l != w.l { + return false + } + + switch v.k { + case KindMap: + // Compare pointers to the underlying map. + // This is safe because we don't allow maps to be mutated. 
+ return &v.v == &w.v + case KindSequence: + // Compare pointers to the underlying slice and slice length. + // This is safe because we don't allow slices to be mutated. + vs := v.v.([]Value) + ws := w.v.([]Value) + return &vs[0] == &ws[0] && len(vs) == len(ws) + default: + return v.v == w.v + } +} diff --git a/libs/dyn/visit.go b/libs/dyn/visit.go index f74ef48938..077fd51c5a 100644 --- a/libs/dyn/visit.go +++ b/libs/dyn/visit.go @@ -87,7 +87,7 @@ func visit(v Value, prefix, suffix Path, opts visitOptions) (Value, error) { } // Return the original value if the value hasn't changed. - if nv == ev { + if nv.eq(ev) { return v, nil } @@ -120,7 +120,7 @@ func visit(v Value, prefix, suffix Path, opts visitOptions) (Value, error) { } // Return the original value if the value hasn't changed. - if nv == ev { + if nv.eq(ev) { return v, nil } diff --git a/libs/dyn/visit_map.go b/libs/dyn/visit_map.go index e10f8ca4fc..ed89baa4a3 100644 --- a/libs/dyn/visit_map.go +++ b/libs/dyn/visit_map.go @@ -1,8 +1,45 @@ package dyn +import ( + "fmt" + "maps" + "slices" +) + // MapFunc is a function that maps a value to another value. type MapFunc func(Value) (Value, error) +// Foreach returns a [MapFunc] that applies the specified [MapFunc] to each +// value in a map or sequence and returns the new map or sequence. +func Foreach(fn MapFunc) MapFunc { + return func(v Value) (Value, error) { + switch v.Kind() { + case KindMap: + m := maps.Clone(v.MustMap()) + for key, value := range m { + var err error + m[key], err = fn(value) + if err != nil { + return InvalidValue, err + } + } + return NewValue(m, v.Location()), nil + case KindSequence: + s := slices.Clone(v.MustSequence()) + for i, value := range s { + var err error + s[i], err = fn(value) + if err != nil { + return InvalidValue, err + } + } + return NewValue(s, v.Location()), nil + default: + return InvalidValue, fmt.Errorf("expected a map or sequence, found %s", v.Kind()) + } + } +} + // Map applies the given function to the value at the specified path in the specified value. // It is identical to [MapByPath], except that it takes a string path instead of a [Path]. func Map(v Value, path string, fn MapFunc) (Value, error) { From fab40b137d08f485504ab2f2e95a2509f9bd1643 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 5 Jan 2024 00:01:29 +0100 Subject: [PATCH 040/104] Update mutators to use new dyn functionality --- bundle/config/mutator/environments_compat.go | 2 +- bundle/config/mutator/merge_job_clusters.go | 33 ++--------------- bundle/config/mutator/merge_job_tasks.go | 36 +++---------------- .../config/mutator/merge_pipeline_clusters.go | 35 +++--------------- bundle/config/mutator/rewrite_sync_paths.go | 2 +- 5 files changed, 13 insertions(+), 95 deletions(-) diff --git a/bundle/config/mutator/environments_compat.go b/bundle/config/mutator/environments_compat.go index f6d04d4928..fe45ba47cf 100644 --- a/bundle/config/mutator/environments_compat.go +++ b/bundle/config/mutator/environments_compat.go @@ -41,7 +41,7 @@ func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) err // Rewrite "environments" to "targets". 
if environments != dyn.NilValue && targets == dyn.NilValue { - return v.SetKey("targets", environments), nil + return dyn.Set(v, "targets", environments) } return v, nil diff --git a/bundle/config/mutator/merge_job_clusters.go b/bundle/config/mutator/merge_job_clusters.go index 21dfc9cbb9..3f482821f1 100644 --- a/bundle/config/mutator/merge_job_clusters.go +++ b/bundle/config/mutator/merge_job_clusters.go @@ -71,41 +71,14 @@ func (m *mergeJobClusters) mergeJobClusters(v dyn.Value) (dyn.Value, error) { return dyn.NewValue(out, v.Location()), nil } -func (m *mergeJobClusters) foreachJob(v dyn.Value) (dyn.Value, error) { - jobs, ok := v.AsMap() - if !ok { - return v, nil - } - - out := make(map[string]dyn.Value) - for key, job := range jobs { - var err error - out[key], err = job.Transform("job_clusters", m.mergeJobClusters) - if err != nil { - return v, err - } - } - - return dyn.NewValue(out, v.Location()), nil -} - func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) error { return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { if v == dyn.NilValue { return v, nil } - nv, err := v.Transform("resources.jobs", m.foreachJob) - - // It is not a problem if the pipelines key is not set. - if dyn.IsNoSuchKeyError(err) { - return v, nil - } - - if err != nil { - return v, err - } - - return nv, nil + return dyn.Map(v, "resources.jobs", dyn.Foreach(func(job dyn.Value) (dyn.Value, error) { + return dyn.Map(job, "job_clusters", m.mergeJobClusters) + })) }) } diff --git a/bundle/config/mutator/merge_job_tasks.go b/bundle/config/mutator/merge_job_tasks.go index f094bc65bc..07ca27e8f4 100644 --- a/bundle/config/mutator/merge_job_tasks.go +++ b/bundle/config/mutator/merge_job_tasks.go @@ -62,31 +62,12 @@ func (m *mergeJobTasks) mergeJobTasks(v dyn.Value) (dyn.Value, error) { seen[key] = nv } - // Gather resulting clusters in natural order. + // Gather resulting tasks in natural order. out := make([]dyn.Value, 0, len(keys)) for _, key := range keys { out = append(out, seen[key]) } - return dyn.NewValue(out, v.Location()), nil - -} - -func (m *mergeJobTasks) foreachJob(v dyn.Value) (dyn.Value, error) { - jobs, ok := v.AsMap() - if !ok { - return v, nil - } - - out := make(map[string]dyn.Value) - for key, job := range jobs { - var err error - out[key], err = job.Transform("tasks", m.mergeJobTasks) - if err != nil { - return v, err - } - } - return dyn.NewValue(out, v.Location()), nil } @@ -96,17 +77,8 @@ func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) error { return v, nil } - nv, err := v.Transform("resources.jobs", m.foreachJob) - - // It is not a problem if the pipelines key is not set. - if dyn.IsNoSuchKeyError(err) { - return v, nil - } - - if err != nil { - return v, err - } - - return nv, nil + return dyn.Map(v, "resources.jobs", dyn.Foreach(func(job dyn.Value) (dyn.Value, error) { + return dyn.Map(job, "tasks", m.mergeJobTasks) + })) }) } diff --git a/bundle/config/mutator/merge_pipeline_clusters.go b/bundle/config/mutator/merge_pipeline_clusters.go index 2d384078e2..5dc192f3f2 100644 --- a/bundle/config/mutator/merge_pipeline_clusters.go +++ b/bundle/config/mutator/merge_pipeline_clusters.go @@ -75,7 +75,7 @@ func (m *mergePipelineClusters) mergeClustersForPipeline(v dyn.Value) (dyn.Value out := make([]dyn.Value, 0, len(labels)) for _, label := range labels { // Overwrite the label with the normalized version. 
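	// (Editor's aside, illustrative and not part of this change: the pattern these
	// mutators converge on is dyn.Map over a resource collection composed with
	// dyn.Foreach, so the per-resource logic only ever sees a single job or pipeline.
	// A hedged sketch, using a made-up "description" field purely for illustration:
	//
	//	nv, err := dyn.Map(root, "resources.jobs", dyn.Foreach(func(job dyn.Value) (dyn.Value, error) {
	//		return dyn.Set(job, "description", dyn.V("managed by bundle"))
	//	}))
	// )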
- nv, err := seen[label].Set("label", dyn.V(label)) + nv, err := dyn.Set(seen[label], "label", dyn.V(label)) if err != nil { return dyn.InvalidValue, err } @@ -85,41 +85,14 @@ func (m *mergePipelineClusters) mergeClustersForPipeline(v dyn.Value) (dyn.Value return dyn.NewValue(out, v.Location()), nil } -func (m *mergePipelineClusters) foreachPipeline(v dyn.Value) (dyn.Value, error) { - pipelines, ok := v.AsMap() - if !ok { - return v, nil - } - - out := make(map[string]dyn.Value) - for key, pipeline := range pipelines { - var err error - out[key], err = pipeline.Transform("clusters", m.mergeClustersForPipeline) - if err != nil { - return v, err - } - } - - return dyn.NewValue(out, v.Location()), nil -} - func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) error { return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { if v == dyn.NilValue { return v, nil } - nv, err := v.Transform("resources.pipelines", m.foreachPipeline) - - // It is not a problem if the pipelines key is not set. - if dyn.IsNoSuchKeyError(err) { - return v, nil - } - - if err != nil { - return v, err - } - - return nv, nil + return dyn.Map(v, "resources.pipelines", dyn.Foreach(func(pipeline dyn.Value) (dyn.Value, error) { + return dyn.Map(pipeline, "clusters", m.mergeClustersForPipeline) + })) }) } diff --git a/bundle/config/mutator/rewrite_sync_paths.go b/bundle/config/mutator/rewrite_sync_paths.go index f218a0419c..e20df5f3cd 100644 --- a/bundle/config/mutator/rewrite_sync_paths.go +++ b/bundle/config/mutator/rewrite_sync_paths.go @@ -77,7 +77,7 @@ func (m *rewriteSyncPaths) fn(root string) func(c dyn.Value) (dyn.Value, error) } // Then replace the sync object with the new one - return c.SetKey("sync", dyn.NewValue(out, sync.Location())), nil + return dyn.Set(c, "sync", dyn.NewValue(out, sync.Location())) } } From 2ab806507bcf36be1b4040948f3571530dd72fc3 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 10 Jan 2024 10:29:28 +0100 Subject: [PATCH 041/104] wip --- bundle/config/git.go | 4 +- bundle/config/root.go | 255 +++++++++++++---------------------- bundle/mutator.go | 17 +++ bundle/tests/run_as_test.go | 30 +++-- libs/dyn/convert/to_typed.go | 2 + 5 files changed, 134 insertions(+), 174 deletions(-) diff --git a/bundle/config/git.go b/bundle/config/git.go index 58a5d54d2b..fdb86a9015 100644 --- a/bundle/config/git.go +++ b/bundle/config/git.go @@ -9,8 +9,8 @@ type Git struct { BundleRootPath string `json:"bundle_root_path,omitempty" bundle:"readonly"` // Inferred is set to true if the Git details were inferred and weren't set explicitly - Inferred bool `json:"-" bundle:"readonly"` + Inferred bool `json:"inferred" bundle:"readonly"` // The actual branch according to Git (may be different from the configured branch) - ActualBranch string `json:"-" bundle:"readonly"` + ActualBranch string `json:"actual_branch" bundle:"readonly"` } diff --git a/bundle/config/root.go b/bundle/config/root.go index 118ae003c2..477fd45888 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -101,11 +101,13 @@ func Load(path string) (*Root, error) { r.Path = filepath.Dir(path) // r.SetConfigFilePath(path) - // _, err = r.Resources.VerifyUniqueResourceIdentifiers() + r.ConfigureConfigFilePath() + + _, err = r.Resources.VerifyUniqueResourceIdentifiers() return &r, err } -func (r *Root) initializeValue() { +func (r *Root) initializeDynamicValue() { // Many test cases initialize a config as a Go struct literal. 
// The value will be invalid and we need to populate it from the typed configuration. if r.value.IsValid() { @@ -145,7 +147,7 @@ func (r *Root) toTyped(v dyn.Value) error { } func (r *Root) Mutate(fn func(dyn.Value) (dyn.Value, error)) error { - r.initializeValue() + r.initializeDynamicValue() nv, err := fn(r.value) if err != nil { return err @@ -163,7 +165,7 @@ func (r *Root) Mutate(fn func(dyn.Value) (dyn.Value, error)) error { } func (r *Root) MarkMutatorEntry() { - r.initializeValue() + r.initializeDynamicValue() r.depth++ // If we are entering a mutator at depth 1, we need to convert @@ -255,106 +257,123 @@ func (r *Root) Merge(other *Root) error { } // Merge dynamic configuration values. - nv, err := merge.Merge(r.value, other.value) - if err != nil { - return err - } + return r.Mutate(func(root dyn.Value) (dyn.Value, error) { + return merge.Merge(root, other.value) + }) +} - r.value = nv +func mergeField(rv, ov dyn.Value, name string) (dyn.Value, error) { + path := dyn.NewPath(dyn.Key(name)) + reference, _ := dyn.GetByPath(rv, path) + override, _ := dyn.GetByPath(ov, path) - // Convert normalized configuration tree to typed configuration. - err = r.toTyped(r.value) - if err != nil { - panic(err) + // Merge the override into the reference. + var out dyn.Value + var err error + if reference.IsValid() && override.IsValid() { + out, err = merge.Merge(reference, override) + if err != nil { + return dyn.InvalidValue, err + } + } else if reference.IsValid() { + out = reference + } else if override.IsValid() { + out = override + } else { + return rv, nil } - r.ConfigureConfigFilePath() - - // TODO: define and test semantics for merging. - // return mergo.Merge(r, other, mergo.WithOverride) - return nil + return dyn.SetByPath(rv, path, out) } func (r *Root) MergeTargetOverrides(name string) error { - var tmp dyn.Value + // var tmp dyn.Value + var root = r.value var err error - target := r.value.Get("targets").Get(name) - if target == dyn.NilValue { - return nil + target, err := dyn.GetByPath(root, dyn.NewPath(dyn.Key("targets"), dyn.Key(name))) + if err != nil { + return err } - mergeField := func(name string) error { - tmp, err = merge.Merge(r.value.Get(name), target.Get(name)) - if err != nil { + // Merge fields that can be merged 1:1. + for _, f := range []string{ + "bundle", + "workspace", + "artifacts", + "resources", + "sync", + "permissions", + } { + if root, err = mergeField(root, target, f); err != nil { return err } - - r.value.MustMap()[name] = tmp - return nil } - if mode := target.Get("mode"); mode != dyn.NilValue { - bundle := r.value.Get("bundle") - if bundle == dyn.NilValue { - bundle = dyn.NewValue(map[string]dyn.Value{}, dyn.Location{}) + // Merge variables. + // TODO(@pietern): + + // Merge `run_as`. This field must be overwritten if set, not merged. + if v := target.Get("run_as"); v != dyn.NilValue { + root, err = dyn.Set(root, "run_as", v) + if err != nil { + return err } - bundle.MustMap()["mode"] = mode - r.value.MustMap()["bundle"] = bundle } - // if target.Mode != "" { - // r.Bundle.Mode = target.Mode - // } - - // if target.ComputeID != "" { - // r.Bundle.ComputeID = target.ComputeID - // } - - // The "run_as" field must be overwritten if set, not merged. - // Otherwise we end up with a merged version where both the - // "user_name" and "service_principal_name" fields are set. 
- if runAs := target.Get("run_as"); runAs != dyn.NilValue { - r.value.MustMap()["run_as"] = runAs - // Clear existing field to convert.ToTyped() merging - // the new value with the existing value. - // TODO(@pietern): Address this structurally. - r.RunAs = nil + // Below, we're setting fields on the bundle key, so make sure it exists. + if root.Get("bundle") == dyn.NilValue { + root, err = dyn.Set(root, "bundle", dyn.NewValue(map[string]dyn.Value{}, dyn.Location{})) + if err != nil { + return err + } } - if git := target.Get("git"); git != dyn.NilValue { - bundle := r.value.Get("bundle") - if bundle == dyn.NilValue { - bundle = dyn.NewValue(map[string]dyn.Value{}, dyn.Location{}) + // Merge `mode`. This field must be overwritten if set, not merged. + if v := target.Get("mode"); v != dyn.NilValue { + root, err = dyn.SetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("mode")), v) + if err != nil { + return err } - - bundle.MustMap()["git"] = git - r.value.MustMap()["bundle"] = bundle } - if err = mergeField("bundle"); err != nil { - return err + // Merge `compute_id`. This field must be overwritten if set, not merged. + if v := target.Get("compute_id"); v != dyn.NilValue { + root, err = dyn.SetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("compute_id")), v) + if err != nil { + return err + } } - if err = mergeField("workspace"); err != nil { - return err - } + // Merge `git`. + if v := target.Get("git"); v != dyn.NilValue { + ref, err := dyn.GetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("git"))) + if err != nil { + ref = dyn.NewValue(map[string]dyn.Value{}, dyn.Location{}) + } - if err = mergeField("artifacts"); err != nil { - return err - } + // Merge the override into the reference. + out, err := merge.Merge(ref, v) + if err != nil { + return err + } - if err = mergeField("resources"); err != nil { - return err - } + // If the branch was overridden, we need to clear the inferred flag. + if branch := v.Get("branch"); branch != dyn.NilValue { + out, err = dyn.SetByPath(out, dyn.NewPath(dyn.Key("inferred")), dyn.NewValue(false, dyn.Location{})) + if err != nil { + return err + } + } - if err = mergeField("sync"); err != nil { - return err + // Set the merged value. + root, err = dyn.SetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("git")), out) + if err != nil { + return err + } } - if err = mergeField("permissions"); err != nil { - return err - } + r.value = root // Convert normalized configuration tree to typed configuration. err = r.toTyped(r.value) @@ -365,91 +384,3 @@ func (r *Root) MergeTargetOverrides(name string) error { r.ConfigureConfigFilePath() return nil } - -// // Target may be nil if it's empty. 
-// if target == nil { -// return nil -// } - -// if target.Bundle != nil { -// err = mergo.Merge(&r.Bundle, target.Bundle, mergo.WithOverride) -// if err != nil { -// return err -// } -// } - -// if target.Workspace != nil { -// err = mergo.Merge(&r.Workspace, target.Workspace, mergo.WithOverride) -// if err != nil { -// return err -// } -// } - -// if target.Artifacts != nil { -// err = mergo.Merge(&r.Artifacts, target.Artifacts, mergo.WithOverride, mergo.WithAppendSlice) -// if err != nil { -// return err -// } -// } - -// if target.Resources != nil { -// err = mergo.Merge(&r.Resources, target.Resources, mergo.WithOverride, mergo.WithAppendSlice) -// if err != nil { -// return err -// } - -// err = r.Resources.Merge() -// if err != nil { -// return err -// } -// } - -// if target.Variables != nil { -// for k, v := range target.Variables { -// variable, ok := r.Variables[k] -// if !ok { -// return fmt.Errorf("variable %s is not defined but is assigned a value", k) -// } -// // we only allow overrides of the default value for a variable -// defaultVal := v -// variable.Default = &defaultVal -// } -// } - -// if target.RunAs != nil { -// r.RunAs = target.RunAs -// } - -// if target.Mode != "" { -// r.Bundle.Mode = target.Mode -// } - -// if target.ComputeID != "" { -// r.Bundle.ComputeID = target.ComputeID -// } - -// git := &r.Bundle.Git -// if target.Git.Branch != "" { -// git.Branch = target.Git.Branch -// git.Inferred = false -// } -// if target.Git.Commit != "" { -// git.Commit = target.Git.Commit -// } -// if target.Git.OriginURL != "" { -// git.OriginURL = target.Git.OriginURL -// } - -// if target.Sync != nil { -// err = mergo.Merge(&r.Sync, target.Sync, mergo.WithAppendSlice) -// if err != nil { -// return err -// } -// } - -// if target.Permissions != nil { -// err = mergo.Merge(&r.Permissions, target.Permissions, mergo.WithAppendSlice) -// if err != nil { -// return err -// } -// } diff --git a/bundle/mutator.go b/bundle/mutator.go index 73d11d4934..c1a2429057 100644 --- a/bundle/mutator.go +++ b/bundle/mutator.go @@ -33,3 +33,20 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) error { return nil } + +type funcMutator struct { + fn func(context.Context, *Bundle) error +} + +func (m funcMutator) Name() string { + return "" +} + +func (m funcMutator) Apply(ctx context.Context, b *Bundle) error { + return m.fn(ctx, b) +} + +// ApplyFunc applies an inline-specified function mutator. 
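// (Editor's note, illustrative and not part of this patch: judging by the test changes
// below, the motivation is that ad-hoc config mutations in tests should be routed
// through Apply, presumably so they participate in the same typed/dynamic conversion
// bookkeeping (see MarkMutatorEntry in bundle/config/root.go) as named mutators.
// A hedged usage sketch:
//
//	_ = bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error {
//		// ... mutate b.Config directly here ...
//		return nil
//	})
// )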
+func ApplyFunc(ctx context.Context, b *Bundle, fn func(context.Context, *Bundle) error) error { + return Apply(ctx, b, funcMutator{fn}) +} diff --git a/bundle/tests/run_as_test.go b/bundle/tests/run_as_test.go index 7809b880d1..98aaf63580 100644 --- a/bundle/tests/run_as_test.go +++ b/bundle/tests/run_as_test.go @@ -13,12 +13,17 @@ import ( func TestRunAsDefault(t *testing.T) { b := load(t, "./run_as") - b.Config.Workspace.CurrentUser = &config.User{ - User: &iam.User{ - UserName: "jane@doe.com", - }, - } + ctx := context.Background() + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error { + b.Config.Workspace.CurrentUser = &config.User{ + User: &iam.User{ + UserName: "jane@doe.com", + }, + } + return nil + }) + err := bundle.Apply(ctx, b, mutator.SetRunAs()) assert.NoError(t, err) @@ -48,12 +53,17 @@ func TestRunAsDefault(t *testing.T) { func TestRunAsDevelopment(t *testing.T) { b := loadTarget(t, "./run_as", "development") - b.Config.Workspace.CurrentUser = &config.User{ - User: &iam.User{ - UserName: "jane@doe.com", - }, - } + ctx := context.Background() + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error { + b.Config.Workspace.CurrentUser = &config.User{ + User: &iam.User{ + UserName: "jane@doe.com", + }, + } + return nil + }) + err := bundle.Apply(ctx, b, mutator.SetRunAs()) assert.NoError(t, err) diff --git a/libs/dyn/convert/to_typed.go b/libs/dyn/convert/to_typed.go index 209de12cbd..177b114d70 100644 --- a/libs/dyn/convert/to_typed.go +++ b/libs/dyn/convert/to_typed.go @@ -53,6 +53,8 @@ func ToTyped(dst any, src dyn.Value) error { func toTypedStruct(dst reflect.Value, src dyn.Value) error { switch src.Kind() { case dyn.KindMap: + dst.SetZero() + info := getStructInfo(dst.Type()) for k, v := range src.MustMap() { index, ok := info.Fields[k] From 47eb229690880c4f0581ec415da1d775381d1f61 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 10 Jan 2024 14:23:05 +0100 Subject: [PATCH 042/104] comment --- libs/dyn/convert/to_typed.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/libs/dyn/convert/to_typed.go b/libs/dyn/convert/to_typed.go index 177b114d70..8f4195558a 100644 --- a/libs/dyn/convert/to_typed.go +++ b/libs/dyn/convert/to_typed.go @@ -53,6 +53,8 @@ func ToTyped(dst any, src dyn.Value) error { func toTypedStruct(dst reflect.Value, src dyn.Value) error { switch src.Kind() { case dyn.KindMap: + // Clear the destination struct. + // This is necessary because we don't know which fields are settable. 
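		// (Editor's aside, illustrative: the practical consequence is that converting a map
		// into a struct overwrites the whole struct instead of merging into it. A hedged
		// sketch, assuming a struct whose fields are tagged "foo" and "bar":
		//
		//	var out struct {
		//		Foo string `json:"foo"`
		//		Bar string `json:"bar"`
		//	}
		//	out.Bar = "stale"
		//	_ = convert.ToTyped(&out, dyn.V(map[string]dyn.Value{"foo": dyn.V("x")}))
		//	// out.Foo is "x" and out.Bar is reset to "", rather than keeping "stale".
		// )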
dst.SetZero() info := getStructInfo(dst.Type()) From b3113d692cd1c64e85e531603454f4ecc788b84d Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 17 Jan 2024 09:14:56 +0100 Subject: [PATCH 043/104] dynvar --- libs/dyn/dynvar/interpolate.go | 193 ++++++++++++++++++++++++++++ libs/dyn/dynvar/interpolate_test.go | 117 +++++++++++++++++ libs/dyn/dynvar/refs.go | 1 + libs/dyn/dynvar/refs_test.go | 21 +++ 4 files changed, 332 insertions(+) create mode 100644 libs/dyn/dynvar/interpolate.go create mode 100644 libs/dyn/dynvar/interpolate_test.go create mode 100644 libs/dyn/dynvar/refs.go create mode 100644 libs/dyn/dynvar/refs_test.go diff --git a/libs/dyn/dynvar/interpolate.go b/libs/dyn/dynvar/interpolate.go new file mode 100644 index 0000000000..06b9ad21c0 --- /dev/null +++ b/libs/dyn/dynvar/interpolate.go @@ -0,0 +1,193 @@ +package dynvar + +import ( + "fmt" + "regexp" + "slices" + "sort" + "strings" + + "github.com/databricks/cli/libs/dyn" + "golang.org/x/exp/maps" +) + +var re = regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\}`) + +type variableReference struct { + value dyn.Value + path dyn.Path + str string + matches [][]string +} + +// isPure returns true if the variable reference contains a single +// variable reference and nothing more. We need this so we can +// interpolate values of non-string types (i.e. it can be substituted). +func (v variableReference) isPure() bool { + // Need single match, equal to the incoming string. + if len(v.matches) == 0 || len(v.matches[0]) == 0 { + panic("invalid variable reference; expect at least one match") + } + return v.matches[0][0] == v.str +} + +func (v variableReference) references() []string { + var out []string + for _, m := range v.matches { + out = append(out, m[1]) + } + return out +} + +func (v variableReference) mapKey() string { + return v.path.String() +} + +func Interpolate(in dyn.Value) (out dyn.Value, err error) { + return interpolation{in: in}.interpolate() +} + +type interpolation struct { + in dyn.Value + + refs map[string]variableReference + resolved map[string]dyn.Value +} + +func (i interpolation) interpolate() (out dyn.Value, err error) { + err = i.collectVariableReferences() + if err != nil { + return dyn.InvalidValue, err + } + + // Initialize map for resolved variables. + // We use this for memoization. + i.resolved = make(map[string]dyn.Value) + + // Resolve each variable reference (in order). + keys := maps.Keys(i.refs) + sort.Strings(keys) + + for _, key := range keys { + _, err := i.resolve(key, []string{key}) + if err != nil { + return dyn.InvalidValue, err + } + } + + out, err = i.replaceVariableReferences() + if err != nil { + return dyn.InvalidValue, err + } + + return out, nil +} + +func (i *interpolation) collectVariableReferences() (err error) { + i.refs = make(map[string]variableReference) + + // First walk the input to gather all values with a variable reference. + _, err = dyn.Walk(i.in, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + s, ok := v.AsString() + if !ok { + // Skip non-string values. + return v, nil + } + + // Check if the string contains a variable reference. + m := re.FindAllStringSubmatch(s, -1) + if len(m) == 0 { + // Skip strings without variable references. + return v, nil + } + + // Store the variable reference. 
+ ref := variableReference{ + value: v, + path: p, + str: s, + matches: m, + } + i.refs[ref.mapKey()] = ref + return v, nil + }) + + return err +} + +func (i *interpolation) resolve(key string, seen []string) (dyn.Value, error) { + if v, ok := i.resolved[key]; ok { + return v, nil + } + + ref, ok := i.refs[key] + if !ok { + // Perform lookup in the input. + // TODO hook into user specified function here + return dyn.Get(i.in, key) + } + + // This is an unresolved variable reference. + deps := ref.references() + + // Resolve each of the dependencies, then interpolate them in the ref. + resolved := make([]dyn.Value, len(deps)) + + for j, dep := range deps { + // Cycle detection. + if slices.Contains(seen, dep) { + return dyn.InvalidValue, fmt.Errorf( + "cycle detected in field resolution: %s", + strings.Join(append(seen, dep), " -> "), + ) + } + + v, err := i.resolve(dep, append(seen, dep)) + if err != nil { + return dyn.InvalidValue, err + } + + resolved[j] = v + } + + // Interpolate the resolved values. + if ref.isPure() { + // If the variable reference is pure, we can substitute it. + // This is useful for interpolating values of non-string types. + i.resolved[key] = resolved[0] + return resolved[0], nil + } + + // Not pure; perform string interpolation. + for j := range ref.matches { + // Try to turn the resolved value into a string. + s, ok := resolved[j].AsString() + if !ok { + return dyn.InvalidValue, fmt.Errorf( + "cannot interpolate non-string value: %s", + ref.matches[j][0], + ) + } + + ref.str = strings.Replace(ref.str, ref.matches[j][0], s, 1) + } + + // Store the interpolated value. + v := dyn.NewValue(ref.str, ref.value.Location()) + i.resolved[key] = v + return v, nil +} + +func (i *interpolation) replaceVariableReferences() (dyn.Value, error) { + // Walk the input and replace all variable references. + return dyn.Walk(i.in, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + ref, ok := i.refs[p.String()] + if !ok { + // No variable reference; return the original value. + return v, nil + } + + // We have a variable reference; return the resolved value. 
+ return i.resolved[ref.mapKey()], nil + }) +} diff --git a/libs/dyn/dynvar/interpolate_test.go b/libs/dyn/dynvar/interpolate_test.go new file mode 100644 index 0000000000..7595465c00 --- /dev/null +++ b/libs/dyn/dynvar/interpolate_test.go @@ -0,0 +1,117 @@ +package dynvar_test + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/dynvar" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func getByPath(t *testing.T, v dyn.Value, path string) dyn.Value { + v, err := dyn.Get(v, path) + require.NoError(t, err) + return v +} + +func TestInterpolation(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${a}"), + "c": dyn.V("${a}"), + }) + + out, err := dynvar.Interpolate(in) + require.NoError(t, err) + + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "a", getByPath(t, out, "b").MustString()) + assert.Equal(t, "a", getByPath(t, out, "c").MustString()) +} + +func TestInterpolationWithNesting(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("${f.a}"), + "f": dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${f.a}"), + }), + }) + + out, err := dynvar.Interpolate(in) + require.NoError(t, err) + + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "a", getByPath(t, out, "f.a").MustString()) + assert.Equal(t, "a", getByPath(t, out, "f.b").MustString()) +} + +func TestInterpolationWithRecursion(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${a}"), + "c": dyn.V("${b}"), + }) + + out, err := dynvar.Interpolate(in) + require.NoError(t, err) + + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "a", getByPath(t, out, "b").MustString()) + assert.Equal(t, "a", getByPath(t, out, "c").MustString()) +} + +func TestInterpolationWithRecursionLoop(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${c}"), + "c": dyn.V("${d}"), + "d": dyn.V("${b}"), + }) + + _, err := dynvar.Interpolate(in) + assert.ErrorContains(t, err, "cycle detected in field resolution: b -> c -> d -> b") +} + +func TestInterpolationWithRecursionLoopSelf(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("${a}"), + }) + + _, err := dynvar.Interpolate(in) + assert.ErrorContains(t, err, "cycle detected in field resolution: a -> a") +} + +func TestInterpolationWithTypeRetention(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "int": dyn.V(1), + "int_": dyn.V("${int}"), + "bool_true": dyn.V(true), + "bool_true_": dyn.V("${bool_true}"), + "bool_false": dyn.V(false), + "bool_false_": dyn.V("${bool_false}"), + "float": dyn.V(1.0), + "float_": dyn.V("${float}"), + "string": dyn.V("a"), + "string_": dyn.V("${string}"), + }) + + out, err := dynvar.Interpolate(in) + require.NoError(t, err) + + assert.EqualValues(t, 1, getByPath(t, out, "int").MustInt()) + assert.EqualValues(t, 1, getByPath(t, out, "int_").MustInt()) + + assert.EqualValues(t, true, getByPath(t, out, "bool_true").MustBool()) + assert.EqualValues(t, true, getByPath(t, out, "bool_true_").MustBool()) + + assert.EqualValues(t, false, getByPath(t, out, "bool_false").MustBool()) + assert.EqualValues(t, false, getByPath(t, out, "bool_false_").MustBool()) + + assert.EqualValues(t, 1.0, getByPath(t, out, "float").MustFloat()) + assert.EqualValues(t, 1.0, getByPath(t, out, "float_").MustFloat()) + + assert.EqualValues(t, "a", getByPath(t, out, "string").MustString()) + 
assert.EqualValues(t, "a", getByPath(t, out, "string_").MustString()) +} diff --git a/libs/dyn/dynvar/refs.go b/libs/dyn/dynvar/refs.go new file mode 100644 index 0000000000..929cc32bc7 --- /dev/null +++ b/libs/dyn/dynvar/refs.go @@ -0,0 +1 @@ +package dynvar diff --git a/libs/dyn/dynvar/refs_test.go b/libs/dyn/dynvar/refs_test.go new file mode 100644 index 0000000000..ba7080ddd9 --- /dev/null +++ b/libs/dyn/dynvar/refs_test.go @@ -0,0 +1,21 @@ +package dynvar_test + +// func TestRefsInvalid(t *testing.T) { +// invalidMatches := []string{ +// "${hello_world-.world_world}", // the first segment ending must not end with hyphen (-) +// "${hello_world-_.world_world}", // the first segment ending must not end with underscore (_) +// "${helloworld.world-world-}", // second segment must not end with hyphen (-) +// "${helloworld-.world-world}", // first segment must not end with hyphen (-) +// "${helloworld.-world-world}", // second segment must not start with hyphen (-) +// "${-hello-world.-world-world-}", // must not start or end with hyphen (-) +// "${_-_._-_.id}", // cannot use _- in sequence +// "${0helloworld.world-world}", // interpolated first section shouldn't start with number +// "${helloworld.9world-world}", // interpolated second section shouldn't start with number +// "${a-a.a-_a-a.id}", // fails because of -_ in the second segment +// "${a-a.a--a-a.id}", // fails because of -- in the second segment +// } +// for _, invalidMatch := range invalidMatches { +// match := re.FindStringSubmatch(invalidMatch) +// assert.True(t, len(match) == 0, "Should be invalid interpolation: %s", invalidMatch) +// } +// } From 335d3def7f28c63de8cff7088bfb231f66e87743 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 19 Jan 2024 16:48:09 +0100 Subject: [PATCH 044/104] More dynvar --- libs/dyn/dynvar/interpolate.go | 193 ---------------------------- libs/dyn/dynvar/interpolate_test.go | 117 ----------------- libs/dyn/dynvar/lookup.go | 21 +++ libs/dyn/dynvar/lookup_test.go | 27 ++++ libs/dyn/dynvar/ref.go | 71 ++++++++++ libs/dyn/dynvar/ref_test.go | 46 +++++++ libs/dyn/dynvar/refs.go | 1 - libs/dyn/dynvar/refs_test.go | 21 --- libs/dyn/dynvar/resolve.go | 180 ++++++++++++++++++++++++++ libs/dyn/dynvar/resolve_test.go | 184 ++++++++++++++++++++++++++ 10 files changed, 529 insertions(+), 332 deletions(-) delete mode 100644 libs/dyn/dynvar/interpolate.go delete mode 100644 libs/dyn/dynvar/interpolate_test.go create mode 100644 libs/dyn/dynvar/lookup.go create mode 100644 libs/dyn/dynvar/lookup_test.go create mode 100644 libs/dyn/dynvar/ref.go create mode 100644 libs/dyn/dynvar/ref_test.go delete mode 100644 libs/dyn/dynvar/refs.go delete mode 100644 libs/dyn/dynvar/refs_test.go create mode 100644 libs/dyn/dynvar/resolve.go create mode 100644 libs/dyn/dynvar/resolve_test.go diff --git a/libs/dyn/dynvar/interpolate.go b/libs/dyn/dynvar/interpolate.go deleted file mode 100644 index 06b9ad21c0..0000000000 --- a/libs/dyn/dynvar/interpolate.go +++ /dev/null @@ -1,193 +0,0 @@ -package dynvar - -import ( - "fmt" - "regexp" - "slices" - "sort" - "strings" - - "github.com/databricks/cli/libs/dyn" - "golang.org/x/exp/maps" -) - -var re = regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\}`) - -type variableReference struct { - value dyn.Value - path dyn.Path - str string - matches [][]string -} - -// isPure returns true if the variable reference contains a single -// variable reference and nothing more. 
We need this so we can -// interpolate values of non-string types (i.e. it can be substituted). -func (v variableReference) isPure() bool { - // Need single match, equal to the incoming string. - if len(v.matches) == 0 || len(v.matches[0]) == 0 { - panic("invalid variable reference; expect at least one match") - } - return v.matches[0][0] == v.str -} - -func (v variableReference) references() []string { - var out []string - for _, m := range v.matches { - out = append(out, m[1]) - } - return out -} - -func (v variableReference) mapKey() string { - return v.path.String() -} - -func Interpolate(in dyn.Value) (out dyn.Value, err error) { - return interpolation{in: in}.interpolate() -} - -type interpolation struct { - in dyn.Value - - refs map[string]variableReference - resolved map[string]dyn.Value -} - -func (i interpolation) interpolate() (out dyn.Value, err error) { - err = i.collectVariableReferences() - if err != nil { - return dyn.InvalidValue, err - } - - // Initialize map for resolved variables. - // We use this for memoization. - i.resolved = make(map[string]dyn.Value) - - // Resolve each variable reference (in order). - keys := maps.Keys(i.refs) - sort.Strings(keys) - - for _, key := range keys { - _, err := i.resolve(key, []string{key}) - if err != nil { - return dyn.InvalidValue, err - } - } - - out, err = i.replaceVariableReferences() - if err != nil { - return dyn.InvalidValue, err - } - - return out, nil -} - -func (i *interpolation) collectVariableReferences() (err error) { - i.refs = make(map[string]variableReference) - - // First walk the input to gather all values with a variable reference. - _, err = dyn.Walk(i.in, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { - s, ok := v.AsString() - if !ok { - // Skip non-string values. - return v, nil - } - - // Check if the string contains a variable reference. - m := re.FindAllStringSubmatch(s, -1) - if len(m) == 0 { - // Skip strings without variable references. - return v, nil - } - - // Store the variable reference. - ref := variableReference{ - value: v, - path: p, - str: s, - matches: m, - } - i.refs[ref.mapKey()] = ref - return v, nil - }) - - return err -} - -func (i *interpolation) resolve(key string, seen []string) (dyn.Value, error) { - if v, ok := i.resolved[key]; ok { - return v, nil - } - - ref, ok := i.refs[key] - if !ok { - // Perform lookup in the input. - // TODO hook into user specified function here - return dyn.Get(i.in, key) - } - - // This is an unresolved variable reference. - deps := ref.references() - - // Resolve each of the dependencies, then interpolate them in the ref. - resolved := make([]dyn.Value, len(deps)) - - for j, dep := range deps { - // Cycle detection. - if slices.Contains(seen, dep) { - return dyn.InvalidValue, fmt.Errorf( - "cycle detected in field resolution: %s", - strings.Join(append(seen, dep), " -> "), - ) - } - - v, err := i.resolve(dep, append(seen, dep)) - if err != nil { - return dyn.InvalidValue, err - } - - resolved[j] = v - } - - // Interpolate the resolved values. - if ref.isPure() { - // If the variable reference is pure, we can substitute it. - // This is useful for interpolating values of non-string types. - i.resolved[key] = resolved[0] - return resolved[0], nil - } - - // Not pure; perform string interpolation. - for j := range ref.matches { - // Try to turn the resolved value into a string. 
- s, ok := resolved[j].AsString() - if !ok { - return dyn.InvalidValue, fmt.Errorf( - "cannot interpolate non-string value: %s", - ref.matches[j][0], - ) - } - - ref.str = strings.Replace(ref.str, ref.matches[j][0], s, 1) - } - - // Store the interpolated value. - v := dyn.NewValue(ref.str, ref.value.Location()) - i.resolved[key] = v - return v, nil -} - -func (i *interpolation) replaceVariableReferences() (dyn.Value, error) { - // Walk the input and replace all variable references. - return dyn.Walk(i.in, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { - ref, ok := i.refs[p.String()] - if !ok { - // No variable reference; return the original value. - return v, nil - } - - // We have a variable reference; return the resolved value. - return i.resolved[ref.mapKey()], nil - }) -} diff --git a/libs/dyn/dynvar/interpolate_test.go b/libs/dyn/dynvar/interpolate_test.go deleted file mode 100644 index 7595465c00..0000000000 --- a/libs/dyn/dynvar/interpolate_test.go +++ /dev/null @@ -1,117 +0,0 @@ -package dynvar_test - -import ( - "testing" - - "github.com/databricks/cli/libs/dyn" - "github.com/databricks/cli/libs/dyn/dynvar" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func getByPath(t *testing.T, v dyn.Value, path string) dyn.Value { - v, err := dyn.Get(v, path) - require.NoError(t, err) - return v -} - -func TestInterpolation(t *testing.T) { - in := dyn.V(map[string]dyn.Value{ - "a": dyn.V("a"), - "b": dyn.V("${a}"), - "c": dyn.V("${a}"), - }) - - out, err := dynvar.Interpolate(in) - require.NoError(t, err) - - assert.Equal(t, "a", getByPath(t, out, "a").MustString()) - assert.Equal(t, "a", getByPath(t, out, "b").MustString()) - assert.Equal(t, "a", getByPath(t, out, "c").MustString()) -} - -func TestInterpolationWithNesting(t *testing.T) { - in := dyn.V(map[string]dyn.Value{ - "a": dyn.V("${f.a}"), - "f": dyn.V(map[string]dyn.Value{ - "a": dyn.V("a"), - "b": dyn.V("${f.a}"), - }), - }) - - out, err := dynvar.Interpolate(in) - require.NoError(t, err) - - assert.Equal(t, "a", getByPath(t, out, "a").MustString()) - assert.Equal(t, "a", getByPath(t, out, "f.a").MustString()) - assert.Equal(t, "a", getByPath(t, out, "f.b").MustString()) -} - -func TestInterpolationWithRecursion(t *testing.T) { - in := dyn.V(map[string]dyn.Value{ - "a": dyn.V("a"), - "b": dyn.V("${a}"), - "c": dyn.V("${b}"), - }) - - out, err := dynvar.Interpolate(in) - require.NoError(t, err) - - assert.Equal(t, "a", getByPath(t, out, "a").MustString()) - assert.Equal(t, "a", getByPath(t, out, "b").MustString()) - assert.Equal(t, "a", getByPath(t, out, "c").MustString()) -} - -func TestInterpolationWithRecursionLoop(t *testing.T) { - in := dyn.V(map[string]dyn.Value{ - "a": dyn.V("a"), - "b": dyn.V("${c}"), - "c": dyn.V("${d}"), - "d": dyn.V("${b}"), - }) - - _, err := dynvar.Interpolate(in) - assert.ErrorContains(t, err, "cycle detected in field resolution: b -> c -> d -> b") -} - -func TestInterpolationWithRecursionLoopSelf(t *testing.T) { - in := dyn.V(map[string]dyn.Value{ - "a": dyn.V("${a}"), - }) - - _, err := dynvar.Interpolate(in) - assert.ErrorContains(t, err, "cycle detected in field resolution: a -> a") -} - -func TestInterpolationWithTypeRetention(t *testing.T) { - in := dyn.V(map[string]dyn.Value{ - "int": dyn.V(1), - "int_": dyn.V("${int}"), - "bool_true": dyn.V(true), - "bool_true_": dyn.V("${bool_true}"), - "bool_false": dyn.V(false), - "bool_false_": dyn.V("${bool_false}"), - "float": dyn.V(1.0), - "float_": dyn.V("${float}"), - "string": dyn.V("a"), - "string_": 
dyn.V("${string}"), - }) - - out, err := dynvar.Interpolate(in) - require.NoError(t, err) - - assert.EqualValues(t, 1, getByPath(t, out, "int").MustInt()) - assert.EqualValues(t, 1, getByPath(t, out, "int_").MustInt()) - - assert.EqualValues(t, true, getByPath(t, out, "bool_true").MustBool()) - assert.EqualValues(t, true, getByPath(t, out, "bool_true_").MustBool()) - - assert.EqualValues(t, false, getByPath(t, out, "bool_false").MustBool()) - assert.EqualValues(t, false, getByPath(t, out, "bool_false_").MustBool()) - - assert.EqualValues(t, 1.0, getByPath(t, out, "float").MustFloat()) - assert.EqualValues(t, 1.0, getByPath(t, out, "float_").MustFloat()) - - assert.EqualValues(t, "a", getByPath(t, out, "string").MustString()) - assert.EqualValues(t, "a", getByPath(t, out, "string_").MustString()) -} diff --git a/libs/dyn/dynvar/lookup.go b/libs/dyn/dynvar/lookup.go new file mode 100644 index 0000000000..2bc08f47d4 --- /dev/null +++ b/libs/dyn/dynvar/lookup.go @@ -0,0 +1,21 @@ +package dynvar + +import ( + "errors" + + "github.com/databricks/cli/libs/dyn" +) + +// Lookup is the type of lookup functions that can be used with [Resolve]. +type Lookup func(path dyn.Path) (dyn.Value, error) + +// ErrSkipResolution is returned by a lookup function to indicate that the +// resolution of a variable reference should be skipped. +var ErrSkipResolution = errors.New("skip resolution") + +// DefaultLookup is the default lookup function used by [Resolve]. +func DefaultLookup(in dyn.Value) Lookup { + return func(path dyn.Path) (dyn.Value, error) { + return dyn.GetByPath(in, path) + } +} diff --git a/libs/dyn/dynvar/lookup_test.go b/libs/dyn/dynvar/lookup_test.go new file mode 100644 index 0000000000..2341d72084 --- /dev/null +++ b/libs/dyn/dynvar/lookup_test.go @@ -0,0 +1,27 @@ +package dynvar_test + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/dynvar" + "github.com/stretchr/testify/assert" +) + +func TestDefaultLookup(t *testing.T) { + lookup := dynvar.DefaultLookup(dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("b"), + })) + + v1, err := lookup(dyn.NewPath(dyn.Key("a"))) + assert.NoError(t, err) + assert.Equal(t, dyn.V("a"), v1) + + v2, err := lookup(dyn.NewPath(dyn.Key("b"))) + assert.NoError(t, err) + assert.Equal(t, dyn.V("b"), v2) + + _, err = lookup(dyn.NewPath(dyn.Key("c"))) + assert.True(t, dyn.IsNoSuchKeyError(err)) +} diff --git a/libs/dyn/dynvar/ref.go b/libs/dyn/dynvar/ref.go new file mode 100644 index 0000000000..5404cf36f5 --- /dev/null +++ b/libs/dyn/dynvar/ref.go @@ -0,0 +1,71 @@ +package dynvar + +import ( + "regexp" + + "github.com/databricks/cli/libs/dyn" +) + +var re = regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\}`) + +// ref represents a variable reference. +// It is a string [dyn.Value] contained in a larger [dyn.Value]. +// Its path within the containing [dyn.Value] is also stored. +type ref struct { + // Original value and path. + value dyn.Value + path dyn.Path + + // Key to index this ref by. + // It is equal to the string representation of the path. + key string + + // String value in the original [dyn.Value]. + str string + + // Matches of the variable reference in the string. + matches [][]string +} + +// newRef returns a new ref if the given [dyn.Value] contains a string +// with one or more variable references. It returns false if the given +// [dyn.Value] does not contain variable references. 
+func newRef(v dyn.Value, p dyn.Path) (ref, bool) { + s, ok := v.AsString() + if !ok { + return ref{}, false + } + + // Check if the string contains any variable references. + m := re.FindAllStringSubmatch(s, -1) + if len(m) == 0 { + return ref{}, false + } + + return ref{ + value: v, + path: p, + key: p.String(), + str: s, + matches: m, + }, true +} + +// isPure returns true if the variable reference contains a single +// variable reference and nothing more. We need this so we can +// interpolate values of non-string types (i.e. it can be substituted). +func (v ref) isPure() bool { + // Need single match, equal to the incoming string. + if len(v.matches) == 0 || len(v.matches[0]) == 0 { + panic("invalid variable reference; expect at least one match") + } + return v.matches[0][0] == v.str +} + +func (v ref) references() []string { + var out []string + for _, m := range v.matches { + out = append(out, m[1]) + } + return out +} diff --git a/libs/dyn/dynvar/ref_test.go b/libs/dyn/dynvar/ref_test.go new file mode 100644 index 0000000000..e722f71133 --- /dev/null +++ b/libs/dyn/dynvar/ref_test.go @@ -0,0 +1,46 @@ +package dynvar + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestNewRefNoString(t *testing.T) { + _, ok := newRef(dyn.V(1), dyn.Path{}) + require.False(t, ok, "should not match non-string") +} + +func TestNewRefValidPattern(t *testing.T) { + for in, refs := range map[string][]string{ + "${hello_world.world_world}": {"hello_world.world_world"}, + "${helloworld.world-world}": {"helloworld.world-world"}, + "${hello-world.world-world}": {"hello-world.world-world"}, + } { + ref, ok := newRef(dyn.V(in), dyn.Path{}) + require.True(t, ok, "should match valid pattern: %s", in) + assert.Equal(t, refs, ref.references()) + } +} + +func TestNewRefInvalidPattern(t *testing.T) { + invalid := []string{ + "${hello_world-.world_world}", // the first segment ending must not end with hyphen (-) + "${hello_world-_.world_world}", // the first segment ending must not end with underscore (_) + "${helloworld.world-world-}", // second segment must not end with hyphen (-) + "${helloworld-.world-world}", // first segment must not end with hyphen (-) + "${helloworld.-world-world}", // second segment must not start with hyphen (-) + "${-hello-world.-world-world-}", // must not start or end with hyphen (-) + "${_-_._-_.id}", // cannot use _- in sequence + "${0helloworld.world-world}", // interpolated first section shouldn't start with number + "${helloworld.9world-world}", // interpolated second section shouldn't start with number + "${a-a.a-_a-a.id}", // fails because of -_ in the second segment + "${a-a.a--a-a.id}", // fails because of -- in the second segment + } + for _, v := range invalid { + _, ok := newRef(dyn.V(v), dyn.Path{}) + require.False(t, ok, "should not match invalid pattern: %s", v) + } +} diff --git a/libs/dyn/dynvar/refs.go b/libs/dyn/dynvar/refs.go deleted file mode 100644 index 929cc32bc7..0000000000 --- a/libs/dyn/dynvar/refs.go +++ /dev/null @@ -1 +0,0 @@ -package dynvar diff --git a/libs/dyn/dynvar/refs_test.go b/libs/dyn/dynvar/refs_test.go deleted file mode 100644 index ba7080ddd9..0000000000 --- a/libs/dyn/dynvar/refs_test.go +++ /dev/null @@ -1,21 +0,0 @@ -package dynvar_test - -// func TestRefsInvalid(t *testing.T) { -// invalidMatches := []string{ -// "${hello_world-.world_world}", // the first segment ending must not end with hyphen (-) -// "${hello_world-_.world_world}", // 
the first segment ending must not end with underscore (_) -// "${helloworld.world-world-}", // second segment must not end with hyphen (-) -// "${helloworld-.world-world}", // first segment must not end with hyphen (-) -// "${helloworld.-world-world}", // second segment must not start with hyphen (-) -// "${-hello-world.-world-world-}", // must not start or end with hyphen (-) -// "${_-_._-_.id}", // cannot use _- in sequence -// "${0helloworld.world-world}", // interpolated first section shouldn't start with number -// "${helloworld.9world-world}", // interpolated second section shouldn't start with number -// "${a-a.a-_a-a.id}", // fails because of -_ in the second segment -// "${a-a.a--a-a.id}", // fails because of -- in the second segment -// } -// for _, invalidMatch := range invalidMatches { -// match := re.FindStringSubmatch(invalidMatch) -// assert.True(t, len(match) == 0, "Should be invalid interpolation: %s", invalidMatch) -// } -// } diff --git a/libs/dyn/dynvar/resolve.go b/libs/dyn/dynvar/resolve.go new file mode 100644 index 0000000000..6e4df6c1d8 --- /dev/null +++ b/libs/dyn/dynvar/resolve.go @@ -0,0 +1,180 @@ +package dynvar + +import ( + "errors" + "fmt" + "slices" + "sort" + "strings" + + "github.com/databricks/cli/libs/dyn" + "golang.org/x/exp/maps" +) + +func Resolve(in dyn.Value, fn Lookup) (out dyn.Value, err error) { + return resolver{in: in, fn: fn}.run() +} + +type resolver struct { + in dyn.Value + fn Lookup + + refs map[string]ref + resolved map[string]dyn.Value +} + +func (r resolver) run() (out dyn.Value, err error) { + err = r.collectVariableReferences() + if err != nil { + return dyn.InvalidValue, err + } + + err = r.resolveVariableReferences() + if err != nil { + return dyn.InvalidValue, err + } + + out, err = r.replaceVariableReferences() + if err != nil { + return dyn.InvalidValue, err + } + + return out, nil +} + +func (r *resolver) collectVariableReferences() (err error) { + r.refs = make(map[string]ref) + + // First walk the input to gather all values with a variable reference. + _, err = dyn.Walk(r.in, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + ref, ok := newRef(v, p) + if !ok { + // Skip values without variable references. + return v, nil + } + + r.refs[ref.key] = ref + return v, nil + }) + + return err +} + +func (r *resolver) resolveVariableReferences() (err error) { + // Initialize map for resolved variables. + // We use this for memoization. + r.resolved = make(map[string]dyn.Value) + + // Resolve each variable reference (in order). + keys := maps.Keys(r.refs) + sort.Strings(keys) + for _, key := range keys { + _, err := r.resolve(key, []string{key}) + if err != nil { + return err + } + } + + return nil +} + +func (r *resolver) resolve(key string, seen []string) (dyn.Value, error) { + // Check if we have already resolved this variable reference. + if v, ok := r.resolved[key]; ok { + return v, nil + } + + ref, ok := r.refs[key] + if !ok { + // Perform lookup in the input. + p, err := dyn.NewPathFromString(key) + if err != nil { + return dyn.InvalidValue, err + } + v, err := r.fn(p) + if err != nil && dyn.IsNoSuchKeyError(err) { + return dyn.InvalidValue, fmt.Errorf( + "reference does not exist: ${%s}", + key, + ) + } + return v, err + } + + // This is an unresolved variable reference. + deps := ref.references() + + // Resolve each of the dependencies, then interpolate them in the ref. + resolved := make([]dyn.Value, len(deps)) + complete := true + + for j, dep := range deps { + // Cycle detection. 
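+ // For example, with the chain of references b -> c -> d -> b (as in
+ // TestResolveWithRecursionLoop below), the dependency re-appears in the
+ // list of seen keys and resolution fails with a cycle error.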
+ if slices.Contains(seen, dep) { + return dyn.InvalidValue, fmt.Errorf( + "cycle detected in field resolution: %s", + strings.Join(append(seen, dep), " -> "), + ) + } + + v, err := r.resolve(dep, append(seen, dep)) + + // If we should skip resolution of this key, index j will hold an invalid [dyn.Value]. + if errors.Is(err, ErrSkipResolution) { + complete = false + continue + } else if err != nil { + // Otherwise, propagate the error. + return dyn.InvalidValue, err + } + + resolved[j] = v + } + + // Interpolate the resolved values. + if ref.isPure() && complete { + // If the variable reference is pure, we can substitute it. + // This is useful for interpolating values of non-string types. + r.resolved[key] = resolved[0] + return resolved[0], nil + } + + // Not pure; perform string interpolation. + for j := range ref.matches { + // The value is invalid if resolution returned [ErrSkipResolution]. + // We must skip those and leave the original variable reference in place. + if !resolved[j].IsValid() { + continue + } + + // Try to turn the resolved value into a string. + s, ok := resolved[j].AsString() + if !ok { + return dyn.InvalidValue, fmt.Errorf( + "cannot interpolate non-string value: %s", + ref.matches[j][0], + ) + } + + ref.str = strings.Replace(ref.str, ref.matches[j][0], s, 1) + } + + // Store the interpolated value. + v := dyn.NewValue(ref.str, ref.value.Location()) + r.resolved[key] = v + return v, nil +} + +func (r *resolver) replaceVariableReferences() (dyn.Value, error) { + // Walk the input and replace all variable references. + return dyn.Walk(r.in, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + ref, ok := r.refs[p.String()] + if !ok { + // No variable reference; return the original value. + return v, nil + } + + // We have a variable reference; return the resolved value. 
+ return r.resolved[ref.key], nil + }) +} diff --git a/libs/dyn/dynvar/resolve_test.go b/libs/dyn/dynvar/resolve_test.go new file mode 100644 index 0000000000..ba700503ef --- /dev/null +++ b/libs/dyn/dynvar/resolve_test.go @@ -0,0 +1,184 @@ +package dynvar_test + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/dynvar" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func getByPath(t *testing.T, v dyn.Value, path string) dyn.Value { + v, err := dyn.Get(v, path) + require.NoError(t, err) + return v +} + +func TestResolve(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${a}"), + "c": dyn.V("${a}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "a", getByPath(t, out, "b").MustString()) + assert.Equal(t, "a", getByPath(t, out, "c").MustString()) +} + +func TestResolveNotFound(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "b": dyn.V("${a}"), + }) + + _, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.ErrorContains(t, err, `reference does not exist: ${a}`) +} + +func TestResolveWithNesting(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("${f.a}"), + "f": dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${f.a}"), + }), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "a", getByPath(t, out, "f.a").MustString()) + assert.Equal(t, "a", getByPath(t, out, "f.b").MustString()) +} + +func TestResolveWithRecursion(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${a}"), + "c": dyn.V("${b}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "a", getByPath(t, out, "b").MustString()) + assert.Equal(t, "a", getByPath(t, out, "c").MustString()) +} + +func TestResolveWithRecursionLoop(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${c}"), + "c": dyn.V("${d}"), + "d": dyn.V("${b}"), + }) + + _, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + assert.ErrorContains(t, err, "cycle detected in field resolution: b -> c -> d -> b") +} + +func TestResolveWithRecursionLoopSelf(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("${a}"), + }) + + _, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + assert.ErrorContains(t, err, "cycle detected in field resolution: a -> a") +} + +func TestResolveWithStringConcatenation(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("b"), + "c": dyn.V("${a}${b}${a}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "b", getByPath(t, out, "b").MustString()) + assert.Equal(t, "aba", getByPath(t, out, "c").MustString()) +} + +func TestResolveWithTypeRetentionFailure(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V(1), + "b": dyn.V(2), + "c": dyn.V("${a} ${b}"), + }) + + _, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.ErrorContains(t, err, "cannot interpolate non-string value: ${a}") +} + +func TestResolveWithTypeRetention(t *testing.T) { + in := 
dyn.V(map[string]dyn.Value{ + "int": dyn.V(1), + "int_var": dyn.V("${int}"), + "bool_true": dyn.V(true), + "bool_true_var": dyn.V("${bool_true}"), + "bool_false": dyn.V(false), + "bool_false_var": dyn.V("${bool_false}"), + "float": dyn.V(1.0), + "float_var": dyn.V("${float}"), + "string": dyn.V("a"), + "string_var": dyn.V("${string}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.EqualValues(t, 1, getByPath(t, out, "int").MustInt()) + assert.EqualValues(t, 1, getByPath(t, out, "int_var").MustInt()) + + assert.EqualValues(t, true, getByPath(t, out, "bool_true").MustBool()) + assert.EqualValues(t, true, getByPath(t, out, "bool_true_var").MustBool()) + + assert.EqualValues(t, false, getByPath(t, out, "bool_false").MustBool()) + assert.EqualValues(t, false, getByPath(t, out, "bool_false_var").MustBool()) + + assert.EqualValues(t, 1.0, getByPath(t, out, "float").MustFloat()) + assert.EqualValues(t, 1.0, getByPath(t, out, "float_var").MustFloat()) + + assert.EqualValues(t, "a", getByPath(t, out, "string").MustString()) + assert.EqualValues(t, "a", getByPath(t, out, "string_var").MustString()) +} + +func TestResolveWithSkip(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("b"), + "c": dyn.V("${a}"), + "d": dyn.V("${b}"), + "e": dyn.V("${a} ${b}"), + "f": dyn.V("${b} ${a} ${a} ${b}"), + }) + + fallback := dynvar.DefaultLookup(in) + ignore := func(path dyn.Path) (dyn.Value, error) { + // If the variable reference to look up starts with "b", skip it. + if path.HasPrefix(dyn.NewPath(dyn.Key("b"))) { + return dyn.InvalidValue, dynvar.ErrSkipResolution + } + return fallback(path) + } + + out, err := dynvar.Resolve(in, ignore) + require.NoError(t, err) + + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "b", getByPath(t, out, "b").MustString()) + assert.Equal(t, "a", getByPath(t, out, "c").MustString()) + + // Check that the skipped variable references are not interpolated. + assert.Equal(t, "${b}", getByPath(t, out, "d").MustString()) + assert.Equal(t, "a ${b}", getByPath(t, out, "e").MustString()) + assert.Equal(t, "${b} a a ${b}", getByPath(t, out, "f").MustString()) +} From b9ea101fc0d78e7774153c195344bc2a9e29563b Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 23 Jan 2024 13:43:18 +0100 Subject: [PATCH 045/104] Comment --- libs/dyn/dynvar/ref.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/libs/dyn/dynvar/ref.go b/libs/dyn/dynvar/ref.go index 5404cf36f5..c0ea7fccc6 100644 --- a/libs/dyn/dynvar/ref.go +++ b/libs/dyn/dynvar/ref.go @@ -30,6 +30,11 @@ type ref struct { // newRef returns a new ref if the given [dyn.Value] contains a string // with one or more variable references. It returns false if the given // [dyn.Value] does not contain variable references. 
+// +// Examples of a valid variable references: +// - "${a.b}" +// - "${a.b.c}" +// - "${a} ${b} ${c}" func newRef(v dyn.Value, p dyn.Path) (ref, bool) { s, ok := v.AsString() if !ok { From 9000bcbd9fafaddf14cdb47ddf2d9a9be281c007 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 23 Jan 2024 13:43:43 +0100 Subject: [PATCH 046/104] Make variable tests pass --- bundle/config/root.go | 90 ++++++++----- bundle/config/target.go | 3 +- bundle/tests/variables_test.go | 224 ++++++++++++++++----------------- 3 files changed, 169 insertions(+), 148 deletions(-) diff --git a/bundle/config/root.go b/bundle/config/root.go index 069aeffecf..dd5ba9b6e1 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -84,6 +84,12 @@ func Load(path string) (*Root, error) { return nil, fmt.Errorf("failed to load %s: %w", path, err) } + // Rewrite configuration tree where necessary. + v, err = rewrite(v) + if err != nil { + return nil, fmt.Errorf("failed to rewrite %s: %w", path, err) + } + // Normalize dynamic configuration tree according to configuration type. v, diags := convert.Normalize(r, v) @@ -296,6 +302,12 @@ func (r *Root) MergeTargetOverrides(name string) error { return err } + // Confirm validity of variable overrides. + err = validateVariableOverrides(root, target) + if err != nil { + return err + } + // Merge fields that can be merged 1:1. for _, f := range []string{ "bundle", @@ -304,40 +316,13 @@ func (r *Root) MergeTargetOverrides(name string) error { "resources", "sync", "permissions", + "variables", } { if root, err = mergeField(root, target, f); err != nil { return err } } - // TODO(@pietern): Merge variables. - // - // Also see: https://github.com/databricks/cli/pull/872 - // - // if target.Variables != nil { - // for k, v := range target.Variables { - // rootVariable, ok := r.Variables[k] - // if !ok { - // return fmt.Errorf("variable %s is not defined but is assigned a value", k) - // } - - // if sv, ok := v.(string); ok { - // // we allow overrides of the default value for a variable - // defaultVal := sv - // rootVariable.Default = &defaultVal - // } else if vv, ok := v.(map[string]any); ok { - // // we also allow overrides of the lookup value for a variable - // lookup, ok := vv["lookup"] - // if !ok { - // return fmt.Errorf("variable %s is incorrectly defined lookup override, no 'lookup' key defined", k) - // } - // rootVariable.Lookup = variable.LookupFromMap(lookup.(map[string]any)) - // } else { - // return fmt.Errorf("variable %s is incorrectly defined in target override", k) - // } - // } - // } - // Merge `run_as`. This field must be overwritten if set, not merged. if v := target.Get("run_as"); v != dyn.NilValue { root, err = dyn.Set(root, "run_as", v) @@ -409,3 +394,52 @@ func (r *Root) MergeTargetOverrides(name string) error { r.ConfigureConfigFilePath() return nil } + +// rewrite performs lightweight rewriting of the configuration +// tree where we allow users to write a shorthand and must +// rewrite to the full form. +func rewrite(v dyn.Value) (dyn.Value, error) { + // For each target, rewrite the variables block. + return dyn.Map(v, "targets", dyn.Foreach(func(target dyn.Value) (dyn.Value, error) { + // For each variable, normalize its contents if it is a single string. + return dyn.Map(target, "variables", dyn.Foreach(func(variable dyn.Value) (dyn.Value, error) { + if variable.Kind() != dyn.KindString { + return variable, nil + } + + // Rewrite the variable to a map with a single key called "default". + // This conforms to the variable type. 
+ return dyn.NewValue(map[string]dyn.Value{ + "default": variable, + }, variable.Location()), nil + })) + })) +} + +// validateVariableOverrides checks that all variables specified +// in the target override are also defined in the root. +func validateVariableOverrides(root, target dyn.Value) (err error) { + rv := make(map[string]variable.Variable) + tv := make(map[string]variable.Variable) + + // Collect variables from the root. + err = convert.ToTyped(&rv, root.Get("variables")) + if err != nil { + return fmt.Errorf("unable to collect variables from root: %w", err) + } + + // Collect variables from the target. + err = convert.ToTyped(&tv, target.Get("variables")) + if err != nil { + return fmt.Errorf("unable to collect variables from target: %w", err) + } + + // Check that all variables in the target exist in the root. + for k := range tv { + if _, ok := rv[k]; !ok { + return fmt.Errorf("variable %s is not defined but is assigned a value", k) + } + } + + return nil +} diff --git a/bundle/config/target.go b/bundle/config/target.go index 158f256060..acc493574b 100644 --- a/bundle/config/target.go +++ b/bundle/config/target.go @@ -2,6 +2,7 @@ package config import ( "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/cli/bundle/config/variable" "github.com/databricks/databricks-sdk-go/service/jobs" ) @@ -33,7 +34,7 @@ type Target struct { // Override default values or lookup name for defined variables // Does not permit defining new variables or redefining existing ones // in the scope of an target - Variables map[string]any `json:"variables,omitempty"` + Variables map[string]*variable.Variable `json:"variables,omitempty"` Git Git `json:"git,omitempty"` diff --git a/bundle/tests/variables_test.go b/bundle/tests/variables_test.go index e811acb527..82f917aa40 100644 --- a/bundle/tests/variables_test.go +++ b/bundle/tests/variables_test.go @@ -1,131 +1,117 @@ package config_tests -// import ( -// "context" -// "testing" +import ( + "context" + "testing" -// "github.com/databricks/cli/bundle" -// "github.com/databricks/cli/bundle/config/interpolation" -// "github.com/databricks/cli/bundle/config/mutator" -// "github.com/databricks/cli/bundle/config/variable" -// "github.com/stretchr/testify/assert" -// "github.com/stretchr/testify/require" -// ) + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/interpolation" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/config/variable" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) -// func TestVariables(t *testing.T) { -// t.Setenv("BUNDLE_VAR_b", "def") -// b := load(t, "./variables/vanilla") -// err := bundle.Apply(context.Background(), b, bundle.Seq( -// mutator.SetVariables(), -// interpolation.Interpolate( -// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), -// ))) -// require.NoError(t, err) -// assert.Equal(t, "abc def", b.Config.Bundle.Name) -// } +func TestVariables(t *testing.T) { + t.Setenv("BUNDLE_VAR_b", "def") + b := load(t, "./variables/vanilla") + err := bundle.Apply(context.Background(), b, bundle.Seq( + mutator.SetVariables(), + interpolation.Interpolate( + interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), + ))) + require.NoError(t, err) + assert.Equal(t, "abc def", b.Config.Bundle.Name) +} -// func TestVariablesLoadingFailsWhenRequiredVariableIsNotSpecified(t *testing.T) { -// b := load(t, "./variables/vanilla") -// err := bundle.Apply(context.Background(), b, 
bundle.Seq( -// mutator.SetVariables(), -// interpolation.Interpolate( -// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), -// ))) -// assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") -// } +func TestVariablesLoadingFailsWhenRequiredVariableIsNotSpecified(t *testing.T) { + b := load(t, "./variables/vanilla") + err := bundle.Apply(context.Background(), b, bundle.Seq( + mutator.SetVariables(), + interpolation.Interpolate( + interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), + ))) + assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") +} -// func TestVariablesTargetsBlockOverride(t *testing.T) { -// b := load(t, "./variables/env_overrides") -// err := bundle.Apply(context.Background(), b, bundle.Seq( -// mutator.SelectTarget("env-with-single-variable-override"), -// mutator.SetVariables(), -// interpolation.Interpolate( -// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), -// ))) -// require.NoError(t, err) -// assert.Equal(t, "default-a dev-b", b.Config.Workspace.Profile) -// } +func TestVariablesTargetsBlockOverride(t *testing.T) { + b := load(t, "./variables/env_overrides") + err := bundle.Apply(context.Background(), b, bundle.Seq( + mutator.SelectTarget("env-with-single-variable-override"), + mutator.SetVariables(), + interpolation.Interpolate( + interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), + ))) + require.NoError(t, err) + assert.Equal(t, "default-a dev-b", b.Config.Workspace.Profile) +} -// func TestVariablesTargetsBlockOverrideForMultipleVariables(t *testing.T) { -// b := load(t, "./variables/env_overrides") -// err := bundle.Apply(context.Background(), b, bundle.Seq( -// mutator.SelectTarget("env-with-two-variable-overrides"), -// mutator.SetVariables(), -// interpolation.Interpolate( -// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), -// ))) -// require.NoError(t, err) -// assert.Equal(t, "prod-a prod-b", b.Config.Workspace.Profile) -// } +func TestVariablesTargetsBlockOverrideForMultipleVariables(t *testing.T) { + b := load(t, "./variables/env_overrides") + err := bundle.Apply(context.Background(), b, bundle.Seq( + mutator.SelectTarget("env-with-two-variable-overrides"), + mutator.SetVariables(), + interpolation.Interpolate( + interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), + ))) + require.NoError(t, err) + assert.Equal(t, "prod-a prod-b", b.Config.Workspace.Profile) +} -// func TestVariablesTargetsBlockOverrideWithProcessEnvVars(t *testing.T) { -// t.Setenv("BUNDLE_VAR_b", "env-var-b") -// b := load(t, "./variables/env_overrides") -// err := bundle.Apply(context.Background(), b, bundle.Seq( -// mutator.SelectTarget("env-with-two-variable-overrides"), -// mutator.SetVariables(), -// interpolation.Interpolate( -// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), -// ))) -// require.NoError(t, err) -// assert.Equal(t, "prod-a env-var-b", b.Config.Workspace.Profile) -// } +func TestVariablesTargetsBlockOverrideWithProcessEnvVars(t *testing.T) { + t.Setenv("BUNDLE_VAR_b", "env-var-b") + b := load(t, "./variables/env_overrides") + err := bundle.Apply(context.Background(), b, bundle.Seq( + mutator.SelectTarget("env-with-two-variable-overrides"), + mutator.SetVariables(), + 
interpolation.Interpolate( + interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), + ))) + require.NoError(t, err) + assert.Equal(t, "prod-a env-var-b", b.Config.Workspace.Profile) +} -// func TestVariablesTargetsBlockOverrideWithMissingVariables(t *testing.T) { -// b := load(t, "./variables/env_overrides") -// err := bundle.Apply(context.Background(), b, bundle.Seq( -// mutator.SelectTarget("env-missing-a-required-variable-assignment"), -// mutator.SetVariables(), -// interpolation.Interpolate( -// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), -// ))) -// assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") -// } +func TestVariablesTargetsBlockOverrideWithMissingVariables(t *testing.T) { + b := load(t, "./variables/env_overrides") + err := bundle.Apply(context.Background(), b, bundle.Seq( + mutator.SelectTarget("env-missing-a-required-variable-assignment"), + mutator.SetVariables(), + interpolation.Interpolate( + interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), + ))) + assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") +} -// func TestVariablesTargetsBlockOverrideWithUndefinedVariables(t *testing.T) { -// b := load(t, "./variables/env_overrides") -// err := bundle.Apply(context.Background(), b, bundle.Seq( -// mutator.SelectTarget("env-using-an-undefined-variable"), -// mutator.SetVariables(), -// interpolation.Interpolate( -// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), -// ))) -// assert.ErrorContains(t, err, "variable c is not defined but is assigned a value") -// } +func TestVariablesTargetsBlockOverrideWithUndefinedVariables(t *testing.T) { + b := load(t, "./variables/env_overrides") + err := bundle.Apply(context.Background(), b, bundle.Seq( + mutator.SelectTarget("env-using-an-undefined-variable"), + mutator.SetVariables(), + interpolation.Interpolate( + interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), + ))) + assert.ErrorContains(t, err, "variable c is not defined but is assigned a value") +} -// func TestVariablesWithoutDefinition(t *testing.T) { -// t.Setenv("BUNDLE_VAR_a", "foo") -// t.Setenv("BUNDLE_VAR_b", "bar") -// b := load(t, "./variables/without_definition") -// err := bundle.Apply(context.Background(), b, mutator.SetVariables()) -// require.NoError(t, err) -// require.True(t, b.Config.Variables["a"].HasValue()) -// require.True(t, b.Config.Variables["b"].HasValue()) -// assert.Equal(t, "foo", *b.Config.Variables["a"].Value) -// assert.Equal(t, "bar", *b.Config.Variables["b"].Value) -// } +func TestVariablesWithoutDefinition(t *testing.T) { + t.Setenv("BUNDLE_VAR_a", "foo") + t.Setenv("BUNDLE_VAR_b", "bar") + b := load(t, "./variables/without_definition") + err := bundle.Apply(context.Background(), b, mutator.SetVariables()) + require.NoError(t, err) + require.True(t, b.Config.Variables["a"].HasValue()) + require.True(t, b.Config.Variables["b"].HasValue()) + assert.Equal(t, "foo", *b.Config.Variables["a"].Value) + assert.Equal(t, "bar", *b.Config.Variables["b"].Value) +} -// func TestVariablesWithoutDefinition(t *testing.T) { -// t.Setenv("BUNDLE_VAR_a", "foo") -// t.Setenv("BUNDLE_VAR_b", "bar") -// b := load(t, "./variables/without_definition") -// err := bundle.Apply(context.Background(), b, mutator.SetVariables()) -// 
require.NoError(t, err) -// require.True(t, b.Config.Variables["a"].HasValue()) -// require.True(t, b.Config.Variables["b"].HasValue()) -// assert.Equal(t, "foo", *b.Config.Variables["a"].Value) -// assert.Equal(t, "bar", *b.Config.Variables["b"].Value) -// } - -// func TestVariablesWithTargetLookupOverrides(t *testing.T) { -// b := load(t, "./variables/env_overrides") -// err := bundle.Apply(context.Background(), b, bundle.Seq( -// mutator.SelectTarget("env-overrides-lookup"), -// mutator.SetVariables(), -// interpolation.Interpolate( -// interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), -// ))) -// require.NoError(t, err) -// assert.Equal(t, "cluster: some-test-cluster", b.Config.Variables["d"].Lookup.String()) -// assert.Equal(t, "instance-pool: some-test-instance-pool", b.Config.Variables["e"].Lookup.String()) -// } +func TestVariablesWithTargetLookupOverrides(t *testing.T) { + b := load(t, "./variables/env_overrides") + err := bundle.Apply(context.Background(), b, bundle.Seq( + mutator.SelectTarget("env-overrides-lookup"), + mutator.SetVariables(), + )) + require.NoError(t, err) + assert.Equal(t, "cluster: some-test-cluster", b.Config.Variables["d"].Lookup.String()) + assert.Equal(t, "instance-pool: some-test-instance-pool", b.Config.Variables["e"].Lookup.String()) +} From ab423614094d7376ac4d7efdd5b542bb45da2837 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 23 Jan 2024 13:56:38 +0100 Subject: [PATCH 047/104] . --- bundle/config/root.go | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/bundle/config/root.go b/bundle/config/root.go index dd5ba9b6e1..004f951698 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -401,6 +401,11 @@ func (r *Root) MergeTargetOverrides(name string) error { func rewrite(v dyn.Value) (dyn.Value, error) { // For each target, rewrite the variables block. return dyn.Map(v, "targets", dyn.Foreach(func(target dyn.Value) (dyn.Value, error) { + // Confirm it has a variables block. + if target.Get("variables") == dyn.NilValue { + return target, nil + } + // For each variable, normalize its contents if it is a single string. return dyn.Map(target, "variables", dyn.Foreach(func(variable dyn.Value) (dyn.Value, error) { if variable.Kind() != dyn.KindString { @@ -419,8 +424,8 @@ func rewrite(v dyn.Value) (dyn.Value, error) { // validateVariableOverrides checks that all variables specified // in the target override are also defined in the root. func validateVariableOverrides(root, target dyn.Value) (err error) { - rv := make(map[string]variable.Variable) - tv := make(map[string]variable.Variable) + var rv map[string]variable.Variable + var tv map[string]variable.Variable // Collect variables from the root. 
err = convert.ToTyped(&rv, root.Get("variables")) From a95be0fd3f22a553adf4e45cb0d3a9f87732f13a Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 24 Jan 2024 11:28:55 +0100 Subject: [PATCH 048/104] wip: make variable resolution work through dyn --- bundle/config/interpolation/interpolation.go | 254 ------------------ .../interpolation/interpolation_test.go | 251 ----------------- bundle/config/interpolation/lookup.go | 51 ---- bundle/config/interpolation/lookup_test.go | 81 ------ bundle/config/interpolation/setter.go | 48 ---- .../mutator/resolve_variable_references.go | 61 +++++ bundle/config/variable/variable.go | 2 - bundle/deploy/terraform/interpolate.go | 66 +++-- bundle/phases/build.go | 6 +- bundle/phases/initialize.go | 10 +- bundle/tests/interpolation_test.go | 14 +- bundle/tests/variables_test.go | 51 ++-- 12 files changed, 135 insertions(+), 760 deletions(-) delete mode 100644 bundle/config/interpolation/interpolation.go delete mode 100644 bundle/config/interpolation/interpolation_test.go delete mode 100644 bundle/config/interpolation/lookup.go delete mode 100644 bundle/config/interpolation/lookup_test.go delete mode 100644 bundle/config/interpolation/setter.go create mode 100644 bundle/config/mutator/resolve_variable_references.go diff --git a/bundle/config/interpolation/interpolation.go b/bundle/config/interpolation/interpolation.go deleted file mode 100644 index 8ba0b8b1ff..0000000000 --- a/bundle/config/interpolation/interpolation.go +++ /dev/null @@ -1,254 +0,0 @@ -package interpolation - -import ( - "context" - "errors" - "fmt" - "reflect" - "regexp" - "sort" - "strings" - - "slices" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/variable" - "golang.org/x/exp/maps" -) - -const Delimiter = "." - -// must start with alphabet, support hyphens and underscores in middle but must end with character -var re = regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\}`) - -type stringField struct { - path string - - getter - setter -} - -func newStringField(path string, g getter, s setter) *stringField { - return &stringField{ - path: path, - - getter: g, - setter: s, - } -} - -func (s *stringField) dependsOn() []string { - var out []string - m := re.FindAllStringSubmatch(s.Get(), -1) - for i := range m { - out = append(out, m[i][1]) - } - return out -} - -func (s *stringField) interpolate(fns []LookupFunction, lookup map[string]string) { - out := re.ReplaceAllStringFunc(s.Get(), func(s string) string { - // Turn the whole match into the submatch. - match := re.FindStringSubmatch(s) - for _, fn := range fns { - v, err := fn(match[1], lookup) - if errors.Is(err, ErrSkipInterpolation) { - continue - } - if err != nil { - panic(err) - } - return v - } - - // No substitution. - return s - }) - - s.Set(out) -} - -type accumulator struct { - // all string fields in the bundle config - strings map[string]*stringField - - // contains path -> resolved_string mapping for string fields in the config - // The resolved strings will NOT contain any variable references that could - // have been resolved, however there might still be references that cannot - // be resolved - memo map[string]string -} - -// jsonFieldName returns the name in a field's `json` tag. -// Returns the empty string if it isn't set. 
-func jsonFieldName(sf reflect.StructField) string { - tag, ok := sf.Tag.Lookup("json") - if !ok { - return "" - } - parts := strings.Split(tag, ",") - if parts[0] == "-" { - return "" - } - return parts[0] -} - -func (a *accumulator) walkStruct(scope []string, rv reflect.Value) { - num := rv.NumField() - for i := 0; i < num; i++ { - sf := rv.Type().Field(i) - f := rv.Field(i) - - // Walk field with the same scope for anonymous (embedded) fields. - if sf.Anonymous { - a.walk(scope, f, anySetter{f}) - continue - } - - // Skip unnamed fields. - fieldName := jsonFieldName(rv.Type().Field(i)) - if fieldName == "" { - continue - } - - a.walk(append(scope, fieldName), f, anySetter{f}) - } -} - -func (a *accumulator) walk(scope []string, rv reflect.Value, s setter) { - // Dereference pointer. - if rv.Type().Kind() == reflect.Pointer { - // Skip nil pointers. - if rv.IsNil() { - return - } - rv = rv.Elem() - s = anySetter{rv} - } - - switch rv.Type().Kind() { - case reflect.String: - path := strings.Join(scope, Delimiter) - a.strings[path] = newStringField(path, anyGetter{rv}, s) - - // register alias for variable value. `var.foo` would be the alias for - // `variables.foo.value` - if len(scope) == 3 && scope[0] == "variables" && scope[2] == "value" { - aliasPath := strings.Join([]string{variable.VariableReferencePrefix, scope[1]}, Delimiter) - a.strings[aliasPath] = a.strings[path] - } - case reflect.Struct: - a.walkStruct(scope, rv) - case reflect.Map: - if rv.Type().Key().Kind() != reflect.String { - panic("only support string keys in map") - } - keys := rv.MapKeys() - for _, key := range keys { - a.walk(append(scope, key.String()), rv.MapIndex(key), mapSetter{rv, key}) - } - case reflect.Slice: - n := rv.Len() - name := scope[len(scope)-1] - base := scope[:len(scope)-1] - for i := 0; i < n; i++ { - element := rv.Index(i) - a.walk(append(base, fmt.Sprintf("%s[%d]", name, i)), element, anySetter{element}) - } - } -} - -// walk and gather all string fields in the config -func (a *accumulator) start(v any) { - rv := reflect.ValueOf(v) - if rv.Type().Kind() != reflect.Pointer { - panic("expect pointer") - } - rv = rv.Elem() - if rv.Type().Kind() != reflect.Struct { - panic("expect struct") - } - - a.strings = make(map[string]*stringField) - a.memo = make(map[string]string) - a.walk([]string{}, rv, nilSetter{}) -} - -// recursively interpolate variables in a depth first manner -func (a *accumulator) Resolve(path string, seenPaths []string, fns ...LookupFunction) error { - // return early if the path is already resolved - if _, ok := a.memo[path]; ok { - return nil - } - - // fetch the string node to resolve - field, ok := a.strings[path] - if !ok { - return fmt.Errorf("no value found for interpolation reference: ${%s}", path) - } - - // return early if the string field has no variables to interpolate - if len(field.dependsOn()) == 0 { - a.memo[path] = field.Get() - return nil - } - - // resolve all variables refered in the root string field - for _, childFieldPath := range field.dependsOn() { - // error if there is a loop in variable interpolation - if slices.Contains(seenPaths, childFieldPath) { - return fmt.Errorf("cycle detected in field resolution: %s", strings.Join(append(seenPaths, childFieldPath), " -> ")) - } - - // recursive resolve variables in the child fields - err := a.Resolve(childFieldPath, append(seenPaths, childFieldPath), fns...) 
- if err != nil { - return err - } - } - - // interpolate root string once all variable references in it have been resolved - field.interpolate(fns, a.memo) - - // record interpolated string in memo - a.memo[path] = field.Get() - return nil -} - -// Interpolate all string fields in the config -func (a *accumulator) expand(fns ...LookupFunction) error { - // sorting paths for stable order of iteration - paths := maps.Keys(a.strings) - sort.Strings(paths) - - // iterate over paths for all strings fields in the config - for _, path := range paths { - err := a.Resolve(path, []string{path}, fns...) - if err != nil { - return err - } - } - return nil -} - -type interpolate struct { - fns []LookupFunction -} - -func (m *interpolate) expand(v any) error { - a := accumulator{} - a.start(v) - return a.expand(m.fns...) -} - -func Interpolate(fns ...LookupFunction) bundle.Mutator { - return &interpolate{fns: fns} -} - -func (m *interpolate) Name() string { - return "Interpolate" -} - -func (m *interpolate) Apply(_ context.Context, b *bundle.Bundle) error { - return m.expand(&b.Config) -} diff --git a/bundle/config/interpolation/interpolation_test.go b/bundle/config/interpolation/interpolation_test.go deleted file mode 100644 index cccb6dc718..0000000000 --- a/bundle/config/interpolation/interpolation_test.go +++ /dev/null @@ -1,251 +0,0 @@ -package interpolation - -import ( - "testing" - - "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/config/variable" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -type nest struct { - X string `json:"x"` - Y *string `json:"y"` - Z map[string]string `json:"z"` -} - -type foo struct { - A string `json:"a"` - B string `json:"b"` - C string `json:"c"` - - // Pointer field - D *string `json:"d"` - - // Struct field - E nest `json:"e"` - - // Map field - F map[string]string `json:"f"` -} - -func expand(v any) error { - a := accumulator{} - a.start(v) - return a.expand(DefaultLookup) -} - -func TestInterpolationVariables(t *testing.T) { - f := foo{ - A: "a", - B: "${a}", - C: "${a}", - } - - err := expand(&f) - require.NoError(t, err) - - assert.Equal(t, "a", f.A) - assert.Equal(t, "a", f.B) - assert.Equal(t, "a", f.C) -} - -func TestInterpolationVariablesSpecialChars(t *testing.T) { - type bar struct { - A string `json:"a-b"` - B string `json:"b_c"` - C string `json:"c-_a"` - } - f := bar{ - A: "a", - B: "${a-b}", - C: "${a-b}", - } - - err := expand(&f) - require.NoError(t, err) - - assert.Equal(t, "a", f.A) - assert.Equal(t, "a", f.B) - assert.Equal(t, "a", f.C) -} - -func TestInterpolationValidMatches(t *testing.T) { - expectedMatches := map[string]string{ - "${hello_world.world_world}": "hello_world.world_world", - "${helloworld.world-world}": "helloworld.world-world", - "${hello-world.world-world}": "hello-world.world-world", - } - for interpolationStr, expectedMatch := range expectedMatches { - match := re.FindStringSubmatch(interpolationStr) - assert.True(t, len(match) > 0, - "Failed to match %s and find %s", interpolationStr, expectedMatch) - assert.Equal(t, expectedMatch, match[1], - "Failed to match the exact pattern %s and find %s", interpolationStr, expectedMatch) - } -} - -func TestInterpolationInvalidMatches(t *testing.T) { - invalidMatches := []string{ - "${hello_world-.world_world}", // the first segment ending must not end with hyphen (-) - "${hello_world-_.world_world}", // the first segment ending must not end with underscore (_) - "${helloworld.world-world-}", // second segment must 
not end with hyphen (-) - "${helloworld-.world-world}", // first segment must not end with hyphen (-) - "${helloworld.-world-world}", // second segment must not start with hyphen (-) - "${-hello-world.-world-world-}", // must not start or end with hyphen (-) - "${_-_._-_.id}", // cannot use _- in sequence - "${0helloworld.world-world}", // interpolated first section shouldn't start with number - "${helloworld.9world-world}", // interpolated second section shouldn't start with number - "${a-a.a-_a-a.id}", // fails because of -_ in the second segment - "${a-a.a--a-a.id}", // fails because of -- in the second segment - } - for _, invalidMatch := range invalidMatches { - match := re.FindStringSubmatch(invalidMatch) - assert.True(t, len(match) == 0, "Should be invalid interpolation: %s", invalidMatch) - } -} - -func TestInterpolationWithPointers(t *testing.T) { - fd := "${a}" - f := foo{ - A: "a", - D: &fd, - } - - err := expand(&f) - require.NoError(t, err) - - assert.Equal(t, "a", f.A) - assert.Equal(t, "a", *f.D) -} - -func TestInterpolationWithStruct(t *testing.T) { - fy := "${e.x}" - f := foo{ - A: "${e.x}", - E: nest{ - X: "x", - Y: &fy, - }, - } - - err := expand(&f) - require.NoError(t, err) - - assert.Equal(t, "x", f.A) - assert.Equal(t, "x", f.E.X) - assert.Equal(t, "x", *f.E.Y) -} - -func TestInterpolationWithMap(t *testing.T) { - f := foo{ - A: "${f.a}", - F: map[string]string{ - "a": "a", - "b": "${f.a}", - }, - } - - err := expand(&f) - require.NoError(t, err) - - assert.Equal(t, "a", f.A) - assert.Equal(t, "a", f.F["a"]) - assert.Equal(t, "a", f.F["b"]) -} - -func TestInterpolationWithResursiveVariableReferences(t *testing.T) { - f := foo{ - A: "a", - B: "(${a})", - C: "${a} ${b}", - } - - err := expand(&f) - require.NoError(t, err) - - assert.Equal(t, "a", f.A) - assert.Equal(t, "(a)", f.B) - assert.Equal(t, "a (a)", f.C) -} - -func TestInterpolationVariableLoopError(t *testing.T) { - d := "${b}" - f := foo{ - A: "a", - B: "${c}", - C: "${d}", - D: &d, - } - - err := expand(&f) - assert.ErrorContains(t, err, "cycle detected in field resolution: b -> c -> d -> b") -} - -func TestInterpolationForVariables(t *testing.T) { - foo := "abc" - bar := "${var.foo} def" - apple := "${var.foo} ${var.bar}" - config := config.Root{ - Variables: map[string]*variable.Variable{ - "foo": { - Value: &foo, - }, - "bar": { - Value: &bar, - }, - "apple": { - Value: &apple, - }, - }, - Bundle: config.Bundle{ - Name: "${var.apple} ${var.foo}", - }, - } - - err := expand(&config) - assert.NoError(t, err) - assert.Equal(t, "abc", *(config.Variables["foo"].Value)) - assert.Equal(t, "abc def", *(config.Variables["bar"].Value)) - assert.Equal(t, "abc abc def", *(config.Variables["apple"].Value)) - assert.Equal(t, "abc abc def abc", config.Bundle.Name) -} - -func TestInterpolationLoopForVariables(t *testing.T) { - foo := "${var.bar}" - bar := "${var.foo}" - config := config.Root{ - Variables: map[string]*variable.Variable{ - "foo": { - Value: &foo, - }, - "bar": { - Value: &bar, - }, - }, - Bundle: config.Bundle{ - Name: "${var.foo}", - }, - } - - err := expand(&config) - assert.ErrorContains(t, err, "cycle detected in field resolution: bundle.name -> var.foo -> var.bar -> var.foo") -} - -func TestInterpolationInvalidVariableReference(t *testing.T) { - foo := "abc" - config := config.Root{ - Variables: map[string]*variable.Variable{ - "foo": { - Value: &foo, - }, - }, - Bundle: config.Bundle{ - Name: "${vars.foo}", - }, - } - - err := expand(&config) - assert.ErrorContains(t, err, "no value found for 
interpolation reference: ${vars.foo}") -} diff --git a/bundle/config/interpolation/lookup.go b/bundle/config/interpolation/lookup.go deleted file mode 100644 index 3dc5047a75..0000000000 --- a/bundle/config/interpolation/lookup.go +++ /dev/null @@ -1,51 +0,0 @@ -package interpolation - -import ( - "errors" - "fmt" - "slices" - "strings" -) - -// LookupFunction returns the value to rewrite a path expression to. -type LookupFunction func(path string, depends map[string]string) (string, error) - -// ErrSkipInterpolation can be used to fall through from [LookupFunction]. -var ErrSkipInterpolation = errors.New("skip interpolation") - -// DefaultLookup looks up the specified path in the map. -// It returns an error if it doesn't exist. -func DefaultLookup(path string, lookup map[string]string) (string, error) { - v, ok := lookup[path] - if !ok { - return "", fmt.Errorf("expected to find value for path: %s", path) - } - return v, nil -} - -func pathPrefixMatches(prefix []string, path string) bool { - parts := strings.Split(path, Delimiter) - return len(parts) >= len(prefix) && slices.Compare(prefix, parts[0:len(prefix)]) == 0 -} - -// ExcludeLookupsInPath is a lookup function that skips lookups for the specified path. -func ExcludeLookupsInPath(exclude ...string) LookupFunction { - return func(path string, lookup map[string]string) (string, error) { - if pathPrefixMatches(exclude, path) { - return "", ErrSkipInterpolation - } - - return DefaultLookup(path, lookup) - } -} - -// IncludeLookupsInPath is a lookup function that limits lookups to the specified path. -func IncludeLookupsInPath(include ...string) LookupFunction { - return func(path string, lookup map[string]string) (string, error) { - if !pathPrefixMatches(include, path) { - return "", ErrSkipInterpolation - } - - return DefaultLookup(path, lookup) - } -} diff --git a/bundle/config/interpolation/lookup_test.go b/bundle/config/interpolation/lookup_test.go deleted file mode 100644 index 61628bf042..0000000000 --- a/bundle/config/interpolation/lookup_test.go +++ /dev/null @@ -1,81 +0,0 @@ -package interpolation - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -type interpolationFixture struct { - A map[string]string `json:"a"` - B map[string]string `json:"b"` - C map[string]string `json:"c"` -} - -func fixture() interpolationFixture { - return interpolationFixture{ - A: map[string]string{ - "x": "1", - }, - B: map[string]string{ - "x": "2", - }, - C: map[string]string{ - "ax": "${a.x}", - "bx": "${b.x}", - }, - } -} - -func TestExcludePath(t *testing.T) { - tmp := fixture() - m := interpolate{ - fns: []LookupFunction{ - ExcludeLookupsInPath("a"), - }, - } - - err := m.expand(&tmp) - require.NoError(t, err) - - assert.Equal(t, "1", tmp.A["x"]) - assert.Equal(t, "2", tmp.B["x"]) - assert.Equal(t, "${a.x}", tmp.C["ax"]) - assert.Equal(t, "2", tmp.C["bx"]) -} - -func TestIncludePath(t *testing.T) { - tmp := fixture() - m := interpolate{ - fns: []LookupFunction{ - IncludeLookupsInPath("a"), - }, - } - - err := m.expand(&tmp) - require.NoError(t, err) - - assert.Equal(t, "1", tmp.A["x"]) - assert.Equal(t, "2", tmp.B["x"]) - assert.Equal(t, "1", tmp.C["ax"]) - assert.Equal(t, "${b.x}", tmp.C["bx"]) -} - -func TestIncludePathMultiple(t *testing.T) { - tmp := fixture() - m := interpolate{ - fns: []LookupFunction{ - IncludeLookupsInPath("a"), - IncludeLookupsInPath("b"), - }, - } - - err := m.expand(&tmp) - require.NoError(t, err) - - assert.Equal(t, "1", tmp.A["x"]) - assert.Equal(t, "2", 
tmp.B["x"]) - assert.Equal(t, "1", tmp.C["ax"]) - assert.Equal(t, "2", tmp.C["bx"]) -} diff --git a/bundle/config/interpolation/setter.go b/bundle/config/interpolation/setter.go deleted file mode 100644 index cce39c6111..0000000000 --- a/bundle/config/interpolation/setter.go +++ /dev/null @@ -1,48 +0,0 @@ -package interpolation - -import "reflect" - -// String values in maps are not addressable and therefore not settable -// through Go's reflection mechanism. This interface solves this limitation -// by wrapping the setter differently for addressable values and map values. -type setter interface { - Set(string) -} - -type nilSetter struct{} - -func (nilSetter) Set(_ string) { - panic("nil setter") -} - -type anySetter struct { - rv reflect.Value -} - -func (s anySetter) Set(str string) { - s.rv.SetString(str) -} - -type mapSetter struct { - // map[string]string - m reflect.Value - - // key - k reflect.Value -} - -func (s mapSetter) Set(str string) { - s.m.SetMapIndex(s.k, reflect.ValueOf(str)) -} - -type getter interface { - Get() string -} - -type anyGetter struct { - rv reflect.Value -} - -func (g anyGetter) Get() string { - return g.rv.String() -} diff --git a/bundle/config/mutator/resolve_variable_references.go b/bundle/config/mutator/resolve_variable_references.go new file mode 100644 index 0000000000..3abe6c3303 --- /dev/null +++ b/bundle/config/mutator/resolve_variable_references.go @@ -0,0 +1,61 @@ +package mutator + +import ( + "context" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/dynvar" +) + +type resolveVariableReferences struct { + prefixes []string +} + +func ResolveVariableReferences(prefixes ...string) bundle.Mutator { + return &resolveVariableReferences{prefixes: prefixes} +} + +func (*resolveVariableReferences) Name() string { + return "ResolveVariableReferences" +} + +func (m *resolveVariableReferences) Validate(ctx context.Context, b *bundle.Bundle) error { + return nil +} + +func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle) error { + prefixes := make([]dyn.Path, len(m.prefixes)) + for i, prefix := range m.prefixes { + prefixes[i] = dyn.MustPathFromString(prefix) + } + + // The path ${var.foo} is a shorthand for ${variables.foo.value}. + // We rewrite it here to make the resolution logic simpler. + varPath := dyn.NewPath(dyn.Key("var")) + + return b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) { + lookup := dynvar.DefaultLookup(root) + + // Resolve variable references in all values. + return dynvar.Resolve(root, func(path dyn.Path) (dyn.Value, error) { + // Rewrite the shorthand path ${var.foo} into ${variables.foo.value}. + if path.HasPrefix(varPath) && len(path) == 2 { + path = dyn.NewPath( + dyn.Key("variables"), + path[1], + dyn.Key("value"), + ) + } + + // Perform resolution only if the path starts with one of the specified prefixes. 
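+ // For example, when this mutator is constructed with the prefixes "bundle",
+ // "workspace", and "variables" (as in phases/initialize.go below), references
+ // into those trees are resolved here, while any other reference (e.g. one
+ // under "resources") falls through to ErrSkipResolution and is left in place
+ // for a later mutator to resolve.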
+ for _, prefix := range prefixes { + if path.HasPrefix(prefix) { + return lookup(path) + } + } + + return dyn.InvalidValue, dynvar.ErrSkipResolution + }) + }) +} diff --git a/bundle/config/variable/variable.go b/bundle/config/variable/variable.go index 9057f1cb95..5e700a9b0c 100644 --- a/bundle/config/variable/variable.go +++ b/bundle/config/variable/variable.go @@ -4,8 +4,6 @@ import ( "fmt" ) -const VariableReferencePrefix = "var" - // An input variable for the bundle config type Variable struct { // A default value which then makes the variable optional diff --git a/bundle/deploy/terraform/interpolate.go b/bundle/deploy/terraform/interpolate.go index 4f00c27ebb..2fd5c3b025 100644 --- a/bundle/deploy/terraform/interpolate.go +++ b/bundle/deploy/terraform/interpolate.go @@ -1,44 +1,42 @@ package terraform import ( - "fmt" - "strings" - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/interpolation" + "github.com/databricks/cli/bundle/config/mutator" ) -// Rewrite variable references to resources into Terraform compatible format. -func interpolateTerraformResourceIdentifiers(path string, lookup map[string]string) (string, error) { - parts := strings.Split(path, interpolation.Delimiter) - if parts[0] == "resources" { - switch parts[1] { - case "pipelines": - path = strings.Join(append([]string{"databricks_pipeline"}, parts[2:]...), interpolation.Delimiter) - return fmt.Sprintf("${%s}", path), nil - case "jobs": - path = strings.Join(append([]string{"databricks_job"}, parts[2:]...), interpolation.Delimiter) - return fmt.Sprintf("${%s}", path), nil - case "models": - path = strings.Join(append([]string{"databricks_mlflow_model"}, parts[2:]...), interpolation.Delimiter) - return fmt.Sprintf("${%s}", path), nil - case "experiments": - path = strings.Join(append([]string{"databricks_mlflow_experiment"}, parts[2:]...), interpolation.Delimiter) - return fmt.Sprintf("${%s}", path), nil - case "model_serving_endpoints": - path = strings.Join(append([]string{"databricks_model_serving"}, parts[2:]...), interpolation.Delimiter) - return fmt.Sprintf("${%s}", path), nil - case "registered_models": - path = strings.Join(append([]string{"databricks_registered_model"}, parts[2:]...), interpolation.Delimiter) - return fmt.Sprintf("${%s}", path), nil - default: - panic("TODO: " + parts[1]) - } - } +// // Rewrite variable references to resources into Terraform compatible format. 
+// func interpolateTerraformResourceIdentifiers(path string, lookup map[string]string) (string, error) { +// parts := strings.Split(path, interpolation.Delimiter) +// if parts[0] == "resources" { +// switch parts[1] { +// case "pipelines": +// path = strings.Join(append([]string{"databricks_pipeline"}, parts[2:]...), interpolation.Delimiter) +// return fmt.Sprintf("${%s}", path), nil +// case "jobs": +// path = strings.Join(append([]string{"databricks_job"}, parts[2:]...), interpolation.Delimiter) +// return fmt.Sprintf("${%s}", path), nil +// case "models": +// path = strings.Join(append([]string{"databricks_mlflow_model"}, parts[2:]...), interpolation.Delimiter) +// return fmt.Sprintf("${%s}", path), nil +// case "experiments": +// path = strings.Join(append([]string{"databricks_mlflow_experiment"}, parts[2:]...), interpolation.Delimiter) +// return fmt.Sprintf("${%s}", path), nil +// case "model_serving_endpoints": +// path = strings.Join(append([]string{"databricks_model_serving"}, parts[2:]...), interpolation.Delimiter) +// return fmt.Sprintf("${%s}", path), nil +// case "registered_models": +// path = strings.Join(append([]string{"databricks_registered_model"}, parts[2:]...), interpolation.Delimiter) +// return fmt.Sprintf("${%s}", path), nil +// default: +// panic("TODO: " + parts[1]) +// } +// } - return interpolation.DefaultLookup(path, lookup) -} +// return interpolation.DefaultLookup(path, lookup) +// } func Interpolate() bundle.Mutator { - return interpolation.Interpolate(interpolateTerraformResourceIdentifiers) + return mutator.ResolveVariableReferences("foobar") + // return interpolation.Interpolate(interpolateTerraformResourceIdentifiers) } diff --git a/bundle/phases/build.go b/bundle/phases/build.go index 760967fca2..362d23be14 100644 --- a/bundle/phases/build.go +++ b/bundle/phases/build.go @@ -4,7 +4,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/artifacts" "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/config/interpolation" + "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/scripts" ) @@ -18,8 +18,8 @@ func Build() bundle.Mutator { artifacts.InferMissingProperties(), artifacts.BuildAll(), scripts.Execute(config.ScriptPostBuild), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath("artifacts"), + mutator.ResolveVariableReferences( + "artifacts", ), }, ) diff --git a/bundle/phases/initialize.go b/bundle/phases/initialize.go index f65289e1b1..e3042686b3 100644 --- a/bundle/phases/initialize.go +++ b/bundle/phases/initialize.go @@ -3,9 +3,7 @@ package phases import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/config/interpolation" "github.com/databricks/cli/bundle/config/mutator" - "github.com/databricks/cli/bundle/config/variable" "github.com/databricks/cli/bundle/deploy/metadata" "github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/permissions" @@ -28,10 +26,10 @@ func Initialize() bundle.Mutator { mutator.DefineDefaultWorkspacePaths(), mutator.SetVariables(), mutator.ResolveResourceReferences(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath("bundle"), - interpolation.IncludeLookupsInPath("workspace"), - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), + mutator.ResolveVariableReferences( + "bundle", + "workspace", + "variables", ), mutator.OverrideCompute(), mutator.ProcessTargetMode(), diff --git 
a/bundle/tests/interpolation_test.go b/bundle/tests/interpolation_test.go index 837891a072..a9659d33f8 100644 --- a/bundle/tests/interpolation_test.go +++ b/bundle/tests/interpolation_test.go @@ -5,16 +5,16 @@ import ( "testing" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/interpolation" + "github.com/databricks/cli/bundle/config/mutator" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestInterpolation(t *testing.T) { b := load(t, "./interpolation") - err := bundle.Apply(context.Background(), b, interpolation.Interpolate( - interpolation.IncludeLookupsInPath("bundle"), - interpolation.IncludeLookupsInPath("workspace"), + err := bundle.Apply(context.Background(), b, mutator.ResolveVariableReferences( + "bundle", + "workspace", )) require.NoError(t, err) assert.Equal(t, "foo bar", b.Config.Bundle.Name) @@ -23,9 +23,9 @@ func TestInterpolation(t *testing.T) { func TestInterpolationWithTarget(t *testing.T) { b := loadTarget(t, "./interpolation_target", "development") - err := bundle.Apply(context.Background(), b, interpolation.Interpolate( - interpolation.IncludeLookupsInPath("bundle"), - interpolation.IncludeLookupsInPath("workspace"), + err := bundle.Apply(context.Background(), b, mutator.ResolveVariableReferences( + "bundle", + "workspace", )) require.NoError(t, err) assert.Equal(t, "foo bar", b.Config.Bundle.Name) diff --git a/bundle/tests/variables_test.go b/bundle/tests/variables_test.go index 82f917aa40..05314a8465 100644 --- a/bundle/tests/variables_test.go +++ b/bundle/tests/variables_test.go @@ -5,9 +5,7 @@ import ( "testing" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/interpolation" "github.com/databricks/cli/bundle/config/mutator" - "github.com/databricks/cli/bundle/config/variable" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -17,9 +15,10 @@ func TestVariables(t *testing.T) { b := load(t, "./variables/vanilla") err := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SetVariables(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), - ))) + mutator.ResolveVariableReferences( + "variables", + ), + )) require.NoError(t, err) assert.Equal(t, "abc def", b.Config.Bundle.Name) } @@ -28,9 +27,10 @@ func TestVariablesLoadingFailsWhenRequiredVariableIsNotSpecified(t *testing.T) { b := load(t, "./variables/vanilla") err := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SetVariables(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), - ))) + mutator.ResolveVariableReferences( + "variables", + ), + )) assert.ErrorContains(t, err, "no value assigned to required variable b. 
Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") } @@ -39,9 +39,10 @@ func TestVariablesTargetsBlockOverride(t *testing.T) { err := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SelectTarget("env-with-single-variable-override"), mutator.SetVariables(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), - ))) + mutator.ResolveVariableReferences( + "variables", + ), + )) require.NoError(t, err) assert.Equal(t, "default-a dev-b", b.Config.Workspace.Profile) } @@ -51,9 +52,10 @@ func TestVariablesTargetsBlockOverrideForMultipleVariables(t *testing.T) { err := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SelectTarget("env-with-two-variable-overrides"), mutator.SetVariables(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), - ))) + mutator.ResolveVariableReferences( + "variables", + ), + )) require.NoError(t, err) assert.Equal(t, "prod-a prod-b", b.Config.Workspace.Profile) } @@ -64,9 +66,10 @@ func TestVariablesTargetsBlockOverrideWithProcessEnvVars(t *testing.T) { err := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SelectTarget("env-with-two-variable-overrides"), mutator.SetVariables(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), - ))) + mutator.ResolveVariableReferences( + "variables", + ), + )) require.NoError(t, err) assert.Equal(t, "prod-a env-var-b", b.Config.Workspace.Profile) } @@ -76,9 +79,10 @@ func TestVariablesTargetsBlockOverrideWithMissingVariables(t *testing.T) { err := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SelectTarget("env-missing-a-required-variable-assignment"), mutator.SetVariables(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), - ))) + mutator.ResolveVariableReferences( + "variables", + ), + )) assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") } @@ -87,9 +91,10 @@ func TestVariablesTargetsBlockOverrideWithUndefinedVariables(t *testing.T) { err := bundle.Apply(context.Background(), b, bundle.Seq( mutator.SelectTarget("env-using-an-undefined-variable"), mutator.SetVariables(), - interpolation.Interpolate( - interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), - ))) + mutator.ResolveVariableReferences( + "variables", + ), + )) assert.ErrorContains(t, err, "variable c is not defined but is assigned a value") } From d0d7536f0c9a5ab712dfbbd8bc026a2856a66433 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 24 Jan 2024 14:17:14 +0100 Subject: [PATCH 049/104] Add functionality to visit values in `dyn.Value` tree This change adds the following functions: * `dyn.Get(value, "foo.bar") -> (dyn.Value, error)` * `dyn.Set(value, "foo.bar", newValue) -> (dyn.Value, error)` * `dyn.Map(value, "foo.bar", func) -> (dyn.Value, error)` And equivalent functions that take a previously constructed `dyn.Path`: * `dyn.GetByPath(value, dyn.Path) -> (dyn.Value, error)` * `dyn.SetByPath(value, dyn.Path, newValue) -> (dyn.Value, error)` * `dyn.MapByPath(value, dyn.Path, func) -> (dyn.Value, error)` Whenever the "set" and "map" functions need to change a value, they return a new `dyn.Value` such that the input value is never changed. 
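A minimal usage sketch of how these functions compose, assuming only the functions listed above plus the `dyn.V` constructor and the `MustString`/`AsAny` accessors exercised in the tests; the keys and values ("foo.bar", "hello") are illustrative:

    package main

    import (
        "fmt"

        "github.com/databricks/cli/libs/dyn"
    )

    func main() {
        // Build a small configuration tree: {"foo": {"bar": "hello"}}.
        v := dyn.V(map[string]dyn.Value{
            "foo": dyn.V(map[string]dyn.Value{
                "bar": dyn.V("hello"),
            }),
        })

        // Get reads the value at a string path.
        bar, err := dyn.Get(v, "foo.bar")
        if err != nil {
            panic(err)
        }
        fmt.Println(bar.MustString()) // hello

        // Set returns a new tree with the value assigned; the input is never modified.
        nv, err := dyn.Set(v, "foo.bar", dyn.V("world"))
        if err != nil {
            panic(err)
        }
        fmt.Println(nv.AsAny()) // the copy reflects the new value
        fmt.Println(v.AsAny())  // the original still holds "hello"

        // Map applies a function to the value at the path and returns
        // a new tree with all intermediate values updated.
        mv, err := dyn.Map(v, "foo.bar", func(ev dyn.Value) (dyn.Value, error) {
            return dyn.V(ev.MustString() + "!"), nil
        })
        if err != nil {
            panic(err)
        }
        fmt.Println(mv.AsAny())
    }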
--- libs/dyn/path.go | 8 ++ libs/dyn/value.go | 25 +++++ libs/dyn/visit.go | 139 +++++++++++++++++++++++++ libs/dyn/visit_get.go | 25 +++++ libs/dyn/visit_get_test.go | 76 ++++++++++++++ libs/dyn/visit_map.go | 77 ++++++++++++++ libs/dyn/visit_map_test.go | 202 +++++++++++++++++++++++++++++++++++++ libs/dyn/visit_set.go | 24 +++++ libs/dyn/visit_set_test.go | 90 +++++++++++++++++ libs/dyn/visit_test.go | 1 + 10 files changed, 667 insertions(+) create mode 100644 libs/dyn/visit.go create mode 100644 libs/dyn/visit_get.go create mode 100644 libs/dyn/visit_get_test.go create mode 100644 libs/dyn/visit_map.go create mode 100644 libs/dyn/visit_map_test.go create mode 100644 libs/dyn/visit_set.go create mode 100644 libs/dyn/visit_set_test.go create mode 100644 libs/dyn/visit_test.go diff --git a/libs/dyn/path.go b/libs/dyn/path.go index bfd93dad5b..34285de145 100644 --- a/libs/dyn/path.go +++ b/libs/dyn/path.go @@ -10,6 +10,14 @@ type pathComponent struct { index int } +func (c pathComponent) isKey() bool { + return c.key != "" +} + +func (c pathComponent) isIndex() bool { + return c.key == "" +} + // Path represents a path to a value in a [Value] configuration tree. type Path []pathComponent diff --git a/libs/dyn/value.go b/libs/dyn/value.go index bbb8ad3e4f..a487e13e12 100644 --- a/libs/dyn/value.go +++ b/libs/dyn/value.go @@ -134,3 +134,28 @@ func (v Value) MarkAnchor() Value { func (v Value) IsAnchor() bool { return v.anchor } + +// eq is an internal only method that compares two values. +// It is used to determine if a value has changed during a visit. +// We need a custom implementation because maps and slices +// cannot be compared with the regular == operator. +func (v Value) eq(w Value) bool { + if v.k != w.k || v.l != w.l { + return false + } + + switch v.k { + case KindMap: + // Compare pointers to the underlying map. + // This is safe because we don't allow maps to be mutated. + return &v.v == &w.v + case KindSequence: + // Compare pointers to the underlying slice and slice length. + // This is safe because we don't allow slices to be mutated. + vs := v.v.([]Value) + ws := w.v.([]Value) + return &vs[0] == &ws[0] && len(vs) == len(ws) + default: + return v.v == w.v + } +} diff --git a/libs/dyn/visit.go b/libs/dyn/visit.go new file mode 100644 index 0000000000..077fd51c5a --- /dev/null +++ b/libs/dyn/visit.go @@ -0,0 +1,139 @@ +package dyn + +import ( + "errors" + "fmt" + "maps" + "slices" +) + +type noSuchKeyError struct { + p Path +} + +func (e noSuchKeyError) Error() string { + return fmt.Sprintf("key not found at %q", e.p) +} + +func IsNoSuchKeyError(err error) bool { + var target noSuchKeyError + return errors.As(err, &target) +} + +type indexOutOfBoundsError struct { + p Path +} + +func (e indexOutOfBoundsError) Error() string { + return fmt.Sprintf("index out of bounds at %q", e.p) +} + +func IsIndexOutOfBoundsError(err error) bool { + var target indexOutOfBoundsError + return errors.As(err, &target) +} + +type visitOptions struct { + // The function to apply to the value once found. + // + // If this function returns the same value as it receives as argument, + // the original visit function call returns the original value unmodified. + // + // If this function returns a new value, the original visit function call + // returns a value with all the intermediate values updated. + // + // If this function returns an error, the original visit function call + // returns this error and the value is left unmodified. 
+ fn func(Value) (Value, error) + + // If set, tolerate the absence of the last component in the path. + // This option is needed to set a key in a map that is not yet present. + allowMissingKeyInMap bool +} + +func visit(v Value, prefix, suffix Path, opts visitOptions) (Value, error) { + if len(suffix) == 0 { + return opts.fn(v) + } + + // Initialize prefix if it is empty. + // It is pre-allocated to its maximum size to avoid additional allocations. + if len(prefix) == 0 { + prefix = make(Path, 0, len(suffix)) + } + + component := suffix[0] + prefix = prefix.Append(component) + suffix = suffix[1:] + + switch { + case component.isKey(): + // Expect a map to be set if this is a key. + m, ok := v.AsMap() + if !ok { + return InvalidValue, fmt.Errorf("expected a map to index %q, found %s", prefix, v.Kind()) + } + + // Lookup current value in the map. + ev, ok := m[component.key] + if !ok && !opts.allowMissingKeyInMap { + return InvalidValue, noSuchKeyError{prefix} + } + + // Recursively transform the value. + nv, err := visit(ev, prefix, suffix, opts) + if err != nil { + return InvalidValue, err + } + + // Return the original value if the value hasn't changed. + if nv.eq(ev) { + return v, nil + } + + // Return an updated map value. + m = maps.Clone(m) + m[component.key] = nv + return Value{ + v: m, + k: KindMap, + l: v.l, + }, nil + + case component.isIndex(): + // Expect a sequence to be set if this is an index. + s, ok := v.AsSequence() + if !ok { + return InvalidValue, fmt.Errorf("expected a sequence to index %q, found %s", prefix, v.Kind()) + } + + // Lookup current value in the sequence. + if component.index < 0 || component.index >= len(s) { + return InvalidValue, indexOutOfBoundsError{prefix} + } + + // Recursively transform the value. + ev := s[component.index] + nv, err := visit(ev, prefix, suffix, opts) + if err != nil { + return InvalidValue, err + } + + // Return the original value if the value hasn't changed. + if nv.eq(ev) { + return v, nil + } + + // Return an updated sequence value. + s = slices.Clone(s) + s[component.index] = nv + return Value{ + v: s, + k: KindSequence, + l: v.l, + }, nil + + default: + panic("invalid component") + } +} diff --git a/libs/dyn/visit_get.go b/libs/dyn/visit_get.go new file mode 100644 index 0000000000..a0f848cddb --- /dev/null +++ b/libs/dyn/visit_get.go @@ -0,0 +1,25 @@ +package dyn + +// Get returns the value inside the specified value at the specified path. +// It is identical to [GetByPath], except that it takes a string path instead of a [Path]. +func Get(v Value, path string) (Value, error) { + p, err := NewPathFromString(path) + if err != nil { + return InvalidValue, err + } + return GetByPath(v, p) +} + +// GetByPath returns the value inside the specified value at the specified path. +// If the path doesn't exist, it returns InvalidValue and an error. +func GetByPath(v Value, p Path) (Value, error) { + out := InvalidValue + _, err := visit(v, EmptyPath, p, visitOptions{ + fn: func(ev Value) (Value, error) { + // Capture the value argument to return it. + out = ev + return ev, nil + }, + }) + return out, err +} diff --git a/libs/dyn/visit_get_test.go b/libs/dyn/visit_get_test.go new file mode 100644 index 0000000000..22dce0858b --- /dev/null +++ b/libs/dyn/visit_get_test.go @@ -0,0 +1,76 @@ +package dyn_test + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" +) + +func TestGetWithEmptyPath(t *testing.T) { + // An empty path means to return the value itself. 
+ vin := dyn.V(42) + vout, err := dyn.GetByPath(vin, dyn.NewPath()) + assert.NoError(t, err) + assert.Equal(t, vin, vout) +} + +func TestGetOnNilValue(t *testing.T) { + var err error + _, err = dyn.GetByPath(dyn.NilValue, dyn.NewPath(dyn.Key("foo"))) + assert.ErrorContains(t, err, `expected a map to index "foo", found nil`) + _, err = dyn.GetByPath(dyn.NilValue, dyn.NewPath(dyn.Index(42))) + assert.ErrorContains(t, err, `expected a sequence to index "[42]", found nil`) +} + +func TestGetOnMap(t *testing.T) { + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V(42), + "bar": dyn.V(43), + }) + + var err error + + _, err = dyn.GetByPath(vin, dyn.NewPath(dyn.Index(42))) + assert.ErrorContains(t, err, `expected a sequence to index "[42]", found map`) + + _, err = dyn.GetByPath(vin, dyn.NewPath(dyn.Key("baz"))) + assert.True(t, dyn.IsNoSuchKeyError(err)) + assert.ErrorContains(t, err, `key not found at "baz"`) + + vfoo, err := dyn.GetByPath(vin, dyn.NewPath(dyn.Key("foo"))) + assert.NoError(t, err) + assert.Equal(t, dyn.V(42), vfoo) + + vbar, err := dyn.GetByPath(vin, dyn.NewPath(dyn.Key("bar"))) + assert.NoError(t, err) + assert.Equal(t, dyn.V(43), vbar) +} + +func TestGetOnSequence(t *testing.T) { + vin := dyn.V([]dyn.Value{ + dyn.V(42), + dyn.V(43), + }) + + var err error + + _, err = dyn.GetByPath(vin, dyn.NewPath(dyn.Key("foo"))) + assert.ErrorContains(t, err, `expected a map to index "foo", found sequence`) + + _, err = dyn.GetByPath(vin, dyn.NewPath(dyn.Index(-1))) + assert.True(t, dyn.IsIndexOutOfBoundsError(err)) + assert.ErrorContains(t, err, `index out of bounds at "[-1]"`) + + _, err = dyn.GetByPath(vin, dyn.NewPath(dyn.Index(2))) + assert.True(t, dyn.IsIndexOutOfBoundsError(err)) + assert.ErrorContains(t, err, `index out of bounds at "[2]"`) + + v0, err := dyn.GetByPath(vin, dyn.NewPath(dyn.Index(0))) + assert.NoError(t, err) + assert.Equal(t, dyn.V(42), v0) + + v1, err := dyn.GetByPath(vin, dyn.NewPath(dyn.Index(1))) + assert.NoError(t, err) + assert.Equal(t, dyn.V(43), v1) +} diff --git a/libs/dyn/visit_map.go b/libs/dyn/visit_map.go new file mode 100644 index 0000000000..ed89baa4a3 --- /dev/null +++ b/libs/dyn/visit_map.go @@ -0,0 +1,77 @@ +package dyn + +import ( + "fmt" + "maps" + "slices" +) + +// MapFunc is a function that maps a value to another value. +type MapFunc func(Value) (Value, error) + +// Foreach returns a [MapFunc] that applies the specified [MapFunc] to each +// value in a map or sequence and returns the new map or sequence. +func Foreach(fn MapFunc) MapFunc { + return func(v Value) (Value, error) { + switch v.Kind() { + case KindMap: + m := maps.Clone(v.MustMap()) + for key, value := range m { + var err error + m[key], err = fn(value) + if err != nil { + return InvalidValue, err + } + } + return NewValue(m, v.Location()), nil + case KindSequence: + s := slices.Clone(v.MustSequence()) + for i, value := range s { + var err error + s[i], err = fn(value) + if err != nil { + return InvalidValue, err + } + } + return NewValue(s, v.Location()), nil + default: + return InvalidValue, fmt.Errorf("expected a map or sequence, found %s", v.Kind()) + } + } +} + +// Map applies the given function to the value at the specified path in the specified value. +// It is identical to [MapByPath], except that it takes a string path instead of a [Path]. 
+func Map(v Value, path string, fn MapFunc) (Value, error) { + p, err := NewPathFromString(path) + if err != nil { + return InvalidValue, err + } + return MapByPath(v, p, fn) +} + +// Map applies the given function to the value at the specified path in the specified value. +// If successful, it returns the new value with all intermediate values copied and updated. +// +// If the path contains a key that doesn't exist, or an index that is out of bounds, +// it returns the original value and no error. This is because setting a value at a path +// that doesn't exist is a no-op. +// +// If the path is invalid for the given value, it returns InvalidValue and an error. +func MapByPath(v Value, p Path, fn MapFunc) (Value, error) { + nv, err := visit(v, EmptyPath, p, visitOptions{ + fn: fn, + }) + + // Check for success. + if err == nil { + return nv, nil + } + + // Return original value if a key or index is missing. + if IsNoSuchKeyError(err) || IsIndexOutOfBoundsError(err) { + return v, nil + } + + return nv, err +} diff --git a/libs/dyn/visit_map_test.go b/libs/dyn/visit_map_test.go new file mode 100644 index 0000000000..a5af3411f7 --- /dev/null +++ b/libs/dyn/visit_map_test.go @@ -0,0 +1,202 @@ +package dyn_test + +import ( + "fmt" + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestMapWithEmptyPath(t *testing.T) { + // An empty path means to return the value itself. + vin := dyn.V(42) + vout, err := dyn.MapByPath(dyn.InvalidValue, dyn.EmptyPath, func(v dyn.Value) (dyn.Value, error) { + return vin, nil + }) + assert.NoError(t, err) + assert.Equal(t, vin, vout) +} + +func TestMapOnNilValue(t *testing.T) { + var err error + _, err = dyn.MapByPath(dyn.NilValue, dyn.NewPath(dyn.Key("foo")), nil) + assert.ErrorContains(t, err, `expected a map to index "foo", found nil`) + _, err = dyn.MapByPath(dyn.NilValue, dyn.NewPath(dyn.Index(42)), nil) + assert.ErrorContains(t, err, `expected a sequence to index "[42]", found nil`) +} + +func TestMapFuncOnMap(t *testing.T) { + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V(42), + "bar": dyn.V(43), + }) + + var err error + + _, err = dyn.MapByPath(vin, dyn.NewPath(dyn.Index(42)), nil) + assert.ErrorContains(t, err, `expected a sequence to index "[42]", found map`) + + // A key that does not exist is not an error. + vout, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Key("baz")), nil) + assert.NoError(t, err) + assert.Equal(t, vin, vout) + + // Note: in the test cases below we implicitly test that the original + // value is not modified as we repeatedly set values on it. + vfoo, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Key("foo")), func(v dyn.Value) (dyn.Value, error) { + assert.Equal(t, dyn.V(42), v) + return dyn.V(44), nil + }) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": 44, + "bar": 43, + }, vfoo.AsAny()) + + vbar, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Key("bar")), func(v dyn.Value) (dyn.Value, error) { + assert.Equal(t, dyn.V(43), v) + return dyn.V(45), nil + }) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": 42, + "bar": 45, + }, vbar.AsAny()) + + // Return error from map function. 
+ var ref = fmt.Errorf("error") + verr, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Key("foo")), func(v dyn.Value) (dyn.Value, error) { + return dyn.InvalidValue, ref + }) + assert.Equal(t, dyn.InvalidValue, verr) + assert.ErrorIs(t, err, ref) +} + +func TestMapFuncOnSequence(t *testing.T) { + vin := dyn.V([]dyn.Value{ + dyn.V(42), + dyn.V(43), + }) + + var err error + + _, err = dyn.MapByPath(vin, dyn.NewPath(dyn.Key("foo")), nil) + assert.ErrorContains(t, err, `expected a map to index "foo", found sequence`) + + // An index that does not exist is not an error. + vout, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Index(2)), nil) + assert.NoError(t, err) + assert.Equal(t, vin, vout) + + // Note: in the test cases below we implicitly test that the original + // value is not modified as we repeatedly set values on it. + v0, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Index(0)), func(v dyn.Value) (dyn.Value, error) { + assert.Equal(t, dyn.V(42), v) + return dyn.V(44), nil + }) + assert.NoError(t, err) + assert.Equal(t, []any{44, 43}, v0.AsAny()) + + v1, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Index(1)), func(v dyn.Value) (dyn.Value, error) { + assert.Equal(t, dyn.V(43), v) + return dyn.V(45), nil + }) + assert.NoError(t, err) + assert.Equal(t, []any{42, 45}, v1.AsAny()) + + // Return error from map function. + var ref = fmt.Errorf("error") + verr, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Index(0)), func(v dyn.Value) (dyn.Value, error) { + return dyn.InvalidValue, ref + }) + assert.Equal(t, dyn.InvalidValue, verr) + assert.ErrorIs(t, err, ref) +} + +func TestMapForeachOnMap(t *testing.T) { + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V(42), + "bar": dyn.V(43), + }) + + var err error + + // Run foreach, adding 1 to each of the elements. + vout, err := dyn.Map(vin, ".", dyn.Foreach(func(v dyn.Value) (dyn.Value, error) { + i, ok := v.AsInt() + require.True(t, ok, "expected an integer") + return dyn.V(int(i) + 1), nil + })) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": 43, + "bar": 44, + }, vout.AsAny()) + + // Check that the original has not been modified. + assert.Equal(t, map[string]any{ + "foo": 42, + "bar": 43, + }, vin.AsAny()) +} + +func TestMapForeachOnMapError(t *testing.T) { + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V(42), + "bar": dyn.V(43), + }) + + // Check that an error from the map function propagates. + var ref = fmt.Errorf("error") + _, err := dyn.Map(vin, ".", dyn.Foreach(func(v dyn.Value) (dyn.Value, error) { + return dyn.InvalidValue, ref + })) + assert.ErrorIs(t, err, ref) +} + +func TestMapForeachOnSequence(t *testing.T) { + vin := dyn.V([]dyn.Value{ + dyn.V(42), + dyn.V(43), + }) + + var err error + + // Run foreach, adding 1 to each of the elements. + vout, err := dyn.Map(vin, ".", dyn.Foreach(func(v dyn.Value) (dyn.Value, error) { + i, ok := v.AsInt() + require.True(t, ok, "expected an integer") + return dyn.V(int(i) + 1), nil + })) + assert.NoError(t, err) + assert.Equal(t, []any{43, 44}, vout.AsAny()) + + // Check that the original has not been modified. + assert.Equal(t, []any{42, 43}, vin.AsAny()) +} + +func TestMapForeachOnSequenceError(t *testing.T) { + vin := dyn.V([]dyn.Value{ + dyn.V(42), + dyn.V(43), + }) + + // Check that an error from the map function propagates. 
+ var ref = fmt.Errorf("error") + _, err := dyn.Map(vin, ".", dyn.Foreach(func(v dyn.Value) (dyn.Value, error) { + return dyn.InvalidValue, ref + })) + assert.ErrorIs(t, err, ref) +} + +func TestMapForeachOnOtherError(t *testing.T) { + vin := dyn.V(42) + + // Check that if foreach is applied to something other than a map or a sequence, it returns an error. + _, err := dyn.Map(vin, ".", dyn.Foreach(func(v dyn.Value) (dyn.Value, error) { + return dyn.InvalidValue, nil + })) + assert.ErrorContains(t, err, "expected a map or sequence, found int") +} diff --git a/libs/dyn/visit_set.go b/libs/dyn/visit_set.go new file mode 100644 index 0000000000..fdbf41c2cf --- /dev/null +++ b/libs/dyn/visit_set.go @@ -0,0 +1,24 @@ +package dyn + +// Set assigns a new value at the specified path in the specified value. +// It is identical to [SetByPath], except that it takes a string path instead of a [Path]. +func Set(v Value, path string, nv Value) (Value, error) { + p, err := NewPathFromString(path) + if err != nil { + return InvalidValue, err + } + return SetByPath(v, p, nv) +} + +// SetByPath assigns the given value at the specified path in the specified value. +// If successful, it returns the new value with all intermediate values copied and updated. +// If the path doesn't exist, it returns InvalidValue and an error. +func SetByPath(v Value, p Path, nv Value) (Value, error) { + return visit(v, EmptyPath, p, visitOptions{ + fn: func(_ Value) (Value, error) { + // Return the incoming value to set it. + return nv, nil + }, + allowMissingKeyInMap: true, + }) +} diff --git a/libs/dyn/visit_set_test.go b/libs/dyn/visit_set_test.go new file mode 100644 index 0000000000..b384715875 --- /dev/null +++ b/libs/dyn/visit_set_test.go @@ -0,0 +1,90 @@ +package dyn_test + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" +) + +func TestSetWithEmptyPath(t *testing.T) { + // An empty path means to return the value itself. + vin := dyn.V(42) + vout, err := dyn.SetByPath(dyn.InvalidValue, dyn.EmptyPath, vin) + assert.NoError(t, err) + assert.Equal(t, vin, vout) +} + +func TestSetOnNilValue(t *testing.T) { + var err error + _, err = dyn.SetByPath(dyn.NilValue, dyn.NewPath(dyn.Key("foo")), dyn.V(42)) + assert.ErrorContains(t, err, `expected a map to index "foo", found nil`) + _, err = dyn.SetByPath(dyn.NilValue, dyn.NewPath(dyn.Index(42)), dyn.V(42)) + assert.ErrorContains(t, err, `expected a sequence to index "[42]", found nil`) +} + +func TestSetOnMap(t *testing.T) { + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V(42), + "bar": dyn.V(43), + }) + + var err error + + _, err = dyn.SetByPath(vin, dyn.NewPath(dyn.Index(42)), dyn.V(42)) + assert.ErrorContains(t, err, `expected a sequence to index "[42]", found map`) + + // Note: in the test cases below we implicitly test that the original + // value is not modified as we repeatedly set values on it. 
+ + vfoo, err := dyn.SetByPath(vin, dyn.NewPath(dyn.Key("foo")), dyn.V(44)) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": 44, + "bar": 43, + }, vfoo.AsAny()) + + vbar, err := dyn.SetByPath(vin, dyn.NewPath(dyn.Key("bar")), dyn.V(45)) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": 42, + "bar": 45, + }, vbar.AsAny()) + + vbaz, err := dyn.SetByPath(vin, dyn.NewPath(dyn.Key("baz")), dyn.V(46)) + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "foo": 42, + "bar": 43, + "baz": 46, + }, vbaz.AsAny()) +} + +func TestSetOnSequence(t *testing.T) { + vin := dyn.V([]dyn.Value{ + dyn.V(42), + dyn.V(43), + }) + + var err error + + _, err = dyn.SetByPath(vin, dyn.NewPath(dyn.Key("foo")), dyn.V(42)) + assert.ErrorContains(t, err, `expected a map to index "foo", found sequence`) + + // It is not allowed to set a value at an index that is out of bounds. + _, err = dyn.SetByPath(vin, dyn.NewPath(dyn.Index(-1)), dyn.V(42)) + assert.True(t, dyn.IsIndexOutOfBoundsError(err)) + _, err = dyn.SetByPath(vin, dyn.NewPath(dyn.Index(2)), dyn.V(42)) + assert.True(t, dyn.IsIndexOutOfBoundsError(err)) + + // Note: in the test cases below we implicitly test that the original + // value is not modified as we repeatedly set values on it. + + v0, err := dyn.SetByPath(vin, dyn.NewPath(dyn.Index(0)), dyn.V(44)) + assert.NoError(t, err) + assert.Equal(t, []any{44, 43}, v0.AsAny()) + + v1, err := dyn.SetByPath(vin, dyn.NewPath(dyn.Index(1)), dyn.V(45)) + assert.NoError(t, err) + assert.Equal(t, []any{42, 45}, v1.AsAny()) +} diff --git a/libs/dyn/visit_test.go b/libs/dyn/visit_test.go new file mode 100644 index 0000000000..8f715b8619 --- /dev/null +++ b/libs/dyn/visit_test.go @@ -0,0 +1 @@ +package dyn_test From af0e3e29598812932d9b006816659dfa50ac06c3 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 24 Jan 2024 14:38:05 +0100 Subject: [PATCH 050/104] Add `dynvar` package for variable resolution with a `dyn.Value` tree This is the `dyn` counterpart to the `bundle/config/interpolation` package. It relies on the paths in `${foo.bar}` being valid `dyn.Path` instances. It leverages `dyn.Walk` to get a complete picture of all variable references and uses `dyn.Get` to retrieve values pointed to by variable references. # Please enter the commit message for your changes. Lines starting # with '#' will be ignored, and an empty message aborts the commit. 
# # On branch dynvar # Changes to be committed: # new file: libs/dyn/dynvar/lookup.go # new file: libs/dyn/dynvar/lookup_test.go # new file: libs/dyn/dynvar/ref.go # new file: libs/dyn/dynvar/ref_test.go # new file: libs/dyn/dynvar/resolve.go # new file: libs/dyn/dynvar/resolve_test.go # # Untracked files: # extra/ # --- libs/dyn/dynvar/lookup.go | 21 ++++ libs/dyn/dynvar/lookup_test.go | 27 +++++ libs/dyn/dynvar/ref.go | 76 +++++++++++++ libs/dyn/dynvar/ref_test.go | 46 ++++++++ libs/dyn/dynvar/resolve.go | 180 +++++++++++++++++++++++++++++++ libs/dyn/dynvar/resolve_test.go | 184 ++++++++++++++++++++++++++++++++ 6 files changed, 534 insertions(+) create mode 100644 libs/dyn/dynvar/lookup.go create mode 100644 libs/dyn/dynvar/lookup_test.go create mode 100644 libs/dyn/dynvar/ref.go create mode 100644 libs/dyn/dynvar/ref_test.go create mode 100644 libs/dyn/dynvar/resolve.go create mode 100644 libs/dyn/dynvar/resolve_test.go diff --git a/libs/dyn/dynvar/lookup.go b/libs/dyn/dynvar/lookup.go new file mode 100644 index 0000000000..2bc08f47d4 --- /dev/null +++ b/libs/dyn/dynvar/lookup.go @@ -0,0 +1,21 @@ +package dynvar + +import ( + "errors" + + "github.com/databricks/cli/libs/dyn" +) + +// Lookup is the type of lookup functions that can be used with [Resolve]. +type Lookup func(path dyn.Path) (dyn.Value, error) + +// ErrSkipResolution is returned by a lookup function to indicate that the +// resolution of a variable reference should be skipped. +var ErrSkipResolution = errors.New("skip resolution") + +// DefaultLookup is the default lookup function used by [Resolve]. +func DefaultLookup(in dyn.Value) Lookup { + return func(path dyn.Path) (dyn.Value, error) { + return dyn.GetByPath(in, path) + } +} diff --git a/libs/dyn/dynvar/lookup_test.go b/libs/dyn/dynvar/lookup_test.go new file mode 100644 index 0000000000..2341d72084 --- /dev/null +++ b/libs/dyn/dynvar/lookup_test.go @@ -0,0 +1,27 @@ +package dynvar_test + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/dynvar" + "github.com/stretchr/testify/assert" +) + +func TestDefaultLookup(t *testing.T) { + lookup := dynvar.DefaultLookup(dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("b"), + })) + + v1, err := lookup(dyn.NewPath(dyn.Key("a"))) + assert.NoError(t, err) + assert.Equal(t, dyn.V("a"), v1) + + v2, err := lookup(dyn.NewPath(dyn.Key("b"))) + assert.NoError(t, err) + assert.Equal(t, dyn.V("b"), v2) + + _, err = lookup(dyn.NewPath(dyn.Key("c"))) + assert.True(t, dyn.IsNoSuchKeyError(err)) +} diff --git a/libs/dyn/dynvar/ref.go b/libs/dyn/dynvar/ref.go new file mode 100644 index 0000000000..c0ea7fccc6 --- /dev/null +++ b/libs/dyn/dynvar/ref.go @@ -0,0 +1,76 @@ +package dynvar + +import ( + "regexp" + + "github.com/databricks/cli/libs/dyn" +) + +var re = regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\}`) + +// ref represents a variable reference. +// It is a string [dyn.Value] contained in a larger [dyn.Value]. +// Its path within the containing [dyn.Value] is also stored. +type ref struct { + // Original value and path. + value dyn.Value + path dyn.Path + + // Key to index this ref by. + // It is equal to the string representation of the path. + key string + + // String value in the original [dyn.Value]. + str string + + // Matches of the variable reference in the string. + matches [][]string +} + +// newRef returns a new ref if the given [dyn.Value] contains a string +// with one or more variable references. 
It returns false if the given +// [dyn.Value] does not contain variable references. +// +// Examples of a valid variable references: +// - "${a.b}" +// - "${a.b.c}" +// - "${a} ${b} ${c}" +func newRef(v dyn.Value, p dyn.Path) (ref, bool) { + s, ok := v.AsString() + if !ok { + return ref{}, false + } + + // Check if the string contains any variable references. + m := re.FindAllStringSubmatch(s, -1) + if len(m) == 0 { + return ref{}, false + } + + return ref{ + value: v, + path: p, + key: p.String(), + str: s, + matches: m, + }, true +} + +// isPure returns true if the variable reference contains a single +// variable reference and nothing more. We need this so we can +// interpolate values of non-string types (i.e. it can be substituted). +func (v ref) isPure() bool { + // Need single match, equal to the incoming string. + if len(v.matches) == 0 || len(v.matches[0]) == 0 { + panic("invalid variable reference; expect at least one match") + } + return v.matches[0][0] == v.str +} + +func (v ref) references() []string { + var out []string + for _, m := range v.matches { + out = append(out, m[1]) + } + return out +} diff --git a/libs/dyn/dynvar/ref_test.go b/libs/dyn/dynvar/ref_test.go new file mode 100644 index 0000000000..e722f71133 --- /dev/null +++ b/libs/dyn/dynvar/ref_test.go @@ -0,0 +1,46 @@ +package dynvar + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestNewRefNoString(t *testing.T) { + _, ok := newRef(dyn.V(1), dyn.Path{}) + require.False(t, ok, "should not match non-string") +} + +func TestNewRefValidPattern(t *testing.T) { + for in, refs := range map[string][]string{ + "${hello_world.world_world}": {"hello_world.world_world"}, + "${helloworld.world-world}": {"helloworld.world-world"}, + "${hello-world.world-world}": {"hello-world.world-world"}, + } { + ref, ok := newRef(dyn.V(in), dyn.Path{}) + require.True(t, ok, "should match valid pattern: %s", in) + assert.Equal(t, refs, ref.references()) + } +} + +func TestNewRefInvalidPattern(t *testing.T) { + invalid := []string{ + "${hello_world-.world_world}", // the first segment ending must not end with hyphen (-) + "${hello_world-_.world_world}", // the first segment ending must not end with underscore (_) + "${helloworld.world-world-}", // second segment must not end with hyphen (-) + "${helloworld-.world-world}", // first segment must not end with hyphen (-) + "${helloworld.-world-world}", // second segment must not start with hyphen (-) + "${-hello-world.-world-world-}", // must not start or end with hyphen (-) + "${_-_._-_.id}", // cannot use _- in sequence + "${0helloworld.world-world}", // interpolated first section shouldn't start with number + "${helloworld.9world-world}", // interpolated second section shouldn't start with number + "${a-a.a-_a-a.id}", // fails because of -_ in the second segment + "${a-a.a--a-a.id}", // fails because of -- in the second segment + } + for _, v := range invalid { + _, ok := newRef(dyn.V(v), dyn.Path{}) + require.False(t, ok, "should not match invalid pattern: %s", v) + } +} diff --git a/libs/dyn/dynvar/resolve.go b/libs/dyn/dynvar/resolve.go new file mode 100644 index 0000000000..6e4df6c1d8 --- /dev/null +++ b/libs/dyn/dynvar/resolve.go @@ -0,0 +1,180 @@ +package dynvar + +import ( + "errors" + "fmt" + "slices" + "sort" + "strings" + + "github.com/databricks/cli/libs/dyn" + "golang.org/x/exp/maps" +) + +func Resolve(in dyn.Value, fn Lookup) (out dyn.Value, err error) { + return resolver{in: 
in, fn: fn}.run() +} + +type resolver struct { + in dyn.Value + fn Lookup + + refs map[string]ref + resolved map[string]dyn.Value +} + +func (r resolver) run() (out dyn.Value, err error) { + err = r.collectVariableReferences() + if err != nil { + return dyn.InvalidValue, err + } + + err = r.resolveVariableReferences() + if err != nil { + return dyn.InvalidValue, err + } + + out, err = r.replaceVariableReferences() + if err != nil { + return dyn.InvalidValue, err + } + + return out, nil +} + +func (r *resolver) collectVariableReferences() (err error) { + r.refs = make(map[string]ref) + + // First walk the input to gather all values with a variable reference. + _, err = dyn.Walk(r.in, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + ref, ok := newRef(v, p) + if !ok { + // Skip values without variable references. + return v, nil + } + + r.refs[ref.key] = ref + return v, nil + }) + + return err +} + +func (r *resolver) resolveVariableReferences() (err error) { + // Initialize map for resolved variables. + // We use this for memoization. + r.resolved = make(map[string]dyn.Value) + + // Resolve each variable reference (in order). + keys := maps.Keys(r.refs) + sort.Strings(keys) + for _, key := range keys { + _, err := r.resolve(key, []string{key}) + if err != nil { + return err + } + } + + return nil +} + +func (r *resolver) resolve(key string, seen []string) (dyn.Value, error) { + // Check if we have already resolved this variable reference. + if v, ok := r.resolved[key]; ok { + return v, nil + } + + ref, ok := r.refs[key] + if !ok { + // Perform lookup in the input. + p, err := dyn.NewPathFromString(key) + if err != nil { + return dyn.InvalidValue, err + } + v, err := r.fn(p) + if err != nil && dyn.IsNoSuchKeyError(err) { + return dyn.InvalidValue, fmt.Errorf( + "reference does not exist: ${%s}", + key, + ) + } + return v, err + } + + // This is an unresolved variable reference. + deps := ref.references() + + // Resolve each of the dependencies, then interpolate them in the ref. + resolved := make([]dyn.Value, len(deps)) + complete := true + + for j, dep := range deps { + // Cycle detection. + if slices.Contains(seen, dep) { + return dyn.InvalidValue, fmt.Errorf( + "cycle detected in field resolution: %s", + strings.Join(append(seen, dep), " -> "), + ) + } + + v, err := r.resolve(dep, append(seen, dep)) + + // If we should skip resolution of this key, index j will hold an invalid [dyn.Value]. + if errors.Is(err, ErrSkipResolution) { + complete = false + continue + } else if err != nil { + // Otherwise, propagate the error. + return dyn.InvalidValue, err + } + + resolved[j] = v + } + + // Interpolate the resolved values. + if ref.isPure() && complete { + // If the variable reference is pure, we can substitute it. + // This is useful for interpolating values of non-string types. + r.resolved[key] = resolved[0] + return resolved[0], nil + } + + // Not pure; perform string interpolation. + for j := range ref.matches { + // The value is invalid if resolution returned [ErrSkipResolution]. + // We must skip those and leave the original variable reference in place. + if !resolved[j].IsValid() { + continue + } + + // Try to turn the resolved value into a string. + s, ok := resolved[j].AsString() + if !ok { + return dyn.InvalidValue, fmt.Errorf( + "cannot interpolate non-string value: %s", + ref.matches[j][0], + ) + } + + ref.str = strings.Replace(ref.str, ref.matches[j][0], s, 1) + } + + // Store the interpolated value. 
+ v := dyn.NewValue(ref.str, ref.value.Location()) + r.resolved[key] = v + return v, nil +} + +func (r *resolver) replaceVariableReferences() (dyn.Value, error) { + // Walk the input and replace all variable references. + return dyn.Walk(r.in, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + ref, ok := r.refs[p.String()] + if !ok { + // No variable reference; return the original value. + return v, nil + } + + // We have a variable reference; return the resolved value. + return r.resolved[ref.key], nil + }) +} diff --git a/libs/dyn/dynvar/resolve_test.go b/libs/dyn/dynvar/resolve_test.go new file mode 100644 index 0000000000..ba700503ef --- /dev/null +++ b/libs/dyn/dynvar/resolve_test.go @@ -0,0 +1,184 @@ +package dynvar_test + +import ( + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/dynvar" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func getByPath(t *testing.T, v dyn.Value, path string) dyn.Value { + v, err := dyn.Get(v, path) + require.NoError(t, err) + return v +} + +func TestResolve(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${a}"), + "c": dyn.V("${a}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "a", getByPath(t, out, "b").MustString()) + assert.Equal(t, "a", getByPath(t, out, "c").MustString()) +} + +func TestResolveNotFound(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "b": dyn.V("${a}"), + }) + + _, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.ErrorContains(t, err, `reference does not exist: ${a}`) +} + +func TestResolveWithNesting(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("${f.a}"), + "f": dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${f.a}"), + }), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "a", getByPath(t, out, "f.a").MustString()) + assert.Equal(t, "a", getByPath(t, out, "f.b").MustString()) +} + +func TestResolveWithRecursion(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${a}"), + "c": dyn.V("${b}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "a", getByPath(t, out, "b").MustString()) + assert.Equal(t, "a", getByPath(t, out, "c").MustString()) +} + +func TestResolveWithRecursionLoop(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${c}"), + "c": dyn.V("${d}"), + "d": dyn.V("${b}"), + }) + + _, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + assert.ErrorContains(t, err, "cycle detected in field resolution: b -> c -> d -> b") +} + +func TestResolveWithRecursionLoopSelf(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("${a}"), + }) + + _, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + assert.ErrorContains(t, err, "cycle detected in field resolution: a -> a") +} + +func TestResolveWithStringConcatenation(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("b"), + "c": dyn.V("${a}${b}${a}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "b", getByPath(t, 
out, "b").MustString()) + assert.Equal(t, "aba", getByPath(t, out, "c").MustString()) +} + +func TestResolveWithTypeRetentionFailure(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V(1), + "b": dyn.V(2), + "c": dyn.V("${a} ${b}"), + }) + + _, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.ErrorContains(t, err, "cannot interpolate non-string value: ${a}") +} + +func TestResolveWithTypeRetention(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "int": dyn.V(1), + "int_var": dyn.V("${int}"), + "bool_true": dyn.V(true), + "bool_true_var": dyn.V("${bool_true}"), + "bool_false": dyn.V(false), + "bool_false_var": dyn.V("${bool_false}"), + "float": dyn.V(1.0), + "float_var": dyn.V("${float}"), + "string": dyn.V("a"), + "string_var": dyn.V("${string}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.EqualValues(t, 1, getByPath(t, out, "int").MustInt()) + assert.EqualValues(t, 1, getByPath(t, out, "int_var").MustInt()) + + assert.EqualValues(t, true, getByPath(t, out, "bool_true").MustBool()) + assert.EqualValues(t, true, getByPath(t, out, "bool_true_var").MustBool()) + + assert.EqualValues(t, false, getByPath(t, out, "bool_false").MustBool()) + assert.EqualValues(t, false, getByPath(t, out, "bool_false_var").MustBool()) + + assert.EqualValues(t, 1.0, getByPath(t, out, "float").MustFloat()) + assert.EqualValues(t, 1.0, getByPath(t, out, "float_var").MustFloat()) + + assert.EqualValues(t, "a", getByPath(t, out, "string").MustString()) + assert.EqualValues(t, "a", getByPath(t, out, "string_var").MustString()) +} + +func TestResolveWithSkip(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("b"), + "c": dyn.V("${a}"), + "d": dyn.V("${b}"), + "e": dyn.V("${a} ${b}"), + "f": dyn.V("${b} ${a} ${a} ${b}"), + }) + + fallback := dynvar.DefaultLookup(in) + ignore := func(path dyn.Path) (dyn.Value, error) { + // If the variable reference to look up starts with "b", skip it. + if path.HasPrefix(dyn.NewPath(dyn.Key("b"))) { + return dyn.InvalidValue, dynvar.ErrSkipResolution + } + return fallback(path) + } + + out, err := dynvar.Resolve(in, ignore) + require.NoError(t, err) + + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "b", getByPath(t, out, "b").MustString()) + assert.Equal(t, "a", getByPath(t, out, "c").MustString()) + + // Check that the skipped variable references are not interpolated. + assert.Equal(t, "${b}", getByPath(t, out, "d").MustString()) + assert.Equal(t, "a ${b}", getByPath(t, out, "e").MustString()) + assert.Equal(t, "${b} a a ${b}", getByPath(t, out, "f").MustString()) +} From 37bd8f108c71c152440fdeb678dcc66ddc8d076c Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 24 Jan 2024 14:47:30 +0100 Subject: [PATCH 051/104] Comments --- libs/dyn/dynvar/resolve.go | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/libs/dyn/dynvar/resolve.go b/libs/dyn/dynvar/resolve.go index 6e4df6c1d8..7b9dcda0d9 100644 --- a/libs/dyn/dynvar/resolve.go +++ b/libs/dyn/dynvar/resolve.go @@ -11,6 +11,29 @@ import ( "golang.org/x/exp/maps" ) +// Resolve resolves variable references in the given input value using the provided lookup function. +// It returns the resolved output value and any error encountered during the resolution process. 
+// +// For example, given the input value: +// +// { +// "a": "a", +// "b": "${a}", +// "c": "${b}${b}", +// } +// +// The output value will be: +// +// { +// "a": "a", +// "b": "a", +// "c": "aa", +// } +// +// If the input value contains a variable reference that cannot be resolved, an error is returned. +// If a cycle is detected in the variable references, an error is returned. +// If for some path the resolution function returns [ErrSkipResolution], the variable reference is left in place. +// This is useful when some variable references are not yet ready to be interpolated. func Resolve(in dyn.Value, fn Lookup) (out dyn.Value, err error) { return resolver{in: in, fn: fn}.run() } From a7348d577a9236975a100653e7d5ef1866d30c7a Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 24 Jan 2024 14:53:01 +0100 Subject: [PATCH 052/104] Remove path and key from ref --- libs/dyn/dynvar/ref.go | 11 ++--------- libs/dyn/dynvar/ref_test.go | 6 +++--- libs/dyn/dynvar/resolve.go | 8 ++++---- 3 files changed, 9 insertions(+), 16 deletions(-) diff --git a/libs/dyn/dynvar/ref.go b/libs/dyn/dynvar/ref.go index c0ea7fccc6..e4616c5200 100644 --- a/libs/dyn/dynvar/ref.go +++ b/libs/dyn/dynvar/ref.go @@ -12,13 +12,8 @@ var re = regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_] // It is a string [dyn.Value] contained in a larger [dyn.Value]. // Its path within the containing [dyn.Value] is also stored. type ref struct { - // Original value and path. + // Original value. value dyn.Value - path dyn.Path - - // Key to index this ref by. - // It is equal to the string representation of the path. - key string // String value in the original [dyn.Value]. str string @@ -35,7 +30,7 @@ type ref struct { // - "${a.b}" // - "${a.b.c}" // - "${a} ${b} ${c}" -func newRef(v dyn.Value, p dyn.Path) (ref, bool) { +func newRef(v dyn.Value) (ref, bool) { s, ok := v.AsString() if !ok { return ref{}, false @@ -49,8 +44,6 @@ func newRef(v dyn.Value, p dyn.Path) (ref, bool) { return ref{ value: v, - path: p, - key: p.String(), str: s, matches: m, }, true diff --git a/libs/dyn/dynvar/ref_test.go b/libs/dyn/dynvar/ref_test.go index e722f71133..b3066276c9 100644 --- a/libs/dyn/dynvar/ref_test.go +++ b/libs/dyn/dynvar/ref_test.go @@ -9,7 +9,7 @@ import ( ) func TestNewRefNoString(t *testing.T) { - _, ok := newRef(dyn.V(1), dyn.Path{}) + _, ok := newRef(dyn.V(1)) require.False(t, ok, "should not match non-string") } @@ -19,7 +19,7 @@ func TestNewRefValidPattern(t *testing.T) { "${helloworld.world-world}": {"helloworld.world-world"}, "${hello-world.world-world}": {"hello-world.world-world"}, } { - ref, ok := newRef(dyn.V(in), dyn.Path{}) + ref, ok := newRef(dyn.V(in)) require.True(t, ok, "should match valid pattern: %s", in) assert.Equal(t, refs, ref.references()) } @@ -40,7 +40,7 @@ func TestNewRefInvalidPattern(t *testing.T) { "${a-a.a--a-a.id}", // fails because of -- in the second segment } for _, v := range invalid { - _, ok := newRef(dyn.V(v), dyn.Path{}) + _, ok := newRef(dyn.V(v)) require.False(t, ok, "should not match invalid pattern: %s", v) } } diff --git a/libs/dyn/dynvar/resolve.go b/libs/dyn/dynvar/resolve.go index 7b9dcda0d9..e94bab923d 100644 --- a/libs/dyn/dynvar/resolve.go +++ b/libs/dyn/dynvar/resolve.go @@ -70,13 +70,13 @@ func (r *resolver) collectVariableReferences() (err error) { // First walk the input to gather all values with a variable reference. 
_, err = dyn.Walk(r.in, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { - ref, ok := newRef(v, p) + ref, ok := newRef(v) if !ok { // Skip values without variable references. return v, nil } - r.refs[ref.key] = ref + r.refs[p.String()] = ref return v, nil }) @@ -191,13 +191,13 @@ func (r *resolver) resolve(key string, seen []string) (dyn.Value, error) { func (r *resolver) replaceVariableReferences() (dyn.Value, error) { // Walk the input and replace all variable references. return dyn.Walk(r.in, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { - ref, ok := r.refs[p.String()] + nv, ok := r.resolved[p.String()] if !ok { // No variable reference; return the original value. return v, nil } // We have a variable reference; return the resolved value. - return r.resolved[ref.key], nil + return nv, nil }) } From a20a7f9a96fa5a2b12c2249396794afcd2a7a917 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 24 Jan 2024 15:09:28 +0100 Subject: [PATCH 053/104] Restore variable initialization --- bundle/config/root.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/bundle/config/root.go b/bundle/config/root.go index 004f951698..d2a5b47305 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -228,8 +228,6 @@ func (r *Root) ConfigureConfigFilePath() { // Input has to be a string of the form `foo=bar`. In this case the variable with // name `foo` is assigned the value `bar` func (r *Root) InitializeVariables(vars []string) error { - panic("nope") - for _, variable := range vars { parsedVariable := strings.SplitN(variable, "=", 2) if len(parsedVariable) != 2 { From 36eb46e70107d8643da8dce7c9190e9de94601fe Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 24 Jan 2024 15:44:16 +0100 Subject: [PATCH 054/104] Fixes --- bundle/config/root.go | 7 +++--- bundle/config/root_test.go | 46 +------------------------------------- 2 files changed, 5 insertions(+), 48 deletions(-) diff --git a/bundle/config/root.go b/bundle/config/root.go index d2a5b47305..88840e647a 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -251,9 +251,6 @@ func (r *Root) Merge(other *Root) error { // // Merge diagnostics. // r.diags = append(r.diags, other.diags...) - // // TODO: when hooking into merge semantics, disallow setting path on the target instance. - // other.Path = "" - // Check for safe merge, protecting against duplicate resource identifiers err := r.Resources.VerifySafeMerge(&other.Resources) if err != nil { @@ -397,6 +394,10 @@ func (r *Root) MergeTargetOverrides(name string) error { // tree where we allow users to write a shorthand and must // rewrite to the full form. func rewrite(v dyn.Value) (dyn.Value, error) { + if v.Kind() != dyn.KindMap { + return v, nil + } + // For each target, rewrite the variables block. return dyn.Map(v, "targets", dyn.Foreach(func(target dyn.Value) (dyn.Value, error) { // Confirm it has a variables block. 
diff --git a/bundle/config/root_test.go b/bundle/config/root_test.go index 7b86c32676..3b25fb1f8e 100644 --- a/bundle/config/root_test.go +++ b/bundle/config/root_test.go @@ -30,51 +30,6 @@ func TestRootLoad(t *testing.T) { assert.Equal(t, "basic", root.Bundle.Name) } -func TestRootMergeStruct(t *testing.T) { - root := &Root{ - Path: "path", - Workspace: Workspace{ - Host: "foo", - Profile: "profile", - }, - } - other := &Root{ - Path: "path", - Workspace: Workspace{ - Host: "bar", - }, - } - assert.NoError(t, root.Merge(other)) - assert.Equal(t, "bar", root.Workspace.Host) - assert.Equal(t, "profile", root.Workspace.Profile) -} - -func TestRootMergeMap(t *testing.T) { - root := &Root{ - Path: "path", - Targets: map[string]*Target{ - "development": { - Workspace: &Workspace{ - Host: "foo", - Profile: "profile", - }, - }, - }, - } - other := &Root{ - Path: "path", - Targets: map[string]*Target{ - "development": { - Workspace: &Workspace{ - Host: "bar", - }, - }, - }, - } - assert.NoError(t, root.Merge(other)) - assert.Equal(t, &Workspace{Host: "bar", Profile: "profile"}, root.Targets["development"].Workspace) -} - func TestDuplicateIdOnLoadReturnsError(t *testing.T) { _, err := Load("./testdata/duplicate_resource_names_in_root/databricks.yml") assert.ErrorContains(t, err, "multiple resources named foo (job at ./testdata/duplicate_resource_names_in_root/databricks.yml, pipeline at ./testdata/duplicate_resource_names_in_root/databricks.yml)") @@ -160,6 +115,7 @@ func TestRootMergeTargetOverridesWithMode(t *testing.T) { }, }, } + root.initializeDynamicValue() require.NoError(t, root.MergeTargetOverrides("development")) assert.Equal(t, Development, root.Bundle.Mode) } From 764489e404b233841fb5475b5fbde4d872403494 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 24 Jan 2024 16:38:29 +0100 Subject: [PATCH 055/104] Use mocks in pipeline glob test --- .../tests/bundle/pipeline_glob_paths_test.go | 26 +++++++++++-------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/bundle/tests/bundle/pipeline_glob_paths_test.go b/bundle/tests/bundle/pipeline_glob_paths_test.go index 539ffc9d31..8f2b62a6b6 100644 --- a/bundle/tests/bundle/pipeline_glob_paths_test.go +++ b/bundle/tests/bundle/pipeline_glob_paths_test.go @@ -5,30 +5,34 @@ import ( "testing" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/phases" - "github.com/databricks/cli/cmd/root" + "github.com/databricks/databricks-sdk-go/config" + "github.com/databricks/databricks-sdk-go/experimental/mocks" "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" ) func TestExpandPipelineGlobPathsWithNonExistent(t *testing.T) { ctx := context.Background() - ctx = root.SetWorkspaceClient(ctx, nil) - b, err := bundle.Load(ctx, "./pipeline_glob_paths") require.NoError(t, err) - err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) + err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutatorsForTarget("default")...)) require.NoError(t, err) - b.Config.Bundle.Target = "default" - - b.Config.Workspace.CurrentUser = &config.User{User: &iam.User{UserName: "user@domain.com"}} - b.WorkspaceClient() - m := phases.Initialize() - err = bundle.Apply(ctx, b, m) + // Configure mock workspace client + m := mocks.NewMockWorkspaceClient(t) + m.WorkspaceClient.Config = &config.Config{ + Host: "https://mock.databricks.workspace.com", + } + 
m.GetMockCurrentUserAPI().EXPECT().Me(mock.Anything).Return(&iam.User{ + UserName: "user@domain.com", + }, nil) + b.SetWorkpaceClient(m.WorkspaceClient) + + err = bundle.Apply(ctx, b, phases.Initialize()) require.Error(t, err) require.ErrorContains(t, err, "notebook ./non-existent not found") From b6ade6c2fc239a6c9b30f29f8a1a4a5fd15b45c2 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 25 Jan 2024 10:30:12 +0100 Subject: [PATCH 056/104] Remove file --- libs/dyn/visit_test.go | 1 - 1 file changed, 1 deletion(-) delete mode 100644 libs/dyn/visit_test.go diff --git a/libs/dyn/visit_test.go b/libs/dyn/visit_test.go deleted file mode 100644 index 8f715b8619..0000000000 --- a/libs/dyn/visit_test.go +++ /dev/null @@ -1 +0,0 @@ -package dyn_test From 1b70822b894703b9c06faf8aa6827f77dd1cf97c Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 31 Jan 2024 16:26:24 +0100 Subject: [PATCH 057/104] Fix test --- libs/template/renderer_test.go | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/libs/template/renderer_test.go b/libs/template/renderer_test.go index 8d0c21010b..e541259e00 100644 --- a/libs/template/renderer_test.go +++ b/libs/template/renderer_test.go @@ -66,7 +66,11 @@ func assertBuiltinTemplateValid(t *testing.T, settings map[string]any, target st require.NoError(t, err) // Apply initialize / validation mutators - b.Config.Workspace.CurrentUser = &bundleConfig.User{User: cachedUser} + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error { + b.Config.Workspace.CurrentUser = &bundleConfig.User{User: cachedUser} + return nil + }) + b.Tagging = tags.ForCloud(w.Config) b.WorkspaceClient() b.Config.Bundle.Terraform = &bundleConfig.Terraform{ From 71f326f31ca230670378b19db7414ee1a617e1bd Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 31 Jan 2024 16:27:48 +0100 Subject: [PATCH 058/104] Remove mergo dependency --- go.mod | 1 - go.sum | 2 -- 2 files changed, 3 deletions(-) diff --git a/go.mod b/go.mod index f33219aa42..4d67f8b223 100644 --- a/go.mod +++ b/go.mod @@ -12,7 +12,6 @@ require ( github.com/hashicorp/hc-install v0.6.2 // MPL 2.0 github.com/hashicorp/terraform-exec v0.20.0 // MPL 2.0 github.com/hashicorp/terraform-json v0.21.0 // MPL 2.0 - github.com/imdario/mergo v0.3.15 // BSD-3-Clause github.com/manifoldco/promptui v0.9.0 // BSD-3-Clause github.com/mattn/go-isatty v0.0.20 // MIT github.com/nwidger/jsoncolor v0.3.2 // MIT diff --git a/go.sum b/go.sum index 96d0431192..4d569368cc 100644 --- a/go.sum +++ b/go.sum @@ -108,8 +108,6 @@ github.com/hashicorp/terraform-exec v0.20.0 h1:DIZnPsqzPGuUnq6cH8jWcPunBfY+C+M8J github.com/hashicorp/terraform-exec v0.20.0/go.mod h1:ckKGkJWbsNqFKV1itgMnE0hY9IYf1HoiekpuN0eWoDw= github.com/hashicorp/terraform-json v0.21.0 h1:9NQxbLNqPbEMze+S6+YluEdXgJmhQykRyRNd+zTI05U= github.com/hashicorp/terraform-json v0.21.0/go.mod h1:qdeBs11ovMzo5puhrRibdD6d2Dq6TyE/28JiU4tIQxk= -github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM= -github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= From 2dec864edcc8894ac0985ff14f4e6066334073e8 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 31 Jan 2024 16:29:29 +0100 Subject: [PATCH 059/104] Make 
linter happy --- bundle/config/mutator/environments_compat.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bundle/config/mutator/environments_compat.go b/bundle/config/mutator/environments_compat.go index fe45ba47cf..72794a1d0c 100644 --- a/bundle/config/mutator/environments_compat.go +++ b/bundle/config/mutator/environments_compat.go @@ -33,8 +33,7 @@ func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) err // Return an error if both "environments" and "targets" are set. if environments != dyn.NilValue && targets != dyn.NilValue { return dyn.NilValue, fmt.Errorf( - "both 'environments' and 'targets' are specified; only 'targets' should be used. "+ - "Instance of 'environments' found at %s.", + "both 'environments' and 'targets' are specified; only 'targets' should be used: %s", environments.Location().String(), ) } From 1fbed6180675fe4b5aa029780eea1b8e91014cf1 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 2 Feb 2024 14:39:41 +0100 Subject: [PATCH 060/104] Always reset typed config after running mutator --- bundle/config/root.go | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/bundle/config/root.go b/bundle/config/root.go index 88840e647a..bcb269ad7d 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -149,6 +149,8 @@ func (r *Root) toTyped(v dyn.Value) error { return err } + // Assign config file paths after converting to typed configuration. + r.ConfigureConfigFilePath() return nil } @@ -163,10 +165,6 @@ func (r *Root) Mutate(fn func(dyn.Value) (dyn.Value, error)) error { return err } r.value = nv - - // Assign config file paths after mutating the configuration. - r.ConfigureConfigFilePath() - return nil } @@ -184,8 +182,6 @@ func (r *Root) MarkMutatorEntry() { panic(err) } - r.ConfigureConfigFilePath() - } else { nv, err := convert.FromTyped(r, r.value) if err != nil { @@ -193,6 +189,12 @@ func (r *Root) MarkMutatorEntry() { } r.value = nv + + // Re-run ToTyped to ensure that no state is piggybacked + err = r.toTyped(r.value) + if err != nil { + panic(err) + } } } @@ -208,6 +210,12 @@ func (r *Root) MarkMutatorExit() { } r.value = nv + + // Re-run ToTyped to ensure that no state is piggybacked + err = r.toTyped(r.value) + if err != nil { + panic(err) + } } } @@ -386,7 +394,6 @@ func (r *Root) MergeTargetOverrides(name string) error { panic(err) } - r.ConfigureConfigFilePath() return nil } From 01a8b20c12f6d067774c02a58800dd62bebce6c3 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 5 Feb 2024 16:54:32 +0100 Subject: [PATCH 061/104] Clean paths in test for sync path rewrites --- .../config/mutator/rewrite_sync_paths_test.go | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/bundle/config/mutator/rewrite_sync_paths_test.go b/bundle/config/mutator/rewrite_sync_paths_test.go index 6e7f9b4836..576333e928 100644 --- a/bundle/config/mutator/rewrite_sync_paths_test.go +++ b/bundle/config/mutator/rewrite_sync_paths_test.go @@ -2,6 +2,7 @@ package mutator_test import ( "context" + "path/filepath" "testing" "github.com/databricks/cli/bundle" @@ -36,10 +37,10 @@ func TestRewriteSyncPathsRelative(t *testing.T) { err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) assert.NoError(t, err) - assert.Equal(t, "foo", b.Config.Sync.Include[0]) - assert.Equal(t, "a/bar", b.Config.Sync.Include[1]) - assert.Equal(t, "a/b/baz", b.Config.Sync.Exclude[0]) - assert.Equal(t, "a/b/c/qux", b.Config.Sync.Exclude[1]) + assert.Equal(t, 
filepath.Clean("foo"), b.Config.Sync.Include[0]) + assert.Equal(t, filepath.Clean("a/bar"), b.Config.Sync.Include[1]) + assert.Equal(t, filepath.Clean("a/b/baz"), b.Config.Sync.Exclude[0]) + assert.Equal(t, filepath.Clean("a/b/c/qux"), b.Config.Sync.Exclude[1]) } func TestRewriteSyncPathsAbsolute(t *testing.T) { @@ -67,10 +68,10 @@ func TestRewriteSyncPathsAbsolute(t *testing.T) { err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths()) assert.NoError(t, err) - assert.Equal(t, "foo", b.Config.Sync.Include[0]) - assert.Equal(t, "a/bar", b.Config.Sync.Include[1]) - assert.Equal(t, "a/b/baz", b.Config.Sync.Exclude[0]) - assert.Equal(t, "a/b/c/qux", b.Config.Sync.Exclude[1]) + assert.Equal(t, filepath.Clean("foo"), b.Config.Sync.Include[0]) + assert.Equal(t, filepath.Clean("a/bar"), b.Config.Sync.Include[1]) + assert.Equal(t, filepath.Clean("a/b/baz"), b.Config.Sync.Exclude[0]) + assert.Equal(t, filepath.Clean("a/b/c/qux"), b.Config.Sync.Exclude[1]) } func TestRewriteSyncPathsErrorPaths(t *testing.T) { From fc9c31c50234b73131080f68a46ba0fa38e0bda0 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 5 Feb 2024 17:07:08 +0100 Subject: [PATCH 062/104] Use dyn.Map for sync section --- bundle/config/mutator/rewrite_sync_paths.go | 80 +++++++-------------- 1 file changed, 26 insertions(+), 54 deletions(-) diff --git a/bundle/config/mutator/rewrite_sync_paths.go b/bundle/config/mutator/rewrite_sync_paths.go index e20df5f3cd..6d0acdc362 100644 --- a/bundle/config/mutator/rewrite_sync_paths.go +++ b/bundle/config/mutator/rewrite_sync_paths.go @@ -2,9 +2,7 @@ package mutator import ( "context" - "maps" "path/filepath" - "slices" "github.com/databricks/cli/bundle" @@ -21,66 +19,40 @@ func (m *rewriteSyncPaths) Name() string { return "RewriteSyncPaths" } -func (m *rewriteSyncPaths) makeRelativeTo(root string, seq dyn.Value) (dyn.Value, error) { - if seq == dyn.NilValue || seq.Kind() != dyn.KindSequence { - return dyn.NilValue, nil - } - - out, ok := seq.AsSequence() - if !ok { - return seq, nil - } - - out = slices.Clone(out) - for i, v := range out { - if v.Kind() != dyn.KindString { - continue - } - +// makeRelativeTo returns a dyn.MapFunc that joins the relative path +// of the file it was defined in w.r.t. the bundle root path, with +// the contents of the string node. +// +// For example: +// - The bundle root is /foo +// - The configuration file that defines the string node is at /foo/bar/baz.yml +// - The string node contains "somefile.*" +// +// Then the resulting value will be "bar/somefile.*". 
+func (m *rewriteSyncPaths) makeRelativeTo(root string) dyn.MapFunc { + return func(v dyn.Value) (dyn.Value, error) { dir := filepath.Dir(v.Location().File) rel, err := filepath.Rel(root, dir) if err != nil { return dyn.NilValue, err } - out[i] = dyn.NewValue(filepath.Join(rel, v.MustString()), v.Location()) - } - - return dyn.NewValue(out, seq.Location()), nil -} - -func (m *rewriteSyncPaths) fn(root string) func(c dyn.Value) (dyn.Value, error) { - return func(c dyn.Value) (dyn.Value, error) { - var err error - - // First build a new sync object - sync := c.Get("sync") - if sync == dyn.NilValue { - return c, nil - } - - out, ok := sync.AsMap() - if !ok { - return c, nil - } - - out = maps.Clone(out) - - out["include"], err = m.makeRelativeTo(root, out["include"]) - if err != nil { - return c, err - } - - out["exclude"], err = m.makeRelativeTo(root, out["exclude"]) - if err != nil { - return c, err - } - - // Then replace the sync object with the new one - return dyn.Set(c, "sync", dyn.NewValue(out, sync.Location())) + return dyn.NewValue(filepath.Join(rel, v.MustString()), v.Location()), nil } } func (m *rewriteSyncPaths) Apply(ctx context.Context, b *bundle.Bundle) error { - return b.Config.Mutate(m.fn(b.Config.Path)) + return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + return dyn.Map(v, "sync", func(v dyn.Value) (nv dyn.Value, err error) { + nv, err = dyn.Map(v, "include", dyn.Foreach(m.makeRelativeTo(b.Config.Path))) + if err != nil { + return dyn.NilValue, err + } + nv, err = dyn.Map(nv, "exclude", dyn.Foreach(m.makeRelativeTo(b.Config.Path))) + if err != nil { + return dyn.NilValue, err + } + return nv, nil + }) + }) } From afd745056221d6796213f3871942236438d2ff29 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 5 Feb 2024 19:08:31 +0100 Subject: [PATCH 063/104] Clean paths in sync override test --- bundle/tests/override_sync_test.go | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/bundle/tests/override_sync_test.go b/bundle/tests/override_sync_test.go index a2d3a05f5a..973a4ea554 100644 --- a/bundle/tests/override_sync_test.go +++ b/bundle/tests/override_sync_test.go @@ -1,6 +1,7 @@ package config_tests import ( + "path/filepath" "testing" "github.com/stretchr/testify/assert" @@ -8,19 +9,19 @@ import ( func TestOverrideSyncTarget(t *testing.T) { b := load(t, "./override_sync") - assert.ElementsMatch(t, []string{"src/*"}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{filepath.Clean("src/*")}, b.Config.Sync.Include) assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync", "development") - assert.ElementsMatch(t, []string{"src/*", "tests/*"}, b.Config.Sync.Include) - assert.ElementsMatch(t, []string{"dist"}, b.Config.Sync.Exclude) + assert.ElementsMatch(t, []string{filepath.Clean("src/*"), filepath.Clean("tests/*")}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{filepath.Clean("dist")}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync", "staging") - assert.ElementsMatch(t, []string{"src/*", "fixtures/*"}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{filepath.Clean("src/*"), filepath.Clean("fixtures/*")}, b.Config.Sync.Include) assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync", "prod") - assert.ElementsMatch(t, []string{"src/*"}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{filepath.Clean("src/*")}, b.Config.Sync.Include) assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) } @@ 
-30,11 +31,11 @@ func TestOverrideSyncTargetNoRootSync(t *testing.T) { assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync_no_root", "development") - assert.ElementsMatch(t, []string{"tests/*"}, b.Config.Sync.Include) - assert.ElementsMatch(t, []string{"dist"}, b.Config.Sync.Exclude) + assert.ElementsMatch(t, []string{filepath.Clean("tests/*")}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{filepath.Clean("dist")}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync_no_root", "staging") - assert.ElementsMatch(t, []string{"fixtures/*"}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{filepath.Clean("fixtures/*")}, b.Config.Sync.Include) assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync_no_root", "prod") From b6b3e07c1d8c678926ca05de0a9e59554ae2fbb8 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 5 Feb 2024 20:34:06 +0100 Subject: [PATCH 064/104] Factor out function to merge elements by key --- bundle/config/mutator/merge_job_clusters.go | 60 +++------------ .../config/mutator/merge_job_clusters_test.go | 35 ++++++++- bundle/config/mutator/merge_job_tasks.go | 60 +++------------ bundle/config/mutator/merge_job_tasks_test.go | 34 +++++++++ .../config/mutator/merge_pipeline_clusters.go | 73 +++---------------- libs/dyn/merge/sequence_by_key.go | 67 +++++++++++++++++ libs/dyn/merge/sequence_by_key_test.go | 52 +++++++++++++ 7 files changed, 215 insertions(+), 166 deletions(-) create mode 100644 libs/dyn/merge/sequence_by_key.go create mode 100644 libs/dyn/merge/sequence_by_key_test.go diff --git a/bundle/config/mutator/merge_job_clusters.go b/bundle/config/mutator/merge_job_clusters.go index 3f482821f1..e8378f4801 100644 --- a/bundle/config/mutator/merge_job_clusters.go +++ b/bundle/config/mutator/merge_job_clusters.go @@ -18,57 +18,15 @@ func (m *mergeJobClusters) Name() string { return "MergeJobClusters" } -// mergeJobClusters merges job clusters with the same key. -// The job clusters field is a slice, and as such, overrides are appended to it. -// We can identify a job cluster by its key, however, so we can use this key -// to figure out which definitions are actually overrides and merge them. -func (m *mergeJobClusters) mergeJobClusters(v dyn.Value) (dyn.Value, error) { - // We know the type of this value is a sequence. - // For additional defence, return self if it is not. - clusters, ok := v.AsSequence() - if !ok { - return v, nil +func (m *mergeJobClusters) jobClusterKey(v dyn.Value) string { + switch v.Kind() { + case dyn.KindNil: + return "" + case dyn.KindString: + return v.MustString() + default: + panic("job cluster key must be a string") } - - seen := make(map[string]dyn.Value, len(clusters)) - keys := make([]string, 0, len(clusters)) - - // Target overrides are always appended, so we can iterate in natural order to - // first find the base definition, and merge instances we encounter later. - for i := range clusters { - var key string - - // Get task key if present. - kv := clusters[i].Get("job_cluster_key") - if kv.Kind() == dyn.KindString { - key = kv.MustString() - } - - // Register task with key if not yet seen before. - ref, ok := seen[key] - if !ok { - keys = append(keys, key) - seen[key] = clusters[i] - continue - } - - // Merge this instance into the reference. - nv, err := merge.Merge(ref, clusters[i]) - if err != nil { - return v, err - } - - // Overwrite reference. - seen[key] = nv - } - - // Gather resulting clusters in natural order. 
- out := make([]dyn.Value, 0, len(keys)) - for _, key := range keys { - out = append(out, seen[key]) - } - - return dyn.NewValue(out, v.Location()), nil } func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) error { @@ -78,7 +36,7 @@ func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) error { } return dyn.Map(v, "resources.jobs", dyn.Foreach(func(job dyn.Value) (dyn.Value, error) { - return dyn.Map(job, "job_clusters", m.mergeJobClusters) + return dyn.Map(job, "job_clusters", merge.ElementsByKey("job_cluster_key", m.jobClusterKey)) })) }) } diff --git a/bundle/config/mutator/merge_job_clusters_test.go b/bundle/config/mutator/merge_job_clusters_test.go index 131a04e4c6..a32b70281f 100644 --- a/bundle/config/mutator/merge_job_clusters_test.go +++ b/bundle/config/mutator/merge_job_clusters_test.go @@ -19,7 +19,6 @@ func TestMergeJobClusters(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "foo": { - JobSettings: &jobs.JobSettings{ JobClusters: []jobs.JobCluster{ { @@ -70,3 +69,37 @@ func TestMergeJobClusters(t *testing.T) { jc1 := j.JobClusters[1].NewCluster assert.Equal(t, "10.4.x-scala2.12", jc1.SparkVersion) } + +func TestMergeJobClustersWithNilKey(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "foo": { + JobSettings: &jobs.JobSettings{ + JobClusters: []jobs.JobCluster{ + { + NewCluster: &compute.ClusterSpec{ + SparkVersion: "13.3.x-scala2.12", + NodeTypeId: "i3.xlarge", + NumWorkers: 2, + }, + }, + { + NewCluster: &compute.ClusterSpec{ + NodeTypeId: "i3.2xlarge", + NumWorkers: 4, + }, + }, + }, + }, + }, + }, + }, + }, + } + + err := bundle.Apply(context.Background(), b, mutator.MergeJobClusters()) + assert.NoError(t, err) + assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1) +} diff --git a/bundle/config/mutator/merge_job_tasks.go b/bundle/config/mutator/merge_job_tasks.go index 07ca27e8f4..7394368ab8 100644 --- a/bundle/config/mutator/merge_job_tasks.go +++ b/bundle/config/mutator/merge_job_tasks.go @@ -18,57 +18,15 @@ func (m *mergeJobTasks) Name() string { return "MergeJobTasks" } -// mergeJobTasks merges tasks with the same key. -// The tasks field is a slice, and as such, overrides are appended to it. -// We can identify a task by its task key, however, so we can use this key -// to figure out which definitions are actually overrides and merge them. -func (m *mergeJobTasks) mergeJobTasks(v dyn.Value) (dyn.Value, error) { - // We know the type of this value is a sequence. - // For additional defence, return self if it is not. - tasks, ok := v.AsSequence() - if !ok { - return v, nil +func (m *mergeJobTasks) taskKeyString(v dyn.Value) string { + switch v.Kind() { + case dyn.KindNil: + return "" + case dyn.KindString: + return v.MustString() + default: + panic("task key must be a string") } - - seen := make(map[string]dyn.Value, len(tasks)) - keys := make([]string, 0, len(tasks)) - - // Target overrides are always appended, so we can iterate in natural order to - // first find the base definition, and merge instances we encounter later. - for i := range tasks { - var key string - - // Get task key if present. - kv := tasks[i].Get("task_key") - if kv.Kind() == dyn.KindString { - key = kv.MustString() - } - - // Register task with key if not yet seen before. - ref, ok := seen[key] - if !ok { - keys = append(keys, key) - seen[key] = tasks[i] - continue - } - - // Merge this instance into the reference. 
- nv, err := merge.Merge(ref, tasks[i]) - if err != nil { - return v, err - } - - // Overwrite reference. - seen[key] = nv - } - - // Gather resulting tasks in natural order. - out := make([]dyn.Value, 0, len(keys)) - for _, key := range keys { - out = append(out, seen[key]) - } - - return dyn.NewValue(out, v.Location()), nil } func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) error { @@ -78,7 +36,7 @@ func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) error { } return dyn.Map(v, "resources.jobs", dyn.Foreach(func(job dyn.Value) (dyn.Value, error) { - return dyn.Map(job, "tasks", m.mergeJobTasks) + return dyn.Map(job, "tasks", merge.ElementsByKey("task_key", m.taskKeyString)) })) }) } diff --git a/bundle/config/mutator/merge_job_tasks_test.go b/bundle/config/mutator/merge_job_tasks_test.go index 92688d956e..b3fb357e0b 100644 --- a/bundle/config/mutator/merge_job_tasks_test.go +++ b/bundle/config/mutator/merge_job_tasks_test.go @@ -81,3 +81,37 @@ func TestMergeJobTasks(t *testing.T) { task1 := j.Tasks[1].NewCluster assert.Equal(t, "10.4.x-scala2.12", task1.SparkVersion) } + +func TestMergeJobTasksWithNilKey(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "foo": { + JobSettings: &jobs.JobSettings{ + Tasks: []jobs.Task{ + { + NewCluster: &compute.ClusterSpec{ + SparkVersion: "13.3.x-scala2.12", + NodeTypeId: "i3.xlarge", + NumWorkers: 2, + }, + }, + { + NewCluster: &compute.ClusterSpec{ + NodeTypeId: "i3.2xlarge", + NumWorkers: 4, + }, + }, + }, + }, + }, + }, + }, + }, + } + + err := bundle.Apply(context.Background(), b, mutator.MergeJobTasks()) + assert.NoError(t, err) + assert.Len(t, b.Config.Resources.Jobs["foo"].Tasks, 1) +} diff --git a/bundle/config/mutator/merge_pipeline_clusters.go b/bundle/config/mutator/merge_pipeline_clusters.go index 5dc192f3f2..777ce611bf 100644 --- a/bundle/config/mutator/merge_pipeline_clusters.go +++ b/bundle/config/mutator/merge_pipeline_clusters.go @@ -19,70 +19,17 @@ func (m *mergePipelineClusters) Name() string { return "MergePipelineClusters" } -func (m *mergePipelineClusters) clusterLabel(cluster dyn.Value) (label string) { - v := cluster.Get("label") - if v == dyn.NilValue { +func (m *mergePipelineClusters) clusterLabel(v dyn.Value) string { + switch v.Kind() { + case dyn.KindNil: + // Note: the cluster label is optional and defaults to 'default'. + // We therefore ALSO merge all clusters without a label. return "default" + case dyn.KindString: + return strings.ToLower(v.MustString()) + default: + panic("task key must be a string") } - - if v.Kind() != dyn.KindString { - panic("cluster label must be a string") - } - - return strings.ToLower(v.MustString()) -} - -// mergeClustersForPipeline merges cluster definitions with same label. -// The clusters field is a slice, and as such, overrides are appended to it. -// We can identify a cluster by its label, however, so we can use this label -// to figure out which definitions are actually overrides and merge them. -// -// Note: the cluster label is optional and defaults to 'default'. -// We therefore ALSO merge all clusters without a label. -func (m *mergePipelineClusters) mergeClustersForPipeline(v dyn.Value) (dyn.Value, error) { - // We know the type of this value is a sequence. - // For additional defence, return self if it is not. 
- clusters, ok := v.AsSequence() - if !ok { - return v, nil - } - - seen := make(map[string]dyn.Value, len(clusters)) - labels := make([]string, 0, len(clusters)) - - // Target overrides are always appended, so we can iterate in natural order to - // first find the base definition, and merge instances we encounter later. - for i := range clusters { - label := m.clusterLabel(clusters[i]) - - // Register pipeline cluster with label if not yet seen before. - ref, ok := seen[label] - if !ok { - labels = append(labels, label) - seen[label] = clusters[i] - continue - } - - // Merge this instance into the reference. - var err error - seen[label], err = merge.Merge(ref, clusters[i]) - if err != nil { - return v, err - } - } - - // Gather resulting clusters in natural order. - out := make([]dyn.Value, 0, len(labels)) - for _, label := range labels { - // Overwrite the label with the normalized version. - nv, err := dyn.Set(seen[label], "label", dyn.V(label)) - if err != nil { - return dyn.InvalidValue, err - } - out = append(out, nv) - } - - return dyn.NewValue(out, v.Location()), nil } func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) error { @@ -92,7 +39,7 @@ func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) err } return dyn.Map(v, "resources.pipelines", dyn.Foreach(func(pipeline dyn.Value) (dyn.Value, error) { - return dyn.Map(pipeline, "clusters", m.mergeClustersForPipeline) + return dyn.Map(pipeline, "clusters", merge.ElementsByKey("label", m.clusterLabel)) })) }) } diff --git a/libs/dyn/merge/sequence_by_key.go b/libs/dyn/merge/sequence_by_key.go new file mode 100644 index 0000000000..3ce571bf7e --- /dev/null +++ b/libs/dyn/merge/sequence_by_key.go @@ -0,0 +1,67 @@ +package merge + +import "github.com/databricks/cli/libs/dyn" + +type elementsByKey struct { + key string + keyFunc func(dyn.Value) string +} + +func (e elementsByKey) Map(v dyn.Value) (dyn.Value, error) { + // We know the type of this value is a sequence. + // For additional defence, return self if it is not. + elements, ok := v.AsSequence() + if !ok { + return v, nil + } + + seen := make(map[string]dyn.Value, len(elements)) + keys := make([]string, 0, len(elements)) + + // Iterate in natural order. For a given key, we first see the + // base definition and merge instances that come after it. + for i := range elements { + kv := elements[i].Get(e.key) + key := e.keyFunc(kv) + + // Register element with key if not yet seen before. + ref, ok := seen[key] + if !ok { + keys = append(keys, key) + seen[key] = elements[i] + continue + } + + // Merge this instance into the reference. + nv, err := Merge(ref, elements[i]) + if err != nil { + return v, err + } + + // Overwrite reference. + seen[key] = nv + } + + // Gather resulting elements in natural order. + out := make([]dyn.Value, 0, len(keys)) + for _, key := range keys { + nv, err := dyn.Set(seen[key], e.key, dyn.V(key)) + if err != nil { + return dyn.InvalidValue, err + } + out = append(out, nv) + } + + return dyn.NewValue(out, v.Location()), nil +} + +// ElementsByKey returns a [dyn.MapFunc] that operates on a sequence +// where each element is a map. It groups elements by a key and merges +// elements with the same key. +// +// The function that extracts the key from an element is provided as +// a parameter. The resulting elements get their key field overwritten +// with the value as returned by the key function. 
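// For example (an illustrative sketch, assuming a key function that
// lowercases the key): the sequence
//
//	[{key: "a", x: 1}, {key: "b", x: 2}, {key: "A", y: 3}]
//
// is merged into
//
//	[{key: "a", x: 1, y: 3}, {key: "b", x: 2}]
//
// Later elements are merged into the first element with the same key, the
// order of first appearance is preserved, and the key field is rewritten
// to the value returned by the key function.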
+func ElementsByKey(key string, keyFunc func(dyn.Value) string) dyn.MapFunc { + return elementsByKey{key, keyFunc}.Map +} diff --git a/libs/dyn/merge/sequence_by_key_test.go b/libs/dyn/merge/sequence_by_key_test.go new file mode 100644 index 0000000000..c61f834e5f --- /dev/null +++ b/libs/dyn/merge/sequence_by_key_test.go @@ -0,0 +1,52 @@ +package merge + +import ( + "strings" + "testing" + + "github.com/databricks/cli/libs/dyn" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestElementByKey(t *testing.T) { + vin := dyn.V([]dyn.Value{ + dyn.V(map[string]dyn.Value{ + "key": dyn.V("foo"), + "value": dyn.V(42), + }), + dyn.V(map[string]dyn.Value{ + "key": dyn.V("bar"), + "value": dyn.V(43), + }), + dyn.V(map[string]dyn.Value{ + // Use upper case key to test that the resulting element has its + // key field assigned to the output of the key function. + // The key function in this test returns the lower case version of the key. + "key": dyn.V("FOO"), + "value": dyn.V(44), + }), + }) + + keyFunc := func(v dyn.Value) string { + return strings.ToLower(v.MustString()) + } + + vout, err := dyn.MapByPath(vin, dyn.EmptyPath, ElementsByKey("key", keyFunc)) + require.NoError(t, err) + assert.Len(t, vout.MustSequence(), 2) + assert.Equal(t, + vout.Index(0).AsAny(), + map[string]any{ + "key": "foo", + "value": 44, + }, + ) + assert.Equal(t, + vout.Index(1).AsAny(), + map[string]any{ + "key": "bar", + "value": 43, + }, + ) +} From 20d5380101f2693e4628cf08b218ba174b5ec67a Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 5 Feb 2024 20:34:34 +0100 Subject: [PATCH 065/104] Rename --- libs/dyn/merge/{sequence_by_key.go => elements_by_key.go} | 0 .../merge/{sequence_by_key_test.go => elements_by_key_test.go} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename libs/dyn/merge/{sequence_by_key.go => elements_by_key.go} (100%) rename libs/dyn/merge/{sequence_by_key_test.go => elements_by_key_test.go} (100%) diff --git a/libs/dyn/merge/sequence_by_key.go b/libs/dyn/merge/elements_by_key.go similarity index 100% rename from libs/dyn/merge/sequence_by_key.go rename to libs/dyn/merge/elements_by_key.go diff --git a/libs/dyn/merge/sequence_by_key_test.go b/libs/dyn/merge/elements_by_key_test.go similarity index 100% rename from libs/dyn/merge/sequence_by_key_test.go rename to libs/dyn/merge/elements_by_key_test.go From c445d198d02c266e245a01b6f596fa91604ac6d6 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 5 Feb 2024 20:43:34 +0100 Subject: [PATCH 066/104] Revert commented out scripts section --- bundle/config/mutator/mutator.go | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bundle/config/mutator/mutator.go b/bundle/config/mutator/mutator.go index d6b38803b3..1bef564173 100644 --- a/bundle/config/mutator/mutator.go +++ b/bundle/config/mutator/mutator.go @@ -2,11 +2,13 @@ package mutator import ( "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/scripts" ) func DefaultMutators() []bundle.Mutator { return []bundle.Mutator{ - // scripts.Execute(config.ScriptPreInit), + scripts.Execute(config.ScriptPreInit), ProcessRootIncludes(), RewriteSyncPaths(), EnvironmentsToTargets(), From 6b78191ceecf15a721f89ca539919c85c74ed9bc Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 5 Feb 2024 20:43:42 +0100 Subject: [PATCH 067/104] No new var --- bundle/config/mutator/rewrite_sync_paths.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/bundle/config/mutator/rewrite_sync_paths.go b/bundle/config/mutator/rewrite_sync_paths.go index 6d0acdc362..c1761690d4 100644 --- a/bundle/config/mutator/rewrite_sync_paths.go +++ b/bundle/config/mutator/rewrite_sync_paths.go @@ -44,15 +44,15 @@ func (m *rewriteSyncPaths) makeRelativeTo(root string) dyn.MapFunc { func (m *rewriteSyncPaths) Apply(ctx context.Context, b *bundle.Bundle) error { return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { return dyn.Map(v, "sync", func(v dyn.Value) (nv dyn.Value, err error) { - nv, err = dyn.Map(v, "include", dyn.Foreach(m.makeRelativeTo(b.Config.Path))) + v, err = dyn.Map(v, "include", dyn.Foreach(m.makeRelativeTo(b.Config.Path))) if err != nil { return dyn.NilValue, err } - nv, err = dyn.Map(nv, "exclude", dyn.Foreach(m.makeRelativeTo(b.Config.Path))) + v, err = dyn.Map(v, "exclude", dyn.Foreach(m.makeRelativeTo(b.Config.Path))) if err != nil { return dyn.NilValue, err } - return nv, nil + return v, nil }) }) } From 9c88a7e57f3a05d5108bd2c0ff99fd4b1cbae75b Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 5 Feb 2024 21:26:42 +0100 Subject: [PATCH 068/104] Newline --- bundle/mutator.go | 1 - 1 file changed, 1 deletion(-) diff --git a/bundle/mutator.go b/bundle/mutator.go index c1a2429057..8b22502d56 100644 --- a/bundle/mutator.go +++ b/bundle/mutator.go @@ -25,7 +25,6 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) error { defer b.Config.MarkMutatorExit() err := m.Apply(ctx, b) - if err != nil { log.Errorf(ctx, "Error: %s", err) return err From 36666d8d651291f18c56f5a07eb697afec4a9f0f Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 5 Feb 2024 21:29:24 +0100 Subject: [PATCH 069/104] Include PR link --- bundle/tests/job_with_spark_conf_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bundle/tests/job_with_spark_conf_test.go b/bundle/tests/job_with_spark_conf_test.go index abc0a58170..90bdc977d6 100644 --- a/bundle/tests/job_with_spark_conf_test.go +++ b/bundle/tests/job_with_spark_conf_test.go @@ -16,7 +16,7 @@ func TestJobWithSparkConf(t *testing.T) { assert.Equal(t, "test_cluster", job.JobClusters[0].JobClusterKey) // This test exists because of https://github.com/databricks/cli/issues/992. - // It is solved as of **TODO**. + // It is solved for bundles as of https://github.com/databricks/cli/pull/1098. 
require.Len(t, job.JobClusters, 1) cluster := job.JobClusters[0] assert.Equal(t, "14.2.x-scala2.12", cluster.NewCluster.SparkVersion) From 6b5fcb7890ceb726f3142d9bdc940f58152c3358 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 5 Feb 2024 21:50:38 +0100 Subject: [PATCH 070/104] Undo --- bundle/tests/override_sync_test.go | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/bundle/tests/override_sync_test.go b/bundle/tests/override_sync_test.go index 973a4ea554..a2d3a05f5a 100644 --- a/bundle/tests/override_sync_test.go +++ b/bundle/tests/override_sync_test.go @@ -1,7 +1,6 @@ package config_tests import ( - "path/filepath" "testing" "github.com/stretchr/testify/assert" @@ -9,19 +8,19 @@ import ( func TestOverrideSyncTarget(t *testing.T) { b := load(t, "./override_sync") - assert.ElementsMatch(t, []string{filepath.Clean("src/*")}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{"src/*"}, b.Config.Sync.Include) assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync", "development") - assert.ElementsMatch(t, []string{filepath.Clean("src/*"), filepath.Clean("tests/*")}, b.Config.Sync.Include) - assert.ElementsMatch(t, []string{filepath.Clean("dist")}, b.Config.Sync.Exclude) + assert.ElementsMatch(t, []string{"src/*", "tests/*"}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{"dist"}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync", "staging") - assert.ElementsMatch(t, []string{filepath.Clean("src/*"), filepath.Clean("fixtures/*")}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{"src/*", "fixtures/*"}, b.Config.Sync.Include) assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync", "prod") - assert.ElementsMatch(t, []string{filepath.Clean("src/*")}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{"src/*"}, b.Config.Sync.Include) assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) } @@ -31,11 +30,11 @@ func TestOverrideSyncTargetNoRootSync(t *testing.T) { assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync_no_root", "development") - assert.ElementsMatch(t, []string{filepath.Clean("tests/*")}, b.Config.Sync.Include) - assert.ElementsMatch(t, []string{filepath.Clean("dist")}, b.Config.Sync.Exclude) + assert.ElementsMatch(t, []string{"tests/*"}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{"dist"}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync_no_root", "staging") - assert.ElementsMatch(t, []string{filepath.Clean("fixtures/*")}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{"fixtures/*"}, b.Config.Sync.Include) assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync_no_root", "prod") From eb0b6a22f3ff7ed9a42b823d49a92182d2e2f1d0 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 6 Feb 2024 15:49:36 +0100 Subject: [PATCH 071/104] Ensure every variable reference is passed to lookup function References to keys that themselves are also variable references were shortcircuited in the previous approach. This meant that certain fields were resolved even if the lookup function would have instructed to skip resolution. To fix this we separate memoization of resolved variable references from memoization of lookups. Now, every variable reference is passed through the lookup function. 
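As a minimal sketch of the guarantee this provides (hypothetical input values, using the dyn and dynvar APIs introduced earlier in this series): with a lookup function that always returns ErrSkipResolution, no reference is rewritten, including references whose targets are themselves references.

    in := dyn.V(map[string]dyn.Value{
        "a": dyn.V("a"),
        "c": dyn.V("${a}"),
        "d": dyn.V("${c}"),
    })
    // The lookup is now consulted for "a" as well as "c", even though the
    // value of "c" is itself a reference; returning ErrSkipResolution
    // leaves both "c" and "d" as their original, unresolved strings.
    out, err := dynvar.Resolve(in, func(p dyn.Path) (dyn.Value, error) {
        return dyn.InvalidValue, dynvar.ErrSkipResolution
    })
    // err is nil and out is equal to in.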
--- libs/dyn/dynvar/resolve.go | 72 +++++++++++++++++++++++---------- libs/dyn/dynvar/resolve_test.go | 25 ++++++++++++ 2 files changed, 75 insertions(+), 22 deletions(-) diff --git a/libs/dyn/dynvar/resolve.go b/libs/dyn/dynvar/resolve.go index b4e119b6d7..b5417cac20 100644 --- a/libs/dyn/dynvar/resolve.go +++ b/libs/dyn/dynvar/resolve.go @@ -38,12 +38,20 @@ func Resolve(in dyn.Value, fn Lookup) (out dyn.Value, err error) { return resolver{in: in, fn: fn}.run() } +type lookupResult struct { + v dyn.Value + err error +} + type resolver struct { in dyn.Value fn Lookup refs map[string]ref resolved map[string]dyn.Value + + // Memoization for lookups. + lookups map[string]lookupResult } func (r resolver) run() (out dyn.Value, err error) { @@ -84,8 +92,10 @@ func (r *resolver) collectVariableReferences() (err error) { } func (r *resolver) resolveVariableReferences() (err error) { - // Initialize map for resolved variables. - // We use this for memoization. + // Initialize cache for lookups. + r.lookups = make(map[string]lookupResult) + + // Initialize cache for resolved variable references. r.resolved = make(map[string]dyn.Value) // Resolve each variable reference (in order). @@ -95,7 +105,7 @@ func (r *resolver) resolveVariableReferences() (err error) { keys := maps.Keys(r.refs) sort.Strings(keys) for _, key := range keys { - _, err := r.resolve(key, []string{key}) + _, err := r.resolveRef(key, r.refs[key], []string{key}) if err != nil { return err } @@ -104,29 +114,12 @@ func (r *resolver) resolveVariableReferences() (err error) { return nil } -func (r *resolver) resolve(key string, seen []string) (dyn.Value, error) { +func (r *resolver) resolveRef(key string, ref ref, seen []string) (dyn.Value, error) { // Check if we have already resolved this variable reference. if v, ok := r.resolved[key]; ok { return v, nil } - ref, ok := r.refs[key] - if !ok { - // Perform lookup in the input. - p, err := dyn.NewPathFromString(key) - if err != nil { - return dyn.InvalidValue, err - } - v, err := r.fn(p) - if err != nil && dyn.IsNoSuchKeyError(err) { - return dyn.InvalidValue, fmt.Errorf( - "reference does not exist: ${%s}", - key, - ) - } - return v, err - } - // This is an unresolved variable reference. deps := ref.references() @@ -143,7 +136,7 @@ func (r *resolver) resolve(key string, seen []string) (dyn.Value, error) { ) } - v, err := r.resolve(dep, append(seen, dep)) + v, err := r.resolveKey(dep, append(seen, dep)) // If we should skip resolution of this key, index j will hold an invalid [dyn.Value]. if errors.Is(err, ErrSkipResolution) { @@ -191,6 +184,41 @@ func (r *resolver) resolve(key string, seen []string) (dyn.Value, error) { return v, nil } +func (r *resolver) resolveKey(key string, seen []string) (dyn.Value, error) { + // Check if we have already looked up this key. + if v, ok := r.lookups[key]; ok { + return v.v, v.err + } + + // Parse the key into a path. + p, err := dyn.NewPathFromString(key) + if err != nil { + return dyn.InvalidValue, err + } + + // Look up the value for the given key. + v, err := r.fn(p) + if err != nil { + if dyn.IsNoSuchKeyError(err) { + err = fmt.Errorf("reference does not exist: ${%s}", key) + } + + // Cache the return value and return to the caller. + r.lookups[key] = lookupResult{v: dyn.InvalidValue, err: err} + return dyn.InvalidValue, err + } + + // If the returned value is a valid variable reference, resolve it. + ref, ok := newRef(v) + if ok { + v, err = r.resolveRef(key, ref, seen) + } + + // Cache the return value and return to the caller. 
+ r.lookups[key] = lookupResult{v: v, err: err} + return v, err +} + func (r *resolver) replaceVariableReferences() (dyn.Value, error) { // Walk the input and replace all variable references. return dyn.Walk(r.in, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { diff --git a/libs/dyn/dynvar/resolve_test.go b/libs/dyn/dynvar/resolve_test.go index ba700503ef..1234b7cbfc 100644 --- a/libs/dyn/dynvar/resolve_test.go +++ b/libs/dyn/dynvar/resolve_test.go @@ -182,3 +182,28 @@ func TestResolveWithSkip(t *testing.T) { assert.Equal(t, "a ${b}", getByPath(t, out, "e").MustString()) assert.Equal(t, "${b} a a ${b}", getByPath(t, out, "f").MustString()) } + +func TestResolveWithSkipEverything(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("b"), + "c": dyn.V("${a}"), + "d": dyn.V("${b}"), + "e": dyn.V("${a} ${b}"), + "f": dyn.V("${b} ${a} ${a} ${b}"), + "g": dyn.V("${d} ${c} ${c} ${d}"), + }) + + // The call must not replace anything if the lookup function returns ErrSkipResolution. + out, err := dynvar.Resolve(in, func(path dyn.Path) (dyn.Value, error) { + return dyn.InvalidValue, dynvar.ErrSkipResolution + }) + require.NoError(t, err) + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "b", getByPath(t, out, "b").MustString()) + assert.Equal(t, "${a}", getByPath(t, out, "c").MustString()) + assert.Equal(t, "${b}", getByPath(t, out, "d").MustString()) + assert.Equal(t, "${a} ${b}", getByPath(t, out, "e").MustString()) + assert.Equal(t, "${b} ${a} ${a} ${b}", getByPath(t, out, "f").MustString()) + assert.Equal(t, "${d} ${c} ${c} ${d}", getByPath(t, out, "g").MustString()) +} From 49b9681fc8b18dfc57e306dbc846a04c80d33242 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 6 Feb 2024 16:03:25 +0100 Subject: [PATCH 072/104] Add test for variable reference resolution --- .../resolve_variable_references_test.go | 64 +++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 bundle/config/mutator/resolve_variable_references_test.go diff --git a/bundle/config/mutator/resolve_variable_references_test.go b/bundle/config/mutator/resolve_variable_references_test.go new file mode 100644 index 0000000000..9ca951a201 --- /dev/null +++ b/bundle/config/mutator/resolve_variable_references_test.go @@ -0,0 +1,64 @@ +package mutator + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/variable" + "github.com/stretchr/testify/require" +) + +func TestResolveVariableReferences(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Name: "example", + }, + Workspace: config.Workspace{ + RootPath: "${bundle.name}/bar", + FilePath: "${workspace.root_path}/baz", + }, + }, + } + + // Apply with an invalid prefix. This should not change the workspace root path. + err := bundle.Apply(context.Background(), b, ResolveVariableReferences("doesntexist")) + require.NoError(t, err) + require.Equal(t, "${bundle.name}/bar", b.Config.Workspace.RootPath) + require.Equal(t, "${workspace.root_path}/baz", b.Config.Workspace.FilePath) + + // Apply with a valid prefix. This should change the workspace root path. 
+ err = bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle", "workspace")) + require.NoError(t, err) + require.Equal(t, "example/bar", b.Config.Workspace.RootPath) + require.Equal(t, "example/bar/baz", b.Config.Workspace.FilePath) +} + +func TestResolveVariableReferencesToBundleVariables(t *testing.T) { + s := func(s string) *string { + return &s + } + + b := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Name: "example", + }, + Workspace: config.Workspace{ + RootPath: "${bundle.name}/${var.foo}", + }, + Variables: map[string]*variable.Variable{ + "foo": { + Value: s("bar"), + }, + }, + }, + } + + // Apply with a valid prefix. This should change the workspace root path. + err := bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle", "variables")) + require.NoError(t, err) + require.Equal(t, "example/bar", b.Config.Workspace.RootPath) +} From ac07608fc1f00b25154b695868b9b108c26438f1 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 6 Feb 2024 16:34:02 +0100 Subject: [PATCH 073/104] Interpolate correct Terraform resource references --- bundle/deploy/terraform/interpolate_test.go | 71 +++++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 bundle/deploy/terraform/interpolate_test.go diff --git a/bundle/deploy/terraform/interpolate_test.go b/bundle/deploy/terraform/interpolate_test.go new file mode 100644 index 0000000000..909549e503 --- /dev/null +++ b/bundle/deploy/terraform/interpolate_test.go @@ -0,0 +1,71 @@ +package terraform + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestInterpolate(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Name: "example", + }, + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "my_job": { + JobSettings: &jobs.JobSettings{ + Tags: map[string]string{ + "other_pipeline": "${resources.pipelines.other_pipeline.id}", + "other_job": "${resources.jobs.other_job.id}", + "other_model": "${resources.models.other_model.id}", + "other_experiment": "${resources.experiments.other_experiment.id}", + "other_model_serving": "${resources.model_serving_endpoints.other_model_serving.id}", + "other_registered_model": "${resources.registered_models.other_registered_model.id}", + }, + }, + }, + }, + }, + }, + } + + err := bundle.Apply(context.Background(), b, Interpolate()) + require.NoError(t, err) + + j := b.Config.Resources.Jobs["my_job"] + assert.Equal(t, "${databricks_pipeline.other_pipeline.id}", j.Tags["other_pipeline"]) + assert.Equal(t, "${databricks_job.other_job.id}", j.Tags["other_job"]) + assert.Equal(t, "${databricks_mlflow_model.other_model.id}", j.Tags["other_model"]) + assert.Equal(t, "${databricks_mlflow_experiment.other_experiment.id}", j.Tags["other_experiment"]) + assert.Equal(t, "${databricks_model_serving.other_model_serving.id}", j.Tags["other_model_serving"]) + assert.Equal(t, "${databricks_registered_model.other_registered_model.id}", j.Tags["other_registered_model"]) +} + +func TestInterpolateUnknownResourceType(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "my_job": { + JobSettings: &jobs.JobSettings{ + Tags: map[string]string{ + "other_unknown": 
"${resources.unknown.other_unknown.id}", + }, + }, + }, + }, + }, + }, + } + + err := bundle.Apply(context.Background(), b, Interpolate()) + assert.Contains(t, err.Error(), `reference does not exist: ${resources.unknown.other_unknown.id}`) +} From 98f8344533332330f055541095fe3f04f844e972 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 6 Feb 2024 16:34:16 +0100 Subject: [PATCH 074/104] . --- bundle/deploy/terraform/interpolate.go | 88 ++++++++++++++++---------- 1 file changed, 55 insertions(+), 33 deletions(-) diff --git a/bundle/deploy/terraform/interpolate.go b/bundle/deploy/terraform/interpolate.go index 2fd5c3b025..525a38fa88 100644 --- a/bundle/deploy/terraform/interpolate.go +++ b/bundle/deploy/terraform/interpolate.go @@ -1,42 +1,64 @@ package terraform import ( + "context" + "fmt" + "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/dynvar" ) -// // Rewrite variable references to resources into Terraform compatible format. -// func interpolateTerraformResourceIdentifiers(path string, lookup map[string]string) (string, error) { -// parts := strings.Split(path, interpolation.Delimiter) -// if parts[0] == "resources" { -// switch parts[1] { -// case "pipelines": -// path = strings.Join(append([]string{"databricks_pipeline"}, parts[2:]...), interpolation.Delimiter) -// return fmt.Sprintf("${%s}", path), nil -// case "jobs": -// path = strings.Join(append([]string{"databricks_job"}, parts[2:]...), interpolation.Delimiter) -// return fmt.Sprintf("${%s}", path), nil -// case "models": -// path = strings.Join(append([]string{"databricks_mlflow_model"}, parts[2:]...), interpolation.Delimiter) -// return fmt.Sprintf("${%s}", path), nil -// case "experiments": -// path = strings.Join(append([]string{"databricks_mlflow_experiment"}, parts[2:]...), interpolation.Delimiter) -// return fmt.Sprintf("${%s}", path), nil -// case "model_serving_endpoints": -// path = strings.Join(append([]string{"databricks_model_serving"}, parts[2:]...), interpolation.Delimiter) -// return fmt.Sprintf("${%s}", path), nil -// case "registered_models": -// path = strings.Join(append([]string{"databricks_registered_model"}, parts[2:]...), interpolation.Delimiter) -// return fmt.Sprintf("${%s}", path), nil -// default: -// panic("TODO: " + parts[1]) -// } -// } - -// return interpolation.DefaultLookup(path, lookup) -// } +type interpolateMutator struct { +} func Interpolate() bundle.Mutator { - return mutator.ResolveVariableReferences("foobar") - // return interpolation.Interpolate(interpolateTerraformResourceIdentifiers) + return &interpolateMutator{} +} + +func (m *interpolateMutator) Name() string { + return "terraform.Interpolate" +} + +func (m *interpolateMutator) Apply(ctx context.Context, b *bundle.Bundle) error { + return b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) { + prefix := dyn.MustPathFromString("resources") + + // Resolve variable references in all values. + return dynvar.Resolve(root, func(path dyn.Path) (dyn.Value, error) { + // Expect paths of the form: + // - resources...... 
+ if !path.HasPrefix(prefix) || len(path) < 4 { + return dyn.InvalidValue, dynvar.ErrSkipResolution + } + + // Rewrite the bundle configuration path: + // + // ${resources.pipelines.my_pipeline.id} + // + // into the Terraform-compatible resource identifier: + // + // ${databricks_pipeline.my_pipeline.id} + // + switch path[1] { + case dyn.Key("pipelines"): + path = dyn.NewPath(dyn.Key("databricks_pipeline")).Append(path[2:]...) + case dyn.Key("jobs"): + path = dyn.NewPath(dyn.Key("databricks_job")).Append(path[2:]...) + case dyn.Key("models"): + path = dyn.NewPath(dyn.Key("databricks_mlflow_model")).Append(path[2:]...) + case dyn.Key("experiments"): + path = dyn.NewPath(dyn.Key("databricks_mlflow_experiment")).Append(path[2:]...) + case dyn.Key("model_serving_endpoints"): + path = dyn.NewPath(dyn.Key("databricks_model_serving")).Append(path[2:]...) + case dyn.Key("registered_models"): + path = dyn.NewPath(dyn.Key("databricks_registered_model")).Append(path[2:]...) + default: + // Trigger "key not found" for unknown resource types. + return dyn.GetByPath(root, path) + } + + return dyn.V(fmt.Sprintf("${%s}", path.String())), nil + }) + }) } From 44e791437424103e7c336b23462b8c6f01e1f3bb Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 6 Feb 2024 16:47:15 +0100 Subject: [PATCH 075/104] Use FromSlash for Windows compat --- bundle/tests/override_sync_test.go | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/bundle/tests/override_sync_test.go b/bundle/tests/override_sync_test.go index a2d3a05f5a..ad864180c6 100644 --- a/bundle/tests/override_sync_test.go +++ b/bundle/tests/override_sync_test.go @@ -1,6 +1,7 @@ package config_tests import ( + "path/filepath" "testing" "github.com/stretchr/testify/assert" @@ -8,19 +9,19 @@ import ( func TestOverrideSyncTarget(t *testing.T) { b := load(t, "./override_sync") - assert.ElementsMatch(t, []string{"src/*"}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{filepath.FromSlash("src/*")}, b.Config.Sync.Include) assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync", "development") - assert.ElementsMatch(t, []string{"src/*", "tests/*"}, b.Config.Sync.Include) - assert.ElementsMatch(t, []string{"dist"}, b.Config.Sync.Exclude) + assert.ElementsMatch(t, []string{filepath.FromSlash("src/*"), filepath.FromSlash("tests/*")}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{filepath.FromSlash("dist")}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync", "staging") - assert.ElementsMatch(t, []string{"src/*", "fixtures/*"}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{filepath.FromSlash("src/*"), filepath.FromSlash("fixtures/*")}, b.Config.Sync.Include) assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync", "prod") - assert.ElementsMatch(t, []string{"src/*"}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{filepath.FromSlash("src/*")}, b.Config.Sync.Include) assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) } @@ -30,11 +31,11 @@ func TestOverrideSyncTargetNoRootSync(t *testing.T) { assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync_no_root", "development") - assert.ElementsMatch(t, []string{"tests/*"}, b.Config.Sync.Include) - assert.ElementsMatch(t, []string{"dist"}, b.Config.Sync.Exclude) + assert.ElementsMatch(t, []string{filepath.FromSlash("tests/*")}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{filepath.FromSlash("dist")}, 
b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync_no_root", "staging") - assert.ElementsMatch(t, []string{"fixtures/*"}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{filepath.FromSlash("fixtures/*")}, b.Config.Sync.Include) assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) b = loadTarget(t, "./override_sync_no_root", "prod") From f7b3253d22c160da518b344108e4d92dc6368d94 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 6 Feb 2024 20:11:06 +0100 Subject: [PATCH 076/104] Rewrite sync paths after selecting target --- bundle/config/mutator/mutator.go | 2 +- bundle/tests/loader.go | 14 ++++---------- bundle/tests/override_sync_test.go | 9 +++------ 3 files changed, 8 insertions(+), 17 deletions(-) diff --git a/bundle/config/mutator/mutator.go b/bundle/config/mutator/mutator.go index 1bef564173..39fe8d1bb2 100644 --- a/bundle/config/mutator/mutator.go +++ b/bundle/config/mutator/mutator.go @@ -10,7 +10,6 @@ func DefaultMutators() []bundle.Mutator { return []bundle.Mutator{ scripts.Execute(config.ScriptPreInit), ProcessRootIncludes(), - RewriteSyncPaths(), EnvironmentsToTargets(), InitializeVariables(), DefineDefaultTarget(), @@ -22,6 +21,7 @@ func DefaultMutatorsForTarget(env string) []bundle.Mutator { return append( DefaultMutators(), SelectTarget(env), + RewriteSyncPaths(), MergeJobClusters(), MergeJobTasks(), MergePipelineClusters(), diff --git a/bundle/tests/loader.go b/bundle/tests/loader.go index 5aeed0550e..71ac906c88 100644 --- a/bundle/tests/loader.go +++ b/bundle/tests/loader.go @@ -19,16 +19,10 @@ func load(t *testing.T, path string) *bundle.Bundle { } func loadTarget(t *testing.T, path, env string) *bundle.Bundle { - b := load(t, path) - err := bundle.Apply( - context.Background(), b, - bundle.Seq( - mutator.SelectTarget(env), - mutator.MergeJobClusters(), - mutator.MergeJobTasks(), - mutator.MergePipelineClusters(), - ), - ) + ctx := context.Background() + b, err := bundle.Load(ctx, path) + require.NoError(t, err) + err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutatorsForTarget(env)...)) require.NoError(t, err) return b } diff --git a/bundle/tests/override_sync_test.go b/bundle/tests/override_sync_test.go index ad864180c6..64f28e377e 100644 --- a/bundle/tests/override_sync_test.go +++ b/bundle/tests/override_sync_test.go @@ -4,13 +4,12 @@ import ( "path/filepath" "testing" + "github.com/databricks/cli/bundle" "github.com/stretchr/testify/assert" ) func TestOverrideSyncTarget(t *testing.T) { - b := load(t, "./override_sync") - assert.ElementsMatch(t, []string{filepath.FromSlash("src/*")}, b.Config.Sync.Include) - assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) + var b *bundle.Bundle b = loadTarget(t, "./override_sync", "development") assert.ElementsMatch(t, []string{filepath.FromSlash("src/*"), filepath.FromSlash("tests/*")}, b.Config.Sync.Include) @@ -26,9 +25,7 @@ func TestOverrideSyncTarget(t *testing.T) { } func TestOverrideSyncTargetNoRootSync(t *testing.T) { - b := load(t, "./override_sync_no_root") - assert.ElementsMatch(t, []string{}, b.Config.Sync.Include) - assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude) + var b *bundle.Bundle b = loadTarget(t, "./override_sync_no_root", "development") assert.ElementsMatch(t, []string{filepath.FromSlash("tests/*")}, b.Config.Sync.Include) From 5f85d59c366d08d3c8c4b7c8e8b3a629b6c59729 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 6 Feb 2024 20:19:16 +0100 Subject: [PATCH 077/104] Update relative path test to select a target --- 
bundle/tests/relative_path_with_includes_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bundle/tests/relative_path_with_includes_test.go b/bundle/tests/relative_path_with_includes_test.go index 6b82ad8458..1d1f321d4b 100644 --- a/bundle/tests/relative_path_with_includes_test.go +++ b/bundle/tests/relative_path_with_includes_test.go @@ -11,7 +11,7 @@ import ( ) func TestRelativePathsWithIncludes(t *testing.T) { - b := load(t, "./relative_path_with_includes") + b := loadTarget(t, "./relative_path_with_includes", "default") m := mutator.TranslatePaths() err := bundle.Apply(context.Background(), b, m) From 7bf5abf441e8d428206507eeaf66d07054adb5ce Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 7 Feb 2024 09:19:32 +0100 Subject: [PATCH 078/104] Empty struct should yield empty map in `convert.FromTyped` --- libs/dyn/convert/from_typed.go | 5 ----- libs/dyn/convert/from_typed_test.go | 19 +++++++++++++++++++ 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/libs/dyn/convert/from_typed.go b/libs/dyn/convert/from_typed.go index 75f1c7212e..bd6b63670e 100644 --- a/libs/dyn/convert/from_typed.go +++ b/libs/dyn/convert/from_typed.go @@ -84,11 +84,6 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value) (dyn.Value, error) { } } - // If the struct was equal to its zero value, emit a nil. - if len(out) == 0 { - return dyn.NilValue, nil - } - return dyn.NewValue(out, ref.Location()), nil } diff --git a/libs/dyn/convert/from_typed_test.go b/libs/dyn/convert/from_typed_test.go index d7fa60bb3d..5fc2b90f61 100644 --- a/libs/dyn/convert/from_typed_test.go +++ b/libs/dyn/convert/from_typed_test.go @@ -19,6 +19,25 @@ func TestFromTypedStructZeroFields(t *testing.T) { nv, err := FromTyped(src, ref) require.NoError(t, err) + assert.Equal(t, dyn.V(map[string]dyn.Value{}), nv) +} + +func TestFromTypedStructPointerZeroFields(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + Bar string `json:"bar"` + } + + // For an initialized pointer we expect an empty map. + src := &Tmp{} + nv, err := FromTyped(src, dyn.NilValue) + require.NoError(t, err) + assert.Equal(t, dyn.V(map[string]dyn.Value{}), nv) + + // For a nil pointer we expect nil. + src = nil + nv, err = FromTyped(src, dyn.NilValue) + require.NoError(t, err) assert.Equal(t, dyn.NilValue, nv) } From 9b60240e4d3cb8a414cfa849485d5bb3f1fe7f57 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 7 Feb 2024 09:39:15 +0100 Subject: [PATCH 079/104] Zero destination struct in `convert.ToTyped` Not doing this means that the output struct is not a true representation of the `dyn.Value` and unrepresentable state (e.g. unexported fields) can be carried over across `convert.ToTyped` calls. --- libs/dyn/convert/to_typed.go | 4 ++++ libs/dyn/convert/to_typed_test.go | 21 +++++++++++++++++++++ 2 files changed, 25 insertions(+) diff --git a/libs/dyn/convert/to_typed.go b/libs/dyn/convert/to_typed.go index 209de12cbd..715d3f670c 100644 --- a/libs/dyn/convert/to_typed.go +++ b/libs/dyn/convert/to_typed.go @@ -53,6 +53,10 @@ func ToTyped(dst any, src dyn.Value) error { func toTypedStruct(dst reflect.Value, src dyn.Value) error { switch src.Kind() { case dyn.KindMap: + // Zero the destination struct such that fields + // that aren't present in [src] are cleared. 
+ dst.SetZero() + info := getStructInfo(dst.Type()) for k, v := range src.MustMap() { index, ok := info.Fields[k] diff --git a/libs/dyn/convert/to_typed_test.go b/libs/dyn/convert/to_typed_test.go index 3adc94c799..fd399b934e 100644 --- a/libs/dyn/convert/to_typed_test.go +++ b/libs/dyn/convert/to_typed_test.go @@ -59,6 +59,27 @@ func TestToTypedStructOverwrite(t *testing.T) { assert.Equal(t, "baz", out.Bar) } +func TestToTypedStructClearFields(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + Bar string `json:"bar,omitempty"` + } + + // Struct value with non-empty fields. + var out = Tmp{ + Foo: "baz", + Bar: "qux", + } + + // Value is an empty map. + v := dyn.V(map[string]dyn.Value{}) + + // The previously set fields should be cleared. + err := ToTyped(&out, v) + require.NoError(t, err) + assert.Equal(t, Tmp{}, out) +} + func TestToTypedStructAnonymousByValue(t *testing.T) { type Bar struct { Bar string `json:"bar"` From d5c710ffe677793da3d7c0dde4ee32b7a5be719e Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 7 Feb 2024 10:33:27 +0100 Subject: [PATCH 080/104] Fix integration test --- internal/bundle/artifacts_test.go | 29 ++++++++++++++++------------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/internal/bundle/artifacts_test.go b/internal/bundle/artifacts_test.go index 549b393d2b..0f3769ece3 100644 --- a/internal/bundle/artifacts_test.go +++ b/internal/bundle/artifacts_test.go @@ -33,15 +33,6 @@ func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) { whlPath := filepath.Join(dir, "dist", "test.whl") touchEmptyFile(t, whlPath) - artifact := &config.Artifact{ - Type: "whl", - Files: []config.ArtifactFile{ - { - Source: whlPath, - }, - }, - } - wsDir := internal.TemporaryWorkspaceDir(t, w) b := &bundle.Bundle{ @@ -54,7 +45,14 @@ func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) { ArtifactPath: wsDir, }, Artifacts: config.Artifacts{ - "test": artifact, + "test": &config.Artifact{ + Type: "whl", + Files: []config.ArtifactFile{ + { + Source: whlPath, + }, + }, + }, }, Resources: config.Resources{ Jobs: map[string]*resources.Job{ @@ -80,9 +78,14 @@ func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) { require.NoError(t, err) // The remote path attribute on the artifact file should have been set. - require.Regexp(t, regexp.MustCompile(path.Join(regexp.QuoteMeta(wsDir), `.internal/test\.whl`)), artifact.Files[0].RemotePath) + require.Regexp(t, + regexp.MustCompile(path.Join(regexp.QuoteMeta(wsDir), `.internal/test\.whl`)), + b.Config.Artifacts["test"].Files[0].RemotePath, + ) // The task library path should have been updated to the remote path. 
- lib := b.Config.Resources.Jobs["test"].JobSettings.Tasks[0].Libraries[0] - require.Regexp(t, regexp.MustCompile(path.Join("/Workspace", regexp.QuoteMeta(wsDir), `.internal/test\.whl`)), lib.Whl) + require.Regexp(t, + regexp.MustCompile(path.Join("/Workspace", regexp.QuoteMeta(wsDir), `.internal/test\.whl`)), + b.Config.Resources.Jobs["test"].JobSettings.Tasks[0].Libraries[0].Whl, + ) } From 44063612758ed5dfd77eda6873cb0ae1308e9ad9 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 8 Feb 2024 12:17:09 +0100 Subject: [PATCH 081/104] Omitempty for Git fields --- bundle/config/git.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bundle/config/git.go b/bundle/config/git.go index fdb86a9015..f9f2f83e52 100644 --- a/bundle/config/git.go +++ b/bundle/config/git.go @@ -9,8 +9,8 @@ type Git struct { BundleRootPath string `json:"bundle_root_path,omitempty" bundle:"readonly"` // Inferred is set to true if the Git details were inferred and weren't set explicitly - Inferred bool `json:"inferred" bundle:"readonly"` + Inferred bool `json:"inferred,omitempty" bundle:"readonly"` // The actual branch according to Git (may be different from the configured branch) - ActualBranch string `json:"actual_branch" bundle:"readonly"` + ActualBranch string `json:"actual_branch,omitempty" bundle:"readonly"` } From 061d57bfe48eb9c9608d790dc3d9754b61e2da0e Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 8 Feb 2024 12:17:36 +0100 Subject: [PATCH 082/104] Use allowlist for Git-related fields to include in metadata --- bundle/deploy/metadata/compute.go | 10 ++++++++-- bundle/deploy/metadata/compute_test.go | 4 ++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/bundle/deploy/metadata/compute.go b/bundle/deploy/metadata/compute.go index 460a81c938..c612d33a3f 100644 --- a/bundle/deploy/metadata/compute.go +++ b/bundle/deploy/metadata/compute.go @@ -6,6 +6,7 @@ import ( "path/filepath" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/metadata" ) @@ -25,8 +26,13 @@ func (m *compute) Apply(_ context.Context, b *bundle.Bundle) error { Config: metadata.Config{}, } - // Set git details in metadata - b.Metadata.Config.Bundle.Git = b.Config.Bundle.Git + // Set Git details in metadata + b.Metadata.Config.Bundle.Git = config.Git{ + Branch: b.Config.Bundle.Git.Branch, + OriginURL: b.Config.Bundle.Git.OriginURL, + Commit: b.Config.Bundle.Git.Commit, + BundleRootPath: b.Config.Bundle.Git.BundleRootPath, + } // Set job config paths in metadata jobsMetadata := make(map[string]*metadata.Job) diff --git a/bundle/deploy/metadata/compute_test.go b/bundle/deploy/metadata/compute_test.go index c3cb029d15..a1a97aab33 100644 --- a/bundle/deploy/metadata/compute_test.go +++ b/bundle/deploy/metadata/compute_test.go @@ -30,6 +30,7 @@ func TestComputeMetadataMutator(t *testing.T) { OriginURL: "www.host.com", Commit: "abcd", BundleRootPath: "a/b/c/d", + Inferred: true, }, }, Resources: config.Resources{ @@ -76,6 +77,9 @@ func TestComputeMetadataMutator(t *testing.T) { OriginURL: "www.host.com", Commit: "abcd", BundleRootPath: "a/b/c/d", + + // Test that this field doesn't carry over into the metadata. 
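+				// compute.go copies only Branch, OriginURL, Commit and BundleRootPath into
+				// the metadata, so the Inferred flag set on the input above stays at its
+				// zero value here.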
+ Inferred: false, }, }, Resources: metadata.Resources{ From 245b4816c76e78d3bb7dc154253eb87ea38ac8a7 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 8 Feb 2024 13:43:42 +0100 Subject: [PATCH 083/104] Clean up --- bundle/config/root.go | 58 ++++++++++++++++++------------------------- 1 file changed, 24 insertions(+), 34 deletions(-) diff --git a/bundle/config/root.go b/bundle/config/root.go index bcb269ad7d..e43c63d29a 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -78,7 +78,11 @@ func Load(path string) (*Root, error) { return nil, err } - var r Root + r := Root{ + Path: filepath.Dir(path), + } + + // Load configuration tree from YAML. v, err := yamlloader.LoadYAML(path, bytes.NewBuffer(raw)) if err != nil { return nil, fmt.Errorf("failed to load %s: %w", path, err) @@ -93,22 +97,17 @@ func Load(path string) (*Root, error) { // Normalize dynamic configuration tree according to configuration type. v, diags := convert.Normalize(r, v) + // Keep track of diagnostics (warnings and errors in the schema). + // We delay acting on diagnostics until we have loaded all + // configuration files and merged them together. + r.diags = diags + // Convert normalized configuration tree to typed configuration. - err = convert.ToTyped(&r, v) + err = r.updateWithDynamicValue(v) if err != nil { return nil, fmt.Errorf("failed to load %s: %w", path, err) } - r.diags = diags - - // Store dynamic configuration for later reference (e.g. location information on all nodes). - r.value = v - - r.Path = filepath.Dir(path) - // r.SetConfigFilePath(path) - - r.ConfigureConfigFilePath() - _, err = r.Resources.VerifyUniqueResourceIdentifiers() return &r, err } @@ -128,27 +127,28 @@ func (r *Root) initializeDynamicValue() { r.value = nv } -func (r *Root) toTyped(v dyn.Value) error { +func (r *Root) updateWithDynamicValue(nv dyn.Value) error { // Hack: restore state; it may be cleared by [ToTyped] if // the configuration equals nil (happens in tests). - value := r.value diags := r.diags depth := r.depth path := r.Path defer func() { - r.value = value r.diags = diags r.depth = depth r.Path = path }() // Convert normalized configuration tree to typed configuration. - err := convert.ToTyped(r, v) + err := convert.ToTyped(r, nv) if err != nil { return err } + // Assign the normalized configuration tree. + r.value = nv + // Assign config file paths after converting to typed configuration. r.ConfigureConfigFilePath() return nil @@ -160,11 +160,10 @@ func (r *Root) Mutate(fn func(dyn.Value) (dyn.Value, error)) error { if err != nil { return err } - err = r.toTyped(nv) + err = r.updateWithDynamicValue(nv) if err != nil { return err } - r.value = nv return nil } @@ -177,7 +176,7 @@ func (r *Root) MarkMutatorEntry() { if r.depth == 1 { // Always run ToTyped upon entering a mutator. // Convert normalized configuration tree to typed configuration. 
- err := r.toTyped(r.value) + err := r.updateWithDynamicValue(r.value) if err != nil { panic(err) } @@ -188,10 +187,8 @@ func (r *Root) MarkMutatorEntry() { panic(err) } - r.value = nv - // Re-run ToTyped to ensure that no state is piggybacked - err = r.toTyped(r.value) + err = r.updateWithDynamicValue(nv) if err != nil { panic(err) } @@ -209,10 +206,8 @@ func (r *Root) MarkMutatorExit() { panic(err) } - r.value = nv - // Re-run ToTyped to ensure that no state is piggybacked - err = r.toTyped(r.value) + err = r.updateWithDynamicValue(nv) if err != nil { panic(err) } @@ -256,8 +251,8 @@ func (r *Root) InitializeVariables(vars []string) error { } func (r *Root) Merge(other *Root) error { - // // Merge diagnostics. - // r.diags = append(r.diags, other.diags...) + // Merge diagnostics. + r.diags = append(r.diags, other.diags...) // Check for safe merge, protecting against duplicate resource identifiers err := r.Resources.VerifySafeMerge(&other.Resources) @@ -296,10 +291,7 @@ func mergeField(rv, ov dyn.Value, name string) (dyn.Value, error) { } func (r *Root) MergeTargetOverrides(name string) error { - // var tmp dyn.Value - var root = r.value - var err error - + root := r.value target, err := dyn.GetByPath(root, dyn.NewPath(dyn.Key("targets"), dyn.Key(name))) if err != nil { return err @@ -386,10 +378,8 @@ func (r *Root) MergeTargetOverrides(name string) error { } } - r.value = root - // Convert normalized configuration tree to typed configuration. - err = r.toTyped(r.value) + err = r.updateWithDynamicValue(root) if err != nil { panic(err) } From 7154f36cf9561db0dc4f9ff1903ccecfef45c777 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 8 Feb 2024 13:59:13 +0100 Subject: [PATCH 084/104] Eliminate panics --- bundle/config/root.go | 50 +++++++++++++++++++++++++++---------------- bundle/mutator.go | 16 +++++++++++--- 2 files changed, 45 insertions(+), 21 deletions(-) diff --git a/bundle/config/root.go b/bundle/config/root.go index e43c63d29a..c9e26e6e3d 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -2,6 +2,7 @@ package config import ( "bytes" + "context" "fmt" "os" "path/filepath" @@ -14,6 +15,7 @@ import ( "github.com/databricks/cli/libs/dyn/convert" "github.com/databricks/cli/libs/dyn/merge" "github.com/databricks/cli/libs/dyn/yamlloader" + "github.com/databricks/cli/libs/log" "github.com/databricks/databricks-sdk-go/service/jobs" ) @@ -112,19 +114,20 @@ func Load(path string) (*Root, error) { return &r, err } -func (r *Root) initializeDynamicValue() { +func (r *Root) initializeDynamicValue() error { // Many test cases initialize a config as a Go struct literal. // The value will be invalid and we need to populate it from the typed configuration. 
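	// For example (illustrative): a test that builds bundle.Bundle{Config: config.Root{...}}
	// directly never goes through Load, so r.value is still the zero dyn.Value at this point.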
if r.value.IsValid() { - return + return nil } nv, err := convert.FromTyped(r, dyn.NilValue) if err != nil { - panic(err) + return err } r.value = nv + return nil } func (r *Root) updateWithDynamicValue(nv dyn.Value) error { @@ -155,7 +158,10 @@ func (r *Root) updateWithDynamicValue(nv dyn.Value) error { } func (r *Root) Mutate(fn func(dyn.Value) (dyn.Value, error)) error { - r.initializeDynamicValue() + err := r.initializeDynamicValue() + if err != nil { + return err + } nv, err := fn(r.value) if err != nil { return err @@ -167,8 +173,12 @@ func (r *Root) Mutate(fn func(dyn.Value) (dyn.Value, error)) error { return nil } -func (r *Root) MarkMutatorEntry() { - r.initializeDynamicValue() +func (r *Root) MarkMutatorEntry(ctx context.Context) error { + err := r.initializeDynamicValue() + if err != nil { + return err + } + r.depth++ // If we are entering a mutator at depth 1, we need to convert @@ -178,24 +188,29 @@ func (r *Root) MarkMutatorEntry() { // Convert normalized configuration tree to typed configuration. err := r.updateWithDynamicValue(r.value) if err != nil { - panic(err) + log.Warnf(ctx, "unable to convert dynamic configuration to typed configuration: %v", err) + return err } } else { nv, err := convert.FromTyped(r, r.value) if err != nil { - panic(err) + log.Warnf(ctx, "unable to convert typed configuration to dynamic configuration: %v", err) + return err } // Re-run ToTyped to ensure that no state is piggybacked err = r.updateWithDynamicValue(nv) if err != nil { - panic(err) + log.Warnf(ctx, "unable to convert dynamic configuration to typed configuration: %v", err) + return err } } + + return nil } -func (r *Root) MarkMutatorExit() { +func (r *Root) MarkMutatorExit(ctx context.Context) error { r.depth-- // If we are exiting a mutator at depth 0, we need to convert @@ -203,15 +218,19 @@ func (r *Root) MarkMutatorExit() { if r.depth == 0 { nv, err := convert.FromTyped(r, r.value) if err != nil { - panic(err) + log.Warnf(ctx, "unable to convert typed configuration to dynamic configuration: %v", err) + return err } // Re-run ToTyped to ensure that no state is piggybacked err = r.updateWithDynamicValue(nv) if err != nil { - panic(err) + log.Warnf(ctx, "unable to convert dynamic configuration to typed configuration: %v", err) + return err } } + + return nil } func (r *Root) Diagnostics() diag.Diagnostics { @@ -379,12 +398,7 @@ func (r *Root) MergeTargetOverrides(name string) error { } // Convert normalized configuration tree to typed configuration. 
- err = r.updateWithDynamicValue(root) - if err != nil { - panic(err) - } - - return nil + return r.updateWithDynamicValue(root) } // rewrite performs lightweight rewriting of the configuration diff --git a/bundle/mutator.go b/bundle/mutator.go index 8b22502d56..bd1615fd76 100644 --- a/bundle/mutator.go +++ b/bundle/mutator.go @@ -21,10 +21,20 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) error { log.Debugf(ctx, "Apply") - b.Config.MarkMutatorEntry() - defer b.Config.MarkMutatorExit() + err := b.Config.MarkMutatorEntry(ctx) + if err != nil { + log.Errorf(ctx, "entry error: %s", err) + return err + } + + defer func() { + err := b.Config.MarkMutatorExit(ctx) + if err != nil { + log.Errorf(ctx, "exit error: %s", err) + } + }() - err := m.Apply(ctx, b) + err = m.Apply(ctx, b) if err != nil { log.Errorf(ctx, "Error: %s", err) return err From 1c1fc5764053d2adf3020a1ffd1220bf18f2c836 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 8 Feb 2024 15:16:50 +0100 Subject: [PATCH 085/104] Hide DynamicValue from output --- bundle/config/paths/paths.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bundle/config/paths/paths.go b/bundle/config/paths/paths.go index 307f61eff0..68c32a48c0 100644 --- a/bundle/config/paths/paths.go +++ b/bundle/config/paths/paths.go @@ -14,7 +14,7 @@ type Paths struct { // DynamicValue stores the [dyn.Value] of the containing struct. // This assumes that this struct is always embedded. - DynamicValue dyn.Value + DynamicValue dyn.Value `json:"-"` } func (p *Paths) ConfigureConfigFilePath() { From 36646e816b703837881f5b3180defcef35b0325f Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 8 Feb 2024 15:55:26 +0100 Subject: [PATCH 086/104] Remove imdario/mergo from NOTICE --- NOTICE | 5 ----- 1 file changed, 5 deletions(-) diff --git a/NOTICE b/NOTICE index 7c7eb7db46..550d2c5553 100644 --- a/NOTICE +++ b/NOTICE @@ -61,11 +61,6 @@ google/uuid - https://github.com/google/uuid Copyright (c) 2009,2014 Google Inc. All rights reserved. License - https://github.com/google/uuid/blob/master/LICENSE -imdario/mergo - https://github.com/imdario/mergo -Copyright (c) 2013 Dario Castañé. All rights reserved. -Copyright (c) 2012 The Go Authors. All rights reserved. -License - https://github.com/imdario/mergo/blob/master/LICENSE - manifoldco/promptui - https://github.com/manifoldco/promptui Copyright (c) 2017, Arigato Machine Inc. All rights reserved. License - https://github.com/manifoldco/promptui/blob/master/LICENSE.md From d1f558fe48548d6392752c3c8f82b277d5eec6b6 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 13 Feb 2024 14:40:33 +0100 Subject: [PATCH 087/104] Retain partially valid structs in `convert.Normalize` Before this change, any error in a subtree would cause the entire subtree to be dropped from the output. This is not ideal when debugging, so instead we drop only the values that cannot be normalized. Note that this doesn't change behavior if the caller is properly checking the returned diagnostics for errors. 
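For example, a sketch mirroring the tests added below: given

    type Nested struct {
        F1 int `json:"f1"`
        F2 int `json:"f2"`
    }
    type Tmp struct {
        Foo Nested `json:"foo"`
    }

normalizing {"foo": {"f1": "not an int", "f2": 1}} previously dropped the entire
"foo" subtree because of the error on "f1"; it now yields {"foo": {"f2": 1}} while
still reporting that error in the returned diagnostics.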
--- libs/dyn/convert/from_typed.go | 22 +++--- libs/dyn/convert/normalize.go | 28 ++++---- libs/dyn/convert/normalize_test.go | 106 +++++++++++++++++++++++++++++ 3 files changed, 131 insertions(+), 25 deletions(-) diff --git a/libs/dyn/convert/from_typed.go b/libs/dyn/convert/from_typed.go index bd6b63670e..6dcca2b858 100644 --- a/libs/dyn/convert/from_typed.go +++ b/libs/dyn/convert/from_typed.go @@ -59,7 +59,7 @@ func fromTyped(src any, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, return fromTypedFloat(srcv, ref, options...) } - return dyn.NilValue, fmt.Errorf("unsupported type: %s", srcv.Kind()) + return dyn.InvalidValue, fmt.Errorf("unsupported type: %s", srcv.Kind()) } func fromTypedStruct(src reflect.Value, ref dyn.Value) (dyn.Value, error) { @@ -67,7 +67,7 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value) (dyn.Value, error) { switch ref.Kind() { case dyn.KindMap, dyn.KindNil: default: - return dyn.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) + return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind()) } out := make(map[string]dyn.Value) @@ -76,7 +76,7 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value) (dyn.Value, error) { // Convert the field taking into account the reference value (may be equal to config.NilValue). nv, err := fromTyped(v.Interface(), ref.Get(k)) if err != nil { - return dyn.Value{}, err + return dyn.InvalidValue, err } if nv != dyn.NilValue { @@ -92,7 +92,7 @@ func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) { switch ref.Kind() { case dyn.KindMap, dyn.KindNil: default: - return dyn.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) + return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind()) } // Return nil if the map is nil. @@ -109,7 +109,7 @@ func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) { // Convert entry taking into account the reference value (may be equal to dyn.NilValue). nv, err := fromTyped(v.Interface(), ref.Get(k), includeZeroValues) if err != nil { - return dyn.Value{}, err + return dyn.InvalidValue, err } // Every entry is represented, even if it is a nil. @@ -125,7 +125,7 @@ func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) { switch ref.Kind() { case dyn.KindSequence, dyn.KindNil: default: - return dyn.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) + return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind()) } // Return nil if the slice is nil. @@ -140,7 +140,7 @@ func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) { // Convert entry taking into account the reference value (may be equal to dyn.NilValue). 
nv, err := fromTyped(v.Interface(), ref.Index(i), includeZeroValues) if err != nil { - return dyn.Value{}, err + return dyn.InvalidValue, err } out[i] = nv @@ -167,7 +167,7 @@ func fromTypedString(src reflect.Value, ref dyn.Value, options ...fromTypedOptio return dyn.V(src.String()), nil } - return dyn.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) + return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind()) } func fromTypedBool(src reflect.Value, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, error) { @@ -187,7 +187,7 @@ func fromTypedBool(src reflect.Value, ref dyn.Value, options ...fromTypedOptions return dyn.V(src.Bool()), nil } - return dyn.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) + return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind()) } func fromTypedInt(src reflect.Value, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, error) { @@ -207,7 +207,7 @@ func fromTypedInt(src reflect.Value, ref dyn.Value, options ...fromTypedOptions) return dyn.V(src.Int()), nil } - return dyn.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) + return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind()) } func fromTypedFloat(src reflect.Value, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, error) { @@ -227,5 +227,5 @@ func fromTypedFloat(src reflect.Value, ref dyn.Value, options ...fromTypedOption return dyn.V(src.Float()), nil } - return dyn.Value{}, fmt.Errorf("unhandled type: %s", ref.Kind()) + return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind()) } diff --git a/libs/dyn/convert/normalize.go b/libs/dyn/convert/normalize.go index 7a652cbc7c..5595aae1e9 100644 --- a/libs/dyn/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ -35,7 +35,7 @@ func normalizeType(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics return normalizeFloat(typ, src) } - return dyn.NilValue, diag.Errorf("unsupported type: %s", typ.Kind()) + return dyn.InvalidValue, diag.Errorf("unsupported type: %s", typ.Kind()) } func typeMismatch(expected dyn.Kind, src dyn.Value) diag.Diagnostic { @@ -69,7 +69,7 @@ func normalizeStruct(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnosti if err != nil { diags = diags.Extend(err) // Skip the element if it cannot be normalized. - if err.HasError() { + if !v.IsValid() { continue } } @@ -82,7 +82,7 @@ func normalizeStruct(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnosti return src, diags } - return dyn.NilValue, diags.Append(typeMismatch(dyn.KindMap, src)) + return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindMap, src)) } func normalizeMap(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { @@ -97,7 +97,7 @@ func normalizeMap(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) if err != nil { diags = diags.Extend(err) // Skip the element if it cannot be normalized. - if err.HasError() { + if !v.IsValid() { continue } } @@ -110,7 +110,7 @@ func normalizeMap(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) return src, diags } - return dyn.NilValue, diags.Append(typeMismatch(dyn.KindMap, src)) + return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindMap, src)) } func normalizeSlice(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { @@ -125,7 +125,7 @@ func normalizeSlice(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostic if err != nil { diags = diags.Extend(err) // Skip the element if it cannot be normalized. 
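			// A partially normalized element is still a valid value and is kept;
			// only elements for which no value could be produced at all are dropped.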
- if err.HasError() { + if !v.IsValid() { continue } } @@ -138,7 +138,7 @@ func normalizeSlice(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostic return src, diags } - return dyn.NilValue, diags.Append(typeMismatch(dyn.KindSequence, src)) + return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindSequence, src)) } func normalizeString(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { @@ -155,7 +155,7 @@ func normalizeString(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnosti case dyn.KindFloat: out = strconv.FormatFloat(src.MustFloat(), 'f', -1, 64) default: - return dyn.NilValue, diags.Append(typeMismatch(dyn.KindString, src)) + return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindString, src)) } return dyn.NewValue(out, src.Location()), diags @@ -177,10 +177,10 @@ func normalizeBool(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics out = false default: // Cannot interpret as a boolean. - return dyn.NilValue, diags.Append(typeMismatch(dyn.KindBool, src)) + return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindBool, src)) } default: - return dyn.NilValue, diags.Append(typeMismatch(dyn.KindBool, src)) + return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindBool, src)) } return dyn.NewValue(out, src.Location()), diags @@ -197,14 +197,14 @@ func normalizeInt(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) var err error out, err = strconv.ParseInt(src.MustString(), 10, 64) if err != nil { - return dyn.NilValue, diags.Append(diag.Diagnostic{ + return dyn.InvalidValue, diags.Append(diag.Diagnostic{ Severity: diag.Error, Summary: fmt.Sprintf("cannot parse %q as an integer", src.MustString()), Location: src.Location(), }) } default: - return dyn.NilValue, diags.Append(typeMismatch(dyn.KindInt, src)) + return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindInt, src)) } return dyn.NewValue(out, src.Location()), diags @@ -221,14 +221,14 @@ func normalizeFloat(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostic var err error out, err = strconv.ParseFloat(src.MustString(), 64) if err != nil { - return dyn.NilValue, diags.Append(diag.Diagnostic{ + return dyn.InvalidValue, diags.Append(diag.Diagnostic{ Severity: diag.Error, Summary: fmt.Sprintf("cannot parse %q as a floating point number", src.MustString()), Location: src.Location(), }) } default: - return dyn.NilValue, diags.Append(typeMismatch(dyn.KindFloat, src)) + return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindFloat, src)) } return dyn.NewValue(out, src.Location()), diags diff --git a/libs/dyn/convert/normalize_test.go b/libs/dyn/convert/normalize_test.go index 13b1ed52f5..7028161556 100644 --- a/libs/dyn/convert/normalize_test.go +++ b/libs/dyn/convert/normalize_test.go @@ -104,6 +104,44 @@ func TestNormalizeStructError(t *testing.T) { }, err[0]) } +func TestNormalizeStructNestedError(t *testing.T) { + type Nested struct { + F1 int `json:"f1"` + F2 int `json:"f2"` + } + type Tmp struct { + Foo Nested `json:"foo"` + Bar Nested `json:"bar"` + } + + var typ Tmp + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V(map[string]dyn.Value{ + "f1": dyn.V("error"), + "f2": dyn.V(1), + }), + "bar": dyn.V(map[string]dyn.Value{ + "f1": dyn.V(1), + "f2": dyn.V("error"), + }), + }) + vout, err := Normalize(typ, vin) + assert.Len(t, err, 2) + + // Verify that valid fields are retained. 
+ assert.Equal(t, + dyn.V(map[string]dyn.Value{ + "foo": dyn.V(map[string]dyn.Value{ + "f2": dyn.V(int64(1)), + }), + "bar": dyn.V(map[string]dyn.Value{ + "f1": dyn.V(int64(1)), + }), + }), + vout, + ) +} + func TestNormalizeMap(t *testing.T) { var typ map[string]string vin := dyn.V(map[string]dyn.Value{ @@ -157,6 +195,40 @@ func TestNormalizeMapError(t *testing.T) { }, err[0]) } +func TestNormalizeMapNestedError(t *testing.T) { + type Nested struct { + F1 int `json:"f1"` + F2 int `json:"f2"` + } + + var typ map[string]Nested + vin := dyn.V(map[string]dyn.Value{ + "foo": dyn.V(map[string]dyn.Value{ + "f1": dyn.V("error"), + "f2": dyn.V(1), + }), + "bar": dyn.V(map[string]dyn.Value{ + "f1": dyn.V(1), + "f2": dyn.V("error"), + }), + }) + vout, err := Normalize(typ, vin) + assert.Len(t, err, 2) + + // Verify that valid fields are retained. + assert.Equal(t, + dyn.V(map[string]dyn.Value{ + "foo": dyn.V(map[string]dyn.Value{ + "f2": dyn.V(int64(1)), + }), + "bar": dyn.V(map[string]dyn.Value{ + "f1": dyn.V(int64(1)), + }), + }), + vout, + ) +} + func TestNormalizeSlice(t *testing.T) { var typ []string vin := dyn.V([]dyn.Value{ @@ -209,6 +281,40 @@ func TestNormalizeSliceError(t *testing.T) { }, err[0]) } +func TestNormalizeSliceNestedError(t *testing.T) { + type Nested struct { + F1 int `json:"f1"` + F2 int `json:"f2"` + } + + var typ []Nested + vin := dyn.V([]dyn.Value{ + dyn.V(map[string]dyn.Value{ + "f1": dyn.V("error"), + "f2": dyn.V(1), + }), + dyn.V(map[string]dyn.Value{ + "f1": dyn.V(1), + "f2": dyn.V("error"), + }), + }) + vout, err := Normalize(typ, vin) + assert.Len(t, err, 2) + + // Verify that valid fields are retained. + assert.Equal(t, + dyn.V([]dyn.Value{ + dyn.V(map[string]dyn.Value{ + "f2": dyn.V(int64(1)), + }), + dyn.V(map[string]dyn.Value{ + "f1": dyn.V(int64(1)), + }), + }), + vout, + ) +} + func TestNormalizeString(t *testing.T) { var typ string vin := dyn.V("string") From 727beca545edee81fff59064dbf7b0cf66c7ee85 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 13 Feb 2024 15:11:54 +0100 Subject: [PATCH 088/104] Add debug logging of configuration diagnostics on validate --- cmd/bundle/validate.go | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/cmd/bundle/validate.go b/cmd/bundle/validate.go index b98cbd52dc..87cdc82d0a 100644 --- a/cmd/bundle/validate.go +++ b/cmd/bundle/validate.go @@ -5,6 +5,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/phases" + "github.com/databricks/cli/libs/log" "github.com/spf13/cobra" ) @@ -24,6 +25,12 @@ func newValidateCommand() *cobra.Command { return err } + // Until we change up the output of this command to be a text representation, + // we'll just output all diagnostics as debug logs. 
+ for _, diag := range b.Config.Diagnostics() { + log.Debugf(cmd.Context(), "[%s]: %s", diag.Location, diag.Summary) + } + buf, err := json.MarshalIndent(b.Config, "", " ") if err != nil { return err From 938423fa6b5c861e0292a7c7c34bc612c8fc8c62 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 13 Feb 2024 15:56:59 +0100 Subject: [PATCH 089/104] Wrap config mutations in bundle.ApplyFunc calls --- cmd/bundle/deploy.go | 23 +++++++++++++++-------- cmd/bundle/destroy.go | 13 +++++++++---- cmd/bundle/variables.go | 6 +++++- 3 files changed, 29 insertions(+), 13 deletions(-) diff --git a/cmd/bundle/deploy.go b/cmd/bundle/deploy.go index a83c268bc9..f739d99bb7 100644 --- a/cmd/bundle/deploy.go +++ b/cmd/bundle/deploy.go @@ -1,6 +1,8 @@ package bundle import ( + "context" + "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/phases" "github.com/spf13/cobra" @@ -23,17 +25,22 @@ func newDeployCommand() *cobra.Command { cmd.Flags().StringVarP(&computeID, "compute-id", "c", "", "Override compute in the deployment with the given compute ID.") cmd.RunE = func(cmd *cobra.Command, args []string) error { - b := bundle.Get(cmd.Context()) + ctx := cmd.Context() + b := bundle.Get(ctx) + + bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) error { + b.Config.Bundle.Force = force + b.Config.Bundle.Deployment.Lock.Force = forceLock + b.Config.Bundle.ComputeID = computeID - b.Config.Bundle.Force = force - b.Config.Bundle.Deployment.Lock.Force = forceLock - b.Config.Bundle.ComputeID = computeID + if cmd.Flag("fail-on-active-runs").Changed { + b.Config.Bundle.Deployment.FailOnActiveRuns = failOnActiveRuns + } - if cmd.Flag("fail-on-active-runs").Changed { - b.Config.Bundle.Deployment.FailOnActiveRuns = failOnActiveRuns - } + return nil + }) - return bundle.Apply(cmd.Context(), b, bundle.Seq( + return bundle.Apply(ctx, b, bundle.Seq( phases.Initialize(), phases.Build(), phases.Deploy(), diff --git a/cmd/bundle/destroy.go b/cmd/bundle/destroy.go index dad199bf91..0dbdffdba7 100644 --- a/cmd/bundle/destroy.go +++ b/cmd/bundle/destroy.go @@ -1,6 +1,7 @@ package bundle import ( + "context" "fmt" "os" @@ -29,11 +30,15 @@ func newDestroyCommand() *cobra.Command { ctx := cmd.Context() b := bundle.Get(ctx) - // If `--force-lock` is specified, force acquisition of the deployment lock. - b.Config.Bundle.Deployment.Lock.Force = forceDestroy + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error { + // If `--force-lock` is specified, force acquisition of the deployment lock. 
+ b.Config.Bundle.Deployment.Lock.Force = forceDestroy - // If `--auto-approve`` is specified, we skip confirmation checks - b.AutoApprove = autoApprove + // If `--auto-approve`` is specified, we skip confirmation checks + b.AutoApprove = autoApprove + + return nil + }) // we require auto-approve for non tty terminals since interactive consent // is not possible diff --git a/cmd/bundle/variables.go b/cmd/bundle/variables.go index c3e4af6453..9a975e9ead 100644 --- a/cmd/bundle/variables.go +++ b/cmd/bundle/variables.go @@ -1,6 +1,8 @@ package bundle import ( + "context" + "github.com/databricks/cli/bundle" "github.com/databricks/cli/cmd/root" "github.com/spf13/cobra" @@ -20,7 +22,9 @@ func ConfigureBundleWithVariables(cmd *cobra.Command, args []string) error { // Initialize variables by assigning them values passed as command line flags b := bundle.Get(cmd.Context()) - return b.Config.InitializeVariables(variables) + return bundle.ApplyFunc(cmd.Context(), b, func(ctx context.Context, b *bundle.Bundle) error { + return b.Config.InitializeVariables(variables) + }) } func initVariableFlag(cmd *cobra.Command) { From 8738ad6d0bc6af10df5568da4f54952adbd03653 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 13 Feb 2024 15:57:18 +0100 Subject: [PATCH 090/104] Rename --- bundle/config/mutator/mutator.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bundle/config/mutator/mutator.go b/bundle/config/mutator/mutator.go index 39fe8d1bb2..408efe122d 100644 --- a/bundle/config/mutator/mutator.go +++ b/bundle/config/mutator/mutator.go @@ -17,10 +17,10 @@ func DefaultMutators() []bundle.Mutator { } } -func DefaultMutatorsForTarget(env string) []bundle.Mutator { +func DefaultMutatorsForTarget(target string) []bundle.Mutator { return append( DefaultMutators(), - SelectTarget(env), + SelectTarget(target), RewriteSyncPaths(), MergeJobClusters(), MergeJobTasks(), From 1a45a8c111477154753069646d1b7b1efd91479d Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 13 Feb 2024 16:03:46 +0100 Subject: [PATCH 091/104] Rename --- bundle/config/root.go | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/bundle/config/root.go b/bundle/config/root.go index c9e26e6e3d..5a1cc65cab 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -91,7 +91,7 @@ func Load(path string) (*Root, error) { } // Rewrite configuration tree where necessary. - v, err = rewrite(v) + v, err = rewriteShorthandVariableDefaults(v) if err != nil { return nil, fmt.Errorf("failed to rewrite %s: %w", path, err) } @@ -401,10 +401,9 @@ func (r *Root) MergeTargetOverrides(name string) error { return r.updateWithDynamicValue(root) } -// rewrite performs lightweight rewriting of the configuration -// tree where we allow users to write a shorthand and must -// rewrite to the full form. -func rewrite(v dyn.Value) (dyn.Value, error) { +// rewriteShorthandVariableDefaults performs lightweight rewriting of the configuration +// tree where we allow users to write a shorthand and must rewrite to the full form. 
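+// For example (illustrative): a variable declared with the shorthand `foo: "bar"` is
+// rewritten to the full form `foo: {default: "bar"}` before normalization runs.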
+func rewriteShorthandVariableDefaults(v dyn.Value) (dyn.Value, error) { if v.Kind() != dyn.KindMap { return v, nil } From be2119574b090cdd6cf41e7f9d7cae27bb4f8d9f Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 13 Feb 2024 16:15:04 +0100 Subject: [PATCH 092/104] Drop 'environments' key after rewrite --- bundle/config/mutator/environments_compat.go | 17 ++++- .../mutator/environments_compat_test.go | 64 +++++++++++++++++++ 2 files changed, 80 insertions(+), 1 deletion(-) diff --git a/bundle/config/mutator/environments_compat.go b/bundle/config/mutator/environments_compat.go index 72794a1d0c..0eb996b14c 100644 --- a/bundle/config/mutator/environments_compat.go +++ b/bundle/config/mutator/environments_compat.go @@ -40,7 +40,22 @@ func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) err // Rewrite "environments" to "targets". if environments != dyn.NilValue && targets == dyn.NilValue { - return dyn.Set(v, "targets", environments) + nv, err := dyn.Set(v, "targets", environments) + if err != nil { + return dyn.NilValue, err + } + // Drop the "environments" key. + return dyn.Walk(nv, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + switch len(p) { + case 0: + return v, nil + case 1: + if p[0] == dyn.Key("environments") { + return v, dyn.ErrDrop + } + } + return v, dyn.ErrSkip + }) } return v, nil diff --git a/bundle/config/mutator/environments_compat_test.go b/bundle/config/mutator/environments_compat_test.go index 020332f271..f7045b3df2 100644 --- a/bundle/config/mutator/environments_compat_test.go +++ b/bundle/config/mutator/environments_compat_test.go @@ -1 +1,65 @@ package mutator_test + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/stretchr/testify/assert" +) + +func TestEnvironmentsToTargetsWithBothDefined(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Environments: map[string]*config.Target{ + "name": { + Mode: config.Development, + }, + }, + Targets: map[string]*config.Target{ + "name": { + Mode: config.Development, + }, + }, + }, + } + + err := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets()) + assert.ErrorContains(t, err, `both 'environments' and 'targets' are specified;`) +} + +func TestEnvironmentsToTargetsWithEnvironmentsDefined(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Environments: map[string]*config.Target{ + "name": { + Mode: config.Development, + }, + }, + }, + } + + err := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets()) + assert.NoError(t, err) + assert.Len(t, b.Config.Environments, 0) + assert.Len(t, b.Config.Targets, 1) +} + +func TestEnvironmentsToTargetsWithTargetsDefined(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Targets: map[string]*config.Target{ + "name": { + Mode: config.Development, + }, + }, + }, + } + + err := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets()) + assert.NoError(t, err) + assert.Len(t, b.Config.Environments, 0) + assert.Len(t, b.Config.Targets, 1) +} From 0c47f4d29822dc1922411eb7c0aedc7fa7439f94 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 13 Feb 2024 16:21:21 +0100 Subject: [PATCH 093/104] Move merge mutators to initialize phase --- bundle/config/mutator/mutator.go | 4 ---- bundle/phases/initialize.go | 4 ++++ 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bundle/config/mutator/mutator.go b/bundle/config/mutator/mutator.go 
index 408efe122d..c45a6c15e1 100644 --- a/bundle/config/mutator/mutator.go +++ b/bundle/config/mutator/mutator.go @@ -21,9 +21,5 @@ func DefaultMutatorsForTarget(target string) []bundle.Mutator { return append( DefaultMutators(), SelectTarget(target), - RewriteSyncPaths(), - MergeJobClusters(), - MergeJobTasks(), - MergePipelineClusters(), ) } diff --git a/bundle/phases/initialize.go b/bundle/phases/initialize.go index 34d560b407..2c401c6b2a 100644 --- a/bundle/phases/initialize.go +++ b/bundle/phases/initialize.go @@ -18,6 +18,10 @@ func Initialize() bundle.Mutator { return newPhase( "initialize", []bundle.Mutator{ + mutator.RewriteSyncPaths(), + mutator.MergeJobClusters(), + mutator.MergeJobTasks(), + mutator.MergePipelineClusters(), mutator.InitializeWorkspaceClient(), mutator.PopulateCurrentUser(), mutator.DefineDefaultWorkspaceRoot(), From f8e701d1c7322b1dfff687cf27b90da9ad503a6e Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 13 Feb 2024 16:22:49 +0100 Subject: [PATCH 094/104] Include merge mutators in test helper --- bundle/tests/loader.go | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/bundle/tests/loader.go b/bundle/tests/loader.go index 71ac906c88..3a28d822a2 100644 --- a/bundle/tests/loader.go +++ b/bundle/tests/loader.go @@ -24,5 +24,12 @@ func loadTarget(t *testing.T, path, env string) *bundle.Bundle { require.NoError(t, err) err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutatorsForTarget(env)...)) require.NoError(t, err) + err = bundle.Apply(ctx, b, bundle.Seq( + mutator.RewriteSyncPaths(), + mutator.MergeJobClusters(), + mutator.MergeJobTasks(), + mutator.MergePipelineClusters(), + )) + require.NoError(t, err) return b } From b72c75d4dd7878824adc0719c0d87175359932ce Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 14 Feb 2024 09:56:48 +0100 Subject: [PATCH 095/104] Rename --- bundle/config/root.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/bundle/config/root.go b/bundle/config/root.go index 5a1cc65cab..c8b6c59998 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -91,7 +91,7 @@ func Load(path string) (*Root, error) { } // Rewrite configuration tree where necessary. - v, err = rewriteShorthandVariableDefaults(v) + v, err = rewriteShorthands(v) if err != nil { return nil, fmt.Errorf("failed to rewrite %s: %w", path, err) } @@ -401,9 +401,9 @@ func (r *Root) MergeTargetOverrides(name string) error { return r.updateWithDynamicValue(root) } -// rewriteShorthandVariableDefaults performs lightweight rewriting of the configuration +// rewriteShorthands performs lightweight rewriting of the configuration // tree where we allow users to write a shorthand and must rewrite to the full form. 
-func rewriteShorthandVariableDefaults(v dyn.Value) (dyn.Value, error) { +func rewriteShorthands(v dyn.Value) (dyn.Value, error) { if v.Kind() != dyn.KindMap { return v, nil } From 0d59f4f0db1132d108d2e2179c3e8acbca8acbc8 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 14 Feb 2024 10:43:36 +0100 Subject: [PATCH 096/104] Wrap profile setter in bundle.ApplyFunc call --- cmd/root/bundle.go | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/cmd/root/bundle.go b/cmd/root/bundle.go index 3f9d90db6b..edfc1f4315 100644 --- a/cmd/root/bundle.go +++ b/cmd/root/bundle.go @@ -64,7 +64,13 @@ func loadBundle(cmd *cobra.Command, args []string, load func(ctx context.Context profile := getProfile(cmd) if profile != "" { - b.Config.Workspace.Profile = profile + err = bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error { + b.Config.Workspace.Profile = profile + return nil + }) + if err != nil { + return nil, err + } } err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) From 2fd8f7e26699d1e0b51ff79e4630779ef97399ea Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 15 Feb 2024 15:42:24 +0100 Subject: [PATCH 097/104] Add option to include fields present in the type but not in the value This feature supports variable lookups in a `dyn.Value` that are present in the type but haven't been initialized with a value. For example: `${bundle.git.origin_url}` is present in the `dyn.Value` only if it was assigned a value. If it wasn't assigned a value it should resolve to the empty string. This normalization option, when set, ensures that all fields that are represented in the specified type are present in the return value. --- libs/dyn/convert/normalize.go | 100 +++++++++++++++++++++++------ libs/dyn/convert/normalize_test.go | 47 ++++++++++++++ 2 files changed, 127 insertions(+), 20 deletions(-) diff --git a/libs/dyn/convert/normalize.go b/libs/dyn/convert/normalize.go index 5595aae1e9..989ef8dd8a 100644 --- a/libs/dyn/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ -9,30 +9,51 @@ import ( "github.com/databricks/cli/libs/dyn" ) -func Normalize(dst any, src dyn.Value) (dyn.Value, diag.Diagnostics) { - return normalizeType(reflect.TypeOf(dst), src) +// NormalizeOption is the type for options that can be passed to Normalize. +type NormalizeOption int + +const ( + // IncludeMissingFields causes the normalization to include fields that defined on the given + // type but are missing in the source value. They are included with their zero values. 
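+	// For example (sketch): normalizing an empty map against a struct that declares a
+	// single string field "name" yields {"name": ""} with this option set, and {} without it.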
+ IncludeMissingFields NormalizeOption = iota +) + +type normalizeOptions struct { + includeMissingFields bool +} + +func Normalize(dst any, src dyn.Value, opts ...NormalizeOption) (dyn.Value, diag.Diagnostics) { + var n normalizeOptions + for _, opt := range opts { + switch opt { + case IncludeMissingFields: + n.includeMissingFields = true + } + } + + return n.normalizeType(reflect.TypeOf(dst), src) } -func normalizeType(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { +func (n normalizeOptions) normalizeType(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { for typ.Kind() == reflect.Pointer { typ = typ.Elem() } switch typ.Kind() { case reflect.Struct: - return normalizeStruct(typ, src) + return n.normalizeStruct(typ, src) case reflect.Map: - return normalizeMap(typ, src) + return n.normalizeMap(typ, src) case reflect.Slice: - return normalizeSlice(typ, src) + return n.normalizeSlice(typ, src) case reflect.String: - return normalizeString(typ, src) + return n.normalizeString(typ, src) case reflect.Bool: - return normalizeBool(typ, src) + return n.normalizeBool(typ, src) case reflect.Int, reflect.Int32, reflect.Int64: - return normalizeInt(typ, src) + return n.normalizeInt(typ, src) case reflect.Float32, reflect.Float64: - return normalizeFloat(typ, src) + return n.normalizeFloat(typ, src) } return dyn.InvalidValue, diag.Errorf("unsupported type: %s", typ.Kind()) @@ -46,7 +67,7 @@ func typeMismatch(expected dyn.Kind, src dyn.Value) diag.Diagnostic { } } -func normalizeStruct(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { +func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics switch src.Kind() { @@ -65,7 +86,7 @@ func normalizeStruct(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnosti } // Normalize the value according to the field type. - v, err := normalizeType(typ.FieldByIndex(index).Type, v) + v, err := n.normalizeType(typ.FieldByIndex(index).Type, v) if err != nil { diags = diags.Extend(err) // Skip the element if it cannot be normalized. @@ -77,6 +98,45 @@ func normalizeStruct(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnosti out[k] = v } + // Return the normalized value if missing fields are not included. + if !n.includeMissingFields { + return dyn.NewValue(out, src.Location()), diags + } + + // Populate missing fields with their zero values. + for k, index := range info.Fields { + if _, ok := out[k]; ok { + continue + } + + // Optionally dereference pointers to get the underlying field type. 
+ ftyp := typ.FieldByIndex(index).Type + for ftyp.Kind() == reflect.Pointer { + ftyp = ftyp.Elem() + } + + var v dyn.Value + switch ftyp.Kind() { + case reflect.Struct, reflect.Map: + v, _ = n.normalizeType(ftyp, dyn.V(map[string]dyn.Value{})) + case reflect.Slice: + v, _ = n.normalizeType(ftyp, dyn.V([]dyn.Value{})) + case reflect.String: + v, _ = n.normalizeType(ftyp, dyn.V("")) + case reflect.Bool: + v, _ = n.normalizeType(ftyp, dyn.V(false)) + case reflect.Int, reflect.Int32, reflect.Int64: + v, _ = n.normalizeType(ftyp, dyn.V(int64(0))) + case reflect.Float32, reflect.Float64: + v, _ = n.normalizeType(ftyp, dyn.V(float64(0))) + default: + panic(fmt.Sprintf("unsupported type: %s", ftyp.Kind())) + } + if v.IsValid() { + out[k] = v + } + } + return dyn.NewValue(out, src.Location()), diags case dyn.KindNil: return src, diags @@ -85,7 +145,7 @@ func normalizeStruct(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnosti return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindMap, src)) } -func normalizeMap(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { +func (n normalizeOptions) normalizeMap(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics switch src.Kind() { @@ -93,7 +153,7 @@ func normalizeMap(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) out := make(map[string]dyn.Value) for k, v := range src.MustMap() { // Normalize the value according to the map element type. - v, err := normalizeType(typ.Elem(), v) + v, err := n.normalizeType(typ.Elem(), v) if err != nil { diags = diags.Extend(err) // Skip the element if it cannot be normalized. @@ -113,7 +173,7 @@ func normalizeMap(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindMap, src)) } -func normalizeSlice(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { +func (n normalizeOptions) normalizeSlice(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics switch src.Kind() { @@ -121,7 +181,7 @@ func normalizeSlice(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostic out := make([]dyn.Value, 0, len(src.MustSequence())) for _, v := range src.MustSequence() { // Normalize the value according to the slice element type. - v, err := normalizeType(typ.Elem(), v) + v, err := n.normalizeType(typ.Elem(), v) if err != nil { diags = diags.Extend(err) // Skip the element if it cannot be normalized. 
@@ -141,7 +201,7 @@ func normalizeSlice(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostic return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindSequence, src)) } -func normalizeString(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { +func (n normalizeOptions) normalizeString(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics var out string @@ -161,7 +221,7 @@ func normalizeString(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnosti return dyn.NewValue(out, src.Location()), diags } -func normalizeBool(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { +func (n normalizeOptions) normalizeBool(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics var out bool @@ -186,7 +246,7 @@ func normalizeBool(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics return dyn.NewValue(out, src.Location()), diags } -func normalizeInt(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { +func (n normalizeOptions) normalizeInt(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics var out int64 @@ -210,7 +270,7 @@ func normalizeInt(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) return dyn.NewValue(out, src.Location()), diags } -func normalizeFloat(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { +func (n normalizeOptions) normalizeFloat(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics var out float64 diff --git a/libs/dyn/convert/normalize_test.go b/libs/dyn/convert/normalize_test.go index 7028161556..d59cc3b351 100644 --- a/libs/dyn/convert/normalize_test.go +++ b/libs/dyn/convert/normalize_test.go @@ -142,6 +142,53 @@ func TestNormalizeStructNestedError(t *testing.T) { ) } +func TestNormalizeStructIncludeMissingFields(t *testing.T) { + type Nested struct { + String string `json:"string"` + } + + type Tmp struct { + // Verify that fields that are already set in the dynamic value are not overridden. + Existing string `json:"existing"` + + // Verify that structs are recursively normalized if not set. + Nested Nested `json:"nested"` + Ptr *Nested `json:"ptr"` + + // Verify that containers are also zero-initialized if not set. + Map map[string]string `json:"map"` + Slice []string `json:"slice"` + + // Verify that primitive types are zero-initialized if not set. 
+ String string `json:"string"` + Bool bool `json:"bool"` + Int int `json:"int"` + Float float64 `json:"float"` + } + + var typ Tmp + vin := dyn.V(map[string]dyn.Value{ + "existing": dyn.V("already set"), + }) + vout, err := Normalize(typ, vin, IncludeMissingFields) + assert.Empty(t, err) + assert.Equal(t, dyn.V(map[string]dyn.Value{ + "existing": dyn.V("already set"), + "nested": dyn.V(map[string]dyn.Value{ + "string": dyn.V(""), + }), + "ptr": dyn.V(map[string]dyn.Value{ + "string": dyn.V(""), + }), + "map": dyn.V(map[string]dyn.Value{}), + "slice": dyn.V([]dyn.Value{}), + "string": dyn.V(""), + "bool": dyn.V(false), + "int": dyn.V(int64(0)), + "float": dyn.V(float64(0)), + }), vout) +} + func TestNormalizeMap(t *testing.T) { var typ map[string]string vin := dyn.V(map[string]dyn.Value{ From 8774a9684f44086cf60816cee6baed1db9aad319 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 15 Feb 2024 16:09:10 +0100 Subject: [PATCH 098/104] Skip instead of panic --- libs/dyn/convert/normalize.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/libs/dyn/convert/normalize.go b/libs/dyn/convert/normalize.go index 989ef8dd8a..26df09578d 100644 --- a/libs/dyn/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ -125,12 +125,14 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value) (dyn. v, _ = n.normalizeType(ftyp, dyn.V("")) case reflect.Bool: v, _ = n.normalizeType(ftyp, dyn.V(false)) - case reflect.Int, reflect.Int32, reflect.Int64: + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: v, _ = n.normalizeType(ftyp, dyn.V(int64(0))) case reflect.Float32, reflect.Float64: v, _ = n.normalizeType(ftyp, dyn.V(float64(0))) default: - panic(fmt.Sprintf("unsupported type: %s", ftyp.Kind())) + // Skip fields for which we do not have a natural [dyn.Value] equivalent. + // For example, we don't handle reflect.Complex* and reflect.Uint* types. + continue } if v.IsValid() { out[k] = v From c2cd815dbdce0b5edabc7b96a1588403db86fc7f Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 15 Feb 2024 16:32:54 +0100 Subject: [PATCH 099/104] Enable variable lookup of fields present in the type but not in the dyn.Value --- .../mutator/resolve_variable_references.go | 22 ++++++++++++- .../resolve_variable_references_test.go | 33 +++++++++++++++++++ 2 files changed, 54 insertions(+), 1 deletion(-) diff --git a/bundle/config/mutator/resolve_variable_references.go b/bundle/config/mutator/resolve_variable_references.go index 3abe6c3303..a9ff70f68f 100644 --- a/bundle/config/mutator/resolve_variable_references.go +++ b/bundle/config/mutator/resolve_variable_references.go @@ -5,6 +5,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/convert" "github.com/databricks/cli/libs/dyn/dynvar" ) @@ -35,7 +36,26 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle) varPath := dyn.NewPath(dyn.Key("var")) return b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) { - lookup := dynvar.DefaultLookup(root) + // Synthesize a copy of the root that has all fields that are present in the type + // but not set in the dynamic value set to their corresponding empty value. + // This enables users to interpolate variable references to fields that haven't + // been explicitly set in the dynamic value. + // + // For example: ${bundle.git.origin_url} should resolve to an empty string + // if a bundle isn't located in a Git repository (yet). 
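+	// Concretely, the normalized copy below carries a zero value (such as an empty
+	// string) for the fields declared on the configuration type, even when the
+	// dynamic value never set them.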
+ // + // This is consistent with the behavior prior to using the dynamic value system. + // + // We can ignore the diagnostics return valuebecause we know that the dynamic value + // has already been normalized when it was first loaded from the configuration file. + // + normalized, _ := convert.Normalize(b.Config, root, convert.IncludeMissingFields) + lookup := func(path dyn.Path) (dyn.Value, error) { + // Future opportunity: if we lookup this path in both the given root + // and the synthesized root, we know if it was explicitly set or implied to be empty. + // Then we can emit a warning if it was not explicitly set. + return dyn.GetByPath(normalized, path) + } // Resolve variable references in all values. return dynvar.Resolve(root, func(path dyn.Path) (dyn.Value, error) { diff --git a/bundle/config/mutator/resolve_variable_references_test.go b/bundle/config/mutator/resolve_variable_references_test.go index 9ca951a201..1f253d41c6 100644 --- a/bundle/config/mutator/resolve_variable_references_test.go +++ b/bundle/config/mutator/resolve_variable_references_test.go @@ -6,7 +6,9 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/config/variable" + "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/stretchr/testify/require" ) @@ -62,3 +64,34 @@ func TestResolveVariableReferencesToBundleVariables(t *testing.T) { require.NoError(t, err) require.Equal(t, "example/bar", b.Config.Workspace.RootPath) } + +func TestResolveVariableReferencesToEmptyFields(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Name: "example", + Git: config.Git{ + Branch: "", + }, + }, + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "job1": { + JobSettings: &jobs.JobSettings{ + Tags: map[string]string{ + "git_branch": "${bundle.git.branch}", + }, + }, + }, + }, + }, + }, + } + + // Apply for the bundle prefix. + err := bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle")) + require.NoError(t, err) + + // The job settings should have been interpolated to an empty string. 
+ require.Equal(t, "", b.Config.Resources.Jobs["job1"].JobSettings.Tags["git_branch"]) +} From 3521d02b21edc7decbb17bb47752aa2057133f15 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 15 Feb 2024 16:47:20 +0100 Subject: [PATCH 100/104] Adapt updated origin/main --- cmd/bundle/deployment/bind.go | 16 ++++++++++------ cmd/bundle/deployment/unbind.go | 14 ++++++++++---- cmd/bundle/utils/utils.go | 6 +++++- 3 files changed, 25 insertions(+), 11 deletions(-) diff --git a/cmd/bundle/deployment/bind.go b/cmd/bundle/deployment/bind.go index 5412928070..1287eb0449 100644 --- a/cmd/bundle/deployment/bind.go +++ b/cmd/bundle/deployment/bind.go @@ -1,6 +1,7 @@ package deployment import ( + "context" "fmt" "github.com/databricks/cli/bundle" @@ -25,15 +26,14 @@ func newBindCommand() *cobra.Command { cmd.Flags().BoolVar(&forceLock, "force-lock", false, "Force acquisition of deployment lock.") cmd.RunE = func(cmd *cobra.Command, args []string) error { - b := bundle.Get(cmd.Context()) - r := b.Config.Resources - resource, err := r.FindResourceByConfigKey(args[0]) + ctx := cmd.Context() + b := bundle.Get(ctx) + resource, err := b.Config.Resources.FindResourceByConfigKey(args[0]) if err != nil { return err } w := b.WorkspaceClient() - ctx := cmd.Context() exists, err := resource.Exists(ctx, w, args[1]) if err != nil { return fmt.Errorf("failed to fetch the resource, err: %w", err) @@ -43,8 +43,12 @@ func newBindCommand() *cobra.Command { return fmt.Errorf("%s with an id '%s' is not found", resource.TerraformResourceName(), args[1]) } - b.Config.Bundle.Deployment.Lock.Force = forceLock - err = bundle.Apply(cmd.Context(), b, bundle.Seq( + bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) error { + b.Config.Bundle.Deployment.Lock.Force = forceLock + return nil + }) + + err = bundle.Apply(ctx, b, bundle.Seq( phases.Initialize(), phases.Bind(&terraform.BindOptions{ AutoApprove: autoApprove, diff --git a/cmd/bundle/deployment/unbind.go b/cmd/bundle/deployment/unbind.go index e7de8a3d47..9f0e4f7c79 100644 --- a/cmd/bundle/deployment/unbind.go +++ b/cmd/bundle/deployment/unbind.go @@ -1,6 +1,8 @@ package deployment import ( + "context" + "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/cmd/bundle/utils" @@ -19,14 +21,18 @@ func newUnbindCommand() *cobra.Command { cmd.Flags().BoolVar(&forceLock, "force-lock", false, "Force acquisition of deployment lock.") cmd.RunE = func(cmd *cobra.Command, args []string) error { - b := bundle.Get(cmd.Context()) - r := b.Config.Resources - resource, err := r.FindResourceByConfigKey(args[0]) + ctx := cmd.Context() + b := bundle.Get(ctx) + resource, err := b.Config.Resources.FindResourceByConfigKey(args[0]) if err != nil { return err } - b.Config.Bundle.Deployment.Lock.Force = forceLock + bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) error { + b.Config.Bundle.Deployment.Lock.Force = forceLock + return nil + }) + return bundle.Apply(cmd.Context(), b, bundle.Seq( phases.Initialize(), phases.Unbind(resource.TerraformResourceName(), args[0]), diff --git a/cmd/bundle/utils/utils.go b/cmd/bundle/utils/utils.go index f68ab06b01..e900f47c38 100644 --- a/cmd/bundle/utils/utils.go +++ b/cmd/bundle/utils/utils.go @@ -1,6 +1,8 @@ package utils import ( + "context" + "github.com/databricks/cli/bundle" "github.com/databricks/cli/cmd/root" "github.com/spf13/cobra" @@ -20,5 +22,7 @@ func ConfigureBundleWithVariables(cmd *cobra.Command, args []string) error { // Initialize variables by assigning them values 
passed as command line flags b := bundle.Get(cmd.Context()) - return b.Config.InitializeVariables(variables) + return bundle.ApplyFunc(cmd.Context(), b, func(ctx context.Context, b *bundle.Bundle) error { + return b.Config.InitializeVariables(variables) + }) } From dcc499037cf301bb2ff8bc803b264778afb74329 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 15 Feb 2024 17:28:04 +0100 Subject: [PATCH 101/104] Avoid infinite recursion when normalizing a recursive type --- libs/dyn/convert/normalize.go | 41 +++++++++++++++++------------- libs/dyn/convert/normalize_test.go | 31 ++++++++++++++++++++++ 2 files changed, 55 insertions(+), 17 deletions(-) diff --git a/libs/dyn/convert/normalize.go b/libs/dyn/convert/normalize.go index 26df09578d..e0dfbda23d 100644 --- a/libs/dyn/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ -3,6 +3,7 @@ package convert import ( "fmt" "reflect" + "slices" "strconv" "github.com/databricks/cli/libs/diag" @@ -31,21 +32,21 @@ func Normalize(dst any, src dyn.Value, opts ...NormalizeOption) (dyn.Value, diag } } - return n.normalizeType(reflect.TypeOf(dst), src) + return n.normalizeType(reflect.TypeOf(dst), src, []reflect.Type{}) } -func (n normalizeOptions) normalizeType(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { +func (n normalizeOptions) normalizeType(typ reflect.Type, src dyn.Value, seen []reflect.Type) (dyn.Value, diag.Diagnostics) { for typ.Kind() == reflect.Pointer { typ = typ.Elem() } switch typ.Kind() { case reflect.Struct: - return n.normalizeStruct(typ, src) + return n.normalizeStruct(typ, src, append(seen, typ)) case reflect.Map: - return n.normalizeMap(typ, src) + return n.normalizeMap(typ, src, append(seen, typ)) case reflect.Slice: - return n.normalizeSlice(typ, src) + return n.normalizeSlice(typ, src, append(seen, typ)) case reflect.String: return n.normalizeString(typ, src) case reflect.Bool: @@ -67,7 +68,7 @@ func typeMismatch(expected dyn.Kind, src dyn.Value) diag.Diagnostic { } } -func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { +func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen []reflect.Type) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics switch src.Kind() { @@ -86,7 +87,7 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value) (dyn. } // Normalize the value according to the field type. - v, err := n.normalizeType(typ.FieldByIndex(index).Type, v) + v, err := n.normalizeType(typ.FieldByIndex(index).Type, v, seen) if err != nil { diags = diags.Extend(err) // Skip the element if it cannot be normalized. @@ -115,20 +116,26 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value) (dyn. ftyp = ftyp.Elem() } + // Skip field if we have already seen its type to avoid infinite recursion + // when filling in the zero value of a recursive type. 
+ if slices.Contains(seen, ftyp) { + continue + } + var v dyn.Value switch ftyp.Kind() { case reflect.Struct, reflect.Map: - v, _ = n.normalizeType(ftyp, dyn.V(map[string]dyn.Value{})) + v, _ = n.normalizeType(ftyp, dyn.V(map[string]dyn.Value{}), seen) case reflect.Slice: - v, _ = n.normalizeType(ftyp, dyn.V([]dyn.Value{})) + v, _ = n.normalizeType(ftyp, dyn.V([]dyn.Value{}), seen) case reflect.String: - v, _ = n.normalizeType(ftyp, dyn.V("")) + v, _ = n.normalizeType(ftyp, dyn.V(""), seen) case reflect.Bool: - v, _ = n.normalizeType(ftyp, dyn.V(false)) + v, _ = n.normalizeType(ftyp, dyn.V(false), seen) case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - v, _ = n.normalizeType(ftyp, dyn.V(int64(0))) + v, _ = n.normalizeType(ftyp, dyn.V(int64(0)), seen) case reflect.Float32, reflect.Float64: - v, _ = n.normalizeType(ftyp, dyn.V(float64(0))) + v, _ = n.normalizeType(ftyp, dyn.V(float64(0)), seen) default: // Skip fields for which we do not have a natural [dyn.Value] equivalent. // For example, we don't handle reflect.Complex* and reflect.Uint* types. @@ -147,7 +154,7 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value) (dyn. return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindMap, src)) } -func (n normalizeOptions) normalizeMap(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { +func (n normalizeOptions) normalizeMap(typ reflect.Type, src dyn.Value, seen []reflect.Type) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics switch src.Kind() { @@ -155,7 +162,7 @@ func (n normalizeOptions) normalizeMap(typ reflect.Type, src dyn.Value) (dyn.Val out := make(map[string]dyn.Value) for k, v := range src.MustMap() { // Normalize the value according to the map element type. - v, err := n.normalizeType(typ.Elem(), v) + v, err := n.normalizeType(typ.Elem(), v, seen) if err != nil { diags = diags.Extend(err) // Skip the element if it cannot be normalized. @@ -175,7 +182,7 @@ func (n normalizeOptions) normalizeMap(typ reflect.Type, src dyn.Value) (dyn.Val return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindMap, src)) } -func (n normalizeOptions) normalizeSlice(typ reflect.Type, src dyn.Value) (dyn.Value, diag.Diagnostics) { +func (n normalizeOptions) normalizeSlice(typ reflect.Type, src dyn.Value, seen []reflect.Type) (dyn.Value, diag.Diagnostics) { var diags diag.Diagnostics switch src.Kind() { @@ -183,7 +190,7 @@ func (n normalizeOptions) normalizeSlice(typ reflect.Type, src dyn.Value) (dyn.V out := make([]dyn.Value, 0, len(src.MustSequence())) for _, v := range src.MustSequence() { // Normalize the value according to the slice element type. - v, err := n.normalizeType(typ.Elem(), v) + v, err := n.normalizeType(typ.Elem(), v, seen) if err != nil { diags = diags.Extend(err) // Skip the element if it cannot be normalized. diff --git a/libs/dyn/convert/normalize_test.go b/libs/dyn/convert/normalize_test.go index d59cc3b351..82abc82600 100644 --- a/libs/dyn/convert/normalize_test.go +++ b/libs/dyn/convert/normalize_test.go @@ -189,6 +189,37 @@ func TestNormalizeStructIncludeMissingFields(t *testing.T) { }), vout) } +func TestNormalizeStructIncludeMissingFieldsOnRecursiveType(t *testing.T) { + type Tmp struct { + // Verify that structs are recursively normalized if not set. + Ptr *Tmp `json:"ptr"` + + // Verify that primitive types are zero-initialized if not set. 
+ String string `json:"string"` + } + + var typ Tmp + vin := dyn.V(map[string]dyn.Value{ + "ptr": dyn.V(map[string]dyn.Value{ + "ptr": dyn.V(map[string]dyn.Value{ + "string": dyn.V("already set"), + }), + }), + }) + vout, err := Normalize(typ, vin, IncludeMissingFields) + assert.Empty(t, err) + assert.Equal(t, dyn.V(map[string]dyn.Value{ + "ptr": dyn.V(map[string]dyn.Value{ + "ptr": dyn.V(map[string]dyn.Value{ + // Note: the ptr field is not zero-initialized because that would recurse. + "string": dyn.V("already set"), + }), + "string": dyn.V(""), + }), + "string": dyn.V(""), + }), vout) +} + func TestNormalizeMap(t *testing.T) { var typ map[string]string vin := dyn.V(map[string]dyn.Value{ From 9d1713893e8203e55176af594502a8cb67b523cf Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 16 Feb 2024 14:53:41 +0100 Subject: [PATCH 102/104] Add coverage in Terraform interpolation test --- bundle/deploy/terraform/interpolate_test.go | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/bundle/deploy/terraform/interpolate_test.go b/bundle/deploy/terraform/interpolate_test.go index 909549e503..be905ad772 100644 --- a/bundle/deploy/terraform/interpolate_test.go +++ b/bundle/deploy/terraform/interpolate_test.go @@ -8,6 +8,7 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/databricks/databricks-sdk-go/service/ml" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -30,6 +31,23 @@ func TestInterpolate(t *testing.T) { "other_model_serving": "${resources.model_serving_endpoints.other_model_serving.id}", "other_registered_model": "${resources.registered_models.other_registered_model.id}", }, + Tasks: []jobs.Task{ + { + TaskKey: "my_task", + NotebookTask: &jobs.NotebookTask{ + BaseParameters: map[string]string{ + "model_name": "${resources.models.my_model.name}", + }, + }, + }, + }, + }, + }, + }, + Models: map[string]*resources.MlflowModel{ + "my_model": { + Model: &ml.Model{ + Name: "my_model", }, }, }, @@ -47,6 +65,9 @@ func TestInterpolate(t *testing.T) { assert.Equal(t, "${databricks_mlflow_experiment.other_experiment.id}", j.Tags["other_experiment"]) assert.Equal(t, "${databricks_model_serving.other_model_serving.id}", j.Tags["other_model_serving"]) assert.Equal(t, "${databricks_registered_model.other_registered_model.id}", j.Tags["other_registered_model"]) + + m := b.Config.Resources.Models["my_model"] + assert.Equal(t, "my_model", m.Model.Name) } func TestInterpolateUnknownResourceType(t *testing.T) { From 184907c4e6f7a57121201a046fa42b41d8a02dd3 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 16 Feb 2024 14:54:10 +0100 Subject: [PATCH 103/104] Fix issue where interpolating a new ref would rewrite unrelated fields --- libs/dyn/dynvar/resolve.go | 5 ++--- libs/dyn/dynvar/resolve_test.go | 19 +++++++++++++++++++ 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/libs/dyn/dynvar/resolve.go b/libs/dyn/dynvar/resolve.go index b5417cac20..f09a971249 100644 --- a/libs/dyn/dynvar/resolve.go +++ b/libs/dyn/dynvar/resolve.go @@ -208,9 +208,8 @@ func (r *resolver) resolveKey(key string, seen []string) (dyn.Value, error) { return dyn.InvalidValue, err } - // If the returned value is a valid variable reference, resolve it. - ref, ok := newRef(v) - if ok { + // If the returned value is a ref we already know about, resolve it. 
+ if ref, ok := r.refs[key]; ok { v, err = r.resolveRef(key, ref, seen) } diff --git a/libs/dyn/dynvar/resolve_test.go b/libs/dyn/dynvar/resolve_test.go index 1234b7cbfc..bf3597d77a 100644 --- a/libs/dyn/dynvar/resolve_test.go +++ b/libs/dyn/dynvar/resolve_test.go @@ -207,3 +207,22 @@ func TestResolveWithSkipEverything(t *testing.T) { assert.Equal(t, "${b} ${a} ${a} ${b}", getByPath(t, out, "f").MustString()) assert.Equal(t, "${d} ${c} ${c} ${d}", getByPath(t, out, "g").MustString()) } + +func TestResolveWithInterpolateNewRef(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${a}"), + }) + + // The call replaces ${a} with ${foobar} and skips everything else. + out, err := dynvar.Resolve(in, func(path dyn.Path) (dyn.Value, error) { + if path.String() == "a" { + return dyn.V("${foobar}"), nil + } + return dyn.InvalidValue, dynvar.ErrSkipResolution + }) + + require.NoError(t, err) + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "${foobar}", getByPath(t, out, "b").MustString()) +} From b740ec707be9833746f08e3837c5a3b4067d88eb Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 16 Feb 2024 14:54:10 +0100 Subject: [PATCH 104/104] Fix issue where interpolating a new ref would rewrite unrelated fields --- libs/dyn/dynvar/resolve.go | 18 +++++---------- libs/dyn/dynvar/resolve_test.go | 40 +++++++++++++++++++++++++++++++++ 2 files changed, 45 insertions(+), 13 deletions(-) diff --git a/libs/dyn/dynvar/resolve.go b/libs/dyn/dynvar/resolve.go index b5417cac20..b8a0aef625 100644 --- a/libs/dyn/dynvar/resolve.go +++ b/libs/dyn/dynvar/resolve.go @@ -105,21 +105,17 @@ func (r *resolver) resolveVariableReferences() (err error) { keys := maps.Keys(r.refs) sort.Strings(keys) for _, key := range keys { - _, err := r.resolveRef(key, r.refs[key], []string{key}) + v, err := r.resolveRef(r.refs[key], []string{key}) if err != nil { return err } + r.resolved[key] = v } return nil } -func (r *resolver) resolveRef(key string, ref ref, seen []string) (dyn.Value, error) { - // Check if we have already resolved this variable reference. - if v, ok := r.resolved[key]; ok { - return v, nil - } - +func (r *resolver) resolveRef(ref ref, seen []string) (dyn.Value, error) { // This is an unresolved variable reference. deps := ref.references() @@ -154,7 +150,6 @@ func (r *resolver) resolveRef(key string, ref ref, seen []string) (dyn.Value, er if ref.isPure() && complete { // If the variable reference is pure, we can substitute it. // This is useful for interpolating values of non-string types. - r.resolved[key] = resolved[0] return resolved[0], nil } @@ -178,10 +173,7 @@ func (r *resolver) resolveRef(key string, ref ref, seen []string) (dyn.Value, er ref.str = strings.Replace(ref.str, ref.matches[j][0], s, 1) } - // Store the interpolated value. - v := dyn.NewValue(ref.str, ref.value.Location()) - r.resolved[key] = v - return v, nil + return dyn.NewValue(ref.str, ref.value.Location()), nil } func (r *resolver) resolveKey(key string, seen []string) (dyn.Value, error) { @@ -211,7 +203,7 @@ func (r *resolver) resolveKey(key string, seen []string) (dyn.Value, error) { // If the returned value is a valid variable reference, resolve it. ref, ok := newRef(v) if ok { - v, err = r.resolveRef(key, ref, seen) + v, err = r.resolveRef(ref, seen) } // Cache the return value and return to the caller. 
diff --git a/libs/dyn/dynvar/resolve_test.go b/libs/dyn/dynvar/resolve_test.go index 1234b7cbfc..304ed9391c 100644 --- a/libs/dyn/dynvar/resolve_test.go +++ b/libs/dyn/dynvar/resolve_test.go @@ -207,3 +207,43 @@ func TestResolveWithSkipEverything(t *testing.T) { assert.Equal(t, "${b} ${a} ${a} ${b}", getByPath(t, out, "f").MustString()) assert.Equal(t, "${d} ${c} ${c} ${d}", getByPath(t, out, "g").MustString()) } + +func TestResolveWithInterpolateNewRef(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${a}"), + }) + + // The call replaces ${a} with ${foobar} and skips everything else. + out, err := dynvar.Resolve(in, func(path dyn.Path) (dyn.Value, error) { + if path.String() == "a" { + return dyn.V("${foobar}"), nil + } + return dyn.InvalidValue, dynvar.ErrSkipResolution + }) + + require.NoError(t, err) + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "${foobar}", getByPath(t, out, "b").MustString()) +} + +func TestResolveWithInterpolateAliasedRef(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "a": dyn.V("a"), + "b": dyn.V("${a}"), + "c": dyn.V("${x}"), + }) + + // The call replaces ${x} with ${b} and skips everything else. + out, err := dynvar.Resolve(in, func(path dyn.Path) (dyn.Value, error) { + if path.String() == "x" { + return dyn.V("${b}"), nil + } + return dyn.GetByPath(in, path) + }) + + require.NoError(t, err) + assert.Equal(t, "a", getByPath(t, out, "a").MustString()) + assert.Equal(t, "a", getByPath(t, out, "b").MustString()) + assert.Equal(t, "a", getByPath(t, out, "c").MustString()) +}
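
For context on the last two patches, here is a minimal, hypothetical sketch of how a caller typically drives dynvar.Resolve with a lookup function, mirroring the tests above. The example values and the main-package wrapper are illustrative assumptions; only dyn.V, dyn.GetByPath, dynvar.Resolve, and dynvar.ErrSkipResolution are taken from the code exercised in these patches.

package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/dynvar"
)

func main() {
	// Input with one literal value and one value containing a variable reference.
	in := dyn.V(map[string]dyn.Value{
		"host": dyn.V("https://example.com"),
		"url":  dyn.V("${host}/api"),
	})

	// The lookup function resolves references against the input itself and
	// skips anything it does not know about.
	out, err := dynvar.Resolve(in, func(path dyn.Path) (dyn.Value, error) {
		if v, e := dyn.GetByPath(in, path); e == nil {
			return v, nil
		}
		return dyn.InvalidValue, dynvar.ErrSkipResolution
	})
	if err != nil {
		panic(err)
	}

	// The non-pure reference is interpolated into the surrounding string.
	fmt.Println(out.MustMap()["url"].MustString()) // https://example.com/api
}

As the tests in patches 103 and 104 show, a lookup function may also return a value that itself contains "${...}" syntax; after these fixes such a value is only resolved further when it refers to a reference already present in the input, so unrelated fields are no longer rewritten.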