-
Notifications
You must be signed in to change notification settings - Fork 52
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Use
dyn.Value
as input to generating Terraform JSON (#1218)
## Changes

This builds on #1098 and uses the `dyn.Value` representation of the bundle configuration to generate the Terraform JSON definition of resources in the bundle. The existing code (in `BundleToTerraform`) was not great, and in an effort to slightly improve this, I added a package `tfdyn` that includes dedicated files for each resource type. Every resource type has its own conversion type that takes the `dyn.Value` of the bundle-side resource and converts it into Terraform resources (e.g. a job and optionally its permissions). Because we now use a `dyn.Value` as input, we can represent and emit zero-values that have so far been omitted. For example, setting `num_workers: 0` in your bundle configuration now propagates all the way to the Terraform JSON definition.

## Tests

* Unit tests for every converter. I reused the test inputs from `convert_test.go`.
* Equivalence tests in every existing test case check that the resulting JSON is identical.
* I manually compared the TF JSON file generated by the CLI from the main branch and from this PR on all of our bundles and bundle examples (internal and external) and found the output doesn't change (with the exception of the odd zero-value being included by the version in this PR).
- Loading branch information
Showing
22 changed files
with
1,291 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,23 @@ | ||
package tfdyn | ||
|
||
import ( | ||
"context" | ||
|
||
"github.com/databricks/cli/bundle/internal/tf/schema" | ||
"github.com/databricks/cli/libs/dyn" | ||
) | ||
|
||
type Converter interface { | ||
Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error | ||
} | ||
|
||
var converters = map[string]Converter{} | ||
|
||
func GetConverter(name string) (Converter, bool) { | ||
c, ok := converters[name] | ||
return c, ok | ||
} | ||
|
||
func registerConverter(name string, c Converter) { | ||
converters[name] = c | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,45 @@ | ||
package tfdyn | ||
|
||
import ( | ||
"context" | ||
"fmt" | ||
|
||
"github.com/databricks/cli/bundle/internal/tf/schema" | ||
"github.com/databricks/cli/libs/dyn" | ||
"github.com/databricks/cli/libs/dyn/convert" | ||
"github.com/databricks/cli/libs/log" | ||
) | ||
|
||
func convertExperimentResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) { | ||
// Normalize the output value to the target schema. | ||
vout, diags := convert.Normalize(schema.ResourceMlflowExperiment{}, vin) | ||
for _, diag := range diags { | ||
log.Debugf(ctx, "experiment normalization diagnostic: %s", diag.Summary) | ||
} | ||
|
||
return vout, nil | ||
} | ||
|
||
type experimentConverter struct{} | ||
|
||
func (experimentConverter) Convert(ctx context.Context, key string, vin dyn.Value, out *schema.Resources) error { | ||
vout, err := convertExperimentResource(ctx, vin) | ||
if err != nil { | ||
return err | ||
} | ||
|
||
// Add the converted resource to the output. | ||
out.MlflowExperiment[key] = vout.AsAny() | ||
|
||
// Configure permissions for this resource. | ||
if permissions := convertPermissionsResource(ctx, vin); permissions != nil { | ||
permissions.ExperimentId = fmt.Sprintf("${databricks_mlflow_experiment.%s.id}", key) | ||
out.Permissions["mlflow_experiment_"+key] = permissions | ||
} | ||
|
||
return nil | ||
} | ||
|
||
// Register the experiment converter under the "experiments" resource
// group so GetConverter("experiments") resolves to it.
func init() {
	registerConverter("experiments", experimentConverter{})
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,52 @@ | ||
package tfdyn | ||
|
||
import ( | ||
"context" | ||
"testing" | ||
|
||
"github.com/databricks/cli/bundle/config/resources" | ||
"github.com/databricks/cli/bundle/internal/tf/schema" | ||
"github.com/databricks/cli/libs/dyn" | ||
"github.com/databricks/cli/libs/dyn/convert" | ||
"github.com/databricks/databricks-sdk-go/service/ml" | ||
"github.com/stretchr/testify/assert" | ||
"github.com/stretchr/testify/require" | ||
) | ||
|
||
// TestConvertExperiment verifies that a bundle-side mlflow experiment,
// including its permissions, converts into the expected Terraform
// resource definitions (the experiment itself plus a permissions
// resource referencing it by Terraform ID).
func TestConvertExperiment(t *testing.T) {
	var src = resources.MlflowExperiment{
		Experiment: &ml.Experiment{
			Name: "name",
		},
		Permissions: []resources.Permission{
			{
				Level:    "CAN_READ",
				UserName: "jane@doe.com",
			},
		},
	}

	// Convert the typed configuration into the dyn.Value representation,
	// which is the input format the converter operates on.
	vin, err := convert.FromTyped(src, dyn.NilValue)
	require.NoError(t, err)

	ctx := context.Background()
	out := schema.NewResources()
	err = experimentConverter{}.Convert(ctx, "my_experiment", vin, out)
	require.NoError(t, err)

	// Assert equality on the experiment
	assert.Equal(t, map[string]any{
		"name": "name",
	}, out.MlflowExperiment["my_experiment"])

	// Assert equality on the permissions
	assert.Equal(t, &schema.ResourcePermissions{
		ExperimentId: "${databricks_mlflow_experiment.my_experiment.id}",
		AccessControl: []schema.ResourcePermissionsAccessControl{
			{
				PermissionLevel: "CAN_READ",
				UserName:        "jane@doe.com",
			},
		},
	}, out.Permissions["mlflow_experiment_my_experiment"])
}
Oops, something went wrong.