-
Notifications
You must be signed in to change notification settings - Fork 386
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Migrate SQL Warehouse to Go SDK #3044
Changes from 37 commits
8ca7989
3ab53a2
97f0f43
a69d5f7
69b892b
426598b
97b0643
a6e5fb5
05184aa
4f62c47
62e5c44
dcc0d59
ff1234d
faeb466
d80936c
6f1218e
8cb7951
045a26f
5079eca
e8ecc58
8c113a5
3f3aca5
0c7d9c6
a45c93b
1687e2f
fc902b3
387ed42
8ecc720
f1dae30
adb8ed1
63ae324
e08fbe8
20c3d14
61ed26e
a33b53d
a3f415d
0a069b7
56e3b46
ce5cec7
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -77,13 +77,44 @@ func MustSchemaPath(s map[string]*schema.Schema, path ...string) *schema.Schema | |
// StructToSchema makes schema from a struct type & applies customizations from callback given | ||
func StructToSchema(v any, customize func(map[string]*schema.Schema) map[string]*schema.Schema) map[string]*schema.Schema { | ||
rv := reflect.ValueOf(v) | ||
scm := typeToSchema(rv, rv.Type(), []string{}) | ||
scm := typeToSchema(rv, []string{}) | ||
if customize != nil { | ||
scm = customize(scm) | ||
} | ||
return scm | ||
} | ||
|
||
// SetSuppressDiff adds diff suppression to a schema. This is necessary for non-computed | ||
// fields for which the platform returns a value, but the user has not configured any value. | ||
// For example: the REST API returns `{"tags": {}}` for a resource with no tags. | ||
func SetSuppressDiff(v *schema.Schema) { | ||
v.DiffSuppressFunc = diffSuppressor(fmt.Sprintf("%v", v.Type.Zero())) | ||
} | ||
|
||
// SetDefault sets the default value for a schema. | ||
func SetDefault(v *schema.Schema, value any) { | ||
v.Default = value | ||
v.Optional = true | ||
v.Required = false | ||
} | ||
|
||
// SetReadOnly sets the schema to be read-only (i.e. computed, non-optional). | ||
// This should be used for fields that are not user-configurable but are returned | ||
// by the platform. | ||
func SetReadOnly(v *schema.Schema) { | ||
v.Optional = false | ||
v.Required = false | ||
v.MaxItems = 0 | ||
v.Computed = true | ||
} | ||
|
||
// SetRequired sets the schema to be required. | ||
func SetRequired(v *schema.Schema) { | ||
v.Optional = false | ||
v.Required = true | ||
v.Computed = false | ||
} | ||
|
||
func isOptional(typeField reflect.StructField) bool { | ||
if strings.Contains(typeField.Tag.Get("json"), "omitempty") { | ||
return true | ||
|
@@ -175,26 +206,52 @@ func diffSuppressor(zero string) func(k, old, new string, d *schema.ResourceData | |
log.Printf("[DEBUG] Suppressing diff for %v: platform=%#v config=%#v", k, old, new) | ||
return true | ||
} | ||
if strings.HasSuffix(k, ".#") && new == "0" && old != "0" { | ||
field := strings.TrimSuffix(k, ".#") | ||
log.Printf("[DEBUG] Suppressing diff for list or set %v: no value configured but platform returned some value (likely {})", field) | ||
return true | ||
} | ||
return false | ||
} | ||
} | ||
|
||
type field struct { | ||
sf reflect.StructField | ||
v reflect.Value | ||
} | ||
|
||
func listAllFields(v reflect.Value) []field { | ||
t := v.Type() | ||
fields := make([]field, 0) | ||
for i := 0; i < v.NumField(); i++ { | ||
f := t.Field(i) | ||
if f.Anonymous { | ||
fields = append(fields, listAllFields(v.Field(i))...) | ||
} else { | ||
fields = append(fields, field{ | ||
sf: f, | ||
v: v.Field(i), | ||
}) | ||
} | ||
} | ||
return fields | ||
} | ||
|
||
// typeToSchema converts struct into terraform schema. `path` is used for block suppressions | ||
// special path element `"0"` is used to denote either arrays or sets of elements | ||
func typeToSchema(v reflect.Value, t reflect.Type, path []string) map[string]*schema.Schema { | ||
func typeToSchema(v reflect.Value, path []string) map[string]*schema.Schema { | ||
scm := map[string]*schema.Schema{} | ||
rk := v.Kind() | ||
if rk == reflect.Ptr { | ||
v = v.Elem() | ||
t = v.Type() | ||
rk = v.Kind() | ||
} | ||
if rk != reflect.Struct { | ||
panic(fmt.Errorf("Schema value of Struct is expected, but got %s: %#v", reflectKind(rk), v)) | ||
} | ||
for i := 0; i < v.NumField(); i++ { | ||
typeField := t.Field(i) | ||
|
||
fields := listAllFields(v) | ||
for _, field := range fields { | ||
typeField := field.sf | ||
tfTag := typeField.Tag.Get("tf") | ||
|
||
fieldName := chooseFieldName(typeField) | ||
|
@@ -260,7 +317,7 @@ func typeToSchema(v reflect.Value, t reflect.Type, path []string) map[string]*sc | |
scm[fieldName].Type = schema.TypeList | ||
elem := typeField.Type.Elem() | ||
sv := reflect.New(elem).Elem() | ||
nestedSchema := typeToSchema(sv, elem, append(path, fieldName, "0")) | ||
nestedSchema := typeToSchema(sv, append(path, fieldName, "0")) | ||
if strings.Contains(tfTag, "suppress_diff") { | ||
blockCount := strings.Join(append(path, fieldName, "#"), ".") | ||
scm[fieldName].DiffSuppressFunc = makeEmptyBlockSuppressFunc(blockCount) | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. With the new suffix checking in There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Maybe we can do this as a separate PR? |
||
|
@@ -279,7 +336,7 @@ func typeToSchema(v reflect.Value, t reflect.Type, path []string) map[string]*sc | |
elem := typeField.Type // changed from ptr | ||
sv := reflect.New(elem) // changed from ptr | ||
|
||
nestedSchema := typeToSchema(sv, elem, append(path, fieldName, "0")) | ||
nestedSchema := typeToSchema(sv, append(path, fieldName, "0")) | ||
if strings.Contains(tfTag, "suppress_diff") { | ||
blockCount := strings.Join(append(path, fieldName, "#"), ".") | ||
scm[fieldName].DiffSuppressFunc = makeEmptyBlockSuppressFunc(blockCount) | ||
|
@@ -310,7 +367,7 @@ func typeToSchema(v reflect.Value, t reflect.Type, path []string) map[string]*sc | |
case reflect.Struct: | ||
sv := reflect.New(elem).Elem() | ||
scm[fieldName].Elem = &schema.Resource{ | ||
Schema: typeToSchema(sv, elem, append(path, fieldName, "0")), | ||
Schema: typeToSchema(sv, append(path, fieldName, "0")), | ||
} | ||
} | ||
default: | ||
|
@@ -341,6 +398,20 @@ func IsRequestEmpty(v any) (bool, error) { | |
return !isNotEmpty, err | ||
} | ||
|
||
// isGoSdk returns true if the struct is from databricks-sdk-go or embeds a struct from databricks-sdk-go. | ||
func isGoSdk(v reflect.Value) bool { | ||
if strings.Contains(v.Type().PkgPath(), "databricks-sdk-go") { | ||
return true | ||
} | ||
for i := 0; i < v.NumField(); i++ { | ||
f := v.Type().Field(i) | ||
if f.Anonymous && isGoSdk(v.Field(i)) { | ||
return true | ||
} | ||
} | ||
return false | ||
} | ||
|
||
func iterFields(rv reflect.Value, path []string, s map[string]*schema.Schema, | ||
cb func(fieldSchema *schema.Schema, path []string, valueField *reflect.Value) error) error { | ||
rk := rv.Kind() | ||
|
@@ -350,9 +421,10 @@ func iterFields(rv reflect.Value, path []string, s map[string]*schema.Schema, | |
if !rv.IsValid() { | ||
return fmt.Errorf("%s: got invalid reflect value %#v", path, rv) | ||
} | ||
isGoSDK := strings.Contains(rv.Type().PkgPath(), "databricks-sdk-go") | ||
for i := 0; i < rv.NumField(); i++ { | ||
typeField := rv.Type().Field(i) | ||
isGoSDK := isGoSdk(rv) | ||
fields := listAllFields(rv) | ||
for _, field := range fields { | ||
typeField := field.sf | ||
fieldName := chooseFieldName(typeField) | ||
if fieldName == "-" { | ||
continue | ||
|
@@ -370,7 +442,7 @@ func iterFields(rv reflect.Value, path []string, s map[string]*schema.Schema, | |
if fieldSchema.Optional && defaultEmpty && !omitEmpty { | ||
return fmt.Errorf("inconsistency: %s is optional, default is empty, but has no omitempty", fieldName) | ||
} | ||
valueField := rv.Field(i) | ||
valueField := field.v | ||
err := cb(fieldSchema, append(path, fieldName), &valueField) | ||
if err != nil { | ||
return fmt.Errorf("%s: %s", fieldName, err) | ||
|
@@ -393,13 +465,15 @@ func collectionToMaps(v any, s *schema.Schema) ([]any, error) { | |
return nil, fmt.Errorf("not resource") | ||
} | ||
var allItems []reflect.Value | ||
if s.MaxItems == 1 { | ||
allItems = append(allItems, reflect.ValueOf(v)) | ||
} else { | ||
vs := reflect.ValueOf(v) | ||
for i := 0; i < vs.Len(); i++ { | ||
allItems = append(allItems, vs.Index(i)) | ||
rv := reflect.ValueOf(v) | ||
rvType := rv.Type().Kind() | ||
isList := rvType == reflect.Array || rvType == reflect.Slice | ||
if isList { | ||
for i := 0; i < rv.Len(); i++ { | ||
allItems = append(allItems, rv.Index(i)) | ||
} | ||
} else { | ||
allItems = append(allItems, rv) | ||
} | ||
for _, v := range allItems { | ||
data := map[string]any{} | ||
|
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. @mgyucht I think that we need to add at least two new test cases:
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -256,9 +256,51 @@ func DataResource(sc any, read func(context.Context, any, *DatabricksClient) err | |
// ... | ||
// }) | ||
func WorkspaceData[T any](read func(context.Context, *T, *databricks.WorkspaceClient) error) *schema.Resource { | ||
return genericDatabricksData(func(c *DatabricksClient) (*databricks.WorkspaceClient, error) { | ||
return c.WorkspaceClient() | ||
}, read) | ||
return genericDatabricksData((*DatabricksClient).WorkspaceClient, func(ctx context.Context, s struct{}, t *T, wc *databricks.WorkspaceClient) error { | ||
return read(ctx, t, wc) | ||
}, false) | ||
} | ||
|
||
// WorkspaceDataWithParams defines a data source that can be used to read data from the workspace API. | ||
// It differs from WorkspaceData in that it separates the definition of the computed fields (the resource type) | ||
// from the definition of the user-supplied parameters. | ||
// | ||
// The first type parameter is the type of the resource. This can be a type directly from the SDK, or a custom | ||
// type defined in the provider that embeds the SDK type. | ||
// | ||
// The second type parameter is the type representing parameters that a user may provide to the data source. These | ||
// are the attributes that the user can specify in the data source configuration, but are not part of the resource | ||
// type. If there are no extra attributes, this should be `struct{}`. If there are any fields with the same JSON | ||
// name as fields in the resource type, these fields will override the values from the resource type. | ||
// | ||
// The single argument is a function that will be called to read the data from the workspace API, returning the | ||
// value of the resource type. The function should return an error if the data cannot be read or the resource cannot | ||
// be found. | ||
// | ||
// Example usage: | ||
// | ||
// type SqlWarehouse struct { ... } | ||
// | ||
// type SqlWarehouseDataParams struct { | ||
// Id string `json:"id" tf:"computed,optional"` | ||
// Name string `json:"name" tf:"computed,optional"` | ||
// } | ||
// | ||
// WorkspaceDataWithParams( | ||
// func(ctx context.Context, data SqlWarehouseDataParams, w *databricks.WorkspaceClient) (*Warehouse, error) { | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
|
||
// // User-provided attributes are present in the `data` parameter. | ||
// // The resource should be returned. | ||
// ... | ||
// }) | ||
func WorkspaceDataWithParams[T, P any](read func(context.Context, P, *databricks.WorkspaceClient) (*T, error)) *schema.Resource { | ||
return genericDatabricksData((*DatabricksClient).WorkspaceClient, func(ctx context.Context, o P, s *T, w *databricks.WorkspaceClient) error { | ||
res, err := read(ctx, o, w) | ||
if err != nil { | ||
return err | ||
} | ||
*s = *res | ||
return nil | ||
}, true) | ||
} | ||
|
||
// AccountData is a generic way to define account data resources in Terraform provider. | ||
|
@@ -273,15 +315,81 @@ func WorkspaceData[T any](read func(context.Context, *T, *databricks.WorkspaceCl | |
// ... | ||
// }) | ||
func AccountData[T any](read func(context.Context, *T, *databricks.AccountClient) error) *schema.Resource { | ||
return genericDatabricksData(func(c *DatabricksClient) (*databricks.AccountClient, error) { | ||
return c.AccountClient() | ||
}, read) | ||
return genericDatabricksData((*DatabricksClient).AccountClient, func(ctx context.Context, s struct{}, t *T, ac *databricks.AccountClient) error { | ||
return read(ctx, t, ac) | ||
}, false) | ||
} | ||
|
||
// genericDatabricksData is generic and common way to define both account and workspace data and calls their respective clients | ||
func genericDatabricksData[T any, C any](getClient func(*DatabricksClient) (C, error), read func(context.Context, *T, C) error) *schema.Resource { | ||
// AccountDataWithParams defines a data source that can be used to read data from the account API. | ||
// It differs from AccountData in that it allows extra attributes to be provided as a separate argument, | ||
// so the original type used to define the resource can also be used to define the data source. | ||
// | ||
// The first type parameter is the type of the resource. This can be a type directly from the SDK, or a custom | ||
// type defined in the provider that embeds the SDK type. | ||
// | ||
// The second type parameter is the type of the extra attributes that should be provided to the data source. These | ||
// are the attributes that the user can specify in the data source configuration, but are not part of the resource | ||
// type. If there are no extra attributes, this should be `struct{}`. If there are any fields with the same JSON | ||
// name as fields in the resource type, these fields will override the values from the resource type. | ||
// | ||
// The single argument is a function that will be called to read the data from the workspace API, returning the | ||
// requested resource. The function should return an error if the data cannot be read or the resource cannot be | ||
// found. | ||
// | ||
// Example usage: | ||
// | ||
// type MwsWorkspace struct { ... } | ||
// | ||
// type MwsWorkspaceDataParams struct { | ||
// Id string `json:"id" tf:"computed,optional"` | ||
// Name string `json:"name" tf:"computed,optional"` | ||
// } | ||
// | ||
// AccountDataWithParams( | ||
// func(ctx context.Context, data MwsWorkspaceDataParams, a *databricks.AccountClient) (*MwsWorkspace, error) { | ||
// // User-provided attributes are present in the `data` parameter. | ||
// // The resource should be returned. | ||
// ... | ||
// }) | ||
func AccountDataWithParams[T, P any](read func(context.Context, P, *databricks.AccountClient) (*T, error)) *schema.Resource { | ||
return genericDatabricksData((*DatabricksClient).AccountClient, func(ctx context.Context, o P, s *T, a *databricks.AccountClient) error { | ||
res, err := read(ctx, o, a) | ||
if err != nil { | ||
return err | ||
} | ||
*s = *res | ||
return nil | ||
}, true) | ||
} | ||
|
||
// genericDatabricksData is generic and common way to define both account and workspace data and calls their respective clients. | ||
// | ||
// If hasOther is true, all of the fields of the resource type T will be marked as computed in the final schema, and the fields | ||
// from the params type P will be overlaid on top of the schema generated by T. Otherwise, the schema generated by | ||
// T will be used directly. | ||
func genericDatabricksData[T, P, C any]( | ||
getClient func(*DatabricksClient) (C, error), | ||
read func(context.Context, P, *T, C) error, | ||
hasOther bool) *schema.Resource { | ||
var dummy T | ||
var other P | ||
otherFields := StructToSchema(other, NoCustomize) | ||
s := StructToSchema(dummy, func(m map[string]*schema.Schema) map[string]*schema.Schema { | ||
// For WorkspaceData and AccountData, a single data type is used to represent all of the fields of | ||
// the resource, so its configuration is correct. For the *WithParams methods, the resource type T | ||
// is copied directly from the resource definition, which means that all fields from that type are | ||
// computed and optional, and the fields from the params type P are overlaid on top of the schema | ||
// generated by T. | ||
if hasOther { | ||
for k := range m { | ||
m[k].Computed = true | ||
m[k].Required = false | ||
m[k].Optional = true | ||
} | ||
for k, v := range otherFields { | ||
m[k] = v | ||
} | ||
} | ||
// `id` attribute must be marked as computed, otherwise it's not set! | ||
if v, ok := m["id"]; ok { | ||
v.Computed = true | ||
|
@@ -298,20 +406,22 @@ func genericDatabricksData[T any, C any](getClient func(*DatabricksClient) (C, e | |
diags = diag.Errorf("panic: %v", panic) | ||
} | ||
}() | ||
ptr := reflect.New(reflect.ValueOf(dummy).Type()) | ||
DataToReflectValue(d, &schema.Resource{Schema: s}, ptr.Elem()) | ||
var dummy T | ||
var other P | ||
DataToStructPointer(d, s, &other) | ||
DataToStructPointer(d, s, &dummy) | ||
client := m.(*DatabricksClient) | ||
c, err := getClient(client) | ||
if err != nil { | ||
err = nicerError(ctx, err, "read data") | ||
err = nicerError(ctx, err, "get client") | ||
return diag.FromErr(err) | ||
} | ||
err = read(ctx, ptr.Interface().(*T), c) | ||
err = read(ctx, other, &dummy, c) | ||
if err != nil { | ||
err = nicerError(ctx, err, "read data") | ||
diags = diag.FromErr(err) | ||
} | ||
StructToData(ptr.Elem().Interface(), s, d) | ||
StructToData(&dummy, s, d) | ||
// check if the resource schema has the `id` attribute (marked with `json:"id"` in the provided structure). | ||
// and if yes, then use it as resource ID. If not, then use default value for resource ID (`_`) | ||
if _, ok := s["id"]; ok { | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
what about doing
to avoid reallocations?