Migrate SQL Warehouse to Go SDK #3044

Merged: 39 commits from migrate-sql-warehouse-to-go-sdk into master, Jan 10, 2024.

Commits (39)
8ca7989  Migrate SQL Warehouse to Go SDK (mgyucht, Dec 19, 2023)
3ab53a2  fix compile (mgyucht, Dec 20, 2023)
97f0f43  check in diff-schema scrip (mgyucht, Dec 20, 2023)
a69d5f7  Merge branch 'master' into migrate-sql-warehouse-to-go-sdk (mgyucht, Dec 20, 2023)
69b892b  fix (mgyucht, Dec 20, 2023)
426598b  fix (mgyucht, Dec 20, 2023)
97b0643  test fix (mgyucht, Dec 20, 2023)
a6e5fb5  fix (mgyucht, Dec 20, 2023)
05184aa  fix (mgyucht, Dec 20, 2023)
4f62c47  Merge branch 'master' into migrate-sql-warehouse-to-go-sdk (mgyucht, Dec 20, 2023)
62e5c44  undo extra change (mgyucht, Dec 20, 2023)
dcc0d59  work (mgyucht, Dec 20, 2023)
ff1234d  work (mgyucht, Dec 20, 2023)
faeb466  work (mgyucht, Dec 20, 2023)
d80936c  fix (mgyucht, Dec 20, 2023)
6f1218e  fixes (mgyucht, Dec 20, 2023)
8cb7951  fix (mgyucht, Dec 20, 2023)
045a26f  remove (mgyucht, Dec 21, 2023)
5079eca  handle slices as well (mgyucht, Dec 21, 2023)
e8ecc58  Merge branch 'master' into migrate-sql-warehouse-to-go-sdk (mgyucht, Dec 21, 2023)
8c113a5  fix stubs in exporter test (mgyucht, Dec 21, 2023)
3f3aca5  test (mgyucht, Dec 21, 2023)
0c7d9c6  Fix tags diff suppression (mgyucht, Dec 21, 2023)
a45c93b  fix (mgyucht, Dec 21, 2023)
1687e2f  Address comments (mgyucht, Dec 22, 2023)
fc902b3  comments (mgyucht, Dec 22, 2023)
387ed42  fix workflow (mgyucht, Dec 22, 2023)
8ecc720  fix (mgyucht, Dec 22, 2023)
f1dae30  Merge branch 'master' into migrate-sql-warehouse-to-go-sdk (mgyucht, Dec 22, 2023)
adb8ed1  only diff against merge-base of master (mgyucht, Dec 22, 2023)
63ae324  update docs (mgyucht, Dec 22, 2023)
e08fbe8  remove deprecated resources (mgyucht, Dec 22, 2023)
20c3d14  undo extra docs changes (mgyucht, Dec 22, 2023)
61ed26e  remove extra space (mgyucht, Dec 22, 2023)
a33b53d  Merge branch 'master' into migrate-sql-warehouse-to-go-sdk (mgyucht, Jan 5, 2024)
a3f415d  small tweak (mgyucht, Jan 8, 2024)
0a069b7  small tweaks (mgyucht, Jan 8, 2024)
56e3b46  Merge branch 'master' into migrate-sql-warehouse-to-go-sdk (mgyucht, Jan 9, 2024)
ce5cec7  tests and address comments (mgyucht, Jan 9, 2024)
common/reflect_resource.go (112 changes: 93 additions, 19 deletions)

@@ -77,13 +77,44 @@ func MustSchemaPath(s map[string]*schema.Schema, path ...string) *schema.Schema
// StructToSchema makes schema from a struct type & applies customizations from callback given
func StructToSchema(v any, customize func(map[string]*schema.Schema) map[string]*schema.Schema) map[string]*schema.Schema {
rv := reflect.ValueOf(v)
-scm := typeToSchema(rv, rv.Type(), []string{})
+scm := typeToSchema(rv, []string{})
if customize != nil {
scm = customize(scm)
}
return scm
}

// SetSuppressDiff adds diff suppression to a schema. This is necessary for non-computed
// fields for which the platform returns a value, but the user has not configured any value.
// For example: the REST API returns `{"tags": {}}` for a resource with no tags.
func SetSuppressDiff(v *schema.Schema) {
v.DiffSuppressFunc = diffSuppressor(fmt.Sprintf("%v", v.Type.Zero()))
}

// SetDefault sets the default value for a schema.
func SetDefault(v *schema.Schema, value any) {
v.Default = value
v.Optional = true
v.Required = false
}

// SetReadOnly sets the schema to be read-only (i.e. computed, non-optional).
// This should be used for fields that are not user-configurable but are returned
// by the platform.
func SetReadOnly(v *schema.Schema) {
v.Optional = false
v.Required = false
v.MaxItems = 0
v.Computed = true
}

// SetRequired sets the schema to be required.
func SetRequired(v *schema.Schema) {
v.Optional = false
v.Required = true
v.Computed = false
}
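
These setters are meant to be used from the customize callback passed to StructToSchema. A minimal sketch of that pattern (the SqlWarehouse type and field names here are illustrative, not part of this diff):

	s := StructToSchema(SqlWarehouse{}, func(m map[string]*schema.Schema) map[string]*schema.Schema {
		SetRequired(m["cluster_size"])       // the user must configure this field
		SetDefault(m["auto_stop_mins"], 120) // optional, with a default applied when unset
		SetReadOnly(m["jdbc_url"])           // returned by the platform, never configured
		SetSuppressDiff(m["channel"])        // platform reports a value even when unset
		return m
	})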

func isOptional(typeField reflect.StructField) bool {
if strings.Contains(typeField.Tag.Get("json"), "omitempty") {
return true
@@ -175,26 +206,52 @@ func diffSuppressor(zero string) func(k, old, new string, d *schema.ResourceData
log.Printf("[DEBUG] Suppressing diff for %v: platform=%#v config=%#v", k, old, new)
return true
}
if strings.HasSuffix(k, ".#") && new == "0" && old != "0" {
field := strings.TrimSuffix(k, ".#")
log.Printf("[DEBUG] Suppressing diff for list or set %v: no value configured but platform returned some value (likely {})", field)
return true
}
return false
}
}
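
With the new suffix rule, a count key like `tags.#` is suppressed when the config is empty but the platform reported a value. A hedged sketch of how the returned function behaves (argument order is k, old, new, per Terraform's DiffSuppressFunc):

	suppress := diffSuppressor("") // "" is the zero value for a string-typed field
	suppress("tags.#", "1", "0", nil)         // true: platform returned {}, config sets no tags
	suppress("warehouse_name", "a", "b", nil) // false: a genuine change is never suppressed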

type field struct {
sf reflect.StructField
v reflect.Value
}

func listAllFields(v reflect.Value) []field {
t := v.Type()
fields := make([]field, 0)
[Review comment from a Contributor] What about doing `fields := make([]field, 0, v.NumField())` to avoid reallocations?

for i := 0; i < v.NumField(); i++ {
f := t.Field(i)
if f.Anonymous {
fields = append(fields, listAllFields(v.Field(i))...)
} else {
fields = append(fields, field{
sf: f,
v: v.Field(i),
})
}
}
return fields
}
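
To make the flattening concrete, a small sketch (types invented for illustration): anonymous fields are recursed into, so their promoted fields surface next to the outer ones.

	type base struct {
		Id string `json:"id"`
	}
	type warehouse struct {
		base // anonymous: its fields are flattened into the result
		Name string `json:"name"`
	}

	// listAllFields(reflect.ValueOf(warehouse{})) yields entries for both Id and Name,
	// as if they were declared side by side on warehouse.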

// typeToSchema converts struct into terraform schema. `path` is used for block suppressions
// special path element `"0"` is used to denote either arrays or sets of elements
-func typeToSchema(v reflect.Value, t reflect.Type, path []string) map[string]*schema.Schema {
+func typeToSchema(v reflect.Value, path []string) map[string]*schema.Schema {
scm := map[string]*schema.Schema{}
rk := v.Kind()
if rk == reflect.Ptr {
v = v.Elem()
-t = v.Type()
rk = v.Kind()
}
if rk != reflect.Struct {
panic(fmt.Errorf("Schema value of Struct is expected, but got %s: %#v", reflectKind(rk), v))
}
-for i := 0; i < v.NumField(); i++ {
-typeField := t.Field(i)
+fields := listAllFields(v)
+for _, field := range fields {
+typeField := field.sf
tfTag := typeField.Tag.Get("tf")

fieldName := chooseFieldName(typeField)
@@ -260,7 +317,7 @@ func typeToSchema(v reflect.Value, t reflect.Type, path []string) map[string]*sc
scm[fieldName].Type = schema.TypeList
elem := typeField.Type.Elem()
sv := reflect.New(elem).Elem()
-nestedSchema := typeToSchema(sv, elem, append(path, fieldName, "0"))
+nestedSchema := typeToSchema(sv, append(path, fieldName, "0"))
if strings.Contains(tfTag, "suppress_diff") {
blockCount := strings.Join(append(path, fieldName, "#"), ".")
scm[fieldName].DiffSuppressFunc = makeEmptyBlockSuppressFunc(blockCount)
[Review comment from a Contributor] With the new suffix checking in diffSuppressor, can we replace makeEmptyBlockSuppressFunc calls?

[Reply from the Author] Maybe we can do this as a separate PR?

@@ -279,7 +336,7 @@ func typeToSchema(v reflect.Value, t reflect.Type, path []string) map[string]*sc
elem := typeField.Type // changed from ptr
sv := reflect.New(elem) // changed from ptr

-nestedSchema := typeToSchema(sv, elem, append(path, fieldName, "0"))
+nestedSchema := typeToSchema(sv, append(path, fieldName, "0"))
if strings.Contains(tfTag, "suppress_diff") {
blockCount := strings.Join(append(path, fieldName, "#"), ".")
scm[fieldName].DiffSuppressFunc = makeEmptyBlockSuppressFunc(blockCount)
@@ -310,7 +367,7 @@ func typeToSchema(v reflect.Value, t reflect.Type, path []string) map[string]*sc
case reflect.Struct:
sv := reflect.New(elem).Elem()
scm[fieldName].Elem = &schema.Resource{
-Schema: typeToSchema(sv, elem, append(path, fieldName, "0")),
+Schema: typeToSchema(sv, append(path, fieldName, "0")),
}
}
default:
@@ -341,6 +398,20 @@ func IsRequestEmpty(v any) (bool, error) {
return !isNotEmpty, err
}

// isGoSdk returns true if the struct is from databricks-sdk-go or embeds a struct from databricks-sdk-go.
func isGoSdk(v reflect.Value) bool {
if strings.Contains(v.Type().PkgPath(), "databricks-sdk-go") {
return true
}
for i := 0; i < v.NumField(); i++ {
f := v.Type().Field(i)
if f.Anonymous && isGoSdk(v.Field(i)) {
return true
}
}
return false
}
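
A short sketch of the intent (the embedding is an assumption for illustration): a provider struct that embeds an SDK type counts as SDK-backed even though its own package path is not databricks-sdk-go.

	type SqlWarehouse struct {
		sql.GetWarehouseResponse // anonymous field from databricks-sdk-go
	}

	// isGoSdk(reflect.ValueOf(SqlWarehouse{})) == true, via the anonymous field.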

func iterFields(rv reflect.Value, path []string, s map[string]*schema.Schema,
cb func(fieldSchema *schema.Schema, path []string, valueField *reflect.Value) error) error {
rk := rv.Kind()
@@ -350,9 +421,10 @@ func iterFields(rv reflect.Value, path []string, s map[string]*schema.Schema,
if !rv.IsValid() {
return fmt.Errorf("%s: got invalid reflect value %#v", path, rv)
}
-isGoSDK := strings.Contains(rv.Type().PkgPath(), "databricks-sdk-go")
-for i := 0; i < rv.NumField(); i++ {
-typeField := rv.Type().Field(i)
+isGoSDK := isGoSdk(rv)
+fields := listAllFields(rv)
+for _, field := range fields {
+typeField := field.sf
fieldName := chooseFieldName(typeField)
if fieldName == "-" {
continue
@@ -370,7 +442,7 @@
if fieldSchema.Optional && defaultEmpty && !omitEmpty {
return fmt.Errorf("inconsistency: %s is optional, default is empty, but has no omitempty", fieldName)
}
-valueField := rv.Field(i)
+valueField := field.v
err := cb(fieldSchema, append(path, fieldName), &valueField)
if err != nil {
return fmt.Errorf("%s: %s", fieldName, err)
@@ -393,13 +465,15 @@ func collectionToMaps(v any, s *schema.Schema) ([]any, error) {
return nil, fmt.Errorf("not resource")
}
var allItems []reflect.Value
-if s.MaxItems == 1 {
-allItems = append(allItems, reflect.ValueOf(v))
-} else {
-vs := reflect.ValueOf(v)
-for i := 0; i < vs.Len(); i++ {
-allItems = append(allItems, vs.Index(i))
-}
+rv := reflect.ValueOf(v)
+rvType := rv.Type().Kind()
+isList := rvType == reflect.Array || rvType == reflect.Slice
+if isList {
+for i := 0; i < rv.Len(); i++ {
+allItems = append(allItems, rv.Index(i))
+}
+} else {
+allItems = append(allItems, rv)
}
for _, v := range allItems {
data := map[string]any{}
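
The collectionToMaps change above (commit 5079eca, "handle slices as well") moves the single-vs-collection decision from the schema's MaxItems to the reflected kind of the value, so a lone struct and a slice of structs both convert correctly whatever the schema says. A hedged sketch of the consequence (types illustrative, s being the matching schema):

	single, _ := collectionToMaps(OdbcParams{Port: 443}, s) // one map for the struct itself
	many, _ := collectionToMaps([]EndpointTag{{}, {}}, s)   // one map per slice element
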
common/reflect_resource_test.go (4 changes: 2 additions, 2 deletions)
[Review comment from a Contributor] @mgyucht I think that we need to add at least two new test cases:

  1. For an inherited structure, so we can see that it is flattened correctly into the schema.
  2. For a structure with a field from the Go SDK.

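A possible shape for the first of those cases, as a sketch only (assertion style assumed from the surrounding tests, not part of this diff):

	func TestStructToSchemaFlattensEmbeddedStruct(t *testing.T) {
		type inner struct {
			Hidden string `json:"hidden"`
		}
		type outer struct {
			inner
			Name string `json:"name"`
		}
		s := StructToSchema(outer{}, nil)
		// Both the promoted field and the outer field should appear at the top level.
		assert.NotNil(t, s["hidden"])
		assert.NotNil(t, s["name"])
	}

	// The second case would do the same with a struct embedding a type from
	// databricks-sdk-go, asserting that the isGoSdk-driven conventions are applied.
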
@@ -444,7 +444,7 @@ func TestTypeToSchemaNoStruct(t *testing.T) {
fmt.Sprintf("%s", p))
}()
v := reflect.ValueOf(1)
-typeToSchema(v, v.Type(), []string{})
+typeToSchema(v, []string{})
}

func TestTypeToSchemaUnsupported(t *testing.T) {
@@ -457,7 +457,7 @@ func TestTypeToSchemaUnsupported(t *testing.T) {
New chan int `json:"new"`
}
v := reflect.ValueOf(nonsense{})
-typeToSchema(v, v.Type(), []string{})
+typeToSchema(v, []string{})
}

type data map[string]any
common/resource.go (123 changes: 109 additions, 14 deletions)

@@ -256,9 +256,44 @@ func DataResource(sc any, read func(context.Context, any, *DatabricksClient) err
// ...
// })
func WorkspaceData[T any](read func(context.Context, *T, *databricks.WorkspaceClient) error) *schema.Resource {
-return genericDatabricksData(func(c *DatabricksClient) (*databricks.WorkspaceClient, error) {
-return c.WorkspaceClient()
-}, read)
+return genericDatabricksData((*DatabricksClient).WorkspaceClient, func(ctx context.Context, s struct{}, t *T, wc *databricks.WorkspaceClient) error {
+return read(ctx, t, wc)
+}, false)
}

// WorkspaceDataWithCustomAttrs defines a data source that can be used to read data from the workspace API.
// It differs from WorkspaceData in that it allows extra attributes to be provided as a separate argument,
// so the original type used to define the resource can also be used to define the data source.
//
// The first type parameter is the type of the resource. This can be a type directly from the SDK, or a custom
// type defined in the provider that embeds the SDK type.
//
// The second type parameter is the type of the extra attributes that should be provided to the data source. These
// are the attributes that the user can specify in the data source configuration, but are not part of the resource
// type. If there are no extra attributes, this should be `struct{}`. If there are any fields with the same JSON
// name as fields in the resource type, these fields will override the values from the resource type.
//
// The single argument is a function that will be called to read the data from the workspace API, setting the resulting
// values in the resource type. The function should return an error if the data cannot be read or the resource cannot
// be found.
//
// Example usage:
//
// type SqlWarehouse struct { ... }
//
// type SqlWarehouseDataParams struct {
// Id string `json:"id" tf:"computed,optional"`
// Name string `json:"name" tf:"computed,optional"`
// }
//
// WorkspaceDataWithCustomAttrs(
// func(ctx context.Context, data SqlWarehouseDataParams, warehouse *SqlWarehouse, w *databricks.WorkspaceClient) error {
// // User-provided attributes are present in the `data` parameter.
// // The resource should be populated in the `warehouse` parameter.
// ...
// })
func WorkspaceDataWithCustomAttrs[SdkType, OtherType any](read func(context.Context, OtherType, *SdkType, *databricks.WorkspaceClient) error) *schema.Resource {
return genericDatabricksData((*DatabricksClient).WorkspaceClient, read, true)
}

// AccountData is a generic way to define account data resources in Terraform provider.
@@ -273,15 +308,73 @@ func WorkspaceData[T any](read func(context.Context, *T, *databricks.WorkspaceCl
// ...
// })
func AccountData[T any](read func(context.Context, *T, *databricks.AccountClient) error) *schema.Resource {
-return genericDatabricksData(func(c *DatabricksClient) (*databricks.AccountClient, error) {
-return c.AccountClient()
-}, read)
+return genericDatabricksData((*DatabricksClient).AccountClient, func(ctx context.Context, s struct{}, t *T, ac *databricks.AccountClient) error {
+return read(ctx, t, ac)
+}, false)
}

-// genericDatabricksData is generic and common way to define both account and workspace data and calls their respective clients
-func genericDatabricksData[T any, C any](getClient func(*DatabricksClient) (C, error), read func(context.Context, *T, C) error) *schema.Resource {
-var dummy T
// AccountDataWithCustomAttrs defines a data source that can be used to read data from the account API.
// It differs from AccountData in that it allows extra attributes to be provided as a separate argument,
// so the original type used to define the resource can also be used to define the data source.
//
// The first type parameter is the type of the resource. This can be a type directly from the SDK, or a custom
// type defined in the provider that embeds the SDK type.
//
// The second type parameter is the type of the extra attributes that should be provided to the data source. These
// are the attributes that the user can specify in the data source configuration, but are not part of the resource
// type. If there are no extra attributes, this should be `struct{}`. If there are any fields with the same JSON
// name as fields in the resource type, these fields will override the values from the resource type.
//
// The single argument is a function that will be called to read the data from the account API, setting the resulting
// values in the resource type. The function should return an error if the data cannot be read or the resource cannot
// be found.
//
// Example usage:
//
// type MwsWorkspace struct { ... }
//
// type MwsWorkspaceDataParams struct {
// Id string `json:"id" tf:"computed,optional"`
// Name string `json:"name" tf:"computed,optional"`
// }
//
// AccountDataWithCustomAttrs(
// func(ctx context.Context, data MwsWorkspaceDataParams, workspace *MwsWorkspace, a *databricks.AccountClient) error {
// // User-provided attributes are present in the `data` parameter.
// // The resource should be populated in the `workspace` parameter.
// ...
// })
func AccountDataWithCustomAttrs[SdkType, OtherType any](read func(context.Context, OtherType, *SdkType, *databricks.AccountClient) error) *schema.Resource {
return genericDatabricksData((*DatabricksClient).AccountClient, read, true)
}

// genericDatabricksData is a generic, common way to define both account and workspace data sources, calling their respective clients.
//
// If hasOther is true, all of the fields of SdkType will be marked as computed/optional in the final schema, and the fields
// from OtherFields will be overlaid on top of the schema generated by SdkType. Otherwise, the schema generated by SdkType
// will be used directly.
func genericDatabricksData[SdkType, OtherFields any, C any](
getClient func(*DatabricksClient) (C, error),
read func(context.Context, OtherFields, *SdkType, C) error,
hasOther bool) *schema.Resource {
var dummy SdkType
var other OtherFields
otherFields := StructToSchema(other, NoCustomize)
s := StructToSchema(dummy, func(m map[string]*schema.Schema) map[string]*schema.Schema {
// For WorkspaceData and AccountData, a single data type is used to represent all of the fields of
// the resource, so its configuration is correct. For *WithCustomAttrs, the SdkType parameter is copied
// directly from the resource definition, which means that all fields from that type are computed and
// optional, and the fields from OtherFields are overlaid on top of the schema generated by SdkType.
if hasOther {
for k := range m {
m[k].Computed = true
m[k].Required = false
m[k].Optional = true
}
for k, v := range otherFields {
m[k] = v
}
}
// `id` attribute must be marked as computed, otherwise it's not set!
if v, ok := m["id"]; ok {
v.Computed = true
@@ -298,20 +391,22 @@ func genericDatabricksData[T any, C any](getClient func(*DatabricksClient) (C, e
diags = diag.Errorf("panic: %v", panic)
}
}()
-ptr := reflect.New(reflect.ValueOf(dummy).Type())
-DataToReflectValue(d, &schema.Resource{Schema: s}, ptr.Elem())
+var dummy SdkType
+var other OtherFields
+DataToStructPointer(d, s, &other)
+DataToStructPointer(d, s, &dummy)
client := m.(*DatabricksClient)
c, err := getClient(client)
if err != nil {
-err = nicerError(ctx, err, "read data")
+err = nicerError(ctx, err, "get client")
return diag.FromErr(err)
}
-err = read(ctx, ptr.Interface().(*T), c)
+err = read(ctx, other, &dummy, c)
if err != nil {
err = nicerError(ctx, err, "read data")
diags = diag.FromErr(err)
}
-StructToData(ptr.Elem().Interface(), s, d)
+StructToData(&dummy, s, d)
// check if the resource schema has the `id` attribute (marked with `json:"id"` in the provided structure).
// and if yes, then use it as resource ID. If not, then use default value for resource ID (`_`)
if _, ok := s["id"]; ok {
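
To make the overlay behavior concrete, a sketch that echoes the doc-comment example above (the lookup logic is invented for illustration):

	dataSource := WorkspaceDataWithCustomAttrs(func(ctx context.Context,
		data SqlWarehouseDataParams, w *SqlWarehouse, c *databricks.WorkspaceClient) error {
		// All attributes derived from SqlWarehouse are forced to computed+optional; the
		// "id" and "name" entries from SqlWarehouseDataParams then overwrite them, so only
		// those two are meaningfully user-settable. The callback populates *w.
		if data.Id == "" && data.Name == "" {
			return fmt.Errorf("either id or name must be specified")
		}
		// ... look up the warehouse by id or name and fill in *w ...
		return nil
	})
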
docs/data-sources/sql_warehouse.md (5 changes: 5 additions, 0 deletions)

@@ -59,6 +59,11 @@ This data source exports the following attributes:
* `jdbc_url` - JDBC connection string.
* `odbc_params` - ODBC connection params: `odbc_params.hostname`, `odbc_params.path`, `odbc_params.protocol`, and `odbc_params.port`.
* `data_source_id` - ID of the data source for this warehouse. This is used to bind a Databricks SQL query to a warehouse.
* `creator_name` - The username of the user who created the endpoint.
* `num_active_sessions` - The current number of active sessions on the endpoint.
* `num_clusters` - The current number of clusters used by the endpoint.
* `state` - The current state of the endpoint.
* `health` - Health status of the endpoint.

## Related resources
