diff --git a/VERSION b/VERSION
index 9256e28..97bceaa 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-1.0.13
\ No newline at end of file
+1.0.14
\ No newline at end of file
diff --git a/client/database.go b/client/database.go
index e0becc4..27b1004 100644
--- a/client/database.go
+++ b/client/database.go
@@ -6,8 +6,10 @@ import (
 	"net/http"
 	"net/url"
 	"strings"
+	"time"
 
 	v1pb "github.com/bytebase/bytebase/proto/generated-go/v1"
+	"github.com/hashicorp/terraform-plugin-log/tflog"
 	"google.golang.org/protobuf/encoding/protojson"
 )
 
@@ -27,32 +29,44 @@ func (c *client) GetDatabase(ctx context.Context, databaseName string) (*v1pb.Da
 }
 
 // ListDatabase list all databases.
-func (c *client) ListDatabase(ctx context.Context, instanceID, filter string) ([]*v1pb.Database, error) {
+func (c *client) ListDatabase(ctx context.Context, parent, filter string) ([]*v1pb.Database, error) {
 	res := []*v1pb.Database{}
 	pageToken := ""
+	startTime := time.Now()
 
 	for {
-		resp, err := c.listDatabasePerPage(ctx, instanceID, filter, pageToken, 500)
+		startTimePerPage := time.Now()
+		resp, err := c.listDatabasePerPage(ctx, parent, filter, pageToken, 500)
 		if err != nil {
 			return nil, err
 		}
 		res = append(res, resp.Databases...)
+		tflog.Debug(ctx, "[list database per page]", map[string]interface{}{
+			"count": len(resp.Databases),
+			"ms":    time.Since(startTimePerPage).Milliseconds(),
+		})
+
 		pageToken = resp.NextPageToken
 		if pageToken == "" {
 			break
 		}
 	}
 
+	tflog.Debug(ctx, "[list database]", map[string]interface{}{
+		"total": len(res),
+		"ms":    time.Since(startTime).Milliseconds(),
+	})
+
 	return res, nil
 }
 
 // listDatabasePerPage list the databases.
-func (c *client) listDatabasePerPage(ctx context.Context, instanceID, filter, pageToken string, pageSize int) (*v1pb.ListDatabasesResponse, error) {
+func (c *client) listDatabasePerPage(ctx context.Context, parent, filter, pageToken string, pageSize int) (*v1pb.ListDatabasesResponse, error) {
 	requestURL := fmt.Sprintf(
-		"%s/%s/instances/%s/databases?filter=%s&page_size=%d&page_token=%s",
+		"%s/%s/%s/databases?filter=%s&page_size=%d&page_token=%s",
 		c.url,
 		c.version,
-		instanceID,
+		parent,
 		url.QueryEscape(filter),
 		pageSize,
 		pageToken,
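`ListDatabase` now takes a generic `parent` resource name instead of a bare instance ID, so the same pager serves both instance- and project-scoped listings (the request URL becomes `{url}/{version}/{parent}/databases`). The following is only an illustrative sketch of a caller; `databaseLister` is a local stand-in for the provider's `api.Client`, introduced here just to keep the example self-contained.

```go
package example

import (
	"context"
	"fmt"

	v1pb "github.com/bytebase/bytebase/proto/generated-go/v1"
)

// databaseLister is a stand-in for the provider's api.Client, matching the
// new ListDatabase signature that takes a parent resource name.
type databaseLister interface {
	ListDatabase(ctx context.Context, parent, filter string) ([]*v1pb.Database, error)
}

// listDatabases lists every database under one parent resource name, for
// example "instances/prod-sample-instance" or "projects/sample-project".
func listDatabases(ctx context.Context, c databaseLister, parent string) ([]*v1pb.Database, error) {
	databases, err := c.ListDatabase(ctx, parent, "")
	if err != nil {
		return nil, fmt.Errorf("list databases under %s: %w", parent, err)
	}
	return databases, nil
}
```

Callers elsewhere in this change pass `instance.Name` or `project.Name` as the parent instead of the previous `instanceID`/`"-"` plus CEL filter.
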
diff --git a/examples/environments/main.tf b/examples/environments/main.tf
index 19fc9d0..9681d97 100644
--- a/examples/environments/main.tf
+++ b/examples/environments/main.tf
@@ -2,7 +2,7 @@ terraform {
   required_providers {
     bytebase = {
-      version = "1.0.9"
+      version = "1.0.14"
       # For local development, please use "terraform.local/bytebase/bytebase" instead
       source = "registry.terraform.io/bytebase/bytebase"
     }
   }
diff --git a/examples/groups/main.tf b/examples/groups/main.tf
index 1577f8b..ab007e2 100644
--- a/examples/groups/main.tf
+++ b/examples/groups/main.tf
@@ -1,7 +1,7 @@
 terraform {
   required_providers {
     bytebase = {
-      version = "1.0.9"
+      version = "1.0.14"
       # For local development, please use "terraform.local/bytebase/bytebase" instead
       source = "registry.terraform.io/bytebase/bytebase"
     }
diff --git a/examples/instances/main.tf b/examples/instances/main.tf
index f9fef86..2d1f506 100644
--- a/examples/instances/main.tf
+++ b/examples/instances/main.tf
@@ -2,7 +2,7 @@ terraform {
   required_providers {
     bytebase = {
-      version = "1.0.9"
+      version = "1.0.14"
       # For local development, please use "terraform.local/bytebase/bytebase" instead
       source = "registry.terraform.io/bytebase/bytebase"
     }
   }
diff --git a/examples/policies/main.tf b/examples/policies/main.tf
index 48fb027..01d71c5 100644
--- a/examples/policies/main.tf
+++ b/examples/policies/main.tf
@@ -1,7 +1,7 @@
 terraform {
   required_providers {
     bytebase = {
-      version = "1.0.9"
+      version = "1.0.14"
       # For local development, please use "terraform.local/bytebase/bytebase" instead
       source = "registry.terraform.io/bytebase/bytebase"
     }
@@ -25,3 +25,11 @@ data "bytebase_policy" "masking_exception_policy" {
 output "masking_exception_policy" {
   value = data.bytebase_policy.masking_exception_policy
 }
+
+data "bytebase_policy" "global_masking_policy" {
+  type = "MASKING_RULE"
+}
+
+output "global_masking_policy" {
+  value = data.bytebase_policy.global_masking_policy
+}
diff --git a/examples/projects/main.tf b/examples/projects/main.tf
index a405d25..febc6d5 100644
--- a/examples/projects/main.tf
+++ b/examples/projects/main.tf
@@ -2,7 +2,7 @@ terraform {
   required_providers {
     bytebase = {
-      version = "1.0.9"
+      version = "1.0.14"
       # For local development, please use "terraform.local/bytebase/bytebase" instead
       source = "registry.terraform.io/bytebase/bytebase"
     }
   }
diff --git a/examples/settings/main.tf b/examples/settings/main.tf
index 51d14f1..149c2e1 100644
--- a/examples/settings/main.tf
+++ b/examples/settings/main.tf
@@ -1,7 +1,7 @@
 terraform {
   required_providers {
     bytebase = {
-      version = "1.0.9"
+      version = "1.0.14"
       # For local development, please use "terraform.local/bytebase/bytebase" instead
       source = "registry.terraform.io/bytebase/bytebase"
     }
diff --git a/examples/setup/data_masking.tf b/examples/setup/data_masking.tf
index ded651d..3ba19b8 100644
--- a/examples/setup/data_masking.tf
+++ b/examples/setup/data_masking.tf
@@ -96,3 +96,28 @@ resource "bytebase_policy" "masking_exception_policy" {
     }
   }
 }
+
+resource "bytebase_policy" "global_masking_policy" {
+  depends_on = [
+    bytebase_instance.prod,
+    bytebase_environment.test
+  ]
+
+  parent              = ""
+  type                = "MASKING_RULE"
+  enforce             = true
+  inherit_from_parent = false
+
+  global_masking_policy {
+    rules {
+      condition     = "environment_id in [\"test\"]"
+      id            = "69df1d15-abe5-4bc9-be38-f2a4bef3f7e0"
+      semantic_type = "bb.default-partial"
+    }
+    rules {
+      condition     = "instance_id in [\"prod-sample-instance\"]"
+      id            = "90adb734-0808-4c9f-b281-1f76f7a1a29a"
+      semantic_type = "bb.default"
+    }
+  }
+}
diff --git a/examples/setup/main.tf b/examples/setup/main.tf
index 0b2636b..b99a2c3 100644
--- a/examples/setup/main.tf
+++ b/examples/setup/main.tf
@@ -1,7 +1,7 @@
 terraform {
   required_providers {
     bytebase = {
-      version = "1.0.9"
+      version = "1.0.14"
       # For local development, please use "terraform.local/bytebase/bytebase" instead
       source = "registry.terraform.io/bytebase/bytebase"
     }
diff --git a/examples/setup/project.tf b/examples/setup/project.tf
index 3eb5bdd..8d111b5 100644
--- a/examples/setup/project.tf
+++ b/examples/setup/project.tf
@@ -3,13 +3,21 @@ resource "bytebase_project" "sample_project" {
   depends_on = [
     bytebase_user.workspace_dba,
     bytebase_user.project_developer,
-    bytebase_group.developers
+    bytebase_group.developers,
+    bytebase_instance.prod
   ]
 
   resource_id = local.project_id
   title       = "Sample project"
   key         = "SAMM"
 
+  dynamic "databases" {
+    for_each = bytebase_instance.prod.databases
+    content {
+      name = databases.value.name
+    }
+  }
+
   members {
     member = format("user:%s", bytebase_user.workspace_dba.email)
     role   = "roles/projectOwner"
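The two `rules` blocks in `examples/setup/data_masking.tf` above translate into a `MASKING_RULE` policy payload along the following lines. This is a sketch only: the `expr` package is assumed to be `google.golang.org/genproto/googleapis/type/expr`, matching the `expr.Expr` conditions built in `provider/resource_policy.go` later in this diff.

```go
package example

import (
	v1pb "github.com/bytebase/bytebase/proto/generated-go/v1"
	"google.golang.org/genproto/googleapis/type/expr"
)

// globalMaskingPolicy mirrors the bytebase_policy.global_masking_policy
// resource above: each rules block becomes one MaskingRule whose condition
// is a CEL expression over environment_id / instance_id.
var globalMaskingPolicy = &v1pb.MaskingRulePolicy{
	Rules: []*v1pb.MaskingRulePolicy_MaskingRule{
		{
			Id:           "69df1d15-abe5-4bc9-be38-f2a4bef3f7e0",
			SemanticType: "bb.default-partial",
			Condition:    &expr.Expr{Expression: `environment_id in ["test"]`},
		},
		{
			Id:           "90adb734-0808-4c9f-b281-1f76f7a1a29a",
			SemanticType: "bb.default",
			Condition:    &expr.Expr{Expression: `instance_id in ["prod-sample-instance"]`},
		},
	},
}
```
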
diff --git a/examples/setup/users.tf b/examples/setup/users.tf
index 5a6c3ef..79bcc90 100644
--- a/examples/setup/users.tf
+++ b/examples/setup/users.tf
@@ -9,6 +9,10 @@ resource "bytebase_user" "workspace_dba" {
 
 # Create or update the user.
 resource "bytebase_user" "project_developer" {
+  depends_on = [
+    bytebase_user.workspace_dba
+  ]
+
   title = "Developer"
   email = "developer@bytebase.com"
 
diff --git a/examples/users/main.tf b/examples/users/main.tf
index 9407675..5a88582 100644
--- a/examples/users/main.tf
+++ b/examples/users/main.tf
@@ -1,7 +1,7 @@
 terraform {
   required_providers {
     bytebase = {
-      version = "1.0.9"
+      version = "1.0.14"
       # For local development, please use "terraform.local/bytebase/bytebase" instead
       source = "registry.terraform.io/bytebase/bytebase"
     }
diff --git a/examples/vcs/main.tf b/examples/vcs/main.tf
index 91f03a5..e489f17 100644
--- a/examples/vcs/main.tf
+++ b/examples/vcs/main.tf
@@ -1,7 +1,7 @@
 terraform {
   required_providers {
     bytebase = {
-      version = "1.0.9"
+      version = "1.0.14"
       # For local development, please use "terraform.local/bytebase/bytebase" instead
       source = "registry.terraform.io/bytebase/bytebase"
     }
diff --git a/provider/data_source_database_catalog.go b/provider/data_source_database_catalog.go
index b43d1cb..c50422b 100644
--- a/provider/data_source_database_catalog.go
+++ b/provider/data_source_database_catalog.go
@@ -152,14 +152,8 @@ func setDatabaseCatalog(d *schema.ResourceData, catalog *v1pb.DatabaseCatalog) d
 }
 
 func columnHash(rawColumn interface{}) string {
-	var buf bytes.Buffer
 	column := rawColumn.(map[string]interface{})
-
-	if v, ok := column["name"].(string); ok {
-		_, _ = buf.WriteString(fmt.Sprintf("%s-", v))
-	}
-
-	return buf.String()
+	return column["name"].(string)
 }
 
 func tableHash(rawTable interface{}) string {
diff --git a/provider/data_source_instance.go b/provider/data_source_instance.go
index 58bbf0a..7993f9d 100644
--- a/provider/data_source_instance.go
+++ b/provider/data_source_instance.go
@@ -13,8 +13,8 @@ import (
 
 func dataSourceInstance() *schema.Resource {
 	return &schema.Resource{
-		Description: "The instance data source.",
-		ReadContext: dataSourceInstanceRead,
+		Description:        "The instance data source.",
+		ReadWithoutTimeout: dataSourceInstanceRead,
 		Schema: map[string]*schema.Schema{
 			"resource_id": {
 				Type: schema.TypeString,
diff --git a/provider/data_source_instance_list.go b/provider/data_source_instance_list.go
index 24a7977..0158178 100644
--- a/provider/data_source_instance_list.go
+++ b/provider/data_source_instance_list.go
@@ -14,8 +14,8 @@ import (
 
 func dataSourceInstanceList() *schema.Resource {
 	return &schema.Resource{
-		Description: "The instance data source list.",
-		ReadContext: dataSourceInstanceListRead,
+		Description:        "The instance data source list.",
+		ReadWithoutTimeout: dataSourceInstanceListRead,
 		Schema: map[string]*schema.Schema{
 			"show_deleted": {
 				Type: schema.TypeBool,
@@ -182,7 +182,7 @@ func dataSourceInstanceListRead(ctx context.Context, d *schema.ResourceData, m i
 		}
 		ins["data_sources"] = schema.NewSet(dataSourceHash, dataSources)
 
-		databases, err := c.ListDatabase(ctx, instanceID, "")
+		databases, err := c.ListDatabase(ctx, instance.Name, "")
 		if err != nil {
 			return diag.FromErr(err)
 		}
diff --git a/provider/data_source_policy.go b/provider/data_source_policy.go
index a4a3325..a94ada7 100644
--- a/provider/data_source_policy.go
+++ b/provider/data_source_policy.go
@@ -46,6 +46,7 @@ func dataSourcePolicy() *schema.Resource {
 				Required: true,
 				ValidateFunc: validation.StringInSlice([]string{
 					v1pb.PolicyType_MASKING_EXCEPTION.String(),
+					v1pb.PolicyType_MASKING_RULE.String(),
 				}, false),
 				Description: "The policy type.",
 			},
@@ -65,6 +66,7 @@ func dataSourcePolicy() *schema.Resource {
 				Description: "Decide if the policy is enforced.",
 			},
 			"masking_exception_policy": getMaskingExceptionPolicySchema(true),
+			"global_masking_policy":    getGlobalMaskingPolicySchema(true),
 		},
 	}
 }
@@ -142,6 +144,53 @@ func getMaskingExceptionPolicySchema(computed bool) *schema.Schema {
 	}
 }
 
+func getGlobalMaskingPolicySchema(computed bool) *schema.Schema {
+	return &schema.Schema{
+		Computed: computed,
+		Optional: true,
+		Default:  nil,
+		Type:     schema.TypeList,
+		MinItems: 0,
+		MaxItems: 1,
+		Elem: &schema.Resource{
+			Schema: map[string]*schema.Schema{
+				"rules": {
+					Computed: computed,
+					Optional: true,
+					Default:  nil,
+					MinItems: 0,
+					Type:     schema.TypeList,
+					Elem: &schema.Resource{
+						Schema: map[string]*schema.Schema{
+							"id": {
+								Type:         schema.TypeString,
+								Computed:     computed,
+								Optional:     true,
+								ValidateFunc: validation.StringIsNotEmpty,
+								Description:  "The unique rule id",
+							},
+							"semantic_type": {
+								Type:         schema.TypeString,
+								Computed:     computed,
+								Optional:     true,
+								ValidateFunc: validation.StringIsNotEmpty,
+								Description:  "The semantic type id",
+							},
+							"condition": {
+								Type:         schema.TypeString,
+								Computed:     computed,
+								Optional:     true,
+								ValidateFunc: validation.StringIsNotEmpty,
+								Description:  "The condition expression",
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+}
+
 func dataSourcePolicyRead(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
 	c := m.(api.Client)
 
@@ -183,9 +232,40 @@ func setPolicyMessage(d *schema.ResourceData, policy *v1pb.Policy) diag.Diagnost
 		}
 	}
 
+	if p := policy.GetMaskingRulePolicy(); p != nil {
+		maskingPolicy, err := flattenGlobalMaskingPolicy(p)
+		if err != nil {
+			return diag.FromErr(err)
+		}
+		if err := d.Set("global_masking_policy", maskingPolicy); err != nil {
+			return diag.Errorf("cannot set global_masking_policy: %s", err.Error())
+		}
+	}
+
 	return nil
 }
 
+func flattenGlobalMaskingPolicy(p *v1pb.MaskingRulePolicy) ([]interface{}, error) {
+	ruleList := []interface{}{}
+
+	for _, rule := range p.Rules {
+		if rule.Condition == nil || rule.Condition.Expression == "" {
+			return nil, errors.Errorf("invalid global masking policy condition")
+		}
+		raw := map[string]interface{}{}
+		raw["id"] = rule.Id
+		raw["semantic_type"] = rule.SemanticType
+		raw["condition"] = rule.Condition.Expression
+
+		ruleList = append(ruleList, raw)
+	}
+
+	policy := map[string]interface{}{
+		"rules": ruleList,
+	}
+	return []interface{}{policy}, nil
+}
+
 func flattenMaskingExceptionPolicy(p *v1pb.MaskingExceptionPolicy) ([]interface{}, error) {
 	exceptionList := []interface{}{}
 	for _, exception := range p.MaskingExceptions {
diff --git a/provider/data_source_policy_list.go b/provider/data_source_policy_list.go
index 6f8548a..809b39f 100644
--- a/provider/data_source_policy_list.go
+++ b/provider/data_source_policy_list.go
@@ -10,6 +10,8 @@ import (
 	"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
 
+	v1pb "github.com/bytebase/bytebase/proto/generated-go/v1"
+
 	"github.com/bytebase/terraform-provider-bytebase/api"
 	"github.com/bytebase/terraform-provider-bytebase/provider/internal"
 )
@@ -63,6 +65,7 @@ func dataSourcePolicyList() *schema.Resource {
 					Description: "Decide if the policy is enforced.",
 				},
 				"masking_exception_policy": getMaskingExceptionPolicySchema(true),
+				"global_masking_policy":    getGlobalMaskingPolicySchema(true),
 			},
 		},
 	},
@@ -80,6 +83,9 @@ func dataSourcePolicyListRead(ctx context.Context, d *schema.ResourceData, m int
 
 	policies := make([]map[string]interface{}, 0)
 	for _, policy := range response.Policies {
+		if policy.Type != v1pb.PolicyType_MASKING_EXCEPTION && policy.Type != v1pb.PolicyType_MASKING_RULE {
+			continue
+		}
 		raw := make(map[string]interface{})
 		raw["name"] = policy.Name
 		raw["type"] = policy.Type.String()
@@ -93,6 +99,13 @@ func dataSourcePolicyListRead(ctx context.Context, d *schema.ResourceData, m int
 			}
 			raw["masking_exception_policy"] = exceptionPolicy
 		}
+		if p := policy.GetMaskingRulePolicy(); p != nil {
+			maskingPolicy, err := flattenGlobalMaskingPolicy(p)
+			if err != nil {
+				return diag.FromErr(err)
+			}
+			raw["global_masking_policy"] = maskingPolicy
+		}
 		policies = append(policies, raw)
 	}
 
diff --git a/provider/data_source_project.go b/provider/data_source_project.go
index 7fb3834..6216efb 100644
--- a/provider/data_source_project.go
+++ b/provider/data_source_project.go
@@ -8,6 +8,7 @@ import (
 	"strings"
 	"time"
 
+	"github.com/hashicorp/terraform-plugin-log/tflog"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
 	"github.com/pkg/errors"
@@ -20,8 +21,8 @@ import (
 
 func dataSourceProject() *schema.Resource {
 	return &schema.Resource{
-		Description: "The project data source.",
-		ReadContext: dataSourceProjectRead,
+		Description:        "The project data source.",
+		ReadWithoutTimeout: dataSourceProjectRead,
 		Schema: map[string]*schema.Schema{
 			"resource_id": {
 				Type: schema.TypeString,
@@ -124,13 +125,6 @@ func getDatabasesSchema(computed bool) *schema.Schema {
 					Computed:    true,
 					Description: "The version of database schema.",
 				},
-				"labels": {
-					Type:        schema.TypeMap,
-					Computed:    computed,
-					Optional:    !computed,
-					Description: "The deployment and policy control labels.",
-					Elem:        &schema.Schema{Type: schema.TypeString},
-				},
 			},
 		},
 		Set: databaseHash,
@@ -232,7 +226,6 @@ func flattenDatabaseList(databases []*v1pb.Database) []interface{} {
 		db["sync_state"] = database.SyncState.String()
 		db["successful_sync_time"] = database.SuccessfulSyncTime.AsTime().UTC().Format(time.RFC3339)
 		db["schema_version"] = database.SchemaVersion
-		db["labels"] = database.Labels
 		dbList = append(dbList, db)
 	}
 	return dbList
@@ -305,8 +298,11 @@ func setProject(
 	d *schema.ResourceData,
 	project *v1pb.Project,
 ) diag.Diagnostics {
-	filter := fmt.Sprintf(`project == "%s"`, project.Name)
-	databases, err := client.ListDatabase(ctx, "-", filter)
+	tflog.Debug(ctx, "[read project] start reading project", map[string]interface{}{
+		"project": project.Name,
+	})
+
+	databases, err := client.ListDatabase(ctx, project.Name, "")
 	if err != nil {
 		return diag.FromErr(err)
 	}
@@ -359,11 +355,18 @@ func setProject(
 		return diag.Errorf("cannot set postgres_database_tenant_mode for project: %s", err.Error())
 	}
 
+	startTime := time.Now()
 	databaseList := flattenDatabaseList(databases)
 	if err := d.Set("databases", schema.NewSet(databaseHash, databaseList)); err != nil {
 		return diag.Errorf("cannot set databases for project: %s", err.Error())
 	}
+	tflog.Debug(ctx, "[read project] set project databases", map[string]interface{}{
+		"project":   project.Name,
+		"databases": len(databases),
+		"ms":        time.Since(startTime).Milliseconds(),
+	})
 
+	startTime = time.Now()
 	memberList, err := flattenMemberList(iamPolicy)
 	if err != nil {
 		return diag.FromErr(err)
@@ -372,17 +375,18 @@ func setProject(
 		return diag.Errorf("cannot set members for project: %s", err.Error())
 	}
 
+	tflog.Debug(ctx, "[read project] set project members", map[string]interface{}{
+		"project": project.Name,
+		"members": len(memberList),
+		"ms":      time.Since(startTime).Milliseconds(),
+	})
+
 	return nil
 }
 
 func databaseHash(rawDatabase interface{}) int {
-	var buf bytes.Buffer
 	database := rawDatabase.(map[string]interface{})
-
-	if v, ok := database["name"].(string); ok {
-		_, _ = buf.WriteString(fmt.Sprintf("%s-", v))
-	}
-	return internal.ToHashcodeInt(buf.String())
+	return internal.ToHashcodeInt(database["name"].(string))
 }
 
 func memberHash(rawMember interface{}) int {
diff --git a/provider/data_source_project_list.go b/provider/data_source_project_list.go
index 6156958..9a5195e 100644
--- a/provider/data_source_project_list.go
+++ b/provider/data_source_project_list.go
@@ -2,7 +2,6 @@ package provider
 
 import (
 	"context"
-	"fmt"
 	"strconv"
 	"time"
 
@@ -15,8 +14,8 @@ import (
 
 func dataSourceProjectList() *schema.Resource {
 	return &schema.Resource{
-		Description: "The project data source list.",
-		ReadContext: dataSourceProjectListRead,
+		Description:        "The project data source list.",
+		ReadWithoutTimeout: dataSourceProjectListRead,
 		Schema: map[string]*schema.Schema{
 			"show_deleted": {
 				Type: schema.TypeBool,
@@ -124,8 +123,7 @@ func dataSourceProjectListRead(ctx context.Context, d *schema.ResourceData, m in
 		proj["skip_backup_errors"] = project.AllowModifyStatement
 		proj["postgres_database_tenant_mode"] = project.PostgresDatabaseTenantMode
 
-		filter := fmt.Sprintf(`project == "%s"`, project.Name)
-		databases, err := c.ListDatabase(ctx, "-", filter)
+		databases, err := c.ListDatabase(ctx, project.Name, "")
 		if err != nil {
 			return diag.FromErr(err)
 		}
diff --git a/provider/data_source_setting.go b/provider/data_source_setting.go
index dbcd758..0e0d2d6 100644
--- a/provider/data_source_setting.go
+++ b/provider/data_source_setting.go
@@ -1,7 +1,6 @@
 package provider
 
 import (
-	"bytes"
 	"context"
 	"fmt"
 
@@ -566,12 +565,6 @@ func flattenClassificationSetting(setting *v1pb.DataClassificationSetting) []int
 }
 
 func itemIDHash(rawItem interface{}) int {
-	var buf bytes.Buffer
 	item := rawItem.(map[string]interface{})
-
-	if v, ok := item["id"].(string); ok {
-		_, _ = buf.WriteString(fmt.Sprintf("%s-", v))
-	}
-
-	return internal.ToHashcodeInt(buf.String())
+	return internal.ToHashcodeInt(item["id"].(string))
 }
diff --git a/provider/internal/mock_client.go b/provider/internal/mock_client.go
index 005471c..09d645e 100644
--- a/provider/internal/mock_client.go
+++ b/provider/internal/mock_client.go
@@ -342,6 +342,17 @@ func (c *mockClient) UpsertPolicy(_ context.Context, patch *v1pb.Policy, updateM
 				MaskingExceptionPolicy: v,
 			}
 		}
+	case v1pb.PolicyType_MASKING_RULE:
+		if !existed {
+			if patch.GetMaskingRulePolicy() == nil {
+				return nil, errors.Errorf("payload is required to create the policy")
+			}
+		}
+		if v := patch.GetMaskingRulePolicy(); v != nil {
+			policy.Policy = &v1pb.Policy_MaskingRulePolicy{
+				MaskingRulePolicy: v,
+			}
+		}
 	default:
 		return nil, errors.Errorf("invalid policy type %v", policyType)
 	}
diff --git a/provider/resource_database_catalog.go b/provider/resource_database_catalog.go
index fcca4fb..7184971 100644
--- a/provider/resource_database_catalog.go
+++ b/provider/resource_database_catalog.go
@@ -59,7 +59,8 @@ func resourceDatabaseCatalog() *schema.Resource {
 					"classification": {
 						Type:        schema.TypeString,
 						Optional:    true,
-						Default:     "",
+						Computed:    true,
+						Default:     nil,
 						Description: "The classification id",
 					},
 					"columns": {
@@ -74,11 +75,13 @@ func resourceDatabaseCatalog() *schema.Resource {
 								"semantic_type": {
 									Type:        schema.TypeString,
 									Optional:    true,
+									Computed:    true,
 									Description: "The semantic type id",
 								},
 								"classification": {
 									Type:        schema.TypeString,
 									Optional:    true,
+									Computed:    true,
 									Description: "The classification id",
 								},
 								"labels": {
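Several attributes in this change (the catalog fields above, plus `description`, `external_link` and the project booleans below) switch from a hard `Default` to `Optional` + `Computed`. A minimal sketch of the pattern; the variable name is illustrative only.

```go
package example

import "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"

// With Optional + Computed and no Default, an attribute the practitioner
// leaves out keeps the value returned by the Bytebase API instead of being
// forced back to an empty default, which avoids spurious plan diffs.
var exampleOptionalComputed = &schema.Schema{
	Type:        schema.TypeString,
	Optional:    true,
	Computed:    true,
	Description: "The classification id",
}
```
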
diff --git a/provider/resource_group.go b/provider/resource_group.go
index 8b95c15..159f59d 100644
--- a/provider/resource_group.go
+++ b/provider/resource_group.go
@@ -42,6 +42,7 @@ func resourceGroup() *schema.Resource {
 			"description": {
 				Type:        schema.TypeString,
 				Optional:    true,
+				Computed:    true,
 				Description: "The group description.",
 			},
 			"name": {
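The data sources earlier in this diff and `resource_instance.go` below move their read functions from `ReadContext` to `ReadWithoutTimeout`. A minimal sketch of that wiring, assuming the stock terraform-plugin-sdk/v2 behavior where `ReadWithoutTimeout` runs the read without the SDK-imposed timeout deadline (which matters when a read pages through many instances or databases); the data source shown is hypothetical.

```go
package example

import (
	"context"

	"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

// dataSourceExample uses ReadWithoutTimeout so a long-running read is not
// cancelled by the SDK's default read timeout.
func dataSourceExample() *schema.Resource {
	return &schema.Resource{
		Description:        "Example data source.",
		ReadWithoutTimeout: dataSourceExampleRead,
		Schema: map[string]*schema.Schema{
			"name": {
				Type:     schema.TypeString,
				Required: true,
			},
		},
	}
}

func dataSourceExampleRead(_ context.Context, d *schema.ResourceData, _ interface{}) diag.Diagnostics {
	d.SetId(d.Get("name").(string))
	return nil
}
```
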
diff --git a/provider/resource_instance.go b/provider/resource_instance.go
index 4e6d3fd..33768e7 100644
--- a/provider/resource_instance.go
+++ b/provider/resource_instance.go
@@ -1,7 +1,6 @@
 package provider
 
 import (
-	"bytes"
 	"context"
 	"fmt"
 	"regexp"
@@ -21,11 +20,11 @@ import (
 
 func resourceInstance() *schema.Resource {
 	return &schema.Resource{
-		Description:   "The instance resource.",
-		CreateContext: resourceInstanceCreate,
-		ReadContext:   resourceInstanceRead,
-		UpdateContext: resourceInstanceUpdate,
-		DeleteContext: resourceInstanceDelete,
+		Description:        "The instance resource.",
+		CreateContext:      resourceInstanceCreate,
+		ReadWithoutTimeout: resourceInstanceRead,
+		UpdateContext:      resourceInstanceUpdate,
+		DeleteContext:      resourceInstanceDelete,
 		Importer: &schema.ResourceImporter{
 			StateContext: schema.ImportStatePassthroughContext,
 		},
@@ -89,12 +88,14 @@ func resourceInstance() *schema.Resource {
 			"external_link": {
 				Type:        schema.TypeString,
 				Optional:    true,
-				Default:     "",
+				Computed:    true,
+				Default:     nil,
 				Description: "The external console URL managing this instance (e.g. AWS RDS console, your in-house DB instance console)",
 			},
 			"sync_interval": {
 				Type:        schema.TypeInt,
 				Optional:    true,
+				Default:     0,
 				Description: "How often the instance is synced in seconds. Default 0, means never sync.",
 			},
 			"maximum_connections": {
@@ -300,6 +301,10 @@ func resourceInstanceCreate(ctx context.Context, d *schema.ResourceData, m inter
 		}
 	}
 
+	tflog.Debug(ctx, "[upsert instance] instance created, start to sync schema", map[string]interface{}{
+		"instance": instanceName,
+	})
+
 	if err := c.SyncInstanceSchema(ctx, instanceName); err != nil {
 		diags = append(diags, diag.Diagnostic{
 			Severity: diag.Warning,
@@ -309,11 +314,19 @@ func resourceInstanceCreate(ctx context.Context, d *schema.ResourceData, m inter
 	}
 
 	d.SetId(instanceName)
 
+	tflog.Debug(ctx, "[upsert instance] sync schema finished. now reading instance", map[string]interface{}{
+		"instance": instanceName,
+	})
+
 	diag := resourceInstanceRead(ctx, d, m)
 	if diag != nil {
 		diags = append(diags, diag...)
 	}
 
+	tflog.Debug(ctx, "[upsert instance] upsert instance finished", map[string]interface{}{
+		"instance": instanceName,
+	})
+
 	return diags
 }
 
@@ -326,7 +339,11 @@ func resourceInstanceRead(ctx context.Context, d *schema.ResourceData, m interfa
 		return diag.FromErr(err)
 	}
 
-	return setInstanceMessage(ctx, c, d, instance)
+	resp := setInstanceMessage(ctx, c, d, instance)
+	tflog.Debug(ctx, "[read instance] read instance finished", map[string]interface{}{
+		"instance": instance.Name,
+	})
+	return resp
 }
 
 func resourceInstanceUpdate(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
@@ -443,6 +460,10 @@ func setInstanceMessage(
 	d *schema.ResourceData,
 	instance *v1pb.Instance,
 ) diag.Diagnostics {
+	tflog.Debug(ctx, "[read instance] start reading instance", map[string]interface{}{
+		"instance": instance.Name,
+	})
+
 	instanceID, err := internal.GetInstanceID(instance.Name)
 	if err != nil {
 		return diag.FromErr(err)
@@ -485,7 +506,11 @@ func setInstanceMessage(
 		return diag.Errorf("cannot set data_sources for instance: %s", err.Error())
 	}
 
-	databases, err := client.ListDatabase(ctx, instanceID, "")
+	tflog.Debug(ctx, "[read instance] start set instance databases", map[string]interface{}{
+		"instance": instance.Name,
+	})
+
+	databases, err := client.ListDatabase(ctx, instance.Name, "")
 	if err != nil {
 		return diag.FromErr(err)
 	}
@@ -530,16 +555,8 @@ func flattenDataSourceList(d *schema.ResourceData, dataSourceList []*v1pb.DataSo
 }
 
 func dataSourceHash(rawDataSource interface{}) int {
-	var buf bytes.Buffer
 	dataSource := rawDataSource.(map[string]interface{})
-
-	if v, ok := dataSource["id"].(string); ok {
-		_, _ = buf.WriteString(fmt.Sprintf("%s-", v))
-	}
-	if v, ok := dataSource["type"].(string); ok {
-		_, _ = buf.WriteString(fmt.Sprintf("%s-", v))
-	}
-	return internal.ToHashcodeInt(buf.String())
+	return internal.ToHashcodeInt(dataSource["id"].(string))
 }
 
 func convertDataSourceCreateList(d *schema.ResourceData, validate bool) ([]*v1pb.DataSource, error) {
diff --git a/provider/resource_policy.go b/provider/resource_policy.go
index f174834..c9bc8d3 100644
--- a/provider/resource_policy.go
+++ b/provider/resource_policy.go
@@ -52,6 +52,7 @@ func resourcePolicy() *schema.Resource {
 				Required: true,
 				ValidateFunc: validation.StringInSlice([]string{
 					v1pb.PolicyType_MASKING_EXCEPTION.String(),
+					v1pb.PolicyType_MASKING_RULE.String(),
 				}, false),
 				Description: "The policy type.",
 			},
@@ -73,6 +74,7 @@ func resourcePolicy() *schema.Resource {
 				Description: "Decide if the policy should inherit from the parent.",
 			},
 			"masking_exception_policy": getMaskingExceptionPolicySchema(false),
+			"global_masking_policy":    getGlobalMaskingPolicySchema(false),
 		},
 	}
 }
@@ -123,7 +125,8 @@ func resourcePolicyCreate(ctx context.Context, d *schema.ResourceData, m interfa
 		Type: policyType,
 	}
 
-	if policyType == v1pb.PolicyType_MASKING_EXCEPTION {
+	switch policyType {
+	case v1pb.PolicyType_MASKING_EXCEPTION:
 		maskingExceptionPolicy, err := convertToMaskingExceptionPolicy(d)
 		if err != nil {
 			return diag.FromErr(err)
@@ -131,6 +134,16 @@ func resourcePolicyCreate(ctx context.Context, d *schema.ResourceData, m interfa
 		patch.Policy = &v1pb.Policy_MaskingExceptionPolicy{
 			MaskingExceptionPolicy: maskingExceptionPolicy,
 		}
+	case v1pb.PolicyType_MASKING_RULE:
+		maskingRulePolicy, err := convertToMaskingRulePolicy(d)
+		if err != nil {
+			return diag.FromErr(err)
+		}
+		patch.Policy = &v1pb.Policy_MaskingRulePolicy{
+			MaskingRulePolicy: maskingRulePolicy,
+		}
+	default:
+		return diag.Errorf("unsupport policy type: %v", policyName)
 	}
 
 	var diags diag.Diagnostics
@@ -187,6 +200,16 @@ func resourcePolicyUpdate(ctx context.Context, d *schema.ResourceData, m interfa
 			MaskingExceptionPolicy: maskingExceptionPolicy,
 		}
 	}
+	if d.HasChange("global_masking_policy") {
+		updateMasks = append(updateMasks, "payload")
+		maskingRulePolicy, err := convertToMaskingRulePolicy(d)
+		if err != nil {
+			return diag.FromErr(err)
+		}
+		patch.Policy = &v1pb.Policy_MaskingRulePolicy{
+			MaskingRulePolicy: maskingRulePolicy,
+		}
+	}
 
 	var diags diag.Diagnostics
 	if len(updateMasks) > 0 {
@@ -208,6 +231,34 @@ func resourcePolicyUpdate(ctx context.Context, d *schema.ResourceData, m interfa
 	return diags
 }
 
+func convertToMaskingRulePolicy(d *schema.ResourceData) (*v1pb.MaskingRulePolicy, error) {
+	rawList, ok := d.Get("global_masking_policy").([]interface{})
+	if !ok || len(rawList) != 1 {
+		return nil, errors.Errorf("invalid global_masking_policy")
+	}
+
+	raw := rawList[0].(map[string]interface{})
+	ruleList, ok := raw["rules"].([]interface{})
+	if !ok {
+		return nil, errors.Errorf("invalid masking rules")
+	}
+
+	policy := &v1pb.MaskingRulePolicy{}
+
+	for _, rule := range ruleList {
+		rawRule := rule.(map[string]interface{})
+		policy.Rules = append(policy.Rules, &v1pb.MaskingRulePolicy_MaskingRule{
+			Id:           rawRule["id"].(string),
+			SemanticType: rawRule["semantic_type"].(string),
+			Condition: &expr.Expr{
+				Expression: rawRule["condition"].(string),
+			},
+		})
+	}
+
+	return policy, nil
+}
+
 func convertToMaskingExceptionPolicy(d *schema.ResourceData) (*v1pb.MaskingExceptionPolicy, error) {
 	rawList, ok := d.Get("masking_exception_policy").([]interface{})
 	if !ok || len(rawList) != 1 {
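For orientation, `convertToMaskingRulePolicy` above receives the `global_masking_policy` block from the SDK in roughly the shape sketched below — a one-element list wrapping a `rules` list. The values are copied from `examples/setup/data_masking.tf`; the variable itself is illustrative only.

```go
package example

// exampleGlobalMaskingPolicyConfig is the raw value d.Get("global_masking_policy")
// would return for the data_masking.tf example: a single-element list whose map
// carries the "rules" list consumed by convertToMaskingRulePolicy.
var exampleGlobalMaskingPolicyConfig = []interface{}{
	map[string]interface{}{
		"rules": []interface{}{
			map[string]interface{}{
				"id":            "69df1d15-abe5-4bc9-be38-f2a4bef3f7e0",
				"semantic_type": "bb.default-partial",
				"condition":     `environment_id in ["test"]`,
			},
			map[string]interface{}{
				"id":            "90adb734-0808-4c9f-b281-1f76f7a1a29a",
				"semantic_type": "bb.default",
				"condition":     `instance_id in ["prod-sample-instance"]`,
			},
		},
	},
}
```
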
diff --git a/provider/resource_project.go b/provider/resource_project.go
index 8f8b735..ce2ab02 100644
--- a/provider/resource_project.go
+++ b/provider/resource_project.go
@@ -25,11 +25,11 @@ var defaultProj = fmt.Sprintf("%sdefault", internal.ProjectNamePrefix)
 
 func resourceProjct() *schema.Resource {
 	return &schema.Resource{
-		Description:   "The project resource.",
-		CreateContext: resourceProjectCreate,
-		ReadContext:   resourceProjectRead,
-		UpdateContext: resourceProjectUpdate,
-		DeleteContext: resourceProjectDelete,
+		Description:          "The project resource.",
+		CreateWithoutTimeout: resourceProjectCreate,
+		ReadWithoutTimeout:   resourceProjectRead,
+		UpdateWithoutTimeout: resourceProjectUpdate,
+		DeleteContext:        resourceProjectDelete,
 		Importer: &schema.ResourceImporter{
 			StateContext: schema.ImportStatePassthroughContext,
 		},
@@ -65,37 +65,37 @@ func resourceProjct() *schema.Resource {
 			"allow_modify_statement": {
 				Type:        schema.TypeBool,
 				Optional:    true,
-				Default:     false,
+				Computed:    true,
 				Description: "Allow modifying statement after issue is created.",
 			},
 			"auto_resolve_issue": {
 				Type:        schema.TypeBool,
 				Optional:    true,
-				Default:     false,
+				Computed:    true,
 				Description: "Enable auto resolve issue.",
 			},
 			"enforce_issue_title": {
 				Type:        schema.TypeBool,
 				Optional:    true,
-				Default:     false,
+				Computed:    true,
 				Description: "Enforce issue title created by user instead of generated by Bytebase.",
 			},
 			"auto_enable_backup": {
 				Type:        schema.TypeBool,
 				Optional:    true,
-				Default:     false,
+				Computed:    true,
 				Description: "Whether to automatically enable backup.",
 			},
 			"skip_backup_errors": {
 				Type:        schema.TypeBool,
 				Optional:    true,
-				Default:     false,
+				Computed:    true,
 				Description: "Whether to skip backup errors and continue the data migration.",
 			},
 			"postgres_database_tenant_mode": {
 				Type:        schema.TypeBool,
 				Optional:    true,
-				Default:     false,
+				Computed:    true,
 				Description: "Whether to enable the database tenant mode for PostgreSQL. If enabled, the issue will be created with the pre-appended \"set role \" statement.",
 			},
 			"databases": getDatabasesSchema(false),
@@ -216,6 +216,10 @@ func resourceProjectCreate(ctx context.Context, d *schema.ResourceData, m interf
 
 	d.SetId(projectName)
 
+	tflog.Debug(ctx, "[upsert project] project created, start to update databases", map[string]interface{}{
+		"project": projectName,
+	})
+
 	if diag := updateDatabasesInProject(ctx, d, c, d.Id()); diag != nil {
 		diags = append(diags, diag...)
 		return diags
@@ -225,11 +229,19 @@ func resourceProjectCreate(ctx context.Context, d *schema.ResourceData, m interf
 		return diags
 	}
 
+	tflog.Debug(ctx, "[upsert project] start reading project", map[string]interface{}{
+		"project": projectName,
+	})
+
 	diag := resourceProjectRead(ctx, d, m)
 	if diag != nil {
 		diags = append(diags, diag...)
 	}
 
+	tflog.Debug(ctx, "[upsert project] upsert project finished", map[string]interface{}{
+		"project": projectName,
+	})
+
 	return diags
 }
 
@@ -346,7 +358,12 @@ func resourceProjectRead(ctx context.Context, d *schema.ResourceData, m interfac
 		return diag.FromErr(err)
 	}
 
-	return setProject(ctx, c, d, project)
+	resp := setProject(ctx, c, d, project)
+	tflog.Debug(ctx, "[read project] read project finished", map[string]interface{}{
+		"project": project.Name,
+	})
+
+	return resp
 }
 
 func resourceProjectDelete(ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
@@ -365,11 +382,10 @@ func resourceProjectDelete(ctx context.Context, d *schema.ResourceData, m interf
 	return diags
 }
 
-const batchSize = 50
+const batchSize = 100
 
 func updateDatabasesInProject(ctx context.Context, d *schema.ResourceData, client api.Client, projectName string) diag.Diagnostics {
-	filter := fmt.Sprintf(`project == "%s"`, projectName)
-	databases, err := client.ListDatabase(ctx, "-", filter)
+	databases, err := client.ListDatabase(ctx, projectName, "")
 	if err != nil {
 		return diag.Errorf("failed to list database with error: %v", err.Error())
 	}
@@ -391,52 +407,60 @@ func updateDatabasesInProject(ctx context.Context, d *schema.ResourceData, clien
 			return diag.Errorf("invalid database full name: %v", err.Error())
 		}
 
-		labels := map[string]string{}
-		for key, val := range obj["labels"].(map[string]interface{}) {
-			labels[key] = val.(string)
-		}
-
 		updatedDBMap[dbName] = &v1pb.Database{
 			Name:    dbName,
 			Project: projectName,
-			Labels:  labels,
 		}
-		if _, ok := existedDBMap[dbName]; !ok {
+		_, ok := existedDBMap[dbName]
+		if !ok {
+			// new assigned database
 			batchTransferDatabases = append(batchTransferDatabases, &v1pb.UpdateDatabaseRequest{
 				Database: updatedDBMap[dbName],
 				UpdateMask: &fieldmaskpb.FieldMask{
 					Paths: []string{"project"},
 				},
 			})
+		} else {
+			delete(existedDBMap, dbName)
 		}
 	}
 
+	tflog.Debug(ctx, "[transfer databases] batch transfer databases to project", map[string]interface{}{
+		"project":   projectName,
+		"databases": len(batchTransferDatabases),
+	})
+
 	for i := 0; i < len(batchTransferDatabases); i += batchSize {
 		end := i + batchSize
 		if end > len(batchTransferDatabases) {
 			end = len(batchTransferDatabases)
 		}
 		batch := batchTransferDatabases[i:end]
-		tflog.Info(ctx, fmt.Sprintf("transfer databases for range [%d, %d]", i, end))
+
+		startTime := time.Now()
+
 		if _, err := client.BatchUpdateDatabases(ctx, &v1pb.BatchUpdateDatabasesRequest{
 			Requests: batch,
 			Parent:   "instances/-",
 		}); err != nil {
 			return diag.Errorf("failed to assign databases to project %s with error: %v", projectName, err.Error())
 		}
-	}
 
-	for _, database := range updatedDBMap {
-		if len(database.Labels) > 0 {
-			if _, err := client.UpdateDatabase(ctx, database, []string{"label"}); err != nil {
-				return diag.Errorf("failed to update database %s with error: %v", database.Name, err.Error())
-			}
-		}
+		tflog.Debug(ctx, "[transfer databases]", map[string]interface{}{
+			"count":   end + 1 - i,
+			"project": projectName,
+			"ms":      time.Since(startTime).Milliseconds(),
+		})
 	}
 
-	unassignDatabases := []*v1pb.UpdateDatabaseRequest{}
-	for _, db := range existedDBMap {
-		if _, ok := updatedDBMap[db.Name]; !ok {
+	if len(existedDBMap) > 0 {
+		tflog.Debug(ctx, "[transfer databases] batch unassign databases", map[string]interface{}{
+			"project":   projectName,
+			"databases": len(existedDBMap),
+		})
+
+		startTime := time.Now()
+		unassignDatabases := []*v1pb.UpdateDatabaseRequest{}
+		for _, db := range existedDBMap {
 			// move db to default project
 			db.Project = defaultProj
 			unassignDatabases = append(unassignDatabases, &v1pb.UpdateDatabaseRequest{
@@ -446,20 +470,16 @@ func updateDatabasesInProject(ctx context.Context, d *schema.ResourceData, clien
 				},
 			})
 		}
-	}
-	for i := 0; i < len(unassignDatabases); i += batchSize {
-		end := i + batchSize
-		if end > len(unassignDatabases) {
-			end = len(unassignDatabases)
-		}
-		batch := unassignDatabases[i:end]
-		tflog.Info(ctx, fmt.Sprintf("unassign databases for range [%d, %d]", i, end))
 		if _, err := client.BatchUpdateDatabases(ctx, &v1pb.BatchUpdateDatabasesRequest{
-			Requests: batch,
+			Requests: unassignDatabases,
 			Parent:   "instances/-",
 		}); err != nil {
 			return diag.Errorf("failed to move databases to default project with error: %v", err.Error())
 		}
+		tflog.Debug(ctx, "[unassign databases]", map[string]interface{}{
+			"count": len(unassignDatabases),
+			"ms":    time.Since(startTime).Milliseconds(),
+		})
 	}
 
 	return nil
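The reworked `updateDatabasesInProject` batches its `BatchUpdateDatabases` calls in chunks of `batchSize` (now 100) and only transfers databases that are newly assigned. The chunking itself reduces to the following standalone sketch, independent of the Bytebase client.

```go
package example

// chunk splits items into consecutive batches of at most size elements,
// mirroring how updateDatabasesInProject pages its BatchUpdateDatabases calls.
func chunk[T any](items []T, size int) [][]T {
	var batches [][]T
	for i := 0; i < len(items); i += size {
		end := i + size
		if end > len(items) {
			end = len(items)
		}
		batches = append(batches, items[i:end])
	}
	return batches
}
```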