Polishing touches on PR
thogarty committed Feb 22, 2025
1 parent ebd1ab7 commit 63fb1b4
Showing 8 changed files with 73 additions and 78 deletions.
2 changes: 1 addition & 1 deletion docs/data-sources/fabric_stream_subscription.md
@@ -14,7 +14,7 @@ Additional Documentation:

```terraform
data "equinix_fabric_stream_subscription" "by_ids" {
  stream_id       = "<stream_id>"
  subscription_id = "<subscription_id>"
}
```
2 changes: 1 addition & 1 deletion docs/data-sources/fabric_stream_subscriptions.md
@@ -16,7 +16,7 @@ Additional Documentation:
data "equinix_fabric_stream_subscriptions" "all" {
stream_id = "<stream_id>"
pagination = {
limit = 10
limit = 10
offset = 0
}
}
64 changes: 32 additions & 32 deletions docs/resources/fabric_stream_subscription.md
@@ -14,11 +14,11 @@ Additional Documentation:

```terraform
resource "equinix_fabric_stream_subscription" "SPLUNK" {
  type        = "STREAM_SUBSCRIPTION"
  name        = "<name>"
  description = "<description>"
  stream_id   = "<stream_id>"
  enabled     = true
  filters = [{
    property = "/type"
    operator = "LIKE"
```
@@ -34,83 +34,83 @@ resource "equinix_fabric_stream_subscription" "SPLUNK" {
```terraform
  }
  sink = {
    type = "SPLUNK_HEC"
    uri  = "<splunk_uri>"
    settings = {
      event_index  = "<splunk_event_index>"
      metric_index = "<splunk_metric_index>"
      source       = "<splunk_source>"
    }
    credential = {
      type         = "ACCESS_TOKEN"
      access_token = "<splunk_access_token>"
    }
  }
}

resource "equinix_fabric_stream_subscription" "SLACK" {
  type        = "STREAM_SUBSCRIPTION"
  name        = "<name>"
  description = "<description>"
  stream_id   = "<stream_id>"
  enabled     = true
  sink = {
    type = "SLACK"
    uri  = "<slack_uri>"
  }
}
resource "equinix_fabric_stream_subscription" "PAGER_DUTY" {
type = "STREAM_SUBSCRIPTION"
name = "<name>"
type = "STREAM_SUBSCRIPTION"
name = "<name>"
description = "<description>"
stream_id = "<stream_id>"
enabled = true
stream_id = "<stream_id>"
enabled = true
sink = {
type = "PAGERDUTY"
host = "<pager_duty_host"
settings = {
transform_alerts = true
change_uri = "<pager_duty_change_uri>"
alert_uri = "<pager_duty_alert_uri>"
change_uri = "<pager_duty_change_uri>"
alert_uri = "<pager_duty_alert_uri>"
}
credential = {
type = "INTEGRATION_KEY"
type = "INTEGRATION_KEY"
integration_key = "<pager_duty_integration_key>"
}
}
}
resource "equinix_fabric_stream_subscription" "DATADOG" {
type = "STREAM_SUBSCRIPTION"
name = "<name>"
type = "STREAM_SUBSCRIPTION"
name = "<name>"
description = "<description>"
stream_id = "<stream_id>"
enabled = true
stream_id = "<stream_id>"
enabled = true
sink = {
type = "DATADOG"
host = "<datadog_host>"
settings = {
source = "Equinix"
source = "Equinix"
application_key = "<datadog_application_key>"
event_uri = "<datadog_event_uri>"
metric_uri = "<datadog_metric_uri>"
event_uri = "<datadog_event_uri>"
metric_uri = "<datadog_metric_uri>"
}
credential = {
type = "API_KEY"
type = "API_KEY"
api_key = "<datadog_api_key>"
}
}
}
resource "equinix_fabric_stream_subscription" "MSTEAMS" {
type = "STREAM_SUBSCRIPTION"
name = "<name>"
type = "STREAM_SUBSCRIPTION"
name = "<name>"
description = "<description>"
stream_id = "<stream_id>"
enabled = true
stream_id = "<stream_id>"
enabled = true
sink = {
type = "TEAMS"
uri = "<msteams_uri>"
uri = "<msteams_uri>"
}
}
```
@@ -1,4 +1,4 @@
data "equinix_fabric_stream_subscription" "by_ids" {
stream_id = "<stream_id>"
stream_id = "<stream_id>"
subscription_id = "<subscription_id>"
}
@@ -1,7 +1,7 @@
data "equinix_fabric_stream_subscriptions" "all" {
stream_id = "<stream_id>"
pagination = {
limit = 10
limit = 10
offset = 0
}
}
64 changes: 32 additions & 32 deletions examples/resources/equinix_fabric_stream_subscription/resource.tf
@@ -1,9 +1,9 @@
resource "equinix_fabric_stream_subscription" "SPLUNK" {
type = "STREAM_SUBSCRIPTION"
name = "<name>"
type = "STREAM_SUBSCRIPTION"
name = "<name>"
description = "<description>"
stream_id = "<stream_id>"
enabled = true
stream_id = "<stream_id>"
enabled = true
filters = [{
property = "/type"
operator = "LIKE"
@@ -19,83 +19,83 @@ resource "equinix_fabric_stream_subscription" "SPLUNK" {
```terraform
  }
  sink = {
    type = "SPLUNK_HEC"
    uri  = "<splunk_uri>"
    settings = {
      event_index  = "<splunk_event_index>"
      metric_index = "<splunk_metric_index>"
      source       = "<splunk_source>"
    }
    credential = {
      type         = "ACCESS_TOKEN"
      access_token = "<splunk_access_token>"
    }
  }
}

resource "equinix_fabric_stream_subscription" "SLACK" {
  type        = "STREAM_SUBSCRIPTION"
  name        = "<name>"
  description = "<description>"
  stream_id   = "<stream_id>"
  enabled     = true
  sink = {
    type = "SLACK"
    uri  = "<slack_uri>"
  }
}
resource "equinix_fabric_stream_subscription" "PAGER_DUTY" {
type = "STREAM_SUBSCRIPTION"
name = "<name>"
type = "STREAM_SUBSCRIPTION"
name = "<name>"
description = "<description>"
stream_id = "<stream_id>"
enabled = true
stream_id = "<stream_id>"
enabled = true
sink = {
type = "PAGERDUTY"
host = "<pager_duty_host"
settings = {
transform_alerts = true
change_uri = "<pager_duty_change_uri>"
alert_uri = "<pager_duty_alert_uri>"
change_uri = "<pager_duty_change_uri>"
alert_uri = "<pager_duty_alert_uri>"
}
credential = {
type = "INTEGRATION_KEY"
type = "INTEGRATION_KEY"
integration_key = "<pager_duty_integration_key>"
}
}
}

resource "equinix_fabric_stream_subscription" "DATADOG" {
type = "STREAM_SUBSCRIPTION"
name = "<name>"
type = "STREAM_SUBSCRIPTION"
name = "<name>"
description = "<description>"
stream_id = "<stream_id>"
enabled = true
stream_id = "<stream_id>"
enabled = true
sink = {
type = "DATADOG"
host = "<datadog_host>"
settings = {
source = "Equinix"
source = "Equinix"
application_key = "<datadog_application_key>"
event_uri = "<datadog_event_uri>"
metric_uri = "<datadog_metric_uri>"
event_uri = "<datadog_event_uri>"
metric_uri = "<datadog_metric_uri>"
}
credential = {
type = "API_KEY"
type = "API_KEY"
api_key = "<datadog_api_key>"
}
}
}

resource "equinix_fabric_stream_subscription" "MSTEAMS" {
type = "STREAM_SUBSCRIPTION"
name = "<name>"
type = "STREAM_SUBSCRIPTION"
name = "<name>"
description = "<description>"
stream_id = "<stream_id>"
enabled = true
stream_id = "<stream_id>"
enabled = true
sink = {
type = "TEAMS"
uri = "<msteams_uri>"
uri = "<msteams_uri>"
}
}

4 changes: 2 additions & 2 deletions internal/resources/fabric/stream_subscription/models.go
```diff
@@ -134,8 +134,8 @@ func (m *DataSourceAll) parse(ctx context.Context, streamSubscriptionsResponse *
 	var diags diag.Diagnostics

 	if len(streamSubscriptionsResponse.GetData()) < 1 {
-		diags.AddError("no data retrieved by streamSubscription subscriptions data source",
-			"either the account does not have any streamSubscription subscription data to pull or the combination of limit and offset needs to be updated")
+		diags.AddError("no data retrieved by stream subscriptions data source",
+			"either the account does not have any stream subscription data to pull or the combination of limit and offset needs to be updated")
 		return diags
 	}
```

11 changes: 3 additions & 8 deletions internal/resources/fabric/stream_subscription/resource.go
```diff
@@ -49,8 +49,8 @@ func (r *Resource) Create(
 	var plan ResourceModel
 	diags := req.Plan.Get(ctx, &plan)
-	resp.Diagnostics.Append(diags...)
-	if resp.Diagnostics.HasError() {
+	if diags.HasError() {
+		resp.Diagnostics.Append(diags...)
 		return
 	}
```
```diff
@@ -81,11 +81,6 @@
 		return
 	}

-	resp.Diagnostics.Append(diags...)
-	if diags.HasError() {
-		return
-	}
-
 	// Parse API response into the Terraform state
 	resp.Diagnostics.Append(plan.parse(ctx, streamChecked.(*fabricv4.StreamSubscription))...)
 	if resp.Diagnostics.HasError() {
```
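Taken together, the two hunks above tighten the diagnostics handling in Create: the plan-read diagnostics are appended to the response only when they actually carry an error, and the later redundant append/check is removed. Below is a minimal sketch of that terraform-plugin-framework pattern, with placeholder types (exampleResource, exampleModel) standing in for the provider's real Resource and ResourceModel; it is an illustration, not the provider's code.

```go
package example

import (
	"context"

	"github.com/hashicorp/terraform-plugin-framework/resource"
	"github.com/hashicorp/terraform-plugin-framework/types"
)

// exampleModel and exampleResource are placeholders; the provider's real
// ResourceModel and Resource types carry many more fields and methods.
type exampleModel struct {
	Name types.String `tfsdk:"name"`
}

type exampleResource struct{}

// Create shows the diagnostics flow from the hunks above: read the planned
// values, append the diagnostics to the response only when they contain an
// error (returning early in that case), and avoid a second redundant check.
func (r *exampleResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
	var plan exampleModel
	diags := req.Plan.Get(ctx, &plan)
	if diags.HasError() {
		resp.Diagnostics.Append(diags...)
		return
	}

	// ... call the backing API and parse its response into plan (elided) ...

	// Persist the planned/parsed values into Terraform state.
	resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...)
}
```

The provider's actual Create does more between those two steps (calling the Fabric API and parsing its response, as the context lines in the second hunk show); the sketch isolates only the diagnostics flow.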
```diff
@@ -490,7 +485,7 @@ func getDeleteWaiter(ctx context.Context, client *fabricv4.APIClient, streamID,
 	// deletedMarker is a terraform-provider-only value that is used by the waiter
 	// to indicate that the connection appears to be deleted successfully based on
 	// status code
-	deletedMarker := "tf-marker-for-deleted-connection"
+	deletedMarker := "tf-marker-for-deleted-stream-subscription"
 	return &retry.StateChangeConf{
 		Pending: []string{
 			string(fabricv4.STREAMSUBSCRIPTIONSTATE_DEPROVISIONING),
```
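The last hunk is truncated inside getDeleteWaiter's retry.StateChangeConf. For orientation, here is a hypothetical sketch of how such a delete waiter can use deletedMarker as its terminal state; the readState callback, the literal "DEPROVISIONING" string, and the timing values are assumptions for illustration, not the provider's actual implementation.

```go
package example

import (
	"context"
	"net/http"
	"time"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry"
)

// readState is a hypothetical stand-in for the provider's fabricv4 read call;
// it returns the subscription's current state plus the raw HTTP response so
// the waiter can recognize a 404 after deletion.
type readState func(ctx context.Context) (string, *http.Response, error)

// newDeleteWaiter sketches how a delete waiter can use a provider-only marker
// as its terminal state: it keeps polling while the API reports the resource
// as deprovisioning, and maps a 404 from the read call to deletedMarker so
// WaitForStateContext treats the resource as successfully deleted.
func newDeleteWaiter(ctx context.Context, read readState, deletedMarker string, timeout time.Duration) *retry.StateChangeConf {
	return &retry.StateChangeConf{
		Pending: []string{"DEPROVISIONING"}, // stand-in for fabricv4.STREAMSUBSCRIPTIONSTATE_DEPROVISIONING
		Target:  []string{deletedMarker},
		Refresh: func() (interface{}, string, error) {
			state, httpResp, err := read(ctx)
			if err != nil {
				if httpResp != nil && httpResp.StatusCode == http.StatusNotFound {
					// The subscription no longer exists; report the marker state.
					return struct{}{}, deletedMarker, nil
				}
				return nil, "", err
			}
			return state, state, nil
		},
		Timeout:    timeout,
		Delay:      10 * time.Second,
		MinTimeout: 5 * time.Second,
	}
}
```

A caller would then run WaitForStateContext(ctx) on the returned config and treat reaching deletedMarker as a completed delete.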
