Commit
Merge branch 'master' into fix/bootstrap-default-region
nbugden authored Apr 5, 2024
2 parents 97c5c8f + 0a03623 commit eca67c3
Showing 9 changed files with 208 additions and 102 deletions.
4 changes: 2 additions & 2 deletions 1-org/envs/shared/README.md
@@ -39,8 +39,8 @@
| domains\_to\_allow | The list of domains to allow users from in IAM. |
| interconnect\_project\_id | The Dedicated Interconnect project ID |
| interconnect\_project\_number | The Dedicated Interconnect project number |
| logs\_export\_logbucket\_linked\_dataset\_name | The resource name of the Log Bucket linked BigQuery dataset created for Log Analytics. See https://cloud.google.com/logging/docs/log-analytics . |
| logs\_export\_logbucket\_name | The log bucket for destination of log exports. See https://cloud.google.com/logging/docs/routing/overview#buckets . |
| logs\_export\_project\_linked\_dataset\_name | The resource name of the Log Bucket linked BigQuery dataset for the project destination. |
| logs\_export\_project\_logbucket\_name | The resource name for the Log Bucket created for the project destination. |
| logs\_export\_pubsub\_topic | The Pub/Sub topic for destination of log exports |
| logs\_export\_storage\_bucket\_name | The storage bucket for destination of log exports |
| network\_folder\_name | The network folder name. |
14 changes: 7 additions & 7 deletions 1-org/envs/shared/log_sinks.tf
@@ -72,16 +72,16 @@ module "logs_export" {
}

/******************************************
Send logs to Logbucket
Send logs to Logging project
*****************************************/
logbucket_options = {
logging_sink_name = "sk-c-logging-logbkt"
project_options = {
logging_sink_name = "sk-c-logging-prj"
logging_sink_filter = local.logs_filter
name = "logbkt-org-logs-${random_string.suffix.result}"
log_bucket_id = "AggregatedLogs"
log_bucket_description = "Project destination log bucket for aggregated logs"
location = coalesce(var.log_export_storage_location, local.default_region)
enable_analytics = true
linked_dataset_id = "ds_c_logbkt_analytics"
linked_dataset_description = "BigQuery Dataset for Logbucket analytics"
linked_dataset_id = "ds_c_prj_aggregated_logs_analytics"
linked_dataset_description = "Project destination BigQuery Dataset for Logbucket analytics"
}
}

16 changes: 8 additions & 8 deletions 1-org/envs/shared/outputs.tf
@@ -114,21 +114,21 @@ output "logs_export_storage_bucket_name" {
description = "The storage bucket for destination of log exports"
}

output "logs_export_logbucket_name" {
value = module.logs_export.logbucket_destination_name
description = "The log bucket for destination of log exports. See https://cloud.google.com/logging/docs/routing/overview#buckets ."
output "logs_export_project_logbucket_name" {
description = "The resource name for the Log Bucket created for the project destination."
value = module.logs_export.project_logbucket_name
}

output "logs_export_project_linked_dataset_name" {
description = "The resource name of the Log Bucket linked BigQuery dataset for the project destination."
value = module.logs_export.project_linked_dataset_name
}

output "billing_sink_names" {
value = module.logs_export.billing_sink_names
description = "The name of the sinks under billing account level."
}

output "logs_export_logbucket_linked_dataset_name" {
value = module.logs_export.logbucket_linked_dataset_name
description = "The resource name of the Log Bucket linked BigQuery dataset created for Log Analytics. See https://cloud.google.com/logging/docs/log-analytics ."
}

output "tags" {
value = local.tags_output
description = "Tag Values to be applied on next steps."
6 changes: 3 additions & 3 deletions 1-org/modules/centralized-logging/README.md
@@ -61,9 +61,9 @@ module "logging_logbucket" {
|------|-------------|------|---------|:--------:|
| billing\_account | Billing Account ID used in case sinks are under billing account level. Format 000000-000000-000000. | `string` | `null` | no |
| enable\_billing\_account\_sink | If true, a log router sink will be created for the billing account. The billing\_account variable cannot be null. | `bool` | `false` | no |
| logbucket\_options | Destination LogBucket options:<br>- name: The name of the log bucket to be created and used for log entries matching the filter.<br>- logging\_sink\_name: The name of the log sink to be created.<br>- logging\_sink\_filter: The filter to apply when exporting logs. Only log entries that match the filter are exported. Default is "" which exports all logs.<br>- location: The location of the log bucket. Default: global.<br>- enable\_analytics: Whether or not Log Analytics is enabled. A Log bucket with Log Analytics enabled can be queried in the Log Analytics page using SQL queries. Cannot be disabled once enabled.<br>- linked\_dataset\_id: The ID of the linked BigQuery dataset. A valid linked dataset ID must only have alphanumeric characters and underscores within it and have up to 100 characters.<br>- linked\_dataset\_description: A user-friendly description of the linked BigQuery dataset. The maximum length of the description is 8000 characters.<br>- retention\_days: The number of days data should be retained for the log bucket. Default 30. | <pre>object({<br> name = optional(string, null)<br> logging_sink_name = optional(string, null)<br> logging_sink_filter = optional(string, "")<br> location = optional(string, "global")<br> enable_analytics = optional(bool, true)<br> linked_dataset_id = optional(string, null)<br> linked_dataset_description = optional(string, null)<br> retention_days = optional(number, 30)<br> })</pre> | `null` | no |
| logging\_destination\_project\_id | The ID of the project that will have the resources where the logs will be created. | `string` | n/a | yes |
| logging\_project\_key | (Optional) The key of logging destination project if it is inside resources map. It is mandatory when resource\_type = project and logging\_target\_type = logbucket. | `string` | `""` | no |
| project\_options | Destination Project options:<br>- logging\_sink\_name: The name of the log sink to be created.<br>- logging\_sink\_filter: The filter to apply when exporting logs. Only log entries that match the filter are exported. Default is "" which exports all logs.<br>- log\_bucket\_id: ID of the log bucket created to store the logs exported to the project.<br>- log\_bucket\_description: Description of the log bucket created to store the logs exported to the project.<br>- location: The location of the log bucket. Default: global.<br>- enable\_analytics: Whether or not Log Analytics is enabled in the \_Default log bucket. A Log bucket with Log Analytics enabled can be queried in the Log Analytics page using SQL queries. Cannot be disabled once enabled.<br>- retention\_days: The number of days data should be retained for the \_Default log bucket. Default 30.<br>- linked\_dataset\_id: The ID of the linked BigQuery dataset for the \_Default log bucket. A valid linked dataset ID must only have alphanumeric characters and underscores within it and have up to 100 characters.<br>- linked\_dataset\_description: A user-friendly description of the linked BigQuery dataset for the \_Default log bucket. The maximum length of the description is 8000 characters. | <pre>object({<br> logging_sink_name = optional(string, null)<br> logging_sink_filter = optional(string, "")<br> log_bucket_id = optional(string, null)<br> log_bucket_description = optional(string, null)<br> location = optional(string, "global")<br> enable_analytics = optional(bool, true)<br> retention_days = optional(number, 30)<br> linked_dataset_id = optional(string, null)<br> linked_dataset_description = optional(string, null)<br> })</pre> | `null` | no |
| pubsub\_options | Destination Pubsub options:<br>- topic\_name: The name of the pubsub topic to be created and used for log entries matching the filter.<br>- logging\_sink\_name: The name of the log sink to be created.<br>- logging\_sink\_filter: The filter to apply when exporting logs. Only log entries that match the filter are exported. Default is "" which exports all logs.<br>- create\_subscriber: Whether to create a subscription to the topic that was created and used for log entries matching the filter. If 'true', a pull subscription is created along with a service account that is granted roles/pubsub.subscriber and roles/pubsub.viewer to the topic. | <pre>object({<br> topic_name = optional(string, null)<br> logging_sink_name = optional(string, null)<br> logging_sink_filter = optional(string, "")<br> create_subscriber = optional(bool, true)<br> })</pre> | `null` | no |
| resource\_type | Resource type of the resource that will export logs to destination. Must be: project, organization, or folder. | `string` | n/a | yes |
| resources | Export logs from the specified resources. | `map(string)` | n/a | yes |
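For orientation, the sketch below shows roughly how the new `project_options` input might be passed to this module. It is a minimal illustration only: the module path, the variable names (`var.org_id`, `var.logging_project_id`), and the example values are assumptions, not part of this change.

module "centralized_logging" {
  source = "../../modules/centralized-logging" # assumed relative path to this module

  resources                      = { org = var.org_id }  # example: export logs from the organization
  resource_type                  = "organization"
  logging_destination_project_id = var.logging_project_id # assumed variable holding the logging project ID

  project_options = {
    logging_sink_name   = "sk-to-prj-logs"
    logging_sink_filter = ""                # empty filter exports all logs
    log_bucket_id       = "AggregatedLogs"  # example bucket ID for the project destination
    location            = "us-central1"
    enable_analytics    = true
    retention_days      = 30
    linked_dataset_id   = "ds_aggregated_logs_analytics"
  }
}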
@@ -74,8 +74,8 @@ module "logging_logbucket" {
| Name | Description |
|------|-------------|
| billing\_sink\_names | Map of log sink names with billing suffix |
| logbucket\_destination\_name | The resource name for the destination Log Bucket. |
| logbucket\_linked\_dataset\_name | The resource name of the Log Bucket linked BigQuery dataset. |
| project\_linked\_dataset\_name | The resource name of the Log Bucket linked BigQuery dataset for the project destination. |
| project\_logbucket\_name | The resource name for the Log Bucket created for the project destination. |
| pubsub\_destination\_name | The resource name for the destination Pub/Sub. |
| storage\_destination\_name | The resource name for the destination Storage. |

150 changes: 106 additions & 44 deletions 1-org/modules/centralized-logging/main.tf
@@ -15,9 +15,8 @@
*/

locals {
value_first_resource = values(var.resources)[0]
logbucket_sink_member = { for k, v in var.resources : k => v if k != var.logging_project_key }
include_children = (var.resource_type == "organization" || var.resource_type == "folder")
value_first_resource = values(var.resources)[0]
include_children = (var.resource_type == "organization" || var.resource_type == "folder")

# Create an intermediate list with all resources X all destinations
exports_list = flatten([
Expand All @@ -38,40 +37,41 @@ locals {
log_exports = {
for v in local.exports_list : "${v.res}_${v.type}" => v
}

destinations_options = {
pub = var.pubsub_options
sto = var.storage_options
lbk = var.logbucket_options
prj = var.project_options
}

logging_sink_name_map = {
pub = try("sk-to-tp-logs-${var.logging_destination_project_id}", "sk-to-tp-logs")
sto = try("sk-to-bkt-logs-${var.logging_destination_project_id}", "sk-to-bkt-logs")
lbk = try("sk-to-logbkt-logs-${var.logging_destination_project_id}", "sk-to-logbkt-logs")
prj = try("sk-to-prj-logs-${var.logging_destination_project_id}", "sk-to-prj-logs")
}

logging_tgt_name = {
pub = "${local.logging_tgt_prefix.pub}${random_string.suffix.result}"
sto = "${local.logging_tgt_prefix.sto}${random_string.suffix.result}"
lbk = "${local.logging_tgt_prefix.lbk}${random_string.suffix.result}"
prj = ""
}

destination_uri_map = {
pub = try(module.destination_pubsub[0].destination_uri, "")
sto = try(module.destination_storage[0].destination_uri, "")
lbk = try(module.destination_logbucket[0].destination_uri, "")
prj = try(module.destination_project[0].destination_uri, "")
}

destination_resource_name = merge(
var.pubsub_options != null ? { pub = module.destination_pubsub[0].resource_name } : {},
var.storage_options != null ? { sto = module.destination_storage[0].resource_name } : {},
var.logbucket_options != null ? { lbk = module.destination_logbucket[0].resource_name } : {}
var.project_options != null ? { prj = module.destination_project[0].project } : {}
)

logging_tgt_prefix = {
pub = "tp-logs-"
sto = try("bkt-logs-${var.logging_destination_project_id}-", "bkt-logs-")
lbk = "logbkt-logs-"
prj = ""
}
}

@@ -83,7 +83,7 @@ resource "random_string" "suffix" {

module "log_export" {
source = "terraform-google-modules/log-export/google"
version = "~> 7.4"
version = "~> 7.8"

for_each = local.log_exports

@@ -96,7 +96,6 @@ module "log_export" {
include_children = local.include_children
}


module "log_export_billing" {
source = "terraform-google-modules/log-export/google"
version = "~> 7.4"
@@ -118,52 +117,115 @@ resource "time_sleep" "wait_sa_iam_membership" {
]
}

#-------------------------#
# Send logs to Log Bucket #
#-------------------------#
module "destination_logbucket" {
source = "terraform-google-modules/log-export/google//modules/logbucket"
version = "~> 7.7"
#--------------------------#
# Send logs to Log project #
#--------------------------#

count = var.logbucket_options != null ? 1 : 0
module "destination_project" {
source = "terraform-google-modules/log-export/google//modules/project"
version = "~> 7.8"
count = var.project_options != null ? 1 : 0

project_id = var.logging_destination_project_id
name = coalesce(var.logbucket_options.name, local.logging_tgt_name.lbk)
log_sink_writer_identity = module.log_export["${local.value_first_resource}_lbk"].writer_identity
location = var.logbucket_options.location
enable_analytics = var.logbucket_options.enable_analytics
linked_dataset_id = var.logbucket_options.linked_dataset_id
linked_dataset_description = var.logbucket_options.linked_dataset_description
retention_days = var.logbucket_options.retention_days
grant_write_permission_on_bkt = false
project_id = var.logging_destination_project_id
log_sink_writer_identity = module.log_export["${local.value_first_resource}_prj"].writer_identity
}
#-------------------------------------------#
# Log Bucket Service account IAM membership #
#-------------------------------------------#
resource "google_project_iam_member" "logbucket_sink_member" {
for_each = var.logbucket_options != null ? local.logbucket_sink_member : {}
#---------------------------------------------#
# Log Projects Service account IAM membership #
#---------------------------------------------#
resource "google_project_iam_member" "project_sink_member" {
for_each = var.project_options != null ? var.resources : {}

project = var.logging_destination_project_id
role = "roles/logging.bucketWriter"
role = "roles/logging.logWriter"

# Set permission only on sinks for this destination using
# module.log_export key "<resource>_<dest>"
member = module.log_export["${each.value}_lbk"].writer_identity
member = module.log_export["${each.value}_prj"].writer_identity
}
#----------------------------------------------#
# Send logs to Log project - Internal Log sink #
#----------------------------------------------#
module "internal_project_log_export" {
source = "terraform-google-modules/log-export/google"
version = "~> 7.8"
count = var.project_options != null ? 1 : 0

destination_uri = "logging.googleapis.com/projects/${var.logging_destination_project_id}/locations/${var.project_options.location}/buckets/${coalesce(var.project_options.log_bucket_id, "AggregatedLogs")}"
filter = var.project_options.logging_sink_filter
log_sink_name = "${coalesce(var.project_options.logging_sink_name, local.logging_sink_name_map["prj"])}-la"
parent_resource_id = var.logging_destination_project_id
parent_resource_type = "project"
unique_writer_identity = true
}

module "destination_aggregated_logs" {
source = "terraform-google-modules/log-export/google//modules/logbucket"
version = "~> 7.8"
count = var.project_options != null ? 1 : 0

project_id = var.logging_destination_project_id
name = coalesce(var.project_options.log_bucket_id, "AggregatedLogs")
log_sink_writer_identity = module.internal_project_log_export[0].writer_identity
location = var.project_options.location
enable_analytics = var.project_options.enable_analytics
linked_dataset_id = var.project_options.linked_dataset_id
linked_dataset_description = var.project_options.linked_dataset_description
retention_days = var.project_options.retention_days
grant_write_permission_on_bkt = false
}

#-------------------------------------------------#
# Send logs to Log project - update _Default sink #
#-------------------------------------------------#

data "google_client_config" "default" {
}

#------------------------------------------------------------------#
# Log Bucket Service account IAM membership for log_export_billing #
#------------------------------------------------------------------#
resource "google_project_iam_member" "logbucket_sink_member_billing" {
count = var.enable_billing_account_sink == true && var.logbucket_options != null ? 1 : 0
resource "terracurl_request" "exclude_external_logs" {
count = var.project_options != null ? 1 : 0

name = "exclude_external_logs"
url = "https://logging.googleapis.com/v2/projects/${var.logging_destination_project_id}/sinks/_Default?updateMask=exclusions"
method = "PUT"
response_codes = [200]
headers = {
Authorization = "Bearer ${data.google_client_config.default.access_token}"
Content-Type = "application/json",
}
request_body = <<EOF
{
"exclusions": [
{
"name": "exclude_external_logs",
"filter": "-logName : \"/${var.logging_destination_project_id}/\""
}
],
}
EOF

lifecycle {
ignore_changes = [
headers,
]
}
}

#---------------------------------------------------------------#
# Project Service account IAM membership for log_export_billing #
#---------------------------------------------------------------#
resource "google_project_iam_member" "project_sink_member_billing" {
count = var.enable_billing_account_sink == true && var.project_options != null ? 1 : 0

project = var.logging_destination_project_id
role = "roles/logging.bucketWriter"
role = "roles/logging.logWriter"

# Set permission only on sinks for this destination using
# module.log_export_billing key "<resource>_<dest>"
member = module.log_export_billing["lbk"].writer_identity
member = module.log_export_billing["prj"].writer_identity


depends_on = [
Expand All @@ -176,7 +238,7 @@ resource "google_project_iam_member" "logbucket_sink_member_billing" {
#----------------------#
module "destination_storage" {
source = "terraform-google-modules/log-export/google//modules/storage"
version = "~> 7.4"
version = "~> 7.8"

count = var.storage_options != null ? 1 : 0

@@ -217,7 +279,7 @@ resource "google_storage_bucket_iam_member" "storage_sink_member_billing" {


depends_on = [
google_project_iam_member.logbucket_sink_member_billing
google_project_iam_member.project_sink_member_billing
]
}

@@ -227,7 +289,7 @@ module "destination_pubsub" {
#----------------------#
module "destination_pubsub" {
source = "terraform-google-modules/log-export/google//modules/pubsub"
version = "~> 7.4"
version = "~> 7.8"

count = var.pubsub_options != null ? 1 : 0
