Skip to content

Commit

Permalink
Release: 1.13.3
Browse files — browse the repository at this point in the history
  • Loading branch information
AWS committed Jan 6, 2025
1 parent cd751f4 commit 1bc55d3
Show file tree
Hide file tree
Showing 17 changed files with 89 additions and 53 deletions.
2 changes: 1 addition & 1 deletion VERSION
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1.13.2
1.13.3
8 changes: 4 additions & 4 deletions modules/aft-account-request-framework/vpc.tf
Original file line number Diff line number Diff line change
Expand Up @@ -202,13 +202,13 @@ resource "aws_internet_gateway" "aft-vpc-igw" {
}

resource "aws_eip" "aft-vpc-natgw-01" {
count = var.aft_enable_vpc ? 1 : 0
vpc = true
count = var.aft_enable_vpc ? 1 : 0
domain = "vpc"
}

resource "aws_eip" "aft-vpc-natgw-02" {
count = var.aft_enable_vpc ? 1 : 0
vpc = true
count = var.aft_enable_vpc ? 1 : 0
domain = "vpc"
}

resource "aws_nat_gateway" "aft-vpc-natgw-01" {
Expand Down
28 changes: 16 additions & 12 deletions modules/aft-code-repositories/codepipeline.tf
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,10 @@
##############################################################

resource "aws_codepipeline" "codecommit_account_request" {
count = local.vcs.is_codecommit ? 1 : 0
name = "ct-aft-account-request"
role_arn = aws_iam_role.account_request_codepipeline_role.arn
count = local.vcs.is_codecommit ? 1 : 0
name = "ct-aft-account-request"
role_arn = aws_iam_role.account_request_codepipeline_role.arn
pipeline_type = "V2"

artifact_store {
location = var.codepipeline_s3_bucket_name
Expand Down Expand Up @@ -111,9 +112,10 @@ resource "aws_cloudwatch_event_target" "account_request" {
##############################################################

resource "aws_codepipeline" "codestar_account_request" {
count = local.vcs.is_codecommit ? 0 : 1
name = "ct-aft-account-request"
role_arn = aws_iam_role.account_request_codepipeline_role.arn
count = local.vcs.is_codecommit ? 0 : 1
name = "ct-aft-account-request"
role_arn = aws_iam_role.account_request_codepipeline_role.arn
pipeline_type = "V2"

artifact_store {
location = var.codepipeline_s3_bucket_name
Expand Down Expand Up @@ -176,9 +178,10 @@ resource "aws_codepipeline" "codestar_account_request" {
##############################################################

resource "aws_codepipeline" "codecommit_account_provisioning_customizations" {
count = local.vcs.is_codecommit ? 1 : 0
name = "ct-aft-account-provisioning-customizations"
role_arn = aws_iam_role.account_provisioning_customizations_codepipeline_role.arn
count = local.vcs.is_codecommit ? 1 : 0
name = "ct-aft-account-provisioning-customizations"
role_arn = aws_iam_role.account_provisioning_customizations_codepipeline_role.arn
pipeline_type = "V2"

artifact_store {
location = var.codepipeline_s3_bucket_name
Expand Down Expand Up @@ -242,9 +245,10 @@ resource "aws_codepipeline" "codecommit_account_provisioning_customizations" {
##############################################################

resource "aws_codepipeline" "codestar_account_provisioning_customizations" {
count = local.vcs.is_codecommit ? 0 : 1
name = "ct-aft-account-provisioning-customizations"
role_arn = aws_iam_role.account_provisioning_customizations_codepipeline_role.arn
count = local.vcs.is_codecommit ? 0 : 1
name = "ct-aft-account-provisioning-customizations"
role_arn = aws_iam_role.account_provisioning_customizations_codepipeline_role.arn
pipeline_type = "V2"

artifact_store {
location = var.codepipeline_s3_bucket_name
Expand Down
2 changes: 1 addition & 1 deletion modules/aft-code-repositories/codestar.tf
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ resource "aws_codestarconnections_connection" "gitlab" {

resource "aws_codestarconnections_connection" "gitlabselfmanaged" {
count = local.vcs.is_gitlab_selfmanaged ? 1 : 0
name = "ct-aft-gitlab-selfmanaged-connection"
name = "ct-aft-gitlab-selfmanaged"
host_arn = aws_codestarconnections_host.gitlabselfmanaged[0].arn
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,10 @@
# SPDX-License-Identifier: Apache-2.0
#
resource "aws_codepipeline" "aft_codecommit_customizations_codepipeline" {
count = local.vcs.is_codecommit ? 1 : 0
name = "${var.account_id}-customizations-pipeline"
role_arn = data.aws_iam_role.aft_codepipeline_customizations_role.arn
count = local.vcs.is_codecommit ? 1 : 0
name = "${var.account_id}-customizations-pipeline"
role_arn = data.aws_iam_role.aft_codepipeline_customizations_role.arn
pipeline_type = "V2"

artifact_store {
location = data.aws_s3_bucket.aft_codepipeline_customizations_bucket.id
Expand Down Expand Up @@ -108,9 +109,10 @@ resource "aws_codepipeline" "aft_codecommit_customizations_codepipeline" {
}

resource "aws_codepipeline" "aft_codestar_customizations_codepipeline" {
count = local.vcs.is_codecommit ? 0 : 1
name = "${var.account_id}-customizations-pipeline"
role_arn = data.aws_iam_role.aft_codepipeline_customizations_role.arn
count = local.vcs.is_codecommit ? 0 : 1
name = "${var.account_id}-customizations-pipeline"
role_arn = data.aws_iam_role.aft_codepipeline_customizations_role.arn
pipeline_type = "V2"

artifact_store {
location = data.aws_s3_bucket.aft_codepipeline_customizations_bucket.id
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,9 @@ terraform {
dynamodb_table = "{{ dynamodb_table }}"
encrypt = "true"
kms_key_id = "{{ kms_key_id }}"
role_arn = "{{ aft_admin_role_arn }}"
assume_role {
role_arn = "{{ aft_admin_role_arn }}"
}
}
}
{% else -%}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ def _put_policy_on_role(
def role_policy_is_attached(
role_name: str, policy_arn: str, target_account_session: Session
) -> bool:
logger.info("Determining if {policy_arn} is attached to {role_name}")
logger.info(f"Determining if {policy_arn} is attached to {role_name}")
resource: IAMServiceResource = target_account_session.resource("iam")
role = resource.Role(role_name)
policy_iterator = role.attached_policies.all()
Expand Down
19 changes: 11 additions & 8 deletions sources/aft-lambda-layer/aft_common/account_request_framework.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,10 +153,12 @@ def modify_ct_request_is_valid(request: Dict[str, Any]) -> bool:
old_ct_parameters = request.get("old_control_tower_parameters", {})
new_ct_parameters = request["control_tower_parameters"]

for i in old_ct_parameters.keys():
if i != "ManagedOrganizationalUnit":
if old_ct_parameters[i] != new_ct_parameters[i]:
logger.error(f"Control Tower parameter {i} cannot be modified")
for param_name in old_ct_parameters.keys():
if param_name != "ManagedOrganizationalUnit":
if old_ct_parameters[param_name] != new_ct_parameters[param_name]:
logger.error(
f"Control Tower parameter {utils.sanitize_input_for_logging(param_name)} cannot be modified"
)
return False
return True

Expand Down Expand Up @@ -193,7 +195,8 @@ def create_new_account(
provisioning_parameters.append({"Key": k, "Value": v})

logger.info(
"Creating new account leveraging parameters: " + str(provisioning_parameters)
"Creating new account leveraging parameters: "
+ utils.sanitize_input_for_logging(str(provisioning_parameters))
)
provisioned_product_name = create_provisioned_product_name(
account_name=request["control_tower_parameters"]["AccountName"]
Expand Down Expand Up @@ -275,7 +278,7 @@ def update_existing_account(

logger.info(
"Modifying existing account leveraging parameters: "
+ str(provisioning_parameters)
+ utils.sanitize_input_for_logging(str(provisioning_parameters))
+ " with provisioned product ID "
+ target_product["Id"]
)
Expand All @@ -288,7 +291,7 @@ def update_existing_account(
ProvisioningParameters=provisioning_parameters,
UpdateToken=str(uuid.uuid1()),
)
logger.info(update_response)
logger.info(utils.sanitize_input_for_logging(update_response))


def get_account_request_record(
Expand All @@ -307,7 +310,7 @@ def get_account_request_record(
)
if item:
logger.info("Record found")
logger.info(item)
logger.info(utils.sanitize_input_for_logging(item))
return item
else:
raise Exception(f"Account {request_table_id} not found in {table_name}")
Expand Down
2 changes: 1 addition & 1 deletion sources/aft-lambda-layer/aft_common/aft_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@ def invoke_step_function(
sanitized_sfn_arn = sanitize_input_for_logging(sfn_arn)
logger.info("Starting SFN execution of " + sanitized_sfn_arn)
response = client.start_execution(stateMachineArn=sfn_arn, input=input)
logger.debug(response)
logger.debug(sanitize_input_for_logging(response))
return response


Expand Down
11 changes: 8 additions & 3 deletions sources/aft-lambda-layer/aft_common/codepipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,8 @@ def get_pipeline_for_account(session: Session, account_id: str) -> str:
current_account = session.client("sts").get_caller_identity()["Account"]
current_region = session.region_name

logger.info("Getting pipeline name for " + account_id)
sanitized_account_id = utils.sanitize_input_for_logging(account_id)
logger.info("Getting pipeline name for " + sanitized_account_id)

client = session.client("codepipeline", config=utils.get_high_retry_botoconfig())
paginator = client.get_paginator("list_pipelines")
Expand All @@ -42,7 +43,9 @@ def get_pipeline_for_account(session: Session, account_id: str) -> str:
if t["key"] == "managed_by" and t["value"] == "AFT":
pipeline_name: str = p["name"]
return pipeline_name
raise Exception("Pipelines for account id " + account_id + " was not found")
raise Exception(
"Pipelines for account id " + sanitized_account_id + " was not found"
)


def pipeline_is_running(session: Session, name: str) -> bool:
Expand Down Expand Up @@ -142,7 +145,9 @@ def delete_customization_pipeline(
)
if not pipeline_is_running(session=aft_management_session, name=pipeline_name):
client.delete_pipeline(name=pipeline_name)
logger.info(f"Deleted customization pipeline for {account_id}")
logger.info(
f"Deleted customization pipeline for {utils.sanitize_input_for_logging(account_id)}"
)
else:
logger.warning(
f"Cannot delete running customization pipeline: {pipeline_name}, skipping"
Expand Down
8 changes: 5 additions & 3 deletions sources/aft-lambda-layer/aft_common/customizations.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from typing import TYPE_CHECKING, Any, Dict, List, Optional

import jsonschema
from aft_common.aft_utils import get_high_retry_botoconfig
from aft_common.aft_utils import get_high_retry_botoconfig, sanitize_input_for_logging
from aft_common.constants import SSM_PARAM_AFT_DDB_META_TABLE
from aft_common.organizations import OrganizationsAgent
from aft_common.ssm import get_ssm_parameter_value
Expand All @@ -30,7 +30,9 @@ def validate_identify_targets_request(payload: Dict[str, Any]) -> bool:
)
with open(schema_path) as schema_file:
schema_object = json.load(schema_file)
logger.info("Schema Loaded:" + json.dumps(schema_object))
logger.info(
"Schema Loaded:" + sanitize_input_for_logging(json.dumps(schema_object))
)
validated = jsonschema.validate(payload, schema_object)
if validated is None:
logger.info("Request Validated")
Expand All @@ -53,7 +55,7 @@ def get_all_aft_account_ids(aft_management_session: Session) -> List[str]:
while "LastEvaluatedKey" in response:
logger.debug(
"Paginated response found, continuing at {}".format(
response["LastEvaluatedKey"]
sanitize_input_for_logging(response["LastEvaluatedKey"])
)
)
response = table.scan(ExclusiveStartKey=response["LastEvaluatedKey"])
Expand Down
2 changes: 1 addition & 1 deletion sources/aft-lambda-layer/aft_common/feature_options.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@ def get_vpc_security_groups(resource: EC2ServiceResource, vpc: str) -> List[str]
sgs = []
for s in vpc_resource.security_groups.all():
sgs.append(s.id)
logger.info("SGs: " + str(sgs))
logger.info("SGs: " + utils.sanitize_input_for_logging(sgs))
return sgs


Expand Down
2 changes: 1 addition & 1 deletion sources/aft-lambda-layer/aft_common/sqs.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def receive_sqs_message(session: Session, sqs_queue: str) -> Optional[MessageTyp
logger.info("There are messages pending processing")
message = response["Messages"][0]
logger.info("Message retrieved")
logger.info(message)
logger.info(utils.sanitize_input_for_logging(message))
return message
else:
logger.info("There are no messages pending processing")
Expand Down
22 changes: 18 additions & 4 deletions sources/scripts/terraform_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
#
import os
import time
from typing import Any

import requests

Expand Down Expand Up @@ -169,25 +170,30 @@ def create_destroy_run(workspace_id, api_token):

def delete_workspace(workspace_id, api_token):
endpoint = "{}/workspaces/{}".format(TERRAFORM_API_ENDPOINT, workspace_id)
sanitized_workspace_id = __sanitize_input_for_logging(workspace_id)
headers = __build_standard_headers(api_token)
response = __delete(endpoint, headers)
if response is not None:
errors = response["errors"]
if len(errors) == 0:
print("Successfully deleted workspace {}".format(workspace_id))
print("Successfully deleted workspace {}".format(sanitized_workspace_id))
else:
print("Error occured deleting workspace {}".format(workspace_id))
print("Error occured deleting workspace {}".format(sanitized_workspace_id))
print(str(errors))
else:
print("Successfully deleted workspace {}".format(workspace_id))
print("Successfully deleted workspace {}".format(sanitized_workspace_id))


def wait_to_stabilize(entity_type, entity_id, target_states, api_token):
while True:
status = get_action_status(entity_type, entity_id, api_token)
if status in target_states:
break
print("{} not yet ready. In status {}".format(entity_type, status))
print(
"{} not yet ready. In status {}".format(
entity_type, __sanitize_input_for_logging(status)
)
)
time.sleep(10)
return status

Expand Down Expand Up @@ -258,6 +264,14 @@ def __handle_errors(response):
raise ClientError(status="500", message=str(errors))


def __sanitize_input_for_logging(input: Any) -> str:
"""
Sanitize the input string by replacing newline characters, tabs with their literal string representations.
"""
input_str = str(input)
return input_str.encode("unicode_escape").decode()


class ClientError(Exception):
def __init__(self, status, message):
self.status = status
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,10 @@
from typing import TYPE_CHECKING, Any, Dict

from aft_common import notifications
from aft_common.account_request_framework import control_tower_param_changed
from aft_common.account_request_record_handler import AccountRequestRecordHandler
from aft_common.aft_utils import sanitize_input_for_logging
from aft_common.auth import AuthClient
from aft_common.logger import configure_aft_logger
from aft_common.service_catalog import provisioned_product_exists
from aft_common.shared_account import shared_account_request

if TYPE_CHECKING:
from aws_lambda_powertools.utilities.typing import LambdaContext
Expand All @@ -27,7 +25,7 @@ def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> None:
auth = AuthClient()
try:
record_handler = AccountRequestRecordHandler(auth=auth, event=event)
logger.info(record_handler.record)
logger.info(sanitize_input_for_logging(record_handler.record))
record_handler.process_request()

except Exception as error:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from aft_common import constants as utils
from aft_common import notifications, ssm
from aft_common.account_request_framework import put_audit_record
from aft_common.aft_utils import sanitize_input_for_logging
from aft_common.logger import configure_aft_logger
from boto3.session import Session

Expand Down Expand Up @@ -52,7 +53,9 @@ def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> None:
event_name,
)
else:
logger.info(f"Event Name: {event_name} is unsupported.")
logger.info(
f"Event Name: {sanitize_input_for_logging(event_name)} is unsupported."
)
else:
raise Exception("Non DynamoDB Event Received")
else:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from typing import TYPE_CHECKING, Any, Dict

from aft_common import codepipeline, ddb
from aft_common.aft_utils import sanitize_input_for_logging
from aft_common.auth import AuthClient
from aft_common.constants import SSM_PARAM_AFT_DDB_META_TABLE
from aft_common.logger import configure_aft_logger
Expand Down Expand Up @@ -33,7 +34,9 @@ def lambda_handler(event: Dict[str, Any], context: LambdaContext) -> None:
"id"
] # the account email is stored in "id" field

logger.info(f"Beginnning resource cleanup for {account_email}")
logger.info(
f"Beginning resource cleanup for {sanitize_input_for_logging(account_email)}"
)

orgs_agent = OrganizationsAgent(
ct_management_session=auth.get_ct_management_session()
Expand Down

0 comments on commit 1bc55d3

Please sign in to comment.