Release: 1.0.9
AWS committed Dec 15, 2021
1 parent ee3faac commit 4ec96a5
Showing 12 changed files with 381 additions and 207 deletions.
1 change: 0 additions & 1 deletion CHANGELOG.rst

This file was deleted.

1 change: 1 addition & 0 deletions README.md
@@ -111,6 +111,7 @@ Now that you have configured and deployed AWS Control Tower Account Factory for
| <a name="input_aft_vpc_private_subnet_02_cidr"></a> [aft\_vpc\_private\_subnet\_02\_cidr](#input\_aft\_vpc\_private\_subnet\_02\_cidr) | CIDR Block to allocate to the Private Subnet 02 | `string` | `"192.168.1.0/24"` | no |
| <a name="input_aft_vpc_public_subnet_01_cidr"></a> [aft\_vpc\_public\_subnet\_01\_cidr](#input\_aft\_vpc\_public\_subnet\_01\_cidr) | CIDR Block to allocate to the Public Subnet 01 | `string` | `"192.168.2.0/25"` | no |
| <a name="input_aft_vpc_public_subnet_02_cidr"></a> [aft\_vpc\_public\_subnet\_02\_cidr](#input\_aft\_vpc\_public\_subnet\_02\_cidr) | CIDR Block to allocate to the Public Subnet 02 | `string` | `"192.168.2.128/25"` | no |
| <a name="input_aft_vpc_endpoints"></a> [aft\_vpc\_endpoints](#input\_aft\_vpc\_endpoints) | Flag turning VPC endpoints on/off for AFT VPC | `bool` | `true` | no |
| <a name="input_audit_account_id"></a> [audit\_account\_id](#input\_audit\_account\_id) | Audit Account Id | `string` | n/a | yes |
| <a name="input_cloudwatch_log_group_retention"></a> [cloudwatch\_log\_group\_retention](#input\_cloudwatch\_log\_group\_retention) | Amount of days to keep CloudWatch Log Groups for Lambda functions. 0 = Never Expire | `string` | `"0"` | no |
| <a name="input_ct_home_region"></a> [ct\_home\_region](#input\_ct\_home\_region) | The region from which this module will be executed. This MUST be the same region as Control Tower is deployed. | `string` | n/a | yes |
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
1.0.8
1.0.9
1 change: 1 addition & 0 deletions main.tf
@@ -31,6 +31,7 @@ module "aft_account_request_framework" {
aft_vpc_private_subnet_02_cidr = var.aft_vpc_private_subnet_02_cidr
aft_vpc_public_subnet_01_cidr = var.aft_vpc_public_subnet_01_cidr
aft_vpc_public_subnet_02_cidr = var.aft_vpc_public_subnet_02_cidr
aft_vpc_endpoints = var.aft_vpc_endpoints
}

module "aft_backend" {
@@ -127,7 +127,19 @@ def modify_existing_account(session, ct_management_session, request):
'Value': 'self'
},
)
for p in response['ProvisionedProducts']:

pps = response['ProvisionedProducts']
while 'NextPageToken' in response:
response = client.scan_provisioned_products(
AccessLevelFilter={
'Key': 'Account',
'Value': 'self'
},
PageToken=response['NextPageToken']
)
pps.extend(response['ProvisionedProducts'])

for p in pps:
if p['Type'] == 'CONTROL_TOWER_ACCOUNT':
provisioned_product_ids.append({'Id': p['Id'], 'ProvisioningArtifactId': p['ProvisioningArtifactId']})
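The pagination loop added above is the recurring theme of this release: list and scan calls that previously read only the first page of results now drain every page before iterating. A minimal, generic sketch of the pattern follows. The collect_all_pages helper and its parameter names are illustrative only (they are not part of the AFT codebase), and the token key names vary by API: PageToken/NextPageToken for scan_provisioned_products here, lowercase nextToken for the CodePipeline and Support calls later in this commit.

```python
import boto3


def collect_all_pages(api_call, items_key, request_token_key, response_token_key, **kwargs):
    # Illustrative helper, not part of AFT: call a boto3 API repeatedly,
    # following its pagination token until no further token is returned,
    # and return the concatenated list of items.
    response = api_call(**kwargs)
    items = list(response[items_key])
    while response_token_key in response:
        kwargs[request_token_key] = response[response_token_key]
        response = api_call(**kwargs)
        items.extend(response[items_key])
    return items


# Usage mirroring the change above; the token casing follows the
# Service Catalog ScanProvisionedProducts API.
client = boto3.client("servicecatalog")
provisioned_products = collect_all_pages(
    client.scan_provisioned_products,
    items_key="ProvisionedProducts",
    request_token_key="PageToken",
    response_token_key="NextPageToken",
    AccessLevelFilter={"Key": "Account", "Value": "self"},
)
```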

@@ -140,7 +152,14 @@ def modify_existing_account(session, ct_management_session, request):
)
if response['Outputs'][0]['OutputValue'] == request['control_tower_parameters']['AccountEmail']:
target_product_id = p['Id']
target_provisioning_artifact_id = p['ProvisioningArtifactId']

# check to see if the product still exists and is still active
if utils.ct_provisioning_artifact_is_active(session, ct_management_session, p['ProvisioningArtifactId']):
target_provisioning_artifact_id = p['ProvisioningArtifactId']
else:
target_provisioning_artifact_id = utils.get_ct_provisioning_artifact_id(
session, ct_management_session
)

logger.info("Modifying existing account leveraging parameters: " + str(
provisioning_parameters) + " with provisioned product ID " + target_product_id)
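
The second change above guards against a stale Service Catalog provisioning artifact: if the artifact recorded on the provisioned product is no longer active, the current Account Factory artifact ID is looked up instead. The AFT utils helpers are not shown in this diff; the sketch below is one plausible shape for such a check, with the Account Factory product ID passed in explicitly as an assumption.

```python
import boto3


def provisioning_artifact_is_active(ct_management_session, product_id, artifact_id):
    # Illustrative sketch, not the AFT utils implementation: a provisioning
    # artifact (an Account Factory product version) reports Active=False once
    # it has been superseded or deactivated.
    client = ct_management_session.client("servicecatalog")
    detail = client.describe_provisioning_artifact(
        ProductId=product_id,
        ProvisioningArtifactId=artifact_id,
    )["ProvisioningArtifactDetail"]
    return detail.get("Active", False)


def latest_active_provisioning_artifact_id(ct_management_session, product_id):
    # Illustrative fallback: return the newest artifact that is still active.
    client = ct_management_session.client("servicecatalog")
    artifacts = client.list_provisioning_artifacts(
        ProductId=product_id
    )["ProvisioningArtifactDetails"]
    active = [a for a in artifacts if a.get("Active")]
    return max(active, key=lambda a: a["CreatedTime"])["Id"]
```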
4 changes: 4 additions & 0 deletions modules/aft-account-request-framework/variables.tf
@@ -32,4 +32,8 @@ variable "aft_vpc_public_subnet_01_cidr" {

variable "aft_vpc_public_subnet_02_cidr" {
type = string
}

variable "aft_vpc_endpoints" {
type = bool
}
32 changes: 32 additions & 0 deletions modules/aft-account-request-framework/vpc.tf
@@ -197,13 +197,17 @@ resource "aws_nat_gateway" "aft-vpc-natgw-02" {
#########################################

resource "aws_vpc_endpoint" "s3" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
vpc_endpoint_type = "Gateway"
service_name = "com.amazonaws.${data.aws_region.aft-management.name}.s3"
route_table_ids = [aws_route_table.aft_vpc_private_subnet_01.id, aws_route_table.aft_vpc_private_subnet_02.id, aws_route_table.aft_vpc_public_subnet_01.id]
}

resource "aws_vpc_endpoint" "dynamodb" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
vpc_endpoint_type = "Gateway"
service_name = "com.amazonaws.${data.aws_region.aft-management.name}.dynamodb"
@@ -215,6 +219,8 @@ resource "aws_vpc_endpoint" "dynamodb" {
#########################################

resource "aws_vpc_endpoint" "codebuild" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
service_name = data.aws_vpc_endpoint_service.codebuild.service_name
vpc_endpoint_type = "Interface"
@@ -227,6 +233,8 @@ resource "aws_vpc_endpoint" "codebuild" {
}

resource "aws_vpc_endpoint" "codecommit" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
service_name = data.aws_vpc_endpoint_service.codecommit.service_name
vpc_endpoint_type = "Interface"
@@ -239,6 +247,8 @@ resource "aws_vpc_endpoint" "codecommit" {
}

resource "aws_vpc_endpoint" "git-codecommit" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
service_name = data.aws_vpc_endpoint_service.git-codecommit.service_name
vpc_endpoint_type = "Interface"
@@ -251,6 +261,8 @@ resource "aws_vpc_endpoint" "git-codecommit" {
}

resource "aws_vpc_endpoint" "codepipeline" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
service_name = data.aws_vpc_endpoint_service.codepipeline.service_name
vpc_endpoint_type = "Interface"
@@ -263,6 +275,8 @@ resource "aws_vpc_endpoint" "codepipeline" {
}

resource "aws_vpc_endpoint" "servicecatalog" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
service_name = data.aws_vpc_endpoint_service.servicecatalog.service_name
vpc_endpoint_type = "Interface"
@@ -275,6 +289,8 @@ resource "aws_vpc_endpoint" "servicecatalog" {
}

resource "aws_vpc_endpoint" "lambda" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
service_name = data.aws_vpc_endpoint_service.lambda.service_name
vpc_endpoint_type = "Interface"
@@ -287,6 +303,8 @@ resource "aws_vpc_endpoint" "lambda" {
}

resource "aws_vpc_endpoint" "kms" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
service_name = data.aws_vpc_endpoint_service.kms.service_name
vpc_endpoint_type = "Interface"
@@ -299,6 +317,8 @@ resource "aws_vpc_endpoint" "kms" {
}

resource "aws_vpc_endpoint" "logs" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
service_name = data.aws_vpc_endpoint_service.logs.service_name
vpc_endpoint_type = "Interface"
@@ -311,6 +331,8 @@ resource "aws_vpc_endpoint" "logs" {
}

resource "aws_vpc_endpoint" "events" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
service_name = data.aws_vpc_endpoint_service.events.service_name
vpc_endpoint_type = "Interface"
@@ -323,6 +345,8 @@ resource "aws_vpc_endpoint" "events" {
}

resource "aws_vpc_endpoint" "states" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
service_name = data.aws_vpc_endpoint_service.states.service_name
vpc_endpoint_type = "Interface"
@@ -335,6 +359,8 @@ resource "aws_vpc_endpoint" "states" {
}

resource "aws_vpc_endpoint" "ssm" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
service_name = data.aws_vpc_endpoint_service.ssm.service_name
vpc_endpoint_type = "Interface"
@@ -347,6 +373,8 @@ resource "aws_vpc_endpoint" "ssm" {
}

resource "aws_vpc_endpoint" "sns" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
service_name = data.aws_vpc_endpoint_service.sns.service_name
vpc_endpoint_type = "Interface"
@@ -359,6 +387,8 @@ resource "aws_vpc_endpoint" "sns" {
}

resource "aws_vpc_endpoint" "sqs" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
service_name = data.aws_vpc_endpoint_service.sqs.service_name
vpc_endpoint_type = "Interface"
@@ -371,6 +401,8 @@ resource "aws_vpc_endpoint" "sqs" {
}

resource "aws_vpc_endpoint" "sts" {
count = var.aft_vpc_endpoints ? 1 : 0

vpc_id = aws_vpc.aft_vpc.id
service_name = data.aws_vpc_endpoint_service.sts.service_name
vpc_endpoint_type = "Interface"
@@ -13,8 +13,17 @@ def get_pipeline_for_account(session, account):
current_region = session.region_name
client = session.client('codepipeline')
logger.info("Getting pipeline name for " + account)

response = client.list_pipelines()
for p in response['pipelines']:

pipelines = response['pipelines']
while 'nextToken' in response:
response = client.list_pipelines(
nextToken=response['nextToken']
)
pipelines.extend(response['pipelines'])

for p in pipelines:
name = p['name']
if name.startswith(account + "-"):
pipeline_arn = "arn:aws:codepipeline:" + current_region + ":" + current_account + ":" + name
@@ -40,11 +49,20 @@ def pipeline_is_running(session, name):
client = session.client('codepipeline')

logger.info("Getting pipeline executions for " + name)

response = client.list_pipeline_executions(
pipelineName=name
)
logger.info(response)
latest_execution = sorted(response['pipelineExecutionSummaries'], key=lambda i: i['startTime'], reverse=True)[0]
pipeline_execution_summaries = response['pipelineExecutionSummaries']

while 'nextToken' in response:
response = client.list_pipeline_executions(
pipelineName=name,
nextToken=response['nextToken']
)
pipeline_execution_summaries.extend(response['pipelineExecutionSummaries'])

latest_execution = sorted(pipeline_execution_summaries, key=lambda i: i['startTime'], reverse=True)[0]
logger.info("Latest Execution: ")
logger.info(latest_execution)
if latest_execution['status'] == 'InProgress':
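The manual nextToken loops above can also be written with boto3's built-in CodePipeline paginators, which handle the token bookkeeping internally. The sketch below is for comparison only, not the code shipped in this release, and the pipeline name is a placeholder.

```python
import boto3

client = boto3.Session().client("codepipeline")

# Every pipeline in the account and region, across all pages.
pipelines = []
for page in client.get_paginator("list_pipelines").paginate():
    pipelines.extend(page["pipelines"])

# Every execution for one pipeline (placeholder name), across all pages.
executions = []
paginator = client.get_paginator("list_pipeline_executions")
for page in paginator.paginate(pipelineName="123456789012-customizations-pipeline"):
    executions.extend(page["pipelineExecutionSummaries"])

# Same running check as above, applied to the most recent execution.
latest_execution = max(executions, key=lambda e: e["startTime"])
is_running = latest_execution["status"] == "InProgress"
```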
@@ -11,18 +11,25 @@
def list_pipelines(session):
try:
pattern = re.compile(CUSTOMIZATIONS_PIPELINE_PATTERN)
pipelines = []
matched_pipelines = []
client = session.client('codepipeline')
logger.info("Listing Pipelines - ")

response = client.list_pipelines()
logger.info(response)

for p in response['pipelines']:
pipelines = response['pipelines']
while 'nextToken' in response:
response = client.list_pipelines(
nextToken=response['nextToken']
)
pipelines.extend(response['pipelines'])

for p in pipelines:
if re.match(pattern, p['name']):
pipelines.append(p['name'])
matched_pipelines.append(p['name'])

logger.info("The following pipelines were matched: " + str(pipelines))
return pipelines
logger.info("The following pipelines were matched: " + str(matched_pipelines))
return matched_pipelines

except Exception as e:
message = {
@@ -41,16 +48,28 @@ def get_running_pipeline_count(session, names):

for p in names:
logger.info("Getting pipeline executions for " + p)

response = client.list_pipeline_executions(
pipelineName=p
)
logger.info(response)
latest_execution = sorted(response['pipelineExecutionSummaries'], key=lambda i: i['startTime'], reverse=True)[0]
logger.info ("Latest Execution: ")
pipeline_execution_summaries = response['pipelineExecutionSummaries']

while 'nextToken' in response:
response = client.list_pipeline_executions(
pipelineName=p,
nextToken=response['nextToken']
)
pipeline_execution_summaries.extend(response['pipelineExecutionSummaries'])

latest_execution = sorted(pipeline_execution_summaries, key=lambda i: i['startTime'], reverse=True)[0]
logger.info("Latest Execution: ")
logger.info(latest_execution)

if latest_execution['status'] == 'InProgress':
pipeline_counter += 1

logger.info("The number of running pipelines is " + str(pipeline_counter))

return pipeline_counter

except Exception as e:
@@ -7,7 +7,8 @@

logger = utils.get_logger()

SUPPORT_API_REGION="us-east-1"
SUPPORT_API_REGION = "us-east-1"


def lookup_cases(session, account_id):
try:
@@ -17,7 +18,18 @@ def lookup_cases(session, account_id):
language='en',
includeCommunications=False
)
for c in response['cases']:

cases = response['cases']
while 'nextToken' in response:
response = client.describe_cases(
includeResolvedCases=True,
language='en',
includeCommunications=False,
nextToken=response['nextToken']
)
cases.extend(response['cases'])

for c in cases:
if c['subject'] == "Add Account " + account_id + " to Enterprise Support":
return True

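For reference, the same Enterprise Support lookup can be folded into a single paginated loop. The sketch below is illustrative only, not the AFT implementation; it assumes the Support API's lowercase nextToken request and response keys, and the Support endpoint is only available in us-east-1, matching SUPPORT_API_REGION above.

```python
import boto3


def account_has_enterprise_support_case(session, account_id):
    # Illustrative sketch, not the AFT implementation: scan all support cases,
    # including resolved ones, for the enrollment case raised for account_id.
    client = session.client("support", region_name="us-east-1")
    subject = "Add Account " + account_id + " to Enterprise Support"
    kwargs = {
        "includeResolvedCases": True,
        "language": "en",
        "includeCommunications": False,
    }
    while True:
        response = client.describe_cases(**kwargs)
        if any(case["subject"] == subject for case in response["cases"]):
            return True
        token = response.get("nextToken")
        if not token:
            return False
        kwargs["nextToken"] = token
```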