Commit

Merge branch 'main' into feature-crawler
iakov-aws committed Dec 23, 2023
2 parents 564e978 + 738fa82 commit 3acf9a6
Showing 7 changed files with 93 additions and 3 deletions.
77 changes: 77 additions & 0 deletions assets/lint.sh
@@ -0,0 +1,77 @@
#!/bin/bash
# shellcheck disable=SC2086,SC2181
# This script runs cfn-lint, cfn_nag_scan and checkov for all templates in the folder

RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
NC='\033[0m' # No Color

folder=$(git rev-parse --show-toplevel)/cfn-templates/
success_count=0
failure_count=0

# CKV_AWS_109: "Ensure IAM policies does not allow permissions management without constraints"
# CKV_AWS_111: "Ensure IAM policies does not allow write access without constraints"
# CKV_AWS_115: "Ensure that AWS Lambda function is configured for function-level concurrent execution limit"
# CKV_AWS_116: "Ensure that AWS Lambda function is configured for a Dead Letter Queue(DLQ)"
# CKV_AWS_117: "Ensure that AWS Lambda function is configured inside a VPC"
# CKV_AWS_173: "Check encryption settings for Lambda environmental variable"
# CKV_AWS_195: "Ensure Glue component has a security configuration associated"
# CKV_AWS_18: "Ensure the S3 bucket has access logging enabled"
# CKV_AWS_21: "Ensure the S3 bucket has versioning enabled"
checkov_skip=CKV_AWS_109,CKV_AWS_111,CKV_AWS_115,CKV_AWS_116,CKV_AWS_117,CKV_AWS_173,CKV_AWS_195,CKV_AWS_18,CKV_AWS_21


export exclude_files=("module-inventory.yaml" "module-pricing.yaml") # Fn::ForEach breaks lint :'(

yaml_files=$(find "$folder" -type f \( -name "*.yaml" -o -name "*.yml" \) -exec ls -1t "{}" +;) # ordered by date

for file in $yaml_files; do
echo "Linting $(basename $file)"
fail=0

# checkov
output=$(eval checkov --skip-download --skip-check $checkov_skip --quiet -f "$file")
if [ $? -ne 0 ]; then
echo "$output" | awk '{ print "\t" $0 }'
echo -e "checkov ${RED}KO${NC}" | awk '{ print "\t" $0 }'
fail=1
else
echo -e "checkov ${GREEN}OK${NC}" | awk '{ print "\t" $0 }'
fi

# cfn-lint
output=$(eval cfn-lint -- "$file")
if [ $? -ne 0 ]; then
echo "$output" | awk '{ print "\t" $0 }'
echo -e "cfn-lint ${RED}KO${NC}" | awk '{ print "\t" $0 }'
fail=1
else
echo -e "cfn-lint ${GREEN}OK${NC}" | awk '{ print "\t" $0 }'
fi

# cfn_nag_scan
output=$(eval cfn_nag_scan --input-path "$file")
if [ $? -ne 0 ]; then
echo "$output" | awk '{ print "\t" $0 }'
echo -e "cfn_nag_scan ${RED}KO${NC}" | awk '{ print "\t" $0 }'
fail=1
else
echo -e "cfn_nag_scan ${GREEN}OK${NC}" | awk '{ print "\t" $0 }'
fi

if [ $fail -ne 0 ]; then
((failure_count++))
else
((success_count++))
fi
done

echo "Successful lints: $success_count"
echo "Failed lints: $failure_count"
if [ $failure_count -ne 0 ]; then
exit 1
else
exit 0
fi
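
The script expects cfn-lint, checkov and cfn_nag_scan to already be on the PATH; it does not install them. A hedged sketch of preparing a machine and running the linter (the install commands are the tools' standard upstream ones, not part of this commit):

```bash
# Install the three linters the script calls (standard upstream installs, assumed here).
pip install cfn-lint checkov   # cfn-lint and checkov are Python packages
gem install cfn-nag            # provides the cfn_nag_scan executable

# Run from anywhere inside the clone; the script finds the repository root itself
# via `git rev-parse --show-toplevel` and lints every *.yaml/*.yml under cfn-templates/.
bash assets/lint.sh
echo "lint exit code: $?"      # non-zero when at least one template failed a check
```

Because the script exits non-zero whenever any template fails a check, it can double as a CI gate.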
1 change: 1 addition & 0 deletions cfn-templates/cid-cfn.yml
@@ -1056,6 +1056,7 @@ Resources:
# Cannot restrict this. See https://docs.aws.amazon.com/athena/latest/ug/datacatalogs-example-policies.html#datacatalog-policy-listing-data-catalogs
- Effect: Allow
Action:
- glue:GetPartition
- glue:GetPartitions
- glue:GetDatabases
- glue:GetTable
3 changes: 3 additions & 0 deletions changes/CHANGELOG-tao.md
@@ -1,5 +1,8 @@
# What's new in TAO Dashboard

## TAO Dashboard v3.1.1:
**All tabs:** Bugfix to exclude flagged resources that no longer appear in the latest report per account (e.g. resources that have since been fixed)

## TAO Dashboard v3.1.0:
**Important:** Updating to this version requires cid-cmd v0.2.27. Please update cid-cmd before updating the dashboard. During the update you will need to provide the path to the S3 folder where your Trusted Advisor data is stored. The QuickSight dataset ta-organizational-view will be updated, so please make a copy if you have made any customizations to the dataset. To update, run these commands in CloudShell (recommended) or another terminal:

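The actual commands are collapsed in this view of the diff. For orientation only, here is a rough sketch of the usual cid-cmd update flow; the package name is real, but the exact dashboard id and flags below are assumptions rather than the collapsed original:

```bash
# Upgrade the CLI first; the note above requires cid-cmd v0.2.27 or newer.
python3 -m pip install --upgrade cid-cmd

# Re-run the dashboard update; cid-cmd prompts for the S3 folder holding the
# Trusted Advisor data and refreshes the ta-organizational-view dataset.
cid-cmd update --dashboard-id ta-organizational-view
```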
2 changes: 2 additions & 0 deletions cid/cli.py
@@ -127,6 +127,8 @@ def deploy(ctx, **kwargs):
--on-drift (show|override) Action if a drift of view and dataset is discovered. 'override' = override the drift (will destroy customization) or 'show' (default) = show a diff. In unattended mode (without a terminal) on-drift will always behave as override.
--update (yes|no) Update if some elements are already installed. Default = 'no'
--resources TEXT CID resources yaml file or url
--category TEXT Comma separated list of categories of dashboards (ex: foundational,advanced )
--catalog TEXT Comma separated list of catalog files or urls (ex: foundational,advanced )
"""
ctx.obj.deploy(**kwargs)

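A hedged example of how the two new flags combine with the existing deploy command; the category names come from the help text above, while the catalog URL is purely illustrative:

```bash
# Limit the interactive dashboard menu to two categories
cid-cmd deploy --category foundational,advanced

# Pull dashboard definitions from an extra catalog file (illustrative URL)
cid-cmd deploy --catalog https://example.com/my-catalog.yaml --category advanced
```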
3 changes: 3 additions & 0 deletions cid/common.py
@@ -393,13 +393,16 @@ def _deploy(self, dashboard_id: str=None, recursive=True, update=False, **kwargs
self.qs.discover_dashboards()

dashboard_id = dashboard_id or get_parameters().get('dashboard-id')
category_filter = [cat for cat in get_parameters().get('category', '').upper().split(',') if cat]
if not dashboard_id:
standard_categories = ['Foundational', 'Advanced', 'Additional'] # Show these categories first
all_categories = set([f"{dashboard.get('category', 'Other')}" for dashboard in self.resources.get('dashboards').values()])
non_standard_categories = [cat for cat in all_categories if cat not in standard_categories]
categories = standard_categories + sorted(non_standard_categories)
dashboard_options = {}
for category in categories:
if category_filter and category.upper() not in category_filter:
continue
dashboard_options[f'{category.upper()}'] = '[category]'
counter = 0
for dashboard in self.resources.get('dashboards').values():
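The filter upper-cases the comma-separated --category value, drops empty entries, and then skips any category not listed. A minimal standalone sketch of the same logic, with an example parameter value that is not from the commit:

```python
def parse_category_filter(raw: str) -> list:
    """Mirror the --category handling above: split on commas, upper-case, drop empties."""
    return [cat for cat in raw.upper().split(',') if cat]

categories = ['Foundational', 'Advanced', 'Additional', 'Other']
category_filter = parse_category_filter('foundational,')   # e.g. --category foundational,

# An empty filter keeps every category; otherwise only the listed ones survive.
shown = [c for c in categories if not category_filter or c.upper() in category_filter]
print(shown)   # ['Foundational']
```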
2 changes: 1 addition & 1 deletion cid/helpers/cur.py
@@ -159,7 +159,7 @@ def metadata(self) -> dict:
self._tableName = get_parameters().get('cur-table-name')
try:
self._metadata = self.athena.get_table_metadata(self._tableName)
- except self.athena.client.exceptions.EntityNotFoundException as exc:
+ except self.athena.client.exceptions.ResourceNotFoundException as exc:
raise CidCritical(f'Provided cur-table-name "{self._tableName}" is not found. Please make sure the table exists.') from exc
res, message = self.table_is_cur(table=self._metadata, return_reason=True)
if not res:
8 changes: 6 additions & 2 deletions cid/helpers/quicksight/__init__.py
@@ -885,9 +885,9 @@ def describe_dataset(self, id, timeout: int=1) -> Dataset:
deadline = time.time() + timeout
while time.time() <= deadline:
try:
- _dataset = Dataset(self.client.describe_data_set(AwsAccountId=self.account_id,DataSetId=id).get('DataSet'))
+ _dataset = Dataset(self.client.describe_data_set(AwsAccountId=self.account_id, DataSetId=id).get('DataSet'))
logger.info(f'Saving dataset details "{_dataset.name}" ({_dataset.id})')
- self._datasets.update({_dataset.id: _dataset})
+ self._datasets[_dataset.id] = _dataset
break
except self.client.exceptions.ResourceNotFoundException:
logger.info(f'DataSetId {id} not found')
@@ -896,6 +896,9 @@ def describe_dataset(self, id, timeout: int=1) -> Dataset:
except self.client.exceptions.AccessDeniedException:
logger.debug(f'No quicksight:DescribeDataSet permission or missing DataSetId {id}')
return None
except self.client.exceptions.ClientError as exc:
logger.error(f'Error when trying to describe dataset {id}: {exc}')
return None

return self._datasets.get(id, None)

@@ -1266,6 +1269,7 @@ def _build_params_for_create_update_dash(self, definition: dict, permissions: bo
'AwsAccountId': self.account_id,
'DashboardId': definition.get('dashboardId'),
'Name': definition.get('name'),
'ValidationStrategy': {'Mode': 'LENIENT'},
}

if definition.get('sourceTemplate'):
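For context on the new ValidationStrategy entry: QuickSight's CreateDashboard and UpdateDashboard APIs accept a ValidationStrategy whose LENIENT mode lets the dashboard be created even when the definition has validation errors, instead of failing the call outright. A minimal boto3 sketch of the call these parameters feed into (account id, dashboard id, name and definition are caller-supplied placeholders):

```python
import boto3

def create_dashboard_lenient(account_id: str, dashboard_id: str, name: str, definition: dict) -> dict:
    """Sketch: create a QuickSight dashboard with lenient definition validation."""
    quicksight = boto3.client('quicksight')
    return quicksight.create_dashboard(
        AwsAccountId=account_id,
        DashboardId=dashboard_id,
        Name=name,
        Definition=definition,                    # dashboard definition as a dict
        ValidationStrategy={'Mode': 'LENIENT'},   # do not fail on definition validation errors
    )
```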
