Skip to content

Commit

Permalink
Merge branch 'os-main' into docker-user-directives
Browse files Browse the repository at this point in the history
  • Loading branch information
Mourya Darivemula committed Jan 5, 2024
2 parents e17c9ff + 458b714 commit d2112a7
Show file tree
Hide file tree
Showing 17 changed files with 303 additions and 227 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,9 @@ def trigger_table_sharing_failure_alarm(
target_environment: Environment,
):
log.info('Triggering share failure alarm...')
subject = (
f'ALARM: DATAALL Table {table.GlueTableName} Sharing Failure Notification'
)
subject = f'Data.all Share Failure for Table {table.GlueTableName}'[:100]
message = f"""
You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the table {table.GlueTableName} with Lake Formation.
You are receiving this email because your Data.all {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the table {table.GlueTableName} with Lake Formation.
Alarm Details:
- State Change: OK -> ALARM
Expand Down Expand Up @@ -51,9 +49,9 @@ def trigger_revoke_table_sharing_failure_alarm(
target_environment: Environment,
):
log.info('Triggering share failure alarm...')
subject = f'ALARM: DATAALL Table {table.GlueTableName} Revoking LF permissions Failure Notification'
subject = f'Data.all Revoke LF Permissions Failure for Table {table.GlueTableName}'[:100]
message = f"""
You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to revoke Lake Formation permissions for table {table.GlueTableName} with Lake Formation.
You are receiving this email because your Data.all {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to revoke Lake Formation permissions for table {table.GlueTableName} with Lake Formation.
Alarm Details:
- State Change: OK -> ALARM
Expand All @@ -76,11 +74,9 @@ def trigger_revoke_table_sharing_failure_alarm(

def trigger_dataset_sync_failure_alarm(self, dataset: Dataset, error: str):
log.info(f'Triggering dataset {dataset.name} tables sync failure alarm...')
subject = (
f'ALARM: DATAALL Dataset {dataset.name} Tables Sync Failure Notification'
)
subject = f'Data.all Dataset Tables Sync Failure for {dataset.name}'[:100]
message = f"""
You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to synchronize Dataset {dataset.name} tables from AWS Glue to the Search Catalog.
You are receiving this email because your Data.all {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to synchronize Dataset {dataset.name} tables from AWS Glue to the Search Catalog.
Alarm Details:
- State Change: OK -> ALARM
Expand All @@ -101,11 +97,9 @@ def trigger_folder_sharing_failure_alarm(
target_environment: Environment,
):
log.info('Triggering share failure alarm...')
subject = (
f'ALARM: DATAALL Folder {folder.S3Prefix} Sharing Failure Notification'
)
subject = f'Data.all Folder Share Failure for {folder.S3Prefix}'[:100]
message = f"""
You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the folder {folder.S3Prefix} with S3 Access Point.
You are receiving this email because your Data.all {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the folder {folder.S3Prefix} with S3 Access Point.
Alarm Details:
- State Change: OK -> ALARM
- Reason for State Change: S3 Folder sharing failure
Expand All @@ -129,11 +123,9 @@ def trigger_revoke_folder_sharing_failure_alarm(
target_environment: Environment,
):
log.info('Triggering share failure alarm...')
subject = (
f'ALARM: DATAALL Folder {folder.S3Prefix} Sharing Revoke Failure Notification'
)
subject = f'Data.all Folder Share Revoke Failure for {folder.S3Prefix}'[:100]
message = f"""
You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the folder {folder.S3Prefix} with S3 Access Point.
You are receiving this email because your Data.all {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to share the folder {folder.S3Prefix} with S3 Access Point.
Alarm Details:
- State Change: OK -> ALARM
- Reason for State Change: S3 Folder sharing Revoke failure
Expand Down Expand Up @@ -173,11 +165,9 @@ def handle_bucket_sharing_failure(self, bucket: DatasetBucket,
target_environment: Environment,
alarm_type: str):
log.info(f'Triggering {alarm_type} failure alarm...')
subject = (
f'ALARM: DATAALL S3 Bucket {bucket.S3BucketName} {alarm_type} Failure Notification'
)
subject = f'Data.all S3 Bucket Failure for {bucket.S3BucketName} {alarm_type}'[:100]
message = f"""
You are receiving this email because your DATAALL {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to {alarm_type} the S3 Bucket {bucket.S3BucketName}.
You are receiving this email because your Data.all {self.envname} environment in the {self.region} region has entered the ALARM state, because it failed to {alarm_type} the S3 Bucket {bucket.S3BucketName}.
Alarm Details:
- State Change: OK -> ALARM
- Reason for State Change: S3 Bucket {alarm_type} failure
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -510,7 +510,7 @@ def handle_share_failure(self, error: Exception) -> None:
self.target_folder, self.share, self.target_environment
)

def handle_revoke_failure(self, error: Exception) -> None:
def handle_revoke_failure(self, error: Exception) -> bool:
"""
Handles share failure by raising an alarm to alarmsTopic
Returns
Expand All @@ -526,6 +526,7 @@ def handle_revoke_failure(self, error: Exception) -> None:
DatasetAlarmService().trigger_revoke_folder_sharing_failure_alarm(
self.target_folder, self.share, self.target_environment
)
return True

@staticmethod
def generate_default_kms_decrypt_policy_statement(target_requester_arn):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -444,7 +444,7 @@ def handle_revoke_failure(self, error: Exception) -> bool:
f'with target account {self.target_environment.AwsAccountId}/{self.target_environment.region} '
f'due to: {error}'
)
DatasetAlarmService().trigger_revoke_folder_sharing_failure_alarm(
DatasetAlarmService().trigger_revoke_s3_bucket_sharing_failure_alarm(
self.target_bucket, self.share, self.target_environment
)
return True
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -111,11 +111,14 @@ def process_approved_shares(self) -> bool:
shared_item_SM.update_state_single_item(self.session, share_item, new_state)

except Exception as e:
self.handle_share_failure(table=table, share_item=share_item, error=e)
# must run first to ensure state transitions to failed
new_state = shared_item_SM.run_transition(ShareItemActions.Failure.value)
shared_item_SM.update_state_single_item(self.session, share_item, new_state)
success = False

# statements which can throw exceptions but are not critical
self.handle_share_failure(table=table, share_item=share_item, error=e)

return success

def process_revoked_shares(self) -> bool:
Expand Down Expand Up @@ -178,9 +181,12 @@ def process_revoked_shares(self) -> bool:
revoked_item_SM.update_state_single_item(self.session, share_item, new_state)

except Exception as e:
self.handle_revoke_failure(share_item=share_item, table=table, error=e)
# must run first to ensure state transitions to failed
new_state = revoked_item_SM.run_transition(ShareItemActions.Failure.value)
revoked_item_SM.update_state_single_item(self.session, share_item, new_state)
success = False

# statements which can throw exceptions but are not critical
self.handle_revoke_failure(share_item=share_item, table=table, error=e)

return success
Original file line number Diff line number Diff line change
Expand Up @@ -95,11 +95,14 @@ def process_approved_shares(self) -> bool:
shared_item_SM.update_state_single_item(self.session, share_item, new_state)

except Exception as e:
self.handle_share_failure(table, share_item, e)
# must run first to ensure state transitions to failed
new_state = shared_item_SM.run_transition(ShareItemActions.Failure.value)
shared_item_SM.update_state_single_item(self.session, share_item, new_state)
success = False

# statements which can throw exceptions but are not critical
self.handle_share_failure(table, share_item, e)

return success

def process_revoked_shares(self) -> bool:
Expand Down Expand Up @@ -151,9 +154,12 @@ def process_revoked_shares(self) -> bool:
revoked_item_SM.update_state_single_item(self.session, share_item, new_state)

except Exception as e:
self.handle_revoke_failure(share_item, table, e)
# must run first to ensure state transitions to failed
new_state = revoked_item_SM.run_transition(ShareItemActions.Failure.value)
revoked_item_SM.update_state_single_item(self.session, share_item, new_state)
success = False

# statements which can throw exceptions but are not critical
self.handle_revoke_failure(share_item, table, e)

return success
Original file line number Diff line number Diff line change
Expand Up @@ -97,11 +97,14 @@ def process_approved_shares(
shared_item_SM.update_state_single_item(session, sharing_item, new_state)

except Exception as e:
sharing_folder.handle_share_failure(e)
# must run first to ensure state transitions to failed
new_state = shared_item_SM.run_transition(ShareItemActions.Failure.value)
shared_item_SM.update_state_single_item(session, sharing_item, new_state)
success = False

# statements which can throw exceptions but are not critical
sharing_folder.handle_share_failure(e)

return success

@classmethod
Expand Down Expand Up @@ -160,11 +163,14 @@ def process_revoked_shares(
revoked_item_SM.update_state_single_item(session, removing_item, new_state)

except Exception as e:
removing_folder.handle_revoke_failure(e)
# must run first to ensure state transitions to failed
new_state = revoked_item_SM.run_transition(ShareItemActions.Failure.value)
revoked_item_SM.update_state_single_item(session, removing_item, new_state)
success = False

# statements which can throw exceptions but are not critical
removing_folder.handle_revoke_failure(e)

return success

@classmethod
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,10 +91,14 @@ def process_approved_shares(
shared_item_SM.update_state_single_item(session, sharing_item, new_state)

except Exception as e:
sharing_bucket.handle_share_failure(e)
# must run first to ensure state transitions to failed
new_state = shared_item_SM.run_transition(ShareItemActions.Failure.value)
shared_item_SM.update_state_single_item(session, sharing_item, new_state)
success = False

# statements which can throw exceptions but are not critical
sharing_bucket.handle_share_failure(e)

return success

@classmethod
Expand Down Expand Up @@ -161,9 +165,12 @@ def process_revoked_shares(
revoked_item_SM.update_state_single_item(session, removing_item, new_state)

except Exception as e:
removing_bucket.handle_revoke_failure(e)
# must run first to ensure state transitions to failed
new_state = revoked_item_SM.run_transition(ShareItemActions.Failure.value)
revoked_item_SM.update_state_single_item(session, removing_item, new_state)
success = False

# statements which can throw exceptions but are not critical
removing_bucket.handle_revoke_failure(e)

return success
8 changes: 5 additions & 3 deletions deploy/stacks/aurora.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,7 @@ def __init__(
super().__init__(scope, id, **kwargs)

# if exclude_characters property is set make sure that the pwd regex in DbConfig is changed accordingly
db_credentials = rds.DatabaseSecret(
self, f'{resource_prefix}-{envname}-aurora-db', username='dtaadmin'
)


db_subnet_group = rds.SubnetGroup(
self,
Expand Down Expand Up @@ -65,6 +63,10 @@ def __init__(
alias=f'{resource_prefix}-{envname}-aurora',
enable_key_rotation=True,
)

db_credentials = rds.DatabaseSecret(
self, f'{resource_prefix}-{envname}-aurora-db', username='dtaadmin', encryption_key=key
)

database = rds.ServerlessCluster(
self,
Expand Down
4 changes: 2 additions & 2 deletions deploy/stacks/cognito.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,8 +160,8 @@ def __init__(

cross_account_frontend_config_role = iam.Role(
self,
f'{resource_prefix}-{envname}-frontend-config-role',
role_name=f'{resource_prefix}-{envname}-frontend-config-role',
f'{resource_prefix}-{envname}-cognito-config-role',
role_name=f'{resource_prefix}-{envname}-cognito-config-role',
assumed_by=iam.AccountPrincipal(tooling_account_id),
)
cross_account_frontend_config_role.add_to_policy(
Expand Down
8 changes: 6 additions & 2 deletions deploy/stacks/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -893,7 +893,7 @@ def cognito_config_action(self, target_env):
f'export enable_cw_canaries={target_env.get("enable_cw_canaries", False)}',
'mkdir ~/.aws/ && touch ~/.aws/config',
'echo "[profile buildprofile]" > ~/.aws/config',
f'echo "role_arn = arn:aws:iam::{target_env["account"]}:role/{self.resource_prefix}-{target_env["envname"]}-frontend-config-role" >> ~/.aws/config',
f'echo "role_arn = arn:aws:iam::{target_env["account"]}:role/{self.resource_prefix}-{target_env["envname"]}-cognito-config-role" >> ~/.aws/config',
'echo "credential_source = EcsContainer" >> ~/.aws/config',
'aws sts get-caller-identity --profile buildprofile',
'export AWS_PROFILE=buildprofile',
Expand All @@ -906,6 +906,10 @@ def cognito_config_action(self, target_env):
)

def set_albfront_stage(self, target_env, repository_name):
if target_env.get('custom_auth', None) is None:
frontend_deployment_role_arn = f'arn:aws:iam::{target_env["account"]}:role/{self.resource_prefix}-{target_env["envname"]}-cognito-config-role'
else:
frontend_deployment_role_arn = f'arn:aws:iam::{target_env["account"]}:role/{self.resource_prefix}-{target_env["envname"]}-frontend-config-role'
albfront_stage = self.pipeline.add_stage(
AlbFrontStage(
self,
Expand Down Expand Up @@ -956,7 +960,7 @@ def set_albfront_stage(self, target_env, repository_name):
f'export custom_auth_claims_mapping_user_id={str(target_env.get("custom_auth", {}).get("claims_mapping", {}).get("user_id", "None"))}',
'mkdir ~/.aws/ && touch ~/.aws/config',
'echo "[profile buildprofile]" > ~/.aws/config',
f'echo "role_arn = arn:aws:iam::{target_env["account"]}:role/{self.resource_prefix}-{target_env["envname"]}-frontend-config-role" >> ~/.aws/config',
f'echo "role_arn = {frontend_deployment_role_arn}" >> ~/.aws/config',
'echo "credential_source = EcsContainer" >> ~/.aws/config',
'aws sts get-caller-identity --profile buildprofile',
'export AWS_PROFILE=buildprofile',
Expand Down
11 changes: 11 additions & 0 deletions frontend/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions frontend/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@
"react-router": "6.0.0",
"react-router-dom": "6.0.0",
"react-scripts": "^5.0.1",
"dompurify": "^3.0.6",
"simplebar": "^5.3.6",
"simplebar-react": "^2.3.6",
"web-vitals": "^2.1.4",
Expand Down
12 changes: 12 additions & 0 deletions frontend/src/design/components/SanitizedHTML.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
import DOMPurify from 'dompurify';

// SanitizedHTML: renders a string of untrusted HTML after passing it through
// DOMPurify, so it can be injected via dangerouslySetInnerHTML without XSS.
// Props:
//   dirtyHTML - the raw (potentially attacker-controlled) HTML string to render.
export const SanitizedHTML = ({ dirtyHTML }) => {
// Whitelist config: only simple inline formatting tags and anchors survive;
// the sole attribute kept is href. Everything else (scripts, event handlers,
// style, img, iframe, ...) is stripped by DOMPurify.
const defaultOptions = {
ALLOWED_TAGS: ['b', 'i', 'em', 'strong', 'a'],
ALLOWED_ATTR: ['href']
};

// DOMPurify.sanitize returns a cleaned HTML string per the options above.
// NOTE(review): href values are kept as-is; DOMPurify blocks javascript: URIs
// by default, but confirm that is sufficient for this app's threat model.
const sanitizedHtml = DOMPurify.sanitize(dirtyHTML, defaultOptions);

// Safe by construction: only sanitized markup reaches dangerouslySetInnerHTML.
return <div dangerouslySetInnerHTML={{ __html: sanitizedHtml }} />;
};
1 change: 1 addition & 0 deletions frontend/src/design/components/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -28,3 +28,4 @@ export * from './UpVotesReadOnly';
export * from './defaults';
export * from './layout';
export * from './popovers';
export * from './SanitizedHTML';
Loading

0 comments on commit d2112a7

Please sign in to comment.