From f85c22a20fae4f9949587820a57992cea151c3a5 Mon Sep 17 00:00:00 2001 From: pedrooot Date: Fri, 5 Jul 2024 14:05:49 +0200 Subject: [PATCH 1/9] chore(cis): add CIS output class --- prowler/__main__.py | 42 +++++++- prowler/lib/check/compliance.py | 6 +- prowler/lib/check/compliance_models.py | 8 +- prowler/lib/outputs/compliance/cis.py | 19 +--- prowler/lib/outputs/compliance/cis_aws.py | 97 ++++++++++++------- prowler/lib/outputs/compliance/cis_azure.py | 6 +- prowler/lib/outputs/compliance/cis_gcp.py | 6 +- .../lib/outputs/compliance/cis_kubernetes.py | 6 +- .../outputs/compliance/compliance_output.py | 85 ++++++++++++++++ prowler/lib/outputs/compliance/models.py | 86 +++------------- prowler/lib/outputs/file_descriptors.py | 46 +++------ tests/lib/check/compliance_check_test.py | 4 +- .../lib/outputs/compliance/compliance_test.py | 8 +- tests/lib/outputs/outputs_test.py | 18 ++-- 14 files changed, 253 insertions(+), 184 deletions(-) create mode 100644 prowler/lib/outputs/compliance/compliance_output.py diff --git a/prowler/__main__.py b/prowler/__main__.py index 966b340e2e..3630f1db81 100644 --- a/prowler/__main__.py +++ b/prowler/__main__.py @@ -7,6 +7,7 @@ from colorama import Fore, Style from prowler.config.config import ( + available_compliance_frameworks, csv_file_suffix, get_available_compliance_frameworks, html_file_suffix, @@ -43,7 +44,11 @@ from prowler.lib.cli.parser import ProwlerArgumentParser from prowler.lib.logger import logger, set_logging_config from prowler.lib.outputs.asff.asff import ASFF -from prowler.lib.outputs.compliance.compliance import display_compliance_table +from prowler.lib.outputs.compliance.cis_aws import AWSCIS +from prowler.lib.outputs.compliance.compliance import ( + display_compliance_table, + get_check_compliance_frameworks_in_input, +) from prowler.lib.outputs.csv.models import CSV from prowler.lib.outputs.finding import Finding from prowler.lib.outputs.html.html import HTML @@ -296,6 +301,20 @@ def prowler(): Finding.generate_output(global_provider, finding) for finding in findings ] + input_compliance_frameworks = list( + set(global_provider.output_options.output_modes).intersection( + available_compliance_frameworks + ) + ) + + check_compliance = [] + for finding in finding_outputs: + check_compliance += get_check_compliance_frameworks_in_input( + finding.check_id, + global_provider.output_options.bulk_checks_metadata, + input_compliance_frameworks, + ) + if args.output_formats: for mode in args.output_formats: if "csv" in mode: @@ -372,6 +391,27 @@ def prowler(): bucket_session, ) + if provider == "aws": + for compliance in check_compliance: + if compliance.Framework == "CIS": + compliance_name = ( + "cis_" + compliance.Version + "_" + compliance.Provider.lower() + ) + # Generate CIS Finding Object + filename = ( + f"{global_provider.output_options.output_directory}/compliance/" + f"{global_provider.output_options.output_filename}_{compliance_name}.csv" + ) + cis_finding = AWSCIS( + findings=finding_outputs, + compliance=compliance, + create_file_descriptor=True, + file_path=filename, + ) + # Write CIS Finding Object to file, using bool to write the header. 
+ # TODO: If it's the first time that the compliance it's found, it SHOULD + cis_finding.batch_write_data_to_file(False) + # AWS Security Hub Integration if provider == "aws" and args.security_hub: print( diff --git a/prowler/lib/check/compliance.py b/prowler/lib/check/compliance.py index b191f1f8a3..e6038a9dba 100644 --- a/prowler/lib/check/compliance.py +++ b/prowler/lib/check/compliance.py @@ -2,7 +2,7 @@ from pydantic import parse_obj_as -from prowler.lib.check.compliance_models import Compliance_Base_Model +from prowler.lib.check.compliance_models import ComplianceBaseModel from prowler.lib.check.models import Check_Metadata_Model from prowler.lib.logger import logger @@ -22,7 +22,7 @@ def update_checks_metadata_with_compliance( # Include the requirement into the check's framework requirements compliance_requirements.append(requirement) # Create the Compliance_Model - compliance = Compliance_Base_Model( + compliance = ComplianceBaseModel( Framework=framework.Framework, Provider=framework.Provider, Version=framework.Version, @@ -43,7 +43,7 @@ def update_checks_metadata_with_compliance( if not requirement.Checks: compliance_requirements.append(requirement) # Create the Compliance_Model - compliance = Compliance_Base_Model( + compliance = ComplianceBaseModel( Framework=framework.Framework, Provider=framework.Provider, Version=framework.Version, diff --git a/prowler/lib/check/compliance_models.py b/prowler/lib/check/compliance_models.py index 88ed849015..03411cfcc8 100644 --- a/prowler/lib/check/compliance_models.py +++ b/prowler/lib/check/compliance_models.py @@ -189,8 +189,8 @@ class Compliance_Requirement(BaseModel): Checks: list[str] -class Compliance_Base_Model(BaseModel): - """Compliance_Base_Model holds the base model for every compliance framework""" +class ComplianceBaseModel(BaseModel): + """ComplianceBaseModel holds the base model for every compliance framework""" Framework: str Provider: str @@ -218,10 +218,10 @@ def framework_and_provider_must_not_be_empty(cls, values): # noqa: F841 # Testing Pending def load_compliance_framework( compliance_specification_file: str, -) -> Compliance_Base_Model: +) -> ComplianceBaseModel: """load_compliance_framework loads and parse a Compliance Framework Specification""" try: - compliance_framework = Compliance_Base_Model.parse_file( + compliance_framework = ComplianceBaseModel.parse_file( compliance_specification_file ) except ValidationError as error: diff --git a/prowler/lib/outputs/compliance/cis.py b/prowler/lib/outputs/compliance/cis.py index a341809f23..eb05238ace 100644 --- a/prowler/lib/outputs/compliance/cis.py +++ b/prowler/lib/outputs/compliance/cis.py @@ -3,13 +3,11 @@ from prowler.config.config import orange_color from prowler.lib.logger import logger -from prowler.lib.outputs.compliance.cis_aws import generate_compliance_row_cis_aws from prowler.lib.outputs.compliance.cis_azure import generate_compliance_row_cis_azure from prowler.lib.outputs.compliance.cis_gcp import generate_compliance_row_cis_gcp from prowler.lib.outputs.compliance.cis_kubernetes import ( generate_compliance_row_cis_kubernetes, ) -from prowler.lib.outputs.csv.csv import write_csv def write_compliance_row_cis( @@ -29,16 +27,7 @@ def write_compliance_row_cis( if compliance_output in str(input_compliance_frameworks): for requirement in compliance.Requirements: for attribute in requirement.Attributes: - if compliance.Provider == "AWS": - (compliance_row, csv_header) = generate_compliance_row_cis_aws( - finding, - compliance, - requirement, - attribute, - 
output_options, - provider, - ) - elif compliance.Provider == "Azure": + if compliance.Provider == "Azure": (compliance_row, csv_header) = ( generate_compliance_row_cis_azure( finding, @@ -64,9 +53,9 @@ def write_compliance_row_cis( ) ) - write_csv( - file_descriptors[compliance_output], csv_header, compliance_row - ) + # write_csv( + # file_descriptors[compliance_output], csv_header, compliance_row + # ) except Exception as error: logger.error( f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" diff --git a/prowler/lib/outputs/compliance/cis_aws.py b/prowler/lib/outputs/compliance/cis_aws.py index b21c2a8d70..48f4a6f8f9 100644 --- a/prowler/lib/outputs/compliance/cis_aws.py +++ b/prowler/lib/outputs/compliance/cis_aws.py @@ -1,36 +1,67 @@ -from prowler.config.config import timestamp -from prowler.lib.outputs.compliance.models import Check_Output_CSV_AWS_CIS -from prowler.lib.outputs.csv.csv import generate_csv_fields -from prowler.lib.utils.utils import outputs_unix_timestamp +from csv import DictWriter +from venv import logger +from prowler.lib.check.compliance_models import ComplianceBaseModel +from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput +from prowler.lib.outputs.compliance.models import AWS +from prowler.lib.outputs.finding import Finding -def generate_compliance_row_cis_aws( - finding, compliance, requirement, attribute, output_options, provider -): - compliance_row = Check_Output_CSV_AWS_CIS( - Provider=finding.check_metadata.Provider, - Description=compliance.Description, - AccountId=provider.identity.account, - Region=finding.region, - AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp), - Requirements_Id=requirement.Id, - Requirements_Description=requirement.Description, - Requirements_Attributes_Section=attribute.Section, - Requirements_Attributes_Profile=attribute.Profile, - Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, - Requirements_Attributes_Description=attribute.Description, - Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, - Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, - Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, - Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, - Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, - Requirements_Attributes_References=attribute.References, - Status=finding.status, - StatusExtended=finding.status_extended, - ResourceId=finding.resource_id, - CheckId=finding.check_metadata.CheckID, - Muted=finding.muted, - ) - csv_header = generate_csv_fields(Check_Output_CSV_AWS_CIS) - return compliance_row, csv_header +class AWSCIS(ComplianceOutput): + def transform( + self, findings: list[Finding], compliance: ComplianceBaseModel + ) -> None: + for finding in findings: + for requirement in compliance.Requirements: + for attribute in requirement.Attributes: + compliance_row = AWS( + Provider=finding.provider, + Description=compliance.Description, + AccountId=finding.account_uid, + Region=finding.region, + AssessmentDate=str(finding.timestamp), + Requirements_Id=requirement.Id, + Requirements_Description=requirement.Description, + Requirements_Attributes_Section=attribute.Section, + Requirements_Attributes_Profile=attribute.Profile, + Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, + Requirements_Attributes_Description=attribute.Description, + 
Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, + Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, + Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, + Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, + Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, + Requirements_Attributes_References=attribute.References, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_uid, + CheckId=finding.check_id, + Muted=finding.muted, + ) + self._data.append(compliance_row) + + def batch_write_data_to_file(self, header: bool) -> None: + try: + if ( + getattr(self, "_file_descriptor", None) + and not self._file_descriptor.closed + and self._data + ): + csv_writer = DictWriter( + self._file_descriptor, + fieldnames=[ + field.upper() for field in self._data[0].__dict__.keys() + ], + delimiter=";", + ) + if header: + csv_writer.writeheader() + for finding in self._data: + for key in list(finding.__dict__.keys()): + finding.__dict__[key.upper()] = finding.__dict__.pop(key) + csv_writer.writerow(finding.dict()) + self._file_descriptor.close() + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) diff --git a/prowler/lib/outputs/compliance/cis_azure.py b/prowler/lib/outputs/compliance/cis_azure.py index ca4b7ed96c..a45075a06a 100644 --- a/prowler/lib/outputs/compliance/cis_azure.py +++ b/prowler/lib/outputs/compliance/cis_azure.py @@ -1,5 +1,5 @@ from prowler.config.config import timestamp -from prowler.lib.outputs.compliance.models import Check_Output_CSV_AZURE_CIS +from prowler.lib.outputs.compliance.models import Azure from prowler.lib.outputs.csv.csv import generate_csv_fields from prowler.lib.utils.utils import outputs_unix_timestamp @@ -7,7 +7,7 @@ def generate_compliance_row_cis_azure( finding, compliance, requirement, attribute, output_options ): - compliance_row = Check_Output_CSV_AZURE_CIS( + compliance_row = Azure( Provider=finding.check_metadata.Provider, Description=compliance.Description, Subscription=finding.subscription, @@ -32,6 +32,6 @@ def generate_compliance_row_cis_azure( CheckId=finding.check_metadata.CheckID, Muted=finding.muted, ) - csv_header = generate_csv_fields(Check_Output_CSV_AZURE_CIS) + csv_header = generate_csv_fields(Azure) return compliance_row, csv_header diff --git a/prowler/lib/outputs/compliance/cis_gcp.py b/prowler/lib/outputs/compliance/cis_gcp.py index 1db760822c..9a4d0cfb53 100644 --- a/prowler/lib/outputs/compliance/cis_gcp.py +++ b/prowler/lib/outputs/compliance/cis_gcp.py @@ -1,5 +1,5 @@ from prowler.config.config import timestamp -from prowler.lib.outputs.compliance.models import Check_Output_CSV_GCP_CIS +from prowler.lib.outputs.compliance.models import GCP from prowler.lib.outputs.csv.csv import generate_csv_fields from prowler.lib.utils.utils import outputs_unix_timestamp @@ -7,7 +7,7 @@ def generate_compliance_row_cis_gcp( finding, compliance, requirement, attribute, output_options ): - compliance_row = Check_Output_CSV_GCP_CIS( + compliance_row = GCP( Provider=finding.check_metadata.Provider, Description=compliance.Description, ProjectId=finding.project_id, @@ -32,6 +32,6 @@ def generate_compliance_row_cis_gcp( CheckId=finding.check_metadata.CheckID, Muted=finding.muted, ) - csv_header = generate_csv_fields(Check_Output_CSV_GCP_CIS) + csv_header = generate_csv_fields(GCP) return compliance_row, csv_header diff --git 
a/prowler/lib/outputs/compliance/cis_kubernetes.py b/prowler/lib/outputs/compliance/cis_kubernetes.py index ea12d15e13..e711977f56 100644 --- a/prowler/lib/outputs/compliance/cis_kubernetes.py +++ b/prowler/lib/outputs/compliance/cis_kubernetes.py @@ -1,5 +1,5 @@ from prowler.config.config import timestamp -from prowler.lib.outputs.compliance.models import Check_Output_CSV_KUBERNETES_CIS +from prowler.lib.outputs.compliance.models import Kubernetes from prowler.lib.outputs.csv.csv import generate_csv_fields from prowler.lib.utils.utils import outputs_unix_timestamp @@ -7,7 +7,7 @@ def generate_compliance_row_cis_kubernetes( finding, compliance, requirement, attribute, output_options, provider ): - compliance_row = Check_Output_CSV_KUBERNETES_CIS( + compliance_row = Kubernetes( Provider=finding.check_metadata.Provider, Description=compliance.Description, Context=provider.identity.context, @@ -32,6 +32,6 @@ def generate_compliance_row_cis_kubernetes( CheckId=finding.check_metadata.CheckID, Muted=finding.muted, ) - csv_header = generate_csv_fields(Check_Output_CSV_KUBERNETES_CIS) + csv_header = generate_csv_fields(Kubernetes) return compliance_row, csv_header diff --git a/prowler/lib/outputs/compliance/compliance_output.py b/prowler/lib/outputs/compliance/compliance_output.py new file mode 100644 index 0000000000..4a4d1a33f7 --- /dev/null +++ b/prowler/lib/outputs/compliance/compliance_output.py @@ -0,0 +1,85 @@ +from abc import ABC, abstractmethod +from io import TextIOWrapper +from typing import List + +from prowler.lib.check.compliance_models import ComplianceBaseModel +from prowler.lib.logger import logger +from prowler.lib.outputs.finding import Finding +from prowler.lib.utils.utils import open_file + + +class ComplianceOutput(ABC): + """ + This class represents an abstract base class for defining different types of outputs for findings. + + Attributes: + _data (list): A list to store transformed data from findings. + _file_descriptor (TextIOWrapper): A file descriptor to write data to a file. + + Methods: + __init__: Initializes the Output class with findings, optionally creates a file descriptor. + data: Property to access the transformed data. + file_descriptor: Property to access the file descriptor. + transform: Abstract method to transform findings into a specific format. + batch_write_data_to_file: Abstract method to write data to a file in batches. + create_file_descriptor: Method to create a file descriptor for writing data to a file. + """ + + _data: list + _file_descriptor: TextIOWrapper + + def __init__( + self, + findings: List[Finding], + compliance: ComplianceBaseModel, + create_file_descriptor: bool = False, + file_path: str = None, + ) -> None: + self._data = [] + if findings: + self.transform(findings, compliance) + if create_file_descriptor: + self.create_file_descriptor(file_path) + + @property + def data(self): + return self._data + + @property + def file_descriptor(self): + return self._file_descriptor + + @abstractmethod + def transform(self, findings: List[Finding], compliance: dict): + raise NotImplementedError + + @abstractmethod + def batch_write_data_to_file(self, file_descriptor: TextIOWrapper) -> None: + raise NotImplementedError + + def create_file_descriptor(self, file_path) -> None: + """ + Creates a file descriptor for writing data to a file. + + Parameters: + file_path (str): The path to the file where the data will be written. 
+ + Returns: + None + + Raises: + Any exception that occurs during the file creation process will be caught and logged using the logger. + + Note: + The file is opened in append mode ("a") to ensure data is written at the end of the file without overwriting existing content. + """ + try: + mode = "a" + self._file_descriptor = open_file( + file_path, + mode, + ) + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) diff --git a/prowler/lib/outputs/compliance/models.py b/prowler/lib/outputs/compliance/models.py index e28383d994..a19647859a 100644 --- a/prowler/lib/outputs/compliance/models.py +++ b/prowler/lib/outputs/compliance/models.py @@ -32,11 +32,7 @@ class Check_Output_CSV_ENS_RD2022(BaseModel): Muted: bool -class Check_Output_CSV_AWS_CIS(BaseModel): - """ - Check_Output_CSV_CIS generates a finding's output in CSV CIS format. - """ - +class CIS(BaseModel): Provider: str Description: str AccountId: str @@ -61,94 +57,40 @@ class Check_Output_CSV_AWS_CIS(BaseModel): Muted: bool -class Check_Output_CSV_AZURE_CIS(BaseModel): +class AWS(CIS): + """ + Check_Output_CSV_CIS generates a finding's output in CSV CIS format. + """ + + AccountId: str + Region: str + + +class Azure(CIS): """ Check_Output_CSV_CIS generates a finding's output in CSV CIS format. """ - Provider: str - Description: str Subscription: str - AssessmentDate: str - Requirements_Id: str - Requirements_Description: str - Requirements_Attributes_Section: str - Requirements_Attributes_Profile: str - Requirements_Attributes_AssessmentStatus: str - Requirements_Attributes_Description: str - Requirements_Attributes_RationaleStatement: str - Requirements_Attributes_ImpactStatement: str - Requirements_Attributes_RemediationProcedure: str - Requirements_Attributes_AuditProcedure: str - Requirements_Attributes_AdditionalInformation: str - Requirements_Attributes_DefaultValue: str - Requirements_Attributes_References: str - Status: str - StatusExtended: str - ResourceId: str - ResourceName: str - CheckId: str - Muted: bool + Location: str -class Check_Output_CSV_GCP_CIS(BaseModel): +class GCP(CIS): """ Check_Output_CSV_CIS generates a finding's output in CSV CIS format. """ - Provider: str - Description: str ProjectId: str Location: str - AssessmentDate: str - Requirements_Id: str - Requirements_Description: str - Requirements_Attributes_Section: str - Requirements_Attributes_Profile: str - Requirements_Attributes_AssessmentStatus: str - Requirements_Attributes_Description: str - Requirements_Attributes_RationaleStatement: str - Requirements_Attributes_ImpactStatement: str - Requirements_Attributes_RemediationProcedure: str - Requirements_Attributes_AuditProcedure: str - Requirements_Attributes_AdditionalInformation: str - Requirements_Attributes_References: str - Status: str - StatusExtended: str - ResourceId: str - ResourceName: str - CheckId: str - Muted: bool -class Check_Output_CSV_KUBERNETES_CIS(BaseModel): +class Kubernetes(CIS): """ Check_Output_CSV_CIS generates a finding's output in CSV CIS format. 
""" - Provider: str - Description: str Context: str Namespace: str - AssessmentDate: str - Requirements_Id: str - Requirements_Description: str - Requirements_Attributes_Section: str - Requirements_Attributes_Profile: str - Requirements_Attributes_AssessmentStatus: str - Requirements_Attributes_Description: str - Requirements_Attributes_RationaleStatement: str - Requirements_Attributes_ImpactStatement: str - Requirements_Attributes_RemediationProcedure: str - Requirements_Attributes_AuditProcedure: str - Requirements_Attributes_AdditionalInformation: str - Requirements_Attributes_References: str - Requirements_Attributes_DefaultValue: str - Status: str - StatusExtended: str - ResourceId: str - CheckId: str - Muted: bool class Check_Output_CSV_Generic_Compliance(BaseModel): diff --git a/prowler/lib/outputs/file_descriptors.py b/prowler/lib/outputs/file_descriptors.py index 9357a97d54..d41b03d14e 100644 --- a/prowler/lib/outputs/file_descriptors.py +++ b/prowler/lib/outputs/file_descriptors.py @@ -10,14 +10,12 @@ MitreAttackGCP, ) from prowler.lib.outputs.compliance.models import ( - Check_Output_CSV_AWS_CIS, + Azure, Check_Output_CSV_AWS_ISO27001_2013, Check_Output_CSV_AWS_Well_Architected, - Check_Output_CSV_AZURE_CIS, Check_Output_CSV_ENS_RD2022, - Check_Output_CSV_GCP_CIS, Check_Output_CSV_Generic_Compliance, - Check_Output_CSV_KUBERNETES_CIS, + Kubernetes, ) from prowler.lib.outputs.csv.csv import generate_csv_fields from prowler.lib.outputs.output import Finding @@ -74,15 +72,7 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, provi elif provider.type == "gcp": filename = f"{output_directory}/compliance/{output_filename}_{output_mode}{csv_file_suffix}" - if "cis_" in output_mode: - file_descriptor = initialize_file_descriptor( - filename, - output_mode, - provider.type, - Check_Output_CSV_GCP_CIS, - ) - file_descriptors.update({output_mode: file_descriptor}) - elif output_mode == "mitre_attack_gcp": + if output_mode == "mitre_attack_gcp": file_descriptor = initialize_file_descriptor( filename, output_mode, @@ -106,7 +96,7 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, provi filename, output_mode, provider.type, - Check_Output_CSV_KUBERNETES_CIS, + Kubernetes, ) file_descriptors.update({output_mode: file_descriptor}) else: @@ -125,7 +115,7 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, provi filename, output_mode, provider.type, - Check_Output_CSV_AZURE_CIS, + Azure, ) file_descriptors.update({output_mode: file_descriptor}) elif output_mode == "mitre_attack_azure": @@ -157,15 +147,6 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, provi ) file_descriptors.update({output_mode: file_descriptor}) - elif "cis_" in output_mode: - file_descriptor = initialize_file_descriptor( - filename, - output_mode, - provider.type, - Check_Output_CSV_AWS_CIS, - ) - file_descriptors.update({output_mode: file_descriptor}) - elif "aws_well_architected_framework" in output_mode: file_descriptor = initialize_file_descriptor( filename, @@ -194,14 +175,15 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, provi file_descriptors.update({output_mode: file_descriptor}) else: - # Generic Compliance framework - file_descriptor = initialize_file_descriptor( - filename, - output_mode, - provider.type, - Check_Output_CSV_Generic_Compliance, - ) - file_descriptors.update({output_mode: file_descriptor}) + if "cis_" not in output_mode: + # Generic Compliance framework + 
file_descriptor = initialize_file_descriptor( + filename, + output_mode, + provider.type, + Check_Output_CSV_Generic_Compliance, + ) + file_descriptors.update({output_mode: file_descriptor}) except Exception as error: logger.error( diff --git a/tests/lib/check/compliance_check_test.py b/tests/lib/check/compliance_check_test.py index 634a11de6b..8ad2f2172e 100644 --- a/tests/lib/check/compliance_check_test.py +++ b/tests/lib/check/compliance_check_test.py @@ -3,8 +3,8 @@ CIS_Requirement_Attribute, CIS_Requirement_Attribute_AssessmentStatus, CIS_Requirement_Attribute_Profile, - Compliance_Base_Model, Compliance_Requirement, + ComplianceBaseModel, ) from prowler.lib.check.models import Check_Metadata_Model @@ -14,7 +14,7 @@ class TestCompliance: def get_custom_framework(self): return { - "framework1": Compliance_Base_Model( + "framework1": ComplianceBaseModel( Framework="Framework1", Provider="Provider1", Version="1.0", diff --git a/tests/lib/outputs/compliance/compliance_test.py b/tests/lib/outputs/compliance/compliance_test.py index 6be964a8fa..b3ecd0648d 100644 --- a/tests/lib/outputs/compliance/compliance_test.py +++ b/tests/lib/outputs/compliance/compliance_test.py @@ -2,15 +2,15 @@ from prowler.lib.check.compliance_models import ( CIS_Requirement_Attribute, - Compliance_Base_Model, Compliance_Requirement, + ComplianceBaseModel, ) from prowler.lib.outputs.compliance.compliance import ( get_check_compliance_frameworks_in_input, ) CIS_1_4_AWS_NAME = "cis_1.4_aws" -CIS_1_4_AWS = Compliance_Base_Model( +CIS_1_4_AWS = ComplianceBaseModel( Framework="CIS", Provider="AWS", Version="1.4", @@ -38,7 +38,7 @@ ], ) CIS_1_5_AWS_NAME = "cis_1.5_aws" -CIS_1_5_AWS = Compliance_Base_Model( +CIS_1_5_AWS = ComplianceBaseModel( Framework="CIS", Provider="AWS", Version="1.5", @@ -67,7 +67,7 @@ ) NOT_PRESENT_COMPLIANCE_NAME = "not_present_compliance_name" -NOT_PRESENT_COMPLIANCE = Compliance_Base_Model( +NOT_PRESENT_COMPLIANCE = ComplianceBaseModel( Framework="NOT_EXISTENT", Provider="NOT_EXISTENT", Version="NOT_EXISTENT", diff --git a/tests/lib/outputs/outputs_test.py b/tests/lib/outputs/outputs_test.py index b4c12ea62e..b2c70763cb 100644 --- a/tests/lib/outputs/outputs_test.py +++ b/tests/lib/outputs/outputs_test.py @@ -6,8 +6,8 @@ from prowler.lib.check.compliance_models import ( CIS_Requirement_Attribute, - Compliance_Base_Model, Compliance_Requirement, + ComplianceBaseModel, ) from prowler.lib.check.models import Check_Report, load_check_metadata from prowler.lib.outputs.compliance.compliance import get_check_compliance @@ -329,7 +329,7 @@ def test_extract_findings_statistics_all_fail_are_not_muted(self): def test_get_check_compliance_aws(self): bulk_check_metadata = [ - Compliance_Base_Model( + ComplianceBaseModel( Framework="CIS", Provider="AWS", Version="1.4", @@ -356,7 +356,7 @@ def test_get_check_compliance_aws(self): ) ], ), - Compliance_Base_Model( + ComplianceBaseModel( Framework="CIS", Provider="AWS", Version="1.5", @@ -413,7 +413,7 @@ def test_get_check_compliance_aws(self): def test_get_check_compliance_gcp(self): bulk_check_metadata = [ - Compliance_Base_Model( + ComplianceBaseModel( Framework="CIS", Provider="GCP", Version="2.0", @@ -440,7 +440,7 @@ def test_get_check_compliance_gcp(self): ) ], ), - Compliance_Base_Model( + ComplianceBaseModel( Framework="CIS", Provider="GCP", Version="2.1", @@ -497,7 +497,7 @@ def test_get_check_compliance_gcp(self): def test_get_check_compliance_azure(self): bulk_check_metadata = [ - Compliance_Base_Model( + ComplianceBaseModel( Framework="CIS", 
Provider="Azure", Version="2.0", @@ -524,7 +524,7 @@ def test_get_check_compliance_azure(self): ) ], ), - Compliance_Base_Model( + ComplianceBaseModel( Framework="CIS", Provider="Azure", Version="2.1", @@ -581,7 +581,7 @@ def test_get_check_compliance_azure(self): def test_get_check_compliance_kubernetes(self): bulk_check_metadata = [ - Compliance_Base_Model( + ComplianceBaseModel( Framework="CIS", Provider="Kubernetes", Version="2.0", @@ -608,7 +608,7 @@ def test_get_check_compliance_kubernetes(self): ) ], ), - Compliance_Base_Model( + ComplianceBaseModel( Framework="CIS", Provider="Kubernetes", Version="2.1", From 62664d67ec0c917e1b114acddd732f51a4a92438 Mon Sep 17 00:00:00 2001 From: pedrooot Date: Fri, 5 Jul 2024 14:13:42 +0200 Subject: [PATCH 2/9] chore(cis): add class for all the providers --- prowler/lib/outputs/compliance/cis_azure.py | 97 ++++++++++++------- prowler/lib/outputs/compliance/cis_gcp.py | 97 ++++++++++++------- .../lib/outputs/compliance/cis_kubernetes.py | 97 ++++++++++++------- 3 files changed, 192 insertions(+), 99 deletions(-) diff --git a/prowler/lib/outputs/compliance/cis_azure.py b/prowler/lib/outputs/compliance/cis_azure.py index a45075a06a..62be35e1f0 100644 --- a/prowler/lib/outputs/compliance/cis_azure.py +++ b/prowler/lib/outputs/compliance/cis_azure.py @@ -1,37 +1,68 @@ -from prowler.config.config import timestamp +from csv import DictWriter +from venv import logger + +from prowler.lib.check.compliance_models import ComplianceBaseModel +from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput from prowler.lib.outputs.compliance.models import Azure -from prowler.lib.outputs.csv.csv import generate_csv_fields -from prowler.lib.utils.utils import outputs_unix_timestamp +from prowler.lib.outputs.finding import Finding -def generate_compliance_row_cis_azure( - finding, compliance, requirement, attribute, output_options -): - compliance_row = Azure( - Provider=finding.check_metadata.Provider, - Description=compliance.Description, - Subscription=finding.subscription, - AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp), - Requirements_Id=requirement.Id, - Requirements_Description=requirement.Description, - Requirements_Attributes_Section=attribute.Section, - Requirements_Attributes_Profile=attribute.Profile, - Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, - Requirements_Attributes_Description=attribute.Description, - Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, - Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, - Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, - Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, - Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, - Requirements_Attributes_DefaultValue=attribute.DefaultValue, - Requirements_Attributes_References=attribute.References, - Status=finding.status, - StatusExtended=finding.status_extended, - ResourceId=finding.resource_id, - ResourceName=finding.resource_name, - CheckId=finding.check_metadata.CheckID, - Muted=finding.muted, - ) - csv_header = generate_csv_fields(Azure) +class AzureCIS(ComplianceOutput): + def transform( + self, findings: list[Finding], compliance: ComplianceBaseModel + ) -> None: + for finding in findings: + for requirement in compliance.Requirements: + for attribute in requirement.Attributes: + compliance_row = Azure( + Provider=finding.provider, + Description=compliance.Description, + 
Subscription=finding.subscription, + AssessmentDate=str(finding.timestamp), + Requirements_Id=requirement.Id, + Requirements_Description=requirement.Description, + Requirements_Attributes_Section=attribute.Section, + Requirements_Attributes_Profile=attribute.Profile, + Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, + Requirements_Attributes_Description=attribute.Description, + Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, + Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, + Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, + Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, + Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, + Requirements_Attributes_DefaultValue=attribute.DefaultValue, + Requirements_Attributes_References=attribute.References, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_id, + ResourceName=finding.resource_name, + CheckId=finding.check_id, + Muted=finding.muted, + ) + self._data.append(compliance_row) - return compliance_row, csv_header + def batch_write_data_to_file(self, header: bool) -> None: + try: + if ( + getattr(self, "_file_descriptor", None) + and not self._file_descriptor.closed + and self._data + ): + csv_writer = DictWriter( + self._file_descriptor, + fieldnames=[ + field.upper() for field in self._data[0].__dict__.keys() + ], + delimiter=";", + ) + if header: + csv_writer.writeheader() + for finding in self._data: + for key in list(finding.__dict__.keys()): + finding.__dict__[key.upper()] = finding.__dict__.pop(key) + csv_writer.writerow(finding.dict()) + self._file_descriptor.close() + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) diff --git a/prowler/lib/outputs/compliance/cis_gcp.py b/prowler/lib/outputs/compliance/cis_gcp.py index 9a4d0cfb53..96507458fc 100644 --- a/prowler/lib/outputs/compliance/cis_gcp.py +++ b/prowler/lib/outputs/compliance/cis_gcp.py @@ -1,37 +1,68 @@ -from prowler.config.config import timestamp +from csv import DictWriter +from venv import logger + +from prowler.lib.check.compliance_models import ComplianceBaseModel +from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput from prowler.lib.outputs.compliance.models import GCP -from prowler.lib.outputs.csv.csv import generate_csv_fields -from prowler.lib.utils.utils import outputs_unix_timestamp +from prowler.lib.outputs.finding import Finding -def generate_compliance_row_cis_gcp( - finding, compliance, requirement, attribute, output_options -): - compliance_row = GCP( - Provider=finding.check_metadata.Provider, - Description=compliance.Description, - ProjectId=finding.project_id, - Location=finding.location.lower(), - AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp), - Requirements_Id=requirement.Id, - Requirements_Description=requirement.Description, - Requirements_Attributes_Section=attribute.Section, - Requirements_Attributes_Profile=attribute.Profile, - Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, - Requirements_Attributes_Description=attribute.Description, - Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, - Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, - Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, - Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, - 
Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, - Requirements_Attributes_References=attribute.References, - Status=finding.status, - StatusExtended=finding.status_extended, - ResourceId=finding.resource_id, - ResourceName=finding.resource_name, - CheckId=finding.check_metadata.CheckID, - Muted=finding.muted, - ) - csv_header = generate_csv_fields(GCP) +class GCPCIS(ComplianceOutput): + def transform( + self, findings: list[Finding], compliance: ComplianceBaseModel + ) -> None: + for finding in findings: + for requirement in compliance.Requirements: + for attribute in requirement.Attributes: + compliance_row = GCP( + Provider=finding.provider, + Description=compliance.Description, + ProjectId=finding.project_id, + Location=finding.location.lower(), + AssessmentDate=str(finding.timestamp), + Requirements_Id=requirement.Id, + Requirements_Description=requirement.Description, + Requirements_Attributes_Section=attribute.Section, + Requirements_Attributes_Profile=attribute.Profile, + Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, + Requirements_Attributes_Description=attribute.Description, + Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, + Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, + Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, + Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, + Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, + Requirements_Attributes_References=attribute.References, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_id, + ResourceName=finding.resource_name, + CheckId=finding.check_id, + Muted=finding.muted, + ) + self._data.append(compliance_row) - return compliance_row, csv_header + def batch_write_data_to_file(self, header: bool) -> None: + try: + if ( + getattr(self, "_file_descriptor", None) + and not self._file_descriptor.closed + and self._data + ): + csv_writer = DictWriter( + self._file_descriptor, + fieldnames=[ + field.upper() for field in self._data[0].__dict__.keys() + ], + delimiter=";", + ) + if header: + csv_writer.writeheader() + for finding in self._data: + for key in list(finding.__dict__.keys()): + finding.__dict__[key.upper()] = finding.__dict__.pop(key) + csv_writer.writerow(finding.dict()) + self._file_descriptor.close() + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) diff --git a/prowler/lib/outputs/compliance/cis_kubernetes.py b/prowler/lib/outputs/compliance/cis_kubernetes.py index e711977f56..5926c6382b 100644 --- a/prowler/lib/outputs/compliance/cis_kubernetes.py +++ b/prowler/lib/outputs/compliance/cis_kubernetes.py @@ -1,37 +1,68 @@ -from prowler.config.config import timestamp +from csv import DictWriter +from venv import logger + +from prowler.lib.check.compliance_models import ComplianceBaseModel +from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput from prowler.lib.outputs.compliance.models import Kubernetes -from prowler.lib.outputs.csv.csv import generate_csv_fields -from prowler.lib.utils.utils import outputs_unix_timestamp +from prowler.lib.outputs.finding import Finding -def generate_compliance_row_cis_kubernetes( - finding, compliance, requirement, attribute, output_options, provider -): - compliance_row = Kubernetes( - Provider=finding.check_metadata.Provider, - Description=compliance.Description, - 
Context=provider.identity.context, - Namespace=finding.namespace, - AssessmentDate=outputs_unix_timestamp(output_options.unix_timestamp, timestamp), - Requirements_Id=requirement.Id, - Requirements_Description=requirement.Description, - Requirements_Attributes_Section=attribute.Section, - Requirements_Attributes_Profile=attribute.Profile, - Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, - Requirements_Attributes_Description=attribute.Description, - Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, - Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, - Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, - Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, - Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, - Requirements_Attributes_References=attribute.References, - Requirements_Attributes_DefaultValue=attribute.DefaultValue, - Status=finding.status, - StatusExtended=finding.status_extended, - ResourceId=finding.resource_id, - CheckId=finding.check_metadata.CheckID, - Muted=finding.muted, - ) - csv_header = generate_csv_fields(Kubernetes) +class KubernetesCIS(ComplianceOutput): + def transform( + self, findings: list[Finding], compliance: ComplianceBaseModel + ) -> None: + for finding in findings: + for requirement in compliance.Requirements: + for attribute in requirement.Attributes: + compliance_row = Kubernetes( + Provider=finding.check_metadata.Provider, + Description=compliance.Description, + Context=finding.context, + Namespace=finding.namespace, + AssessmentDate=str(finding.timestamp), + Requirements_Id=requirement.Id, + Requirements_Description=requirement.Description, + Requirements_Attributes_Section=attribute.Section, + Requirements_Attributes_Profile=attribute.Profile, + Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, + Requirements_Attributes_Description=attribute.Description, + Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, + Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, + Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, + Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, + Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, + Requirements_Attributes_References=attribute.References, + Requirements_Attributes_DefaultValue=attribute.DefaultValue, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_id, + CheckId=finding.check_id, + Muted=finding.muted, + ) + self._data.append(compliance_row) - return compliance_row, csv_header + def batch_write_data_to_file(self, header: bool) -> None: + try: + if ( + getattr(self, "_file_descriptor", None) + and not self._file_descriptor.closed + and self._data + ): + csv_writer = DictWriter( + self._file_descriptor, + fieldnames=[ + field.upper() for field in self._data[0].__dict__.keys() + ], + delimiter=";", + ) + if header: + csv_writer.writeheader() + for finding in self._data: + for key in list(finding.__dict__.keys()): + finding.__dict__[key.upper()] = finding.__dict__.pop(key) + csv_writer.writerow(finding.dict()) + self._file_descriptor.close() + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) From 52cfa11163b687e42e9d59b4eed71ef2375442b7 Mon Sep 17 00:00:00 2001 From: Sergio Date: Mon, 8 Jul 2024 14:48:43 -0400 Subject: [PATCH 3/9] chore(cis): add CIS output 
class --- prowler/__main__.py | 95 +++++++++++++------ prowler/lib/outputs/compliance/cis.py | 58 ----------- prowler/lib/outputs/compliance/cis_aws.py | 65 +++++++------ prowler/lib/outputs/compliance/cis_azure.py | 68 +++++++------ prowler/lib/outputs/compliance/cis_gcp.py | 67 +++++++------ .../lib/outputs/compliance/cis_kubernetes.py | 67 +++++++------ prowler/lib/outputs/compliance/compliance.py | 11 +-- .../outputs/compliance/compliance_output.py | 4 +- prowler/lib/outputs/compliance/models.py | 2 - prowler/lib/outputs/file_descriptors.py | 55 ++++------- 10 files changed, 239 insertions(+), 253 deletions(-) diff --git a/prowler/__main__.py b/prowler/__main__.py index 3630f1db81..ccf64330a9 100644 --- a/prowler/__main__.py +++ b/prowler/__main__.py @@ -7,7 +7,6 @@ from colorama import Fore, Style from prowler.config.config import ( - available_compliance_frameworks, csv_file_suffix, get_available_compliance_frameworks, html_file_suffix, @@ -45,10 +44,10 @@ from prowler.lib.logger import logger, set_logging_config from prowler.lib.outputs.asff.asff import ASFF from prowler.lib.outputs.compliance.cis_aws import AWSCIS -from prowler.lib.outputs.compliance.compliance import ( - display_compliance_table, - get_check_compliance_frameworks_in_input, -) +from prowler.lib.outputs.compliance.cis_azure import AzureCIS +from prowler.lib.outputs.compliance.cis_gcp import GCPCIS +from prowler.lib.outputs.compliance.cis_kubernetes import KubernetesCIS +from prowler.lib.outputs.compliance.compliance import display_compliance_table from prowler.lib.outputs.csv.models import CSV from prowler.lib.outputs.finding import Finding from prowler.lib.outputs.html.html import HTML @@ -301,20 +300,6 @@ def prowler(): Finding.generate_output(global_provider, finding) for finding in findings ] - input_compliance_frameworks = list( - set(global_provider.output_options.output_modes).intersection( - available_compliance_frameworks - ) - ) - - check_compliance = [] - for finding in finding_outputs: - check_compliance += get_check_compliance_frameworks_in_input( - finding.check_id, - global_provider.output_options.bulk_checks_metadata, - input_compliance_frameworks, - ) - if args.output_formats: for mode in args.output_formats: if "csv" in mode: @@ -391,12 +376,20 @@ def prowler(): bucket_session, ) + # Compliance Frameworks + input_compliance_frameworks = list( + set(global_provider.output_options.output_modes).intersection( + get_available_compliance_frameworks(provider) + ) + ) + output_compliance_frameworks = ( + input_compliance_frameworks + if input_compliance_frameworks + else get_available_compliance_frameworks(provider) + ) if provider == "aws": - for compliance in check_compliance: - if compliance.Framework == "CIS": - compliance_name = ( - "cis_" + compliance.Version + "_" + compliance.Provider.lower() - ) + for compliance_name in output_compliance_frameworks: + if compliance_name.startswith("cis_"): # Generate CIS Finding Object filename = ( f"{global_provider.output_options.output_directory}/compliance/" @@ -404,13 +397,59 @@ def prowler(): ) cis_finding = AWSCIS( findings=finding_outputs, - compliance=compliance, + compliance=bulk_compliance_frameworks[compliance_name], + create_file_descriptor=True, + file_path=filename, + ) + cis_finding.batch_write_data_to_file() + + elif provider == "azure": + for compliance_name in output_compliance_frameworks: + if compliance_name.startswith("cis_"): + # Generate CIS Finding Object + filename = ( + f"{global_provider.output_options.output_directory}/compliance/" + 
f"{global_provider.output_options.output_filename}_{compliance_name}.csv" + ) + cis_finding = AzureCIS( + findings=finding_outputs, + compliance=bulk_compliance_frameworks[compliance_name], + create_file_descriptor=True, + file_path=filename, + ) + cis_finding.batch_write_data_to_file() + + elif provider == "gcp": + for compliance_name in output_compliance_frameworks: + if compliance_name.startswith("cis_"): + # Generate CIS Finding Object + filename = ( + f"{global_provider.output_options.output_directory}/compliance/" + f"{global_provider.output_options.output_filename}_{compliance_name}.csv" + ) + cis_finding = GCPCIS( + findings=finding_outputs, + compliance=bulk_compliance_frameworks[compliance_name], + create_file_descriptor=True, + file_path=filename, + ) + cis_finding.batch_write_data_to_file() + + elif provider == "kubernetes": + for compliance_name in output_compliance_frameworks: + if compliance_name.startswith("cis_"): + # Generate CIS Finding Object + filename = ( + f"{global_provider.output_options.output_directory}/compliance/" + f"{global_provider.output_options.output_filename}_{compliance_name}.csv" + ) + cis_finding = KubernetesCIS( + findings=finding_outputs, + compliance=bulk_compliance_frameworks[compliance_name], create_file_descriptor=True, file_path=filename, ) - # Write CIS Finding Object to file, using bool to write the header. - # TODO: If it's the first time that the compliance it's found, it SHOULD - cis_finding.batch_write_data_to_file(False) + cis_finding.batch_write_data_to_file() # AWS Security Hub Integration if provider == "aws" and args.security_hub: diff --git a/prowler/lib/outputs/compliance/cis.py b/prowler/lib/outputs/compliance/cis.py index eb05238ace..7ab1a7d96c 100644 --- a/prowler/lib/outputs/compliance/cis.py +++ b/prowler/lib/outputs/compliance/cis.py @@ -2,64 +2,6 @@ from tabulate import tabulate from prowler.config.config import orange_color -from prowler.lib.logger import logger -from prowler.lib.outputs.compliance.cis_azure import generate_compliance_row_cis_azure -from prowler.lib.outputs.compliance.cis_gcp import generate_compliance_row_cis_gcp -from prowler.lib.outputs.compliance.cis_kubernetes import ( - generate_compliance_row_cis_kubernetes, -) - - -def write_compliance_row_cis( - file_descriptors, - finding, - compliance, - output_options, - provider, - input_compliance_frameworks, -): - try: - compliance_output = ( - "cis_" + compliance.Version + "_" + compliance.Provider.lower() - ) - - # Only with the version of CIS that was selected - if compliance_output in str(input_compliance_frameworks): - for requirement in compliance.Requirements: - for attribute in requirement.Attributes: - if compliance.Provider == "Azure": - (compliance_row, csv_header) = ( - generate_compliance_row_cis_azure( - finding, - compliance, - requirement, - attribute, - output_options, - ) - ) - elif compliance.Provider == "GCP": - (compliance_row, csv_header) = generate_compliance_row_cis_gcp( - finding, compliance, requirement, attribute, output_options - ) - elif compliance.Provider == "Kubernetes": - (compliance_row, csv_header) = ( - generate_compliance_row_cis_kubernetes( - finding, - compliance, - requirement, - attribute, - output_options, - provider, - ) - ) - - # write_csv( - # file_descriptors[compliance_output], csv_header, compliance_row - # ) - except Exception as error: - logger.error( - f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" - ) def get_cis_table( diff --git a/prowler/lib/outputs/compliance/cis_aws.py 
b/prowler/lib/outputs/compliance/cis_aws.py index 48f4a6f8f9..0bcaa4d7c7 100644 --- a/prowler/lib/outputs/compliance/cis_aws.py +++ b/prowler/lib/outputs/compliance/cis_aws.py @@ -12,35 +12,43 @@ def transform( self, findings: list[Finding], compliance: ComplianceBaseModel ) -> None: for finding in findings: + # Get the compliance requirements for the finding + compliance_model_name = ( + compliance.Framework + "-" + compliance.Version + if compliance.Version + else compliance.Framework + ) + finding_requirements = finding.compliance.get(compliance_model_name, []) for requirement in compliance.Requirements: - for attribute in requirement.Attributes: - compliance_row = AWS( - Provider=finding.provider, - Description=compliance.Description, - AccountId=finding.account_uid, - Region=finding.region, - AssessmentDate=str(finding.timestamp), - Requirements_Id=requirement.Id, - Requirements_Description=requirement.Description, - Requirements_Attributes_Section=attribute.Section, - Requirements_Attributes_Profile=attribute.Profile, - Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, - Requirements_Attributes_Description=attribute.Description, - Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, - Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, - Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, - Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, - Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, - Requirements_Attributes_References=attribute.References, - Status=finding.status, - StatusExtended=finding.status_extended, - ResourceId=finding.resource_uid, - CheckId=finding.check_id, - Muted=finding.muted, - ) - self._data.append(compliance_row) + if requirement.Id in finding_requirements: + for attribute in requirement.Attributes: + compliance_row = AWS( + Provider=finding.provider, + Description=compliance.Description, + AccountId=finding.account_uid, + Region=finding.region, + AssessmentDate=str(finding.timestamp), + Requirements_Id=requirement.Id, + Requirements_Description=requirement.Description, + Requirements_Attributes_Section=attribute.Section, + Requirements_Attributes_Profile=attribute.Profile, + Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, + Requirements_Attributes_Description=attribute.Description, + Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, + Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, + Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, + Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, + Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, + Requirements_Attributes_References=attribute.References, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_uid, + CheckId=finding.check_id, + Muted=finding.muted, + ) + self._data.append(compliance_row) - def batch_write_data_to_file(self, header: bool) -> None: + def batch_write_data_to_file(self) -> None: try: if ( getattr(self, "_file_descriptor", None) @@ -54,8 +62,7 @@ def batch_write_data_to_file(self, header: bool) -> None: ], delimiter=";", ) - if header: - csv_writer.writeheader() + csv_writer.writeheader() for finding in self._data: for key in list(finding.__dict__.keys()): finding.__dict__[key.upper()] = finding.__dict__.pop(key) diff --git a/prowler/lib/outputs/compliance/cis_azure.py 
b/prowler/lib/outputs/compliance/cis_azure.py index 62be35e1f0..7bc29aa84b 100644 --- a/prowler/lib/outputs/compliance/cis_azure.py +++ b/prowler/lib/outputs/compliance/cis_azure.py @@ -12,36 +12,45 @@ def transform( self, findings: list[Finding], compliance: ComplianceBaseModel ) -> None: for finding in findings: + # Get the compliance requirements for the finding + compliance_model_name = ( + compliance.Framework + "-" + compliance.Version + if compliance.Version + else compliance.Framework + ) + finding_requirements = finding.compliance.get(compliance_model_name, []) for requirement in compliance.Requirements: - for attribute in requirement.Attributes: - compliance_row = Azure( - Provider=finding.provider, - Description=compliance.Description, - Subscription=finding.subscription, - AssessmentDate=str(finding.timestamp), - Requirements_Id=requirement.Id, - Requirements_Description=requirement.Description, - Requirements_Attributes_Section=attribute.Section, - Requirements_Attributes_Profile=attribute.Profile, - Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, - Requirements_Attributes_Description=attribute.Description, - Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, - Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, - Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, - Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, - Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, - Requirements_Attributes_DefaultValue=attribute.DefaultValue, - Requirements_Attributes_References=attribute.References, - Status=finding.status, - StatusExtended=finding.status_extended, - ResourceId=finding.resource_id, - ResourceName=finding.resource_name, - CheckId=finding.check_id, - Muted=finding.muted, - ) - self._data.append(compliance_row) + if requirement.Id in finding_requirements: + for attribute in requirement.Attributes: + compliance_row = Azure( + Provider=finding.provider, + Description=compliance.Description, + Subscription=finding.account_name, + Location=finding.region, + AssessmentDate=str(finding.timestamp), + Requirements_Id=requirement.Id, + Requirements_Description=requirement.Description, + Requirements_Attributes_Section=attribute.Section, + Requirements_Attributes_Profile=attribute.Profile, + Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, + Requirements_Attributes_Description=attribute.Description, + Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, + Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, + Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, + Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, + Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, + Requirements_Attributes_DefaultValue=attribute.DefaultValue, + Requirements_Attributes_References=attribute.References, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_uid, + ResourceName=finding.resource_name, + CheckId=finding.check_id, + Muted=finding.muted, + ) + self._data.append(compliance_row) - def batch_write_data_to_file(self, header: bool) -> None: + def batch_write_data_to_file(self) -> None: try: if ( getattr(self, "_file_descriptor", None) @@ -55,8 +64,7 @@ def batch_write_data_to_file(self, header: bool) -> None: ], delimiter=";", ) - if header: - csv_writer.writeheader() + csv_writer.writeheader() for finding in 
self._data: for key in list(finding.__dict__.keys()): finding.__dict__[key.upper()] = finding.__dict__.pop(key) diff --git a/prowler/lib/outputs/compliance/cis_gcp.py b/prowler/lib/outputs/compliance/cis_gcp.py index 96507458fc..b209b18e68 100644 --- a/prowler/lib/outputs/compliance/cis_gcp.py +++ b/prowler/lib/outputs/compliance/cis_gcp.py @@ -12,36 +12,44 @@ def transform( self, findings: list[Finding], compliance: ComplianceBaseModel ) -> None: for finding in findings: + # Get the compliance requirements for the finding + compliance_model_name = ( + compliance.Framework + "-" + compliance.Version + if compliance.Version + else compliance.Framework + ) + finding_requirements = finding.compliance.get(compliance_model_name, []) for requirement in compliance.Requirements: - for attribute in requirement.Attributes: - compliance_row = GCP( - Provider=finding.provider, - Description=compliance.Description, - ProjectId=finding.project_id, - Location=finding.location.lower(), - AssessmentDate=str(finding.timestamp), - Requirements_Id=requirement.Id, - Requirements_Description=requirement.Description, - Requirements_Attributes_Section=attribute.Section, - Requirements_Attributes_Profile=attribute.Profile, - Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, - Requirements_Attributes_Description=attribute.Description, - Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, - Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, - Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, - Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, - Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, - Requirements_Attributes_References=attribute.References, - Status=finding.status, - StatusExtended=finding.status_extended, - ResourceId=finding.resource_id, - ResourceName=finding.resource_name, - CheckId=finding.check_id, - Muted=finding.muted, - ) - self._data.append(compliance_row) + if requirement.Id in finding_requirements: + for attribute in requirement.Attributes: + compliance_row = GCP( + Provider=finding.provider, + Description=compliance.Description, + ProjectId=finding.account_uid, + Location=finding.region, + AssessmentDate=str(finding.timestamp), + Requirements_Id=requirement.Id, + Requirements_Description=requirement.Description, + Requirements_Attributes_Section=attribute.Section, + Requirements_Attributes_Profile=attribute.Profile, + Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, + Requirements_Attributes_Description=attribute.Description, + Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, + Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, + Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, + Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, + Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, + Requirements_Attributes_References=attribute.References, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_uid, + ResourceName=finding.resource_name, + CheckId=finding.check_id, + Muted=finding.muted, + ) + self._data.append(compliance_row) - def batch_write_data_to_file(self, header: bool) -> None: + def batch_write_data_to_file(self) -> None: try: if ( getattr(self, "_file_descriptor", None) @@ -55,8 +63,7 @@ def batch_write_data_to_file(self, header: bool) -> None: ], delimiter=";", ) - if header: - 
csv_writer.writeheader() + csv_writer.writeheader() for finding in self._data: for key in list(finding.__dict__.keys()): finding.__dict__[key.upper()] = finding.__dict__.pop(key) diff --git a/prowler/lib/outputs/compliance/cis_kubernetes.py b/prowler/lib/outputs/compliance/cis_kubernetes.py index 5926c6382b..187bdb7933 100644 --- a/prowler/lib/outputs/compliance/cis_kubernetes.py +++ b/prowler/lib/outputs/compliance/cis_kubernetes.py @@ -12,36 +12,44 @@ def transform( self, findings: list[Finding], compliance: ComplianceBaseModel ) -> None: for finding in findings: + # Get the compliance requirements for the finding + compliance_model_name = ( + compliance.Framework + "-" + compliance.Version + if compliance.Version + else compliance.Framework + ) + finding_requirements = finding.compliance.get(compliance_model_name, []) for requirement in compliance.Requirements: - for attribute in requirement.Attributes: - compliance_row = Kubernetes( - Provider=finding.check_metadata.Provider, - Description=compliance.Description, - Context=finding.context, - Namespace=finding.namespace, - AssessmentDate=str(finding.timestamp), - Requirements_Id=requirement.Id, - Requirements_Description=requirement.Description, - Requirements_Attributes_Section=attribute.Section, - Requirements_Attributes_Profile=attribute.Profile, - Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, - Requirements_Attributes_Description=attribute.Description, - Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, - Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, - Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, - Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, - Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, - Requirements_Attributes_References=attribute.References, - Requirements_Attributes_DefaultValue=attribute.DefaultValue, - Status=finding.status, - StatusExtended=finding.status_extended, - ResourceId=finding.resource_id, - CheckId=finding.check_id, - Muted=finding.muted, - ) - self._data.append(compliance_row) + if requirement.Id in finding_requirements: + for attribute in requirement.Attributes: + compliance_row = Kubernetes( + Provider=finding.provider, + Description=compliance.Description, + Context=finding.account_name, + Namespace=finding.region, + AssessmentDate=str(finding.timestamp), + Requirements_Id=requirement.Id, + Requirements_Description=requirement.Description, + Requirements_Attributes_Section=attribute.Section, + Requirements_Attributes_Profile=attribute.Profile, + Requirements_Attributes_AssessmentStatus=attribute.AssessmentStatus, + Requirements_Attributes_Description=attribute.Description, + Requirements_Attributes_RationaleStatement=attribute.RationaleStatement, + Requirements_Attributes_ImpactStatement=attribute.ImpactStatement, + Requirements_Attributes_RemediationProcedure=attribute.RemediationProcedure, + Requirements_Attributes_AuditProcedure=attribute.AuditProcedure, + Requirements_Attributes_AdditionalInformation=attribute.AdditionalInformation, + Requirements_Attributes_References=attribute.References, + Requirements_Attributes_DefaultValue=attribute.DefaultValue, + Status=finding.status, + StatusExtended=finding.status_extended, + ResourceId=finding.resource_uid, + CheckId=finding.check_id, + Muted=finding.muted, + ) + self._data.append(compliance_row) - def batch_write_data_to_file(self, header: bool) -> None: + def batch_write_data_to_file(self) -> None: try: if ( 
getattr(self, "_file_descriptor", None) @@ -55,8 +63,7 @@ def batch_write_data_to_file(self, header: bool) -> None: ], delimiter=";", ) - if header: - csv_writer.writeheader() + csv_writer.writeheader() for finding in self._data: for key in list(finding.__dict__.keys()): finding.__dict__[key.upper()] = finding.__dict__.pop(key) diff --git a/prowler/lib/outputs/compliance/compliance.py b/prowler/lib/outputs/compliance/compliance.py index 2c9e9c6263..eaab77fb1d 100644 --- a/prowler/lib/outputs/compliance/compliance.py +++ b/prowler/lib/outputs/compliance/compliance.py @@ -5,7 +5,7 @@ from prowler.lib.outputs.compliance.aws_well_architected_framework import ( write_compliance_row_aws_well_architected_framework, ) -from prowler.lib.outputs.compliance.cis import get_cis_table, write_compliance_row_cis +from prowler.lib.outputs.compliance.cis import get_cis_table from prowler.lib.outputs.compliance.ens_rd2022_aws import ( get_ens_rd2022_aws_table, write_compliance_row_ens_rd2022_aws, @@ -103,14 +103,7 @@ def fill_compliance( ) elif compliance.Framework == "CIS": - write_compliance_row_cis( - file_descriptors, - finding, - compliance, - output_options, - provider, - input_compliance_frameworks, - ) + continue elif ( "AWS-Well-Architected-Framework" in compliance.Framework diff --git a/prowler/lib/outputs/compliance/compliance_output.py b/prowler/lib/outputs/compliance/compliance_output.py index 4a4d1a33f7..b7e1181b59 100644 --- a/prowler/lib/outputs/compliance/compliance_output.py +++ b/prowler/lib/outputs/compliance/compliance_output.py @@ -50,7 +50,9 @@ def file_descriptor(self): return self._file_descriptor @abstractmethod - def transform(self, findings: List[Finding], compliance: dict): + def transform( + self, findings: List[Finding], compliance: ComplianceBaseModel + ) -> None: raise NotImplementedError @abstractmethod diff --git a/prowler/lib/outputs/compliance/models.py b/prowler/lib/outputs/compliance/models.py index a19647859a..c3bb1d2358 100644 --- a/prowler/lib/outputs/compliance/models.py +++ b/prowler/lib/outputs/compliance/models.py @@ -35,8 +35,6 @@ class Check_Output_CSV_ENS_RD2022(BaseModel): class CIS(BaseModel): Provider: str Description: str - AccountId: str - Region: str AssessmentDate: str Requirements_Id: str Requirements_Description: str diff --git a/prowler/lib/outputs/file_descriptors.py b/prowler/lib/outputs/file_descriptors.py index d41b03d14e..1869a7de56 100644 --- a/prowler/lib/outputs/file_descriptors.py +++ b/prowler/lib/outputs/file_descriptors.py @@ -10,12 +10,10 @@ MitreAttackGCP, ) from prowler.lib.outputs.compliance.models import ( - Azure, Check_Output_CSV_AWS_ISO27001_2013, Check_Output_CSV_AWS_Well_Architected, Check_Output_CSV_ENS_RD2022, Check_Output_CSV_Generic_Compliance, - Kubernetes, ) from prowler.lib.outputs.csv.csv import generate_csv_fields from prowler.lib.outputs.output import Finding @@ -69,6 +67,9 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, provi continue elif output_mode == "html": continue + # FIXME: Remove this once we merge all the compliance frameworks + if "cis_" in output_mode: + continue elif provider.type == "gcp": filename = f"{output_directory}/compliance/{output_filename}_{output_mode}{csv_file_suffix}" @@ -91,34 +92,17 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, provi elif provider.type == "kubernetes": filename = f"{output_directory}/compliance/{output_filename}_{output_mode}{csv_file_suffix}" - if "cis_" in output_mode: - file_descriptor = 
initialize_file_descriptor( - filename, - output_mode, - provider.type, - Kubernetes, - ) - file_descriptors.update({output_mode: file_descriptor}) - else: - file_descriptor = initialize_file_descriptor( - filename, - output_mode, - provider.type, - Check_Output_CSV_Generic_Compliance, - ) - file_descriptors.update({output_mode: file_descriptor}) + file_descriptor = initialize_file_descriptor( + filename, + output_mode, + provider.type, + Check_Output_CSV_Generic_Compliance, + ) + file_descriptors.update({output_mode: file_descriptor}) elif provider.type == "azure": filename = f"{output_directory}/compliance/{output_filename}_{output_mode}{csv_file_suffix}" - if "cis_" in output_mode: - file_descriptor = initialize_file_descriptor( - filename, - output_mode, - provider.type, - Azure, - ) - file_descriptors.update({output_mode: file_descriptor}) - elif output_mode == "mitre_attack_azure": + if output_mode == "mitre_attack_azure": file_descriptor = initialize_file_descriptor( filename, output_mode, @@ -175,15 +159,14 @@ def fill_file_descriptors(output_modes, output_directory, output_filename, provi file_descriptors.update({output_mode: file_descriptor}) else: - if "cis_" not in output_mode: - # Generic Compliance framework - file_descriptor = initialize_file_descriptor( - filename, - output_mode, - provider.type, - Check_Output_CSV_Generic_Compliance, - ) - file_descriptors.update({output_mode: file_descriptor}) + # Generic Compliance framework + file_descriptor = initialize_file_descriptor( + filename, + output_mode, + provider.type, + Check_Output_CSV_Generic_Compliance, + ) + file_descriptors.update({output_mode: file_descriptor}) except Exception as error: logger.error( From e995088e47bda55b03bca1dda0fc47ad82956d44 Mon Sep 17 00:00:00 2001 From: Sergio Date: Mon, 8 Jul 2024 16:30:09 -0400 Subject: [PATCH 4/9] chore(tests): add test for CIS AWS --- prowler/lib/outputs/compliance/cis.py | 214 +++++++++--------- tests/lib/outputs/compliance/cis_aws_test.py | 33 +++ .../lib/outputs/compliance/compliance_test.py | 76 +------ tests/lib/outputs/compliance/fixtures.py | 71 ++++++ 4 files changed, 221 insertions(+), 173 deletions(-) create mode 100644 tests/lib/outputs/compliance/cis_aws_test.py create mode 100644 tests/lib/outputs/compliance/fixtures.py diff --git a/prowler/lib/outputs/compliance/cis.py b/prowler/lib/outputs/compliance/cis.py index 7ab1a7d96c..5b8eff8865 100644 --- a/prowler/lib/outputs/compliance/cis.py +++ b/prowler/lib/outputs/compliance/cis.py @@ -2,6 +2,7 @@ from tabulate import tabulate from prowler.config.config import orange_color +from prowler.lib import logger def get_cis_table( @@ -12,111 +13,118 @@ def get_cis_table( output_directory: str, compliance_overview: bool, ): - sections = {} - cis_compliance_table = { - "Provider": [], - "Section": [], - "Level 1": [], - "Level 2": [], - "Muted": [], - } - pass_count = [] - fail_count = [] - muted_count = [] - for index, finding in enumerate(findings): - check = bulk_checks_metadata[finding.check_metadata.CheckID] - check_compliances = check.Compliance - for compliance in check_compliances: - if ( - compliance.Framework == "CIS" - and compliance.Version in compliance_framework - ): - for requirement in compliance.Requirements: - for attribute in requirement.Attributes: - section = attribute.Section - # Check if Section exists - if section not in sections: - sections[section] = { - "Status": f"{Fore.GREEN}PASS{Style.RESET_ALL}", - "Level 1": {"FAIL": 0, "PASS": 0}, - "Level 2": {"FAIL": 0, "PASS": 0}, - "Muted": 0, - } - if 
finding.muted: - if index not in muted_count: - muted_count.append(index) - sections[section]["Muted"] += 1 - else: - if finding.status == "FAIL" and index not in fail_count: - fail_count.append(index) - elif finding.status == "PASS" and index not in pass_count: - pass_count.append(index) - if "Level 1" in attribute.Profile: - if not finding.muted: - if finding.status == "FAIL": - sections[section]["Level 1"]["FAIL"] += 1 - else: - sections[section]["Level 1"]["PASS"] += 1 - elif "Level 2" in attribute.Profile: - if not finding.muted: - if finding.status == "FAIL": - sections[section]["Level 2"]["FAIL"] += 1 - else: - sections[section]["Level 2"]["PASS"] += 1 + try: + sections = {} + cis_compliance_table = { + "Provider": [], + "Section": [], + "Level 1": [], + "Level 2": [], + "Muted": [], + } + pass_count = [] + fail_count = [] + muted_count = [] + for index, finding in enumerate(findings): + check = bulk_checks_metadata[finding.check_metadata.CheckID] + check_compliances = check.Compliance + for compliance in check_compliances: + if ( + compliance.Framework == "CIS" + and compliance.Version in compliance_framework + ): + for requirement in compliance.Requirements: + for attribute in requirement.Attributes: + section = attribute.Section + # Check if Section exists + if section not in sections: + sections[section] = { + "Status": f"{Fore.GREEN}PASS{Style.RESET_ALL}", + "Level 1": {"FAIL": 0, "PASS": 0}, + "Level 2": {"FAIL": 0, "PASS": 0}, + "Muted": 0, + } + if finding.muted: + if index not in muted_count: + muted_count.append(index) + sections[section]["Muted"] += 1 + else: + if finding.status == "FAIL" and index not in fail_count: + fail_count.append(index) + elif ( + finding.status == "PASS" and index not in pass_count + ): + pass_count.append(index) + if "Level 1" in attribute.Profile: + if not finding.muted: + if finding.status == "FAIL": + sections[section]["Level 1"]["FAIL"] += 1 + else: + sections[section]["Level 1"]["PASS"] += 1 + elif "Level 2" in attribute.Profile: + if not finding.muted: + if finding.status == "FAIL": + sections[section]["Level 2"]["FAIL"] += 1 + else: + sections[section]["Level 2"]["PASS"] += 1 - # Add results to table - sections = dict(sorted(sections.items())) - for section in sections: - cis_compliance_table["Provider"].append(compliance.Provider) - cis_compliance_table["Section"].append(section) - if sections[section]["Level 1"]["FAIL"] > 0: - cis_compliance_table["Level 1"].append( - f"{Fore.RED}FAIL({sections[section]['Level 1']['FAIL']}){Style.RESET_ALL}" - ) - else: - cis_compliance_table["Level 1"].append( - f"{Fore.GREEN}PASS({sections[section]['Level 1']['PASS']}){Style.RESET_ALL}" - ) - if sections[section]["Level 2"]["FAIL"] > 0: - cis_compliance_table["Level 2"].append( - f"{Fore.RED}FAIL({sections[section]['Level 2']['FAIL']}){Style.RESET_ALL}" - ) - else: - cis_compliance_table["Level 2"].append( - f"{Fore.GREEN}PASS({sections[section]['Level 2']['PASS']}){Style.RESET_ALL}" - ) - cis_compliance_table["Muted"].append( - f"{orange_color}{sections[section]['Muted']}{Style.RESET_ALL}" - ) - if ( - len(fail_count) + len(pass_count) + len(muted_count) > 1 - ): # If there are no resources, don't print the compliance table - print( - f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:" - ) - overview_table = [ - [ - f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}", - f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) 
PASS{Style.RESET_ALL}", - f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}", - ] - ] - print(tabulate(overview_table, tablefmt="rounded_grid")) - if not compliance_overview: - print( - f"\nFramework {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Results:" - ) - print( - tabulate( - cis_compliance_table, - headers="keys", - tablefmt="rounded_grid", + # Add results to table + sections = dict(sorted(sections.items())) + for section in sections: + cis_compliance_table["Provider"].append(compliance.Provider) + cis_compliance_table["Section"].append(section) + if sections[section]["Level 1"]["FAIL"] > 0: + cis_compliance_table["Level 1"].append( + f"{Fore.RED}FAIL({sections[section]['Level 1']['FAIL']}){Style.RESET_ALL}" ) + else: + cis_compliance_table["Level 1"].append( + f"{Fore.GREEN}PASS({sections[section]['Level 1']['PASS']}){Style.RESET_ALL}" + ) + if sections[section]["Level 2"]["FAIL"] > 0: + cis_compliance_table["Level 2"].append( + f"{Fore.RED}FAIL({sections[section]['Level 2']['FAIL']}){Style.RESET_ALL}" + ) + else: + cis_compliance_table["Level 2"].append( + f"{Fore.GREEN}PASS({sections[section]['Level 2']['PASS']}){Style.RESET_ALL}" + ) + cis_compliance_table["Muted"].append( + f"{orange_color}{sections[section]['Muted']}{Style.RESET_ALL}" ) + if ( + len(fail_count) + len(pass_count) + len(muted_count) > 1 + ): # If there are no resources, don't print the compliance table print( - f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}" - ) - print(f"\nDetailed results of {compliance_framework.upper()} are in:") - print( - f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n" + f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:" ) + overview_table = [ + [ + f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}", + f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}", + f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}", + ] + ] + print(tabulate(overview_table, tablefmt="rounded_grid")) + if not compliance_overview: + print( + f"\nFramework {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Results:" + ) + print( + tabulate( + cis_compliance_table, + headers="keys", + tablefmt="rounded_grid", + ) + ) + print( + f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}" + ) + print(f"\nDetailed results of {compliance_framework.upper()} are in:") + print( + f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n" + ) + except Exception as error: + logger.error( + f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" + ) diff --git a/tests/lib/outputs/compliance/cis_aws_test.py b/tests/lib/outputs/compliance/cis_aws_test.py new file mode 100644 index 0000000000..5dc2492b21 --- /dev/null +++ b/tests/lib/outputs/compliance/cis_aws_test.py @@ -0,0 +1,33 @@ +from io import StringIO + +from mock import patch + +from prowler.lib.outputs.compliance.cis_aws import AWSCIS +from prowler.lib.outputs.compliance.models import AWS +from tests.lib.outputs.compliance.fixtures import CIS_1_4_AWS +from tests.lib.outputs.fixtures.fixtures import generate_finding_output + + +class Test_AWSCIS: + def test_output_transform(self): + findings = [generate_finding_output(compliance={"CIS-1.4": 
"2.1.3"})] + + output = AWSCIS(findings, CIS_1_4_AWS) + output_data = output.data[0] + assert isinstance(output_data, AWS) + + def test_batch_write_data_to_file(self): + mock_file = StringIO() + findings = [generate_finding_output(compliance={"CIS-1.4": "2.1.3"})] + # Clear the data from CSV class + output = AWSCIS(findings, CIS_1_4_AWS) + output._file_descriptor = mock_file + + with patch.object(mock_file, "close", return_value=None): + output.batch_write_data_to_file() + + mock_file.seek(0) + content = mock_file.read() + content = content.removeprefix("\r\n") + content = content.removesuffix("\r\n") + assert CIS_1_4_AWS.Description in content diff --git a/tests/lib/outputs/compliance/compliance_test.py b/tests/lib/outputs/compliance/compliance_test.py index b3ecd0648d..e9ec867337 100644 --- a/tests/lib/outputs/compliance/compliance_test.py +++ b/tests/lib/outputs/compliance/compliance_test.py @@ -1,78 +1,14 @@ from mock import MagicMock -from prowler.lib.check.compliance_models import ( - CIS_Requirement_Attribute, - Compliance_Requirement, - ComplianceBaseModel, -) from prowler.lib.outputs.compliance.compliance import ( get_check_compliance_frameworks_in_input, ) - -CIS_1_4_AWS_NAME = "cis_1.4_aws" -CIS_1_4_AWS = ComplianceBaseModel( - Framework="CIS", - Provider="AWS", - Version="1.4", - Description="The CIS Benchmark for CIS Amazon Web Services Foundations Benchmark, v1.4.0, Level 1 and 2 provides prescriptive guidance for configuring security options for a subset of Amazon Web Services. It has an emphasis on foundational, testable, and architecture agnostic settings", - Requirements=[ - Compliance_Requirement( - Checks=[], - Id="2.1.3", - Description="Ensure MFA Delete is enabled on S3 buckets", - Attributes=[ - CIS_Requirement_Attribute( - Section="2.1. Simple Storage Service (S3)", - Profile="Level 1", - AssessmentStatus="Automated", - Description="Once MFA Delete is enabled on your sensitive and classified S3 bucket it requires the user to have two forms of authentication.", - RationaleStatement="Adding MFA delete to an S3 bucket, requires additional authentication when you change the version state of your bucket or you delete and object version adding another layer of security in the event your security credentials are compromised or unauthorized access is granted.", - ImpactStatement="", - RemediationProcedure="Perform the steps below to enable MFA delete on an S3 bucket.\n\nNote:\n-You cannot enable MFA Delete using the AWS Management Console. You must use the AWS CLI or API.\n-You must use your 'root' account to enable MFA Delete on S3 buckets.\n\n**From Command line:**\n\n1. Run the s3api put-bucket-versioning command\n\n```\naws s3api put-bucket-versioning --profile my-root-profile --bucket Bucket_Name --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa “arn:aws:iam::aws_account_id:mfa/root-account-mfa-device passcode”\n```", - AuditProcedure='Perform the steps below to confirm MFA delete is configured on an S3 Bucket\n\n**From Console:**\n\n1. Login to the S3 console at `https://console.aws.amazon.com/s3/`\n\n2. Click the `Check` box next to the Bucket name you want to confirm\n\n3. In the window under `Properties`\n\n4. Confirm that Versioning is `Enabled`\n\n5. Confirm that MFA Delete is `Enabled`\n\n**From Command Line:**\n\n1. 
Run the `get-bucket-versioning`\n```\naws s3api get-bucket-versioning --bucket my-bucket\n```\n\nOutput example:\n```\n \n Enabled\n Enabled \n\n```\n\nIf the Console or the CLI output does not show Versioning and MFA Delete `enabled` refer to the remediation below.', - AdditionalInformation="", - References="https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html#MultiFactorAuthenticationDelete:https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMFADelete.html:https://aws.amazon.com/blogs/security/securing-access-to-aws-using-mfa-part-3/:https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa_lost-or-broken.html", - ) - ], - ) - ], -) -CIS_1_5_AWS_NAME = "cis_1.5_aws" -CIS_1_5_AWS = ComplianceBaseModel( - Framework="CIS", - Provider="AWS", - Version="1.5", - Description="The CIS Amazon Web Services Foundations Benchmark provides prescriptive guidance for configuring security options for a subset of Amazon Web Services with an emphasis on foundational, testable, and architecture agnostic settings.", - Requirements=[ - Compliance_Requirement( - Checks=[], - Id="2.1.3", - Description="Ensure MFA Delete is enabled on S3 buckets", - Attributes=[ - CIS_Requirement_Attribute( - Section="2.1. Simple Storage Service (S3)", - Profile="Level 1", - AssessmentStatus="Automated", - Description="Once MFA Delete is enabled on your sensitive and classified S3 bucket it requires the user to have two forms of authentication.", - RationaleStatement="Adding MFA delete to an S3 bucket, requires additional authentication when you change the version state of your bucket or you delete and object version adding another layer of security in the event your security credentials are compromised or unauthorized access is granted.", - ImpactStatement="", - RemediationProcedure="Perform the steps below to enable MFA delete on an S3 bucket.\n\nNote:\n-You cannot enable MFA Delete using the AWS Management Console. You must use the AWS CLI or API.\n-You must use your 'root' account to enable MFA Delete on S3 buckets.\n\n**From Command line:**\n\n1. Run the s3api put-bucket-versioning command\n\n```\naws s3api put-bucket-versioning --profile my-root-profile --bucket Bucket_Name --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa “arn:aws:iam::aws_account_id:mfa/root-account-mfa-device passcode”\n```", - AuditProcedure='Perform the steps below to confirm MFA delete is configured on an S3 Bucket\n\n**From Console:**\n\n1. Login to the S3 console at `https://console.aws.amazon.com/s3/`\n\n2. Click the `Check` box next to the Bucket name you want to confirm\n\n3. In the window under `Properties`\n\n4. Confirm that Versioning is `Enabled`\n\n5. Confirm that MFA Delete is `Enabled`\n\n**From Command Line:**\n\n1. 
Run the `get-bucket-versioning`\n```\naws s3api get-bucket-versioning --bucket my-bucket\n```\n\nOutput example:\n```\n \n Enabled\n Enabled \n\n```\n\nIf the Console or the CLI output does not show Versioning and MFA Delete `enabled` refer to the remediation below.', - AdditionalInformation="", - References="https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html#MultiFactorAuthenticationDelete:https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMFADelete.html:https://aws.amazon.com/blogs/security/securing-access-to-aws-using-mfa-part-3/:https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa_lost-or-broken.html", - ) - ], - ) - ], -) - -NOT_PRESENT_COMPLIANCE_NAME = "not_present_compliance_name" -NOT_PRESENT_COMPLIANCE = ComplianceBaseModel( - Framework="NOT_EXISTENT", - Provider="NOT_EXISTENT", - Version="NOT_EXISTENT", - Description="NOT_EXISTENT", - Requirements=[], +from tests.lib.outputs.compliance.fixtures import ( + CIS_1_4_AWS, + CIS_1_4_AWS_NAME, + CIS_1_5_AWS, + CIS_1_5_AWS_NAME, + NOT_PRESENT_COMPLIANCE, ) diff --git a/tests/lib/outputs/compliance/fixtures.py b/tests/lib/outputs/compliance/fixtures.py new file mode 100644 index 0000000000..d7c8e2e543 --- /dev/null +++ b/tests/lib/outputs/compliance/fixtures.py @@ -0,0 +1,71 @@ +from prowler.lib.check.compliance_models import ( + CIS_Requirement_Attribute, + Compliance_Requirement, + ComplianceBaseModel, +) + +CIS_1_4_AWS_NAME = "cis_1.4_aws" +CIS_1_4_AWS = ComplianceBaseModel( + Framework="CIS", + Provider="AWS", + Version="1.4", + Description="The CIS Benchmark for CIS Amazon Web Services Foundations Benchmark, v1.4.0, Level 1 and 2 provides prescriptive guidance for configuring security options for a subset of Amazon Web Services. It has an emphasis on foundational, testable, and architecture agnostic settings", + Requirements=[ + Compliance_Requirement( + Checks=[], + Id="2.1.3", + Description="Ensure MFA Delete is enabled on S3 buckets", + Attributes=[ + CIS_Requirement_Attribute( + Section="2.1. Simple Storage Service (S3)", + Profile="Level 1", + AssessmentStatus="Automated", + Description="Once MFA Delete is enabled on your sensitive and classified S3 bucket it requires the user to have two forms of authentication.", + RationaleStatement="Adding MFA delete to an S3 bucket, requires additional authentication when you change the version state of your bucket or you delete and object version adding another layer of security in the event your security credentials are compromised or unauthorized access is granted.", + ImpactStatement="", + RemediationProcedure="Perform the steps below to enable MFA delete on an S3 bucket.\n\nNote:\n-You cannot enable MFA Delete using the AWS Management Console. You must use the AWS CLI or API.\n-You must use your 'root' account to enable MFA Delete on S3 buckets.\n\n**From Command line:**\n\n1. Run the s3api put-bucket-versioning command\n\n```\naws s3api put-bucket-versioning --profile my-root-profile --bucket Bucket_Name --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa “arn:aws:iam::aws_account_id:mfa/root-account-mfa-device passcode”\n```", + AuditProcedure='Perform the steps below to confirm MFA delete is configured on an S3 Bucket\n\n**From Console:**\n\n1. Login to the S3 console at `https://console.aws.amazon.com/s3/`\n\n2. Click the `Check` box next to the Bucket name you want to confirm\n\n3. In the window under `Properties`\n\n4. Confirm that Versioning is `Enabled`\n\n5. Confirm that MFA Delete is `Enabled`\n\n**From Command Line:**\n\n1. 
Run the `get-bucket-versioning`\n```\naws s3api get-bucket-versioning --bucket my-bucket\n```\n\nOutput example:\n```\n \n Enabled\n Enabled \n\n```\n\nIf the Console or the CLI output does not show Versioning and MFA Delete `enabled` refer to the remediation below.', + AdditionalInformation="", + References="https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html#MultiFactorAuthenticationDelete:https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMFADelete.html:https://aws.amazon.com/blogs/security/securing-access-to-aws-using-mfa-part-3/:https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa_lost-or-broken.html", + ) + ], + ) + ], +) +CIS_1_5_AWS_NAME = "cis_1.5_aws" +CIS_1_5_AWS = ComplianceBaseModel( + Framework="CIS", + Provider="AWS", + Version="1.5", + Description="The CIS Amazon Web Services Foundations Benchmark provides prescriptive guidance for configuring security options for a subset of Amazon Web Services with an emphasis on foundational, testable, and architecture agnostic settings.", + Requirements=[ + Compliance_Requirement( + Checks=[], + Id="2.1.3", + Description="Ensure MFA Delete is enabled on S3 buckets", + Attributes=[ + CIS_Requirement_Attribute( + Section="2.1. Simple Storage Service (S3)", + Profile="Level 1", + AssessmentStatus="Automated", + Description="Once MFA Delete is enabled on your sensitive and classified S3 bucket it requires the user to have two forms of authentication.", + RationaleStatement="Adding MFA delete to an S3 bucket, requires additional authentication when you change the version state of your bucket or you delete and object version adding another layer of security in the event your security credentials are compromised or unauthorized access is granted.", + ImpactStatement="", + RemediationProcedure="Perform the steps below to enable MFA delete on an S3 bucket.\n\nNote:\n-You cannot enable MFA Delete using the AWS Management Console. You must use the AWS CLI or API.\n-You must use your 'root' account to enable MFA Delete on S3 buckets.\n\n**From Command line:**\n\n1. Run the s3api put-bucket-versioning command\n\n```\naws s3api put-bucket-versioning --profile my-root-profile --bucket Bucket_Name --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa “arn:aws:iam::aws_account_id:mfa/root-account-mfa-device passcode”\n```", + AuditProcedure='Perform the steps below to confirm MFA delete is configured on an S3 Bucket\n\n**From Console:**\n\n1. Login to the S3 console at `https://console.aws.amazon.com/s3/`\n\n2. Click the `Check` box next to the Bucket name you want to confirm\n\n3. In the window under `Properties`\n\n4. Confirm that Versioning is `Enabled`\n\n5. Confirm that MFA Delete is `Enabled`\n\n**From Command Line:**\n\n1. 
Run the `get-bucket-versioning`\n```\naws s3api get-bucket-versioning --bucket my-bucket\n```\n\nOutput example:\n```\n \n Enabled\n Enabled \n\n```\n\nIf the Console or the CLI output does not show Versioning and MFA Delete `enabled` refer to the remediation below.', + AdditionalInformation="", + References="https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html#MultiFactorAuthenticationDelete:https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMFADelete.html:https://aws.amazon.com/blogs/security/securing-access-to-aws-using-mfa-part-3/:https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa_lost-or-broken.html", + ) + ], + ) + ], +) + +NOT_PRESENT_COMPLIANCE_NAME = "not_present_compliance_name" +NOT_PRESENT_COMPLIANCE = ComplianceBaseModel( + Framework="NOT_EXISTENT", + Provider="NOT_EXISTENT", + Version="NOT_EXISTENT", + Description="NOT_EXISTENT", + Requirements=[], +) From 9cabfe4cf955af8142b0805732dc4269559d42d9 Mon Sep 17 00:00:00 2001 From: Sergio Date: Mon, 8 Jul 2024 17:31:53 -0400 Subject: [PATCH 5/9] chore(tests): add rest of the tests --- .../lib/outputs/compliance/cis_azure_test.py | 33 +++++++ tests/lib/outputs/compliance/cis_gcp_test.py | 33 +++++++ .../outputs/compliance/cis_kubernetes_test.py | 33 +++++++ tests/lib/outputs/compliance/fixtures.py | 86 +++++++++++++++++++ 4 files changed, 185 insertions(+) create mode 100644 tests/lib/outputs/compliance/cis_azure_test.py create mode 100644 tests/lib/outputs/compliance/cis_gcp_test.py create mode 100644 tests/lib/outputs/compliance/cis_kubernetes_test.py diff --git a/tests/lib/outputs/compliance/cis_azure_test.py b/tests/lib/outputs/compliance/cis_azure_test.py new file mode 100644 index 0000000000..8fc3adcc88 --- /dev/null +++ b/tests/lib/outputs/compliance/cis_azure_test.py @@ -0,0 +1,33 @@ +from io import StringIO + +from mock import patch + +from prowler.lib.outputs.compliance.cis_azure import AzureCIS +from prowler.lib.outputs.compliance.models import Azure +from tests.lib.outputs.compliance.fixtures import CIS_2_0_AZURE +from tests.lib.outputs.fixtures.fixtures import generate_finding_output + + +class Test_AzureCIS: + def test_output_transform(self): + findings = [generate_finding_output(compliance={"CIS-2.0": "2.1.3"})] + + output = AzureCIS(findings, CIS_2_0_AZURE) + output_data = output.data[0] + assert isinstance(output_data, Azure) + + def test_batch_write_data_to_file(self): + mock_file = StringIO() + findings = [generate_finding_output(compliance={"CIS-2.0": "2.1.3"})] + # Clear the data from CSV class + output = AzureCIS(findings, CIS_2_0_AZURE) + output._file_descriptor = mock_file + + with patch.object(mock_file, "close", return_value=None): + output.batch_write_data_to_file() + + mock_file.seek(0) + content = mock_file.read() + content = content.removeprefix("\r\n") + content = content.removesuffix("\r\n") + assert CIS_2_0_AZURE.Description in content diff --git a/tests/lib/outputs/compliance/cis_gcp_test.py b/tests/lib/outputs/compliance/cis_gcp_test.py new file mode 100644 index 0000000000..0d5b3af616 --- /dev/null +++ b/tests/lib/outputs/compliance/cis_gcp_test.py @@ -0,0 +1,33 @@ +from io import StringIO + +from mock import patch + +from prowler.lib.outputs.compliance.cis_gcp import GCPCIS +from prowler.lib.outputs.compliance.models import GCP +from tests.lib.outputs.compliance.fixtures import CIS_2_0_GCP +from tests.lib.outputs.fixtures.fixtures import generate_finding_output + + +class Test_GCPCIS: + def test_output_transform(self): + findings = 
[generate_finding_output(compliance={"CIS-2.0": "2.13"})] + + output = GCPCIS(findings, CIS_2_0_GCP) + output_data = output.data[0] + assert isinstance(output_data, GCP) + + def test_batch_write_data_to_file(self): + mock_file = StringIO() + findings = [generate_finding_output(compliance={"CIS-2.0": "2.13"})] + # Clear the data from CSV class + output = GCPCIS(findings, CIS_2_0_GCP) + output._file_descriptor = mock_file + + with patch.object(mock_file, "close", return_value=None): + output.batch_write_data_to_file() + + mock_file.seek(0) + content = mock_file.read() + content = content.removeprefix("\r\n") + content = content.removesuffix("\r\n") + assert CIS_2_0_GCP.Description in content diff --git a/tests/lib/outputs/compliance/cis_kubernetes_test.py b/tests/lib/outputs/compliance/cis_kubernetes_test.py new file mode 100644 index 0000000000..2b0b173d34 --- /dev/null +++ b/tests/lib/outputs/compliance/cis_kubernetes_test.py @@ -0,0 +1,33 @@ +from io import StringIO + +from mock import patch + +from prowler.lib.outputs.compliance.cis_kubernetes import KubernetesCIS +from prowler.lib.outputs.compliance.models import Kubernetes +from tests.lib.outputs.compliance.fixtures import CIS_1_8_KUBERNETES +from tests.lib.outputs.fixtures.fixtures import generate_finding_output + + +class Test_KubernetesCIS: + def test_output_transform(self): + findings = [generate_finding_output(compliance={"CIS-1.8": "1.1.3"})] + + output = KubernetesCIS(findings, CIS_1_8_KUBERNETES) + output_data = output.data[0] + assert isinstance(output_data, Kubernetes) + + def test_batch_write_data_to_file(self): + mock_file = StringIO() + findings = [generate_finding_output(compliance={"CIS-1.8": "1.1.3"})] + # Clear the data from CSV class + output = KubernetesCIS(findings, CIS_1_8_KUBERNETES) + output._file_descriptor = mock_file + + with patch.object(mock_file, "close", return_value=None): + output.batch_write_data_to_file() + + mock_file.seek(0) + content = mock_file.read() + content = content.removeprefix("\r\n") + content = content.removesuffix("\r\n") + assert CIS_1_8_KUBERNETES.Description in content diff --git a/tests/lib/outputs/compliance/fixtures.py b/tests/lib/outputs/compliance/fixtures.py index d7c8e2e543..f918eee7c8 100644 --- a/tests/lib/outputs/compliance/fixtures.py +++ b/tests/lib/outputs/compliance/fixtures.py @@ -32,6 +32,92 @@ ) ], ) +CIS_2_0_AZURE_NAME = "cis_2.0_azure" +CIS_2_0_AZURE = ComplianceBaseModel( + Framework="CIS", + Provider="Azure", + Version="2.0", + Description="The CIS Azure Foundations Benchmark provides prescriptive guidance for configuring security options for a subset of Azure with an emphasis on foundational, testable, and architecture agnostic settings.", + Requirements=[ + Compliance_Requirement( + Checks=[], + Id="2.1.3", + Description="Ensure That Microsoft Defender for Databases Is Set To 'On'", + Attributes=[ + CIS_Requirement_Attribute( + Section="2.1 Microsoft Defender for Cloud", + Profile="Level 2", + AssessmentStatus="Manual", + Description="Turning on Microsoft Defender for Databases enables threat detection for the instances running your database software. This provides threat intelligence, anomaly detection, and behavior analytics in the Azure Microsoft Defender for Cloud. 
Instead of being enabled on services like Platform as a Service (PaaS), this implementation will run within your instances as Infrastructure as a Service (IaaS) on the Operating Systems hosting your databases.", + RationaleStatement="Enabling Microsoft Defender for Azure SQL Databases allows your organization more granular control of the infrastructure running your database software. Instead of waiting on Microsoft release updates or other similar processes, you can manage them yourself. Threat detection is provided by the Microsoft Security Response Center (MSRC).", + ImpactStatement="Running Defender on Infrastructure as a service (IaaS) may incur increased costs associated with running the service and the instance it is on. Similarly, you will need qualified personnel to maintain the operating system and software updates. If it is not maintained, security patches will not be applied and it may be open to vulnerabilities.", + RemediationProcedure="From Azure Portal 1. Go to Microsoft Defender for Cloud 2. Select Environment Settings 3. Click on the subscription name 4. Select Defender plans 5. Set Databases Status to On 6. Select Save Review the chosen pricing tier. For the Azure Databases resource review the different plan information and choose one that fits the needs of your organization. From Azure CLI Run the following commands: az security pricing create -n 'SqlServers' --tier 'Standard' az security pricing create -n 'SqlServerVirtualMachines' --tier 'Standard' az security pricing create -n 'OpenSourceRelationalDatabases' --tier 'Standard' az security pricing create -n 'CosmosDbs' --tier 'Standard' From Azure PowerShell Run the following commands: Set-AzSecurityPricing -Name 'SqlServers' -PricingTier 'Standard' Set-AzSecurityPricing -Name 'SqlServerVirtualMachines' -PricingTier 'Standard' Set-AzSecurityPricing -Name 'OpenSourceRelationalDatabases' -PricingTier 'Standard' Set-AzSecurityPricing -Name 'CosmosDbs' -PricingTier 'Standard'", + AuditProcedure="From Azure Portal 1. Go to Microsoft Defender for Cloud 2. Select Environment Settings 3. Click on the subscription name 4. Select Defender plans 5. Ensure Databases Status is set to On 6. Review the chosen pricing tier From Azure CLI Ensure the output of the below commands is Standard az security pricing show -n 'SqlServers' az security pricing show -n 'SqlServerVirtualMachines' az security pricing show -n 'OpenSourceRelationalDatabases' az security pricing show -n 'CosmosDbs' If the output of any of the above commands shows pricingTier with a value of Free, the setting is out of compliance. From PowerShell Connect-AzAccount Get-AzSecurityPricing |select-object Name,PricingTier |where-object {$_.Name -match 'Sql' -or $_.Name -match 'Cosmos' -or $_.Name -match 'OpenSource'} Ensure the output shows Standard for each database type under the PricingTier column. 
Any that show Free are considered out of compliance.", + AdditionalInformation="", + DefaultValue="By default, Microsoft Defender plan is off.", + References="https://docs.microsoft.com/en-us/azure/azure-sql/database/azure-defender-for-sql?view=azuresql:https://docs.microsoft.com/en-us/azure/defender-for-cloud/quickstart-enable-database-protections:https://docs.microsoft.com/en-us/azure/defender-for-cloud/defender-for-databases-usage:https://docs.microsoft.com/en-us/azure/security-center/security-center-detection-capabilities:https://docs.microsoft.com/en-us/rest/api/securitycenter/pricings/list:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-logging-threat-detection#lt-1-enable-threat-detection-capabilities", + ) + ], + ) + ], +) +CIS_2_0_GCP_NAME = "cis_2.0_gcp" +CIS_2_0_GCP = ComplianceBaseModel( + Framework="CIS", + Provider="GCP", + Version="2.0", + Description="This CIS Benchmark is the product of a community consensus process and consists of secure configuration guidelines developed for Google Cloud Computing Platform", + Requirements=[ + Compliance_Requirement( + Checks=[], + Id="2.13", + Description="Ensure That Microsoft Defender for Databases Is Set To 'On'", + Attributes=[ + CIS_Requirement_Attribute( + Section="2. Logging and Monitoring", + Profile="Level 1", + AssessmentStatus="Automated", + Description="GCP Cloud Asset Inventory is services that provides a historical view of GCP resources and IAM policies through a time-series database. The information recorded includes metadata on Google Cloud resources, metadata on policies set on Google Cloud projects or resources, and runtime information gathered within a Google Cloud resource.", + RationaleStatement="The GCP resources and IAM policies captured by GCP Cloud Asset Inventory enables security analysis, resource change tracking, and compliance auditing. It is recommended GCP Cloud Asset Inventory be enabled for all GCP projects.", + ImpactStatement="", + RemediationProcedure="**From Google Cloud Console** Enable the Cloud Asset API: 1. Go to `API & Services/Library` by visiting https://console.cloud.google.com/apis/library(https://console.cloud.google.com/apis/library) 2. Search for `Cloud Asset API` and select the result for _Cloud Asset API_ 3. Click the `ENABLE` button. **From Google Cloud CLI** Enable the Cloud Asset API: 1. Enable the Cloud Asset API through the services interface: ``` gcloud services enable cloudasset.googleapis.com ```", + AuditProcedure="**From Google Cloud Console** Ensure that the Cloud Asset API is enabled: 1. Go to `API & Services/Library` by visiting https://console.cloud.google.com/apis/library(https://console.cloud.google.com/apis/library) 2. Search for `Cloud Asset API` and select the result for _Cloud Asset API_ 3. Ensure that `API Enabled` is displayed. **From Google Cloud CLI** Ensure that the Cloud Asset API is enabled: 1. Query enabled services: ``` gcloud services list --enabled --filter=name:cloudasset.googleapis.com ``` If the API is listed, then it is enabled. If the response is `Listed 0 items` the API is not enabled.", + AdditionalInformation="Additional info - Cloud Asset Inventory only keeps a five-week history of Google Cloud asset metadata. 
If a longer history is desired, automation to export the history to Cloud Storage or BigQuery should be evaluated.", + References="https://cloud.google.com/asset-inventory/docs", + ) + ], + ) + ], +) +CIS_1_8_KUBERNETES_NAME = "cis_2.0_kubernetes" +CIS_1_8_KUBERNETES = ComplianceBaseModel( + Framework="CIS", + Provider="Kubernetes", + Version="1.8", + Description="This CIS Kubernetes Benchmark provides prescriptive guidance for establishing a secure configuration posture for Kubernetes v1.27.", + Requirements=[ + Compliance_Requirement( + Checks=[], + Id="1.1.3", + Description="Ensure that the controller manager pod specification file permissions are set to 600 or more restrictive", + Attributes=[ + CIS_Requirement_Attribute( + Section="1.1 Control Plane Node Configuration Files", + Profile="Level 1 - Master Node", + AssessmentStatus="Automated", + Description="Ensure that the controller manager pod specification file has permissions of `600` or more restrictive.", + RationaleStatement="The controller manager pod specification file controls various parameters that set the behavior of the Controller Manager on the master node. You should restrict its file permissions to maintain the integrity of the file. The file should be writable by only the administrators on the system.", + ImpactStatement="", + RemediationProcedure="Run the below command (based on the file location on your system) on the Control Plane node. For example, ``` chmod 600 /etc/kubernetes/manifests/kube-controller-manager.yaml ```", + AuditProcedure="Run the below command (based on the file location on your system) on the Control Plane node. For example, ``` stat -c %a /etc/kubernetes/manifests/kube-controller-manager.yaml ``` Verify that the permissions are `600` or more restrictive.", + AdditionalInformation="", + References="https://kubernetes.io/docs/admin/kube-apiserver/", + DefaultValue="By default, the `kube-controller-manager.yaml` file has permissions of `640`.", + ) + ], + ) + ], +) CIS_1_5_AWS_NAME = "cis_1.5_aws" CIS_1_5_AWS = ComplianceBaseModel( Framework="CIS", From 0916c330ec0fba968e28c4897b2be9addac3f268 Mon Sep 17 00:00:00 2001 From: Sergio Date: Tue, 9 Jul 2024 09:16:22 -0400 Subject: [PATCH 6/9] solve comments --- prowler/__main__.py | 21 ++---- prowler/lib/outputs/compliance/cis_aws.py | 51 +++++++++---- prowler/lib/outputs/compliance/cis_azure.py | 45 ++++++++---- prowler/lib/outputs/compliance/cis_gcp.py | 45 ++++++++---- .../lib/outputs/compliance/cis_kubernetes.py | 45 ++++++++---- .../outputs/compliance/compliance_output.py | 13 +++- prowler/lib/outputs/compliance/models.py | 12 ++-- tests/lib/outputs/compliance/cis_aws_test.py | 59 ++++++++++++++- .../lib/outputs/compliance/cis_azure_test.py | 72 +++++++++++++++++-- tests/lib/outputs/compliance/cis_gcp_test.py | 69 ++++++++++++++++-- .../outputs/compliance/cis_kubernetes_test.py | 72 +++++++++++++++++-- tests/lib/outputs/fixtures/fixtures.py | 6 +- .../kubernetes/kubernetes_fixtures.py | 4 +- 13 files changed, 424 insertions(+), 90 deletions(-) diff --git a/prowler/__main__.py b/prowler/__main__.py index ccf64330a9..8c947452fa 100644 --- a/prowler/__main__.py +++ b/prowler/__main__.py @@ -377,18 +377,11 @@ def prowler(): ) # Compliance Frameworks - input_compliance_frameworks = list( - set(global_provider.output_options.output_modes).intersection( - get_available_compliance_frameworks(provider) - ) - ) - output_compliance_frameworks = ( - input_compliance_frameworks - if input_compliance_frameworks - else 
get_available_compliance_frameworks(provider) - ) + input_compliance_frameworks = set( + global_provider.output_options.output_modes + ).intersection(get_available_compliance_frameworks(provider)) if provider == "aws": - for compliance_name in output_compliance_frameworks: + for compliance_name in input_compliance_frameworks: if compliance_name.startswith("cis_"): # Generate CIS Finding Object filename = ( @@ -404,7 +397,7 @@ def prowler(): cis_finding.batch_write_data_to_file() elif provider == "azure": - for compliance_name in output_compliance_frameworks: + for compliance_name in input_compliance_frameworks: if compliance_name.startswith("cis_"): # Generate CIS Finding Object filename = ( @@ -420,7 +413,7 @@ def prowler(): cis_finding.batch_write_data_to_file() elif provider == "gcp": - for compliance_name in output_compliance_frameworks: + for compliance_name in input_compliance_frameworks: if compliance_name.startswith("cis_"): # Generate CIS Finding Object filename = ( @@ -436,7 +429,7 @@ def prowler(): cis_finding.batch_write_data_to_file() elif provider == "kubernetes": - for compliance_name in output_compliance_frameworks: + for compliance_name in input_compliance_frameworks: if compliance_name.startswith("cis_"): # Generate CIS Finding Object filename = ( diff --git a/prowler/lib/outputs/compliance/cis_aws.py b/prowler/lib/outputs/compliance/cis_aws.py index 0bcaa4d7c7..76b5f8ffaf 100644 --- a/prowler/lib/outputs/compliance/cis_aws.py +++ b/prowler/lib/outputs/compliance/cis_aws.py @@ -8,17 +8,38 @@ class AWSCIS(ComplianceOutput): + """ + This class represents the AWS CIS compliance output. + + Attributes: + - _data (list): A list to store transformed data from findings. + - _file_descriptor (TextIOWrapper): A file descriptor to write data to a file. + + Methods: + - transform: Transforms findings into AWS CIS compliance format. + - batch_write_data_to_file: Writes the findings data to a CSV file in AWS CIS compliance format. + """ + def transform( - self, findings: list[Finding], compliance: ComplianceBaseModel + self, + findings: list[Finding], + compliance: ComplianceBaseModel, + compliance_name: str, ) -> None: + """ + Transforms a list of findings into AWS CIS compliance format. + + Parameters: + - findings (list): A list of findings. + - compliance (ComplianceBaseModel): A compliance model. + - compliance_name (str): The name of the compliance model. + + Returns: + - None + """ for finding in findings: # Get the compliance requirements for the finding - compliance_model_name = ( - compliance.Framework + "-" + compliance.Version - if compliance.Version - else compliance.Framework - ) - finding_requirements = finding.compliance.get(compliance_model_name, []) + finding_requirements = finding.compliance.get(compliance_name, []) for requirement in compliance.Requirements: if requirement.Id in finding_requirements: for attribute in requirement.Attributes: @@ -49,6 +70,12 @@ def transform( self._data.append(compliance_row) def batch_write_data_to_file(self) -> None: + """ + Writes the findings data to a CSV file in AWS CIS compliance format. 
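The CSV is semicolon-delimited and its header row is built from the upper-cased field names of the AWS row model; the underlying file descriptor is closed once all rows have been written.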
+ + Returns: + - None + """ try: if ( getattr(self, "_file_descriptor", None) @@ -57,16 +84,14 @@ def batch_write_data_to_file(self) -> None: ): csv_writer = DictWriter( self._file_descriptor, - fieldnames=[ - field.upper() for field in self._data[0].__dict__.keys() - ], + fieldnames=[field.upper() for field in self._data[0].dict().keys()], delimiter=";", ) csv_writer.writeheader() for finding in self._data: - for key in list(finding.__dict__.keys()): - finding.__dict__[key.upper()] = finding.__dict__.pop(key) - csv_writer.writerow(finding.dict()) + csv_writer.writerow( + {k.upper(): v for k, v in finding.dict().items()} + ) self._file_descriptor.close() except Exception as error: logger.error( diff --git a/prowler/lib/outputs/compliance/cis_azure.py b/prowler/lib/outputs/compliance/cis_azure.py index 7bc29aa84b..ec45d2d881 100644 --- a/prowler/lib/outputs/compliance/cis_azure.py +++ b/prowler/lib/outputs/compliance/cis_azure.py @@ -8,17 +8,38 @@ class AzureCIS(ComplianceOutput): + """ + This class represents the Azure CIS compliance output. + + Attributes: + - _data (list): A list to store transformed data from findings. + - _file_descriptor (TextIOWrapper): A file descriptor to write data to a file. + + Methods: + - transform: Transforms findings into Azure CIS compliance format. + - batch_write_data_to_file: Writes the findings data to a CSV file in Azure CIS compliance format. + """ + def transform( - self, findings: list[Finding], compliance: ComplianceBaseModel + self, + findings: list[Finding], + compliance: ComplianceBaseModel, + compliance_name: str, ) -> None: + """ + Transforms a list of findings into Azure CIS compliance format. + + Parameters: + - findings (list): A list of findings. + - compliance (ComplianceBaseModel): A compliance model. + - compliance_name (str): The name of the compliance model. + + Returns: + - None + """ for finding in findings: # Get the compliance requirements for the finding - compliance_model_name = ( - compliance.Framework + "-" + compliance.Version - if compliance.Version - else compliance.Framework - ) - finding_requirements = finding.compliance.get(compliance_model_name, []) + finding_requirements = finding.compliance.get(compliance_name, []) for requirement in compliance.Requirements: if requirement.Id in finding_requirements: for attribute in requirement.Attributes: @@ -59,16 +80,14 @@ def batch_write_data_to_file(self) -> None: ): csv_writer = DictWriter( self._file_descriptor, - fieldnames=[ - field.upper() for field in self._data[0].__dict__.keys() - ], + fieldnames=[field.upper() for field in self._data[0].dict().keys()], delimiter=";", ) csv_writer.writeheader() for finding in self._data: - for key in list(finding.__dict__.keys()): - finding.__dict__[key.upper()] = finding.__dict__.pop(key) - csv_writer.writerow(finding.dict()) + csv_writer.writerow( + {k.upper(): v for k, v in finding.dict().items()} + ) self._file_descriptor.close() except Exception as error: logger.error( diff --git a/prowler/lib/outputs/compliance/cis_gcp.py b/prowler/lib/outputs/compliance/cis_gcp.py index b209b18e68..26317b7d4c 100644 --- a/prowler/lib/outputs/compliance/cis_gcp.py +++ b/prowler/lib/outputs/compliance/cis_gcp.py @@ -8,17 +8,38 @@ class GCPCIS(ComplianceOutput): + """ + This class represents the GCP CIS compliance output. + + Attributes: + - _data (list): A list to store transformed data from findings. + - _file_descriptor (TextIOWrapper): A file descriptor to write data to a file. 
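Example (a minimal usage sketch, assuming the constructor arguments introduced in this patch; the compliance object and file path below are placeholders):

    output = GCPCIS(
        findings=finding_outputs,           # list[Finding] already generated by Prowler
        compliance=cis_2_0_gcp,             # a CIS ComplianceBaseModel for GCP
        create_file_descriptor=True,
        file_path="output_cis_2.0_gcp.csv",
    )
    output.batch_write_data_to_file()       # writes the semicolon-delimited CSV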
+ + Methods: + - transform: Transforms findings into GCP CIS compliance format. + - batch_write_data_to_file: Writes the findings data to a CSV file in GCP CIS compliance format. + """ + def transform( - self, findings: list[Finding], compliance: ComplianceBaseModel + self, + findings: list[Finding], + compliance: ComplianceBaseModel, + compliance_name: str, ) -> None: + """ + Transforms a list of findings into GCP CIS compliance format. + + Parameters: + - findings (list): A list of findings. + - compliance (ComplianceBaseModel): A compliance model. + - compliance_name (str): The name of the compliance model. + + Returns: + - None + """ for finding in findings: # Get the compliance requirements for the finding - compliance_model_name = ( - compliance.Framework + "-" + compliance.Version - if compliance.Version - else compliance.Framework - ) - finding_requirements = finding.compliance.get(compliance_model_name, []) + finding_requirements = finding.compliance.get(compliance_name, []) for requirement in compliance.Requirements: if requirement.Id in finding_requirements: for attribute in requirement.Attributes: @@ -58,16 +79,14 @@ def batch_write_data_to_file(self) -> None: ): csv_writer = DictWriter( self._file_descriptor, - fieldnames=[ - field.upper() for field in self._data[0].__dict__.keys() - ], + fieldnames=[field.upper() for field in self._data[0].dict().keys()], delimiter=";", ) csv_writer.writeheader() for finding in self._data: - for key in list(finding.__dict__.keys()): - finding.__dict__[key.upper()] = finding.__dict__.pop(key) - csv_writer.writerow(finding.dict()) + csv_writer.writerow( + {k.upper(): v for k, v in finding.dict().items()} + ) self._file_descriptor.close() except Exception as error: logger.error( diff --git a/prowler/lib/outputs/compliance/cis_kubernetes.py b/prowler/lib/outputs/compliance/cis_kubernetes.py index 187bdb7933..160c4d8b32 100644 --- a/prowler/lib/outputs/compliance/cis_kubernetes.py +++ b/prowler/lib/outputs/compliance/cis_kubernetes.py @@ -8,17 +8,38 @@ class KubernetesCIS(ComplianceOutput): + """ + This class represents the Kubernetes CIS compliance output. + + Attributes: + - _data (list): A list to store transformed data from findings. + - _file_descriptor (TextIOWrapper): A file descriptor to write data to a file. + + Methods: + - transform: Transforms findings into Kubernetes CIS compliance format. + - batch_write_data_to_file: Writes the findings data to a CSV file in Kubernetes CIS compliance format. + """ + def transform( - self, findings: list[Finding], compliance: ComplianceBaseModel + self, + findings: list[Finding], + compliance: ComplianceBaseModel, + compliance_name: str, ) -> None: + """ + Transforms a list of findings into Kubernetes CIS compliance format. + + Parameters: + - findings (list): A list of findings. + - compliance (ComplianceBaseModel): A compliance model. + - compliance_name (str): The name of the compliance model. 
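For example (illustrative values), with compliance_name "CIS-1.8" and a finding whose compliance mapping contains {"CIS-1.8": ["1.1.3"]}, one Kubernetes row is produced per attribute of requirement 1.1.3; findings that do not reference the requirement are skipped.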
+ + Returns: + - None + """ for finding in findings: # Get the compliance requirements for the finding - compliance_model_name = ( - compliance.Framework + "-" + compliance.Version - if compliance.Version - else compliance.Framework - ) - finding_requirements = finding.compliance.get(compliance_model_name, []) + finding_requirements = finding.compliance.get(compliance_name, []) for requirement in compliance.Requirements: if requirement.Id in finding_requirements: for attribute in requirement.Attributes: @@ -58,16 +79,14 @@ def batch_write_data_to_file(self) -> None: ): csv_writer = DictWriter( self._file_descriptor, - fieldnames=[ - field.upper() for field in self._data[0].__dict__.keys() - ], + fieldnames=[field.upper() for field in self._data[0].dict().keys()], delimiter=";", ) csv_writer.writeheader() for finding in self._data: - for key in list(finding.__dict__.keys()): - finding.__dict__[key.upper()] = finding.__dict__.pop(key) - csv_writer.writerow(finding.dict()) + csv_writer.writerow( + {k.upper(): v for k, v in finding.dict().items()} + ) self._file_descriptor.close() except Exception as error: logger.error( diff --git a/prowler/lib/outputs/compliance/compliance_output.py b/prowler/lib/outputs/compliance/compliance_output.py index b7e1181b59..b6cf095ecc 100644 --- a/prowler/lib/outputs/compliance/compliance_output.py +++ b/prowler/lib/outputs/compliance/compliance_output.py @@ -37,7 +37,13 @@ def __init__( ) -> None: self._data = [] if findings: - self.transform(findings, compliance) + # Get the compliance name of the model + compliance_name = ( + compliance.Framework + "-" + compliance.Version + if compliance.Version + else compliance.Framework + ) + self.transform(findings, compliance, compliance_name) if create_file_descriptor: self.create_file_descriptor(file_path) @@ -51,7 +57,10 @@ def file_descriptor(self): @abstractmethod def transform( - self, findings: List[Finding], compliance: ComplianceBaseModel + self, + findings: List[Finding], + compliance: ComplianceBaseModel, + compliance_name: str, ) -> None: raise NotImplementedError diff --git a/prowler/lib/outputs/compliance/models.py b/prowler/lib/outputs/compliance/models.py index c3bb1d2358..babda1d1bd 100644 --- a/prowler/lib/outputs/compliance/models.py +++ b/prowler/lib/outputs/compliance/models.py @@ -33,6 +33,10 @@ class Check_Output_CSV_ENS_RD2022(BaseModel): class CIS(BaseModel): + """ + CIS generates a finding's output in CIS Compliance format. + """ + Provider: str Description: str AssessmentDate: str @@ -57,7 +61,7 @@ class CIS(BaseModel): class AWS(CIS): """ - Check_Output_CSV_CIS generates a finding's output in CSV CIS format. + AWS CIS Compliance format. """ AccountId: str @@ -66,7 +70,7 @@ class AWS(CIS): class Azure(CIS): """ - Check_Output_CSV_CIS generates a finding's output in CSV CIS format. + Azure CIS Compliance format. """ Subscription: str @@ -75,7 +79,7 @@ class Azure(CIS): class GCP(CIS): """ - Check_Output_CSV_CIS generates a finding's output in CSV CIS format. + GCP CIS Compliance format. """ ProjectId: str @@ -84,7 +88,7 @@ class GCP(CIS): class Kubernetes(CIS): """ - Check_Output_CSV_CIS generates a finding's output in CSV CIS format. + Kubernetes CIS Compliance format. 
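Like the other provider-specific models above, it extends the shared CIS base model, adding the Context and Namespace fields.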
""" Context: str diff --git a/tests/lib/outputs/compliance/cis_aws_test.py b/tests/lib/outputs/compliance/cis_aws_test.py index 5dc2492b21..b9eb170068 100644 --- a/tests/lib/outputs/compliance/cis_aws_test.py +++ b/tests/lib/outputs/compliance/cis_aws_test.py @@ -6,15 +6,70 @@ from prowler.lib.outputs.compliance.models import AWS from tests.lib.outputs.compliance.fixtures import CIS_1_4_AWS from tests.lib.outputs.fixtures.fixtures import generate_finding_output +from tests.providers.aws.utils import AWS_ACCOUNT_NUMBER, AWS_REGION_EU_WEST_1 -class Test_AWSCIS: +class TestAWSCIS: def test_output_transform(self): findings = [generate_finding_output(compliance={"CIS-1.4": "2.1.3"})] output = AWSCIS(findings, CIS_1_4_AWS) output_data = output.data[0] assert isinstance(output_data, AWS) + assert output_data.Provider == "aws" + assert output_data.AccountId == AWS_ACCOUNT_NUMBER + assert output_data.Region == AWS_REGION_EU_WEST_1 + assert output_data.Description == CIS_1_4_AWS.Description + assert output_data.Requirements_Id == CIS_1_4_AWS.Requirements[0].Id + assert ( + output_data.Requirements_Description + == CIS_1_4_AWS.Requirements[0].Description + ) + assert ( + output_data.Requirements_Attributes_Section + == CIS_1_4_AWS.Requirements[0].Attributes[0].Section + ) + assert ( + output_data.Requirements_Attributes_Profile + == CIS_1_4_AWS.Requirements[0].Attributes[0].Profile + ) + assert ( + output_data.Requirements_Attributes_AssessmentStatus + == CIS_1_4_AWS.Requirements[0].Attributes[0].AssessmentStatus + ) + assert ( + output_data.Requirements_Attributes_Description + == CIS_1_4_AWS.Requirements[0].Attributes[0].Description + ) + assert ( + output_data.Requirements_Attributes_RationaleStatement + == CIS_1_4_AWS.Requirements[0].Attributes[0].RationaleStatement + ) + assert ( + output_data.Requirements_Attributes_ImpactStatement + == CIS_1_4_AWS.Requirements[0].Attributes[0].ImpactStatement + ) + assert ( + output_data.Requirements_Attributes_RemediationProcedure + == CIS_1_4_AWS.Requirements[0].Attributes[0].RemediationProcedure + ) + assert ( + output_data.Requirements_Attributes_AuditProcedure + == CIS_1_4_AWS.Requirements[0].Attributes[0].AuditProcedure + ) + assert ( + output_data.Requirements_Attributes_AdditionalInformation + == CIS_1_4_AWS.Requirements[0].Attributes[0].AdditionalInformation + ) + assert ( + output_data.Requirements_Attributes_References + == CIS_1_4_AWS.Requirements[0].Attributes[0].References + ) + assert output_data.Status == "PASS" + assert output_data.StatusExtended == "" + assert output_data.ResourceId == "" + assert output_data.CheckId == "test-check-id" + assert output_data.Muted is False def test_batch_write_data_to_file(self): mock_file = StringIO() @@ -28,6 +83,4 @@ def test_batch_write_data_to_file(self): mock_file.seek(0) content = mock_file.read() - content = content.removeprefix("\r\n") - content = content.removesuffix("\r\n") assert CIS_1_4_AWS.Description in content diff --git a/tests/lib/outputs/compliance/cis_azure_test.py b/tests/lib/outputs/compliance/cis_azure_test.py index 8fc3adcc88..8de05575f8 100644 --- a/tests/lib/outputs/compliance/cis_azure_test.py +++ b/tests/lib/outputs/compliance/cis_azure_test.py @@ -6,15 +6,81 @@ from prowler.lib.outputs.compliance.models import Azure from tests.lib.outputs.compliance.fixtures import CIS_2_0_AZURE from tests.lib.outputs.fixtures.fixtures import generate_finding_output +from tests.providers.azure.azure_fixtures import ( + AZURE_SUBSCRIPTION_ID, + AZURE_SUBSCRIPTION_NAME, +) -class Test_AzureCIS: +class 
TestAzureCIS: def test_output_transform(self): - findings = [generate_finding_output(compliance={"CIS-2.0": "2.1.3"})] + findings = [ + generate_finding_output( + provider="azure", + compliance={"CIS-2.0": "2.1.3"}, + account_name=AZURE_SUBSCRIPTION_NAME, + account_uid=AZURE_SUBSCRIPTION_ID, + region="", + ) + ] output = AzureCIS(findings, CIS_2_0_AZURE) output_data = output.data[0] assert isinstance(output_data, Azure) + assert output_data.Provider == "azure" + assert output_data.Subscription == AZURE_SUBSCRIPTION_NAME + assert output_data.Location == "" + assert output_data.Description == CIS_2_0_AZURE.Description + assert output_data.Requirements_Id == CIS_2_0_AZURE.Requirements[0].Id + assert ( + output_data.Requirements_Description + == CIS_2_0_AZURE.Requirements[0].Description + ) + assert ( + output_data.Requirements_Attributes_Section + == CIS_2_0_AZURE.Requirements[0].Attributes[0].Section + ) + assert ( + output_data.Requirements_Attributes_Profile + == CIS_2_0_AZURE.Requirements[0].Attributes[0].Profile + ) + assert ( + output_data.Requirements_Attributes_AssessmentStatus + == CIS_2_0_AZURE.Requirements[0].Attributes[0].AssessmentStatus + ) + assert ( + output_data.Requirements_Attributes_Description + == CIS_2_0_AZURE.Requirements[0].Attributes[0].Description + ) + assert ( + output_data.Requirements_Attributes_RationaleStatement + == CIS_2_0_AZURE.Requirements[0].Attributes[0].RationaleStatement + ) + assert ( + output_data.Requirements_Attributes_ImpactStatement + == CIS_2_0_AZURE.Requirements[0].Attributes[0].ImpactStatement + ) + assert ( + output_data.Requirements_Attributes_RemediationProcedure + == CIS_2_0_AZURE.Requirements[0].Attributes[0].RemediationProcedure + ) + assert ( + output_data.Requirements_Attributes_AuditProcedure + == CIS_2_0_AZURE.Requirements[0].Attributes[0].AuditProcedure + ) + assert ( + output_data.Requirements_Attributes_AdditionalInformation + == CIS_2_0_AZURE.Requirements[0].Attributes[0].AdditionalInformation + ) + assert ( + output_data.Requirements_Attributes_References + == CIS_2_0_AZURE.Requirements[0].Attributes[0].References + ) + assert output_data.Status == "PASS" + assert output_data.StatusExtended == "" + assert output_data.ResourceId == "" + assert output_data.CheckId == "test-check-id" + assert output_data.Muted is False def test_batch_write_data_to_file(self): mock_file = StringIO() @@ -28,6 +94,4 @@ def test_batch_write_data_to_file(self): mock_file.seek(0) content = mock_file.read() - content = content.removeprefix("\r\n") - content = content.removesuffix("\r\n") assert CIS_2_0_AZURE.Description in content diff --git a/tests/lib/outputs/compliance/cis_gcp_test.py b/tests/lib/outputs/compliance/cis_gcp_test.py index 0d5b3af616..e650228bbe 100644 --- a/tests/lib/outputs/compliance/cis_gcp_test.py +++ b/tests/lib/outputs/compliance/cis_gcp_test.py @@ -6,15 +6,78 @@ from prowler.lib.outputs.compliance.models import GCP from tests.lib.outputs.compliance.fixtures import CIS_2_0_GCP from tests.lib.outputs.fixtures.fixtures import generate_finding_output +from tests.providers.gcp.gcp_fixtures import GCP_PROJECT_ID -class Test_GCPCIS: +class TestGCPCIS: def test_output_transform(self): - findings = [generate_finding_output(compliance={"CIS-2.0": "2.13"})] + findings = [ + generate_finding_output( + provider="gcp", + compliance={"CIS-2.0": "2.13"}, + account_name=GCP_PROJECT_ID, + account_uid=GCP_PROJECT_ID, + region="", + ) + ] output = GCPCIS(findings, CIS_2_0_GCP) output_data = output.data[0] assert isinstance(output_data, GCP) + assert 
output_data.Provider == "gcp" + assert output_data.ProjectId == GCP_PROJECT_ID + assert output_data.Location == "" + assert output_data.Description == CIS_2_0_GCP.Description + assert output_data.Requirements_Id == CIS_2_0_GCP.Requirements[0].Id + assert ( + output_data.Requirements_Description + == CIS_2_0_GCP.Requirements[0].Description + ) + assert ( + output_data.Requirements_Attributes_Section + == CIS_2_0_GCP.Requirements[0].Attributes[0].Section + ) + assert ( + output_data.Requirements_Attributes_Profile + == CIS_2_0_GCP.Requirements[0].Attributes[0].Profile + ) + assert ( + output_data.Requirements_Attributes_AssessmentStatus + == CIS_2_0_GCP.Requirements[0].Attributes[0].AssessmentStatus + ) + assert ( + output_data.Requirements_Attributes_Description + == CIS_2_0_GCP.Requirements[0].Attributes[0].Description + ) + assert ( + output_data.Requirements_Attributes_RationaleStatement + == CIS_2_0_GCP.Requirements[0].Attributes[0].RationaleStatement + ) + assert ( + output_data.Requirements_Attributes_ImpactStatement + == CIS_2_0_GCP.Requirements[0].Attributes[0].ImpactStatement + ) + assert ( + output_data.Requirements_Attributes_RemediationProcedure + == CIS_2_0_GCP.Requirements[0].Attributes[0].RemediationProcedure + ) + assert ( + output_data.Requirements_Attributes_AuditProcedure + == CIS_2_0_GCP.Requirements[0].Attributes[0].AuditProcedure + ) + assert ( + output_data.Requirements_Attributes_AdditionalInformation + == CIS_2_0_GCP.Requirements[0].Attributes[0].AdditionalInformation + ) + assert ( + output_data.Requirements_Attributes_References + == CIS_2_0_GCP.Requirements[0].Attributes[0].References + ) + assert output_data.Status == "PASS" + assert output_data.StatusExtended == "" + assert output_data.ResourceId == "" + assert output_data.CheckId == "test-check-id" + assert output_data.Muted is False def test_batch_write_data_to_file(self): mock_file = StringIO() @@ -28,6 +91,4 @@ def test_batch_write_data_to_file(self): mock_file.seek(0) content = mock_file.read() - content = content.removeprefix("\r\n") - content = content.removesuffix("\r\n") assert CIS_2_0_GCP.Description in content diff --git a/tests/lib/outputs/compliance/cis_kubernetes_test.py b/tests/lib/outputs/compliance/cis_kubernetes_test.py index 2b0b173d34..c2d90027da 100644 --- a/tests/lib/outputs/compliance/cis_kubernetes_test.py +++ b/tests/lib/outputs/compliance/cis_kubernetes_test.py @@ -6,15 +6,81 @@ from prowler.lib.outputs.compliance.models import Kubernetes from tests.lib.outputs.compliance.fixtures import CIS_1_8_KUBERNETES from tests.lib.outputs.fixtures.fixtures import generate_finding_output +from tests.providers.kubernetes.kubernetes_fixtures import ( + KUBERNETES_CLUSTER_NAME, + KUBERNETES_NAMESPACE, +) -class Test_KubernetesCIS: +class TestKubernetesCIS: def test_output_transform(self): - findings = [generate_finding_output(compliance={"CIS-1.8": "1.1.3"})] + findings = [ + generate_finding_output( + provider="kubernetes", + compliance={"CIS-1.8": "1.1.3"}, + account_name=KUBERNETES_CLUSTER_NAME, + account_uid=KUBERNETES_CLUSTER_NAME, + region=KUBERNETES_NAMESPACE, + ) + ] output = KubernetesCIS(findings, CIS_1_8_KUBERNETES) output_data = output.data[0] assert isinstance(output_data, Kubernetes) + assert output_data.Provider == "kubernetes" + assert output_data.Context == KUBERNETES_CLUSTER_NAME + assert output_data.Namespace == KUBERNETES_NAMESPACE + assert output_data.Description == CIS_1_8_KUBERNETES.Description + assert output_data.Requirements_Id == CIS_1_8_KUBERNETES.Requirements[0].Id + 
assert ( + output_data.Requirements_Description + == CIS_1_8_KUBERNETES.Requirements[0].Description + ) + assert ( + output_data.Requirements_Attributes_Section + == CIS_1_8_KUBERNETES.Requirements[0].Attributes[0].Section + ) + assert ( + output_data.Requirements_Attributes_Profile + == CIS_1_8_KUBERNETES.Requirements[0].Attributes[0].Profile + ) + assert ( + output_data.Requirements_Attributes_AssessmentStatus + == CIS_1_8_KUBERNETES.Requirements[0].Attributes[0].AssessmentStatus + ) + assert ( + output_data.Requirements_Attributes_Description + == CIS_1_8_KUBERNETES.Requirements[0].Attributes[0].Description + ) + assert ( + output_data.Requirements_Attributes_RationaleStatement + == CIS_1_8_KUBERNETES.Requirements[0].Attributes[0].RationaleStatement + ) + assert ( + output_data.Requirements_Attributes_ImpactStatement + == CIS_1_8_KUBERNETES.Requirements[0].Attributes[0].ImpactStatement + ) + assert ( + output_data.Requirements_Attributes_RemediationProcedure + == CIS_1_8_KUBERNETES.Requirements[0].Attributes[0].RemediationProcedure + ) + assert ( + output_data.Requirements_Attributes_AuditProcedure + == CIS_1_8_KUBERNETES.Requirements[0].Attributes[0].AuditProcedure + ) + assert ( + output_data.Requirements_Attributes_AdditionalInformation + == CIS_1_8_KUBERNETES.Requirements[0].Attributes[0].AdditionalInformation + ) + assert ( + output_data.Requirements_Attributes_References + == CIS_1_8_KUBERNETES.Requirements[0].Attributes[0].References + ) + assert output_data.Status == "PASS" + assert output_data.StatusExtended == "" + assert output_data.ResourceId == "" + assert output_data.CheckId == "test-check-id" + assert output_data.Muted is False def test_batch_write_data_to_file(self): mock_file = StringIO() @@ -28,6 +94,4 @@ def test_batch_write_data_to_file(self): mock_file.seek(0) content = mock_file.read() - content = content.removeprefix("\r\n") - content = content.removesuffix("\r\n") assert CIS_1_8_KUBERNETES.Description in content diff --git a/tests/lib/outputs/fixtures/fixtures.py b/tests/lib/outputs/fixtures/fixtures.py index 62ca5b6acf..42fa327838 100644 --- a/tests/lib/outputs/fixtures/fixtures.py +++ b/tests/lib/outputs/fixtures/fixtures.py @@ -10,6 +10,8 @@ def generate_finding_output( status_extended: str = "", severity: str = "high", muted: bool = False, + account_uid: str = AWS_ACCOUNT_NUMBER, + account_name: str = AWS_ACCOUNT_NUMBER, region: str = AWS_REGION_EU_WEST_1, resource_details: str = "", resource_uid: str = "", @@ -22,8 +24,8 @@ def generate_finding_output( return Finding( auth_method="profile: default", timestamp=timestamp, - account_uid=AWS_ACCOUNT_NUMBER, - account_name=AWS_ACCOUNT_NUMBER, + account_uid=account_uid, + account_name=account_name, account_email="", account_organization_uid="test-organization-id", account_organization_name="test-organization", diff --git a/tests/providers/kubernetes/kubernetes_fixtures.py b/tests/providers/kubernetes/kubernetes_fixtures.py index 7dc065353c..d28a07be21 100644 --- a/tests/providers/kubernetes/kubernetes_fixtures.py +++ b/tests/providers/kubernetes/kubernetes_fixtures.py @@ -1,12 +1,14 @@ -from kubernetes import client from mock import MagicMock +from kubernetes import client from prowler.providers.kubernetes.kubernetes_provider import KubernetesProvider from prowler.providers.kubernetes.models import ( KubernetesIdentityInfo, KubernetesSession, ) +KUBERNETES_CLUSTER_NAME = "test-cluster" +KUBERNETES_NAMESPACE = "test-namespace" KUBERNETES_CONFIG = { "audit_log_maxbackup": 10, "audit_log_maxsize": 100, From 
4435988822f6e592cda03e6e259971b11e3fa8f5 Mon Sep 17 00:00:00 2001 From: Sergio Date: Tue, 9 Jul 2024 13:15:13 -0400 Subject: [PATCH 7/9] solve comments --- prowler/__main__.py | 8 +- prowler/lib/outputs/compliance/cis.py | 130 ------------------ .../{mitre_attack => cis}/__init__.py | 0 prowler/lib/outputs/compliance/cis/cis.py | 122 ++++++++++++++++ .../outputs/compliance/{ => cis}/cis_aws.py | 2 +- .../outputs/compliance/{ => cis}/cis_azure.py | 2 +- .../outputs/compliance/{ => cis}/cis_gcp.py | 2 +- .../compliance/{ => cis}/cis_kubernetes.py | 2 +- prowler/lib/outputs/compliance/cis/models.py | 64 +++++++++ prowler/lib/outputs/compliance/compliance.py | 2 +- .../outputs/compliance/compliance_output.py | 54 +------- prowler/lib/outputs/compliance/models.py | 63 --------- tests/lib/outputs/compliance/cis_aws_test.py | 11 +- .../lib/outputs/compliance/cis_azure_test.py | 25 +++- tests/lib/outputs/compliance/cis_gcp_test.py | 20 ++- .../outputs/compliance/cis_kubernetes_test.py | 20 ++- 16 files changed, 258 insertions(+), 269 deletions(-) delete mode 100644 prowler/lib/outputs/compliance/cis.py rename prowler/lib/outputs/compliance/{mitre_attack => cis}/__init__.py (100%) create mode 100644 prowler/lib/outputs/compliance/cis/cis.py rename prowler/lib/outputs/compliance/{ => cis}/cis_aws.py (98%) rename prowler/lib/outputs/compliance/{ => cis}/cis_azure.py (98%) rename prowler/lib/outputs/compliance/{ => cis}/cis_gcp.py (98%) rename prowler/lib/outputs/compliance/{ => cis}/cis_kubernetes.py (98%) create mode 100644 prowler/lib/outputs/compliance/cis/models.py diff --git a/prowler/__main__.py b/prowler/__main__.py index 90b86ca1fb..d51546fdee 100644 --- a/prowler/__main__.py +++ b/prowler/__main__.py @@ -43,10 +43,10 @@ from prowler.lib.cli.parser import ProwlerArgumentParser from prowler.lib.logger import logger, set_logging_config from prowler.lib.outputs.asff.asff import ASFF -from prowler.lib.outputs.compliance.cis_aws import AWSCIS -from prowler.lib.outputs.compliance.cis_azure import AzureCIS -from prowler.lib.outputs.compliance.cis_gcp import GCPCIS -from prowler.lib.outputs.compliance.cis_kubernetes import KubernetesCIS +from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS +from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS +from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS +from prowler.lib.outputs.compliance.cis.cis_kubernetes import KubernetesCIS from prowler.lib.outputs.compliance.compliance import display_compliance_table from prowler.lib.outputs.csv.models import CSV from prowler.lib.outputs.finding import Finding diff --git a/prowler/lib/outputs/compliance/cis.py b/prowler/lib/outputs/compliance/cis.py deleted file mode 100644 index 5b8eff8865..0000000000 --- a/prowler/lib/outputs/compliance/cis.py +++ /dev/null @@ -1,130 +0,0 @@ -from colorama import Fore, Style -from tabulate import tabulate - -from prowler.config.config import orange_color -from prowler.lib import logger - - -def get_cis_table( - findings: list, - bulk_checks_metadata: dict, - compliance_framework: str, - output_filename: str, - output_directory: str, - compliance_overview: bool, -): - try: - sections = {} - cis_compliance_table = { - "Provider": [], - "Section": [], - "Level 1": [], - "Level 2": [], - "Muted": [], - } - pass_count = [] - fail_count = [] - muted_count = [] - for index, finding in enumerate(findings): - check = bulk_checks_metadata[finding.check_metadata.CheckID] - check_compliances = check.Compliance - for compliance in check_compliances: - if ( - 
compliance.Framework == "CIS" - and compliance.Version in compliance_framework - ): - for requirement in compliance.Requirements: - for attribute in requirement.Attributes: - section = attribute.Section - # Check if Section exists - if section not in sections: - sections[section] = { - "Status": f"{Fore.GREEN}PASS{Style.RESET_ALL}", - "Level 1": {"FAIL": 0, "PASS": 0}, - "Level 2": {"FAIL": 0, "PASS": 0}, - "Muted": 0, - } - if finding.muted: - if index not in muted_count: - muted_count.append(index) - sections[section]["Muted"] += 1 - else: - if finding.status == "FAIL" and index not in fail_count: - fail_count.append(index) - elif ( - finding.status == "PASS" and index not in pass_count - ): - pass_count.append(index) - if "Level 1" in attribute.Profile: - if not finding.muted: - if finding.status == "FAIL": - sections[section]["Level 1"]["FAIL"] += 1 - else: - sections[section]["Level 1"]["PASS"] += 1 - elif "Level 2" in attribute.Profile: - if not finding.muted: - if finding.status == "FAIL": - sections[section]["Level 2"]["FAIL"] += 1 - else: - sections[section]["Level 2"]["PASS"] += 1 - - # Add results to table - sections = dict(sorted(sections.items())) - for section in sections: - cis_compliance_table["Provider"].append(compliance.Provider) - cis_compliance_table["Section"].append(section) - if sections[section]["Level 1"]["FAIL"] > 0: - cis_compliance_table["Level 1"].append( - f"{Fore.RED}FAIL({sections[section]['Level 1']['FAIL']}){Style.RESET_ALL}" - ) - else: - cis_compliance_table["Level 1"].append( - f"{Fore.GREEN}PASS({sections[section]['Level 1']['PASS']}){Style.RESET_ALL}" - ) - if sections[section]["Level 2"]["FAIL"] > 0: - cis_compliance_table["Level 2"].append( - f"{Fore.RED}FAIL({sections[section]['Level 2']['FAIL']}){Style.RESET_ALL}" - ) - else: - cis_compliance_table["Level 2"].append( - f"{Fore.GREEN}PASS({sections[section]['Level 2']['PASS']}){Style.RESET_ALL}" - ) - cis_compliance_table["Muted"].append( - f"{orange_color}{sections[section]['Muted']}{Style.RESET_ALL}" - ) - if ( - len(fail_count) + len(pass_count) + len(muted_count) > 1 - ): # If there are no resources, don't print the compliance table - print( - f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:" - ) - overview_table = [ - [ - f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}", - f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}", - f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% ({len(muted_count)}) MUTED{Style.RESET_ALL}", - ] - ] - print(tabulate(overview_table, tablefmt="rounded_grid")) - if not compliance_overview: - print( - f"\nFramework {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Results:" - ) - print( - tabulate( - cis_compliance_table, - headers="keys", - tablefmt="rounded_grid", - ) - ) - print( - f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}" - ) - print(f"\nDetailed results of {compliance_framework.upper()} are in:") - print( - f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n" - ) - except Exception as error: - logger.error( - f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" - ) diff --git a/prowler/lib/outputs/compliance/mitre_attack/__init__.py b/prowler/lib/outputs/compliance/cis/__init__.py similarity index 100% rename from prowler/lib/outputs/compliance/mitre_attack/__init__.py rename to 
prowler/lib/outputs/compliance/cis/__init__.py diff --git a/prowler/lib/outputs/compliance/cis/cis.py b/prowler/lib/outputs/compliance/cis/cis.py new file mode 100644 index 0000000000..7ab1a7d96c --- /dev/null +++ b/prowler/lib/outputs/compliance/cis/cis.py @@ -0,0 +1,122 @@ +from colorama import Fore, Style +from tabulate import tabulate + +from prowler.config.config import orange_color + + +def get_cis_table( + findings: list, + bulk_checks_metadata: dict, + compliance_framework: str, + output_filename: str, + output_directory: str, + compliance_overview: bool, +): + sections = {} + cis_compliance_table = { + "Provider": [], + "Section": [], + "Level 1": [], + "Level 2": [], + "Muted": [], + } + pass_count = [] + fail_count = [] + muted_count = [] + for index, finding in enumerate(findings): + check = bulk_checks_metadata[finding.check_metadata.CheckID] + check_compliances = check.Compliance + for compliance in check_compliances: + if ( + compliance.Framework == "CIS" + and compliance.Version in compliance_framework + ): + for requirement in compliance.Requirements: + for attribute in requirement.Attributes: + section = attribute.Section + # Check if Section exists + if section not in sections: + sections[section] = { + "Status": f"{Fore.GREEN}PASS{Style.RESET_ALL}", + "Level 1": {"FAIL": 0, "PASS": 0}, + "Level 2": {"FAIL": 0, "PASS": 0}, + "Muted": 0, + } + if finding.muted: + if index not in muted_count: + muted_count.append(index) + sections[section]["Muted"] += 1 + else: + if finding.status == "FAIL" and index not in fail_count: + fail_count.append(index) + elif finding.status == "PASS" and index not in pass_count: + pass_count.append(index) + if "Level 1" in attribute.Profile: + if not finding.muted: + if finding.status == "FAIL": + sections[section]["Level 1"]["FAIL"] += 1 + else: + sections[section]["Level 1"]["PASS"] += 1 + elif "Level 2" in attribute.Profile: + if not finding.muted: + if finding.status == "FAIL": + sections[section]["Level 2"]["FAIL"] += 1 + else: + sections[section]["Level 2"]["PASS"] += 1 + + # Add results to table + sections = dict(sorted(sections.items())) + for section in sections: + cis_compliance_table["Provider"].append(compliance.Provider) + cis_compliance_table["Section"].append(section) + if sections[section]["Level 1"]["FAIL"] > 0: + cis_compliance_table["Level 1"].append( + f"{Fore.RED}FAIL({sections[section]['Level 1']['FAIL']}){Style.RESET_ALL}" + ) + else: + cis_compliance_table["Level 1"].append( + f"{Fore.GREEN}PASS({sections[section]['Level 1']['PASS']}){Style.RESET_ALL}" + ) + if sections[section]["Level 2"]["FAIL"] > 0: + cis_compliance_table["Level 2"].append( + f"{Fore.RED}FAIL({sections[section]['Level 2']['FAIL']}){Style.RESET_ALL}" + ) + else: + cis_compliance_table["Level 2"].append( + f"{Fore.GREEN}PASS({sections[section]['Level 2']['PASS']}){Style.RESET_ALL}" + ) + cis_compliance_table["Muted"].append( + f"{orange_color}{sections[section]['Muted']}{Style.RESET_ALL}" + ) + if ( + len(fail_count) + len(pass_count) + len(muted_count) > 1 + ): # If there are no resources, don't print the compliance table + print( + f"\nCompliance Status of {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Framework:" + ) + overview_table = [ + [ + f"{Fore.RED}{round(len(fail_count) / len(findings) * 100, 2)}% ({len(fail_count)}) FAIL{Style.RESET_ALL}", + f"{Fore.GREEN}{round(len(pass_count) / len(findings) * 100, 2)}% ({len(pass_count)}) PASS{Style.RESET_ALL}", + f"{orange_color}{round(len(muted_count) / len(findings) * 100, 2)}% 
({len(muted_count)}) MUTED{Style.RESET_ALL}", + ] + ] + print(tabulate(overview_table, tablefmt="rounded_grid")) + if not compliance_overview: + print( + f"\nFramework {Fore.YELLOW}{compliance_framework.upper()}{Style.RESET_ALL} Results:" + ) + print( + tabulate( + cis_compliance_table, + headers="keys", + tablefmt="rounded_grid", + ) + ) + print( + f"{Style.BRIGHT}* Only sections containing results appear.{Style.RESET_ALL}" + ) + print(f"\nDetailed results of {compliance_framework.upper()} are in:") + print( + f" - CSV: {output_directory}/compliance/{output_filename}_{compliance_framework}.csv\n" + ) diff --git a/prowler/lib/outputs/compliance/cis_aws.py b/prowler/lib/outputs/compliance/cis/cis_aws.py similarity index 98% rename from prowler/lib/outputs/compliance/cis_aws.py rename to prowler/lib/outputs/compliance/cis/cis_aws.py index 76b5f8ffaf..efbc39665d 100644 --- a/prowler/lib/outputs/compliance/cis_aws.py +++ b/prowler/lib/outputs/compliance/cis/cis_aws.py @@ -2,8 +2,8 @@ from venv import logger from prowler.lib.check.compliance_models import ComplianceBaseModel +from prowler.lib.outputs.compliance.cis.models import AWS from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput -from prowler.lib.outputs.compliance.models import AWS from prowler.lib.outputs.finding import Finding diff --git a/prowler/lib/outputs/compliance/cis_azure.py b/prowler/lib/outputs/compliance/cis/cis_azure.py similarity index 98% rename from prowler/lib/outputs/compliance/cis_azure.py rename to prowler/lib/outputs/compliance/cis/cis_azure.py index ec45d2d881..46ca2a2ecf 100644 --- a/prowler/lib/outputs/compliance/cis_azure.py +++ b/prowler/lib/outputs/compliance/cis/cis_azure.py @@ -2,8 +2,8 @@ from venv import logger from prowler.lib.check.compliance_models import ComplianceBaseModel +from prowler.lib.outputs.compliance.cis.models import Azure from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput -from prowler.lib.outputs.compliance.models import Azure from prowler.lib.outputs.finding import Finding diff --git a/prowler/lib/outputs/compliance/cis_gcp.py b/prowler/lib/outputs/compliance/cis/cis_gcp.py similarity index 98% rename from prowler/lib/outputs/compliance/cis_gcp.py rename to prowler/lib/outputs/compliance/cis/cis_gcp.py index 26317b7d4c..362b1b17bc 100644 --- a/prowler/lib/outputs/compliance/cis_gcp.py +++ b/prowler/lib/outputs/compliance/cis/cis_gcp.py @@ -2,8 +2,8 @@ from venv import logger from prowler.lib.check.compliance_models import ComplianceBaseModel +from prowler.lib.outputs.compliance.cis.models import GCP from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput -from prowler.lib.outputs.compliance.models import GCP from prowler.lib.outputs.finding import Finding diff --git a/prowler/lib/outputs/compliance/cis_kubernetes.py b/prowler/lib/outputs/compliance/cis/cis_kubernetes.py similarity index 98% rename from prowler/lib/outputs/compliance/cis_kubernetes.py rename to prowler/lib/outputs/compliance/cis/cis_kubernetes.py index 160c4d8b32..98ccdf7ef0 100644 --- a/prowler/lib/outputs/compliance/cis_kubernetes.py +++ b/prowler/lib/outputs/compliance/cis/cis_kubernetes.py @@ -2,8 +2,8 @@ from venv import logger from prowler.lib.check.compliance_models import ComplianceBaseModel +from prowler.lib.outputs.compliance.cis.models import Kubernetes from prowler.lib.outputs.compliance.compliance_output import ComplianceOutput -from prowler.lib.outputs.compliance.models import Kubernetes from prowler.lib.outputs.finding import Finding 
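For orientation while reviewing the move into `prowler/lib/outputs/compliance/cis/`, here is a minimal usage sketch of the relocated writer. It is not part of the patch: it assumes the test fixture helpers added later in this series are importable, and the temporary output path is only illustrative.

```python
import tempfile
from pathlib import Path

from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS
from tests.lib.outputs.compliance.fixtures import CIS_1_4_AWS
from tests.lib.outputs.fixtures.fixtures import generate_finding_output

# One finding mapped to CIS 1.4 requirement 2.1.3, built with the same test
# helpers the new unit tests use (only to keep this sketch self-contained).
findings = [generate_finding_output(compliance={"CIS-1.4": "2.1.3"})]

# Let the writer open its own file descriptor and produce the compliance CSV.
out_path = Path(tempfile.mkdtemp()) / "prowler-output_cis_1.4_aws.csv"
cis_csv = AWSCIS(
    findings,
    CIS_1_4_AWS,
    create_file_descriptor=True,
    file_path=str(out_path),
)
# Transformed rows live in cis_csv.data; this serializes them as a
# semicolon-delimited CSV with uppercased headers and closes the file.
cis_csv.batch_write_data_to_file()

print(out_path.read_text().splitlines()[0])  # PROVIDER;DESCRIPTION;ASSESSMENTDATE;...
```
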
diff --git a/prowler/lib/outputs/compliance/cis/models.py b/prowler/lib/outputs/compliance/cis/models.py new file mode 100644 index 0000000000..ffc40b75b3 --- /dev/null +++ b/prowler/lib/outputs/compliance/cis/models.py @@ -0,0 +1,64 @@ +from pydantic import BaseModel + + +class CIS(BaseModel): + """ + CIS generates a finding's output in CIS Compliance format. + """ + + Provider: str + Description: str + AssessmentDate: str + Requirements_Id: str + Requirements_Description: str + Requirements_Attributes_Section: str + Requirements_Attributes_Profile: str + Requirements_Attributes_AssessmentStatus: str + Requirements_Attributes_Description: str + Requirements_Attributes_RationaleStatement: str + Requirements_Attributes_ImpactStatement: str + Requirements_Attributes_RemediationProcedure: str + Requirements_Attributes_AuditProcedure: str + Requirements_Attributes_AdditionalInformation: str + Requirements_Attributes_References: str + Status: str + StatusExtended: str + ResourceId: str + CheckId: str + Muted: bool + + +class AWS(CIS): + """ + AWS CIS Compliance format. + """ + + AccountId: str + Region: str + + +class Azure(CIS): + """ + Azure CIS Compliance format. + """ + + Subscription: str + Location: str + + +class GCP(CIS): + """ + GCP CIS Compliance format. + """ + + ProjectId: str + Location: str + + +class Kubernetes(CIS): + """ + Kubernetes CIS Compliance format. + """ + + Context: str + Namespace: str diff --git a/prowler/lib/outputs/compliance/compliance.py b/prowler/lib/outputs/compliance/compliance.py index eaab77fb1d..b53245e707 100644 --- a/prowler/lib/outputs/compliance/compliance.py +++ b/prowler/lib/outputs/compliance/compliance.py @@ -5,7 +5,7 @@ from prowler.lib.outputs.compliance.aws_well_architected_framework import ( write_compliance_row_aws_well_architected_framework, ) -from prowler.lib.outputs.compliance.cis import get_cis_table +from prowler.lib.outputs.compliance.cis.cis import get_cis_table from prowler.lib.outputs.compliance.ens_rd2022_aws import ( get_ens_rd2022_aws_table, write_compliance_row_ens_rd2022_aws, diff --git a/prowler/lib/outputs/compliance/compliance_output.py b/prowler/lib/outputs/compliance/compliance_output.py index b6cf095ecc..338309b166 100644 --- a/prowler/lib/outputs/compliance/compliance_output.py +++ b/prowler/lib/outputs/compliance/compliance_output.py @@ -1,14 +1,12 @@ -from abc import ABC, abstractmethod from io import TextIOWrapper from typing import List from prowler.lib.check.compliance_models import ComplianceBaseModel -from prowler.lib.logger import logger from prowler.lib.outputs.finding import Finding -from prowler.lib.utils.utils import open_file +from prowler.lib.outputs.output import Output -class ComplianceOutput(ABC): +class ComplianceOutput(Output): """ This class represents an abstract base class for defining different types of outputs for findings. 
@@ -46,51 +44,3 @@ def __init__( self.transform(findings, compliance, compliance_name) if create_file_descriptor: self.create_file_descriptor(file_path) - - @property - def data(self): - return self._data - - @property - def file_descriptor(self): - return self._file_descriptor - - @abstractmethod - def transform( - self, - findings: List[Finding], - compliance: ComplianceBaseModel, - compliance_name: str, - ) -> None: - raise NotImplementedError - - @abstractmethod - def batch_write_data_to_file(self, file_descriptor: TextIOWrapper) -> None: - raise NotImplementedError - - def create_file_descriptor(self, file_path) -> None: - """ - Creates a file descriptor for writing data to a file. - - Parameters: - file_path (str): The path to the file where the data will be written. - - Returns: - None - - Raises: - Any exception that occurs during the file creation process will be caught and logged using the logger. - - Note: - The file is opened in append mode ("a") to ensure data is written at the end of the file without overwriting existing content. - """ - try: - mode = "a" - self._file_descriptor = open_file( - file_path, - mode, - ) - except Exception as error: - logger.error( - f"{error.__class__.__name__}[{error.__traceback__.tb_lineno}]: {error}" - ) diff --git a/prowler/lib/outputs/compliance/models.py b/prowler/lib/outputs/compliance/models.py index babda1d1bd..c651f092b0 100644 --- a/prowler/lib/outputs/compliance/models.py +++ b/prowler/lib/outputs/compliance/models.py @@ -32,69 +32,6 @@ class Check_Output_CSV_ENS_RD2022(BaseModel): Muted: bool -class CIS(BaseModel): - """ - CIS generates a finding's output in CIS Compliance format. - """ - - Provider: str - Description: str - AssessmentDate: str - Requirements_Id: str - Requirements_Description: str - Requirements_Attributes_Section: str - Requirements_Attributes_Profile: str - Requirements_Attributes_AssessmentStatus: str - Requirements_Attributes_Description: str - Requirements_Attributes_RationaleStatement: str - Requirements_Attributes_ImpactStatement: str - Requirements_Attributes_RemediationProcedure: str - Requirements_Attributes_AuditProcedure: str - Requirements_Attributes_AdditionalInformation: str - Requirements_Attributes_References: str - Status: str - StatusExtended: str - ResourceId: str - CheckId: str - Muted: bool - - -class AWS(CIS): - """ - AWS CIS Compliance format. - """ - - AccountId: str - Region: str - - -class Azure(CIS): - """ - Azure CIS Compliance format. - """ - - Subscription: str - Location: str - - -class GCP(CIS): - """ - GCP CIS Compliance format. - """ - - ProjectId: str - Location: str - - -class Kubernetes(CIS): - """ - Kubernetes CIS Compliance format. - """ - - Context: str - Namespace: str - - class Check_Output_CSV_Generic_Compliance(BaseModel): """ Check_Output_CSV_Generic_Compliance generates a finding's output in CSV Generic Compliance format. 
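All of these CIS writers share the same serialization convention: the field names of the pydantic row model (now defined in `cis/models.py`) are uppercased for the CSV header, and each row's keys are uppercased to match before being handed to `DictWriter`. A small, self-contained illustration of that technique with a toy two-field model (the `Row` model and its values are made up for this sketch and are not part of the patch):

```python
from csv import DictWriter
from io import StringIO

from pydantic import BaseModel


class Row(BaseModel):
    # Toy stand-in for the CIS row models; the real ones carry the full set
    # of Requirements_* columns plus the provider-specific fields.
    Provider: str
    Status: str


rows = [Row(Provider="aws", Status="PASS")]

buffer = StringIO()
writer = DictWriter(
    buffer,
    # Uppercase the model's field names to build the CSV header.
    fieldnames=[field.upper() for field in rows[0].dict().keys()],
    delimiter=";",
)
writer.writeheader()
for row in rows:
    # Uppercase each serialized row's keys so they line up with the header.
    writer.writerow({k.upper(): v for k, v in row.dict().items()})

print(repr(buffer.getvalue()))  # 'PROVIDER;STATUS\r\naws;PASS\r\n'
```

This is why the expected CSV strings in the tests below start with `PROVIDER;DESCRIPTION;ASSESSMENTDATE;...` and terminate each record with `\r\n`.
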
diff --git a/tests/lib/outputs/compliance/cis_aws_test.py b/tests/lib/outputs/compliance/cis_aws_test.py index b9eb170068..393e4dbdc5 100644 --- a/tests/lib/outputs/compliance/cis_aws_test.py +++ b/tests/lib/outputs/compliance/cis_aws_test.py @@ -1,9 +1,11 @@ +from datetime import datetime from io import StringIO +from freezegun import freeze_time from mock import patch -from prowler.lib.outputs.compliance.cis_aws import AWSCIS -from prowler.lib.outputs.compliance.models import AWS +from prowler.lib.outputs.compliance.cis.cis_aws import AWSCIS +from prowler.lib.outputs.compliance.cis.models import AWS from tests.lib.outputs.compliance.fixtures import CIS_1_4_AWS from tests.lib.outputs.fixtures.fixtures import generate_finding_output from tests.providers.aws.utils import AWS_ACCOUNT_NUMBER, AWS_REGION_EU_WEST_1 @@ -71,6 +73,7 @@ def test_output_transform(self): assert output_data.CheckId == "test-check-id" assert output_data.Muted is False + @freeze_time(datetime.now()) def test_batch_write_data_to_file(self): mock_file = StringIO() findings = [generate_finding_output(compliance={"CIS-1.4": "2.1.3"})] @@ -83,4 +86,6 @@ def test_batch_write_data_to_file(self): mock_file.seek(0) content = mock_file.read() - assert CIS_1_4_AWS.Description in content + print(repr(content)) + expected_csv = f"""PROVIDER;DESCRIPTION;ASSESSMENTDATE;REQUIREMENTS_ID;REQUIREMENTS_DESCRIPTION;REQUIREMENTS_ATTRIBUTES_SECTION;REQUIREMENTS_ATTRIBUTES_PROFILE;REQUIREMENTS_ATTRIBUTES_ASSESSMENTSTATUS;REQUIREMENTS_ATTRIBUTES_DESCRIPTION;REQUIREMENTS_ATTRIBUTES_RATIONALESTATEMENT;REQUIREMENTS_ATTRIBUTES_IMPACTSTATEMENT;REQUIREMENTS_ATTRIBUTES_REMEDIATIONPROCEDURE;REQUIREMENTS_ATTRIBUTES_AUDITPROCEDURE;REQUIREMENTS_ATTRIBUTES_ADDITIONALINFORMATION;REQUIREMENTS_ATTRIBUTES_REFERENCES;STATUS;STATUSEXTENDED;RESOURCEID;CHECKID;MUTED;ACCOUNTID;REGION\r\naws;The CIS Benchmark for CIS Amazon Web Services Foundations Benchmark, v1.4.0, Level 1 and 2 provides prescriptive guidance for configuring security options for a subset of Amazon Web Services. It has an emphasis on foundational, testable, and architecture agnostic settings;{datetime.now()};2.1.3;Ensure MFA Delete is enabled on S3 buckets;2.1. Simple Storage Service (S3);Level 1;Automated;Once MFA Delete is enabled on your sensitive and classified S3 bucket it requires the user to have two forms of authentication.;Adding MFA delete to an S3 bucket, requires additional authentication when you change the version state of your bucket or you delete and object version adding another layer of security in the event your security credentials are compromised or unauthorized access is granted.;;"Perform the steps below to enable MFA delete on an S3 bucket.\n\nNote:\n-You cannot enable MFA Delete using the AWS Management Console. You must use the AWS CLI or API.\n-You must use your \'root\' account to enable MFA Delete on S3 buckets.\n\n**From Command line:**\n\n1. Run the s3api put-bucket-versioning command\n\n```\naws s3api put-bucket-versioning --profile my-root-profile --bucket Bucket_Name --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa “arn:aws:iam::aws_account_id:mfa/root-account-mfa-device passcode”\n```";"Perform the steps below to confirm MFA delete is configured on an S3 Bucket\n\n**From Console:**\n\n1. Login to the S3 console at `https://console.aws.amazon.com/s3/`\n\n2. Click the `Check` box next to the Bucket name you want to confirm\n\n3. In the window under `Properties`\n\n4. Confirm that Versioning is `Enabled`\n\n5. 
Confirm that MFA Delete is `Enabled`\n\n**From Command Line:**\n\n1. Run the `get-bucket-versioning`\n```\naws s3api get-bucket-versioning --bucket my-bucket\n```\n\nOutput example:\n```\n \n Enabled\n Enabled \n\n```\n\nIf the Console or the CLI output does not show Versioning and MFA Delete `enabled` refer to the remediation below.";;https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html#MultiFactorAuthenticationDelete:https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMFADelete.html:https://aws.amazon.com/blogs/security/securing-access-to-aws-using-mfa-part-3/:https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa_lost-or-broken.html;PASS;;;test-check-id;False;123456789012;eu-west-1\r\n""" + assert content == expected_csv diff --git a/tests/lib/outputs/compliance/cis_azure_test.py b/tests/lib/outputs/compliance/cis_azure_test.py index 8de05575f8..62800e946f 100644 --- a/tests/lib/outputs/compliance/cis_azure_test.py +++ b/tests/lib/outputs/compliance/cis_azure_test.py @@ -1,9 +1,11 @@ +from datetime import datetime from io import StringIO +from freezegun import freeze_time from mock import patch -from prowler.lib.outputs.compliance.cis_azure import AzureCIS -from prowler.lib.outputs.compliance.models import Azure +from prowler.lib.outputs.compliance.cis.cis_azure import AzureCIS +from prowler.lib.outputs.compliance.cis.models import Azure from tests.lib.outputs.compliance.fixtures import CIS_2_0_AZURE from tests.lib.outputs.fixtures.fixtures import generate_finding_output from tests.providers.azure.azure_fixtures import ( @@ -82,9 +84,18 @@ def test_output_transform(self): assert output_data.CheckId == "test-check-id" assert output_data.Muted is False + @freeze_time(datetime.now()) def test_batch_write_data_to_file(self): mock_file = StringIO() - findings = [generate_finding_output(compliance={"CIS-2.0": "2.1.3"})] + findings = [ + generate_finding_output( + provider="azure", + compliance={"CIS-2.0": "2.1.3"}, + account_name=AZURE_SUBSCRIPTION_NAME, + account_uid=AZURE_SUBSCRIPTION_ID, + region="", + ) + ] # Clear the data from CSV class output = AzureCIS(findings, CIS_2_0_AZURE) output._file_descriptor = mock_file @@ -94,4 +105,10 @@ def test_batch_write_data_to_file(self): mock_file.seek(0) content = mock_file.read() - assert CIS_2_0_AZURE.Description in content + print(repr(content)) + expected_csv = ( + "PROVIDER;DESCRIPTION;ASSESSMENTDATE;REQUIREMENTS_ID;REQUIREMENTS_DESCRIPTION;REQUIREMENTS_ATTRIBUTES_SECTION;REQUIREMENTS_ATTRIBUTES_PROFILE;REQUIREMENTS_ATTRIBUTES_ASSESSMENTSTATUS;REQUIREMENTS_ATTRIBUTES_DESCRIPTION;REQUIREMENTS_ATTRIBUTES_RATIONALESTATEMENT;REQUIREMENTS_ATTRIBUTES_IMPACTSTATEMENT;REQUIREMENTS_ATTRIBUTES_REMEDIATIONPROCEDURE;REQUIREMENTS_ATTRIBUTES_AUDITPROCEDURE;REQUIREMENTS_ATTRIBUTES_ADDITIONALINFORMATION;REQUIREMENTS_ATTRIBUTES_REFERENCES;STATUS;STATUSEXTENDED;RESOURCEID;CHECKID;MUTED;SUBSCRIPTION;LOCATION\r\nazure;The CIS Azure Foundations Benchmark provides prescriptive guidance for configuring security options for a subset of Azure with an emphasis on foundational, testable, and architecture agnostic settings.;" + + str(datetime.now()) + + ";2.1.3;Ensure That Microsoft Defender for Databases Is Set To 'On';2.1 Microsoft Defender for Cloud;Level 2;Manual;Turning on Microsoft Defender for Databases enables threat detection for the instances running your database software. This provides threat intelligence, anomaly detection, and behavior analytics in the Azure Microsoft Defender for Cloud. 
Instead of being enabled on services like Platform as a Service (PaaS), this implementation will run within your instances as Infrastructure as a Service (IaaS) on the Operating Systems hosting your databases.;Enabling Microsoft Defender for Azure SQL Databases allows your organization more granular control of the infrastructure running your database software. Instead of waiting on Microsoft release updates or other similar processes, you can manage them yourself. Threat detection is provided by the Microsoft Security Response Center (MSRC).;Running Defender on Infrastructure as a service (IaaS) may incur increased costs associated with running the service and the instance it is on. Similarly, you will need qualified personnel to maintain the operating system and software updates. If it is not maintained, security patches will not be applied and it may be open to vulnerabilities.;From Azure Portal 1. Go to Microsoft Defender for Cloud 2. Select Environment Settings 3. Click on the subscription name 4. Select Defender plans 5. Set Databases Status to On 6. Select Save Review the chosen pricing tier. For the Azure Databases resource review the different plan information and choose one that fits the needs of your organization. From Azure CLI Run the following commands: az security pricing create -n 'SqlServers' --tier 'Standard' az security pricing create -n 'SqlServerVirtualMachines' --tier 'Standard' az security pricing create -n 'OpenSourceRelationalDatabases' --tier 'Standard' az security pricing create -n 'CosmosDbs' --tier 'Standard' From Azure PowerShell Run the following commands: Set-AzSecurityPricing -Name 'SqlServers' -PricingTier 'Standard' Set-AzSecurityPricing -Name 'SqlServerVirtualMachines' -PricingTier 'Standard' Set-AzSecurityPricing -Name 'OpenSourceRelationalDatabases' -PricingTier 'Standard' Set-AzSecurityPricing -Name 'CosmosDbs' -PricingTier 'Standard';From Azure Portal 1. Go to Microsoft Defender for Cloud 2. Select Environment Settings 3. Click on the subscription name 4. Select Defender plans 5. Ensure Databases Status is set to On 6. Review the chosen pricing tier From Azure CLI Ensure the output of the below commands is Standard az security pricing show -n 'SqlServers' az security pricing show -n 'SqlServerVirtualMachines' az security pricing show -n 'OpenSourceRelationalDatabases' az security pricing show -n 'CosmosDbs' If the output of any of the above commands shows pricingTier with a value of Free, the setting is out of compliance. From PowerShell Connect-AzAccount Get-AzSecurityPricing |select-object Name,PricingTier |where-object {$_.Name -match 'Sql' -or $_.Name -match 'Cosmos' -or $_.Name -match 'OpenSource'} Ensure the output shows Standard for each database type under the PricingTier column. 
Any that show Free are considered out of compliance.;;https://docs.microsoft.com/en-us/azure/azure-sql/database/azure-defender-for-sql?view=azuresql:https://docs.microsoft.com/en-us/azure/defender-for-cloud/quickstart-enable-database-protections:https://docs.microsoft.com/en-us/azure/defender-for-cloud/defender-for-databases-usage:https://docs.microsoft.com/en-us/azure/security-center/security-center-detection-capabilities:https://docs.microsoft.com/en-us/rest/api/securitycenter/pricings/list:https://docs.microsoft.com/en-us/security/benchmark/azure/security-controls-v3-logging-threat-detection#lt-1-enable-threat-detection-capabilities;PASS;;;test-check-id;False;Subscription Name;\r\n" + ) + assert content == expected_csv diff --git a/tests/lib/outputs/compliance/cis_gcp_test.py b/tests/lib/outputs/compliance/cis_gcp_test.py index e650228bbe..2fe88623d0 100644 --- a/tests/lib/outputs/compliance/cis_gcp_test.py +++ b/tests/lib/outputs/compliance/cis_gcp_test.py @@ -1,9 +1,11 @@ +from datetime import datetime from io import StringIO +from freezegun import freeze_time from mock import patch -from prowler.lib.outputs.compliance.cis_gcp import GCPCIS -from prowler.lib.outputs.compliance.models import GCP +from prowler.lib.outputs.compliance.cis.cis_gcp import GCPCIS +from prowler.lib.outputs.compliance.cis.models import GCP from tests.lib.outputs.compliance.fixtures import CIS_2_0_GCP from tests.lib.outputs.fixtures.fixtures import generate_finding_output from tests.providers.gcp.gcp_fixtures import GCP_PROJECT_ID @@ -79,9 +81,18 @@ def test_output_transform(self): assert output_data.CheckId == "test-check-id" assert output_data.Muted is False + @freeze_time(datetime.now()) def test_batch_write_data_to_file(self): mock_file = StringIO() - findings = [generate_finding_output(compliance={"CIS-2.0": "2.13"})] + findings = [ + generate_finding_output( + provider="gcp", + compliance={"CIS-2.0": "2.13"}, + account_name=GCP_PROJECT_ID, + account_uid=GCP_PROJECT_ID, + region="", + ) + ] # Clear the data from CSV class output = GCPCIS(findings, CIS_2_0_GCP) output._file_descriptor = mock_file @@ -91,4 +102,5 @@ def test_batch_write_data_to_file(self): mock_file.seek(0) content = mock_file.read() - assert CIS_2_0_GCP.Description in content + expected_csv = f"PROVIDER;DESCRIPTION;ASSESSMENTDATE;REQUIREMENTS_ID;REQUIREMENTS_DESCRIPTION;REQUIREMENTS_ATTRIBUTES_SECTION;REQUIREMENTS_ATTRIBUTES_PROFILE;REQUIREMENTS_ATTRIBUTES_ASSESSMENTSTATUS;REQUIREMENTS_ATTRIBUTES_DESCRIPTION;REQUIREMENTS_ATTRIBUTES_RATIONALESTATEMENT;REQUIREMENTS_ATTRIBUTES_IMPACTSTATEMENT;REQUIREMENTS_ATTRIBUTES_REMEDIATIONPROCEDURE;REQUIREMENTS_ATTRIBUTES_AUDITPROCEDURE;REQUIREMENTS_ATTRIBUTES_ADDITIONALINFORMATION;REQUIREMENTS_ATTRIBUTES_REFERENCES;STATUS;STATUSEXTENDED;RESOURCEID;CHECKID;MUTED;PROJECTID;LOCATION\r\ngcp;This CIS Benchmark is the product of a community consensus process and consists of secure configuration guidelines developed for Google Cloud Computing Platform;{datetime.now()};2.13;Ensure That Microsoft Defender for Databases Is Set To 'On';2. Logging and Monitoring;Level 1;Automated;GCP Cloud Asset Inventory is services that provides a historical view of GCP resources and IAM policies through a time-series database. 
The information recorded includes metadata on Google Cloud resources, metadata on policies set on Google Cloud projects or resources, and runtime information gathered within a Google Cloud resource.;The GCP resources and IAM policies captured by GCP Cloud Asset Inventory enables security analysis, resource change tracking, and compliance auditing. It is recommended GCP Cloud Asset Inventory be enabled for all GCP projects.;;**From Google Cloud Console** Enable the Cloud Asset API: 1. Go to `API & Services/Library` by visiting https://console.cloud.google.com/apis/library(https://console.cloud.google.com/apis/library) 2. Search for `Cloud Asset API` and select the result for _Cloud Asset API_ 3. Click the `ENABLE` button. **From Google Cloud CLI** Enable the Cloud Asset API: 1. Enable the Cloud Asset API through the services interface: ``` gcloud services enable cloudasset.googleapis.com ```;**From Google Cloud Console** Ensure that the Cloud Asset API is enabled: 1. Go to `API & Services/Library` by visiting https://console.cloud.google.com/apis/library(https://console.cloud.google.com/apis/library) 2. Search for `Cloud Asset API` and select the result for _Cloud Asset API_ 3. Ensure that `API Enabled` is displayed. **From Google Cloud CLI** Ensure that the Cloud Asset API is enabled: 1. Query enabled services: ``` gcloud services list --enabled --filter=name:cloudasset.googleapis.com ``` If the API is listed, then it is enabled. If the response is `Listed 0 items` the API is not enabled.;Additional info - Cloud Asset Inventory only keeps a five-week history of Google Cloud asset metadata. If a longer history is desired, automation to export the history to Cloud Storage or BigQuery should be evaluated.;https://cloud.google.com/asset-inventory/docs;PASS;;;test-check-id;False;123456789012;\r\n" + assert content == expected_csv diff --git a/tests/lib/outputs/compliance/cis_kubernetes_test.py b/tests/lib/outputs/compliance/cis_kubernetes_test.py index c2d90027da..48c82ed4cf 100644 --- a/tests/lib/outputs/compliance/cis_kubernetes_test.py +++ b/tests/lib/outputs/compliance/cis_kubernetes_test.py @@ -1,9 +1,11 @@ +from datetime import datetime from io import StringIO +from freezegun import freeze_time from mock import patch -from prowler.lib.outputs.compliance.cis_kubernetes import KubernetesCIS -from prowler.lib.outputs.compliance.models import Kubernetes +from prowler.lib.outputs.compliance.cis.cis_kubernetes import KubernetesCIS +from prowler.lib.outputs.compliance.cis.models import Kubernetes from tests.lib.outputs.compliance.fixtures import CIS_1_8_KUBERNETES from tests.lib.outputs.fixtures.fixtures import generate_finding_output from tests.providers.kubernetes.kubernetes_fixtures import ( @@ -82,9 +84,18 @@ def test_output_transform(self): assert output_data.CheckId == "test-check-id" assert output_data.Muted is False + @freeze_time(datetime.now()) def test_batch_write_data_to_file(self): mock_file = StringIO() - findings = [generate_finding_output(compliance={"CIS-1.8": "1.1.3"})] + findings = [ + generate_finding_output( + provider="kubernetes", + compliance={"CIS-1.8": "1.1.3"}, + account_name=KUBERNETES_CLUSTER_NAME, + account_uid=KUBERNETES_CLUSTER_NAME, + region=KUBERNETES_NAMESPACE, + ) + ] # Clear the data from CSV class output = KubernetesCIS(findings, CIS_1_8_KUBERNETES) output._file_descriptor = mock_file @@ -94,4 +105,5 @@ def test_batch_write_data_to_file(self): mock_file.seek(0) content = mock_file.read() - assert CIS_1_8_KUBERNETES.Description in content + expected_csv = 
f"PROVIDER;DESCRIPTION;ASSESSMENTDATE;REQUIREMENTS_ID;REQUIREMENTS_DESCRIPTION;REQUIREMENTS_ATTRIBUTES_SECTION;REQUIREMENTS_ATTRIBUTES_PROFILE;REQUIREMENTS_ATTRIBUTES_ASSESSMENTSTATUS;REQUIREMENTS_ATTRIBUTES_DESCRIPTION;REQUIREMENTS_ATTRIBUTES_RATIONALESTATEMENT;REQUIREMENTS_ATTRIBUTES_IMPACTSTATEMENT;REQUIREMENTS_ATTRIBUTES_REMEDIATIONPROCEDURE;REQUIREMENTS_ATTRIBUTES_AUDITPROCEDURE;REQUIREMENTS_ATTRIBUTES_ADDITIONALINFORMATION;REQUIREMENTS_ATTRIBUTES_REFERENCES;STATUS;STATUSEXTENDED;RESOURCEID;CHECKID;MUTED;CONTEXT;NAMESPACE\r\nkubernetes;This CIS Kubernetes Benchmark provides prescriptive guidance for establishing a secure configuration posture for Kubernetes v1.27.;{datetime.now()};1.1.3;Ensure that the controller manager pod specification file permissions are set to 600 or more restrictive;1.1 Control Plane Node Configuration Files;Level 1 - Master Node;Automated;Ensure that the controller manager pod specification file has permissions of `600` or more restrictive.;The controller manager pod specification file controls various parameters that set the behavior of the Controller Manager on the master node. You should restrict its file permissions to maintain the integrity of the file. The file should be writable by only the administrators on the system.;;Run the below command (based on the file location on your system) on the Control Plane node. For example, ``` chmod 600 /etc/kubernetes/manifests/kube-controller-manager.yaml ```;Run the below command (based on the file location on your system) on the Control Plane node. For example, ``` stat -c %a /etc/kubernetes/manifests/kube-controller-manager.yaml ``` Verify that the permissions are `600` or more restrictive.;;https://kubernetes.io/docs/admin/kube-apiserver/;PASS;;;test-check-id;False;test-cluster;test-namespace\r\n" + assert content == expected_csv From fe7d01a490ab5cb41a68f2f46374dff679557664 Mon Sep 17 00:00:00 2001 From: Sergio Date: Tue, 9 Jul 2024 13:16:34 -0400 Subject: [PATCH 8/9] add missing init --- prowler/lib/outputs/compliance/mitre_attack/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 prowler/lib/outputs/compliance/mitre_attack/__init__.py diff --git a/prowler/lib/outputs/compliance/mitre_attack/__init__.py b/prowler/lib/outputs/compliance/mitre_attack/__init__.py new file mode 100644 index 0000000000..e69de29bb2 From 753625888b7480092af35419a75540756f18d56c Mon Sep 17 00:00:00 2001 From: Pepe Fagoaga Date: Wed, 10 Jul 2024 12:12:42 +0200 Subject: [PATCH 9/9] chore: remove prints --- tests/lib/outputs/compliance/cis_aws_test.py | 1 - tests/lib/outputs/compliance/cis_azure_test.py | 1 - 2 files changed, 2 deletions(-) diff --git a/tests/lib/outputs/compliance/cis_aws_test.py b/tests/lib/outputs/compliance/cis_aws_test.py index 393e4dbdc5..3821d9d9d9 100644 --- a/tests/lib/outputs/compliance/cis_aws_test.py +++ b/tests/lib/outputs/compliance/cis_aws_test.py @@ -86,6 +86,5 @@ def test_batch_write_data_to_file(self): mock_file.seek(0) content = mock_file.read() - print(repr(content)) expected_csv = 
f"""PROVIDER;DESCRIPTION;ASSESSMENTDATE;REQUIREMENTS_ID;REQUIREMENTS_DESCRIPTION;REQUIREMENTS_ATTRIBUTES_SECTION;REQUIREMENTS_ATTRIBUTES_PROFILE;REQUIREMENTS_ATTRIBUTES_ASSESSMENTSTATUS;REQUIREMENTS_ATTRIBUTES_DESCRIPTION;REQUIREMENTS_ATTRIBUTES_RATIONALESTATEMENT;REQUIREMENTS_ATTRIBUTES_IMPACTSTATEMENT;REQUIREMENTS_ATTRIBUTES_REMEDIATIONPROCEDURE;REQUIREMENTS_ATTRIBUTES_AUDITPROCEDURE;REQUIREMENTS_ATTRIBUTES_ADDITIONALINFORMATION;REQUIREMENTS_ATTRIBUTES_REFERENCES;STATUS;STATUSEXTENDED;RESOURCEID;CHECKID;MUTED;ACCOUNTID;REGION\r\naws;The CIS Benchmark for CIS Amazon Web Services Foundations Benchmark, v1.4.0, Level 1 and 2 provides prescriptive guidance for configuring security options for a subset of Amazon Web Services. It has an emphasis on foundational, testable, and architecture agnostic settings;{datetime.now()};2.1.3;Ensure MFA Delete is enabled on S3 buckets;2.1. Simple Storage Service (S3);Level 1;Automated;Once MFA Delete is enabled on your sensitive and classified S3 bucket it requires the user to have two forms of authentication.;Adding MFA delete to an S3 bucket, requires additional authentication when you change the version state of your bucket or you delete and object version adding another layer of security in the event your security credentials are compromised or unauthorized access is granted.;;"Perform the steps below to enable MFA delete on an S3 bucket.\n\nNote:\n-You cannot enable MFA Delete using the AWS Management Console. You must use the AWS CLI or API.\n-You must use your \'root\' account to enable MFA Delete on S3 buckets.\n\n**From Command line:**\n\n1. Run the s3api put-bucket-versioning command\n\n```\naws s3api put-bucket-versioning --profile my-root-profile --bucket Bucket_Name --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa “arn:aws:iam::aws_account_id:mfa/root-account-mfa-device passcode”\n```";"Perform the steps below to confirm MFA delete is configured on an S3 Bucket\n\n**From Console:**\n\n1. Login to the S3 console at `https://console.aws.amazon.com/s3/`\n\n2. Click the `Check` box next to the Bucket name you want to confirm\n\n3. In the window under `Properties`\n\n4. Confirm that Versioning is `Enabled`\n\n5. Confirm that MFA Delete is `Enabled`\n\n**From Command Line:**\n\n1. 
Run the `get-bucket-versioning`\n```\naws s3api get-bucket-versioning --bucket my-bucket\n```\n\nOutput example:\n```\n \n Enabled\n Enabled \n\n```\n\nIf the Console or the CLI output does not show Versioning and MFA Delete `enabled` refer to the remediation below.";;https://docs.aws.amazon.com/AmazonS3/latest/dev/Versioning.html#MultiFactorAuthenticationDelete:https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMFADelete.html:https://aws.amazon.com/blogs/security/securing-access-to-aws-using-mfa-part-3/:https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa_lost-or-broken.html;PASS;;;test-check-id;False;123456789012;eu-west-1\r\n""" assert content == expected_csv diff --git a/tests/lib/outputs/compliance/cis_azure_test.py b/tests/lib/outputs/compliance/cis_azure_test.py index 62800e946f..f08abb5d7b 100644 --- a/tests/lib/outputs/compliance/cis_azure_test.py +++ b/tests/lib/outputs/compliance/cis_azure_test.py @@ -105,7 +105,6 @@ def test_batch_write_data_to_file(self): mock_file.seek(0) content = mock_file.read() - print(repr(content)) expected_csv = ( "PROVIDER;DESCRIPTION;ASSESSMENTDATE;REQUIREMENTS_ID;REQUIREMENTS_DESCRIPTION;REQUIREMENTS_ATTRIBUTES_SECTION;REQUIREMENTS_ATTRIBUTES_PROFILE;REQUIREMENTS_ATTRIBUTES_ASSESSMENTSTATUS;REQUIREMENTS_ATTRIBUTES_DESCRIPTION;REQUIREMENTS_ATTRIBUTES_RATIONALESTATEMENT;REQUIREMENTS_ATTRIBUTES_IMPACTSTATEMENT;REQUIREMENTS_ATTRIBUTES_REMEDIATIONPROCEDURE;REQUIREMENTS_ATTRIBUTES_AUDITPROCEDURE;REQUIREMENTS_ATTRIBUTES_ADDITIONALINFORMATION;REQUIREMENTS_ATTRIBUTES_REFERENCES;STATUS;STATUSEXTENDED;RESOURCEID;CHECKID;MUTED;SUBSCRIPTION;LOCATION\r\nazure;The CIS Azure Foundations Benchmark provides prescriptive guidance for configuring security options for a subset of Azure with an emphasis on foundational, testable, and architecture agnostic settings.;" + str(datetime.now())