diff --git a/.gitignore b/.gitignore index 79ba03f1..0d444989 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ .idea bundle.tar.gz opa +.vscode \ No newline at end of file diff --git a/checks/cloud/aws/accessanalyzer/enable_access_analyzer.rego b/checks/cloud/aws/accessanalyzer/enable_access_analyzer.rego new file mode 100644 index 00000000..b5bb1f20 --- /dev/null +++ b/checks/cloud/aws/accessanalyzer/enable_access_analyzer.rego @@ -0,0 +1,45 @@ +# METADATA +# title: Enable IAM Access analyzer for IAM policies about all resources in each region. +# description: | +# AWS IAM Access Analyzer helps you identify the resources in your organization and +# accounts, such as Amazon S3 buckets or IAM roles, that are shared with an external entity. +# This lets you identify unintended access to your resources and data. Access Analyzer +# identifies resources that are shared with external principals by using logic-based reasoning +# to analyze the resource-based policies in your AWS environment. IAM Access Analyzer +# continuously monitors all policies for S3 bucket, IAM roles, KMS(Key Management Service) +# keys, AWS Lambda functions, and Amazon SQS(Simple Queue Service) queues. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/what-is-access-analyzer.html +# custom: +# id: AVD-AWS-0175 +# avd_id: AVD-AWS-0175 +# provider: aws +# service: accessanalyzer +# severity: LOW +# short_code: enable-access-analyzer +# recommended_action: Enable IAM Access analyzer across all regions. +# frameworks: +# cis-aws-1.4: +# - "1.20" +# input: +# selector: +# - type: aws +# subtypes: +# - service: accessanalyzer +# provider: aws +package builtin.aws.accessanalyzer.aws0175 + +import rego.v1 + +deny contains res if { + is_analyzer_nonactive + res := result.new("Access Analyzer is not enabled.", {}) +} + +is_analyzer_nonactive if { + some analyzer in input.aws.accessanalyzer.analyzers + not analyzer.active.value +} diff --git a/checks/cloud/aws/accessanalyzer/enable_access_analyzer_test.rego b/checks/cloud/aws/accessanalyzer/enable_access_analyzer_test.rego new file mode 100644 index 00000000..17041714 --- /dev/null +++ b/checks/cloud/aws/accessanalyzer/enable_access_analyzer_test.rego @@ -0,0 +1,21 @@ +package builtin.aws.accessanalyzer.aws0175_test + +import rego.v1 + +import data.builtin.aws.accessanalyzer.aws0175 as check +import data.lib.test + +test_disallow_analyzer_disabled if { + r := check.deny with input as {"aws": {"accessanalyzer": {"analyzers": [{"active": {"value": false}}]}}} + test.assert_equal_message(r, "Access Analyzer is not enabled.") +} + +test_disallow_one_of_analyzer_disabled if { + r := check.deny with input as {"aws": {"accessanalyzer": {"analyzers": [{"active": {"value": false}}, {"active": {"value": true}}]}}} + test.assert_equal_message(r, "Access Analyzer is not enabled.") +} + +test_allow_analyzer_enabled if { + r := check.deny with input as {"aws": {"accessanalyzer": {"analyzers": [{"active": {"value": true}}]}}} + test.assert_empty(r) +} diff --git a/checks/cloud/aws/apigateway/enable_access_logging.rego b/checks/cloud/aws/apigateway/enable_access_logging.rego new file mode 100644 index 00000000..8b44fb78 --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_access_logging.rego @@ -0,0 +1,46 @@ +# METADATA +# title: API Gateway stages for V1 and V2 should have access logging enabled +# description: | +# API Gateway stages should have access log settings block configured to track all access to 
a particular stage. This should be applied to both v1 and v2 gateway stages. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/apigateway/latest/developerguide/set-up-logging.html +# custom: +# id: AVD-AWS-0001 +# avd_id: AVD-AWS-0001 +# provider: aws +# service: api-gateway +# severity: MEDIUM +# short_code: enable-access-logging +# recommended_action: Enable logging for API Gateway stages +# input: +# selector: +# - type: aws +# subtypes: +# - service: api-gateway +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/apigatewayv2_stage#access_log_settings +# good_examples: checks/cloud/aws/apigateway/enable_access_logging.tf.go +# bad_examples: checks/cloud/aws/apigateway/enable_access_logging.tf.go +# cloudformation: +# good_examples: checks/cloud/aws/apigateway/enable_access_logging.cf.go +# bad_examples: checks/cloud/aws/apigateway/enable_access_logging.cf.go +package builtin.aws.apigateway.aws0001 + +import rego.v1 + +deny contains res if { + some api in apis + some stage in api.stages + arn := stage.accesslogging.cloudwatchloggrouparn + arn.value == "" # TODO: check if unresolvable? + res := result.new("Access logging is not configured.", arn) +} + +apis contains input.aws.apigateway.v1.apis[_] + +apis contains input.aws.apigateway.v2.apis[_] diff --git a/checks/cloud/aws/apigateway/enable_access_logging_test.rego b/checks/cloud/aws/apigateway/enable_access_logging_test.rego new file mode 100644 index 00000000..f202fed3 --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_access_logging_test.rego @@ -0,0 +1,17 @@ +package builtin.aws.apigateway.aws0001_test + +import rego.v1 + +import data.builtin.aws.apigateway.aws0001 as check +import data.lib.test + +test_disallow_api_gateway_without_log_group_arn if { + r := check.deny with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"accesslogging": {"cloudwatchloggrouparn": {"value": ""}}}]}]}}}} + test.assert_equal_message(r, "Access logging is not configured.") +} + +test_allow_api_gateway_with_log_group_arn if { + test.assert_empty(check.deny) with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"accesslogging": {"cloudwatchloggrouparn": {"value": "log-group-arn"}}}]}]}}}} +} + +# TODO add test for v2 diff --git a/checks/cloud/aws/apigateway/enable_cache.rego b/checks/cloud/aws/apigateway/enable_cache.rego new file mode 100644 index 00000000..d0fb2bce --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_cache.rego @@ -0,0 +1,39 @@ +# METADATA +# title: Ensure that response caching is enabled for your Amazon API Gateway REST APIs. +# description: | +# A REST API in API Gateway is a collection of resources and methods that are integrated with backend HTTP endpoints, Lambda functions, or other AWS services. You can enable API caching in Amazon API Gateway to cache your endpoint responses. With caching, you can reduce the number of calls made to your endpoint and also improve the latency of requests to your API. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-caching.html +# custom: +# id: AVD-AWS-0190 +# avd_id: AVD-AWS-0190 +# provider: aws +# service: api-gateway +# severity: LOW +# short_code: enable-cache +# recommended_action: Enable cache +# input: +# selector: +# - type: aws +# subtypes: +# - service: api-gateway +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/api_gateway_method_settings#cache_enabled +# good_examples: checks/cloud/aws/apigateway/enable_cache.tf.go +# bad_examples: checks/cloud/aws/apigateway/enable_cache.tf.go +package builtin.aws.apigateway.aws0190 + +import rego.v1 + +deny contains res if { + some api in input.aws.apigateway.v1.apis # TODO: support v2? + some stage in api.stages + some settings in stage.restmethodsettings + not settings.cacheenabled.value + res := result.new("Cache data is not enabled.", settings.cacheenabled) +} diff --git a/checks/cloud/aws/apigateway/enable_cache_encryption.rego b/checks/cloud/aws/apigateway/enable_cache_encryption.rego new file mode 100644 index 00000000..194842c4 --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_cache_encryption.rego @@ -0,0 +1,38 @@ +# METADATA +# title: API Gateway must have cache enabled +# description: | +# Method cache encryption ensures that any sensitive data in the cache is not vulnerable to compromise in the event of interception +# scope: package +# schemas: +# - input: schema["cloud"] +# custom: +# id: AVD-AWS-0002 +# avd_id: AVD-AWS-0002 +# provider: aws +# service: api-gateway +# severity: MEDIUM +# short_code: enable-cache-encryption +# recommended_action: Enable cache encryption +# input: +# selector: +# - type: aws +# subtypes: +# - service: api-gateway +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/api_gateway_method_settings#cache_data_encrypted +# good_examples: checks/cloud/aws/apigateway/enable_cache_encryption.tf.go +# bad_examples: checks/cloud/aws/apigateway/enable_cache_encryption.tf.go +package builtin.aws.apigateway.aws0002 + +import rego.v1 + +deny contains res if { + some api in input.aws.apigateway.v1.apis # TODO: support v2? 
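+	# Flag only method settings where caching is enabled but the cached responses are left unencrypted.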
+ some stage in api.stages + some settings in stage.restmethodsettings + settings.cacheenabled.value + not settings.cachedataencrypted.value + res := result.new("Cache data is not encrypted.", settings.cachedataencrypted) +} diff --git a/checks/cloud/aws/apigateway/enable_cache_encryption_test.rego b/checks/cloud/aws/apigateway/enable_cache_encryption_test.rego new file mode 100644 index 00000000..ec2bae78 --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_cache_encryption_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.apigateway.aws0002_test + +import rego.v1 + +import data.builtin.aws.apigateway.aws0002 as check +import data.lib.test + +test_allow_api_gateway_with_cache_encryption if { + test.assert_empty(check.deny) with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"restmethodsettings": [{"cacheenabled": {"value": true}, "cachedataencrypted": {"value": true}}]}]}]}}}} +} + +test_disallow_api_gateway_without_cache_encryption if { + r := check.deny with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"restmethodsettings": [{"cacheenabled": {"value": true}, "cachedataencrypted": {"value": false}}]}]}]}}}} + + test.assert_equal_message(r, "Cache data is not encrypted.") +} diff --git a/checks/cloud/aws/apigateway/enable_cache_test.rego b/checks/cloud/aws/apigateway/enable_cache_test.rego new file mode 100644 index 00000000..1659e864 --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_cache_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.apigateway.aws0190_test + +import rego.v1 + +import data.builtin.aws.apigateway.aws0190 as check +import data.lib.test + +test_allow_cache_enabled if { + test.assert_empty(check.deny) with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"restmethodsettings": [{"cacheenabled": {"value": true}}]}]}]}}}} +} + +test_disallow_cache_disabled if { + r := check.deny with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"restmethodsettings": [{"cacheenabled": {"value": false}}]}]}]}}}} + + test.assert_equal_message(r, "Cache data is not enabled.") +} diff --git a/checks/cloud/aws/apigateway/enable_tracing.rego b/checks/cloud/aws/apigateway/enable_tracing.rego new file mode 100644 index 00000000..d30cf173 --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_tracing.rego @@ -0,0 +1,36 @@ +# METADATA +# title: API Gateway must have X-Ray tracing enabled +# description: | +# X-Ray tracing enables end-to-end debugging and analysis of all API Gateway HTTP requests. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# custom: +# id: AVD-AWS-0003 +# avd_id: AVD-AWS-0003 +# provider: aws +# service: api-gateway +# severity: LOW +# short_code: enable-tracing +# recommended_action: Enable tracing +# input: +# selector: +# - type: aws +# subtypes: +# - service: api-gateway +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/api_gateway_stage#xray_tracing_enabled +# good_examples: checks/cloud/aws/apigateway/enable_tracing.tf.go +# bad_examples: checks/cloud/aws/apigateway/enable_tracing.tf.go +package builtin.aws.apigateway.aws0003 + +import rego.v1 + +deny contains res if { + some api in input.aws.apigateway.v1.apis + some stage in api.stages + not stage.xraytracingenabled.value + res := result.new("X-Ray tracing is not enabled.", stage.xraytracingenabled) +} diff --git a/checks/cloud/aws/apigateway/enable_tracing_test.rego b/checks/cloud/aws/apigateway/enable_tracing_test.rego new file mode 100644 index 00000000..22f82dff --- /dev/null +++ b/checks/cloud/aws/apigateway/enable_tracing_test.rego @@ -0,0 +1,16 @@ +package builtin.aws.apigateway.aws0003_test + +import rego.v1 + +import data.builtin.aws.apigateway.aws0003 as check +import data.lib.test + +test_allow_tracing_enabled if { + test.assert_empty(check.deny) with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"xraytracingenabled": {"value": true}}]}]}}}} +} + +test_disallow_tracing_disabled if { + r := check.deny with input as {"aws": {"apigateway": {"v1": {"apis": [{"stages": [{"xraytracingenabled": {"value": false}}]}]}}}} + + test.assert_equal_message(r, "X-Ray tracing is not enabled.") +} diff --git a/checks/cloud/aws/apigateway/no_public_access.rego b/checks/cloud/aws/apigateway/no_public_access.rego new file mode 100644 index 00000000..f041adb2 --- /dev/null +++ b/checks/cloud/aws/apigateway/no_public_access.rego @@ -0,0 +1,43 @@ +# METADATA +# title: No unauthorized access to API Gateway methods +# description: | +# API Gateway methods should generally be protected by authorization or api key. 
OPTION verb calls can be used without authorization +# scope: package +# schemas: +# - input: schema["cloud"] +# custom: +# id: AVD-AWS-0004 +# avd_id: AVD-AWS-0004 +# provider: aws +# service: api-gateway +# severity: LOW +# short_code: no-public-access +# recommended_action: Use and authorization method or require API Key +# input: +# selector: +# - type: aws +# subtypes: +# - service: api-gateway +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/api_gateway_method#authorization +# good_examples: checks/cloud/aws/apigateway/no_public_access.tf.go +# bad_examples: checks/cloud/aws/apigateway/no_public_access.tf.go +package builtin.aws.apigateway.aws0004 + +import rego.v1 + +authorization_none := "NONE" + +deny contains res if { + some api in input.aws.apigateway.v1.apis + some resource in api.resources + some method in resource.methods + + method.httpmethod.value != "OPTION" + not method.apikeyrequired.value + method.authorizationtype.value == authorization_none + + res := result.new("Authorization is not enabled for this method.", method.authorizationtype) +} diff --git a/checks/cloud/aws/apigateway/no_public_access_test.rego b/checks/cloud/aws/apigateway/no_public_access_test.rego new file mode 100644 index 00000000..2ccc1181 --- /dev/null +++ b/checks/cloud/aws/apigateway/no_public_access_test.rego @@ -0,0 +1,26 @@ +package builtin.aws.apigateway.aws0004_test + +import rego.v1 + +import data.builtin.aws.apigateway.aws0004 as check +import data.lib.test + +test_disallow_get_method_without_auth if { + r := check.deny with input as input_with_method({"httpmethod": {"value": "GET"}, "authorizationtype": {"value": "NONE"}}) + + test.assert_equal_message(r, "Authorization is not enabled for this method.") +} + +test_allow_option_method if { + test.assert_empty(check.deny) with input as input_with_method({"httpmethod": {"value": "OPTION"}}) +} + +test_allow_get_method_with_auth if { + test.assert_empty(check.deny) with input as input_with_method({"methods": [{"httpmethod": {"value": "GET"}, "authorizationtype": {"value": "AWS_IAM"}}]}) +} + +test_allow_if_api_required if { + test.assert_empty(check.deny) with input as input_with_method({"httpmethod": {"value": "GET"}, "authorizationtype": {"value": "AWS_IAM"}}) +} + +input_with_method(method) = {"aws": {"apigateway": {"v1": {"apis": [{"resources": [{"methods": [method]}]}]}}}} diff --git a/checks/cloud/aws/apigateway/use_secure_tls_policy.rego b/checks/cloud/aws/apigateway/use_secure_tls_policy.rego new file mode 100644 index 00000000..43ce7a19 --- /dev/null +++ b/checks/cloud/aws/apigateway/use_secure_tls_policy.rego @@ -0,0 +1,41 @@ +# METADATA +# title: API Gateway domain name uses outdated SSL/TLS protocols. +# description: | +# You should not use outdated/insecure TLS versions for encryption. You should be using TLS v1.2+. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/apigateway/latest/developerguide/apigateway-custom-domain-tls-version.html +# custom: +# id: AVD-AWS-0005 +# avd_id: AVD-AWS-0005 +# provider: aws +# service: api-gateway +# severity: HIGH +# short_code: use-secure-tls-policy +# recommended_action: Use the most modern TLS/SSL policies available +# input: +# selector: +# - type: aws +# subtypes: +# - service: api-gateway +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/api_gateway_domain_name#security_policy +# good_examples: checks/cloud/aws/apigateway/use_secure_tls_policy.tf.go +# bad_examples: checks/cloud/aws/apigateway/use_secure_tls_policy.tf.go +package builtin.aws.apigateway.aws0005 + +import rego.v1 + +deny contains res if { + some domain in domainnames + domain.securitypolicy.value != "TLS_1_2" + res := result.new("Domain name is configured with an outdated TLS policy.", domain.securitypolicy) +} + +domainnames contains input.aws.apigateway.v1.domainnames[_] + +domainnames contains input.aws.apigateway.v2.domainnames[_] diff --git a/checks/cloud/aws/apigateway/use_secure_tls_policy_test.rego b/checks/cloud/aws/apigateway/use_secure_tls_policy_test.rego new file mode 100644 index 00000000..b9ac17ba --- /dev/null +++ b/checks/cloud/aws/apigateway/use_secure_tls_policy_test.rego @@ -0,0 +1,20 @@ +package builtin.aws.apigateway.aws0005_test + +import rego.v1 + +import data.builtin.aws.apigateway.aws0005 as check +import data.lib.test + +test_allow_with_tls_1_2 if { + test.assert_empty(check.deny) with input as {"aws": {"apigateway": {"v1": {"domainnames": [{"securitypolicy": {"value": "TLS_1_2"}}]}}}} +} + +test_disallow_with_tls_1_0 if { + r := check.deny with input as {"aws": {"apigateway": {"v1": {"domainnames": [{"securitypolicy": {"value": "TLS_1_0"}}]}}}} + test.assert_equal_message(r, "Domain name is configured with an outdated TLS policy.") +} + +test_dissalow_api_v2 if { + r := check.deny with input as {"aws": {"apigateway": {"v2": {"domainnames": [{"securitypolicy": {"value": "TLS_1_0"}}]}, "v1": {"domainnames": [{"securitypolicy": {"value": "TLS_1_11"}}]}}}} + test.assert_equal_message(r, "Domain name is configured with an outdated TLS policy.") +} diff --git a/checks/cloud/aws/iam/disable_unused_credentials.rego b/checks/cloud/aws/iam/disable_unused_credentials.rego new file mode 100644 index 00000000..86318f1a --- /dev/null +++ b/checks/cloud/aws/iam/disable_unused_credentials.rego @@ -0,0 +1,46 @@ +# METADATA +# title: Credentials which are no longer used should be disabled. +# description: | +# CIS recommends that you remove or deactivate all credentials that have been unused in 90 days or more. Disabling or removing unnecessary credentials reduces the window of opportunity for credentials associated with a compromised or abandoned account to be used. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://console.aws.amazon.com/iam/ +# custom: +# id: AVD-AWS-0144 +# avd_id: AVD-AWS-0144 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: disable-unused-credentials +# recommended_action: Disable credentials which are no longer used. 
+# frameworks: +# cis-aws-1.2: +# - "1.3" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0144 + +import rego.v1 + +import data.lib.datetime +import data.lib.iam + +deny contains res if { + some user in input.aws.iam.users + iam.user_is_inactive(user, 90) + res := result.new("User has not logged in for >90 days.", user) +} + +deny contains res if { + some user in input.aws.iam.users + not iam.user_is_inactive(user, 90) + some key in user.accesskeys + iam.key_is_unused(key, 90) + res := result.new(sprintf("User access key %q has not been used in >90 days", [key.accesskeyid.value]), user) +} diff --git a/checks/cloud/aws/iam/disable_unused_credentials_45.go b/checks/cloud/aws/iam/disable_unused_credentials_45.go index 4683b644..0d517e6f 100644 --- a/checks/cloud/aws/iam/disable_unused_credentials_45.go +++ b/checks/cloud/aws/iam/disable_unused_credentials_45.go @@ -26,11 +26,11 @@ var CheckUnusedCredentialsDisabled45Days = rules.Register( }, Service: "iam", ShortCode: "disable-unused-credentials-45-days", - Summary: "AWS IAM users can access AWS resources using different types of credentials, such as\npasswords or access keys. It is recommended that all credentials that have been unused in\n45 or greater days be deactivated or removed.", + Summary: "Disabling or removing unnecessary credentials will reduce the window of opportunity for credentials associated with a compromised or abandoned account to be used.", Impact: "Leaving unused credentials active widens the scope for compromise.", Resolution: "Disable credentials which are no longer used.", Explanation: ` -Disabling or removing unnecessary credentials will reduce the window of opportunity for credentials associated with a compromised or abandoned account to be used. +AWS IAM users can access AWS resources using different types of credentials, such as passwords or access keys. It is recommended that all credentials that have been unused in45 or greater days be deactivated or removed. `, Links: []string{ "https://console.aws.amazon.com/iam/", diff --git a/checks/cloud/aws/iam/disable_unused_credentials_45.rego b/checks/cloud/aws/iam/disable_unused_credentials_45.rego new file mode 100644 index 00000000..dce27cb5 --- /dev/null +++ b/checks/cloud/aws/iam/disable_unused_credentials_45.rego @@ -0,0 +1,46 @@ +# METADATA +# title: Disabling or removing unnecessary credentials will reduce the window of opportunity for credentials associated with a compromised or abandoned account to be used. +# description: | +# AWS IAM users can access AWS resources using different types of credentials, such as passwords or access keys. It is recommended that all credentials that have been unused in45 or greater days be deactivated or removed. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://console.aws.amazon.com/iam/ +# custom: +# id: AVD-AWS-0166 +# avd_id: AVD-AWS-0166 +# provider: aws +# service: iam +# severity: LOW +# short_code: disable-unused-credentials-45-days +# recommended_action: Disable credentials which are no longer used. 
+# frameworks: +# cis-aws-1.4: +# - "1.12" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0166 + +import data.lib.iam +import rego.v1 + +days_to_check = 45 + +deny contains res if { + some user in input.aws.iam.users + iam.user_is_inactive(user, days_to_check) + res := result.new("User has not logged in for >45 days.", user) +} + +deny contains res if { + some user in input.aws.iam.users + not iam.user_is_inactive(user, days_to_check) + some key in user.accesskeys + iam.key_is_unused(key, days_to_check) + res := result.new(sprintf("User access key %q has not been used in >45 days", [key.accesskeyid.value]), user) +} diff --git a/checks/cloud/aws/iam/disable_unused_credentials_45_test.rego b/checks/cloud/aws/iam/disable_unused_credentials_45_test.rego new file mode 100644 index 00000000..6f2cdc29 --- /dev/null +++ b/checks/cloud/aws/iam/disable_unused_credentials_45_test.rego @@ -0,0 +1,66 @@ +package builtin.aws.iam.aws0166._test + +import rego.v1 + +import data.builtin.aws.iam.aws0166 as check +import data.lib.datetime +import data.lib.test + +test_allow_user_logged_in_today if { + test.assert_empty(check.deny) with input as build_user({ + "name": {"value": "test"}, + "lastaccess": {"value": time.format(time.now_ns())}, + }) +} + +test_allow_user_never_logged_in if { + test.assert_empty(check.deny) with input as build_user({ + "name": {"value": "test"}, + "lastaccess": {"value": datetime.zero_time_string}, + }) +} + +test_disallow_user_logged_in_100_days_ago if { + test.assert_equal_message(check.deny, "User has not logged in for >45 days.") with input as build_user({ + "name": {"value": "test"}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(100))}, + }) +} + +test_disallow_user_access_key_not_used_100_days if { + test.assert_equal_message(check.deny, `User access key "AKIACKCEVSQ6C2EXAMPLE" has not been used in >45 days`) with input as build_user({ + "name": {"value": "test"}, + "lastaccess": {"value": time.format(time.now_ns())}, + "accesskeys": [{ + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": true}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(100))}, + }], + }) +} + +test_allow_nonactive_user_access_key_not_used_100_days if { + test.assert_empty(check.deny) with input as build_user({ + "name": {"value": "test"}, + "lastaccess": {"value": time.format(time.now_ns())}, + "accesskeys": [{ + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": false}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(100))}, + }], + }) +} + +test_allow_user_access_key_used_today if { + test.assert_empty(check.deny) with input as build_user({ + "name": {"value": "test"}, + "lastaccess": {"value": time.format(time.now_ns())}, + "accesskeys": [{ + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": true}, + "lastaccess": {"value": time.format(time.now_ns())}, + }], + }) +} + +build_user(body) = {"aws": {"iam": {"users": [body]}}} diff --git a/checks/cloud/aws/iam/disable_unused_credentials_test.rego b/checks/cloud/aws/iam/disable_unused_credentials_test.rego new file mode 100644 index 00000000..3e682d4b --- /dev/null +++ b/checks/cloud/aws/iam/disable_unused_credentials_test.rego @@ -0,0 +1,85 @@ +package builtin.aws.iam.aws0144._test + +import rego.v1 + +import data.builtin.aws.iam.aws0144 as check +import data.lib.datetime +import data.lib.test + +test_allow_user_logged_in_today if { + 
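+	# The user's last access is "now", so the 90-day inactivity rule should produce no results.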
test.assert_empty(check.deny) with input as build_user({ + "name": "test", + "lastaccess": {"value": time.format(time.now_ns())}, + }) +} + +test_allow_user_never_logged_in if { + test.assert_empty(check.deny) with input as build_user({ + "name": {"value": "test"}, + "lastaccess": {"value": datetime.zero_time_string}, + }) +} + +test_disallow_user_logged_in_100_days_ago if { + test.assert_equal_message(check.deny, "User has not logged in for >90 days.") with input as build_user({ + "name": {"value": "test"}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(100))}, + }) +} + +test_disallow_user_access_key_not_used_100_days if { + test.assert_equal_message(check.deny, `User access key "AKIACKCEVSQ6C2EXAMPLE" has not been used in >90 days`) with input as build_user({ + "name": {"value": "test"}, + "lastaccess": {"value": time.format(time.now_ns())}, + "accesskeys": [{ + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": true}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(100))}, + }], + }) +} + +test_allow_nonactive_user_access_key_not_used_100_days if { + test.assert_empty(check.deny) with input as build_user({ + "name": "test", + "lastaccess": {"value": time.format(time.now_ns())}, + "accesskeys": [{ + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": false}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(100))}, + }], + }) +} + +test_allow_user_access_key_used_today if { + test.assert_empty(check.deny) with input as build_user({ + "name": "test", + "lastaccess": {"value": time.format(time.now_ns())}, + "accesskeys": [{ + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": true}, + "lastaccess": {"value": time.format(time.now_ns())}, + }], + }) +} + +test_disallow_one_of_the_user_access_key_used_100_days if { + test.assert_equal_message(check.deny, `User access key "AKIACKCEVSQ6C2EXAMPLE" has not been used in >90 days`) with input as build_user({ + "name": "test", + "lastaccess": {"value": time.format(time.now_ns())}, + "accesskeys": [ + { + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": true}, + "lastaccess": {"value": time.format(time.now_ns())}, + }, + { + "accesskeyid": {"value": "AKIACKCEVSQ6C2EXAMPLE"}, + "active": {"value": true}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(100))}, + }, + ], + }) +} + +build_user(body) = {"aws": {"iam": {"users": [body]}}} diff --git a/checks/cloud/aws/iam/enforce_group_mfa.rego b/checks/cloud/aws/iam/enforce_group_mfa.rego new file mode 100644 index 00000000..8d730955 --- /dev/null +++ b/checks/cloud/aws/iam/enforce_group_mfa.rego @@ -0,0 +1,46 @@ +# METADATA +# title: IAM groups should have MFA enforcement activated. +# description: | +# IAM groups should be protected with multi factor authentication to add safe guards to password compromise. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_passwords_account-policy.html#password-policy-details +# custom: +# id: AVD-AWS-0123 +# avd_id: AVD-AWS-0123 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: enforce-group-mfa +# recommended_action: Use terraform-module/enforce-mfa/aws to ensure that MFA is enforced +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +# terraform: +# links: +# - https://registry.terraform.io/modules/terraform-module/enforce-mfa/aws/latest +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_passwords_account-policy.html#password-policy-details +# good_examples: checks/cloud/aws/iam/enforce_group_mfa.tf.go +# bad_examples: checks/cloud/aws/iam/enforce_group_mfa.tf.go +package builtin.aws.iam.aws0123 + +import rego.v1 + +deny contains res if { + some group in input.aws.iam.groups + not is_group_mfa_enforced(group) + res := result.new("Multi-Factor authentication is not enforced for group", group) +} + +is_group_mfa_enforced(group) if { + some policy in group.policies + value := json.unmarshal(policy.document.value) + some condition in value.Statement[_].Condition + some key, _ in condition + key == "aws:MultiFactorAuthPresent" +} diff --git a/checks/cloud/aws/iam/enforce_group_mfa_test.rego b/checks/cloud/aws/iam/enforce_group_mfa_test.rego new file mode 100644 index 00000000..0ab77fef --- /dev/null +++ b/checks/cloud/aws/iam/enforce_group_mfa_test.rego @@ -0,0 +1,19 @@ +package builtin.aws.iam.aws0123._test + +import rego.v1 + +import data.builtin.aws.iam.aws0123 as check +import data.lib.test + +test_allow_group_with_mfa if { + test.assert_empty(check.deny) with input as build_condition({ + "StringLike": {"kms:ViaService": "timestream.*.amazonaws.com"}, + "Bool": {"aws:MultiFactorAuthPresent": "true"}, + }) +} + +test_disallow_group_without_mfa if { + test.assert_equal_message(check.deny, "Multi-Factor authentication is not enforced for group") with input as build_condition({}) +} + +build_condition(body) = {"aws": {"iam": {"groups": [{"policies": [{"document": {"value": json.marshal({"Statement": [{"Condition": body}]})}}]}]}}} diff --git a/checks/cloud/aws/iam/enforce_root_hardware_mfa.rego b/checks/cloud/aws/iam/enforce_root_hardware_mfa.rego new file mode 100644 index 00000000..9a1d6fb0 --- /dev/null +++ b/checks/cloud/aws/iam/enforce_root_hardware_mfa.rego @@ -0,0 +1,43 @@ +# METADATA +# title: The "root" account has unrestricted access to all resources in the AWS account. It is highly recommended that this account have hardware MFA enabled. +# description: | +# Hardware MFA adds an extra layer of protection on top of a user name and password. With MFA enabled, when a user signs in to an AWS website, they're prompted for their user name and password and for an authentication code from their AWS MFA device. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_mfa_enable_physical.html +# custom: +# id: AVD-AWS-0165 +# avd_id: AVD-AWS-0165 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: enforce-root-hardware-mfa +# recommended_action: Enable hardware MFA on the root user account. 
+# frameworks: +# cis-aws-1.4: +# - "1.6" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0165 + +import rego.v1 + +deny contains res if { + some user in input.aws.iam.users + user.name == "root" + not is_user_have_hardware_mfa(user) + res := result.new("Root user does not have a hardware MFA device", user) +} + +# is_user_have_hardware_mfa(user) if + +is_user_have_hardware_mfa(user) if { + some device in user.mfadevices + device.isvirtual.value == false +} diff --git a/checks/cloud/aws/iam/enforce_root_hardware_mfa_test.rego b/checks/cloud/aws/iam/enforce_root_hardware_mfa_test.rego new file mode 100644 index 00000000..e3a123d5 --- /dev/null +++ b/checks/cloud/aws/iam/enforce_root_hardware_mfa_test.rego @@ -0,0 +1,44 @@ +package builtin.aws.iam.aws0165._test + +import rego.v1 + +import data.builtin.aws.iam.aws0165 as check +import data.lib.test + +test_disallow_root_user_without_mfa if { + test.assert_equal_message(check.deny, "Root user does not have a hardware MFA device") with input as build_user({"name": {"value": "root"}}) +} + +test_disallow_root_user_with_virtual_mfa if { + test.assert_equal_message(check.deny, "Root user does not have a hardware MFA device") with input as build_user({ + "name": {"value": "root"}, + "mfadevices": [{"isvirtual": {"value": true}}], + }) +} + +test_allow_non_root_user_without_mfa if { + test.assert_empty(check.deny) with input as build_user({"name": {"value": "other"}}) +} + +test_allow_root_user_with_hardware_mfa if { + test.assert_empty(check.deny) with input as build_user({ + "name": {"value": {"value": "root"}}, + "mfadevices": [{"isvirtual": {"value": false}}], + }) +} + +test_allow_root_user_with_different_mfa if { + test.assert_empty(check.deny) with input as build_user({ + "name": {"value": "root"}, + "mfadevices": [ + {"isvirtual": {"value": true}}, + {"isvirtual": {"value": false}}, + ], + }) +} + +test_allow_without_user if { + test.assert_empty(check.deny) with input as build_user({}) +} + +build_user(body) = {"aws": {"iam": {"users": [body]}}} diff --git a/checks/cloud/aws/iam/enforce_root_mfa.rego b/checks/cloud/aws/iam/enforce_root_mfa.rego new file mode 100644 index 00000000..7013421d --- /dev/null +++ b/checks/cloud/aws/iam/enforce_root_mfa.rego @@ -0,0 +1,41 @@ +# METADATA +# title: The "root" account has unrestricted access to all resources in the AWS account. It is highly recommended that this account have MFA enabled. +# description: | +# MFA adds an extra layer of protection on top of a user name and password. With MFA enabled, when a user signs in to an AWS website, they're prompted for their user name and password and for an authentication code from their AWS MFA device. +# When you use virtual MFA for the root user, CIS recommends that the device used is not a personal device. Instead, use a dedicated mobile device (tablet or phone) that you manage to keep charged and secured independent of any individual personal devices. This lessens the risks of losing access to the MFA due to device loss, device trade-in, or if the individual owning the device is no longer employed at the company. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/securityhub/latest/userguide/securityhub-cis-controls.html#securityhub-cis-controls-1.14 +# custom: +# id: AVD-AWS-0142 +# avd_id: AVD-AWS-0142 +# provider: aws +# service: iam +# severity: CRITICAL +# short_code: enforce-root-mfa +# recommended_action: Enable MFA on the root user account. +# frameworks: +# cis-aws-1.2: +# - "1.13" +# cis-aws-1.4: +# - "1.5" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0142 + +import rego.v1 + +import data.lib.iam + +deny contains res if { + some user in input.aws.iam.users + iam.is_root_user(user) + not iam.user_has_mfa_devices(user) + res := result.new("Root user does not have an MFA device", user) +} diff --git a/checks/cloud/aws/iam/enforce_root_mfa_test.rego b/checks/cloud/aws/iam/enforce_root_mfa_test.rego new file mode 100644 index 00000000..92b5d914 --- /dev/null +++ b/checks/cloud/aws/iam/enforce_root_mfa_test.rego @@ -0,0 +1,26 @@ +package builtin.aws.iam.aws0142._test + +import rego.v1 + +import data.builtin.aws.iam.aws0142 as check +import data.lib.test + +test_disallow_root_user_without_mfa if { + test.assert_equal_message(check.deny, "Root user does not have an MFA device") with input as build_user({"name": {"value": "root"}}) +} + +test_allow_non_root_user_without_mfa if { + test.assert_empty(check.deny) with input as build_user({"name": {"value": "other"}}) +} + +test_allow_root_user_with_mfa if { + test.assert_empty(check.deny) with input as build_user({ + "name": "root", + "mfadevices": [ + {"isvirtual": {"value": false}}, + {"isvirtual": {"value": true}}, + ], + }) +} + +build_user(body) = {"aws": {"iam": {"users": [body]}}} diff --git a/checks/cloud/aws/iam/enforce_user_mfa.rego b/checks/cloud/aws/iam/enforce_user_mfa.rego new file mode 100644 index 00000000..9560a557 --- /dev/null +++ b/checks/cloud/aws/iam/enforce_user_mfa.rego @@ -0,0 +1,40 @@ +# METADATA +# title: IAM Users should have MFA enforcement activated. +# description: | +# IAM user accounts should be protected with multi factor authentication to add safe guards to password compromise. 
+# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://console.aws.amazon.com/iam/ +# custom: +# id: AVD-AWS-0145 +# avd_id: AVD-AWS-0145 +# provider: aws +# service: iam +# severity: MEDIUM +# short_code: enforce-user-mfa +# recommended_action: Enable MFA for the user account +# frameworks: +# cis-aws-1.2: +# - "1.2" +# cis-aws-1.4: +# - "1.4" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0145 + +import rego.v1 + +import data.lib.iam + +deny contains res if { + some user in input.aws.iam.users + not iam.user_has_mfa_devices(user) + iam.is_user_logged_in(user) + res := result.new("User account does not have MFA", user) +} diff --git a/checks/cloud/aws/iam/enforce_user_mfa_test.rego b/checks/cloud/aws/iam/enforce_user_mfa_test.rego new file mode 100644 index 00000000..57d8671e --- /dev/null +++ b/checks/cloud/aws/iam/enforce_user_mfa_test.rego @@ -0,0 +1,31 @@ +package builtin.aws.iam.aws0145._test + +import rego.v1 + +import data.builtin.aws.iam.aws0145 as check +import data.lib.datetime +import data.lib.test + +test_disallow_user_logged_in_without_mfa if { + test.assert_equal_message(check.deny, "User account does not have MFA") with input as build_user({ + "name": {"value": "other"}, + "lastaccess": {"value": time.format(time.now_ns())}, + }) +} + +test_allow_user_never_logged_in_with_mfa if { + test.assert_empty(check.deny) with input as build_user({ + "name": {"value": "other"}, + "lastaccess": {"value": datetime.zero_time_string}, + }) +} + +test_allow_user_logged_in_with_mfa if { + test.assert_empty(check.deny) with input as build_user({ + "name": {"value": "other"}, + "lastaccess": {"value": time.format(time.now_ns())}, + "mfadevices": [{"isvirtual": {"value": false}}], + }) +} + +build_user(body) = {"aws": {"iam": {"users": [body]}}} diff --git a/checks/cloud/aws/iam/limit_root_account_usage.rego b/checks/cloud/aws/iam/limit_root_account_usage.rego new file mode 100644 index 00000000..a5776f1b --- /dev/null +++ b/checks/cloud/aws/iam/limit_root_account_usage.rego @@ -0,0 +1,40 @@ +# METADATA +# title: The "root" account has unrestricted access to all resources in the AWS account. It is highly recommended that the use of this account be avoided. +# description: | +# The root user has unrestricted access to all services and resources in an AWS account. We highly recommend that you avoid using the root user for daily tasks. Minimizing the use of the root user and adopting the principle of least privilege for access management reduce the risk of accidental changes and unintended disclosure of highly privileged credentials. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html +# custom: +# id: AVD-AWS-0140 +# avd_id: AVD-AWS-0140 +# provider: aws +# service: iam +# severity: LOW +# short_code: limit-root-account-usage +# recommended_action: Use lower privileged accounts instead, so only required privileges are available. 
+# frameworks: +# cis-aws-1.2: +# - "1.1" +# cis-aws-1.4: +# - "1.7" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0140 + +import data.lib.datetime +import data.lib.iam +import rego.v1 + +deny contains res if { + some user in input.aws.iam.users + iam.is_root_user(user) + datetime.time_diff_lt_days(user.lastaccess.value, 1) + res := result.new("The root user logged in within the last 24 hours", user) +} diff --git a/checks/cloud/aws/iam/limit_root_account_usage_test.rego b/checks/cloud/aws/iam/limit_root_account_usage_test.rego new file mode 100644 index 00000000..d9662043 --- /dev/null +++ b/checks/cloud/aws/iam/limit_root_account_usage_test.rego @@ -0,0 +1,37 @@ +package builtin.aws.iam.aws0140._test + +import rego.v1 + +import data.builtin.aws.iam.aws0140 as check +import data.lib.datetime +import data.lib.test + +test_allow_root_user_never_logged_in if { + test.assert_empty(check.deny) with input as build_user({ + "name": {"value": "root"}, + "lastaccess": {"value": datetime.zero_time_string}, + }) +} + +test_allow_root_user_logged_in_over_24_hours if { + test.assert_empty(check.deny) with input as build_user({ + "name": {"value": "root"}, + "lastaccess": {"value": time.format(time.now_ns() - datetime.days_to_ns(7))}, + }) +} + +test_disallow_root_user_logged_in_within_24_hours if { + test.assert_equal_message(check.deny, "The root user logged in within the last 24 hours") with input as build_user({ + "name": {"value": "root"}, + "lastaccess": {"value": time.format(time.now_ns())}, + }) +} + +test_allow_nonroot_user_logged_in_within_24_hours if { + test.assert_empty(check.deny) with input as build_user({ + "name": {"value": "other"}, + "lastaccess": {"value": time.format(time.now_ns())}, + }) +} + +build_user(body) = {"aws": {"iam": {"users": [body]}}} diff --git a/checks/cloud/aws/iam/limit_user_access_keys.rego b/checks/cloud/aws/iam/limit_user_access_keys.rego new file mode 100644 index 00000000..0e1de3ca --- /dev/null +++ b/checks/cloud/aws/iam/limit_user_access_keys.rego @@ -0,0 +1,36 @@ +# METADATA +# title: No user should have more than one active access key. +# description: | +# Multiple active access keys widens the scope for compromise. +# scope: package +# schemas: +# - input: schema["cloud"] +# related_resources: +# - https://console.aws.amazon.com/iam/ +# custom: +# id: AVD-AWS-0167 +# avd_id: AVD-AWS-0167 +# provider: aws +# service: iam +# severity: LOW +# short_code: limit-user-access-keys +# recommended_action: Limit the number of active access keys to one key per user. 
+# frameworks: +# cis-aws-1.4: +# - "1.13" +# input: +# selector: +# - type: aws +# subtypes: +# - service: iam +# provider: aws +package builtin.aws.iam.aws0167 + +import rego.v1 + +deny contains res if { + some user in input.aws.iam.users + print(user) + count([key | some key in user.accesskeys; key.active.value]) > 1 + res := result.new("User has more than one active access key", user) +} diff --git a/checks/cloud/aws/iam/limit_user_access_keys_test.rego b/checks/cloud/aws/iam/limit_user_access_keys_test.rego new file mode 100644 index 00000000..5009cf64 --- /dev/null +++ b/checks/cloud/aws/iam/limit_user_access_keys_test.rego @@ -0,0 +1,29 @@ +package builtin.aws.iam.aws0167._test + +import rego.v1 + +import data.builtin.aws.iam.aws0167 as check +import data.lib.test + +test_allow_one_key_is_active if { + test.assert_empty(check.deny) with input as build_user([{"active": {"value": true}}]) +} + +test_allow_two_keys_but_one_non_active if { + test.assert_empty(check.deny) with input as build_user([ + {"active": {"value": false}}, + {"active": {"value": true}}, + ]) +} + +test_disallow_two_active_keys if { + test.assert_equal_message(check.deny, "User has more than one active access key") with input as build_user([ + {"active": {"value": true}}, + {"active": {"value": true}}, + ]) +} + +build_user(keys) = {"aws": {"iam": {"users": [{ + "name": {"value": "test"}, + "accesskeys": keys, +}]}}} diff --git a/cmd/go2rego/main.go b/cmd/go2rego/main.go new file mode 100644 index 00000000..2bbd2be1 --- /dev/null +++ b/cmd/go2rego/main.go @@ -0,0 +1,472 @@ +package main + +import ( + "bytes" + "errors" + "fmt" + "io/fs" + "log" + "os" + "path/filepath" + "regexp" + "slices" + "strings" + + "golang.org/x/exp/maps" + + "github.com/aquasecurity/trivy/pkg/iac/framework" + "github.com/aquasecurity/trivy/pkg/iac/providers" + "github.com/aquasecurity/trivy/pkg/iac/rules" + "github.com/aquasecurity/trivy/pkg/iac/scan" + "github.com/open-policy-agent/opa/ast" + "github.com/open-policy-agent/opa/format" + "github.com/open-policy-agent/opa/loader" +) + +var fileMappings = buildFileMappings() + +func main() { + + if len(os.Args) == 2 { + checkID := os.Args[1] + + rule := findCheckByID(checkID) + if rule == nil { + log.Fatal("Check not found") + } + + goCheckToRego(rule) + } else { + log.Println("Total checks:", len(rules.GetRegistered(framework.ALL))) + for _, r := range rules.GetRegistered(framework.ALL) { + goCheckToRego(&r.Rule) + } + } + +} + +func buildFileMappings() map[string]string { + + m := make(map[string]string) + + walkFn := func(path string, info fs.DirEntry, err error) error { + if err != nil { + return err + } + + if info.IsDir() { + return nil + } + + if !strings.HasSuffix(path, ".go") || slices.ContainsFunc( + []string{"_test.go", ".tf.go", ".cf.go"}, + func(s string) bool { + return strings.HasSuffix(path, s) + }, + ) { + return nil + } + + // read file + + b, err := os.ReadFile(path) + if err != nil { + return err + } + + r := regexp.MustCompile(`AVDID:\s*"([^"]+)"`) + + matches := r.FindStringSubmatch(string(b)) + if len(matches) != 2 { + log.Printf("expected 2 matches, got %d. File path: %s", len(matches), path) + return nil + } + if _, ok := m[matches[1]]; ok { + log.Printf("duplicate check id %s. 
File path: %s", matches[1], path) + } + + m[matches[1]] = removeExtension(path) + + return nil + } + + if err := filepath.WalkDir("checks", walkFn); err != nil { + log.Fatal(err) + } + + return m +} + +func goCheckToRego(rule *scan.Rule) { + outputPath := buildOutputPath(rule) + + goCheckPath := removeExtension(outputPath) + ".go" + if _, err := os.Stat(goCheckPath); errors.Is(err, os.ErrNotExist) { + log.Println("Go check file not found", goCheckPath) + } + + pkg := buildRegoPackage(rule) + pkgPath := ast.MustParseRef(pkg) // TODO: why without builtin prefix + + pkgAnnotation := buildPackageAnnotation(rule) + var comments []*ast.Comment + + for i := 0; i < len(pkgAnnotation); i++ { + comments = append(comments, &ast.Comment{ + Text: []byte(pkgAnnotation[i]), + Location: &ast.Location{ + Row: i + 1, + }, + }) + } + + if err := modifyOrCreateRegoCheck(outputPath, pkgPath, comments); err != nil { + log.Fatal(err) + } + + regoTestPath := removeExtension(outputPath) + "_test.rego" + if err := createTestRegoCheck(regoTestPath, pkgPath); err != nil { + log.Fatal(err) + } +} + +func modifyTestRegoCheck(regoTestPath string, pkgPath ast.Ref) error { + if _, err := os.Stat(regoTestPath); err != nil { + return err + } + + b, err := os.ReadFile(regoTestPath) + if err != nil { + return err + } + + result, err := loader.NewFileLoader(). + WithReader(bytes.NewReader(b)). + WithProcessAnnotation(true). + Filtered([]string{regoTestPath}, nil) + + if err != nil { + return err + } + + if len(result.Modules) != 1 { + return fmt.Errorf("expected 1 module, got %d", len(result.Modules)) + } + + module := maps.Values(result.ParsedModules())[0] + + f, err := os.OpenFile(regoTestPath, os.O_WRONLY|os.O_TRUNC, 0644) + if err != nil { + return err + } + defer f.Close() + + return updateAndWriteTestRegoCheck(f, pkgPath, module) +} + +func updateAndWriteTestRegoCheck(f *os.File, pkgPath ast.Ref, module *ast.Module) error { + module.Package = &ast.Package{ + Path: pkgPath.Append(ast.StringTerm("_test")), + Location: &ast.Location{ + Row: 1, + }, + } + + module.Imports = []*ast.Import{ + { + Path: ast.MustParseTerm("rego.v1"), + Location: &ast.Location{ + Row: 3, + }, + }, + { + Path: ast.MustParseTerm(pkgPath.String()), + Alias: ast.Var("check"), + Location: &ast.Location{ + Row: 5, + }, + }, + { + Path: ast.MustParseTerm("data.lib.test"), + Location: &ast.Location{ + Row: 6, + }, + }, + } + + formatted, err := format.Ast(module) + if err != nil { + return err + } + + if _, err := f.Write(formatted); err != nil { + return err + } + + return nil +} + +func createTestRegoCheck(regoTestPath string, pkgPath ast.Ref) error { + if _, err := os.Stat(regoTestPath); err == nil { + return modifyTestRegoCheck(regoTestPath, pkgPath) + } else if !errors.Is(err, os.ErrNotExist) { + return err + } + + f, err := os.Create(regoTestPath) + if err != nil { + return err + } + defer f.Close() + + module := &ast.Module{} + + return updateAndWriteTestRegoCheck(f, pkgPath, module) +} + +func findCheckByID(id string) *scan.Rule { + for _, r := range rules.GetRegistered(framework.ALL) { + if r.Rule.AVDID == id { + return &r.Rule + } + } + return nil +} + +func modifyOrCreateRegoCheck(filePath string, pkgPath ast.Ref, annotationComments []*ast.Comment) error { + b, err := os.ReadFile(filePath) + if errors.Is(err, os.ErrNotExist) { + return createRegoCheck(filePath, pkgPath, annotationComments) + } + + result, err := loader.NewFileLoader(). + WithReader(bytes.NewReader(b)). + WithProcessAnnotation(true). 
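+		// Parse only this check file (with annotation processing on) so the module's package path and METADATA comments can be rewritten in place below.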
+ Filtered([]string{filePath}, nil) + + if err != nil { + return err + } + + if len(result.Modules) != 1 { + return fmt.Errorf("expected 1 module, got %d", len(result.Modules)) + } + + module := maps.Values(result.ParsedModules())[0] + + module.Annotations = nil + + var moduleComments []*ast.Comment + + for _, c := range module.Comments { + if c.Location.Row > module.Package.Location.Row { + moduleComments = append(moduleComments, c) + } + } + + module.Comments = append(moduleComments, annotationComments...) + module.Package.Path = pkgPath + + formatted, err := format.Ast(module) + if err != nil { + return err + } + + f, err := os.OpenFile(filePath, os.O_WRONLY|os.O_TRUNC, 0644) + if err != nil { + return err + } + defer f.Close() + + f.Write(formatted) + + return nil +} + +func createRegoCheck(filePath string, pkgPath ast.Ref, annotationComments []*ast.Comment) error { + + f, err := os.Create(filePath) + if err != nil { + return err + } + + defer f.Close() + + module := &ast.Module{ + Package: &ast.Package{ + Path: pkgPath, + Location: &ast.Location{ + Row: len(annotationComments) + 1, + }, + }, + Comments: annotationComments, + } + + formatted, err := format.Ast(module) + if err != nil { + return err + } + + if _, err := f.Write(formatted); err != nil { + return err + } + + return nil +} + +func buildOutputPath(rule *scan.Rule) string { + + p, ok := fileMappings[rule.AVDID] + if !ok { + log.Fatal("File mapping not found", rule.AVDID) + } + return p + ".rego" +} + +func cleanExplanation(s string) []string { + lines := strings.Split(s, "\n") + + for i := 0; i < len(lines); i++ { + lines[i] = strings.TrimSpace(lines[i]) + // Trim tabs + lines[i] = strings.ReplaceAll(lines[i], "\t", " ") + + if lines[i] == "" { + lines = append(lines[:i], lines[i+1:]...) 
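+			// Removing the empty line shifts the remaining entries left, so step the index back to revisit this position.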
+ i-- + } + } + + return lines +} + +func buildPackageAnnotation(r *scan.Rule) []string { + + var lines []string + + var addLine = func(line string, ident int) { + lines = append(lines, strings.Repeat(" ", ident)+line) + } + + addLine("METADATA", 1) + + addLine("title: "+strings.ReplaceAll(r.Summary, "\n", " "), 1) // TODO + addLine("description: |", 1) + for _, line := range cleanExplanation(r.Explanation) { + addLine(line, 3) + } + addLine("scope: package", 1) + addLine("schemas:", 1) + + switch r.Provider { + case providers.KubernetesProvider: + addLine("- input: schema[\"kubernetes\"]", 3) + default: + addLine("- input: schema[\"cloud\"]", 3) + } + + if len(r.Links) > 0 { + addLine("related_resources:", 1) + for _, link := range r.Links { + if link == "" { + continue + } + addLine("- "+link, 3) + } + } + + addLine("custom:", 1) + addLine("id: "+r.AVDID, 3) + addLine("avd_id: "+r.AVDID, 3) + addLine("provider: "+string(r.Provider), 3) + addLine("service: "+r.Service, 3) + addLine("severity: "+string(r.Severity), 3) + addLine("short_code: "+r.ShortCode, 3) + addLine("recommended_action: "+r.Resolution, 3) + + generateFramework(r, &lines) + + addLine("input:", 3) + addLine("selector:", 5) + addLine("- type: "+string(r.Provider), 7) + addLine("subtypes:", 9) + addLine("- service: "+r.Service, 11) + addLine("provider: "+string(r.Provider), 13) + + if r.Terraform != nil { + addLine("terraform:", 3) + generateEngineMetadata(r, "tf", r.Terraform, &lines) + } + + if r.CloudFormation != nil { + addLine("cloudformation:", 3) + generateEngineMetadata(r, "cf", r.CloudFormation, &lines) + } + + return lines +} + +func generateFramework(r *scan.Rule, lines *[]string) { + if _, ok := r.Frameworks[framework.Default]; ok && len(r.Frameworks) == 1 { + return + } + + if len(r.Frameworks) > 0 { + + *lines = append(*lines, strings.Repeat(" ", 3)+"frameworks:") + for f, versions := range r.Frameworks { + if f == framework.Default { + continue + } + + *lines = append(*lines, strings.Repeat(" ", 5)+string(f)+":") + for _, version := range versions { + *lines = append(*lines, strings.Repeat(" ", 7)+"- \""+version+"\"") + } + } + } +} + +func generateEngineMetadata(r *scan.Rule, typ string, meta *scan.EngineMetadata, lines *[]string) { + if meta == nil { + return + } + + if len(meta.Links) > 0 { + *lines = append(*lines, strings.Repeat(" ", 5)+"links:") + for _, link := range meta.Links { + if link == "" { + continue + } + *lines = append(*lines, strings.Repeat(" ", 7)+"- "+link) + } + } + + outputPath := buildOutputPath(r) + examplePath := removeExtension(outputPath) + "." 
+ typ + ".go" + + if len(meta.GoodExamples) > 0 { + *lines = append(*lines, strings.Repeat(" ", 5)+"good_examples: "+examplePath) + } + + if len(meta.BadExamples) > 0 { + *lines = append(*lines, strings.Repeat(" ", 5)+"bad_examples: "+examplePath) + } + + // TODO: support for remidantion markdown +} + +func removeExtension(s string) string { + return s[0 : len(s)-len(filepath.Ext(s))] +} + +func buildRegoPackage(r *scan.Rule) string { + id := strings.SplitN(r.AVDID, "-", 3) + service := strings.ReplaceAll(r.Service, "-", "") + switch r.Provider { + case providers.KubernetesProvider: + return strings.Join([]string{"data", "builtin", "kubernetes", id[1] + id[2]}, ".") + default: + return strings.Join([]string{"data", "builtin", string(r.Provider), service, string(r.Provider) + id[2]}, ".") + } +} diff --git a/lib/datetime.rego b/lib/datetime.rego new file mode 100644 index 00000000..b7b17b77 --- /dev/null +++ b/lib/datetime.rego @@ -0,0 +1,15 @@ +package lib.datetime + +import rego.v1 + +ns_in_day := 86400000000000 + +zero_time_string := "0001-01-01T00:00:00Z" + +time_is_never(value) := time.parse_rfc3339_ns(value) == 0 # TODO: rego doesn't parse zero time + +time_diff_gt_days(value, days) := (time.now_ns() - time.parse_rfc3339_ns(value)) > days_to_ns(days) + +time_diff_lt_days(value, days) := (time.now_ns() - time.parse_rfc3339_ns(value)) < days_to_ns(days) + +days_to_ns(days) := days * ns_in_day diff --git a/lib/iam.rego b/lib/iam.rego new file mode 100644 index 00000000..c96caa60 --- /dev/null +++ b/lib/iam.rego @@ -0,0 +1,24 @@ +package lib.iam + +import rego.v1 + +import data.lib.datetime + +is_user_logged_in(user) if { + # user.lastaccess.is_resolvable + not datetime.time_is_never(user.lastaccess.value) +} + +user_has_mfa_devices(user) if count(user.mfadevices) > 0 + +user_is_inactive(user, days) if { + is_user_logged_in(user) + datetime.time_diff_gt_days(user.lastaccess.value, days) +} + +key_is_unused(key, days) if { + key.active.value + datetime.time_diff_gt_days(key.lastaccess.value, days) +} + +is_root_user(user) := user.name.value == "root" diff --git a/lib/test.rego b/lib/test.rego new file mode 100644 index 00000000..b660e389 --- /dev/null +++ b/lib/test.rego @@ -0,0 +1,41 @@ +package lib.test + +import rego.v1 + +assert_empty(v) if { + not assert_not_empty(v) +} + +assert_not_empty(v) if { + count(v) > 0 + trace_and_print(sprintf("assert_not_empty:\n %v", [v])) +} + +# TODO: swap arguments +assert_equal_message(results, expected) if { + assert_count(results, 1) # TODO: support multiple messages? + not _assert_equal_message(results, expected) +} + +# TODO: swap arguments +_assert_equal_message(results, expected) if { + msg := [res.msg | some res in results][0] + msg != expected # TODO: satisfy this + trace_and_print(sprintf("assert_equal_message:\n Got %q\n Expected %q", [msg, expected])) +} + +# TODO: swap arguments +assert_count(results, expected) if { + not _assert_count(results, expected) +} + +# TODO: swap arguments +_assert_count(results, expected) if { + count(results) != expected + trace_and_print(sprintf("assert_count:\n Got %v\n Expected %v", [count(results), expected])) +} + +trace_and_print(v) if { + trace(v) + print(v) +}
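+
+# Illustrative usage from a check test (mirrors the assertions in the check tests above;
+# the input shape follows the cloud schema used throughout this change):
+#
+#   r := check.deny with input as {"aws": {"accessanalyzer": {"analyzers": [{"active": {"value": false}}]}}}
+#   test.assert_equal_message(r, "Access Analyzer is not enabled.")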